@portante
Created February 15, 2018 20:22
A simple systemd journal client that reads *all* messages and, over each fixed data-size interval, calculates rates both for the "LogSmasher" messages it is looking for and for all of the ignored messages, so the overall journal rate is always visible.
#!/usr/bin/env python
import sys
import os
import time
import datetime
from systemd import journal
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
j = journal.Reader()
# the fileno() method ends up creating the inotify file
# descriptor which in turn prevents this client from leaking
# open journal log files.
j.fileno()
j.seek_tail()
j.get_previous()
tot_count = 0
tot_size = 0
tot_ign_count = 0
tot_ign_size = 0
rpt_count = 0
rpt_size = 0
rpt_ign_count = 0
rpt_ign_size = 0
MB = 1024 * 1024
REPORT_TARGET = 1 * MB
rpt_time = start = time.time()
waits = 0
while True:
    for entry in j:
        msg = entry['MESSAGE']
        msg_len = len(msg)
        if msg.startswith('LogSmasher for OpenShift generated at:'):
            # Track all messages we are interested in
            rpt_count += 1
            rpt_size += msg_len
        else:
            # Track all messages we are ignoring (so we always see
            # the overall rate)
            rpt_ign_count += 1
            rpt_ign_size += msg_len
        if (rpt_size + rpt_ign_size) >= REPORT_TARGET:
            now = time.time()
            source_realtime_timestamp = entry['_SOURCE_REALTIME_TIMESTAMP']
            #realtime_timestamp = entry['__REALTIME_TIMESTAMP']
            #print datetime.datetime.fromtimestamp(now), realtime_timestamp, source_realtime_timestamp
            # Update the overall rate data from this report interval
            tot_count += rpt_count
            tot_size += rpt_size
            tot_ign_count += rpt_ign_count
            tot_ign_size += rpt_ign_size
            interval = now - rpt_time
            duration = now - start
            # Calculate the size rates for the report interval and for
            # the entire duration of the test, including ignored messages.
            # Divide by float(MB) so Python 2 integer division does not
            # truncate the sizes to whole megabytes.
            rpt_size_r = (rpt_size / float(MB)) / interval
            rpt_ign_size_r = (rpt_ign_size / float(MB)) / interval
            tot_size_r = (tot_size / float(MB)) / duration
            tot_ign_size_r = (tot_ign_size / float(MB)) / duration
            # Calculate the msg rates for the report interval and for
            # the entire duration of the test, including ignored messages.
            rpt_rate = rpt_count / interval
            rpt_ign_rate = rpt_ign_count / interval
            tot_rate = tot_count / duration
            tot_ign_rate = tot_ign_count / duration
            # The last two values emitted are the difference between the
            # timestamp of "now" and the source timestamp in the record,
            # which shows how far behind this reader is from the source of
            # the logs, and the number of times the journal API wait() was
            # called, which indicates whether the reader is keeping up with
            # the logs flowing through the journal.
            print "interval %.3f MB/sec %.3f/sec (ignored %.3f MB/sec %.3f/sec), " \
                "overall %.3f MB/sec %.3f/sec (ignored %.3f MB/sec %.3f/sec) %s %d" % (
                rpt_size_r, rpt_rate, rpt_ign_size_r, rpt_ign_rate,
                tot_size_r, tot_rate, tot_ign_size_r, tot_ign_rate,
                datetime.datetime.fromtimestamp(now) - source_realtime_timestamp, waits)
            rpt_count = rpt_size = rpt_ign_count = rpt_ign_size = 0
            rpt_time = now
    j.wait()
    waits += 1
#!/usr/bin/env python
import os
import sys
import time
import random
def main():
    sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
    count = 0
    while True:
        hash = random.getrandbits(128)
        randombits = random.getrandbits(1138)
        # LogSmasher for OpenShift generated at: Thu Feb 15 15:00:29 2018 counter: 155465831 hash: 301537333606896094207606455646972209816 data: 304325040...810513516
        print 'LogSmasher for OpenShift generated at:', time.ctime(), ' counter:', count, ' hash:', hash, ' data:', randombits
        count = count + 1
        # 1250 events/second
        time.sleep(.0008)

if __name__ == "__main__":
    main()
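
The generator above writes its LogSmasher lines to stdout, so they typically only land in the journal when the script runs as a systemd unit or is piped through systemd-cat. As an illustration only (not part of the original gist), here is a minimal sketch that emits the same style of message directly into the journal using python-systemd's journal.send(); the message prefix matches what the reader script filters on, and the sleep interval mirrors the generator's target rate.

#!/usr/bin/env python
# Sketch: emit LogSmasher-style messages straight into the journal,
# bypassing stdout, via python-systemd's journal.send().
import time
import random
from systemd import journal

count = 0
while True:
    journal.send('LogSmasher for OpenShift generated at: %s  counter: %d  hash: %d' % (
        time.ctime(), count, random.getrandbits(128)))
    count += 1
    # Roughly 1250 events/second, matching the generator above.
    time.sleep(.0008)

Either approach produces journal records whose MESSAGE field starts with the prefix the reader's startswith() check looks for.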