Last active
August 7, 2018 04:50
-
-
Save jhodges10/faef2ab531fe92316d892f76c42dd49c to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import os | |
import sys | |
import time | |
import json | |
from pprint import pprint | |
from json import JSONDecodeError | |
# Root folder holding the collected Dash network-stats JSON reports.
# Raw string prevents "\h" / "\D" being parsed as escape sequences
# (a DeprecationWarning today, a SyntaxError in future Python versions).
data_dir = os.path.abspath(r"G:\hyperscratch\Dash Network Stats")
def process_network_stats(file_name):
    """Load one stats report from data_dir and return its masternode count.

    Returns 0 when the file is not valid JSON (a blank measurement),
    otherwise whatever parse_stats extracts from it (possibly None).
    NOTE(review): the 0-vs-None distinction matters to the caller, which
    only tries a fallback file when it sees None -- confirm intended.
    """
    path = os.path.join(data_dir, file_name)
    with open(path, 'r') as fh:
        try:
            stats = json.load(fh)
        except JSONDecodeError:
            print("Probably a blank measurement")
            return 0
    return parse_stats(stats)
def parse_stats(network_stats_dict):
    """Extract a masternode count from a parsed network-stats dict.

    Prefers ``raw.mn_count_enabled`` and falls back to ``raw.mn_count``.
    Returns None when the dict is empty or the expected keys are absent.

    BUG FIX: the original ``except KeyError`` handler re-indexed
    ``network_stats_dict['raw']``, so a dict without a 'raw' key raised
    an uncaught KeyError from inside the handler (handler exceptions are
    not caught by the same try's ``except Exception``).
    """
    if not network_stats_dict:
        return None
    raw = network_stats_dict.get('raw', {})
    # Prefer the enabled-only count; fall back to the total count;
    # None when neither key exists.
    return raw.get('mn_count_enabled', raw.get('mn_count'))
def date_parser(filename_date):
    """Return the date token from a filename shaped like 'report-<date>-<rest>'.

    Raises IndexError when the 'report-' prefix is absent (unchanged
    from the original behavior).
    """
    tail = str(filename_date).split('report-')[1]
    date_token = tail.split("-")[0]
    return date_token
def parse_directory(directory):
    """Scan *directory* of report files and build {date: mn_count}.

    Keeps one measurement per calendar day. When a day's first file
    yields no count (None), a file 72 entries later is tried instead
    (presumably ~6 hours at 5-minute sampling -- TODO confirm cadence).

    BUG FIX: the original ignored its ``directory`` parameter and always
    listed the module-level ``data_dir``. NOTE(review):
    ``process_network_stats`` still joins file names against ``data_dir``,
    so a *directory* other than ``data_dir`` only lists correctly --
    confirm callers before passing anything else.
    """
    files_list = os.listdir(directory)
    days_processed_dict = {}
    dates_list = []
    alt_data = 0  # count of days filled from the fallback file
    for count, file_name in enumerate(files_list):
        # Parse date from filename (could do regex but I'm lazy)
        date = date_parser(file_name)
        # Only the first usable measurement per day is kept.
        if date in dates_list:
            continue
        mn_count_enabled = process_network_stats(file_name)
        if mn_count_enabled is None:
            # Check another file from the same day but 6 hours later.
            try:
                new_fn = files_list[count + 72]
                days_processed_dict[date] = process_network_stats(new_fn)
                print("Saved alt data for {}".format(date))
                alt_data += 1
            except Exception as e:
                print("Questionable algo used")
                print(e)
                continue
        else:
            # Reuse the already-parsed count instead of re-reading the
            # file (the original called process_network_stats twice).
            days_processed_dict[date] = mn_count_enabled
            print("Saved data for {}".format(date))
        print("Alternate data used: {}".format(alt_data))
        dates_list.append(date)
    return days_processed_dict
if __name__ == "__main__":
    # Take the directory from the first CLI argument when given,
    # otherwise warn and fall back to the hard-coded default.
    if len(sys.argv) > 1:
        directory = sys.argv[1]
    else:
        print("You can pass a directory as python parse_mn_history.py [dir]")
        print("Pausing for 4 seconds to let that sink in.")
        print("Using default specified dir")
        time.sleep(4)
        directory = data_dir
    mn_count_historical = parse_directory(directory)
    print(mn_count_historical)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment.