NWS weather forecast, hourly for next 7 days
#!/usr/bin/python3
import datetime
import hashlib
import itertools
import os
import socket

import dateutil.parser
import lxml.etree
import pytz
import requests
# some docs
# https://graphical.weather.gov/xml/rest.php
# https://graphical.weather.gov/xml/DWMLgen/schema/latest_DWML.txt
# https://graphical.weather.gov/xml/docs/elementInputNames.php

# (query-string input name, name as it appears in the DWML <name> element);
# commented-out entries are other elements the NDFD service offers
FEATURES = [
    # ('mint', 'Daily Minimum Temperature'),
    # ('maxt', 'Daily Maximum Temperature'),
    # ('appt', 'Apparent Temperature'),
    ('temp', 'Temperature'),
    ('dew', 'Dew Point Temperature'),
    ('rh', 'Relative Humidity'),
    # ('minrh', 'Daily Minimum Relative Humidity'),
    # ('maxrh', 'Daily Maximum Relative Humidity'),
    # ('td_r', 'Real-time Mesoscale Analysis Dewpoint Temperature'),
    # ('temp_r', 'Real-time Mesoscale Analysis Temperature'),
    # ('precipa_r', 'RTMA Liquid Precipitation Amount'),
    # ('qpf', 'Liquid Precipitation Amount'),
    # ('pop12', '12 Hourly Probability of Precipitation'),
    ('wspd', 'Wind Speed'),
    ('wgust', 'Wind Speed Gust'),
    ('wdir', 'Wind Direction'),
    # ('wx', 'Weather'),  # not a number
]
# DWML parameter element tags that parse_dwml() searches for
FEATURE_TAGS = [
    'temperature',
    'humidity',
    'wind-speed',
    'direction',
    'precipitation',
    'probability-of-precipitation',
]
def weathergov_api_request():
    """Fetch the NDFD time-series product for the configured point; return raw DWML bytes."""
    base_url = "https://graphical.weather.gov/xml/sample_products/browser_interface/ndfdXMLclient.php"
    features = [x[0] for x in FEATURES]
    sdate = datetime.datetime.utcnow().date()
    edate = sdate + datetime.timedelta(7)
    params = dict(
        lat=os.getenv('LAT'),
        lon=os.getenv('LNG'),
        product='time-series',
        # begin/end left empty; the commented lines would limit the window to the next 7 days
        begin="",
        end="",
        #begin=sdate.isoformat(),
        #end=edate.isoformat(),
    )
    # each requested element is passed as its own parameter, e.g. temp=temp
    for feature in features:
        params[feature] = feature
    resp = requests.get(
        base_url,
        params=params,
        headers={"User-Agent": "Personal Weather Script ([email protected])"},
        timeout=60,
    )
    resp.raise_for_status()
    return resp.content
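
# parse_dwml() below walks the DWML document returned by the request above.
# Abridged sketch of the relevant structure (illustrative values, not actual
# API output):
#
#   <dwml>
#     <data>
#       <time-layout>
#         <layout-key>k-p3h-n36-1</layout-key>
#         <start-valid-time>2022-05-18T14:00:00-04:00</start-valid-time>
#         ...
#       </time-layout>
#       <parameters>
#         <temperature type="hourly" units="Fahrenheit" time-layout="k-p3h-n36-1">
#           <name>Temperature</name>
#           <value>54</value>
#           ...
#         </temperature>
#         ...
#       </parameters>
#     </data>
#   </dwml>
#
# Each parameter element points at a time layout via its time-layout
# attribute; its values line up positionally with that layout's start times.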
def parse_dwml(txt):
    """Parse DWML bytes into [(name, units, [(datetime, value), ...]), ...]."""
    #print(txt)
    root = lxml.etree.fromstring(txt)
    # load the time layouts: layout-key -> list of start times
    time_layouts = {}
    for tl in root.xpath('//time-layout'):
        key = tl.xpath('layout-key')[0].text
        times = [
            dateutil.parser.parse(x.text)
            for x in tl.xpath('start-valid-time')
            if x.text
        ]
        time_layouts[key] = times
    #print(time_layouts)
    # load each feature and pair its values with its time layout
    features = []
    for feat in itertools.chain(*(root.xpath('//' + tag) for tag in FEATURE_TAGS)):
        name = feat.xpath('name')[0].text
        values = [x.text for x in feat.xpath('value')]
        tl_key = feat.get('time-layout')
        units = feat.get('units')
        time_layout = time_layouts[tl_key]
        time_values = list(zip(time_layout, values))
        time_values.sort()
        features.append((name, units, time_values))
    # keep features in the same order as FEATURES; unknown names sort last
    fnames = [n for (k, n) in FEATURES]
    features.sort(key=lambda x: fnames.index(x[0]) if x[0] in fnames else 1000)
    return features
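
# parse_dwml() returns something shaped like this (illustrative values):
#   [('Temperature', 'Fahrenheit', [(datetime(2022, 5, 18, 14, 0, tzinfo=...), '54'), ...]),
#    ('Relative Humidity', 'percent', [...]),
#    ...]
# Values stay as strings; units come straight from each element's "units" attribute.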
def tabulate(features):
    # features => [(name, unit, time_values), ...]
    # time_values => [(dt, val), (dt, val), ...]
    # find min and max times across all features
    stime = min(dt for _, __, tvs in features for (dt, val) in tvs)
    etime = max(dt for _, __, tvs in features for (dt, val) in tvs)
    # for each hour, carry forward the most recent value of each feature
    out = []
    for hour_idx in range(0, int((etime - stime).total_seconds() // 3600 + 2)):
        hour = stime + datetime.timedelta(hours=hour_idx)
        hour_data = [hour]
        for name, unit, time_vals in features:
            last_val = None
            for dt, val in time_vals:
                if dt > hour:
                    break
                last_val = val
            hour_data.append(last_val)
        out.append(hour_data)
    rev_features = {y: x for (x, y) in FEATURES}
    # render as TSV, repeating the header at each local midnight
    header = ['hour'] + [rev_features.get(name, name) for name, _, __ in features]
    tsv = [header]
    for row in out:
        if row[0].hour == 0:
            tsv.append(header)
        tsv.append(row)
    return '\n'.join('\t'.join(str(x) for x in row) for row in tsv)
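
# tabulate() emits tab-separated rows: the first column is the hour, the rest
# follow the FEATURES order (short codes like "temp"/"dew" when the DWML name
# matches a FEATURES entry, the raw name otherwise), e.g. a header of
#   hour  temp  dew  rh  wspd  wgust  wdir
# Hours with no value yet for a feature print as "None".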
def dt2unix(dt):
    """Convert a (possibly tz-aware) datetime to integer Unix time."""
    epoch = datetime.datetime(1970, 1, 1, 0, 0, 0)
    if dt.tzinfo:
        epoch = epoch.replace(tzinfo=pytz.UTC).astimezone(dt.tzinfo)
    return int((dt - epoch).total_seconds())
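
# quick sanity check:
#   dt2unix(datetime.datetime(2022, 5, 18, tzinfo=pytz.UTC)) == 1652832000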
def write_to_grafana(features):
    msg = []
    rev_features = {y: x for (x, y) in FEATURES}
    for name, units, tvals in features:
        sname = rev_features.get(name, 'unk_' + name)
        for dt, val in tvals:
            msg.append(f"nws.{sname} {val} {dt2unix(dt)}")
    msg.append("")  # trailing \n for good luck
    # write to the carbon cache directly over the plaintext protocol
    s = socket.socket()
    s.connect(('192.168.60.2', 2003))
    s.sendall('\n'.join(msg).encode('utf-8'))
    s.close()
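
# Each line follows Graphite's plaintext protocol, "<metric.path> <value> <unix-timestamp>",
# e.g. "nws.temp 54 1652832000" (illustrative value).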
def cached_call(key, fn, *args, **kwargs):
    """Call fn, caching its byte result in $CACHEDIR keyed by sha256(key).

    The cache never expires; delete the file to force a refetch.
    """
    cdir = os.getenv('CACHEDIR')
    if not cdir:
        return fn(*args, **kwargs)  # no cache
    ckey = os.path.join(cdir, hashlib.sha256(key.encode()).hexdigest())
    try:
        with open(ckey, 'rb') as inp:
            return inp.read()
    except Exception:
        res = fn(*args, **kwargs)
        try:
            with open(ckey, 'wb') as out:
                out.write(res)
        except Exception:
            pass
        return res
def main():
    txt = cached_call('weather', weathergov_api_request)
    features = parse_dwml(txt)
    write_to_grafana(features)
    print(tabulate(features))

if __name__ == "__main__":
    main()
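
# Example invocation (sketch; coordinates and paths are placeholders, and the
# Carbon host in write_to_grafana() is hard-coded, so point it at your own
# Graphite/Grafana setup or comment that call out):
#   LAT=40.78 LNG=-73.97 CACHEDIR=/tmp/nws-cache python3 nws_hourly.py   # (any filename)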