This exporter makes all of your AIDA64 sensor data available to a Prometheus server so you can chart it with Grafana using this dashboard: https://grafana.com/grafana/dashboards/11339
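To try it out: install the prometheus_client package, run the script with Python 3 on the machine where AIDA64 is writing its sensor values to shared memory (the AIDA64_SensorValues mapping this script reads), and check the metrics endpoint on the default port 9091. The filename below is just a placeholder for wherever you saved this gist; the full exporter source follows.

pip install prometheus_client
python aida64_exporter.py
curl http://localhost:9091/metrics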
import re
import mmap
import typing
import hashlib
import logging
import argparse
import datetime
import ipaddress
import dataclasses
import xml.etree.ElementTree as ET

import prometheus_client
from prometheus_client import core
from prometheus_client import exposition
PREFIX = 'aida'
IGNORED_IDS = {'sdate', 'stime', 'stimens', 'suptimens', 'smemspeed',
               'sgpu1bustyp', 'sgpu1perfcap', 'sgpu2bustyp', 'sgpu2perfcap'}
# Rewrites raw AIDA64 sensor ids into `metric-labelvalue-...` form; everything
# after the first `-` in the rewritten id becomes a label value for the label
# names in the third tuple element (see Measurement.__post_init__).
ID_LABEL_REWRITES = [
    (re.compile(r'^(t)([a-z]+)$'), r'temp-\2', ['type']),
    (re.compile(r'^(vdimm)(\w+)$'), r'\1-\2', ['bank']),
    (re.compile(r'^(snic)(\d+)(tot)(\w+)$'), r'\1\3-\2-\4',
     ['nic', 'direction']),
    (re.compile(r'^(snic)(\d+)(\w+)(rate)$'), r'\1\4-\2-\3',
     ['nic', 'direction']),
    (re.compile(r'^(snic)(\d+)(connspd)$'), r'\1\3-\2', ['nic']),
    (re.compile(r'^(scpu)(\d+)(\w+)$'), r'\1c\3-\2', ['core']),
    (re.compile(r'^(tcc|scc)'), r'\1', ['cpu', 'core']),
    (re.compile(r'^(sdsk)(\d+)(\w+)$'), r'\1-\2-\3', ['disk', 'type']),
    (re.compile(r'^(tpcie)(\d+)$'), r'\1-\2', ['slot']),
    (re.compile(r'^t(temp)(\d+)$'), r'\1-\2', ['sensor']),
    (re.compile(r'^(thdd)(\d+)(ts)(\d+)$'), r'\1\3-\2-\4', ['disk', 'sensor']),
    (re.compile(r'^(thdd)(\d+)$'), r'\1-\2', ['disk']),
    (re.compile(r'^(fcha)(\d+)$'), r'\1-\2', ['fan']),
    (re.compile(r'^s(\w*mem)(uti)$'), r'mem\2-\1', ['type']),
    (re.compile(r'^s(\w*mem)$'), r'mem-\1', ['type']),
    (re.compile(r'^(sdrv)(\w+)(free|used)(spc)$'),
     r'\1\4-\2-\3', ['drive', 'type']),
    (re.compile(r'^(sdrv)(\w+)(spc|uti)$'),
     r'\1\3-\2', ['drive', 'type']),
    # (re.compile(r'^(sgpu)(\d)(\w+)(uti)$'),
    #  r'\1\4-\2-\3', ['gpu', 'type']),
    (re.compile(r'^([stfdvp]gpu)(\d+)(\w*?)(uti|)$'), r'\1\4-\2-\3',
     ['gpu', 'type']),
    (re.compile(r'^(f)(\w+)$'), r'fan-\2', ['type']),
    (re.compile(r'^(v)(\w+)$'), r'volt-\2', ['type']),
    (re.compile(r'^(p)(\w+)$'), r'pwr-\2', ['type']),
    (re.compile(r'^(s)(\w+)clk$'), r'clk-\2', ['type']),
]
@dataclasses.dataclass
class Measurement:
    # Nearly all metrics are gauges, but a few special ones are better
    # suited as counters or histograms
    MetricFamily = core.GaugeMetricFamily

    group: str
    id_: str
    documentation: str
    raw_value: str
    values: typing.List[float] = dataclasses.field(default_factory=list)
    labels: typing.List[str] = dataclasses.field(default_factory=list)
    label_values: typing.List[typing.List[str]] = dataclasses.field(
        default_factory=list)

    @classmethod
    def from_element(cls, elem):
        group = elem.tag
        id_ = elem.find('id').text.lower()
        documentation = elem.find('label').text
        raw_value = elem.find('value').text
        return Measurement(group=group, id_=id_,
                           documentation=documentation, raw_value=raw_value)
    def __post_init__(self):
        # Bail out early if this id is ignored
        if self.id_ in IGNORED_IDS:
            return

        for search, replace, labels in ID_LABEL_REWRITES:
            self.id_, changes = search.subn(replace, self.id_)
            if not changes:
                continue

            self.id_, *label_values = self.id_.split('-')
            self.label_values = [label_values + [self.documentation]]
            self.labels = labels + ['documentation']

        # Parse the value using `self.parse_<id>` if it exists, falling back
        # to the generic `self.parse`
        parser_name = 'parse_' + self.id_.split('-')[0]
        parser = getattr(self, parser_name, self.parse)
        logging.debug('Parsing %r using %r', self, parser)
        parser()
    @property
    def key(self):
        # Build the metric name and de-duplicate repeated parts
        key = '_'.join((PREFIX, self.group, self.id_))
        key_parts = []
        seen = set()
        for part in key.split('_'):
            if part in seen:
                continue
            else:
                key_parts.append(part)
                seen.add(part)

        return '_'.join(key_parts)

    def get_metric(self):
        return self.MetricFamily(self.key, self.documentation,
                                 labels=self.labels)
    def parse(self):
        try:
            self.values.append(float(self.raw_value))
        except (TypeError, ValueError):
            logging.warning('Incorrect value for %r', self)

    def parse_suptime(self):
        self.MetricFamily = core.CounterMetricFamily
        parts = self.raw_value.split()
        if parts[1:]:
            # days is in `123d` format, this strips the `d`
            days = int(parts[0].rstrip('d'))
        else:
            days = 0

        hours, minutes, seconds = map(int, parts[-1].split(':'))
        uptime = datetime.timedelta(days=days, hours=hours, minutes=minutes,
                                    seconds=seconds)
        self.values.append(uptime.total_seconds())

    def parse_sdramfsb(self):
        numerator, denominator = map(int, self.raw_value.split(':'))
        self.values.append(numerator / denominator)

    def parse_smemtim(self):
        self.labels = ['CL', 'tRCD', 'tRP', 'tRAS', 'CMD']
        self.label_values = [re.split(r'[^\d]', self.raw_value)]

    def parse_sdeskres(self):
        self.labels = ['dimension']
        self.label_values = [['width'], ['height']]
        width, height = self.raw_value.split(' x ')
        self.values = [int(width), int(height)]
    def parse_ssmasta(self):
        if self.raw_value == 'OK':
            value = 0
        else:
            # Convert the SMART status into a stable, repeatable number.
            # Note: sha1 needs bytes, so the string is encoded first.
            digest = hashlib.sha1(self.raw_value.encode()).hexdigest()
            value = int(digest, 16) % (2 ** 31)

        self.values.append(value)

    def parse_sslista(self):
        if self.raw_value.lower() == 'disabled':
            self.values.append(0)
        else:
            self.values.append(1)

    def parse_spriipaddr(self):
        self.values.append(int(ipaddress.ip_address(self.raw_value)))

    parse_sextipaddr = parse_spriipaddr

    # AIDA64 uses strings instead of numbers for the battery in many cases
    def parse_sbatt(self):
        if self.raw_value.lower().isdigit():
            self.values.append(int(self.raw_value))
class AidaCollector:
    def __init__(self, address='AIDA64_SensorValues'):
        self.length = None
        self.address = address

    def read(self):
        # Make sure to only run the idiocy below once
        if self.length:
            data = mmap.mmap(0, self.length, self.address).read()
        else:
            # Don't ask... bloody Windows gives permission denied if you read
            # beyond the mmap size but doesn't give a method to find out how
            # large it is
            for length in range(32768, 0, -1024):
                try:
                    self.length = length
                    data = mmap.mmap(0, self.length, self.address).read()
                    break
                except OSError:
                    pass

        return data.strip(b'\x00')
    def collect(self):
        metrics = dict()
        for element in ET.fromstring('<root>%s</root>' % self.read()):
            measurement = Measurement.from_element(element)
            if measurement.id_ in IGNORED_IDS:
                logging.debug('Ignoring %s', measurement)
                continue

            if not measurement.values:
                continue

            if measurement.key not in metrics:
                metrics[measurement.key] = measurement.get_metric()

            logging.info(measurement)
            if measurement.labels:
                for label_values, value in zip(measurement.label_values,
                                               measurement.values):
                    metrics[measurement.key].add_metric(label_values, value)
            else:
                for value in measurement.values:
                    metrics[measurement.key].add_metric([], value)

        yield from metrics.values()
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('-v', '--verbose', action='count', default=0)
    parser.add_argument('-q', '--quiet', action='count', default=0)
    parser.add_argument('-p', '--port', type=int, default=9091)
    args = parser.parse_args()

    # More `-v` flags mean more output, more `-q` flags mean less
    verbosity = args.verbose - args.quiet
    if verbosity >= 1:
        level = logging.DEBUG
    elif verbosity == 0:
        level = logging.INFO
    elif verbosity == -1:
        level = logging.WARNING
    else:
        level = logging.ERROR

    logging.basicConfig(level=level)

    registry = prometheus_client.CollectorRegistry()
    registry.register(AidaCollector())
    if level:
        # Collect once at startup so parse warnings show up immediately
        exposition.generate_latest(registry)

    MetricsHandler = prometheus_client.MetricsHandler.factory(registry)
    httpd = exposition.ThreadingWSGIServer(('', args.port), MetricsHandler)
    httpd.serve_forever()


if __name__ == '__main__':
    main()
An example Grafana panel definition (the "Disk Activity" graph) that charts the exporter's disk metrics:
{
  "aliasColors": {},
  "bars": false,
  "dashLength": 10,
  "dashes": false,
  "datasource": "Prometheus",
  "fill": 0,
  "fillGradient": 0,
  "gridPos": {
    "h": 9,
    "w": 12,
    "x": 12,
    "y": 27
  },
  "id": 8,
  "legend": {
    "alignAsTable": true,
    "avg": false,
    "current": false,
    "max": false,
    "min": false,
    "rightSide": true,
    "show": true,
    "total": false,
    "values": false
  },
  "lines": true,
  "linewidth": 1,
  "nullPointMode": "null",
  "options": {
    "dataLinks": []
  },
  "percentage": false,
  "pointradius": 2,
  "points": false,
  "renderer": "flot",
  "seriesOverrides": [
    {
      "alias": "/Activity$/",
      "yaxis": 2
    }
  ],
  "spaceLength": 10,
  "stack": false,
  "steppedLine": false,
  "targets": [
    {
      "expr": "aida_sys_sdsk{instance=~\"$instance\"}",
      "legendFormat": "{{instance}} {{documentation}}",
      "refId": "A"
    }
  ],
  "thresholds": [],
  "timeFrom": null,
  "timeRegions": [],
  "timeShift": null,
  "title": "Disk Activity",
  "tooltip": {
    "shared": true,
    "sort": 0,
    "value_type": "individual"
  },
  "type": "graph",
  "xaxis": {
    "buckets": null,
    "mode": "time",
    "name": null,
    "show": true,
    "values": []
  },
  "yaxes": [
    {
      "format": "MBs",
      "label": "",
      "logBase": 1,
      "max": null,
      "min": null,
      "show": true
    },
    {
      "format": "percent",
      "label": null,
      "logBase": 1,
      "max": "100",
      "min": "0",
      "show": true
    }
  ],
  "yaxis": {
    "align": false,
    "alignLevel": null
  }
}
Well... I think your write-up does a great job already :)
This exporter was meant to go together with Grafana using this dashboard: https://grafana.com/grafana/dashboards/11339
If you have Grafana running, you can probably get Prometheus running in a similarly easy way. If you're only running Prometheus right now, I suggest you add Grafana to the mix for pretty charts.
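In case it saves someone a lookup: a minimal Prometheus scrape config for this exporter could look roughly like the snippet below. The job name and interval are arbitrary, and localhost:9091 assumes Prometheus runs on the same machine as the exporter with the exporter's default port.

scrape_configs:
  - job_name: 'aida64'
    scrape_interval: 15s
    static_configs:
      - targets: ['localhost:9091']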
Yes, thanks! The Grafana dashboard website got me to this gist. :)
Thanks again!
Thanks for this!! This was exactly what I was looking for!
After hours of figuring out how to run Prometheus on Docker/Windows, I got it running.
Maybe you want to put this into your description so that people like me, who have never touched Prometheus, can get it running:
Create a prometheus.yml somewhere on your disk (see the example config after these steps).
Change the ip:port to match your system. For me, 127.0.0.1 did not work, so I used my machine's IP.
Then open a command prompt with WSL2/Docker running.
Change the path c:\tmp\prometheus.yml to the path of your prometheus.yml.
docker run -p 9090:9090 -v c:\tmp\prometheus.yml:/etc/prometheus/prometheus.yml --name=prometheus prom/prometheus --config.file=/etc/prometheus/prometheus.yml
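For reference, the prometheus.yml from the first step could look roughly like this; the target address is only an example and has to be one the container can actually reach, which is why 127.0.0.1 (the container itself) does not work. On Docker Desktop, host.docker.internal usually resolves to the Windows host as well.

global:
  scrape_interval: 15s
scrape_configs:
  - job_name: 'aida64'
    static_configs:
      # replace with your machine's IP (or host.docker.internal);
      # the exporter listens on port 9091 by default
      - targets: ['192.168.1.10:9091']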