Skip to content

Instantly share code, notes, and snippets.

@JonathonReinhart
Created May 4, 2026 06:15
Show Gist options
  • Select an option

  • Save JonathonReinhart/745b6c29ade75e1b08783339db20297e to your computer and use it in GitHub Desktop.

Select an option

Save JonathonReinhart/745b6c29ade75e1b08783339db20297e to your computer and use it in GitHub Desktop.
This script fetches UEFI guids.csv files from multiple sources, then compares and merges them.
#!/usr/bin/env python3
"""
This script fetches UEFI guids.csv files from multiple sources, then compares and merges them.
"""
import csv
import io
import logging
import urllib.request
from typing import NewType, TypeAlias
from uuid import UUID
# A GUID database: mapping of GUID -> human-readable name.
GuidDb: TypeAlias = dict[UUID, str]
# Distinct type for URL strings, to keep signatures self-documenting.
Url = NewType("Url", str)
LOG = logging.getLogger(__name__)

# Sources of guids.csv databases to fetch, compare, and merge.
URLS: list[Url] = [
    # https://github.com/fwupd/fwupd/issues/5869
    Url("https://fwupd.org/lvfs/shards/export/csv"),
    Url(
        "https://raw.githubusercontent.com/LongSoft/UEFITool/refs/heads/new_engine/common/guids.csv"
    ),
    Url(
        "https://raw.githubusercontent.com/Sentinel-One/efi_fuzz/refs/heads/master/guids.csv"
    ),
]
def fetch_uefi_guids(url: str) -> GuidDb:
    """Fetch a guids.csv file from *url* and parse it into a GuidDb.

    Each CSV row is expected to be ``guid,name``; blank rows are skipped.
    If the same GUID appears more than once, the last occurrence wins.

    Args:
        url: The URL of a guids.csv file to download.

    Returns:
        Mapping of GUID -> name.

    Raises:
        urllib.error.URLError: If the URL cannot be fetched.
        ValueError: If a row's first column is not a valid GUID.
    """
    result: GuidDb = {}
    with urllib.request.urlopen(url) as http_response:
        # Decode as UTF-8 rather than strict ASCII: the fetched databases
        # may contain non-ASCII characters in names, which would raise
        # UnicodeDecodeError under ascii decoding.
        text_reader = io.TextIOWrapper(http_response, encoding="utf-8")
        for row in csv.reader(text_reader):
            if not row:
                # Tolerate blank lines (e.g. a trailing newline).
                continue
            guid = UUID(row[0])
            name = row[1]
            result[guid] = name
    return result
def main() -> None:
    """Fetch each guids.csv source, report their differences, and write merged.csv."""
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s - %(levelname)s - %(message)s",
    )

    # Fetch all guids.csv databases
    url_dbs: dict[Url, GuidDb] = {}
    for url in URLS:
        LOG.info(f"Fetching {url}")
        guids = fetch_uefi_guids(url)
        LOG.info(f"Got {url} : {len(guids)} entries")
        url_dbs[url] = guids
    del guids, url  # don't let loop variables leak into the code below

    # Compare: report, for each source, how it differs from the union of
    # all the *other* sources.
    for url, url_db in url_dbs.items():
        # Merge GUID dbs for all _other_ URLs
        other_dbs = (db for db in url_dbs.values() if db is not url_db)
        others_merged = set().union(*(db.keys() for db in other_dbs))
        unique = url_db.keys() - others_merged  # GUIDs unique to this db
        missing = others_merged - url_db.keys()  # GUIDs missing in this db
        # TODO: Verify GUIDs have matching names, too.
        print(f"* {url}")
        print(f" * Total: {len(url_db)}")
        print(f" * Unique: {len(unique)}")
        print(f" * Missing: {len(missing)}")

    # Merge: for conflicting names, later sources (in URLS order) win.
    merged_db: GuidDb = {}
    for db in url_dbs.values():
        merged_db |= db
    LOG.info(f"Merged result has {len(merged_db)} entries")

    # Write. newline="" is required by the csv module so the writer controls
    # line endings (avoids \r\r\n on Windows); explicit utf-8 so any
    # non-ASCII names round-trip regardless of locale.
    out_filename = "merged.csv"
    with open(out_filename, "w", newline="", encoding="utf-8") as f:
        csv_writer = csv.writer(f)
        for guid, name in merged_db.items():
            csv_writer.writerow([guid, name])
    LOG.info(f"Wrote {out_filename}")


if __name__ == "__main__":
    main()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment