Last active
September 19, 2025 07:04
-
-
Save oilbeater/19267d83a35f59ccdec924097fb0bf30 to your computer and use it in GitHub Desktop.
This script automatically fetches the CVE notes published by the Canonical team, which explain why a given CVE is still unfixed in Ubuntu.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env python3 | |
"""fetch_ubuntu_cve_notes.py | |
--------------------------------------------- | |
Given one or more CVE identifiers, download the corresponding | |
Ubuntu security advisory page (https://ubuntu.com/security/<CVE>) | |
and print the text that appears under the **Notes** heading. | |
Dependencies | |
------------ | |
```bash | |
pip install requests beautifulsoup4 | |
``` | |
Typical usage | |
------------- | |
```bash | |
# direct arguments | |
python fetch_ubuntu_cve_notes.py CVE-2016-2781 CVE-2023-0461
# with trivy | |
python3 fetch_ubuntu_cve_notes.py $(trivy image --quiet -f=json --scanners vuln kubeovn/kube-ovn:v1.14.0 | jq -r '.Results[].Vulnerabilities[]? | .VulnerabilityID' | sort -u) | |
``` | |
© 2025 – public domain / CC0. | |
""" | |
from __future__ import annotations | |
import argparse | |
import json | |
import sys | |
from pathlib import Path | |
from typing import Dict, List | |
import requests | |
from bs4 import BeautifulSoup | |
# --------------------------------------------------------------------------- | |
# Constants | |
# --------------------------------------------------------------------------- | |
# Browser-like User-Agent header; sites commonly block the default
# "python-requests/x.y" UA, so we mimic a desktop Chrome on Linux.
UA = (
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 "
    "(KHTML, like Gecko) Chrome/124.0 Safari/537.36"
)
# Advisory page template, e.g. https://ubuntu.com/security/CVE-2016-2781
BASE_URL = "https://ubuntu.com/security/{}"
# --------------------------------------------------------------------------- | |
# Low‑level helpers | |
# --------------------------------------------------------------------------- | |
class FetchError(RuntimeError):
    """Raised when the CVE page cannot be downloaded (network error or HTTP error status)."""
def fetch_html(url: str) -> str:
    """Download *url* and return its body decoded as UTF-8 text.

    Raises:
        FetchError: when the request fails or the server answers with a
            non-success HTTP status.
    """
    try:
        resp = requests.get(url, headers={"User-Agent": UA}, timeout=30)
        resp.raise_for_status()
    except requests.RequestException as exc:
        raise FetchError(f"Failed to fetch {url}: {exc}") from exc
    # Force UTF-8 decoding regardless of what the response headers claim.
    resp.encoding = "utf-8"
    return resp.text
def extract_notes(html: str) -> List[str]:
    """Return all text lines located under the **Notes** section.

    The parser stops at the next heading (e.g. `<h2>` or `<h3>`).
    Returns an empty list when the page has no Notes section.
    """
    soup = BeautifulSoup(html, "html.parser")
    # The canonical header on ubuntu.com is `<h2 id="notes">Notes</h2>`.
    header = soup.find(id="notes")
    if header is None:
        return []
    # Bug fix: the original returned the header's own text (a str, "Notes")
    # instead of the content below it, and the string "no notes" instead of
    # a list — contradicting both the `List[str]` annotation and the
    # embedded unit tests. Collect each sibling element's text until the
    # next heading, as the docstring documents.
    notes: List[str] = []
    for sibling in header.find_next_siblings():
        if sibling.name in {"h1", "h2", "h3", "h4", "h5", "h6"}:
            break
        text = sibling.get_text(separator=" ", strip=True)
        if text:
            notes.append(text)
    return notes
def fetch_cve_notes(cve: str) -> List[str]:
    """Download the Ubuntu advisory page for *cve* and extract its Notes."""
    url = BASE_URL.format(cve)
    html = fetch_html(url)
    return extract_notes(html)
# --------------------------------------------------------------------------- | |
# CLI | |
# --------------------------------------------------------------------------- | |
def _build_parser() -> argparse.ArgumentParser: | |
p = argparse.ArgumentParser( | |
prog="fetch_ubuntu_cve_notes.py", | |
description="Fetch the 'Notes' section from Ubuntu CVE advisories.", | |
formatter_class=argparse.RawTextHelpFormatter, | |
) | |
p.add_argument( | |
"cves", | |
nargs="*", | |
help="CVE identifiers (e.g. CVE-2016-2781). If omitted, they are read from standard input.", | |
) | |
p.add_argument( | |
"-o", | |
"--out", | |
type=Path, | |
metavar="FILE", | |
help="Write collected notes to FILE in JSON format.", | |
) | |
p.add_argument( | |
"-q", | |
"--quiet", | |
action="store_true", | |
help="Suppress human‑readable output; useful together with --out.", | |
) | |
return p | |
def _parse_args(argv: List[str] | None = None):
    """Parse *argv* (defaults to ``sys.argv[1:]``) into a namespace.

    When no CVE ids are given on the command line they are read from
    standard input, one per line. Exits with status 2 when stdin is an
    interactive terminal and nothing was provided.
    """
    parser = _build_parser()
    args = parser.parse_args(argv)
    # If CVEs were not provided on the CLI, try to read them from stdin.
    if not args.cves:
        if sys.stdin.isatty():
            # Interactive terminal with nothing piped in: nothing to do.
            parser.print_help(sys.stderr)
            parser.exit(2, "\nerror: at least one CVE identifier is required (provide as arguments or via stdin)\n")
        args.cves = [line.strip() for line in sys.stdin if line.strip()]
        if not args.cves:
            # Bug fix: this line read `arg.cves` (undefined name -> NameError
            # whenever piped stdin was empty). Fall back to a sample CVE so
            # the script still demonstrates itself.
            args.cves = ["CVE-2025-0167"]
    return args
def main(argv: List[str] | None = None) -> None:  # pragma: no cover (invoked via __main__)
    """CLI entry point: fetch Notes for each CVE, print them, optionally dump JSON.

    Exits 0 when at least one CVE yielded non-empty notes, 1 otherwise.
    """
    args = _parse_args(argv)
    results: Dict[str, List[str]] = {}
    for cve in args.cves:
        try:
            notes = fetch_cve_notes(cve)
        except FetchError as exc:
            # Report the failure but keep processing the remaining CVEs.
            print(exc, file=sys.stderr)
            continue
        results[cve] = notes
        if args.quiet:
            continue
        if notes:
            print(f"{cve} notes: {notes}")
        else:
            print(f"{cve}: <no notes section found>")
        print()
    if args.out:
        try:
            args.out.write_text(json.dumps(results, indent=2, ensure_ascii=False), encoding="utf-8")
        except OSError as exc:
            sys.exit(f"Could not write to {args.out}: {exc}")
    sys.exit(0 if any(results.values()) else 1)
# --------------------------------------------------------------------------- | |
# Unit tests (run: `python fetch_ubuntu_cve_notes.py test`) | |
# --------------------------------------------------------------------------- | |
# `python fetch_ubuntu_cve_notes.py test` runs the embedded unit tests
# instead of the CLI; any other invocation dispatches to main().
if __name__ == "__main__":
    if len(sys.argv) > 1 and sys.argv[1] == "test":
        import unittest
        class ExtractNotesTests(unittest.TestCase):
            """Minimal unit tests for the HTML parser."""
            def test_notes_found(self):
                # Well-formed advisory: Notes header, paragraphs, then the
                # next section header which terminates collection.
                html = (
                    "<html><body>"
                    "<h2 id='notes'>Notes</h2>"
                    "<p>first</p><p>second</p>"
                    "<h2 id='refs'>References</h2>"
                    "</body></html>"
                )
                self.assertEqual(extract_notes(html), ["first", "second"])
            def test_notes_absent(self):
                # No element with id="notes" anywhere on the page.
                html = "<html><body><h2>Details</h2><p>none</p></body></html>"
                self.assertEqual(extract_notes(html), [])
        # Drop the literal "test" argument so unittest's own CLI parser
        # does not try to interpret it as a test name.
        unittest.main(argv=[sys.argv[0]])
    else:
        main()
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment