@erik1o6
Created September 8, 2025 21:49
scan_vulnerable_packages
#!/usr/bin/env bash
set -euo pipefail
# Simple security scanner for vulnerable npm packages
# Checks all repos in an organization for specific package versions
#
# REQUIREMENTS FOR MACOS:
# -----------------------
# 1. Install GitHub CLI:
# brew install gh
#
# 2. Authenticate with GitHub:
# gh auth login
#
# 3. Install Node.js (v16 or later):
# brew install node
#
# 4. Install required npm packages:
# npm install @yarnpkg/lockfile @yarnpkg/parsers js-yaml
#
# 5. Ensure git is installed:
# git --version (comes with Xcode Command Line Tools)
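#
# Optional sanity check (verifies the tools listed above are available):
# gh auth status && node --version && git --version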
#
# USAGE:
# ------
# ./scan.sh
#
# The script will:
# - Clone all repos from the organization (shallow clones)
# - Scan package-lock.json, yarn.lock, and pnpm-lock.yaml files
# - Check for specific vulnerable package versions
# - Output results to security-scan-results.csv
#
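# Example CSV rows (illustrative repo names and versions only):
#   my-frontend,main,chalk,5.6.1,VULNERABLE,5.6.1
#   my-api,main,chalk,5.6.1,SAFE,4.1.2
#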
ORG="org" # enter ORG
WORKDIR="${TMPDIR:-/tmp}/org-scan"
# Ensure we have the required npm packages for parsing
echo " Checking dependencies..."
npm list @yarnpkg/lockfile @yarnpkg/parsers js-yaml >/dev/null 2>&1 || {
echo "Installing required parsers..."
npm install @yarnpkg/lockfile @yarnpkg/parsers js-yaml >/dev/null 2>&1
}
# Create work directory
mkdir -p "$WORKDIR"
# Create the scanner script
cat > /tmp/scanner.mjs << 'EOF'
#!/usr/bin/env node
import fs from "fs";
import path from "path";
// Import parsers
let yarnLockfile, yarnParsers, jsYaml;
try { yarnLockfile = await import("@yarnpkg/lockfile"); } catch {}
try { yarnParsers = await import("@yarnpkg/parsers"); } catch {}
try { jsYaml = await import("js-yaml"); } catch {}
// Vulnerable packages we're checking
const TARGETS = [
{ name:"backslash", version:"0.2.1" },
{ name:"chalk-template", version:"1.1.1" },
{ name:"supports-hyperlinks", version:"4.1.1" },
{ name:"has-ansi", version:"6.0.1" },
{ name:"simple-swizzle", version:"0.2.3" },
{ name:"color-string", version:"2.1.1" },
{ name:"error-ex", version:"1.3.3" },
{ name:"color-name", version:"2.0.1" },
{ name:"is-arrayish", version:"0.3.3" },
{ name:"slice-ansi", version:"7.1.1" },
{ name:"color-convert", version:"3.1.1" },
{ name:"wrap-ansi", version:"9.0.1" },
{ name:"ansi-regex", version:"6.2.1" },
{ name:"supports-color", version:"10.2.1" },
{ name:"strip-ansi", version:"7.1.1" },
{ name:"chalk", version:"5.6.1" },
{ name:"debug", version:"4.4.2" },
{ name:"ansi-styles", version:"6.2.2" },
];
const repo = process.env.REPO || "";
const branch = process.env.BRANCH || "";
const dir = process.argv[2] || ".";
// Find all versions of our target packages
const found = new Map();
function recordPackage(name, version) {
if (!found.has(name)) found.set(name, new Set());
found.get(name).add(version);
}
// Parse npm package-lock.json
function scanNpmLock(file) {
try {
const data = JSON.parse(fs.readFileSync(file, "utf8"));
if (data.packages) {
for (const [key, pkg] of Object.entries(data.packages)) {
// Most lockfile v2/v3 entries omit "name"; use pkg.name when present,
// otherwise derive it from the "node_modules/<name>" key.
const name = pkg.name || key.split("node_modules/").pop();
if (name && pkg.version) {
recordPackage(name, pkg.version);
}
}
}
} catch {}
}
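// For reference, the "packages" map read above looks roughly like this in
// npm lockfile v2/v3 (illustrative entry, not taken from a real repo):
//   "node_modules/chalk": { "version": "5.6.1", "resolved": "https://...", "integrity": "sha512-..." }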
// Parse yarn.lock
function scanYarnLock(file) {
try {
const text = fs.readFileSync(file, "utf8");
let parsed;
if (yarnLockfile?.parse) {
const result = yarnLockfile.parse(text);
if (result?.object) parsed = result.object;
}
if (parsed) {
for (const [selector, info] of Object.entries(parsed)) {
if (info?.version) {
const match = selector.match(/^(@?[^@]+)@/);
if (match) recordPackage(match[1], info.version);
}
}
}
} catch {}
}
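// For reference, @yarnpkg/lockfile returns entries keyed by selector, roughly
// (classic yarn.lock v1 format, illustrative values):
//   "chalk@^5.0.0": { version: "5.6.1", resolved: "...", integrity: "..." }
// which is why the regex above keeps everything before the version range,
// including a leading "@scope/" if present.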
// Parse pnpm-lock.yaml
function scanPnpmLock(file) {
try {
let foundPackages = false;
// Try YAML parser first if available
if (jsYaml && jsYaml.load) {
try {
const data = jsYaml.load(fs.readFileSync(file, "utf8"));
// Parse packages section (pnpm v6+)
if (data?.packages) {
for (const [key, info] of Object.entries(data.packages)) {
// Format: /package-name@version or /package-name@version(params)
let match = key.match(/^\/(.+?)@([^@\/(]+)/);
if (match) {
recordPackage(match[1], match[2]);
foundPackages = true;
}
}
}
} catch {}
}
// Always use regex fallback if YAML didn't find packages
if (!foundPackages) {
const text = fs.readFileSync(file, "utf8");
const lines = text.split('\n');
for (const line of lines) {
// Look for package definitions like: /some-package@1.2.3:
const match = line.match(/^\s*\/(.+?)@([^@\/:]+):/);
if (match) {
recordPackage(match[1], match[2]);
}
}
}
} catch {}
}
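// For reference, the keys matched above look roughly like this in pnpm
// v6-style lockfiles (illustrative values):
//   YAML path:      "/chalk@5.6.1" or "/chalk@5.6.1(supports-color@10.2.1)"
//   fallback line:  "  /chalk@5.6.1:"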
// Find and scan lockfiles
function scanDirectory(dir) {
const walk = (d) => {
const files = [];
try {
for (const entry of fs.readdirSync(d, { withFileTypes: true })) {
const p = path.join(d, entry.name);
if (entry.isDirectory() && !["node_modules", ".git"].includes(entry.name)) {
files.push(...walk(p));
} else if (["package-lock.json", "yarn.lock", "pnpm-lock.yaml"].includes(entry.name)) {
files.push(p);
}
}
} catch {}
return files;
};
const lockfiles = walk(dir);
for (const file of lockfiles) {
const base = path.basename(file);
if (base === "package-lock.json") scanNpmLock(file);
else if (base === "yarn.lock") scanYarnLock(file);
else if (base === "pnpm-lock.yaml") scanPnpmLock(file);
}
}
// Run the scan
scanDirectory(dir);
// Output results
for (const target of TARGETS) {
const versions = found.get(target.name);
const hasTarget = versions?.has(target.version);
const actualVersions = versions ? Array.from(versions).sort().join(";") : "-";
console.log([
repo,
branch,
target.name,
target.version,
hasTarget ? "VULNERABLE" : "SAFE",
actualVersions
].join(","));
}
EOF
chmod +x /tmp/scanner.mjs
# Output files
RESULTS="security-scan-results.csv"
SUMMARY="security-scan-summary.txt"
# CSV header
echo "repo,branch,package,vulnerable_version,status,actual_versions" > "$RESULTS"
# Get repos and scan
echo ""
echo " Scanning $ORG repositories..."
echo ""
REPOS=$(gh repo list "$ORG" --limit 1000 --json name,isArchived,defaultBranchRef \
-q '.[] | select(.isArchived|not) | "\(.name):\(.defaultBranchRef.name)"')
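# Each line of $REPOS has the form "name:default-branch", e.g. "my-frontend:main"
# (hypothetical repo name).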
TOTAL=$(echo "$REPOS" | wc -l | tr -d ' ')
COUNT=0
VULNERABLE_COUNT=0
for repo_info in $REPOS; do
repo="${repo_info%%:*}"
branch="${repo_info##*:}"
COUNT=$((COUNT + 1))
printf "\r[%3d/%3d] Scanning %-50s" "$COUNT" "$TOTAL" "$repo ($branch)"
dir="$WORKDIR/$repo"
# Clone if needed
if [ ! -d "$dir" ]; then
gh repo clone "$ORG/$repo" "$dir" -- \
--depth=1 --filter=blob:none --branch "$branch" >/dev/null 2>&1 || continue
fi
# Run scanner
OUTPUT=$(REPO="$repo" BRANCH="$branch" node /tmp/scanner.mjs "$dir" 2>/dev/null || true)
if [ -n "$OUTPUT" ]; then
echo "$OUTPUT" >> "$RESULTS"
# Count vulnerabilities
if echo "$OUTPUT" | grep -q "VULNERABLE"; then
VULNERABLE_COUNT=$((VULNERABLE_COUNT + $(echo "$OUTPUT" | grep -c "VULNERABLE")))
fi
fi
done
echo ""
echo ""
echo "════════════════════════════════════════════════════════════"
echo " SCAN COMPLETE"
echo "════════════════════════════════════════════════════════════"
echo ""
echo " Results:"
echo " • Repositories scanned: $COUNT"
echo " • Vulnerable packages found: $VULNERABLE_COUNT"
echo ""
# Show status of all target packages
echo " Package Status Summary:"
echo ""
# Process CSV to get package summary
tail -n +2 "$RESULTS" | awk -F',' '
BEGIN {
# List of all packages we check for
targets["[email protected]"] = 1
targets["[email protected]"] = 1
targets["[email protected]"] = 1
targets["[email protected]"] = 1
targets["[email protected]"] = 1
targets["[email protected]"] = 1
targets["[email protected]"] = 1
targets["[email protected]"] = 1
targets["[email protected]"] = 1
targets["[email protected]"] = 1
targets["[email protected]"] = 1
targets["[email protected]"] = 1
targets["[email protected]"] = 1
targets["[email protected]"] = 1
targets["[email protected]"] = 1
targets["[email protected]"] = 1
targets["[email protected]"] = 1
targets["[email protected]"] = 1
}
{
pkg = $3 "@" $4
status = $5
version = $6
if (version != "-") {
# Collect all unique versions
if (allversions[pkg] == "") {
allversions[pkg] = version
} else if (index(allversions[pkg], version) == 0) {
# Only add if not already present
allversions[pkg] = allversions[pkg] ";" version
}
count[pkg]++
if (status == "VULNERABLE") vuln[pkg]++
}
}
END {
# Show packages found
for (p in allversions) {
# Get unique versions and format them
gsub(";", ", ", allversions[p])
if (vuln[p] > 0) {
printf " ✗ %s - VULNERABLE (versions: %s in %d repos)\n", p, allversions[p], count[p]
} else {
printf " ✓ %s - SAFE (versions: %s in %d repos)\n", p, allversions[p], count[p]
}
delete targets[p]
}
# Show packages not found
for (p in targets) {
printf " ○ %s - Not found in any repo\n", p
}
}'
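# Example summary lines produced by the awk block above (illustrative counts and versions):
#   ✗ chalk@5.6.1 - VULNERABLE (versions: 5.6.1, 4.1.2 in 3 repos)
#   ○ backslash@0.2.1 - Not found in any repo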
echo ""
echo " Full results saved to: $RESULTS"
echo ""
@LanceMcCarthy

Thanks @erik1o6!

It was easier for me to convert your work to PowerShell than to set up the CLI in WSL (and re-auth, etc.), so I'm sharing it here in case someone else finds it useful.

$ORG = "org"  # Replace with your GitHub org
$WORKDIR = Join-Path $env:TEMP "$ORG-scan"
$RESULTS = "security-scan-results.csv"
$SUMMARY = "security-scan-summary.txt"
$scannerPath = "$env:TEMP\scanner.mjs"

# Ensure required npm packages are installed
Write-Host "Checking dependencies..."
$npmPackages = @("@yarnpkg/lockfile", "@yarnpkg/parsers", "js-yaml")  # array so npm receives three separate arguments
npm list $npmPackages > $null 2>&1
if ($LASTEXITCODE -ne 0) {
    Write-Host "Installing required parsers..."
    npm install $npmPackages > $null 2>&1
}

# Create working directory
New-Item -ItemType Directory -Force -Path $WORKDIR | Out-Null

# Write scanner.mjs
@'
#!/usr/bin/env node
import fs from "fs";
import path from "path";

let yarnLockfile, yarnParsers, jsYaml;
try { yarnLockfile = await import("@yarnpkg/lockfile"); } catch {}
try { yarnParsers = await import("@yarnpkg/parsers"); } catch {}
try { jsYaml = await import("js-yaml"); } catch {}

const TARGETS = [
  { name:"backslash", version:"0.2.1" },
  { name:"chalk-template", version:"1.1.1" },
  { name:"supports-hyperlinks", version:"4.1.1" },
  { name:"has-ansi", version:"6.0.1" },
  { name:"simple-swizzle", version:"0.2.3" },
  { name:"color-string", version:"2.1.1" },
  { name:"error-ex", version:"1.3.3" },
  { name:"color-name", version:"2.0.1" },
  { name:"is-arrayish", version:"0.3.3" },
  { name:"slice-ansi", version:"7.1.1" },
  { name:"color-convert", version:"3.1.1" },
  { name:"wrap-ansi", version:"9.0.1" },
  { name:"ansi-regex", version:"6.2.1" },
  { name:"supports-color", version:"10.2.1" },
  { name:"strip-ansi", version:"7.1.1" },
  { name:"chalk", version:"5.6.1" },
  { name:"debug", version:"4.4.2" },
  { name:"ansi-styles", version:"6.2.2" },
];

const repo = process.env.REPO || "";
const branch = process.env.BRANCH || "";
const dir = process.argv[2] || ".";

const found = new Map();
function recordPackage(name, version) {
  if (!found.has(name)) found.set(name, new Set());
  found.get(name).add(version);
}

function scanNpmLock(file) {
  try {
    const data = JSON.parse(fs.readFileSync(file, "utf8"));
    if (data.packages) {
      for (const [key, pkg] of Object.entries(data.packages)) {
        // Most lockfile v2/v3 entries omit "name"; derive it from the key when absent.
        const name = pkg.name || key.split("node_modules/").pop();
        if (name && pkg.version) {
          recordPackage(name, pkg.version);
        }
      }
    }
  } catch {}
}

function scanYarnLock(file) {
  try {
    const text = fs.readFileSync(file, "utf8");
    let parsed;
    if (yarnLockfile?.parse) {
      const result = yarnLockfile.parse(text);
      if (result?.object) parsed = result.object;
    }
    if (parsed) {
      for (const [selector, info] of Object.entries(parsed)) {
        if (info?.version) {
          const match = selector.match(/^(@?[^@]+)@/);
          if (match) recordPackage(match[1], info.version);
        }
      }
    }
  } catch {}
}

function scanPnpmLock(file) {
  try {
    let foundPackages = false;
    if (jsYaml && jsYaml.load) {
      try {
        const data = jsYaml.load(fs.readFileSync(file, "utf8"));
        if (data?.packages) {
          for (const [key, info] of Object.entries(data.packages)) {
            let match = key.match(/^\/(.+?)@([^@\/(]+)/);
            if (match) {
              recordPackage(match[1], match[2]);
              foundPackages = true;
            }
          }
        }
      } catch {}
    }
    if (!foundPackages) {
      const text = fs.readFileSync(file, "utf8");
      const lines = text.split('\n');
      for (const line of lines) {
        const match = line.match(/^\s*\/(.+?)@([^@/:]+):/);
        if (match) {
          recordPackage(match[1], match[2]);
        }
      }
    }
  } catch {}
}

function scanDirectory(dir) {
  const walk = (d) => {
    const files = [];
    try {
      for (const entry of fs.readdirSync(d, { withFileTypes: true })) {
        const p = path.join(d, entry.name);
        if (entry.isDirectory() && !["node_modules", ".git"].includes(entry.name)) {
          files.push(...walk(p));
        } else if (["package-lock.json", "yarn.lock", "pnpm-lock.yaml"].includes(entry.name)) {
          files.push(p);
        }
      }
    } catch {}
    return files;
  };
  const lockfiles = walk(dir);
  for (const file of lockfiles) {
    const base = path.basename(file);
    if (base === "package-lock.json") scanNpmLock(file);
    else if (base === "yarn.lock") scanYarnLock(file);
    else if (base === "pnpm-lock.yaml") scanPnpmLock(file);
  }
}

scanDirectory(dir);

for (const target of TARGETS) {
  const versions = found.get(target.name);
  const hasTarget = versions?.has(target.version);
  const actualVersions = versions ? Array.from(versions).sort().join(";") : "-";
  console.log([repo, branch, target.name, target.version, hasTarget ? "VULNERABLE" : "SAFE", actualVersions].join(","));
}
'@ | Set-Content -Path $scannerPath -Encoding UTF8

# CSV Header
"repo,branch,package,vulnerable_version,status,actual_versions" | Out-File -FilePath $RESULTS

# Get repos from GitHub
Write-Host "`nScanning $ORG repositories..."
$reposJson = gh repo list $ORG --limit 1000 --json name,isArchived,defaultBranchRef
$repos = ($reposJson | ConvertFrom-Json) | Where-Object { -not $_.isArchived }

$COUNT = 0
$VULNERABLE_COUNT = 0

foreach ($repoObj in $repos) {
    $repo = $repoObj.name
    $branch = $repoObj.defaultBranchRef.name
    $COUNT++

    Write-Host ("[{0}/{1}] Scanning {2} ({3})" -f $COUNT, $repos.Count, $repo, $branch)

    $dir = Join-Path $WORKDIR $repo
    if (-not (Test-Path $dir)) {
        gh repo clone "$ORG/$repo" $dir -- --depth=1 --filter=blob:none --branch $branch > $null 2>&1
        if ($LASTEXITCODE -ne 0) { continue }  # only skip when the clone failed (matches the bash version)
    }

    $env:REPO = $repo
    $env:BRANCH = $branch
    $output = node $scannerPath "$dir" 2>$null

    if ($output) {
        $output | Out-File -Append -FilePath $RESULTS
        if ($output -match "VULNERABLE") {
            $VULNERABLE_COUNT += ($output | Select-String "VULNERABLE").Count
        }
    }
}

Write-Host "`n════════════════════════════════════════════════════════════"
Write-Host "SCAN COMPLETE"
Write-Host "════════════════════════════════════════════════"

@mbougarne

This is not safe

cat > /tmp/scanner.mjs << 'EOF'

/tmp on Unix systems is world-writable, so a malicious local user could pre-place a symlink at that path and trick the shell into writing somewhere it shouldn't. It's safer to create the file with mktemp and a random name:

SCANNER="$(mktemp "${WORKDIR}/scanner.XXXXXX.mjs")"
cat > "$SCANNER" <<'EOF'
