// Script to simulate and optionally send deployment data to the DX API using merge_commit_shas attribution.
// Queries PRs merged into 'master' between the previous deployment and the current head commit,
// then generates a DX-compatible payload with support for dry-run CSV export.

// Required modules
import dotenv from 'dotenv';
import { Client } from 'pg';
import fetch from 'node-fetch';
import { DateTime } from 'luxon';
import fs from 'fs';
import path from 'path';

dotenv.config();

const DATABASE_URL = process.env.DX_DB_CONNECTION;
const DX_API_URL = process.env.DX_API_ENDPOINT || 'https://favor.getdx.net/api/deployments.create';
const DX_TOKEN = process.env.DX_API_KEY;
const VERBOSE = process.env.VERBOSE === 'true';
const MAX_RETRIES = 3;
const RETRY_DELAY = 2000;
const DRY_RUN = process.env.DRY_RUN === 'true';

const validateEnvVars = () => {
  const requiredVars = ['DX_DB_CONNECTION', 'DX_API_KEY'];
  requiredVars.forEach(key => {
    if (!process.env[key]) {
      console.error(`Missing required environment variable: ${key}`);
      process.exit(1);
    }
  });
};
validateEnvVars();
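
// Normalize the 'postgres://' scheme to 'postgresql://' (same protocol; some tooling
// is strict about the prefix, so this keeps the connection string broadly compatible).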
const normalizePostgresURL = (url) => url.startsWith('postgres://') ? url.replace('postgres://', 'postgresql://') : url;
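
// Sends a deployment payload to the DX API, retrying on failure.
// In dry-run mode, appends the payload to a local CSV instead of POSTing.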
const sendDeployment = async (payload) => {
  if (DRY_RUN) {
    const outputPath = path.resolve(process.cwd(), 'dry_run_deployments.csv');
    const header = 'reference_id,repository,service,deployed_at,merge_commit_shas\n';
    const line = `${payload.reference_id},${payload.repository},${payload.service},${payload.deployed_at},"${payload.merge_commit_shas.join(';')}"\n`;
    if (!fs.existsSync(outputPath)) {
      fs.writeFileSync(outputPath, header);
    }
    fs.appendFileSync(outputPath, line);
    console.log(`📝 Dry-run: Payload written to ${outputPath}`);
    return;
  }

  for (let attempt = 1; attempt <= MAX_RETRIES; attempt++) {
    try {
      const response = await fetch(DX_API_URL, {
        method: 'POST',
        headers: {
          'Authorization': `Bearer ${DX_TOKEN}`,
          'Content-Type': 'application/json'
        },
        body: JSON.stringify(payload)
      });
      if (!response.ok) throw new Error(`HTTP ${response.status}`);
      const data = await response.json();
      console.log('✅ Deployment posted successfully:', data);
      return;
    } catch (err) {
      console.error(`❌ Attempt ${attempt} failed:`, err);
      if (attempt < MAX_RETRIES) {
        console.log(`Retrying in ${RETRY_DELAY / 1000}s...`);
        await new Promise(res => setTimeout(res, RETRY_DELAY));
      } else {
        console.error('❌ Max retries reached. Giving up.');
      }
    }
  }
};
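
// Looks up merged PRs between the previous successful deployment and the
// current head commit, then builds and sends the DX deployment payload.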
const processDeployment = async ({ repo, headSha, lastShaTime, service, deployedAt, buildId }) => {
  const [org, repoName] = repo.split('/');
  const client = new Client({ connectionString: normalizePostgresURL(DATABASE_URL), ssl: { rejectUnauthorized: false } });
  try {
    await client.connect();
    if (VERBOSE) console.log('Connected to database.');
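
    // The query works in three steps:
    //   1. prev_deploy: deployed_at of the most recent successful deployment for this repo/service.
    //   2. master_prs / production_prs: merge commit SHAs of PRs merged into 'master'
    //      (or from a '%production%' head branch) within that window.
    //   3. UNION with the head SHA ($6) so the deployment always carries at least one commit.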
    const query = `
      WITH prev_deploy AS (
        SELECT deployed_at AS prev_deployed_at
        FROM deployments
        WHERE repository = $1
          AND service = $2
          AND success = true
          AND deployed_at < $3::timestamp
        ORDER BY deployed_at DESC
        LIMIT 1
      ),
      master_prs AS (
        SELECT DISTINCT pr.merge_commit_sha
        FROM pull_requests pr
        JOIN repos r ON r.id = pr.repo_id,
        prev_deploy
        WHERE r.name ILIKE $4
          AND r.organization ILIKE $5
          AND pr.base_ref = 'master'
          AND pr.merged BETWEEN COALESCE(prev_deploy.prev_deployed_at, $3::timestamp) AND $3::timestamp
          AND pr.merge_commit_sha IS NOT NULL
      ),
      production_prs AS (
        SELECT DISTINCT pr.merge_commit_sha
        FROM pull_requests pr
        JOIN repos r ON r.id = pr.repo_id,
        prev_deploy
        WHERE r.name ILIKE $4
          AND r.organization ILIKE $5
          AND pr.head_ref ILIKE '%production%'
          AND pr.merged BETWEEN COALESCE(prev_deploy.prev_deployed_at, $3::timestamp) AND $3::timestamp
          AND pr.merge_commit_sha IS NOT NULL
      )
      SELECT merge_commit_sha FROM master_prs
      UNION
      SELECT merge_commit_sha FROM production_prs
      UNION
      SELECT $6;
    `;
    const values = [repo, service, lastShaTime, repoName, org, headSha];
    const result = await client.query(query, values);
    const shas = result.rows.map(row => row.merge_commit_sha);

    const payload = {
      reference_id: `${service}:${buildId}`,
      deployed_at: deployedAt,
      service,
      repository: repo,
      merge_commit_shas: shas,
      source_name: 'custom-pr-attribution'
    };

    await sendDeployment(payload);
  } catch (error) {
    console.error('❌ Error during deployment processing:', error);
  } finally {
    await client.end();
    if (VERBOSE) console.log('Database connection closed.');
  }
};

// Example invocation wrapper
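// Hypothetical invocation (repo, SHA, and IDs below are illustrative, not real values):
//   node script.js acme/widgets 3f9d2ab 2025-06-01T00:00:00Z checkout-api 1748736000 build-1234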
(async () => {
  const args = process.argv.slice(2);
  if (args.length < 6) {
    console.error('Usage: node script.js <org/repo> <headSha> <lastShaTime> <service> <deployedAtUnix> <buildId>');
    process.exit(1);
  }
  const [repo, headSha, lastShaTimeStr, service, deployedAtStr, buildId] = args;
  const lastShaTime = DateTime.fromISO(lastShaTimeStr).toISO(); // Ensure it's ISO for DB query
  const deployedAt = parseInt(deployedAtStr, 10);
  await processDeployment({ repo, headSha, lastShaTime, service, deployedAt, buildId });
})();
I actually noticed, when I tested locally, that even though I passed in a Zulu time,
const lastShaTime = DateTime.fromISO(lastShaTimeStr).toISO();
would ignore the Z in the ISO timestamp and use my local time zone. I ended up adding a zone parameter,
const lastShaTime = DateTime.fromISO(lastShaTimeStr, { zone: 'UTC' }).toISO();
to ensure it works consistently on my GHA runner and locally. It came up because I found one case where it missed a commit due to that time difference.
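
For illustration, here is a minimal sketch of the behavior (the timestamp and zone below are just examples): Luxon does honor the Z when computing the instant, but by default it renders the result in the machine's local zone, which is what throws off the naive ::timestamp comparison in the query.

import { DateTime } from 'luxon';

// On a machine whose local zone is, say, America/New_York:
DateTime.fromISO('2025-06-01T00:00:00Z').toISO();
// -> '2025-05-31T20:00:00.000-04:00' (same instant, rendered with the local offset)

// Forcing the zone keeps the rendering in UTC:
DateTime.fromISO('2025-06-01T00:00:00Z', { zone: 'UTC' }).toISO();
// -> '2025-06-01T00:00:00.000Z'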