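// Deployment_by_Repos.mjs
// Reports a deployment for a repository to the DX deployments API: it looks up
// the most recently merged PR before the given timestamp (the reference commit)
// plus every merge commit since the service's last recorded deployment, then
// either POSTs the payload to DX or writes it to a CSV in --dry-run mode.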
import fs from 'fs';
import dotenv from 'dotenv';
import fetch from 'node-fetch';
import pkg from 'pg';
import { DateTime } from 'luxon';

const { Client } = pkg;

dotenv.config();
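// Configuration is read from the environment (loaded from .env above).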
const DX_DB_CONNECTION = process.env.DX_DB_CONNECTION;
const DX_API_KEY = process.env.DX_API_KEY;
const DX_API_ENDPOINT = "https://anywhere.getdx.net/api/deployments.create";
const VERBOSE = process.env.VERBOSE === 'true';
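// Retry settings for the API call. Note: these are declared but not yet wired
// into the fetch below; a retry loop using them would be a natural follow-up.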
const MAX_RETRIES = 3;
const RETRY_DELAY = 2000;
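// Fail fast if any required environment variable is missing.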
const validateEnvVars = () => {
  const requiredVars = ["DX_DB_CONNECTION", "DX_API_KEY"];
  requiredVars.forEach((key) => {
    if (!process.env[key]) {
      console.error(`Missing required environment variable: ${key}`);
      process.exit(1);
    }
  });
};

validateEnvVars();
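// Normalize the postgres:// scheme shorthand to postgresql://.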
const normalizePostgresURL = (url) =>
  url.startsWith("postgres://") ? url.replace("postgres://", "postgresql://") : url;
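// Convert an ISO 8601 timestamp (interpreted as UTC) to Unix seconds.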
const isoToUnix = (isoString) => {
  return DateTime.fromISO(isoString, { zone: 'utc' }).toSeconds();
};
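// Parse CLI arguments: <deployed_at_iso> <repo_name> [--dry-run] [--source=<source>].
// deployedAtUnix is computed for convenience but is not used elsewhere in the script.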
const getScriptParams = () => {
  if (process.argv.length < 4) {
    console.error("Usage: node Deployment_by_Repos.mjs <deployed_at_iso> <repo_name> [--dry-run] [--source=<source>]");
    process.exit(1);
  }
  const deployedAtInput = process.argv[2];
  if (Number.isNaN(Date.parse(deployedAtInput))) {
    console.error(`Invalid timestamp: ${deployedAtInput}`);
    process.exit(1);
  }
  const sourceArg = process.argv.find(arg => arg.startsWith("--source="));
  const source = sourceArg ? sourceArg.split("=")[1] : null;
  return {
    deployedAtISO: new Date(deployedAtInput).toISOString(),
    deployedAtUnix: isoToUnix(deployedAtInput),
    repoName: process.argv[3],
    dryRun: process.argv.includes("--dry-run"),
    source
  };
};
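// Query the DX database for (a) the merge commit of the most recent qualifying
// PR merged before the deployment time (the reference commit) and (b) all merge
// commit SHAs merged between the service's last recorded deployment and the
// deployment time, excluding bot-authored PRs, drafts, and PRs already tied to
// a deployment.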
const fetchPRData = async (deployedAtISO, repoName, sourceOverride = null) => {
  const client = new Client({
    connectionString: normalizePostgresURL(DX_DB_CONNECTION),
    ssl: { rejectUnauthorized: false }
  });
  try {
    await client.connect();
    if (VERBOSE) console.log("Connected to the database.");
    if (VERBOSE) console.log("Query parameters:", { deployedAtISO, repoName, sourceOverride });
    const formattedDeployedAtISO = new Date(deployedAtISO).toISOString().replace("T", " ").replace("Z", "");
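    // Build one query; when a source override is given, the repo lookup is
    // additionally filtered by source ($3).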
    const sourceFilter = sourceOverride ? "AND source = $3" : "";
    const query = `
      WITH target_repo AS (
        SELECT id, source
        FROM repos
        WHERE name = $2
          ${sourceFilter}
          AND api_accessible = 't'
        LIMIT 1
      )
      SELECT
        (SELECT merge_commit_sha
         FROM pull_requests
         JOIN target_repo ON pull_requests.repo_id = target_repo.id
         WHERE merged < CAST($1 AS TIMESTAMP)
           AND pull_requests.bot_authored IS NOT TRUE
           AND pull_requests.draft IS NOT TRUE
         ORDER BY merged DESC
         LIMIT 1) AS reference_id,
        (SELECT array_agg(merge_commit_sha)
         FROM pull_requests
         JOIN target_repo ON pull_requests.repo_id = target_repo.id
         WHERE merged BETWEEN COALESCE((
             SELECT deployed_at
             FROM deployments
             WHERE service = $2
             ORDER BY deployed_at DESC
             LIMIT 1
           ), '1970-01-01'::timestamp) AND CAST($1 AS TIMESTAMP)
           AND pull_requests.bot_authored IS NOT TRUE
           AND pull_requests.draft IS NOT TRUE
           AND pull_requests.id NOT IN (SELECT pull_request_id FROM pull_request_deployments)
        ) AS merge_request_sha_array,
        (SELECT source FROM target_repo) AS source_name;
    `;
    const values = sourceOverride
      ? [formattedDeployedAtISO, repoName, sourceOverride]
      : [formattedDeployedAtISO, repoName];
| if (VERBOSE) console.log("Executing query with values:", values); | |
| const result = await client.query(query, values); | |
| if (VERBOSE) console.log("Query executed successfully.", result.rows); | |
| return result.rows.length > 0 | |
| ? { | |
| referenceId: result.rows[0].reference_id, | |
| mergeRequestShaArray: result.rows[0].merge_request_sha_array || [], | |
| sourceName: result.rows[0].source_name || null | |
| } | |
| : { referenceId: null, mergeRequestShaArray: [], sourceName: null }; | |
| } catch (error) { | |
| console.error("Database query failed:", error); | |
| return { referenceId: null, mergeRequestShaArray: [], sourceName: null }; | |
| } finally { | |
| await client.end(); | |
| if (VERBOSE) console.log("Database connection closed."); | |
| } | |
| }; | |
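// Append the payload as a row to dry_run_output.csv, writing the header line
// first if the file does not exist yet. Only the SHA list is quoted; the other
// fields (timestamps, SHAs, repo names) are not expected to contain commas.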
const writeDryRunCSV = (payload) => {
  const filePath = './dry_run_output.csv';
  const headers = ['deployed_at', 'reference_id', 'service', 'repository', 'merge_commit_shas', 'source_name'];
  const row = [
    payload.deployed_at,
    payload.reference_id,
    payload.service,
    payload.repository,
    `"${(payload.merge_commit_shas || []).join(',')}"`,
    payload.source_name
  ];
  const csvLine = row.join(',') + '\n';
  if (!fs.existsSync(filePath)) {
    fs.writeFileSync(filePath, headers.join(',') + '\n');
  }
  fs.appendFileSync(filePath, csvLine);
};
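// Build the deployment payload, then either record it locally (--dry-run) or
// POST it to the DX deployments endpoint.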
const main = async () => {
  const params = getScriptParams();
  if (VERBOSE) console.log("Script parameters:", params);
  const prData = await fetchPRData(params.deployedAtISO, params.repoName, params.source);
  const payload = {
    deployed_at: params.deployedAtISO,
    reference_id: prData.referenceId,
    service: params.repoName,
    repository: params.repoName,
    merge_commit_shas: prData.mergeRequestShaArray,
    source_name: prData.sourceName
  };
  if (params.dryRun) {
    if (VERBOSE) {
      console.log("Dry run enabled. Writing payload to CSV...");
      console.log("Payload:", payload);
    }
    writeDryRunCSV(payload);
    console.log("✅ Dry run complete. Output written to dry_run_output.csv");
  } else {
    if (VERBOSE) {
      console.log("Sending payload to DX API...");
      console.log("Payload:", payload);
    }
    try {
      const response = await fetch(DX_API_ENDPOINT, {
        method: 'POST',
        headers: {
          'Authorization': `Bearer ${DX_API_KEY}`,
          'Content-Type': 'application/json'
        },
        body: JSON.stringify(payload)
      });
      if (!response.ok) {
        throw new Error(`Failed to send deployment: ${response.status} ${await response.text()}`);
      }
      console.log("✅ Deployment successfully sent to DX.");
    } catch (err) {
      console.error("❌ API request failed:", err);
    }
  }
};

main();