
// enrich_duo_csv.js
// CommonJS version. Only env var required: DATABASE_URL
const fs = require("fs");
const { parse } = require("csv-parse");
const { stringify } = require("csv-stringify");
const { Pool } = require("pg");
const DATABASE_URL = process.env.DATABASE_URL;
if (!DATABASE_URL) {
  console.error("Missing required env var: DATABASE_URL");
  process.exit(1);
}
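
// --- Illustrative sketch (not part of the original gist) of how the pieces
// required above are typically wired together: stream the CSV through
// csv-parse, enrich each row from Postgres, and write via csv-stringify.
// The file names and the `enrichRow` lookup (table/columns) are hypothetical.
async function enrichRow(pool, row) {
  const { rows } = await pool.query(
    "SELECT team FROM users WHERE email = $1", // hypothetical lookup table
    [row.email]
  );
  return { ...row, team: rows[0]?.team ?? "" };
}

async function main() {
  const pool = new Pool({ connectionString: DATABASE_URL });
  const parser = fs.createReadStream("duo_usage.csv").pipe(parse({ columns: true }));
  const out = stringify({ header: true });
  out.pipe(fs.createWriteStream("duo_usage_enriched.csv"));
  for await (const row of parser) {
    out.write(await enrichRow(pool, row));
  }
  out.end();
  await pool.end();
}

main().catch((err) => { console.error(err); process.exit(1); });
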
/**
* Fetch ADO custom field "Classification" for Work Items and upsert into DX.
* Usage: node fetch_classification.js
*/
const fs = require('fs');
const path = require('path');
const { Client } = require('pg');
const axios = require('axios');
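
// --- Illustrative sketch (not from the original script): fetching the custom
// field through the ADO REST API with the axios client required above. The
// ADO_ORG / ADO_PROJECT / ADO_PAT env vars and the field reference name
// "Custom.Classification" are assumptions.
async function fetchClassification(ids) {
  const url =
    `https://dev.azure.com/${process.env.ADO_ORG}/${process.env.ADO_PROJECT}` +
    `/_apis/wit/workitems?ids=${ids.join(",")}&fields=Custom.Classification&api-version=7.0`;
  const { data } = await axios.get(url, {
    auth: { username: "", password: process.env.ADO_PAT }, // PAT as basic-auth password
  });
  return data.value.map((wi) => ({
    id: wi.id,
    classification: wi.fields["Custom.Classification"] ?? null,
  }));
}
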
'use strict';
/**
* export_dx_users.js
*
* Streams a DX Postgres query to a CSV file.
* - Uses DATABASE_URL from environment
* - Normalizes postgres:// → postgresql://
* - Streams results to avoid memory issues
 * - Includes graceful shutdown
 */
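
// --- Illustrative sketch (not from the original file) of the two behaviors the
// header describes: the postgres:// → postgresql:// normalization and a
// streamed export. Assumes the pg-query-stream package; the query text and
// output path are placeholders.
const fs = require('fs');
const { Client } = require('pg');
const QueryStream = require('pg-query-stream');
const { stringify } = require('csv-stringify');

async function exportUsers() {
  const url = process.env.DATABASE_URL.replace(/^postgres:\/\//, 'postgresql://');
  const client = new Client({ connectionString: url });
  await client.connect();
  const rows = client.query(new QueryStream('SELECT * FROM users')); // placeholder query
  const out = fs.createWriteStream('dx_users.csv');
  rows.pipe(stringify({ header: true })).pipe(out);
  await new Promise((resolve, reject) => out.on('finish', resolve).on('error', reject));
  await client.end();
}
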
// Jira Worklogs + Summary with Original Estimate
//
/*
Jira Worklogs + Estimates Sync
Purpose:
- Sync Jira worklogs into Postgres and build a per issue time summary with Original Estimate.
Selection:
- Processes only completed issues
*/
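
// --- Illustrative sketch (not from the original script) of the selection step:
// JQL that returns only completed issues, with the timetracking field that
// carries Original Estimate. The env var names and the exact status clause
// ("completed" read as statusCategory = Done) are assumptions.
const axios = require('axios');
const { JIRA_BASE_URL, JIRA_EMAIL, JIRA_API_TOKEN } = process.env;

async function findCompletedIssues() {
  const jql = 'statusCategory = Done ORDER BY updated ASC';
  const { data } = await axios.get(`${JIRA_BASE_URL}/rest/api/3/search`, {
    params: { jql, fields: 'summary,timetracking', maxResults: 100 },
    auth: { username: JIRA_EMAIL, password: JIRA_API_TOKEN },
  });
  return data.issues; // fields.timetracking.originalEstimateSeconds per issue
}
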
/**
* fetch_work_item_parents.incremental.js
*
* Purpose: Incremental-only importer of ADO parent-child links into DX.
* Uses a per-project watermark based on ADO System.ChangedDate to avoid gaps and duplicates.
*
* Writes: batched multi-row upserts into custom.ado_work_item_links
* Watermarks: custom.ado_wi_links_watermarks (organization_name, project_name, last_changed_at)
*
 * Schema columns used in links table: child_work_item_source_id, parent_work_item_source_id, relation_url
 */
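
// --- Illustrative sketch (not from the original file) of the watermark upsert
// the header describes; table and column names come from the header, while the
// unique constraint on (organization_name, project_name) is an assumption.
const WATERMARK_UPSERT = `
  INSERT INTO custom.ado_wi_links_watermarks (organization_name, project_name, last_changed_at)
  VALUES ($1, $2, $3)
  ON CONFLICT (organization_name, project_name)
  DO UPDATE SET last_changed_at = EXCLUDED.last_changed_at
`;
// After each successfully upserted batch:
//   await client.query(WATERMARK_UPSERT, [org, project, maxChangedDateInBatch]);
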
/**
* fetch_work_item_parents.js
*
 * Backfill: WorkItemLinks WIQL, partitioned by Source.ChangedDate windows using DATE precision (YYYY-MM-DD)
 * Incremental: WorkItems WIQL by ChangedDate, then workitemsbatch with expand=Relations
 * Writes: batched multi-row upserts into custom.ado_work_item_links
 * Schema columns used: child_work_item_source_id, parent_work_item_source_id, relation_url
 * MIN_ID applied to both parent and child
 * ADO HTTPS via an authenticated proxy: http://DX_PROXY_USER:<password>@<proxy host>:80
 * Dry run writes a SQL file
 */
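
// --- Illustrative sketch (not from the original file) of one backfill window's
// WorkItemLinks WIQL; the window bounds are placeholders and the link type
// (parent → child hierarchy) is an assumption consistent with the header.
const wiql = `
  SELECT [System.Id]
  FROM WorkItemLinks
  WHERE [Source].[System.ChangedDate] >= '2024-01-01'
    AND [Source].[System.ChangedDate] < '2024-01-08'
    AND [System.Links.LinkType] = 'System.LinkTypes.Hierarchy-Forward'
  MODE (MustContain)
`;
// POSTed as { query: wiql } to /{org}/{project}/_apis/wit/wiql?api-version=7.0
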
# frozen_string_literal: true

module CustomerScripts
  class AccountIncidentImport
    extend Callable

    def call
      results = unprocessed_issues
      if results.empty?
// rollup-duo-usage-autodetect.js
//
// Purpose
// -------
// Pull per-user, per-day GitLab Duo usage **without ClickHouse**.
// 1) Probe GraphQL for `group.aiUsageData.all`; if present, use it.
// 2) Else fall back to `group.aiUsageData.codeSuggestionEvents`.
// Counts are **always** computed; sizes are included when the schema exposes `suggestionSize`.
// Any non–Code-Suggestions event types from `all` are summarized into `extras` with:
// - count (per user/day)
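
// --- Illustrative sketch (not from the original script) of the probe in step 1:
// issue a minimal query against `group.aiUsageData.all` and fall back when the
// schema rejects the field. Endpoint/token env vars, the minimal selection, and
// the unknown-field error text are assumptions; requires Node 18+ for fetch.
const PROBE = `
  query($path: ID!) {
    group(fullPath: $path) {
      aiUsageData { all { __typename } }
    }
  }
`;

async function supportsAllField(groupPath) {
  const res = await fetch(`${process.env.GITLAB_URL}/api/graphql`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${process.env.GITLAB_TOKEN}`,
    },
    body: JSON.stringify({ query: PROBE, variables: { path: groupPath } }),
  });
  const { errors } = await res.json();
  // An unknown-field validation error means the schema predates `all`.
  return !(errors || []).some((e) => /doesn't exist/i.test(e.message || ''));
}
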
"""
jira_extractor_proxy.py
DX Proxy to filter Jira Issue Types and optionally redact data before sending to the DX Datacloud API.
This Flask-based proxy acts as a middle layer between the DX Extractor and Datacloud,
giving teams control over the data they forward for ingestion.
Use Cases:
- Filter out specific Jira Issue Types (e.g., "Access Request", "Service Request")
- Redact or replace sensitive fields (e.g., issue summaries containing PII)
"""
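
// --- Illustrative sketch (not from the original proxy, and written in
// JavaScript for consistency with the other sketches in this list) of the
// per-issue decision it applies: drop blocked issue types, redact summaries.
// The PII pattern and the exact redaction rule are assumptions.
const BLOCKED_ISSUE_TYPES = new Set(['Access Request', 'Service Request']);
const PII_PATTERN = /\b[\w.+-]+@[\w-]+\.[\w.]+\b/g; // hypothetical: strip email addresses

function transformIssue(issue) {
  const type = issue?.fields?.issuetype?.name;
  if (BLOCKED_ISSUE_TYPES.has(type)) return null; // filtered out, never forwarded
  const summary = (issue.fields.summary || '').replace(PII_PATTERN, '[REDACTED]');
  return { ...issue, fields: { ...issue.fields, summary } };
}
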
// This query retrieves all merge_commit_shas from failed deployments
// for a given repo and service that occurred *after* the last successful deployment.
// It ensures unshipped PRs from failed attempts are rolled forward into the next successful deployment.
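
// --- Illustrative sketch (not from the original gist) of one way to express
// that window; the deployments table and its column names are hypothetical.
const FAILED_SHAS_SINCE_LAST_SUCCESS = `
  SELECT d.merge_commit_sha
  FROM deployments d
  WHERE d.repository = $1
    AND d.service = $2
    AND d.status = 'failure'
    AND d.deployed_at > (
      SELECT COALESCE(MAX(s.deployed_at), 'epoch'::timestamptz)
      FROM deployments s
      WHERE s.repository = $1 AND s.service = $2 AND s.status = 'success'
    )
`;
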
#!/usr/bin/env node
// dx_deployments_with_failed_sha_rollup_v4.js
import dotenv from 'dotenv';
import { Client } from 'pg';

dotenv.config(); // load .env before reading connection settings