// bitbucket_changelog_ingest.js
//
// Purpose:
// Incrementally ingest Bitbucket Server commit activity per user using the
// Awesome Graphs "user activities" endpoint. Uses a per-user high-water mark
// based on MAX(created) already stored in the target table, with a small
// overlap window for safety.
//
// Requirements:
// - Node 18+
// Proposed table schema (PostgreSQL):
//   sha                  TEXT PRIMARY KEY,
//   source_url           TEXT,
//   author_id            TEXT,             -- remains TEXT to match source_id in bitbucket_server_users
//   created              TIMESTAMP,
//   repository_source_id INTEGER NOT NULL, -- used to map to bitbucket_server_repos
//   row_created_at       TIMESTAMP DEFAULT NOW()
const fs = require('fs');
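
// A minimal sketch of the watermark lookup described above. Assumptions, not
// from the original: the target table is named "bitbucket_commits", the pg
// client is passed in as `db`, and the overlap window is 10 minutes.
const OVERLAP_MS = 10 * 60 * 1000;

async function sinceForAuthor(db, authorId) {
  // MAX(created) is the per-user high-water mark; subtract the overlap window
  // so late-arriving activity is re-fetched, and rely on the sha PRIMARY KEY
  // to de-duplicate on insert.
  const { rows } = await db.query(
    'SELECT MAX(created) AS hwm FROM bitbucket_commits WHERE author_id = $1',
    [authorId]
  );
  const hwm = rows[0].hwm;
  return hwm ? new Date(hwm.getTime() - OVERLAP_MS) : null;
}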
// Connects to the PostgreSQL database using the provided connection string.
// Executes the query to fetch incident data.
// Formats the result into the DX Incidents API payload.
// Sends each incident to the DX API with pacing and retry logic.
// REQUIRED ENVIRONMENT VARIABLES:
//   DATABASE_URL=<your_pg_connection>
//   DX_API_TOKEN=<your_dx_api_token>
//   DX_BASE_URL=<https://concentra.getdx.net>
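
// A minimal sketch of the pacing and retry logic described above. Assumptions,
// not from the original: the endpoint path `${DX_BASE_URL}/api/incidents.create`,
// a 250 ms gap between sends, and up to 3 attempts with exponential backoff on
// 429/5xx responses.
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

async function sendIncident(payload, attempt = 1) {
  const res = await fetch(`${process.env.DX_BASE_URL}/api/incidents.create`, {
    method: 'POST',
    headers: {
      Authorization: `Bearer ${process.env.DX_API_TOKEN}`,
      'Content-Type': 'application/json',
    },
    body: JSON.stringify(payload),
  });
  if ((res.status === 429 || res.status >= 500) && attempt < 3) {
    await sleep(1000 * 2 ** attempt); // back off: 2 s, then 4 s
    return sendIncident(payload, attempt + 1);
  }
  if (!res.ok) throw new Error(`DX API ${res.status}: ${await res.text()}`);
  return res.json();
}

// Pacing: send one incident at a time with a short gap between requests, e.g.
//   for (const incident of incidents) { await sendIncident(incident); await sleep(250); }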
pipeline {
  agent any
  environment {
    API_TOKEN      = credentials('DX_DEPLOYMENT_API_TOKEN') // Jenkins credential ID
    DATACLOUD_HOST = 'yourinstance.getdx.net'
    SERVICE_NAME   = 'my_service'
  }
  options {
    timestamps() // assumption: the original options body was truncated here
  }
  // stages omitted in the original snippet; the deployment call they would
  // make is sketched below
}
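
// A minimal Node sketch (matching the rest of this gist) of the deployment
// notification the pipeline above would send. Assumptions, not from the
// original: the endpoint path `/api/deployments.create` and the payload
// field names used here.
async function notifyDeployment(sha) {
  const res = await fetch(`https://${process.env.DATACLOUD_HOST}/api/deployments.create`, {
    method: 'POST',
    headers: {
      Authorization: `Bearer ${process.env.API_TOKEN}`,
      'Content-Type': 'application/json',
    },
    body: JSON.stringify({
      service: process.env.SERVICE_NAME,          // e.g. 'my_service' from the pipeline env
      commit_sha: sha,
      deployed_at: Math.floor(Date.now() / 1000), // unix seconds
    }),
  });
  if (!res.ok) throw new Error(`deployments.create failed: ${res.status}`);
}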
const fs = require("fs");
const csv = require("fast-csv");
const { Client } = require("pg");
const inputFile = process.argv[2] || "combined_dx_jira_matches.csv";
const outputFile = "new_identity_inserts.sql";
const DX_DB_CONNECTION =
"postgres://uw1e3sby6trmysstv:8nq4f6zzkwcSmbVE@integrated-practice-solus.6bfe0e091eb03829ab9e.db.getdx.net/client";
const { Client } = require("pg");
const fs = require("fs");
const csvWriter = require("fast-csv");
const fuzz = require("fuzzball");
const dayjs = require("dayjs");
const normalizePostgresURL = (url) =>
url.startsWith("postgres://") ? url.replace("postgres://", "postgresql://") : url;
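
// A short usage sketch: open a pg client with the normalized URL.
async function connectToDx() {
  const client = new Client({ connectionString: normalizePostgresURL(DX_DB_CONNECTION) });
  await client.connect();
  return client;
}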
// Script to simulate and optionally send deployment data to the DX API using
// merge_commit_shas attribution. Queries PRs merged into 'master' between the
// previous deployment and the current head commit, then generates a
// DX-compatible payload with support for dry-run CSV export.

// Required modules
import dotenv from 'dotenv';
import fs from 'fs';
import fetch from 'node-fetch';
import pkg from 'pg';
import { DateTime } from 'luxon';

const { Client } = pkg;

dotenv.config();

const DX_DB_CONNECTION = process.env.DX_DB_CONNECTION;
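
// A minimal sketch of the merge-window query described above. Assumptions,
// not from the original: the table/column names "pull_requests",
// "base_branch", "merged_at", and "merge_commit_sha".
async function mergeShasBetween(client, fromIso, toIso) {
  const { rows } = await client.query(
    `SELECT merge_commit_sha
       FROM pull_requests
      WHERE base_branch = 'master'
        AND merged_at > $1 AND merged_at <= $2`,
    [fromIso, toIso]
  );
  return rows.map((r) => r.merge_commit_sha);
}
// The resulting array would populate the payload's merge_commit_shas field
// for deployment attribution, per the description above.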
SELECT
  du.name,
  du.email,
  COUNT(DISTINCT pr.id) AS number_of_prs_merged,
  COUNT(DISTINCT CASE WHEN cdu.is_active THEN cdu.date END) AS cursor_days_active
FROM cursor_daily_user_metrics cdu
INNER JOIN dx_users du
  ON du.email = cdu.email
INNER JOIN pull_requests pr
  ON pr.dx_user_id = du.id
GROUP BY du.name, du.email;
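
// A short sketch of exporting the query result above to CSV with fast-csv,
// which this gist already uses. The output file name "cursor_pr_overlap.csv"
// is an assumption.
const csv = require("fast-csv");

function writeRowsToCsv(rows) {
  return new Promise((resolve, reject) => {
    csv
      .writeToPath("cursor_pr_overlap.csv", rows, { headers: true })
      .on("error", reject)
      .on("finish", resolve);
  });
}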
# frozen_string_literal: true

module CustomerScripts
  class IncidentIoImport
    extend Callable

    def call
      results = unprocessed_incidents
      # Guard clause: nothing to import. The body that processes `results`
      # is truncated in the original snippet.
      return if results.empty?
    end
  end
end