NOTE: The following content was copied from a GitHub Gist web page. It is a
concatenation of several unrelated fragments (a Jenkinsfile, multiple Node.js
script preambles, a SQL query, and a Ruby class) and is not a single runnable file.

pipeline {
agent any
environment {
API_TOKEN = credentials('DX_DEPLOYMENT_API_TOKEN') // Jenkins credential ID
DATACLOUD_HOST = 'yourinstance.getdx.net'
SERVICE_NAME = 'my_service'
}
options {
// Preamble for a script that generates SQL inserts for new DX identities from
// a CSV of DX/Jira matches. Two pasted scripts shared this preamble: the
// duplicate `const fs` and `const { Client }` declarations (a SyntaxError in a
// single module scope) have been merged into one binding each.
const fs = require("fs");
const { Client } = require("pg");
const csv = require("fast-csv");       // CSV parsing handle
const csvWriter = require("fast-csv"); // CSV writing handle (second fragment's alias for the same package)
const fuzz = require("fuzzball");      // fuzzy string matching
const dayjs = require("dayjs");        // date parsing/formatting

// Input CSV of matches; first CLI argument wins, with a default fallback.
const inputFile = process.argv[2] || "combined_dx_jira_matches.csv";
// Generated SQL output file.
const outputFile = "new_identity_inserts.sql";

// SECURITY: a live Postgres connection string (including a password) was
// hardcoded here. It now comes from the environment — rotate the leaked
// credential and set DX_DB_CONNECTION in the runtime environment instead.
const DX_DB_CONNECTION = process.env.DX_DB_CONNECTION;
// Canonicalize a Postgres connection URL: rewrite a leading "postgres://"
// scheme to "postgresql://"; any other URL is returned unchanged.
const normalizePostgresURL = (url) => {
  if (!url.startsWith("postgres://")) {
    return url;
  }
  return `postgresql://${url.slice("postgres://".length)}`;
};
// Script to simulate and optionally send deployment data to the DX API using
// merge_commit_shas attribution. Queries PRs merged into 'master' between the
// previous deployment and the current head commit, then generates a
// DX-compatible payload with support for dry-run CSV export.

// Required modules. The pasted original imported dotenv and node-fetch twice
// and bound `Client` twice (once via a named import, once by destructuring the
// CJS default export) — duplicate top-level bindings are a SyntaxError in an
// ES module, so the imports have been deduplicated here.
import dotenv from 'dotenv';
import fs from 'fs';
import fetch from 'node-fetch';
import pkg from 'pg';
import { DateTime } from 'luxon';

// pg is a CommonJS package; destructure Client from its default export.
const { Client } = pkg;

// Load .env before any configuration is read.
dotenv.config();

// Postgres connection string for the DX database (supplied via .env).
const DX_DB_CONNECTION = process.env.DX_DB_CONNECTION;
SELECT
du.name,
du.email,
COUNT(DISTINCT pr.id) AS number_of_prs_merged,
COUNT(DISTINCT CASE WHEN cdu.is_active THEN cdu.date END) AS cursor_days_active
FROM cursor_daily_user_metrics cdu
INNER JOIN dx_users du
ON du.email = cdu.email
INNER JOIN pull_requests pr
ON pr.dx_user_id = du.id
# frozen_string_literal: true
module CustomerScripts
class IncidentIoImport
extend Callable
def call
results = unprocessed_incidents
if results.empty?
// import_cursor_usage.js
const fs = require('fs');
const path = require('path');
const dotenv = require('dotenv');
const { Client } = require('pg');
const axios = require('axios');
const { parseISO, format, isAfter } = require('date-fns');
dotenv.config();
// import_tabnine_usage.js
const fs = require('fs');
const path = require('path');
const dotenv = require('dotenv');
const { Client } = require('pg');
const axios = require('axios');
const { parseISO, format, isAfter } = require('date-fns');
dotenv.config();
import fs from 'fs';
import path from 'path';
import dotenv from 'dotenv';
import { Client } from 'pg';
import csvParser from 'csv-parser';
dotenv.config();
const validateEnvVars = () => {
const requiredVars = ["DX_DB_CONNECTION"];