Skip to content

Instantly share code, notes, and snippets.

View mr-pascal's full-sized avatar

Pascal mr-pascal

View GitHub Profile
/**
* Creates a new instance with 'instanceId'
* @param {string} instanceId The instance ID
* @param {string} clusterId The cluster ID
* @returns {Instance} The Bigtable instance
*/
const createNewInstance = async (instanceId, clusterId) => {
const instance = bigtable.instance(instanceId);
console.log('Creating a CBT instance');
const { Bigtable, Table, Instance, Row, RawFilter } = require('@google-cloud/bigtable');
// -- MAKE SURE TO CHANGE THIS TO YOUR NEEDS!! --
const projectId = 'YOUR_GCP_PROJECT';
const keyFilename = 'YOUR_KEY_FILE.json';
// ----------------------------------------------
// Create CBT client
const bigtable = new Bigtable({
keyFilename,
const { Bigtable, Table, Instance, Row, RawFilter } = require('@google-cloud/bigtable');
// -- MAKE SURE TO CHANGE THIS TO YOUR NEEDS!! --
const projectId = 'YOUR_GCP_PROJECT';
const keyFilename = 'YOUR_KEY_FILE.json';
// ----------------------------------------------
// Create CBT client
const bigtable = new Bigtable({
keyFilename,
/**
 * One cell of a Bigtable-style row: a primitive payload plus the
 * timestamp recorded for that write.
 */
interface Cell {
  /** Cell payload — a primitive value. */
  value: string | boolean | number;
  /** Numeric write timestamp (unit not shown here — presumably ms or µs; confirm against writer). */
  timestamp: number;
}
interface Table {
[rowKey: string]: {
[columnFamily: string]: {
[columnQualifier: string]: Array<Cell>
// Imports the Google Cloud client library
const bigqueryDataTransfer = require('@google-cloud/bigquery-data-transfer');
// ------------------------------------------------
// TODO: Developer, make sure to add your own values here!
// GCP project ID — placeholder, replace before running.
const projectId = 'GCP_PROJECT_ID';
// Target BigQuery dataset ID — placeholder.
const datasetId = 'DATASET_ID';
// Path to the service-account key file used for authentication — placeholder.
const keyFilename = 'KEY_FILE.json';
// Table ID for the raw data — presumably the transfer destination; confirm against usage below.
const rawDataTable = 'RAW_DATA_TABLE_ID';
// Dataset location/region ('US' multi-region).
const datasetLocation = 'US';
const createCsvWriter = require('csv-writer').createObjectCsvWriter;
const moment = require('moment');
/**
 * Converts a string date to a BigQuery DATETIME string
 * ('YYYY-MM-DD HH:mm:ss.000000', 24-hour clock, local time).
 * @param {string} date Date in string format 'YYYY-MM-DD'
 * @returns {string} DATETIME literal usable in BigQuery tables
 */
const format = (date) => {
  const d = new Date(date);
  // Zero-pad a date component to two digits.
  const pad = (part) => String(part).padStart(2, '0');
  // NOTE: the previous moment-based version used the 'hh' token, which is a
  // 12-hour clock (midnight rendered as '12'); DATETIME needs 24-hour 'HH'.
  // getMonth() is 0-indexed, hence the +1.
  return (
    `${d.getFullYear()}-${pad(d.getMonth() + 1)}-${pad(d.getDate())} ` +
    `${pad(d.getHours())}:${pad(d.getMinutes())}:${pad(d.getSeconds())}.000000`
  );
};
--- Without Clustering ---
--- Job Statistics ---
┌─────────┬────────────────────────┬──────────┬─────────┐
│ (index) │ Description │ Value │ Unit │
├─────────┼────────────────────────┼──────────┼─────────┤
│ 0 │ 'Cache hit' │ false │ 'Bool' │
│ 1 │ 'Time taken' │ 1306 │ 'ms' │
│ 2 │ 'Partitions processed' │ 0 │ 'Count' │
│ 3 │ 'Rows read' │ 799675 │ 'Count' │
│ 4 │ 'Bytes processed' │ 9046393 │ 'Bytes' │
--- With Clustering ---
--- Job Statistics ---
┌─────────┬────────────────────────┬──────────┬─────────┐
│ (index) │ Description │ Value │ Unit │
├─────────┼────────────────────────┼──────────┼─────────┤
│ 0 │ 'Cache hit' │ false │ 'Bool' │
│ 1 │ 'Time taken' │ 215 │ 'ms' │
│ 2 │ 'Partitions processed' │ 0 │ 'Count' │
│ 3 │ 'Rows read' │ 399474 │ 'Count' │
│ 4 │ 'Bytes processed' │ 4644182 │ 'Bytes' │
// Imports the Google Cloud client library
const { BigQuery } = require('@google-cloud/bigquery');
// ------------------------------------------------
// TODO: Developer, make sure to add your own values here!
// GCP project ID — placeholder, replace before running.
const projectId = 'YOUR_GCP_PROJECT_ID';
// Target BigQuery dataset ID — placeholder.
const datasetId = 'YOUR_DATASET_ID';
// Path to the service-account key file used for authentication — placeholder.
const keyFilename = 'YOUR_KEYFILE.json';
// Dataset location/region ('US' multi-region).
const datasetLocation = 'US';
// ------------------------------------------------
const createCsvWriter = require('csv-writer').createObjectCsvWriter;
// -- Set these variables to your needs
// Total number of entries to generate (5 million).
const totalEntries = 5000000;
// Number of distinct names to draw from — presumably for synthetic test data; confirm below.
const differentNames = 1000;
// Number of distinct event types to draw from.
const differentEvents = 10;
// ---------------------------
// Accumulator for the generated rows before writing them out.
const data = [];
const csvWriter = createCsvWriter({