Export a MongoDB collection and write it to a CSV file. Uses json2csv, async/await, and dotenv.
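The script reads its target database and collection from environment variables via dotenv, so it expects a .env file alongside the script. As a minimal sketch (the variable names DB and COLLECTION come from the process.env lookups in the code; the values here are placeholders):

DB=my_database
COLLECTION=people

With that in place, run the file with node (whatever you saved it as, e.g. export-collection.js). It connects to the local MongoDB instance at mongodb://localhost:27017, pulls every document from the collection, and writes the CSV into a csv/ directory next to the script.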
require('dotenv').config();
const { AsyncParser } = require('json2csv');
const fs = require('fs');
const { MongoClient } = require('mongodb');

const url = 'mongodb://localhost:27017';

// Column titles as they should appear in the CSV.
const HEADERS = ['first_name', 'last_name', 'middle_name', 'id'];
// Dot-notation paths to the corresponding values within each document.
const FIELDS = ['name.first_name', 'name.last_name', 'name.middle_name', 'id'];
(async function() {
  const client = new MongoClient(url);
  try {
    await client.connect();
    console.log(`successfully connected to server at ${url}`);
    const db = client.db(process.env.DB); // database you want to connect to.
    const coll = db.collection(process.env.COLLECTION); // collection you want to export.
    const documents = await coll.find({}).toArray(); // return all documents in the collection.
    console.log(`found ${documents.length} documents`);
    await writeCsv(documents);
  } catch (err) {
    console.error('oh no! error occurred ', err.stack);
  } finally {
    await client.close();
  }
})();
/**
 * Build an array of objects that maps a column's title
 * to its value within a MongoDB document.
 * We can map the headers to the values because there should always be the same
 * number of each, and they should be in the same order.
 *
 * The resulting array will look something like this:
 * [
 *   {
 *     label: 'FirstName',       // how it appears in the CSV
 *     value: 'name.first_name', // where the value is found.
 *     default: null,            // what to output if the value is not found within the document.
 *   },
 *   {...}
 * ]
 */
const makeFields = () =>
  HEADERS.map((header, i) => ({
    label: header,
    value: FIELDS[i],
    default: null,
  }));
const writeCsv = records =>
  new Promise((resolve, reject) => {
    const dir = __dirname + '/csv/';
    const filename = `output_${Date.now()}.csv`;
    const path = `${dir}${filename}`;
    // Check that the /csv/ directory exists and create it if it does not.
    if (!fs.existsSync(dir)) {
      fs.mkdirSync(dir);
    }
    let csv = '';
    const fields = makeFields();
    const parser = new AsyncParser({
      fields,
      unwind: ['name'],
    });
    // Parse the data into a string.
    parser.processor
      .on('data', chunk => {
        csv += chunk.toString();
      })
      .on('end', () => {
        console.log('data has been parsed successfully!');
        // Write the resulting string (`csv`) to a file.
        fs.writeFile(path, csv, err => {
          if (err) {
            console.error('Error occurred while creating csv ', err);
            return reject(err);
          }
          console.log(`csv file written successfully at ${path}`);
          resolve(path);
        });
      })
      .on('error', err => {
        console.error('oh no! an error occurred while parsing data: ', err);
        reject(err);
      });
    parser.input.push(JSON.stringify(records)); // feed the data into our parser.
    parser.input.push(null); // Pushing `null` into a stream signals that no more data is expected and ends it.
  });
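For illustration only (this document shape is assumed from the FIELDS paths above, not taken from any real data), a document such as:

{
  id: 42,
  name: { first_name: 'Ada', middle_name: 'King', last_name: 'Lovelace' }
}

would come out of the parser as a row under the HEADERS columns, roughly:

first_name,last_name,middle_name,id
Ada,Lovelace,King,42

Exact quoting depends on json2csv's defaults, and any value missing from a document falls back to the default set in makeFields.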