Created
November 21, 2018 17:53
-
-
Save kmelve/6f180913f7153adc60866b0afe6da604 to your computer and use it in GitHub Desktop.
Indexing in Algolia using serverless functions (and observables!) https://www.sanity.io/blog/indexing-in-algolia-using-serverless-functions-and-observables
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
const algoliasearch = require('algoliasearch');
const request = require('request');
const ndjson = require('ndjson');
const {bindNodeCallback} = require('rxjs');
const {streamToRx} = require('rxjs-stream');
const {bufferCount, map, mergeMap, toArray, tap} = require('rxjs/operators');
// Algolia configuration — placeholders to fill in before deploying.
// `algoliaApp` is the Application ID from the Algolia dashboard; the admin
// API key is NOT hard-coded here — it is read from `context.secrets` below.
const algoliaApp = 'your_app_id';
// Name of the Algolia index to write to; created on first save if missing.
const algoliaIndex = 'what_you_want_to_call_your_index';
// Sanity configuration — identifies which project/dataset to export.
const projectId = 'your_project_id';
const dataset = 'your_dataset_name';
// Sanity's export endpoint streams every document in the dataset as
// newline-delimited JSON (NDJSON), which is why `ndjson` parses it below.
const sanityExportURL = `https://${projectId}.api.sanity.io/v1/data/export/${dataset}`;
module.exports = function indexContent(context, cb) { | |
// Initiate an Algolia client | |
const client = algoliasearch(algoliaApp, context.secrets.ALGOLIA_TOKEN); | |
// Initiate the Algolia index | |
const index = client.initIndex(algoliaIndex); | |
// bind the update function to use it as an observable | |
const partialUpdateObjects = bindNodeCallback((...args) => index.saveObjects(...args)); | |
streamToRx( | |
request(sanityExportURL).pipe(ndjson()) | |
).pipe( | |
/* | |
* Pick and prepare fields you want to index, | |
* here we reduce structured text to plain text | |
*/ | |
map(function sanityToAlgolia(doc) { | |
return { | |
objectID: doc._id, | |
body: blocksToText(doc.body || []), | |
blurb: blocksToText(doc.blurb || []), | |
title: doc.title, | |
name: doc.name, | |
slug: doc.slug | |
}; | |
}), | |
// buffer batches in chunks of 100 | |
bufferCount(100), | |
// 👇uncomment to console.log objects for debugging | |
// tap(console.log), | |
// submit actions, one batch at a time | |
mergeMap(docs => partialUpdateObjects(docs), 1), | |
// collect all batches and emit when the stream is complete | |
toArray() | |
) | |
.subscribe(batchResults => { | |
const totalLength = batchResults.reduce((count, batchResult) => count + batchResult.objectIDs.length, 0); | |
cb(null, `Updated ${totalLength} documents in ${batchResults.length} batches`); | |
}, cb); | |
}; |
Sign up for free to join this conversation on GitHub.
Already have an account? Sign in to comment.