Created June 19, 2019 18:16
Adapted jaydenseric/graphql-upload to work on a Lambda event. Based on https://github.com/jaydenseric/graphql-upload/blob/master/src/processRequest.mjs
const Busboy = require('busboy');
const { WriteStream } = require('fs-capacitor');
const objectPath = require('object-path');

const SPEC_URL = 'https://github.com/jaydenseric/graphql-multipart-request-spec';

const isObject = value => value && value.constructor === Object;
const isString = value => typeof value === 'string' || value instanceof String;

// Drain and silence a stream we don't care about so Busboy can keep parsing.
const ignoreStream = (stream) => {
  stream.on('error', () => {});
  stream.resume();
};

// Deferred container for one upload: exposes a promise that resolves with
// the file object once its multipart part arrives.
class Upload {
  constructor() {
    this.promise = new Promise((resolve, reject) => {
      this.resolve = (file) => {
        this.file = file;
        resolve(file);
      };
      this.reject = reject;
    });
    // Prevent unhandled rejection warnings; errors surface through the
    // individual upload promises that consumers await.
    this.promise.catch(() => {});
  }
}

module.exports.processRequest = (
  event,
  { maxFieldSize = 1000000, maxFileSize = Infinity, maxFiles = Infinity } = {},
) => new Promise((resolve, reject) => {
  // In the original middleware `released` flips to true when the HTTP
  // response finishes; a Lambda event has no response object, so it stays false.
  const released = false;
  let exitError;
  let currentStream;
  let operations;
  let operationsPath;
  let map;

  const parser = new Busboy({
    headers: {
      // API Gateway may deliver the header as 'Content-Type' or
      // 'content-type' depending on the integration, so check both.
      'content-type':
        event.headers['Content-Type'] || event.headers['content-type'],
    },
    limits: {
      fieldSize: maxFieldSize,
      fields: 2, // Only the ‘operations’ and ‘map’ fields are allowed.
      fileSize: maxFileSize,
      files: maxFiles,
    },
  });

  // Stop parsing and reject the request plus any still-pending uploads.
  const exit = (error) => {
    if (exitError) return;
    exitError = error;
    reject(new Error(exitError));
    parser.destroy();
    if (currentStream) currentStream.destroy(new Error(exitError));
    if (map) {
      for (const upload of map.values()) {
        if (!upload.file) upload.reject(new Error(exitError));
      }
    }
  };

  parser.on('file', (fieldName, stream, fileName, encoding, mimeType) => {
    if (exitError) {
      ignoreStream(stream);
      return null;
    }

    if (!map) {
      ignoreStream(stream);
      return exit(`Misordered multipart fields; files should follow ‘map’ (${SPEC_URL}).`);
    }

    currentStream = stream;
    stream.on('end', () => {
      if (currentStream === stream) currentStream = null;
    });

    const upload = map.get(fieldName);
    if (!upload) {
      // The file wasn't declared in the ‘map’ field; discard it.
      ignoreStream(stream);
      return null;
    }

    // Buffer the file through fs-capacitor so it can be read multiple
    // times, even after parsing has finished.
    const capacitor = new WriteStream();

    capacitor.on('error', () => {
      stream.unpipe();
      stream.resume();
    });

    stream.on('limit', () => {
      if (currentStream === stream) currentStream = null;
      stream.unpipe();
      capacitor.destroy(new Error('File truncated as it exceeds the size limit.'));
    });

    stream.on('error', (error) => {
      if (currentStream === stream) currentStream = null;
      stream.unpipe();
      capacitor.destroy(exitError || error);
    });

    stream.pipe(capacitor);

    const file = {
      fileName,
      mimeType,
      encoding,
      createReadStream() {
        const error = capacitor.error || (released ? exitError : null);
        if (error) throw error;
        return capacitor.createReadStream();
      },
    };

    upload.resolve(file);
  });

  parser.on('field', (fieldName, value) => {
    switch (fieldName) {
      default:
        break;
      case 'operations':
        try {
          operations = JSON.parse(value);
        } catch (error) {
          return exit(`Invalid JSON in the ‘operations’ multipart field (${SPEC_URL}).`);
        }
        if (!isObject(operations) && !Array.isArray(operations)) {
          return exit(`Invalid type for the ‘operations’ multipart field (${SPEC_URL}).`);
        }
        operationsPath = objectPath(operations);
        break;
      case 'map': {
        if (!operations) {
          return exit(`Misordered multipart fields; ‘map’ should follow ‘operations’ (${SPEC_URL}).`);
        }
        let parsedMap;
        try {
          parsedMap = JSON.parse(value);
        } catch (error) {
          return exit(`Invalid JSON in the ‘map’ multipart field (${SPEC_URL}).`);
        }
        if (!isObject(parsedMap)) {
          return exit(`Invalid type for the ‘map’ multipart field (${SPEC_URL}).`);
        }
        const mapEntries = Object.entries(parsedMap);
        if (mapEntries.length > maxFiles) {
          return exit(`${maxFiles} max file uploads exceeded.`);
        }
        map = new Map();
        for (const [fieldName, paths] of mapEntries) {
          if (!Array.isArray(paths)) {
            return exit(`Invalid type for the ‘map’ multipart field entry key ‘${fieldName}’ array (${SPEC_URL}).`);
          }
          map.set(fieldName, new Upload());
          for (const [index, path] of paths.entries()) {
            if (!isString(path)) {
              return exit(`Invalid type for the ‘map’ multipart field entry key ‘${fieldName}’ array index ‘${index}’ value (${SPEC_URL}).`);
            }
            // Replace the placeholder in ‘operations’ with the upload promise.
            operationsPath.set(path, map.get(fieldName).promise);
          }
        }
      }
    }
    return null;
  });

  parser.once('filesLimit', () => exit(`${maxFiles} max file uploads exceeded.`));

  parser.once('finish', () => {
    if (!operations) {
      return exit(`Missing multipart field ‘operations’ (${SPEC_URL}).`);
    }
    if (!map) {
      return exit(`Missing multipart field ‘map’ (${SPEC_URL}).`);
    }
    for (const upload of map.values()) {
      if (!upload.file) upload.reject(new Error('File missing in the request.'));
    }
    resolve(operations);
  });

  parser.once('error', exit);

  // Unlike the HTTP middleware, a Lambda event carries the entire body as a
  // string (possibly base64-encoded), so feed it to Busboy in a single write.
  parser.write(event.body, event.isBase64Encoded ? 'base64' : 'binary');
  parser.end();
});
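
For context, here is a minimal usage sketch. It assumes an API Gateway proxy integration, that the code above is saved as processRequest.js, and that the client mapped a single upload to variables.file per the multipart spec; the handler and response shape are illustrative only:

// handler.js — illustrative only, not part of the gist.
const { processRequest } = require('./processRequest');

module.exports.handler = async (event) => {
  // Resolves to the parsed ‘operations’ object, with every mapped file
  // placeholder replaced by a promise for the file.
  const operations = await processRequest(event, { maxFiles: 10 });

  // Assumption: the client mapped one upload to variables.file.
  const { fileName, mimeType, createReadStream } = await operations.variables.file;
  const stream = createReadStream(); // pipe this wherever needed, e.g. to S3

  return {
    statusCode: 200,
    body: JSON.stringify({ received: fileName, mimeType }),
  };
};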
It looks like this article explains how to set up the Lambda integration so that it receives event.body in the various formats:
https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-payload-encodings-configure-with-console.html
This line explains it all to me:
parser.write(event.body, event.isBase64Encoded ? 'base64' : 'binary');
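
For illustration, the event shape this write expects looks roughly like the following; the boundary and body values are placeholders, not real data:

// Hypothetical API Gateway proxy event; values are placeholders.
const exampleEvent = {
  headers: {
    'Content-Type': 'multipart/form-data; boundary=----WebKitFormBoundaryExample',
  },
  // With binary media types configured, API Gateway base64-encodes the
  // body and sets isBase64Encoded to true; otherwise body is the raw text.
  body: '<base64-encoded multipart payload>',
  isBase64Encoded: true,
};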
I've published this module today: https://github.com/koresar/graphql-upload-minimal
It's a fork of graphql-upload. The main difference is that it doesn't create temporary files on disk.
What I would recommend is adding one more exported function, graphqlUploadLambda.js, which would then pass all the necessary variables down to processRequest(). A rough sketch of what that could look like follows below.
I can help arrange a PR.
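
A sketch of that wrapper, where the graphqlUploadLambda name follows the suggestion above, the content-type check mirrors how the graphql-upload middleware skips non-multipart requests, and everything else is an assumption:

// graphqlUploadLambda.js — hypothetical wrapper, not part of any library.
const { processRequest } = require('./processRequest');

module.exports.graphqlUploadLambda = async (event, options) => {
  const contentType =
    event.headers['Content-Type'] || event.headers['content-type'] || '';
  // Only multipart requests go through upload processing; plain POSTs
  // are parsed as ordinary JSON GraphQL requests.
  if (contentType.startsWith('multipart/form-data')) {
    return processRequest(event, options);
  }
  return JSON.parse(event.body);
};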
How do I use this? Could you give an example of where to call it?