Forge resumable upload (node.js)
/////////////////////////////////////////////////////////
// Uploads an object to a bucket using the resumable
// (chunked) upload endpoint
//
// Requires in module scope:
//   const fs = require('fs')
//   const { eachLimit } = require('async')
/////////////////////////////////////////////////////////
uploadObjectChunked (
  getToken,
  bucketKey, objectKey,
  file,
  opts = {}) {

  return new Promise((resolve, reject) => {

    // chunk size defaults to 5 MB
    const chunkSize = opts.chunkSize || 5 * 1024 * 1024

    const nbChunks = Math.ceil(file.size / chunkSize)

    // [0, 1, ..., nbChunks - 1]
    const chunksMap = Array.from({
      length: nbChunks
    }, (e, i) => i)

    // generates a unique session ID shared by all chunks
    const sessionId = this.guid()

    // prepare one upload task per chunk
    const uploadTasks = chunksMap.map((chunkIdx) => {

      const start = chunkIdx * chunkSize

      const end = Math.min(
        file.size, (chunkIdx + 1) * chunkSize) - 1

      // Content-Range header value for this chunk
      const range = `bytes ${start}-${end}/${file.size}`

      const length = end - start + 1

      // stream only the [start, end] byte range of the file
      const readStream =
        fs.createReadStream(file.path, {
          start, end
        })

      const run = async () => {

        // fetch a token per chunk so long uploads do not
        // run past the token expiration
        const token = await getToken()

        return this._objectsAPI.uploadChunk(
          bucketKey, objectKey,
          length, range, sessionId,
          readStream, {},
          { autoRefresh: false }, token)
      }

      return {
        chunkIndex: chunkIdx,
        run
      }
    })

    let progress = 0

    // runs the upload tasks in parallel; the number of
    // simultaneous uploads is defined by opts.concurrentUploads
    eachLimit(uploadTasks, opts.concurrentUploads || 3,
      (task, callback) => {

        task.run().then((res) => {

          progress += 100.0 / nbChunks

          if (opts.onProgress) {
            opts.onProgress({
              progress: Math.round(progress * 100) / 100,
              chunkIndex: task.chunkIndex
            })
          }

          callback()

        }, (err) => {

          if (opts.onError) {
            opts.onError(err)
          }

          callback(err)
        })

      }, (err) => {

        if (!err && opts.onComplete) {
          opts.onComplete()
        }
      })

    // resolves immediately with the upload metadata;
    // completion and errors are reported through the
    // opts.onComplete / opts.onError callbacks above
    resolve({
      fileSize: file.size,
      bucketKey,
      objectKey,
      nbChunks
    })
  })
}
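For context, here is a minimal calling sketch. The names are hypothetical: svc stands for an instance of the class that owns uploadObjectChunked (and exposes _objectsAPI and guid()), getToken is assumed to return a valid 2-legged token, and file just mimics the { path, size } shape the method reads (e.g. as provided by an upload middleware such as multer).

const fs = require('fs')

const filePath = './office.rvt' // hypothetical sample file
const file = {
  path: filePath,
  size: fs.statSync(filePath).size
}

svc.uploadObjectChunked(
  getToken,
  'my-bucket-key',  // hypothetical bucket
  'office.rvt',     // object key
  file, {
    chunkSize: 5 * 1024 * 1024,
    concurrentUploads: 3,
    onProgress: ({ progress, chunkIndex }) => {
      console.log(`chunk ${chunkIndex} uploaded - ${progress}%`)
    },
    onError: (err) => console.error('chunk failed:', err),
    onComplete: () => console.log('all chunks uploaded')
  }).then((info) => {
    // resolves right away with the upload metadata
    console.log('upload started:', info)
  })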
I suggest you contact Autodesk support directly, as I no longer work as an advocate for that company. In addition, this upload API is deprecated and will be removed soon: https://forge.autodesk.com/blog/data-management-oss-object-storage-service-migrating-direct-s3-approach
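Since that post recommends migrating to the direct-to-S3 endpoints, here is a minimal sketch of that flow based on my reading of the OSS v2 signeds3upload documentation; the endpoint path, the uploadKey/urls response fields and any batching limits on signed URLs should be verified against the current docs. It assumes Node 18+ (built-in fetch) and a getToken helper returning an object with an access_token property.

const fs = require('fs')

async function uploadObjectS3 (getToken, bucketKey, objectKey, filePath,
                               chunkSize = 5 * 1024 * 1024) { // 5 MB, the usual S3 minimum part size

  const { size } = fs.statSync(filePath)
  const nbParts = Math.ceil(size / chunkSize)
  const token = await getToken()
  const base = 'https://developer.api.autodesk.com/oss/v2/buckets/' +
    `${bucketKey}/objects/${encodeURIComponent(objectKey)}/signeds3upload`

  // 1. request one signed URL per part (very large files may require
  //    requesting the URLs in batches - check the docs)
  const res = await fetch(`${base}?parts=${nbParts}`, {
    headers: { Authorization: `Bearer ${token.access_token}` }
  })
  const { uploadKey, urls } = await res.json()

  // 2. PUT each part straight to S3 (no Forge auth header on these requests)
  const fh = await fs.promises.open(filePath, 'r')
  try {
    for (let i = 0; i < nbParts; ++i) {
      const start = i * chunkSize
      const end = Math.min(size, start + chunkSize)
      const buf = Buffer.alloc(end - start)
      await fh.read(buf, 0, buf.length, start)
      await fetch(urls[i], { method: 'PUT', body: buf })
    }
  } finally {
    await fh.close()
  }

  // 3. tell OSS the upload is complete
  await fetch(base, {
    method: 'POST',
    headers: {
      Authorization: `Bearer ${token.access_token}`,
      'Content-Type': 'application/json'
    },
    body: JSON.stringify({ uploadKey })
  })
}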
I followed your instructions and modified the code a little on my own. It runs, but it cannot send the whole file for large files (say 300 or 600 MB). This is my code. Please help: