How to implement concurrent upload of large files in JavaScript?
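The gist contains two files: a standalone HTML page (the client) and a Node.js server built with Koa. The client computes the file's MD5 with spark-md5, asks the server whether the file already exists (instant upload, "秒传"), uploads only the missing 1 MB chunks through a small asyncPool helper that caps concurrency at 3 requests, and finally asks the server to concatenate the chunks. The server stores incoming chunks in a temporary directory keyed by the file's MD5 and merges them into public/upload on request.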
<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <meta http-equiv="X-UA-Compatible" content="ie=edge" />
    <title>Large File Concurrent Upload Demo (阿宝哥)</title>
    <script src="https://cdn.bootcdn.net/ajax/libs/axios/0.21.1/axios.min.js"></script>
    <script src="https://cdn.bootcdn.net/ajax/libs/spark-md5/3.0.0/spark-md5.min.js"></script>
  </head>
  <body>
    <input type="file" id="uploadFile" />
    <button id="submit" onclick="uploadFile()">Upload File</button>
    <script>
      const uploadFileEle = document.querySelector("#uploadFile");

      const request = axios.create({
        baseURL: "http://localhost:3000/upload",
        timeout: 10000,
      });
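
      // calcFileMD5 reads the file in 2 MB slices with FileReader and feeds each
      // ArrayBuffer into SparkMD5 incrementally, resolving with the file's MD5 hash.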
      function calcFileMD5(file) {
        return new Promise((resolve, reject) => {
          let chunkSize = 2097152, // 2 MB per slice
            chunks = Math.ceil(file.size / chunkSize),
            currentChunk = 0,
            spark = new SparkMD5.ArrayBuffer(),
            fileReader = new FileReader();

          fileReader.onload = (e) => {
            spark.append(e.target.result);
            currentChunk++;
            if (currentChunk < chunks) {
              loadNext();
            } else {
              resolve(spark.end());
            }
          };

          fileReader.onerror = (e) => {
            reject(fileReader.error);
            fileReader.abort();
          };

          function loadNext() {
            let start = currentChunk * chunkSize,
              end =
                start + chunkSize >= file.size ? file.size : start + chunkSize;
            fileReader.readAsArrayBuffer(file.slice(start, end));
          }

          loadNext();
        });
      }
      function checkFileExist(url, name, md5) {
        return request
          .get(url, {
            params: {
              name,
              md5,
            },
          })
          .then((response) => response.data);
      }
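
      // asyncPool runs iteratorFn over the items in `array`, keeping at most
      // `poolLimit` promises in flight at a time; it resolves with the results
      // in the original input order (via Promise.all over all returned promises).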
      async function asyncPool(poolLimit, array, iteratorFn) {
        const ret = [];
        const executing = [];
        for (const item of array) {
          const p = Promise.resolve().then(() => iteratorFn(item, array));
          ret.push(p);
          if (poolLimit <= array.length) {
            const e = p.then(() => executing.splice(executing.indexOf(e), 1));
            executing.push(e);
            if (executing.length >= poolLimit) {
              await Promise.race(executing);
            }
          }
        }
        return Promise.all(ret);
      }
      async function uploadFile() {
        if (!uploadFileEle.files.length) return;
        const file = uploadFileEle.files[0]; // the file to upload
        const fileMd5 = await calcFileMD5(file); // compute the file's MD5
        const fileStatus = await checkFileExist(
          // check whether the file already exists on the server
          "/exists",
          file.name,
          fileMd5
        );
        if (fileStatus.data && fileStatus.data.isExists) {
          alert("File already uploaded (instant upload)");
          return;
        } else {
          await upload({
            url: "/single",
            file,
            fileMd5,
            fileSize: file.size,
            chunkSize: 1 * 1024 * 1024,
            chunkIds: fileStatus.data.chunkIds,
            poolLimit: 3,
          });
        }
        await concatFiles("/concatFiles", file.name, fileMd5);
      }
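
      // upload splits the file into `chunkSize` slices, skips chunk indexes the
      // server already has (chunkIds), and uploads the remaining chunks through
      // asyncPool so that at most `poolLimit` requests are in flight at once.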
      function upload({
        url,
        file,
        fileMd5,
        fileSize,
        chunkSize,
        chunkIds,
        poolLimit = 1,
      }) {
        const chunks =
          typeof chunkSize === "number" ? Math.ceil(fileSize / chunkSize) : 1;
        return asyncPool(poolLimit, [...new Array(chunks).keys()], (i) => {
          if (chunkIds.indexOf(i + "") !== -1) {
            // skip chunks that have already been uploaded
            return Promise.resolve();
          }
          let start = i * chunkSize;
          let end = i + 1 == chunks ? fileSize : (i + 1) * chunkSize;
          const chunk = file.slice(start, end);
          return uploadChunk({
            url,
            chunk,
            chunkIndex: i,
            fileMd5,
            fileName: file.name,
          });
        });
      }
      function uploadChunk({ url, chunk, chunkIndex, fileMd5, fileName }) {
        let formData = new FormData();
        formData.set("file", chunk, fileMd5 + "-" + chunkIndex);
        formData.set("name", fileName);
        formData.set("timestamp", Date.now());
        return request.post(url, formData);
      }

      function concatFiles(url, name, md5) {
        return request.get(url, {
          params: {
            name,
            md5,
          },
        });
      }
    </script>
  </body>
</html>
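
The asyncPool helper above is what caps concurrency. For reference, here is a minimal standalone sketch of how it behaves, assuming the asyncPool function above is in scope; the delay helper and the millisecond values are made up purely for illustration:

const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

// With poolLimit = 2, at most two delay() calls are pending at any moment;
// results still come back in input order because asyncPool returns Promise.all(ret).
asyncPool(2, [1000, 5000, 3000, 2000], (ms) => delay(ms).then(() => ms)).then(
  (results) => console.log(results) // [1000, 5000, 3000, 2000]
);

The matching Node.js server (Koa) is below. It implements the /upload/exists, /upload/single, and /upload/concatFiles endpoints used by the client, stores incoming chunks under tmp/<md5>, and serves merged files from public/upload.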
const fs = require("fs");
const path = require("path");
const util = require("util");
const Koa = require("koa");
const cors = require("@koa/cors");
const multer = require("@koa/multer");
const Router = require("@koa/router");
const serve = require("koa-static");
const fse = require("fs-extra");

const readdir = util.promisify(fs.readdir);
const unlink = util.promisify(fs.unlink);

const app = new Koa();
const router = new Router();

const TMP_DIR = path.join(__dirname, "tmp"); // temporary directory for uploaded chunks
const UPLOAD_DIR = path.join(__dirname, "/public/upload");
const IGNORES = [".DS_Store"]; // file names to ignore when listing chunks
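
// Each chunk is named "<md5>-<index>" by the client; multer stores it under
// tmp/<md5>/ with the chunk index as the file name.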
const storage = multer.diskStorage({
  destination: async function (req, file, cb) {
    let fileMd5 = file.originalname.split("-")[0];
    const fileDir = path.join(TMP_DIR, fileMd5);
    await fse.ensureDir(fileDir);
    cb(null, fileDir);
  },
  filename: function (req, file, cb) {
    let chunkIndex = file.originalname.split("-")[1];
    cb(null, `${chunkIndex}`);
  },
});

const multerUpload = multer({ storage });
router.get("/", async (ctx) => { | |
ctx.body = "大文件并发上传示例(阿宝哥)"; | |
}); | |
router.get("/upload/exists", async (ctx) => { | |
const { name: fileName, md5: fileMd5 } = ctx.query; | |
const filePath = path.join(UPLOAD_DIR, fileName); | |
const isExists = await fse.pathExists(filePath); | |
if (isExists) { | |
ctx.body = { | |
status: "success", | |
data: { | |
isExists: true, | |
url: `http://localhost:3000/${fileName}`, | |
}, | |
}; | |
} else { | |
let chunkIds = []; | |
const chunksPath = path.join(TMP_DIR, fileMd5); | |
const hasChunksPath = await fse.pathExists(chunksPath); | |
if (hasChunksPath) { | |
let files = await readdir(chunksPath); | |
chunkIds = files.filter((file) => { | |
return IGNORES.indexOf(file) === -1; | |
}); | |
} | |
ctx.body = { | |
status: "success", | |
data: { | |
isExists: false, | |
chunkIds, | |
}, | |
}; | |
} | |
}); | |
router.post( | |
"/upload/single", | |
multerUpload.single("file"), | |
async (ctx, next) => { | |
ctx.body = { | |
code: 1, | |
data: ctx.file, | |
}; | |
} | |
); | |
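
// Merge all stored chunks of the file (tmp/<md5>) into public/upload/<name>,
// then return the URL where the merged file is served.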
router.get("/upload/concatFiles", async (ctx) => { | |
const { name: fileName, md5: fileMd5 } = ctx.query; | |
await concatFiles( | |
path.join(TMP_DIR, fileMd5), | |
path.join(UPLOAD_DIR, fileName) | |
); | |
ctx.body = { | |
status: "success", | |
data: { | |
url: `http://localhost:3000/${fileName}`, | |
}, | |
}; | |
}); | |
async function concatFiles(sourceDir, targetPath) { | |
const readFile = (file, ws) => | |
new Promise((resolve, reject) => { | |
fs.createReadStream(file) | |
.on("data", (data) => ws.write(data)) | |
.on("end", resolve) | |
.on("error", reject); | |
}); | |
const files = await readdir(sourceDir); | |
const sortedFiles = files | |
.filter((file) => { | |
return IGNORES.indexOf(file) === -1; | |
}) | |
.sort((a, b) => a - b); | |
const writeStream = fs.createWriteStream(targetPath); | |
for (const file of sortedFiles) { | |
let filePath = path.join(sourceDir, file); | |
await readFile(filePath, writeStream); | |
await unlink(filePath); // 删除已合并的分块 | |
} | |
writeStream.end(); | |
} | |
// Register middleware
app.use(cors());
app.use(serve(UPLOAD_DIR));
app.use(router.routes()).use(router.allowedMethods());

app.listen(3000, () => {
  console.log("app starting at port 3000");
});
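
To try the demo: install the server dependencies (koa, @koa/cors, @koa/multer, @koa/router, koa-static, fs-extra) and run the server file with Node.js (the gist does not name the file; something like server.js works). The public/upload directory must exist, since concatFiles writes the merged file there with a plain write stream. Then open the HTML page in a browser and pick a file: chunks land in tmp/<md5>/ while uploading, and the merged file is served from http://localhost:3000/<file name>.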