Add base64 upload support
@@ -6,8 +6,12 @@ import path from "path"
 import util from "util"
 import config from "config"
 import autobind from "autobind-decorator"
+import Buffer from "safe-buffer"
+import B64 from "b64"
+import { PassThrough } from "stream"
+import { catchAll } from "."

-function pipeToGridFS(readable, gfsWriteable) {
+function pipeToGridFS(readable, gfsWriteable, decoder) {
   const promise = new Promise((resolve, reject) => {
     readable.on("error", (error) => {
       reject(error)
@@ -19,23 +23,27 @@ function pipeToGridFS(readable, gfsWriteable) {
       resolve(file)
     })
   })
-  readable.pipe(gfsWriteable)
+  readable.pipe(decoder).pipe(gfsWriteable)
   return promise
 }

 @autobind
 export class AssetRoutes {
-  static rangeRegex = /^byte (\d+)/
+  static rangeRegex = /^(byte|base64) (\d+)/

   constructor(container) {
     const app = container.app

+    this.log = container.log
     this.db = container.db
     this.rs = container.rs
     this.uploadTimeout = config.get("api.uploadTimout")
     app
       .route("/assets/:_id")
-      .get(passport.authenticate("bearer", { session: false }), this.getAsset)
+      .get(
+        passport.authenticate("bearer", { session: false }),
+        catchAll(this.getAsset)
+      )
       .delete(
         passport.authenticate("bearer", { session: false }),
         this.deleteAsset
@@ -45,221 +53,217 @@ export class AssetRoutes {
       .route("/assets/upload")
       .post(
         passport.authenticate("bearer", { session: false }),
-        this.beginAssetUpload
+        catchAll(this.beginAssetUpload)
       )

     app
       .route("/assets/upload/:_id")
       .post(
         passport.authenticate("bearer", { session: false }),
-        this.continueAssetUpload
+        catchAll(this.continueAssetUpload)
       )
   }

-  getAsset(req, res, next) {
+  async getAsset(req, res, next) {
     const assetId = req.params._id

-    this.db.gridfs
-      .findOneAsync({ _id: assetId })
-      .then((file) => {
-        if (!file) {
-          return next(createError.NotFound(`Asset ${assetId} was not found`))
-        }
-
-        const ifNoneMatch = req.get("If-None-Match")
-
-        if (ifNoneMatch && ifNoneMatch === file.md5) {
-          res
-            .status(304)
-            .set({
-              ETag: file.md5,
-              "Cache-Control": "private,max-age=86400",
-            })
-            .end()
-          return
-        }
-
-        res.status(200).set({
-          "Content-Type": file.contentType,
-          "Content-Length": file.length,
-          ETag: file.md5,
-        })
-
-        this.db.gridfs.createReadStream({ _id: file._id }).pipe(res)
-      })
-      .catch((err) => {
-        next(
-          createError.BadRequest(
-            `Error returning asset '${assetId}'. ${err.message}`
-          )
-        )
-      })
+    const file = await this.db.gridfs.findOneAsync({ _id: assetId })
+
+    if (!file) {
+      throw createError.NotFound(`Asset ${assetId} was not found`)
+    }
+
+    const ifNoneMatch = req.get("If-None-Match")
+
+    if (ifNoneMatch && ifNoneMatch === file.md5) {
+      res
+        .status(304)
+        .set({
+          ETag: file.md5,
+          "Cache-Control": "private,max-age=86400",
+        })
+        .end()
+      return
+    }
+
+    res.status(200).set({
+      "Content-Type": file.contentType,
+      "Content-Length": file.length,
+      ETag: file.md5,
+    })
+
+    this.db.gridfs.createReadStream({ _id: file._id }).pipe(res)
   }

-  deleteAsset(req, res, next) {
+  async deleteAsset(req, res, next) {
     const assetId = req.params._id

-    this.db.gridfs
-      .removeAsync({ _id: assetId })
-      .then(() => {
-        res.json({})
-      })
-      .catch((err) => {
-        next(
-          createError.BadRequest(
-            `Unable to delete asset '${assetId}'. ${err.message}`
-          )
-        )
-      })
+    await this.db.gridfs.removeAsync({ _id: assetId })
+
+    res.json({})
   }

-  beginAssetUpload(req, res, next) {
+  async beginAssetUpload(req, res, next) {
     const uploadId = this.db.newObjectId()
-    let { fileName, fileSize, numberOfChunks, contentType } = req.body
+    let {
+      fileName,
+      uploadSize,
+      numberOfChunks,
+      contentType,
+      chunkContentType,
+    } = req.body

-    if (!fileName || !fileSize || !numberOfChunks || !contentType) {
-      return next(
-        createError.BadRequest(
-          "Must specify fileName, fileSize, numberOfChunks and Content-Type header"
-        )
-      )
+    if (!fileName || !uploadSize || !numberOfChunks || !contentType) {
+      throw createError.BadRequest(
+        "Must specify fileName, uploadSize, numberOfChunks, contentType"
+      )
     }

+    if (chunkContentType) {
+      if (
+        chunkContentType !== "application/octet-stream" &&
+        chunkContentType !== "application/base64"
+      ) {
+        throw createError.BadRequest(
+          "chunkContentType must be application/octet-stream or application/base64"
+        )
+      }
+    } else {
+      chunkContentType = "application/octet-stream"
+    }
+
     fileName = uploadId + "-" + path.basename(fileName)

-    this.rs
-      .setAsync(
-        uploadId,
-        JSON.stringify({
-          fileName,
-          fileSize,
-          numberOfChunks,
-          contentType,
-        }),
-        "EX",
-        this.uploadTimeout
-      )
-      .then(() => {
-        res.json({ uploadId })
-      })
-      .catch((error) => {
-        next(createError.InternalServerError(error.message))
-      })
+    await this.rs.setAsync(
+      uploadId,
+      JSON.stringify({
+        fileName,
+        uploadSize,
+        numberOfChunks,
+        contentType,
+        chunkContentType,
+      }),
+      "EX",
+      this.uploadTimeout
+    )
+
+    res.json({ uploadId })
   }

-  continueAssetUpload(req, res, next) {
-    if (!(req.body instanceof Buffer)) {
-      return next(
-        createError.BadRequest("Body must be of type application/octet-stream")
-      )
-    }
-
-    const range = req.get("Range")
-    const contentLength = req.get("Content-Length")
-    let match = range.match(AssetRoutes.rangeRegex)
-    let offset = null
-
-    if (!match || match.length < 2 || (offset = parseInt(match[1])) === NaN) {
-      return next(
-        createError.BadRequest(
-          "Range header must be supplied and of form 'byte <offset>'"
-        )
-      )
-    }
-
-    if (parseInt(contentLength, 10) !== req.body.length) {
-      return next(
-        createError.BadRequest(
-          "Must supply Content-Length header matching length of request body"
-        )
-      )
-    }
-
-    const uploadId = req.params._id
-    const uploadCountId = uploadId + "$#"
-    const uploadDataId = uploadId + "$@"
-
-    this.rs
-      .getAsync(uploadId)
-      .then((content) => {
-        let uploadData = null
-
-        try {
-          uploadData = JSON.parse(content)
-        } catch (error) {
-          return Promise.reject(new Error("Could not parse upload data"))
-        }
-
-        if (offset < 0 || offset + req.body.length > uploadData.fileSize) {
-          return Promise.reject(
-            new Error(`Illegal range offset ${offset} given`)
-          )
-        }
-
-        Promise.all([
-          this.rs.setrangeAsync(uploadDataId, offset, req.body),
-          this.rs.incrAsync(uploadCountId),
-        ])
-          .then((arr) => {
-            const uploadedChunks = arr[1]
-            let chunkInfo = {
-              numberOfChunks: uploadData.numberOfChunks,
-              uploadedChunks,
-            }
-
-            if (uploadedChunks >= uploadData.numberOfChunks) {
-              let readable = redisReadStream(
-                this.rs.client,
-                Buffer(uploadDataId)
-              )
-              let writeable = this.db.gridfs.createWriteStream({
-                _id: uploadId,
-                filename: uploadData.fileName,
-                content_type: uploadData.contentType,
-              })
-
-              let promise = pipeToGridFS(readable, writeable)
-                .then((file) => {
-                  return Promise.all([
-                    Promise.resolve(file),
-                    this.rs.del(uploadId),
-                    this.rs.del(uploadCountId),
-                    this.rs.del(uploadDataId),
-                  ])
-                })
-                .then((arr) => {
-                  const [file] = arr
-                  res.json({
-                    assetId: file._id,
-                    fileName: file.filename,
-                    contentType: file.contentType,
-                    uploadDate: file.uploadDate,
-                    md5: file.md5,
-                    ...chunkInfo,
-                  })
-                }) // TODO: Test that this will be caught...
-              return promise
-            } else {
-              return Promise.all([
-                this.rs.expireAsync(uploadId, this.uploadTimeout),
-                this.rs.expireAsync(uploadCountId, this.uploadTimeout),
-                this.rs.expireAsync(uploadDataId, this.uploadTimeout),
-              ]).then(() => {
-                res.json(chunkInfo)
-              })
-            }
-          })
-          .catch((error) => {
-            this.rs.del(uploadId)
-            this.rs.del(uploadCountId)
-            this.rs.del(uploadDataId)
-            console.error(error) // TODO: This should go into log file
-            next(createError.BadRequest("Unable to upload data chunk"))
-          })
-      })
-      .catch((error) => {
-        console.error(error) // TODO: This should go into log file
-        next(createError.BadRequest(error.message))
-      })
+  async continueAssetUpload(req, res, next) {
+    const uploadId = req.params._id
+    const uploadCountId = uploadId + "$#"
+    const uploadDataId = uploadId + "$@"
+    const content = await this.rs.getAsync(uploadId)
+    const uploadData = JSON.parse(content)
+    const contentType = req.get("Content-Type")
+    const contentRange = req.get("Content-Range")
+    const contentLength = req.get("Content-Length")
+
+    console.log(uploadData)
+
+    if (contentType !== uploadData.chunkContentType) {
+      throw createError.BadRequest(
+        `Content-Type ${contentType} does not match chunk type ${
+          uploadData.chunkContentType
+        }`
+      )
+    }
+
+    if (parseInt(contentLength, 10) !== req.body.length) {
+      throw createError.BadRequest(
+        "Must supply Content-Length header matching length of request body"
+      )
+    }
+
+    let match = contentRange.match(AssetRoutes.rangeRegex)
+
+    if (!match || match.length !== 3) {
+      throw createError.BadRequest(
+        "Content-Range header must be supplied and of form '[byte|base64] <offset>'"
+      )
+    }
+
+    const [, contentOffsetUnit, contentOffset] = match
+
+    if (
+      (uploadData.chunkContentType === "application/octet-stream" &&
+        contentOffsetUnit !== "byte") ||
+      (uploadData.chunkContentType === "application/base64" &&
+        contentOffsetUnit !== "base64")
+    ) {
+      throw createError.BadRequest(
+        `Content-Range offset unit must be ${
+          uploadData.chunkContentType === "application/base64"
+            ? "base64"
+            : "byte"
+        }`
+      )
+    }
+
+    let offset = Number.parseInt(contentOffset)
+
+    if (offset < 0 || offset + req.body.length > uploadData.uploadSize) {
+      throw createError.BadRequest(
+        `Illegal Content-Range ${contentOffsetUnit} ${contentOffset} and Content-Length ${contentLength} for upload size ${
+          uploadData.uploadSize
+        }`
+      )
+    }
+
+    try {
+      const [, uploadedChunks] = await Promise.all([
+        this.rs.setrangeAsync(uploadDataId, offset, req.body),
+        this.rs.incrAsync(uploadCountId),
+      ])
+      const chunkInfo = {
+        numberOfChunks: uploadData.numberOfChunks,
+        uploadedChunks,
+      }
+
+      if (uploadedChunks >= uploadData.numberOfChunks) {
+        let readable = redisReadStream(this.rs.client, uploadDataId)
+        let writeable = this.db.gridfs.createWriteStream({
+          _id: uploadId,
+          filename: uploadData.fileName,
+          content_type: uploadData.contentType,
+        })
+
+        const decoder =
+          uploadData.chunkContentType === "application/base64"
+            ? new B64.Decoder()
+            : new PassThrough()
+        const file = await pipeToGridFS(readable, writeable, decoder)
+
+        await Promise.all([
+          this.rs.del(uploadId),
+          this.rs.del(uploadCountId),
+          this.rs.del(uploadDataId),
+        ])
+
+        res.json({
+          assetId: file._id,
+          fileName: file.filename,
+          contentType: file.contentType,
+          uploadDate: file.uploadDate,
+          md5: file.md5,
+          ...chunkInfo,
+        })
+      } else {
+        await Promise.all([
+          this.rs.expireAsync(uploadId, this.uploadTimeout),
+          this.rs.expireAsync(uploadCountId, this.uploadTimeout),
+          this.rs.expireAsync(uploadDataId, this.uploadTimeout),
+        ])
+
+        res.json(chunkInfo)
+      }
+    } catch (error) {
+      this.rs.del(uploadId)
+      this.rs.del(uploadCountId)
+      this.rs.del(uploadDataId)
+      this.log.error(error.message)
+      throw error
+    }
   }
 }
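
For anyone wiring up a client, the flow the new routes expect looks roughly like the sketch below. The host, bearer token, file name, and two-chunk split are illustrative assumptions, and it assumes Node 18+ for the global fetch and that the app parses JSON bodies; the field names, headers, and the base64 offset unit come from the routes themselves. Note that base64 offsets count characters of the encoded payload, since the server SETRANGEs each raw chunk body into Redis and only decodes once the last chunk arrives.

import fs from "fs"

// Illustrative values; swap in a real host, token, and file.
const base = "http://localhost:3000"
const auth = { Authorization: "Bearer <token>" }

async function main() {
  // Encode the whole file once, then slice the *encoded* text into chunks.
  const encoded = fs.readFileSync("photo.jpg").toString("base64")
  const half = Math.ceil(encoded.length / 2)
  const chunks = [encoded.slice(0, half), encoded.slice(half)]

  // Begin: register the upload and receive an uploadId.
  let res = await fetch(`${base}/assets/upload`, {
    method: "POST",
    headers: { ...auth, "Content-Type": "application/json" },
    body: JSON.stringify({
      fileName: "photo.jpg",
      uploadSize: encoded.length, // size of the *encoded* payload
      numberOfChunks: chunks.length,
      contentType: "image/jpeg",
      chunkContentType: "application/base64",
    }),
  })
  const { uploadId } = await res.json()

  // Continue: send each chunk with its offset in encoded characters.
  // The runtime sets Content-Length from the string body.
  let offset = 0
  for (const chunk of chunks) {
    res = await fetch(`${base}/assets/upload/${uploadId}`, {
      method: "POST",
      headers: {
        ...auth,
        "Content-Type": "application/base64",
        "Content-Range": `base64 ${offset}`,
      },
      body: chunk,
    })
    console.log(await res.json()) // chunk counts, then the stored asset
    offset += chunk.length
  }
}

main().catch(console.error)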
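
One subtlety the chunkContentType option introduces: the server concatenates the chunk bodies in Redis and decodes the result as a single base64 stream, so a client should encode the file once and slice the encoded text (as above) rather than base64-encoding each raw slice on its own. Per RFC 4648, "=" padding is only valid at the very end of a stream, so independently encoded slices can be expected to corrupt the decode unless every raw slice length happens to be a multiple of 3. A quick illustration:

const raw = Buffer.from("hello world!") // 12 bytes

// Safe: encode once, slice the encoded text; the pieces concatenate
// back into one valid base64 stream.
const whole = raw.toString("base64") // "aGVsbG8gd29ybGQh"
const joined = whole.slice(0, 8) + whole.slice(8)
console.log(Buffer.from(joined, "base64").toString()) // "hello world!"

// Unsafe: a 7-byte slice encodes to "aGVsbG8gdw==", whose trailing "="
// padding lands mid-stream after concatenation.
const bad =
  raw.subarray(0, 7).toString("base64") + raw.subarray(7).toString("base64")
// bad === "aGVsbG8gdw==b3JsZCE=" — not a single valid base64 value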