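// Asset routes: downloads and deletes are served from MongoDB GridFS, while uploads
// arrive as chunks that are staged in Redis and assembled into GridFS once complete.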
import passport from "passport"
import redis from "redis"
import redisReadStream from "redis-rstream"
import createError from "http-errors"
import path from "path"
import util from "util"
import config from "config"
import autobind from "autobind-decorator"
import { PassThrough } from "stream"
import { catchAll } from "."
import { pipeToPromise } from "../../util"

@autobind
export class AssetRoutes {
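  // Matches the non-standard Content-Range form "byte <offset>" used by chunk uploads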
  static rangeRegex = /^byte (\d+)/

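  // Registers the asset routes on the app provided by the dependency container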
  constructor(container) {
    const app = container.app

    this.log = container.log
    this.db = container.db
    this.rs = container.rs
    this.uploadTimeout = config.get("api.uploadTimout")
    app
      .route("/assets/:_id")
      .get(
        passport.authenticate("bearer", { session: false }),
        catchAll(this.getAsset)
      )
      .delete(
        passport.authenticate("bearer", { session: false }),
        catchAll(this.deleteAsset)
      )

    app
      .route("/assets/upload")
      .post(
        passport.authenticate("bearer", { session: false }),
        catchAll(this.beginAssetUpload)
      )

    app
      .route("/assets/upload/:_id")
      .post(
        passport.authenticate("bearer", { session: false }),
        catchAll(this.continueAssetUpload)
      )
  }

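  // Streams an asset out of GridFS, answering 304 when the client's If-None-Match
  // matches the stored MD5 (used as the ETag)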
  async getAsset(req, res, next) {
    let assetId = req.params._id
    const extIndex = assetId.indexOf(".")

    if (extIndex !== -1) {
      // TODO: Should really check the index against the requested extension...
      assetId = assetId.slice(0, extIndex)
    }

    const cursor = await this.db.gridfs.find({ _id: assetId })
    const file = await cursor.next()

    if (!file) {
      throw createError.NotFound(`Asset ${assetId} was not found`)
    }

    const ifNoneMatch = req.get("If-None-Match")

    if (ifNoneMatch && ifNoneMatch === file.md5) {
      res
        .status(304)
        .set({
          ETag: file.md5,
          "Cache-Control": "private,max-age=86400",
        })
        .end()
      return
    }

    res.status(200).set({
      "Content-Type": file.contentType,
      "Content-Length": file.length,
      ETag: file.md5,
    })

    this.db.gridfs.openDownloadStream(file._id).pipe(res)
  }

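  // Deletes an asset from GridFS by id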
  async deleteAsset(req, res, next) {
    const assetId = req.params._id

    await this.db.gridfs.delete(assetId)

    res.json({})
  }

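  // Starts a chunked upload: validates the request body, allocates an upload id, and
  // stages the upload's metadata in Redis until its chunks arrive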
  async beginAssetUpload(req, res, next) {
    const uploadId = this.db.newObjectId().toString()
    let {
      fileName,
      uploadSize,
      numberOfChunks,
      contentType,
      chunkContentType,
    } = req.body

    if (!uploadSize || !numberOfChunks || !contentType) {
      throw createError.BadRequest(
        "Must specify uploadSize, numberOfChunks, contentType"
      )
    }

    if (fileName) {
      fileName = uploadId + "-" + path.basename(fileName)
    } else {
      fileName = uploadId
    }

    if (chunkContentType) {
      if (
        chunkContentType !== "application/octet-stream" &&
        chunkContentType !== "application/base64"
      ) {
        throw createError.BadRequest(
          "chunkContentType must be 'application/octet-stream' or 'application/base64'"
        )
      }
    } else {
      chunkContentType = "application/octet-stream"
    }

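    // Store the upload metadata in Redis with an expiry so abandoned uploads clean themselves up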
    await this.rs.setAsync(
      uploadId,
      JSON.stringify({
        fileName,
        uploadSize,
        numberOfChunks,
        contentType,
        chunkContentType,
      }),
      "EX",
      this.uploadTimeout
    )

    res.json({ uploadId })
  }

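  // Accepts a single chunk of an in-progress upload, writes it into the Redis staging
  // buffer at the offset given by Content-Range, and finalizes the asset into GridFS
  // once every chunk has been received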
  async continueAssetUpload(req, res, next) {
    const uploadId = req.params._id
    const uploadCountId = uploadId + "$#"
    const uploadDataId = uploadId + "$@"
    const content = await this.rs.getAsync(uploadId)
    const uploadData = JSON.parse(content)
    const contentType = req.get("Content-Type")
    const contentRange = req.get("Content-Range")
    const contentLength = req.get("Content-Length")

    if (!uploadData) {
      throw createError.BadRequest(`Bad upload id ${uploadId}`)
    }

    try {
      if (!contentType.startsWith(uploadData.chunkContentType)) {
        throw createError.BadRequest(
          `Content-Type ${contentType} does not match chunk type ${
            uploadData.chunkContentType
          }`
        )
      }

      if (parseInt(contentLength, 10) !== req.body.length) {
        throw createError.BadRequest(
          "Must supply Content-Length header matching length of request body"
        )
      }

      let match = contentRange.match(AssetRoutes.rangeRegex)

      if (!match || match.length !== 2) {
        throw createError.BadRequest(
          "Content-Range header must be supplied and of form 'byte <offset>'"
        )
      }

      const [, contentOffset] = match
      let offset = Number.parseInt(contentOffset)

      const data =
        uploadData.chunkContentType === "application/base64"
          ? Buffer.from(req.body, "base64")
          : req.body

      if (offset < 0 || offset + data.length > uploadData.uploadSize) {
        throw createError.BadRequest(
          `Illegal Content-Range 'byte ${contentOffset}' and Content-Length ${contentLength} for upload size ${
            uploadData.uploadSize
          }`
        )
      }

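      // Write the chunk into the staging buffer at its byte offset and bump the received-chunk counter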
      const [, uploadedChunks] = await Promise.all([
        this.rs.setrangeAsync(uploadDataId, offset, data),
        this.rs.incrAsync(uploadCountId),
      ])
      const chunkInfo = {
        numberOfChunks: uploadData.numberOfChunks,
        uploadedChunks,
      }

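      // Either finalize the upload (all chunks received) by streaming the assembled data
      // from Redis into GridFS, or refresh the staging keys' expiry and report progress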
      if (uploadedChunks >= uploadData.numberOfChunks) {
        let readable = redisReadStream(this.rs.client, uploadDataId)
        let writeable = this.db.gridfs.openUploadStreamWithId(
          this.db.newObjectId(uploadId),
          uploadData.fileName,
          { contentType: uploadData.contentType }
        )

        const file = await pipeToPromise(readable, writeable)

        await Promise.all([
          this.rs.del(uploadId),
          this.rs.del(uploadCountId),
          this.rs.del(uploadDataId),
        ])

        res.json({
          assetId: file._id,
          fileName: file.filename,
          contentType: file.contentType,
          uploadDate: file.uploadDate,
          md5: file.md5,
          ...chunkInfo,
        })
      } else {
        await Promise.all([
          this.rs.expireAsync(uploadId, this.uploadTimeout),
          this.rs.expireAsync(uploadCountId, this.uploadTimeout),
          this.rs.expireAsync(uploadDataId, this.uploadTimeout),
        ])

        res.json(chunkInfo)
      }
    } catch (error) {
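      // On any failure, drop the staged upload state so the client has to start over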
      this.rs.del(uploadId)
      this.rs.del(uploadCountId)
      this.rs.del(uploadDataId)
      throw error
    }
  }
}