Initial commit
This commit is contained in:
199
server/src/api/routes/AssetRoutes.js
Normal file
199
server/src/api/routes/AssetRoutes.js
Normal file
@@ -0,0 +1,199 @@
|
||||
import passport from 'passport'
|
||||
import redis from 'redis'
|
||||
import redisReadStream from 'redis-rstream'
|
||||
import createError from 'http-errors'
|
||||
import path from 'path'
|
||||
import util from 'util'
|
||||
import config from 'config'
|
||||
import autoBind from 'auto-bind2'
|
||||
|
||||
// Pipes `readable` into a GridFS write stream. Resolves with whatever the
// write stream's 'close' event delivers (the stored file metadata for
// gridfs-stream writeables); rejects if either stream emits 'error'.
function pipeToGridFS(readable, gfsWriteable) {
  const done = new Promise((resolve, reject) => {
    const fail = (error) => reject(error)

    readable.on('error', fail)
    gfsWriteable.on('error', fail)
    gfsWriteable.on('close', (file) => resolve(file))
  })

  readable.pipe(gfsWriteable)
  return done
}
|
||||
|
||||
/**
 * REST routes for binary asset storage.
 *
 * Assets are uploaded in chunks: `beginAssetUpload` records upload metadata
 * in Redis, `continueAssetUpload` stages each chunk into a Redis string via
 * SETRANGE, and once every chunk has arrived the assembled bytes are streamed
 * into MongoDB GridFS. `getAsset`/`deleteAsset` serve and remove stored files.
 */
export class AssetRoutes {
  // Matches the custom chunk-upload header 'Range: byte <offset>';
  // capture group 1 is the decimal byte offset.
  static rangeRegex = /^byte (\d+)/

  /**
   * Wires the asset routes onto the Express app. All routes require a
   * passport bearer token.
   * @param {Object} container - DI container providing `app` (Express app),
   *   `db` (Mongo wrapper exposing `gridfs` and `newObjectId`), and `rs`
   *   (Redis wrapper exposing promisified `*Async` methods and `client`).
   */
  constructor(container) {
    const app = container.app

    this.db = container.db
    this.rs = container.rs
    // NOTE(review): the key is spelled 'uploadTimout' (sic); it must match the
    // config files, so the misspelling is preserved here. Value is in seconds
    // (passed to Redis EX/EXPIRE below).
    this.uploadTimeout = config.get('api.uploadTimout')
    autoBind(this)
    app.route('/assets/:_id')
      .get(passport.authenticate('bearer', { session: false }), this.getAsset)
      .delete(passport.authenticate('bearer', { session: false }), this.deleteAsset)

    app.route('/assets/upload')
      .post(passport.authenticate('bearer', { session: false }), this.beginAssetUpload)

    app.route('/assets/upload/:_id')
      .post(passport.authenticate('bearer', { session: false }), this.continueAssetUpload)
  }

  /**
   * GET /assets/:_id — streams an asset out of GridFS.
   * Honors If-None-Match against the stored MD5 and answers 304 with cache
   * headers when the client's copy is current; otherwise streams the file
   * with Content-Type/Content-Length/ETag set.
   */
  getAsset(req, res, next) {
    const assetId = req.params._id

    this.db.gridfs.findOneAsync({ _id: assetId }).then((file) => {
      if (!file) {
        return next(createError.NotFound(`Asset ${assetId} was not found`))
      }

      const ifNoneMatch = req.get('If-None-Match')

      if (ifNoneMatch && ifNoneMatch === file.md5) {
        res.status(304).set({
          'ETag': file.md5,
          'Cache-Control': 'private,max-age=86400'
        }).end()
        return
      }

      res.status(200).set({
        'Content-Type': file.contentType,
        'Content-Length': file.length,
        'ETag': file.md5
      })

      this.db.gridfs.createReadStream({ _id: file._id }).pipe(res)
    }).catch((err) => {
      next(createError.BadRequest(`Error returning asset '${assetId}'. ${err.message}`))
    })
  }

  /**
   * DELETE /assets/:_id — removes an asset from GridFS.
   * Responds with an empty JSON object on success.
   */
  deleteAsset(req, res, next) {
    const assetId = req.params._id

    this.db.gridfs.removeAsync({ _id: assetId }).then(() => {
      res.json({})
    }).catch((err) => {
      next(createError.BadRequest(`Unable to delete asset '${assetId}'. ${err.message}`))
    })
  }

  /**
   * POST /assets/upload — starts a chunked upload.
   * Body must supply fileName, fileSize, numberOfChunks and contentType.
   * Stores that metadata in Redis under a fresh ObjectId (expiring after
   * `uploadTimeout` seconds) and returns `{ uploadId }`.
   */
  beginAssetUpload(req, res, next) {
    const uploadId = this.db.newObjectId()
    let { fileName, fileSize, numberOfChunks, contentType } = req.body

    if (!fileName || !fileSize || !numberOfChunks || !contentType) {
      return next(createError.BadRequest('Must specify fileName, fileSize, numberOfChunks and Content-Type header'))
    }

    // Prefix with the upload id so concurrent uploads of the same file name
    // cannot collide; basename() strips any client-supplied directory parts.
    fileName = uploadId + '-' + path.basename(fileName)

    this.rs.setAsync(
      uploadId, JSON.stringify({
        fileName, fileSize, numberOfChunks, contentType
      }), 'EX', this.uploadTimeout).then(() => {
      res.json({ uploadId })
    }).catch((error) => {
      next(createError.InternalServerError(error.message))
    })
  }

  /**
   * POST /assets/upload/:_id — uploads one chunk of a started upload.
   * The raw chunk bytes are the request body (application/octet-stream) and
   * the custom 'Range: byte <offset>' header gives the chunk's byte offset.
   * Chunks are staged in Redis; when the final chunk arrives the assembled
   * data is piped into GridFS, the staging keys are deleted, and the stored
   * file's metadata is returned. Otherwise the staging keys' TTLs are
   * refreshed and chunk-progress info is returned.
   */
  continueAssetUpload(req, res, next) {
    if (!(req.body instanceof Buffer)) {
      return next(createError.BadRequest('Body must be of type application/octet-stream'))
    }

    const range = req.get('Range')
    const contentLength = req.get('Content-Length')
    // Guard the match: originally `range.match(...)` threw a TypeError (a 500)
    // when the Range header was missing instead of the intended 400 below.
    const match = range ? range.match(AssetRoutes.rangeRegex) : null
    let offset = NaN

    if (match && match.length >= 2) {
      offset = Number.parseInt(match[1], 10)
    }

    // Originally checked `offset === NaN`, which is always false (NaN never
    // equals itself); Number.isNaN performs the intended validation.
    if (Number.isNaN(offset)) {
      return next(createError.BadRequest('Range header must be supplied and of form \'byte <offset>\''))
    }

    if (Number.parseInt(contentLength, 10) !== req.body.length) {
      return next(createError.BadRequest('Must supply Content-Length header matching length of request body'))
    }

    const uploadId = req.params._id
    // Derived Redis keys: '$#' counts received chunks, '$@' holds the bytes.
    const uploadCountId = uploadId + '$#'
    const uploadDataId = uploadId + '$@'

    this.rs.getAsync(uploadId).then((content) => {
      let uploadData = null

      try {
        uploadData = JSON.parse(content)
      } catch (error) {
        return Promise.reject(new Error('Could not parse upload data'))
      }

      if (offset < 0 || offset + req.body.length > uploadData.fileSize) {
        return Promise.reject(new Error(`Illegal range offset ${offset} given`))
      }

      // Returned into the outer chain so it is no longer a floating promise;
      // its own .catch below still performs the cleanup-and-400 handling.
      return Promise.all([
        this.rs.setrangeAsync(uploadDataId, offset, req.body),
        this.rs.incrAsync(uploadCountId)
      ]).then((arr) => {
        const uploadedChunks = arr[1]
        const chunkInfo = {
          numberOfChunks: uploadData.numberOfChunks,
          uploadedChunks
        }

        if (uploadedChunks >= uploadData.numberOfChunks) {
          // All chunks staged: stream the assembled Redis value into GridFS.
          // Buffer.from replaces the deprecated Buffer() constructor.
          const readable = redisReadStream(this.rs.client, Buffer.from(uploadDataId))
          const writeable = this.db.gridfs.createWriteStream({
            _id: uploadId,
            filename: uploadData.fileName,
            content_type: uploadData.contentType
          })

          return pipeToGridFS(readable, writeable).then((file) => {
            // NOTE(review): rs.del looks callback-based (only *Async variants
            // are awaited elsewhere) — cleanup may complete after the
            // response; confirm whether delAsync should be used here.
            return Promise.all([
              Promise.resolve(file),
              this.rs.del(uploadId),
              this.rs.del(uploadCountId),
              this.rs.del(uploadDataId)
            ])
          }).then((arr) => {
            const [file] = arr
            res.json({
              assetId: file._id,
              fileName: file.filename,
              contentType: file.contentType,
              uploadDate: file.uploadDate,
              md5: file.md5,
              ...chunkInfo
            })
          })
        } else {
          // More chunks expected: keep the staging keys alive for another
          // uploadTimeout window and report progress.
          return Promise.all([
            this.rs.expireAsync(uploadId, this.uploadTimeout),
            this.rs.expireAsync(uploadCountId, this.uploadTimeout),
            this.rs.expireAsync(uploadDataId, this.uploadTimeout)
          ]).then(() => {
            res.json(chunkInfo)
          })
        }
      }).catch((error) => {
        // Any staging/assembly failure aborts the upload: best-effort cleanup
        // of all staging keys, then report a 400.
        this.rs.del(uploadId)
        this.rs.del(uploadCountId)
        this.rs.del(uploadDataId)
        console.error(error) // TODO: This should go into log file
        next(createError.BadRequest('Unable to upload data chunk'))
      })
    }).catch((error) => {
      console.error(error) // TODO: This should go into log file
      next(createError.BadRequest(error.message))
    })
  }
}
|
||||
Reference in New Issue
Block a user