SectionList on home screen with API

John Lyon-Smith
2018-04-08 18:33:21 -07:00
parent 5634acb967
commit 7891bb71c9
19 changed files with 1278 additions and 1201 deletions

@@ -1,21 +1,21 @@
-import passport from 'passport'
-import redis from 'redis'
-import redisReadStream from 'redis-rstream'
-import createError from 'http-errors'
-import path from 'path'
-import util from 'util'
-import config from 'config'
-import autobind from 'autobind-decorator'
+import passport from "passport"
+import redis from "redis"
+import redisReadStream from "redis-rstream"
+import createError from "http-errors"
+import path from "path"
+import util from "util"
+import config from "config"
+import autobind from "autobind-decorator"
 
 function pipeToGridFS(readable, gfsWriteable) {
   const promise = new Promise((resolve, reject) => {
-    readable.on('error', (error) => {
+    readable.on("error", (error) => {
       reject(error)
     })
-    gfsWriteable.on('error', (error) => {
+    gfsWriteable.on("error", (error) => {
       reject(error)
     })
-    gfsWriteable.on('close', (file) => {
+    gfsWriteable.on("close", (file) => {
       resolve(file)
     })
   })
@@ -32,55 +32,85 @@ export class AssetRoutes {
     this.db = container.db
     this.rs = container.rs
-    this.uploadTimeout = config.get('api.uploadTimout')
+    this.uploadTimeout = config.get("api.uploadTimout")
 
-    app.route('/assets/:_id')
-      .get(passport.authenticate('bearer', { session: false }), this.getAsset)
-      .delete(passport.authenticate('bearer', { session: false }), this.deleteAsset)
+    app
+      .route("/assets/:_id")
+      .get(passport.authenticate("bearer", { session: false }), this.getAsset)
+      .delete(
+        passport.authenticate("bearer", { session: false }),
+        this.deleteAsset
+      )
 
-    app.route('/assets/upload')
-      .post(passport.authenticate('bearer', { session: false }), this.beginAssetUpload)
+    app
+      .route("/assets/upload")
+      .post(
+        passport.authenticate("bearer", { session: false }),
+        this.beginAssetUpload
+      )
 
-    app.route('/assets/upload/:_id')
-      .post(passport.authenticate('bearer', { session: false }), this.continueAssetUpload)
+    app
+      .route("/assets/upload/:_id")
+      .post(
+        passport.authenticate("bearer", { session: false }),
+        this.continueAssetUpload
+      )
   }
 
   getAsset(req, res, next) {
     const assetId = req.params._id
 
-    this.db.gridfs.findOneAsync({ _id: assetId }).then((file) => {
-      if (!file) {
-        return next(createError.NotFound(`Asset ${assetId} was not found`))
-      }
+    this.db.gridfs
+      .findOneAsync({ _id: assetId })
+      .then((file) => {
+        if (!file) {
+          return next(createError.NotFound(`Asset ${assetId} was not found`))
+        }
 
-      const ifNoneMatch = req.get('If-None-Match')
+        const ifNoneMatch = req.get("If-None-Match")
 
-      if (ifNoneMatch && ifNoneMatch === file.md5) {
-        res.status(304).set({
-          'ETag': file.md5,
-          'Cache-Control': 'private,max-age=86400'
-        }).end()
-        return
-      }
+        if (ifNoneMatch && ifNoneMatch === file.md5) {
+          res
+            .status(304)
+            .set({
+              ETag: file.md5,
+              "Cache-Control": "private,max-age=86400",
+            })
+            .end()
+          return
+        }
 
-      res.status(200).set({
-        'Content-Type': file.contentType,
-        'Content-Length': file.length,
-        'ETag': file.md5})
+        res.status(200).set({
+          "Content-Type": file.contentType,
+          "Content-Length": file.length,
+          ETag: file.md5,
+        })
 
-      this.db.gridfs.createReadStream({ _id: file._id }).pipe(res)
-    }).catch((err) => {
-      next(createError.BadRequest(`Error returning asset '${assetId}'. ${err.message}`))
-    })
+        this.db.gridfs.createReadStream({ _id: file._id }).pipe(res)
+      })
+      .catch((err) => {
+        next(
+          createError.BadRequest(
+            `Error returning asset '${assetId}'. ${err.message}`
+          )
+        )
+      })
   }
 
   deleteAsset(req, res, next) {
     const assetId = req.params._id
 
-    this.db.gridfs.removeAsync({ _id: assetId }).then(() => {
-      res.json({})
-    }).catch((err) => {
-      next(createError.BadRequest(`Unable to delete asset '${assetId}'. ${err.message}`))
-    })
+    this.db.gridfs
+      .removeAsync({ _id: assetId })
+      .then(() => {
+        res.json({})
+      })
+      .catch((err) => {
+        next(
+          createError.BadRequest(
+            `Unable to delete asset '${assetId}'. ${err.message}`
+          )
+        )
+      })
   }
 
   beginAssetUpload(req, res, next) {
@@ -88,112 +118,148 @@ export class AssetRoutes {
     let { fileName, fileSize, numberOfChunks, contentType } = req.body
 
     if (!fileName || !fileSize || !numberOfChunks || !contentType) {
-      return next(createError.BadRequest('Must specify fileName, fileSize, numberOfChunks and Content-Type header'))
+      return next(
+        createError.BadRequest(
+          "Must specify fileName, fileSize, numberOfChunks and Content-Type header"
+        )
+      )
     }
 
-    fileName = uploadId + '-' + path.basename(fileName)
+    fileName = uploadId + "-" + path.basename(fileName)
 
-    this.rs.setAsync(
-      uploadId, JSON.stringify({
-        fileName, fileSize, numberOfChunks, contentType
-      }), 'EX', this.uploadTimeout).then(() => {
-        res.json({ uploadId })
-      }).catch((error) => {
-        next(createError.InternalServerError(error.message))
-      })
+    this.rs
+      .setAsync(
+        uploadId,
+        JSON.stringify({
+          fileName,
+          fileSize,
+          numberOfChunks,
+          contentType,
+        }),
+        "EX",
+        this.uploadTimeout
+      )
+      .then(() => {
+        res.json({ uploadId })
+      })
+      .catch((error) => {
+        next(createError.InternalServerError(error.message))
+      })
   }
 
   continueAssetUpload(req, res, next) {
     if (!(req.body instanceof Buffer)) {
-      return next(createError.BadRequest('Body must be of type application/octet-stream'))
+      return next(
+        createError.BadRequest("Body must be of type application/octet-stream")
+      )
     }
 
-    const range = req.get('Range')
-    const contentLength = req.get('Content-Length')
+    const range = req.get("Range")
+    const contentLength = req.get("Content-Length")
 
     let match = range.match(AssetRoutes.rangeRegex)
    let offset = null
 
     if (!match || match.length < 2 || (offset = parseInt(match[1])) === NaN) {
-      return next(createError.BadRequest('Range header must be supplied and of form \'byte <offset>\''))
+      return next(
+        createError.BadRequest(
+          "Range header must be supplied and of form 'byte <offset>'"
+        )
+      )
     }
 
     if (parseInt(contentLength, 10) !== req.body.length) {
-      return next(createError.BadRequest('Must supply Content-Length header matching length of request body'))
+      return next(
+        createError.BadRequest(
+          "Must supply Content-Length header matching length of request body"
+        )
+      )
     }
 
     const uploadId = req.params._id
-    const uploadCountId = uploadId + '$#'
-    const uploadDataId = uploadId + '$@'
+    const uploadCountId = uploadId + "$#"
+    const uploadDataId = uploadId + "$@"
 
-    this.rs.getAsync(uploadId).then((content) => {
-      let uploadData = null
-
-      try {
-        uploadData = JSON.parse(content)
-      } catch (error){
-        return Promise.reject(new Error('Could not parse upload data'))
-      }
-
-      if (offset < 0 || offset + req.body.length > uploadData.fileSize) {
-        return Promise.reject(new Error(`Illegal range offset ${offset} given`))
-      }
-
-      Promise.all([
-        this.rs.setrangeAsync(uploadDataId, offset, req.body),
-        this.rs.incrAsync(uploadCountId)
-      ]).then((arr) => {
-        const uploadedChunks = arr[1]
-        let chunkInfo = {
-          numberOfChunks: uploadData.numberOfChunks,
-          uploadedChunks
-        }
-        if (uploadedChunks >= uploadData.numberOfChunks) {
-          let readable = redisReadStream(this.rs.client, Buffer(uploadDataId))
-          let writeable = this.db.gridfs.createWriteStream({
-            _id: uploadId,
-            filename: uploadData.fileName,
-            content_type: uploadData.contentType
-          })
-          let promise = pipeToGridFS(readable, writeable).then((file) => {
-            return Promise.all([
-              Promise.resolve(file),
-              this.rs.del(uploadId),
-              this.rs.del(uploadCountId),
-              this.rs.del(uploadDataId)
-            ])
-          }).then((arr) => {
-            const [file] = arr
-            res.json({
-              assetId: file._id,
-              fileName: file.filename,
-              contentType: file.contentType,
-              uploadDate: file.uploadDate,
-              md5: file.md5,
-              ...chunkInfo
-            })
-          }) // TODO: Test that this will be caught...
-          return promise
-        } else {
-          return Promise.all([
-            this.rs.expireAsync(uploadId, this.uploadTimeout),
-            this.rs.expireAsync(uploadCountId, this.uploadTimeout),
-            this.rs.expireAsync(uploadDataId, this.uploadTimeout)
-          ]).then(() => {
-            res.json(chunkInfo)
-          })
-        }
-      }).catch((error) => {
-        this.rs.del(uploadId)
-        this.rs.del(uploadCountId)
-        this.rs.del(uploadDataId)
-        console.error(error) // TODO: This should go into log file
-        next(createError.BadRequest('Unable to upload data chunk'))
-      })
-    }).catch((error) => {
-      console.error(error) // TODO: This should go into log file
-      next(createError.BadRequest(error.message))
-    })
+    this.rs
+      .getAsync(uploadId)
+      .then((content) => {
+        let uploadData = null
+
+        try {
+          uploadData = JSON.parse(content)
+        } catch (error) {
+          return Promise.reject(new Error("Could not parse upload data"))
+        }
+
+        if (offset < 0 || offset + req.body.length > uploadData.fileSize) {
+          return Promise.reject(
+            new Error(`Illegal range offset ${offset} given`)
+          )
+        }
+
+        Promise.all([
+          this.rs.setrangeAsync(uploadDataId, offset, req.body),
+          this.rs.incrAsync(uploadCountId),
+        ])
+          .then((arr) => {
+            const uploadedChunks = arr[1]
+            let chunkInfo = {
+              numberOfChunks: uploadData.numberOfChunks,
+              uploadedChunks,
+            }
+            if (uploadedChunks >= uploadData.numberOfChunks) {
+              let readable = redisReadStream(
+                this.rs.client,
+                Buffer(uploadDataId)
+              )
+              let writeable = this.db.gridfs.createWriteStream({
+                _id: uploadId,
+                filename: uploadData.fileName,
+                content_type: uploadData.contentType,
+              })
+              let promise = pipeToGridFS(readable, writeable)
+                .then((file) => {
+                  return Promise.all([
+                    Promise.resolve(file),
+                    this.rs.del(uploadId),
+                    this.rs.del(uploadCountId),
+                    this.rs.del(uploadDataId),
+                  ])
+                })
+                .then((arr) => {
+                  const [file] = arr
+                  res.json({
+                    assetId: file._id,
+                    fileName: file.filename,
+                    contentType: file.contentType,
+                    uploadDate: file.uploadDate,
+                    md5: file.md5,
+                    ...chunkInfo,
+                  })
+                }) // TODO: Test that this will be caught...
+              return promise
+            } else {
+              return Promise.all([
+                this.rs.expireAsync(uploadId, this.uploadTimeout),
+                this.rs.expireAsync(uploadCountId, this.uploadTimeout),
+                this.rs.expireAsync(uploadDataId, this.uploadTimeout),
+              ]).then(() => {
+                res.json(chunkInfo)
+              })
+            }
+          })
+          .catch((error) => {
+            this.rs.del(uploadId)
+            this.rs.del(uploadCountId)
+            this.rs.del(uploadDataId)
+            console.error(error) // TODO: This should go into log file
+            next(createError.BadRequest("Unable to upload data chunk"))
+          })
+      })
+      .catch((error) => {
+        console.error(error) // TODO: This should go into log file
+        next(createError.BadRequest(error.message))
+      })
   }
 }
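
For context, the upload routes in this diff expect a JSON "begin" request (fileName, fileSize, numberOfChunks, contentType) that returns an uploadId, followed by raw application/octet-stream chunks addressed with a `Range: byte <offset>` header and a matching Content-Length; the response to the final chunk carries the new assetId and md5. Below is a minimal client-side sketch of that flow. It is not part of the commit: the base URL, bearer token, chunk size, and the node-fetch dependency are assumptions for illustration only.

// Hypothetical client for the chunked upload API above (illustration only; not in this commit).
import fetch from "node-fetch"
import fs from "fs"
import path from "path"

const baseUrl = "http://localhost:3000" // assumed server address
const token = process.env.API_TOKEN // assumed bearer token for passport's bearer strategy

async function uploadAsset(filePath, contentType, chunkSize = 256 * 1024) {
  const data = fs.readFileSync(filePath)
  const numberOfChunks = Math.ceil(data.length / chunkSize)

  // 1. Begin the upload: POST /assets/upload with the file metadata, get back an uploadId.
  const begin = await fetch(`${baseUrl}/assets/upload`, {
    method: "POST",
    headers: {
      Authorization: `Bearer ${token}`,
      "Content-Type": "application/json",
    },
    body: JSON.stringify({
      fileName: path.basename(filePath),
      fileSize: data.length,
      numberOfChunks,
      contentType,
    }),
  })
  const { uploadId } = await begin.json()

  // 2. Send each chunk: POST /assets/upload/:_id with an octet-stream body and a Range offset.
  let result = null
  for (let offset = 0; offset < data.length; offset += chunkSize) {
    const chunk = data.slice(offset, offset + chunkSize)
    const res = await fetch(`${baseUrl}/assets/upload/${uploadId}`, {
      method: "POST",
      headers: {
        Authorization: `Bearer ${token}`,
        "Content-Type": "application/octet-stream",
        "Content-Length": String(chunk.length),
        Range: `byte ${offset}`,
      },
      body: chunk,
    })
    // Intermediate responses return { numberOfChunks, uploadedChunks };
    // the final one also includes assetId, fileName, contentType, uploadDate and md5.
    result = await res.json()
  }

  return result
}

On the server side, as the diff shows, each chunk is written into Redis with SETRANGE and counted with INCR; once uploadedChunks reaches numberOfChunks the assembled buffer is streamed into GridFS and the route responds with the stored file's assetId, after which GET /assets/:_id serves it with an ETag so clients can revalidate via If-None-Match.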