Add base64 upload support

This commit is contained in:
John Lyon-Smith
2018-04-25 17:43:32 -07:00
parent 0184481a7f
commit 3ddb9ebc2d
11 changed files with 703 additions and 221 deletions

View File

@@ -41,7 +41,8 @@ app.options("*", cors()) // Enable all pre-flight CORS requests
app.use(cors())
app.use(bodyParser.urlencoded({ extended: true }))
app.use(bodyParser.json())
app.use(bodyParser.raw({ type: "application/octet-stream" })) // TODO: Support gzip, etc.. here
app.use(bodyParser.raw({ type: "application/octet-stream" }))
app.use(bodyParser.text({ type: "application/base64" }))
app.use(passport.initialize())
const rs = new RS(container)

View File

@@ -6,8 +6,12 @@ import path from "path"
import util from "util"
import config from "config"
import autobind from "autobind-decorator"
import Buffer from "safe-buffer"
import B64 from "b64"
import { PassThrough } from "stream"
import { catchAll } from "."
function pipeToGridFS(readable, gfsWriteable) {
function pipeToGridFS(readable, gfsWriteable, decoder) {
const promise = new Promise((resolve, reject) => {
readable.on("error", (error) => {
reject(error)
@@ -19,23 +23,27 @@ function pipeToGridFS(readable, gfsWriteable) {
resolve(file)
})
})
readable.pipe(gfsWriteable)
readable.pipe(decoder).pipe(gfsWriteable)
return promise
}
@autobind
export class AssetRoutes {
static rangeRegex = /^byte (\d+)/
static rangeRegex = /^(byte|base64) (\d+)/
constructor(container) {
const app = container.app
this.log = container.log
this.db = container.db
this.rs = container.rs
this.uploadTimeout = config.get("api.uploadTimout")
app
.route("/assets/:_id")
.get(passport.authenticate("bearer", { session: false }), this.getAsset)
.get(
passport.authenticate("bearer", { session: false }),
catchAll(this.getAsset)
)
.delete(
passport.authenticate("bearer", { session: false }),
this.deleteAsset
@@ -45,221 +53,217 @@ export class AssetRoutes {
.route("/assets/upload")
.post(
passport.authenticate("bearer", { session: false }),
this.beginAssetUpload
catchAll(this.beginAssetUpload)
)
app
.route("/assets/upload/:_id")
.post(
passport.authenticate("bearer", { session: false }),
this.continueAssetUpload
catchAll(this.continueAssetUpload)
)
}
getAsset(req, res, next) {
async getAsset(req, res, next) {
const assetId = req.params._id
this.db.gridfs
.findOneAsync({ _id: assetId })
.then((file) => {
if (!file) {
return next(createError.NotFound(`Asset ${assetId} was not found`))
}
const file = await this.db.gridfs.findOneAsync({ _id: assetId })
const ifNoneMatch = req.get("If-None-Match")
if (!file) {
throw createError.NotFound(`Asset ${assetId} was not found`)
}
if (ifNoneMatch && ifNoneMatch === file.md5) {
res
.status(304)
.set({
ETag: file.md5,
"Cache-Control": "private,max-age=86400",
})
.end()
return
}
const ifNoneMatch = req.get("If-None-Match")
res.status(200).set({
"Content-Type": file.contentType,
"Content-Length": file.length,
if (ifNoneMatch && ifNoneMatch === file.md5) {
res
.status(304)
.set({
ETag: file.md5,
"Cache-Control": "private,max-age=86400",
})
.end()
return
}
this.db.gridfs.createReadStream({ _id: file._id }).pipe(res)
})
.catch((err) => {
next(
createError.BadRequest(
`Error returning asset '${assetId}'. ${err.message}`
)
)
})
res.status(200).set({
"Content-Type": file.contentType,
"Content-Length": file.length,
ETag: file.md5,
})
this.db.gridfs.createReadStream({ _id: file._id }).pipe(res)
}
deleteAsset(req, res, next) {
async deleteAsset(req, res, next) {
const assetId = req.params._id
this.db.gridfs
.removeAsync({ _id: assetId })
.then(() => {
res.json({})
})
.catch((err) => {
next(
createError.BadRequest(
`Unable to delete asset '${assetId}'. ${err.message}`
)
)
})
await this.db.gridfs.removeAsync({ _id: assetId })
res.json({})
}
beginAssetUpload(req, res, next) {
async beginAssetUpload(req, res, next) {
const uploadId = this.db.newObjectId()
let { fileName, fileSize, numberOfChunks, contentType } = req.body
let {
fileName,
uploadSize,
numberOfChunks,
contentType,
chunkContentType,
} = req.body
if (!fileName || !fileSize || !numberOfChunks || !contentType) {
return next(
createError.BadRequest(
"Must specify fileName, fileSize, numberOfChunks and Content-Type header"
)
if (!fileName || !uploadSize || !numberOfChunks || !contentType) {
throw createError.BadRequest(
"Must specify fileName, uploadSize, numberOfChunks, contentType"
)
}
fileName = uploadId + "-" + path.basename(fileName)
this.rs
.setAsync(
uploadId,
JSON.stringify({
fileName,
fileSize,
numberOfChunks,
contentType,
}),
"EX",
this.uploadTimeout
)
.then(() => {
res.json({ uploadId })
})
.catch((error) => {
next(createError.InternalServerError(error.message))
})
}
continueAssetUpload(req, res, next) {
if (!(req.body instanceof Buffer)) {
return next(
createError.BadRequest("Body must be of type application/octet-stream")
)
if (chunkContentType) {
if (
chunkContentType !== "application/octet-stream" &&
chunkContentType !== "application/base64"
) {
throw createError.BadRequest(
"chunkContentType must be application/octet-stream or application/base64"
)
}
} else {
chunkContentType = "application/octet-stream"
}
const range = req.get("Range")
const contentLength = req.get("Content-Length")
let match = range.match(AssetRoutes.rangeRegex)
let offset = null
await this.rs.setAsync(
uploadId,
JSON.stringify({
fileName,
uploadSize,
numberOfChunks,
contentType,
chunkContentType,
}),
"EX",
this.uploadTimeout
)
if (!match || match.length < 2 || (offset = parseInt(match[1])) === NaN) {
return next(
createError.BadRequest(
"Range header must be supplied and of form 'byte <offset>'"
)
res.json({ uploadId })
}
async continueAssetUpload(req, res, next) {
const uploadId = req.params._id
const uploadCountId = uploadId + "$#"
const uploadDataId = uploadId + "$@"
const content = await this.rs.getAsync(uploadId)
const uploadData = JSON.parse(content)
const contentType = req.get("Content-Type")
const contentRange = req.get("Content-Range")
const contentLength = req.get("Content-Length")
console.log(uploadData)
if (contentType !== uploadData.chunkContentType) {
throw createError.BadRequest(
`Content-Type ${contentType} does not match chunk type ${
uploadData.chunkContentType
}`
)
}
if (parseInt(contentLength, 10) !== req.body.length) {
return next(
createError.BadRequest(
"Must supply Content-Length header matching length of request body"
)
throw createError.BadRequest(
"Must supply Content-Length header matching length of request body"
)
}
const uploadId = req.params._id
const uploadCountId = uploadId + "$#"
const uploadDataId = uploadId + "$@"
let match = contentRange.match(AssetRoutes.rangeRegex)
this.rs
.getAsync(uploadId)
.then((content) => {
let uploadData = null
if (!match || match.length !== 3) {
throw createError.BadRequest(
"Content-Range header must be supplied and of form '[byte|base64] <offset>'"
)
}
try {
uploadData = JSON.parse(content)
} catch (error) {
return Promise.reject(new Error("Could not parse upload data"))
}
const [, contentOffsetUnit, contentOffset] = match
if (offset < 0 || offset + req.body.length > uploadData.fileSize) {
return Promise.reject(
new Error(`Illegal range offset ${offset} given`)
)
}
if (
(uploadData.chunkContentType === "application/octet-stream" &&
contentOffsetUnit !== "byte") ||
(uploadData.chunkContentType === "application/base64" &&
contentOffsetUnit !== "base64")
) {
throw createError.BadRequest(
`Content-Range offset unit must be ${
uploadData.chunkContentType === "application/base64"
? "base64"
: "byte"
}`
)
}
Promise.all([
this.rs.setrangeAsync(uploadDataId, offset, req.body),
this.rs.incrAsync(uploadCountId),
let offset = Number.parseInt(contentOffset)
if (offset < 0 || offset + req.body.length > uploadData.uploadSize) {
throw createError.BadRequest(
`Illegal Content-Range ${contentOffsetType} ${contentOffset} and Content-Length ${contentLength} for upload size ${
uploadData.uploadSize
}`
)
}
try {
const [uploadedChunks] = await Promise.all([
this.rs.setrangeAsync(uploadDataId, offset, req.body),
this.rs.incrAsync(uploadCountId),
])
const chunkInfo = {
numberOfChunks: uploadData.numberOfChunks,
uploadedChunks,
}
if (uploadedChunks >= uploadData.numberOfChunks) {
let readable = redisReadStream(this.rs.client, uploadDataId)
let writeable = this.db.gridfs.createWriteStream({
_id: uploadId,
filename: uploadData.fileName,
content_type: uploadData.contentType,
})
const decoder =
uploadData.chunkContentType === "application/base64"
? new B64.Decoder()
: new PassThrough()
const file = await pipeToGridFS(readable, writeable, decoder)
await Promise.all([
this.rs.del(uploadId),
this.rs.del(uploadCountId),
this.rs.del(uploadDataId),
])
.then((arr) => {
const uploadedChunks = arr[1]
let chunkInfo = {
numberOfChunks: uploadData.numberOfChunks,
uploadedChunks,
}
if (uploadedChunks >= uploadData.numberOfChunks) {
let readable = redisReadStream(
this.rs.client,
Buffer(uploadDataId)
)
let writeable = this.db.gridfs.createWriteStream({
_id: uploadId,
filename: uploadData.fileName,
content_type: uploadData.contentType,
})
res.json({
assetId: file._id,
fileName: file.filename,
contentType: file.contentType,
uploadDate: file.uploadDate,
md5: file.md5,
...chunkInfo,
})
} else {
await Promise.all([
this.rs.expireAsync(uploadId, this.uploadTimeout),
this.rs.expireAsync(uploadCountId, this.uploadTimeout),
this.rs.expireAsync(uploadDataId, this.uploadTimeout),
])
let promise = pipeToGridFS(readable, writeable)
.then((file) => {
return Promise.all([
Promise.resolve(file),
this.rs.del(uploadId),
this.rs.del(uploadCountId),
this.rs.del(uploadDataId),
])
})
.then((arr) => {
const [file] = arr
res.json({
assetId: file._id,
fileName: file.filename,
contentType: file.contentType,
uploadDate: file.uploadDate,
md5: file.md5,
...chunkInfo,
})
}) // TODO: Test that this will be caught...
return promise
} else {
return Promise.all([
this.rs.expireAsync(uploadId, this.uploadTimeout),
this.rs.expireAsync(uploadCountId, this.uploadTimeout),
this.rs.expireAsync(uploadDataId, this.uploadTimeout),
]).then(() => {
res.json(chunkInfo)
})
}
})
.catch((error) => {
this.rs.del(uploadId)
this.rs.del(uploadCountId)
this.rs.del(uploadDataId)
console.error(error) // TODO: This should go into log file
next(createError.BadRequest("Unable to upload data chunk"))
})
})
.catch((error) => {
console.error(error) // TODO: This should go into log file
next(createError.BadRequest(error.message))
})
res.json(chunkInfo)
}
} catch (error) {
this.rs.del(uploadId)
this.rs.del(uploadCountId)
this.rs.del(uploadDataId)
this.log.error(error.message)
throw error
}
}
}

View File

@@ -7,6 +7,8 @@ export { TeamRoutes } from "./TeamRoutes"
export { SystemRoutes } from "./SystemRoutes"
import createError from "http-errors"
const isProduction = process.env.NODE_ENV === "production"
export function catchAll(routeHandler) {
return async (req, res, next) => {
try {
@@ -15,7 +17,11 @@ export function catchAll(routeHandler) {
if (err instanceof createError.HttpError) {
next(err)
} else {
next(createError.InternalServerError(err.message))
if (isProduction) {
next(createError.InternalServerError(err.message))
} else {
next(err)
}
}
}
}

View File

@@ -1,10 +1,10 @@
import parseArgs from 'minimist'
import amqp from 'amqplib'
import JSON5 from 'json5'
import fs from 'fs'
import uuidv4 from 'uuid/v4'
import chalk from 'chalk'
import autobind from 'autobind-decorator'
import parseArgs from "minimist"
import amqp from "amqplib"
import JSON5 from "json5"
import fs from "fs"
import uuidv4 from "uuid/v4"
import chalk from "chalk"
import autobind from "autobind-decorator"
@autobind
class SendMessageTool {
@@ -15,18 +15,18 @@ class SendMessageTool {
async run(argv) {
const options = {
string: [ 'exchange', 'type' ],
boolean: [ 'help', 'version' ],
string: ["exchange", "type"],
boolean: ["help", "version"],
alias: {
'x': 'exchange',
't': 'type'
}
x: "exchange",
t: "type",
},
}
let args = parseArgs(argv, options)
if (args.help) {
this.log.info(`
usage: tmr-message [options] <file>
usage: ${this.toolName} [options] <file>
options:
-x --exchange <exchange> Exchange to send the message too, e.g. tmr-image
@@ -64,16 +64,22 @@ options:
const replyQueueName = `reply-${uuidv4()}`
const withChannel = async (ch) => {
return new Promise(async (resolve, reject) => {
const q = await ch.assertQueue(replyQueueName, {exclusive: true})
const q = await ch.assertQueue(replyQueueName, { exclusive: true })
if (!q) {
return reject(new Error(`Could not create reply queue ${replyQueueName}`))
return reject(
new Error(`Could not create reply queue ${replyQueueName}`)
)
}
ch.consume(q.queue, async (resMsg) => {
this.log.info(` Response ${resMsg.content.toString()}`)
await ch.close()
resolve(0)
}, {noAck: true})
ch.consume(
q.queue,
async (resMsg) => {
this.log.info(` Response ${resMsg.content.toString()}`)
await ch.close()
resolve(0)
},
{ noAck: true }
)
const ok = await ch.checkExchange(exchangeName)
@@ -83,21 +89,23 @@ options:
const s = JSON.stringify(msg)
this.log.info(` Type '${args.type}', Correlation id '${correlationId}'`)
this.log.info(
` Type '${args.type}', Correlation id '${correlationId}'`
)
this.log.info(` Sent '${s}'`)
ch.publish(exchangeName, '', new Buffer(s), {
ch.publish(exchangeName, "", new Buffer(s), {
type: args.type,
contentType: 'application/json',
contentType: "application/json",
timestamp: Date.now(),
correlationId,
appId: 'tmr-cli',
replyTo: replyQueueName
appId: "tmr-cli",
replyTo: replyQueueName,
})
})
}
const conn = await amqp.connect('amqp://localhost')
const conn = await amqp.connect("amqp://localhost")
const ch = await conn.createChannel()
await withChannel(ch)
@@ -106,14 +114,21 @@ options:
const log = {
info: console.info,
error: function() { console.error(chalk.red('error:', [...arguments].join(' ')))},
warning: function() { console.error(chalk.yellow('warning:', [...arguments].join(' ')))}
error: function() {
console.error(chalk.red("error:", [...arguments].join(" ")))
},
warning: function() {
console.error(chalk.yellow("warning:", [...arguments].join(" ")))
},
}
const tool = new SendMessageTool('sendMessage', log)
const tool = new SendMessageTool("sendMessage", log)
tool.run(process.argv.slice(2)).then((exitCode) => {
process.exit(exitCode)
}).catch((err) => {
console.error(err)
})
tool
.run(process.argv.slice(2))
.then((exitCode) => {
process.exit(exitCode)
})
.catch((err) => {
console.error(err)
})

View File

@@ -0,0 +1,254 @@
import parseArgs from "minimist"
import chalk from "chalk"
import fetch from "node-fetch"
import path from "path"
import mime from "mime-types"
import { promisify } from "util"
import fs from "fs"
import { Progress } from "clui"
import B64 from "b64"
import tmp from "tmp-promise"
import autobind from "autobind-decorator"
// Promisified fs primitives; fs.read/fs.open/fs.close/fs.fstat are used for
// positional, chunk-at-a-time reads during the upload loop below.
const readAsync = promisify(fs.read)
const closeAsync = promisify(fs.close)
const openAsync = promisify(fs.open)
const fstat = promisify(fs.fstat)
@autobind
class UploadFileTool {
constructor(toolName, log) {
this.toolName = toolName
this.log = log
}
async run(argv) {
const defaultHostname = "http://localhost:3001"
const options = {
string: ["content-type", "user", "password", "hostname", "token"],
boolean: ["help", "version", "base64"],
alias: {
u: "user",
p: "password",
t: "token",
c: "content-type",
h: "hostname",
},
default: {
hostname: defaultHostname,
},
}
let args = parseArgs(argv, options)
if (args.help) {
this.log.info(`
usage: ${this.toolName} [options] <file>
options:
-h, --hostname <hostname> Hostname of system. Defaults to ${defaultHostname}
-u, --user <email> User email
-p, --password <password> User password
-t, --token <token> Existing login token
-c, --contentType <mimetype> The MIME content type of the file
--base64 Upload file as base64 data
`)
return 0
}
if (args._.length < 1) {
this.log.error("Please specify a file to upload")
return -1
}
let fileName = args._[0]
const contentType = args.contenttype || mime.lookup(fileName)
if (!contentType) {
this.log.error(
`'${fileName}' does not have a recognized MIME type based on the file extension`
)
return -1
}
const contentTypeJsonHeader = {
"Content-Type": "application/json",
}
const chunkSize = 16 * 1024
let authHeader = null
if ((args.user && args.password) || args.token) {
let obj = null
if (!args.token) {
const res = await fetch(args.hostname + "/auth/login", {
method: "POST",
headers: contentTypeJsonHeader,
body: JSON.stringify({
email: args.user,
password: args.password,
}),
})
obj = await res.json()
if (!res.ok) {
throw new Error(obj.message)
}
authHeader = { Authorization: res.headers.get("Authorization") }
} else {
authHeader = { Authorization: "Bearer " + args.token }
const res = await fetch(args.hostname + "/auth/who", {
method: "GET",
headers: { ...authHeader },
})
obj = await res.json()
if (!res.ok) {
throw new Error(obj.message)
}
}
this.log.info(`Logged in as '${obj.email}'`)
} else {
this.log.error("Specify either user email and password, or token")
return -1
}
if (args.base64) {
const copyToBase64 = (readable, writeable) => {
const encoder = new B64.Encoder()
const promise = new Promise((resolve, reject) => {
readable.on("error", (error) => {
reject(error)
})
writeable.on("error", (error) => {
reject(error)
})
writeable.on("finish", () => {
resolve()
})
})
readable.pipe(encoder).pipe(writeable)
return promise
}
const { path: tmpFileName } = await tmp.file()
const readable = fs.createReadStream(fileName)
const writeable = fs.createWriteStream(tmpFileName)
this.log.info(`Writing file as base64 to '${tmpFileName}'`)
await copyToBase64(readable, writeable)
fileName = tmpFileName
}
let fd = await openAsync(fileName, "r")
let bar = new Progress(20)
const onProgress = (uploadData) => {
process.stdout.write(
bar.update(uploadData.uploadedChunks / uploadData.numberOfChunks) + "\r"
)
if (uploadData.hasOwnProperty("assetId")) {
process.stdout.write("\n")
this.log.info(uploadData)
}
}
const uploadFile = async (fd, fileSize, progress) => {
const numberOfChunks = Math.ceil(fileSize / chunkSize)
let buffer = Buffer.alloc(chunkSize)
let chunk = 0
let uploadId = null
let res = await fetch(args.hostname + "/assets/upload", {
method: "POST",
headers: { ...authHeader, ...contentTypeJsonHeader },
body: JSON.stringify({
fileName,
uploadSize: fileSize,
contentType,
chunkContentType: args.base64
? "application/base64"
: "application/octet-stream",
numberOfChunks,
}),
})
let obj = await res.json()
if (!res.ok) {
throw new Error(`Unable to initiate upload. ${obj.message}`)
}
uploadId = obj.uploadId
this.log.info(
`Uploading ${
args.hostname
}/assets/${uploadId}?access_token=${authHeader[
"Authorization"
].substring("Bearer ".length)}`
)
const chunkContentType = args.base64
? "application/base64"
: "application/octet-stream"
const contentRangeOffsetType = args.base64 ? "base64" : "byte"
while (chunk < numberOfChunks) {
const position = chunk * chunkSize
const length = Math.min(fileSize - position, chunkSize)
const { bytesRead } = await readAsync(fd, buffer, 0, length, position)
let body =
bytesRead < buffer.length ? buffer.slice(0, bytesRead) : buffer
res = await fetch(args.hostname + "/assets/upload/" + uploadId, {
method: "POST",
headers: {
...authHeader,
"Content-Type": chunkContentType,
"Content-Length": body.length,
"Content-Range": contentRangeOffsetType + " " + position.toString(),
},
body,
})
obj = await res.json()
if (!res.ok) {
throw new Error(`Unable to upload chunk ${chunk}. ${obj.message}`)
}
chunk++
progress(obj)
}
}
const stat = await fstat(fd)
this.log.info(`Uploading '${fileName}'`)
await uploadFile(fd, stat.size, onProgress)
this.log.info("Upload complete")
await closeAsync(fd)
return 0
}
}
// Shared logger: everything (including info) goes to stderr so stdout stays
// clean for the progress bar drawn by the tool.
const log = {
  info: console.error,
  error: function() {
    console.error(chalk.red("error:", [...arguments].join(" ")))
  },
  warning: function() {
    console.error(chalk.yellow("warning:", [...arguments].join(" ")))
  },
}
const tool = new UploadFileTool("uploadFile", log)

tool
  .run(process.argv.slice(2))
  .then((exitCode) => {
    process.exit(exitCode)
  })
  .catch((err) => {
    console.error(err)
    // BUG FIX: previously a thrown error was logged but the process still
    // exited 0; report failure to the shell with a non-zero exit code.
    process.exit(-1)
  })