Add base64 upload support
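Assets can now be uploaded in base64-encoded chunks as well as raw binary. The client declares the chunk encoding when it begins an upload (fileName, uploadSize, numberOfChunks, contentType, chunkContentType) and then sends each chunk with a matching Content-Type and a Content-Range offset unit of "byte" or "base64". For example (hypothetical values):

POST /assets/upload               body: {"fileName":"photo.jpg","uploadSize":48120,"numberOfChunks":3,"contentType":"image/jpeg","chunkContentType":"application/base64"}
POST /assets/upload/<uploadId>    headers: Content-Type: application/base64, Content-Range: base64 16384, Content-Length: <chunk length>; body: the base64 chunk

When the final chunk arrives, the assembled data is decoded with b64 before being written to GridFS. A new server/src/bin/uploadFile.js tool exercises the flow end to end (--base64 flag).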
@@ -3,5 +3,5 @@ import { Route, Redirect } from "react-router-native"

export const DefaultRoute = () => {
// NOTE: When working on the app, change this to the page you are working on
return <Route render={() => <Redirect to={"/home"} />} />
return <Route render={() => <Redirect to={"/workItem"} />} />
}
@@ -7,10 +7,10 @@ export const config = {
googleGeocodeAPIKey: "AIzaSyCs4JVT6gysnY5dAJ7KjVJYeykLv_xz1GI",
googleGeocodeURL: "https://maps.googleapis.com/maps/api/geocode/json",
refererURL: "https://dar.kss.us.com",
//defaultUser: "john@lyon-smith.org",
defaultUser: "",
//minGPSAccuracy: 100,
minGPSAccuracy: 20,
defaultUser: "john@lyon-smith.org",
//defaultUser: "",
minGPSAccuracy: 100,
//minGPSAccuracy: 20,
minDistanceToItem: 10,
geocodeDelayMilliseconds: 500,
}
@@ -41,11 +41,9 @@ export class PhotoPanel extends Component {
} else if (response.customButton) {
console.log("User tapped custom button: ", response.customButton)
} else {
let source = { uri: response.uri }

// You can also display the image using data:
// let source = { uri: 'data:image/jpeg;base64,' + response.data };
console.log(source)
console.log(response)
}
}
)
@@ -7,19 +7,22 @@ fi

if [[ "$1" == "--dev" ]]; then
export NODE_ENV=development
src_dir='src'
shift
else
export NODE_ENV=production
src_dir='dist'
fi

script_dir=$(dirname $0)
script="${script_dir}/dist/bin/${1}.js"
script="${script_dir}/${src_dir}/bin/${1}.js"

if [[ -z "$1" ]]; then
echo "usage: $(basename $0)[--test] [--dev] <command>"
echo ""
echo "Available commands are"
echo ""
find ${script_dir}/dist/bin -name \*.js -exec basename {} .js \;
find ${script_dir}/${src_dir}/bin -name \*.js -exec basename {} .js \;
exit -1
fi
server/package-lock.json (generated)
@@ -269,6 +269,11 @@
"integrity": "sha1-g+9cqGCysy5KDe7e6MdxudtXRx4=",
"dev": true
},
"b64": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/b64/-/b64-4.0.0.tgz",
"integrity": "sha512-EhmUQodKB0sdzPPrbIWbGqA5cQeTWxYrAgNeeT1rLZWtD3tbNTnphz8J4vkXI3cPgBNlXBjzEbzDzq0Nwi4f9A=="
},
"babel-cli": {
"version": "6.26.0",
"resolved": "https://registry.npmjs.org/babel-cli/-/babel-cli-6.26.0.tgz",
@@ -1325,6 +1330,17 @@
"integrity": "sha512-uTGIPNx/nSpBdsF6xnseRXLLtfr9VLqkz8ZqHXr3Y7b6SftyRxBGjwMtJj1OhNbmlc1wZzLNAlAcvyIiE8a6ZA==",
"dev": true
},
"cli-color": {
"version": "0.3.2",
"resolved": "https://registry.npmjs.org/cli-color/-/cli-color-0.3.2.tgz",
"integrity": "sha1-dfpfcowwjMSsWUsF4GzF2A2szYY=",
"requires": {
"d": "0.1.1",
"es5-ext": "0.10.42",
"memoizee": "0.3.10",
"timers-ext": "0.1.5"
}
},
"cliui": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/cliui/-/cliui-2.1.0.tgz",
@@ -1344,6 +1360,14 @@
}
}
},
"clui": {
"version": "0.3.6",
"resolved": "https://registry.npmjs.org/clui/-/clui-0.3.6.tgz",
"integrity": "sha512-Z4UbgZILlIAjkEkZiDOa2aoYjohKx7fa6DxIh6cE9A6WNWZ61iXfQc6CmdC9SKdS5nO0P0UyQ+WfoXfB65e3HQ==",
"requires": {
"cli-color": "0.3.2"
}
},
"co": {
"version": "4.6.0",
"resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz",
@@ -1536,6 +1560,14 @@
"cssom": "0.3.2"
}
},
"d": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/d/-/d-0.1.1.tgz",
"integrity": "sha1-2hhMU10Y2O57oqoim5FACfrhEwk=",
"requires": {
"es5-ext": "0.10.42"
}
},
"dashdash": {
"version": "1.14.1",
"resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz",
@@ -1740,11 +1772,92 @@
"is-arrayish": "0.2.1"
}
},
"es5-ext": {
"version": "0.10.42",
"resolved": "https://registry.npmjs.org/es5-ext/-/es5-ext-0.10.42.tgz",
"integrity": "sha512-AJxO1rmPe1bDEfSR6TJ/FgMFYuTBhR5R57KW58iCkYACMyFbrkqVyzXSurYoScDGvgyMpk7uRF/lPUPPTmsRSA==",
"requires": {
"es6-iterator": "2.0.3",
"es6-symbol": "3.1.1",
"next-tick": "1.0.0"
}
},
"es6-iterator": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/es6-iterator/-/es6-iterator-2.0.3.tgz",
"integrity": "sha1-p96IkUGgWpSwhUQDstCg+/qY87c=",
"requires": {
"d": "1.0.0",
"es5-ext": "0.10.42",
"es6-symbol": "3.1.1"
},
"dependencies": {
"d": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/d/-/d-1.0.0.tgz",
"integrity": "sha1-dUu1v+VUUdpppYuU1F9MWwRi1Y8=",
"requires": {
"es5-ext": "0.10.42"
}
}
}
},
"es6-promise": {
"version": "3.2.1",
"resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-3.2.1.tgz",
"integrity": "sha1-7FYjOGgDKQkgcXDDlEjiREndH8Q="
},
"es6-symbol": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/es6-symbol/-/es6-symbol-3.1.1.tgz",
"integrity": "sha1-vwDvT9q2uhtG7Le2KbTH7VcVzHc=",
"requires": {
"d": "1.0.0",
"es5-ext": "0.10.42"
},
"dependencies": {
"d": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/d/-/d-1.0.0.tgz",
"integrity": "sha1-dUu1v+VUUdpppYuU1F9MWwRi1Y8=",
"requires": {
"es5-ext": "0.10.42"
}
}
}
},
"es6-weak-map": {
"version": "0.1.4",
"resolved": "https://registry.npmjs.org/es6-weak-map/-/es6-weak-map-0.1.4.tgz",
"integrity": "sha1-cGzvnpmqI2undmwjnIueKG6n0ig=",
"requires": {
"d": "0.1.1",
"es5-ext": "0.10.42",
"es6-iterator": "0.1.3",
"es6-symbol": "2.0.1"
},
"dependencies": {
"es6-iterator": {
"version": "0.1.3",
"resolved": "https://registry.npmjs.org/es6-iterator/-/es6-iterator-0.1.3.tgz",
"integrity": "sha1-1vWLjE/EE8JJtLqhl2j45NfIlE4=",
"requires": {
"d": "0.1.1",
"es5-ext": "0.10.42",
"es6-symbol": "2.0.1"
}
},
"es6-symbol": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/es6-symbol/-/es6-symbol-2.0.1.tgz",
"integrity": "sha1-dhtcZ8/U8dGK+yNPaR1nhoLLO/M=",
"requires": {
"d": "0.1.1",
"es5-ext": "0.10.42"
}
}
}
},
"escape-html": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz",
@@ -1803,6 +1916,25 @@
"resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz",
"integrity": "sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc="
},
"event-emitter": {
"version": "0.3.5",
"resolved": "https://registry.npmjs.org/event-emitter/-/event-emitter-0.3.5.tgz",
"integrity": "sha1-34xp7vFkeSPHFXuc6DhAYQsCzDk=",
"requires": {
"d": "1.0.0",
"es5-ext": "0.10.42"
},
"dependencies": {
"d": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/d/-/d-1.0.0.tgz",
"integrity": "sha1-dUu1v+VUUdpppYuU1F9MWwRi1Y8=",
"requires": {
"es5-ext": "0.10.42"
}
}
}
},
"event-stream": {
"version": "3.3.4",
"resolved": "https://registry.npmjs.org/event-stream/-/event-stream-3.3.4.tgz",
@@ -4524,6 +4656,14 @@
"yallist": "2.1.2"
}
},
"lru-queue": {
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/lru-queue/-/lru-queue-0.1.0.tgz",
"integrity": "sha1-Jzi9nw089PhEkMVzbEhpmsYyzaM=",
"requires": {
"es5-ext": "0.10.42"
}
},
"makeerror": {
"version": "1.0.11",
"resolved": "https://registry.npmjs.org/makeerror/-/makeerror-1.0.11.tgz",
@@ -4553,6 +4693,27 @@
"mimic-fn": "1.2.0"
}
},
"memoizee": {
"version": "0.3.10",
"resolved": "https://registry.npmjs.org/memoizee/-/memoizee-0.3.10.tgz",
"integrity": "sha1-TsoNiu057J0Bf0xcLy9kMvQuXI8=",
"requires": {
"d": "0.1.1",
"es5-ext": "0.10.42",
"es6-weak-map": "0.1.4",
"event-emitter": "0.3.5",
"lru-queue": "0.1.0",
"next-tick": "0.2.2",
"timers-ext": "0.1.5"
},
"dependencies": {
"next-tick": {
"version": "0.2.2",
"resolved": "https://registry.npmjs.org/next-tick/-/next-tick-0.2.2.tgz",
"integrity": "sha1-ddpKkn7liH45BliABltzNkE7MQ0="
}
}
},
"merge": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/merge/-/merge-1.2.0.tgz",
@@ -4812,6 +4973,16 @@
"resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.1.tgz",
"integrity": "sha1-KzJxhOiZIQEXeyhWP7XnECrNDKk="
},
"next-tick": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/next-tick/-/next-tick-1.0.0.tgz",
"integrity": "sha1-yobR/ogoFpsBICCOPchCS524NCw="
},
"node-fetch": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.1.2.tgz",
"integrity": "sha1-q4hOjn5X44qUR1POxwb3iNF2i7U="
},
"node-int64": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz",
@@ -4968,8 +5139,7 @@
"os-tmpdir": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz",
"integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=",
"dev": true
"integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ="
},
"output-file-sync": {
"version": "1.1.2",
@@ -6155,6 +6325,32 @@
}
}
},
"timers-ext": {
"version": "0.1.5",
"resolved": "https://registry.npmjs.org/timers-ext/-/timers-ext-0.1.5.tgz",
"integrity": "sha512-tsEStd7kmACHENhsUPaxb8Jf8/+GZZxyNFQbZD07HQOyooOa6At1rQqjffgvg7n+dxscQa9cjjMdWhJtsP2sxg==",
"requires": {
"es5-ext": "0.10.42",
"next-tick": "1.0.0"
}
},
"tmp": {
"version": "0.0.33",
"resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz",
"integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==",
"requires": {
"os-tmpdir": "1.0.2"
}
},
"tmp-promise": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/tmp-promise/-/tmp-promise-1.0.4.tgz",
"integrity": "sha512-76r7LZhAvRJ3kLD/xrPSEGb3aq0tirzMLJKhcchKSkQIiEgXB+RouC0ygReuZX+oiA64taGo+j+1gHTKSG8/Mg==",
"requires": {
"bluebird": "3.5.1",
"tmp": "0.0.33"
}
},
"tmpl": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.4.tgz",
@@ -23,8 +23,10 @@
"app-root-path": "^2.0.1",
"autobind-decorator": "^2.1.0",
"aws-sdk": "^2.98.0",
"b64": "^4.0.0",
"body-parser": "^1.17.1",
"canvas": "^1.6.7",
"clui": "^0.3.6",
"config": "^1.25.1",
"cors": "^2.8.3",
"credential": "^2.0.0",
@@ -37,6 +39,7 @@
"mongodb": "^2.2.35",
"mongoose": "^5.0.13",
"mongoose-merge-plugin": "0.0.5",
"node-fetch": "^2.1.2",
"nodemailer": "^4.0.1",
"passport": "^0.3.2",
"passport-http-bearer": "^1.0.1",
@@ -46,7 +49,9 @@
"redis": "^2.7.1",
"redis-rstream": "^0.1.3",
"regexp-pattern": "^1.0.4",
"safe-buffer": "^5.1.1",
"socket.io": "^2.0.3",
"tmp-promise": "^1.0.4",
"urlsafe-base64": "^1.0.0",
"uuid": "^3.1.0"
},
@@ -41,7 +41,8 @@ app.options("*", cors()) // Enable all pre-flight CORS requests
app.use(cors())
app.use(bodyParser.urlencoded({ extended: true }))
app.use(bodyParser.json())
app.use(bodyParser.raw({ type: "application/octet-stream" })) // TODO: Support gzip, etc.. here
app.use(bodyParser.raw({ type: "application/octet-stream" }))
app.use(bodyParser.text({ type: "application/base64" }))
app.use(passport.initialize())

const rs = new RS(container)
@@ -6,8 +6,12 @@ import path from "path"
import util from "util"
import config from "config"
import autobind from "autobind-decorator"
import Buffer from "safe-buffer"
import B64 from "b64"
import { PassThrough } from "stream"
import { catchAll } from "."

function pipeToGridFS(readable, gfsWriteable) {
function pipeToGridFS(readable, gfsWriteable, decoder) {
const promise = new Promise((resolve, reject) => {
readable.on("error", (error) => {
reject(error)
@@ -19,23 +23,27 @@ function pipeToGridFS(readable, gfsWriteable) {
resolve(file)
})
})
readable.pipe(gfsWriteable)
readable.pipe(decoder).pipe(gfsWriteable)
return promise
}

@autobind
export class AssetRoutes {
static rangeRegex = /^byte (\d+)/
static rangeRegex = /^(byte|base64) (\d+)/

constructor(container) {
const app = container.app

this.log = container.log
this.db = container.db
this.rs = container.rs
this.uploadTimeout = config.get("api.uploadTimout")
app
.route("/assets/:_id")
.get(passport.authenticate("bearer", { session: false }), this.getAsset)
.get(
passport.authenticate("bearer", { session: false }),
catchAll(this.getAsset)
)
.delete(
passport.authenticate("bearer", { session: false }),
this.deleteAsset
@@ -45,221 +53,217 @@ export class AssetRoutes {
.route("/assets/upload")
.post(
passport.authenticate("bearer", { session: false }),
this.beginAssetUpload
catchAll(this.beginAssetUpload)
)

app
.route("/assets/upload/:_id")
.post(
passport.authenticate("bearer", { session: false }),
this.continueAssetUpload
catchAll(this.continueAssetUpload)
)
}

getAsset(req, res, next) {
async getAsset(req, res, next) {
const assetId = req.params._id

this.db.gridfs
.findOneAsync({ _id: assetId })
.then((file) => {
if (!file) {
return next(createError.NotFound(`Asset ${assetId} was not found`))
}
const file = await this.db.gridfs.findOneAsync({ _id: assetId })

const ifNoneMatch = req.get("If-None-Match")
if (!file) {
throw createError.NotFound(`Asset ${assetId} was not found`)
}

if (ifNoneMatch && ifNoneMatch === file.md5) {
res
.status(304)
.set({
ETag: file.md5,
"Cache-Control": "private,max-age=86400",
})
.end()
return
}
const ifNoneMatch = req.get("If-None-Match")

res.status(200).set({
"Content-Type": file.contentType,
"Content-Length": file.length,
if (ifNoneMatch && ifNoneMatch === file.md5) {
res
.status(304)
.set({
ETag: file.md5,
"Cache-Control": "private,max-age=86400",
})
.end()
return
}

this.db.gridfs.createReadStream({ _id: file._id }).pipe(res)
})
.catch((err) => {
next(
createError.BadRequest(
`Error returning asset '${assetId}'. ${err.message}`
)
)
})
res.status(200).set({
"Content-Type": file.contentType,
"Content-Length": file.length,
ETag: file.md5,
})

this.db.gridfs.createReadStream({ _id: file._id }).pipe(res)
}

deleteAsset(req, res, next) {
async deleteAsset(req, res, next) {
const assetId = req.params._id

this.db.gridfs
.removeAsync({ _id: assetId })
.then(() => {
res.json({})
})
.catch((err) => {
next(
createError.BadRequest(
`Unable to delete asset '${assetId}'. ${err.message}`
)
)
})
await this.db.gridfs.removeAsync({ _id: assetId })

res.json({})
}

beginAssetUpload(req, res, next) {
async beginAssetUpload(req, res, next) {
const uploadId = this.db.newObjectId()
let { fileName, fileSize, numberOfChunks, contentType } = req.body
let {
fileName,
uploadSize,
numberOfChunks,
contentType,
chunkContentType,
} = req.body

if (!fileName || !fileSize || !numberOfChunks || !contentType) {
return next(
createError.BadRequest(
"Must specify fileName, fileSize, numberOfChunks and Content-Type header"
)
if (!fileName || !uploadSize || !numberOfChunks || !contentType) {
throw createError.BadRequest(
"Must specify fileName, uploadSize, numberOfChunks, contentType"
)
}

fileName = uploadId + "-" + path.basename(fileName)

this.rs
.setAsync(
uploadId,
JSON.stringify({
fileName,
fileSize,
numberOfChunks,
contentType,
}),
"EX",
this.uploadTimeout
)
.then(() => {
res.json({ uploadId })
})
.catch((error) => {
next(createError.InternalServerError(error.message))
})
}

continueAssetUpload(req, res, next) {
if (!(req.body instanceof Buffer)) {
return next(
createError.BadRequest("Body must be of type application/octet-stream")
)
if (chunkContentType) {
if (
chunkContentType !== "application/octet-stream" &&
chunkContentType !== "application/base64"
) {
throw createError.BadRequest(
"chunkContentType must be application/octet-stream or application/base64"
)
}
} else {
chunkContentType = "application/octet-stream"
}

const range = req.get("Range")
const contentLength = req.get("Content-Length")
let match = range.match(AssetRoutes.rangeRegex)
let offset = null
await this.rs.setAsync(
uploadId,
JSON.stringify({
fileName,
uploadSize,
numberOfChunks,
contentType,
chunkContentType,
}),
"EX",
this.uploadTimeout
)

if (!match || match.length < 2 || (offset = parseInt(match[1])) === NaN) {
return next(
createError.BadRequest(
"Range header must be supplied and of form 'byte <offset>'"
)
res.json({ uploadId })
}

async continueAssetUpload(req, res, next) {
const uploadId = req.params._id
const uploadCountId = uploadId + "$#"
const uploadDataId = uploadId + "$@"
const content = await this.rs.getAsync(uploadId)
const uploadData = JSON.parse(content)
const contentType = req.get("Content-Type")
const contentRange = req.get("Content-Range")
const contentLength = req.get("Content-Length")

console.log(uploadData)

if (contentType !== uploadData.chunkContentType) {
throw createError.BadRequest(
`Content-Type ${contentType} does not match chunk type ${
uploadData.chunkContentType
}`
)
}

if (parseInt(contentLength, 10) !== req.body.length) {
return next(
createError.BadRequest(
"Must supply Content-Length header matching length of request body"
)
throw createError.BadRequest(
"Must supply Content-Length header matching length of request body"
)
}

const uploadId = req.params._id
const uploadCountId = uploadId + "$#"
const uploadDataId = uploadId + "$@"
let match = contentRange.match(AssetRoutes.rangeRegex)

this.rs
.getAsync(uploadId)
.then((content) => {
let uploadData = null
if (!match || match.length !== 3) {
throw createError.BadRequest(
"Content-Range header must be supplied and of form '[byte|base64] <offset>'"
)
}

try {
uploadData = JSON.parse(content)
} catch (error) {
return Promise.reject(new Error("Could not parse upload data"))
}
const [, contentOffsetUnit, contentOffset] = match

if (offset < 0 || offset + req.body.length > uploadData.fileSize) {
return Promise.reject(
new Error(`Illegal range offset ${offset} given`)
)
}
if (
(uploadData.chunkContentType === "application/octet-stream" &&
contentOffsetUnit !== "byte") ||
(uploadData.chunkContentType === "application/base64" &&
contentOffsetUnit !== "base64")
) {
throw createError.BadRequest(
`Content-Range offset unit must be ${
uploadData.chunkContentType === "application/base64"
? "base64"
: "byte"
}`
)
}

Promise.all([
this.rs.setrangeAsync(uploadDataId, offset, req.body),
this.rs.incrAsync(uploadCountId),
let offset = Number.parseInt(contentOffset)

if (offset < 0 || offset + req.body.length > uploadData.uploadSize) {
throw createError.BadRequest(
`Illegal Content-Range ${contentOffsetType} ${contentOffset} and Content-Length ${contentLength} for upload size ${
uploadData.uploadSize
}`
)
}

try {
const [uploadedChunks] = await Promise.all([
this.rs.setrangeAsync(uploadDataId, offset, req.body),
this.rs.incrAsync(uploadCountId),
])
const chunkInfo = {
numberOfChunks: uploadData.numberOfChunks,
uploadedChunks,
}

if (uploadedChunks >= uploadData.numberOfChunks) {
let readable = redisReadStream(this.rs.client, uploadDataId)
let writeable = this.db.gridfs.createWriteStream({
_id: uploadId,
filename: uploadData.fileName,
content_type: uploadData.contentType,
})

const decoder =
uploadData.chunkContentType === "application/base64"
? new B64.Decoder()
: new PassThrough()
const file = await pipeToGridFS(readable, writeable, decoder)

await Promise.all([
this.rs.del(uploadId),
this.rs.del(uploadCountId),
this.rs.del(uploadDataId),
])
.then((arr) => {
const uploadedChunks = arr[1]
let chunkInfo = {
numberOfChunks: uploadData.numberOfChunks,
uploadedChunks,
}

if (uploadedChunks >= uploadData.numberOfChunks) {
let readable = redisReadStream(
this.rs.client,
Buffer(uploadDataId)
)
let writeable = this.db.gridfs.createWriteStream({
_id: uploadId,
filename: uploadData.fileName,
content_type: uploadData.contentType,
})
res.json({
assetId: file._id,
fileName: file.filename,
contentType: file.contentType,
uploadDate: file.uploadDate,
md5: file.md5,
...chunkInfo,
})
} else {
await Promise.all([
this.rs.expireAsync(uploadId, this.uploadTimeout),
this.rs.expireAsync(uploadCountId, this.uploadTimeout),
this.rs.expireAsync(uploadDataId, this.uploadTimeout),
])

let promise = pipeToGridFS(readable, writeable)
.then((file) => {
return Promise.all([
Promise.resolve(file),
this.rs.del(uploadId),
this.rs.del(uploadCountId),
this.rs.del(uploadDataId),
])
})
.then((arr) => {
const [file] = arr
res.json({
assetId: file._id,
fileName: file.filename,
contentType: file.contentType,
uploadDate: file.uploadDate,
md5: file.md5,
...chunkInfo,
})
}) // TODO: Test that this will be caught...
return promise
} else {
return Promise.all([
this.rs.expireAsync(uploadId, this.uploadTimeout),
this.rs.expireAsync(uploadCountId, this.uploadTimeout),
this.rs.expireAsync(uploadDataId, this.uploadTimeout),
]).then(() => {
res.json(chunkInfo)
})
}
})
.catch((error) => {
this.rs.del(uploadId)
this.rs.del(uploadCountId)
this.rs.del(uploadDataId)
console.error(error) // TODO: This should go into log file
next(createError.BadRequest("Unable to upload data chunk"))
})
})
.catch((error) => {
console.error(error) // TODO: This should go into log file
next(createError.BadRequest(error.message))
})
res.json(chunkInfo)
}
} catch (error) {
this.rs.del(uploadId)
this.rs.del(uploadCountId)
this.rs.del(uploadDataId)
this.log.error(error.message)
throw error
}
}
}
@@ -7,6 +7,8 @@ export { TeamRoutes } from "./TeamRoutes"
export { SystemRoutes } from "./SystemRoutes"
import createError from "http-errors"

const isProduction = process.env.NODE_ENV === "production"

export function catchAll(routeHandler) {
return async (req, res, next) => {
try {
@@ -15,7 +17,11 @@ export function catchAll(routeHandler) {
if (err instanceof createError.HttpError) {
next(err)
} else {
next(createError.InternalServerError(err.message))
if (isProduction) {
next(createError.InternalServerError(err.message))
} else {
next(err)
}
}
}
}
@@ -1,10 +1,10 @@
import parseArgs from 'minimist'
import amqp from 'amqplib'
import JSON5 from 'json5'
import fs from 'fs'
import uuidv4 from 'uuid/v4'
import chalk from 'chalk'
import autobind from 'autobind-decorator'
import parseArgs from "minimist"
import amqp from "amqplib"
import JSON5 from "json5"
import fs from "fs"
import uuidv4 from "uuid/v4"
import chalk from "chalk"
import autobind from "autobind-decorator"

@autobind
class SendMessageTool {
@@ -15,18 +15,18 @@ class SendMessageTool {

async run(argv) {
const options = {
string: [ 'exchange', 'type' ],
boolean: [ 'help', 'version' ],
string: ["exchange", "type"],
boolean: ["help", "version"],
alias: {
'x': 'exchange',
't': 'type'
}
x: "exchange",
t: "type",
},
}
let args = parseArgs(argv, options)

if (args.help) {
this.log.info(`
usage: tmr-message [options] <file>
usage: ${this.toolName} [options] <file>

options:
-x --exchange <exchange>  Exchange to send the message too, e.g. tmr-image
@@ -64,16 +64,22 @@ options:
const replyQueueName = `reply-${uuidv4()}`
const withChannel = async (ch) => {
return new Promise(async (resolve, reject) => {
const q = await ch.assertQueue(replyQueueName, {exclusive: true})
const q = await ch.assertQueue(replyQueueName, { exclusive: true })

if (!q) {
return reject(new Error(`Could not create reply queue ${replyQueueName}`))
return reject(
new Error(`Could not create reply queue ${replyQueueName}`)
)
}
ch.consume(q.queue, async (resMsg) => {
this.log.info(`  Response ${resMsg.content.toString()}`)
await ch.close()
resolve(0)
}, {noAck: true})
ch.consume(
q.queue,
async (resMsg) => {
this.log.info(`  Response ${resMsg.content.toString()}`)
await ch.close()
resolve(0)
},
{ noAck: true }
)

const ok = await ch.checkExchange(exchangeName)

@@ -83,21 +89,23 @@ options:

const s = JSON.stringify(msg)

this.log.info(`  Type '${args.type}', Correlation id '${correlationId}'`)
this.log.info(
`  Type '${args.type}', Correlation id '${correlationId}'`
)
this.log.info(`  Sent '${s}'`)

ch.publish(exchangeName, '', new Buffer(s), {
ch.publish(exchangeName, "", new Buffer(s), {
type: args.type,
contentType: 'application/json',
contentType: "application/json",
timestamp: Date.now(),
correlationId,
appId: 'tmr-cli',
replyTo: replyQueueName
appId: "tmr-cli",
replyTo: replyQueueName,
})
})
}

const conn = await amqp.connect('amqp://localhost')
const conn = await amqp.connect("amqp://localhost")
const ch = await conn.createChannel()

await withChannel(ch)
@@ -106,14 +114,21 @@ options:

const log = {
info: console.info,
error: function() { console.error(chalk.red('error:', [...arguments].join(' ')))},
warning: function() { console.error(chalk.yellow('warning:', [...arguments].join(' ')))}
error: function() {
console.error(chalk.red("error:", [...arguments].join(" ")))
},
warning: function() {
console.error(chalk.yellow("warning:", [...arguments].join(" ")))
},
}

const tool = new SendMessageTool('sendMessage', log)
const tool = new SendMessageTool("sendMessage", log)

tool.run(process.argv.slice(2)).then((exitCode) => {
process.exit(exitCode)
}).catch((err) => {
console.error(err)
})
tool
.run(process.argv.slice(2))
.then((exitCode) => {
process.exit(exitCode)
})
.catch((err) => {
console.error(err)
})
server/src/bin/uploadFile.js (new file, 254 lines)
@@ -0,0 +1,254 @@
import parseArgs from "minimist"
import chalk from "chalk"
import fetch from "node-fetch"
import path from "path"
import mime from "mime-types"
import { promisify } from "util"
import fs from "fs"
import { Progress } from "clui"
import B64 from "b64"
import tmp from "tmp-promise"
import autobind from "autobind-decorator"

const readAsync = promisify(fs.read)
const closeAsync = promisify(fs.close)
const openAsync = promisify(fs.open)
const fstat = promisify(fs.fstat)

@autobind
class UploadFileTool {
constructor(toolName, log) {
this.toolName = toolName
this.log = log
}

async run(argv) {
const defaultHostname = "http://localhost:3001"
const options = {
string: ["content-type", "user", "password", "hostname", "token"],
boolean: ["help", "version", "base64"],
alias: {
u: "user",
p: "password",
t: "token",
c: "content-type",
h: "hostname",
},
default: {
hostname: defaultHostname,
},
}
let args = parseArgs(argv, options)

if (args.help) {
this.log.info(`
usage: ${this.toolName} [options] <file>

options:
-h, --hostname <hostname>  Hostname of system. Defaults to ${defaultHostname}
-u, --user <email>  User email
-p, --password <password>  User password
-t, --token <token>  Existing login token
-c, --contentType <mimetype>  The MIME content type of the file
--base64  Upload file as base64 data
`)
return 0
}

if (args._.length < 1) {
this.log.error("Please specify a file to upload")
return -1
}

let fileName = args._[0]
const contentType = args.contenttype || mime.lookup(fileName)

if (!contentType) {
this.log.error(
`'${fileName}' does not have a recognized MIME type based on the file extension`
)
return -1
}

const contentTypeJsonHeader = {
"Content-Type": "application/json",
}
const chunkSize = 16 * 1024
let authHeader = null

if ((args.user && args.password) || args.token) {
let obj = null

if (!args.token) {
const res = await fetch(args.hostname + "/auth/login", {
method: "POST",
headers: contentTypeJsonHeader,
body: JSON.stringify({
email: args.user,
password: args.password,
}),
})
obj = await res.json()

if (!res.ok) {
throw new Error(obj.message)
}

authHeader = { Authorization: res.headers.get("Authorization") }
} else {
authHeader = { Authorization: "Bearer " + args.token }

const res = await fetch(args.hostname + "/auth/who", {
method: "GET",
headers: { ...authHeader },
})
obj = await res.json()

if (!res.ok) {
throw new Error(obj.message)
}
}

this.log.info(`Logged in as '${obj.email}'`)
} else {
this.log.error("Specify either user email and password, or token")
return -1
}

if (args.base64) {
const copyToBase64 = (readable, writeable) => {
const encoder = new B64.Encoder()
const promise = new Promise((resolve, reject) => {
readable.on("error", (error) => {
reject(error)
})
writeable.on("error", (error) => {
reject(error)
})
writeable.on("finish", () => {
resolve()
})
})
readable.pipe(encoder).pipe(writeable)
return promise
}

const { path: tmpFileName } = await tmp.file()
const readable = fs.createReadStream(fileName)
const writeable = fs.createWriteStream(tmpFileName)

this.log.info(`Writing file as base64 to '${tmpFileName}'`)

await copyToBase64(readable, writeable)
fileName = tmpFileName
}

let fd = await openAsync(fileName, "r")
let bar = new Progress(20)

const onProgress = (uploadData) => {
process.stdout.write(
bar.update(uploadData.uploadedChunks / uploadData.numberOfChunks) + "\r"
)
if (uploadData.hasOwnProperty("assetId")) {
process.stdout.write("\n")
this.log.info(uploadData)
}
}

const uploadFile = async (fd, fileSize, progress) => {
const numberOfChunks = Math.ceil(fileSize / chunkSize)
let buffer = Buffer.alloc(chunkSize)
let chunk = 0
let uploadId = null
let res = await fetch(args.hostname + "/assets/upload", {
method: "POST",
headers: { ...authHeader, ...contentTypeJsonHeader },
body: JSON.stringify({
fileName,
uploadSize: fileSize,
contentType,
chunkContentType: args.base64
? "application/base64"
: "application/octet-stream",
numberOfChunks,
}),
})
let obj = await res.json()

if (!res.ok) {
throw new Error(`Unable to initiate upload. ${obj.message}`)
}

uploadId = obj.uploadId

this.log.info(
`Uploading ${
args.hostname
}/assets/${uploadId}?access_token=${authHeader[
"Authorization"
].substring("Bearer ".length)}`
)

const chunkContentType = args.base64
? "application/base64"
: "application/octet-stream"
const contentRangeOffsetType = args.base64 ? "base64" : "byte"

while (chunk < numberOfChunks) {
const position = chunk * chunkSize
const length = Math.min(fileSize - position, chunkSize)
const { bytesRead } = await readAsync(fd, buffer, 0, length, position)
let body =
bytesRead < buffer.length ? buffer.slice(0, bytesRead) : buffer

res = await fetch(args.hostname + "/assets/upload/" + uploadId, {
method: "POST",
headers: {
...authHeader,
"Content-Type": chunkContentType,
"Content-Length": body.length,
"Content-Range": contentRangeOffsetType + " " + position.toString(),
},
body,
})
obj = await res.json()

if (!res.ok) {
throw new Error(`Unable to upload chunk ${chunk}. ${obj.message}`)
}

chunk++
progress(obj)
}
}

const stat = await fstat(fd)

this.log.info(`Uploading '${fileName}'`)
await uploadFile(fd, stat.size, onProgress)
this.log.info("Upload complete")
await closeAsync(fd)
return 0
}
}

const log = {
info: console.error,
error: function() {
console.error(chalk.red("error:", [...arguments].join(" ")))
},
warning: function() {
console.error(chalk.yellow("warning:", [...arguments].join(" ")))
},
}

const tool = new UploadFileTool("uploadFile", log)
tool
.run(process.argv.slice(2))
.then((exitCode) => {
process.exit(exitCode)
})
.catch((err) => {
console.error(err)
})