From 6ec5f456f370fbbfb54295509812f3ce64b22e4f Mon Sep 17 00:00:00 2001 From: Jonathan Cremin Date: Mon, 6 Jun 2016 13:39:42 +0100 Subject: [PATCH] Refactor uploader --- .eslintrc | 8 +- api/routes/file.js | 126 +++++----------- lib/hostr-file-stream.js | 55 ++++--- lib/hostr-id.js | 2 +- lib/s3.js | 18 +-- lib/sftp.js | 29 ++-- lib/ssh2-sftp-client.js | 303 +++++++++++++++++++++++++++++++++++++++ lib/upload.js | 108 -------------- lib/uploader.js | 251 ++++++++++++++++++++++++++++++++ package.json | 4 +- web/public/config.js | 2 +- web/routes/file.js | 2 +- 12 files changed, 647 insertions(+), 261 deletions(-) create mode 100644 lib/ssh2-sftp-client.js delete mode 100644 lib/upload.js create mode 100644 lib/uploader.js diff --git a/.eslintrc b/.eslintrc index 6ec1c20..f5878ab 100644 --- a/.eslintrc +++ b/.eslintrc @@ -1,7 +1,11 @@ { "extends": "airbnb/base", - "ecmaFeatures": { - "modules": true + "parserOptions": { + "ecmaVersion": 6, + "sourceType": "module", + "ecmaFeatures": { + "experimentalObjectRestSpread": true + }, }, "env": { "node": true, diff --git a/api/routes/file.js b/api/routes/file.js index 3deea3b..3fcda0e 100644 --- a/api/routes/file.js +++ b/api/routes/file.js @@ -1,102 +1,36 @@ -import path from 'path'; -import crypto from 'crypto'; -import fs from 'mz/fs'; import redis from 'redis'; -import { sniff } from '../../lib/type'; -import malware from '../../lib/malware'; import { formatFile } from '../../lib/format'; -import { accept, processImage } from '../../lib/upload'; - -import debugname from 'debug'; -const debug = debugname('hostr-api:file'); +import Uploader from '../../lib/uploader'; const redisUrl = process.env.REDIS_URL; -const storePath = process.env.UPLOAD_STORAGE_PATH; - export function* post(next) { if (!this.request.is('multipart/*')) { - return yield next; + yield next; + return; } - const Files = this.db.Files; - const expectedSize = this.request.headers['content-length']; - const remoteIp = this.request.headers['x-real-ip'] || this.req.connection.remoteAddress; - const md5sum = crypto.createHash('md5'); + const uploader = new Uploader(this); - let lastPercent = 0; - let percentComplete = 0; - let lastTick = 0; - let receivedSize = 0; + yield uploader.accept(); + uploader.acceptedEvent(); + uploader.receive(); + yield uploader.save(); + yield uploader.promise; - const upload = yield accept.call(this); + uploader.processingEvent(); - upload.path = path.join(upload.id[0], upload.id + '_' + upload.filename); - const localStream = fs.createWriteStream(path.join(storePath, upload.path)); + yield uploader.sendToSFTP(); + yield uploader.processImage(); - upload.pipe(localStream); - - upload.on('data', (data) => { - receivedSize += data.length; - if (receivedSize > this.user.max_filesize) { - fs.unlink(path.join(storePath, key)); - this.throw(413, '{"error": {"message": "The file you tried to upload is too large.", "code": 601}}'); - } - - percentComplete = Math.floor(receivedSize * 100 / expectedSize); - if (percentComplete > lastPercent && lastTick < Date.now() - 1000) { - const progressEvent = `{"type": "file-progress", "data": {"id": "${upload.id}", "complete": ${percentComplete}}}`; - this.redis.publish('/file/' + upload.id, progressEvent); - this.redis.publish('/user/' + this.user.id, progressEvent); - lastTick = Date.now(); - } - lastPercent = percentComplete; - - md5sum.update(data); - }); - - const dbFile = { - owner: this.user.id, - ip: remoteIp, - 'system_name': upload.id, - 'file_name': upload.filename, - 'original_name': upload.originalName, - 
'file_size': receivedSize, - 'time_added': Math.ceil(Date.now() / 1000), - status: 'active', - 'last_accessed': null, - s3: false, - type: sniff(upload.filename), - }; - - yield Files.insertOne({_id: upload.id, ...dbFile}); - - yield upload.promise; - - const completeEvent = `{"type": "file-progress", "data": {"id": "${upload.id}", "complete": 100}}`; - this.redis.publish('/file/' + upload.id, completeEvent); - this.redis.publish('/user/' + this.user.id, completeEvent); - this.statsd.incr('file.upload.complete', 1); - - const size = yield processImage(upload); - - dbFile.width = size.width; - dbFile.height = size.height; - dbFile.file_size = receivedSize; // eslint-disable-line camelcase - dbFile.status = 'active'; - dbFile.md5 = md5sum.digest('hex'); - - const formattedFile = formatFile({_id: upload.id, ...dbFile}); - - yield Files.updateOne({_id: upload.id}, {$set: dbFile}); - - const addedEvent = `{"type": "file-added", "data": ${JSON.stringify(formattedFile)}}`; - this.redis.publish('/file/' + upload.id, addedEvent); - this.redis.publish('/user/' + this.user.id, addedEvent); + yield uploader.finalise(); this.status = 201; - this.body = formattedFile; + this.body = uploader.toJSON(); + + uploader.completeEvent(); + uploader.malwareScan(); } @@ -107,7 +41,7 @@ export function* list() { if (this.request.query.trashed) { status = 'trashed'; } else if (this.request.query.all) { - status = {'$in': ['active', 'trashed']}; + status = { $in: ['active', 'trashed'] }; } let limit = 20; @@ -123,13 +57,14 @@ export function* list() { } const queryOptions = { - limit: limit, skip: skip, sort: [['time_added', 'desc']], + limit, skip, sort: [['time_added', 'desc']], hint: { - owner: 1, status: 1, 'time_added': -1, + owner: 1, status: 1, time_added: -1, }, }; - const userFiles = yield Files.find({owner: this.user.id, status: status}, queryOptions).toArray(); + const userFiles = yield Files.find({ + owner: this.user.id, status }, queryOptions).toArray(); this.statsd.incr('file.list', 1); this.body = userFiles.map(formatFile); } @@ -138,9 +73,10 @@ export function* list() { export function* get() { const Files = this.db.Files; const Users = this.db.Users; - const file = yield Files.findOne({_id: this.params.id, status: {'$in': ['active', 'uploading']}}); + const file = yield Files.findOne({ _id: this.params.id, + status: { $in: ['active', 'uploading'] } }); this.assert(file, 404, '{"error": {"message": "File not found", "code": 604}}'); - const user = yield Users.findOne({_id: file.owner}); + const user = yield Users.findOne({ _id: file.owner }); this.assert(user && !user.banned, 404, '{"error": {"message": "File not found", "code": 604}}'); this.statsd.incr('file.get', 1); this.body = formatFile(file); @@ -151,16 +87,18 @@ export function* put() { if (this.request.body.trashed) { const Files = this.db.Files; const status = this.request.body.trashed ? 
'trashed' : 'active'; - yield Files.updateOne({'_id': this.params.id, owner: this.user.id}, {$set: {status: status}}, {w: 1}); + yield Files.updateOne({ _id: this.params.id, owner: this.user.id }, + { $set: { status } }, { w: 1 }); } } export function* del() { - yield this.db.Files.updateOne({'_id': this.params.id, owner: this.db.objectId(this.user.id)}, {$set: {status: 'deleted'}}, {w: 1}); - const event = {type: 'file-deleted', data: {'id': this.params.id}}; - yield this.redis.publish('/user/' + this.user.id, JSON.stringify(event)); - yield this.redis.publish('/file/' + this.params.id, JSON.stringify(event)); + yield this.db.Files.updateOne({ _id: this.params.id, owner: this.db.objectId(this.user.id) }, + { $set: { status: 'deleted' } }, { w: 1 }); + const event = { type: 'file-deleted', data: { id: this.params.id } }; + yield this.redis.publish(`/file/${this.params.id}`, JSON.stringify(event)); + yield this.redis.publish(`/user/${this.user.id}`, JSON.stringify(event)); this.statsd.incr('file.delete', 1); this.status = 204; this.body = ''; diff --git a/lib/hostr-file-stream.js b/lib/hostr-file-stream.js index 4383673..0140816 100644 --- a/lib/hostr-file-stream.js +++ b/lib/hostr-file-stream.js @@ -1,35 +1,50 @@ import fs from 'fs'; import createError from 'http-errors'; -import { get as getFile } from './sftp'; +import { get as getSFTP } from './sftp'; +import { get as getS3 } from './s3'; import debugname from 'debug'; const debug = debugname('hostr:file-stream'); -export default function* hostrFileStream(localPath, remotePath) { +function writer(localPath, remoteRead) { + return new Promise((resolve, reject) => { + remoteRead.once('error', () => { + debug('remote error'); + reject(createError(404)); + }); + const localWrite = fs.createWriteStream(localPath); + localWrite.once('finish', () => { + debug('local write end'); + resolve(fs.createReadStream(localPath)); + }); + + remoteRead.once('readable', () => { + debug('writing'); + remoteRead.pipe(localWrite); + }); + }); +} + +export default function hostrFileStream(localPath, remotePath) { const localRead = fs.createReadStream(localPath); return new Promise((resolve, reject) => { localRead.once('error', () => { - debug('local error'); - const remoteFile = getFile(remotePath); - - remoteFile.then((remoteRead) => { - const localWrite = fs.createWriteStream(localPath); - localWrite.once('finish', () => { - debug('local write end'); - resolve(fs.createReadStream(localPath)); + debug('not found locally'); + getSFTP(remotePath) + .then((remoteRead) => writer(localPath, remoteRead)) + .then(resolve) + .catch((err) => { + debug('not on sftp', err); + writer(localPath, getS3(remotePath)) + .then(resolve) + .catch((s3err) => { + debug('not on s3'); + reject(s3err); + }); }); - remoteRead.pipe(localWrite); - - remoteRead.once('error', () => { - debug('remote error'); - reject(createError(404)); - }); - }); - - }); localRead.once('readable', () => { - debug('local readable'); + debug('found locally'); resolve(localRead); }); }); diff --git a/lib/hostr-id.js b/lib/hostr-id.js index 1d19866..444d8dc 100644 --- a/lib/hostr-id.js +++ b/lib/hostr-id.js @@ -12,7 +12,7 @@ function* checkId(Files, fileId, attempts) { if (attempts > 10) { return false; } - const file = yield Files.findOne({'_id': fileId}); + const file = yield Files.findOne({ _id: fileId }); if (file === null) { return fileId; } diff --git a/lib/s3.js b/lib/s3.js index b47944d..06c0646 100644 --- a/lib/s3.js +++ b/lib/s3.js @@ -5,16 +5,10 @@ const debug = debugname('hostr:s3'); const s3 = 
new aws.S3(); export function get(key) { - debug('fetching from s3: %s', 'hostr_files/' + key); - return s3.getObject({Bucket: process.env.AWS_BUCKET, Key: 'hostr_files/' + key}).createReadStream(); -} - -export function upload(stream, key, callback) { - debug('sending to s3: %s', 'hostr_files/' + key); - const params = {Bucket: process.env.AWS_BUCKET, Key: 'hostr_files/' + key, Body: stream}; - const uploading = s3.upload(params); - uploading.on('error', (err) => { - console.log(err) - }); - uploading.send(callback); + let fullKey = `hostr_files/${key}`; + if (key.substr(2, 5) === '970/' || key.substr(2, 5) === '150/') { + fullKey = `hostr_files/${key.substr(2)}`; + } + debug('fetching from s3: %s', fullKey); + return s3.getObject({ Bucket: process.env.AWS_BUCKET, Key: fullKey }).createReadStream(); } diff --git a/lib/sftp.js b/lib/sftp.js index 5a11526..f02bc9d 100644 --- a/lib/sftp.js +++ b/lib/sftp.js @@ -1,40 +1,29 @@ -import { dirname } from 'path'; -import Client from 'ssh2-sftp-client'; +import { dirname, join } from 'path'; +import Client from './ssh2-sftp-client'; import debugname from 'debug'; const debug = debugname('hostr:sftp'); export function get(remotePath) { + debug('fetching', join('hostr', 'uploads', remotePath)); const sftp = new Client(); return sftp.connect({ host: process.env.SFTP_HOST, port: process.env.SFTP_PORT, username: process.env.SFTP_USERNAME, password: process.env.SFTP_PASSWORD, - }).then(() => { - return sftp.get('hostr/uploads/' + remotePath, true); - }); + }) + .then(() => sftp.get(join('hostr', 'uploads', remotePath), { encoding: null })); } export function upload(localPath, remotePath) { - debug('SFTP connecting'); const sftp = new Client(); return sftp.connect({ host: process.env.SFTP_HOST, port: process.env.SFTP_PORT, username: process.env.SFTP_USERNAME, password: process.env.SFTP_PASSWORD, - }).then(() => { - return sftp.put(localPath, remotePath, true).then(() => { - sftp.end(); - }); - }).catch(() => { - debug('Creating ' + dirname(remotePath)); - return sftp.mkdir(dirname(remotePath), true).then(() => { - return sftp.put(localPath, remotePath, true).then(() => { - sftp.end(); - }); - }); - }).then(() => { - sftp.end(); - }); + }) + .then(() => sftp.put(localPath, remotePath, true)) + .catch(() => sftp.mkdir(dirname(remotePath), true) + .then(() => sftp.put(localPath, remotePath, true))); } diff --git a/lib/ssh2-sftp-client.js b/lib/ssh2-sftp-client.js new file mode 100644 index 0000000..db44be4 --- /dev/null +++ b/lib/ssh2-sftp-client.js @@ -0,0 +1,303 @@ +/** + * ssh2 sftp client for node + */ +'use strict'; + +let Client = require('ssh2').Client; + +let SftpClient = function(){ + this.client = new Client(); +}; + +/** + * Retrieves a directory listing + * + * @param {String} path, a string containing the path to a directory + * @return {Promise} data, list info + */ +SftpClient.prototype.list = function(path) { + let reg = /-/gi; + + return new Promise((resolve, reject) => { + let sftp = this.sftp; + + if (sftp) { + sftp.readdir(path, (err, list) => { + if (err) { + reject(err); + return false; + } + // reset file info + list.forEach((item, i) => { + list[i] = { + type: item.longname.substr(0, 1), + name: item.filename, + size: item.attrs.size, + modifyTime: item.attrs.mtime * 1000, + accessTime: item.attrs.atime * 1000, + rights: { + user: item.longname.substr(1, 3).replace(reg, ''), + group: item.longname.substr(4,3).replace(reg, ''), + other: item.longname.substr(7, 3).replace(reg, '') + }, + owner: item.attrs.uid, + group: item.attrs.gid + 
} + }); + resolve(list); + }); + } else { + reject('sftp connect error'); + } + }); +}; + +/** + * get file + * + * @param {String} path, path + * @param {Object} useCompression, config options + * @return {Promise} stream, readable stream + */ +SftpClient.prototype.get = function(path, useCompression) { + useCompression = Object.assign({}, {encoding: 'utf8'}, useCompression); + + return new Promise((resolve, reject) => { + let sftp = this.sftp; + + if (sftp) { + try { + let stream = sftp.createReadStream(path, useCompression); + + stream.on('error', reject); + + resolve(stream); + } catch(err) { + reject(err); + } + } else { + reject('sftp connect error'); + } + }); +}; + +/** + * Create file + * + * @param {String|Buffer|stream} input + * @param {String} remotePath, + * @param {Object} useCompression [description] + * @return {[type]} [description] + */ +SftpClient.prototype.put = function(input, remotePath, useCompression) { + useCompression = Object.assign({}, {encoding: 'utf8'}, useCompression); + + return new Promise((resolve, reject) => { + let sftp = this.sftp; + + if (sftp) { + if (typeof input === 'string') { + sftp.fastPut(input, remotePath, useCompression, (err) => { + if (err) { + reject(err); + return false; + } + resolve(); + }); + return false; + } + let stream = sftp.createWriteStream(remotePath, useCompression); + let data; + + stream.on('error', reject); + stream.on('close', resolve); + + if (input instanceof Buffer) { + data = stream.end(input); + return false; + } + data = input.pipe(stream); + } else { + reject('sftp connect error'); + } + }); +}; + +SftpClient.prototype.mkdir = function(path, recursive) { + recursive = recursive || false; + + return new Promise((resolve, reject) => { + let sftp = this.sftp; + + if (sftp) { + if (!recursive) { + sftp.mkdir(path, (err) => { + if (err) { + reject(err); + return false; + } + resolve(); + }); + return false; + } + + let tokens = path.split(/\//g); + let p = ''; + + let mkdir = () => { + let token = tokens.shift(); + + if (!token && !tokens.length) { + resolve(); + return false; + } + token += '/'; + p = p + token; + sftp.mkdir(p, (err) => { + if (err && err.code !== 4) { + reject(err); + } + mkdir(); + }); + }; + return mkdir(); + } else { + reject('sftp connect error'); + } + }); +}; + +SftpClient.prototype.rmdir = function(path, recursive) { + recursive = recursive || false; + + return new Promise((resolve, reject) => { + let sftp = this.sftp; + + if (sftp) { + if (!recursive) { + return sftp.rmdir(path, (err) => { + if (err) { + reject(err); + } + resolve(); + }); + } + let rmdir = (p) => { + return this.list(p).then((list) => { + if (list.length > 0) { + let promises = []; + + list.forEach((item) => { + let name = item.name; + let promise; + var subPath; + + if (name[0] === '/') { + subPath = name; + } else { + if (p[p.length - 1] === '/') { + subPath = p + name; + } else { + subPath = p + '/' + name; + } + } + + if (item.type === 'd') { + if (name !== '.' 
|| name !== '..') { + promise = rmdir(subPath); + } + } else { + promise = this.delete(subPath); + } + promises.push(promise); + }); + if (promises.length) { + return Promise.all(promises).then(() => { + return rmdir(p); + }); + } + } else { + return new Promise((resolve, reject) => { + return sftp.rmdir(p, (err) => { + if (err) { + reject(err); + } + else { + resolve(); + } + }); + }); + } + }); + }; + return rmdir(path).then(() => {resolve()}) + .catch((err) => {reject(err)}); + } else { + reject('sftp connect error'); + } + }); +}; + +SftpClient.prototype.delete = function(path) { + return new Promise((resolve, reject) => { + let sftp = this.sftp; + + if (sftp) { + sftp.unlink(path, (err) => { + if (err) { + reject(err); + return false; + } + resolve(); + }); + } else { + reject('sftp connect error'); + } + }); +}; + +SftpClient.prototype.rename = function(srcPath, remotePath) { + return new Promise((resolve, reject) => { + let sftp = this.sftp; + + if (sftp) { + sftp.rename(srcPath, remotePath, (err) => { + if (err) { + reject(err); + return false; + } + resolve(); + }); + } else { + reject('sftp connect error'); + } + }); +} + +SftpClient.prototype.connect = function(config) { + var c = this.client; + + return new Promise((resolve, reject) => { + this.client.on('ready', () => { + + this.client.sftp((err, sftp) => { + if (err) { + reject(err); + } + this.sftp = sftp; + resolve(sftp); + }); + }).on('error', (err) => { + reject(err); + }).connect(config); + }); +}; + +SftpClient.prototype.end = function() { + return new Promise((resolve) => { + this.client.end(); + resolve(); + }); +}; + +module.exports = SftpClient; diff --git a/lib/upload.js b/lib/upload.js deleted file mode 100644 index 0b2a88b..0000000 --- a/lib/upload.js +++ /dev/null @@ -1,108 +0,0 @@ -import { join } from 'path'; -import parse from 'co-busboy'; -import fs from 'mz/fs'; -import sizeOf from 'image-size'; -import hostrId from './hostr-id'; -import resize from './resize'; -import { upload as sftpUpload } from './sftp'; - -import debugname from 'debug'; -const debug = debugname('hostr-api:upload'); - -const storePath = process.env.UPLOAD_STORAGE_PATH; -const baseURL = process.env.WEB_BASE_URL; -const supported = ['jpg', 'png', 'gif']; - -export function* checkLimit() { - const count = yield this.db.Files.count({ - owner: this.user.id, - time_added: {'$gt': Math.ceil(Date.now() / 1000) - 86400}, - }); - const userLimit = this.user.daily_upload_allowance; - const underLimit = (count < userLimit || userLimit === 'unlimited'); - if (!underLimit) { - this.statsd.incr('file.overlimit', 1); - } - this.assert(underLimit, 400, `{ - "error": { - "message": "Daily upload limits (${this.user.daily_upload_allowance}) exceeded.", - "code": 602 - } - }`); - return true; -} - -export function* accept() { - yield checkLimit.call(this); - - const upload = yield parse(this, { - autoFields: true, - headers: this.request.headers, - limits: { files: 1}, - highWaterMark: 1000000, - }); - - upload.promise = new Promise((resolve, reject) => { - upload.on('error', (err) => { - this.statsd.incr('file.upload.error', 1); - debug(err); - reject(); - }); - - upload.on('end', () => { - resolve(); - }); - }); - - upload.tempGuid = this.request.headers['hostr-guid']; - upload.originalName = upload.filename; - upload.filename = upload.filename.replace(/[^a-zA-Z0-9\.\-\_\s]/g, '').replace(/\s+/g, ''); - upload.id = yield hostrId(this.db.Files); - - const acceptedEvent = `{"type": "file-accepted", "data": {"id": "${upload.id}", "guid": 
"${upload.tempGuid}", "href": "${baseURL}/${upload.id}"}}`; - this.redis.publish('/user/' + this.user.id, acceptedEvent); - this.statsd.incr('file.upload.accepted', 1); - - return upload; -} - -export function resizeImage(upload, type, currentSize, newSize) { - return resize(join(storePath, upload.path), type, currentSize, newSize).then((image) => { - const path = join(upload.id[0], String(newSize.width), upload.id + '_' + upload.filename); - debug('Writing file'); - debug(join(storePath, path)); - return fs.writeFile(join(storePath, path), image).then(() => { - debug('Uploading file'); - return sftpUpload(join(storePath, path), join('hostr', 'uploads', path)); - }).catch(debug); - }).catch(debug); -} - -export function* processImage(upload) { - debug('Processing image'); - return new Promise((resolve) => { - const size = sizeOf(join(storePath, upload.path)); - debug('Size: ', size); - if (!size.width || supported.indexOf(size.type) < 0) { - resolve(); - } - - Promise.all([ - resizeImage(upload, size.type, size, {width: 150, height: 150}), - resizeImage(upload, size.type, size, {width: 970}), - ]).then(() => { - resolve(size); - }); - }); -} - -export function progressEvent() { - percentComplete = Math.floor(receivedSize * 100 / expectedSize); - if (percentComplete > lastPercent && lastTick < Date.now() - 1000) { - const progressEvent = `{"type": "file-progress", "data": {"id": "${upload.id}", "complete": ${percentComplete}}}`; - this.redis.publish('/file/' + upload.id, progressEvent); - this.redis.publish('/user/' + this.user.id, progressEvent); - lastTick = Date.now(); - } - lastPercent = percentComplete; -} diff --git a/lib/uploader.js b/lib/uploader.js new file mode 100644 index 0000000..a3a3346 --- /dev/null +++ b/lib/uploader.js @@ -0,0 +1,251 @@ +import { join } from 'path'; +import parse from 'co-busboy'; +import crypto from 'crypto'; +import fs from 'mz/fs'; +import sizeOf from 'image-size'; + +import { formatFile } from './format'; +import hostrId from './hostr-id'; +import resize from './resize'; +import malware from './malware'; +import { sniff } from './type'; +import { upload as sftpUpload } from './sftp'; + +import debugname from 'debug'; +const debug = debugname('hostr-api:uploader'); + +const storePath = process.env.UPLOAD_STORAGE_PATH; +const baseURL = process.env.WEB_BASE_URL; +const supported = ['jpeg', 'jpg', 'png', 'gif']; + + +export default class Uploader { + constructor(context) { + this.context = context; + this.Files = context.db.Files; + this.expectedSize = context.request.headers['content-length']; + this.tempGuid = context.request.headers['hostr-guid']; + this.remoteIp = context.request.headers['x-real-ip'] || context.req.connection.remoteAddress; + this.md5sum = crypto.createHash('md5'); + + this.lastPercent = 0; + this.percentComplete = 0; + this.lastTick = 0; + this.receivedSize = 0; + } + + *accept() { + yield this.checkLimit(); + this.upload = yield parse(this.context, { + autoFields: true, + headers: this.context.request.headers, + limits: { files: 1 }, + highWaterMark: 10000000, + }); + + this.promise = new Promise((resolve, reject) => { + this.upload.on('error', (err) => { + this.statsd.incr('file.upload.error', 1); + debug(err); + reject(); + }); + + this.upload.on('end', () => { + resolve(); + }); + }); + + this.tempGuid = this.tempGuid; + this.originalName = this.upload.filename; + this.filename = this.upload.filename.replace(/[^a-zA-Z0-9\.\-_\s]/g, '').replace(/\s+/g, ''); + this.id = yield hostrId(this.Files); + } + + receive() { + this.path = 
join(this.id[0], `${this.id}_${this.filename}`); + this.localStream = fs.createWriteStream(join(storePath, this.path)); + + this.upload.pause(); + + this.upload.on('data', (data) => { + this.receivedSize += data.length; + if (this.receivedSize > this.context.user.max_filesize) { + fs.unlink(join(storePath, this.path)); + this.context.throw(413, `{"error": {"message": "The file you tried to upload is too large.", + "code": 601}}`); + } + + this.localStream.write(data); + + this.percentComplete = Math.floor(this.receivedSize * 100 / this.expectedSize); + if (this.percentComplete > this.lastPercent && this.lastTick < Date.now() - 1000) { + const progressEvent = `{"type": "file-progress", "data": + {"id": "${this.upload.id}", "complete": ${this.percentComplete}}}`; + this.context.redis.publish(`/file/${this.upload.id}`, progressEvent); + this.context.redis.publish(`/user/${this.context.user.id}`, progressEvent); + this.lastTick = Date.now(); + } + this.lastPercent = this.percentComplete; + + this.md5sum.update(data); + }); + + this.upload.on('end', () => { + this.localStream.end(); + }); + + this.upload.resume(); + } + + sendToSFTP() { + return sftpUpload(join(storePath, this.path), join('hostr', 'uploads', this.path)); + } + + acceptedEvent() { + const acceptedEvent = `{"type": "file-accepted", "data": + {"id": "${this.id}", "guid": "${this.tempGuid}", "href": "${baseURL}/${this.id}"}}`; + this.context.redis.publish(`/user/${this.context.user.id}`, acceptedEvent); + this.context.statsd.incr('file.upload.accepted', 1); + } + + processingEvent() { + const processingEvent = `{"type": "file-progress", "data": + {"id": "${this.id}", "complete": 100}}`; + this.context.redis.publish(`/file/${this.id}`, processingEvent); + this.context.redis.publish(`/user/${this.context.user.id}`, processingEvent); + this.context.statsd.incr('file.upload.complete', 1); + } + + completeEvent() { + const completeEvent = `{"type": "file-added", "data": ${JSON.stringify(this.toDBFormat())}}`; + this.context.redis.publish(`/file/${this.id}`, completeEvent); + this.context.redis.publish(`/user/${this.context.user.id}`, completeEvent); + } + + toDBFormat() { + const formatted = { + owner: this.context.user.id, + ip: this.remoteIp, + system_name: this.id, + file_name: this.filename, + original_name: this.originalName, + file_size: this.receivedSize, + time_added: Math.ceil(Date.now() / 1000), + status: 'active', + last_accessed: null, + s3: false, + type: sniff(this.filename), + }; + + if (this.width) { + formatted.width = this.width; + formatted.height = this.height; + } + + return formatted; + } + + save() { + return this.Files.insertOne({ _id: this.id, ...this.toDBFormat() }); + } + + toJSON() { + return formatFile({ _id: this.id, ...this.toDBFormat() }); + } + + *checkLimit() { + const count = yield this.Files.count({ + owner: this.context.user.id, + time_added: { $gt: Math.ceil(Date.now() / 1000) - 86400 }, + }); + const userLimit = this.context.user.daily_upload_allowance; + const underLimit = (count < userLimit || userLimit === 'unlimited'); + if (!underLimit) { + this.context.statsd.incr('file.overlimit', 1); + } + this.context.assert(underLimit, 400, `{ + "error": { + "message": "Daily upload limits (${this.context.user.daily_upload_allowance}) exceeded.", + "code": 602 + } + }`); + return true; + } + + *finalise() { + const dbFile = this.toDBFormat(); + dbFile.file_size = this.receivedSize; + dbFile.status = 'active'; + dbFile.md5 = this.md5sum.digest('hex'); + + if (this.width) { + dbFile.width = this.width; + 
dbFile.height = this.height; + } + + yield this.Files.updateOne({ _id: this.id }, { $set: dbFile }); + } + + resizeImage(upload, type, currentSize, newSize) { + return resize(join(storePath, this.path), type, currentSize, newSize).then((image) => { + const path = join(this.id[0], String(newSize.width), `${this.id}_${this.filename}`); + debug('Writing file'); + debug(join(storePath, path)); + return fs.writeFile(join(storePath, path), image).then(() => { + debug('Uploading file'); + return sftpUpload(join(storePath, path), join('hostr', 'uploads', path)); + }).catch(debug); + }).catch(debug); + } + + *processImage(upload) { + return new Promise((resolve) => { + let size; + try { + if (supported.indexOf(this.path.split('.').pop().toLowerCase()) < 0) { + resolve(); + return; + } + size = sizeOf(join(storePath, this.path)); + } catch (err) { + debug(err); + resolve(); + return; + } + + if (!size.width || supported.indexOf(size.type) < 0) { + resolve(); + return; + } + + this.width = size.width; + this.height = size.height; + + Promise.all([ + this.resizeImage(upload, size.type, size, { width: 150, height: 150 }), + this.resizeImage(upload, size.type, size, { width: 970 }), + ]).then(() => { + resolve(size); + }); + }); + } + + malwareScan() { + if (process.env.VIRUSTOTAL_KEY) { + // Check in the background + process.nextTick(function* scan() { + debug('Malware Scan'); + const result = yield malware(this); + if (result) { + yield this.Files.updateOne({ _id: this.id }, + { $set: { malware: result.positive, virustotal: result } }); + if (result.positive) { + this.context.statsd.incr('file.malware', 1); + } + } + }); + } else { + debug('Skipping Malware Scan, VIRUSTOTAL env variable not found.'); + } + } +} diff --git a/package.json b/package.json index df94385..354b3e6 100644 --- a/package.json +++ b/package.json @@ -24,7 +24,7 @@ "watch-sass": "node-sass -w -r -o web/public/styles/ web/public/styles/" }, "dependencies": { - "aws-sdk": "^2.3.16", + "aws-sdk": "^2.3.17", "babel": "^6.5.2", "babel-cli": "^6.9.0", "babel-plugin-transform-es2015-destructuring": "^6.9.0", @@ -97,9 +97,9 @@ "dependencies": { "angular": "npm:angular@^1.5.6", "angular-reconnecting-websocket": "github:adieu/angular-reconnecting-websocket@~0.1.1", + "angular-strap": "npm:angular-strap@^2.3.8", "angular/resource": "npm:angular-resource@^1.5.6", "angular/route": "npm:angular-route@^1.5.6", - "angular-strap": "npm:angular-strap@^2.3.8", "bootstrap-sass": "npm:bootstrap-sass@^3.3.6", "cferdinandi/smooth-scroll": "github:cferdinandi/smooth-scroll@~5.3.7", "dropzone": "npm:dropzone@~4.0.1", diff --git a/web/public/config.js b/web/public/config.js index 66c790d..d1045f7 100644 --- a/web/public/config.js +++ b/web/public/config.js @@ -16,9 +16,9 @@ System.config({ map: { "angular": "npm:angular@1.5.6", "angular-reconnecting-websocket": "github:adieu/angular-reconnecting-websocket@0.1.1", + "angular-strap": "npm:angular-strap@2.3.8", "angular/resource": "npm:angular-resource@1.5.6", "angular/route": "npm:angular-route@1.5.6", - "angular-strap": "npm:angular-strap@2.3.8", "babel": "npm:babel-core@5.8.38", "babel-runtime": "npm:babel-runtime@5.8.38", "bootstrap-sass": "npm:bootstrap-sass@3.3.6", diff --git a/web/routes/file.js b/web/routes/file.js index 4968fb7..105b5d8 100644 --- a/web/routes/file.js +++ b/web/routes/file.js @@ -39,7 +39,7 @@ export function* get() { let remotePath = path.join(file._id[0], file._id + '_' + file.file_name); if (this.params.size > 0) { localPath = path.join(storePath, file._id[0], this.params.size, 
file._id + '_' + file.file_name); - remotePath = path.join(this.params.size, file._id + '_' + file.file_name); + remotePath = path.join(file._id[0], this.params.size, file._id + '_' + file.file_name); } if (file.malware) {
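
Example (not part of the patch): a minimal sketch of how the vendored lib/ssh2-sftp-client.js introduced here is driven, mirroring what lib/sftp.js does after this change — connect with the SFTP_* environment variables, fetch a binary read stream with { encoding: null }, then close the connection. The upload id, file name, and local paths below are hypothetical placeholders, not values from the patch.

    import fs from 'fs';
    import Client from './lib/ssh2-sftp-client'; // path assumes a script at the repo root

    const sftp = new Client();

    sftp.connect({
      host: process.env.SFTP_HOST,
      port: process.env.SFTP_PORT,
      username: process.env.SFTP_USERNAME,
      password: process.env.SFTP_PASSWORD,
    })
      // connect() resolves once the ssh2 session is ready and an SFTP channel is open
      .then(() => sftp.get('hostr/uploads/a/abc12345_example.png', { encoding: null }))
      // get() resolves with a readable stream; { encoding: null } overrides the
      // client's default utf8 encoding so binary files come through untouched
      .then((remoteRead) => {
        remoteRead.pipe(fs.createWriteStream('/tmp/abc12345_example.png'));
        remoteRead.on('end', () => sftp.end());
      })
      .catch((err) => {
        console.error('sftp fetch failed', err);
        sftp.end();
      });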