integrate with new ui

This commit is contained in:
Emily 2018-08-08 11:07:09 -07:00
parent 13057804ab
commit bf16e5c8a9
27 changed files with 250 additions and 315 deletions

View file

@ -4,10 +4,30 @@ const path = require('path');
const { randomBytes } = require('crypto');
const conf = convict({
s3_bucket: {
format: String,
default: '',
env: 'S3_BUCKET'
s3_buckets: {
format: Array,
default: [],
env: 'S3_BUCKETS'
},
num_of_buckets: {
format: Number,
default: 3,
env: 'NUM_OF_BUCKETS'
},
expire_times_seconds: {
format: Array,
default: [86400, 604800, 1209600],
env: 'EXPIRE_TIMES_SECONDS'
},
default_expire_seconds: {
format: Number,
default: 86400,
env: 'DEFAULT_EXPIRE_SECONDS'
},
max_expire_seconds: {
format: Number,
default: 1209600,
env: 'MAX_EXPIRE_SECONDS'
},
redis_host: {
format: String,
@ -55,11 +75,6 @@ const conf = convict({
default: 1024 * 1024 * 1024 * 3,
env: 'MAX_FILE_SIZE'
},
expire_seconds: {
format: Number,
default: 86400,
env: 'EXPIRE_SECONDS'
},
l10n_dev: {
format: Boolean,
default: false,

View file

@ -13,7 +13,8 @@ module.exports = async function(req, res) {
'Content-Length': contentLength,
'WWW-Authenticate': `send-v1 ${req.nonce}`
});
const file_stream = storage.get(id);
const file_stream = await storage.get(id);
let cancelled = false;
req.on('close', () => {

View file

@ -35,7 +35,7 @@ if (isIE && !isUnsupportedPage) {
window.location.replace('/unsupported/ie');
}
var MAXFILESIZE = ${config.max_file_size};
var EXPIRE_SECONDS = ${config.expire_seconds};
var DEFAULT_EXPIRE_SECONDS = ${config.default_expire_seconds};
${ga}
${sentry}
`;

View file

@ -24,7 +24,9 @@ module.exports = async function(req, res) {
try {
const limiter = new Limiter(config.max_file_size);
const fileStream = req.pipe(limiter);
await storage.set(newId, fileStream, meta);
//this route hasn't been updated to accept a per-upload expiration time yet;
//it falls back to the configured default (config.default_expire_seconds)
await storage.set(newId, fileStream, meta, config.default_expire_seconds);
const protocol = config.env === 'production' ? 'https' : req.protocol;
const url = `${protocol}://${req.get('host')}/download/${newId}/`;
res.set('WWW-Authenticate', `send-v1 ${meta.nonce}`);

View file

@ -23,10 +23,16 @@ module.exports = async function(ws, req) {
const owner = crypto.randomBytes(10).toString('hex');
const fileInfo = JSON.parse(message);
const timeLimit = fileInfo.timeLimit;
const metadata = fileInfo.fileMetadata;
const auth = fileInfo.authorization;
if (!metadata || !auth) {
if (
!metadata ||
!auth ||
timeLimit <= 0 ||
timeLimit > config.max_expire_seconds
) {
ws.send(
JSON.stringify({
error: 400
@ -50,7 +56,7 @@ module.exports = async function(ws, req) {
fileStream = wsStream(ws, { binary: true })
.pipe(limiter)
.pipe(parser);
await storage.set(newId, fileStream, meta);
await storage.set(newId, fileStream, meta, timeLimit);
if (ws.readyState === 1) {
// if the socket is closed by a cancelled upload the stream

View file

@ -6,7 +6,7 @@ const mkdirp = require('mkdirp');
const stat = promisify(fs.stat);
class FSStorage {
constructor(config, log) {
constructor(config, index, log) {
this.log = log;
this.dir = config.file_dir;
mkdirp.sync(this.dir);

View file

@ -5,10 +5,16 @@ const createRedisClient = require('./redis');
class DB {
constructor(config) {
const Storage = config.s3_bucket ? require('./s3') : require('./fs');
const Storage =
config.s3_buckets.length > 0 ? require('./s3') : require('./fs');
this.log = mozlog('send.storage');
this.expireSeconds = config.expire_seconds;
this.storage = new Storage(config, this.log);
this.storage = [];
for (let i = 0; i < config.num_of_buckets; i++) {
this.storage.push(new Storage(config, i, this.log));
}
this.redis = createRedisClient(config);
this.redis.on('error', err => {
this.log.error('Redis:', err);
@ -20,32 +26,51 @@ class DB {
return Math.ceil(result) * 1000;
}
length(id) {
return this.storage.length(id);
async getBucket(id) {
return this.redis.hgetAsync(id, 'bucket');
}
get(id) {
return this.storage.getStream(id);
async length(id) {
const bucket = await this.redis.hgetAsync(id, 'bucket');
return this.storage[bucket].length(id);
}
async set(id, file, meta) {
await this.storage.set(id, file);
async get(id) {
const bucket = await this.redis.hgetAsync(id, 'bucket');
return this.storage[bucket].getStream(id);
}
async set(id, file, meta, expireSeconds = config.default_expire_seconds) {
const bucketTimes = config.expire_times_seconds;
let bucket = 0;
while (bucket < config.num_of_buckets - 1) {
if (expireSeconds <= bucketTimes[bucket]) {
break;
}
bucket++;
}
await this.storage[bucket].set(id, file);
this.redis.hset(id, 'bucket', bucket);
this.redis.hmset(id, meta);
this.redis.expire(id, this.expireSeconds);
this.redis.expire(id, expireSeconds);
}
setField(id, key, value) {
this.redis.hset(id, key, value);
}
del(id) {
async del(id) {
const bucket = await this.redis.hgetAsync(id, 'bucket');
this.redis.del(id);
return this.storage.del(id);
this.storage[bucket].del(id);
}
async ping() {
await this.redis.pingAsync();
await this.storage.ping();
for (const bucket of this.storage) {
bucket.ping();
}
}
async metadata(id) {

View file

@ -2,8 +2,8 @@ const AWS = require('aws-sdk');
const s3 = new AWS.S3();
class S3Storage {
constructor(config, log) {
this.bucket = config.s3_bucket;
constructor(config, index, log) {
this.bucket = config.s3_buckets[index];
this.log = log;
}