abort uploads over maxfilesize
parent 34f26fc017
commit 55d3d1a792
11 changed files with 415 additions and 2438 deletions
@@ -36,6 +36,11 @@ const conf = convict({
     format: ['production', 'development', 'test'],
     default: 'development',
     env: 'NODE_ENV'
   },
+  max_file_size: {
+    format: Number,
+    default: (1024 * 1024 * 1024) * 2,
+    env: 'P2P_MAX_FILE_SIZE'
+  }
 });

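For context, the new max_file_size entry follows the usual convict pattern: the default (2 GiB, in bytes) applies unless the named environment variable overrides it. A minimal standalone sketch of how such a value resolves, not from this commit and assuming only the standard convict API:

    const convict = require('convict');

    const conf = convict({
      max_file_size: {
        format: Number,
        default: (1024 * 1024 * 1024) * 2, // 2 GiB, in bytes
        env: 'P2P_MAX_FILE_SIZE'           // overrides the default when set
      }
    });

    conf.validate();
    console.log(conf.get('max_file_size')); // 2147483648 unless P2P_MAX_FILE_SIZE is set
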
@@ -62,7 +62,11 @@ app.use(
     }
   })
 );
-app.use(busboy());
+app.use(busboy({
+  limits: {
+    fileSize: conf.max_file_size
+  }
+}));
 app.use(bodyParser.json());
 app.use(express.static(STATIC_PATH));
 app.use('/l20n', express.static(L20N));

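For reference, a minimal standalone sketch (not from this commit) of what the fileSize limit buys: when an incoming file exceeds it, busboy truncates the stream and emits a 'limit' event on it, which is the signal the storage handlers below hook into. This uses the plain busboy API of the era rather than the connect-busboy middleware wired up above; the 1 KiB limit and port are illustrative only.

    const http = require('http');
    const Busboy = require('busboy');

    http.createServer((req, res) => {
      const busboy = new Busboy({ headers: req.headers, limits: { fileSize: 1024 } });
      busboy.on('file', (fieldname, file) => {
        file.on('limit', () => {
          // Fired once the file exceeds fileSize; the remainder is discarded.
          res.statusCode = 413;
        });
        file.resume(); // drain the (possibly truncated) stream
      });
      busboy.on('finish', () => res.end());
      req.pipe(busboy);
    }).listen(8080);
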
@@ -77,6 +81,7 @@ app.get('/jsconfig.js', (req, res) => {
   res.render('jsconfig', {
     trackerId: conf.analytics_id,
     dsn: conf.sentry_id,
+    maxFileSize: conf.max_file_size,
     layout: false
   });
 });

@@ -227,6 +232,12 @@ app.post('/upload', (req, res, next) => {
         delete: meta.delete,
         id: newId
       });
     },
+    err => {
+      if (err.message === 'limit') {
+        return res.sendStatus(413);
+      }
+      res.sendStatus(500);
     });
 });

@@ -129,20 +129,24 @@ function localGet(id) {
 
 function localSet(newId, file, filename, meta) {
   return new Promise((resolve, reject) => {
-    const fstream = fs.createWriteStream(
-      path.join(__dirname, '../static', newId)
-    );
+    const filepath = path.join(__dirname, '../static', newId);
+    const fstream = fs.createWriteStream(filepath);
     file.pipe(fstream);
-    fstream.on('close', () => {
+    file.on('limit', () => {
+      file.unpipe(fstream);
+      fstream.destroy(new Error('limit'));
+    });
+    fstream.on('finish', () => {
       redis_client.hmset(newId, meta);
       redis_client.expire(newId, 86400000);
       log.info('localSet:', 'Upload Finished of ' + newId);
       resolve(meta.delete);
     });
 
-    fstream.on('error', () => {
+    fstream.on('error', err => {
       log.error('localSet:', 'Failed upload of ' + newId);
-      reject();
+      fs.unlinkSync(filepath);
+      reject(err);
     });
   });
 }

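The key mechanism in the local backend is that writable.destroy(err) makes the stream emit 'error' with that error, so one error path covers both genuine I/O failures and the deliberate size-limit abort, and the partial file can be cleaned up in either case. A minimal sketch of that pattern (not from this commit; path and data are placeholders, cleanup is best-effort):

    const fs = require('fs');

    const filepath = '/tmp/example-part';
    const out = fs.createWriteStream(filepath);

    out.on('error', err => {
      // Real write failures and the deliberate abort both land here.
      console.log('upload failed:', err.message); // 'limit' when aborted
      fs.unlink(filepath, () => {}); // discard the partial file, ignore if missing
    });

    out.write('some data');
    out.destroy(new Error('limit')); // abort: emits 'error', then 'close'
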
@@ -211,21 +215,25 @@ function awsSet(newId, file, filename, meta) {
     Key: newId,
     Body: file
   };
 
-  return new Promise((resolve, reject) => {
-    s3.upload(params, function(err, _data) {
-      if (err) {
-        log.info('awsUploadError:', err.stack); // an error occurred
-        reject();
+  let hitLimit = false;
+  const upload = s3.upload(params);
+  file.on('limit', () => {
+    hitLimit = true;
+    upload.abort();
+  });
+  return upload.promise()
+    .then(() => {
+      redis_client.hmset(newId, meta);
+      redis_client.expire(newId, 86400000);
+      log.info('awsUploadFinish', 'Upload Finished of ' + filename);
+    },
+    err => {
+      if (hitLimit) {
+        throw new Error('limit');
       } else {
-        redis_client.hmset(newId, meta);
-
-        redis_client.expire(newId, 86400000);
-        log.info('awsUploadFinish', 'Upload Finished of ' + filename);
-        resolve(meta.delete);
+        throw err;
       }
     });
-  });
 }
 
 function awsDelete(id, delete_token) {

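On the S3 path the same 'limit' signal is translated differently: s3.upload() returns a managed upload whose abort() makes the pending promise() reject, but the rejection reason is an SDK abort error rather than anything labelled 'limit', so the hitLimit flag is used to re-throw a uniform Error('limit') that the upload route maps to 413. A minimal sketch of that pattern (not from this commit; AWS SDK v2 assumed, bucket name and the signal emitter are hypothetical):

    const AWS = require('aws-sdk');
    const s3 = new AWS.S3();

    // `signal` stands in for the busboy file stream, which emits 'limit'.
    function uploadWithAbort(key, body, signal) {
      let hitLimit = false;
      const upload = s3.upload({ Bucket: 'example-bucket', Key: key, Body: body });

      signal.on('limit', () => {
        hitLimit = true;
        upload.abort(); // makes upload.promise() reject
      });

      return upload.promise().catch(err => {
        // Normalize the abort into the same error the local backend produces.
        throw hitLimit ? new Error('limit') : err;
      });
    }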