refactored server
parent 6d470b8eba
commit 3fd2537311
36 changed files with 2944 additions and 792 deletions
(deleted file)
@@ -1,16 +0,0 @@
const { availableLanguages } = require('../package.json');
const config = require('./config');
const fs = require('fs');
const path = require('path');

function allLangs() {
  return fs.readdirSync(
    path.join(__dirname, '..', 'dist', 'public', 'locales')
  );
}

if (config.l10n_dev) {
  module.exports = allLangs();
} else {
  module.exports = availableLanguages;
}
server/metadata.js (new file, 13 lines)
@@ -0,0 +1,13 @@
class Metadata {
  constructor(obj) {
    this.dl = +obj.dl || 0;
    this.dlimit = +obj.dlimit || 1;
    this.pwd = String(obj.pwd) === 'true';
    this.owner = obj.owner;
    this.metadata = obj.metadata;
    this.auth = obj.auth;
    this.nonce = obj.nonce;
  }
}

module.exports = Metadata;
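Not part of the commit: a minimal sketch of the coercion Metadata performs on a raw Redis hash (all values arrive as strings); the field values and require path are hypothetical.

// Illustrative only.
const Metadata = require('./server/metadata');

const raw = { dl: '2', dlimit: '5', pwd: 'true', owner: 'abc123' }; // e.g. an hgetall reply
const meta = new Metadata(raw);
// meta.dl === 2 (number), meta.dlimit === 5 (number), meta.pwd === true (boolean)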
server/middleware/auth.js (new file, 38 lines)
@@ -0,0 +1,38 @@
const crypto = require('crypto');
const storage = require('../storage');

module.exports = async function(req, res, next) {
  const id = req.params.id;
  if (id && req.header('Authorization')) {
    try {
      const auth = req.header('Authorization').split(' ')[1];
      const meta = await storage.metadata(id);
      if (!meta) {
        return res.sendStatus(404);
      }
      const hmac = crypto.createHmac(
        'sha256',
        Buffer.from(meta.auth, 'base64')
      );
      hmac.update(Buffer.from(meta.nonce, 'base64'));
      const verifyHash = hmac.digest();
      if (verifyHash.equals(Buffer.from(auth, 'base64'))) {
        req.nonce = crypto.randomBytes(16).toString('base64');
        storage.setField(id, 'nonce', req.nonce);
        res.set('WWW-Authenticate', `send-v1 ${req.nonce}`);
        req.authorized = true;
        req.meta = meta;
      } else {
        res.set('WWW-Authenticate', `send-v1 ${meta.nonce}`);
        req.authorized = false;
      }
    } catch (e) {
      req.authorized = false;
    }
  }
  if (req.authorized) {
    next();
  } else {
    res.sendStatus(401);
  }
};
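Not part of the commit: a sketch of the client side of the send-v1 challenge this middleware verifies, assuming the client still holds the base64 `auth` secret it registered at upload and the latest nonce from a `WWW-Authenticate: send-v1 <nonce>` response.

// Illustrative only — mirrors the HMAC the middleware recomputes.
const crypto = require('crypto');

function answerChallenge(authKeyB64, nonceB64) {
  const hmac = crypto.createHmac('sha256', Buffer.from(authKeyB64, 'base64'));
  hmac.update(Buffer.from(nonceB64, 'base64'));
  // Send this as: Authorization: send-v1 <digest>
  return hmac.digest('base64');
}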
server/middleware/language.js (new file, 40 lines)
@@ -0,0 +1,40 @@
const { availableLanguages } = require('../../package.json');
const config = require('../config');
const fs = require('fs');
const path = require('path');
const { negotiateLanguages } = require('fluent-langneg');
const langData = require('cldr-core/supplemental/likelySubtags.json');
const acceptLanguages = /(([a-zA-Z]+(-[a-zA-Z0-9]+){0,2})|\*)(;q=[0-1](\.[0-9]+)?)?/g;

function allLangs() {
  return fs.readdirSync(
    path.join(__dirname, '..', '..', 'dist', 'public', 'locales')
  );
}

const languages = config.l10n_dev ? allLangs() : availableLanguages;

module.exports = function(req, res, next) {
  const header = req.headers['accept-language'] || 'en-US';
  if (header.length > 255) {
    req.language = 'en-US';
    return next();
  }
  const langs = header.replace(/\s/g, '').match(acceptLanguages);
  const preferred = langs
    .map(l => {
      const parts = l.split(';');
      return {
        locale: parts[0],
        q: parts[1] ? parseFloat(parts[1].split('=')[1]) : 1
      };
    })
    .sort((a, b) => b.q - a.q)
    .map(x => x.locale);
  req.language = negotiateLanguages(preferred, languages, {
    strategy: 'lookup',
    likelySubtags: langData.supplemental.likelySubtags,
    defaultLocale: 'en-US'
  })[0];
  next();
};
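Not part of the commit: what the q-sorting plus fluent-langneg lookup amounts to for a sample Accept-Language header; the available-locale list here is hypothetical.

// Illustrative only.
const { negotiateLanguages } = require('fluent-langneg');
const langData = require('cldr-core/supplemental/likelySubtags.json');

// 'fr-CA;q=0.9, fr;q=0.8, en;q=0.7' parses and q-sorts to:
const preferred = ['fr-CA', 'fr', 'en'];
const available = ['en-US', 'fr', 'de']; // hypothetical locale directory
const best = negotiateLanguages(preferred, available, {
  strategy: 'lookup',
  likelySubtags: langData.supplemental.likelySubtags,
  defaultLocale: 'en-US'
})[0];
// expected: 'fr'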
server/middleware/owner.js (new file, 22 lines)
@@ -0,0 +1,22 @@
const storage = require('../storage');

module.exports = async function(req, res, next) {
  const id = req.params.id;
  const ownerToken = req.body.owner_token;
  if (id && ownerToken) {
    try {
      req.meta = await storage.metadata(id);
      if (!req.meta) {
        return res.sendStatus(404);
      }
      req.authorized = req.meta.owner === ownerToken;
    } catch (e) {
      req.authorized = false;
    }
  }
  if (req.authorized) {
    next();
  } else {
    res.sendStatus(401);
  }
};
@@ -1,20 +1,9 @@
const storage = require('../storage');

module.exports = async function(req, res) {
  const id = req.params.id;

  const ownerToken = req.body.owner_token || req.body.delete_token;

  if (!ownerToken) {
    res.sendStatus(404);
    return;
  }

  try {
    const err = await storage.delete(id, ownerToken);
    if (!err) {
      res.sendStatus(200);
    }
    await storage.del(req.params.id);
    res.sendStatus(200);
  } catch (e) {
    res.sendStatus(404);
  }
@@ -1,39 +1,26 @@
const storage = require('../storage');
const mozlog = require('../log');
const log = mozlog('send.download');
const crypto = require('crypto');

module.exports = async function(req, res) {
  const id = req.params.id;

  try {
    const auth = req.header('Authorization').split(' ')[1];
    const meta = await storage.metadata(id);
    const hmac = crypto.createHmac('sha256', Buffer.from(meta.auth, 'base64'));
    hmac.update(Buffer.from(meta.nonce, 'base64'));
    const verifyHash = hmac.digest();
    if (!verifyHash.equals(Buffer.from(auth, 'base64'))) {
      res.set('WWW-Authenticate', `send-v1 ${meta.nonce}`);
      return res.sendStatus(401);
    }
    const nonce = crypto.randomBytes(16).toString('base64');
    storage.setField(id, 'nonce', nonce);
    const meta = req.meta;
    const contentLength = await storage.length(id);
    res.writeHead(200, {
      'Content-Disposition': 'attachment',
      'Content-Type': 'application/octet-stream',
      'Content-Length': contentLength,
      'X-File-Metadata': meta.metadata,
      'WWW-Authenticate': `send-v1 ${nonce}`
      'WWW-Authenticate': `send-v1 ${req.nonce}`
    });
    const file_stream = storage.get(id);

    file_stream.on('end', async () => {
      const dl = (+meta.dl || 0) + 1;
      const dlimit = +meta.dlimit || 1;
      const dl = meta.dl + 1;
      const dlimit = meta.dlimit;
      try {
        if (dl >= dlimit) {
          await storage.forceDelete(id);
          await storage.del(id);
        } else {
          await storage.setField(id, 'dl', dl);
        }
@@ -41,7 +28,6 @@ module.exports = async function(req, res) {
        log.info('StorageError:', id);
      }
    });

    file_stream.pipe(res);
  } catch (e) {
    res.sendStatus(404);
@@ -1,13 +1,11 @@
const storage = require('../storage');

module.exports = async (req, res) => {
  const id = req.params.id;

  try {
    const meta = await storage.metadata(id);
    const meta = await storage.metadata(req.params.id);
    res.set('WWW-Authenticate', `send-v1 ${meta.nonce}`);
    res.send({
      password: meta.pwd !== '0'
      password: meta.pwd
    });
  } catch (e) {
    res.sendStatus(404);
@@ -1,41 +1,22 @@
const busboy = require('connect-busboy');
const helmet = require('helmet');
const bodyParser = require('body-parser');
const languages = require('../languages');
const storage = require('../storage');
const config = require('../config');
const auth = require('../middleware/auth');
const owner = require('../middleware/owner');
const language = require('../middleware/language');
const pages = require('./pages');
const { negotiateLanguages } = require('fluent-langneg');

const IS_DEV = config.env === 'development';
const acceptLanguages = /(([a-zA-Z]+(-[a-zA-Z0-9]+){0,2})|\*)(;q=[0-1](\.[0-9]+)?)?/g;
const langData = require('cldr-core/supplemental/likelySubtags.json');
const idregx = '([0-9a-fA-F]{10})';
const ID_REGEX = '([0-9a-fA-F]{10})';
const uploader = busboy({
  limits: {
    fileSize: config.max_file_size
  }
});

module.exports = function(app) {
  app.use(function(req, res, next) {
    const header = req.headers['accept-language'] || 'en-US';
    if (header.length > 255) {
      req.language = 'en-US';
      return next();
    }
    const langs = header.replace(/\s/g, '').match(acceptLanguages);
    const preferred = langs
      .map(l => {
        const parts = l.split(';');
        return {
          locale: parts[0],
          q: parts[1] ? parseFloat(parts[1].split('=')[1]) : 1
        };
      })
      .sort((a, b) => b.q - a.q)
      .map(x => x.locale);
    req.language = negotiateLanguages(preferred, languages, {
      strategy: 'lookup',
      likelySubtags: langData.supplemental.likelySubtags,
      defaultLocale: 'en-US'
    })[0];
    next();
  });
  app.use(helmet());
  app.use(
    helmet.hsts({
@@ -69,34 +50,27 @@ module.exports = function(app) {
      })
    );
  }
  app.use(
    busboy({
      limits: {
        fileSize: config.max_file_size
      }
    })
  );
  app.use(function(req, res, next) {
    res.set('Pragma', 'no-cache');
    res.set('Cache-Control', 'no-cache');
    next();
  });
  app.use(bodyParser.json());
  app.get('/', pages.index);
  app.get('/legal', pages.legal);
  app.get('/', language, pages.index);
  app.get('/legal', language, pages.legal);
  app.get('/jsconfig.js', require('./jsconfig'));
  app.get(`/share/:id${idregx}`, pages.blank);
  app.get(`/download/:id${idregx}`, pages.download);
  app.get('/completed', pages.blank);
  app.get('/unsupported/:reason', pages.unsupported);
  app.get(`/api/download/:id${idregx}`, require('./download'));
  app.get(`/api/exists/:id${idregx}`, require('./exists'));
  app.get(`/api/metadata/:id${idregx}`, require('./metadata'));
  app.post('/api/upload', require('./upload'));
  app.post(`/api/delete/:id${idregx}`, require('./delete'));
  app.post(`/api/password/:id${idregx}`, require('./password'));
  app.post(`/api/params/:id${idregx}`, require('./params'));
  app.post(`/api/info/:id${idregx}`, require('./info'));
  app.get(`/share/:id${ID_REGEX}`, language, pages.blank);
  app.get(`/download/:id${ID_REGEX}`, language, pages.download);
  app.get('/completed', language, pages.blank);
  app.get('/unsupported/:reason', language, pages.unsupported);
  app.get(`/api/download/:id${ID_REGEX}`, auth, require('./download'));
  app.get(`/api/exists/:id${ID_REGEX}`, require('./exists'));
  app.get(`/api/metadata/:id${ID_REGEX}`, auth, require('./metadata'));
  app.post('/api/upload', uploader, require('./upload'));
  app.post(`/api/delete/:id${ID_REGEX}`, owner, require('./delete'));
  app.post(`/api/password/:id${ID_REGEX}`, owner, require('./password'));
  app.post(`/api/params/:id${ID_REGEX}`, owner, require('./params'));
  app.post(`/api/info/:id${ID_REGEX}`, owner, require('./info'));

  app.get('/__version__', function(req, res) {
    res.sendFile(require.resolve('../../dist/version.json'));
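Not part of the commit: a condensed sketch of the contract behind this wiring, using the same ID_REGEX; the require paths are hypothetical. The auth and owner middleware resolve and verify the record up front, so routes such as ./download, ./metadata and ./info can read req.meta (and req.nonce for auth) instead of repeating the lookup.

// Illustrative only.
const express = require('express');
const auth = require('./server/middleware/auth'); // hypothetical path
const app = express();
const ID_REGEX = '([0-9a-fA-F]{10})';

app.get(`/api/metadata/:id${ID_REGEX}`, auth, (req, res) => {
  // auth already verified the send-v1 HMAC, rotated the nonce,
  // and attached the Metadata instance as req.meta.
  res.send({
    metadata: req.meta.metadata,
    finalDownload: req.meta.dl + 1 === req.meta.dlimit
  });
});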
@@ -1,21 +1,11 @@
const storage = require('../storage');

module.exports = async function(req, res) {
  const id = req.params.id;
  const ownerToken = req.body.owner_token;
  if (!ownerToken) {
    return res.sendStatus(400);
  }

  try {
    const meta = await storage.metadata(id);
    if (meta.owner !== ownerToken) {
      return res.sendStatus(400);
    }
    const ttl = await storage.ttl(id);
    const ttl = await storage.ttl(req.params.id);
    return res.send({
      dlimit: +meta.dlimit,
      dtotal: +meta.dl,
      dlimit: +req.meta.dlimit,
      dtotal: +req.meta.dl,
      ttl
    });
  } catch (e) {
@@ -1,28 +1,14 @@
const storage = require('../storage');
const crypto = require('crypto');

module.exports = async function(req, res) {
  const id = req.params.id;

  const meta = req.meta;
  try {
    const auth = req.header('Authorization').split(' ')[1];
    const meta = await storage.metadata(id);
    const hmac = crypto.createHmac('sha256', Buffer.from(meta.auth, 'base64'));
    hmac.update(Buffer.from(meta.nonce, 'base64'));
    const verifyHash = hmac.digest();
    if (!verifyHash.equals(Buffer.from(auth, 'base64'))) {
      res.set('WWW-Authenticate', `send-v1 ${meta.nonce}`);
      return res.sendStatus(401);
    }
    const nonce = crypto.randomBytes(16).toString('base64');
    storage.setField(id, 'nonce', nonce);
    res.set('WWW-Authenticate', `send-v1 ${nonce}`);

    const size = await storage.length(id);
    const ttl = await storage.ttl(id);
    res.send({
      metadata: meta.metadata,
      finalDownload: +meta.dl + 1 === +meta.dlimit,
      finalDownload: meta.dl + 1 === meta.dlimit,
      size,
      ttl
    });
@@ -1,23 +1,13 @@
const storage = require('../storage');

module.exports = async function(req, res) {
  const id = req.params.id;
  const ownerToken = req.body.owner_token;
  if (!ownerToken) {
    return res.sendStatus(400);
  }

module.exports = function(req, res) {
  const dlimit = req.body.dlimit;
  if (!dlimit || dlimit > 20) {
    return res.sendStatus(400);
  }

  try {
    const meta = await storage.metadata(id);
    if (meta.owner !== ownerToken) {
      return res.sendStatus(400);
    }
    storage.setField(id, 'dlimit', dlimit);
    storage.setField(req.params.id, 'dlimit', dlimit);
    res.sendStatus(200);
  } catch (e) {
    res.sendStatus(404);
@@ -1,23 +1,15 @@
const storage = require('../storage');

module.exports = async function(req, res) {
module.exports = function(req, res) {
  const id = req.params.id;
  const ownerToken = req.body.owner_token;
  if (!ownerToken) {
    return res.sendStatus(404);
  }
  const auth = req.body.auth;
  if (!auth) {
    return res.sendStatus(400);
  }

  try {
    const meta = await storage.metadata(id);
    if (meta.owner !== ownerToken) {
      return res.sendStatus(404);
    }
    storage.setField(id, 'auth', auth);
    storage.setField(id, 'pwd', 1);
    storage.setField(id, 'pwd', true);
    res.sendStatus(200);
  } catch (e) {
    return res.sendStatus(404);
@@ -14,12 +14,8 @@ module.exports = function(req, res) {
  }
  const owner = crypto.randomBytes(10).toString('hex');
  const meta = {
    dlimit: 1,
    dl: 0,
    owner,
    delete: owner, // delete is deprecated
    metadata,
    pwd: 0,
    auth: auth.split(' ')[1],
    nonce: crypto.randomBytes(16).toString('base64')
  };
@@ -47,7 +43,7 @@ module.exports = function(req, res) {

  req.on('close', async err => {
    try {
      await storage.forceDelete(newId);
      await storage.del(newId);
    } catch (e) {
      log.info('DeleteError:', newId);
    }
(deleted file)
@@ -1,285 +0,0 @@
const AWS = require('aws-sdk');
const s3 = new AWS.S3();
const mkdirp = require('mkdirp');

const config = require('./config');
const fs = require('fs');
const path = require('path');

const mozlog = require('./log');

const log = mozlog('send.storage');

const redis_lib =
  config.env === 'development' && config.redis_host === 'localhost'
    ? 'redis-mock'
    : 'redis';

const redis = require(redis_lib);
const redis_client = redis.createClient({
  host: config.redis_host,
  connect_timeout: 10000
});

redis_client.on('error', err => {
  log.error('Redis:', err);
});

const fileDir = config.file_dir;

if (config.s3_bucket) {
  module.exports = {
    exists: exists,
    ttl: ttl,
    length: awsLength,
    get: awsGet,
    set: awsSet,
    setField: setField,
    delete: awsDelete,
    forceDelete: awsForceDelete,
    ping: awsPing,
    flushall: flushall,
    quit: quit,
    metadata
  };
} else {
  mkdirp.sync(config.file_dir);
  log.info('fileDir', fileDir);
  module.exports = {
    exists: exists,
    ttl: ttl,
    length: localLength,
    get: localGet,
    set: localSet,
    setField: setField,
    delete: localDelete,
    forceDelete: localForceDelete,
    ping: localPing,
    flushall: flushall,
    quit: quit,
    metadata
  };
}

if (config.redis_event_expire) {
  const forceDelete = config.s3_bucket ? awsForceDelete : localForceDelete;
  const redis_sub = redis_client.duplicate();
  const subKey = '__keyevent@0__:expired';
  redis_sub.psubscribe(subKey, function() {
    log.info('Redis:', 'subscribed to expired key events');
  });

  redis_sub.on('pmessage', function(channel, message, id) {
    log.info('RedisExpired:', id);
    forceDelete(id);
  });
}

function flushall() {
  redis_client.flushdb();
}

function quit() {
  redis_client.quit();
}

function metadata(id) {
  return new Promise((resolve, reject) => {
    redis_client.hgetall(id, (err, reply) => {
      if (err || !reply) {
        return reject(err);
      }
      resolve(reply);
    });
  });
}

function ttl(id) {
  return new Promise((resolve, reject) => {
    redis_client.ttl(id, (err, reply) => {
      if (err || !reply) {
        return reject(err);
      }
      resolve(reply * 1000);
    });
  });
}

function exists(id) {
  return new Promise((resolve, reject) => {
    redis_client.exists(id, (rediserr, reply) => {
      if (reply === 1 && !rediserr) {
        resolve();
      } else {
        reject(rediserr);
      }
    });
  });
}

function setField(id, key, value) {
  redis_client.hset(id, key, value);
}

function localLength(id) {
  return new Promise((resolve, reject) => {
    try {
      resolve(fs.statSync(path.join(fileDir, id)).size);
    } catch (err) {
      reject();
    }
  });
}

function localGet(id) {
  return fs.createReadStream(path.join(fileDir, id));
}

function localSet(newId, file, meta) {
  return new Promise((resolve, reject) => {
    const filepath = path.join(fileDir, newId);
    const fstream = fs.createWriteStream(filepath);
    file.pipe(fstream);
    file.on('limit', () => {
      file.unpipe(fstream);
      fstream.destroy(new Error('limit'));
    });
    fstream.on('finish', () => {
      redis_client.hmset(newId, meta);
      redis_client.expire(newId, config.expire_seconds);
      log.info('localSet:', 'Upload Finished of ' + newId);
      resolve(meta.owner);
    });

    fstream.on('error', err => {
      log.error('localSet:', 'Failed upload of ' + newId);
      fs.unlinkSync(filepath);
      reject(err);
    });
  });
}

function localDelete(id, ownerToken) {
  return new Promise((resolve, reject) => {
    redis_client.hget(id, 'delete', (err, reply) => {
      if (!reply || ownerToken !== reply) {
        reject();
      } else {
        redis_client.del(id);
        log.info('Deleted:', id);
        resolve(fs.unlinkSync(path.join(fileDir, id)));
      }
    });
  });
}

function localForceDelete(id) {
  return new Promise((resolve, reject) => {
    redis_client.del(id);
    resolve(fs.unlinkSync(path.join(fileDir, id)));
  });
}

function localPing() {
  return new Promise((resolve, reject) => {
    redis_client.ping(err => {
      return err ? reject() : resolve();
    });
  });
}

function awsLength(id) {
  const params = {
    Bucket: config.s3_bucket,
    Key: id
  };
  return new Promise((resolve, reject) => {
    s3.headObject(params, function(err, data) {
      if (!err) {
        resolve(data.ContentLength);
      } else {
        reject();
      }
    });
  });
}

function awsGet(id) {
  const params = {
    Bucket: config.s3_bucket,
    Key: id
  };

  try {
    return s3.getObject(params).createReadStream();
  } catch (err) {
    return null;
  }
}

function awsSet(newId, file, meta) {
  const params = {
    Bucket: config.s3_bucket,
    Key: newId,
    Body: file
  };
  let hitLimit = false;
  const upload = s3.upload(params);
  file.on('limit', () => {
    hitLimit = true;
    upload.abort();
  });
  return upload.promise().then(
    () => {
      redis_client.hmset(newId, meta);
      redis_client.expire(newId, config.expire_seconds);
    },
    err => {
      if (hitLimit) {
        throw new Error('limit');
      } else {
        throw err;
      }
    }
  );
}

function awsDelete(id, ownerToken) {
  return new Promise((resolve, reject) => {
    redis_client.hget(id, 'delete', (err, reply) => {
      if (!reply || ownerToken !== reply) {
        reject();
      } else {
        const params = {
          Bucket: config.s3_bucket,
          Key: id
        };

        s3.deleteObject(params, function(err, _data) {
          redis_client.del(id);
          err ? reject(err) : resolve(err);
        });
      }
    });
  });
}

function awsForceDelete(id) {
  return new Promise((resolve, reject) => {
    const params = {
      Bucket: config.s3_bucket,
      Key: id
    };

    s3.deleteObject(params, function(err, _data) {
      redis_client.del(id);
      err ? reject(err) : resolve();
    });
  });
}

function awsPing() {
  return localPing().then(() =>
    s3.headBucket({ Bucket: config.s3_bucket }).promise()
  );
}
server/storage/fs.js (new file, 50 lines)
@@ -0,0 +1,50 @@
const fs = require('fs');
const path = require('path');
const promisify = require('util').promisify;
const mkdirp = require('mkdirp');

const stat = promisify(fs.stat);

class FSStorage {
  constructor(config, log) {
    this.log = log;
    this.dir = config.file_dir;
    mkdirp.sync(this.dir);
  }

  async length(id) {
    const result = await stat(path.join(this.dir, id));
    return result.size;
  }

  getStream(id) {
    return fs.createReadStream(path.join(this.dir, id));
  }

  set(id, file) {
    return new Promise((resolve, reject) => {
      const filepath = path.join(this.dir, id);
      const fstream = fs.createWriteStream(filepath);
      file.pipe(fstream);
      file.on('limit', () => {
        file.unpipe(fstream);
        fstream.destroy(new Error('limit'));
      });
      fstream.on('error', err => {
        fs.unlinkSync(filepath);
        reject(err);
      });
      fstream.on('finish', resolve);
    });
  }

  del(id) {
    return Promise.resolve(fs.unlinkSync(path.join(this.dir, id)));
  }

  ping() {
    return Promise.resolve();
  }
}

module.exports = FSStorage;
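Not part of the commit: a sketch of how the 'limit' path surfaces to callers. busboy emits 'limit' on the file stream when it crosses the configured fileSize, FSStorage destroys the write stream with new Error('limit') so set() rejects, and the caller can map that to an HTTP error; the names below are hypothetical.

// Illustrative only.
async function saveUpload(storage, id, fileStream, res) {
  try {
    await storage.set(id, fileStream);
  } catch (e) {
    if (e.message === 'limit') {
      return res.sendStatus(413); // upload exceeded config.max_file_size
    }
    throw e;
  }
}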
server/storage/index.js (new file, 57 lines)
@@ -0,0 +1,57 @@
const config = require('../config');
const Metadata = require('../metadata');
const mozlog = require('../log');
const createRedisClient = require('./redis');

class DB {
  constructor(config) {
    const Storage = config.s3_bucket ? require('./s3') : require('./fs');
    this.log = mozlog('send.storage');
    this.expireSeconds = config.expire_seconds;
    this.storage = new Storage(config, this.log);
    this.redis = createRedisClient(config);
    this.redis.on('error', err => {
      this.log.error('Redis:', err);
    });
  }

  async ttl(id) {
    const result = await this.redis.ttlAsync(id);
    return Math.ceil(result) * 1000;
  }

  length(id) {
    return this.storage.length(id);
  }

  get(id) {
    return this.storage.getStream(id);
  }

  async set(id, file, meta) {
    await this.storage.set(id, file);
    this.redis.hmset(id, meta);
    this.redis.expire(id, this.expireSeconds);
  }

  setField(id, key, value) {
    this.redis.hset(id, key, value);
  }

  del(id) {
    this.redis.del(id);
    return this.storage.del(id);
  }

  async ping() {
    await this.redis.pingAsync();
    await this.storage.ping();
  }

  async metadata(id) {
    const result = await this.redis.hgetallAsync(id);
    return result && new Metadata(result);
  }
}

module.exports = new DB(config);
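Not part of the commit: a sketch of how the routes consume this storage facade, which pairs a byte store (fs or s3) with Redis for metadata and TTL; the ids, fields and require path are hypothetical.

// Illustrative only.
const storage = require('./server/storage');

async function example(id, fileStream) {
  await storage.set(id, fileStream, { owner: 'tok', dl: 0, dlimit: 1 }); // bytes + Redis hash
  const meta = await storage.metadata(id);  // Metadata instance, or falsy if missing
  const msLeft = await storage.ttl(id);     // remaining lifetime in milliseconds
  storage.setField(id, 'dl', meta.dl + 1);  // update a single hash field
  await storage.del(id);                    // remove the bytes and the Redis key
  return msLeft;
}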
server/storage/redis.js (new file, 21 lines)
@@ -0,0 +1,21 @@
const promisify = require('util').promisify;

module.exports = function(config) {
  const redis_lib =
    config.env === 'development' && config.redis_host === 'localhost'
      ? 'redis-mock'
      : 'redis';

  //eslint-disable-next-line security/detect-non-literal-require
  const redis = require(redis_lib);
  const client = redis.createClient({
    host: config.redis_host,
    connect_timeout: 10000
  });

  client.ttlAsync = promisify(client.ttl);
  client.hgetallAsync = promisify(client.hgetall);
  client.hgetAsync = promisify(client.hget);
  client.pingAsync = promisify(client.ping);
  return client;
};
server/storage/s3.js (new file, 51 lines)
@@ -0,0 +1,51 @@
const AWS = require('aws-sdk');
const s3 = new AWS.S3();

class S3Storage {
  constructor(config, log) {
    this.bucket = config.s3_bucket;
    this.log = log;
  }

  async length(id) {
    const result = await s3
      .headObject({ Bucket: this.bucket, Key: id })
      .promise();
    return result.ContentLength;
  }

  getStream(id) {
    return s3.getObject({ Bucket: this.bucket, Key: id }).createReadStream();
  }

  async set(id, file) {
    let hitLimit = false;
    const upload = s3.upload({
      Bucket: this.bucket,
      Key: id,
      Body: file
    });
    file.on('limit', () => {
      hitLimit = true;
      upload.abort();
    });
    try {
      await upload.promise();
    } catch (e) {
      if (hitLimit) {
        throw new Error('limit');
      }
      throw e;
    }
  }

  del(id) {
    return s3.deleteObject({ Bucket: this.bucket, Key: id }).promise();
  }

  ping() {
    return s3.headBucket({ Bucket: this.bucket }).promise();
  }
}

module.exports = S3Storage;