refactor to single bucket

Emily 2018-08-09 14:49:52 -07:00
parent 452ccd068b
commit b89bef6e89
7 changed files with 68 additions and 69 deletions
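At a glance, the refactor drops the scheme of one S3 bucket per expiry window in favour of a single bucket where the expiry window is encoded as a prefix on each object id. A minimal sketch of the two config shapes, using only the test fixtures visible in the storage test diff below (test values, not production settings; the variable names here are illustrative only):

// Before: several buckets, one per expiry window, chosen by index.
const multiBucketConfig = {
  num_of_buckets: 3,
  expire_times_seconds: [86400, 604800, 1209600],
  s3_buckets: ['foo', 'bar', 'baz']
};

// After: one bucket; the expiry window becomes an id prefix like 'ten-x'.
const singleBucketConfig = {
  s3_bucket: 'foo',
  num_of_prefixes: 3,
  expire_prefixes: ['ten', 'twenty', 'thirty'],
  expire_times_seconds: [10, 20, 30]
};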


@@ -32,8 +32,8 @@ const S3Storage = proxyquire('../../server/storage/s3', {
 });
 describe('S3Storage', function() {
-  it('uses config.s3_buckets', function() {
-    const s = new S3Storage({ s3_buckets: ['foo', 'bar', 'baz'] }, 0);
+  it('uses config.s3_bucket', function() {
+    const s = new S3Storage({ s3_bucket: 'foo' });
     assert.equal(s.bucket, 'foo');
   });
@@ -42,7 +42,7 @@ describe('S3Storage', function() {
       s3Stub.headObject = sinon
         .stub()
         .returns(resolvedPromise({ ContentLength: 123 }));
-      const s = new S3Storage({ s3_buckets: ['foo', 'bar', 'baz'] }, 0);
+      const s = new S3Storage({ s3_bucket: 'foo' });
       const len = await s.length('x');
       assert.equal(len, 123);
       sinon.assert.calledWithMatch(s3Stub.headObject, {
@@ -54,7 +54,7 @@ describe('S3Storage', function() {
     it('throws when id not found', async function() {
       const err = new Error();
       s3Stub.headObject = sinon.stub().returns(rejectedPromise(err));
-      const s = new S3Storage({ s3_buckets: ['foo', 'bar', 'baz'] }, 0);
+      const s = new S3Storage({ s3_bucket: 'foo' });
       try {
         await s.length('x');
         assert.fail();
@@ -70,7 +70,7 @@ describe('S3Storage', function() {
       s3Stub.getObject = sinon
         .stub()
         .returns({ createReadStream: () => stream });
-      const s = new S3Storage({ s3_buckets: ['foo', 'bar', 'baz'] }, 0);
+      const s = new S3Storage({ s3_bucket: 'foo' });
       const result = s.getStream('x');
       assert.equal(result, stream);
       sinon.assert.calledWithMatch(s3Stub.getObject, {
@@ -84,7 +84,7 @@ describe('S3Storage', function() {
     it('calls s3.upload', async function() {
       const file = { on: sinon.stub() };
       s3Stub.upload = sinon.stub().returns(resolvedPromise());
-      const s = new S3Storage({ s3_buckets: ['foo', 'bar', 'baz'] }, 0);
+      const s = new S3Storage({ s3_bucket: 'foo' });
       await s.set('x', file);
       sinon.assert.calledWithMatch(s3Stub.upload, {
         Bucket: 'foo',
@@ -103,7 +103,7 @@ describe('S3Storage', function() {
         promise: () => Promise.reject(err),
         abort
       });
-      const s = new S3Storage({ s3_buckets: ['foo', 'bar', 'baz'] }, 0);
+      const s = new S3Storage({ s3_bucket: 'foo' });
       try {
         await s.set('x', file);
         assert.fail();
@@ -119,7 +119,7 @@ describe('S3Storage', function() {
       };
       const err = new Error();
       s3Stub.upload = sinon.stub().returns(rejectedPromise(err));
-      const s = new S3Storage({ s3_buckets: ['foo', 'bar', 'baz'] }, 0);
+      const s = new S3Storage({ s3_bucket: 'foo' });
       try {
         await s.set('x', file);
         assert.fail();
@@ -132,7 +132,7 @@ describe('S3Storage', function() {
   describe('del', function() {
     it('calls s3.deleteObject', async function() {
       s3Stub.deleteObject = sinon.stub().returns(resolvedPromise(true));
-      const s = new S3Storage({ s3_buckets: ['foo', 'bar', 'baz'] }, 0);
+      const s = new S3Storage({ s3_bucket: 'foo' });
       const result = await s.del('x');
       assert.equal(result, true);
       sinon.assert.calledWithMatch(s3Stub.deleteObject, {
@@ -145,7 +145,7 @@ describe('S3Storage', function() {
   describe('ping', function() {
     it('calls s3.headBucket', async function() {
       s3Stub.headBucket = sinon.stub().returns(resolvedPromise(true));
-      const s = new S3Storage({ s3_buckets: ['foo', 'bar', 'baz'] }, 0);
+      const s = new S3Storage({ s3_bucket: 'foo' });
       const result = await s.ping();
       assert.equal(result, true);
       sinon.assert.calledWithMatch(s3Stub.headBucket, { Bucket: 'foo' });
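The constructor assertions above imply the storage class no longer takes a bucket index; it reads the bucket name straight from config. A rough sketch of that difference, with hypothetical class names and only the field the tests check (the real module also wires up the aws-sdk client and the length/getStream/set/del/ping methods exercised here):

// Before (sketch): bucket picked by index when the storage is constructed.
class MultiBucketS3Storage {
  constructor(config, bucketIndex) {
    this.bucket = config.s3_buckets[bucketIndex];
  }
}

// After (sketch): a single bucket for every object.
class SingleBucketS3Storage {
  constructor(config) {
    this.bucket = config.s3_bucket;
  }
}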


@@ -21,10 +21,11 @@ class MockStorage {
 }
 const config = {
-  default_expire_seconds: 10,
-  num_of_buckets: 3,
-  expire_times_seconds: [86400, 604800, 1209600],
-  s3_buckets: ['foo', 'bar', 'baz'],
+  s3_bucket: 'foo',
+  default_expire_seconds: 20,
+  num_of_prefixes: 3,
+  expire_prefixes: ['ten', 'twenty', 'thirty'],
+  expire_times_seconds: [10, 20, 30],
   env: 'development',
   redis_host: 'localhost'
 };
@@ -48,7 +49,6 @@ describe('Storage', function() {
   describe('length', function() {
     it('returns the file size', async function() {
       await storage.set('x', null);
       const len = await storage.length('x');
       assert.equal(len, 12);
     });
@@ -56,7 +56,6 @@ describe('Storage', function() {
   describe('get', function() {
     it('returns a stream', async function() {
       await storage.set('x', null);
       const s = await storage.get('x');
       assert.equal(s, stream);
     });
@@ -71,30 +70,31 @@ describe('Storage', function() {
       assert.equal(Math.ceil(s), seconds);
     });
-    it('puts into right bucket based on expire time', async function() {
-      for (let i = 0; i < config.num_of_buckets; i++) {
-        await storage.set(
-          'x',
-          null,
-          { foo: 'bar' },
-          config.expire_times_seconds[i]
-        );
-        const bucket = await storage.getBucket('x');
-        assert.equal(bucket, i);
-        await storage.del('x');
-      }
+    it('adds right prefix based on expire time', async function() {
+      await storage.set('x', null, { foo: 'bar' }, 10);
+      const path_x = await storage.getPrefixedId('x');
+      assert.equal(path_x, 'ten-x');
+      await storage.del('x');
+      await storage.set('y', null, { foo: 'bar' }, 11);
+      const path_y = await storage.getPrefixedId('y');
+      assert.equal(path_y, 'twenty-y');
+      await storage.del('y');
+      await storage.set('z', null, { foo: 'bar' }, 33);
+      const path_z = await storage.getPrefixedId('z');
+      assert.equal(path_z, 'thirty-z');
+      await storage.del('z');
     });
     it('sets metadata', async function() {
       const m = { foo: 'bar' };
       await storage.set('x', null, m);
       const meta = await storage.redis.hgetallAsync('x');
-      delete meta.bucket;
+      delete meta.prefix;
       await storage.del('x');
       assert.deepEqual(meta, m);
     });
     //it('throws when storage fails');
   });
   describe('setField', function() {
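The 'adds right prefix based on expire time' assertions pin down the prefixing behaviour: 10 seconds maps to 'ten', 11 to 'twenty', and 33, which exceeds every configured window, still maps to 'thirty', with the prefix joined to the id by a dash. One way that mapping could work, sketched against the test config only (getExpirePrefix and prefixedId are hypothetical helpers, not the real storage internals):

// Pick the smallest configured window that covers the requested expiry,
// falling back to the largest one, to match the assertions above.
function getExpirePrefix(config, seconds) {
  const times = config.expire_times_seconds; // [10, 20, 30] in the tests
  let i = times.findIndex(t => seconds <= t);
  if (i === -1) {
    i = times.length - 1; // longer than every window: use the last prefix
  }
  return config.expire_prefixes[i]; // 'ten' | 'twenty' | 'thirty'
}

// With the test config: 10 -> 'ten-x', 11 -> 'twenty-y', 33 -> 'thirty-z'.
function prefixedId(config, id, seconds) {
  return `${getExpirePrefix(config, seconds)}-${id}`;
}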