Commit 7bf104960e — "Implemented multi-file upload/download" (parent: b2aed06328).
18 changed files with 475 additions and 183 deletions.
app/streams.js | 125 changes
@@ -1,40 +1,103 @@
/* global ReadableStream TransformStream */

/**
 * Pipe `readable` through `transformer`, preferring the platform's native
 * TransformStream and falling back to a hand-rolled ReadableStream when
 * `pipeThrough`/`TransformStream` is unavailable or throws.
 *
 * @param {ReadableStream} readable - source stream
 * @param {object} transformer - Streams-spec transformer: optional `start`,
 *   required `transform(chunk, controller)`, optional `flush(controller)`
 * @param {function} [oncancel] - called with the cancel reason when the
 *   fallback stream is cancelled (native path has no cancel hook here)
 * @returns {ReadableStream} the transformed stream
 */
export function transformStream(readable, transformer, oncancel) {
  try {
    return readable.pipeThrough(new TransformStream(transformer));
  } catch (e) {
    // Fallback: drive the transformer by hand from a reader.
    const reader = readable.getReader();
    return new ReadableStream({
      start(controller) {
        if (transformer.start) {
          return transformer.start(controller);
        }
      },
      async pull(controller) {
        // A single transform may enqueue 0..n chunks, so keep reading from
        // the source until at least one chunk has been produced.
        let enqueued = false;
        const wrappedController = {
          enqueue(d) {
            enqueued = true;
            controller.enqueue(d);
          }
        };
        while (!enqueued) {
          const data = await reader.read();
          if (data.done) {
            if (transformer.flush) {
              await transformer.flush(controller);
            }
            return controller.close();
          }
          await transformer.transform(data.value, wrappedController);
        }
      },
      cancel(reason) {
        readable.cancel(reason);
        if (oncancel) {
          oncancel(reason);
        }
      }
    });
  }
}
|
||||
|
||||
/**
 * Underlying source for a ReadableStream that emits a Blob's bytes as
 * Uint8Array chunks of at most `chunkSize` bytes, read via FileReader.
 */
class BlobStreamController {
  constructor(blob, size) {
    this.blob = blob;
    // Next byte offset to read from the blob.
    this.index = 0;
    // Chunk size in bytes; defaults to 64 KiB when `size` is falsy.
    this.chunkSize = size || 1024 * 64;
  }

  pull(controller) {
    return new Promise((resolve, reject) => {
      const remaining = this.blob.size - this.index;
      if (remaining <= 0) {
        controller.close();
        resolve();
        return;
      }
      const length = Math.min(this.chunkSize, remaining);
      const piece = this.blob.slice(this.index, this.index + length);
      this.index += length;
      const fileReader = new FileReader();
      fileReader.onload = () => {
        controller.enqueue(new Uint8Array(fileReader.result));
        resolve();
      };
      fileReader.onerror = reject;
      fileReader.readAsArrayBuffer(piece);
    });
  }
}
|
||||
|
||||
/**
 * Wrap a Blob in a ReadableStream of Uint8Array chunks.
 *
 * @param {Blob} blob - source blob
 * @param {number} [size] - chunk size in bytes (defaults to 64 KiB)
 * @returns {ReadableStream}
 */
export function blobStream(blob, size) {
  const source = new BlobStreamController(blob, size);
  return new ReadableStream(source);
}
|
||||
|
||||
/**
 * Underlying source that plays a list of ReadableStreams back to back,
 * closing once the final stream is exhausted.
 */
class ConcatStreamController {
  constructor(streams) {
    this.streams = streams;
    this.index = 0;
    this.reader = null;
    this.nextReader();
  }

  // Advance to the next stream's reader; leaves this.reader falsy when
  // there are no streams left.
  nextReader() {
    const upcoming = this.streams[this.index++];
    this.reader = upcoming && upcoming.getReader();
  }

  async pull(controller) {
    if (!this.reader) {
      return controller.close();
    }
    const { done, value } = await this.reader.read();
    if (done) {
      // Current stream drained: move on and pull again recursively.
      this.nextReader();
      return this.pull(controller);
    }
    controller.enqueue(value);
  }
}
|
||||
|
||||
/**
 * Concatenate several ReadableStreams into a single ReadableStream that
 * emits their chunks in order.
 *
 * @param {ReadableStream[]} streams - streams to play back to back
 * @returns {ReadableStream}
 */
export function concatStream(streams) {
  const source = new ConcatStreamController(streams);
  return new ReadableStream(source);
}
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue