Support async function for readFileFn
Since implementing a good concurrency handler may be painful, we provide a promise-based read-locking one, so that asyncReadFileFn is safe to use out of the box.

Given that stream reads are often unable to compute their final number of bytes to upload correctly/exactly, the number of chunks may also be off. We handle this in a safe way via FlowChunk.readBytes:

* if the file was read using an async function,
* and the last chunk read fewer bytes than usual (including 0),
* and the current chunk read no bytes,

then assume that this chunk is in fact superfluous and simulate its completion.

Previous discussion on:
* flowjs#42
* flowjs#318

(In the long term, Flow.js may have to support the concept of an "approximate chunk number".)
Raphaël Droz committed Oct 15, 2020
1 parent 9210467 · commit 52404d0
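
The superfluous-chunk heuristic above is described only in prose. As a rough sketch of that logic (hypothetical helper and field names throughout; the actual implementation lives in FlowChunk.readBytes), it amounts to:

// Sketch only: field names (fileObj, chunks, offset, startByte, endByte,
// readBytes) are illustrative, not the exact FlowChunk internals.
async function readAndMaybeSkip(chunk, asyncReadFileFn) {
  const data = await asyncReadFileFn(
    chunk.fileObj, chunk.startByte, chunk.endByte, chunk.fileObj.file.type, chunk);
  chunk.readBytes = data ? data.size : 0;
  const prev = chunk.fileObj.chunks[chunk.offset - 1];
  const prevWasShort = prev && prev.readBytes < (prev.endByte - prev.startByte);
  if (chunk.readBytes === 0 && prevWasShort) {
    // The stream ended earlier than the precomputed chunk count predicted:
    // treat this chunk as superfluous and simulate its completion rather
    // than uploading an empty payload.
    return {skip: true, data: null};
  }
  return {skip: false, data};
}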
Showing 4 changed files with 317 additions and 6 deletions.
@@ -0,0 +1,19 @@
export default class DeferredPromise {
  // https://stackoverflow.com/a/47112177
  constructor() {
    this.resolved = false;
    this._promise = new Promise((resolve, reject) => {
      // assign the resolve and reject functions to `this`,
      // making them usable on the class instance
      this.resolve = () => {
        this.resolved = true;
        return resolve();
      };
      this.reject = reject;
    });
    // bind `then` and `catch` to implement the same interface as Promise
    this.then = this._promise.then.bind(this._promise);
    this.catch = this._promise.catch.bind(this._promise);
    this[Symbol.toStringTag] = 'Promise';
  }
};
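
To illustrate how this class can serve as the promise-based read lock mentioned in the commit message, here is one possible wrapper (a sketch, not the shipped code; the import path is assumed) that serializes overlapping async reads:

import DeferredPromise from './DeferredPromise';

// Sketch only: wrap a read function so that two chunks never consume the
// shared underlying stream at the same time.
function makeLockedReader(readFn) {
  let lock = null; // DeferredPromise of the read currently in flight
  return async function (...args) {
    while (lock && !lock.resolved) {
      await lock; // wait until the previous read completes
    }
    lock = new DeferredPromise();
    try {
      return await readFn(...args);
    } finally {
      lock.resolve(); // release any waiters queued behind this read
    }
  };
}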
[Diffs of two other changed files not shown.]
@@ -0,0 +1,183 @@
describe('upload stream', function() {
  /**
   * @type {Flow}
   */
  var flow;
  /**
   * @type {FakeXMLHttpRequest}
   */
  var xhr_server;

  var random_sizes = false;

  /**
   * Generate an ASCII file composed of <num> parts, each <segment_size> characters long.
   * The character for each part is randomly chosen from the alphabet below.
   */
  function gen_file(num, segment_size = 64) {
    var alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789()_-?!./|';
    return alphabet
      .repeat(Math.ceil(num / alphabet.length))
      .split('')
      .sort(() => Math.random() - 0.5)
      .map((v, i) => i < num ? v.repeat(segment_size) : null)
      .filter(e => e)
      .join('');
  }

  function hash(content) {
    return window.crypto.subtle.digest('SHA-256', new TextEncoder('utf-8').encode(content));
  }

  function hex(buff) {
    return [].map.call(new Uint8Array(buff), b => ('00' + b.toString(16)).slice(-2)).join('');
  }

  class Streamer {
    constructor(chunk_size) {
      this._reader = null;
      this.chunk_size = chunk_size;

      // See the comment in read() for why we implement a custom reader here.
      this.buffer = null;
      this.index = 0;
    };

    init(flowObj) {
      this._reader = flowObj.file.stream().getReader();
    };

    async read(flowObj, startByte, endByte, fileType, chunk) {
      // chunk._log(`Start reading from ${this.buffer !== null ? 'existing' : 'the'} buffer`);
      if (this.buffer === null) {
        // console.log(`[asyncRead ${chunk.offset}] no preexisting buffer => reader.read()`);
        /*
          Here we would expect a partial read of 64 kB (by implementation), but it seems
          that *all* of the buffer is returned, which makes it difficult to write a test
          based on ReadableStreamDefaultReader() behavior. As such, we simulate partial
          reads ourselves.
        */
        const {value: buffer, done} = await this._reader.read();
        this.buffer = buffer;

        if (buffer) {
          // console.log(`[asyncRead ${chunk.offset}] got a buffer of ${buffer.length} bytes...`);
        } else {
          // console.log(`[asyncRead ${chunk.offset}] no buffer [bail]`);
          return null;
        }
      }

      if (this.buffer.length === 0) {
        // console.log(`[asyncRead ${chunk.offset}] this.buffer is empty [bail]`);
        return null;
      }

      // console.log(`[asyncRead ${chunk.offset}] Read slice[${this.index}:${this.index + this.chunk_size}] of a buffer of ${this.buffer.length} bytes`);
      var buffer_chunk = this.buffer.slice(this.index, this.index + this.chunk_size);

      if (!buffer_chunk) {
        // console.log(`[asyncRead ${chunk.offset}] null slice`);
        // console.log(buffer_chunk);
      } else {
        // chunk._log(`Read slice of ${buffer_chunk.length} bytes`);
        this.index += this.chunk_size;
        return new Blob([buffer_chunk], {type: 'application/octet-stream'});
      }

      return null;
    };
  }

  beforeAll(function() {
    jasmine.DEFAULT_TIMEOUT_INTERVAL = 5000;

    xhr_server = sinon.createFakeServer({
      // autoRespondAfter: 50
      respondImmediately: true,
    });
  });

  afterAll(function() {
    xhr_server.restore();
  });

  beforeEach(function () {
    // jasmine.clock().install();

    flow = new Flow({
      progressCallbacksInterval: 0,
      forceChunkSize: true,
      testChunks: false,
      generateUniqueIdentifier: function (file) {
        return file.size;
      }
    });

    xhr_server.respondWith('ok');
  });

  afterEach(function () {
    // jasmine.clock().uninstall();
    xhr_server.restore();
  });

  it('synchronous initFileFn and asyncReadFileFn', async function (done) {
    var chunk_size, chunk_num, simultaneousUploads, upload_chunk_size;

    if (random_sizes) {
      chunk_size = Math.ceil(Math.random() * 30),
      chunk_num = Math.ceil(Math.random() * 100),
      simultaneousUploads = Math.ceil(Math.random() * 20),
      upload_chunk_size = Math.max(1, Math.ceil(Math.random() * chunk_size));
    } else {
      chunk_size = 23,
      chunk_num = 93,
      simultaneousUploads = 17,
      upload_chunk_size = Math.max(1, Math.ceil(Math.random() * chunk_size));
    }

    var content = gen_file(chunk_num, chunk_size),
        orig_hash = hex(await hash(content)),
        sample_file = new File([content], 'foobar.bin');

    console.info(`Test file is ${content.length} bytes long (sha256: ${orig_hash}).`);
    console.info(`Now uploading ${simultaneousUploads} simultaneous chunks of at most ${upload_chunk_size} bytes`);

    flow.on('fileError', jasmine.createSpy('error'));
    flow.on('fileSuccess', jasmine.createSpy('success'));
    flow.on('complete', () => {
      validate(done, content, orig_hash);
    });

    var streamer = new Streamer(upload_chunk_size); // chunk_size);
    flow.opts.chunkSize = upload_chunk_size;
    flow.opts.simultaneousUploads = simultaneousUploads;
    flow.opts.initFileFn = streamer.init.bind(streamer);
    flow.opts.readFileFn = streamer.read.bind(streamer);
    flow.opts.asyncReadFileFn = streamer.read.bind(streamer);
    flow.addFile(sample_file);
    flow.upload();
  });

  function validate(done, content, orig_hash) {
    var predicted_request_number = Math.ceil(content.length / flow.opts.chunkSize);
    expect(xhr_server.requests.length).toBe(predicted_request_number);

    var file = flow.files[0];
    expect(file.progress()).toBe(1);
    expect(file.isUploading()).toBe(false);
    expect(file.isComplete()).toBe(true);

    // An array of promises of obtaining the corresponding request's body (= payload)
    var payload_contents = xhr_server.requests.map(x => x.requestBody.get('file').text());
    Promise.all(payload_contents)
      .then(values => hash(values.join('')))
      .then(hash => hex(hash))
      .then(hexhash => {
        // console.log(orig_hash, hexhash);
        expect(hexhash).toBe(orig_hash);
        done();
      });
  }
});
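
For reference, end-to-end usage mirroring what the spec above exercises might look like the following. This is a sketch: the reader property, someFile, and the option values are illustrative, and chunk payloads returned this way may not match chunkSize exactly, which is precisely the case the new FlowChunk.readBytes handling tolerates.

const flow = new Flow({
  chunkSize: 64 * 1024,
  simultaneousUploads: 3,
  // Called once per file: set up a stream reader (as in Streamer.init above).
  initFileFn: (flowObj) => {
    flowObj.reader = flowObj.file.stream().getReader(); // illustrative property
  },
  // Called per chunk: resolve with a Blob, or with null once the stream is
  // drained. null on a trailing chunk triggers the "superfluous chunk"
  // completion path instead of an empty upload.
  asyncReadFileFn: async (flowObj, startByte, endByte, fileType, chunk) => {
    const {value, done} = await flowObj.reader.read();
    return done ? null : new Blob([value], {type: 'application/octet-stream'});
  },
});
flow.addFile(someFile); // someFile: any File instance
flow.upload();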