Mirror of https://github.com/nodejs/node.git (synced 2025-08-15 13:48:44 +02:00)
src: update Blob implementation to use DataQueue / File-backed Blobs

Co-authored-by: flakey5 <73616808+flakey5@users.noreply.github.com>
PR-URL: https://github.com/nodejs/node/pull/45258
Reviewed-By: Matteo Collina <matteo.collina@gmail.com>

commit 950cec4c26 (parent c8cc7e89e6)
11 changed files with 995 additions and 331 deletions

@@ -6,9 +6,8 @@ const {
   MathMin,
   ObjectDefineProperties,
   ObjectDefineProperty,
-  PromiseResolve,
   PromiseReject,
-  SafePromisePrototypeFinally,
+  PromiseResolve,
   ReflectConstruct,
   RegExpPrototypeExec,
   RegExpPrototypeSymbolReplace,

@@ -22,7 +21,8 @@ const {

 const {
   createBlob: _createBlob,
-  FixedSizeBlobCopyJob,
+  createBlobFromFileHandle: _createBlobFromFileHandle,
+  concat,
   getDataObject,
 } = internalBinding('blob');

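The hunks shown on this page appear to all come from lib/internal/blob.js. The key change in the binding surface above is that the one-shot FixedSizeBlobCopyJob is gone: the Blob handle now hands out a reader, and the JS layer pulls chunks from it and joins them with the native concat(). Judging from how the later hunks use it, the reader contract is pull(callback), with the callback receiving (status, buffer): -1 (or 0 with no buffer) means end-of-stream, any other negative status is an errno-style error, and otherwise buffer is the next chunk. A small mock of that shape, purely for illustration (makeMockReader is not part of Node):

```js
'use strict';

// Hypothetical stand-in for the reader returned by this[kHandle].getReader().
// Status convention, as used by the code in this commit:
//   -1 (or 0 with no buffer) -> end-of-stream
//   other negative values    -> errno-style failure
//   otherwise                -> `buffer` is the next chunk
function makeMockReader(chunks) {
  let i = 0;
  return {
    pull(callback) {
      // Deliver asynchronously, as a native reader would.
      queueMicrotask(() => {
        if (i >= chunks.length) return callback(-1, undefined);
        callback(0, chunks[i++]);
      });
    },
  };
}

// Single pull: logs the first chunk; the next pull would report -1.
const reader = makeMockReader([new Uint8Array([1, 2, 3])]);
reader.pull((status, buffer) => console.log(status, buffer)); // 0 Uint8Array(3)
```
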
@@ -52,13 +52,13 @@ const {
 const { inspect } = require('internal/util/inspect');

 const {
-  AbortError,
   codes: {
     ERR_INVALID_ARG_TYPE,
     ERR_INVALID_ARG_VALUE,
     ERR_INVALID_THIS,
     ERR_BUFFER_TOO_LARGE,
-  }
+  },
+  errnoException,
 } = require('internal/errors');

 const {

@@ -67,13 +67,8 @@ const {
 } = require('internal/validators');

 const kHandle = Symbol('kHandle');
-const kState = Symbol('kState');
-const kIndex = Symbol('kIndex');
 const kType = Symbol('kType');
 const kLength = Symbol('kLength');
-const kArrayBufferPromise = Symbol('kArrayBufferPromise');
-
-const kMaxChunkSize = 65536;

 const disallowedTypeCharacters = /[^\u{0020}-\u{007E}]/u;

@@ -266,40 +261,28 @@ class Blob {
     if (!isBlob(this))
       return PromiseReject(new ERR_INVALID_THIS('Blob'));

-    // If there's already a promise in flight for the content,
-    // reuse it, but only while it's in flight. After the cached
-    // promise resolves it will be cleared, allowing it to be
-    // garbage collected as soon as possible.
-    if (this[kArrayBufferPromise])
-      return this[kArrayBufferPromise];
+    if (this.size === 0) {
+      return PromiseResolve(new ArrayBuffer(0));
+    }

-    const job = new FixedSizeBlobCopyJob(this[kHandle]);
-
-    const ret = job.run();
-
-    // If the job returns a value immediately, the ArrayBuffer
-    // was generated synchronously and should just be returned
-    // directly.
-    if (ret !== undefined)
-      return PromiseResolve(ret);
-
-    const {
-      promise,
-      resolve,
-      reject,
-    } = createDeferredPromise();
-
-    job.ondone = (err, ab) => {
-      if (err !== undefined)
-        return reject(new AbortError(undefined, { cause: err }));
-      resolve(ab);
+    const { promise, resolve } = createDeferredPromise();
+    const reader = this[kHandle].getReader();
+    const buffers = [];
+    const readNext = () => {
+      reader.pull((status, buffer) => {
+        if (status === -1) {
+          // EOS, concat & resolve
+          // buffer should be undefined here
+          resolve(concat(buffers));
+          return;
+        }
+        if (buffer !== undefined)
+          buffers.push(buffer);
+        readNext();
+      });
     };
-    this[kArrayBufferPromise] =
-      SafePromisePrototypeFinally(
-        promise,
-        () => this[kArrayBufferPromise] = undefined);
-
-    return this[kArrayBufferPromise];
+    readNext();
+    return promise;
   }

   /**

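The new arrayBuffer() above is a plain drain loop: pull until the end-of-stream status, collect every chunk, then hand the pieces to the native concat() binding. A user-land sketch of the same pattern is below; the inline readerFrom() helper and the use of Buffer.concat in place of the native concat are assumptions for illustration only:

```js
'use strict';

// Illustrative stand-in for a DataQueue-style reader: pull(callback) hands
// back (status, chunk), with status -1 meaning end-of-stream.
function readerFrom(chunks) {
  let i = 0;
  return {
    pull(callback) {
      queueMicrotask(() => {
        if (i >= chunks.length) return callback(-1, undefined);
        callback(0, chunks[i++]);
      });
    },
  };
}

// Mirrors the readNext() recursion in the new Blob.prototype.arrayBuffer().
function drainToArrayBuffer(reader) {
  return new Promise((resolve) => {
    const buffers = [];
    const readNext = () => {
      reader.pull((status, buffer) => {
        if (status === -1) {
          // EOS: join everything collected so far into one ArrayBuffer.
          const joined = Buffer.concat(buffers);
          resolve(joined.buffer.slice(joined.byteOffset,
                                      joined.byteOffset + joined.byteLength));
          return;
        }
        if (buffer !== undefined)
          buffers.push(buffer);
        readNext();
      });
    };
    readNext();
  });
}

// Example: two chunks drained into a single 6-byte ArrayBuffer.
drainToArrayBuffer(readerFrom([Buffer.from('foo'), Buffer.from('bar')]))
  .then((ab) => console.log(ab.byteLength)); // 6
```
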
@@ -321,24 +304,57 @@ class Blob {
     if (!isBlob(this))
       throw new ERR_INVALID_THIS('Blob');

-    const self = this;
-    return new lazyReadableStream({
-      async start() {
-        this[kState] = await self.arrayBuffer();
-        this[kIndex] = 0;
-      },
+    if (this.size === 0) {
+      return new lazyReadableStream({
+        start(c) { c.close(); }
+      });
+    }

-      pull(controller) {
-        if (this[kState].byteLength - this[kIndex] <= kMaxChunkSize) {
-          controller.enqueue(new Uint8Array(this[kState], this[kIndex]));
-          controller.close();
-          this[kState] = undefined;
-        } else {
-          controller.enqueue(new Uint8Array(this[kState], this[kIndex], kMaxChunkSize));
-          this[kIndex] += kMaxChunkSize;
-        }
-      }
-    });
+    const reader = this[kHandle].getReader();
+    return new lazyReadableStream({
+      start(c) {
+        // There really should only be one read at a time so using an
+        // array here is purely defensive.
+        this.pendingPulls = [];
+      },
+      pull(c) {
+        const { promise, resolve, reject } = createDeferredPromise();
+        this.pendingPulls.push({resolve, reject});
+        reader.pull((status, buffer) => {
+          // If pendingPulls is empty here, the stream had to have
+          // been canceled, and we don't really care about the result.
+          // we can simply exit.
+          if (this.pendingPulls.length === 0) {
+            return;
+          }
+          const pending = this.pendingPulls.shift();
+          if (status === -1 || (status === 0 && buffer === undefined)) {
+            // EOS
+            c.close();
+            pending.resolve();
+            return;
+          } else if (status < 0) {
+            const error = errnoException(status, 'read');
+            c.error(error);
+            pending.reject(error);
+            return;
+          }
+          c.enqueue(new Uint8Array(buffer));
+          pending.resolve();
+        });
+        return promise;
+      },
+      cancel(reason) {
+        // Reject any currently pending pulls here.
+        for (const pending of this.pendingPulls) {
+          pending.reject(reason);
+        }
+        this.pendingPulls = [];
+      }
+    // We set the highWaterMark to 0 because we do not want the stream to
+    // start reading immediately on creation. We want it to wait until read
+    // is called.
+    }, new CountQueuingStrategy({ highWaterMark: 0 }));
   }
 }

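The new stream() above wraps the same reader in a web ReadableStream with two deliberate choices: a CountQueuingStrategy with highWaterMark 0, so nothing is pulled from the handle until the consumer actually reads, and a pendingPulls list so cancel() can reject a read that is still in flight. A self-contained sketch of that pattern using the ReadableStream and CountQueuingStrategy globals follows; sourceFrom() is a hypothetical stand-in for the native reader, and the errnoException error path is omitted for brevity:

```js
'use strict';

// Hypothetical pull-based source with the same callback shape as the reader:
// callback(-1) means end-of-stream, otherwise a chunk is delivered.
function sourceFrom(chunks) {
  let i = 0;
  return {
    pull(callback) {
      queueMicrotask(() => {
        if (i >= chunks.length) return callback(-1, undefined);
        callback(0, chunks[i++]);
      });
    },
  };
}

function toReadableStream(source) {
  return new ReadableStream({
    start() {
      // Defensive, as in the commit: there should only ever be one
      // outstanding pull, but track them all so cancel() can reject them.
      this.pendingPulls = [];
    },
    pull(controller) {
      const pending = {};
      const promise = new Promise((resolve, reject) => {
        pending.resolve = resolve;
        pending.reject = reject;
      });
      this.pendingPulls.push(pending);
      source.pull((status, chunk) => {
        if (this.pendingPulls.length === 0) return; // stream was canceled
        const p = this.pendingPulls.shift();
        if (status === -1) {
          controller.close();
          p.resolve();
          return;
        }
        controller.enqueue(chunk);
        p.resolve();
      });
      return promise;
    },
    cancel(reason) {
      for (const p of this.pendingPulls) p.reject(reason);
      this.pendingPulls = [];
    },
  // highWaterMark 0: the stream does not pull until a read actually happens.
  }, new CountQueuingStrategy({ highWaterMark: 0 }));
}

// Example: chunks arrive only as the consumer asks for them.
(async () => {
  const rs = toReadableStream(sourceFrom([Uint8Array.of(1), Uint8Array.of(2)]));
  for await (const chunk of rs) console.log(chunk);
})();
```
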
@@ -406,10 +422,16 @@ function resolveObjectURL(url) {
   }
 }

+function createBlobFromFileHandle(handle) {
+  const [blob, length] = _createBlobFromFileHandle(handle);
+  return createBlob(blob, length);
+}
+
 module.exports = {
   Blob,
   ClonedBlob,
   createBlob,
+  createBlobFromFileHandle,
   isBlob,
   kHandle,
   resolveObjectURL,

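None of this changes the public Blob surface: arrayBuffer(), text(), and stream() keep their signatures, only the plumbing underneath now goes through the reader. A quick consumer-side example, independent of the internals touched by this commit:

```js
'use strict';

const { Blob } = require('node:buffer');

(async () => {
  const blob = new Blob(['hello ', 'world'], { type: 'text/plain' });

  // Served by the reader-based arrayBuffer() internally.
  const ab = await blob.arrayBuffer();
  console.log(ab.byteLength); // 11

  // stream() returns a web ReadableStream; chunks arrive on demand.
  for await (const chunk of blob.stream())
    console.log(chunk.byteLength);
})();
```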