Mirror of https://github.com/oven-sh/setup-bun.git (synced 2025-07-18 20:48:29 +02:00)
feat: add @actions/cache
parent b15fb7d098
commit 16e8c96a41
1932 changed files with 261172 additions and 10 deletions
8
node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.browser.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
// This file is used as a shim of "BufferScheduler" for some browser bundlers
// when trying to bundle "BufferScheduler"
// "BufferScheduler" class is only available in Node.js runtime
export class BufferScheduler {
}
//# sourceMappingURL=BufferScheduler.browser.js.map
1
node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.browser.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"BufferScheduler.browser.js","sourceRoot":"","sources":["../../../../storage-common/src/BufferScheduler.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,6EAA6E;AAC7E,0CAA0C;AAC1C,+DAA+D;AAC/D,MAAM,OAAO,eAAe;CAAG","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// This file is used as a shim of \"BufferScheduler\" for some browser bundlers\n// when trying to bundle \"BufferScheduler\"\n// \"BufferScheduler\" class is only available in Node.js runtime\nexport class BufferScheduler {}\n"]}
252
node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.js
generated
vendored
Normal file
@@ -0,0 +1,252 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
import { EventEmitter } from "events";
import { PooledBuffer } from "./PooledBuffer";
/**
 * This class accepts a Node.js Readable stream as input, and keeps reading data
 * from the stream into the internal buffer structure, until it reaches maxBuffers.
 * Every available buffer will try to trigger outgoingHandler.
 *
 * The internal buffer structure includes an incoming buffer array and an outgoing
 * buffer array. The incoming buffer array holds the "empty" buffers that can be filled
 * with new incoming data. The outgoing array holds the filled buffers to be
 * handled by outgoingHandler. The size of each buffer above is defined by the bufferSize parameter.
 *
 * NUM_OF_ALL_BUFFERS = BUFFERS_IN_INCOMING + BUFFERS_IN_OUTGOING + BUFFERS_UNDER_HANDLING
 *
 * NUM_OF_ALL_BUFFERS is less than or equal to maxBuffers.
 *
 * PERFORMANCE IMPROVEMENT TIPS:
 * 1. The input stream's highWaterMark should be set to the same value as the
 *    bufferSize parameter, which avoids Buffer.concat() operations.
 * 2. concurrency should be set to a smaller value than maxBuffers, which helps
 *    reduce the chance that an outgoing handler has to wait for stream data
 *    (in that situation outgoing handlers are blocked; the outgoing queue
 *    shouldn't be empty).
 */
export class BufferScheduler {
    /**
     * Creates an instance of BufferScheduler.
     *
     * @param readable - A Node.js Readable stream
     * @param bufferSize - Buffer size of every maintained buffer
     * @param maxBuffers - How many buffers can be allocated
     * @param outgoingHandler - An async function scheduled to be
     *                          triggered when a buffer is fully filled
     *                          with stream data
     * @param concurrency - Concurrency of executing outgoingHandlers (>0)
     * @param encoding - [Optional] Encoding of the Readable stream when it's a string stream
     */
    constructor(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) {
        /**
         * An internal event emitter.
         */
        this.emitter = new EventEmitter();
        /**
         * An internal offset marker to track the data offset in bytes of the next outgoingHandler.
         */
        this.offset = 0;
        /**
         * An internal marker to track whether the stream has ended.
         */
        this.isStreamEnd = false;
        /**
         * An internal marker to track whether the stream or an outgoingHandler returned an error.
         */
        this.isError = false;
        /**
         * How many handlers are executing.
         */
        this.executingOutgoingHandlers = 0;
        /**
         * How many buffers have been allocated.
         */
        this.numBuffers = 0;
        /**
         * This class doesn't know how much data the stream pops each time, which
         * is defined by the stream's highWaterMark. So BufferScheduler caches
         * data received from the stream; when the data in unresolvedDataArray exceeds
         * the defined blockSize, it will try to concat a blockSize of buffer, fill it
         * into available buffers from incoming and push it to the outgoing array.
         */
        this.unresolvedDataArray = [];
        /**
         * How much data is contained in unresolvedDataArray.
         */
        this.unresolvedLength = 0;
        /**
         * This array holds all the available buffers that can be used to fill data from the stream.
         */
        this.incoming = [];
        /**
         * This array (queue) holds all the buffers filled from stream data.
         */
        this.outgoing = [];
        if (bufferSize <= 0) {
            throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`);
        }
        if (maxBuffers <= 0) {
            throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`);
        }
        if (concurrency <= 0) {
            throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`);
        }
        this.bufferSize = bufferSize;
        this.maxBuffers = maxBuffers;
        this.readable = readable;
        this.outgoingHandler = outgoingHandler;
        this.concurrency = concurrency;
        this.encoding = encoding;
    }
    /**
     * Starts the scheduler; returns an error when the stream or any of the
     * outgoingHandlers returns an error.
     */
    async do() {
        return new Promise((resolve, reject) => {
            this.readable.on("data", (data) => {
                data = typeof data === "string" ? Buffer.from(data, this.encoding) : data;
                this.appendUnresolvedData(data);
                if (!this.resolveData()) {
                    this.readable.pause();
                }
            });
            this.readable.on("error", (err) => {
                this.emitter.emit("error", err);
            });
            this.readable.on("end", () => {
                this.isStreamEnd = true;
                this.emitter.emit("checkEnd");
            });
            this.emitter.on("error", (err) => {
                this.isError = true;
                this.readable.pause();
                reject(err);
            });
            this.emitter.on("checkEnd", () => {
                if (this.outgoing.length > 0) {
                    this.triggerOutgoingHandlers();
                    return;
                }
                if (this.isStreamEnd && this.executingOutgoingHandlers === 0) {
                    if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) {
                        const buffer = this.shiftBufferFromUnresolvedDataArray();
                        this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset)
                            .then(resolve)
                            .catch(reject);
                    }
                    else if (this.unresolvedLength >= this.bufferSize) {
                        return;
                    }
                    else {
                        resolve();
                    }
                }
            });
        });
    }
    /**
     * Inserts new data into the unresolved array.
     *
     * @param data -
     */
    appendUnresolvedData(data) {
        this.unresolvedDataArray.push(data);
        this.unresolvedLength += data.length;
    }
    /**
     * Tries to shift a buffer of blockSize. The buffer returned may be smaller
     * than blockSize when the data in unresolvedDataArray is less than bufferSize.
     */
    shiftBufferFromUnresolvedDataArray(buffer) {
        if (!buffer) {
            buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength);
        }
        else {
            buffer.fill(this.unresolvedDataArray, this.unresolvedLength);
        }
        this.unresolvedLength -= buffer.size;
        return buffer;
    }
    /**
     * Resolves data in unresolvedDataArray. For every buffer of blockSize
     * shifted, it will try to get (or allocate) a buffer from incoming, fill it,
     * then push it into outgoing to be handled by an outgoing handler.
     *
     * @returns Returns false when the available buffers in incoming are not enough; else true.
     */
    resolveData() {
        while (this.unresolvedLength >= this.bufferSize) {
            let buffer;
            if (this.incoming.length > 0) {
                buffer = this.incoming.shift();
                this.shiftBufferFromUnresolvedDataArray(buffer);
            }
            else {
                if (this.numBuffers < this.maxBuffers) {
                    buffer = this.shiftBufferFromUnresolvedDataArray();
                    this.numBuffers++;
                }
                else {
                    // No available buffer, wait for a buffer to be returned
                    return false;
                }
            }
            this.outgoing.push(buffer);
            this.triggerOutgoingHandlers();
        }
        return true;
    }
    /**
     * Tries to trigger an outgoing handler for every buffer in outgoing. Stops
     * when the concurrency limit is reached.
     */
    async triggerOutgoingHandlers() {
        let buffer;
        do {
            if (this.executingOutgoingHandlers >= this.concurrency) {
                return;
            }
            buffer = this.outgoing.shift();
            if (buffer) {
                this.triggerOutgoingHandler(buffer);
            }
        } while (buffer);
    }
    /**
     * Triggers an outgoing handler for a buffer shifted from outgoing.
     *
     * @param buffer -
     */
    async triggerOutgoingHandler(buffer) {
        const bufferLength = buffer.size;
        this.executingOutgoingHandlers++;
        this.offset += bufferLength;
        try {
            await this.outgoingHandler(() => buffer.getReadableStream(), bufferLength, this.offset - bufferLength);
        }
        catch (err) {
            this.emitter.emit("error", err);
            return;
        }
        this.executingOutgoingHandlers--;
        this.reuseBuffer(buffer);
        this.emitter.emit("checkEnd");
    }
    /**
     * Returns a buffer used by an outgoing handler back into incoming.
     *
     * @param buffer -
     */
    reuseBuffer(buffer) {
        this.incoming.push(buffer);
        if (!this.isError && this.resolveData() && !this.isStreamEnd) {
            this.readable.resume();
        }
    }
}
//# sourceMappingURL=BufferScheduler.js.map
1
node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
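Note: the following usage sketch is illustrative and not part of the diff. The input file name and the uploadChunk() handler are hypothetical, and the deep import path simply mirrors the vendored file location above; in @azure/storage-blob the handler would typically stage a block of an upload.

// Hypothetical usage sketch of BufferScheduler (ES module context assumed).
import { createReadStream } from "fs";
import { BufferScheduler } from "@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler";

// outgoingHandler receives a stream factory, the chunk length, and its offset.
async function uploadChunk(getStream, length, offset) {
  for await (const chunk of getStream()) {
    void chunk; // a real handler would send these bytes somewhere
  }
  console.log(`handled ${length} bytes at offset ${offset}`);
}

const scheduler = new BufferScheduler(
  createReadStream("input.bin"), // source Readable stream (hypothetical file)
  4 * 1024 * 1024,               // bufferSize: 4 MiB per pooled buffer
  20,                            // maxBuffers: cap on allocated buffers
  uploadChunk,                   // called for every filled buffer
  5                              // concurrency: handlers in flight at once
);
await scheduler.do(); // resolves once the stream ends and all handlers finish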
83
node_modules/@azure/storage-blob/dist-esm/storage-common/src/BuffersStream.js
generated
vendored
Normal file
@@ -0,0 +1,83 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
import { Readable } from "stream";
/**
 * This class generates a readable stream from the data in an array of buffers.
 */
export class BuffersStream extends Readable {
    /**
     * Creates an instance of BuffersStream that will emit the data
     * contained in the array of buffers.
     *
     * @param buffers - Array of buffers containing the data
     * @param byteLength - The total length of data contained in the buffers
     */
    constructor(buffers, byteLength, options) {
        super(options);
        this.buffers = buffers;
        this.byteLength = byteLength;
        this.byteOffsetInCurrentBuffer = 0;
        this.bufferIndex = 0;
        this.pushedBytesLength = 0;
        // check that byteLength is no larger than the total length of buffers[]
        let buffersLength = 0;
        for (const buf of this.buffers) {
            buffersLength += buf.byteLength;
        }
        if (buffersLength < this.byteLength) {
            throw new Error("Data size shouldn't be larger than the total length of buffers.");
        }
    }
    /**
     * Internal _read() that will be called when the stream wants to pull more data in.
     *
     * @param size - Optional. The size of data to be read
     */
    _read(size) {
        if (this.pushedBytesLength >= this.byteLength) {
            this.push(null);
        }
        if (!size) {
            size = this.readableHighWaterMark;
        }
        const outBuffers = [];
        let i = 0;
        while (i < size && this.pushedBytesLength < this.byteLength) {
            // The last buffer may be longer than the data it contains.
            const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength;
            const remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer;
            const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers);
            if (remaining > size - i) {
                // chunkSize = size - i
                const end = this.byteOffsetInCurrentBuffer + size - i;
                outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end));
                this.pushedBytesLength += size - i;
                this.byteOffsetInCurrentBuffer = end;
                i = size;
                break;
            }
            else {
                // chunkSize = remaining
                const end = this.byteOffsetInCurrentBuffer + remaining;
                outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end));
                if (remaining === remainingCapacityInThisBuffer) {
                    // this.buffers[this.bufferIndex] is used up, shift to the next one
                    this.byteOffsetInCurrentBuffer = 0;
                    this.bufferIndex++;
                }
                else {
                    this.byteOffsetInCurrentBuffer = end;
                }
                this.pushedBytesLength += remaining;
                i += remaining;
            }
        }
        if (outBuffers.length > 1) {
            this.push(Buffer.concat(outBuffers));
        }
        else if (outBuffers.length === 1) {
            this.push(outBuffers[0]);
        }
    }
}
//# sourceMappingURL=BuffersStream.js.map
1
node_modules/@azure/storage-blob/dist-esm/storage-common/src/BuffersStream.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
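Note: a minimal sketch of BuffersStream, not part of the diff, to make the byteLength contract concrete: the stream exposes only byteLength bytes even when the backing buffers hold more. The import path and values are illustrative only.

// Hypothetical sketch (ES module context assumed).
import { BuffersStream } from "@azure/storage-blob/dist-esm/storage-common/src/BuffersStream";

const a = Buffer.from("hello, ");  // 7 bytes
const b = Buffer.from("world!!!"); // 8 bytes, longer than the data we expose
const stream = new BuffersStream([a, b], 13); // expose only 13 of the 15 bytes

const parts = [];
for await (const chunk of stream) parts.push(chunk);
console.log(Buffer.concat(parts).toString()); // "hello, world!"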
87
node_modules/@azure/storage-blob/dist-esm/storage-common/src/PooledBuffer.js
generated
vendored
Normal file
@@ -0,0 +1,87 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
import { BuffersStream } from "./BuffersStream";
/**
 * maxBufferLength is the max size of each buffer in the pooled buffers.
 */
// Can't use import as TypeScript doesn't recognize "buffer".
const maxBufferLength = require("buffer").constants.MAX_LENGTH;
/**
 * This class provides a buffer container which conceptually has no hard size limit.
 * It accepts a capacity, an array of input buffers and the total length of input data.
 * It will allocate an internal "buffer" of the capacity and fill the data in the input buffers
 * into the internal "buffer" serially with respect to the total length.
 * Then by calling PooledBuffer.getReadableStream(), you can get a readable stream
 * assembled from all the data in the internal "buffer".
 */
export class PooledBuffer {
    constructor(capacity, buffers, totalLength) {
        /**
         * Internal buffers used to keep the data.
         * Each buffer has a length of maxBufferLength except the last one.
         */
        this.buffers = [];
        this.capacity = capacity;
        this._size = 0;
        // allocate
        const bufferNum = Math.ceil(capacity / maxBufferLength);
        for (let i = 0; i < bufferNum; i++) {
            let len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength;
            if (len === 0) {
                len = maxBufferLength;
            }
            this.buffers.push(Buffer.allocUnsafe(len));
        }
        if (buffers) {
            this.fill(buffers, totalLength);
        }
    }
    /**
     * The size of the data contained in the pooled buffers.
     */
    get size() {
        return this._size;
    }
    /**
     * Fills the internal buffers with data from the input buffers serially
     * with respect to the total length and the total capacity of the internal buffers.
     * Copied data is shifted out of the input buffers.
     *
     * @param buffers - Input buffers containing the data to be filled in the pooled buffer
     * @param totalLength - Total length of the data to be filled in.
     */
    fill(buffers, totalLength) {
        this._size = Math.min(this.capacity, totalLength);
        let i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0;
        while (totalCopiedNum < this._size) {
            const source = buffers[i];
            const target = this.buffers[j];
            const copiedNum = source.copy(target, targetOffset, sourceOffset);
            totalCopiedNum += copiedNum;
            sourceOffset += copiedNum;
            targetOffset += copiedNum;
            if (sourceOffset === source.length) {
                i++;
                sourceOffset = 0;
            }
            if (targetOffset === target.length) {
                j++;
                targetOffset = 0;
            }
        }
        // clear the copied data from the source buffers
        buffers.splice(0, i);
        if (buffers.length > 0) {
            buffers[0] = buffers[0].slice(sourceOffset);
        }
    }
    /**
     * Gets the readable stream assembled from all the data in the internal buffers.
     */
    getReadableStream() {
        return new BuffersStream(this.buffers, this.size);
    }
}
//# sourceMappingURL=PooledBuffer.js.map
1
node_modules/@azure/storage-blob/dist-esm/storage-common/src/PooledBuffer.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
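Note: a small sketch of PooledBuffer, not part of the diff, showing how fill() consumes the input chunks and getReadableStream() replays the assembled data. The import path and values are illustrative assumptions.

// Hypothetical sketch (ES module context assumed).
import { PooledBuffer } from "@azure/storage-blob/dist-esm/storage-common/src/PooledBuffer";

const chunks = [Buffer.from("abc"), Buffer.from("defgh")]; // 8 bytes total
const pooled = new PooledBuffer(8, chunks, 8); // capacity 8, fill all 8 bytes

console.log(pooled.size);   // 8
console.log(chunks.length); // 0 -- fill() shifts consumed data out of the input array
const parts = [];
for await (const chunk of pooled.getReadableStream()) parts.push(chunk);
console.log(Buffer.concat(parts).toString()); // "abcdefgh"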
4
node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.browser.js
generated
vendored
Normal file
@@ -0,0 +1,4 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
export * from "./BufferScheduler.browser";
//# sourceMappingURL=index.browser.js.map
1
node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.browser.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"index.browser.js","sourceRoot":"","sources":["../../../../storage-common/src/index.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,cAAc,2BAA2B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport * from \"./BufferScheduler.browser\";\n"]}
4
node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.js
generated
vendored
Normal file
@@ -0,0 +1,4 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
export * from "./BufferScheduler";
//# sourceMappingURL=index.js.map
1
node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../storage-common/src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,cAAc,mBAAmB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport * from \"./BufferScheduler\";\n"]}