This commit is contained in:
2025-01-04 00:34:03 +01:00
parent 41829408dc
commit 0ca14bbc19
18111 changed files with 1871397 additions and 0 deletions

View File

@@ -0,0 +1 @@
// Subpath shim: deep imports of this file resolve to the same module
// instance as the package's root entry point.
module.exports = require("../index.js");

View File

@@ -0,0 +1 @@
// Subpath shim: deep imports of this file resolve to the same module
// instance as the package's root entry point.
module.exports = require("../index.js");

View File

@@ -0,0 +1,31 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getAwsChunkedEncodingStream = void 0;
/**
 * Wraps a web ReadableStream so that it emits its contents using the
 * aws-chunked framing (hex length prefix + CRLF-delimited chunks), and
 * appends a checksum trailer when every checksum option is supplied.
 *
 * @param readableStream - source web ReadableStream to re-frame.
 * @param options - encoder/hasher hooks; the checksum trailer is produced
 *   only when all of them are defined.
 * @returns a new ReadableStream emitting the aws-chunked framing.
 */
const getAwsChunkedEncodingStream = (readableStream, options) => {
    const { base64Encoder, bodyLengthChecker, checksumAlgorithmFn, checksumLocationName, streamHasher } = options;
    // The trailer can only be written when the caller supplied every piece
    // needed to compute and encode it.
    const checksumRequired =
        base64Encoder !== undefined &&
        bodyLengthChecker !== undefined &&
        checksumAlgorithmFn !== undefined &&
        checksumLocationName !== undefined &&
        streamHasher !== undefined;
    // Kick off hashing immediately so the digest is ready once the source ends.
    const digestPromise = checksumRequired ? streamHasher(checksumAlgorithmFn, readableStream) : undefined;
    const sourceReader = readableStream.getReader();
    return new ReadableStream({
        async pull(controller) {
            const { value, done } = await sourceReader.read();
            if (!done) {
                // Frame each chunk as <hex-length>CRLF<data>CRLF.
                controller.enqueue(`${(bodyLengthChecker(value) || 0).toString(16)}\r\n${value}\r\n`);
                return;
            }
            // Terminal zero-length chunk, optionally followed by the checksum trailer.
            controller.enqueue(`0\r\n`);
            if (checksumRequired) {
                const trailerChecksum = base64Encoder(await digestPromise);
                controller.enqueue(`${checksumLocationName}:${trailerChecksum}\r\n`);
                controller.enqueue(`\r\n`);
            }
            controller.close();
        },
    });
};
exports.getAwsChunkedEncodingStream = getAwsChunkedEncodingStream;

View File

@@ -0,0 +1,30 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getAwsChunkedEncodingStream = void 0;
const stream_1 = require("stream");
/**
 * Wraps a Node.js Readable so that it emits its contents using the
 * aws-chunked framing (hex length prefix + CRLF-delimited chunks), and
 * appends a checksum trailer when every checksum option is supplied.
 *
 * @param readableStream - source Node.js Readable to re-frame.
 * @param options - encoder/hasher hooks; the checksum trailer is produced
 *   only when all of them are defined.
 * @returns a new Readable emitting the aws-chunked framing.
 */
const getAwsChunkedEncodingStream = (readableStream, options) => {
    const { base64Encoder, bodyLengthChecker, checksumAlgorithmFn, checksumLocationName, streamHasher } = options;
    // Match the browser implementation: the trailer requires every option,
    // including bodyLengthChecker, to be present.
    const checksumRequired = base64Encoder !== undefined &&
        bodyLengthChecker !== undefined &&
        checksumAlgorithmFn !== undefined &&
        checksumLocationName !== undefined &&
        streamHasher !== undefined;
    // Start hashing immediately so the digest is ready when the source ends.
    const digest = checksumRequired ? streamHasher(checksumAlgorithmFn, readableStream) : undefined;
    const awsChunkedEncodingStream = new stream_1.Readable({ read: () => { } });
    readableStream.on("data", (data) => {
        // Frame each chunk as <hex-length>CRLF<data>CRLF.
        const length = bodyLengthChecker(data) || 0;
        awsChunkedEncodingStream.push(`${length.toString(16)}\r\n`);
        awsChunkedEncodingStream.push(data);
        awsChunkedEncodingStream.push("\r\n");
    });
    readableStream.on("end", async () => {
        try {
            // Terminal zero-length chunk, optionally followed by the checksum trailer.
            awsChunkedEncodingStream.push(`0\r\n`);
            if (checksumRequired) {
                const checksum = base64Encoder(await digest);
                awsChunkedEncodingStream.push(`${checksumLocationName}:${checksum}\r\n`);
                awsChunkedEncodingStream.push(`\r\n`);
            }
            awsChunkedEncodingStream.push(null);
        }
        catch (error) {
            // Surface digest/encoding failures on the output stream instead of
            // leaving an unhandled promise rejection in the async 'end' handler.
            awsChunkedEncodingStream.destroy(error);
        }
    });
    return awsChunkedEncodingStream;
};
exports.getAwsChunkedEncodingStream = getAwsChunkedEncodingStream;

View File

@@ -0,0 +1,89 @@
// esbuild CommonJS interop prelude (generated): helpers for attaching lazy
// getters, copying properties between module objects, and re-exporting.
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Pins a stable, configurable .name on a function/class (keep-names support).
var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
// Defines every entry of `all` on `target` as an enumerable lazy getter.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties of `from` onto `to` as getters, skipping `except`
// and keys already present; enumerability follows the source descriptor.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Re-exports `mod` onto `target` (and optionally `secondTarget`), excluding `default`.
var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default"));
// Wraps a module namespace with an __esModule marker for CommonJS consumers.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// src/index.ts
// Export table: names are registered as lazy getters via __export, then the
// whole table is exposed through module.exports.
var src_exports = {};
__export(src_exports, {
  Uint8ArrayBlobAdapter: () => Uint8ArrayBlobAdapter
});
module.exports = __toCommonJS(src_exports);
// src/blob/transforms.ts
var import_util_base64 = require("@smithy/util-base64");
var import_util_utf8 = require("@smithy/util-utf8");
// Converts a byte payload to a string. "base64" produces a base64 rendering
// of the bytes; any other encoding value falls through to UTF-8 decoding.
function transformToString(payload, encoding = "utf-8") {
  if (encoding === "base64") {
    return (0, import_util_base64.toBase64)(payload);
  }
  return (0, import_util_utf8.toUtf8)(payload);
}
__name(transformToString, "transformToString");
// Converts a string to a Uint8ArrayBlobAdapter; "base64" input is decoded,
// anything else is treated as UTF-8 text.
function transformFromString(str, encoding) {
  if (encoding === "base64") {
    return Uint8ArrayBlobAdapter.mutate((0, import_util_base64.fromBase64)(str));
  }
  return Uint8ArrayBlobAdapter.mutate((0, import_util_utf8.fromUtf8)(str));
}
__name(transformFromString, "transformFromString");
// src/blob/Uint8ArrayBlobAdapter.ts
// Uint8Array subclass adding string<->byte conversion helpers. The
// underscored alias plus the trailing __name call is esbuild's keep-names
// output; the extra binding keeps the class's .name stable — do not collapse.
var _Uint8ArrayBlobAdapter = class _Uint8ArrayBlobAdapter extends Uint8Array {
  /**
   * @param source - such as a string or Stream.
   * @param encoding - "base64" decodes the string; default 'utf-8'.
   * @returns a new Uint8ArrayBlobAdapter extending Uint8Array.
   */
  static fromString(source, encoding = "utf-8") {
    switch (typeof source) {
      case "string":
        return transformFromString(source, encoding);
      default:
        throw new Error(`Unsupported conversion from ${typeof source} to Uint8ArrayBlobAdapter.`);
    }
  }
  /**
   * @param source - Uint8Array to be mutated.
   * @returns the same Uint8Array but with prototype switched to Uint8ArrayBlobAdapter.
   */
  static mutate(source) {
    // In-place prototype swap: no copy of the underlying buffer is made.
    Object.setPrototypeOf(source, _Uint8ArrayBlobAdapter.prototype);
    return source;
  }
  /**
   * @param encoding - default 'utf-8'.
   * @returns the blob as string.
   */
  transformToString(encoding = "utf-8") {
    return transformToString(this, encoding);
  }
};
__name(_Uint8ArrayBlobAdapter, "Uint8ArrayBlobAdapter");
var Uint8ArrayBlobAdapter = _Uint8ArrayBlobAdapter;
// src/index.ts
// Re-export the runtime-specific stream helpers onto this module's exports.
__reExport(src_exports, require("././getAwsChunkedEncodingStream"), module.exports);
__reExport(src_exports, require("././sdk-stream-mixin"), module.exports);
// Annotate the CommonJS export names for ESM import in node:
// NOTE: intentionally dead code (0 && ...) — cjs-module-lexer parses it
// statically to discover named exports; keep the shape exactly as emitted.
0 && (module.exports = {
  Uint8ArrayBlobAdapter,
  getAwsChunkedEncodingStream,
  sdkStreamMixin
});

View File

@@ -0,0 +1,69 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.sdkStreamMixin = void 0;
const fetch_http_handler_1 = require("@smithy/fetch-http-handler");
const util_base64_1 = require("@smithy/util-base64");
const util_hex_encoding_1 = require("@smithy/util-hex-encoding");
const util_utf8_1 = require("@smithy/util-utf8");
const ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED = "The stream has already been transformed.";
/**
 * Mixes transformToByteArray / transformToString / transformToWebStream
 * helpers onto a Blob or web ReadableStream payload. The payload may be
 * consumed through these helpers at most once.
 *
 * @param stream - a Blob or web ReadableStream; anything else throws.
 * @returns the same object, augmented with the transform methods.
 */
const sdkStreamMixin = (stream) => {
    if (!isBlobInstance(stream) && !isReadableStreamInstance(stream)) {
        // Best-effort name for the error message without assuming a prototype exists.
        const proto = stream === null || stream === void 0 ? void 0 : stream.__proto__;
        const ctor = proto === null || proto === void 0 ? void 0 : proto.constructor;
        const name = (ctor === null || ctor === void 0 ? void 0 : ctor.name) || stream;
        throw new Error(`Unexpected stream implementation, expect Blob or ReadableStream, got ${name}`);
    }
    // One-shot guard shared by every transform method.
    let transformed = false;
    const transformToByteArray = async () => {
        if (transformed) {
            throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED);
        }
        transformed = true;
        return await (0, fetch_http_handler_1.streamCollector)(stream);
    };
    // Blob.stream() requires a polyfill on some platforms (e.g. React Native).
    const blobToWebStream = (blob) => {
        if (typeof blob.stream === "function") {
            return blob.stream();
        }
        throw new Error("Cannot transform payload Blob to web stream. Please make sure the Blob.stream() is polyfilled.\n" +
            "If you are using React Native, this API is not yet supported, see: https://react-native.canny.io/feature-requests/p/fetch-streaming-body");
    };
    return Object.assign(stream, {
        transformToByteArray: transformToByteArray,
        transformToString: async (encoding) => {
            const buf = await transformToByteArray();
            switch (encoding) {
                case "base64":
                    return (0, util_base64_1.toBase64)(buf);
                case "hex":
                    return (0, util_hex_encoding_1.toHex)(buf);
                case undefined:
                case "utf8":
                case "utf-8":
                    return (0, util_utf8_1.toUtf8)(buf);
                default:
                    // Unknown encodings are delegated to TextDecoder when available.
                    if (typeof TextDecoder === "function") {
                        return new TextDecoder(encoding).decode(buf);
                    }
                    throw new Error("TextDecoder is not available, please make sure polyfill is provided.");
            }
        },
        transformToWebStream: () => {
            if (transformed) {
                throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED);
            }
            transformed = true;
            if (isBlobInstance(stream)) {
                return blobToWebStream(stream);
            }
            if (isReadableStreamInstance(stream)) {
                return stream;
            }
            throw new Error(`Cannot transform payload to web stream, got ${stream}`);
        },
    });
};
exports.sdkStreamMixin = sdkStreamMixin;
// True when a global Blob constructor exists and `stream` is an instance of it.
const isBlobInstance = (stream) => {
    if (typeof Blob !== "function") {
        return false;
    }
    return stream instanceof Blob;
};
// True when a global ReadableStream constructor exists and `stream` is an instance of it.
const isReadableStreamInstance = (stream) => {
    if (typeof ReadableStream !== "function") {
        return false;
    }
    return stream instanceof ReadableStream;
};

View File

@@ -0,0 +1,56 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.sdkStreamMixin = void 0;
const node_http_handler_1 = require("@smithy/node-http-handler");
const util_buffer_from_1 = require("@smithy/util-buffer-from");
const stream_1 = require("stream");
const util_1 = require("util");
const sdk_stream_mixin_browser_1 = require("./sdk-stream-mixin.browser");
const ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED = "The stream has already been transformed.";
/**
 * Mixes transformToByteArray / transformToString / transformToWebStream
 * helpers onto a Node.js Readable payload; non-Readable payloads are
 * delegated to the browser mixin. The payload may be consumed through
 * these helpers at most once.
 *
 * @param stream - a Node.js Readable (or a Blob/ReadableStream handled by
 *   the browser mixin); anything else throws.
 * @returns the same object, augmented with the transform methods.
 */
const sdkStreamMixin = (stream) => {
    if (!(stream instanceof stream_1.Readable)) {
        try {
            return (0, sdk_stream_mixin_browser_1.sdkStreamMixin)(stream);
        }
        catch (e) {
            // Best-effort name for the error message without assuming a prototype exists.
            const proto = stream === null || stream === void 0 ? void 0 : stream.__proto__;
            const ctor = proto === null || proto === void 0 ? void 0 : proto.constructor;
            const name = (ctor === null || ctor === void 0 ? void 0 : ctor.name) || stream;
            throw new Error(`Unexpected stream implementation, expect Stream.Readable instance, got ${name}`);
        }
    }
    // One-shot guard shared by every transform method.
    let consumed = false;
    const transformToByteArray = async () => {
        if (consumed) {
            throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED);
        }
        consumed = true;
        return await (0, node_http_handler_1.streamCollector)(stream);
    };
    return Object.assign(stream, {
        transformToByteArray,
        transformToString: async (encoding) => {
            const buf = await transformToByteArray();
            // Encodings Buffer does not understand are delegated to TextDecoder.
            if (encoding !== undefined && !Buffer.isEncoding(encoding)) {
                const decoder = new util_1.TextDecoder(encoding);
                return decoder.decode(buf);
            }
            return (0, util_buffer_from_1.fromArrayBuffer)(buf.buffer, buf.byteOffset, buf.byteLength).toString(encoding);
        },
        transformToWebStream: () => {
            if (consumed) {
                throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED);
            }
            // A non-null readableFlowing means data/pipe listeners are already attached.
            if (stream.readableFlowing !== null) {
                throw new Error("The stream has been consumed by other callbacks.");
            }
            if (typeof stream_1.Readable.toWeb !== "function") {
                throw new Error("Readable.toWeb() is not supported. Please make sure you are using Node.js >= 17.0.0, or polyfill is available.");
            }
            consumed = true;
            return stream_1.Readable.toWeb(stream);
        },
    });
};
exports.sdkStreamMixin = sdkStreamMixin;