2025-01-04 00:34:03 +01:00
parent 41829408dc
commit 0ca14bbc19
18111 changed files with 1871397 additions and 0 deletions

View File

@@ -0,0 +1,18 @@
import { transformFromString, transformToString } from "./transforms";
export class Uint8ArrayBlobAdapter extends Uint8Array {
static fromString(source, encoding = "utf-8") {
switch (typeof source) {
case "string":
return transformFromString(source, encoding);
default:
throw new Error(`Unsupported conversion from ${typeof source} to Uint8ArrayBlobAdapter.`);
}
}
static mutate(source) {
Object.setPrototypeOf(source, Uint8ArrayBlobAdapter.prototype);
return source;
}
transformToString(encoding = "utf-8") {
return transformToString(this, encoding);
}
}
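A minimal usage sketch of the adapter above (the sample strings are illustrative assumptions, not part of this commit):

import { Uint8ArrayBlobAdapter } from "./Uint8ArrayBlobAdapter";

// Build an adapter from a UTF-8 string, then read the same bytes back as base64.
const blob = Uint8ArrayBlobAdapter.fromString("hello world");
console.log(blob instanceof Uint8Array); // true
console.log(blob.transformToString("base64")); // "aGVsbG8gd29ybGQ="

// Re-tag an existing Uint8Array in place (no copy) so it gains the transform methods.
const tagged = Uint8ArrayBlobAdapter.mutate(new Uint8Array([104, 105]));
console.log(tagged.transformToString()); // "hi"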

View File

@@ -0,0 +1,15 @@
import { fromBase64, toBase64 } from "@smithy/util-base64";
import { fromUtf8, toUtf8 } from "@smithy/util-utf8";
import { Uint8ArrayBlobAdapter } from "./Uint8ArrayBlobAdapter";
export function transformToString(payload, encoding = "utf-8") {
if (encoding === "base64") {
return toBase64(payload);
}
return toUtf8(payload);
}
export function transformFromString(str, encoding) {
if (encoding === "base64") {
return Uint8ArrayBlobAdapter.mutate(fromBase64(str));
}
return Uint8ArrayBlobAdapter.mutate(fromUtf8(str));
}
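A quick round-trip sketch with the two helpers above (the sample payload is illustrative):

import { transformFromString, transformToString } from "./transforms";

// Decode a base64 payload into a Uint8ArrayBlobAdapter, then read it back out.
const bytes = transformFromString("aGVsbG8=", "base64");
console.log(transformToString(bytes));           // "hello" (default utf-8)
console.log(transformToString(bytes, "base64")); // "aGVsbG8="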

View File

@@ -0,0 +1,27 @@
export const getAwsChunkedEncodingStream = (readableStream, options) => {
const { base64Encoder, bodyLengthChecker, checksumAlgorithmFn, checksumLocationName, streamHasher } = options;
const checksumRequired = base64Encoder !== undefined &&
bodyLengthChecker !== undefined &&
checksumAlgorithmFn !== undefined &&
checksumLocationName !== undefined &&
streamHasher !== undefined;
const digest = checksumRequired ? streamHasher(checksumAlgorithmFn, readableStream) : undefined;
const reader = readableStream.getReader();
return new ReadableStream({
async pull(controller) {
const { value, done } = await reader.read();
if (done) {
controller.enqueue(`0\r\n`);
if (checksumRequired) {
const checksum = base64Encoder(await digest);
controller.enqueue(`${checksumLocationName}:${checksum}\r\n`);
controller.enqueue(`\r\n`);
}
controller.close();
}
else {
controller.enqueue(`${(bodyLengthChecker(value) || 0).toString(16)}\r\n${value}\r\n`);
}
},
});
};
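A rough sketch of driving the web-stream variant above. Only bodyLengthChecker is supplied here, so the checksum trailer branch is skipped; the inline length checker and the string chunk are placeholders for what SDK clients normally provide:

const source = new ReadableStream({
  start(controller) {
    controller.enqueue("hello"); // illustrative chunk
    controller.close();
  },
});

const chunked = getAwsChunkedEncodingStream(source, {
  bodyLengthChecker: (chunk) => chunk.length, // stand-in length calculator
});

// Reading `chunked` yields "5\r\nhello\r\n" followed by the terminating "0\r\n".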

View File

@@ -0,0 +1,26 @@
import { Readable } from "stream";
export const getAwsChunkedEncodingStream = (readableStream, options) => {
const { base64Encoder, bodyLengthChecker, checksumAlgorithmFn, checksumLocationName, streamHasher } = options;
const checksumRequired = base64Encoder !== undefined &&
checksumAlgorithmFn !== undefined &&
checksumLocationName !== undefined &&
streamHasher !== undefined;
const digest = checksumRequired ? streamHasher(checksumAlgorithmFn, readableStream) : undefined;
const awsChunkedEncodingStream = new Readable({ read: () => { } });
readableStream.on("data", (data) => {
const length = bodyLengthChecker(data) || 0;
awsChunkedEncodingStream.push(`${length.toString(16)}\r\n`);
awsChunkedEncodingStream.push(data);
awsChunkedEncodingStream.push("\r\n");
});
readableStream.on("end", async () => {
awsChunkedEncodingStream.push(`0\r\n`);
if (checksumRequired) {
const checksum = base64Encoder(await digest);
awsChunkedEncodingStream.push(`${checksumLocationName}:${checksum}\r\n`);
awsChunkedEncodingStream.push(`\r\n`);
}
awsChunkedEncodingStream.push(null);
});
return awsChunkedEncodingStream;
};
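The Node variant above consumes a stream.Readable and emits the same aws-chunked framing. A small sketch, with illustrative source data and a stand-in length checker:

import { Readable } from "stream";

const source = Readable.from(["hello", "world"]); // illustrative chunks
const chunked = getAwsChunkedEncodingStream(source, {
  bodyLengthChecker: (chunk) => chunk.length, // stand-in length calculator
});

// Concatenated output: "5\r\nhello\r\n" + "5\r\nworld\r\n" + "0\r\n"
// (no checksum trailer, since the hashing options were omitted).
chunked.on("data", (piece) => process.stdout.write(piece.toString()));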

View File

@@ -0,0 +1,3 @@
export * from "./blob/Uint8ArrayBlobAdapter";
export * from "./getAwsChunkedEncodingStream";
export * from "./sdk-stream-mixin";
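Assuming this index is the entry point of the @smithy/util-stream package (the package name is an inference, not stated in the diff), consumers would import the helpers like this:

import { Uint8ArrayBlobAdapter, getAwsChunkedEncodingStream, sdkStreamMixin } from "@smithy/util-stream";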

View File

@@ -0,0 +1,64 @@
import { streamCollector } from "@smithy/fetch-http-handler";
import { toBase64 } from "@smithy/util-base64";
import { toHex } from "@smithy/util-hex-encoding";
import { toUtf8 } from "@smithy/util-utf8";
const ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED = "The stream has already been transformed.";
export const sdkStreamMixin = (stream) => {
if (!isBlobInstance(stream) && !isReadableStreamInstance(stream)) {
const name = stream?.__proto__?.constructor?.name || stream;
throw new Error(`Unexpected stream implementation, expect Blob or ReadableStream, got ${name}`);
}
let transformed = false;
const transformToByteArray = async () => {
if (transformed) {
throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED);
}
transformed = true;
return await streamCollector(stream);
};
const blobToWebStream = (blob) => {
if (typeof blob.stream !== "function") {
throw new Error("Cannot transform payload Blob to web stream. Please make sure the Blob.stream() is polyfilled.\n" +
"If you are using React Native, this API is not yet supported, see: https://react-native.canny.io/feature-requests/p/fetch-streaming-body");
}
return blob.stream();
};
return Object.assign(stream, {
transformToByteArray: transformToByteArray,
transformToString: async (encoding) => {
const buf = await transformToByteArray();
if (encoding === "base64") {
return toBase64(buf);
}
else if (encoding === "hex") {
return toHex(buf);
}
else if (encoding === undefined || encoding === "utf8" || encoding === "utf-8") {
return toUtf8(buf);
}
else if (typeof TextDecoder === "function") {
return new TextDecoder(encoding).decode(buf);
}
else {
throw new Error("TextDecoder is not available, please make sure polyfill is provided.");
}
},
transformToWebStream: () => {
if (transformed) {
throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED);
}
transformed = true;
if (isBlobInstance(stream)) {
return blobToWebStream(stream);
}
else if (isReadableStreamInstance(stream)) {
return stream;
}
else {
throw new Error(`Cannot transform payload to web stream, got ${stream}`);
}
},
});
};
const isBlobInstance = (stream) => typeof Blob === "function" && stream instanceof Blob;
const isReadableStreamInstance = (stream) => typeof ReadableStream === "function" && stream instanceof ReadableStream;
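A sketch of the browser mixin in use, wrapping a fetch response body (the URL is a placeholder). Each stream can be drained exactly once; a second transform call throws the error defined above:

const response = await fetch("https://example.com/object"); // placeholder URL
const sdkStream = sdkStreamMixin(response.body);

// Pick ONE of the transforms per stream:
const text = await sdkStream.transformToString("utf-8");
// const bytes = await sdkStream.transformToByteArray();
// const webStream = sdkStream.transformToWebStream();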

View File

@@ -0,0 +1,51 @@
import { streamCollector } from "@smithy/node-http-handler";
import { fromArrayBuffer } from "@smithy/util-buffer-from";
import { Readable } from "stream";
import { TextDecoder } from "util";
import { sdkStreamMixin as sdkStreamMixinReadableStream } from "./sdk-stream-mixin.browser";
const ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED = "The stream has already been transformed.";
export const sdkStreamMixin = (stream) => {
if (!(stream instanceof Readable)) {
try {
return sdkStreamMixinReadableStream(stream);
}
catch (e) {
const name = stream?.__proto__?.constructor?.name || stream;
throw new Error(`Unexpected stream implementation, expect Stream.Readable instance, got ${name}`);
}
}
let transformed = false;
const transformToByteArray = async () => {
if (transformed) {
throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED);
}
transformed = true;
return await streamCollector(stream);
};
return Object.assign(stream, {
transformToByteArray,
transformToString: async (encoding) => {
const buf = await transformToByteArray();
if (encoding === undefined || Buffer.isEncoding(encoding)) {
return fromArrayBuffer(buf.buffer, buf.byteOffset, buf.byteLength).toString(encoding);
}
else {
const decoder = new TextDecoder(encoding);
return decoder.decode(buf);
}
},
transformToWebStream: () => {
if (transformed) {
throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED);
}
if (stream.readableFlowing !== null) {
throw new Error("The stream has been consumed by other callbacks.");
}
if (typeof Readable.toWeb !== "function") {
throw new Error("Readable.toWeb() is not supported. Please make sure you are using Node.js >= 17.0.0, or polyfill is available.");
}
transformed = true;
return Readable.toWeb(stream);
},
});
};
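A sketch of the Node mixin with an in-memory Readable standing in for an HTTP response body (the sample buffers are illustrative):

import { Readable } from "stream";

const body = sdkStreamMixin(Readable.from([Buffer.from("hello, "), Buffer.from("stream")]));

// The payload can be drained once, in whichever shape the caller needs:
const text = await body.transformToString("utf-8"); // "hello, stream"
// const bytes = await body.transformToByteArray();
// const web = body.transformToWebStream(); // needs Node.js >= 17 for Readable.toWeb()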