2 changes: 1 addition & 1 deletion .codegen.json
@@ -1 +1 @@
-{ "engineHash": "ef31179", "specHash": "e95d6fa", "version": "1.1.0" }
+{ "engineHash": "89557a9", "specHash": "e95d6fa", "version": "1.1.0" }
24 changes: 12 additions & 12 deletions package-lock.json

Some generated files are not rendered by default.

50 changes: 30 additions & 20 deletions src/internal/utils.ts
@@ -196,35 +196,45 @@ export async function readByteStream(byteStream: Readable) {

 export async function* iterateChunks(
   stream: Readable,
-  chunkSize: number
+  chunkSize: number,
+  fileSize: number
 ): Iterator<Readable> {
   let buffers: Buffer[] = [];
   let totalSize = 0;
-  for await (const data of stream) {
-    if (!Buffer.isBuffer(data)) {
-      throw new Error('Expecting a chunk of stream to be a Buffer');
-    }
-    buffers.push(data);
-    totalSize += data.length;
+  let consumedSize = 0;
+  while (consumedSize < fileSize && !stream.readableEnded) {
+    for await (const data of stream) {
+      if (!Buffer.isBuffer(data)) {
+        throw new Error('Expecting a chunk of stream to be a Buffer');
+      }
+      consumedSize += data.length;
+      buffers.push(data);
+      totalSize += data.length;

-    if (totalSize < chunkSize) {
-      continue;
-    }
+      if (totalSize < chunkSize) {
+        continue;
+      }

-    const buffer = Buffer.concat(buffers);
+      const buffer = Buffer.concat(buffers);

-    let start = 0;
-    while (totalSize >= chunkSize) {
-      yield generateByteStreamFromBuffer(
-        buffer.subarray(start, start + chunkSize)
-      );
-      start += chunkSize;
-      totalSize -= chunkSize;
-    }
+      let start = 0;
+      while (totalSize >= chunkSize) {
+        yield generateByteStreamFromBuffer(
+          buffer.subarray(start, start + chunkSize)
+        );
+        start += chunkSize;
+        totalSize -= chunkSize;
+      }

-    buffers = totalSize > 0 ? [buffer.subarray(start)] : [];
+      buffers = totalSize > 0 ? [buffer.subarray(start)] : [];
+    }
   }

+  if (consumedSize !== fileSize) {
+    throw new Error(
+      `Stream size ${consumedSize} does not match expected file size ${fileSize}`
+    );
+  }
   if (totalSize > 0) {
     yield generateByteStreamFromBuffer(Buffer.concat(buffers));
   }
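A minimal usage sketch (not part of the diff) of how the updated generator might be driven with the new fileSize argument. The import path, the cast to AsyncIterable, and the use of readByteStream to drain each chunk are assumptions based on the surrounding file, not something this PR defines.

import { Readable } from 'stream';
// Assumed import path; iterateChunks and readByteStream live in src/internal/utils.ts.
import { iterateChunks, readByteStream } from './internal/utils';

async function demo(): Promise<void> {
  const payload = Buffer.alloc(20, 'a');
  // Feed the payload in two uneven pieces, as a network stream might deliver it.
  const stream = Readable.from([payload.subarray(0, 7), payload.subarray(7)]);

  // chunkSize = 8, declared fileSize = 20 -> expect chunk lengths 8, 8, 4.
  const chunks = iterateChunks(stream, 8, payload.length);
  for await (const chunk of chunks as unknown as AsyncIterable<Readable>) {
    const bytes = await readByteStream(chunk); // assumed to resolve to a Buffer
    console.log('chunk of', bytes.length, 'bytes');
  }
  // With the outer while loop and the final consumedSize check, a stream that
  // ends early (or a wrong declared size) now surfaces as a size-mismatch
  // error instead of a silently short upload.
}

demo().catch(console.error);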
2 changes: 1 addition & 1 deletion src/managers/chunkedUploads.generated.ts
@@ -775,7 +775,7 @@ export class ChunkedUploadsManager {
       throw new Error('Assertion failed');
     }
     const fileHash: Hash = new Hash({ algorithm: 'sha1' as HashName });
-    const chunksIterator: Iterator = iterateChunks(file, partSize);
+    const chunksIterator: Iterator = iterateChunks(file, partSize, fileSize);
     const results: PartAccumulator = await reduceIterator(
       chunksIterator,
       this.reducer.bind(this),
3 changes: 2 additions & 1 deletion src/networking/fetch.ts
@@ -319,7 +319,8 @@ export async function fetch(
       url: resource,
       queryParams: params,
       headers: (requestInit.headers as { [key: string]: string }) ?? {},
-      body: requestInit.body,
+      body:
+        typeof requestInit.body === 'string' ? requestInit.body : undefined,
     },
     responseInfo: {
       statusCode: fetchResponse.status,
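The effect of this change is that only string payloads are echoed into the recorded request info, while stream or Buffer bodies are dropped rather than serialized. A quick sketch of the same ternary guard, illustrative only and not taken from the PR:

// Sketch of the narrowing applied above: string bodies pass through,
// anything else (Buffer, stream, undefined) is recorded as undefined.
function recordableBody(body: unknown): string | undefined {
  return typeof body === 'string' ? body : undefined;
}

console.log(recordableBody(JSON.stringify({ name: 'file.txt' }))); // prints the JSON string
console.log(recordableBody(Buffer.from('chunk'))); // prints undefined
console.log(recordableBody(undefined)); // prints undefined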