From 94a3e66bb418824c0b64b1861e5a76bfce99d8c0 Mon Sep 17 00:00:00 2001 From: box-sdk-build Date: Mon, 12 Aug 2024 05:21:46 -0700 Subject: [PATCH 1/2] feat: parametrise chunked uploads endpoint urls (box/box-openapi#444) --- .codegen.json | 2 +- docs/chunkedUploads.md | 250 +++++++- package-lock.json | 158 ++--- src/managers/chunkedUploads.generated.ts | 712 +++++++++++++++++++++- src/test/chunkedUploads.generated.test.ts | 335 +++++++++- 5 files changed, 1321 insertions(+), 136 deletions(-) diff --git a/.codegen.json b/.codegen.json index 17d6bdcd..1c85cd9b 100644 --- a/.codegen.json +++ b/.codegen.json @@ -1 +1 @@ -{ "engineHash": "525674e", "specHash": "e50af18", "version": "1.3.0" } +{ "engineHash": "d1cb68d", "specHash": "9919482", "version": "1.3.0" } diff --git a/docs/chunkedUploads.md b/docs/chunkedUploads.md index 19a49cfb..11af132e 100644 --- a/docs/chunkedUploads.md +++ b/docs/chunkedUploads.md @@ -4,10 +4,15 @@ This is a manager for chunked uploads (allowed for files at least 20MB). - [Create upload session](#create-upload-session) - [Create upload session for existing file](#create-upload-session-for-existing-file) +- [Get upload session by URL](#get-upload-session-by-url) - [Get upload session](#get-upload-session) +- [Upload part of file by URL](#upload-part-of-file-by-url) - [Upload part of file](#upload-part-of-file) +- [Remove upload session by URL](#remove-upload-session-by-url) - [Remove upload session](#remove-upload-session) +- [List parts by URL](#list-parts-by-url) - [List parts](#list-parts) +- [Commit upload session by URL](#commit-upload-session-by-url) - [Commit upload session](#commit-upload-session) - [Upload big file](#upload-big-file) @@ -23,17 +28,11 @@ See the endpoint docs at ```ts -await this.createFileUploadSession( - { - fileName: fileName, - fileSize: fileSize, - folderId: parentFolderId, - } satisfies CreateFileUploadSessionRequestBody, - { - headers: new CreateFileUploadSessionHeaders({}), - cancellationToken: cancellationToken, - } satisfies CreateFileUploadSessionOptionalsInput -); +await client.chunkedUploads.createFileUploadSession({ + fileName: fileName, + fileSize: fileSize, + folderId: parentFolderId, +} satisfies CreateFileUploadSessionRequestBody); ``` ### Arguments @@ -75,16 +74,52 @@ This function returns a value of type `UploadSession`. Returns a new upload session. +## Get upload session by URL + +Return information about an upload session. + +The actual endpoint URL is returned by the [`Create upload session`](e://post-files-upload-sessions) endpoint. + +This operation is performed by calling function `getFileUploadSessionByUrl`. + +See the endpoint docs at +[API Reference](https://developer.box.com/reference/get-files-upload-sessions-id/). + + + +```ts +await client.chunkedUploads.getFileUploadSessionByUrl(statusUrl); +``` + +### Arguments + +- url `string` + - URL of getFileUploadSessionById method +- optionalsInput `GetFileUploadSessionByUrlOptionalsInput` + - + +### Returns + +This function returns a value of type `UploadSession`. + +Returns an upload session object. + ## Get upload session Return information about an upload session. +The actual endpoint URL is returned by the [`Create upload session`](e://post-files-upload-sessions) endpoint. + This operation is performed by calling function `getFileUploadSessionById`. See the endpoint docs at [API Reference](https://developer.box.com/reference/get-files-upload-sessions-id/). 
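+
+Not shown in the generated examples is where these identifiers come from. As
+a hedged sketch (assuming `session` is the `UploadSession` returned by the
+`createFileUploadSession` call above, and that its `sessionEndpoints` field
+carries a `status` URL, which is an assumption based on the Box
+upload-session schema):
+
+```ts
+const session = await client.chunkedUploads.createFileUploadSession({
+  fileName: fileName,
+  fileSize: fileSize,
+  folderId: parentFolderId,
+} satisfies CreateFileUploadSessionRequestBody);
+// The *ById methods take the session ID...
+const uploadSessionId: string = session.id!;
+// ...while the *ByUrl methods take URLs returned with the session.
+const statusUrl: string = session.sessionEndpoints!.status!;
+```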
-_Currently we don't have an example for calling `getFileUploadSessionById` in integration tests_ + + +```ts +await client.chunkedUploads.getFileUploadSessionById(uploadSessionId); +``` ### Arguments @@ -99,9 +134,54 @@ This function returns a value of type `UploadSession`. Returns an upload session object. +## Upload part of file by URL + +Uploads a chunk of a file for an upload session. + +The actual endpoint URL is returned by the [`Create upload session`](e://post-files-upload-sessions) +and [`Get upload session`](e://get-files-upload-sessions-id) endpoints. + +This operation is performed by calling function `uploadFilePartByUrl`. + +See the endpoint docs at +[API Reference](https://developer.box.com/reference/put-files-upload-sessions-id/). + + + +```ts +await client.chunkedUploads.uploadFilePartByUrl( + acc.uploadPartUrl, + generateByteStreamFromBuffer(chunkBuffer), + { + digest: digest, + contentRange: contentRange, + } satisfies UploadFilePartByUrlHeadersInput +); +``` + +### Arguments + +- url `string` + - URL of uploadFilePart method +- requestBody `ByteStream` + - Request body of uploadFilePart method +- headersInput `UploadFilePartByUrlHeadersInput` + - Headers of uploadFilePart method +- optionalsInput `UploadFilePartByUrlOptionalsInput` + - + +### Returns + +This function returns a value of type `UploadedPart`. + +Chunk has been uploaded successfully. + ## Upload part of file -Updates a chunk of an upload session for a file. +Uploads a chunk of a file for an upload session. + +The actual endpoint URL is returned by the [`Create upload session`](e://post-files-upload-sessions) +and [`Get upload session`](e://get-files-upload-sessions-id) endpoints. This operation is performed by calling function `uploadFilePart`. @@ -111,7 +191,7 @@ See the endpoint docs at ```ts -await this.uploadFilePart( +await client.chunkedUploads.uploadFilePart( acc.uploadSessionId, generateByteStreamFromBuffer(chunkBuffer), { @@ -138,18 +218,59 @@ This function returns a value of type `UploadedPart`. Chunk has been uploaded successfully. +## Remove upload session by URL + +Abort an upload session and discard all data uploaded. + +This cannot be reversed. + +The actual endpoint URL is returned by the [`Create upload session`](e://post-files-upload-sessions) +and [`Get upload session`](e://get-files-upload-sessions-id) endpoints. + +This operation is performed by calling function `deleteFileUploadSessionByUrl`. + +See the endpoint docs at +[API Reference](https://developer.box.com/reference/delete-files-upload-sessions-id/). + + + +```ts +await client.chunkedUploads.deleteFileUploadSessionByUrl(abortUrl); +``` + +### Arguments + +- url `string` + - URL of deleteFileUploadSessionById method +- optionalsInput `DeleteFileUploadSessionByUrlOptionalsInput` + - + +### Returns + +This function returns a value of type `undefined`. + +A blank response is returned if the session was +successfully aborted. + ## Remove upload session Abort an upload session and discard all data uploaded. This cannot be reversed. +The actual endpoint URL is returned by the [`Create upload session`](e://post-files-upload-sessions) +and [`Get upload session`](e://get-files-upload-sessions-id) endpoints. + This operation is performed by calling function `deleteFileUploadSessionById`. See the endpoint docs at [API Reference](https://developer.box.com/reference/delete-files-upload-sessions-id/). 
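+
+A hedged companion sketch (assuming the same `session` object as above, and
+that `sessionEndpoints` carries an `abort` URL, which is an assumption based
+on the Box upload-session schema):
+
+```ts
+// Abort by URL when the session endpoints are at hand...
+const abortUrl: string = session.sessionEndpoints!.abort!;
+await client.chunkedUploads.deleteFileUploadSessionByUrl(abortUrl);
+// ...or fall back to the ID-based variant shown below.
+```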
-_Currently we don't have an example for calling `deleteFileUploadSessionById` in integration tests_
+
+
+```ts
+await client.chunkedUploads.deleteFileUploadSessionById(uploadSessionId);
+```

 ### Arguments

@@ -165,10 +286,43 @@ This function returns a value of type `undefined`.

 A blank response is returned if the session was
 successfully aborted.

+## List parts by URL
+
+Return a list of the chunks uploaded to the upload session so far.
+
+The actual endpoint URL is returned by the [`Create upload session`](e://post-files-upload-sessions)
+and [`Get upload session`](e://get-files-upload-sessions-id) endpoints.
+
+This operation is performed by calling function `getFileUploadSessionPartsByUrl`.
+
+See the endpoint docs at
+[API Reference](https://developer.box.com/reference/get-files-upload-sessions-id-parts/).
+
+
+
+```ts
+await client.chunkedUploads.getFileUploadSessionPartsByUrl(listPartsUrl);
+```
+
+### Arguments
+
+- url `string`
+  - URL of getFileUploadSessionParts method
+- optionalsInput `GetFileUploadSessionPartsByUrlOptionalsInput`
+  -
+
+### Returns
+
+This function returns a value of type `UploadParts`.
+
+Returns a list of parts that have been uploaded.
+
 ## List parts

-Return a list of the chunks uploaded to the upload
-session so far.
+Return a list of the chunks uploaded to the upload session so far.
+
+The actual endpoint URL is returned by the [`Create upload session`](e://post-files-upload-sessions)
+and [`Get upload session`](e://get-files-upload-sessions-id) endpoints.

 This operation is performed by calling function `getFileUploadSessionParts`.

@@ -178,11 +332,7 @@ See the endpoint docs at

 ```ts
-await this.getFileUploadSessionParts(uploadSessionId, {
-  queryParams: {} satisfies GetFileUploadSessionPartsQueryParams,
-  headers: new GetFileUploadSessionPartsHeaders({}),
-  cancellationToken: cancellationToken,
-} satisfies GetFileUploadSessionPartsOptionalsInput);
+await client.chunkedUploads.getFileUploadSessionParts(uploadSessionId);
 ```

 ### Arguments

@@ -198,10 +348,55 @@ This function returns a value of type `UploadParts`.

 Returns a list of parts that have been uploaded.

+## Commit upload session by URL
+
+Close an upload session and create a file from the uploaded chunks.
+
+The actual endpoint URL is returned by the [`Create upload session`](e://post-files-upload-sessions)
+and [`Get upload session`](e://get-files-upload-sessions-id) endpoints.
+
+This operation is performed by calling function `createFileUploadSessionCommitByUrl`.
+
+See the endpoint docs at
+[API Reference](https://developer.box.com/reference/post-files-upload-sessions-id-commit/).
+
+
+
+```ts
+await client.chunkedUploads.createFileUploadSessionCommitByUrl(
+  commitUrl,
+  { parts: parts } satisfies CreateFileUploadSessionCommitByUrlRequestBody,
+  { digest: digest } satisfies CreateFileUploadSessionCommitByUrlHeadersInput
+);
+```
+
+### Arguments
+
+- url `string`
+  - URL of createFileUploadSessionCommit method
+- requestBody `CreateFileUploadSessionCommitByUrlRequestBody`
+  - Request body of createFileUploadSessionCommit method
+- headersInput `CreateFileUploadSessionCommitByUrlHeadersInput`
+  - Headers of createFileUploadSessionCommit method
+- optionalsInput `CreateFileUploadSessionCommitByUrlOptionalsInput`
+  -
+
+### Returns
+
+This function returns a value of type `Files`.
+
+Returns the file object in a list. Returns when all chunks have been uploaded but not yet processed.
+ +Inspect the upload session to get more information about the +progress of processing the chunks, then retry committing the file +when all chunks have processed. + ## Commit upload session -Close an upload session and create a file from the -uploaded chunks. +Close an upload session and create a file from the uploaded chunks. + +The actual endpoint URL is returned by the [`Create upload session`](e://post-files-upload-sessions) +and [`Get upload session`](e://get-files-upload-sessions-id) endpoints. This operation is performed by calling function `createFileUploadSessionCommit`. @@ -211,13 +406,10 @@ See the endpoint docs at ```ts -await this.createFileUploadSessionCommit( +await client.chunkedUploads.createFileUploadSessionCommit( uploadSessionId, { parts: parts } satisfies CreateFileUploadSessionCommitRequestBody, - { digest: digest } satisfies CreateFileUploadSessionCommitHeadersInput, - { - cancellationToken: cancellationToken, - } satisfies CreateFileUploadSessionCommitOptionalsInput + { digest: digest } satisfies CreateFileUploadSessionCommitHeadersInput ); ``` diff --git a/package-lock.json b/package-lock.json index f187b482..b64709ec 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1067,9 +1067,9 @@ } }, "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.19.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.19.2.tgz", - "integrity": "sha512-OHflWINKtoCFSpm/WmuQaWW4jeX+3Qt3XQDepkkiFTsoxFc5BpF3Z5aDxFZgBqRjO6ATP5+b1iilp4kGIZVWlA==", + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.20.0.tgz", + "integrity": "sha512-TSpWzflCc4VGAUJZlPpgAJE1+V60MePDQnBd7PPkpuEmOy8i87aL6tinFGKBFKuEDikYpig72QzdT3QPYIi+oA==", "cpu": [ "arm" ], @@ -1080,9 +1080,9 @@ ] }, "node_modules/@rollup/rollup-android-arm64": { - "version": "4.19.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.19.2.tgz", - "integrity": "sha512-k0OC/b14rNzMLDOE6QMBCjDRm3fQOHAL8Ldc9bxEWvMo4Ty9RY6rWmGetNTWhPo+/+FNd1lsQYRd0/1OSix36A==", + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.20.0.tgz", + "integrity": "sha512-u00Ro/nok7oGzVuh/FMYfNoGqxU5CPWz1mxV85S2w9LxHR8OoMQBuSk+3BKVIDYgkpeOET5yXkx90OYFc+ytpQ==", "cpu": [ "arm64" ], @@ -1093,9 +1093,9 @@ ] }, "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.19.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.19.2.tgz", - "integrity": "sha512-IIARRgWCNWMTeQH+kr/gFTHJccKzwEaI0YSvtqkEBPj7AshElFq89TyreKNFAGh5frLfDCbodnq+Ye3dqGKPBw==", + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.20.0.tgz", + "integrity": "sha512-uFVfvzvsdGtlSLuL0ZlvPJvl6ZmrH4CBwLGEFPe7hUmf7htGAN+aXo43R/V6LATyxlKVC/m6UsLb7jbG+LG39Q==", "cpu": [ "arm64" ], @@ -1106,9 +1106,9 @@ ] }, "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.19.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.19.2.tgz", - "integrity": "sha512-52udDMFDv54BTAdnw+KXNF45QCvcJOcYGl3vQkp4vARyrcdI/cXH8VXTEv/8QWfd6Fru8QQuw1b2uNersXOL0g==", + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.20.0.tgz", + "integrity": "sha512-xbrMDdlev53vNXexEa6l0LffojxhqDTBeL+VUxuuIXys4x6xyvbKq5XqTXBCEUA8ty8iEJblHvFaWRJTk/icAQ==", "cpu": [ "x64" ], @@ -1119,9 +1119,9 
@@ ] }, "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.19.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.19.2.tgz", - "integrity": "sha512-r+SI2t8srMPYZeoa1w0o/AfoVt9akI1ihgazGYPQGRilVAkuzMGiTtexNZkrPkQsyFrvqq/ni8f3zOnHw4hUbA==", + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.20.0.tgz", + "integrity": "sha512-jMYvxZwGmoHFBTbr12Xc6wOdc2xA5tF5F2q6t7Rcfab68TT0n+r7dgawD4qhPEvasDsVpQi+MgDzj2faOLsZjA==", "cpu": [ "arm" ], @@ -1132,9 +1132,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.19.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.19.2.tgz", - "integrity": "sha512-+tYiL4QVjtI3KliKBGtUU7yhw0GMcJJuB9mLTCEauHEsqfk49gtUBXGtGP3h1LW8MbaTY6rSFIQV1XOBps1gBA==", + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.20.0.tgz", + "integrity": "sha512-1asSTl4HKuIHIB1GcdFHNNZhxAYEdqML/MW4QmPS4G0ivbEcBr1JKlFLKsIRqjSwOBkdItn3/ZDlyvZ/N6KPlw==", "cpu": [ "arm" ], @@ -1145,9 +1145,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.19.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.19.2.tgz", - "integrity": "sha512-OR5DcvZiYN75mXDNQQxlQPTv4D+uNCUsmSCSY2FolLf9W5I4DSoJyg7z9Ea3TjKfhPSGgMJiey1aWvlWuBzMtg==", + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.20.0.tgz", + "integrity": "sha512-COBb8Bkx56KldOYJfMf6wKeYJrtJ9vEgBRAOkfw6Ens0tnmzPqvlpjZiLgkhg6cA3DGzCmLmmd319pmHvKWWlQ==", "cpu": [ "arm64" ], @@ -1158,9 +1158,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.19.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.19.2.tgz", - "integrity": "sha512-Hw3jSfWdUSauEYFBSFIte6I8m6jOj+3vifLg8EU3lreWulAUpch4JBjDMtlKosrBzkr0kwKgL9iCfjA8L3geoA==", + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.20.0.tgz", + "integrity": "sha512-+it+mBSyMslVQa8wSPvBx53fYuZK/oLTu5RJoXogjk6x7Q7sz1GNRsXWjn6SwyJm8E/oMjNVwPhmNdIjwP135Q==", "cpu": [ "arm64" ], @@ -1171,9 +1171,9 @@ ] }, "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { - "version": "4.19.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.19.2.tgz", - "integrity": "sha512-rhjvoPBhBwVnJRq/+hi2Q3EMiVF538/o9dBuj9TVLclo9DuONqt5xfWSaE6MYiFKpo/lFPJ/iSI72rYWw5Hc7w==", + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.20.0.tgz", + "integrity": "sha512-yAMvqhPfGKsAxHN8I4+jE0CpLWD8cv4z7CK7BMmhjDuz606Q2tFKkWRY8bHR9JQXYcoLfopo5TTqzxgPUjUMfw==", "cpu": [ "ppc64" ], @@ -1184,9 +1184,9 @@ ] }, "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.19.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.19.2.tgz", - "integrity": "sha512-EAz6vjPwHHs2qOCnpQkw4xs14XJq84I81sDRGPEjKPFVPBw7fwvtwhVjcZR6SLydCv8zNK8YGFblKWd/vRmP8g==", + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.20.0.tgz", + "integrity": 
"sha512-qmuxFpfmi/2SUkAw95TtNq/w/I7Gpjurx609OOOV7U4vhvUhBcftcmXwl3rqAek+ADBwSjIC4IVNLiszoj3dPA==", "cpu": [ "riscv64" ], @@ -1197,9 +1197,9 @@ ] }, "node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.19.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.19.2.tgz", - "integrity": "sha512-IJSUX1xb8k/zN9j2I7B5Re6B0NNJDJ1+soezjNojhT8DEVeDNptq2jgycCOpRhyGj0+xBn7Cq+PK7Q+nd2hxLA==", + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.20.0.tgz", + "integrity": "sha512-I0BtGXddHSHjV1mqTNkgUZLnS3WtsqebAXv11D5BZE/gfw5KoyXSAXVqyJximQXNvNzUo4GKlCK/dIwXlz+jlg==", "cpu": [ "s390x" ], @@ -1210,9 +1210,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.19.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.19.2.tgz", - "integrity": "sha512-OgaToJ8jSxTpgGkZSkwKE+JQGihdcaqnyHEFOSAU45utQ+yLruE1dkonB2SDI8t375wOKgNn8pQvaWY9kPzxDQ==", + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.20.0.tgz", + "integrity": "sha512-y+eoL2I3iphUg9tN9GB6ku1FA8kOfmF4oUEWhztDJ4KXJy1agk/9+pejOuZkNFhRwHAOxMsBPLbXPd6mJiCwew==", "cpu": [ "x64" ], @@ -1223,9 +1223,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.19.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.19.2.tgz", - "integrity": "sha512-5V3mPpWkB066XZZBgSd1lwozBk7tmOkKtquyCJ6T4LN3mzKENXyBwWNQn8d0Ci81hvlBw5RoFgleVpL6aScLYg==", + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.20.0.tgz", + "integrity": "sha512-hM3nhW40kBNYUkZb/r9k2FKK+/MnKglX7UYd4ZUy5DJs8/sMsIbqWK2piZtVGE3kcXVNj3B2IrUYROJMMCikNg==", "cpu": [ "x64" ], @@ -1236,9 +1236,9 @@ ] }, "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.19.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.19.2.tgz", - "integrity": "sha512-ayVstadfLeeXI9zUPiKRVT8qF55hm7hKa+0N1V6Vj+OTNFfKSoUxyZvzVvgtBxqSb5URQ8sK6fhwxr9/MLmxdA==", + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.20.0.tgz", + "integrity": "sha512-psegMvP+Ik/Bg7QRJbv8w8PAytPA7Uo8fpFjXyCRHWm6Nt42L+JtoqH8eDQ5hRP7/XW2UiIriy1Z46jf0Oa1kA==", "cpu": [ "arm64" ], @@ -1249,9 +1249,9 @@ ] }, "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.19.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.19.2.tgz", - "integrity": "sha512-Mda7iG4fOLHNsPqjWSjANvNZYoW034yxgrndof0DwCy0D3FvTjeNo+HGE6oGWgvcLZNLlcp0hLEFcRs+UGsMLg==", + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.20.0.tgz", + "integrity": "sha512-GabekH3w4lgAJpVxkk7hUzUf2hICSQO0a/BLFA11/RMxQT92MabKAqyubzDZmMOC/hcJNlc+rrypzNzYl4Dx7A==", "cpu": [ "ia32" ], @@ -1262,9 +1262,9 @@ ] }, "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.19.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.19.2.tgz", - "integrity": "sha512-DPi0ubYhSow/00YqmG1jWm3qt1F8aXziHc/UNy8bo9cpCacqhuWu+iSq/fp2SyEQK7iYTZ60fBU9cat3MXTjIQ==", + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.20.0.tgz", + "integrity": 
"sha512-aJ1EJSuTdGnM6qbVC4B5DSmozPTqIag9fSzXRNNo+humQLG89XpPgdt16Ia56ORD7s+H8Pmyx44uczDQ0yDzpg==", "cpu": [ "x64" ], @@ -1394,9 +1394,9 @@ } }, "node_modules/@types/node": { - "version": "22.1.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.1.0.tgz", - "integrity": "sha512-AOmuRF0R2/5j1knA3c6G3HOk523Ga+l+ZXltX8SF1+5oqcXijjfTd8fY3XRZqSihEu9XhtQnKYLmkFaoxgsJHw==", + "version": "22.2.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.2.0.tgz", + "integrity": "sha512-bm6EG6/pCpkxDf/0gDNDdtDILMOHgaQBVOJGdwsqClnxA3xL6jtMv76rLBc006RVMWbmaf0xbmom4Z/5o2nRkQ==", "dev": true, "dependencies": { "undici-types": "~6.13.0" @@ -1445,9 +1445,9 @@ "dev": true }, "node_modules/@types/yargs": { - "version": "17.0.32", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.32.tgz", - "integrity": "sha512-xQ67Yc/laOG5uMfX/093MRlGGCIBzZMarVa+gfNKJxWAIgykYpVGkBdbqEzGDDfCrVUj6Hiff4mTZ5BA6TmAog==", + "version": "17.0.33", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.33.tgz", + "integrity": "sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA==", "dev": true, "dependencies": { "@types/yargs-parser": "*" @@ -1827,9 +1827,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001646", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001646.tgz", - "integrity": "sha512-dRg00gudiBDDTmUhClSdv3hqRfpbOnU28IpI1T6PBTLWa+kOj0681C8uML3PifYfREuBrVjDGhL3adYpBT6spw==", + "version": "1.0.30001651", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001651.tgz", + "integrity": "sha512-9Cf+Xv1jJNe1xPZLGuUXLNkE1BoDkqRqYyFJ9TDYSqhduqA4hu4oR9HluGoWYQC/aj8WHjsGVV+bwkh0+tegRg==", "dev": true, "funding": [ { @@ -2106,9 +2106,9 @@ } }, "node_modules/electron-to-chromium": { - "version": "1.5.4", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.4.tgz", - "integrity": "sha512-orzA81VqLyIGUEA77YkVA1D+N+nNfl2isJVjjmOyrlxuooZ19ynb+dOlaDTqd/idKRS9lDCSBmtzM+kyCsMnkA==", + "version": "1.5.6", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.6.tgz", + "integrity": "sha512-jwXWsM5RPf6j9dPYzaorcBSUg6AiqocPEyMpkchkvntaH9HGfOOMZwxMJjDY/XEs3T5dM7uyH1VhRMkqUU9qVw==", "dev": true }, "node_modules/emittery": { @@ -4106,9 +4106,9 @@ } }, "node_modules/rollup": { - "version": "4.19.2", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.19.2.tgz", - "integrity": "sha512-6/jgnN1svF9PjNYJ4ya3l+cqutg49vOZ4rVgsDKxdl+5gpGPnByFXWGyfH9YGx9i3nfBwSu1Iyu6vGwFFA0BdQ==", + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.20.0.tgz", + "integrity": "sha512-6rbWBChcnSGzIlXeIdNIZTopKYad8ZG8ajhl78lGRLsI2rX8IkaotQhVas2Ma+GPxJav19wrSzvRvuiv0YKzWw==", "dev": true, "dependencies": { "@types/estree": "1.0.5" @@ -4121,22 +4121,22 @@ "npm": ">=8.0.0" }, "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.19.2", - "@rollup/rollup-android-arm64": "4.19.2", - "@rollup/rollup-darwin-arm64": "4.19.2", - "@rollup/rollup-darwin-x64": "4.19.2", - "@rollup/rollup-linux-arm-gnueabihf": "4.19.2", - "@rollup/rollup-linux-arm-musleabihf": "4.19.2", - "@rollup/rollup-linux-arm64-gnu": "4.19.2", - "@rollup/rollup-linux-arm64-musl": "4.19.2", - "@rollup/rollup-linux-powerpc64le-gnu": "4.19.2", - "@rollup/rollup-linux-riscv64-gnu": "4.19.2", - "@rollup/rollup-linux-s390x-gnu": "4.19.2", - "@rollup/rollup-linux-x64-gnu": "4.19.2", - "@rollup/rollup-linux-x64-musl": "4.19.2", - 
"@rollup/rollup-win32-arm64-msvc": "4.19.2", - "@rollup/rollup-win32-ia32-msvc": "4.19.2", - "@rollup/rollup-win32-x64-msvc": "4.19.2", + "@rollup/rollup-android-arm-eabi": "4.20.0", + "@rollup/rollup-android-arm64": "4.20.0", + "@rollup/rollup-darwin-arm64": "4.20.0", + "@rollup/rollup-darwin-x64": "4.20.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.20.0", + "@rollup/rollup-linux-arm-musleabihf": "4.20.0", + "@rollup/rollup-linux-arm64-gnu": "4.20.0", + "@rollup/rollup-linux-arm64-musl": "4.20.0", + "@rollup/rollup-linux-powerpc64le-gnu": "4.20.0", + "@rollup/rollup-linux-riscv64-gnu": "4.20.0", + "@rollup/rollup-linux-s390x-gnu": "4.20.0", + "@rollup/rollup-linux-x64-gnu": "4.20.0", + "@rollup/rollup-linux-x64-musl": "4.20.0", + "@rollup/rollup-win32-arm64-msvc": "4.20.0", + "@rollup/rollup-win32-ia32-msvc": "4.20.0", + "@rollup/rollup-win32-x64-msvc": "4.20.0", "fsevents": "~2.3.2" } }, diff --git a/src/managers/chunkedUploads.generated.ts b/src/managers/chunkedUploads.generated.ts index 63a34851..dd04d012 100644 --- a/src/managers/chunkedUploads.generated.ts +++ b/src/managers/chunkedUploads.generated.ts @@ -100,6 +100,34 @@ export interface CreateFileUploadSessionForExistingFileOptionalsInput { readonly headers?: CreateFileUploadSessionForExistingFileHeaders; readonly cancellationToken?: undefined | CancellationToken; } +export class GetFileUploadSessionByUrlOptionals { + readonly headers: GetFileUploadSessionByUrlHeaders = + new GetFileUploadSessionByUrlHeaders({}); + readonly cancellationToken?: CancellationToken = void 0; + constructor( + fields: Omit< + GetFileUploadSessionByUrlOptionals, + 'headers' | 'cancellationToken' + > & + Partial< + Pick< + GetFileUploadSessionByUrlOptionals, + 'headers' | 'cancellationToken' + > + > + ) { + if (fields.headers) { + this.headers = fields.headers; + } + if (fields.cancellationToken) { + this.cancellationToken = fields.cancellationToken; + } + } +} +export interface GetFileUploadSessionByUrlOptionalsInput { + readonly headers?: GetFileUploadSessionByUrlHeaders; + readonly cancellationToken?: undefined | CancellationToken; +} export class GetFileUploadSessionByIdOptionals { readonly headers: GetFileUploadSessionByIdHeaders = new GetFileUploadSessionByIdHeaders({}); @@ -125,6 +153,20 @@ export interface GetFileUploadSessionByIdOptionalsInput { readonly headers?: GetFileUploadSessionByIdHeaders; readonly cancellationToken?: undefined | CancellationToken; } +export class UploadFilePartByUrlOptionals { + readonly cancellationToken?: CancellationToken = void 0; + constructor( + fields: Omit & + Partial> + ) { + if (fields.cancellationToken) { + this.cancellationToken = fields.cancellationToken; + } + } +} +export interface UploadFilePartByUrlOptionalsInput { + readonly cancellationToken?: undefined | CancellationToken; +} export class UploadFilePartOptionals { readonly cancellationToken?: CancellationToken = void 0; constructor( @@ -139,6 +181,34 @@ export class UploadFilePartOptionals { export interface UploadFilePartOptionalsInput { readonly cancellationToken?: undefined | CancellationToken; } +export class DeleteFileUploadSessionByUrlOptionals { + readonly headers: DeleteFileUploadSessionByUrlHeaders = + new DeleteFileUploadSessionByUrlHeaders({}); + readonly cancellationToken?: CancellationToken = void 0; + constructor( + fields: Omit< + DeleteFileUploadSessionByUrlOptionals, + 'headers' | 'cancellationToken' + > & + Partial< + Pick< + DeleteFileUploadSessionByUrlOptionals, + 'headers' | 'cancellationToken' + > + > + ) { + if (fields.headers) 
{ + this.headers = fields.headers; + } + if (fields.cancellationToken) { + this.cancellationToken = fields.cancellationToken; + } + } +} +export interface DeleteFileUploadSessionByUrlOptionalsInput { + readonly headers?: DeleteFileUploadSessionByUrlHeaders; + readonly cancellationToken?: undefined | CancellationToken; +} export class DeleteFileUploadSessionByIdOptionals { readonly headers: DeleteFileUploadSessionByIdHeaders = new DeleteFileUploadSessionByIdHeaders({}); @@ -167,6 +237,40 @@ export interface DeleteFileUploadSessionByIdOptionalsInput { readonly headers?: DeleteFileUploadSessionByIdHeaders; readonly cancellationToken?: undefined | CancellationToken; } +export class GetFileUploadSessionPartsByUrlOptionals { + readonly queryParams: GetFileUploadSessionPartsByUrlQueryParams = + {} satisfies GetFileUploadSessionPartsByUrlQueryParams; + readonly headers: GetFileUploadSessionPartsByUrlHeaders = + new GetFileUploadSessionPartsByUrlHeaders({}); + readonly cancellationToken?: CancellationToken = void 0; + constructor( + fields: Omit< + GetFileUploadSessionPartsByUrlOptionals, + 'queryParams' | 'headers' | 'cancellationToken' + > & + Partial< + Pick< + GetFileUploadSessionPartsByUrlOptionals, + 'queryParams' | 'headers' | 'cancellationToken' + > + > + ) { + if (fields.queryParams) { + this.queryParams = fields.queryParams; + } + if (fields.headers) { + this.headers = fields.headers; + } + if (fields.cancellationToken) { + this.cancellationToken = fields.cancellationToken; + } + } +} +export interface GetFileUploadSessionPartsByUrlOptionalsInput { + readonly queryParams?: GetFileUploadSessionPartsByUrlQueryParams; + readonly headers?: GetFileUploadSessionPartsByUrlHeaders; + readonly cancellationToken?: undefined | CancellationToken; +} export class GetFileUploadSessionPartsOptionals { readonly queryParams: GetFileUploadSessionPartsQueryParams = {} satisfies GetFileUploadSessionPartsQueryParams; @@ -201,6 +305,25 @@ export interface GetFileUploadSessionPartsOptionalsInput { readonly headers?: GetFileUploadSessionPartsHeaders; readonly cancellationToken?: undefined | CancellationToken; } +export class CreateFileUploadSessionCommitByUrlOptionals { + readonly cancellationToken?: CancellationToken = void 0; + constructor( + fields: Omit< + CreateFileUploadSessionCommitByUrlOptionals, + 'cancellationToken' + > & + Partial< + Pick + > + ) { + if (fields.cancellationToken) { + this.cancellationToken = fields.cancellationToken; + } + } +} +export interface CreateFileUploadSessionCommitByUrlOptionalsInput { + readonly cancellationToken?: undefined | CancellationToken; +} export class CreateFileUploadSessionCommitOptionals { readonly cancellationToken?: CancellationToken = void 0; constructor( @@ -219,7 +342,7 @@ interface PartAccumulator { readonly lastIndex: number; readonly parts: readonly UploadPart[]; readonly fileSize: number; - readonly uploadSessionId: string; + readonly uploadPartUrl: string; readonly fileHash: Hash; } export interface CreateFileUploadSessionRequestBody { @@ -294,6 +417,30 @@ export interface CreateFileUploadSessionForExistingFileHeadersInput { readonly [key: string]: undefined | string; }; } +export class GetFileUploadSessionByUrlHeaders { + /** + * Extra headers that will be included in the HTTP request. 
*/ + readonly extraHeaders?: { + readonly [key: string]: undefined | string; + } = {}; + constructor( + fields: Omit & + Partial> + ) { + if (fields.extraHeaders) { + this.extraHeaders = fields.extraHeaders; + } + } +} +export interface GetFileUploadSessionByUrlHeadersInput { + /** + * Extra headers that will be included in the HTTP request. */ + readonly extraHeaders?: + | undefined + | { + readonly [key: string]: undefined | string; + }; +} export class GetFileUploadSessionByIdHeaders { /** * Extra headers that will be included in the HTTP request. */ @@ -318,6 +465,92 @@ export interface GetFileUploadSessionByIdHeadersInput { readonly [key: string]: undefined | string; }; } +export class UploadFilePartByUrlHeaders { + /** + * The [RFC3230][1] message digest of the chunk uploaded. + * + * Only SHA1 is supported. The SHA1 digest must be base64 + * encoded. The format of this header is as + * `sha=BASE64_ENCODED_DIGEST`. + * + * To get the value for the `SHA` digest, use the + * openSSL command to encode the file part: + * `openssl sha1 -binary | base64` + * + * [1]: https://tools.ietf.org/html/rfc3230 */ + readonly digest!: string; + /** + * The byte range of the chunk. + * + * Must not overlap with the range of a part already + * uploaded this session. Each part’s size must be + * exactly equal in size to the part size specified + * in the upload session that you created. + * One exception is the last part of the file, as this can be smaller. + * + * When providing the value for `content-range`, remember that: + * + * * The lower bound of each part's byte range + * must be a multiple of the part size. + * * The higher bound must be a multiple of the part size - 1. */ + readonly contentRange!: string; + /** + * Extra headers that will be included in the HTTP request. */ + readonly extraHeaders?: { + readonly [key: string]: undefined | string; + } = {}; + constructor( + fields: Omit & + Partial> + ) { + if (fields.digest) { + this.digest = fields.digest; + } + if (fields.contentRange) { + this.contentRange = fields.contentRange; + } + if (fields.extraHeaders) { + this.extraHeaders = fields.extraHeaders; + } + } +} +export interface UploadFilePartByUrlHeadersInput { + /** + * The [RFC3230][1] message digest of the chunk uploaded. + * + * Only SHA1 is supported. The SHA1 digest must be base64 + * encoded. The format of this header is as + * `sha=BASE64_ENCODED_DIGEST`. + * + * To get the value for the `SHA` digest, use the + * openSSL command to encode the file part: + * `openssl sha1 -binary | base64` + * + * [1]: https://tools.ietf.org/html/rfc3230 */ + readonly digest: string; + /** + * The byte range of the chunk. + * + * Must not overlap with the range of a part already + * uploaded this session. Each part’s size must be + * exactly equal in size to the part size specified + * in the upload session that you created. + * One exception is the last part of the file, as this can be smaller. + * + * When providing the value for `content-range`, remember that: + * + * * The lower bound of each part's byte range + * must be a multiple of the part size. + * * The higher bound must be a multiple of the part size - 1. */ + readonly contentRange: string; + /** + * Extra headers that will be included in the HTTP request. */ + readonly extraHeaders?: + | undefined + | { + readonly [key: string]: undefined | string; + }; +} export class UploadFilePartHeaders { /** * The [RFC3230][1] message digest of the chunk uploaded. 
@@ -404,6 +637,30 @@ export interface UploadFilePartHeadersInput { readonly [key: string]: undefined | string; }; } +export class DeleteFileUploadSessionByUrlHeaders { + /** + * Extra headers that will be included in the HTTP request. */ + readonly extraHeaders?: { + readonly [key: string]: undefined | string; + } = {}; + constructor( + fields: Omit & + Partial> + ) { + if (fields.extraHeaders) { + this.extraHeaders = fields.extraHeaders; + } + } +} +export interface DeleteFileUploadSessionByUrlHeadersInput { + /** + * Extra headers that will be included in the HTTP request. */ + readonly extraHeaders?: + | undefined + | { + readonly [key: string]: undefined | string; + }; +} export class DeleteFileUploadSessionByIdHeaders { /** * Extra headers that will be included in the HTTP request. */ @@ -428,6 +685,42 @@ export interface DeleteFileUploadSessionByIdHeadersInput { readonly [key: string]: undefined | string; }; } +export interface GetFileUploadSessionPartsByUrlQueryParams { + /** + * The offset of the item at which to begin the response. + * + * Queries with offset parameter value + * exceeding 10000 will be rejected + * with a 400 response. */ + readonly offset?: number; + /** + * The maximum number of items to return per page. */ + readonly limit?: number; +} +export class GetFileUploadSessionPartsByUrlHeaders { + /** + * Extra headers that will be included in the HTTP request. */ + readonly extraHeaders?: { + readonly [key: string]: undefined | string; + } = {}; + constructor( + fields: Omit & + Partial> + ) { + if (fields.extraHeaders) { + this.extraHeaders = fields.extraHeaders; + } + } +} +export interface GetFileUploadSessionPartsByUrlHeadersInput { + /** + * Extra headers that will be included in the HTTP request. */ + readonly extraHeaders?: + | undefined + | { + readonly [key: string]: undefined | string; + }; +} export interface GetFileUploadSessionPartsQueryParams { /** * The offset of the item at which to begin the response. @@ -464,6 +757,96 @@ export interface GetFileUploadSessionPartsHeadersInput { readonly [key: string]: undefined | string; }; } +export interface CreateFileUploadSessionCommitByUrlRequestBody { + /** + * The list details for the uploaded parts */ + readonly parts: readonly UploadPart[]; +} +export class CreateFileUploadSessionCommitByUrlHeaders { + /** + * The [RFC3230][1] message digest of the whole file. + * + * Only SHA1 is supported. The SHA1 digest must be Base64 + * encoded. The format of this header is as + * `sha=BASE64_ENCODED_DIGEST`. + * + * [1]: https://tools.ietf.org/html/rfc3230 */ + readonly digest!: string; + /** + * Ensures this item hasn't recently changed before + * making changes. + * + * Pass in the item's last observed `etag` value + * into this header and the endpoint will fail + * with a `412 Precondition Failed` if it + * has changed since. */ + readonly ifMatch?: string; + /** + * Ensures an item is only returned if it has changed. + * + * Pass in the item's last observed `etag` value + * into this header and the endpoint will fail + * with a `304 Not Modified` if the item has not + * changed since. */ + readonly ifNoneMatch?: string; + /** + * Extra headers that will be included in the HTTP request. 
*/
+  readonly extraHeaders?: {
+    readonly [key: string]: undefined | string;
+  } = {};
+  constructor(
+    fields: Omit<CreateFileUploadSessionCommitByUrlHeaders, 'extraHeaders'> &
+      Partial<Pick<CreateFileUploadSessionCommitByUrlHeaders, 'extraHeaders'>>
+  ) {
+    if (fields.digest) {
+      this.digest = fields.digest;
+    }
+    if (fields.ifMatch) {
+      this.ifMatch = fields.ifMatch;
+    }
+    if (fields.ifNoneMatch) {
+      this.ifNoneMatch = fields.ifNoneMatch;
+    }
+    if (fields.extraHeaders) {
+      this.extraHeaders = fields.extraHeaders;
+    }
+  }
+}
+export interface CreateFileUploadSessionCommitByUrlHeadersInput {
+  /**
+   * The [RFC3230][1] message digest of the whole file.
+   *
+   * Only SHA1 is supported. The SHA1 digest must be Base64
+   * encoded. The format of this header is as
+   * `sha=BASE64_ENCODED_DIGEST`.
+   *
+   * [1]: https://tools.ietf.org/html/rfc3230 */
+  readonly digest: string;
+  /**
+   * Ensures this item hasn't recently changed before
+   * making changes.
+   *
+   * Pass in the item's last observed `etag` value
+   * into this header and the endpoint will fail
+   * with a `412 Precondition Failed` if it
+   * has changed since. */
+  readonly ifMatch?: string;
+  /**
+   * Ensures an item is only returned if it has changed.
+   *
+   * Pass in the item's last observed `etag` value
+   * into this header and the endpoint will fail
+   * with a `304 Not Modified` if the item has not
+   * changed since. */
+  readonly ifNoneMatch?: string;
+  /**
+   * Extra headers that will be included in the HTTP request. */
+  readonly extraHeaders?:
+    | undefined
+    | {
+        readonly [key: string]: undefined | string;
+      };
+}
 export interface CreateFileUploadSessionCommitRequestBody {
   /**
    * The list details for the uploaded parts */
@@ -563,10 +946,15 @@ export class ChunkedUploadsManager {
     | 'networkSession'
     | 'createFileUploadSession'
     | 'createFileUploadSessionForExistingFile'
+    | 'getFileUploadSessionByUrl'
     | 'getFileUploadSessionById'
+    | 'uploadFilePartByUrl'
     | 'uploadFilePart'
+    | 'deleteFileUploadSessionByUrl'
     | 'deleteFileUploadSessionById'
+    | 'getFileUploadSessionPartsByUrl'
     | 'getFileUploadSessionParts'
+    | 'createFileUploadSessionCommitByUrl'
     | 'createFileUploadSessionCommit'
     | 'reducer'
     | 'uploadBigFile'
@@ -669,8 +1057,44 @@ export class ChunkedUploadsManager {
     )) as FetchResponse;
     return deserializeUploadSession(response.data);
   }
+  /**
+   * Using this method with the URLs provided in the response when creating a new upload session is preferred over using the `getFileUploadSessionById` method.
+   * It ensures your content is always uploaded to the closest Box data center, which can significantly improve upload speed.
+   * Return information about an upload session.
+   *
+   * The actual endpoint URL is returned by the [`Create upload session`](e://post-files-upload-sessions) endpoint.
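+   *
+   * A hedged usage sketch (assuming `statusUrl` was read from the
+   * `sessionEndpoints` of a previously created upload session; the exact
+   * field, e.g. `session.sessionEndpoints.status`, is an assumption here):
+   *
+   * ```ts
+   * const session = await client.chunkedUploads.getFileUploadSessionByUrl(statusUrl);
+   * ```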
+   * @param {string} url URL of getFileUploadSessionById method
+   * @param {GetFileUploadSessionByUrlOptionalsInput} optionalsInput
+   * @returns {Promise<UploadSession>}
+   */
+  async getFileUploadSessionByUrl(
+    url: string,
+    optionalsInput: GetFileUploadSessionByUrlOptionalsInput = {}
+  ): Promise<UploadSession> {
+    const optionals: GetFileUploadSessionByUrlOptionals =
+      new GetFileUploadSessionByUrlOptionals({
+        headers: optionalsInput.headers,
+        cancellationToken: optionalsInput.cancellationToken,
+      });
+    const headers: any = optionals.headers;
+    const cancellationToken: any = optionals.cancellationToken;
+    const headersMap: {
+      readonly [key: string]: string;
+    } = prepareParams({ ...{}, ...headers.extraHeaders });
+    const response: FetchResponse = (await fetch(url, {
+      method: 'GET',
+      headers: headersMap,
+      responseFormat: 'json',
+      auth: this.auth,
+      networkSession: this.networkSession,
+      cancellationToken: cancellationToken,
+    } satisfies FetchOptions)) as FetchResponse;
+    return deserializeUploadSession(response.data);
+  }
   /**
    * Return information about an upload session.
+   *
+   * The actual endpoint URL is returned by the [`Create upload session`](e://post-files-upload-sessions) endpoint.
    * @param {string} uploadSessionId The ID of the upload session. Example: "D5E3F7A"
    * @param {GetFileUploadSessionByIdOptionalsInput} optionalsInput
@@ -708,7 +1132,60 @@ export class ChunkedUploadsManager {
     return deserializeUploadSession(response.data);
   }
   /**
-   * Updates a chunk of an upload session for a file.
+   * Using this method with the URLs provided in the response when creating a new upload session is preferred over using the `uploadFilePart` method.
+   * It ensures your content is always uploaded to the closest Box data center, which can significantly improve upload speed.
+   * Uploads a chunk of a file for an upload session.
+   *
+   * The actual endpoint URL is returned by the [`Create upload session`](e://post-files-upload-sessions)
+   * and [`Get upload session`](e://get-files-upload-sessions-id) endpoints.
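+   *
+   * A hedged usage sketch (mirroring the reducer in this file; assumes
+   * `uploadPartUrl` came from the session's `sessionEndpoints` and that
+   * `digest` and `contentRange` were computed for `chunkBuffer` as in
+   * `uploadBigFile`):
+   *
+   * ```ts
+   * const uploadedPart = await client.chunkedUploads.uploadFilePartByUrl(
+   *   uploadPartUrl,
+   *   generateByteStreamFromBuffer(chunkBuffer),
+   *   { digest: digest, contentRange: contentRange } satisfies UploadFilePartByUrlHeadersInput
+   * );
+   * ```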
+   * @param {string} url URL of uploadFilePart method
+   * @param {ByteStream} requestBody Request body of uploadFilePart method
+   * @param {UploadFilePartByUrlHeadersInput} headersInput Headers of uploadFilePart method
+   * @param {UploadFilePartByUrlOptionalsInput} optionalsInput
+   * @returns {Promise<UploadedPart>}
+   */
+  async uploadFilePartByUrl(
+    url: string,
+    requestBody: ByteStream,
+    headersInput: UploadFilePartByUrlHeadersInput,
+    optionalsInput: UploadFilePartByUrlOptionalsInput = {}
+  ): Promise<UploadedPart> {
+    const headers: UploadFilePartByUrlHeaders = new UploadFilePartByUrlHeaders({
+      digest: headersInput.digest,
+      contentRange: headersInput.contentRange,
+      extraHeaders: headersInput.extraHeaders,
+    });
+    const optionals: UploadFilePartByUrlOptionals =
+      new UploadFilePartByUrlOptionals({
+        cancellationToken: optionalsInput.cancellationToken,
+      });
+    const cancellationToken: any = optionals.cancellationToken;
+    const headersMap: {
+      readonly [key: string]: string;
+    } = prepareParams({
+      ...{
+        ['digest']: toString(headers.digest) as string,
+        ['content-range']: toString(headers.contentRange) as string,
+      },
+      ...headers.extraHeaders,
+    });
+    const response: FetchResponse = (await fetch(url, {
+      method: 'PUT',
+      headers: headersMap,
+      fileStream: requestBody,
+      contentType: 'application/octet-stream',
+      responseFormat: 'json',
+      auth: this.auth,
+      networkSession: this.networkSession,
+      cancellationToken: cancellationToken,
+    } satisfies FetchOptions)) as FetchResponse;
+    return deserializeUploadedPart(response.data);
+  }
+  /**
+   * Uploads a chunk of a file for an upload session.
+   *
+   * The actual endpoint URL is returned by the [`Create upload session`](e://post-files-upload-sessions)
+   * and [`Get upload session`](e://get-files-upload-sessions-id) endpoints.
    * @param {string} uploadSessionId The ID of the upload session. Example: "D5E3F7A"
    * @param {ByteStream} requestBody Request body of uploadFilePart method
@@ -759,10 +1236,50 @@ export class ChunkedUploadsManager {
     )) as FetchResponse;
     return deserializeUploadedPart(response.data);
   }
+  /**
+   * Using this method with the URLs provided in the response when creating a new upload session is preferred over using the `deleteFileUploadSessionById` method.
+   * It ensures your content is always uploaded to the closest Box data center, which can significantly improve upload speed.
+   * Abort an upload session and discard all data uploaded.
+   *
+   * This cannot be reversed.
+   *
+   * The actual endpoint URL is returned by the [`Create upload session`](e://post-files-upload-sessions)
+   * and [`Get upload session`](e://get-files-upload-sessions-id) endpoints.
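+   *
+   * A hedged sketch (assuming `abortUrl` was read from the session's
+   * `sessionEndpoints`; the exact field name, e.g. `abort`, is an
+   * assumption based on the Box upload-session schema):
+   *
+   * ```ts
+   * await client.chunkedUploads.deleteFileUploadSessionByUrl(abortUrl);
+   * ```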
+   * @param {string} url URL of deleteFileUploadSessionById method
+   * @param {DeleteFileUploadSessionByUrlOptionalsInput} optionalsInput
+   * @returns {Promise<undefined>}
+   */
+  async deleteFileUploadSessionByUrl(
+    url: string,
+    optionalsInput: DeleteFileUploadSessionByUrlOptionalsInput = {}
+  ): Promise<undefined> {
+    const optionals: DeleteFileUploadSessionByUrlOptionals =
+      new DeleteFileUploadSessionByUrlOptionals({
+        headers: optionalsInput.headers,
+        cancellationToken: optionalsInput.cancellationToken,
+      });
+    const headers: any = optionals.headers;
+    const cancellationToken: any = optionals.cancellationToken;
+    const headersMap: {
+      readonly [key: string]: string;
+    } = prepareParams({ ...{}, ...headers.extraHeaders });
+    const response: FetchResponse = (await fetch(url, {
+      method: 'DELETE',
+      headers: headersMap,
+      responseFormat: void 0,
+      auth: this.auth,
+      networkSession: this.networkSession,
+      cancellationToken: cancellationToken,
+    } satisfies FetchOptions)) as FetchResponse;
+    return void 0;
+  }
   /**
    * Abort an upload session and discard all data uploaded.
    *
    * This cannot be reversed.
+   *
+   * The actual endpoint URL is returned by the [`Create upload session`](e://post-files-upload-sessions)
+   * and [`Get upload session`](e://get-files-upload-sessions-id) endpoints.
    * @param {string} uploadSessionId The ID of the upload session. Example: "D5E3F7A"
    * @param {DeleteFileUploadSessionByIdOptionalsInput} optionalsInput
@@ -800,8 +1317,54 @@ export class ChunkedUploadsManager {
     return void 0;
   }
   /**
-   * Return a list of the chunks uploaded to the upload
-   * session so far.
+   * Using this method with the URLs provided in the response when creating a new upload session is preferred over using the `getFileUploadSessionParts` method.
+   * It ensures your content is always uploaded to the closest Box data center, which can significantly improve upload speed.
+   * Return a list of the chunks uploaded to the upload session so far.
+   *
+   * The actual endpoint URL is returned by the [`Create upload session`](e://post-files-upload-sessions)
+   * and [`Get upload session`](e://get-files-upload-sessions-id) endpoints.
+   * @param {string} url URL of getFileUploadSessionParts method
+   * @param {GetFileUploadSessionPartsByUrlOptionalsInput} optionalsInput
+   * @returns {Promise<UploadParts>}
+   */
+  async getFileUploadSessionPartsByUrl(
+    url: string,
+    optionalsInput: GetFileUploadSessionPartsByUrlOptionalsInput = {}
+  ): Promise<UploadParts> {
+    const optionals: GetFileUploadSessionPartsByUrlOptionals =
+      new GetFileUploadSessionPartsByUrlOptionals({
+        queryParams: optionalsInput.queryParams,
+        headers: optionalsInput.headers,
+        cancellationToken: optionalsInput.cancellationToken,
+      });
+    const queryParams: any = optionals.queryParams;
+    const headers: any = optionals.headers;
+    const cancellationToken: any = optionals.cancellationToken;
+    const queryParamsMap: {
+      readonly [key: string]: string;
+    } = prepareParams({
+      ['offset']: toString(queryParams.offset) as string,
+      ['limit']: toString(queryParams.limit) as string,
+    });
+    const headersMap: {
+      readonly [key: string]: string;
+    } = prepareParams({ ...{}, ...headers.extraHeaders });
+    const response: FetchResponse = (await fetch(url, {
+      method: 'GET',
+      params: queryParamsMap,
+      headers: headersMap,
+      responseFormat: 'json',
+      auth: this.auth,
+      networkSession: this.networkSession,
+      cancellationToken: cancellationToken,
+    } satisfies FetchOptions)) as FetchResponse;
+    return deserializeUploadParts(response.data);
+  }
+  /**
+   * Return a list of the chunks uploaded to the upload session so far.
+   *
+   * The actual endpoint URL is returned by the [`Create upload session`](e://post-files-upload-sessions)
+   * and [`Get upload session`](e://get-files-upload-sessions-id) endpoints.
    * @param {string} uploadSessionId The ID of the upload session. Example: "D5E3F7A"
    * @param {GetFileUploadSessionPartsOptionalsInput} optionalsInput
@@ -849,8 +1412,63 @@ export class ChunkedUploadsManager {
     return deserializeUploadParts(response.data);
   }
   /**
-   * Close an upload session and create a file from the
-   * uploaded chunks.
+   * Using this method with the URLs provided in the response when creating a new upload session is preferred over using the `createFileUploadSessionCommit` method.
+   * It ensures your content is always uploaded to the closest Box data center, which can significantly improve upload speed.
+   * Close an upload session and create a file from the uploaded chunks.
+   *
+   * The actual endpoint URL is returned by the [`Create upload session`](e://post-files-upload-sessions)
+   * and [`Get upload session`](e://get-files-upload-sessions-id) endpoints.
+   * @param {string} url URL of createFileUploadSessionCommit method
+   * @param {CreateFileUploadSessionCommitByUrlRequestBody} requestBody Request body of createFileUploadSessionCommit method
+   * @param {CreateFileUploadSessionCommitByUrlHeadersInput} headersInput Headers of createFileUploadSessionCommit method
+   * @param {CreateFileUploadSessionCommitByUrlOptionalsInput} optionalsInput
+   * @returns {Promise<Files>}
+   */
+  async createFileUploadSessionCommitByUrl(
+    url: string,
+    requestBody: CreateFileUploadSessionCommitByUrlRequestBody,
+    headersInput: CreateFileUploadSessionCommitByUrlHeadersInput,
+    optionalsInput: CreateFileUploadSessionCommitByUrlOptionalsInput = {}
+  ): Promise<Files> {
+    const headers: CreateFileUploadSessionCommitByUrlHeaders =
+      new CreateFileUploadSessionCommitByUrlHeaders({
+        digest: headersInput.digest,
+        ifMatch: headersInput.ifMatch,
+        ifNoneMatch: headersInput.ifNoneMatch,
+        extraHeaders: headersInput.extraHeaders,
+      });
+    const optionals: CreateFileUploadSessionCommitByUrlOptionals =
+      new CreateFileUploadSessionCommitByUrlOptionals({
+        cancellationToken: optionalsInput.cancellationToken,
+      });
+    const cancellationToken: any = optionals.cancellationToken;
+    const headersMap: {
+      readonly [key: string]: string;
+    } = prepareParams({
+      ...{
+        ['digest']: toString(headers.digest) as string,
+        ['if-match']: toString(headers.ifMatch) as string,
+        ['if-none-match']: toString(headers.ifNoneMatch) as string,
+      },
+      ...headers.extraHeaders,
+    });
+    const response: FetchResponse = (await fetch(url, {
+      method: 'POST',
+      headers: headersMap,
+      data: serializeCreateFileUploadSessionCommitRequestBody(requestBody),
+      contentType: 'application/json',
+      responseFormat: 'json',
+      auth: this.auth,
+      networkSession: this.networkSession,
+      cancellationToken: cancellationToken,
+    } satisfies FetchOptions)) as FetchResponse;
+    return deserializeFiles(response.data);
+  }
+  /**
+   * Close an upload session and create a file from the uploaded chunks.
+   *
+   * The actual endpoint URL is returned by the [`Create upload session`](e://post-files-upload-sessions)
+   * and [`Get upload session`](e://get-files-upload-sessions-id) endpoints.
    * @param {string} uploadSessionId The ID of the upload session. Example: "D5E3F7A"
    * @param {CreateFileUploadSessionCommitRequestBody} requestBody Request body of createFileUploadSessionCommit method
@@ -933,13 +1551,13 @@ export class ChunkedUploadsManager {
       '/',
       (toString(acc.fileSize) as string)!
) as string; - const uploadedPart: UploadedPart = await this.uploadFilePart( - acc.uploadSessionId, + const uploadedPart: UploadedPart = await this.uploadFilePartByUrl( + acc.uploadPartUrl, generateByteStreamFromBuffer(chunkBuffer), { digest: digest, contentRange: contentRange, - } satisfies UploadFilePartHeadersInput + } satisfies UploadFilePartByUrlHeadersInput ); const part: UploadPart = uploadedPart.part!; const partSha1: string = hexToBase64(part.sha1!); @@ -957,7 +1575,7 @@ export class ChunkedUploadsManager { lastIndex: bytesEnd, parts: parts.concat([part]), fileSize: acc.fileSize, - uploadSessionId: acc.uploadSessionId, + uploadPartUrl: acc.uploadPartUrl, fileHash: acc.fileHash, } satisfies PartAccumulator; } @@ -988,7 +1606,9 @@ export class ChunkedUploadsManager { cancellationToken: cancellationToken, } satisfies CreateFileUploadSessionOptionalsInput ); - const uploadSessionId: string = uploadSession.id!; + const uploadPartUrl: string = uploadSession.sessionEndpoints!.uploadPart!; + const commitUrl: string = uploadSession.sessionEndpoints!.commit!; + const listPartsUrl: string = uploadSession.sessionEndpoints!.listParts!; const partSize: number = uploadSession.partSize!; const totalParts: number = uploadSession.totalParts!; if (!(partSize * totalParts >= fileSize)) { @@ -1006,30 +1626,35 @@ export class ChunkedUploadsManager { lastIndex: -1, parts: [], fileSize: fileSize, - uploadSessionId: uploadSessionId, + uploadPartUrl: uploadPartUrl, fileHash: fileHash, } satisfies PartAccumulator ); const parts: readonly UploadPart[] = results.parts; const processedSessionParts: UploadParts = - await this.getFileUploadSessionParts(uploadSessionId, { - queryParams: {} satisfies GetFileUploadSessionPartsQueryParams, - headers: new GetFileUploadSessionPartsHeaders({}), + await this.getFileUploadSessionPartsByUrl(listPartsUrl, { + queryParams: {} satisfies GetFileUploadSessionPartsByUrlQueryParams, + headers: new GetFileUploadSessionPartsByUrlHeaders({}), cancellationToken: cancellationToken, - } satisfies GetFileUploadSessionPartsOptionalsInput); + } satisfies GetFileUploadSessionPartsByUrlOptionalsInput); if (!(processedSessionParts.totalCount! 
== totalParts)) { throw new Error('Assertion failed'); } const sha1: string = await fileHash.digestHash('base64'); const digest: string = ''.concat('sha=', sha1) as string; - const committedSession: Files = await this.createFileUploadSessionCommit( - uploadSessionId, - { parts: parts } satisfies CreateFileUploadSessionCommitRequestBody, - { digest: digest } satisfies CreateFileUploadSessionCommitHeadersInput, - { - cancellationToken: cancellationToken, - } satisfies CreateFileUploadSessionCommitOptionalsInput - ); + const committedSession: Files = + await this.createFileUploadSessionCommitByUrl( + commitUrl, + { + parts: parts, + } satisfies CreateFileUploadSessionCommitByUrlRequestBody, + { + digest: digest, + } satisfies CreateFileUploadSessionCommitByUrlHeadersInput, + { + cancellationToken: cancellationToken, + } satisfies CreateFileUploadSessionCommitByUrlOptionalsInput + ); return committedSession.entries![0]; } } @@ -1142,6 +1767,45 @@ export function deserializeCreateFileUploadSessionForExistingFileRequestBody( fileName: fileName, } satisfies CreateFileUploadSessionForExistingFileRequestBody; } +export function serializeCreateFileUploadSessionCommitByUrlRequestBody( + val: CreateFileUploadSessionCommitByUrlRequestBody +): SerializedData { + return { + ['parts']: val.parts.map(function (item: UploadPart): SerializedData { + return serializeUploadPart(item); + }) as readonly any[], + }; +} +export function deserializeCreateFileUploadSessionCommitByUrlRequestBody( + val: SerializedData +): CreateFileUploadSessionCommitByUrlRequestBody { + if (!sdIsMap(val)) { + throw new BoxSdkError({ + message: + 'Expecting a map for "CreateFileUploadSessionCommitByUrlRequestBody"', + }); + } + if (val.parts == void 0) { + throw new BoxSdkError({ + message: + 'Expecting "parts" of type "CreateFileUploadSessionCommitByUrlRequestBody" to be defined', + }); + } + if (!sdIsList(val.parts)) { + throw new BoxSdkError({ + message: + 'Expecting array for "parts" of type "CreateFileUploadSessionCommitByUrlRequestBody"', + }); + } + const parts: readonly UploadPart[] = sdIsList(val.parts) + ? 
(val.parts.map(function (itm: SerializedData): UploadPart { + return deserializeUploadPart(itm); + }) as readonly any[]) + : []; + return { + parts: parts, + } satisfies CreateFileUploadSessionCommitByUrlRequestBody; +} export function serializeCreateFileUploadSessionCommitRequestBody( val: CreateFileUploadSessionCommitRequestBody ): SerializedData { diff --git a/src/test/chunkedUploads.generated.test.ts b/src/test/chunkedUploads.generated.test.ts index 7776fe1a..ada81265 100644 --- a/src/test/chunkedUploads.generated.test.ts +++ b/src/test/chunkedUploads.generated.test.ts @@ -1,11 +1,55 @@ +import { serializeCreateFileUploadSessionRequestBody } from '../managers/chunkedUploads.generated.js'; +import { deserializeCreateFileUploadSessionRequestBody } from '../managers/chunkedUploads.generated.js'; +import { serializeCreateFileUploadSessionCommitRequestBody } from '../managers/chunkedUploads.generated.js'; +import { deserializeCreateFileUploadSessionCommitRequestBody } from '../managers/chunkedUploads.generated.js'; +import { serializeCreateFileUploadSessionCommitByUrlRequestBody } from '../managers/chunkedUploads.generated.js'; +import { deserializeCreateFileUploadSessionCommitByUrlRequestBody } from '../managers/chunkedUploads.generated.js'; import { serializeFile } from '../schemas/file.generated.js'; import { deserializeFile } from '../schemas/file.generated.js'; -import { BoxClient } from '../client.generated.js'; -import { ByteStream } from '../internal/utils.js'; +import { serializeUploadSession } from '../schemas/uploadSession.generated.js'; +import { deserializeUploadSession } from '../schemas/uploadSession.generated.js'; +import { serializeUploadPart } from '../schemas/uploadPart.generated.js'; +import { deserializeUploadPart } from '../schemas/uploadPart.generated.js'; +import { serializeUploadParts } from '../schemas/uploadParts.generated.js'; +import { deserializeUploadParts } from '../schemas/uploadParts.generated.js'; +import { serializeUploadedPart } from '../schemas/uploadedPart.generated.js'; +import { deserializeUploadedPart } from '../schemas/uploadedPart.generated.js'; +import { serializeFiles } from '../schemas/files.generated.js'; +import { deserializeFiles } from '../schemas/files.generated.js'; +import { UploadFilePartHeadersInput } from '../managers/chunkedUploads.generated.js'; +import { CreateFileUploadSessionCommitHeadersInput } from '../managers/chunkedUploads.generated.js'; +import { UploadFilePartByUrlHeadersInput } from '../managers/chunkedUploads.generated.js'; +import { CreateFileUploadSessionCommitByUrlHeadersInput } from '../managers/chunkedUploads.generated.js'; +import { Buffer } from '../internal/utils.js'; +import { HashName } from '../internal/utils.js'; +import { UploadFilePartHeaders } from '../managers/chunkedUploads.generated.js'; +import { CreateFileUploadSessionRequestBody } from '../managers/chunkedUploads.generated.js'; +import { Iterator } from '../internal/utils.js'; +import { CreateFileUploadSessionCommitRequestBody } from '../managers/chunkedUploads.generated.js'; +import { CreateFileUploadSessionCommitHeaders } from '../managers/chunkedUploads.generated.js'; +import { UploadFilePartByUrlHeaders } from '../managers/chunkedUploads.generated.js'; +import { CreateFileUploadSessionCommitByUrlRequestBody } from '../managers/chunkedUploads.generated.js'; +import { CreateFileUploadSessionCommitByUrlHeaders } from '../managers/chunkedUploads.generated.js'; +import { generateByteStreamFromBuffer } from '../internal/utils.js'; +import { hexToBase64 
} from '../internal/utils.js';
+import { iterateChunks } from '../internal/utils.js';
+import { readByteStream } from '../internal/utils.js';
+import { reduceIterator } from '../internal/utils.js';
+import { Hash } from '../internal/utils.js';
+import { bufferLength } from '../internal/utils.js';
 import { getUuid } from '../internal/utils.js';
 import { generateByteStream } from '../internal/utils.js';
+import { ByteStream } from '../internal/utils.js';
 import { getDefaultClient } from './commons.generated.js';
 import { File } from '../schemas/file.generated.js';
+import { UploadSession } from '../schemas/uploadSession.generated.js';
+import { UploadPart } from '../schemas/uploadPart.generated.js';
+import { UploadParts } from '../schemas/uploadParts.generated.js';
+import { UploadedPart } from '../schemas/uploadedPart.generated.js';
+import { Files } from '../schemas/files.generated.js';
+import { BoxClient } from '../client.generated.js';
+import { toString } from '../internal/utils.js';
+import { sdToJson } from '../serialization/json.js';
 import { SerializedData } from '../serialization/json.js';
 import { sdIsEmpty } from '../serialization/json.js';
 import { sdIsBoolean } from '../serialization/json.js';
@@ -14,7 +58,292 @@ import { sdIsString } from '../serialization/json.js';
 import { sdIsList } from '../serialization/json.js';
 import { sdIsMap } from '../serialization/json.js';
 export const client: BoxClient = getDefaultClient();
-test('testChunkedUpload', async function testChunkedUpload(): Promise<void> {
+export class TestPartAccumulator {
+  readonly lastIndex!: number;
+  readonly parts!: readonly UploadPart[];
+  readonly fileSize!: number;
+  readonly uploadPartUrl: string = '';
+  readonly uploadSessionId: string = '';
+  readonly fileHash!: Hash;
+  constructor(
+    fields: Omit<TestPartAccumulator, 'uploadPartUrl' | 'uploadSessionId'> &
+      Partial<Pick<TestPartAccumulator, 'uploadPartUrl' | 'uploadSessionId'>>
+  ) {
+    if (fields.lastIndex) {
+      this.lastIndex = fields.lastIndex;
+    }
+    if (fields.parts) {
+      this.parts = fields.parts;
+    }
+    if (fields.fileSize) {
+      this.fileSize = fields.fileSize;
+    }
+    if (fields.uploadPartUrl) {
+      this.uploadPartUrl = fields.uploadPartUrl;
+    }
+    if (fields.uploadSessionId) {
+      this.uploadSessionId = fields.uploadSessionId;
+    }
+    if (fields.fileHash) {
+      this.fileHash = fields.fileHash;
+    }
+  }
+}
+export interface TestPartAccumulatorInput {
+  readonly lastIndex: number;
+  readonly parts: readonly UploadPart[];
+  readonly fileSize: number;
+  readonly uploadPartUrl?: string;
+  readonly uploadSessionId?: string;
+  readonly fileHash: Hash;
+}
+async function reducerById(
+  accInput: TestPartAccumulatorInput,
+  chunk: ByteStream
+): Promise<TestPartAccumulator> {
+  const acc: TestPartAccumulator = new TestPartAccumulator({
+    lastIndex: accInput.lastIndex,
+    parts: accInput.parts,
+    fileSize: accInput.fileSize,
+    uploadPartUrl: accInput.uploadPartUrl,
+    uploadSessionId: accInput.uploadSessionId,
+    fileHash: accInput.fileHash,
+  });
+  const lastIndex: number = acc.lastIndex;
+  const parts: readonly UploadPart[] = acc.parts;
+  const chunkBuffer: Buffer = await readByteStream(chunk);
+  const hash: Hash = new Hash({ algorithm: 'sha1' as HashName });
+  hash.updateHash(chunkBuffer);
+  const sha1: string = await hash.digestHash('base64');
+  const digest: string = ''.concat('sha=', sha1) as string;
+  const chunkSize: number = bufferLength(chunkBuffer);
+  const bytesStart: number = lastIndex + 1;
+  const bytesEnd: number = lastIndex + chunkSize;
+  const contentRange: string = ''.concat(
+    'bytes ',
+    (toString(bytesStart) as string)!,
+    '-',
+    (toString(bytesEnd) as string)!,
+    '/',
+    (toString(acc.fileSize) as string)!
+  ) as string;
+  const uploadedPart: UploadedPart = await client.chunkedUploads.uploadFilePart(
+    acc.uploadSessionId,
+    generateByteStreamFromBuffer(chunkBuffer),
+    {
+      digest: digest,
+      contentRange: contentRange,
+    } satisfies UploadFilePartHeadersInput
+  );
+  const part: UploadPart = uploadedPart.part!;
+  const partSha1: string = hexToBase64(part.sha1!);
+  if (!(partSha1 == sha1)) {
+    throw new Error('Assertion failed');
+  }
+  if (!(part.size! == chunkSize)) {
+    throw new Error('Assertion failed');
+  }
+  if (!(part.offset! == bytesStart)) {
+    throw new Error('Assertion failed');
+  }
+  acc.fileHash.updateHash(chunkBuffer);
+  return new TestPartAccumulator({
+    lastIndex: bytesEnd,
+    parts: parts.concat([part]),
+    fileSize: acc.fileSize,
+    uploadSessionId: acc.uploadSessionId,
+    fileHash: acc.fileHash,
+  });
+}
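The reducer above derives the two headers every upload-part request needs: a `digest` of the form `sha=` plus the base64-encoded SHA-1 of the chunk, and a `contentRange` of the form `bytes <start>-<end>/<total>`. A minimal standalone sketch of the same header math, using Node's built-in `crypto` module rather than the SDK's `Hash` helper (an assumption for illustration, not SDK code):

```ts
import { createHash } from 'crypto';

// Hypothetical helper: compute the headers for one chunk that starts at
// byte `offset` of a file that is `totalSize` bytes long in total.
function partHeaders(chunk: Buffer, offset: number, totalSize: number) {
  const sha1 = createHash('sha1').update(chunk).digest('base64');
  return {
    digest: `sha=${sha1}`, // base64 SHA-1 of this chunk only
    contentRange: `bytes ${offset}-${offset + chunk.length - 1}/${totalSize}`,
  };
}
```

This matches the accumulator arithmetic: `bytesStart` is `lastIndex + 1` and `bytesEnd` is `lastIndex + chunkSize`, i.e. the range end is inclusive.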
+async function reducerByUrl(
+  accInput: TestPartAccumulatorInput,
+  chunk: ByteStream
+): Promise<TestPartAccumulator> {
+  const acc: TestPartAccumulator = new TestPartAccumulator({
+    lastIndex: accInput.lastIndex,
+    parts: accInput.parts,
+    fileSize: accInput.fileSize,
+    uploadPartUrl: accInput.uploadPartUrl,
+    uploadSessionId: accInput.uploadSessionId,
+    fileHash: accInput.fileHash,
+  });
+  const lastIndex: number = acc.lastIndex;
+  const parts: readonly UploadPart[] = acc.parts;
+  const chunkBuffer: Buffer = await readByteStream(chunk);
+  const hash: Hash = new Hash({ algorithm: 'sha1' as HashName });
+  hash.updateHash(chunkBuffer);
+  const sha1: string = await hash.digestHash('base64');
+  const digest: string = ''.concat('sha=', sha1) as string;
+  const chunkSize: number = bufferLength(chunkBuffer);
+  const bytesStart: number = lastIndex + 1;
+  const bytesEnd: number = lastIndex + chunkSize;
+  const contentRange: string = ''.concat(
+    'bytes ',
+    (toString(bytesStart) as string)!,
+    '-',
+    (toString(bytesEnd) as string)!,
+    '/',
+    (toString(acc.fileSize) as string)!
+  ) as string;
+  const uploadedPart: UploadedPart =
+    await client.chunkedUploads.uploadFilePartByUrl(
+      acc.uploadPartUrl,
+      generateByteStreamFromBuffer(chunkBuffer),
+      {
+        digest: digest,
+        contentRange: contentRange,
+      } satisfies UploadFilePartByUrlHeadersInput
+    );
+  const part: UploadPart = uploadedPart.part!;
+  const partSha1: string = hexToBase64(part.sha1!);
+  if (!(partSha1 == sha1)) {
+    throw new Error('Assertion failed');
+  }
+  if (!(part.size! == chunkSize)) {
+    throw new Error('Assertion failed');
+  }
+  if (!(part.offset! == bytesStart)) {
+    throw new Error('Assertion failed');
+  }
+  acc.fileHash.updateHash(chunkBuffer);
+  return new TestPartAccumulator({
+    lastIndex: bytesEnd,
+    parts: parts.concat([part]),
+    fileSize: acc.fileSize,
+    uploadPartUrl: acc.uploadPartUrl,
+    fileHash: acc.fileHash,
+  });
+}
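`reducerById` and `reducerByUrl` are otherwise identical; they differ only in the call they make (`uploadFilePart` with a session ID versus `uploadFilePartByUrl` with the session's `uploadPart` endpoint URL) and in which identifier they carry forward in the accumulator. Both are driven by `reduceIterator`, which the tests use as an async left fold over the chunk iterator. A plausible reading of that contract, sketched here under that assumption rather than taken from the SDK's actual implementation:

```ts
// Assumed contract of reduceIterator: fold an async iterator into a single
// accumulator value, awaiting the reducer on each item in order.
async function reduceIteratorSketch<T, U>(
  iterator: AsyncIterator<U>,
  reducer: (acc: T, item: U) => Promise<T>,
  initial: T
): Promise<T> {
  let acc = initial;
  for (let r = await iterator.next(); !r.done; r = await iterator.next()) {
    acc = await reducer(acc, r.value);
  }
  return acc;
}
```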
+test('testChunkedManualProcessById', async function testChunkedManualProcessById(): Promise<void> {
+  const fileSize: number = 20 * 1024 * 1024;
+  const fileByteStream: ByteStream = generateByteStream(fileSize);
+  const fileName: string = getUuid();
+  const parentFolderId: string = '0';
+  const uploadSession: UploadSession =
+    await client.chunkedUploads.createFileUploadSession({
+      fileName: fileName,
+      fileSize: fileSize,
+      folderId: parentFolderId,
+    } satisfies CreateFileUploadSessionRequestBody);
+  const uploadSessionId: string = uploadSession.id!;
+  const partSize: number = uploadSession.partSize!;
+  const totalParts: number = uploadSession.totalParts!;
+  if (!(partSize * totalParts >= fileSize)) {
+    throw new Error('Assertion failed');
+  }
+  if (!(uploadSession.numPartsProcessed == 0)) {
+    throw new Error('Assertion failed');
+  }
+  const fileHash: Hash = new Hash({ algorithm: 'sha1' as HashName });
+  const chunksIterator: Iterator<ByteStream> = iterateChunks(
+    fileByteStream,
+    partSize,
+    fileSize
+  );
+  const results: TestPartAccumulator = await reduceIterator(
+    chunksIterator,
+    reducerById,
+    new TestPartAccumulator({
+      lastIndex: -1,
+      parts: [],
+      fileSize: fileSize,
+      uploadSessionId: uploadSessionId,
+      fileHash: fileHash,
+    })
+  );
+  const parts: readonly UploadPart[] = results.parts;
+  const processedSessionParts: UploadParts =
+    await client.chunkedUploads.getFileUploadSessionParts(uploadSessionId);
+  if (!(processedSessionParts.totalCount! == totalParts)) {
+    throw new Error('Assertion failed');
+  }
+  const processedSession: UploadSession =
+    await client.chunkedUploads.getFileUploadSessionById(uploadSessionId);
+  if (!(processedSession.id! == uploadSessionId)) {
+    throw new Error('Assertion failed');
+  }
+  const sha1: string = await fileHash.digestHash('base64');
+  const digest: string = ''.concat('sha=', sha1) as string;
+  const committedSession: Files =
+    await client.chunkedUploads.createFileUploadSessionCommit(
+      uploadSessionId,
+      { parts: parts } satisfies CreateFileUploadSessionCommitRequestBody,
+      { digest: digest } satisfies CreateFileUploadSessionCommitHeadersInput
+    );
+  if (!(committedSession.entries![0].name! == fileName)) {
+    throw new Error('Assertion failed');
+  }
+  await client.chunkedUploads.deleteFileUploadSessionById(uploadSessionId);
+});
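Note that two different digests are in play in this test: each uploaded part carries the SHA-1 of that chunk alone, while the final commit carries the SHA-1 of the whole file, which the test accumulates by feeding every chunk into the single `fileHash` object as it goes. A sketch of the same incremental hashing with Node's `crypto` (the sample `chunks` array is hypothetical, standing in for the test's 20 MB stream):

```ts
import { createHash } from 'crypto';

// Sample chunks standing in for the streamed file content.
const chunks: Buffer[] = [Buffer.from('part one'), Buffer.from('part two')];

// Build the whole-file digest incrementally while parts are uploaded,
// so the full file never has to be buffered just to hash it.
const fileHash = createHash('sha1');
for (const chunk of chunks) {
  fileHash.update(chunk);
}
const commitDigest = `sha=${fileHash.digest('base64')}`;
```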
+test('testChunkedManualProcessByUrl', async function testChunkedManualProcessByUrl(): Promise<void> {
+  const fileSize: number = 20 * 1024 * 1024;
+  const fileByteStream: ByteStream = generateByteStream(fileSize);
+  const fileName: string = getUuid();
+  const parentFolderId: string = '0';
+  const uploadSession: UploadSession =
+    await client.chunkedUploads.createFileUploadSession({
+      fileName: fileName,
+      fileSize: fileSize,
+      folderId: parentFolderId,
+    } satisfies CreateFileUploadSessionRequestBody);
+  const uploadPartUrl: string = uploadSession.sessionEndpoints!.uploadPart!;
+  const commitUrl: string = uploadSession.sessionEndpoints!.commit!;
+  const listPartsUrl: string = uploadSession.sessionEndpoints!.listParts!;
+  const statusUrl: string = uploadSession.sessionEndpoints!.status!;
+  const abortUrl: string = uploadSession.sessionEndpoints!.abort!;
+  const uploadSessionId: string = uploadSession.id!;
+  const partSize: number = uploadSession.partSize!;
+  const totalParts: number = uploadSession.totalParts!;
+  if (!(partSize * totalParts >= fileSize)) {
+    throw new Error('Assertion failed');
+  }
+  if (!(uploadSession.numPartsProcessed == 0)) {
+    throw new Error('Assertion failed');
+  }
+  const fileHash: Hash = new Hash({ algorithm: 'sha1' as HashName });
+  const chunksIterator: Iterator<ByteStream> = iterateChunks(
+    fileByteStream,
+    partSize,
+    fileSize
+  );
+  const results: TestPartAccumulator = await reduceIterator(
+    chunksIterator,
+    reducerByUrl,
+    new TestPartAccumulator({
+      lastIndex: -1,
+      parts: [],
+      fileSize: fileSize,
+      uploadPartUrl: uploadPartUrl,
+      fileHash: fileHash,
+    })
+  );
+  const parts: readonly UploadPart[] = results.parts;
+  const processedSessionParts: UploadParts =
+    await client.chunkedUploads.getFileUploadSessionPartsByUrl(listPartsUrl);
+  if (!(processedSessionParts.totalCount! == totalParts)) {
+    throw new Error('Assertion failed');
+  }
+  const processedSession: UploadSession =
+    await client.chunkedUploads.getFileUploadSessionByUrl(statusUrl);
+  if (!(processedSession.id! == uploadSessionId)) {
+    throw new Error('Assertion failed');
+  }
+  const sha1: string = await fileHash.digestHash('base64');
+  const digest: string = ''.concat('sha=', sha1) as string;
+  const committedSession: Files =
+    await client.chunkedUploads.createFileUploadSessionCommitByUrl(
+      commitUrl,
+      { parts: parts } satisfies CreateFileUploadSessionCommitByUrlRequestBody,
+      {
+        digest: digest,
+      } satisfies CreateFileUploadSessionCommitByUrlHeadersInput
+    );
+  if (!(committedSession.entries![0].name! == fileName)) {
+    throw new Error('Assertion failed');
+  }
+  await client.chunkedUploads.deleteFileUploadSessionByUrl(abortUrl);
+});
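This test exercises the point of the change: none of the by-URL calls construct an endpoint themselves. Every URL (`uploadPart`, `commit`, `listParts`, `status`, `abort`) is read from the `sessionEndpoints` object returned when the session is created. A more defensive variant of that extraction than the test's non-null assertions might look like the following sketch, which reuses the test's `uploadSession` variable and is not SDK code:

```ts
// Fail fast with a clear message if the API response omits an endpoint,
// instead of relying on non-null assertions as the generated test does.
function requireEndpoint(value: string | undefined, name: string): string {
  if (!value) {
    throw new Error(`Upload session is missing the ${name} endpoint URL`);
  }
  return value;
}

const endpoints = uploadSession.sessionEndpoints ?? {};
const uploadPartUrl = requireEndpoint(endpoints.uploadPart, 'uploadPart');
const abortUrl = requireEndpoint(endpoints.abort, 'abort');
```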
+test('testChunkedUploadConvenienceMethod', async function testChunkedUploadConvenienceMethod(): Promise<void> {
   const fileSize: number = 20 * 1024 * 1024;
   const fileByteStream: ByteStream = generateByteStream(fileSize);
   const fileName: string = getUuid();

From 244ba6baf38583267b6d414772fe5f417107385f Mon Sep 17 00:00:00 2001
From: box-sdk-build
Date: Mon, 12 Aug 2024 05:27:38 -0700
Subject: [PATCH 2/2] feat: parametrise chunked uploads endpoint urls
 (box/box-openapi#444)

---
 .codegen.json           |  2 +-
 src/internal/utils.ts   |  2 +-
 src/networking/fetch.ts | 19 ++++++++++++++++---
 3 files changed, 18 insertions(+), 5 deletions(-)

diff --git a/.codegen.json b/.codegen.json
index 1c85cd9b..7d2dfec7 100644
--- a/.codegen.json
+++ b/.codegen.json
@@ -1 +1 @@
-{ "engineHash": "d1cb68d", "specHash": "9919482", "version": "1.3.0" }
+{ "engineHash": "ab2fc63", "specHash": "9919482", "version": "1.3.0" }
diff --git a/src/internal/utils.ts b/src/internal/utils.ts
index a9c343ef..5b3b0738 100644
--- a/src/internal/utils.ts
+++ b/src/internal/utils.ts
@@ -186,7 +186,7 @@ export function decodeBase64ByteStream(data: string): Readable {
     : eval('require')('stream').Readable.from(Buffer.from(data, 'base64'));
 }
 
-export async function readByteStream(byteStream: Readable) {
+export async function readByteStream(byteStream: Readable): Promise<Buffer> {
   const buffers: Buffer[] = [];
   for await (const data of byteStream) {
     buffers.push(data);
diff --git a/src/networking/fetch.ts b/src/networking/fetch.ts
index dbca0efa..64861ed8 100644
--- a/src/networking/fetch.ts
+++ b/src/networking/fetch.ts
@@ -226,8 +226,15 @@ export async function fetch(
         options
       )
     : options;
-
-  const requestInit = await createRequestInit(fetchOptions);
+  const fileStreamBuffer = fetchOptions.fileStream
+    ? await readByteStream(fetchOptions.fileStream)
+    : void 0;
+  const requestInit = await createRequestInit({
+    ...fetchOptions,
+    fileStream: fileStreamBuffer
+      ? generateByteStreamFromBuffer(fileStreamBuffer)
+      : void 0,
+  });
 
   const { params = {} } = fetchOptions;
   const response = await nodeFetch(
@@ -287,7 +294,13 @@ export async function fetch(
     await fetchOptions.auth.refreshToken(fetchOptions.networkSession);
 
     // retry the request right away
-    return fetch(resource, { ...fetchOptions, numRetries: numRetries + 1 });
+    return fetch(resource, {
+      ...fetchOptions,
+      numRetries: numRetries + 1,
+      fileStream: fileStreamBuffer
+        ? generateByteStreamFromBuffer(fileStreamBuffer)
+        : void 0,
+    });
   }
 
   const isRetryable =