From 4ff7b527cf14a5f0a93cde8f37ba98f5de25aeba Mon Sep 17 00:00:00 2001
From: Jamie Epp <168486127+eppjame@users.noreply.github.com>
Date: Tue, 5 Nov 2024 10:07:15 -0800
Subject: [PATCH] feat: validate uploaded parts before completing upload (#13763)

Co-authored-by: Donny Wu
Co-authored-by: Allan Zheng
---
 .../s3/apis/internal/uploadData/index.test.ts |  34 +-
 .../uploadData/multipartHandlers.test.ts      | 390 +++++++++++++-----
 .../internal/uploadData/putObjectJob.test.ts  |  25 +-
 .../s3/apis/internal/uploadData/byteLength.ts |  12 +-
 .../s3/apis/internal/uploadData/index.ts      |  10 +-
 .../uploadData/multipart/initialUpload.ts     |  45 +-
 .../uploadData/multipart/uploadHandlers.ts    |  30 +-
 .../apis/internal/uploadData/putObjectJob.ts  |   2 +-
 8 files changed, 373 insertions(+), 175 deletions(-)

diff --git a/packages/storage/__tests__/providers/s3/apis/internal/uploadData/index.test.ts b/packages/storage/__tests__/providers/s3/apis/internal/uploadData/index.test.ts
index 16f4ee21457..9b2c94d1252 100644
--- a/packages/storage/__tests__/providers/s3/apis/internal/uploadData/index.test.ts
+++ b/packages/storage/__tests__/providers/s3/apis/internal/uploadData/index.test.ts
@@ -55,12 +55,17 @@ describe('uploadData with key', () => {
     );
   });

-  it('should NOT throw if data size is unknown', async () => {
-    uploadData({
-      key: 'key',
-      data: {} as any,
-    });
-    expect(mockCreateUploadTask).toHaveBeenCalled();
+  it('should throw if data size is unknown', async () => {
+    expect(() =>
+      uploadData({
+        key: 'key',
+        data: {} as any,
+      }),
+    ).toThrow(
+      expect.objectContaining(
+        validationErrorMap[StorageValidationErrorCode.InvalidUploadSource],
+      ),
+    );
   });
 });

@@ -166,12 +171,17 @@ describe('uploadData with path', () => {
     );
   });

-  it('should NOT throw if data size is unknown', async () => {
-    uploadData({
-      path: testPath,
-      data: {} as any,
-    });
-    expect(mockCreateUploadTask).toHaveBeenCalled();
+  it('should throw if data size is unknown', async () => {
+    expect(() =>
+      uploadData({
+        path: testPath,
+        data: {} as any,
+      }),
+    ).toThrow(
+      expect.objectContaining(
+        validationErrorMap[StorageValidationErrorCode.InvalidUploadSource],
+      ),
+    );
   });
 });
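The tests above pin down the headline behavioral change: `uploadData` now rejects a payload whose byte size cannot be determined, where it previously let the upload proceed. A minimal sketch of the validation order, with hypothetical stand-ins for the package's `assertValidationError` helper and size constants (only the ordering of the checks is taken from this patch):

```ts
// Sketch only; the constants and the thrown errors are assumptions that
// approximate the real helpers in uploadData/index.ts further below.
const MAX_OBJECT_SIZE = 5 * 1024 ** 4; // S3's 5 TB object limit
const DEFAULT_PART_SIZE = 5 * 1024 ** 2; // 5 MB single-part threshold

function resolveUploadSize(dataByteLength: number | undefined): number {
  // New behavior: an undeterminable size is now a validation error
  // (InvalidUploadSource) instead of being silently tolerated.
  if (dataByteLength === undefined) {
    throw new Error('InvalidUploadSource');
  }
  if (dataByteLength > MAX_OBJECT_SIZE) {
    throw new Error('ObjectIsTooLarge');
  }

  return dataByteLength;
}

// Anything at or below the part threshold takes the single-part path;
// larger payloads go through the multipart handlers.
const isSinglePart = (size: number) => size <= DEFAULT_PART_SIZE;
```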
diff --git a/packages/storage/__tests__/providers/s3/apis/internal/uploadData/multipartHandlers.test.ts b/packages/storage/__tests__/providers/s3/apis/internal/uploadData/multipartHandlers.test.ts
index 47d2babed9a..9bf25707adb 100644
--- a/packages/storage/__tests__/providers/s3/apis/internal/uploadData/multipartHandlers.test.ts
+++ b/packages/storage/__tests__/providers/s3/apis/internal/uploadData/multipartHandlers.test.ts
@@ -112,7 +112,7 @@ const mockMultipartUploadSuccess = (disableAssertion?: boolean) => {
       totalBytes: body.byteLength,
     });

-    totalSize += byteLength(input.Body)!;
+    totalSize += byteLength(input.Body!)!;

     return {
       ETag: `etag-${input.PartNumber}`,
@@ -238,11 +238,14 @@ describe('getMultipartUploadHandlers with key', () => {
       `should upload a %s type body that splits in 2 parts using ${accessLevelMsg} accessLevel`,
       async (_, twoPartsPayload) => {
         mockMultipartUploadSuccess();
-        const { multipartUploadJob } = getMultipartUploadHandlers({
-          key: defaultKey,
-          data: twoPartsPayload,
-          options: options as StorageOptions,
-        });
+        const { multipartUploadJob } = getMultipartUploadHandlers(
+          {
+            key: defaultKey,
+            data: twoPartsPayload,
+            options: options as StorageOptions,
+          },
+          byteLength(twoPartsPayload)!,
+        );
         const result = await multipartUploadJob();
         await expect(
           mockCreateMultipartUpload,
@@ -282,13 +285,16 @@ describe('getMultipartUploadHandlers with key', () => {
       `should create crc32 for %s type body`,
       async (_, twoPartsPayload, expectedCrc32) => {
         mockMultipartUploadSuccess();
-        const { multipartUploadJob } = getMultipartUploadHandlers({
-          key: defaultKey,
-          data: twoPartsPayload,
-          options: {
-            checksumAlgorithm: CHECKSUM_ALGORITHM_CRC32,
+        const { multipartUploadJob } = getMultipartUploadHandlers(
+          {
+            key: defaultKey,
+            data: twoPartsPayload,
+            options: {
+              checksumAlgorithm: CHECKSUM_ALGORITHM_CRC32,
+            },
           },
-        });
+          byteLength(twoPartsPayload)!,
+        );
         await multipartUploadJob();

         /**
@@ -325,10 +331,13 @@ describe('getMultipartUploadHandlers with key', () => {
           },
         },
       };
-      const { multipartUploadJob } = getMultipartUploadHandlers({
-        key: defaultKey,
-        data: new Uint8Array(8 * MB),
-      });
+      const { multipartUploadJob } = getMultipartUploadHandlers(
+        {
+          key: defaultKey,
+          data: new Uint8Array(8 * MB),
+        },
+        8 * MB,
+      );
       await multipartUploadJob();
       expect(calculateContentCRC32).toHaveBeenCalledTimes(1); // (final crc32 calculation = 1 undefined)
       expect(calculateContentMd5).toHaveBeenCalledTimes(2);
@@ -337,10 +346,13 @@ describe('getMultipartUploadHandlers with key', () => {

     it('should throw if unsupported payload type is provided', async () => {
       mockMultipartUploadSuccess();
-      const { multipartUploadJob } = getMultipartUploadHandlers({
-        key: defaultKey,
-        data: 1 as any,
-      });
+      const { multipartUploadJob } = getMultipartUploadHandlers(
+        {
+          key: defaultKey,
+          data: 1 as any,
+        },
+        1,
+      );
       await expect(multipartUploadJob()).rejects.toThrow(
         expect.objectContaining(
           validationErrorMap[StorageValidationErrorCode.InvalidUploadSource],
@@ -418,10 +430,13 @@ describe('getMultipartUploadHandlers with key', () => {
       mockCreateMultipartUpload.mockReset();
       mockCreateMultipartUpload.mockRejectedValueOnce(new Error('error'));

-      const { multipartUploadJob } = getMultipartUploadHandlers({
-        key: defaultKey,
-        data: new ArrayBuffer(8 * MB),
-      });
+      const { multipartUploadJob } = getMultipartUploadHandlers(
+        {
+          key: defaultKey,
+          data: new ArrayBuffer(8 * MB),
+        },
+        8 * MB,
+      );
       await expect(multipartUploadJob()).rejects.toThrow('error');
     });

@@ -431,10 +446,13 @@ describe('getMultipartUploadHandlers with key', () => {
       mockCompleteMultipartUpload.mockReset();
       mockCompleteMultipartUpload.mockRejectedValueOnce(new Error('error'));

-      const { multipartUploadJob } = getMultipartUploadHandlers({
-        key: defaultKey,
-        data: new ArrayBuffer(8 * MB),
-      });
+      const { multipartUploadJob } = getMultipartUploadHandlers(
+        {
+          key: defaultKey,
+          data: new ArrayBuffer(8 * MB),
+        },
+        8 * MB,
+      );
       await expect(multipartUploadJob()).rejects.toThrow('error');
     });

@@ -449,10 +467,13 @@ describe('getMultipartUploadHandlers with key', () => {
       });
       mockUploadPart.mockRejectedValueOnce(new Error('error'));

-      const { multipartUploadJob } = getMultipartUploadHandlers({
-        key: defaultKey,
-        data: new ArrayBuffer(8 * MB),
-      });
+      const { multipartUploadJob } = getMultipartUploadHandlers(
+        {
+          key: defaultKey,
+          data: new ArrayBuffer(8 * MB),
+        },
+        8 * MB,
+      );
       await expect(multipartUploadJob()).rejects.toThrow('error');
       expect(mockUploadPart).toHaveBeenCalledTimes(2);
       expect(mockCompleteMultipartUpload).not.toHaveBeenCalled();
     });
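The mechanical change repeated through these hunks is the new second parameter: the caller resolves the payload size once and passes it into `getMultipartUploadHandlers`, which is what later makes the completed-part validation possible. A hedged usage sketch — the declarations below only approximate the real signatures:

```ts
// Approximate shapes for illustration; the real types live in the package.
declare function byteLength(input?: unknown): number | undefined;
declare function getMultipartUploadHandlers(
  input: { key: string; data: ArrayBuffer | Blob | string },
  size: number, // now required rather than optional
): { multipartUploadJob: () => Promise<unknown> };

async function run() {
  const data = new ArrayBuffer(8 * 1024 * 1024);

  // uploadData has already asserted that byteLength(data) is defined,
  // so the non-null assertion here is safe.
  const { multipartUploadJob } = getMultipartUploadHandlers(
    { key: 'photo.png', data },
    byteLength(data)!,
  );
  await multipartUploadJob();
}
```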
@@ -464,13 +485,16 @@ describe('getMultipartUploadHandlers with key', () => {
       const mockBucket = 'bucket-1';
       const mockRegion = 'region-1';
       mockMultipartUploadSuccess();
-      const { multipartUploadJob } = getMultipartUploadHandlers({
-        key: 'key',
-        data: mockData,
-        options: {
-          bucket: { bucketName: mockBucket, region: mockRegion },
+      const { multipartUploadJob } = getMultipartUploadHandlers(
+        {
+          key: 'key',
+          data: mockData,
+          options: {
+            bucket: { bucketName: mockBucket, region: mockRegion },
+          },
         },
-      });
+        byteLength(mockData)!,
+      );
       await multipartUploadJob();
       await expect(
         mockCreateMultipartUpload,
@@ -490,13 +514,16 @@ describe('getMultipartUploadHandlers with key', () => {

     it('should override bucket in putObject call when bucket as string', async () => {
       mockMultipartUploadSuccess();
-      const { multipartUploadJob } = getMultipartUploadHandlers({
-        key: 'key',
-        data: mockData,
-        options: {
-          bucket: 'default-bucket',
+      const { multipartUploadJob } = getMultipartUploadHandlers(
+        {
+          key: 'key',
+          data: mockData,
+          options: {
+            bucket: 'default-bucket',
+          },
         },
-      });
+        byteLength(mockData)!,
+      );
       await multipartUploadJob();
       await expect(
         mockCreateMultipartUpload,
@@ -514,6 +541,56 @@ describe('getMultipartUploadHandlers with key', () => {
       );
     });
   });
+
+  describe('cache validation', () => {
+    it.each([
+      {
+        name: 'wrong part count',
+        parts: [{ PartNumber: 1 }, { PartNumber: 2 }, { PartNumber: 3 }],
+      },
+      {
+        name: 'wrong part numbers',
+        parts: [{ PartNumber: 1 }, { PartNumber: 1 }],
+      },
+    ])('should throw with $name', async ({ parts }) => {
+      mockMultipartUploadSuccess();
+
+      const mockDefaultStorage = defaultStorage as jest.Mocked<
+        typeof defaultStorage
+      >;
+      mockDefaultStorage.getItem.mockResolvedValue(
+        JSON.stringify({
+          [defaultCacheKey]: {
+            uploadId: 'uploadId',
+            bucket,
+            key: defaultKey,
+            finalCrc32: 'mock-crc32',
+          },
+        }),
+      );
+      mockListParts.mockResolvedValue({
+        Parts: parts,
+        $metadata: {},
+      });
+
+      const onProgress = jest.fn();
+      const { multipartUploadJob } = getMultipartUploadHandlers(
+        {
+          key: defaultKey,
+          data: new ArrayBuffer(8 * MB),
+          options: {
+            onProgress,
+            resumableUploadsCache: mockDefaultStorage,
+          },
+        },
+        8 * MB,
+      );
+      await expect(multipartUploadJob()).rejects.toThrow({
+        name: 'Unknown',
+        message: 'An unknown error has occurred.',
+      });
+    });
+  });
 });

 describe('upload caching', () => {
@@ -735,10 +812,13 @@ describe('getMultipartUploadHandlers with key', () => {
   describe('cancel()', () => {
     it('should abort in-flight uploadPart requests and throw if upload is canceled', async () => {
-      const { multipartUploadJob, onCancel } = getMultipartUploadHandlers({
-        key: defaultKey,
-        data: new ArrayBuffer(8 * MB),
-      });
+      const { multipartUploadJob, onCancel } = getMultipartUploadHandlers(
+        {
+          key: defaultKey,
+          data: new ArrayBuffer(8 * MB),
+        },
+        8 * MB,
+      );
       let partCount = 0;
       mockMultipartUploadCancellation(() => {
         partCount++;
@@ -772,10 +852,13 @@ describe('getMultipartUploadHandlers with key', () => {
       });

       const { multipartUploadJob, onPause, onResume } =
-        getMultipartUploadHandlers({
-          key: defaultKey,
-          data: new ArrayBuffer(8 * MB),
-        });
+        getMultipartUploadHandlers(
+          {
+            key: defaultKey,
+            data: new ArrayBuffer(8 * MB),
+          },
+          8 * MB,
+        );
       let partCount = 0;
       mockMultipartUploadCancellation(() => {
         partCount++;
@@ -934,10 +1017,13 @@ describe('getMultipartUploadHandlers with path', () => {
       `should upload a %s type body that splits into 2 parts to path ${expectedKey}`,
       async (_, twoPartsPayload) => {
         mockMultipartUploadSuccess();
-        const { multipartUploadJob } = getMultipartUploadHandlers({
-          path: inputPath,
-          data: twoPartsPayload,
-        });
+        const { multipartUploadJob } = getMultipartUploadHandlers(
+          {
+            path: inputPath,
+            data: twoPartsPayload,
+          },
+          byteLength(twoPartsPayload)!,
+        );
         const result = await multipartUploadJob();
         await expect(
          mockCreateMultipartUpload,
@@ -977,13 +1063,16 @@ describe('getMultipartUploadHandlers with path', () => {
       `should create crc32 for %s type body`,
       async (_, twoPartsPayload, expectedCrc32) => {
         mockMultipartUploadSuccess();
-        const { multipartUploadJob } = getMultipartUploadHandlers({
-          path: testPath,
-          data: twoPartsPayload,
-          options: {
-            checksumAlgorithm: CHECKSUM_ALGORITHM_CRC32,
+        const { multipartUploadJob } = getMultipartUploadHandlers(
+          {
+            path: testPath,
+            data: twoPartsPayload,
+            options: {
+              checksumAlgorithm: CHECKSUM_ALGORITHM_CRC32,
+            },
           },
-        });
+          byteLength(twoPartsPayload)!,
+        );
         await multipartUploadJob();

         /**
@@ -1020,10 +1109,13 @@ describe('getMultipartUploadHandlers with path', () => {
           },
         },
       };
-      const { multipartUploadJob } = getMultipartUploadHandlers({
-        path: testPath,
-        data: new Uint8Array(8 * MB),
-      });
+      const { multipartUploadJob } = getMultipartUploadHandlers(
+        {
+          path: testPath,
+          data: new Uint8Array(8 * MB),
+        },
+        8 * MB,
+      );
       await multipartUploadJob();
       expect(calculateContentCRC32).toHaveBeenCalledTimes(1); // (final crc32 calculation = 1 undefined)
       expect(calculateContentMd5).toHaveBeenCalledTimes(2);
@@ -1032,10 +1124,13 @@ describe('getMultipartUploadHandlers with path', () => {

     it('should throw if unsupported payload type is provided', async () => {
       mockMultipartUploadSuccess();
-      const { multipartUploadJob } = getMultipartUploadHandlers({
-        path: testPath,
-        data: 1 as any,
-      });
+      const { multipartUploadJob } = getMultipartUploadHandlers(
+        {
+          path: testPath,
+          data: 1 as any,
+        },
+        1,
+      );
       await expect(multipartUploadJob()).rejects.toThrow(
         expect.objectContaining(
           validationErrorMap[StorageValidationErrorCode.InvalidUploadSource],
@@ -1113,10 +1208,13 @@ describe('getMultipartUploadHandlers with path', () => {
       mockCreateMultipartUpload.mockReset();
       mockCreateMultipartUpload.mockRejectedValueOnce(new Error('error'));

-      const { multipartUploadJob } = getMultipartUploadHandlers({
-        path: testPath,
-        data: new ArrayBuffer(8 * MB),
-      });
+      const { multipartUploadJob } = getMultipartUploadHandlers(
+        {
+          path: testPath,
+          data: new ArrayBuffer(8 * MB),
+        },
+        8 * MB,
+      );
       await expect(multipartUploadJob()).rejects.toThrow('error');
     });

@@ -1126,10 +1224,13 @@ describe('getMultipartUploadHandlers with path', () => {
       mockCompleteMultipartUpload.mockReset();
       mockCompleteMultipartUpload.mockRejectedValueOnce(new Error('error'));

-      const { multipartUploadJob } = getMultipartUploadHandlers({
-        path: testPath,
-        data: new ArrayBuffer(8 * MB),
-      });
+      const { multipartUploadJob } = getMultipartUploadHandlers(
+        {
+          path: testPath,
+          data: new ArrayBuffer(8 * MB),
+        },
+        8 * MB,
+      );
       await expect(multipartUploadJob()).rejects.toThrow('error');
     });

@@ -1144,10 +1245,13 @@ describe('getMultipartUploadHandlers with path', () => {
       });
       mockUploadPart.mockRejectedValueOnce(new Error('error'));

-      const { multipartUploadJob } = getMultipartUploadHandlers({
-        path: testPath,
-        data: new ArrayBuffer(8 * MB),
-      });
+      const { multipartUploadJob } = getMultipartUploadHandlers(
+        {
+          path: testPath,
+          data: new ArrayBuffer(8 * MB),
+        },
+        8 * MB,
+      );
       await expect(multipartUploadJob()).rejects.toThrow('error');
       expect(mockUploadPart).toHaveBeenCalledTimes(2);
       expect(mockCompleteMultipartUpload).not.toHaveBeenCalled();
@@ -1158,13 +1262,19 @@ describe('getMultipartUploadHandlers with path', () => {
       expect.assertions(3);
       mockMultipartUploadSuccess();

-      const { multipartUploadJob } = getMultipartUploadHandlers({
-        path: testPath,
-        data: new ArrayBuffer(8 * MB),
-        options: { preventOverwrite: true },
-      });
+      const { multipartUploadJob } = getMultipartUploadHandlers(
+        {
+          path: testPath,
+          data: new ArrayBuffer(8 * MB),
+          options: { preventOverwrite: true },
+        },
+        8 * MB,
+      );
       await multipartUploadJob();
-      expect(mockCompleteMultipartUpload).toBeLastCalledWithConfigAndInput(
+
+      await expect(
+        mockCompleteMultipartUpload,
+      ).toBeLastCalledWithConfigAndInput(
         expect.objectContaining({
           credentials,
           region,
@@ -1182,13 +1292,16 @@ describe('getMultipartUploadHandlers with path', () => {
       const mockBucket = 'bucket-1';
       const mockRegion = 'region-1';
       mockMultipartUploadSuccess();
-      const { multipartUploadJob } = getMultipartUploadHandlers({
-        path: 'path/',
-        data: mockData,
-        options: {
-          bucket: { bucketName: mockBucket, region: mockRegion },
+      const { multipartUploadJob } = getMultipartUploadHandlers(
+        {
+          path: 'path/',
+          data: mockData,
+          options: {
+            bucket: { bucketName: mockBucket, region: mockRegion },
+          },
         },
-      });
+        byteLength(mockData)!,
+      );
       await multipartUploadJob();
       await expect(
         mockCreateMultipartUpload,
@@ -1210,13 +1323,16 @@ describe('getMultipartUploadHandlers with path', () => {
     });
     it('should override bucket in putObject call when bucket as string', async () => {
       mockMultipartUploadSuccess();
-      const { multipartUploadJob } = getMultipartUploadHandlers({
-        path: 'path/',
-        data: mockData,
-        options: {
-          bucket: 'default-bucket',
+      const { multipartUploadJob } = getMultipartUploadHandlers(
+        {
+          path: 'path/',
+          data: mockData,
+          options: {
+            bucket: 'default-bucket',
+          },
         },
-      });
+        byteLength(mockData)!,
+      );
       await multipartUploadJob();
       await expect(
         mockCreateMultipartUpload,
@@ -1237,6 +1353,56 @@ describe('getMultipartUploadHandlers with path', () => {
       expect(mockCompleteMultipartUpload).toHaveBeenCalledTimes(1);
     });
   });
+
+  describe('cache validation', () => {
+    it.each([
+      {
+        name: 'wrong part count',
+        parts: [{ PartNumber: 1 }, { PartNumber: 2 }, { PartNumber: 3 }],
+      },
+      {
+        name: 'wrong part numbers',
+        parts: [{ PartNumber: 1 }, { PartNumber: 1 }],
+      },
+    ])('should throw with $name', async ({ parts }) => {
+      mockMultipartUploadSuccess();
+
+      const mockDefaultStorage = defaultStorage as jest.Mocked<
+        typeof defaultStorage
+      >;
+      mockDefaultStorage.getItem.mockResolvedValue(
+        JSON.stringify({
+          [testPathCacheKey]: {
+            uploadId: 'uploadId',
+            bucket,
+            key: defaultKey,
+            finalCrc32: 'mock-crc32',
+          },
+        }),
+      );
+      mockListParts.mockResolvedValue({
+        Parts: parts,
+        $metadata: {},
+      });
+
+      const onProgress = jest.fn();
+      const { multipartUploadJob } = getMultipartUploadHandlers(
+        {
+          path: testPath,
+          data: new ArrayBuffer(8 * MB),
+          options: {
+            onProgress,
+            resumableUploadsCache: mockDefaultStorage,
+          },
+        },
+        8 * MB,
+      );
+      await expect(multipartUploadJob()).rejects.toThrow({
+        name: 'Unknown',
+        message: 'An unknown error has occurred.',
+      });
+    });
+  });
 });

 describe('upload caching', () => {
@@ -1253,7 +1419,7 @@ describe('getMultipartUploadHandlers with path', () => {
       const size = 8 * MB;
       const { multipartUploadJob } = getMultipartUploadHandlers(
         {
-          key: defaultKey,
+          path: testPath,
           data: new ArrayBuffer(size),
         },
         size,
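The 'cache validation' suites (in both the key and path variants) simulate resuming from a stale cache entry: `listParts` reports parts that cannot describe the payload, and the job must fail rather than complete a corrupt object. A standalone illustration of the mismatch these tests construct, assuming a 5 MB part size:

```ts
// Stand-in for the package's Part type.
interface Part {
  PartNumber?: number;
}

const MB = 1024 * 1024;

// An 8 MB payload at a 5 MB part size must complete with exactly
// two parts, numbered 1 and 2.
const expectedPartCount = Math.ceil((8 * MB) / (5 * MB)); // 2

// The two failure fixtures used by the tests above:
const wrongPartCount: Part[] = [
  { PartNumber: 1 },
  { PartNumber: 2 },
  { PartNumber: 3 },
];
const wrongPartNumbers: Part[] = [{ PartNumber: 1 }, { PartNumber: 1 }];

const isPlausible = (parts: Part[]) =>
  parts.length === expectedPartCount &&
  parts.every((part, index) => part.PartNumber === index + 1);

console.log(isPlausible(wrongPartCount)); // false -> job rejects
console.log(isPlausible(wrongPartNumbers)); // false -> job rejects
```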
@@ -1457,10 +1623,13 @@ describe('getMultipartUploadHandlers with path', () => {

   describe('cancel()', () => {
     it('should abort in-flight uploadPart requests and throw if upload is canceled', async () => {
-      const { multipartUploadJob, onCancel } = getMultipartUploadHandlers({
-        path: testPath,
-        data: new ArrayBuffer(8 * MB),
-      });
+      const { multipartUploadJob, onCancel } = getMultipartUploadHandlers(
+        {
+          path: testPath,
+          data: new ArrayBuffer(8 * MB),
+        },
+        8 * MB,
+      );
       let partCount = 0;
       mockMultipartUploadCancellation(() => {
         partCount++;
@@ -1493,10 +1662,13 @@ describe('getMultipartUploadHandlers with path', () => {
       });

       const { multipartUploadJob, onPause, onResume } =
-        getMultipartUploadHandlers({
-          path: testPath,
-          data: new ArrayBuffer(8 * MB),
-        });
+        getMultipartUploadHandlers(
+          {
+            path: testPath,
+            data: new ArrayBuffer(8 * MB),
+          },
+          8 * MB,
+        );
       let partCount = 0;
       mockMultipartUploadCancellation(() => {
         partCount++;
diff --git a/packages/storage/__tests__/providers/s3/apis/internal/uploadData/putObjectJob.test.ts b/packages/storage/__tests__/providers/s3/apis/internal/uploadData/putObjectJob.test.ts
index 2b65ba1c3ad..2665fdef227 100644
--- a/packages/storage/__tests__/providers/s3/apis/internal/uploadData/putObjectJob.test.ts
+++ b/packages/storage/__tests__/providers/s3/apis/internal/uploadData/putObjectJob.test.ts
@@ -43,6 +43,8 @@ const mockFetchAuthSession = jest.mocked(Amplify.Auth.fetchAuthSession);
 const mockPutObject = jest.mocked(putObject);
 const bucket = 'bucket';
 const region = 'region';
+const data = 'data';
+const dataLength = data.length;

 mockFetchAuthSession.mockResolvedValue({
   credentials,
@@ -78,7 +80,6 @@ describe('putObjectJob with key', () => {
     async ({ checksumAlgorithm }) => {
       const abortController = new AbortController();
       const inputKey = 'key';
-      const data = 'data';
       const mockContentType = 'contentType';
       const contentDisposition = 'contentDisposition';
       const contentEncoding = 'contentEncoding';
@@ -101,6 +102,7 @@ describe('putObjectJob with key', () => {
           },
         },
         abortController.signal,
+        dataLength,
       );
       const result = await job();
       expect(result).toEqual({
@@ -109,7 +111,7 @@ describe('putObjectJob with key', () => {
         versionId: 'versionId',
         contentType: 'contentType',
         metadata: { key: 'value' },
-        size: undefined,
+        size: dataLength,
       });
       expect(mockPutObject).toHaveBeenCalledTimes(1);
       await expect(mockPutObject).toBeLastCalledWithConfigAndInput(
@@ -158,6 +160,7 @@ describe('putObjectJob with key', () => {
         data: 'data',
       },
       new AbortController().signal,
+      dataLength,
     );
     await job();
     expect(calculateContentMd5).toHaveBeenCalledWith('data');
@@ -166,7 +169,6 @@ describe('putObjectJob with key', () => {
   describe('bucket passed in options', () => {
     it('should override bucket in putObject call when bucket as object', async () => {
       const abortController = new AbortController();
-      const data = 'data';
       const bucketName = 'bucket-1';
       const mockRegion = 'region-1';

@@ -182,6 +184,7 @@ describe('putObjectJob with key', () => {
           },
         },
         new AbortController().signal,
+        dataLength,
       );
       await job();

@@ -203,7 +206,6 @@ describe('putObjectJob with key', () => {

     it('should override bucket in putObject call when bucket as string', async () => {
       const abortController = new AbortController();
-      const data = 'data';
       const job = putObjectJob(
         {
           key: 'key',
@@ -213,6 +215,7 @@ describe('putObjectJob with key', () => {
           },
         },
         new AbortController().signal,
+        dataLength,
       );
       await job();

@@ -274,7 +277,6 @@ describe('putObjectJob with path', () => {
     'should supply the correct parameters to putObject API handler when path is $path and checksumAlgorithm is $checksumAlgorithm',
     async ({ path: inputPath, expectedKey, checksumAlgorithm }) => {
       const abortController = new AbortController();
-      const data = 'data';
       const mockContentType = 'contentType';
       const contentDisposition = 'contentDisposition';
       const contentEncoding = 'contentEncoding';
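For the single-part path the same idea applies: `putObjectJob` now receives the validated byte length as a third argument and reports it back as `size` on the result, where it previously surfaced `undefined`. A usage sketch against an assumed minimal signature:

```ts
// Assumed minimal signature, consistent with the test changes around it.
declare function putObjectJob(
  input: { key: string; data: string | Blob },
  abortSignal: AbortSignal,
  totalLength: number, // previously optional, now required
): () => Promise<{ key: string; size?: number }>;

async function run() {
  const data = 'data';
  const job = putObjectJob(
    { key: 'key', data },
    new AbortController().signal,
    data.length, // 'data' is ASCII, so length equals UTF-8 byte length
  );
  const result = await job();
  // result.size is now the validated byte length instead of undefined.
  console.log(result.size); // 4
}
```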
@@ -297,6 +299,7 @@ describe('putObjectJob with path', () => {
         },
       },
       abortController.signal,
+      dataLength,
     );
     const result = await job();
     expect(result).toEqual({
@@ -305,7 +308,7 @@ describe('putObjectJob with path', () => {
       versionId: 'versionId',
       contentType: 'contentType',
       metadata: { key: 'value' },
-      size: undefined,
+      size: dataLength,
     });
     expect(mockPutObject).toHaveBeenCalledTimes(1);
     await expect(mockPutObject).toBeLastCalledWithConfigAndInput(
@@ -351,9 +354,10 @@ describe('putObjectJob with path', () => {
     const job = putObjectJob(
       {
         path: testPath,
-        data: 'data',
+        data,
       },
       new AbortController().signal,
+      dataLength,
     );
     await job();
     expect(calculateContentMd5).toHaveBeenCalledWith('data');
@@ -364,10 +368,11 @@ describe('putObjectJob with path', () => {
     const job = putObjectJob(
       {
         path: testPath,
-        data: 'data',
+        data,
         options: { preventOverwrite: true },
       },
       new AbortController().signal,
+      dataLength,
     );
     await job();

@@ -383,7 +388,6 @@ describe('putObjectJob with path', () => {
   describe('bucket passed in options', () => {
     it('should override bucket in putObject call when bucket as object', async () => {
       const abortController = new AbortController();
-      const data = 'data';
       const bucketName = 'bucket-1';
       const mockRegion = 'region-1';

@@ -399,6 +403,7 @@ describe('putObjectJob with path', () => {
           },
         },
         new AbortController().signal,
+        dataLength,
       );
       await job();

@@ -420,7 +425,6 @@ describe('putObjectJob with path', () => {

     it('should override bucket in putObject call when bucket as string', async () => {
       const abortController = new AbortController();
-      const data = 'data';
       const job = putObjectJob(
         {
           path: 'path/',
@@ -430,6 +434,7 @@ describe('putObjectJob with path', () => {
           },
         },
         new AbortController().signal,
+        dataLength,
       );
       await job();

diff --git a/packages/storage/src/providers/s3/apis/internal/uploadData/byteLength.ts b/packages/storage/src/providers/s3/apis/internal/uploadData/byteLength.ts
index 9b6ea87e42f..4caeb3c1973 100644
--- a/packages/storage/src/providers/s3/apis/internal/uploadData/byteLength.ts
+++ b/packages/storage/src/providers/s3/apis/internal/uploadData/byteLength.ts
@@ -8,16 +8,9 @@ export const byteLength = (input?: any): number | undefined => {
   if (input === null || input === undefined) return 0;

   if (typeof input === 'string') {
-    let len = input.length;
+    const blob = new Blob([input]);

-    for (let i = len - 1; i >= 0; i--) {
-      const code = input.charCodeAt(i);
-      if (code > 0x7f && code <= 0x7ff) len++;
-      else if (code > 0x7ff && code <= 0xffff) len += 2;
-      if (code >= 0xdc00 && code <= 0xdfff) i--; // trail surrogate
-    }
-
-    return len;
+    return blob.size;
   } else if (typeof input.byteLength === 'number') {
     // handles Uint8Array, ArrayBuffer, Buffer, and ArrayBufferView
     return input.byteLength;
@@ -26,6 +19,5 @@ export const byteLength = (input?: any): number | undefined => {
     return input.size;
   }

-  // TODO: support Node.js stream size when Node.js runtime is supported out-of-box.
   return undefined;
 };
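The rewritten string branch of `byteLength` delegates UTF-8 byte counting to `Blob`, which encodes strings as UTF-8, instead of the hand-rolled `charCodeAt` surrogate-pair arithmetic it replaces. A quick demonstration (`Blob` is available in browsers and in Node.js 18+):

```ts
// new Blob([s]).size is the UTF-8 byte length of s.
const samples: [string, number][] = [
  ['abc', 3], // ASCII: 1 byte per character
  ['é', 2], // U+00E9: 2 bytes in UTF-8
  ['😀', 4], // surrogate pair: 4 bytes in UTF-8
];

for (const [text, expected] of samples) {
  console.log(text, new Blob([text]).size === expected); // true
}
```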
diff --git a/packages/storage/src/providers/s3/apis/internal/uploadData/index.ts b/packages/storage/src/providers/s3/apis/internal/uploadData/index.ts
index a31aff1c9c6..fca640f4b44 100644
--- a/packages/storage/src/providers/s3/apis/internal/uploadData/index.ts
+++ b/packages/storage/src/providers/s3/apis/internal/uploadData/index.ts
@@ -19,12 +19,18 @@ export const uploadData = (
   const { data } = input;

   const dataByteLength = byteLength(data);
+  // Use the InvalidUploadSource error code here: if byteLength cannot be
+  // determined, the input data cannot be any of the permitted types
+  // (Blob, string, ArrayBuffer(View)).
   assertValidationError(
-    dataByteLength === undefined || dataByteLength <= MAX_OBJECT_SIZE,
+    dataByteLength !== undefined,
+    StorageValidationErrorCode.InvalidUploadSource,
+  );
+  assertValidationError(
+    dataByteLength <= MAX_OBJECT_SIZE,
     StorageValidationErrorCode.ObjectIsTooLarge,
   );

-  if (dataByteLength !== undefined && dataByteLength <= DEFAULT_PART_SIZE) {
+  if (dataByteLength <= DEFAULT_PART_SIZE) {
     // Single part upload
     const abortController = new AbortController();

diff --git a/packages/storage/src/providers/s3/apis/internal/uploadData/multipart/initialUpload.ts b/packages/storage/src/providers/s3/apis/internal/uploadData/multipart/initialUpload.ts
index 95b13f2c6e8..2437cf0d4b7 100644
--- a/packages/storage/src/providers/s3/apis/internal/uploadData/multipart/initialUpload.ts
+++ b/packages/storage/src/providers/s3/apis/internal/uploadData/multipart/initialUpload.ts
@@ -28,6 +28,7 @@ interface LoadOrCreateMultipartUploadOptions {
   s3Config: ResolvedS3Config;
   data: StorageUploadDataPayload;
   bucket: string;
+  size: number;
   accessLevel?: StorageAccessLevel;
   keyPrefix?: string;
   key: string;
@@ -35,7 +36,6 @@ interface LoadOrCreateMultipartUploadOptions {
   contentDisposition?: string | ContentDisposition;
   contentEncoding?: string;
   metadata?: Record<string, string>;
-  size?: number;
   abortSignal?: AbortSignal;
   checksumAlgorithm?: UploadDataChecksumAlgorithm;
   optionsHash: string;
@@ -84,9 +84,9 @@ export const loadOrCreateMultipartUpload = async ({
     }
   | undefined;

-  if (size === undefined || !resumableUploadsCache) {
+  if (!resumableUploadsCache) {
     logger.debug(
-      'uploaded data size or cache instance cannot be determined, skipping cache.',
+      'upload cache instance cannot be determined, skipping cache.',
     );
     cachedUpload = undefined;
   } else {
@@ -141,33 +141,24 @@ export const loadOrCreateMultipartUpload = async ({
       },
     );

-    if (size === undefined || !resumableUploadsCache) {
-      logger.debug(
-        'uploaded data size or cache instance cannot be determined, skipping cache.',
-      );
-
-      return {
+    if (resumableUploadsCache) {
+      const uploadCacheKey = getUploadsCacheKey({
+        size,
+        contentType,
+        file: data instanceof File ? data : undefined,
+        bucket,
+        accessLevel,
+        key,
+        optionsHash,
+      });
+      await cacheMultipartUpload(resumableUploadsCache, uploadCacheKey, {
         uploadId: UploadId!,
-        cachedParts: [],
+        bucket,
+        key,
         finalCrc32,
-      };
+        fileName: data instanceof File ? data.name : '',
+      });
     }

-    const uploadCacheKey = getUploadsCacheKey({
-      size,
-      contentType,
-      file: data instanceof File ? data : undefined,
-      bucket,
-      accessLevel,
-      key,
-      optionsHash,
-    });
-    await cacheMultipartUpload(resumableUploadsCache, uploadCacheKey, {
-      uploadId: UploadId!,
-      bucket,
-      key,
-      finalCrc32,
-      fileName: data instanceof File ? data.name : '',
-    });
-
     return {
       uploadId: UploadId!,
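With the size now guaranteed by the caller, the only remaining reason to skip caching is a missing cache backend, which is what the simplified branch above expresses. For reference, a sketch of the record shape this code caches and later revalidates against `ListParts` — the field names come from the diff, while the storage interface is an assumption modeled on how `defaultStorage` is mocked in the tests:

```ts
// Record shape as written by cacheMultipartUpload in the hunk above.
interface CachedMultipartUpload {
  uploadId: string;
  bucket: string;
  key: string;
  finalCrc32?: string;
  fileName: string; // '' when the payload is not a File
}

// Assumed minimal cache backend.
interface UploadsCache {
  getItem(key: string): Promise<string | null>;
  setItem(key: string, value: string): Promise<void>;
}

async function cacheUpload(
  cache: UploadsCache | undefined,
  cacheKey: string,
  record: CachedMultipartUpload,
) {
  if (!cache) return; // upload still succeeds; it just is not resumable

  await cache.setItem(cacheKey, JSON.stringify(record));
}
```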
diff --git a/packages/storage/src/providers/s3/apis/internal/uploadData/multipart/uploadHandlers.ts b/packages/storage/src/providers/s3/apis/internal/uploadData/multipart/uploadHandlers.ts
index bcb23e037d6..c9f52314f8a 100644
--- a/packages/storage/src/providers/s3/apis/internal/uploadData/multipart/uploadHandlers.ts
+++ b/packages/storage/src/providers/s3/apis/internal/uploadData/multipart/uploadHandlers.ts
@@ -37,12 +37,14 @@ import { getStorageUserAgentValue } from '../../../../utils/userAgent';
 import { logger } from '../../../../../../utils';
 import { calculateContentCRC32 } from '../../../../utils/crc32';
 import { StorageOperationOptionsInput } from '../../../../../../types/inputs';
+import { IntegrityError } from '../../../../../../errors/IntegrityError';

 import { uploadPartExecutor } from './uploadPartExecutor';
 import { getUploadsCacheKey, removeCachedUpload } from './uploadCache';
 import { getConcurrentUploadsProgressTracker } from './progressTracker';
 import { loadOrCreateMultipartUpload } from './initialUpload';
 import { getDataChunker } from './getDataChunker';
+import { calculatePartSize } from './calculatePartSize';

 type WithResumableCacheConfig<Input extends StorageOperationOptionsInput<any>> =
   Input & {
@@ -80,7 +82,7 @@ export type MultipartUploadDataInput = WithResumableCacheConfig<
  */
 export const getMultipartUploadHandlers = (
   uploadDataInput: MultipartUploadDataInput,
-  size?: number,
+  size: number,
 ) => {
   let resolveCallback:
     | ((value: ItemWithKey | ItemWithPath) => void)
@@ -236,6 +238,8 @@ export const getMultipartUploadHandlers = (

     await Promise.all(concurrentUploadPartExecutors);

+    validateCompletedParts(inProgressUpload.completedParts, size);
+
     const { ETag: eTag } = await completeMultipartUpload(
       {
         ...resolvedS3Config,
@@ -249,9 +253,7 @@ export const getMultipartUploadHandlers = (
         ChecksumCRC32: inProgressUpload.finalCrc32,
         IfNoneMatch: preventOverwrite ? '*' : undefined,
         MultipartUpload: {
-          Parts: inProgressUpload.completedParts.sort(
-            (partA, partB) => partA.PartNumber! - partB.PartNumber!,
-          ),
+          Parts: sortUploadParts(inProgressUpload.completedParts),
         },
         ExpectedBucketOwner: expectedBucketOwner,
       },
@@ -355,3 +357,23 @@ const resolveAccessLevel = (accessLevel?: StorageAccessLevel) =>
   accessLevel ??
   Amplify.libraryOptions.Storage?.S3?.defaultAccessLevel ??
   DEFAULT_ACCESS_LEVEL;
+
+const validateCompletedParts = (completedParts: Part[], size: number) => {
+  const partsExpected = Math.ceil(size / calculatePartSize(size));
+  const validPartCount = completedParts.length === partsExpected;
+
+  const sorted = sortUploadParts(completedParts);
+  const validPartNumbers = sorted.every(
+    (part, index) => part.PartNumber === index + 1,
+  );
+
+  if (!validPartCount || !validPartNumbers) {
+    throw new IntegrityError();
+  }
+};
+
+const sortUploadParts = (parts: Part[]) => {
+  return [...parts].sort(
+    (partA, partB) => partA.PartNumber! - partB.PartNumber!,
+  );
+};
diff --git a/packages/storage/src/providers/s3/apis/internal/uploadData/putObjectJob.ts b/packages/storage/src/providers/s3/apis/internal/uploadData/putObjectJob.ts
index f5490647e72..340cd5d610a 100644
--- a/packages/storage/src/providers/s3/apis/internal/uploadData/putObjectJob.ts
+++ b/packages/storage/src/providers/s3/apis/internal/uploadData/putObjectJob.ts
@@ -43,7 +43,7 @@ export const putObjectJob =
   (
     uploadDataInput: SinglePartUploadDataInput,
     abortSignal: AbortSignal,
-    totalLength?: number,
+    totalLength: number,
   ) =>
   async (): Promise<ItemWithKey | ItemWithPath> => {
     const { options: uploadDataOptions, data } = uploadDataInput;
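Taken together, `validateCompletedParts` is the core of this patch: before `CompleteMultipartUpload` is called, the accumulated parts must be exactly 1..N for the N implied by the payload size. A self-contained rendering of that check with an assumed `calculatePartSize` (the real helper lives in `./calculatePartSize`; the doubling heuristic below is an assumption that keeps the part count within S3's 10,000-part limit):

```ts
interface Part {
  PartNumber?: number;
}

const MB = 1024 * 1024;
const DEFAULT_PART_SIZE = 5 * MB;
const MAX_PARTS_COUNT = 10_000;

// Assumed: grow the part size until the payload fits in 10,000 parts.
const calculatePartSize = (totalSize: number): number => {
  let partSize = DEFAULT_PART_SIZE;
  while (Math.ceil(totalSize / partSize) > MAX_PARTS_COUNT) {
    partSize *= 2;
  }

  return partSize;
};

const sortUploadParts = (parts: Part[]) =>
  [...parts].sort((a, b) => a.PartNumber! - b.PartNumber!);

// Throws when the accumulated parts cannot describe the payload:
// wrong count, or part numbers that are not exactly 1..N after sorting.
const validateCompletedParts = (completedParts: Part[], size: number) => {
  const partsExpected = Math.ceil(size / calculatePartSize(size));
  const validPartCount = completedParts.length === partsExpected;
  const validPartNumbers = sortUploadParts(completedParts).every(
    (part, index) => part.PartNumber === index + 1,
  );
  if (!validPartCount || !validPartNumbers) {
    throw new Error('IntegrityError: parts do not match the payload size');
  }
};

// 8 MB -> 5 MB parts -> exactly 2 parts expected, numbered 1 and 2.
validateCompletedParts([{ PartNumber: 2 }, { PartNumber: 1 }], 8 * MB); // ok
// validateCompletedParts([{ PartNumber: 1 }, { PartNumber: 1 }], 8 * MB); // throws
```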