diff --git a/packages/storage/__tests__/providers/s3/apis/uploadData/index.test.ts b/packages/storage/__tests__/providers/s3/apis/uploadData/index.test.ts
index 43775719dd3..938ca8863ee 100644
--- a/packages/storage/__tests__/providers/s3/apis/uploadData/index.test.ts
+++ b/packages/storage/__tests__/providers/s3/apis/uploadData/index.test.ts
@@ -172,12 +172,9 @@ describe('uploadData with path', () => {
 			uploadData(testInput);
 
 			expect(mockPutObjectJob).toHaveBeenCalledWith(
-				expect.objectContaining({
-					input: testInput,
-					totalLength: expect.any(Number),
-					abortSignal: expect.any(AbortSignal),
-					config: expect.any(Object),
-				}),
+				testInput,
+				expect.any(AbortSignal),
+				expect.any(Number),
 			);
 			expect(mockGetMultipartUploadHandlers).not.toHaveBeenCalled();
 		},
@@ -215,11 +212,8 @@ describe('uploadData with path', () => {
 
 		expect(mockPutObjectJob).not.toHaveBeenCalled();
 		expect(mockGetMultipartUploadHandlers).toHaveBeenCalledWith(
-			expect.objectContaining({
-				config: expect.any(Object),
-				input: testInput,
-				size: expect.any(Number),
-			}),
+			testInput,
+			expect.any(Number),
 		);
 	});
diff --git a/packages/storage/__tests__/providers/s3/apis/uploadData/multipartHandlers.test.ts b/packages/storage/__tests__/providers/s3/apis/uploadData/multipartHandlers.test.ts
index ab049042afd..04c6bf6522a 100644
--- a/packages/storage/__tests__/providers/s3/apis/uploadData/multipartHandlers.test.ts
+++ b/packages/storage/__tests__/providers/s3/apis/uploadData/multipartHandlers.test.ts
@@ -2,7 +2,7 @@
 // SPDX-License-Identifier: Apache-2.0
 
 import { AWSCredentials } from '@aws-amplify/core/internals/utils';
-import { defaultStorage } from '@aws-amplify/core';
+import { Amplify, defaultStorage } from '@aws-amplify/core';
 
 import {
 	abortMultipartUpload,
@@ -22,7 +22,6 @@ import { byteLength } from '../../../../../src/providers/s3/apis/uploadData/byte
 import { CanceledError } from '../../../../../src/errors/CanceledError';
 import { StorageOptions } from '../../../../../src/types';
 import '../testUtils';
-import { S3InternalConfig } from '../../../../../src/providers/s3/apis/internal/types';
 
 jest.mock('@aws-amplify/core');
 jest.mock('../../../../../src/providers/s3/utils/client/s3data');
@@ -33,6 +32,7 @@ const credentials: AWSCredentials = {
 	secretAccessKey: 'secretAccessKey',
 };
 const defaultIdentityId = 'defaultIdentityId';
+const mockFetchAuthSession = Amplify.Auth.fetchAuthSession as jest.Mock;
 const bucket = 'bucket';
 const region = 'region';
 const defaultKey = 'key';
@@ -131,22 +131,21 @@ const resetS3Mocks = () => {
 	mockListParts.mockReset();
 };
 
-const mockCredentialsProvider = jest.fn();
-const mockIdentityIdProvider = jest.fn();
-const mockServiceOptions = { bucket, region };
-const mockLibraryOptions = {};
-
 /* TODO Remove suite when `key` parameter is removed */
 describe('getMultipartUploadHandlers with key', () => {
-	const mockS3Config: S3InternalConfig = {
-		credentialsProvider: mockCredentialsProvider,
-		identityIdProvider: mockIdentityIdProvider,
-		serviceOptions: mockServiceOptions,
-		libraryOptions: mockLibraryOptions,
-	};
 	beforeAll(() => {
-		mockCredentialsProvider.mockImplementation(async () => credentials);
-		mockIdentityIdProvider.mockImplementation(async () => defaultIdentityId);
+		mockFetchAuthSession.mockResolvedValue({
+			credentials,
+			identityId: defaultIdentityId,
+		});
+		(Amplify.getConfig as jest.Mock).mockReturnValue({
+			Storage: {
+				S3: {
+					bucket,
+					region,
+				},
+			},
+		});
 	});
 
 	afterEach(() => {
@@ -155,14 +154,13 @@ describe('getMultipartUploadHandlers with key', () => {
 	});
 
 	it('should return multipart upload handlers', async () => {
-		const multipartUploadHandlers = getMultipartUploadHandlers({
-			config: mockS3Config,
-			input: {
+		const multipartUploadHandlers = getMultipartUploadHandlers(
+			{
 				key: defaultKey,
 				data: { size: 5 * 1024 * 1024 } as any,
 			},
-			size: 5 * 1024 * 1024,
-		});
+			5 * 1024 * 1024,
+		);
 		expect(multipartUploadHandlers).toEqual({
 			multipartUploadJob: expect.any(Function),
 			onPause: expect.any(Function),
@@ -202,12 +200,9 @@ describe('getMultipartUploadHandlers with key', () => {
 			async (_, twoPartsPayload) => {
 				mockMultipartUploadSuccess();
 				const { multipartUploadJob } = getMultipartUploadHandlers({
-					config: mockS3Config,
-					input: {
-						key: defaultKey,
-						data: twoPartsPayload,
-						options: options as StorageOptions,
-					},
+					key: defaultKey,
+					data: twoPartsPayload,
+					options: options as StorageOptions,
 				});
 				const result = await multipartUploadJob();
 				await expect(
@@ -237,11 +232,8 @@ describe('getMultipartUploadHandlers with key', () => {
 		it('should throw if unsupported payload type is provided', async () => {
 			mockMultipartUploadSuccess();
 			const { multipartUploadJob } = getMultipartUploadHandlers({
-				config: mockS3Config,
-				input: {
-					key: defaultKey,
-					data: 1 as any,
-				},
+				key: defaultKey,
+				data: 1 as any,
 			});
 			await expect(multipartUploadJob()).rejects.toThrow(
 				expect.objectContaining(
@@ -267,14 +259,13 @@ describe('getMultipartUploadHandlers with key', () => {
 				}),
 			} as any as File;
 			mockMultipartUploadSuccess();
-			const { multipartUploadJob } = getMultipartUploadHandlers({
-				config: mockS3Config,
-				input: {
+			const { multipartUploadJob } = getMultipartUploadHandlers(
+				{
 					key: defaultKey,
 					data: file,
 				},
-				size: file.size,
-			});
+				file.size,
+			);
 			await multipartUploadJob();
 			expect(file.slice).toHaveBeenCalledTimes(10_000); // S3 limit of parts count
 			expect(mockCreateMultipartUpload).toHaveBeenCalledTimes(1);
@@ -294,14 +285,13 @@ describe('getMultipartUploadHandlers with key', () => {
 			$metadata: {},
 		});
 
-		const { multipartUploadJob } = getMultipartUploadHandlers({
-			config: mockS3Config,
-			input: {
+		const { multipartUploadJob } = getMultipartUploadHandlers(
+			{
 				key: defaultKey,
 				data: new ArrayBuffer(8 * MB),
 			},
-			size: 8 * MB,
-		});
+			8 * MB,
+		);
 		try {
 			await multipartUploadJob();
 			fail('should throw error');
@@ -319,11 +309,8 @@ describe('getMultipartUploadHandlers with key', () => {
 			mockCreateMultipartUpload.mockRejectedValueOnce(new Error('error'));
 
 			const { multipartUploadJob } = getMultipartUploadHandlers({
-				config: mockS3Config,
-				input: {
-					key: defaultKey,
-					data: new ArrayBuffer(8 * MB),
-				},
+				key: defaultKey,
+				data: new ArrayBuffer(8 * MB),
 			});
 			await expect(multipartUploadJob()).rejects.toThrow('error');
 		});
@@ -335,11 +322,8 @@ describe('getMultipartUploadHandlers with key', () => {
 			mockCompleteMultipartUpload.mockRejectedValueOnce(new Error('error'));
 
 			const { multipartUploadJob } = getMultipartUploadHandlers({
-				config: mockS3Config,
-				input: {
-					key: defaultKey,
-					data: new ArrayBuffer(8 * MB),
-				},
+				key: defaultKey,
+				data: new ArrayBuffer(8 * MB),
 			});
 			await expect(multipartUploadJob()).rejects.toThrow('error');
 		});
@@ -356,11 +340,8 @@ describe('getMultipartUploadHandlers with key', () => {
 			mockUploadPart.mockRejectedValueOnce(new Error('error'));
 
 			const { multipartUploadJob } = getMultipartUploadHandlers({
-				config: mockS3Config,
-				input: {
-					key: defaultKey,
-					data: new ArrayBuffer(8 * MB),
-				},
+				key: defaultKey,
+				data: new ArrayBuffer(8 * MB),
 			});
 			await expect(multipartUploadJob()).rejects.toThrow('error');
 			expect(mockUploadPart).toHaveBeenCalledTimes(2);
@@ -380,14 +361,13 @@ describe('getMultipartUploadHandlers with key', () => {
 		it('should send createMultipartUpload request if the upload task is not cached', async () => {
 			mockMultipartUploadSuccess();
 			const size = 8 * MB;
-			const { multipartUploadJob } = getMultipartUploadHandlers({
-				config: mockS3Config,
-				input: {
+			const { multipartUploadJob } = getMultipartUploadHandlers(
+				{
 					key: defaultKey,
 					data: new ArrayBuffer(size),
 				},
 				size,
-			});
+			);
 			await multipartUploadJob();
 			// 1 for caching upload task; 1 for remove cache after upload is completed
 			expect(mockDefaultStorage.setItem).toHaveBeenCalledTimes(2);
@@ -409,14 +389,13 @@ describe('getMultipartUploadHandlers with key', () => {
 			mockMultipartUploadSuccess();
 			mockListParts.mockResolvedValueOnce({ Parts: [], $metadata: {} });
 			const size = 8 * MB;
-			const { multipartUploadJob } = getMultipartUploadHandlers({
-				config: mockS3Config,
-				input: {
+			const { multipartUploadJob } = getMultipartUploadHandlers(
+				{
 					key: defaultKey,
 					data: new ArrayBuffer(size),
 				},
 				size,
-			});
+			);
 			await multipartUploadJob();
 			expect(mockCreateMultipartUpload).toHaveBeenCalledTimes(1);
 			expect(mockListParts).not.toHaveBeenCalled();
@@ -428,14 +407,13 @@ describe('getMultipartUploadHandlers with key', () => {
 			mockMultipartUploadSuccess();
 			mockListParts.mockResolvedValueOnce({ Parts: [], $metadata: {} });
 			const size = 8 * MB;
-			const { multipartUploadJob } = getMultipartUploadHandlers({
-				config: mockS3Config,
-				input: {
+			const { multipartUploadJob } = getMultipartUploadHandlers(
+				{
 					key: defaultKey,
 					data: new File([new ArrayBuffer(size)], 'someName'),
 				},
 				size,
-			});
+			);
 			await multipartUploadJob();
 			// 1 for caching upload task; 1 for remove cache after upload is completed
 			expect(mockDefaultStorage.setItem).toHaveBeenCalledTimes(2);
@@ -464,14 +442,13 @@ describe('getMultipartUploadHandlers with key', () => {
 			mockMultipartUploadSuccess();
 			mockListParts.mockResolvedValueOnce({ Parts: [], $metadata: {} });
 			const size = 8 * MB;
-			const { multipartUploadJob } = getMultipartUploadHandlers({
-				config: mockS3Config,
-				input: {
+			const { multipartUploadJob } = getMultipartUploadHandlers(
+				{
 					key: defaultKey,
 					data: new ArrayBuffer(size),
 				},
 				size,
-			});
+			);
 			await multipartUploadJob();
 			expect(mockCreateMultipartUpload).not.toHaveBeenCalled();
 			expect(mockListParts).toHaveBeenCalledTimes(1);
@@ -483,14 +460,13 @@ describe('getMultipartUploadHandlers with key', () => {
 			mockMultipartUploadSuccess();
 			mockListParts.mockResolvedValueOnce({ Parts: [], $metadata: {} });
 			const size = 8 * MB;
-			const { multipartUploadJob } = getMultipartUploadHandlers({
-				config: mockS3Config,
-				input: {
+			const { multipartUploadJob } = getMultipartUploadHandlers(
+				{
 					key: defaultKey,
 					data: new ArrayBuffer(size),
 				},
 				size,
-			});
+			);
 			await multipartUploadJob();
 			// 1 for caching upload task; 1 for remove cache after upload is completed
 			expect(mockDefaultStorage.setItem).toHaveBeenCalledTimes(2);
@@ -511,14 +487,13 @@ describe('getMultipartUploadHandlers with key', () => {
 			mockMultipartUploadSuccess();
 			mockListParts.mockResolvedValueOnce({ Parts: [], $metadata: {} });
 			const size = 8 * MB;
-			const { multipartUploadJob } = getMultipartUploadHandlers({
-				config: mockS3Config,
-				input: {
+			const { multipartUploadJob } = getMultipartUploadHandlers(
+				{
 					key: defaultKey,
 					data: new ArrayBuffer(size),
 				},
 				size,
-			});
+			);
 			await multipartUploadJob();
 			// 1 for caching upload task; 1 for remove cache after upload is completed
 			expect(mockDefaultStorage.setItem).toHaveBeenCalledTimes(2);
@@ -534,14 +509,13 @@ describe('getMultipartUploadHandlers with key', () => {
 			mockMultipartUploadSuccess(disableAssertionFlag);
 			mockListParts.mockResolvedValueOnce({ Parts: [], $metadata: {} });
 			const size = 8 * MB;
-			const { multipartUploadJob } = getMultipartUploadHandlers({
-				config: mockS3Config,
-				input: {
+			const { multipartUploadJob } = getMultipartUploadHandlers(
+				{
 					key: defaultKey,
 					data: new ArrayBuffer(size),
 				},
 				size,
-			});
+			);
 			const uploadJobPromise = multipartUploadJob();
 			await uploadJobPromise;
 			// 1 for caching upload task; 1 for remove cache after upload is completed
@@ -557,11 +531,8 @@ describe('getMultipartUploadHandlers with key', () => {
 	describe('cancel()', () => {
 		it('should abort in-flight uploadPart requests and throw if upload is canceled', async () => {
 			const { multipartUploadJob, onCancel } = getMultipartUploadHandlers({
-				config: mockS3Config,
-				input: {
-					key: defaultKey,
-					data: new ArrayBuffer(8 * MB),
-				},
+				key: defaultKey,
+				data: new ArrayBuffer(8 * MB),
 			});
 			let partCount = 0;
 			mockMultipartUploadCancellation(() => {
@@ -588,11 +559,8 @@ describe('getMultipartUploadHandlers with key', () => {
 		it('should abort in-flight uploadPart requests if upload is paused', async () => {
 			const { multipartUploadJob, onPause, onResume } =
 				getMultipartUploadHandlers({
-					config: mockS3Config,
-					input: {
-						key: defaultKey,
-						data: new ArrayBuffer(8 * MB),
-					},
+					key: defaultKey,
+					data: new ArrayBuffer(8 * MB),
 				});
 			let partCount = 0;
 			mockMultipartUploadCancellation(() => {
@@ -614,17 +582,16 @@ describe('getMultipartUploadHandlers with key', () => {
 		it('should send progress for in-flight upload parts', async () => {
 			const onProgress = jest.fn();
 			mockMultipartUploadSuccess();
-			const { multipartUploadJob } = getMultipartUploadHandlers({
-				config: mockS3Config,
-				input: {
+			const { multipartUploadJob } = getMultipartUploadHandlers(
+				{
 					key: defaultKey,
 					data: new ArrayBuffer(8 * MB),
 					options: {
 						onProgress,
 					},
 				},
-				size: 8 * MB,
-			});
+				8 * MB,
+			);
 			await multipartUploadJob();
 			expect(onProgress).toHaveBeenCalledTimes(4); // 2 simulated onProgress events per uploadPart call are all tracked
 			expect(onProgress).toHaveBeenNthCalledWith(1, {
@@ -666,17 +633,16 @@ describe('getMultipartUploadHandlers with key', () => {
 			});
 
 			const onProgress = jest.fn();
-			const { multipartUploadJob } = getMultipartUploadHandlers({
-				config: mockS3Config,
-				input: {
+			const { multipartUploadJob } = getMultipartUploadHandlers(
+				{
 					key: defaultKey,
 					data: new ArrayBuffer(8 * MB),
 					options: {
 						onProgress,
 					},
 				},
-				size: 8 * MB,
-			});
+				8 * MB,
+			);
 			await multipartUploadJob();
 			expect(onProgress).toHaveBeenCalledTimes(3); // The first part's 5 MB progress is reported even though no uploadPart call is made.
@@ -689,15 +655,19 @@ describe('getMultipartUploadHandlers with key', () => {
 });
 
 describe('getMultipartUploadHandlers with path', () => {
-	const mockS3Config: S3InternalConfig = {
-		credentialsProvider: mockCredentialsProvider,
-		identityIdProvider: mockIdentityIdProvider,
-		serviceOptions: mockServiceOptions,
-		libraryOptions: mockLibraryOptions,
-	};
 	beforeAll(() => {
-		mockCredentialsProvider.mockImplementation(async () => credentials);
-		mockIdentityIdProvider.mockImplementation(async () => defaultIdentityId);
+		mockFetchAuthSession.mockResolvedValue({
+			credentials,
+			identityId: defaultIdentityId,
+		});
+		(Amplify.getConfig as jest.Mock).mockReturnValue({
+			Storage: {
+				S3: {
+					bucket,
+					region,
+				},
+			},
+		});
 	});
 
 	afterEach(() => {
@@ -706,14 +676,13 @@ describe('getMultipartUploadHandlers with path', () => {
 	});
 
 	it('should return multipart upload handlers', async () => {
-		const multipartUploadHandlers = getMultipartUploadHandlers({
-			config: mockS3Config,
-			input: {
+		const multipartUploadHandlers = getMultipartUploadHandlers(
+			{
 				path: testPath,
 				data: { size: 5 * 1024 * 1024 } as any,
 			},
-			size: 5 * 1024 * 1024,
-		});
+			5 * 1024 * 1024,
+		);
 		expect(multipartUploadHandlers).toEqual({
 			multipartUploadJob: expect.any(Function),
 			onPause: expect.any(Function),
@@ -746,27 +715,24 @@ describe('getMultipartUploadHandlers with path', () => {
 			async (_, twoPartsPayload) => {
 				mockMultipartUploadSuccess();
 				const { multipartUploadJob } = getMultipartUploadHandlers({
-					config: mockS3Config,
-					input: {
-						path: inputPath,
-						data: twoPartsPayload,
-					},
+					path: inputPath,
+					data: twoPartsPayload,
 				});
 				const result = await multipartUploadJob();
-				// await expect(
-				// 	mockCreateMultipartUpload,
-				// ).toBeLastCalledWithConfigAndInput(
-				// 	expect.objectContaining({
-				// 		credentials,
-				// 		region,
-				// 		abortSignal: expect.any(AbortSignal),
-				// 	}),
-				// 	expect.objectContaining({
-				// 		Bucket: bucket,
-				// 		Key: expectedKey,
-				// 		ContentType: defaultContentType,
-				// 	}),
-				// );
+				await expect(
+					mockCreateMultipartUpload,
+				).toBeLastCalledWithConfigAndInput(
+					expect.objectContaining({
+						credentials,
+						region,
+						abortSignal: expect.any(AbortSignal),
+					}),
+					expect.objectContaining({
+						Bucket: bucket,
+						Key: expectedKey,
+						ContentType: defaultContentType,
+					}),
+				);
 				expect(result).toEqual(
 					expect.objectContaining({ path: expectedKey, eTag: 'etag' }),
 				);
@@ -780,11 +746,8 @@ describe('getMultipartUploadHandlers with path', () => {
 		it('should throw if unsupported payload type is provided', async () => {
 			mockMultipartUploadSuccess();
 			const { multipartUploadJob } = getMultipartUploadHandlers({
-				config: mockS3Config,
-				input: {
-					path: testPath,
-					data: 1 as any,
-				},
+				path: testPath,
+				data: 1 as any,
 			});
 			await expect(multipartUploadJob()).rejects.toThrow(
 				expect.objectContaining(
@@ -810,14 +773,13 @@ describe('getMultipartUploadHandlers with path', () => {
 				}),
 			} as any as File;
 			mockMultipartUploadSuccess();
-			const { multipartUploadJob } = getMultipartUploadHandlers({
-				config: mockS3Config,
-				input: {
+			const { multipartUploadJob } = getMultipartUploadHandlers(
+				{
 					path: testPath,
 					data: file,
 				},
-				size: file.size,
-			});
+				file.size,
+			);
 			await multipartUploadJob();
 			expect(file.slice).toHaveBeenCalledTimes(10_000); // S3 limit of parts count
 			expect(mockCreateMultipartUpload).toHaveBeenCalledTimes(1);
@@ -837,14 +799,13 @@ describe('getMultipartUploadHandlers with path', () => {
 			$metadata: {},
 		});
 
-		const { multipartUploadJob } = getMultipartUploadHandlers({
-			config: mockS3Config,
-			input: {
+		const { multipartUploadJob } = getMultipartUploadHandlers(
+			{
 				path: testPath,
 				data: new ArrayBuffer(8 * MB),
 			},
-			size: 8 * MB,
-		});
+			8 * MB,
+		);
 		try {
 			await multipartUploadJob();
 			fail('should throw error');
@@ -862,11 +823,8 @@ describe('getMultipartUploadHandlers with path', () => {
 			mockCreateMultipartUpload.mockRejectedValueOnce(new Error('error'));
 
 			const { multipartUploadJob } = getMultipartUploadHandlers({
-				config: mockS3Config,
-				input: {
-					path: testPath,
-					data: new ArrayBuffer(8 * MB),
-				},
+				path: testPath,
+				data: new ArrayBuffer(8 * MB),
 			});
 			await expect(multipartUploadJob()).rejects.toThrow('error');
 		});
@@ -878,11 +836,8 @@ describe('getMultipartUploadHandlers with path', () => {
 			mockCompleteMultipartUpload.mockRejectedValueOnce(new Error('error'));
 
 			const { multipartUploadJob } = getMultipartUploadHandlers({
-				config: mockS3Config,
-				input: {
-					path: testPath,
-					data: new ArrayBuffer(8 * MB),
-				},
+				path: testPath,
+				data: new ArrayBuffer(8 * MB),
 			});
 			await expect(multipartUploadJob()).rejects.toThrow('error');
 		});
@@ -899,11 +854,8 @@ describe('getMultipartUploadHandlers with path', () => {
 			mockUploadPart.mockRejectedValueOnce(new Error('error'));
 
 			const { multipartUploadJob } = getMultipartUploadHandlers({
-				config: mockS3Config,
-				input: {
-					path: testPath,
-					data: new ArrayBuffer(8 * MB),
-				},
+				path: testPath,
+				data: new ArrayBuffer(8 * MB),
 			});
 			await expect(multipartUploadJob()).rejects.toThrow('error');
 			expect(mockUploadPart).toHaveBeenCalledTimes(2);
@@ -923,14 +875,13 @@ describe('getMultipartUploadHandlers with path', () => {
 		it('should send createMultipartUpload request if the upload task is not cached', async () => {
 			mockMultipartUploadSuccess();
 			const size = 8 * MB;
-			const { multipartUploadJob } = getMultipartUploadHandlers({
-				config: mockS3Config,
-				input: {
+			const { multipartUploadJob } = getMultipartUploadHandlers(
+				{
 					path: testPath,
 					data: new ArrayBuffer(size),
 				},
 				size,
-			});
+			);
 			await multipartUploadJob();
 			// 1 for caching upload task; 1 for remove cache after upload is completed
 			expect(mockDefaultStorage.setItem).toHaveBeenCalledTimes(2);
@@ -952,14 +903,13 @@ describe('getMultipartUploadHandlers with path', () => {
 			mockMultipartUploadSuccess();
 			mockListParts.mockResolvedValueOnce({ Parts: [], $metadata: {} });
 			const size = 8 * MB;
-			const { multipartUploadJob } = getMultipartUploadHandlers({
-				config: mockS3Config,
-				input: {
+			const { multipartUploadJob } = getMultipartUploadHandlers(
+				{
 					path: testPath,
 					data: new ArrayBuffer(size),
 				},
 				size,
-			});
+			);
 			await multipartUploadJob();
 			expect(mockCreateMultipartUpload).toHaveBeenCalledTimes(1);
 			expect(mockListParts).not.toHaveBeenCalled();
@@ -971,14 +921,13 @@ describe('getMultipartUploadHandlers with path', () => {
 			mockMultipartUploadSuccess();
 			mockListParts.mockResolvedValueOnce({ Parts: [], $metadata: {} });
 			const size = 8 * MB;
-			const { multipartUploadJob } = getMultipartUploadHandlers({
-				config: mockS3Config,
-				input: {
+			const { multipartUploadJob } = getMultipartUploadHandlers(
+				{
 					path: testPath,
 					data: new File([new ArrayBuffer(size)], 'someName'),
 				},
 				size,
-			});
+			);
 			await multipartUploadJob();
 			// 1 for caching upload task; 1 for remove cache after upload is completed
 			expect(mockDefaultStorage.setItem).toHaveBeenCalledTimes(2);
@@ -1010,14 +959,13 @@ describe('getMultipartUploadHandlers with path', () => {
 			mockMultipartUploadSuccess();
 			mockListParts.mockResolvedValueOnce({ Parts: [], $metadata: {} });
 			const size = 8 * MB;
-			const { multipartUploadJob } = getMultipartUploadHandlers({
-				config: mockS3Config,
-				input: {
+			const { multipartUploadJob } = getMultipartUploadHandlers(
+				{
 					path: testPath,
 					data: new ArrayBuffer(size),
 				},
 				size,
-			});
+			);
 			await multipartUploadJob();
 			expect(mockCreateMultipartUpload).not.toHaveBeenCalled();
 			expect(mockListParts).toHaveBeenCalledTimes(1);
@@ -1029,14 +977,13 @@ describe('getMultipartUploadHandlers with path', () => {
 			mockMultipartUploadSuccess();
 			mockListParts.mockResolvedValueOnce({ Parts: [], $metadata: {} });
 			const size = 8 * MB;
-			const { multipartUploadJob } = getMultipartUploadHandlers({
-				config: mockS3Config,
-				input: {
+			const { multipartUploadJob } = getMultipartUploadHandlers(
+				{
 					path: testPath,
 					data: new ArrayBuffer(size),
 				},
 				size,
-			});
+			);
 			await multipartUploadJob();
 			// 1 for caching upload task; 1 for remove cache after upload is completed
 			expect(mockDefaultStorage.setItem).toHaveBeenCalledTimes(2);
@@ -1055,14 +1002,13 @@ describe('getMultipartUploadHandlers with path', () => {
 			mockMultipartUploadSuccess();
 			mockListParts.mockResolvedValueOnce({ Parts: [], $metadata: {} });
 			const size = 8 * MB;
-			const { multipartUploadJob } = getMultipartUploadHandlers({
-				config: mockS3Config,
-				input: {
+			const { multipartUploadJob } = getMultipartUploadHandlers(
+				{
 					path: testPath,
 					data: new ArrayBuffer(size),
 				},
 				size,
-			});
+			);
 			await multipartUploadJob();
 			// 1 for caching upload task; 1 for remove cache after upload is completed
 			expect(mockDefaultStorage.setItem).toHaveBeenCalledTimes(2);
@@ -1078,14 +1024,13 @@ describe('getMultipartUploadHandlers with path', () => {
 			mockMultipartUploadSuccess(disableAssertionFlag);
 			mockListParts.mockResolvedValueOnce({ Parts: [], $metadata: {} });
 			const size = 8 * MB;
-			const { multipartUploadJob } = getMultipartUploadHandlers({
-				config: mockS3Config,
-				input: {
+			const { multipartUploadJob } = getMultipartUploadHandlers(
+				{
 					path: testPath,
 					data: new ArrayBuffer(size),
 				},
 				size,
-			});
+			);
 			const uploadJobPromise = multipartUploadJob();
 			await uploadJobPromise;
 			// 1 for caching upload task; 1 for remove cache after upload is completed
@@ -1101,11 +1046,8 @@ describe('getMultipartUploadHandlers with path', () => {
 	describe('cancel()', () => {
 		it('should abort in-flight uploadPart requests and throw if upload is canceled', async () => {
 			const { multipartUploadJob, onCancel } = getMultipartUploadHandlers({
-				config: mockS3Config,
-				input: {
-					path: testPath,
-					data: new ArrayBuffer(8 * MB),
-				},
+				path: testPath,
+				data: new ArrayBuffer(8 * MB),
 			});
 			let partCount = 0;
 			mockMultipartUploadCancellation(() => {
@@ -1132,11 +1074,8 @@ describe('getMultipartUploadHandlers with path', () => {
 		it('should abort in-flight uploadPart requests if upload is paused', async () => {
 			const { multipartUploadJob, onPause, onResume } =
 				getMultipartUploadHandlers({
-					config: mockS3Config,
-					input: {
-						path: testPath,
-						data: new ArrayBuffer(8 * MB),
-					},
+					path: testPath,
+					data: new ArrayBuffer(8 * MB),
 				});
 			let partCount = 0;
 			mockMultipartUploadCancellation(() => {
@@ -1158,17 +1097,16 @@ describe('getMultipartUploadHandlers with path', () => {
 		it('should send progress for in-flight upload parts', async () => {
 			const onProgress = jest.fn();
 			mockMultipartUploadSuccess();
-			const { multipartUploadJob } = getMultipartUploadHandlers({
-				config: mockS3Config,
-				input: {
+			const { multipartUploadJob } = getMultipartUploadHandlers(
+				{
 					path: testPath,
 					data: new ArrayBuffer(8 * MB),
 					options: {
 						onProgress,
 					},
 				},
-				size: 8 * MB,
-			});
+				8 * MB,
+			);
 			await multipartUploadJob();
 			expect(onProgress).toHaveBeenCalledTimes(4); // 2 simulated onProgress events per uploadPart call are all tracked
 			expect(onProgress).toHaveBeenNthCalledWith(1, {
@@ -1210,17 +1148,16 @@ describe('getMultipartUploadHandlers with path', () => {
 			});
 
 			const onProgress = jest.fn();
-			const { multipartUploadJob } = getMultipartUploadHandlers({
-				config: mockS3Config,
-				input: {
+			const { multipartUploadJob } = getMultipartUploadHandlers(
+				{
 					path: testPath,
 					data: new ArrayBuffer(8 * MB),
 					options: {
 						onProgress,
 					},
 				},
-				size: 8 * MB,
-			});
+				8 * MB,
+			);
 			await multipartUploadJob();
 			expect(onProgress).toHaveBeenCalledTimes(3); // The first part's 5 MB progress is reported even though no uploadPart call is made.
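Reviewer note (not part of the patch): with the `S3InternalConfig` indirection gone, these suites stub the `Amplify` singleton directly. A minimal sketch of the pattern the updated tests rely on — `jest.mock('@aws-amplify/core')` auto-mocks the module, so the suite only casts the members it touches and feeds them canned values; all names mirror the test file above:

```ts
import { Amplify } from '@aws-amplify/core';

jest.mock('@aws-amplify/core');

const mockFetchAuthSession = Amplify.Auth.fetchAuthSession as jest.Mock;

beforeAll(() => {
	// Credentials and identityId now come from the mocked Auth session.
	mockFetchAuthSession.mockResolvedValue({
		credentials: {
			accessKeyId: 'accessKeyId',
			sessionToken: 'sessionToken',
			secretAccessKey: 'secretAccessKey',
		},
		identityId: 'defaultIdentityId',
	});
	// Bucket and region now come from the mocked Amplify.getConfig().
	(Amplify.getConfig as jest.Mock).mockReturnValue({
		Storage: { S3: { bucket: 'bucket', region: 'region' } },
	});
});
```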
diff --git a/packages/storage/__tests__/providers/s3/apis/uploadData/putObjectJob.test.ts b/packages/storage/__tests__/providers/s3/apis/uploadData/putObjectJob.test.ts
index f6e06fa1140..df1b92113a1 100644
--- a/packages/storage/__tests__/providers/s3/apis/uploadData/putObjectJob.test.ts
+++ b/packages/storage/__tests__/providers/s3/apis/uploadData/putObjectJob.test.ts
@@ -2,12 +2,12 @@
 // SPDX-License-Identifier: Apache-2.0
 
 import { AWSCredentials } from '@aws-amplify/core/internals/utils';
+import { Amplify } from '@aws-amplify/core';
 
 import { putObject } from '../../../../../src/providers/s3/utils/client/s3data';
 import { calculateContentMd5 } from '../../../../../src/providers/s3/utils';
 import { putObjectJob } from '../../../../../src/providers/s3/apis/uploadData/putObjectJob';
 import '../testUtils';
-import { S3InternalConfig } from '../../../../../src/providers/s3/apis/internal/types';
 
 jest.mock('../../../../../src/providers/s3/utils/client/s3data');
 jest.mock('../../../../../src/providers/s3/utils', () => {
@@ -20,6 +20,13 @@ jest.mock('../../../../../src/providers/s3/utils', () => {
 });
 jest.mock('@aws-amplify/core', () => ({
 	ConsoleLogger: jest.fn(),
+	fetchAuthSession: jest.fn(),
+	Amplify: {
+		getConfig: jest.fn(),
+		Auth: {
+			fetchAuthSession: jest.fn(),
+		},
+	},
 }));
 
 const testPath = 'testPath/object';
@@ -29,35 +36,31 @@ const credentials: AWSCredentials = {
 	secretAccessKey: 'secretAccessKey',
 };
 const identityId = 'identityId';
-const bucket = 'bucket';
-const region = 'region';
-
-const mockCredentialsProvider = jest.fn();
-const mockIdentityIdProvider = jest.fn();
-const mockServiceOptions = { bucket, region };
-const mockLibraryOptions = {};
+const mockFetchAuthSession = jest.mocked(Amplify.Auth.fetchAuthSession);
 const mockPutObject = jest.mocked(putObject);
 
+mockFetchAuthSession.mockResolvedValue({
+	credentials,
+	identityId,
+});
+jest.mocked(Amplify.getConfig).mockReturnValue({
+	Storage: {
+		S3: {
+			bucket: 'bucket',
+			region: 'region',
+		},
+	},
+});
 mockPutObject.mockResolvedValue({
 	ETag: 'eTag',
 	VersionId: 'versionId',
 	$metadata: {},
 });
 
-const config: S3InternalConfig = {
-	credentialsProvider: mockCredentialsProvider,
-	identityIdProvider: mockIdentityIdProvider,
-	serviceOptions: mockServiceOptions,
-	libraryOptions: mockLibraryOptions,
-};
-
 /* TODO Remove suite when `key` parameter is removed */
 describe('putObjectJob with key', () => {
 	beforeEach(() => {
-		mockCredentialsProvider.mockImplementation(async () => credentials);
-		mockIdentityIdProvider.mockImplementation(async () => identityId);
 		mockPutObject.mockClear();
-		jest.clearAllMocks();
 	});
 
 	it('should supply the correct parameters to putObject API handler', async () => {
@@ -71,9 +74,8 @@ describe('putObjectJob with key', () => {
 		const onProgress = jest.fn();
 		const useAccelerateEndpoint = true;
 
-		const job = putObjectJob({
-			config,
-			input: {
+		const job = putObjectJob(
+			{
 				key: inputKey,
 				data,
 				options: {
@@ -85,8 +87,8 @@ describe('putObjectJob with key', () => {
 					useAccelerateEndpoint,
 				},
 			},
-			abortSignal: abortController.signal,
-		});
+			abortController.signal,
+		);
 		const result = await job();
 		expect(result).toEqual({
 			key: inputKey,
@@ -97,7 +99,6 @@ describe('putObjectJob with key', () => {
 			size: undefined,
 		});
 		expect(mockPutObject).toHaveBeenCalledTimes(1);
-
 		await expect(mockPutObject).toBeLastCalledWithConfigAndInput(
 			{
 				credentials,
@@ -121,19 +122,20 @@ describe('putObjectJob with key', () => {
 	});
 
 	it('should set ContentMD5 if object lock is enabled', async () => {
-		const job = putObjectJob({
-			config: {
-				...config,
-				libraryOptions: {
+		Amplify.libraryOptions = {
+			Storage: {
+				S3: {
 					isObjectLockEnabled: true,
 				},
 			},
-			input: {
+		};
+		const job = putObjectJob(
+			{
 				key: 'key',
 				data: 'data',
 			},
-			abortSignal: new AbortController().signal,
-		});
+			new AbortController().signal,
+		);
 		await job();
 		expect(calculateContentMd5).toHaveBeenCalledWith('data');
 	});
@@ -141,8 +143,6 @@ describe('putObjectJob with key', () => {
 
 describe('putObjectJob with path', () => {
 	beforeEach(() => {
-		mockCredentialsProvider.mockImplementation(async () => credentials);
-		mockIdentityIdProvider.mockImplementation(async () => identityId);
 		mockPutObject.mockClear();
 	});
 
@@ -167,9 +167,8 @@ describe('putObjectJob with path', () => {
 			const onProgress = jest.fn();
 			const useAccelerateEndpoint = true;
 
-			const job = putObjectJob({
-				config,
-				input: {
+			const job = putObjectJob(
+				{
 					path: inputPath,
 					data,
 					options: {
@@ -181,8 +180,8 @@ describe('putObjectJob with path', () => {
 						useAccelerateEndpoint,
 					},
 				},
-				abortSignal: abortController.signal,
-			});
+				abortController.signal,
+			);
 			const result = await job();
 			expect(result).toEqual({
 				path: expectedKey,
@@ -217,19 +216,20 @@ describe('putObjectJob with path', () => {
 	);
 
 	it('should set ContentMD5 if object lock is enabled', async () => {
-		const job = putObjectJob({
-			config: {
-				...config,
-				libraryOptions: {
+		Amplify.libraryOptions = {
+			Storage: {
+				S3: {
 					isObjectLockEnabled: true,
 				},
 			},
-			input: {
+		};
+		const job = putObjectJob(
+			{
 				path: testPath,
 				data: 'data',
 			},
-			abortSignal: new AbortController().signal,
-		});
+			new AbortController().signal,
+		);
 		await job();
 		expect(calculateContentMd5).toHaveBeenCalledWith('data');
 	});
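Reviewer note (not part of the patch): the tests above pin down the new positional signature of `putObjectJob`. A usage sketch, assuming the same relative import path as the test file; the returned value is a thunk that performs the single PUT when invoked:

```ts
import { putObjectJob } from '../../../../../src/providers/s3/apis/uploadData/putObjectJob';

const run = async () => {
	const abortController = new AbortController();

	// (input, abortSignal, totalLength?) replaces the old single-props object;
	// totalLength stays optional and feeds progress reporting.
	const job = putObjectJob(
		{ path: 'testPath/object', data: 'data' },
		abortController.signal,
		4, // byte length of 'data'
	);

	return job(); // resolves to the item (path/key, eTag, ...) on success
};
```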
diff --git a/packages/storage/__tests__/providers/s3/apis/utils/resolveS3ConfigAndInput.test.ts b/packages/storage/__tests__/providers/s3/apis/utils/resolveS3ConfigAndInput.test.ts
index ba527aa8dbf..e26cb63b6c7 100644
--- a/packages/storage/__tests__/providers/s3/apis/utils/resolveS3ConfigAndInput.test.ts
+++ b/packages/storage/__tests__/providers/s3/apis/utils/resolveS3ConfigAndInput.test.ts
@@ -1,18 +1,29 @@
 // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 // SPDX-License-Identifier: Apache-2.0
 
+import { Amplify } from '@aws-amplify/core';
+
 import { resolveS3ConfigAndInput } from '../../../../../src/providers/s3/utils';
 import { resolvePrefix } from '../../../../../src/utils/resolvePrefix';
 import {
 	StorageValidationErrorCode,
 	validationErrorMap,
 } from '../../../../../src/errors/types/validation';
-import { S3InternalConfig } from '../../../../../src/providers/s3/apis/internal/types';
-import { assertValidationError } from '../../../../../src/errors/utils/assertValidationError';
 
+jest.mock('@aws-amplify/core', () => ({
+	ConsoleLogger: jest.fn(),
+	Amplify: {
+		getConfig: jest.fn(),
+		Auth: {
+			fetchAuthSession: jest.fn(),
+		},
+	},
+}));
 jest.mock('../../../../../src/utils/resolvePrefix');
 
+const mockGetConfig = Amplify.getConfig as jest.Mock;
 const mockDefaultResolvePrefix = resolvePrefix as jest.Mock;
+const mockFetchAuthSession = Amplify.Auth.fetchAuthSession as jest.Mock;
 
 const bucket = 'bucket';
 const region = 'region';
@@ -23,41 +34,39 @@ const credentials = {
 };
 const targetIdentityId = 'targetIdentityId';
 
-const mockCredentialsProvider = jest.fn();
-const mockIdentityIdProvider = jest.fn();
-const mockServiceOptions = { bucket, region };
-const mockLibraryOptions = {};
-
 describe('resolveS3ConfigAndInput', () => {
-	const config: S3InternalConfig = {
-		credentialsProvider: mockCredentialsProvider,
-		identityIdProvider: mockIdentityIdProvider,
-		serviceOptions: mockServiceOptions,
-		libraryOptions: mockLibraryOptions,
-	};
 	beforeEach(() => {
-		mockCredentialsProvider.mockImplementation(async () => credentials);
-		mockIdentityIdProvider.mockImplementation(async () => targetIdentityId);
 		jest.clearAllMocks();
+		Amplify.libraryOptions = {};
+	});
+	mockFetchAuthSession.mockResolvedValue({
+		credentials,
+		identityId: targetIdentityId,
+	});
+
+	mockGetConfig.mockReturnValue({
+		Storage: {
+			S3: {
+				bucket,
+				region,
+			},
+		},
 	});
 
 	it('should call fetchAuthSession for credentials and identityId', async () => {
 		expect.assertions(1);
-		await resolveS3ConfigAndInput({ config });
-		expect(mockIdentityIdProvider).toHaveBeenCalled();
+		await resolveS3ConfigAndInput(Amplify, {});
+		expect(mockFetchAuthSession).toHaveBeenCalled();
 	});
 
 	it('should throw if credentials are not available', async () => {
		expect.assertions(1);
-		mockCredentialsProvider.mockImplementation(async () => {
-			assertValidationError(
-				!!undefined,
-				StorageValidationErrorCode.NoCredentials,
-			);
+		mockFetchAuthSession.mockResolvedValue({
+			identityId: targetIdentityId,
 		});
 		const {
 			s3Config: { credentials: credentialsProvider },
-		} = await resolveS3ConfigAndInput({ config });
+		} = await resolveS3ConfigAndInput(Amplify, {});
 		if (typeof credentialsProvider === 'function') {
 			await expect(credentialsProvider()).rejects.toMatchObject(
 				validationErrorMap[StorageValidationErrorCode.NoCredentials],
@@ -68,97 +77,100 @@ describe('resolveS3ConfigAndInput', () => {
 	});
 
 	it('should throw if identityId is not available', async () => {
-		mockIdentityIdProvider.mockImplementation(async () => {
-			assertValidationError(!!'', StorageValidationErrorCode.NoIdentityId);
+		mockFetchAuthSession.mockResolvedValueOnce({
+			credentials,
 		});
-		await expect(resolveS3ConfigAndInput({ config })).rejects.toMatchObject(
+		await expect(resolveS3ConfigAndInput(Amplify, {})).rejects.toMatchObject(
 			validationErrorMap[StorageValidationErrorCode.NoIdentityId],
 		);
 	});
 
 	it('should resolve bucket from S3 config', async () => {
-		const { bucket: resolvedBucket } = await resolveS3ConfigAndInput({
-			config,
-		});
+		const { bucket: resolvedBucket } = await resolveS3ConfigAndInput(
+			Amplify,
+			{},
+		);
 		expect(resolvedBucket).toEqual(bucket);
+		expect(mockGetConfig).toHaveBeenCalled();
 	});
 
 	it('should throw if bucket is not available', async () => {
-		await expect(
-			resolveS3ConfigAndInput({
-				config: {
-					...config,
-					serviceOptions: {
-						bucket: undefined,
-					},
+		mockGetConfig.mockReturnValueOnce({
+			Storage: {
+				S3: {
+					region,
 				},
-			}),
-		).rejects.toMatchObject(
+			},
+		});
+		await expect(resolveS3ConfigAndInput(Amplify, {})).rejects.toMatchObject(
 			validationErrorMap[StorageValidationErrorCode.NoBucket],
 		);
 	});
 
 	it('should resolve region from S3 config', async () => {
-		const { s3Config } = await resolveS3ConfigAndInput({ config });
+		const { s3Config } = await resolveS3ConfigAndInput(Amplify, {});
 		expect(s3Config.region).toEqual(region);
+		expect(mockGetConfig).toHaveBeenCalled();
 	});
 
 	it('should throw if region is not available', async () => {
-		await expect(
-			resolveS3ConfigAndInput({
-				config: {
-					...config,
-					serviceOptions: {
-						bucket,
-					},
+		mockGetConfig.mockReturnValueOnce({
+			Storage: {
+				S3: {
+					bucket,
 				},
-			}),
-		).rejects.toMatchObject(
+			},
+		});
+		await expect(resolveS3ConfigAndInput(Amplify, {})).rejects.toMatchObject(
 			validationErrorMap[StorageValidationErrorCode.NoRegion],
 		);
 	});
 
 	it('should set customEndpoint and forcePathStyle to true if dangerouslyConnectToHttpEndpointForTesting is set from S3 config', async () => {
-		const serviceOptions = {
-			bucket,
-			region,
-			dangerouslyConnectToHttpEndpointForTesting: 'true',
-		};
-
-		const { s3Config } = await resolveS3ConfigAndInput({
-			config: { ...config, serviceOptions },
+		mockGetConfig.mockReturnValueOnce({
+			Storage: {
+				S3: {
+					bucket,
+					region,
+					dangerouslyConnectToHttpEndpointForTesting: true,
+				},
+			},
 		});
+		const { s3Config } = await resolveS3ConfigAndInput(Amplify, {});
 		expect(s3Config.customEndpoint).toEqual('http://localhost:20005');
 		expect(s3Config.forcePathStyle).toEqual(true);
+		expect(mockGetConfig).toHaveBeenCalled();
 	});
 
 	it('should resolve isObjectLockEnabled from S3 library options', async () => {
-		const { isObjectLockEnabled } = await resolveS3ConfigAndInput({
-			config: {
-				...config,
-				libraryOptions: { isObjectLockEnabled: true },
+		Amplify.libraryOptions = {
+			Storage: {
+				S3: {
+					isObjectLockEnabled: true,
+				},
 			},
-		});
+		};
+		const { isObjectLockEnabled } = await resolveS3ConfigAndInput(Amplify, {});
 		expect(isObjectLockEnabled).toEqual(true);
 	});
 
 	it('should use default prefix resolver', async () => {
 		mockDefaultResolvePrefix.mockResolvedValueOnce('prefix');
-		const { keyPrefix } = await resolveS3ConfigAndInput({ config });
+		const { keyPrefix } = await resolveS3ConfigAndInput(Amplify, {});
 		expect(mockDefaultResolvePrefix).toHaveBeenCalled();
 		expect(keyPrefix).toEqual('prefix');
 	});
 
 	it('should use prefix resolver from S3 library options if supplied', async () => {
 		const customResolvePrefix = jest.fn().mockResolvedValueOnce('prefix');
-		const { keyPrefix } = await resolveS3ConfigAndInput({
-			config: {
-				...config,
-				libraryOptions: {
+		Amplify.libraryOptions = {
+			Storage: {
+				S3: {
 					prefixResolver: customResolvePrefix,
 				},
 			},
-		});
+		};
+		const { keyPrefix } = await resolveS3ConfigAndInput(Amplify, {});
 		expect(customResolvePrefix).toHaveBeenCalled();
 		expect(keyPrefix).toEqual('prefix');
 		expect(mockDefaultResolvePrefix).not.toHaveBeenCalled();
@@ -166,11 +178,8 @@ describe('resolveS3ConfigAndInput', () => {
 
 	it('should resolve prefix with given access level', async () => {
 		mockDefaultResolvePrefix.mockResolvedValueOnce('prefix');
-		const { keyPrefix } = await resolveS3ConfigAndInput({
-			config,
-			apiOptions: {
-				accessLevel: 'someLevel' as any,
-			},
+		const { keyPrefix } = await resolveS3ConfigAndInput(Amplify, {
+			accessLevel: 'someLevel' as any,
 		});
 		expect(mockDefaultResolvePrefix).toHaveBeenCalledWith({
 			accessLevel: 'someLevel',
@@ -181,14 +190,14 @@ describe('resolveS3ConfigAndInput', () => {
 
 	it('should resolve prefix with default access level from S3 library options', async () => {
 		mockDefaultResolvePrefix.mockResolvedValueOnce('prefix');
-		const { keyPrefix } = await resolveS3ConfigAndInput({
-			config: {
-				...config,
-				libraryOptions: {
+		Amplify.libraryOptions = {
+			Storage: {
+				S3: {
 					defaultAccessLevel: 'someLevel' as any,
 				},
 			},
-		});
+		};
+		const { keyPrefix } = await resolveS3ConfigAndInput(Amplify, {});
 		expect(mockDefaultResolvePrefix).toHaveBeenCalledWith({
 			accessLevel: 'someLevel',
 			targetIdentityId,
@@ -198,7 +207,7 @@ describe('resolveS3ConfigAndInput', () => {
 
 	it('should resolve prefix with `guest` access level if no access level is given', async () => {
 		mockDefaultResolvePrefix.mockResolvedValueOnce('prefix');
-		const { keyPrefix } = await resolveS3ConfigAndInput({ config });
+		const { keyPrefix } = await resolveS3ConfigAndInput(Amplify, {});
 		expect(mockDefaultResolvePrefix).toHaveBeenCalledWith({
 			accessLevel: 'guest', // default access level
 			targetIdentityId,
diff --git a/packages/storage/src/providers/s3/apis/downloadData.ts b/packages/storage/src/providers/s3/apis/downloadData.ts
index e67ab7f6b25..009e75ae95b 100644
--- a/packages/storage/src/providers/s3/apis/downloadData.ts
+++ b/packages/storage/src/providers/s3/apis/downloadData.ts
@@ -13,7 +13,6 @@ import {
 import { resolveS3ConfigAndInput } from '../utils/resolveS3ConfigAndInput';
 import { createDownloadTask, validateStorageOperationInput } from '../utils';
 import { getObject } from '../utils/client/s3data';
-import { createStorageConfiguration } from '../utils/config';
 import { getStorageUserAgentValue } from '../utils/userAgent';
 import { logger } from '../../../utils';
 import {
@@ -115,13 +114,8 @@ const downloadDataJob =
 		StorageDownloadDataOutput<ItemWithKey | ItemWithPath>
 	> => {
 		const { options: downloadDataOptions } = downloadDataInput;
-		const config = createStorageConfiguration(Amplify);
-
 		const { bucket, keyPrefix, s3Config, identityId } =
-			await resolveS3ConfigAndInput({
-				config,
-				apiOptions: downloadDataOptions,
-			});
+			await resolveS3ConfigAndInput(Amplify, downloadDataOptions);
 		const { inputType, objectKey } = validateStorageOperationInput(
 			downloadDataInput,
 			identityId,
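Reviewer note (not part of the patch): every API handler now follows the same two-argument call seen in `downloadData` above and in the `internal/*` files below. A condensed sketch of the consumption pattern; `exampleJob` is a hypothetical handler, and the inline options type is a simplification of the per-call S3 options (`accessLevel`, `targetIdentityId`, `useAccelerateEndpoint`):

```ts
import { Amplify } from '@aws-amplify/core';

import { resolveS3ConfigAndInput } from '../utils/resolveS3ConfigAndInput';

const exampleJob = async (options?: {
	accessLevel?: 'guest' | 'protected' | 'private';
}) => {
	// One call resolves the credentials provider, bucket, region, key prefix
	// and identityId from the singleton; it throws validation errors early
	// when identityId, bucket or region are missing.
	const { s3Config, bucket, keyPrefix, identityId } =
		await resolveS3ConfigAndInput(Amplify, options);

	return { s3Config, bucket, keyPrefix, identityId };
};
```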
diff --git a/packages/storage/src/providers/s3/apis/internal/copy.ts b/packages/storage/src/providers/s3/apis/internal/copy.ts
index 22bdc1bac6a..0e1fc01eadf 100644
--- a/packages/storage/src/providers/s3/apis/internal/copy.ts
+++ b/packages/storage/src/providers/s3/apis/internal/copy.ts
@@ -12,7 +12,6 @@ import {
 } from '../../types';
 import { ResolvedS3Config } from '../../types/options';
 import {
-	createStorageConfiguration,
 	isInputWithPath,
 	resolveS3ConfigAndInput,
 	validateStorageOperationInput,
 } from '../../utils';
@@ -41,10 +40,8 @@ const copyWithPath = async (
 	input: CopyWithPathInput,
 ): Promise<CopyWithPathOutput> => {
 	const { source, destination } = input;
-	const config = createStorageConfiguration(amplify);
-	const { s3Config, bucket, identityId } = await resolveS3ConfigAndInput({
-		config,
-	});
+	const { s3Config, bucket, identityId } =
+		await resolveS3ConfigAndInput(amplify);
 
 	assertValidationError(!!source.path, StorageValidationErrorCode.NoSourcePath);
 	assertValidationError(
@@ -90,19 +87,16 @@ export const copyWithKey = async (
 		!!destinationKey,
 		StorageValidationErrorCode.NoDestinationKey,
 	);
-	const config = createStorageConfiguration(amplify);
+
 	const {
 		s3Config,
 		bucket,
 		keyPrefix: sourceKeyPrefix,
-	} = await resolveS3ConfigAndInput({
-		config,
-		apiOptions: input.source,
-	});
-	const { keyPrefix: destinationKeyPrefix } = await resolveS3ConfigAndInput({
-		config,
-		apiOptions: input.destination,
-	}); // resolveS3ConfigAndInput does not make extra API calls or storage access if called repeatedly.
+	} = await resolveS3ConfigAndInput(amplify, input.source);
+	const { keyPrefix: destinationKeyPrefix } = await resolveS3ConfigAndInput(
+		amplify,
+		input.destination,
+	); // resolveS3ConfigAndInput does not make extra API calls or storage access if called repeatedly.
 
 	// TODO(ashwinkumar6) V6-logger: warn `You may copy files from another user if the source level is "protected", currently it's ${srcLevel}`
 	const finalCopySource = `${bucket}/${sourceKeyPrefix}${sourceKey}`;
diff --git a/packages/storage/src/providers/s3/apis/internal/getProperties.ts b/packages/storage/src/providers/s3/apis/internal/getProperties.ts
index f06f6bcab69..915f02db495 100644
--- a/packages/storage/src/providers/s3/apis/internal/getProperties.ts
+++ b/packages/storage/src/providers/s3/apis/internal/getProperties.ts
@@ -11,7 +11,6 @@ import {
 	GetPropertiesWithPathOutput,
 } from '../../types';
 import {
-	createStorageConfiguration,
 	resolveS3ConfigAndInput,
 	validateStorageOperationInput,
 } from '../../utils';
@@ -26,12 +25,8 @@ export const getProperties = async (
 	action?: StorageAction,
 ): Promise<GetPropertiesOutput | GetPropertiesWithPathOutput> => {
 	const { options: getPropertiesOptions } = input;
-	const config = createStorageConfiguration(amplify);
 	const { s3Config, bucket, keyPrefix, identityId } =
-		await resolveS3ConfigAndInput({
-			config,
-			apiOptions: getPropertiesOptions,
-		});
+		await resolveS3ConfigAndInput(amplify, getPropertiesOptions);
 	const { inputType, objectKey } = validateStorageOperationInput(
 		input,
 		identityId,
diff --git a/packages/storage/src/providers/s3/apis/internal/getUrl.ts b/packages/storage/src/providers/s3/apis/internal/getUrl.ts
index 755a2028e4c..e8440ce80eb 100644
--- a/packages/storage/src/providers/s3/apis/internal/getUrl.ts
+++ b/packages/storage/src/providers/s3/apis/internal/getUrl.ts
@@ -13,7 +13,6 @@ import {
 import { StorageValidationErrorCode } from '../../../../errors/types/validation';
 import { getPresignedGetObjectUrl } from '../../utils/client/s3data';
 import {
-	createStorageConfiguration,
 	resolveS3ConfigAndInput,
 	validateStorageOperationInput,
 } from '../../utils';
@@ -31,12 +30,8 @@ export const getUrl = async (
 	input: GetUrlInput | GetUrlWithPathInput,
 ): Promise<GetUrlOutput> => {
 	const { options: getUrlOptions } = input;
-	const config = createStorageConfiguration(amplify);
 	const { s3Config, keyPrefix, bucket, identityId } =
-		await resolveS3ConfigAndInput({
-			config,
-			apiOptions: getUrlOptions,
-		});
+		await resolveS3ConfigAndInput(amplify, getUrlOptions);
 	const { inputType, objectKey } = validateStorageOperationInput(
 		input,
 		identityId,
diff --git a/packages/storage/src/providers/s3/apis/internal/list.ts b/packages/storage/src/providers/s3/apis/internal/list.ts
index 0da1742aac3..6f074858738 100644
--- a/packages/storage/src/providers/s3/apis/internal/list.ts
+++ b/packages/storage/src/providers/s3/apis/internal/list.ts
@@ -17,7 +17,6 @@ import {
 	ListPaginateWithPathOutput,
 } from '../../types';
 import {
-	createStorageConfiguration,
 	resolveS3ConfigAndInput,
 	validateStorageOperationInputWithPrefix,
 } from '../../utils';
@@ -54,17 +53,12 @@ export const list = async (
 	| ListPaginateWithPathOutput
 > => {
 	const { options = {} } = input;
-
-	const config = createStorageConfiguration(amplify);
 	const {
 		s3Config,
 		bucket,
 		keyPrefix: generatedPrefix,
 		identityId,
-	} = await resolveS3ConfigAndInput({
-		config,
-		apiOptions: options,
-	});
+	} = await resolveS3ConfigAndInput(amplify, options);
 
 	const { inputType, objectKey } = validateStorageOperationInputWithPrefix(
 		input,
diff --git a/packages/storage/src/providers/s3/apis/internal/remove.ts b/packages/storage/src/providers/s3/apis/internal/remove.ts
index 5a6add5ce67..e2a9377f39e 100644
--- a/packages/storage/src/providers/s3/apis/internal/remove.ts
+++ b/packages/storage/src/providers/s3/apis/internal/remove.ts
@@ -11,7 +11,6 @@ import {
 	RemoveWithPathOutput,
 } from '../../types';
 import {
-	createStorageConfiguration,
 	resolveS3ConfigAndInput,
 	validateStorageOperationInput,
 } from '../../utils';
@@ -25,12 +24,8 @@ export const remove = async (
 	input: RemoveInput | RemoveWithPathInput,
 ): Promise<RemoveOutput | RemoveWithPathOutput> => {
 	const { options = {} } = input ?? {};
-	const config = createStorageConfiguration(amplify);
 	const { s3Config, keyPrefix, bucket, identityId } =
-		await resolveS3ConfigAndInput({
-			config,
-			apiOptions: options,
-		});
+		await resolveS3ConfigAndInput(amplify, options);
 
 	const { inputType, objectKey } = validateStorageOperationInput(
 		input,
diff --git a/packages/storage/src/providers/s3/apis/internal/types/index.ts b/packages/storage/src/providers/s3/apis/internal/types/index.ts
deleted file mode 100644
index fb20b5da08d..00000000000
--- a/packages/storage/src/providers/s3/apis/internal/types/index.ts
+++ /dev/null
@@ -1,31 +0,0 @@
-// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
-// SPDX-License-Identifier: Apache-2.0
-
-import { LibraryOptions, StorageConfig } from '@aws-amplify/core';
-import { AWSCredentials } from '@aws-amplify/core/internals/utils';
-
-/**
- * Internal S3 service options.
- *
- * @internal
- */
-type S3ServiceOptions = StorageConfig['S3'];
-
-/**
- * Internal S3 library options.
- *
- * @internal
- */
-type S3LibraryOptions = NonNullable<LibraryOptions['Storage']>['S3'];
-
-/**
- * S3 storage config input
- *
- * @internal
- */
-export interface S3InternalConfig {
-	serviceOptions: S3ServiceOptions;
-	libraryOptions: S3LibraryOptions;
-	credentialsProvider(): Promise<AWSCredentials>;
-	identityIdProvider(): Promise<string>;
-}
diff --git a/packages/storage/src/providers/s3/apis/internal/uploadData.ts b/packages/storage/src/providers/s3/apis/internal/uploadData.ts
deleted file mode 100644
index 5c616b4b7a7..00000000000
--- a/packages/storage/src/providers/s3/apis/internal/uploadData.ts
+++ /dev/null
@@ -1,58 +0,0 @@
-// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
-// SPDX-License-Identifier: Apache-2.0
-
-import { UploadDataInput, UploadDataWithPathInput } from '../../types';
-import { createUploadTask } from '../../utils';
-import { assertValidationError } from '../../../../errors/utils/assertValidationError';
-import { StorageValidationErrorCode } from '../../../../errors/types/validation';
-import { DEFAULT_PART_SIZE, MAX_OBJECT_SIZE } from '../../utils/constants';
-import { byteLength } from '../uploadData/byteLength';
-import { putObjectJob } from '../uploadData/putObjectJob';
-import { getMultipartUploadHandlers } from '../uploadData/multipart';
-
-import { S3InternalConfig } from './types';
-
-export function internalUploadData(
-	config: S3InternalConfig,
-	input: UploadDataInput | UploadDataWithPathInput,
-) {
-	const { data } = input;
-
-	const dataByteLength = byteLength(data);
-	assertValidationError(
-		dataByteLength === undefined || dataByteLength <= MAX_OBJECT_SIZE,
-		StorageValidationErrorCode.ObjectIsTooLarge,
-	);
-
-	if (dataByteLength && dataByteLength <= DEFAULT_PART_SIZE) {
-		// Single part upload
-		const abortController = new AbortController();
-
-		return createUploadTask({
-			isMultipartUpload: false,
-			job: putObjectJob({
-				config,
-				input,
-				abortSignal: abortController.signal,
-				totalLength: dataByteLength,
-			}),
-			onCancel: (message?: string) => {
-				abortController.abort(message);
-			},
-		});
-	} else {
-		// Multipart upload
-		const { multipartUploadJob, onPause, onResume, onCancel } =
-			getMultipartUploadHandlers({ config, input, size: dataByteLength });
-
-		return createUploadTask({
-			isMultipartUpload: true,
-			job: multipartUploadJob,
-			onCancel: (message?: string) => {
-				onCancel(message);
-			},
-			onPause,
-			onResume,
-		});
-	}
-}
diff --git a/packages/storage/src/providers/s3/apis/uploadData/index.ts b/packages/storage/src/providers/s3/apis/uploadData/index.ts
index f32b90425dc..8669309ec53 100644
--- a/packages/storage/src/providers/s3/apis/uploadData/index.ts
+++ b/packages/storage/src/providers/s3/apis/uploadData/index.ts
@@ -1,16 +1,20 @@
 // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 // SPDX-License-Identifier: Apache-2.0
 
-import { Amplify } from '@aws-amplify/core';
-
 import {
 	UploadDataInput,
 	UploadDataOutput,
 	UploadDataWithPathInput,
 	UploadDataWithPathOutput,
 } from '../../types';
-import { internalUploadData } from '../internal/uploadData';
-import { createStorageConfiguration } from '../../utils/config';
+import { createUploadTask } from '../../utils';
+import { assertValidationError } from '../../../../errors/utils/assertValidationError';
+import { StorageValidationErrorCode } from '../../../../errors/types/validation';
+import { DEFAULT_PART_SIZE, MAX_OBJECT_SIZE } from '../../utils/constants';
+
+import { byteLength } from './byteLength';
+import { putObjectJob } from './putObjectJob';
+import { getMultipartUploadHandlers } from './multipart';
 
 /**
  * Upload data to the specified S3 object path. By default uses single PUT operation to upload if the payload is less than 5MB.
@@ -123,7 +127,38 @@ export function uploadData(
 export function uploadData(input: UploadDataInput): UploadDataOutput;
 
 export function uploadData(input: UploadDataInput | UploadDataWithPathInput) {
-	const config = createStorageConfiguration(Amplify);
+	const { data } = input;
+
+	const dataByteLength = byteLength(data);
+	assertValidationError(
+		dataByteLength === undefined || dataByteLength <= MAX_OBJECT_SIZE,
+		StorageValidationErrorCode.ObjectIsTooLarge,
+	);
+
+	if (dataByteLength && dataByteLength <= DEFAULT_PART_SIZE) {
+		// Single part upload
+		const abortController = new AbortController();
+
+		return createUploadTask({
+			isMultipartUpload: false,
+			job: putObjectJob(input, abortController.signal, dataByteLength),
+			onCancel: (message?: string) => {
+				abortController.abort(message);
+			},
+		});
+	} else {
+		// Multipart upload
+		const { multipartUploadJob, onPause, onResume, onCancel } =
+			getMultipartUploadHandlers(input, dataByteLength);
 
-	return internalUploadData(config, input);
+		return createUploadTask({
+			isMultipartUpload: true,
+			job: multipartUploadJob,
+			onCancel: (message?: string) => {
+				onCancel(message);
+			},
+			onPause,
+			onResume,
+		});
+	}
 }
diff --git a/packages/storage/src/providers/s3/apis/uploadData/multipart/uploadHandlers.ts b/packages/storage/src/providers/s3/apis/uploadData/multipart/uploadHandlers.ts
index e84de95619b..886a769648b 100644
--- a/packages/storage/src/providers/s3/apis/uploadData/multipart/uploadHandlers.ts
+++ b/packages/storage/src/providers/s3/apis/uploadData/multipart/uploadHandlers.ts
@@ -29,7 +29,6 @@ import {
 } from '../../../utils/client/s3data';
 import { getStorageUserAgentValue } from '../../../utils/userAgent';
 import { logger } from '../../../../../utils';
-import { S3InternalConfig } from '../../internal/types';
 
 import { uploadPartExecutor } from './uploadPartExecutor';
 import { getUploadsCacheKey, removeCachedUpload } from './uploadCache';
@@ -43,17 +42,10 @@ import { getDataChunker } from './getDataChunker';
 *
 * @internal
 */
-
-interface GetMultipartUploadHandlersProps {
-	config: S3InternalConfig;
-	input: UploadDataInput | UploadDataWithPathInput;
-	size?: number;
-}
-export const getMultipartUploadHandlers = ({
-	config,
-	input,
-	size,
-}: GetMultipartUploadHandlersProps) => {
+export const getMultipartUploadHandlers = (
+	uploadDataInput: UploadDataInput | UploadDataWithPathInput,
+	size?: number,
+) => {
 	let resolveCallback:
 		| ((value: ItemWithKey | ItemWithPath) => void)
 		| undefined;
@@ -78,11 +70,11 @@ export const getMultipartUploadHandlers = ({
 	let isAbortSignalFromPause = false;
 
 	const startUpload = async (): Promise<void> => {
-		const { options: uploadDataOptions, data } = input;
-		const resolvedS3Options = await resolveS3ConfigAndInput({
-			config,
-			apiOptions: uploadDataOptions,
-		});
+		const { options: uploadDataOptions, data } = uploadDataInput;
+		const resolvedS3Options = await resolveS3ConfigAndInput(
+			Amplify,
+			uploadDataOptions,
+		);
 
 		abortController = new AbortController();
 		isAbortSignalFromPause = false;
@@ -91,7 +83,7 @@ export const getMultipartUploadHandlers = ({
 		resolvedIdentityId = resolvedS3Options.identityId;
 
 		const { inputType, objectKey } = validateStorageOperationInput(
-			input,
+			uploadDataInput,
 			resolvedIdentityId,
 		);
diff --git a/packages/storage/src/providers/s3/apis/uploadData/putObjectJob.ts b/packages/storage/src/providers/s3/apis/uploadData/putObjectJob.ts
index 4da8bf328b5..76f9ebf5638 100644
--- a/packages/storage/src/providers/s3/apis/uploadData/putObjectJob.ts
+++ b/packages/storage/src/providers/s3/apis/uploadData/putObjectJob.ts
@@ -1,6 +1,7 @@
 // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 // SPDX-License-Identifier: Apache-2.0
 
+import { Amplify } from '@aws-amplify/core';
 import { StorageAction } from '@aws-amplify/core/internals/utils';
 
 import { UploadDataInput, UploadDataWithPathInput } from '../../types';
@@ -13,14 +14,6 @@ import { ItemWithKey, ItemWithPath } from '../../types/outputs';
 import { putObject } from '../../utils/client/s3data';
 import { getStorageUserAgentValue } from '../../utils/userAgent';
 import { STORAGE_INPUT_KEY } from '../../utils/constants';
-import { S3InternalConfig } from '../internal/types';
-
-interface PutObjectJobProps {
-	config: S3InternalConfig;
-	input: UploadDataInput | UploadDataWithPathInput;
-	abortSignal: AbortSignal;
-	totalLength?: number;
-}
 
 /**
  * Get a function the returns a promise to call putObject API to S3.
@@ -28,17 +21,17 @@ interface PutObjectJobProps {
  * @internal
  */
 export const putObjectJob =
-	({ config, input, abortSignal, totalLength }: PutObjectJobProps) =>
+	(
+		uploadDataInput: UploadDataInput | UploadDataWithPathInput,
+		abortSignal: AbortSignal,
+		totalLength?: number,
+	) =>
 	async (): Promise<ItemWithKey | ItemWithPath> => {
-		const { options: uploadDataOptions, data } = input;
-
+		const { options: uploadDataOptions, data } = uploadDataInput;
 		const { bucket, keyPrefix, s3Config, isObjectLockEnabled, identityId } =
-			await resolveS3ConfigAndInput({
-				config,
-				apiOptions: uploadDataOptions,
-			});
+			await resolveS3ConfigAndInput(Amplify, uploadDataOptions);
 
 		const { inputType, objectKey } = validateStorageOperationInput(
-			input,
+			uploadDataInput,
 			identityId,
 		);
diff --git a/packages/storage/src/providers/s3/types/options.ts b/packages/storage/src/providers/s3/types/options.ts
index 9a908890352..633366a4628 100644
--- a/packages/storage/src/providers/s3/types/options.ts
+++ b/packages/storage/src/providers/s3/types/options.ts
@@ -213,14 +213,3 @@ export interface ResolvedS3Config
 	forcePathStyle?: boolean;
 	useAccelerateEndpoint?: boolean;
 }
-
-/**
- * Internal S3 API options.
- *
- * @internal
- */
-export interface S3ApiOptions {
-	accessLevel?: StorageAccessLevel;
-	targetIdentityId?: string;
-	useAccelerateEndpoint?: boolean;
-}
diff --git a/packages/storage/src/providers/s3/utils/config.ts b/packages/storage/src/providers/s3/utils/config.ts
deleted file mode 100644
index 49258d0e04d..00000000000
--- a/packages/storage/src/providers/s3/utils/config.ts
+++ /dev/null
@@ -1,59 +0,0 @@
-// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
-// SPDX-License-Identifier: Apache-2.0
-
-import { AmplifyClassV6 } from '@aws-amplify/core';
-
-import { StorageValidationErrorCode } from '../../../errors/types/validation';
-import { assertValidationError } from '../../../errors/utils/assertValidationError';
-import { S3InternalConfig } from '../apis/internal/types';
-
-const createDefaultCredentialsProvider = (amplify: AmplifyClassV6) => {
-	/**
-	 * A credentials provider function instead of a static credentials object is
-	 * used because the long-running tasks like multipart upload may span over the
-	 * credentials expiry. Auth.fetchAuthSession() automatically refreshes the
-	 * credentials if they are expired.
-	 */
-	return async () => {
-		const { credentials } = await amplify.Auth.fetchAuthSession();
-		assertValidationError(
-			!!credentials,
-			StorageValidationErrorCode.NoCredentials,
-		);
-
-		return credentials;
-	};
-};
-
-const createDefaultIdentityIdProvider = (amplify: AmplifyClassV6) => {
-	return async () => {
-		const { identityId } = await amplify.Auth.fetchAuthSession();
-		assertValidationError(
-			!!identityId,
-			StorageValidationErrorCode.NoIdentityId,
-		);
-
-		return identityId;
-	};
-};
-
-/**
- * It will return a Storage configuration used by lower level utils and APIs.
- *
- * @internal
- */
-export const createStorageConfiguration = (
-	amplify: AmplifyClassV6,
-): S3InternalConfig => {
-	const libraryOptions = amplify.libraryOptions?.Storage?.S3 ?? {};
-	const serviceOptions = amplify.getConfig()?.Storage?.S3 ?? {};
-	const credentialsProvider = createDefaultCredentialsProvider(amplify);
-	const identityIdProvider = createDefaultIdentityIdProvider(amplify);
-
-	return {
-		libraryOptions,
-		serviceOptions,
-		credentialsProvider,
-		identityIdProvider,
-	};
-};
diff --git a/packages/storage/src/providers/s3/utils/index.ts b/packages/storage/src/providers/s3/utils/index.ts
index 1f43bb3f5d9..cd6b9753019 100644
--- a/packages/storage/src/providers/s3/utils/index.ts
+++ b/packages/storage/src/providers/s3/utils/index.ts
@@ -7,4 +7,3 @@ export { createDownloadTask, createUploadTask } from './transferTask';
 export { validateStorageOperationInput } from './validateStorageOperationInput';
 export { validateStorageOperationInputWithPrefix } from './validateStorageOperationInputWithPrefix';
 export { isInputWithPath } from './isInputWithPath';
-export { createStorageConfiguration } from './config';
diff --git a/packages/storage/src/providers/s3/utils/resolveS3ConfigAndInput.ts b/packages/storage/src/providers/s3/utils/resolveS3ConfigAndInput.ts
index ece08ea9223..ae7a185c93c 100644
--- a/packages/storage/src/providers/s3/utils/resolveS3ConfigAndInput.ts
+++ b/packages/storage/src/providers/s3/utils/resolveS3ConfigAndInput.ts
@@ -1,14 +1,21 @@
 // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 // SPDX-License-Identifier: Apache-2.0
 
+import { AmplifyClassV6, StorageAccessLevel } from '@aws-amplify/core';
+
 import { assertValidationError } from '../../../errors/utils/assertValidationError';
 import { StorageValidationErrorCode } from '../../../errors/types/validation';
 import { resolvePrefix as defaultPrefixResolver } from '../../../utils/resolvePrefix';
-import { ResolvedS3Config, S3ApiOptions } from '../types/options';
-import { S3InternalConfig } from '../apis/internal/types';
+import { ResolvedS3Config } from '../types/options';
 
 import { DEFAULT_ACCESS_LEVEL, LOCAL_TESTING_S3_ENDPOINT } from './constants';
 
+interface S3ApiOptions {
+	accessLevel?: StorageAccessLevel;
+	targetIdentityId?: string;
+	useAccelerateEndpoint?: boolean;
+}
+
 interface ResolvedS3ConfigAndInput {
 	s3Config: ResolvedS3Config;
 	bucket: string;
@@ -17,10 +24,6 @@ interface ResolvedS3ConfigAndInput {
 	identityId?: string;
 }
 
-interface ResolveS3ConfigAndInputParams {
-	config: S3InternalConfig;
-	apiOptions?: S3ApiOptions;
-}
 /**
  * resolve the common input options for S3 API handlers from Amplify configuration and library options.
  *
@@ -32,26 +35,44 @@ interface ResolveS3ConfigAndInputParams {
  *
 * @internal
 */
-export const resolveS3ConfigAndInput = async ({
-	config,
-	apiOptions,
-}: ResolveS3ConfigAndInputParams): Promise<ResolvedS3ConfigAndInput> => {
-	const {
-		credentialsProvider,
-		serviceOptions,
-		libraryOptions,
-		identityIdProvider,
-	} = config;
+export const resolveS3ConfigAndInput = async (
+	amplify: AmplifyClassV6,
+	apiOptions?: S3ApiOptions,
+): Promise<ResolvedS3ConfigAndInput> => {
+	/**
+	 * IdentityId is always cached in memory so we can safely make calls here. It
+	 * should be stable even for unauthenticated users, regardless of credentials.
+	 */
+	const { identityId } = await amplify.Auth.fetchAuthSession();
+	assertValidationError(!!identityId, StorageValidationErrorCode.NoIdentityId);
+
+	/**
+	 * A credentials provider function instead of a static credentials object is
+	 * used because the long-running tasks like multipart upload may span over the
+	 * credentials expiry. Auth.fetchAuthSession() automatically refreshes the
+	 * credentials if they are expired.
+	 */
+	const credentialsProvider = async () => {
+		const { credentials } = await amplify.Auth.fetchAuthSession();
+		assertValidationError(
+			!!credentials,
+			StorageValidationErrorCode.NoCredentials,
+		);
+
+		return credentials;
+	};
+
 	const { bucket, region, dangerouslyConnectToHttpEndpointForTesting } =
-		serviceOptions ?? {};
+		amplify.getConfig()?.Storage?.S3 ?? {};
 	assertValidationError(!!bucket, StorageValidationErrorCode.NoBucket);
 	assertValidationError(!!region, StorageValidationErrorCode.NoRegion);
-	const identityId = await identityIdProvider();
+
 	const {
 		defaultAccessLevel,
 		prefixResolver = defaultPrefixResolver,
 		isObjectLockEnabled,
-	} = libraryOptions ?? {};
+	} = amplify.libraryOptions?.Storage?.S3 ?? {};
+
 	const keyPrefix = await prefixResolver({
 		accessLevel:
 			apiOptions?.accessLevel ?? defaultAccessLevel ?? DEFAULT_ACCESS_LEVEL,
@@ -76,7 +97,7 @@ export const resolveS3ConfigAndInput = async (
 		},
 		bucket,
 		keyPrefix,
-		isObjectLockEnabled,
 		identityId,
+		isObjectLockEnabled,
 	};
 };
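Reviewer note (not part of the patch): after this change `uploadData/index.ts` owns the size-based dispatch that `internal/uploadData.ts` used to perform. A condensed sketch of that decision; the constant values are illustrative stand-ins for `DEFAULT_PART_SIZE` and `MAX_OBJECT_SIZE` in `../../utils/constants`:

```ts
const DEFAULT_PART_SIZE = 5 * 1024 * 1024; // assumed 5 MB single-PUT threshold
const MAX_OBJECT_SIZE = 5 * 1024 * 1024 * 1024 * 1024; // assumed 5 TB S3 object cap

const pickUploadStrategy = (
	dataByteLength?: number,
): 'single-put' | 'multipart' => {
	if (dataByteLength !== undefined && dataByteLength > MAX_OBJECT_SIZE) {
		// The real code raises a StorageValidationErrorCode.ObjectIsTooLarge error.
		throw new Error('Object is too large');
	}

	// Payloads of unknown size fall through to multipart, which can chunk a
	// stream without knowing the total length up front.
	return dataByteLength && dataByteLength <= DEFAULT_PART_SIZE
		? 'single-put'
		: 'multipart';
};
```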