diff --git a/infra-gen2/backends/storage/main/amplify/backend.ts b/infra-gen2/backends/storage/main/amplify/backend.ts index e9d462a0b3..c75ada8bba 100644 --- a/infra-gen2/backends/storage/main/amplify/backend.ts +++ b/infra-gen2/backends/storage/main/amplify/backend.ts @@ -1,26 +1,52 @@ import { defineBackend } from "@aws-amplify/backend"; import * as s3 from "aws-cdk-lib/aws-s3"; import { auth } from "./auth/resource"; -import { storage } from "./storage/resource"; +import { firstBucket, secondBucket } from "./storage/resource"; /** * @see https://docs.amplify.aws/react/build-a-backend/ to add storage, functions, and more */ const backend = defineBackend({ auth, - storage, + firstBucket, + secondBucket, }); // custom storage configurations -const s3Bucket = backend.storage.resources.bucket; +const s3Bucket = backend.firstBucket.resources.bucket; const cfnBucket = s3Bucket.node.defaultChild as s3.CfnBucket; +const s3SecondaryBucket = backend.secondBucket.resources.bucket; +const cfnSecondaryBucket = s3SecondaryBucket.node.defaultChild as s3.CfnBucket; cfnBucket.accelerateConfiguration = { accelerationStatus: "Enabled", }; +cfnSecondaryBucket.accelerateConfiguration = { + accelerationStatus: "Enabled", +}; + +// required to add the metadata header, which amplify-backend does not support +backend.firstBucket.resources.cfnResources.cfnBucket.corsConfiguration = { + corsRules: [ + { + allowedHeaders: ["*"], + allowedMethods: ["GET", "HEAD", "PUT", "POST", "DELETE"], + allowedOrigins: ["*"], + exposedHeaders: [ + "x-amz-server-side-encryption", + "x-amz-request-id", + "x-amz-id-2", + "ETag", + "x-amz-meta-description", + ], + maxAge: 3000, + }, + ], +}; + // required to add the metadata header, which amplify-backend does not support -backend.storage.resources.cfnResources.cfnBucket.corsConfiguration = { +backend.secondBucket.resources.cfnResources.cfnBucket.corsConfiguration = { corsRules: [ { allowedHeaders: ["*"], diff --git a/infra-gen2/backends/storage/main/amplify/storage/resource.ts b/infra-gen2/backends/storage/main/amplify/storage/resource.ts index 3fb921c12b..4bb947fa19 100644 --- a/infra-gen2/backends/storage/main/amplify/storage/resource.ts +++ b/infra-gen2/backends/storage/main/amplify/storage/resource.ts @@ -1,7 +1,25 @@ import { defineStorage } from "@aws-amplify/backend"; -export const storage = defineStorage({ - name: "Storage Integ Test main", +export const firstBucket = defineStorage({ + name: "Storage Integ Test main bucket", + isDefault: true, + access: (allow) => ({ + "public/*": [ + allow.guest.to(["read", "write", "delete"]), + allow.authenticated.to(["read", "delete", "write"]), + ], + "protected/{entity_id}/*": [ + allow.authenticated.to(["read"]), + allow.entity("identity").to(["read", "write", "delete"]), + ], + "private/{entity_id}/*": [ + allow.entity("identity").to(["read", "write", "delete"]), + ], + }), +}); + +export const secondBucket = defineStorage({ + name: "Storage Integ Test secondary bucket", access: (allow) => ({ "public/*": [ allow.guest.to(["read", "write", "delete"]), diff --git a/packages/amplify_core/lib/src/category/amplify_storage_category.dart b/packages/amplify_core/lib/src/category/amplify_storage_category.dart index 416446d12e..84c9603c03 100644 --- a/packages/amplify_core/lib/src/category/amplify_storage_category.dart +++ b/packages/amplify_core/lib/src/category/amplify_storage_category.dart @@ -144,6 +144,7 @@ class StorageCategory extends AmplifyCategory { required StoragePath path, void Function(StorageTransferProgress)? 
onProgress, StorageUploadDataOptions? options, + StorageBucket? bucket, }) { return identifyCall( StorageCategoryMethod.uploadData, @@ -152,6 +153,7 @@ class StorageCategory extends AmplifyCategory { data: data, onProgress: onProgress, options: options, + bucket: bucket, ), ); } diff --git a/packages/amplify_core/lib/src/config/amplify_outputs/storage/bucket_outputs.dart b/packages/amplify_core/lib/src/config/amplify_outputs/storage/bucket_outputs.dart new file mode 100644 index 0000000000..e156f08567 --- /dev/null +++ b/packages/amplify_core/lib/src/config/amplify_outputs/storage/bucket_outputs.dart @@ -0,0 +1,47 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +import 'package:amplify_core/amplify_core.dart'; + +part 'bucket_outputs.g.dart'; + +/// {@template amplify_core.amplify_outputs.bucket_outputs} +/// The Amplify Gen 2 outputs for Buckets in the Storage category. +/// {@endtemplate} +@zAmplifyOutputsSerializable +class BucketOutputs + with AWSEquatable, AWSSerializable, AWSDebuggable { + /// {@macro amplify_core.amplify_outputs.bucket_outputs} + const BucketOutputs({ + required this.name, + required this.bucketName, + required this.awsRegion, + }); + + factory BucketOutputs.fromJson(Map json) => + _$BucketOutputsFromJson(json); + + /// The user friendly name of the bucket + final String name; + + /// The Amazon S3 bucket name. + final String bucketName; + + /// The AWS region of Amazon S3 resources. + final String awsRegion; + + @override + List get props => [ + name, + bucketName, + awsRegion, + ]; + + @override + String get runtimeTypeName => 'BucketOutputs'; + + @override + Object? toJson() { + return _$BucketOutputsToJson(this); + } +} diff --git a/packages/amplify_core/lib/src/config/amplify_outputs/storage/bucket_outputs.g.dart b/packages/amplify_core/lib/src/config/amplify_outputs/storage/bucket_outputs.g.dart new file mode 100644 index 0000000000..0d60c85fcc --- /dev/null +++ b/packages/amplify_core/lib/src/config/amplify_outputs/storage/bucket_outputs.g.dart @@ -0,0 +1,34 @@ +// GENERATED CODE - DO NOT MODIFY BY HAND + +// ignore_for_file: deprecated_member_use_from_same_package + +part of 'bucket_outputs.dart'; + +// ************************************************************************** +// JsonSerializableGenerator +// ************************************************************************** + +BucketOutputs _$BucketOutputsFromJson(Map json) => + $checkedCreate( + 'BucketOutputs', + json, + ($checkedConvert) { + final val = BucketOutputs( + name: $checkedConvert('name', (v) => v as String), + bucketName: $checkedConvert('bucket_name', (v) => v as String), + awsRegion: $checkedConvert('aws_region', (v) => v as String), + ); + return val; + }, + fieldKeyMap: const { + 'bucketName': 'bucket_name', + 'awsRegion': 'aws_region' + }, + ); + +Map _$BucketOutputsToJson(BucketOutputs instance) => + { + 'name': instance.name, + 'bucket_name': instance.bucketName, + 'aws_region': instance.awsRegion, + }; diff --git a/packages/amplify_core/lib/src/config/amplify_outputs/storage/storage_outputs.dart b/packages/amplify_core/lib/src/config/amplify_outputs/storage/storage_outputs.dart index 9db9356aa6..fec0ec0662 100644 --- a/packages/amplify_core/lib/src/config/amplify_outputs/storage/storage_outputs.dart +++ b/packages/amplify_core/lib/src/config/amplify_outputs/storage/storage_outputs.dart @@ -2,6 +2,7 @@ // SPDX-License-Identifier: Apache-2.0 import 'package:amplify_core/amplify_core.dart'; +import 
'package:amplify_core/src/config/amplify_outputs/storage/bucket_outputs.dart'; part 'storage_outputs.g.dart'; @@ -12,7 +13,11 @@ part 'storage_outputs.g.dart'; class StorageOutputs with AWSEquatable, AWSSerializable, AWSDebuggable { /// {@macro amplify_core.amplify_outputs.storage_outputs} - const StorageOutputs({required this.awsRegion, required this.bucketName}); + const StorageOutputs({ + required this.awsRegion, + required this.bucketName, + this.buckets, + }); factory StorageOutputs.fromJson(Map json) => _$StorageOutputsFromJson(json); @@ -23,8 +28,11 @@ class StorageOutputs /// The Amazon S3 bucket name. final String bucketName; + /// The list of buckets if there are multiple buckets for the project + final List? buckets; + @override - List get props => [awsRegion, bucketName]; + List get props => [awsRegion, bucketName, buckets]; @override String get runtimeTypeName => 'StorageOutputs'; diff --git a/packages/amplify_core/lib/src/config/amplify_outputs/storage/storage_outputs.g.dart b/packages/amplify_core/lib/src/config/amplify_outputs/storage/storage_outputs.g.dart index 7b90421189..40d147f387 100644 --- a/packages/amplify_core/lib/src/config/amplify_outputs/storage/storage_outputs.g.dart +++ b/packages/amplify_core/lib/src/config/amplify_outputs/storage/storage_outputs.g.dart @@ -16,6 +16,12 @@ StorageOutputs _$StorageOutputsFromJson(Map json) => final val = StorageOutputs( awsRegion: $checkedConvert('aws_region', (v) => v as String), bucketName: $checkedConvert('bucket_name', (v) => v as String), + buckets: $checkedConvert( + 'buckets', + (v) => (v as List?) + ?.map( + (e) => BucketOutputs.fromJson(e as Map)) + .toList()), ); return val; }, @@ -25,8 +31,18 @@ StorageOutputs _$StorageOutputsFromJson(Map json) => }, ); -Map _$StorageOutputsToJson(StorageOutputs instance) => - { - 'aws_region': instance.awsRegion, - 'bucket_name': instance.bucketName, - }; +Map _$StorageOutputsToJson(StorageOutputs instance) { + final val = { + 'aws_region': instance.awsRegion, + 'bucket_name': instance.bucketName, + }; + + void writeNotNull(String key, dynamic value) { + if (value != null) { + val[key] = value; + } + } + + writeNotNull('buckets', instance.buckets?.map((e) => e.toJson()).toList()); + return val; +} diff --git a/packages/amplify_core/lib/src/plugin/amplify_storage_plugin_interface.dart b/packages/amplify_core/lib/src/plugin/amplify_storage_plugin_interface.dart index c4d874d8a0..efb891fbef 100644 --- a/packages/amplify_core/lib/src/plugin/amplify_storage_plugin_interface.dart +++ b/packages/amplify_core/lib/src/plugin/amplify_storage_plugin_interface.dart @@ -63,6 +63,7 @@ abstract class StoragePluginInterface extends AmplifyPluginInterface { required StorageDataPayload data, void Function(StorageTransferProgress)? onProgress, StorageUploadDataOptions? options, + StorageBucket? 
bucket, }) { throw UnimplementedError('uploadData() has not been implemented.'); } diff --git a/packages/amplify_core/lib/src/types/exception/amplify_exception.dart b/packages/amplify_core/lib/src/types/exception/amplify_exception.dart index aedc1c1e19..a60f435bd6 100644 --- a/packages/amplify_core/lib/src/types/exception/amplify_exception.dart +++ b/packages/amplify_core/lib/src/types/exception/amplify_exception.dart @@ -21,6 +21,7 @@ part 'network_exception.dart'; part 'push/push_notification_exception.dart'; part 'storage/access_denied_exception.dart'; part 'storage/http_status_exception.dart'; +part 'storage/invalid_storage_bucket_exception.dart'; part 'storage/local_file_not_found_exception.dart'; part 'storage/not_found_exception.dart'; part 'storage/operation_canceled_exception.dart'; diff --git a/packages/amplify_core/lib/src/types/exception/storage/invalid_storage_bucket_exception.dart b/packages/amplify_core/lib/src/types/exception/storage/invalid_storage_bucket_exception.dart new file mode 100644 index 0000000000..5e5c56fe0e --- /dev/null +++ b/packages/amplify_core/lib/src/types/exception/storage/invalid_storage_bucket_exception.dart @@ -0,0 +1,15 @@ +part of '../amplify_exception.dart'; + +/// {@template amplify_core.storage.invalid_storage_bucket_exception} +/// Exception thrown when the [StorageBucket] is invalid. +/// {@endtemplate} +class InvalidStorageBucketException extends StorageException { + const InvalidStorageBucketException( + super.message, { + super.recoverySuggestion, + super.underlyingException, + }); + + @override + String get runtimeTypeName => 'InvalidStorageBucketException'; +} diff --git a/packages/amplify_core/lib/src/types/storage/bucket_info.dart b/packages/amplify_core/lib/src/types/storage/bucket_info.dart new file mode 100644 index 0000000000..a30811b333 --- /dev/null +++ b/packages/amplify_core/lib/src/types/storage/bucket_info.dart @@ -0,0 +1,17 @@ +import 'package:amplify_core/amplify_core.dart'; + +/// {@template amplify_core.storage.bucket_info} +/// Represents information about a storage bucket. +/// {@endtemplate} +class BucketInfo with AWSEquatable<BucketInfo> { + /// {@macro amplify_core.storage.bucket_info} + const BucketInfo({required this.bucketName, required this.region}); + final String bucketName; + final String region; + + @override + List<Object?> get props => [ + bucketName, + region, + ]; +} diff --git a/packages/amplify_core/lib/src/types/storage/list_options.dart b/packages/amplify_core/lib/src/types/storage/list_options.dart index c046f90d55..8e292040b1 100--- a/packages/amplify_core/lib/src/types/storage/list_options.dart +++ b/packages/amplify_core/lib/src/types/storage/list_options.dart @@ -1,7 +1,7 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import 'package:aws_common/aws_common.dart'; +import 'package:amplify_core/amplify_core.dart'; /// {@template amplify_core.storage.list_options} /// Configurable options for `Amplify.Storage.list`. @@ -15,6 +15,7 @@ class StorageListOptions const StorageListOptions({ this.pageSize = 1000, this.nextToken, + this.bucket, this.pluginOptions, }); @@ -26,9 +27,12 @@ class StorageListOptions /// {@macro amplify_core.storage.list_plugin_options} final StorageListPluginOptions? pluginOptions; + + /// An optional bucket to specify which bucket to list objects from. + final StorageBucket?
bucket; @override - List<Object?> get props => [pageSize, nextToken, pluginOptions]; + List<Object?> get props => [pageSize, nextToken, pluginOptions, bucket]; @override String get runtimeTypeName => 'StorageListOptions'; @@ -37,6 +41,7 @@ Map<String, Object?> toJson() => { 'pageSize': pageSize, 'nextToken': nextToken, + 'bucket': bucket, 'pluginOptions': pluginOptions?.toJson(), }; } diff --git a/packages/amplify_core/lib/src/types/storage/storage_bucket.dart b/packages/amplify_core/lib/src/types/storage/storage_bucket.dart new file mode 100644 index 0000000000..711e30d18b --- /dev/null +++ b/packages/amplify_core/lib/src/types/storage/storage_bucket.dart @@ -0,0 +1,19 @@ +import 'package:amplify_core/src/config/amplify_outputs/storage/storage_outputs.dart'; +import 'package:amplify_core/src/types/storage/bucket_info.dart'; +import 'package:amplify_core/src/types/storage/storage_bucket_from_outputs.dart'; +import 'package:meta/meta.dart'; + +/// Represents a storage bucket. +class StorageBucket { + /// Creates a [StorageBucket] from [BucketInfo]. + const StorageBucket.fromBucketInfo(this._info); + + /// Creates a [StorageBucket] defined by the [name] in the AmplifyOutputs file. + factory StorageBucket.fromOutputs(String name) => + StorageBucketFromOutputs(name); + + final BucketInfo _info; + + @internal + BucketInfo resolveBucketInfo(StorageOutputs? storageOutputs) => _info; +} diff --git a/packages/amplify_core/lib/src/types/storage/storage_bucket_from_outputs.dart b/packages/amplify_core/lib/src/types/storage/storage_bucket_from_outputs.dart new file mode 100644 index 0000000000..ed22c24aa2 --- /dev/null +++ b/packages/amplify_core/lib/src/types/storage/storage_bucket_from_outputs.dart @@ -0,0 +1,43 @@ +import 'package:amplify_core/amplify_core.dart'; +import 'package:amplify_core/src/config/amplify_outputs/storage/storage_outputs.dart'; +import 'package:meta/meta.dart'; + +/// {@template amplify_core.storage.storage_bucket_from_outputs} +/// Creates a [StorageBucket] defined by the name in the AmplifyOutputs file. +/// {@endtemplate} +@internal +class StorageBucketFromOutputs implements StorageBucket { + /// {@macro amplify_core.storage.storage_bucket_from_outputs} + const StorageBucketFromOutputs(this._name); + + final String _name; + + @override + BucketInfo resolveBucketInfo(StorageOutputs?
storageOutputs) { + assert( + storageOutputs != null, + 'storageOutputs cannot be null', + ); + final buckets = storageOutputs!.buckets; + if (buckets == null) { + throw const InvalidStorageBucketException( + 'Amplify Outputs storage configuration does not have buckets specified.', + recoverySuggestion: + 'Make sure the Amplify Outputs file has a storage configuration with ' + 'buckets specified.', + ); + } + final bucket = buckets.singleWhere( + (e) => e.name == _name, + orElse: () => throw const InvalidStorageBucketException( + 'Unable to look up the bucket from the provided name in the Amplify Outputs file.', + recoverySuggestion: 'Make sure the Amplify Outputs file has the specified ' + 'bucket configuration.', + ), + ); + return BucketInfo( + bucketName: bucket.bucketName, + region: bucket.awsRegion, + ); + } +} diff --git a/packages/amplify_core/lib/src/types/storage/storage_types.dart b/packages/amplify_core/lib/src/types/storage/storage_types.dart index 9324a72683..1e4bfbfb02 100644 --- a/packages/amplify_core/lib/src/types/storage/storage_types.dart +++ b/packages/amplify_core/lib/src/types/storage/storage_types.dart @@ -11,6 +11,7 @@ export '../exception/amplify_exception.dart' StorageOperationCanceledException, NetworkException, UnknownException; +export 'bucket_info.dart'; export 'copy_operation.dart'; export 'copy_options.dart'; export 'copy_request.dart'; @@ -44,6 +45,7 @@ export 'remove_operation.dart'; export 'remove_options.dart'; export 'remove_request.dart'; export 'remove_result.dart'; +export 'storage_bucket.dart'; export 'storage_item.dart'; export 'storage_path.dart'; export 'transfer_progress.dart'; diff --git a/packages/amplify_core/test/config/amplify_outputs/test_data.dart b/packages/amplify_core/test/config/amplify_outputs/test_data.dart index 317e7a78b4..4a1f5e7b87 100644 --- a/packages/amplify_core/test/config/amplify_outputs/test_data.dart +++ b/packages/amplify_core/test/config/amplify_outputs/test_data.dart @@ -99,9 +99,26 @@ const amplifyoutputs = '''{ ] }, "storage": { - "aws_region": "oem dks", - "bucket_name": "dolor et esse" - }, + "aws_region": "us-east-2", + "bucket_name": "amplifyTeamDrive-one-stora-testbucketgen2bucket0b8c-9ggcfqfunkjr", + "buckets": [ + { + "name": "amplifyTeamDrive-one", + "bucket_name": "amplifyTeamDrive-one-stora-testbucketgen2bucket0b8c-9ggcfqfunkjr", + "aws_region": "us-east-2" + }, + { + "name": "amplifyTeamDrive-two", + "bucket_name": "amplifyTeamDrive-two-stora-testbucketgen2bucket0b8c-2", + "aws_region": "us-east-2" + }, + { + "name": "amplifyTeamDrive-three", + "bucket_name": "amplifyTeamDrive-three-stora-testbucketgen2bucket0b8c-3", + "aws_region": "us-east-2" + } + ] +}, "version": "1" } '''; diff --git a/packages/amplify_core/test/types/storage/storage_bucket_test.dart b/packages/amplify_core/test/types/storage/storage_bucket_test.dart new file mode 100644 index 0000000000..edae2ab4af --- /dev/null +++ b/packages/amplify_core/test/types/storage/storage_bucket_test.dart @@ -0,0 +1,87 @@ +import 'package:amplify_core/amplify_core.dart'; +import 'package:amplify_core/src/config/amplify_outputs/storage/bucket_outputs.dart'; +import 'package:amplify_core/src/config/amplify_outputs/storage/storage_outputs.dart'; +import 'package:test/test.dart'; + +void main() { + group('Storage bucket resolve BucketInfo', () { + const defaultBucketOutputs = BucketOutputs( + name: 'default-bucket-friendly-name', + bucketName: 'default-bucket-unique-name', + awsRegion: 'default-bucket-aws-region', + ); + const secondBucketOutputs = BucketOutputs( + name:
'second-bucket-friendly-name', + bucketName: 'second-bucket-unique-name', + awsRegion: 'second-bucket-aws-region', + ); + final defaultBucketInfo = BucketInfo( + bucketName: defaultBucketOutputs.bucketName, + region: defaultBucketOutputs.awsRegion, + ); + final secondBucketInfo = BucketInfo( + bucketName: secondBucketOutputs.bucketName, + region: secondBucketOutputs.awsRegion, + ); + final testStorageOutputsMultiBucket = StorageOutputs( + awsRegion: defaultBucketOutputs.awsRegion, + bucketName: defaultBucketOutputs.bucketName, + buckets: [ + defaultBucketOutputs, + secondBucketOutputs, + ], + ); + final testStorageOutputsSingleBucket = StorageOutputs( + awsRegion: defaultBucketOutputs.awsRegion, + bucketName: defaultBucketOutputs.bucketName, + ); + + test( + 'should return same bucket info when storage bucket is created from' + ' a bucket info', () { + final storageBucket = StorageBucket.fromBucketInfo( + defaultBucketInfo, + ); + final bucketInfo = storageBucket.resolveBucketInfo(null); + expect(bucketInfo, defaultBucketInfo); + }); + + test( + 'should return bucket info when storage bucket is created from' + ' buckets in storage outputs', () { + final storageBucket = StorageBucket.fromOutputs(secondBucketOutputs.name); + final bucketInfo = + storageBucket.resolveBucketInfo(testStorageOutputsMultiBucket); + expect(bucketInfo, secondBucketInfo); + }); + + test( + 'should throw assertion error when storage bucket is created from' + ' outputs and storage outputs is null', () { + final storageBucket = + StorageBucket.fromOutputs(defaultBucketOutputs.name); + expect( + () => storageBucket.resolveBucketInfo(null), + throwsA(isA()), + ); + }); + test( + 'should throw exception when storage bucket is created from outputs and' + ' storage outputs does not have buckets', () { + final storageBucket = StorageBucket.fromOutputs('bucket-name'); + expect( + () => storageBucket.resolveBucketInfo(testStorageOutputsSingleBucket), + throwsA(isA()), + ); + }); + test( + 'should throw exception when storage bucket is created from outputs and' + ' bucket name does not match any bucket in storage outputs', () { + final storageBucket = StorageBucket.fromOutputs('invalid-bucket-name'); + expect( + () => storageBucket.resolveBucketInfo(testStorageOutputsMultiBucket), + throwsA(isA()), + ); + }); + }); +} diff --git a/packages/storage/amplify_storage_s3/example/integration_test/list_test.dart b/packages/storage/amplify_storage_s3/example/integration_test/list_test.dart index 22e2a2766b..8cb8eb43af 100644 --- a/packages/storage/amplify_storage_s3/example/integration_test/list_test.dart +++ b/packages/storage/amplify_storage_s3/example/integration_test/list_test.dart @@ -20,18 +20,28 @@ void main() { '$uniquePrefix/file2.txt', '$uniquePrefix/subdir/file3.txt', '$uniquePrefix/subdir2#file4.txt', + '$uniquePrefix/file5.txt', + '$uniquePrefix/file6.txt', + '$uniquePrefix/subdir3/file7.txt', + '$uniquePrefix/subdir4#file8.txt', ]; group('standard config', () { setUpAll(() async { await configure(amplifyEnvironments['main']!); - - for (final path in uploadedPaths) { + for (var i = 0; i < 4; i++) { await Amplify.Storage.uploadData( - path: StoragePath.fromString(path), + path: StoragePath.fromString(uploadedPaths[i]), data: StorageDataPayload.bytes('test content'.codeUnits), + bucket: StorageBucket.fromOutputs('Storage Integ Test main bucket'), + ).result; + } + for (var i = 4; i < 8; i++) { + await Amplify.Storage.uploadData( + path: StoragePath.fromString(uploadedPaths[i]), + data: StorageDataPayload.bytes('test 
content'.codeUnits), + bucket: StorageBucket.fromOutputs('Storage Integ Test secondary bucket'), ).result; } - for (final path in uploadedPaths) { addTearDownPath(StoragePath.fromString(path)); } @@ -39,13 +49,22 @@ void main() { group('list() without options', () { testWidgets('should list all files with unique prefix', (_) async { - final listResult = await Amplify.Storage.list( + final listResultMainBucket = await Amplify.Storage.list( path: StoragePath.fromString(uniquePrefix), ).result; - - for (final uploadedPath in uploadedPaths) { + final listResultSecondaryBucket = await Amplify.Storage.list( + path: StoragePath.fromString(uniquePrefix), + options: StorageListOptions(bucket: StorageBucket.fromOutputs('Storage Integ Test secondary bucket')), + ).result; + for (var i = 0; i < 4; i++) { + expect( + listResultMainBucket.items.any((item) => item.path == uploadedPaths[i]), + isTrue, + ); + } + for (var i = 4; i < 8; i++) { expect( - listResult.items.any((item) => item.path == uploadedPath), + listResultSecondaryBucket.items.any((item) => item.path == uploadedPaths[i]), isTrue, ); } @@ -101,6 +120,17 @@ void main() { ), ).result as S3ListResult; + final listResultSecondaryBucket = await Amplify.Storage.list( + path: StoragePath.fromString('$uniquePrefix/'), + options: StorageListOptions( + pluginOptions: const S3ListPluginOptions( + excludeSubPaths: true, + delimiter: '#', + ), + bucket: StorageBucket.fromOutputs('Storage Integ Test secondary bucket'), + ), + ).result as S3ListResult; + expect(listResult.items.length, 3); expect(listResult.items.first.path, contains('file1.txt')); @@ -110,6 +140,16 @@ void main() { '$uniquePrefix/subdir2#', ); expect(listResult.metadata.delimiter, '#'); + + expect(listResultSecondaryBucket.items.length, 3); + expect(listResultSecondaryBucket.items.first.path, contains('file5.txt')); + + expect(listResultSecondaryBucket.metadata.subPaths.length, 1); + expect( + listResultSecondaryBucket.metadata.subPaths.first, + '$uniquePrefix/subdir4#', + ); + expect(listResultSecondaryBucket.metadata.delimiter, '#'); }); }); @@ -123,6 +163,17 @@ void main() { expect(listResult.items.length, 2); expect(listResult.items.first.path, contains('file1.txt')); + + final listResultSecondaryBucket = await Amplify.Storage.list( + path: StoragePath.fromString(uniquePrefix), + options: StorageListOptions( + pageSize: 2, + bucket: StorageBucket.fromOutputs('Storage Integ Test secondary bucket'), + ), + ).result; + + expect(listResultSecondaryBucket.items.length, 2); + expect(listResultSecondaryBucket.items.first.path, contains('file5.txt')); }); testWidgets('should list files with pagination', (_) async { @@ -157,8 +208,19 @@ void main() { ), ).result; - expect(listResult.items.length, uploadedPaths.length); + expect(listResult.items.length, uploadedPaths.length~/2); expect(listResult.nextToken, isNull); + + final listResultSecondaryBucket = await Amplify.Storage.list( + path: StoragePath.fromString(uniquePrefix), + options: StorageListOptions( + pluginOptions: const S3ListPluginOptions.listAll(), + bucket: StorageBucket.fromOutputs('Storage Integ Test secondary bucket'), + ), + ).result; + + expect(listResultSecondaryBucket.items.length, uploadedPaths.length~/2); + expect(listResultSecondaryBucket.nextToken, isNull); }); }); }); diff --git a/packages/storage/amplify_storage_s3_dart/lib/src/amplify_storage_s3_dart_impl.dart b/packages/storage/amplify_storage_s3_dart/lib/src/amplify_storage_s3_dart_impl.dart index cc709c5802..3afda601ce 100644 --- 
a/packages/storage/amplify_storage_s3_dart/lib/src/amplify_storage_s3_dart_impl.dart +++ b/packages/storage/amplify_storage_s3_dart/lib/src/amplify_storage_s3_dart_impl.dart @@ -124,7 +124,7 @@ class AmplifyStorageS3Dart extends StoragePluginInterface }); } - @override + @override S3ListOperation list({ required StoragePath path, StorageListOptions? options, @@ -136,6 +136,7 @@ class AmplifyStorageS3Dart extends StoragePluginInterface final s3Options = StorageListOptions( pluginOptions: s3PluginOptions, nextToken: options?.nextToken, + bucket: options?.bucket, pageSize: options?.pageSize ?? 1000, ); @@ -151,6 +152,7 @@ class AmplifyStorageS3Dart extends StoragePluginInterface ); } + @override S3GetPropertiesOperation getProperties({ required StoragePath path, @@ -274,6 +276,7 @@ class AmplifyStorageS3Dart extends StoragePluginInterface required StoragePath path, void Function(S3TransferProgress)? onProgress, StorageUploadDataOptions? options, + StorageBucket? bucket, }) { final s3PluginOptions = reifyPluginOptions( pluginOptions: options?.pluginOptions, @@ -290,6 +293,7 @@ class AmplifyStorageS3Dart extends StoragePluginInterface dataPayload: data, options: s3Options, onProgress: onProgress, + bucket: bucket, ); return S3UploadDataOperation( diff --git a/packages/storage/amplify_storage_s3_dart/lib/src/storage_s3_service/service/s3_client_info.dart b/packages/storage/amplify_storage_s3_dart/lib/src/storage_s3_service/service/s3_client_info.dart new file mode 100644 index 0000000000..d70adc524d --- /dev/null +++ b/packages/storage/amplify_storage_s3_dart/lib/src/storage_s3_service/service/s3_client_info.dart @@ -0,0 +1,19 @@ +import 'package:amplify_storage_s3_dart/src/sdk/src/s3/s3_client.dart'; +import 'package:meta/meta.dart'; +import 'package:smithy_aws/smithy_aws.dart'; + +/// It holds Amazon S3 client information. +@internal +class S3ClientInfo { + const S3ClientInfo({ + required this.client, + required this.config, + required this.bucketName, + required this.awsRegion, + }); + + final S3Client client; + final S3ClientConfig config; + final String bucketName; + final String awsRegion; +} diff --git a/packages/storage/amplify_storage_s3_dart/lib/src/storage_s3_service/service/storage_s3_service_impl.dart b/packages/storage/amplify_storage_s3_dart/lib/src/storage_s3_service/service/storage_s3_service_impl.dart index f749de67f3..0191ecff42 100644 --- a/packages/storage/amplify_storage_s3_dart/lib/src/storage_s3_service/service/storage_s3_service_impl.dart +++ b/packages/storage/amplify_storage_s3_dart/lib/src/storage_s3_service/service/storage_s3_service_impl.dart @@ -14,6 +14,7 @@ import 'package:amplify_storage_s3_dart/src/path_resolver/s3_path_resolver.dart' import 'package:amplify_storage_s3_dart/src/sdk/s3.dart' as s3; import 'package:amplify_storage_s3_dart/src/sdk/src/s3/common/endpoint_resolver.dart' as endpoint_resolver; +import 'package:amplify_storage_s3_dart/src/storage_s3_service/service/s3_client_info.dart'; import 'package:amplify_storage_s3_dart/src/storage_s3_service/storage_s3_service.dart'; import 'package:amplify_storage_s3_dart/src/storage_s3_service/transfer/transfer.dart' as transfer; @@ -84,10 +85,8 @@ class StorageS3Service { ..supportedProtocols = SupportedProtocols.http1, ), _pathResolver = pathResolver, + _credentialsProvider = credentialsProvider, _logger = logger, - // dependencyManager.get() => sigv4.AWSSigV4Signer is used for unit tests - _awsSigV4Signer = dependencyManager.get() ?? 
- sigv4.AWSSigV4Signer(credentialsProvider: credentialsProvider), _dependencyManager = dependencyManager, _serviceStartingTime = DateTime.now(); @@ -101,14 +100,10 @@ class StorageS3Service { final s3.S3Client _defaultS3Client; final S3PathResolver _pathResolver; final AWSLogger _logger; - final sigv4.AWSSigV4Signer _awsSigV4Signer; final DependencyManager _dependencyManager; final DateTime _serviceStartingTime; - - sigv4.AWSCredentialScope get _signerScope => sigv4.AWSCredentialScope( - region: _storageOutputs.awsRegion, - service: AWSService.s3, - ); + final AWSIamAmplifyAuthProvider _credentialsProvider; + final Map _s3ClientsInfo = {}; transfer.TransferDatabase get _transferDatabase => _dependencyManager.getOrCreate(); @@ -130,11 +125,12 @@ class StorageS3Service { const S3ListPluginOptions(); final resolvedPath = await _pathResolver.resolvePath(path: path); + final s3ClientInfo = options.bucket == null ? getS3ClientInfo() : getS3ClientInfo(storageBucket: options.bucket); if (!s3PluginOptions.listAll) { final request = s3.ListObjectsV2Request.build((builder) { builder - ..bucket = _storageOutputs.bucketName + ..bucket = s3ClientInfo.bucketName ..prefix = resolvedPath ..maxKeys = options.pageSize ..continuationToken = options.nextToken @@ -145,7 +141,7 @@ class StorageS3Service { try { return S3ListResult.fromPaginatedResult( - await _defaultS3Client.listObjectsV2(request).result, + await s3ClientInfo.client.listObjectsV2(request).result, ); } on smithy.UnknownSmithyHttpException catch (error) { // S3Client.headObject may return 403 error @@ -161,14 +157,15 @@ class StorageS3Service { try { final request = s3.ListObjectsV2Request.build((builder) { builder - ..bucket = _storageOutputs.bucketName + // ignore: invalid_use_of_internal_member + ..bucket = options.bucket == null ? _storageOutputs.bucketName : options.bucket!.resolveBucketInfo(_storageOutputs).bucketName ..prefix = resolvedPath ..delimiter = s3PluginOptions.excludeSubPaths ? s3PluginOptions.delimiter : null; }); - listResult = await _defaultS3Client.listObjectsV2(request).result; + listResult = await s3ClientInfo.client.listObjectsV2(request).result; recursiveResult = S3ListResult.fromPaginatedResult( listResult, ); @@ -261,10 +258,20 @@ class StorageS3Service { path: '/$resolvedPath', ); + // dependencyManager.get() is used for unit tests + final awsSigV4Signer = _dependencyManager.get() ?? + sigv4.AWSSigV4Signer( + credentialsProvider: _credentialsProvider, + ); + final signerScope = sigv4.AWSCredentialScope( + region: _storageOutputs.awsRegion, + service: AWSService.s3, + ); + return S3GetUrlResult( - url: await _awsSigV4Signer.presign( + url: await awsSigV4Signer.presign( urlRequest, - credentialScope: _signerScope, + credentialScope: signerScope, expiresIn: s3PluginOptions.expiresIn, serviceConfiguration: _defaultS3SignerConfiguration, ), @@ -323,12 +330,15 @@ class StorageS3Service { void Function(S3TransferProgress)? onProgress, FutureOr Function()? onDone, FutureOr Function()? onError, + StorageBucket? 
bucket, }) { + final s3ClientInfo = getS3ClientInfo(storageBucket: bucket); final uploadDataTask = S3UploadTask.fromDataPayload( dataPayload, - s3Client: _defaultS3Client, - defaultS3ClientConfig: _defaultS3ClientConfig, - bucket: _storageOutputs.bucketName, + s3Client: s3ClientInfo.client, + s3ClientConfig: s3ClientInfo.config, + bucket: s3ClientInfo.bucketName, + awsRegion: s3ClientInfo.awsRegion, path: path, options: options, pathResolver: _pathResolver, @@ -366,8 +376,9 @@ class StorageS3Service { final uploadDataTask = S3UploadTask.fromAWSFile( localFile, s3Client: _defaultS3Client, - defaultS3ClientConfig: _defaultS3ClientConfig, + s3ClientConfig: _defaultS3ClientConfig, bucket: _storageOutputs.bucketName, + awsRegion: _storageOutputs.awsRegion, path: path, options: uploadDataOptions, pathResolver: _pathResolver, @@ -594,21 +605,77 @@ class StorageS3Service { Future abortIncompleteMultipartUploads() async { final records = await _transferDatabase .getMultipartUploadRecordsCreatedBefore(_serviceStartingTime); - for (final record in records) { + final bucketInfo = BucketInfo( + bucketName: record.bucketName ?? _storageOutputs.bucketName, + region: record.awsRegion ?? _storageOutputs.awsRegion, + ); final request = s3.AbortMultipartUploadRequest.build((builder) { builder - ..bucket = _storageOutputs.bucketName + ..bucket = bucketInfo.bucketName ..key = record.objectKey ..uploadId = record.uploadId; }); + final s3Client = getS3ClientInfo( + storageBucket: StorageBucket.fromBucketInfo(bucketInfo), + ).client; try { - await _defaultS3Client.abortMultipartUpload(request).result; + await s3Client.abortMultipartUpload(request).result; await _transferDatabase.deleteTransferRecords(record.uploadId); } on Exception catch (error) { _logger.error('Failed to abort multipart upload due to: $error'); } } } + + /// Creates and caches [S3ClientInfo] given the optional [storageBucket] + /// parameter. If the optional parameter is not provided it uses + /// StorageOutputs default bucket to create the [S3ClientInfo]. + @internal + @visibleForTesting + S3ClientInfo getS3ClientInfo({StorageBucket? storageBucket}) { + if (storageBucket == null) { + return S3ClientInfo( + client: _defaultS3Client, + config: _defaultS3ClientConfig, + bucketName: _storageOutputs.bucketName, + awsRegion: _storageOutputs.awsRegion, + ); + } + // ignore: invalid_use_of_internal_member + final bucketInfo = storageBucket.resolveBucketInfo(_storageOutputs); + if (_s3ClientsInfo[bucketInfo.bucketName] != null) { + return _s3ClientsInfo[bucketInfo.bucketName]!; + } + + final usePathStyle = bucketInfo.bucketName.contains('.'); + if (usePathStyle) { + _logger.warn( + 'Since your bucket name contains dots (`"."`), the StorageS3 plugin' + ' will use path style URLs to communicate with the S3 service. S3' + ' Transfer acceleration is not supported for path style URLs. 
For more' + ' information, refer to:' + ' https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html'); + } + final s3ClientConfig = smithy_aws.S3ClientConfig( + signerConfiguration: _defaultS3SignerConfiguration, + usePathStyle: usePathStyle, + ); + final s3Client = s3.S3Client( + region: bucketInfo.region, + credentialsProvider: _credentialsProvider, + s3ClientConfig: s3ClientConfig, + client: AmplifyHttpClient(_dependencyManager) + ..supportedProtocols = SupportedProtocols.http1, + ); + final s3ClientInfo = S3ClientInfo( + client: s3Client, + config: s3ClientConfig, + bucketName: bucketInfo.bucketName, + awsRegion: bucketInfo.region, + ); + _s3ClientsInfo[bucketInfo.bucketName] = s3ClientInfo; + return s3ClientInfo; + } } diff --git a/packages/storage/amplify_storage_s3_dart/lib/src/storage_s3_service/service/task/s3_upload_task.dart b/packages/storage/amplify_storage_s3_dart/lib/src/storage_s3_service/service/task/s3_upload_task.dart index 55f5091165..bbb49cf100 100644 --- a/packages/storage/amplify_storage_s3_dart/lib/src/storage_s3_service/service/task/s3_upload_task.dart +++ b/packages/storage/amplify_storage_s3_dart/lib/src/storage_s3_service/service/task/s3_upload_task.dart @@ -48,9 +48,10 @@ const fallbackContentType = 'application/octet-stream'; class S3UploadTask { S3UploadTask._({ required s3.S3Client s3Client, - required smithy_aws.S3ClientConfig defaultS3ClientConfig, + required smithy_aws.S3ClientConfig s3ClientConfig, required S3PathResolver pathResolver, required String bucket, + required String awsRegion, required StoragePath path, required StorageUploadDataOptions options, S3DataPayload? dataPayload, @@ -59,9 +60,10 @@ class S3UploadTask { required AWSLogger logger, required transfer.TransferDatabase transferDatabase, }) : _s3Client = s3Client, - _defaultS3ClientConfig = defaultS3ClientConfig, + _s3ClientConfig = s3ClientConfig, _pathResolver = pathResolver, _bucket = bucket, + _awsRegion = awsRegion, _path = path, _options = options, _dataPayload = dataPayload, @@ -81,9 +83,10 @@ class S3UploadTask { S3UploadTask.fromDataPayload( S3DataPayload dataPayload, { required s3.S3Client s3Client, - required smithy_aws.S3ClientConfig defaultS3ClientConfig, + required smithy_aws.S3ClientConfig s3ClientConfig, required S3PathResolver pathResolver, required String bucket, + required String awsRegion, required StoragePath path, required StorageUploadDataOptions options, void Function(S3TransferProgress)? onProgress, @@ -91,9 +94,10 @@ class S3UploadTask { required transfer.TransferDatabase transferDatabase, }) : this._( s3Client: s3Client, - defaultS3ClientConfig: defaultS3ClientConfig, + s3ClientConfig: s3ClientConfig, pathResolver: pathResolver, bucket: bucket, + awsRegion: awsRegion, path: path, dataPayload: dataPayload, options: options, @@ -108,9 +112,10 @@ class S3UploadTask { S3UploadTask.fromAWSFile( AWSFile localFile, { required s3.S3Client s3Client, - required smithy_aws.S3ClientConfig defaultS3ClientConfig, + required smithy_aws.S3ClientConfig s3ClientConfig, required S3PathResolver pathResolver, required String bucket, + required String awsRegion, required StoragePath path, required StorageUploadDataOptions options, void Function(S3TransferProgress)? 
onProgress, @@ -118,9 +123,10 @@ class S3UploadTask { required transfer.TransferDatabase transferDatabase, }) : this._( s3Client: s3Client, - defaultS3ClientConfig: defaultS3ClientConfig, + s3ClientConfig: s3ClientConfig, pathResolver: pathResolver, bucket: bucket, + awsRegion: awsRegion, path: path, localFile: localFile, options: options, @@ -135,9 +141,10 @@ class S3UploadTask { final Completer _uploadCompleter = Completer(); final s3.S3Client _s3Client; - final smithy_aws.S3ClientConfig _defaultS3ClientConfig; + final smithy_aws.S3ClientConfig _s3ClientConfig; final S3PathResolver _pathResolver; final String _bucket; + final String _awsRegion; final StoragePath _path; final StorageUploadDataOptions _options; final void Function(S3TransferProgress)? _onProgress; @@ -191,7 +198,7 @@ class S3UploadTask { /// Should be used only internally. Future start() async { if (_s3PluginOptions.useAccelerateEndpoint && - _defaultS3ClientConfig.usePathStyle) { + _s3ClientConfig.usePathStyle) { _completeUploadWithError(s3_exception.accelerateEndpointUnusable); return; } @@ -328,7 +335,7 @@ class S3UploadTask { try { _putObjectOperation = _s3Client.putObject( putObjectRequest, - s3ClientConfig: _defaultS3ClientConfig.copyWith( + s3ClientConfig: _s3ClientConfig.copyWith( useAcceleration: _s3PluginOptions.useAccelerateEndpoint, ), ); @@ -497,6 +504,8 @@ class S3UploadTask { TransferRecord( uploadId: uploadId, objectKey: _resolvedPath, + bucketName: _bucket, + awsRegion: _awsRegion, createdAt: DateTime.now(), ), ); @@ -651,7 +660,7 @@ class S3UploadTask { try { final operation = _s3Client.uploadPart( request, - s3ClientConfig: _defaultS3ClientConfig.copyWith( + s3ClientConfig: _s3ClientConfig.copyWith( useAcceleration: _s3PluginOptions.useAccelerateEndpoint, ), ); diff --git a/packages/storage/amplify_storage_s3_dart/lib/src/storage_s3_service/transfer/database/database_io.dart b/packages/storage/amplify_storage_s3_dart/lib/src/storage_s3_service/transfer/database/database_io.dart index a16a66bcf3..1c105d8b32 100644 --- a/packages/storage/amplify_storage_s3_dart/lib/src/storage_s3_service/transfer/database/database_io.dart +++ b/packages/storage/amplify_storage_s3_dart/lib/src/storage_s3_service/transfer/database/database_io.dart @@ -35,7 +35,25 @@ class TransferDatabase extends $TransferDatabase // Bump the version number when any alteration is made into tables.dart @override - int get schemaVersion => 1; + int get schemaVersion => 2; + + @override + MigrationStrategy get migration { + return MigrationStrategy( + onCreate: (Migrator m) async { + await m.createAll(); + }, + onUpgrade: (Migrator m, int from, int to) async { + // Note: From schemaVersion 1->2 we added bucketName and awsRegion. 
+ // they are nullable columns so that on upgrade we need to update + // the transferRecords table to add these two columns + if (from < 2) { + await m.addColumn(transferRecords, transferRecords.bucketName); + await m.addColumn(transferRecords, transferRecords.awsRegion); + } + }, + ); + } @override Future> getMultipartUploadRecordsCreatedBefore( @@ -52,6 +70,8 @@ class TransferDatabase extends $TransferDatabase objectKey: e.objectKey, uploadId: e.uploadId, createdAt: DateTime.parse(e.createdAt), + bucketName: e.bucketName, + awsRegion: e.awsRegion, ), ) .get(); @@ -63,6 +83,8 @@ class TransferDatabase extends $TransferDatabase uploadId: record.uploadId, objectKey: record.objectKey, createdAt: record.createdAt.toIso8601String(), + bucketName: Value(record.bucketName), + awsRegion: Value(record.awsRegion), ); final value = await into(transferRecords).insert(entry); return value.toString(); diff --git a/packages/storage/amplify_storage_s3_dart/lib/src/storage_s3_service/transfer/database/tables.dart b/packages/storage/amplify_storage_s3_dart/lib/src/storage_s3_service/transfer/database/tables.dart index 00d5b5b22d..4d13dcea2a 100644 --- a/packages/storage/amplify_storage_s3_dart/lib/src/storage_s3_service/transfer/database/tables.dart +++ b/packages/storage/amplify_storage_s3_dart/lib/src/storage_s3_service/transfer/database/tables.dart @@ -20,4 +20,10 @@ class TransferRecords extends Table { /// Timestamp of [uploadId] creation. TextColumn get createdAt => text()(); + + /// Amazon S3 bucket name. + TextColumn get bucketName => text().nullable()(); + + /// AWS region of Amazon S3 bucket. + TextColumn get awsRegion => text().nullable()(); } diff --git a/packages/storage/amplify_storage_s3_dart/lib/src/storage_s3_service/transfer/database/tables.drift.dart b/packages/storage/amplify_storage_s3_dart/lib/src/storage_s3_service/transfer/database/tables.drift.dart index 30ebd91612..f50e5c57ce 100644 --- a/packages/storage/amplify_storage_s3_dart/lib/src/storage_s3_service/transfer/database/tables.drift.dart +++ b/packages/storage/amplify_storage_s3_dart/lib/src/storage_s3_service/transfer/database/tables.drift.dart @@ -38,8 +38,21 @@ class $TransferRecordsTable extends i2.TransferRecords late final i0.GeneratedColumn createdAt = i0.GeneratedColumn( 'created_at', aliasedName, false, type: i0.DriftSqlType.string, requiredDuringInsert: true); + static const i0.VerificationMeta _bucketNameMeta = + const i0.VerificationMeta('bucketName'); @override - List get $columns => [id, uploadId, objectKey, createdAt]; + late final i0.GeneratedColumn bucketName = i0.GeneratedColumn( + 'bucket_name', aliasedName, true, + type: i0.DriftSqlType.string, requiredDuringInsert: false); + static const i0.VerificationMeta _awsRegionMeta = + const i0.VerificationMeta('awsRegion'); + @override + late final i0.GeneratedColumn awsRegion = i0.GeneratedColumn( + 'aws_region', aliasedName, true, + type: i0.DriftSqlType.string, requiredDuringInsert: false); + @override + List get $columns => + [id, uploadId, objectKey, createdAt, bucketName, awsRegion]; @override String get aliasedName => _alias ?? 
actualTableName; @override @@ -72,6 +85,16 @@ class $TransferRecordsTable extends i2.TransferRecords } else if (isInserting) { context.missing(_createdAtMeta); } + if (data.containsKey('bucket_name')) { + context.handle( + _bucketNameMeta, + bucketName.isAcceptableOrUnknown( + data['bucket_name']!, _bucketNameMeta)); + } + if (data.containsKey('aws_region')) { + context.handle(_awsRegionMeta, + awsRegion.isAcceptableOrUnknown(data['aws_region']!, _awsRegionMeta)); + } return context; } @@ -89,6 +112,10 @@ class $TransferRecordsTable extends i2.TransferRecords .read(i0.DriftSqlType.string, data['${effectivePrefix}object_key'])!, createdAt: attachedDatabase.typeMapping .read(i0.DriftSqlType.string, data['${effectivePrefix}created_at'])!, + bucketName: attachedDatabase.typeMapping + .read(i0.DriftSqlType.string, data['${effectivePrefix}bucket_name']), + awsRegion: attachedDatabase.typeMapping + .read(i0.DriftSqlType.string, data['${effectivePrefix}aws_region']), ); } @@ -111,11 +138,19 @@ class TransferRecord extends i0.DataClass /// Timestamp of [uploadId] creation. final String createdAt; + + /// Amazon S3 bucket name. + final String? bucketName; + + /// AWS region of Amazon S3 bucket. + final String? awsRegion; const TransferRecord( {required this.id, required this.uploadId, required this.objectKey, - required this.createdAt}); + required this.createdAt, + this.bucketName, + this.awsRegion}); @override Map toColumns(bool nullToAbsent) { final map = {}; @@ -123,6 +158,12 @@ class TransferRecord extends i0.DataClass map['upload_id'] = i0.Variable(uploadId); map['object_key'] = i0.Variable(objectKey); map['created_at'] = i0.Variable(createdAt); + if (!nullToAbsent || bucketName != null) { + map['bucket_name'] = i0.Variable(bucketName); + } + if (!nullToAbsent || awsRegion != null) { + map['aws_region'] = i0.Variable(awsRegion); + } return map; } @@ -132,6 +173,12 @@ class TransferRecord extends i0.DataClass uploadId: i0.Value(uploadId), objectKey: i0.Value(objectKey), createdAt: i0.Value(createdAt), + bucketName: bucketName == null && nullToAbsent + ? const i0.Value.absent() + : i0.Value(bucketName), + awsRegion: awsRegion == null && nullToAbsent + ? const i0.Value.absent() + : i0.Value(awsRegion), ); } @@ -143,6 +190,8 @@ class TransferRecord extends i0.DataClass uploadId: serializer.fromJson(json['uploadId']), objectKey: serializer.fromJson(json['objectKey']), createdAt: serializer.fromJson(json['createdAt']), + bucketName: serializer.fromJson(json['bucketName']), + awsRegion: serializer.fromJson(json['awsRegion']), ); } @override @@ -153,16 +202,25 @@ class TransferRecord extends i0.DataClass 'uploadId': serializer.toJson(uploadId), 'objectKey': serializer.toJson(objectKey), 'createdAt': serializer.toJson(createdAt), + 'bucketName': serializer.toJson(bucketName), + 'awsRegion': serializer.toJson(awsRegion), }; } i1.TransferRecord copyWith( - {int? id, String? uploadId, String? objectKey, String? createdAt}) => + {int? id, + String? uploadId, + String? objectKey, + String? createdAt, + i0.Value bucketName = const i0.Value.absent(), + i0.Value awsRegion = const i0.Value.absent()}) => i1.TransferRecord( id: id ?? this.id, uploadId: uploadId ?? this.uploadId, objectKey: objectKey ?? this.objectKey, createdAt: createdAt ?? this.createdAt, + bucketName: bucketName.present ? bucketName.value : this.bucketName, + awsRegion: awsRegion.present ? 
awsRegion.value : this.awsRegion, ); @override String toString() { @@ -170,13 +228,16 @@ class TransferRecord extends i0.DataClass ..write('id: $id, ') ..write('uploadId: $uploadId, ') ..write('objectKey: $objectKey, ') - ..write('createdAt: $createdAt') + ..write('createdAt: $createdAt, ') + ..write('bucketName: $bucketName, ') + ..write('awsRegion: $awsRegion') ..write(')')) .toString(); } @override - int get hashCode => Object.hash(id, uploadId, objectKey, createdAt); + int get hashCode => + Object.hash(id, uploadId, objectKey, createdAt, bucketName, awsRegion); @override bool operator ==(Object other) => identical(this, other) || @@ -184,7 +245,9 @@ class TransferRecord extends i0.DataClass other.id == this.id && other.uploadId == this.uploadId && other.objectKey == this.objectKey && - other.createdAt == this.createdAt); + other.createdAt == this.createdAt && + other.bucketName == this.bucketName && + other.awsRegion == this.awsRegion); } class TransferRecordsCompanion extends i0.UpdateCompanion { @@ -192,17 +255,23 @@ class TransferRecordsCompanion extends i0.UpdateCompanion { final i0.Value uploadId; final i0.Value objectKey; final i0.Value createdAt; + final i0.Value bucketName; + final i0.Value awsRegion; const TransferRecordsCompanion({ this.id = const i0.Value.absent(), this.uploadId = const i0.Value.absent(), this.objectKey = const i0.Value.absent(), this.createdAt = const i0.Value.absent(), + this.bucketName = const i0.Value.absent(), + this.awsRegion = const i0.Value.absent(), }); TransferRecordsCompanion.insert({ this.id = const i0.Value.absent(), required String uploadId, required String objectKey, required String createdAt, + this.bucketName = const i0.Value.absent(), + this.awsRegion = const i0.Value.absent(), }) : uploadId = i0.Value(uploadId), objectKey = i0.Value(objectKey), createdAt = i0.Value(createdAt); @@ -211,12 +280,16 @@ class TransferRecordsCompanion extends i0.UpdateCompanion { i0.Expression? uploadId, i0.Expression? objectKey, i0.Expression? createdAt, + i0.Expression? bucketName, + i0.Expression? awsRegion, }) { return i0.RawValuesInsertable({ if (id != null) 'id': id, if (uploadId != null) 'upload_id': uploadId, if (objectKey != null) 'object_key': objectKey, if (createdAt != null) 'created_at': createdAt, + if (bucketName != null) 'bucket_name': bucketName, + if (awsRegion != null) 'aws_region': awsRegion, }); } @@ -224,12 +297,16 @@ class TransferRecordsCompanion extends i0.UpdateCompanion { {i0.Value? id, i0.Value? uploadId, i0.Value? objectKey, - i0.Value? createdAt}) { + i0.Value? createdAt, + i0.Value? bucketName, + i0.Value? awsRegion}) { return i1.TransferRecordsCompanion( id: id ?? this.id, uploadId: uploadId ?? this.uploadId, objectKey: objectKey ?? this.objectKey, createdAt: createdAt ?? this.createdAt, + bucketName: bucketName ?? this.bucketName, + awsRegion: awsRegion ?? 
this.awsRegion, ); } @@ -248,6 +325,12 @@ class TransferRecordsCompanion extends i0.UpdateCompanion { if (createdAt.present) { map['created_at'] = i0.Variable(createdAt.value); } + if (bucketName.present) { + map['bucket_name'] = i0.Variable(bucketName.value); + } + if (awsRegion.present) { + map['aws_region'] = i0.Variable(awsRegion.value); + } return map; } @@ -257,7 +340,9 @@ class TransferRecordsCompanion extends i0.UpdateCompanion { ..write('id: $id, ') ..write('uploadId: $uploadId, ') ..write('objectKey: $objectKey, ') - ..write('createdAt: $createdAt') + ..write('createdAt: $createdAt, ') + ..write('bucketName: $bucketName, ') + ..write('awsRegion: $awsRegion') ..write(')')) .toString(); } @@ -269,6 +354,8 @@ typedef $$TransferRecordsTableInsertCompanionBuilder required String uploadId, required String objectKey, required String createdAt, + i0.Value bucketName, + i0.Value awsRegion, }); typedef $$TransferRecordsTableUpdateCompanionBuilder = i1.TransferRecordsCompanion Function({ @@ -276,6 +363,8 @@ typedef $$TransferRecordsTableUpdateCompanionBuilder i0.Value uploadId, i0.Value objectKey, i0.Value createdAt, + i0.Value bucketName, + i0.Value awsRegion, }); class $$TransferRecordsTableTableManager extends i0.RootTableManager< @@ -303,24 +392,32 @@ class $$TransferRecordsTableTableManager extends i0.RootTableManager< i0.Value uploadId = const i0.Value.absent(), i0.Value objectKey = const i0.Value.absent(), i0.Value createdAt = const i0.Value.absent(), + i0.Value bucketName = const i0.Value.absent(), + i0.Value awsRegion = const i0.Value.absent(), }) => i1.TransferRecordsCompanion( id: id, uploadId: uploadId, objectKey: objectKey, createdAt: createdAt, + bucketName: bucketName, + awsRegion: awsRegion, ), getInsertCompanionBuilder: ({ i0.Value id = const i0.Value.absent(), required String uploadId, required String objectKey, required String createdAt, + i0.Value bucketName = const i0.Value.absent(), + i0.Value awsRegion = const i0.Value.absent(), }) => i1.TransferRecordsCompanion.insert( id: id, uploadId: uploadId, objectKey: objectKey, createdAt: createdAt, + bucketName: bucketName, + awsRegion: awsRegion, ), )); } @@ -360,6 +457,16 @@ class $$TransferRecordsTableFilterComposer column: $state.table.createdAt, builder: (column, joinBuilders) => i0.ColumnFilters(column, joinBuilders: joinBuilders)); + + i0.ColumnFilters get bucketName => $state.composableBuilder( + column: $state.table.bucketName, + builder: (column, joinBuilders) => + i0.ColumnFilters(column, joinBuilders: joinBuilders)); + + i0.ColumnFilters get awsRegion => $state.composableBuilder( + column: $state.table.awsRegion, + builder: (column, joinBuilders) => + i0.ColumnFilters(column, joinBuilders: joinBuilders)); } class $$TransferRecordsTableOrderingComposer extends i0 @@ -384,4 +491,14 @@ class $$TransferRecordsTableOrderingComposer extends i0 column: $state.table.createdAt, builder: (column, joinBuilders) => i0.ColumnOrderings(column, joinBuilders: joinBuilders)); + + i0.ColumnOrderings get bucketName => $state.composableBuilder( + column: $state.table.bucketName, + builder: (column, joinBuilders) => + i0.ColumnOrderings(column, joinBuilders: joinBuilders)); + + i0.ColumnOrderings get awsRegion => $state.composableBuilder( + column: $state.table.awsRegion, + builder: (column, joinBuilders) => + i0.ColumnOrderings(column, joinBuilders: joinBuilders)); } diff --git a/packages/storage/amplify_storage_s3_dart/lib/src/storage_s3_service/transfer/database/transfer_record.dart 
b/packages/storage/amplify_storage_s3_dart/lib/src/storage_s3_service/transfer/database/transfer_record.dart index 3607e8a0ed..e5911c8d22 100644 --- a/packages/storage/amplify_storage_s3_dart/lib/src/storage_s3_service/transfer/database/transfer_record.dart +++ b/packages/storage/amplify_storage_s3_dart/lib/src/storage_s3_service/transfer/database/transfer_record.dart @@ -18,6 +18,8 @@ class TransferRecord { required this.uploadId, required this.objectKey, required this.createdAt, + this.bucketName, + this.awsRegion, }); /// creates new [TransferRecord] object from a [json] map. @@ -40,6 +42,12 @@ class TransferRecord { /// Timestamp of [uploadId] creation. final DateTime createdAt; + /// Amazon S3 bucket name. + final String? bucketName; + + /// AWS region of Amazon S3 bucket. + final String? awsRegion; + /// return json map representation of [TransferRecord] object. Map toJson() => _$TransferRecordToJson(this); diff --git a/packages/storage/amplify_storage_s3_dart/lib/src/storage_s3_service/transfer/database/transfer_record.g.dart b/packages/storage/amplify_storage_s3_dart/lib/src/storage_s3_service/transfer/database/transfer_record.g.dart index 270e70dd8f..d9783e3a7c 100644 --- a/packages/storage/amplify_storage_s3_dart/lib/src/storage_s3_service/transfer/database/transfer_record.g.dart +++ b/packages/storage/amplify_storage_s3_dart/lib/src/storage_s3_service/transfer/database/transfer_record.g.dart @@ -11,6 +11,8 @@ TransferRecord _$TransferRecordFromJson(Map json) => uploadId: json['uploadId'] as String, objectKey: json['objectKey'] as String, createdAt: DateTime.parse(json['createdAt'] as String), + bucketName: json['bucketName'] as String?, + awsRegion: json['awsRegion'] as String?, ); Map _$TransferRecordToJson(TransferRecord instance) => @@ -18,4 +20,6 @@ Map _$TransferRecordToJson(TransferRecord instance) => 'uploadId': instance.uploadId, 'objectKey': instance.objectKey, 'createdAt': instance.createdAt.toIso8601String(), + 'bucketName': instance.bucketName, + 'awsRegion': instance.awsRegion, }; diff --git a/packages/storage/amplify_storage_s3_dart/test/amplify_storage_s3_dart_test.dart b/packages/storage/amplify_storage_s3_dart/test/amplify_storage_s3_dart_test.dart index 17015eb5f7..9cd6013373 100644 --- a/packages/storage/amplify_storage_s3_dart/test/amplify_storage_s3_dart_test.dart +++ b/packages/storage/amplify_storage_s3_dart/test/amplify_storage_s3_dart_test.dart @@ -140,6 +140,7 @@ void main() { const testOptions = StorageListOptions( pluginOptions: S3ListPluginOptions(excludeSubPaths: true), nextToken: 'next-token-123', + bucket: StorageBucket.fromBucketInfo(BucketInfo(bucketName: 'unit-test-bucket', region: 'us-east-2')), pageSize: 2, ); @@ -534,6 +535,14 @@ void main() { const StorageUploadDataOptions(), ); registerFallbackValue(const S3DataPayload.empty()); + registerFallbackValue( + const StorageBucket.fromBucketInfo( + BucketInfo( + bucketName: 'bucketName', + region: 'region', + ), + ), + ); }); test('should forward default options to StorageS3Service.uploadData API', @@ -631,6 +640,48 @@ void main() { ); }); + test('should forward bucket to StorageS3Service.uploadData API', + () async { + const testBucket = StorageBucket.fromBucketInfo( + BucketInfo( + bucketName: 'test-bucket', + region: 'test-region', + ), + ); + when( + () => storageS3Service.uploadData( + path: testPath, + dataPayload: any(named: 'dataPayload'), + options: any(named: 'options'), + bucket: testBucket, + ), + ).thenAnswer((_) => testS3UploadTask); + + when(() => 
+      when(() => testS3UploadTask.result).thenAnswer((_) async => testItem);
+      uploadDataOperation = storageS3Plugin.uploadData(
+        data: testData,
+        path: testPath,
+        bucket: testBucket,
+      );
+      final capturedBucket = verify(
+        () => storageS3Service.uploadData(
+          path: testPath,
+          dataPayload: any(named: 'dataPayload'),
+          options: any(
+            named: 'options',
+          ),
+          bucket: captureAny(
+            named: 'bucket',
+          ),
+        ),
+      ).captured.last;
+
+      expect(
+        capturedBucket,
+        testBucket,
+      );
+    });
+
     test('should forward options.metadata to StorageS3Service.uploadData API',
         () async {
       const testMetadata = {
diff --git a/packages/storage/amplify_storage_s3_dart/test/storage_s3_service/storage_s3_service_test.dart b/packages/storage/amplify_storage_s3_dart/test/storage_s3_service/storage_s3_service_test.dart
index aec65db336..0ca3760d5e 100644
--- a/packages/storage/amplify_storage_s3_dart/test/storage_s3_service/storage_s3_service_test.dart
+++ b/packages/storage/amplify_storage_s3_dart/test/storage_s3_service/storage_s3_service_test.dart
@@ -4,6 +4,7 @@
 import 'dart:async';
 
 import 'package:amplify_core/amplify_core.dart' hide PaginatedResult;
+import 'package:amplify_core/src/config/amplify_outputs/storage/bucket_outputs.dart';
 import 'package:amplify_core/src/config/amplify_outputs/storage/storage_outputs.dart';
 import 'package:amplify_storage_s3_dart/amplify_storage_s3_dart.dart';
 import 'package:amplify_storage_s3_dart/src/exception/s3_storage_exception.dart';
@@ -25,9 +26,18 @@ const testPath = StoragePath.fromString('some/path.txt');
 void main() {
   group('StorageS3Service', () {
     const testBucket = 'bucket1';
+    const testBucketName = 'bucket1-name';
     const testRegion = 'west-2';
-    const storageOutputs =
-        StorageOutputs(bucketName: testBucket, awsRegion: testRegion);
+    const testBuckets = BucketOutputs(
+      name: testBucket,
+      bucketName: testBucketName,
+      awsRegion: testRegion,
+    );
+    const storageOutputs = StorageOutputs(
+      bucketName: testBucket,
+      awsRegion: testRegion,
+      buckets: [testBuckets],
+    );
     final pathResolver = TestPathResolver();
 
     late DependencyManager dependencyManager;
@@ -35,14 +45,20 @@ void main() {
     late StorageS3Service storageS3Service;
     late AWSLogger logger;
     late AWSSigV4Signer awsSigV4Signer;
+    late AmplifyUserAgent mockUserAgent;
+    late AWSHttpClient mockAwsHttpClient;
 
     setUp(() {
       s3Client = MockS3Client();
       logger = MockAWSLogger();
       awsSigV4Signer = MockAWSSigV4Signer();
+      mockUserAgent = MockAmplifyUserAgent();
+      mockAwsHttpClient = MockAWSHttpClient();
       dependencyManager = DependencyManager()
         ..addInstance(s3Client)
-        ..addInstance(awsSigV4Signer);
+        ..addInstance(awsSigV4Signer)
+        ..addInstance(mockUserAgent)
+        ..addInstance(mockAwsHttpClient);
       storageS3Service = StorageS3Service(
         storageOutputs: storageOutputs,
         pathResolver: pathResolver,
@@ -69,6 +85,19 @@
       expect(message, contains('Since your bucket name contains dots'));
     });
 
+    test('creates and caches s3 client info for each storage bucket', () {
+      final client1 = storageS3Service.getS3ClientInfo(
+        storageBucket: const StorageBucket.fromBucketInfo(
+          BucketInfo(bucketName: testBucketName, region: testRegion),
+        ),
+      );
+      final client2 = storageS3Service.getS3ClientInfo(
+        storageBucket: StorageBucket.fromOutputs(testBucket),
+      );
+
+      expect(client1, client2);
+    });
+
     group('list() API', () {
       late S3ListResult listResult;
       const testNextContinuationToken = 'get-next-page';
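Taken together, the plugin test and the service test above cover the new call path end to end: a caller names a bucket either by the friendly name declared in the backend outputs or by an explicit bucket/region pair, and the service resolves (and caches) one S3 client per bucket. A rough usage sketch follows; the friendly name 'secondary-bucket', the payload helper, and the Amplify.Storage call shape are assumptions drawn from the public API rather than from this changeset:

import 'package:amplify_flutter/amplify_flutter.dart';

Future<void> uploadToSecondaryBucket() async {
  // Either resolve the bucket from amplify_outputs by its friendly name...
  const byName = StorageBucket.fromOutputs('secondary-bucket');
  // ...or point at an arbitrary bucket directly by name and region.
  const byInfo = StorageBucket.fromBucketInfo(
    BucketInfo(bucketName: 'my-secondary-bucket', region: 'us-west-2'),
  );

  // The bucket travels alongside the usual arguments; the tests above verify
  // that it is forwarded to StorageS3Service.uploadData unchanged.
  await Amplify.Storage.uploadData(
    path: const StoragePath.fromString('public/hello.txt'),
    data: StorageDataPayload.string('hello multi-bucket'),
    bucket: byName, // or byInfo
  ).result;
}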
diff --git a/packages/storage/amplify_storage_s3_dart/test/storage_s3_service/task/s3_upload_task_test.dart b/packages/storage/amplify_storage_s3_dart/test/storage_s3_service/task/s3_upload_task_test.dart
index 094aef27c4..691523feb6 100644
--- a/packages/storage/amplify_storage_s3_dart/test/storage_s3_service/task/s3_upload_task_test.dart
+++ b/packages/storage/amplify_storage_s3_dart/test/storage_s3_service/task/s3_upload_task_test.dart
@@ -27,6 +27,7 @@ void main() {
   late AWSLogger logger;
   late transfer.TransferDatabase transferDatabase;
   const testBucket = 'fake-bucket';
+  const testRegion = 'test-region';
   const defaultS3ClientConfig = smithy_aws.S3ClientConfig();
   final pathResolver = TestPathResolver();
   const testUploadDataOptions = StorageUploadDataOptions();
@@ -112,9 +113,10 @@
       final uploadDataTask = S3UploadTask.fromDataPayload(
         testDataPayload,
         s3Client: s3Client,
-        defaultS3ClientConfig: defaultS3ClientConfig,
+        s3ClientConfig: defaultS3ClientConfig,
         pathResolver: pathResolver,
         bucket: testBucket,
+        awsRegion: testRegion,
         path: testPath,
         options: const StorageUploadDataOptions(),
         logger: logger,
@@ -171,9 +173,10 @@
       final uploadDataTask = S3UploadTask.fromDataPayload(
         testDataPayload,
         s3Client: s3Client,
-        defaultS3ClientConfig: defaultS3ClientConfig,
+        s3ClientConfig: defaultS3ClientConfig,
         pathResolver: pathResolver,
         bucket: testBucket,
+        awsRegion: testRegion,
         path: testPath,
         options: testUploadDataOptions,
         logger: logger,
@@ -221,9 +224,10 @@
       final uploadDataTask = S3UploadTask.fromDataPayload(
         testDataPayloadBytes,
         s3Client: s3Client,
-        defaultS3ClientConfig: defaultS3ClientConfig,
+        s3ClientConfig: defaultS3ClientConfig,
         pathResolver: pathResolver,
         bucket: testBucket,
+        awsRegion: testRegion,
         path: testPath,
         options: testUploadDataOptions,
         logger: logger,
@@ -290,9 +294,10 @@
       final uploadDataTask = S3UploadTask.fromDataPayload(
         testDataPayload,
         s3Client: s3Client,
-        defaultS3ClientConfig: defaultS3ClientConfig,
+        s3ClientConfig: defaultS3ClientConfig,
         pathResolver: pathResolver,
         bucket: testBucket,
+        awsRegion: testRegion,
         path: testPath,
         options: testUploadDataOptions,
         logger: logger,
@@ -333,9 +338,10 @@
       final uploadDataTask = S3UploadTask.fromDataPayload(
         testDataPayload,
         s3Client: s3Client,
-        defaultS3ClientConfig: defaultS3ClientConfig,
+        s3ClientConfig: defaultS3ClientConfig,
         pathResolver: pathResolver,
         bucket: testBucket,
+        awsRegion: testRegion,
         path: testPath,
         options: testUploadDataOptions,
         logger: logger,
@@ -368,9 +374,10 @@
       final uploadDataTask = S3UploadTask.fromDataPayload(
         testDataPayload,
         s3Client: s3Client,
-        defaultS3ClientConfig: defaultS3ClientConfig,
+        s3ClientConfig: defaultS3ClientConfig,
         pathResolver: pathResolver,
         bucket: testBucket,
+        awsRegion: testRegion,
         path: testPath,
         options: testUploadDataOptions,
         logger: logger,
@@ -413,9 +420,10 @@
       final uploadDataTask = S3UploadTask.fromDataPayload(
         testDataPayload,
         s3Client: s3Client,
-        defaultS3ClientConfig: defaultS3ClientConfig,
+        s3ClientConfig: defaultS3ClientConfig,
         pathResolver: pathResolver,
         bucket: testBucket,
+        awsRegion: testRegion,
         path: testPath,
         options: testUploadDataOptions,
         logger: logger,
@@ -465,9 +473,10 @@
       final uploadDataTask = S3UploadTask.fromAWSFile(
         testLocalFile,
         s3Client: s3Client,
-        defaultS3ClientConfig: defaultS3ClientConfig,
+        s3ClientConfig: defaultS3ClientConfig,
         pathResolver: pathResolver,
         bucket: testBucket,
+        awsRegion: testRegion,
         path: const StoragePath.fromString(testKey),
         options: testUploadDataOptions,
         logger: logger,
@@ -525,9 +534,10 @@
       final uploadDataTask = S3UploadTask.fromAWSFile(
         testLocalFile,
         s3Client: s3Client,
-        defaultS3ClientConfig: defaultS3ClientConfig,
+        s3ClientConfig: defaultS3ClientConfig,
         pathResolver: pathResolver,
         bucket: testBucket,
+        awsRegion: testRegion,
         path: const StoragePath.fromString(testKey),
         options: testUploadDataOptions,
         logger: logger,
@@ -581,9 +591,10 @@ void main() {
       final uploadDataTask = S3UploadTask.fromAWSFile(
         testLocalFile,
         s3Client: s3Client,
-        defaultS3ClientConfig: defaultS3ClientConfig,
+        s3ClientConfig: defaultS3ClientConfig,
         pathResolver: pathResolver,
         bucket: testBucket,
+        awsRegion: testRegion,
         path: const StoragePath.fromString(testKey),
         options: testUploadDataOptions,
         logger: logger,
@@ -635,9 +646,10 @@ void main() {
       final uploadDataTask = S3UploadTask.fromAWSFile(
         testLocalFile,
         s3Client: s3Client,
-        defaultS3ClientConfig: defaultS3ClientConfig,
+        s3ClientConfig: defaultS3ClientConfig,
         pathResolver: pathResolver,
         bucket: testBucket,
+        awsRegion: testRegion,
         path: const StoragePath.fromString(testKey),
         options: testUploadDataOptions,
         logger: logger,
@@ -773,9 +785,10 @@ void main() {
       final uploadTask = S3UploadTask.fromAWSFile(
         testLocalFile,
         s3Client: s3Client,
-        defaultS3ClientConfig: defaultS3ClientConfig,
+        s3ClientConfig: defaultS3ClientConfig,
         pathResolver: pathResolver,
         bucket: testBucket,
+        awsRegion: testRegion,
         path: const StoragePath.fromString(testKey),
         options: testUploadDataOptions,
         logger: logger,
@@ -955,9 +968,10 @@ void main() {
       final uploadTask = S3UploadTask.fromAWSFile(
         testLocalFile,
         s3Client: s3Client,
-        defaultS3ClientConfig: defaultS3ClientConfig,
+        s3ClientConfig: defaultS3ClientConfig,
         pathResolver: pathResolver,
         bucket: testBucket,
+        awsRegion: testRegion,
         path: const StoragePath.fromString(testKey),
         options: testUploadDataOptions,
         logger: logger,
@@ -1047,9 +1061,10 @@ void main() {
       final uploadTask = S3UploadTask.fromAWSFile(
         testLocalFileWithoutContentType,
         s3Client: s3Client,
-        defaultS3ClientConfig: defaultS3ClientConfig,
+        s3ClientConfig: defaultS3ClientConfig,
         pathResolver: pathResolver,
         bucket: testBucket,
+        awsRegion: testRegion,
         path: const StoragePath.fromString(testKey),
         options: testUploadDataOptions,
         logger: logger,
@@ -1148,9 +1163,10 @@ void main() {
       final uploadTask = S3UploadTask.fromAWSFile(
         testLocalFile,
         s3Client: s3Client,
-        defaultS3ClientConfig: defaultS3ClientConfig,
+        s3ClientConfig: defaultS3ClientConfig,
         pathResolver: pathResolver,
         bucket: testBucket,
+        awsRegion: testRegion,
         path: const StoragePath.fromString(testKey),
         options: testUploadDataOptions,
         logger: logger,
@@ -1185,9 +1201,10 @@ void main() {
       final uploadTask = S3UploadTask.fromAWSFile(
         testLocalFile,
         s3Client: s3Client,
-        defaultS3ClientConfig: defaultS3ClientConfig,
+        s3ClientConfig: defaultS3ClientConfig,
         pathResolver: pathResolver,
         bucket: testBucket,
+        awsRegion: testRegion,
         path: const StoragePath.fromString(testKey),
         options: testUploadDataOptions,
         logger: logger,
@@ -1217,9 +1234,10 @@ void main() {
       final uploadTask = S3UploadTask.fromAWSFile(
         testBadFile,
         s3Client: s3Client,
-        defaultS3ClientConfig: defaultS3ClientConfig,
+        s3ClientConfig: defaultS3ClientConfig,
         pathResolver: pathResolver,
         bucket: testBucket,
+        awsRegion: testRegion,
         path: const StoragePath.fromString(testKey),
         options: testUploadDataOptions,
         logger: logger,
@@ -1246,9 +1264,10 @@ void main() {
       final uploadTask = S3UploadTask.fromAWSFile(
         testLocalFile,
         s3Client: s3Client,
-        defaultS3ClientConfig: defaultS3ClientConfig,
+        s3ClientConfig: defaultS3ClientConfig,
         pathResolver: pathResolver,
         bucket: testBucket,
+        awsRegion: testRegion,
         path: const StoragePath.fromString(testKey),
         options: testUploadDataOptions,
         logger: logger,
@@ -1290,9 +1309,10 @@ void main() {
       final uploadTask = S3UploadTask.fromAWSFile(
         testLocalFile,
         s3Client: s3Client,
-        defaultS3ClientConfig: defaultS3ClientConfig,
+        s3ClientConfig: defaultS3ClientConfig,
         pathResolver: pathResolver,
         bucket: testBucket,
+        awsRegion: testRegion,
         path: const StoragePath.fromString(testKey),
         options: const StorageUploadDataOptions(),
         logger: logger,
@@ -1333,9 +1353,10 @@ void main() {
       final uploadTask = S3UploadTask.fromAWSFile(
         testLocalFile,
         s3Client: s3Client,
-        defaultS3ClientConfig: defaultS3ClientConfig,
+        s3ClientConfig: defaultS3ClientConfig,
         pathResolver: pathResolver,
         bucket: testBucket,
+        awsRegion: testRegion,
         path: const StoragePath.fromString(testKey),
         options: testUploadDataOptions,
         logger: logger,
@@ -1376,9 +1397,10 @@ void main() {
       final uploadTask = S3UploadTask.fromAWSFile(
         testLocalFile,
         s3Client: s3Client,
-        defaultS3ClientConfig: defaultS3ClientConfig,
+        s3ClientConfig: defaultS3ClientConfig,
         pathResolver: pathResolver,
         bucket: testBucket,
+        awsRegion: testRegion,
         path: const StoragePath.fromString(testKey),
         options: testUploadDataOptions,
         logger: logger,
@@ -1457,9 +1479,10 @@ void main() {
       final uploadTask = S3UploadTask.fromAWSFile(
         testLocalFile,
         s3Client: s3Client,
-        defaultS3ClientConfig: defaultS3ClientConfig,
+        s3ClientConfig: defaultS3ClientConfig,
         pathResolver: pathResolver,
         bucket: testBucket,
+        awsRegion: testRegion,
         path: const StoragePath.fromString(testKey),
         options: testUploadDataOptions,
         logger: logger,
@@ -1549,9 +1572,10 @@ void main() {
       final uploadTask = S3UploadTask.fromAWSFile(
         testLocalFile,
         s3Client: s3Client,
-        defaultS3ClientConfig: defaultS3ClientConfig,
+        s3ClientConfig: defaultS3ClientConfig,
         pathResolver: pathResolver,
         bucket: testBucket,
+        awsRegion: testRegion,
         path: const StoragePath.fromString(testKey),
         options: const StorageUploadDataOptions(),
         logger: logger,
@@ -1640,9 +1664,10 @@ void main() {
       final uploadTask = S3UploadTask.fromAWSFile(
         testLocalFile,
         s3Client: s3Client,
-        defaultS3ClientConfig: defaultS3ClientConfig,
+        s3ClientConfig: defaultS3ClientConfig,
         pathResolver: pathResolver,
         bucket: testBucket,
+        awsRegion: testRegion,
         path: const StoragePath.fromString(testKey),
         options: testUploadDataOptions,
         logger: logger,
@@ -1710,9 +1735,10 @@ void main() {
       final uploadTask = S3UploadTask.fromAWSFile(
         testLocalFile,
         s3Client: s3Client,
-        defaultS3ClientConfig: defaultS3ClientConfig,
+        s3ClientConfig: defaultS3ClientConfig,
         pathResolver: pathResolver,
         bucket: testBucket,
+        awsRegion: testRegion,
         path: const StoragePath.fromString(testKey),
         options: testUploadDataOptions,
         logger: logger,
@@ -1866,9 +1892,10 @@ void main() {
       final uploadTask = S3UploadTask.fromAWSFile(
         testLocalFile,
         s3Client: s3Client,
-        defaultS3ClientConfig: defaultS3ClientConfig,
+        s3ClientConfig: defaultS3ClientConfig,
         pathResolver: pathResolver,
         bucket: testBucket,
+        awsRegion: testRegion,
         path: const StoragePath.fromString(testKey),
         options: testUploadDataOptions,
         logger: logger,
@@ -1924,9 +1951,10 @@ void main() {
       final uploadTask = S3UploadTask.fromAWSFile(
         testLocalFile,
         s3Client: s3Client,
-        defaultS3ClientConfig: defaultS3ClientConfig,
+        s3ClientConfig: defaultS3ClientConfig,
         pathResolver: pathResolver,
         bucket: testBucket,
+        awsRegion: testRegion,
         path: const StoragePath.fromString(testKey),
         options: testUploadDataOptions,
         logger: logger,
@@ -1988,10 +2016,10 @@ void main() {
       final uploadTask = S3UploadTask.fromAWSFile(
         AWSFile.fromPath('fake/file.jpg'),
         s3Client: s3Client,
-        defaultS3ClientConfig:
-            const smithy_aws.S3ClientConfig(usePathStyle: true),
+        s3ClientConfig: const smithy_aws.S3ClientConfig(usePathStyle: true),
         pathResolver: pathResolver,
         bucket: testBucket,
+        awsRegion: testRegion,
         path: const StoragePath.fromString(testKey),
         options: const StorageUploadDataOptions(
           pluginOptions: S3UploadDataPluginOptions(
diff --git a/packages/storage/amplify_storage_s3_dart/test/storage_s3_service/transfer/database_html_test.dart b/packages/storage/amplify_storage_s3_dart/test/storage_s3_service/transfer/database_html_test.dart
index 73ae16eb75..9a92df808e 100644
--- a/packages/storage/amplify_storage_s3_dart/test/storage_s3_service/transfer/database_html_test.dart
+++ b/packages/storage/amplify_storage_s3_dart/test/storage_s3_service/transfer/database_html_test.dart
@@ -12,12 +12,16 @@ void main() {
   group('TransferDatabase for web', () {
     const testUploadId = 'test-upload-Id';
     const testObjectKey = 'test-object-Key';
+    const testBucketName = 'test-bucket-name';
+    const testAwsRegion = 'test-aws-region';
     final testCreatedAt = DateTime(2022, 1, 1);
 
     final testTransferRecord = TransferRecord(
       uploadId: testUploadId,
       objectKey: testObjectKey,
       createdAt: testCreatedAt,
+      bucketName: testBucketName,
+      awsRegion: testAwsRegion,
     );
 
     final testTransferRecordJsonString = testTransferRecord.toJsonString();
diff --git a/packages/storage/amplify_storage_s3_dart/test/test_utils/mocks.dart b/packages/storage/amplify_storage_s3_dart/test/test_utils/mocks.dart
index 1136704cde..1954fb75fa 100644
--- a/packages/storage/amplify_storage_s3_dart/test/test_utils/mocks.dart
+++ b/packages/storage/amplify_storage_s3_dart/test/test_utils/mocks.dart
@@ -27,3 +27,7 @@ class MockS3UploadTask extends Mock implements S3UploadTask {}
 class MockTransferDatabase extends Mock implements TransferDatabase {}
 
 class MockSmithyOperation extends Mock implements SmithyOperation {}
+
+class MockAmplifyUserAgent extends Mock implements AmplifyUserAgent {}
+
+class MockAWSHttpClient extends Mock implements AWSHttpClient {}
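A closing note on the two mocks added above: they exist because StorageS3Service now resolves an AmplifyUserAgent and an AWSHttpClient from its DependencyManager (see the setUp block in storage_s3_service_test.dart earlier in this diff). A minimal sketch of wiring them into a test, assuming mocktail; only the mock class names and addInstance usage come from this changeset, the rest is illustrative:

import 'package:amplify_core/amplify_core.dart';
import 'package:mocktail/mocktail.dart';

class MockAmplifyUserAgent extends Mock implements AmplifyUserAgent {}

class MockAWSHttpClient extends Mock implements AWSHttpClient {}

void main() {
  // Declare the instances with their interface types so addInstance registers
  // them under AmplifyUserAgent / AWSHttpClient, mirroring the service test.
  final AmplifyUserAgent mockUserAgent = MockAmplifyUserAgent();
  final AWSHttpClient mockAwsHttpClient = MockAWSHttpClient();

  final dependencyManager = DependencyManager()
    ..addInstance(mockUserAgent)
    ..addInstance(mockAwsHttpClient);

  // dependencyManager can now be passed to StorageS3Service, as the tests
  // above do, without touching a real HTTP client.
}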