From 93db12d258d9d8012aa151686ae00a69d320e124 Mon Sep 17 00:00:00 2001
From: deepanker13
Date: Fri, 5 Apr 2024 20:01:39 +0530
Subject: [PATCH] fixing python black test

Signed-off-by: deepanker13
---
 sdk/python/kubeflow/storage_initializer/s3.py | 12 +++++-------
 1 file changed, 5 insertions(+), 7 deletions(-)

diff --git a/sdk/python/kubeflow/storage_initializer/s3.py b/sdk/python/kubeflow/storage_initializer/s3.py
index a17d872788..5f60bbc72d 100644
--- a/sdk/python/kubeflow/storage_initializer/s3.py
+++ b/sdk/python/kubeflow/storage_initializer/s3.py
@@ -47,10 +47,10 @@ def download_dataset(self):
             aws_secret_access_key=self.config.secret_key,
             region_name=self.config.region_name,
         )
-        s3_resource = s3_client.resource('s3', endpoint_url=self.config.endpoint_url)
+        s3_resource = s3_client.resource("s3", endpoint_url=self.config.endpoint_url)
         # Get the bucket object
         bucket = s3_resource.Bucket(self.config.bucket_name)
-        
+
         # Filter objects with the specified prefix
         objects = bucket.objects.filter(Prefix=self.config.file_key)
         # Iterate over filtered objects
@@ -62,13 +62,11 @@ def download_dataset(self):
 
             # Create directories if they don't exist
             os.makedirs(
-                os.path.join(VOLUME_PATH_DATASET, path_excluded_first_last_parts), exist_ok=True
+                os.path.join(VOLUME_PATH_DATASET, path_excluded_first_last_parts),
+                exist_ok=True,
             )
             # Download the file
             file_path = os.path.sep.join(path_components[1:])
-            bucket.download_file(
-                obj_key,
-                os.path.join(VOLUME_PATH_DATASET, file_path)
-            )
+            bucket.download_file(obj_key, os.path.join(VOLUME_PATH_DATASET, file_path))
         print(f"Files downloaded")
 
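
For context, below is a minimal standalone sketch of the download logic that the patched download_dataset() performs: list objects under a prefix with boto3's resource API, recreate the directory layout locally, and download each object. The function name download_prefix, its parameters, and the bucket/prefix/target values are hypothetical illustrations; in the SDK these come from the S3DatasetParams config and files land under VOLUME_PATH_DATASET. Only the calls visible in the hunks above are taken from the patch; the loop-body variables are inferred from how they are used.

import os

import boto3


def download_prefix(bucket_name, prefix, target_dir, endpoint_url=None, region_name=None):
    # Hypothetical standalone helper mirroring the patched method; not the SDK's API.
    # Build a session and an S3 resource, as the patch does with
    # s3_client.resource("s3", endpoint_url=...).
    session = boto3.Session(region_name=region_name)
    s3_resource = session.resource("s3", endpoint_url=endpoint_url)
    bucket = s3_resource.Bucket(bucket_name)

    # Filter objects with the specified prefix and mirror them locally,
    # dropping the first path component, as the patch's path_components[1:] does.
    for obj in bucket.objects.filter(Prefix=prefix):
        path_components = obj.key.split(os.path.sep)
        rel_dir = os.path.sep.join(path_components[1:-1])
        os.makedirs(os.path.join(target_dir, rel_dir), exist_ok=True)
        rel_path = os.path.sep.join(path_components[1:])
        bucket.download_file(obj.key, os.path.join(target_dir, rel_path))
    print("Files downloaded")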