From 4d51db1d532ec1266457835616593bd7041fe1a0 Mon Sep 17 00:00:00 2001
From: nazarfil
Date: Fri, 12 Jul 2024 10:45:46 +0200
Subject: [PATCH] refactor: remove unneeded file metadata query

---
 hexa/datasets/graphql/schema.graphql | 19 ++++---------------
 hexa/datasets/migrations/__init__.py |  0
 hexa/datasets/schema/mutations.py    | 14 --------------
 hexa/datasets/schema/queries.py      |  2 +-
 hexa/files/gcp.py                    | 10 ++++++++--
 hexa/files/tests/test_api.py         | 17 -----------------
 6 files changed, 13 insertions(+), 49 deletions(-)
 create mode 100644 hexa/datasets/migrations/__init__.py

diff --git a/hexa/datasets/graphql/schema.graphql b/hexa/datasets/graphql/schema.graphql
index db70cdb94..cb931106c 100644
--- a/hexa/datasets/graphql/schema.graphql
+++ b/hexa/datasets/graphql/schema.graphql
@@ -424,19 +424,10 @@ type PinDatasetResult {
   errors: [PinDatasetError!]!
 }
 
-input CreateDatasetFileMetadataInput {
-  fileId: String!
-}
-
 type DatasetFileMetadata {
-  content: JSON!
-  dataset_version_file: DatasetVersionFile
-}
-
-type CreateDatasetFileMetadataResult {
-  dataset_file_metadata : DatasetFileMetadata
-  success: Boolean!
-  errors: [PrepareVersionFileDownloadError!]!
+  content: JSON
+  status: String!
+  datasetVersionFile: DatasetVersionFile
 }
 
 
@@ -446,7 +437,7 @@ extend type Query {
   "Get a dataset by its slug."
   datasetVersion(id: ID!): DatasetVersion
   "Get a dataset file snapshot by fileSnapshot id or by fileId"
-  datasetFileMetadata(id: ID, fileId: ID): DatasetFileMetadata
+  datasetFileSnapshot(id: ID, fileId: ID): DatasetFileMetadata
   "Get a dataset link by its id."
   datasetLink(id: ID!): DatasetLink
   "Get a dataset link by its slug."
@@ -471,8 +462,6 @@ extend type Mutation {
   generateDatasetUploadUrl(input: GenerateDatasetUploadUrlInput!): GenerateDatasetUploadUrlResult! @loginRequired
   "Create a new file in a dataset version."
   createDatasetVersionFile(input: CreateDatasetVersionFileInput!): CreateDatasetVersionFileResult! @loginRequired
-  "Create dataset version snapshot."
-  createDatasetVersionFileMetadata(input: CreateDatasetFileMetadataInput!): CreateDatasetFileMetadataResult! @loginRequired
   "Prepare to download a file in a dataset version."
   prepareVersionFileDownload(input: PrepareVersionFileDownloadInput!): PrepareVersionFileDownloadResult! @loginRequired
   "Link a dataset with a workspace."
diff --git a/hexa/datasets/migrations/__init__.py b/hexa/datasets/migrations/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/hexa/datasets/schema/mutations.py b/hexa/datasets/schema/mutations.py
index ffb710791..9519d4771 100644
--- a/hexa/datasets/schema/mutations.py
+++ b/hexa/datasets/schema/mutations.py
@@ -274,20 +274,6 @@ def resolve_create_version_file(_, info, **kwargs):
         return {"success": False, "errors": ["PERMISSION_DENIED"]}
 
 
-@mutations.field("createDatasetVersionFileMetadata")
-def resolve_create_version_file_metadata(_, info, **kwargs):
-    mutation_input = kwargs["input"]
-
-    dataset_file_metadata_queue.enqueue(
-        {
-            "generate_file_metadata",
-            {
-                "file_id": mutation_input["file_id"],
-            },
-        }
-    )
-
-
 @mutations.field("prepareVersionFileDownload")
 def resolve_version_file_download(_, info, **kwargs):
     request = info.context["request"]
diff --git a/hexa/datasets/schema/queries.py b/hexa/datasets/schema/queries.py
index 749bb8b60..4012dbac4 100644
--- a/hexa/datasets/schema/queries.py
+++ b/hexa/datasets/schema/queries.py
@@ -42,7 +42,7 @@ def resolve_dataset_version(_, info, **kwargs):
         return None
 
 
-@datasets_queries.field("datasetFileMetadata")
+@datasets_queries.field("datasetFileSnapshot")
 def resolve_dataset_file_snapshot(_, info, **kwargs):
     try:
         if kwargs.get("file_id"):
diff --git a/hexa/files/gcp.py b/hexa/files/gcp.py
index 0f35f5db7..3ea112f45 100644
--- a/hexa/files/gcp.py
+++ b/hexa/files/gcp.py
@@ -11,7 +11,13 @@
 from google.oauth2 import service_account
 from google.protobuf import duration_pb2
 
-from .basefs import BaseClient, NotFound, ObjectsPage, load_bucket_sample_data_with
+from .basefs import (
+    BaseClient,
+    BucketObjectAlreadyExists,
+    NotFound,
+    ObjectsPage,
+    load_bucket_sample_data_with,
+)
 
 
 def get_credentials():
@@ -198,7 +204,7 @@ def generate_upload_url(
     client = get_storage_client()
     gcs_bucket = client.get_bucket(bucket_name)
     if raise_if_exists and gcs_bucket.get_blob(target_key) is not None:
-        raise ValidationError(f"GCS: Object {target_key} already exists!")
+        raise BucketObjectAlreadyExists(target_key)
     blob = gcs_bucket.blob(target_key)
     return blob.generate_signed_url(
         expiration=3600, version="v4", method="PUT", content_type=content_type
diff --git a/hexa/files/tests/test_api.py b/hexa/files/tests/test_api.py
index 3a4e703c6..e748a8360 100644
--- a/hexa/files/tests/test_api.py
+++ b/hexa/files/tests/test_api.py
@@ -532,20 +532,3 @@ def get_type(self):
 class APIGcpTestCase(APITestCase, OnlyGCP, TestCase):
     def get_type(self):
         return "gcp"
-
-
-class TestDownloadFromCloudStorage(TestCase):
-    def test_get_from_gcp(self):
-        pass
-        # lines = get_storage("gcp").read_object_lines(
-        #     "hexa-test-datasets",
-        #     "3237e8c2-896d-4628-9054-59d69c785a11/add8469f-14d6-4081-8e02-adb53016f7bd/people.csv",
-        #     1,
-        # )
-        # print(f"Lines are : {lines}")
-        # self.assertEqual(
-        #     lines,
-        #     [
-        #         "3237e8c2-896d-4628-9054-59d69c785a11/add8469f-14d6-4081-8e02-adb53016f7bd/people.csv"
-        #     ],
-        # )
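
Reviewer note (not part of the patch): a minimal sketch of how calling code
interacts with the two behavioural changes above. The query name
datasetFileSnapshot, its status/content fields, and the
BucketObjectAlreadyExists import path are taken from the diff; the endpoint
URL, the requests-based wiring, and both helper names are illustrative
assumptions, not project API.

    import requests

    from hexa.files.basefs import BucketObjectAlreadyExists

    # The query was renamed from datasetFileMetadata to datasetFileSnapshot;
    # content is now nullable and status is required, so callers should treat
    # a null content as "snapshot not ready" and inspect status instead.
    SNAPSHOT_QUERY = """
    query GetFileSnapshot($fileId: ID!) {
      datasetFileSnapshot(fileId: $fileId) {
        status
        content
      }
    }
    """

    def fetch_file_snapshot(session: requests.Session, file_id: str) -> dict:
        # "/graphql/" is an assumed endpoint path for the deployment.
        response = session.post(
            "http://localhost:8000/graphql/",
            json={"query": SNAPSHOT_QUERY, "variables": {"fileId": file_id}},
        )
        response.raise_for_status()
        return response.json()["data"]["datasetFileSnapshot"]

    def get_upload_url_or_none(generate_upload_url, bucket_name, target_key):
        # generate_upload_url (hexa/files/gcp.py) now raises the typed
        # BucketObjectAlreadyExists instead of a generic ValidationError when
        # the target object already exists. It is passed in as a callable
        # because the diff does not show how the codebase exposes it.
        try:
            return generate_upload_url(
                bucket_name=bucket_name,
                target_key=target_key,
                content_type="application/octet-stream",
                raise_if_exists=True,
            )
        except BucketObjectAlreadyExists:
            return None  # object already present; the caller decides what to do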