diff --git a/.github/workflows/analyze.yml b/.github/workflows/analyze.yml
index 7852b41c1..0333e708a 100644
--- a/.github/workflows/analyze.yml
+++ b/.github/workflows/analyze.yml
@@ -8,15 +8,16 @@ on:
 jobs:
   Analyze:
+    if: ${{ !contains(github.event.head_commit.message, 'Bump version') }}
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
         python-version: [3.9]
         os: [ubuntu-latest]
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install Dependencies
@@ -41,9 +42,9 @@ jobs:
     runs-on: ubuntu-latest
     needs: [Analyze]
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - name: Configure AWS credentials
-        uses: aws-actions/configure-aws-credentials@v1-node16
+        uses: aws-actions/configure-aws-credentials@v4
         with:
           aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
diff --git a/.github/workflows/build-docs.yml b/.github/workflows/build-docs.yml
index f7932bf24..c4388350a 100644
--- a/.github/workflows/build-docs.yml
+++ b/.github/workflows/build-docs.yml
@@ -7,6 +7,7 @@ on:
 jobs:
   docs:
+    if: ${{ !contains(github.event.head_commit.message, 'Bump version') }}
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2.3.1
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 911941b83..a080c60d9 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -22,6 +22,7 @@ on:
 jobs:
   test:
+    if: ${{ !contains(github.event.head_commit.message, 'Bump version') }}
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
@@ -46,11 +47,11 @@ jobs:
   lint:
     runs-on: ubuntu-latest
-
+    if: ${{ !contains(github.event.head_commit.message, 'Bump version') }}
     steps:
-      - uses: actions/checkout@v1
+      - uses: actions/checkout@v4
       - name: Set up Python
-        uses: actions/setup-python@v1
+        uses: actions/setup-python@v5
         with:
           python-version: 3.9
       - name: Install Dependencies
@@ -68,6 +69,11 @@ jobs:
     if: success() && startsWith(github.ref, 'refs/tags/')
     needs: [lint, test]
     runs-on: ubuntu-latest
+    environment:
+      name: pypi
+      url: https://pypi.org/p/cellpack
+    permissions:
+      id-token: write
     steps:
       - uses: actions/checkout@v1
@@ -83,7 +89,5 @@ jobs:
         run: |
           python setup.py sdist bdist_wheel
       - name: Publish to PyPI
-        uses: pypa/gh-action-pypi-publish@master
-        with:
-          user: meganrm
-          password: ${{ secrets.PYPI_TOKEN }}
+        uses: pypa/gh-action-pypi-publish@release/v1
+
diff --git a/.github/workflows/cleanup-firebase.yml b/.github/workflows/cleanup-firebase.yml
new file mode 100644
index 000000000..fb8f58498
--- /dev/null
+++ b/.github/workflows/cleanup-firebase.yml
@@ -0,0 +1,29 @@
+name: Cleanup Firebase Metadata
+
+on:
+  schedule:
+    - cron: "24 18 * * 1" # Runs at 18:24 UTC every Monday
+
+jobs:
+  cleanup:
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        python-version: [3.9]
+        os: [ubuntu-latest, windows-latest, macOS-latest]
+    steps:
+      - uses: actions/checkout@v4
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install .[all]
+      - name: Cleanup Firebase Metadata
+        env:
+          FIREBASE_TOKEN: ${{ secrets.FIREBASE_TOKEN }}
+          FIREBASE_EMAIL: ${{ secrets.FIREBASE_EMAIL }}
+        run: |
+          python cellpack/bin/cleanup_tasks.py
diff --git a/.github/workflows/cleanup.yml b/.github/workflows/cleanup.yml
index b9cb47c22..13fc4b528 100644
--- a/.github/workflows/cleanup.yml
+++ b/.github/workflows/cleanup.yml
@@ -9,9 +9,9 @@ jobs:
   cleanup:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - name: Configure AWS credentials
-        uses: aws-actions/configure-aws-credentials@v1-node16
+        uses: aws-actions/configure-aws-credentials@v4
         with:
           aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
diff --git a/cellpack/__init__.py b/cellpack/__init__.py
index 65c4ef6c0..e33611c66 100644
--- a/cellpack/__init__.py
+++ b/cellpack/__init__.py
@@ -6,7 +6,7 @@ __email__ = "meganr@alleninstitute.org"
 
 # Do not edit this string manually, always use bumpversion
 # Details in CONTRIBUTING.md
-__version__ = "1.0.3"
+__version__ = "1.0.8"
 
 from .autopack.loaders.recipe_loader import RecipeLoader  # noqa: F401
diff --git a/cellpack/autopack/AWSHandler.py b/cellpack/autopack/AWSHandler.py
index c93a6098f..b4c6397ec 100644
--- a/cellpack/autopack/AWSHandler.py
+++ b/cellpack/autopack/AWSHandler.py
@@ -1,5 +1,6 @@
 import logging
 from pathlib import Path
+from urllib.parse import parse_qs, urlparse, urlunparse
 
 import boto3
 from botocore.exceptions import ClientError
@@ -40,7 +41,7 @@ def _create_session(self, region_name):
 
     def get_aws_object_key(self, object_name):
         if self.folder_name is not None:
-            object_name = self.folder_name + object_name
+            object_name = f"{self.folder_name}/{object_name}"
         else:
             object_name = object_name
         return object_name
@@ -76,23 +77,46 @@ def create_presigned_url(self, object_name, expiration=3600):
         """
         object_name = self.get_aws_object_key(object_name)
         # Generate a presigned URL for the S3 object
+        # The response contains the presigned URL
+        # https://{self.bucket_name}.s3.{region}.amazonaws.com/{object_key}
        try:
            url = self.s3_client.generate_presigned_url(
                "get_object",
                Params={"Bucket": self.bucket_name, "Key": object_name},
                ExpiresIn=expiration,
            )
+            base_url = urlunparse(urlparse(url)._replace(query="", fragment=""))
+            return base_url
        except ClientError as e:
-            logging.error(e)
+            logging.error(f"Error generating presigned URL: {e}")
            return None
-        # The response contains the presigned URL
-        # https://{self.bucket_name}.s3.{region}.amazonaws.com/{object_key}
-        return url
 
-    def save_file(self, file_path):
+    def is_url_valid(self, url):
+        """
+        Validate the URL's scheme, bucket name, and query parameters.
+        """
+        parsed_url = urlparse(url)
+        # Check the scheme
+        if parsed_url.scheme != "https":
+            return False
+        # Check the bucket name
+        if not parsed_url.path.startswith(f"/{self.bucket_name}/"):
+            return False
+        # Check unwanted query parameters
+        unwanted_query_params = ["AWSAccessKeyId", "Signature", "Expires"]
+        if parsed_url.query:
+            query_params = parse_qs(parsed_url.query)
+            for param in unwanted_query_params:
+                if param in query_params:
+                    return False
+        return True
+
+    def save_file_and_get_url(self, file_path):
         """
-        Uploads a file to S3 and returns the presigned url
+        Uploads a file to S3 and returns the base url
         """
         file_name = self.upload_file(file_path)
-        if file_name:
-            return file_name, self.create_presigned_url(file_name)
+        base_url = self.create_presigned_url(file_name)
+        if file_name and base_url:
+            if self.is_url_valid(base_url):
+                return file_name, base_url
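
Taken together, the AWSHandler changes mean callers now receive a stable, credential-free base URL instead of a time-limited presigned link: create_presigned_url() strips the query string, is_url_valid() rejects URLs that leak AWSAccessKeyId/Signature/Expires or point at the wrong bucket, and save_file_and_get_url() ties the two together. A minimal usage sketch, outside the patch itself; the file path here is illustrative:

    from cellpack.autopack.AWSHandler import AWSHandler

    handler = AWSHandler(
        bucket_name="cellpack-results",
        sub_folder_name="simularium",
        region_name="us-west-2",
    )
    # Returns (file_name, base_url) only when the upload succeeded and the
    # trimmed URL passed is_url_valid(); otherwise it falls through to None.
    result = handler.save_file_and_get_url("out/example.simularium")
    if result is not None:
        file_name, base_url = result
        print(file_name, base_url)
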
diff --git a/cellpack/autopack/Compartment.py b/cellpack/autopack/Compartment.py
index f3950f264..c5a1ae741 100644
--- a/cellpack/autopack/Compartment.py
+++ b/cellpack/autopack/Compartment.py
@@ -1218,7 +1218,7 @@ def BuildGrid_trimesh(
 
         off_grid_surface_points = surface_points_in_bounding_box
 
-        ex = False  # True if nbGridPoints == len(idarray) else False
+        ex = True  # True if nbGridPoints == len(idarray) else False
 
         surfacePoints, surfacePointsNormals = self.extendGridArrays(
             nbGridPoints,
diff --git a/cellpack/autopack/DBRecipeHandler.py b/cellpack/autopack/DBRecipeHandler.py
index af4e97e54..d166621d0 100644
--- a/cellpack/autopack/DBRecipeHandler.py
+++ b/cellpack/autopack/DBRecipeHandler.py
@@ -1,6 +1,9 @@
 import copy
+from datetime import datetime, timezone
+from enum import Enum
 
 from deepdiff import DeepDiff
+import requests
 
 from cellpack.autopack.utils import deep_merge
 
@@ -375,6 +378,36 @@ def should_write(self, db, grad_name):
         return None, None
 
 
+class ResultDoc:
+    def __init__(self, db):
+        self.db = db
+
+    def handle_expired_results(self):
+        """
+        Delete result documents that are older than 180 days and whose linked S3 object no longer exists.
+        """
+        current_utc = datetime.now(timezone.utc)
+        results = self.db.get_all_docs("results")
+        if results:
+            for result in results:
+                result_data = self.db.doc_to_dict(result)
+                result_age = current_utc - result_data["timestamp"]
+                if result_age.days > 180 and not self.validate_existence(
+                    result_data["url"]
+                ):
+                    self.db.delete_doc("results", self.db.doc_id(result))
+            print("Results cleanup complete.")
+        else:
+            print("No results found in the database.")
+
+    def validate_existence(self, url):
+        """
+        Validate the existence of an S3 object by checking if the URL is accessible.
+        Returns True if the URL is accessible.
+        """
+        return requests.head(url).status_code == requests.codes.ok
+
+
 class DBUploader(object):
     """
     Handles the uploading of data to the database.
@@ -404,6 +437,9 @@ def prep_data_for_db(data):
                 modified_data[key] = unpacked_value
                 if isinstance(unpacked_value, dict):
                     modified_data[key] = DBUploader.prep_data_for_db(unpacked_value)
+            # If the value is an enum, convert it to a string, e.g. during a version migration where "type" in a v1 recipe is an enum
+            elif isinstance(value, Enum):
+                modified_data[key] = value.name
             # If the value is a dictionary, recursively convert its nested lists to dictionaries
             elif isinstance(value, dict):
                 modified_data[key] = DBUploader.prep_data_for_db(value)
@@ -572,6 +608,7 @@ def upload_recipe(self, recipe_meta_data, recipe_data):
             print(f"{recipe_id} is already in firestore")
             return
         recipe_to_save = self.upload_collections(recipe_meta_data, recipe_data)
+        recipe_to_save["recipe_path"] = self.db.create_path("recipes", recipe_id)
         self.upload_data("recipes", recipe_to_save, recipe_id)
 
     def upload_result_metadata(self, file_name, url):
@@ -584,7 +621,7 @@ def upload_result_metadata(self, file_name, url):
         self.db.update_or_create(
             "results",
             file_name,
-            {"user": username, "timestamp": timestamp, "url": url.split("?")[0]},
+            {"user": username, "timestamp": timestamp, "url": url},
         )
 
 
@@ -630,6 +667,18 @@ def prep_db_doc_for_download(self, db_doc):
     def collect_docs_by_id(self, collection, id):
         return self.db.get_doc_by_id(collection, id)
 
+    def validate_input_recipe_path(self, path):
+        """
+        Validates if the input path corresponds to a recipe path in the database.
+        Format of a recipe path: firebase:recipes/[RECIPE-ID]
+        """
+        collection, id = self.db.get_collection_id_from_path(path)
+        recipe_path = self.db.get_value(collection, id, "recipe_path")
+        if not recipe_path:
+            raise ValueError(
+                f"No recipe found at the input path: '{path}'. Please ensure the recipe exists in the database and is spelled correctly. Expected path format: 'firebase:recipes/[RECIPE-ID]'"
+            )
+
     @staticmethod
     def _get_grad_and_obj(obj_data, obj_dict, grad_dict):
         """
@@ -706,3 +755,19 @@ def compile_db_recipe_data(db_recipe_data, obj_dict, grad_dict, comp_dict):
     if grad_dict:
         recipe_data["gradients"] = [{**v} for v in grad_dict.values()]
     return recipe_data
+
+
+class DBMaintenance(object):
+    """
+    Handles the maintenance of the database.
+    """
+
+    def __init__(self, db_handler):
+        self.db = db_handler
+        self.result_doc = ResultDoc(self.db)
+
+    def cleanup_results(self):
+        """
+        Delete result documents that are older than 180 days and whose linked S3 object no longer exists.
+        """
+        self.result_doc.handle_expired_results()
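
The cleanup rule in ResultDoc deletes a result document only when two conditions hold at once: the record is more than 180 days old, and its linked S3 object no longer answers a HEAD request. A small sketch of that predicate, using a hypothetical helper rather than code from the patch:

    from datetime import datetime, timedelta, timezone

    import requests

    EXPIRY_DAYS = 180  # mirrors the threshold hard-coded in handle_expired_results()

    def should_delete(timestamp, url):
        # Old age alone is not enough; the linked S3 object must also be gone.
        age = datetime.now(timezone.utc) - timestamp
        object_exists = requests.head(url).status_code == requests.codes.ok
        return age.days > EXPIRY_DAYS and not object_exists

    # A 200-day-old record is a candidate for deletion...
    example_ts = datetime.now(timezone.utc) - timedelta(days=200)
    # ...but should_delete(example_ts, url) is True only once the HEAD check fails.
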
diff --git a/cellpack/autopack/FirebaseHandler.py b/cellpack/autopack/FirebaseHandler.py
index 69e1f0fef..b8d64624d 100644
--- a/cellpack/autopack/FirebaseHandler.py
+++ b/cellpack/autopack/FirebaseHandler.py
@@ -1,10 +1,14 @@
 import ast
+import logging
 import os
 import firebase_admin
 from firebase_admin import credentials, firestore
 from dotenv import load_dotenv
 from google.cloud.exceptions import NotFound
 from cellpack.autopack.loaders.utils import read_json_file, write_json_file
+from cellpack.autopack.interface_objects.default_values import (
+    default_firebase_collection_names,
+)
 
 
 class FirebaseHandler(object):
@@ -16,10 +20,10 @@ class FirebaseHandler(object):
     _initialized = False
     _db = None
 
-    def __init__(self):
+    def __init__(self, default_db=None):
         # check if firebase is already initialized
         if not FirebaseHandler._initialized:
-            db_choice = FirebaseHandler.which_db()
+            db_choice = FirebaseHandler.which_db(default_db=default_db)
             if db_choice == "staging":
                 cred = FirebaseHandler.get_staging_creds()
             else:
@@ -34,14 +38,16 @@ def __init__(self):
     # common utility methods
     @staticmethod
-    def which_db():
+    def which_db(default_db=None):
         options = {"1": "dev", "2": "staging"}
-        print("Choose database:")
+        if default_db in options.values():
+            print(f"Using {default_db} database -------------")
+            return default_db
         for key, value in options.items():
             print(f"[{key}] {value}")
         choice = input("Enter number: ").strip()
         print(f"Using {options.get(choice, 'dev')} database -------------")
-        return options.get(choice, "dev")  # default to dev db
+        return options.get(choice, "dev")  # default to dev db for recipe uploads
 
     @staticmethod
     def doc_to_dict(doc):
@@ -65,10 +71,18 @@ def get_path_from_ref(doc):
 
     @staticmethod
     def get_collection_id_from_path(path):
-        # path example = firebase:composition/uid_1
-        components = path.split(":")[1].split("/")
-        collection = components[0]
-        id = components[1]
+        try:
+            components = path.split(":")[1].split("/")
+            collection = components[0]
+            id = components[1]
+            if collection not in default_firebase_collection_names:
+                raise ValueError(
+                    f"Invalid collection name: '{collection}'. Choose from: {default_firebase_collection_names}"
+                )
+        except IndexError:
+            raise ValueError(
+                "Invalid path provided. Expected format: firebase:collection/id"
+            )
         return collection, id
 
     # Create methods
@@ -77,10 +91,10 @@ def set_doc(self, collection, id, data):
         if not doc:
             doc_ref = self.db.collection(collection).document(id)
             doc_ref.set(data)
-            print(f"successfully uploaded to path: {doc_ref.path}")
+            logging.info(f"successfully uploaded to path: {doc_ref.path}")
             return doc_ref
         else:
-            print(
+            logging.error(
                 f"ERROR: {doc_ref.path} already exists. If uploading new data, provide a unique recipe name."
             )
             return
@@ -101,12 +115,13 @@ def get_staging_creds():
         # set override=True to refresh the .env file if softwares or tokens updated
         load_dotenv(dotenv_path="./.env", override=False)
         FIREBASE_TOKEN = os.getenv("FIREBASE_TOKEN")
+        firebase_key = FIREBASE_TOKEN.replace("\\n", "\n")
         FIREBASE_EMAIL = os.getenv("FIREBASE_EMAIL")
         return {
             "type": "service_account",
             "project_id": "cell-pack-database",
             "client_email": FIREBASE_EMAIL,
-            "private_key": FIREBASE_TOKEN,
+            "private_key": firebase_key,
             "token_uri": "https://oauth2.googleapis.com/token",
         }
@@ -141,11 +156,28 @@ def get_doc_by_ref(self, path):
         collection, id = FirebaseHandler.get_collection_id_from_path(path)
         return self.get_doc_by_id(collection, id)
 
+    def get_all_docs(self, collection):
+        try:
+            docs_stream = self.db.collection(collection).stream()
+            docs = list(docs_stream)
+            return docs
+        except Exception as e:
+            logging.error(
+                f"An error occurred while retrieving docs from collection '{collection}': {e}"
+            )
+            return None
+
+    def get_value(self, collection, id, field):
+        doc, _ = self.get_doc_by_id(collection, id)
+        if doc is None:
+            return None
+        return doc[field]
+
     # Update methods
     def update_doc(self, collection, id, data):
         doc_ref = self.db.collection(collection).document(id)
         doc_ref.update(data)
-        print(f"successfully updated to path: {doc_ref.path}")
+        logging.info(f"successfully updated to path: {doc_ref.path}")
         return doc_ref
 
     @staticmethod
@@ -166,6 +198,13 @@ def update_or_create(self, collection, id, data):
         except NotFound:
             self.set_doc(collection, id, data)
 
+    # Delete methods
+    def delete_doc(self, collection, id):
+        doc_ref = self.db.collection(collection).document(id)
+        doc_ref.delete()
+        logging.info(f"successfully deleted path: {doc_ref.path}")
+        return doc_ref.id
+
     # other utils
     @staticmethod
     def write_creds_path():
diff --git a/cellpack/autopack/__init__.py b/cellpack/autopack/__init__.py
index 18c7667b8..143544f0e 100755
--- a/cellpack/autopack/__init__.py
+++ b/cellpack/autopack/__init__.py
@@ -386,7 +386,8 @@ def load_file(filename, destination="", cache="geometries", force=None):
     # command example: `pack -r firebase:recipes/[FIREBASE-RECIPE-ID] -c [CONFIG-FILE-PATH]`
     if database_name == "firebase":
         db = DATABASE_IDS.handlers().get(database_name)
-        db_handler = DBRecipeLoader(db)
+        initialize_db = db()
+        db_handler = DBRecipeLoader(initialize_db)
         recipe_id = file_path.split("/")[-1]
         db_doc, _ = db_handler.collect_docs_by_id(
             collection="recipes", id=recipe_id
diff --git a/cellpack/autopack/interface_objects/database_ids.py b/cellpack/autopack/interface_objects/database_ids.py
index c9a7520e6..7ef4203a4 100644
--- a/cellpack/autopack/interface_objects/database_ids.py
+++ b/cellpack/autopack/interface_objects/database_ids.py
@@ -21,8 +21,11 @@ def create_aws_handler(bucket_name, sub_folder_name, region_name):
                 region_name=region_name,
             )
 
+        def create_firebase_handler(default_db=None):
+            return FirebaseHandler(default_db=default_db)
+
         handlers_dict = {
-            cls.FIREBASE: FirebaseHandler(),
+            cls.FIREBASE: create_firebase_handler,
             cls.AWS: create_aws_handler,
         }
         return handlers_dict
diff --git a/cellpack/autopack/interface_objects/default_values.py b/cellpack/autopack/interface_objects/default_values.py
index 18d82716d..0e7d9d983 100644
--- a/cellpack/autopack/interface_objects/default_values.py
+++ b/cellpack/autopack/interface_objects/default_values.py
@@ -13,3 +13,11 @@
     "mode_settings": {},
     "weight_mode_settings": {},
 }
+
+default_firebase_collection_names = [
+    "composition",
+    "objects",
+    "gradients",
+    "recipes",
+    "results",
+]
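
DATABASE_IDS.handlers() previously built a FirebaseHandler instance while constructing the dict, which triggered the interactive database prompt even on AWS-only code paths. It now maps each id to a factory, deferring construction until a caller supplies arguments. A sketch of the resulting call pattern; the argument values are illustrative:

    from cellpack.autopack.interface_objects.database_ids import DATABASE_IDS

    handlers = DATABASE_IDS.handlers()

    # Entries are callables now, not instances; nothing connects or prompts
    # until the caller actually needs a handler.
    firebase = handlers.get(DATABASE_IDS.FIREBASE)(default_db="staging")

    aws = handlers.get(DATABASE_IDS.AWS)(
        bucket_name="cellpack-results",
        sub_folder_name="simularium",
        region_name="us-west-2",
    )
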
"results", +] diff --git a/cellpack/autopack/loaders/recipe_loader.py b/cellpack/autopack/loaders/recipe_loader.py index c8677ba0a..fd21717e5 100644 --- a/cellpack/autopack/loaders/recipe_loader.py +++ b/cellpack/autopack/loaders/recipe_loader.py @@ -196,7 +196,7 @@ def _read(self, resolve_inheritance=True): atomic=reps.get("atomic", None), packing=reps.get("packing", None), ) - # the key "all_partners" exists in obj["partners"] if the recipe is downloaded from a remote db + # the key "all_partners" already exists in obj["partners"] if the recipe is downloaded from firebase partner_settings = ( [] if ( diff --git a/cellpack/autopack/upy/simularium/simularium_helper.py b/cellpack/autopack/upy/simularium/simularium_helper.py index 031dccd88..1e047e560 100644 --- a/cellpack/autopack/upy/simularium/simularium_helper.py +++ b/cellpack/autopack/upy/simularium/simularium_helper.py @@ -1413,18 +1413,22 @@ def store_result_file(file_path, storage=None): handler = DATABASE_IDS.handlers().get(storage) initialized_handler = handler( bucket_name="cellpack-results", - sub_folder_name="simularium/", + sub_folder_name="simularium", region_name="us-west-2", ) - file_name, url = initialized_handler.save_file(file_path) + file_name, url = initialized_handler.save_file_and_get_url(file_path) simulariumHelper.store_metadata(file_name, url, db="firebase") return file_name, url @staticmethod def store_metadata(file_name, url, db=None): if db == "firebase": - db_handler = DBUploader(DATABASE_IDS.handlers().get(db)) - db_handler.upload_result_metadata(file_name, url) + handler = DATABASE_IDS.handlers().get(db) + initialized_db = handler( + default_db="staging" + ) # default to staging for metadata uploads + db_uploader = DBUploader(initialized_db) + db_uploader.upload_result_metadata(file_name, url) @staticmethod def open_in_simularium(aws_url): diff --git a/cellpack/bin/cleanup_tasks.py b/cellpack/bin/cleanup_tasks.py new file mode 100644 index 000000000..08217aa04 --- /dev/null +++ b/cellpack/bin/cleanup_tasks.py @@ -0,0 +1,20 @@ +from cellpack.autopack.DBRecipeHandler import DBMaintenance +from cellpack.autopack.interface_objects.database_ids import DATABASE_IDS + + +def run_cleanup(db_id=DATABASE_IDS.FIREBASE): + """ + Performs cleanup operations on expired database entries. 
diff --git a/cellpack/tests/test_aws_handler.py b/cellpack/tests/test_aws_handler.py
new file mode 100644
index 000000000..6aaad87cc
--- /dev/null
+++ b/cellpack/tests/test_aws_handler.py
@@ -0,0 +1,107 @@
+import boto3
+from unittest.mock import patch
+from moto import mock_aws
+from cellpack.autopack.AWSHandler import AWSHandler
+
+
+@patch("cellpack.autopack.AWSHandler.boto3.client")
+def test_create_session(mock_client):
+    with mock_aws():
+        aws_handler = AWSHandler(
+            bucket_name="test_bucket",
+            sub_folder_name="test_folder",
+            region_name="us-west-2",
+        )
+        assert aws_handler.s3_client is not None
+        mock_client.assert_called_once_with(
+            "s3",
+            endpoint_url="https://s3.us-west-2.amazonaws.com",
+            region_name="us-west-2",
+        )
+
+
+def test_get_aws_object_key():
+    with mock_aws():
+        aws_handler = AWSHandler(
+            bucket_name="test_bucket",
+            sub_folder_name="test_folder",
+            region_name="us-west-2",
+        )
+        object_key = aws_handler.get_aws_object_key("test_file")
+        assert object_key == "test_folder/test_file"
+
+
+def test_upload_file():
+    with mock_aws():
+        aws_handler = AWSHandler(
+            bucket_name="test_bucket",
+            sub_folder_name="test_folder",
+            region_name="us-west-2",
+        )
+        s3 = boto3.client("s3", region_name="us-west-2")
+        s3.create_bucket(
+            Bucket="test_bucket",
+            CreateBucketConfiguration={"LocationConstraint": "us-west-2"},
+        )
+        with open("test_file.txt", "w") as file:
+            file.write("test file")
+        file_name = aws_handler.upload_file("test_file.txt")
+        assert file_name == "test_file.txt"
+
+
+def test_create_presigned_url():
+    with mock_aws(), patch.object(AWSHandler, "_s3_client") as mock_client:
+        presigned_url = "https://s3.us-west-2.amazonaws.com/test_bucket/test_folder/test_file.txt?query=string"
+        mock_client.generate_presigned_url.return_value = presigned_url
+        aws_handler = AWSHandler(
+            bucket_name="test_bucket",
+            sub_folder_name="test_folder",
+            region_name="us-west-2",
+        )
+        s3 = boto3.client("s3", region_name="us-west-2")
+        s3.create_bucket(
+            Bucket="test_bucket",
+            CreateBucketConfiguration={"LocationConstraint": "us-west-2"},
+        )
+        with open("test_file.txt", "w") as file:
+            file.write("test file")
+        aws_handler.upload_file("test_file.txt")
+        url = aws_handler.create_presigned_url("test_file.txt")
+        assert url is not None
+        assert url.startswith(
+            "https://s3.us-west-2.amazonaws.com/test_bucket/test_folder/test_file.txt"
+        )
+
+
+def test_is_url_valid():
+    with mock_aws(), patch.object(AWSHandler, "_s3_client") as mock_client:
+        presigned_url = "https://s3.us-west-2.amazonaws.com/test_bucket/test_folder/test_file.txt?query=string"
+        mock_client.generate_presigned_url.return_value = presigned_url
+        aws_handler = AWSHandler(
+            bucket_name="test_bucket",
+            sub_folder_name="test_folder",
+            region_name="us-west-2",
+        )
+        s3 = boto3.client("s3", region_name="us-west-2")
+        s3.create_bucket(
+            Bucket="test_bucket",
+            CreateBucketConfiguration={"LocationConstraint": "us-west-2"},
+        )
+        with open("test_file.txt", "w") as file:
+            file.write("test file")
+        aws_handler.upload_file("test_file.txt")
+        url = aws_handler.create_presigned_url("test_file.txt")
+        assert aws_handler.is_url_valid(url) is True
+        assert aws_handler.is_url_valid("invalid_url") is False
+        assert (
+            aws_handler.is_url_valid(
+                "https://s3.us-west-2.amazonaws.com/test_bucket/test_folder/test_file.txt"
+            )
+            is True
+        )
+        assert (
+            aws_handler.is_url_valid(
+                "https://s3.us-west-2.amazonaws.com/test_bucket/test_folder/test_file.txt?AWSAccessKeyId=1234"
+            )
+            is False
+        )
diff --git a/examples/packing-configs/debug.json b/examples/packing-configs/debug.json
index 2e4cbc0ef..4ce4583a2 100644
--- a/examples/packing-configs/debug.json
+++ b/examples/packing-configs/debug.json
@@ -1,7 +1,7 @@
 {
     "name": "debug",
     "format": "simularium",
-    "inner_grid_method": "raytrace",
+    "inner_grid_method": "trimesh",
     "live_packing": false,
     "ordered_packing": false,
     "out": "out/",
diff --git a/examples/recipes/v2/nested.json b/examples/recipes/v2/nested.json
index c78a23b2b..bac4bc55f 100644
--- a/examples/recipes/v2/nested.json
+++ b/examples/recipes/v2/nested.json
@@ -19,7 +19,6 @@
             "type": "single_sphere",
             "jitter_attempts": 10,
             "packing_mode": "random",
-            "place_method": "jitter",
             "available_regions": {
                 "interior": {},
                 "surface": {}
@@ -84,7 +83,11 @@
                     "object": "red_sphere",
                     "count": 40
                 }
-            ]
+            ],
+            "surface": [{
+                "object": "green_sphere",
+                "count": 40
+            }]
         }
     },
     "inner_sphere": {
diff --git a/examples/recipes/v2/spheres_in_a_box.json b/examples/recipes/v2/spheres_in_a_box.json
new file mode 100644
index 000000000..8ee55855d
--- /dev/null
+++ b/examples/recipes/v2/spheres_in_a_box.json
@@ -0,0 +1,88 @@
+{
+    "version": "1.0.0",
+    "format_version": "2.0",
+    "name": "analysis_b",
+    "bounding_box": [
+        [
+            0,
+            0,
+            0
+        ],
+        [
+            1000,
+            1000,
+            1000
+        ]
+    ],
+    "objects": {
+        "base": {
+            "type": "single_sphere"
+        },
+        "sphere_100": {
+            "type": "single_sphere",
+            "inherit": "base",
+            "color": [
+                0.498,
+                0.498,
+                0.498
+            ],
+            "radius": 100
+        },
+        "sphere_200": {
+            "type": "single_sphere",
+            "inherit": "base",
+            "color": [
+                0.827,
+                0.82700002,
+                0.82700002
+            ],
+            "radius": 200
+        },
+        "sphere_50": {
+            "type": "single_sphere",
+            "inherit": "base",
+            "color": [
+                0.306,
+                0.45100001,
+                0.81599998
+            ],
+            "radius": 50
+        },
+        "sphere_25": {
+            "type": "single_sphere",
+            "inherit": "base",
+            "color": [
+                0.467,
+                0.23899999,
+                0.972
+            ],
+            "radius": 25
+        }
+    },
+    "composition": {
+        "space": {
+            "regions": {
+                "interior": [
+                    "A", "B", "C", "D"
+                ]
+            }
+        },
+        "A": {
+            "object": "sphere_100",
+            "count": 6
+        },
+        "B": {
+            "object": "sphere_200",
+            "count": 2
+        },
+        "C": {
+            "object": "sphere_50",
+            "count": 15
+        },
+        "D": {
+            "object": "sphere_25",
+            "count": 40
+        }
+    }
+}
\ No newline at end of file
diff --git a/setup.cfg b/setup.cfg
index 9f52748b5..d94219923 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 1.0.3
+current_version = 1.0.8
 commit = True
 tag = True
diff --git a/setup.py b/setup.py
index 8726cc88b..10729b39e 100644
--- a/setup.py
+++ b/setup.py
@@ -16,7 +16,6 @@
     "black>=19.10b0, <=23.0",
     "flake8>=3.8.3, <=6.0.0",
     "flake8-debugger>=3.2.1",
-    "mdutils>=1.4.0",
     "pytest>=5.4.3",
     "pytest-cov>=2.9.0",
     "pytest-raises>=0.11",
@@ -24,10 +23,11 @@
 
 analysis_requirements = [
     "tqdm>=4.64.1",
+    "mdutils>=1.4.0",
     "scikit-learn>=1.1.3",
     "seaborn>=0.12.1",
     "aicsimageio>=4.10.0",
     "pandas>=1.2.4",
 ]
 
 dev_requirements = [
@@ -47,6 +48,7 @@
 ]
 
 requirements = [
+    *analysis_requirements,
     "boto3>=1.28.3",
     "fire>=0.4.0",
     "firebase_admin>=6.0.1",
@@ -62,6 +64,7 @@
     "trimesh>=3.9.34",
     "deepdiff>=5.5.0",
     "python-dotenv>=1.0.0",
+    "moto>=5.0.2",
 ]
 
 extra_requirements = {
@@ -118,6 +121,6 @@
url="https://github.com/mesoscope/cellpack", # Do not edit this string manually, always use bumpversion # Details in CONTRIBUTING.rst - version="1.0.3", + version="1.0.8", zip_safe=False, )