Feature/upload local recipes to s3 #189

Merged
merged 6 commits into from
Sep 22, 2023
Changes from 4 commits
15 changes: 15 additions & 0 deletions README.md
@@ -71,6 +71,21 @@ See [CONTRIBUTING.md](CONTRIBUTING.md) for information related to developing the
each set of changes to `main` atomic and as a side effect naturally encourages small
well-defined PRs.

## Introduction to Remote Databases
### AWS S3
1. Prerequisites
* Obtain an AWS account for AICS. Please contact the IT team or the code owner.
* Generate an `aws_access_key_id` and `aws_secret_access_key` in your AWS account.

2. Step-by-step Guide
* Download and install the [AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html).
* Configure the AWS CLI by running `aws configure`, then enter your credentials as prompted.
* Ensure that Boto3, the AWS SDK for Python, is installed and included in the requirements section of `setup.py`. A quick credential sanity check is sketched below this list.
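
A minimal sketch for confirming that Boto3 picks up the credentials written by `aws configure`; the helper below is illustrative only and not part of the repository:

```python
# Sanity check (illustrative, not part of the repo): ask AWS STS who the
# configured credentials belong to. Fails fast if `aws configure` was skipped.
import boto3
from botocore.exceptions import ClientError, NoCredentialsError


def check_aws_credentials():
    try:
        identity = boto3.client("sts").get_caller_identity()
        print(f"Authenticated as {identity['Arn']}")
        return True
    except (NoCredentialsError, ClientError) as error:
        print(f"AWS credentials are not configured correctly: {error}")
        return False
```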

### Firebase Firestore
1. Step-by-step Guide
* Create a Firebase project in test mode with your Google account and select `firebase_admin` as the SDK. See the [Firebase Firestore tutorial](https://firebase.google.com/docs/firestore).
* Generate a new private key by navigating to "Project settings" > "Service account" in the project's dashboard. The sketch below shows how the downloaded key is used to initialize the SDK.
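
A minimal initialization sketch, assuming the downloaded key is saved locally as `serviceAccountKey.json` (the path is a placeholder):

```python
# Sketch (illustrative, not part of the repo): initialize firebase_admin with the
# private key downloaded from the project's dashboard. The path is a placeholder.
import firebase_admin
from firebase_admin import credentials, firestore

cred = credentials.Certificate("serviceAccountKey.json")
firebase_admin.initialize_app(cred)
db = firestore.client()  # Firestore client for reading and writing recipes
```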

**MIT license**

77 changes: 77 additions & 0 deletions cellpack/autopack/AWSHandler.py
@@ -0,0 +1,77 @@
import logging
from pathlib import Path

import boto3
from botocore.exceptions import ClientError


class AWSHandler(object):
"""
Handles all the AWS S3 operations
"""

def __init__(
self,
bucket_name,
sub_folder_name=None,
region_name=None,
):
self.bucket_name = bucket_name
self.folder_name = sub_folder_name
session = boto3.Session()
self.s3_client = session.client(
"s3",
endpoint_url=f"https://s3.{region_name}.amazonaws.com",
region_name=region_name,
)

    def get_aws_object_key(self, object_name):
        if self.folder_name is not None:
            object_name = self.folder_name + object_name
        return object_name

    def upload_file(self, file_path):
        """Upload a file to the S3 bucket and make it publicly readable
        :param file_path: path of the file to upload
        :return: the uploaded file's name if the upload succeeded, else False
        """

file_name = Path(file_path).name

object_name = self.get_aws_object_key(file_name)
# Upload the file
try:
self.s3_client.upload_file(file_path, self.bucket_name, object_name)
self.s3_client.put_object_acl(
ACL="public-read", Bucket=self.bucket_name, Key=object_name
)

except ClientError as e:
logging.error(e)
return False
return file_name

def create_presigned_url(self, object_name, expiration=3600):
"""Generate a presigned URL to share an S3 object
:param object_name: string
:param expiration: Time in seconds for the presigned URL to remain valid
:return: Presigned URL as string. If error, returns None.
"""
object_name = self.get_aws_object_key(object_name)
# Generate a presigned URL for the S3 object
try:
url = self.s3_client.generate_presigned_url(
"get_object",
Params={"Bucket": self.bucket_name, "Key": object_name},
ExpiresIn=expiration,
)
except ClientError as e:
logging.error(e)
return None
# The response contains the presigned URL
# https://{self.bucket_name}.s3.{region}.amazonaws.com/{object_key}
return url
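
For orientation, a short usage sketch of the handler above; the bucket, folder, and region values mirror the call site added in `simularium_helper.py` later in this PR, while the local file path is a placeholder:

```python
# Usage sketch (illustrative): upload a local .simularium file and get a shareable link.
handler = AWSHandler(
    bucket_name="cellpack-results",
    sub_folder_name="simularium/",
    region_name="us-west-2",
)
file_name = handler.upload_file("results/example.simularium")  # placeholder path
if file_name:
    url = handler.create_presigned_url(file_name)  # valid for 3600 seconds by default
```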
41 changes: 41 additions & 0 deletions cellpack/autopack/upy/simularium/simularium_helper.py
@@ -1,9 +1,13 @@
# -*- coding: utf-8 -*-
# standard modules
import os
import webbrowser
from pathlib import Path

import matplotlib
import numpy as np
import trimesh
from botocore.exceptions import NoCredentialsError

from simulariumio import (
TrajectoryConverter,
@@ -19,6 +23,7 @@
from simulariumio.constants import DISPLAY_TYPE, VIZ_TYPE

from cellpack.autopack.upy import hostHelper
from cellpack.autopack.AWSHandler import AWSHandler
import collada


@@ -1335,6 +1340,7 @@ def writeToFile(self, file_name, bb, recipe_name, version):
spatial_units=UnitData("nm"), # nanometers
)
TrajectoryConverter(converted_data).save(file_name, False)
return file_name

def raycast(self, **kw):
intersect = False
@@ -1348,3 +1354,38 @@

def raycast_test(self, obj, start, end, length, **kw):
return

def post_and_open_file(self, file_name):
simularium_file = Path(f"{file_name}.simularium")
url = None
try:
url = simulariumHelper.store_results_to_s3(simularium_file)
except Exception as e:
aws_readme_url = "https://github.com/mesoscope/cellpack/blob/feature/main/README.md#aws-s3"
Collaborator commented:
This is helpful, thanks for adding it in!

if isinstance(e, NoCredentialsError):
print(
f"need to configure your aws account, find instructions here: {aws_readme_url}"
)
else:
print(
f"An error occurred while storing the file {simularium_file} to S3: {e}"
)
if url is not None:
simulariumHelper.open_in_simularium(url)

@staticmethod
def store_results_to_s3(file_path):
@meganrm (Member) commented on Sep 6, 2023:
I might consider moving more of this into the handler, so in this file all you have to do is call `url = AWSHandler(OPTIONS).save_file(file_path)` and everything else is handled by the other file.
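
A minimal sketch of what that consolidation could look like; `save_file` is a hypothetical method name taken from the comment, not something added in this PR:

```python
# Hypothetical AWSHandler method (sketch of the reviewer's suggestion): upload the
# file and return a presigned URL in one call, so callers only need
#     url = AWSHandler(OPTIONS).save_file(file_path)
def save_file(self, file_path):
    file_name = self.upload_file(file_path)
    if not file_name:
        return None  # upload failed; the error was already logged by upload_file
    return self.create_presigned_url(file_name)
```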

handler = AWSHandler(
bucket_name="cellpack-results",
sub_folder_name="simularium/",
region_name="us-west-2",
)
file_name = handler.upload_file(file_path)
url = handler.create_presigned_url(file_name)
return url

@staticmethod
def open_in_simularium(aws_url):
webbrowser.open_new_tab(
f"https://simularium.allencell.org/viewer?trajUrl={aws_url}"
)
5 changes: 4 additions & 1 deletion cellpack/autopack/writers/__init__.py
@@ -172,7 +172,10 @@ def save_as_simularium(self, env, all_ingr_as_array, compartments):
env.helper.add_grid_data_to_scene(
f"{gradient.name}-weights", grid_positions, gradient.weight
)
env.helper.writeToFile(env.result_file, env.boundingBox, env.name, env.version)
file_name = env.helper.writeToFile(
env.result_file, env.boundingBox, env.name, env.version
)
autopack.helper.post_and_open_file(file_name)

def save_Mixed_asJson(
self,
1 change: 1 addition & 0 deletions setup.py
@@ -46,6 +46,7 @@
]

requirements = [
"boto3>=1.28.3",
"fire>=0.4.0",
"firebase_admin>=6.0.1",
"matplotlib>=3.3.4",