Commit

add batch_job_id to result metadata (#295)
* retrieve job id

* formatting

* add a bucket for batch jobs

* formatting

* remove print statement and add comments
rugeli authored Nov 12, 2024
Parent: 3480a55 · Commit: cdef941
Showing 7 changed files with 42 additions and 17 deletions.
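
Taken together, these changes thread the AWS Batch job ID from the environment through the result-upload path and into the result metadata record. A minimal sketch of the resulting flow, condensed from the diffs below (it assumes simulariumHelper is importable from the module shown in the last diff; the file path is illustrative):

    import os
    from pathlib import Path

    from cellpack.autopack.upy.simularium.simularium_helper import simulariumHelper

    simularium_file = Path("example.simularium")  # illustrative results file
    job_id = os.environ.get("AWS_BATCH_JOB_ID", None)  # set by AWS Batch; None for local runs
    file_name, url = simulariumHelper.store_result_file(
        simularium_file, storage="aws", batch_job_id=job_id
    )
    if file_name and url:
        simulariumHelper.store_metadata(file_name, url, db="firebase", job_id=job_id)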
cellpack/autopack/DBRecipeHandler.py (7 additions & 2 deletions)

@@ -644,7 +644,7 @@ def upload_recipe(self, recipe_meta_data, recipe_data):
         recipe_to_save["recipe_path"] = self.db.create_path("recipes", recipe_id)
         self.upload_data("recipes", recipe_to_save, recipe_id)
 
-    def upload_result_metadata(self, file_name, url):
+    def upload_result_metadata(self, file_name, url, job_id=None):
         """
         Upload the metadata of the result file to the database.
         """
@@ -654,7 +654,12 @@ def upload_result_metadata(self, file_name, url):
         self.db.update_or_create(
             "results",
             file_name,
-            {"user": username, "timestamp": timestamp, "url": url},
+            {
+                "user": username,
+                "timestamp": timestamp,
+                "url": url,
+                "batch_job_id": job_id,
+            },
         )
 
 
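The practical effect for downstream readers of the "results" collection is one extra field per document: batch_job_id, which is None for runs outside AWS Batch. A hedged usage sketch, assuming an already-initialized Firebase handler db (the document id and URL are illustrative):

    import os

    from cellpack.autopack.DBRecipeHandler import DBUploader

    uploader = DBUploader(db)  # db: an initialized Firebase handler (assumption)
    uploader.upload_result_metadata(
        "example_result",  # file_name doubles as the document id in "results"
        "https://example.com/example_result.simularium",  # url from the AWS upload
        os.environ.get("AWS_BATCH_JOB_ID"),  # job_id; None outside AWS Batch
    )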
cellpack/autopack/__init__.py (3 additions & 1 deletion)

@@ -392,7 +392,9 @@ def read_text_file(filename, destination="", cache="collisionTrees", force=None):
     return sphere_data
 
 
-def load_file(filename, destination="", cache="geometries", force=None, use_docker=False):
+def load_file(
+    filename, destination="", cache="geometries", force=None, use_docker=False
+):
     if is_remote_path(filename):
         database_name, file_path = convert_db_shortname_to_url(filename)
         # command example: `pack -r firebase:recipes/[FIREBASE-RECIPE-ID] -c [CONFIG-FILE-PATH]`
cellpack/autopack/interface_objects/partners.py (5 additions & 3 deletions)

@@ -60,9 +60,11 @@ def __init__(self, partners):
                 partner["name"],
                 partner["position"] if "position" in partner else [0, 0, 0],
                 partner["weight"] if "weight" in partner else weight,
-                partner["binding_probability"]
-                if "binding_probability" in partner
-                else 1.0,
+                (
+                    partner["binding_probability"]
+                    if "binding_probability" in partner
+                    else 1.0
+                ),
             )
             self.all_partners.append(partner)
 
cellpack/autopack/loaders/recipe_loader.py (3 additions & 1 deletion)

@@ -157,7 +157,9 @@ def _migrate_version(self, old_recipe):
         )
 
     def _read(self, resolve_inheritance=True, use_docker=False):
-        new_values, database_name = autopack.load_file(self.file_path, cache="recipes", use_docker=use_docker)
+        new_values, database_name = autopack.load_file(
+            self.file_path, cache="recipes", use_docker=use_docker
+        )
         if database_name == "firebase":
             objects, gradients, composition = DBRecipeLoader.collect_and_sort_data(
                 new_values["composition"]
cellpack/autopack/upy/colors.py (1 addition & 0 deletions)

@@ -16,6 +16,7 @@
 You should have received a copy of the GNU General Public License
 along with upy. If not, see <http://www.gnu.org/licenses/gpl-3.0.html>.
 """
+
 import numpy
 from math import floor
cellpack/autopack/upy/simularium/__init__.py (1 addition & 0 deletions)

@@ -16,5 +16,6 @@
 You should have received a copy of the GNU General Public License
 along with upy. If not, see <http://www.gnu.org/licenses/gpl-3.0.html>.
 """
+
 # CRITICAL_DEPENDENCIES = ['blender','c4d']
 __revision__ = "01"
cellpack/autopack/upy/simularium/simularium_helper.py (22 additions & 10 deletions)

@@ -1398,23 +1398,35 @@ def raycast_test(self, obj, start, end, length, **kw):
     def post_and_open_file(self, file_name, open_results_in_browser=True):
         simularium_file = Path(f"{file_name}.simularium")
         url = None
+        job_id = os.environ.get("AWS_BATCH_JOB_ID", None)
         file_name, url = simulariumHelper.store_result_file(
-            simularium_file, storage="aws"
+            simularium_file, storage="aws", batch_job_id=job_id
         )
         if file_name and url:
-            simulariumHelper.store_metadata(file_name, url, db="firebase")
+            simulariumHelper.store_metadata(
+                file_name, url, db="firebase", job_id=job_id
+            )
             if open_results_in_browser:
                 simulariumHelper.open_in_simularium(url)
 
     @staticmethod
-    def store_result_file(file_path, storage=None):
+    def store_result_file(file_path, storage=None, batch_job_id=None):
         if storage == "aws":
             handler = DATABASE_IDS.handlers().get(storage)
-            initialized_handler = handler(
-                bucket_name="cellpack-results",
-                sub_folder_name="simularium",
-                region_name="us-west-2",
-            )
+            # if batch_job_id is not None, then we are in a batch job and should use the temp bucket
+            # TODO: use cellpack-results bucket for batch jobs once we have the correct permissions
+            if batch_job_id:
+                initialized_handler = handler(
+                    bucket_name="cellpack-demo",
+                    sub_folder_name="simularium",
+                    region_name="us-west-2",
+                )
+            else:
+                initialized_handler = handler(
+                    bucket_name="cellpack-results",
+                    sub_folder_name="simularium",
+                    region_name="us-west-2",
+                )
             file_name, url = initialized_handler.save_file_and_get_url(file_path)
             if not file_name or not url:
                 db_maintainer = DBMaintenance(initialized_handler)
@@ -1424,15 +1436,15 @@ def store_result_file(file_path, storage=None):
             return file_name, url
 
     @staticmethod
-    def store_metadata(file_name, url, db=None):
+    def store_metadata(file_name, url, db=None, job_id=None):
         if db == "firebase":
             handler = DATABASE_IDS.handlers().get(db)
             initialized_db = handler(
                 default_db="staging"
             )  # default to staging for metadata uploads
             if initialized_db._initialized:
                 db_uploader = DBUploader(initialized_db)
-                db_uploader.upload_result_metadata(file_name, url)
+                db_uploader.upload_result_metadata(file_name, url, job_id)
             else:
                 db_maintainer = DBMaintenance(initialized_db)
                 print(
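Bucket choice is the only branch that depends on the job ID: batch runs land in the temporary cellpack-demo bucket until the cellpack-results permissions in the TODO above are sorted out. A standalone sketch of that decision, with choose_bucket as a hypothetical helper that mirrors the new logic:

    import os

    def choose_bucket(batch_job_id=None):
        # Batch jobs use the temp bucket; local runs keep cellpack-results.
        return "cellpack-demo" if batch_job_id else "cellpack-results"

    job_id = os.environ.get("AWS_BATCH_JOB_ID", None)
    assert choose_bucket("abc-123") == "cellpack-demo"  # inside an AWS Batch job
    assert choose_bucket(None) == "cellpack-results"    # local run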
