
Commit

Merge pull request galaxyproject#16929 from davelopez/23.1_fix_discarded_dataset_ordering

[23.1] Fix discarded dataset ordering in Storage Dashboard
martenson authored Oct 27, 2023
2 parents a2e3972 + 3950785 commit 12b20c6
Showing 2 changed files with 45 additions and 3 deletions.
6 changes: 4 additions & 2 deletions lib/galaxy/managers/hdas.py
@@ -21,6 +21,8 @@
     desc,
     false,
     func,
+    nulls_first,
+    nulls_last,
     select,
     true,
 )
@@ -347,8 +349,8 @@ def __init__(self, hda_manager: HDAManager, dataset_manager: datasets.DatasetMan
         self.sort_map = {
             StoredItemOrderBy.NAME_ASC: asc(model.HistoryDatasetAssociation.name),
             StoredItemOrderBy.NAME_DSC: desc(model.HistoryDatasetAssociation.name),
-            StoredItemOrderBy.SIZE_ASC: asc(model.Dataset.total_size),
-            StoredItemOrderBy.SIZE_DSC: desc(model.Dataset.total_size),
+            StoredItemOrderBy.SIZE_ASC: nulls_first(asc(model.Dataset.total_size)),
+            StoredItemOrderBy.SIZE_DSC: nulls_last(desc(model.Dataset.total_size)),
             StoredItemOrderBy.UPDATE_TIME_ASC: asc(model.HistoryDatasetAssociation.update_time),
             StoredItemOrderBy.UPDATE_TIME_DSC: desc(model.HistoryDatasetAssociation.update_time),
         }
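The change above wraps the size ordering expressions in SQLAlchemy's nulls_first/nulls_last modifiers, so that datasets whose total_size is still NULL (for example, discarded outputs whose job never finished) always sort first under ascending size and last under descending size. Without the modifiers, NULL placement is left to the database backend: SQLite treats NULL as smaller than any value, while PostgreSQL treats it as larger, so the same query can return different orderings. Below is a minimal, self-contained sketch of the same pattern against an illustrative dataset table, not Galaxy's actual model:

# Minimal sketch of the nulls_first/nulls_last ordering pattern used above.
# The "dataset" table here is illustrative only.
from sqlalchemy import (
    Column,
    Integer,
    MetaData,
    Table,
    asc,
    desc,
    nulls_first,
    nulls_last,
    select,
)

metadata = MetaData()
dataset = Table(
    "dataset",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("total_size", Integer, nullable=True),
)

# Ascending by size, with NULL sizes pinned to the start of the result set.
size_asc = select(dataset).order_by(nulls_first(asc(dataset.c.total_size)))

# Descending by size, with NULL sizes pinned to the end of the result set.
size_dsc = select(dataset).order_by(nulls_last(desc(dataset.c.total_size)))

On PostgreSQL the first statement renders as ORDER BY total_size ASC NULLS FIRST; recent SQLite releases accept the same clause.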
42 changes: 41 additions & 1 deletion test/integration/test_storage_cleaner.py
@@ -6,7 +6,10 @@
 from uuid import uuid4

 from galaxy_test.base.decorators import requires_new_history
-from galaxy_test.base.populators import DatasetPopulator
+from galaxy_test.base.populators import (
+    DatasetPopulator,
+    skip_without_tool,
+)
 from galaxy_test.driver import integration_util


@@ -39,6 +42,43 @@ def test_discarded_datasets_monitoring_and_cleanup(self):
             "datasets", test_datasets, dataset_ids, delete_resource_uri=f"histories/{history_id}/contents"
         )

+    @requires_new_history
+    @skip_without_tool("cat_data_and_sleep")
+    def test_discarded_datasets_with_null_size_are_sorted_correctly(self):
+        history_id = self.dataset_populator.new_history(f"History for discarded datasets {uuid4()}")
+        test_datasets = [
+            StoredItemDataForTests(name=f"TestDataset01_{uuid4()}", size=10),
+            StoredItemDataForTests(name=f"TestDataset02_{uuid4()}", size=50),
+        ]
+        dataset_ids = self._create_datasets_in_history_with(history_id, test_datasets)
+
+        # Run a tool on the first dataset and delete the output before completing the job
+        # so it has a null size in the database
+        inputs = {
+            "input1": {"src": "hda", "id": dataset_ids[0]},
+            "sleep_time": 10,
+        }
+        run_response = self.dataset_populator.run_tool_raw(
+            "cat_data_and_sleep",
+            inputs,
+            history_id,
+        )
+        null_size_dataset = run_response.json()["outputs"][0]
+        self.dataset_populator.delete_dataset(history_id, null_size_dataset["id"], stop_job=True)
+        # delete the other datasets too
+        for dataset_id in dataset_ids:
+            self.dataset_populator.delete_dataset(history_id, dataset_id)
+
+        # Check the dataset size sorting is correct [0, 10, 50]
+        item_names_forward_order = [null_size_dataset["name"], test_datasets[0].name, test_datasets[1].name]
+        item_names_reverse_order = list(reversed(item_names_forward_order))
+        expected_order_by_map = {
+            "size-asc": item_names_forward_order,
+            "size-dsc": item_names_reverse_order,
+        }
+        for order_by, expected_ordered_names in expected_order_by_map.items():
+            self._assert_order_is_expected("storage/datasets/discarded", order_by, expected_ordered_names)
+
     @requires_new_history
     def test_archived_histories_monitoring_and_cleanup(self):
         test_histories = self._build_test_items(resource_name="History")
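The new test exercises the fix end to end: it creates two discarded datasets with known sizes (10 and 50 bytes), produces a third dataset whose total_size stays NULL by deleting a cat_data_and_sleep output while its job is still running (stop_job=True), and then checks that storage/datasets/discarded returns the names as [null, 10, 50] for size-asc and in reverse for size-dsc. The _assert_order_is_expected helper is defined elsewhere in this test module and is not shown in the diff; the sketch below only illustrates what such a check could look like, and the /api/ prefix, the order_by query parameter, and the response shape are assumptions for illustration, not the exact Galaxy test API.

# Hypothetical sketch of an ordering check; the endpoint prefix, query parameter
# name, and response shape are assumptions, not the exact Galaxy test API.
from typing import List

import requests


def assert_order_is_expected(
    session: requests.Session,
    galaxy_url: str,
    storage_items_url: str,
    order_by: str,
    expected_ordered_names: List[str],
) -> None:
    # Fetch the discarded items sorted by the requested key, e.g. "size-asc".
    response = session.get(f"{galaxy_url}/api/{storage_items_url}", params={"order_by": order_by})
    response.raise_for_status()
    items = response.json()
    # Only the relative order of the returned names matters for the assertion.
    assert [item["name"] for item in items] == expected_ordered_names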
