Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[24.0] Fix source history update_time being updated when importing a public history #17728

Merged
merged 8 commits into from
Mar 21, 2024
22 changes: 20 additions & 2 deletions client/src/components/History/Modals/SelectorModal.vue
Original file line number Diff line number Diff line change
Expand Up @@ -100,16 +100,27 @@ function selectHistories() {
function setFilterValue(newFilter: string, newValue: string) {
filter.value = HistoriesFilters.setFilterValue(filter.value, newFilter, newValue);
}

// hacky workaround for popovers in date pickers being cut off
// https://github.com/galaxyproject/galaxy/issues/17711
const modalBodyClasses = computed(() => {
return [
"history-selector-modal-body",
showAdvanced.value
? "history-selector-modal-body-allow-overflow"
: "history-selector-modal-body-prevent-overflow",
];
});
</script>

<template>
<div>
<BModal
ref="modal"
v-model="propShowModal"
body-class="history-selector-modal-body"
content-class="history-selector-modal-content"
v-bind="$attrs"
:body-class="modalBodyClasses"
static
centered
hide-footer
Expand Down Expand Up @@ -174,11 +185,18 @@ function setFilterValue(newFilter: string, newValue: string) {
with scoped or lang="scss" */

.history-selector-modal-body {
overflow: hidden;
display: flex;
flex-direction: column;
}

.history-selector-modal-body-allow-overflow {
overflow: visible;
}

.history-selector-modal-body-prevent-overflow {
overflow: hidden;
}

.history-selector-modal-content {
max-height: 80vh !important;
}
Expand Down
21 changes: 10 additions & 11 deletions lib/galaxy/model/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -4364,6 +4364,8 @@ class DatasetInstance(RepresentById, UsesCreateAndUpdateTime, _HasTable):
permitted_actions = Dataset.permitted_actions
purged: bool
creating_job_associations: List[Union[JobToOutputDatasetCollectionAssociation, JobToOutputDatasetAssociation]]
copied_from_history_dataset_association: Optional["HistoryDatasetAssociation"]
copied_from_library_dataset_dataset_association: Optional["LibraryDatasetDatasetAssociation"]

validated_states = DatasetValidatedState

Expand Down Expand Up @@ -4393,6 +4395,8 @@ def __init__(
flush=True,
metadata_deferred=False,
creating_job_id=None,
copied_from_history_dataset_association=None,
copied_from_library_dataset_dataset_association=None,
):
self.name = name or "Unnamed dataset"
self.id = id
Expand All @@ -4416,6 +4420,10 @@ def __init__(
self.validated_state = validated_state
self.validated_state_message = validated_state_message
# Relationships
if copied_from_history_dataset_association:
self.copied_from_history_dataset_association_id = copied_from_history_dataset_association.id
if copied_from_library_dataset_dataset_association:
self.copied_from_library_dataset_dataset_association_id = copied_from_library_dataset_dataset_association.id
if not dataset and create_dataset:
# Had to pass the sqlalchemy session in order to create a new dataset
dataset = Dataset(state=Dataset.states.NEW)
Expand Down Expand Up @@ -4985,8 +4993,6 @@ def __init__(
self,
hid=None,
history=None,
copied_from_history_dataset_association=None,
copied_from_library_dataset_dataset_association=None,
sa_session=None,
**kwd,
):
Expand All @@ -4999,8 +5005,6 @@ def __init__(
self.hid = hid
# Relationships
self.history = history
self.copied_from_history_dataset_association = copied_from_history_dataset_association
self.copied_from_library_dataset_dataset_association = copied_from_library_dataset_dataset_association

def __strict_check_before_flush__(self):
if self.extension != "len":
Expand Down Expand Up @@ -5034,7 +5038,7 @@ def __create_version__(self, session):

# hist.deleted holds old value(s)
changes[attr.key] = hist.deleted
if self.update_time and self.state == self.states.OK and not self.deleted:
if changes and self.update_time and self.state == self.states.OK and not self.deleted:
# We only record changes to HDAs that exist in the database and have an update_time
new_values = {}
new_values["name"] = changes.get("name", self.name)
Expand Down Expand Up @@ -5770,10 +5774,9 @@ def to_dict(self, view="collection"):


class LibraryDatasetDatasetAssociation(DatasetInstance, HasName, Serializable):

def __init__(
self,
copied_from_history_dataset_association=None,
copied_from_library_dataset_dataset_association=None,
library_dataset=None,
user=None,
sa_session=None,
Expand All @@ -5782,10 +5785,6 @@ def __init__(
# FIXME: sa_session must be passed to DatasetInstance if the create_dataset
# parameter in kwd is True so that the new object can be flushed. Is there a better way?
DatasetInstance.__init__(self, sa_session=sa_session, **kwd)
if copied_from_history_dataset_association:
self.copied_from_history_dataset_association_id = copied_from_history_dataset_association.id
if copied_from_library_dataset_dataset_association:
self.copied_from_library_dataset_dataset_association_id = copied_from_library_dataset_dataset_association.id
self.library_dataset = library_dataset
self.user = user

Expand Down
6 changes: 2 additions & 4 deletions lib/galaxy/model/store/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2395,10 +2395,8 @@ def record_associated_jobs(obj):
f"Expected a HistoryDatasetAssociation or LibraryDatasetDatasetAssociation, but got a {type(hda)}: {hda}"
)
job_hda = hda
while getattr(
job_hda, "copied_from_history_dataset_association", None
): # should this check library datasets as well?
job_hda = job_hda.copied_from_history_dataset_association # type: ignore[union-attr]
while job_hda.copied_from_history_dataset_association: # should this check library datasets as well?
job_hda = job_hda.copied_from_history_dataset_association
if not job_hda.creating_job_associations:
# No viable HDA found.
continue
Expand Down
8 changes: 8 additions & 0 deletions lib/galaxy_test/api/test_histories.py
Original file line number Diff line number Diff line change
Expand Up @@ -346,6 +346,10 @@ def test_copy_history(self):
history_id, contents=["Hello", "World"], direct_upload=True
)
dataset_collection = self.dataset_collection_populator.wait_for_fetched_collection(fetch_response.json())
history = self._show(history_id)
assert "update_time" in history
original_update_time = history["update_time"]

copied_history_response = self.dataset_populator.copy_history(history_id)
copied_history_response.raise_for_status()
copied_history = copied_history_response.json()
Expand All @@ -366,6 +370,10 @@ def test_copy_history(self):
assert source_hda["history_id"] != copied_hda["history_id"]
assert source_hda["hid"] == copied_hda["hid"] == 2

history = self._show(history_id)
new_update_time = history["update_time"]
assert original_update_time == new_update_time

# TODO: (CE) test_create_from_copy
def test_import_from_model_store_dict(self):
response = self.dataset_populator.create_from_store(store_dict=history_model_store_dict())
Expand Down
13 changes: 11 additions & 2 deletions test/unit/data/test_model_copy.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,9 +43,18 @@ def test_history_dataset_copy(num_datasets=NUM_DATASETS, include_metadata_file=I
session.commit()

history_copy_timer = ExecutionTimer()
new_history = old_history.copy(target_user=old_history.user)
original_update_time = old_history.update_time
assert original_update_time
new_history = old_history.copy(name="new name", target_user=old_history.user, all_datasets=True)
session.add(new_history)
session.add(old_history)
with transaction(session):
session.commit()
session.refresh(old_history)
new_update_time = session.get(model.History, old_history.id).update_time
assert original_update_time == new_update_time
print("history copied %s" % history_copy_timer)
assert new_history.name == "HistoryCopyHistory1"
assert new_history.name == "new name"
assert new_history.user == old_history.user
for hda in new_history.active_datasets:
assert hda.get_size() == 3
Expand Down
Loading