
Commit

Merge pull request #16875 from mvdbeek/eliminate_more_flushes
[23.1] Remove more flushes in database operation tools
mvdbeek authored Oct 18, 2023
2 parents 634d04f + f979572 commit b116321
Showing 2 changed files with 18 additions and 10 deletions.
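
The same pattern runs through both files: dataset copies made by these database operation tools are created with flush=False instead of paying for a session flush per copy, and the batch is persisted once afterwards, when the copies are added to the history. A minimal sketch of the convention, assuming SQLAlchemy semantics and using illustrative class names (DatasetAssociation below is not Galaxy's actual model):

# Minimal sketch of a copy() method with a flush keyword (illustrative names,
# not Galaxy's real classes). With flush=False the copy is registered with the
# session but not written until the caller flushes or commits.
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base, object_session

Base = declarative_base()


class DatasetAssociation(Base):
    __tablename__ = "dataset_association"
    id = Column(Integer, primary_key=True)
    name = Column(String)

    def copy(self, flush=True):
        session = object_session(self)
        new = DatasetAssociation(name=self.name)
        session.add(new)
        if flush:
            # Old behaviour: every copy pays for its own commit.
            session.commit()
        return new


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    original = DatasetAssociation(name="input.fastq")
    session.add(original)
    session.flush()

    # New behaviour: the copies are only registered; one commit persists them all.
    copies = [original.copy(flush=False) for _ in range(3)]
    session.commit()
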
2 changes: 1 addition & 1 deletion lib/galaxy/model/__init__.py
@@ -5722,7 +5722,7 @@ def to_history_dataset_association(self, target_history, parent_id=None, add_to_
  sa_session.commit()
  return hda

- def copy(self, parent_id=None, target_folder=None):
+ def copy(self, parent_id=None, target_folder=None, flush=True):
  sa_session = object_session(self)
  ldda = LibraryDatasetDatasetAssociation(
      name=self.name,
26 changes: 17 additions & 9 deletions lib/galaxy/tools/__init__.py
@@ -3245,7 +3245,9 @@ def produce_outputs(self, trans, out_data, output_collections, incoming, history

  assert collection.collection_type == "paired"
  forward_o, reverse_o = collection.dataset_instances
- forward, reverse = forward_o.copy(copy_tags=forward_o.tags), reverse_o.copy(copy_tags=reverse_o.tags)
+ forward, reverse = forward_o.copy(copy_tags=forward_o.tags, flush=False), reverse_o.copy(
+     copy_tags=reverse_o.tags, flush=False
+ )
  self._add_datasets_to_history(history, [forward, reverse])

  out_data["forward"] = forward
@@ -3261,7 +3263,9 @@ def produce_outputs(self, trans, out_data, output_collections, incoming, history
  forward_o = incoming["input_forward"]
  reverse_o = incoming["input_reverse"]

- forward, reverse = forward_o.copy(copy_tags=forward_o.tags), reverse_o.copy(copy_tags=reverse_o.tags)
+ forward, reverse = forward_o.copy(copy_tags=forward_o.tags, flush=False), reverse_o.copy(
+     copy_tags=reverse_o.tags, flush=False
+ )
  new_elements = {}
  new_elements["forward"] = forward
  new_elements["reverse"] = reverse
@@ -3292,7 +3296,9 @@ def produce_outputs(self, trans, out_data, output_collections, incoming, history
  identifier = getattr(incoming_repeat["input"], "element_identifier", incoming_repeat["input"].name)
  elif id_select == "manual":
  identifier = incoming_repeat["id_cond"]["identifier"]
- new_elements[identifier] = incoming_repeat["input"].copy(copy_tags=incoming_repeat["input"].tags)
+ new_elements[identifier] = incoming_repeat["input"].copy(
+     copy_tags=incoming_repeat["input"].tags, flush=False
+ )

  self._add_datasets_to_history(history, new_elements.values())
  output_collections.create_collection(
@@ -3326,7 +3332,9 @@ def produce_outputs(self, trans, out_data, output_collections, incoming, history
  else:
  raise Exception("Invalid tool parameters.")
  extracted = extracted_element.element_object
- extracted_o = extracted.copy(copy_tags=extracted.tags, new_name=extracted_element.element_identifier)
+ extracted_o = extracted.copy(
+     copy_tags=extracted.tags, new_name=extracted_element.element_identifier, flush=False
+ )
  self._add_datasets_to_history(history, [extracted_o], datasets_visible=True)

  out_data["output"] = extracted_o
@@ -3409,7 +3417,7 @@ def produce_outputs(self, trans, out_data, output_collections, incoming, history
  if getattr(value, "history_content_type", None) == "dataset":
  copied_value = value.copy(copy_tags=value.tags, flush=False)
  else:
- copied_value = value.copy()
+ copied_value = value.copy(flush=False)
  new_elements[key] = copied_value

  self._add_datasets_to_history(history, new_elements.values())
@@ -3429,7 +3437,7 @@ def _get_new_elements(self, history, elements_to_copy):
  if getattr(dce.element_object, "history_content_type", None) == "dataset":
  copied_value = dce.element_object.copy(copy_tags=dce.element_object.tags, flush=False)
  else:
- copied_value = dce.element_object.copy()
+ copied_value = dce.element_object.copy(flush=False)
  new_elements[element_identifier] = copied_value
  return new_elements

@@ -3597,7 +3605,7 @@ def add_copied_value_to_new_elements(new_label, dce_object):
  if getattr(dce_object, "history_content_type", None) == "dataset":
  copied_value = dce_object.copy(copy_tags=dce_object.tags, flush=False)
  else:
- copied_value = dce_object.copy()
+ copied_value = dce_object.copy(flush=False)
  new_elements[new_label] = copied_value

  new_labels_path = new_labels_dataset_assoc.file_name
@@ -3703,7 +3711,7 @@ def add_copied_value_to_new_elements(new_tags_dict, dce):
  )
  else:
  # We have a collection, and we copy the elements so that we don't manipulate the original tags
- copied_value = dce.element_object.copy(element_destination=history)
+ copied_value = dce.element_object.copy(element_destination=history, flush=False)
  for new_element, old_element in zip(copied_value.dataset_elements, dce.element_object.dataset_elements):
  # TODO: This should be eliminated, but collections created by the collection builder
  # don't set `visible` to `False` if you don't hide the original elements.
@@ -3763,7 +3771,7 @@ def produce_outputs(self, trans, out_data, output_collections, incoming, history
  if getattr(dce_object, "history_content_type", None) == "dataset":
  copied_value = dce_object.copy(copy_tags=dce_object.tags, flush=False)
  else:
- copied_value = dce_object.copy()
+ copied_value = dce_object.copy(flush=False)

  if passes_filter:
  filtered_elements[element_identifier] = copied_value
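
Every tool hunk above follows the deferred variant of that pattern: each element is copied with flush=False, collected into new_elements or out_data, and then handed to self._add_datasets_to_history in one go, so a batch of N copies costs a single flush rather than N. A self-contained sketch of why that matters (Element and its copy() are illustrative stand-ins, not Galaxy's API), counting flushes via SQLAlchemy's after_flush session event:

# Counts how many flushes a batched copy issues (illustrative, not Galaxy code).
from sqlalchemy import Column, Integer, String, create_engine, event
from sqlalchemy.orm import Session, declarative_base, object_session

Base = declarative_base()


class Element(Base):
    __tablename__ = "element"
    id = Column(Integer, primary_key=True)
    name = Column(String)

    def copy(self, flush=True):
        session = object_session(self)
        new = Element(name=self.name)
        session.add(new)
        if flush:
            session.flush()
        return new


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
flush_count = 0

with Session(engine) as session:

    @event.listens_for(session, "after_flush")
    def count_flush(sess, flush_context):
        global flush_count
        flush_count += 1

    originals = [Element(name=f"e{i}") for i in range(100)]
    session.add_all(originals)
    session.flush()   # write the originals
    flush_count = 0   # start counting from here

    # flush=True on each copy would fire 100 flushes; flush=False plus a single
    # flush at the end fires one flush for the whole batch.
    new_elements = {o.name: o.copy(flush=False) for o in originals}
    session.flush()
    session.commit()

print(f"flushes for the batched copy: {flush_count}")  # -> 1
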
