Merge branch 'release_24.0' into dev
mvdbeek committed May 2, 2024
2 parents 142e409 + 49ca43c commit 0456ff1
Showing 8 changed files with 89 additions and 20 deletions.
6 changes: 3 additions & 3 deletions lib/galaxy/config/sample/datatypes_conf.xml.sample
@@ -217,9 +217,9 @@
<display file="igv/interval_as_bed.xml" inherit="true"/>
</datatype>
<datatype extension="jellyfish" type="galaxy.datatypes.binary:Binary" subclass="true" display_in_upload="true" description="Jellyfish database files are k-mer counts in binary format with a readable head. They are operated on and converted to human-readable text through jellyfish commands." />
<datatype extension="ktab" type="galaxy.datatypes.binary:Binary" subclass="true" description="A table of canonical k‑mers and their counts for the fastk toolkit." display_in_upload="true" description_url="https://github.com/thegenemyers/FASTK?tab=readme-ov-file#file-encodings"/>
<datatype extension="hist" type="galaxy.datatypes.binary:Binary" subclass="true" description="A binary histogram file of kmers and frequencies for the fastk toolkit." display_in_upload="true" description_url="https://github.com/thegenemyers/FASTK?tab=readme-ov-file#file-encodings"/>
<datatype extension="prof" type="galaxy.datatypes.binary:Binary" subclass="true" description="Read profile file for the fastk toolkit." display_in_upload="true" description_url="https://github.com/thegenemyers/FASTK?tab=readme-ov-file#file-encodings"/>
<datatype extension="fastk_ktab" type="galaxy.datatypes.binary:Binary" subclass="true" description="A table of canonical k‑mers and their counts for the fastk toolkit." display_in_upload="true" description_url="https://github.com/thegenemyers/FASTK?tab=readme-ov-file#file-encodings"/>
<datatype extension="fastk_hist" type="galaxy.datatypes.binary:Binary" subclass="true" description="A binary histogram file of kmers and frequencies for the fastk toolkit." display_in_upload="true" description_url="https://github.com/thegenemyers/FASTK?tab=readme-ov-file#file-encodings"/>
<datatype extension="fastk_prof" type="galaxy.datatypes.binary:Binary" subclass="true" description="Read profile file for the fastk toolkit." display_in_upload="true" description_url="https://github.com/thegenemyers/FASTK?tab=readme-ov-file#file-encodings"/>
<datatype extension="npy" type="galaxy.datatypes.binary:Numpy" description="Standard format for saving numpy arrays" display_in_upload="true" description_url="https://numpy.org/devdocs/reference/generated/numpy.lib.format.html"/>

<!-- ISA data types -->
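The datatype entries above rename the FastK extensions to fastk_-prefixed names while keeping the same binary base type. A `subclass="true"` entry asks Galaxy's datatypes registry to derive a datatype class from `galaxy.datatypes.binary:Binary` for the given extension, so no hand-written class is needed; spelled out manually it would look roughly like the sketch below (class name and docstring are illustrative, not part of this commit).

```python
# Illustrative sketch only: approximately what the registry derives for
# <datatype extension="fastk_ktab" type="galaxy.datatypes.binary:Binary" subclass="true" .../>
from galaxy.datatypes.binary import Binary


class FastkKtab(Binary):
    """Table of canonical k-mers and their counts produced by the FastK toolkit."""

    file_ext = "fastk_ktab"
```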
43 changes: 38 additions & 5 deletions lib/galaxy/model/__init__.py
@@ -3030,7 +3030,7 @@ def prune(cls, sa_session):
session.execute(q)


class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable):
class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable, UsesCreateAndUpdateTime):
__tablename__ = "history"
__table_args__ = (Index("ix_history_slug", "slug", mysql_length=200),)

@@ -3197,6 +3197,9 @@ def username(self):
def count(self):
return self.hid_counter - 1

def update(self):
self._update_time = now()

def add_pending_items(self, set_output_hid=True):
# These are assumed to be either copies of existing datasets or new, empty datasets,
# so we don't need to set the quota.
@@ -7457,7 +7460,7 @@ def __init__(self, galaxy_session, history):
self.history = history


class StoredWorkflow(Base, HasTags, Dictifiable, RepresentById):
class StoredWorkflow(Base, HasTags, Dictifiable, RepresentById, UsesCreateAndUpdateTime):
"""
StoredWorkflow represents the root node of a tree of objects that compose a workflow, including workflow revisions, steps, and subworkflows.
It is responsible for the metadata associated with a workflow including owner, name, published, and create/update time.
@@ -7797,6 +7800,25 @@ def copy(self, user=None):
for old_step, new_step in zip(self.steps, copied_steps):
old_step.copy_to(new_step, step_mapping, user=user)
copied_workflow.steps = copied_steps

copied_comments = [comment.copy() for comment in self.comments]
steps_by_id = {s.order_index: s for s in copied_workflow.steps}
comments_by_id = {c.order_index: c for c in copied_comments}

# copy comment relationships
for old_comment, new_comment in zip(self.comments, copied_comments):
for step_id in [step.order_index for step in old_comment.child_steps]:
child_step = steps_by_id.get(step_id)
if child_step:
child_step.parent_comment = new_comment

for comment_id in [comment.order_index for comment in old_comment.child_comments]:
child_comment = comments_by_id.get(comment_id)
if child_comment:
child_comment.parent_comment = new_comment

copied_workflow.comments = copied_comments

return copied_workflow

@property
@@ -7813,7 +7835,7 @@ def log_str(self):
InputConnDictType = Dict[str, Union[Dict[str, Any], List[Dict[str, Any]]]]


class WorkflowStep(Base, RepresentById):
class WorkflowStep(Base, RepresentById, UsesCreateAndUpdateTime):
"""
WorkflowStep represents a tool or subworkflow, its inputs, annotations, and any outputs that are flagged as workflow outputs.
@@ -8349,6 +8371,17 @@ def from_dict(dict):
comment.data = dict.get("data", None)
return comment

def copy(self):
comment = WorkflowComment()
comment.order_index = self.order_index
comment.type = self.type
comment.position = self.position
comment.size = self.size
comment.color = self.color
comment.data = self.data

return comment


class StoredWorkflowUserShareAssociation(Base, UserShareAssociation):
__tablename__ = "stored_workflow_user_share_connection"
@@ -10136,7 +10169,7 @@ def equals(self, user_id, provider, authn_id, config):
)


class Page(Base, HasTags, Dictifiable, RepresentById):
class Page(Base, HasTags, Dictifiable, RepresentById, UsesCreateAndUpdateTime):
__tablename__ = "page"
__table_args__ = (Index("ix_page_slug", "slug", mysql_length=200),)

@@ -10247,7 +10280,7 @@ class PageUserShareAssociation(Base, UserShareAssociation):
page: Mapped["Page"] = relationship(back_populates="users_shared_with")


class Visualization(Base, HasTags, Dictifiable, RepresentById):
class Visualization(Base, HasTags, Dictifiable, RepresentById, UsesCreateAndUpdateTime):
__tablename__ = "visualization"
__table_args__ = (
Index("ix_visualization_dbkey", "dbkey", mysql_length=200),
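The common thread in this file is change tracking: History, StoredWorkflow, WorkflowStep, Page and Visualization now also mix in UsesCreateAndUpdateTime, and the new History.update() bumps the update timestamp explicitly so operations that don't otherwise modify the row (tag edits, bulk undelete — see the later files) are still visible to clients polling update_time. The copy() additions likewise let StoredWorkflow duplication carry workflow comments across, re-attaching child steps and nested comments by order_index. Below is a minimal sketch of the update-time pattern, with assumed column declarations and mixin body rather than the exact galaxy.model code.

```python
# Minimal sketch of the explicit update-time pattern (assumed shapes; the real
# UsesCreateAndUpdateTime mixin and mapped columns in galaxy.model differ).
from datetime import datetime, timezone

from sqlalchemy import DateTime
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


def now() -> datetime:
    return datetime.now(timezone.utc)


class Base(DeclarativeBase):
    pass


class UsesCreateAndUpdateTime:
    create_time: Mapped[datetime] = mapped_column(DateTime, default=now)
    update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now)

    def update(self):
        # Touch the timestamp for changes (e.g. tag edits, bulk undelete) that
        # would not otherwise mark the row as modified.
        self.update_time = now()


class ExampleItem(Base, UsesCreateAndUpdateTime):
    __tablename__ = "example_item"

    id: Mapped[int] = mapped_column(primary_key=True)
```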
18 changes: 8 additions & 10 deletions lib/galaxy/model/store/__init__.py
@@ -1971,8 +1971,6 @@ def add(src, dest):

dir_name = "datasets"
dir_path = os.path.join(export_directory, dir_name)
dataset_hid = as_dict["hid"]
assert dataset_hid, as_dict

if dataset.dataset.id in self.dataset_id_to_path:
file_name, extra_files_path = self.dataset_id_to_path[dataset.dataset.id]
@@ -1991,7 +1989,7 @@ def add(src, dest):
self.serialization_options.get_identifier(self.security, conversion) if conversion else None
)
target_filename = get_export_dataset_filename(
as_dict["name"], as_dict["extension"], dataset_hid, conversion_key=conversion_key
as_dict["name"], as_dict["extension"], as_dict["encoded_id"], conversion_key=conversion_key
)
arcname = os.path.join(dir_name, target_filename)
src = file_name
@@ -2007,7 +2005,7 @@ def add(src, dest):

if len(file_list):
extra_files_target_filename = get_export_dataset_extra_files_dir_name(
as_dict["name"], as_dict["extension"], dataset_hid, conversion_key=conversion_key
as_dict["encoded_id"], conversion_key=conversion_key
)
arcname = os.path.join(dir_name, extra_files_target_filename)
add(extra_files_path, os.path.join(export_directory, arcname))
@@ -2978,22 +2976,22 @@ def tar_export_directory(export_directory: StrPath, out_file: StrPath, gzip: bool
store_archive.add(os.path.join(export_directory, export_path), arcname=export_path)


def get_export_dataset_filename(name: str, ext: str, hid: int, conversion_key: Optional[str]) -> str:
def get_export_dataset_filename(name: str, ext: str, encoded_id: str, conversion_key: Optional[str]) -> str:
"""
Builds a filename for a dataset using its name and extension.
"""
base = "".join(c in FILENAME_VALID_CHARS and c or "_" for c in name)
if not conversion_key:
return f"{base}_{hid}.{ext}"
return f"{base}_{encoded_id}.{ext}"
else:
return f"{base}_{hid}_conversion_{conversion_key}.{ext}"
return f"{base}_{encoded_id}_conversion_{conversion_key}.{ext}"


def get_export_dataset_extra_files_dir_name(name: str, ext: str, hid: int, conversion_key: Optional[str]) -> str:
def get_export_dataset_extra_files_dir_name(encoded_id: str, conversion_key: Optional[str]) -> str:
if not conversion_key:
return f"extra_files_path_{hid}"
return f"extra_files_path_{encoded_id}"
else:
return f"extra_files_path_{hid}_conversion_{conversion_key}"
return f"extra_files_path_{encoded_id}_conversion_{conversion_key}"


def imported_store_for_metadata(
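The export changes above stop deriving archive file names from the dataset hid — which, as the new unit test at the bottom shows, can be missing — and use the stable encoded dataset id instead (the extra-files directory name now needs only the encoded id). A self-contained illustration of the resulting names follows; the FILENAME_VALID_CHARS whitelist and the encoded id value are assumptions for the example, not taken from the Galaxy source.

```python
# Standalone illustration of the new export dataset file naming. The character
# whitelist here is an assumption (letters, digits, "-_."), and the encoded id
# is a made-up example value.
import string
from typing import Optional

FILENAME_VALID_CHARS = set(string.ascii_letters + string.digits + "-_.")


def get_export_dataset_filename(name: str, ext: str, encoded_id: str, conversion_key: Optional[str]) -> str:
    # Replace any character outside the whitelist with an underscore.
    base = "".join(c if c in FILENAME_VALID_CHARS else "_" for c in name)
    if not conversion_key:
        return f"{base}_{encoded_id}.{ext}"
    return f"{base}_{encoded_id}_conversion_{conversion_key}.{ext}"


print(get_export_dataset_filename("My dataset #1", "tabular", "f2db41e1fa331b3e", None))
# My_dataset__1_f2db41e1fa331b3e.tabular
print(get_export_dataset_filename("My dataset #1", "bed", "f2db41e1fa331b3e", "to_bed"))
# My_dataset__1_f2db41e1fa331b3e_conversion_to_bed.bed
```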
1 change: 1 addition & 0 deletions lib/galaxy/model/tags.py
@@ -95,6 +95,7 @@ def set_tags_from_list(
if flush:
with transaction(self.sa_session):
self.sa_session.commit()
item.update()
return item.tags

def get_tag_assoc_class(self, item_class):
6 changes: 5 additions & 1 deletion lib/galaxy/tool_shed/util/repository_util.py
@@ -22,7 +22,10 @@
from sqlalchemy.orm import joinedload

from galaxy import util
from galaxy.model.base import transaction
from galaxy.model.base import (
check_database_connection,
transaction,
)
from galaxy.model.scoped_session import install_model_scoped_session
from galaxy.model.tool_shed_install import ToolShedRepository
from galaxy.tool_shed.util import basic_util
@@ -282,6 +285,7 @@ def get_installed_repository(
Return a tool shed repository database record defined by the combination of a toolshed, repository name,
repository owner and either current or originally installed changeset_revision.
"""
check_database_connection(app.install_model.context)
# We store the port, if one exists, in the database.
tool_shed = common_util.remove_protocol_from_tool_shed_url(tool_shed)
if from_cache:
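check_database_connection, imported above from galaxy.model.base, is now called before querying the install model so that a stale or dropped database connection is caught up front. Its body isn't part of this diff; a generic SQLAlchemy sketch of that kind of pre-flight check (an assumed shape, not Galaxy's actual implementation) could look like this:

```python
# Assumed shape of a connection pre-flight check, not the actual
# galaxy.model.base.check_database_connection body.
from sqlalchemy import text
from sqlalchemy.exc import DBAPIError
from sqlalchemy.orm import Session


def check_database_connection(session: Session) -> None:
    """Run a trivial query so a dropped connection is detected and recycled
    before the real query executes."""
    try:
        session.execute(text("SELECT 1"))
    except DBAPIError as exc:
        if not exc.connection_invalidated:
            raise
        # The pool has discarded the dead connection; roll back and retry
        # once on a fresh connection.
        session.rollback()
        session.execute(text("SELECT 1"))
```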
3 changes: 3 additions & 0 deletions lib/galaxy/webapps/galaxy/services/history_contents.py
@@ -1452,6 +1452,9 @@ def _undelete(self, item: "HistoryItem"):
raise exceptions.ItemDeletionException("This item has been permanently deleted and cannot be recovered.")
manager = self._get_item_manager(item)
manager.undelete(item, flush=self.flush)
# Again, we need to force an update in the edge case where all selected items are already undeleted
# or when the item was purged as undelete will not trigger an update
item.update()

def _purge(self, item: "HistoryItem", trans: ProvidesHistoryContext):
if getattr(item, "purged", False):
17 changes: 16 additions & 1 deletion lib/galaxy_test/api/test_history_contents.py
@@ -1625,6 +1625,7 @@ def _get_item_with_id_from_history_contents(
return None

def _apply_bulk_operation(self, history_id: str, payload, query: str = "", expected_status_code: int = 200):
original_history_update_time = self._get_history_update_time(history_id)
if query:
query = f"?{query}"
response = self._put(
@@ -1633,8 +1634,22 @@ def _apply_bulk_operation(self, history_id: str, payload, query: str = "", expected_status_code: int = 200):
json=True,
)
self._assert_status_code_is(response, expected_status_code)
return response.json()
result = response.json()

if "err_msg" in result or result.get("success_count", 0) == 0:
# We don't need to check the history update time if there was an error or no items were updated
return result

# After a successful operation, history update time should be updated so the changes can be detected by the frontend
after_bulk_operation_history_update_time = self._get_history_update_time(history_id)
assert after_bulk_operation_history_update_time > original_history_update_time

return result

def _assert_bulk_success(self, bulk_operation_result, expected_success_count: int):
assert bulk_operation_result["success_count"] == expected_success_count, bulk_operation_result
assert not bulk_operation_result["errors"]

def _get_history_update_time(self, history_id: str):
history = self._get(f"histories/{history_id}").json()
return history.get("update_time")
15 changes: 15 additions & 0 deletions test/unit/data/model/test_model_store.py
@@ -545,6 +545,21 @@ def validate_invocation_collection_crate_directory(crate_directory):
assert dataset in root["hasPart"]


def test_export_history_with_missing_hid():
# The dataset's hid was used to compose the file name during the export but it
# can be missing sometimes. We now use the dataset's encoded id instead.
app = _mock_app()
u, history, d1, d2, j = _setup_simple_cat_job(app)

# Remove hid from d1
d1.hid = None
app.commit()

temp_directory = mkdtemp()
with store.DirectoryModelExportStore(temp_directory, app=app, export_files="copy") as export_store:
export_store.export_history(history)


def test_export_history_to_ro_crate(tmp_path):
app = _mock_app()
u, history, d1, d2, j = _setup_simple_cat_job(app)
