From d4dec27a60ebaa56529379fe81b05b7a116f6d59 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Tue, 18 Oct 2022 16:07:00 -0400 Subject: [PATCH 01/73] Cleanup method signature for uninstall_repository in twilltestcase. --- lib/tool_shed/test/base/twilltestcase.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/tool_shed/test/base/twilltestcase.py b/lib/tool_shed/test/base/twilltestcase.py index b390561922e8..92100a73fce0 100644 --- a/lib/tool_shed/test/base/twilltestcase.py +++ b/lib/tool_shed/test/base/twilltestcase.py @@ -1305,7 +1305,7 @@ def undelete_repository(self, repository: Repository) -> None: strings_not_displayed: List[str] = [] self.check_for_strings(strings_displayed, strings_not_displayed) - def uninstall_repository(self, installed_repository, strings_displayed=None, strings_not_displayed=None): + def uninstall_repository(self, installed_repository: galaxy_model.ToolShedRepository) -> None: encoded_id = self.security.encode_id(installed_repository.id) api_key = get_admin_api_key() response = requests.delete( From 42b0d307079bc9bb93a3b0c879484cb859294054 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Fri, 16 Sep 2022 12:51:44 -0400 Subject: [PATCH 02/73] metadata_generator: initial typing for metadata_generator, no functional changes --- .../installed_repository_metadata_manager.py | 33 +++++---- .../tool_shed/metadata/metadata_generator.py | 71 +++++++++++++------ .../metadata/repository_metadata_manager.py | 6 +- 3 files changed, 74 insertions(+), 36 deletions(-) diff --git a/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py b/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py index 5c1d62886670..085566ead1d7 100644 --- a/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py +++ b/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py @@ -1,11 +1,16 @@ import logging import os -from typing import Optional +from typing import ( + Any, + Dict, + Optional, +) from sqlalchemy import false from galaxy import util from galaxy.model.base import transaction +from galaxy.model.tool_shed_install import ToolShedRepository from galaxy.structured_app import MinimalManagerApp from galaxy.tool_shed.galaxy_install.tools import tool_panel_manager from galaxy.tool_shed.metadata.metadata_generator import MetadataGenerator @@ -29,16 +34,16 @@ def __init__( self, app: MinimalManagerApp, tpm: Optional[tool_panel_manager.ToolPanelManager] = None, - repository=None, - changeset_revision=None, - repository_clone_url=None, - shed_config_dict=None, - relative_install_dir=None, - repository_files_dir=None, - resetting_all_metadata_on_repository=False, - updating_installed_repository=False, - persist=False, - metadata_dict=None, + repository: Optional[ToolShedRepository] = None, + changeset_revision: Optional[str] = None, + repository_clone_url: Optional[str] = None, + shed_config_dict: Optional[Dict[str, Any]] = None, + relative_install_dir: Optional[str] = None, + repository_files_dir: Optional[str] = None, + resetting_all_metadata_on_repository: bool = False, + updating_installed_repository: bool = False, + persist: bool = False, + metadata_dict: Optional[Dict[str, Any]] = None, ): super().__init__( app, @@ -187,9 +192,11 @@ def set_repository(self, repository): super().set_repository(repository) self.repository_clone_url = common_util.generate_clone_url_for_installed_repository(self.app, repository) - def 
tool_shed_from_repository_clone_url(self): + def tool_shed_from_repository_clone_url(self) -> str: """Given a repository clone URL, return the tool shed that contains the repository.""" - cleaned_repository_clone_url = common_util.remove_protocol_and_user_from_clone_url(self.repository_clone_url) + repository_clone_url = self.repository_clone_url + assert repository_clone_url + cleaned_repository_clone_url = common_util.remove_protocol_and_user_from_clone_url(repository_clone_url) return ( common_util.remove_protocol_and_user_from_clone_url(cleaned_repository_clone_url) .split("/repos/")[0] diff --git a/lib/galaxy/tool_shed/metadata/metadata_generator.py b/lib/galaxy/tool_shed/metadata/metadata_generator.py index a8025d8fd786..8b5ad3529353 100644 --- a/lib/galaxy/tool_shed/metadata/metadata_generator.py +++ b/lib/galaxy/tool_shed/metadata/metadata_generator.py @@ -1,10 +1,18 @@ import logging import os import tempfile +from typing import ( + Any, + Dict, + List, + Optional, + Tuple, +) from sqlalchemy import and_ from galaxy import util +from galaxy.structured_app import MinimalManagerApp from galaxy.tool_shed.repository_type import ( REPOSITORY_DEPENDENCY_DEFINITION_FILENAME, TOOL_DEPENDENCY_DEFINITION_FILENAME, @@ -37,15 +45,28 @@ log = logging.getLogger(__name__) +InvalidFileT = Tuple[str, str] +HandleResultT = Tuple[List, bool, str] + class MetadataGenerator: + app: MinimalManagerApp + invalid_file_tups: List[InvalidFileT] + changeset_revision: Optional[str] + repository_clone_url: Optional[str] + shed_config_dict: Dict[str, Any] + metadata_dict: Dict[str, Any] + relative_install_dir: Optional[str] + repository_files_dir: Optional[str] + persist: bool + def __init__( self, - app, + app: MinimalManagerApp, repository=None, - changeset_revision=None, - repository_clone_url=None, - shed_config_dict=None, + changeset_revision: Optional[str] = None, + repository_clone_url: Optional[str] = None, + shed_config_dict: Optional[Dict[str, Any]] = None, relative_install_dir=None, repository_files_dir=None, resetting_all_metadata_on_repository=False, @@ -120,8 +141,8 @@ def __init__( ] def _generate_data_manager_metadata( - self, repo_dir, data_manager_config_filename, metadata_dict, shed_config_dict=None - ): + self, repo_dir, data_manager_config_filename, metadata_dict: Dict[str, Any], shed_config_dict=None + ) -> Dict[str, Any]: """ Update the received metadata_dict with information from the parsed data_manager_config_filename. """ @@ -139,8 +160,8 @@ def _generate_data_manager_metadata( rel_data_manager_config_filename = os.path.join( relative_data_manager_dir, os.path.split(data_manager_config_filename)[1] ) - data_managers = {} - invalid_data_managers = [] + data_managers: Dict[str, dict] = {} + invalid_data_managers: List[dict] = [] data_manager_metadata = { "config_filename": rel_data_manager_config_filename, "data_managers": data_managers, @@ -178,7 +199,7 @@ def _generate_data_manager_metadata( # FIXME: default behavior is to fall back to tool.name. 
data_manager_name = data_manager_elem.get("name", data_manager_id) version = data_manager_elem.get("version", DataManager.DEFAULT_VERSION) - guid = self.generate_guid_for_object(DataManager.GUID_TYPE, data_manager_id, version) + guid = self._generate_guid_for_object(DataManager.GUID_TYPE, data_manager_id, version) data_tables = [] if tool_file is None: log.error(f'Data Manager entry is missing tool_file attribute in "{data_manager_config_filename}".') @@ -243,7 +264,8 @@ def generate_environment_dependency_metadata(self, elem, valid_tool_dependencies valid_tool_dependencies_dict["set_environment"] = [requirements_dict] return valid_tool_dependencies_dict - def generate_guid_for_object(self, guid_type, obj_id, version): + def _generate_guid_for_object(self, guid_type, obj_id, version) -> str: + assert self.repository_clone_url tmp_url = remove_protocol_and_user_from_clone_url(self.repository_clone_url) return f"{tmp_url}/{guid_type}/{obj_id}/{version}" @@ -745,14 +767,14 @@ def _check_elem_for_dep(elems): if rvs.invalid_tool_dependencies_dict: metadata_dict["invalid_tool_dependencies"] = rvs.invalid_tool_dependencies_dict if valid_repository_dependency_tups: - metadata_dict = self.update_repository_dependencies_metadata( + metadata_dict = self._update_repository_dependencies_metadata( metadata=metadata_dict, repository_dependency_tups=valid_repository_dependency_tups, is_valid=True, description=description, ) if invalid_repository_dependency_tups: - metadata_dict = self.update_repository_dependencies_metadata( + metadata_dict = self._update_repository_dependencies_metadata( metadata=metadata_dict, repository_dependency_tups=invalid_repository_dependency_tups, is_valid=False, @@ -820,7 +842,7 @@ def get_sample_files_from_disk(self, repository_files_dir, tool_path=None, relat sample_file_metadata_paths.append(relative_path_to_sample_file) return sample_file_metadata_paths, sample_file_copy_paths - def handle_repository_elem(self, repository_elem, only_if_compiling_contained_td=False): + def handle_repository_elem(self, repository_elem, only_if_compiling_contained_td=False) -> HandleResultT: """ Process the received repository_elem which is a tag either from a repository_dependencies.xml file or a tool_dependencies.xml file. If the former, @@ -1005,13 +1027,15 @@ def _set_add_to_tool_panel_attribute_for_tool(self, tool): return False return True - def set_changeset_revision(self, changeset_revision): + def set_changeset_revision(self, changeset_revision: Optional[str]): self.changeset_revision = changeset_revision - def set_relative_install_dir(self, relative_install_dir): + def set_relative_install_dir(self, relative_install_dir: Optional[str]): self.relative_install_dir = relative_install_dir - def set_repository(self, repository, relative_install_dir=None, changeset_revision=None): + def set_repository( + self, repository, relative_install_dir: Optional[str] = None, changeset_revision: Optional[str] = None + ): self.repository = repository # Shed related tool panel configs are only relevant to Galaxy. 
if self.app.name == "galaxy": @@ -1039,13 +1063,16 @@ def set_repository(self, repository, relative_install_dir=None, changeset_revisi self.persist = False self.invalid_file_tups = [] - def set_repository_clone_url(self, repository_clone_url): - self.repository_clone_url = repository_clone_url - - def set_repository_files_dir(self, repository_files_dir=None): + def set_repository_files_dir(self, repository_files_dir: Optional[str] = None): self.repository_files_dir = repository_files_dir - def update_repository_dependencies_metadata(self, metadata, repository_dependency_tups, is_valid, description): + def _update_repository_dependencies_metadata( + self, + metadata: Dict[str, Any], + repository_dependency_tups: List[tuple], + is_valid: bool, + description: Optional[str], + ) -> Dict[str, Any]: if is_valid: repository_dependencies_dict = metadata.get("repository_dependencies", None) else: @@ -1069,7 +1096,7 @@ def update_repository_dependencies_metadata(self, metadata, repository_dependenc return metadata -def _get_readme_file_names(repository_name): +def _get_readme_file_names(repository_name: str) -> List[str]: """Return a list of file names that will be categorized as README files for the received repository_name.""" readme_files = ["readme", "read_me", "install"] valid_filenames = [f"{f}.txt" for f in readme_files] diff --git a/lib/tool_shed/metadata/repository_metadata_manager.py b/lib/tool_shed/metadata/repository_metadata_manager.py index c6e850acd175..5e7d2e3b3f2a 100644 --- a/lib/tool_shed/metadata/repository_metadata_manager.py +++ b/lib/tool_shed/metadata/repository_metadata_manager.py @@ -1,5 +1,9 @@ import logging import tempfile +from typing import ( + List, + Optional, +) from sqlalchemy import ( false, @@ -844,7 +848,7 @@ def reset_all_metadata_on_repository_in_tool_shed(self): # The list of changeset_revisions refers to repository_metadata records that have been created # or updated. When the following loop completes, we'll delete all repository_metadata records # for this repository that do not have a changeset_revision value in this list. - changeset_revisions = [] + changeset_revisions: List[Optional[str]] = [] # When a new repository_metadata record is created, it always uses the values of # metadata_changeset_revision and metadata_dict. metadata_changeset_revision = None From 338dfe961b57a9521f0d747383b62fed06d9c980 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Tue, 13 Sep 2022 16:43:18 -0400 Subject: [PATCH 03/73] metadata_generator: populate initial dict with helper (to split later) --- .../tool_shed/metadata/metadata_generator.py | 20 ++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/lib/galaxy/tool_shed/metadata/metadata_generator.py b/lib/galaxy/tool_shed/metadata/metadata_generator.py index 8b5ad3529353..73ea72e468b8 100644 --- a/lib/galaxy/tool_shed/metadata/metadata_generator.py +++ b/lib/galaxy/tool_shed/metadata/metadata_generator.py @@ -269,6 +269,14 @@ def _generate_guid_for_object(self, guid_type, obj_id, version) -> str: tmp_url = remove_protocol_and_user_from_clone_url(self.repository_clone_url) return f"{tmp_url}/{guid_type}/{obj_id}/{version}" + def initial_metadata_dict(self) -> Dict[str, Any]: + if self.app.name == "galaxy": + # Shed related tool panel configs are only relevant to Galaxy. 
+            metadata_dict = {"shed_config_filename": self.shed_config_dict.get("config_filename")}
+        else:
+            metadata_dict = {}
+        return metadata_dict
+
     def generate_metadata_for_changeset_revision(self):
         """
         Generate metadata for a repository using its files on disk. To generate metadata
@@ -292,11 +300,7 @@ def generate_metadata_for_changeset_revision(self):
         else:
             original_repository_metadata = None
         readme_file_names = _get_readme_file_names(str(self.repository.name))
-        if self.app.name == "galaxy":
-            # Shed related tool panel configs are only relevant to Galaxy.
-            metadata_dict = {"shed_config_filename": self.shed_config_dict.get("config_filename")}
-        else:
-            metadata_dict = {}
+        metadata_dict = self.initial_metadata_dict()
         readme_files = []
         invalid_tool_configs = []
         if self.resetting_all_metadata_on_repository:
@@ -1046,7 +1050,6 @@ def set_repository(
             else:
                 self.set_changeset_revision(changeset_revision)
             self.shed_config_dict = repository.get_shed_config_dict(self.app)
-            self.metadata_dict = {"shed_config_filename": self.shed_config_dict.get("config_filename", None)}
         else:
             if relative_install_dir is None and self.repository is not None:
                 relative_install_dir = repository.repo_path(self.app)
@@ -1055,7 +1058,10 @@
             else:
                 self.set_changeset_revision(changeset_revision)
             self.shed_config_dict = {}
-            self.metadata_dict = {}
+        self._reset_attributes_after_repository_update(relative_install_dir)
+
+    def _reset_attributes_after_repository_update(self, relative_install_dir: Optional[str]):
+        self.metadata_dict = self.initial_metadata_dict()
         self.set_relative_install_dir(relative_install_dir)
         self.set_repository_files_dir()
         self.resetting_all_metadata_on_repository = False

From f000b830f5eaabc7de21c851f1eb40523bafb105 Mon Sep 17 00:00:00 2001
From: John Chilton
Date: Thu, 15 Sep 2022 10:04:52 -0400
Subject: [PATCH 04/73] metadata_generator: Split into Base and Galaxy and ToolShed implementations...

... so we can get the typing right and not depend on app.name.
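The resulting shape, as a minimal illustrative sketch (not the final
Galaxy code; the hook shown mirrors the initial_metadata_dict() helper
extracted in the previous commit): shared logic stays on the base class,
and anything that previously branched on app.name becomes a method
overridden per application.

    from typing import Any, Dict

    class BaseMetadataGenerator:
        shed_config_dict: Dict[str, Any] = {}

        def initial_metadata_dict(self) -> Dict[str, Any]:
            # Each application supplies its own starting metadata dict.
            raise NotImplementedError

    class GalaxyMetadataGenerator(BaseMetadataGenerator):
        def initial_metadata_dict(self) -> Dict[str, Any]:
            # Shed related tool panel configs are only relevant to Galaxy.
            return {"shed_config_filename": self.shed_config_dict.get("config_filename")}

    class ToolShedMetadataGenerator(BaseMetadataGenerator):
        def initial_metadata_dict(self) -> Dict[str, Any]:
            return {}

Subclass code never checks app.name; callers simply construct the
generator that matches their application.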
---
 .../metadata/installed_repository_metadata_manager.py | 4 ++--
 lib/galaxy/tool_shed/metadata/metadata_generator.py | 10 +++++++++-
 lib/tool_shed/metadata/metadata_generator.py | 3 ---
 lib/tool_shed/metadata/repository_metadata_manager.py | 4 ++--
 4 files changed, 13 insertions(+), 8 deletions(-)
 delete mode 100644 lib/tool_shed/metadata/metadata_generator.py

diff --git a/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py b/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py
index 085566ead1d7..22c567a9ee81 100644
--- a/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py
+++ b/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py
@@ -13,7 +13,7 @@
 from galaxy.model.tool_shed_install import ToolShedRepository
 from galaxy.structured_app import MinimalManagerApp
 from galaxy.tool_shed.galaxy_install.tools import tool_panel_manager
-from galaxy.tool_shed.metadata.metadata_generator import MetadataGenerator
+from galaxy.tool_shed.metadata.metadata_generator import GalaxyMetadataGenerator
 from galaxy.tool_shed.util.repository_util import (
     get_installed_tool_shed_repository,
     get_repository_owner,
@@ -29,7 +29,7 @@
 log = logging.getLogger(__name__)


-class InstalledRepositoryMetadataManager(MetadataGenerator):
+class InstalledRepositoryMetadataManager(GalaxyMetadataGenerator):
     def __init__(
         self,
         app: MinimalManagerApp,
diff --git a/lib/galaxy/tool_shed/metadata/metadata_generator.py b/lib/galaxy/tool_shed/metadata/metadata_generator.py
index 73ea72e468b8..427aed01ecac 100644
--- a/lib/galaxy/tool_shed/metadata/metadata_generator.py
+++ b/lib/galaxy/tool_shed/metadata/metadata_generator.py
@@ -49,7 +49,7 @@
 HandleResultT = Tuple[List, bool, str]


-class MetadataGenerator:
+class BaseMetadataGenerator:
     app: MinimalManagerApp
     invalid_file_tups: List[InvalidFileT]
     changeset_revision: Optional[str]
@@ -1102,6 +1102,14 @@ def _update_repository_dependencies_metadata(
         return metadata


+class GalaxyMetadataGenerator(BaseMetadataGenerator):
+    """A MetadataGenerator building on Galaxy's app and repository constructs."""
+
+
+class ToolShedMetadataGenerator(BaseMetadataGenerator):
+    """A MetadataGenerator building on ToolShed's app and repository constructs."""
+
+
 def _get_readme_file_names(repository_name: str) -> List[str]:
     """Return a list of file names that will be categorized as README files for the received repository_name."""
     readme_files = ["readme", "read_me", "install"]
diff --git a/lib/tool_shed/metadata/metadata_generator.py b/lib/tool_shed/metadata/metadata_generator.py
deleted file mode 100644
index 06b7720d4a43..000000000000
--- a/lib/tool_shed/metadata/metadata_generator.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from galaxy.tool_shed.metadata.metadata_generator import MetadataGenerator
-
-__all__ = ("MetadataGenerator",)
diff --git a/lib/tool_shed/metadata/repository_metadata_manager.py b/lib/tool_shed/metadata/repository_metadata_manager.py
index 5e7d2e3b3f2a..b4492186b1f3 100644
--- a/lib/tool_shed/metadata/repository_metadata_manager.py
+++ b/lib/tool_shed/metadata/repository_metadata_manager.py
@@ -14,7 +14,7 @@
 from galaxy.model.base import transaction
 from galaxy.util import inflector
 from galaxy.web.form_builder import SelectField
-from tool_shed.metadata import metadata_generator
+from galaxy.tool_shed.metadata.metadata_generator import ToolShedMetadataGenerator
 from tool_shed.repository_types import util as rt_util
 from 
tool_shed.repository_types.metadata import TipOnly from tool_shed.structured_app import ToolShedApp @@ -31,7 +31,7 @@ log = logging.getLogger(__name__) -class RepositoryMetadataManager(metadata_generator.MetadataGenerator): +class RepositoryMetadataManager(ToolShedMetadataGenerator): def __init__( self, app: ToolShedApp, From 3ba9888c6fdacb4a55ec8b160d58b71787674772 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Thu, 15 Sep 2022 10:07:43 -0400 Subject: [PATCH 05/73] metadata_generator: simplify NOT_TOOL_CONFIG handling --- .../tool_shed/metadata/metadata_generator.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/lib/galaxy/tool_shed/metadata/metadata_generator.py b/lib/galaxy/tool_shed/metadata/metadata_generator.py index 427aed01ecac..c64b05a86256 100644 --- a/lib/galaxy/tool_shed/metadata/metadata_generator.py +++ b/lib/galaxy/tool_shed/metadata/metadata_generator.py @@ -48,6 +48,13 @@ InvalidFileT = Tuple[str, str] HandleResultT = Tuple[List, bool, str] +NOT_TOOL_CONFIGS = [ + suc.DATATYPES_CONFIG_FILENAME, + REPOSITORY_DEPENDENCY_DEFINITION_FILENAME, + TOOL_DEPENDENCY_DEFINITION_FILENAME, + suc.REPOSITORY_DATA_MANAGER_CONFIG_FILENAME, +] + class BaseMetadataGenerator: app: MinimalManagerApp @@ -133,12 +140,6 @@ def __init__( self.persist = persist self.invalid_file_tups = [] self.sa_session = app.model.session - self.NOT_TOOL_CONFIGS = [ - suc.DATATYPES_CONFIG_FILENAME, - REPOSITORY_DEPENDENCY_DEFINITION_FILENAME, - TOOL_DEPENDENCY_DEFINITION_FILENAME, - suc.REPOSITORY_DATA_MANAGER_CONFIG_FILENAME, - ] def _generate_data_manager_metadata( self, repo_dir, data_manager_config_filename, metadata_dict: Dict[str, Any], shed_config_dict=None @@ -377,7 +378,7 @@ def generate_metadata_for_changeset_revision(self): ) readme_files.append(relative_path_to_readme) # See if we have a tool config. - elif looks_like_a_tool(os.path.join(root, name), invalid_names=self.NOT_TOOL_CONFIGS): + elif looks_like_a_tool(os.path.join(root, name), invalid_names=NOT_TOOL_CONFIGS): full_path = str(os.path.abspath(os.path.join(root, name))) # why the str, seems very odd element_tree, error_message = parse_xml(full_path) if element_tree is None: From f338fc7a1363cfe1ac2ebc18f166365204af3f70 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Thu, 15 Sep 2022 10:09:04 -0400 Subject: [PATCH 06/73] metadata_generator: split constructors into Galaxy and tool shed versions. 
Stronger typing and flatter functions --- .../tool_shed/metadata/metadata_generator.py | 171 ++++++++++-------- 1 file changed, 97 insertions(+), 74 deletions(-) diff --git a/lib/galaxy/tool_shed/metadata/metadata_generator.py b/lib/galaxy/tool_shed/metadata/metadata_generator.py index c64b05a86256..f74674bd9500 100644 --- a/lib/galaxy/tool_shed/metadata/metadata_generator.py +++ b/lib/galaxy/tool_shed/metadata/metadata_generator.py @@ -67,80 +67,6 @@ class BaseMetadataGenerator: repository_files_dir: Optional[str] persist: bool - def __init__( - self, - app: MinimalManagerApp, - repository=None, - changeset_revision: Optional[str] = None, - repository_clone_url: Optional[str] = None, - shed_config_dict: Optional[Dict[str, Any]] = None, - relative_install_dir=None, - repository_files_dir=None, - resetting_all_metadata_on_repository=False, - updating_installed_repository=False, - persist=False, - metadata_dict=None, - user=None, - ): - self.app = app - self.user = user - self.repository = repository - if self.app.name == "galaxy": - if changeset_revision is None and self.repository is not None: - self.changeset_revision = self.repository.changeset_revision - else: - self.changeset_revision = changeset_revision - - if repository_clone_url is None and self.repository is not None: - self.repository_clone_url = generate_clone_url_for_installed_repository(self.app, self.repository) - else: - self.repository_clone_url = repository_clone_url - if shed_config_dict is None: - if self.repository is not None: - self.shed_config_dict = self.repository.get_shed_config_dict(self.app) - else: - self.shed_config_dict = {} - else: - self.shed_config_dict = shed_config_dict - if relative_install_dir is None and self.repository is not None: - tool_path, relative_install_dir = self.repository.get_tool_relative_path(self.app) - if repository_files_dir is None and self.repository is not None: - repository_files_dir = self.repository.repo_files_directory(self.app) - if metadata_dict is None: - # Shed related tool panel configs are only relevant to Galaxy. - self.metadata_dict = {"shed_config_filename": self.shed_config_dict.get("config_filename", None)} - else: - self.metadata_dict = metadata_dict - else: - # We're in the Tool Shed. 
- if changeset_revision is None and self.repository is not None: - self.changeset_revision = self.repository.tip() - else: - self.changeset_revision = changeset_revision - if repository_clone_url is None and self.repository is not None: - self.repository_clone_url = generate_clone_url_for_repository_in_tool_shed(self.user, self.repository) - else: - self.repository_clone_url = repository_clone_url - if shed_config_dict is None: - self.shed_config_dict = {} - else: - self.shed_config_dict = shed_config_dict - if relative_install_dir is None and self.repository is not None: - relative_install_dir = self.repository.repo_path(self.app) - if repository_files_dir is None and self.repository is not None: - repository_files_dir = self.repository.repo_path(self.app) - if metadata_dict is None: - self.metadata_dict = {} - else: - self.metadata_dict = metadata_dict - self.relative_install_dir = relative_install_dir - self.repository_files_dir = repository_files_dir - self.resetting_all_metadata_on_repository = resetting_all_metadata_on_repository - self.updating_installed_repository = updating_installed_repository - self.persist = persist - self.invalid_file_tups = [] - self.sa_session = app.model.session - def _generate_data_manager_metadata( self, repo_dir, data_manager_config_filename, metadata_dict: Dict[str, Any], shed_config_dict=None ) -> Dict[str, Any]: @@ -1106,10 +1032,107 @@ def _update_repository_dependencies_metadata( class GalaxyMetadataGenerator(BaseMetadataGenerator): """A MetadataGenerator building on Galaxy's app and repository constructs.""" + def __init__( + self, + app: MinimalManagerApp, + repository=None, + changeset_revision: Optional[str] = None, + repository_clone_url: Optional[str] = None, + shed_config_dict: Optional[Dict[str, Any]] = None, + relative_install_dir=None, + repository_files_dir=None, + resetting_all_metadata_on_repository=False, + updating_installed_repository=False, + persist=False, + metadata_dict=None, + user=None, + ): + self.app = app + self.user = user + self.repository = repository + if changeset_revision is None and self.repository is not None: + self.changeset_revision = self.repository.changeset_revision + else: + self.changeset_revision = changeset_revision + + if repository_clone_url is None and self.repository is not None: + self.repository_clone_url = generate_clone_url_for_installed_repository(self.app, self.repository) + else: + self.repository_clone_url = repository_clone_url + if shed_config_dict is None: + if self.repository is not None: + self.shed_config_dict = self.repository.get_shed_config_dict(self.app) + else: + self.shed_config_dict = {} + else: + self.shed_config_dict = shed_config_dict + if relative_install_dir is None and self.repository is not None: + tool_path, relative_install_dir = self.repository.get_tool_relative_path(self.app) + if repository_files_dir is None and self.repository is not None: + repository_files_dir = self.repository.repo_files_directory(self.app) + if metadata_dict is None: + # Shed related tool panel configs are only relevant to Galaxy. 
+ self.metadata_dict = {"shed_config_filename": self.shed_config_dict.get("config_filename", None)} + else: + self.metadata_dict = metadata_dict + self.relative_install_dir = relative_install_dir + self.repository_files_dir = repository_files_dir + self.resetting_all_metadata_on_repository = resetting_all_metadata_on_repository + self.updating_installed_repository = updating_installed_repository + self.persist = persist + self.invalid_file_tups = [] + self.sa_session = app.model.session + class ToolShedMetadataGenerator(BaseMetadataGenerator): """A MetadataGenerator building on ToolShed's app and repository constructs.""" + def __init__( + self, + app: MinimalManagerApp, + repository=None, + changeset_revision: Optional[str] = None, + repository_clone_url: Optional[str] = None, + shed_config_dict: Optional[Dict[str, Any]] = None, + relative_install_dir=None, + repository_files_dir=None, + resetting_all_metadata_on_repository=False, + updating_installed_repository=False, + persist=False, + metadata_dict=None, + user=None, + ): + self.app = app + self.user = user + self.repository = repository + if changeset_revision is None and self.repository is not None: + self.changeset_revision = self.repository.tip() + else: + self.changeset_revision = changeset_revision + if repository_clone_url is None and self.repository is not None: + self.repository_clone_url = generate_clone_url_for_repository_in_tool_shed(self.user, self.repository) + else: + self.repository_clone_url = repository_clone_url + if shed_config_dict is None: + self.shed_config_dict = {} + else: + self.shed_config_dict = shed_config_dict + if relative_install_dir is None and self.repository is not None: + relative_install_dir = self.repository.repo_path(self.app) + if repository_files_dir is None and self.repository is not None: + repository_files_dir = self.repository.repo_path(self.app) + if metadata_dict is None: + self.metadata_dict = {} + else: + self.metadata_dict = metadata_dict + self.relative_install_dir = relative_install_dir + self.repository_files_dir = repository_files_dir + self.resetting_all_metadata_on_repository = resetting_all_metadata_on_repository + self.updating_installed_repository = updating_installed_repository + self.persist = persist + self.invalid_file_tups = [] + self.sa_session = app.model.session + def _get_readme_file_names(repository_name: str) -> List[str]: """Return a list of file names that will be categorized as README files for the received repository_name.""" From cb8a7f42d1b7026c8b39c24937241459797e39de Mon Sep 17 00:00:00 2001 From: John Chilton Date: Thu, 15 Sep 2022 10:10:53 -0400 Subject: [PATCH 07/73] metadata_generator: split initial_metadata_dict into Galaxy and TS implementations. --- .../tool_shed/metadata/metadata_generator.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/lib/galaxy/tool_shed/metadata/metadata_generator.py b/lib/galaxy/tool_shed/metadata/metadata_generator.py index f74674bd9500..0323d800d867 100644 --- a/lib/galaxy/tool_shed/metadata/metadata_generator.py +++ b/lib/galaxy/tool_shed/metadata/metadata_generator.py @@ -67,6 +67,9 @@ class BaseMetadataGenerator: repository_files_dir: Optional[str] persist: bool + def initial_metadata_dict(self) -> Dict[str, Any]: + ... 
+ def _generate_data_manager_metadata( self, repo_dir, data_manager_config_filename, metadata_dict: Dict[str, Any], shed_config_dict=None ) -> Dict[str, Any]: @@ -196,14 +199,6 @@ def _generate_guid_for_object(self, guid_type, obj_id, version) -> str: tmp_url = remove_protocol_and_user_from_clone_url(self.repository_clone_url) return f"{tmp_url}/{guid_type}/{obj_id}/{version}" - def initial_metadata_dict(self) -> Dict[str, Any]: - if self.app.name == "galaxy": - # Shed related tool panel configs are only relevant to Galaxy. - metadata_dict = {"shed_config_filename": self.shed_config_dict.get("config_filename")} - else: - metadata_dict = {} - return metadata_dict - def generate_metadata_for_changeset_revision(self): """ Generate metadata for a repository using its files on disk. To generate metadata @@ -1083,6 +1078,11 @@ def __init__( self.invalid_file_tups = [] self.sa_session = app.model.session + def initial_metadata_dict(self) -> Dict[str, Any]: + # Shed related tool panel configs are only relevant to Galaxy. + metadata_dict = {"shed_config_filename": self.shed_config_dict.get("config_filename")} + return metadata_dict + class ToolShedMetadataGenerator(BaseMetadataGenerator): """A MetadataGenerator building on ToolShed's app and repository constructs.""" @@ -1133,6 +1133,9 @@ def __init__( self.invalid_file_tups = [] self.sa_session = app.model.session + def initial_metadata_dict(self) -> Dict[str, Any]: + return {} + def _get_readme_file_names(repository_name: str) -> List[str]: """Return a list of file names that will be categorized as README files for the received repository_name.""" From ea74b1f406eab476e3be8c707bfa3c5d1bd394c1 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Thu, 15 Sep 2022 10:13:03 -0400 Subject: [PATCH 08/73] metadata_generator: split set_repository into Galaxy and TS implementations. --- .../tool_shed/metadata/metadata_generator.py | 49 ++++++++++--------- 1 file changed, 26 insertions(+), 23 deletions(-) diff --git a/lib/galaxy/tool_shed/metadata/metadata_generator.py b/lib/galaxy/tool_shed/metadata/metadata_generator.py index 0323d800d867..809a9d987dbf 100644 --- a/lib/galaxy/tool_shed/metadata/metadata_generator.py +++ b/lib/galaxy/tool_shed/metadata/metadata_generator.py @@ -959,29 +959,6 @@ def set_changeset_revision(self, changeset_revision: Optional[str]): def set_relative_install_dir(self, relative_install_dir: Optional[str]): self.relative_install_dir = relative_install_dir - def set_repository( - self, repository, relative_install_dir: Optional[str] = None, changeset_revision: Optional[str] = None - ): - self.repository = repository - # Shed related tool panel configs are only relevant to Galaxy. 
- if self.app.name == "galaxy": - if relative_install_dir is None and self.repository is not None: - tool_path, relative_install_dir = self.repository.get_tool_relative_path(self.app) - if changeset_revision is None and self.repository is not None: - self.set_changeset_revision(self.repository.changeset_revision) - else: - self.set_changeset_revision(changeset_revision) - self.shed_config_dict = repository.get_shed_config_dict(self.app) - else: - if relative_install_dir is None and self.repository is not None: - relative_install_dir = repository.repo_path(self.app) - if changeset_revision is None and self.repository is not None: - self.set_changeset_revision(self.repository.tip()) - else: - self.set_changeset_revision(changeset_revision) - self.shed_config_dict = {} - self._reset_attributes_after_repository_update(relative_install_dir) - def _reset_attributes_after_repository_update(self, relative_install_dir: Optional[str]): self.metadata_dict = self.initial_metadata_dict() self.set_relative_install_dir(relative_install_dir) @@ -1083,6 +1060,19 @@ def initial_metadata_dict(self) -> Dict[str, Any]: metadata_dict = {"shed_config_filename": self.shed_config_dict.get("config_filename")} return metadata_dict + def set_repository( + self, repository, relative_install_dir: Optional[str] = None, changeset_revision: Optional[str] = None + ): + self.repository = repository + if relative_install_dir is None and self.repository is not None: + tool_path, relative_install_dir = self.repository.get_tool_relative_path(self.app) + if changeset_revision is None and self.repository is not None: + self.set_changeset_revision(self.repository.changeset_revision) + else: + self.set_changeset_revision(changeset_revision) + self.shed_config_dict = repository.get_shed_config_dict(self.app) + self._reset_attributes_after_repository_update(relative_install_dir) + class ToolShedMetadataGenerator(BaseMetadataGenerator): """A MetadataGenerator building on ToolShed's app and repository constructs.""" @@ -1136,6 +1126,19 @@ def __init__( def initial_metadata_dict(self) -> Dict[str, Any]: return {} + def set_repository( + self, repository, relative_install_dir: Optional[str] = None, changeset_revision: Optional[str] = None + ): + self.repository = repository + if relative_install_dir is None and self.repository is not None: + relative_install_dir = repository.repo_path(self.app) + if changeset_revision is None and self.repository is not None: + self.set_changeset_revision(self.repository.tip()) + else: + self.set_changeset_revision(changeset_revision) + self.shed_config_dict = {} + self._reset_attributes_after_repository_update(relative_install_dir) + def _get_readme_file_names(repository_name: str) -> List[str]: """Return a list of file names that will be categorized as README files for the received repository_name.""" From d22eb01912611a1e7aa981af8a70247c26ab51cb Mon Sep 17 00:00:00 2001 From: John Chilton Date: Thu, 15 Sep 2022 10:15:39 -0400 Subject: [PATCH 09/73] metadata_generator: Split handle_repo_elem implementations into gx and ts versions. Avoid all the big conditionals in the middle - again for stronger typing and flatter functions. 
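In sketch form, the attribute parsing stays common while each side
validates with early returns instead of one deeply nested app.name
conditional. This is an illustrative sketch only (simplified signatures
and a hypothetical parse_repository_elem helper, not the code below),
reusing the HandleResultT alias introduced earlier in the series:

    from typing import List, Optional, Tuple
    from xml.etree.ElementTree import Element

    HandleResultT = Tuple[List, bool, str]

    def parse_repository_elem(repository_elem: Element) -> List[Optional[str]]:
        # Shared prologue: pull the dependency attributes off the <repository> tag.
        return [repository_elem.get(attr) for attr in ("toolshed", "name", "owner", "changeset_revision")]

    def galaxy_handle_repository_elem(repository_elem: Element) -> HandleResultT:
        repository_dependency_tup = parse_repository_elem(repository_elem)
        if None in repository_dependency_tup:
            # Early return replaces a level of nesting in the old combined method.
            return repository_dependency_tup, False, "invalid repository dependency definition"
        return repository_dependency_tup, True, ""

The tool shed variant keeps the same prologue and return type but applies
its own changelog-based validation.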
--- .../tool_shed/metadata/metadata_generator.py | 385 ++++++++++-------- 1 file changed, 211 insertions(+), 174 deletions(-) diff --git a/lib/galaxy/tool_shed/metadata/metadata_generator.py b/lib/galaxy/tool_shed/metadata/metadata_generator.py index 809a9d987dbf..2db212326953 100644 --- a/lib/galaxy/tool_shed/metadata/metadata_generator.py +++ b/lib/galaxy/tool_shed/metadata/metadata_generator.py @@ -768,180 +768,6 @@ def get_sample_files_from_disk(self, repository_files_dir, tool_path=None, relat sample_file_metadata_paths.append(relative_path_to_sample_file) return sample_file_metadata_paths, sample_file_copy_paths - def handle_repository_elem(self, repository_elem, only_if_compiling_contained_td=False) -> HandleResultT: - """ - Process the received repository_elem which is a tag either from a - repository_dependencies.xml file or a tool_dependencies.xml file. If the former, - we're generating repository dependencies metadata for a repository in the Tool Shed. - If the latter, we're generating package dependency metadata within Galaxy or the - Tool Shed. - """ - is_valid = True - error_message = "" - toolshed = repository_elem.get("toolshed", None) - name = repository_elem.get("name", None) - owner = repository_elem.get("owner", None) - changeset_revision = repository_elem.get("changeset_revision", None) - prior_installation_required = str(repository_elem.get("prior_installation_required", False)) - repository_dependency_tup = [ - toolshed, - name, - owner, - changeset_revision, - prior_installation_required, - str(only_if_compiling_contained_td), - ] - if self.app.name == "galaxy": - if self.updating_installed_repository: - pass - else: - # We're installing a repository into Galaxy, so make sure its contained repository - # dependency definition is valid. - if toolshed is None or name is None or owner is None or changeset_revision is None: - # Several packages exist in the Tool Shed that contain invalid repository - # definitions, but will still install. We will report these errors to the - # installing user. Previously, we would: - # Raise an exception here instead of returning an error_message to keep the - # installation from proceeding. Reaching here implies a bug in the Tool Shed - # framework. - error_message = "Installation encountered an invalid repository dependency definition:\n" - error_message += util.xml_to_string(repository_elem, pretty=True) - log.error(error_message) - return repository_dependency_tup, False, error_message - if not toolshed: - # Default to the current tool shed. - toolshed = str(url_for("/", qualified=True)).rstrip("/") - repository_dependency_tup[0] = toolshed - toolshed = remove_protocol_from_tool_shed_url(toolshed) - if self.app.name == "galaxy": - # We're in Galaxy. We reach here when we're generating the metadata for a tool - # dependencies package defined for a repository or when we're generating metadata - # for an installed repository. See if we can locate the installed repository via - # the changeset_revision defined in the repository_elem (it may be outdated). If - # we're successful in locating an installed repository with the attributes defined - # in the repository_elem, we know it is valid. - repository = get_repository_for_dependency_relationship(self.app, toolshed, name, owner, changeset_revision) - if repository: - return repository_dependency_tup, is_valid, error_message - else: - # Send a request to the tool shed to retrieve appropriate additional changeset - # revisions with which the repository - # may have been installed. 
- text = get_updated_changeset_revisions_from_tool_shed( - self.app, toolshed, name, owner, changeset_revision - ) - if text: - updated_changeset_revisions = util.listify(text) - for updated_changeset_revision in updated_changeset_revisions: - repository = get_repository_for_dependency_relationship( - self.app, toolshed, name, owner, updated_changeset_revision - ) - if repository: - return repository_dependency_tup, is_valid, error_message - if self.updating_installed_repository: - # The repository dependency was included in an update to the installed - # repository, so it will not yet be installed. Return the tuple for later - # installation. - return repository_dependency_tup, is_valid, error_message - if self.updating_installed_repository: - # The repository dependency was included in an update to the installed repository, - # so it will not yet be installed. Return the tuple for later installation. - return repository_dependency_tup, is_valid, error_message - # Don't generate an error message for missing repository dependencies that are required - # only if compiling the dependent repository's tool dependency. - if not only_if_compiling_contained_td: - # We'll currently default to setting the repository dependency definition as invalid - # if an installed repository cannot be found. This may not be ideal because the tool - # shed may have simply been inaccessible when metadata was being generated for the - # installed tool shed repository. - error_message = ( - f"Ignoring invalid repository dependency definition for tool shed {toolshed}, name {name}, owner {owner}, " - f"changeset revision {changeset_revision}." - ) - log.debug(error_message) - is_valid = False - return repository_dependency_tup, is_valid, error_message - else: - # We're in the tool shed. - if suc.tool_shed_is_this_tool_shed(toolshed): - try: - user = ( - self.sa_session.query(self.app.model.User) - .filter(self.app.model.User.table.c.username == owner) - .one() - ) - except Exception: - error_message = ( - f"Ignoring repository dependency definition for tool shed {toolshed}, name {name}, owner {owner}, " - f"changeset revision {changeset_revision} because the owner is invalid." - ) - log.debug(error_message) - is_valid = False - return repository_dependency_tup, is_valid, error_message - try: - repository = ( - self.sa_session.query(self.app.model.Repository) - .filter( - and_( - self.app.model.Repository.table.c.name == name, - self.app.model.Repository.table.c.user_id == user.id, - ) - ) - .one() - ) - except Exception: - error_message = ( - f"Ignoring repository dependency definition for tool shed {toolshed}, name {name}, owner {owner}, " - f"changeset revision {changeset_revision} because the name is invalid. " - ) - log.debug(error_message) - is_valid = False - return repository_dependency_tup, is_valid, error_message - repo = repository.hg_repo - - # The received changeset_revision may be None since defining it in the dependency definition is optional. - # If this is the case, the default will be to set its value to the repository dependency tip revision. - # This probably occurs only when handling circular dependency definitions. - tip_ctx = repo[repo.changelog.tip()] - # Make sure the repo.changlog includes at least 1 revision. 
- if changeset_revision is None and tip_ctx.rev() >= 0: - changeset_revision = str(tip_ctx) - repository_dependency_tup = [ - toolshed, - name, - owner, - changeset_revision, - prior_installation_required, - str(only_if_compiling_contained_td), - ] - return repository_dependency_tup, is_valid, error_message - else: - # Find the specified changeset revision in the repository's changelog to see if it's valid. - found = False - for changeset in repo.changelog: - changeset_hash = str(repo[changeset]) - if changeset_hash == changeset_revision: - found = True - break - if not found: - error_message = ( - f"Ignoring repository dependency definition for tool shed {toolshed}, name {name}, owner {owner}, " - f"changeset revision {changeset_revision} because the changeset revision is invalid. " - ) - log.debug(error_message) - is_valid = False - return repository_dependency_tup, is_valid, error_message - else: - # Repository dependencies are currently supported within a single tool shed. - error_message = ( - "Repository dependencies are currently supported only within the same tool shed. Ignoring " - f"repository dependency definition for tool shed {toolshed}, name {name}, owner {owner}, changeset revision {changeset_revision}. " - ) - log.debug(error_message) - is_valid = False - return repository_dependency_tup, is_valid, error_message - return repository_dependency_tup, is_valid, error_message - def _set_add_to_tool_panel_attribute_for_tool(self, tool): """ Determine if a tool should be loaded into the Galaxy tool panel. Examples of valid tools that @@ -1073,6 +899,100 @@ def set_repository( self.shed_config_dict = repository.get_shed_config_dict(self.app) self._reset_attributes_after_repository_update(relative_install_dir) + def handle_repository_elem(self, repository_elem, only_if_compiling_contained_td=False) -> HandleResultT: + """ + Process the received repository_elem which is a tag either from a + repository_dependencies.xml file or a tool_dependencies.xml file. If the former, + we're generating repository dependencies metadata for a repository in the Tool Shed. + If the latter, we're generating package dependency metadata within Galaxy or the + Tool Shed. + """ + is_valid = True + error_message = "" + toolshed = repository_elem.get("toolshed", None) + name = repository_elem.get("name", None) + owner = repository_elem.get("owner", None) + changeset_revision = repository_elem.get("changeset_revision", None) + prior_installation_required = str(repository_elem.get("prior_installation_required", False)) + repository_dependency_tup = [ + toolshed, + name, + owner, + changeset_revision, + prior_installation_required, + str(only_if_compiling_contained_td), + ] + if self.updating_installed_repository: + pass + else: + # We're installing a repository into Galaxy, so make sure its contained repository + # dependency definition is valid. + if toolshed is None or name is None or owner is None or changeset_revision is None: + # Several packages exist in the Tool Shed that contain invalid repository + # definitions, but will still install. We will report these errors to the + # installing user. Previously, we would: + # Raise an exception here instead of returning an error_message to keep the + # installation from proceeding. Reaching here implies a bug in the Tool Shed + # framework. 
+ error_message = "Installation encountered an invalid repository dependency definition:\n" + error_message += util.xml_to_string(repository_elem, pretty=True) + log.error(error_message) + return repository_dependency_tup, False, error_message + if not toolshed: + # Default to the current tool shed. + toolshed = str(url_for("/", qualified=True)).rstrip("/") + repository_dependency_tup[0] = toolshed + toolshed = remove_protocol_from_tool_shed_url(toolshed) + + # We're in Galaxy. We reach here when we're generating the metadata for a tool + # dependencies package defined for a repository or when we're generating metadata + # for an installed repository. See if we can locate the installed repository via + # the changeset_revision defined in the repository_elem (it may be outdated). If + # we're successful in locating an installed repository with the attributes defined + # in the repository_elem, we know it is valid. + repository = get_repository_for_dependency_relationship(self.app, toolshed, name, owner, changeset_revision) + if repository: + return repository_dependency_tup, is_valid, error_message + else: + # Send a request to the tool shed to retrieve appropriate additional changeset + # revisions with which the repository + # may have been installed. + text = get_updated_changeset_revisions_from_tool_shed(self.app, toolshed, name, owner, changeset_revision) + if text: + updated_changeset_revisions = util.listify(text) + for updated_changeset_revision in updated_changeset_revisions: + repository = get_repository_for_dependency_relationship( + self.app, toolshed, name, owner, updated_changeset_revision + ) + if repository: + return repository_dependency_tup, is_valid, error_message + if self.updating_installed_repository: + # The repository dependency was included in an update to the installed + # repository, so it will not yet be installed. Return the tuple for later + # installation. + return repository_dependency_tup, is_valid, error_message + if self.updating_installed_repository: + # The repository dependency was included in an update to the installed repository, + # so it will not yet be installed. Return the tuple for later installation. + return repository_dependency_tup, is_valid, error_message + # Don't generate an error message for missing repository dependencies that are required + # only if compiling the dependent repository's tool dependency. + if not only_if_compiling_contained_td: + # We'll currently default to setting the repository dependency definition as invalid + # if an installed repository cannot be found. This may not be ideal because the tool + # shed may have simply been inaccessible when metadata was being generated for the + # installed tool shed repository. + error_message = ( + "Ignoring invalid repository dependency definition for tool shed %s, name %s, owner %s, " + % (toolshed, name, owner) + ) + error_message += f"changeset revision {changeset_revision}." 
+ log.debug(error_message) + is_valid = False + return repository_dependency_tup, is_valid, error_message + return repository_dependency_tup, is_valid, error_message + + class ToolShedMetadataGenerator(BaseMetadataGenerator): """A MetadataGenerator building on ToolShed's app and repository constructs.""" @@ -1139,6 +1059,123 @@ def set_repository( self.shed_config_dict = {} self._reset_attributes_after_repository_update(relative_install_dir) + def handle_repository_elem(self, repository_elem, only_if_compiling_contained_td=False) -> HandleResultT: + """ + Process the received repository_elem which is a tag either from a + repository_dependencies.xml file or a tool_dependencies.xml file. If the former, + we're generating repository dependencies metadata for a repository in the Tool Shed. + If the latter, we're generating package dependency metadata within Galaxy or the + Tool Shed. + """ + is_valid = True + error_message = "" + toolshed = repository_elem.get("toolshed", None) + name = repository_elem.get("name", None) + owner = repository_elem.get("owner", None) + changeset_revision = repository_elem.get("changeset_revision", None) + prior_installation_required = str(repository_elem.get("prior_installation_required", False)) + repository_dependency_tup = [ + toolshed, + name, + owner, + changeset_revision, + prior_installation_required, + str(only_if_compiling_contained_td), + ] + if not toolshed: + # Default to the current tool shed. + toolshed = str(url_for("/", qualified=True)).rstrip("/") + repository_dependency_tup[0] = toolshed + toolshed = remove_protocol_from_tool_shed_url(toolshed) + + if suc.tool_shed_is_this_tool_shed(toolshed): + try: + user = ( + self.sa_session.query(self.app.model.User) + .filter(self.app.model.User.table.c.username == owner) + .one() + ) + except Exception: + error_message = ( + "Ignoring repository dependency definition for tool shed %s, name %s, owner %s, " + % (toolshed, name, owner) + ) + error_message += f"changeset revision {changeset_revision} because the owner is invalid." + log.debug(error_message) + is_valid = False + return repository_dependency_tup, is_valid, error_message + try: + repository = ( + self.sa_session.query(self.app.model.Repository) + .filter( + and_( + self.app.model.Repository.table.c.name == name, + self.app.model.Repository.table.c.user_id == user.id, + ) + ) + .one() + ) + except Exception: + error_message = ( + "Ignoring repository dependency definition for tool shed %s, name %s, owner %s, " + % (toolshed, name, owner) + ) + error_message += f"changeset revision {changeset_revision} because the name is invalid. " + log.debug(error_message) + is_valid = False + return repository_dependency_tup, is_valid, error_message + repo = repository.hg_repo + + # The received changeset_revision may be None since defining it in the dependency definition is optional. + # If this is the case, the default will be to set its value to the repository dependency tip revision. + # This probably occurs only when handling circular dependency definitions. + tip_ctx = repo[repo.changelog.tip()] + # Make sure the repo.changlog includes at least 1 revision. 
+ if changeset_revision is None and tip_ctx.rev() >= 0: + changeset_revision = str(tip_ctx) + repository_dependency_tup = [ + toolshed, + name, + owner, + changeset_revision, + prior_installation_required, + str(only_if_compiling_contained_td), + ] + return repository_dependency_tup, is_valid, error_message + else: + # Find the specified changeset revision in the repository's changelog to see if it's valid. + found = False + for changeset in repo.changelog: + changeset_hash = str(repo[changeset]) + if changeset_hash == changeset_revision: + found = True + break + if not found: + error_message = ( + "Ignoring repository dependency definition for tool shed %s, name %s, owner %s, " + % (toolshed, name, owner) + ) + error_message += ( + f"changeset revision {changeset_revision} because the changeset revision is invalid. " + ) + log.debug(error_message) + is_valid = False + return repository_dependency_tup, is_valid, error_message + else: + # Repository dependencies are currently supported within a single tool shed. + error_message = ( + "Repository dependencies are currently supported only within the same tool shed. Ignoring " + ) + error_message += ( + "repository dependency definition for tool shed %s, name %s, owner %s, changeset revision %s. " + % (toolshed, name, owner, changeset_revision) + ) + log.debug(error_message) + is_valid = False + return repository_dependency_tup, is_valid, error_message + return repository_dependency_tup, is_valid, error_message + + def _get_readme_file_names(repository_name: str) -> List[str]: """Return a list of file names that will be categorized as README files for the received repository_name.""" From 7c357d73b45753425a6e57093ea01e92aa597c48 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Thu, 15 Sep 2022 10:36:54 -0400 Subject: [PATCH 10/73] metadata_generator: protocols to better type repository abstractions... --- .../installed_repository_metadata_manager.py | 6 ++++- .../tool_shed/metadata/metadata_generator.py | 26 ++++++++++++++++--- 2 files changed, 27 insertions(+), 5 deletions(-) diff --git a/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py b/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py index 22c567a9ee81..3603d150cd2a 100644 --- a/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py +++ b/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py @@ -125,6 +125,7 @@ def get_repository_tools_tups(self): def reset_all_metadata_on_installed_repository(self): """Reset all metadata on a single tool shed repository installed into a Galaxy instance.""" if self.relative_install_dir: + assert self.repository original_metadata_dict = self.repository.metadata_ self.generate_metadata_for_changeset_revision() if self.metadata_dict != original_metadata_dict: @@ -140,7 +141,9 @@ def reset_all_metadata_on_installed_repository(self): else: log.debug(f"Metadata did not need to be reset on repository {self.repository.name}.") else: - log.debug(f"Error locating installation directory for repository {self.repository.name}.") + log.debug( + f"Error locating installation directory for repository {self.repository and self.repository.name}." + ) def reset_metadata_on_selected_repositories(self, user, **kwd): """ @@ -208,6 +211,7 @@ def update_in_shed_tool_config(self): A tool shed repository is being updated so change the shed_tool_conf file. 
Parse the config file to generate the entire list of config_elems instead of using the in-memory list. """ + assert self.repository shed_conf_dict = self.shed_config_dict or self.repository.get_shed_config_dict(self.app) shed_tool_conf = shed_conf_dict["config_filename"] tool_path = shed_conf_dict["tool_path"] diff --git a/lib/galaxy/tool_shed/metadata/metadata_generator.py b/lib/galaxy/tool_shed/metadata/metadata_generator.py index 2db212326953..82e2798ea9d6 100644 --- a/lib/galaxy/tool_shed/metadata/metadata_generator.py +++ b/lib/galaxy/tool_shed/metadata/metadata_generator.py @@ -10,8 +10,10 @@ ) from sqlalchemy import and_ +from typing_extensions import Protocol from galaxy import util +from galaxy.model.tool_shed_install import ToolShedRepository from galaxy.structured_app import MinimalManagerApp from galaxy.tool_shed.repository_type import ( REPOSITORY_DEPENDENCY_DEFINITION_FILENAME, @@ -56,8 +58,20 @@ ] +class RepositoryProtocol(Protocol): + def repo_path(self, app) -> Optional[str]: + ... + + +class ToolShedRepositoryProtocol(RepositoryProtocol): + + def tip(self) -> str: + ... + + class BaseMetadataGenerator: app: MinimalManagerApp + repository: Optional[RepositoryProtocol] invalid_file_tups: List[InvalidFileT] changeset_revision: Optional[str] repository_clone_url: Optional[str] @@ -78,12 +92,13 @@ def _generate_data_manager_metadata( """ if data_manager_config_filename is None: return metadata_dict + assert self.repository repo_path = self.repository.repo_path(self.app) - try: + if hasattr(self.repository, "repo_files_directory"): # Galaxy Side. - repo_files_directory = self.repository.repo_files_directory(self.app) + repo_files_directory = self.repository.repo_files_directory(self.app) # type: ignore[attr-defined] repo_dir = repo_files_directory - except AttributeError: + else: # Tool Shed side. 
repo_files_directory = repo_path relative_data_manager_dir = util.relpath(os.path.split(data_manager_config_filename)[0], repo_dir) @@ -830,6 +845,8 @@ def _update_repository_dependencies_metadata( class GalaxyMetadataGenerator(BaseMetadataGenerator): """A MetadataGenerator building on Galaxy's app and repository constructs.""" + repository: Optional[ToolShedRepository] + def __init__( self, app: MinimalManagerApp, @@ -996,11 +1013,12 @@ def handle_repository_elem(self, repository_elem, only_if_compiling_contained_td class ToolShedMetadataGenerator(BaseMetadataGenerator): """A MetadataGenerator building on ToolShed's app and repository constructs.""" + repository: Optional[ToolShedRepositoryProtocol] def __init__( self, app: MinimalManagerApp, - repository=None, + repository: Optional[ToolShedRepositoryProtocol] = None, changeset_revision: Optional[str] = None, repository_clone_url: Optional[str] = None, shed_config_dict: Optional[Dict[str, Any]] = None, From 32b2d38c4e24a292c2f86737e4c88e3009d358e1 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Thu, 15 Sep 2022 10:43:46 -0400 Subject: [PATCH 11/73] metadata_generator: move tool shed impl into tool shed package the correct home and allows better typing with actual Repository type annotations --- .../tool_shed/metadata/metadata_generator.py | 193 ---------------- .../metadata/repository_metadata_manager.py | 206 +++++++++++++++++- 2 files changed, 205 insertions(+), 194 deletions(-) diff --git a/lib/galaxy/tool_shed/metadata/metadata_generator.py b/lib/galaxy/tool_shed/metadata/metadata_generator.py index 82e2798ea9d6..184ee767c819 100644 --- a/lib/galaxy/tool_shed/metadata/metadata_generator.py +++ b/lib/galaxy/tool_shed/metadata/metadata_generator.py @@ -9,7 +9,6 @@ Tuple, ) -from sqlalchemy import and_ from typing_extensions import Protocol from galaxy import util @@ -38,7 +37,6 @@ from galaxy.tools.repositories import ValidationContext from galaxy.util.tool_shed.common_util import ( generate_clone_url_for_installed_repository, - generate_clone_url_for_repository_in_tool_shed, remove_protocol_and_user_from_clone_url, remove_protocol_from_tool_shed_url, ) @@ -63,12 +61,6 @@ def repo_path(self, app) -> Optional[str]: ... -class ToolShedRepositoryProtocol(RepositoryProtocol): - - def tip(self) -> str: - ... 
- - class BaseMetadataGenerator: app: MinimalManagerApp repository: Optional[RepositoryProtocol] @@ -1010,191 +1002,6 @@ def handle_repository_elem(self, repository_elem, only_if_compiling_contained_td return repository_dependency_tup, is_valid, error_message - -class ToolShedMetadataGenerator(BaseMetadataGenerator): - """A MetadataGenerator building on ToolShed's app and repository constructs.""" - repository: Optional[ToolShedRepositoryProtocol] - - def __init__( - self, - app: MinimalManagerApp, - repository: Optional[ToolShedRepositoryProtocol] = None, - changeset_revision: Optional[str] = None, - repository_clone_url: Optional[str] = None, - shed_config_dict: Optional[Dict[str, Any]] = None, - relative_install_dir=None, - repository_files_dir=None, - resetting_all_metadata_on_repository=False, - updating_installed_repository=False, - persist=False, - metadata_dict=None, - user=None, - ): - self.app = app - self.user = user - self.repository = repository - if changeset_revision is None and self.repository is not None: - self.changeset_revision = self.repository.tip() - else: - self.changeset_revision = changeset_revision - if repository_clone_url is None and self.repository is not None: - self.repository_clone_url = generate_clone_url_for_repository_in_tool_shed(self.user, self.repository) - else: - self.repository_clone_url = repository_clone_url - if shed_config_dict is None: - self.shed_config_dict = {} - else: - self.shed_config_dict = shed_config_dict - if relative_install_dir is None and self.repository is not None: - relative_install_dir = self.repository.repo_path(self.app) - if repository_files_dir is None and self.repository is not None: - repository_files_dir = self.repository.repo_path(self.app) - if metadata_dict is None: - self.metadata_dict = {} - else: - self.metadata_dict = metadata_dict - self.relative_install_dir = relative_install_dir - self.repository_files_dir = repository_files_dir - self.resetting_all_metadata_on_repository = resetting_all_metadata_on_repository - self.updating_installed_repository = updating_installed_repository - self.persist = persist - self.invalid_file_tups = [] - self.sa_session = app.model.session - - def initial_metadata_dict(self) -> Dict[str, Any]: - return {} - - def set_repository( - self, repository, relative_install_dir: Optional[str] = None, changeset_revision: Optional[str] = None - ): - self.repository = repository - if relative_install_dir is None and self.repository is not None: - relative_install_dir = repository.repo_path(self.app) - if changeset_revision is None and self.repository is not None: - self.set_changeset_revision(self.repository.tip()) - else: - self.set_changeset_revision(changeset_revision) - self.shed_config_dict = {} - self._reset_attributes_after_repository_update(relative_install_dir) - - def handle_repository_elem(self, repository_elem, only_if_compiling_contained_td=False) -> HandleResultT: - """ - Process the received repository_elem which is a tag either from a - repository_dependencies.xml file or a tool_dependencies.xml file. If the former, - we're generating repository dependencies metadata for a repository in the Tool Shed. - If the latter, we're generating package dependency metadata within Galaxy or the - Tool Shed. 
- """ - is_valid = True - error_message = "" - toolshed = repository_elem.get("toolshed", None) - name = repository_elem.get("name", None) - owner = repository_elem.get("owner", None) - changeset_revision = repository_elem.get("changeset_revision", None) - prior_installation_required = str(repository_elem.get("prior_installation_required", False)) - repository_dependency_tup = [ - toolshed, - name, - owner, - changeset_revision, - prior_installation_required, - str(only_if_compiling_contained_td), - ] - if not toolshed: - # Default to the current tool shed. - toolshed = str(url_for("/", qualified=True)).rstrip("/") - repository_dependency_tup[0] = toolshed - toolshed = remove_protocol_from_tool_shed_url(toolshed) - - if suc.tool_shed_is_this_tool_shed(toolshed): - try: - user = ( - self.sa_session.query(self.app.model.User) - .filter(self.app.model.User.table.c.username == owner) - .one() - ) - except Exception: - error_message = ( - "Ignoring repository dependency definition for tool shed %s, name %s, owner %s, " - % (toolshed, name, owner) - ) - error_message += f"changeset revision {changeset_revision} because the owner is invalid." - log.debug(error_message) - is_valid = False - return repository_dependency_tup, is_valid, error_message - try: - repository = ( - self.sa_session.query(self.app.model.Repository) - .filter( - and_( - self.app.model.Repository.table.c.name == name, - self.app.model.Repository.table.c.user_id == user.id, - ) - ) - .one() - ) - except Exception: - error_message = ( - "Ignoring repository dependency definition for tool shed %s, name %s, owner %s, " - % (toolshed, name, owner) - ) - error_message += f"changeset revision {changeset_revision} because the name is invalid. " - log.debug(error_message) - is_valid = False - return repository_dependency_tup, is_valid, error_message - repo = repository.hg_repo - - # The received changeset_revision may be None since defining it in the dependency definition is optional. - # If this is the case, the default will be to set its value to the repository dependency tip revision. - # This probably occurs only when handling circular dependency definitions. - tip_ctx = repo[repo.changelog.tip()] - # Make sure the repo.changlog includes at least 1 revision. - if changeset_revision is None and tip_ctx.rev() >= 0: - changeset_revision = str(tip_ctx) - repository_dependency_tup = [ - toolshed, - name, - owner, - changeset_revision, - prior_installation_required, - str(only_if_compiling_contained_td), - ] - return repository_dependency_tup, is_valid, error_message - else: - # Find the specified changeset revision in the repository's changelog to see if it's valid. - found = False - for changeset in repo.changelog: - changeset_hash = str(repo[changeset]) - if changeset_hash == changeset_revision: - found = True - break - if not found: - error_message = ( - "Ignoring repository dependency definition for tool shed %s, name %s, owner %s, " - % (toolshed, name, owner) - ) - error_message += ( - f"changeset revision {changeset_revision} because the changeset revision is invalid. " - ) - log.debug(error_message) - is_valid = False - return repository_dependency_tup, is_valid, error_message - else: - # Repository dependencies are currently supported within a single tool shed. - error_message = ( - "Repository dependencies are currently supported only within the same tool shed. Ignoring " - ) - error_message += ( - "repository dependency definition for tool shed %s, name %s, owner %s, changeset revision %s. 
" - % (toolshed, name, owner, changeset_revision) - ) - log.debug(error_message) - is_valid = False - return repository_dependency_tup, is_valid, error_message - return repository_dependency_tup, is_valid, error_message - - - def _get_readme_file_names(repository_name: str) -> List[str]: """Return a list of file names that will be categorized as README files for the received repository_name.""" readme_files = ["readme", "read_me", "install"] diff --git a/lib/tool_shed/metadata/repository_metadata_manager.py b/lib/tool_shed/metadata/repository_metadata_manager.py index b4492186b1f3..a717a5ae9fb3 100644 --- a/lib/tool_shed/metadata/repository_metadata_manager.py +++ b/lib/tool_shed/metadata/repository_metadata_manager.py @@ -1,20 +1,29 @@ import logging import tempfile from typing import ( + Any, + Dict, List, Optional, ) from sqlalchemy import ( + and_, false, or_, ) from galaxy import util from galaxy.model.base import transaction +from galaxy.structured_app import MinimalManagerApp +from galaxy.tool_shed.metadata.metadata_generator import ( + BaseMetadataGenerator, + HandleResultT, + InvalidFileT, +) from galaxy.util import inflector +from galaxy.web import url_for from galaxy.web.form_builder import SelectField -from galaxy.tool_shed.galaxy_install.metadata_generator import ToolShedMetadataGenerator from tool_shed.repository_types import util as rt_util from tool_shed.repository_types.metadata import TipOnly from tool_shed.structured_app import ToolShedApp @@ -27,10 +36,199 @@ shed_util_common as suc, tool_util, ) +from tool_shed.webapp.model import Repository log = logging.getLogger(__name__) +class ToolShedMetadataGenerator(BaseMetadataGenerator): + """A MetadataGenerator building on ToolShed's app and repository constructs.""" + + repository: Optional[Repository] + + # why is mypy making me re-annotate these things from the base class, it didn't + # when they were in the same file + invalid_file_tups: List[InvalidFileT] + repository_clone_url: Optional[str] + + def __init__( + self, + app: MinimalManagerApp, + repository: Optional[Repository] = None, + changeset_revision: Optional[str] = None, + repository_clone_url: Optional[str] = None, + shed_config_dict: Optional[Dict[str, Any]] = None, + relative_install_dir=None, + repository_files_dir=None, + resetting_all_metadata_on_repository=False, + updating_installed_repository=False, + persist=False, + metadata_dict=None, + user=None, + ): + self.app = app + self.user = user + self.repository = repository + if changeset_revision is None and self.repository is not None: + self.changeset_revision = self.repository.tip() + else: + self.changeset_revision = changeset_revision + if repository_clone_url is None and self.repository is not None: + self.repository_clone_url = common_util.generate_clone_url_for_repository_in_tool_shed( + self.user, self.repository + ) + else: + self.repository_clone_url = repository_clone_url + if shed_config_dict is None: + self.shed_config_dict = {} + else: + self.shed_config_dict = shed_config_dict + if relative_install_dir is None and self.repository is not None: + relative_install_dir = self.repository.repo_path(self.app) + if repository_files_dir is None and self.repository is not None: + repository_files_dir = self.repository.repo_path(self.app) + if metadata_dict is None: + self.metadata_dict = {} + else: + self.metadata_dict = metadata_dict + self.relative_install_dir = relative_install_dir + self.repository_files_dir = repository_files_dir + self.resetting_all_metadata_on_repository = 
resetting_all_metadata_on_repository + self.updating_installed_repository = updating_installed_repository + self.persist = persist + self.invalid_file_tups = [] + self.sa_session = app.model.session + + def initial_metadata_dict(self) -> Dict[str, Any]: + return {} + + def set_repository( + self, repository, relative_install_dir: Optional[str] = None, changeset_revision: Optional[str] = None + ): + self.repository = repository + if relative_install_dir is None and self.repository is not None: + relative_install_dir = repository.repo_path(self.app) + if changeset_revision is None and self.repository is not None: + self.set_changeset_revision(self.repository.tip()) + else: + self.set_changeset_revision(changeset_revision) + self.shed_config_dict = {} + self._reset_attributes_after_repository_update(relative_install_dir) + + def handle_repository_elem(self, repository_elem, only_if_compiling_contained_td=False) -> HandleResultT: + """ + Process the received repository_elem which is a tag either from a + repository_dependencies.xml file or a tool_dependencies.xml file. If the former, + we're generating repository dependencies metadata for a repository in the Tool Shed. + If the latter, we're generating package dependency metadata within Galaxy or the + Tool Shed. + """ + is_valid = True + error_message = "" + toolshed = repository_elem.get("toolshed", None) + name = repository_elem.get("name", None) + owner = repository_elem.get("owner", None) + changeset_revision = repository_elem.get("changeset_revision", None) + prior_installation_required = str(repository_elem.get("prior_installation_required", False)) + repository_dependency_tup = [ + toolshed, + name, + owner, + changeset_revision, + prior_installation_required, + str(only_if_compiling_contained_td), + ] + if not toolshed: + # Default to the current tool shed. + toolshed = str(url_for("/", qualified=True)).rstrip("/") + repository_dependency_tup[0] = toolshed + toolshed = common_util.remove_protocol_from_tool_shed_url(toolshed) + + if suc.tool_shed_is_this_tool_shed(toolshed): + try: + user = ( + self.sa_session.query(self.app.model.User) + .filter(self.app.model.User.table.c.username == owner) + .one() + ) + except Exception: + error_message = "Ignoring repository dependency definition for tool shed %s, name %s, owner %s, " % ( + toolshed, + name, + owner, + ) + error_message += f"changeset revision {changeset_revision} because the owner is invalid." + log.debug(error_message) + is_valid = False + return repository_dependency_tup, is_valid, error_message + try: + repository = ( + self.sa_session.query(self.app.model.Repository) + .filter( + and_( + self.app.model.Repository.table.c.name == name, + self.app.model.Repository.table.c.user_id == user.id, + ) + ) + .one() + ) + except Exception: + error_message = f"Ignoring repository dependency definition for tool shed {toolshed}," + error_message += f"name {name}, owner {owner}, " + error_message += f"changeset revision {changeset_revision} because the name is invalid. " + log.debug(error_message) + is_valid = False + return repository_dependency_tup, is_valid, error_message + repo = repository.hg_repo + + # The received changeset_revision may be None since defining it in the dependency definition is optional. + # If this is the case, the default will be to set its value to the repository dependency tip revision. + # This probably occurs only when handling circular dependency definitions. + tip_ctx = repo[repo.changelog.tip()] + # Make sure the repo.changlog includes at least 1 revision. 
+ if changeset_revision is None and tip_ctx.rev() >= 0: + changeset_revision = str(tip_ctx) + repository_dependency_tup = [ + toolshed, + name, + owner, + changeset_revision, + prior_installation_required, + str(only_if_compiling_contained_td), + ] + return repository_dependency_tup, is_valid, error_message + else: + # Find the specified changeset revision in the repository's changelog to see if it's valid. + found = False + for changeset in repo.changelog: + changeset_hash = str(repo[changeset]) + if changeset_hash == changeset_revision: + found = True + break + if not found: + error_message = ( + "Ignoring repository dependency definition for tool shed %s, name %s, owner %s, " + % (toolshed, name, owner) + ) + error_message += ( + f"changeset revision {changeset_revision} because the changeset revision is invalid. " + ) + log.debug(error_message) + is_valid = False + return repository_dependency_tup, is_valid, error_message + else: + # Repository dependencies are currently supported within a single tool shed. + error_message = "Repository dependencies are currently supported only within the same tool shed. Ignoring " + error_message += ( + "repository dependency definition for tool shed %s, name %s, owner %s, changeset revision %s. " + % (toolshed, name, owner, changeset_revision) + ) + log.debug(error_message) + is_valid = False + return repository_dependency_tup, is_valid, error_message + return repository_dependency_tup, is_valid, error_message + + class RepositoryMetadataManager(ToolShedMetadataGenerator): def __init__( self, @@ -101,6 +299,7 @@ def build_repository_ids_select_field( return repositories_select_field def clean_repository_metadata(self, changeset_revisions): + assert self.repository # Delete all repository_metadata records associated with the repository that have # a changeset_revision that is not in changeset_revisions. We sometimes see multiple # records with the same changeset revision value - no idea how this happens. We'll @@ -431,6 +630,7 @@ def create_or_update_repository_metadata(self, changeset_revision, metadata_dict downloadable = True else: downloadable = False + assert self.repository repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( self.app, self.app.security.encode_id(self.repository.id), changeset_revision ) @@ -616,6 +816,7 @@ def new_metadata_required_for_utilities(self): self.repository because one or more Galaxy utilities may have been deleted from self.repository in the new tip. 
""" + assert self.repository repository_metadata = metadata_util.get_latest_repository_metadata( self.app, self.repository.id, downloadable=False ) @@ -843,6 +1044,7 @@ def new_workflow_metadata_required(self, repository_metadata): def reset_all_metadata_on_repository_in_tool_shed(self): """Reset all metadata on a single repository in a tool shed.""" + assert self.repository log.debug(f"Resetting all metadata on repository: {self.repository.name}") repo = self.repository.hg_repo # The list of changeset_revisions refers to repository_metadata records that have been created @@ -920,6 +1122,7 @@ def reset_all_metadata_on_repository_in_tool_shed(self): def reset_all_tool_versions(self, repo): """Reset tool version lineage for those changeset revisions that include valid tools.""" + assert self.repository encoded_repository_id = self.app.security.encode_id(self.repository.id) changeset_revisions_that_contain_tools = [] for changeset in repo.changelog: @@ -1022,6 +1225,7 @@ def set_repository_metadata(self, host, content_alert_str="", **kwd): Set metadata using the self.repository's current disk files, returning specific error messages (if any) to alert the repository owner that the changeset has problems. """ + assert self.repository message = "" status = "done" encoded_id = self.app.security.encode_id(self.repository.id) From d0622fce7b9e6eae82df477da0463b74eee2ba47 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Thu, 15 Sep 2022 10:55:50 -0400 Subject: [PATCH 12/73] Remove metadata_generator from mypy exclusions... --- mypy.ini | 2 -- 1 file changed, 2 deletions(-) diff --git a/mypy.ini b/mypy.ini index 99ab2d8a9073..6a3b9a641717 100644 --- a/mypy.ini +++ b/mypy.ini @@ -479,8 +479,6 @@ check_untyped_defs = False check_untyped_defs = False [mypy-galaxy.jobs] check_untyped_defs = False -[mypy-galaxy.tool_shed.metadata.metadata_generator] -check_untyped_defs = False [mypy-galaxy.jobs.handler] check_untyped_defs = False [mypy-galaxy.workflow.scheduling_manager] From bac9fb803e6850f674b12d8832e9f064a9d65e2a Mon Sep 17 00:00:00 2001 From: John Chilton Date: Fri, 16 Sep 2022 10:03:52 -0400 Subject: [PATCH 13/73] metadata_generator: more typing fixes/annotations --- .../tool_shed/metadata/metadata_generator.py | 24 +++++++++++++------ .../metadata/repository_metadata_manager.py | 1 + 2 files changed, 18 insertions(+), 7 deletions(-) diff --git a/lib/galaxy/tool_shed/metadata/metadata_generator.py b/lib/galaxy/tool_shed/metadata/metadata_generator.py index 184ee767c819..a2b0cc2b1567 100644 --- a/lib/galaxy/tool_shed/metadata/metadata_generator.py +++ b/lib/galaxy/tool_shed/metadata/metadata_generator.py @@ -3,6 +3,7 @@ import tempfile from typing import ( Any, + cast, Dict, List, Optional, @@ -57,6 +58,9 @@ class RepositoryProtocol(Protocol): + name: str + id: str + def repo_path(self, app) -> Optional[str]: ... @@ -76,6 +80,9 @@ class BaseMetadataGenerator: def initial_metadata_dict(self) -> Dict[str, Any]: ... + def handle_repository_elem(self, repository_elem, only_if_compiling_contained_td=False) -> HandleResultT: + ... 
+ def _generate_data_manager_metadata( self, repo_dir, data_manager_config_filename, metadata_dict: Dict[str, Any], shed_config_dict=None ) -> Dict[str, Any]: @@ -222,10 +229,12 @@ def generate_metadata_for_changeset_revision(self): """ if self.shed_config_dict is None: self.shed_config_dict = {} + assert self.repository if self.updating_installed_repository: # Keep the original tool shed repository metadata if setting metadata on a repository # installed into a local Galaxy instance for which we have pulled updates. - original_repository_metadata = self.repository.metadata_ + gx_repository = cast(ToolShedRepository, self.repository) # definitely in Galaxy version + original_repository_metadata = gx_repository.metadata_ else: original_repository_metadata = None readme_file_names = _get_readme_file_names(str(self.repository.name)) @@ -249,9 +258,11 @@ def generate_metadata_for_changeset_revision(self): work_dir = tempfile.mkdtemp(prefix="tmp-toolshed-gmfcr") # All other files are on disk in the repository's repo_path, which is the value of # self.relative_install_dir. + assert self.relative_install_dir files_dir = self.relative_install_dir if self.shed_config_dict.get("tool_path"): files_dir = os.path.join(self.shed_config_dict["tool_path"], files_dir) + assert files_dir # Create ValidationContext to load and validate tools, data tables and datatypes with ValidationContext.from_app(app=self.app, work_dir=work_dir) as validation_context: tv = ToolValidator(validation_context) @@ -371,7 +382,7 @@ def generate_package_dependency_metadata(self, elem, valid_tool_dependencies_dic """ # TODO: make this function a class. repository_dependency_is_valid = True - repository_dependency_tup = [] + repository_dependency_tup: list = [] requirements_dict = {} error_message = "" package_name = elem.get("name", None) @@ -495,7 +506,7 @@ def generate_repository_dependency_metadata(self, repository_dependencies_config prior_installation_required, only_if_compiling_contained_td, ) = repository_dependency_tup - repository_dependency_tup = ( + invalid_repository_dependency_tup = ( toolshed, name, owner, @@ -504,7 +515,7 @@ def generate_repository_dependency_metadata(self, repository_dependencies_config only_if_compiling_contained_td, err_msg, ) - invalid_repository_dependency_tups.append(repository_dependency_tup) + invalid_repository_dependency_tups.append(invalid_repository_dependency_tup) error_message += err_msg if invalid_repository_dependency_tups: invalid_repository_dependencies_dict["repository_dependencies"] = invalid_repository_dependency_tups @@ -622,11 +633,10 @@ def generate_tool_dependency_metadata( root = tree.getroot() class RecurserValueStore: - pass + valid_tool_dependencies_dict = {} + invalid_tool_dependencies_dict = {} rvs = RecurserValueStore() - rvs.valid_tool_dependencies_dict = {} - rvs.invalid_tool_dependencies_dict = {} valid_repository_dependency_tups = [] invalid_repository_dependency_tups = [] description = root.get("description") diff --git a/lib/tool_shed/metadata/repository_metadata_manager.py b/lib/tool_shed/metadata/repository_metadata_manager.py index a717a5ae9fb3..d9d82836fda4 100644 --- a/lib/tool_shed/metadata/repository_metadata_manager.py +++ b/lib/tool_shed/metadata/repository_metadata_manager.py @@ -1061,6 +1061,7 @@ def reset_all_metadata_on_repository_in_tool_shed(self): work_dir = tempfile.mkdtemp(prefix="tmp-toolshed-ramorits") ctx = repo[changeset] log.debug("Cloning repository changeset revision: %s", str(ctx.rev())) + assert self.repository_clone_url cloned_ok, 
error_message = hg_util.clone_repository(self.repository_clone_url, work_dir, str(ctx.rev())) if cloned_ok: log.debug("Generating metadata for changeset revision: %s", str(ctx.rev())) From f0af1bf218bc096cbf6e809d196168dd67bd817b Mon Sep 17 00:00:00 2001 From: John Chilton Date: Mon, 10 Oct 2022 13:53:12 -0400 Subject: [PATCH 14/73] metadata_generator: only tool shed side uses sa_session. --- lib/galaxy/tool_shed/metadata/metadata_generator.py | 1 - lib/tool_shed/metadata/repository_metadata_manager.py | 1 + 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/galaxy/tool_shed/metadata/metadata_generator.py b/lib/galaxy/tool_shed/metadata/metadata_generator.py index a2b0cc2b1567..4fd179aee8d5 100644 --- a/lib/galaxy/tool_shed/metadata/metadata_generator.py +++ b/lib/galaxy/tool_shed/metadata/metadata_generator.py @@ -898,7 +898,6 @@ def __init__( self.updating_installed_repository = updating_installed_repository self.persist = persist self.invalid_file_tups = [] - self.sa_session = app.model.session def initial_metadata_dict(self) -> Dict[str, Any]: # Shed related tool panel configs are only relevant to Galaxy. diff --git a/lib/tool_shed/metadata/repository_metadata_manager.py b/lib/tool_shed/metadata/repository_metadata_manager.py index d9d82836fda4..22d44a7fa7f6 100644 --- a/lib/tool_shed/metadata/repository_metadata_manager.py +++ b/lib/tool_shed/metadata/repository_metadata_manager.py @@ -259,6 +259,7 @@ def __init__( metadata_dict=metadata_dict, user=user, ) + self.sa_session = app.model.context self.app = app self.user = user # Repository metadata comparisons for changeset revisions. From 10ce48780bb36af79efcdb524e60f8aed4f2063a Mon Sep 17 00:00:00 2001 From: John Chilton Date: Fri, 21 Oct 2022 09:24:37 -0400 Subject: [PATCH 15/73] Refactor a tool_shed-only utility into tool shed code. 
--- lib/galaxy/tool_shed/util/shed_util_common.py | 9 --------- lib/tool_shed/util/shed_util_common.py | 9 ++++++++- 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/lib/galaxy/tool_shed/util/shed_util_common.py b/lib/galaxy/tool_shed/util/shed_util_common.py index 9e35745495de..76c757082b87 100644 --- a/lib/galaxy/tool_shed/util/shed_util_common.py +++ b/lib/galaxy/tool_shed/util/shed_util_common.py @@ -5,7 +5,6 @@ from galaxy.model.base import transaction from galaxy.tool_shed.util import repository_util from galaxy.util.tool_shed import common_util -from galaxy.web import url_for log = logging.getLogger(__name__) @@ -174,13 +173,6 @@ def set_image_paths(app, text, encoded_repository_id=None, tool_shed_repository= return text -def tool_shed_is_this_tool_shed(toolshed_base_url): - """Determine if a tool shed is the current tool shed.""" - cleaned_toolshed_base_url = common_util.remove_protocol_from_tool_shed_url(toolshed_base_url) - cleaned_tool_shed = common_util.remove_protocol_from_tool_shed_url(str(url_for("/", qualified=True))) - return cleaned_toolshed_base_url == cleaned_tool_shed - - __all__ = ( "can_eliminate_repository_dependency", "clean_dependency_relationships", @@ -192,5 +184,4 @@ def tool_shed_is_this_tool_shed(toolshed_base_url): "get_user", "have_shed_tool_conf_for_install", "set_image_paths", - "tool_shed_is_this_tool_shed", ) diff --git a/lib/tool_shed/util/shed_util_common.py b/lib/tool_shed/util/shed_util_common.py index 85bde805a932..a37243a648d6 100644 --- a/lib/tool_shed/util/shed_util_common.py +++ b/lib/tool_shed/util/shed_util_common.py @@ -23,12 +23,12 @@ get_user, have_shed_tool_conf_for_install, set_image_paths, - tool_shed_is_this_tool_shed, ) from galaxy.util import ( checkers, unicodify, ) +from galaxy.web import url_for from tool_shed.util import ( basic_util, common_util, @@ -449,6 +449,13 @@ def open_repository_files_folder(app, folder_path, repository_id, is_admin=False return folder_contents +def tool_shed_is_this_tool_shed(toolshed_base_url): + """Determine if a tool shed is the current tool shed.""" + cleaned_toolshed_base_url = common_util.remove_protocol_from_tool_shed_url(toolshed_base_url) + cleaned_tool_shed = common_util.remove_protocol_from_tool_shed_url(str(url_for("/", qualified=True))) + return cleaned_toolshed_base_url == cleaned_tool_shed + + __all__ = ( "can_eliminate_repository_dependency", "clean_dependency_relationships", From f1c2dde09f50f3b4a3f30594029268c212dce294 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Thu, 22 Sep 2022 11:41:20 -0400 Subject: [PATCH 16/73] Protocol to try to break up app for cleaner tool shed install typing. 
--- lib/galaxy/app.py | 27 +++++--- lib/galaxy/app_unittest_utils/galaxy_mock.py | 7 +- lib/galaxy/managers/tool_data.py | 6 +- .../model/tool_shed_install/__init__.py | 31 +++++++-- lib/galaxy/queue_worker.py | 5 +- lib/galaxy/structured_app.py | 9 ++- lib/galaxy/tool_shed/galaxy_install/client.py | 69 +++++++++++++++++++ .../galaxy_install/install_manager.py | 6 +- .../installed_repository_manager.py | 6 +- .../installed_repository_metadata_manager.py | 6 +- .../galaxy_install/tools/data_manager.py | 15 ++-- .../tools/tool_panel_manager.py | 8 +-- .../tool_shed/metadata/metadata_generator.py | 19 +++-- .../tool_shed/tools/data_table_manager.py | 12 +++- lib/galaxy/tools/__init__.py | 2 +- lib/galaxy/tools/data_manager/manager.py | 23 +++++-- .../metadata/repository_metadata_manager.py | 4 +- test/unit/app/jobs/test_job_wrapper.py | 2 +- test/unit/app/tools/test_toolbox.py | 2 +- .../test_installed_repository_manager.py | 2 +- test/unit/workflows/workflow_support.py | 2 +- 21 files changed, 195 insertions(+), 68 deletions(-) create mode 100644 lib/galaxy/tool_shed/galaxy_install/client.py diff --git a/lib/galaxy/app.py b/lib/galaxy/app.py index 7767f32b2a48..8743d155a0d8 100644 --- a/lib/galaxy/app.py +++ b/lib/galaxy/app.py @@ -84,7 +84,10 @@ install_model_scoped_session, ) from galaxy.model.tags import GalaxyTagHandler -from galaxy.model.tool_shed_install import mapping as install_mapping +from galaxy.model.tool_shed_install import ( + HasToolBox, + mapping as install_mapping, +) from galaxy.objectstore import ( BaseObjectStore, build_object_store_from_config, @@ -105,8 +108,10 @@ VaultFactory, ) from galaxy.tool_shed.cache import ToolShedRepositoryCache +from galaxy.tool_shed.galaxy_install.client import InstallationTarget from galaxy.tool_shed.galaxy_install.installed_repository_manager import InstalledRepositoryManager from galaxy.tool_shed.galaxy_install.update_repository_manager import UpdateRepositoryManager +from galaxy.tool_util.data import ToolDataTableManager as BaseToolDataTableManager from galaxy.tool_util.deps import containers from galaxy.tool_util.deps.dependencies import AppInfo from galaxy.tool_util.deps.views import DependencyResolversView @@ -214,14 +219,13 @@ def configure_sentry_client(self): ) -class MinimalGalaxyApplication(BasicSharedApp, HaltableContainer, SentryClientMixin): +class MinimalGalaxyApplication(BasicSharedApp, HaltableContainer, SentryClientMixin, HasToolBox): """Encapsulates the state of a minimal Galaxy application""" model: GalaxyModelMapping config: config.GalaxyAppConfiguration tool_cache: ToolCache job_config: jobs.JobConfiguration - toolbox: tools.ToolBox toolbox_search: ToolBoxSearch container_finder: containers.ContainerFinder install_model: ModelMapping @@ -297,15 +301,12 @@ def _configure_tool_config_files(self): self.config.tool_configs.append(self.config.migrated_tools_config) def _configure_toolbox(self): - if not isinstance(self, BasicSharedApp): - raise Exception("Must inherit from BasicSharedApp") - self.citations_manager = CitationsManager(self) self.biotools_metadata_source = get_galaxy_biotools_metadata_source(self.config) self.dynamic_tools_manager = DynamicToolManager(self) self._toolbox_lock = threading.RLock() - self.toolbox = tools.ToolBox(self.config.tool_configs, self.config.tool_path, self) + self._toolbox = tools.ToolBox(self.config.tool_configs, self.config.tool_path, self) galaxy_root_dir = os.path.abspath(self.config.root) file_path = os.path.abspath(self.config.file_path) app_info = AppInfo( @@ -345,6 +346,10 @@ def 
_configure_toolbox(self): ToolBoxSearch(self.toolbox, index_dir=self.config.tool_search_index_dir, index_help=index_help), ) + @property + def toolbox(self) -> tools.ToolBox: + return self._toolbox + def reindex_tool_search(self) -> None: # Call this when tools are added or removed. self.toolbox_search.build_index(tool_cache=self.tool_cache, toolbox=self.toolbox) @@ -366,7 +371,7 @@ def _set_enabled_container_types(self): def _configure_tool_data_tables(self, from_shed_config): # Initialize tool data tables using the config defined by self.config.tool_data_table_config_path. - self.tool_data_tables = ToolDataTableManager( + self.tool_data_tables: BaseToolDataTableManager = ToolDataTableManager( tool_data_path=self.config.tool_data_path, config_filename=self.config.tool_data_table_config_path, other_config_dict=self.config, @@ -488,7 +493,7 @@ def _wait_for_database(self, url): time.sleep(pause) @property - def tool_dependency_dir(self): + def tool_dependency_dir(self) -> Optional[str]: return self.toolbox.dependency_manager.default_base_path def _shutdown_object_store(self): @@ -498,7 +503,7 @@ def _shutdown_model(self): self.model.engine.dispose() -class GalaxyManagerApplication(MinimalManagerApp, MinimalGalaxyApplication): +class GalaxyManagerApplication(MinimalManagerApp, MinimalGalaxyApplication, InstallationTarget[tools.ToolBox]): """Extends the MinimalGalaxyApplication with most managers that are not tied to a web or job handling context.""" model: GalaxyModelMapping @@ -685,7 +690,7 @@ def __init__(self, **kwargs) -> None: self.watchers = self._register_singleton(ConfigWatchers) self._configure_toolbox() # Load Data Manager - self.data_managers = self._register_singleton(DataManagers) + self.data_managers = self._register_singleton(DataManagers) # type: ignore[type-abstract] # Load the update repository manager. self.update_repository_manager = self._register_singleton( UpdateRepositoryManager, UpdateRepositoryManager(self) diff --git a/lib/galaxy/app_unittest_utils/galaxy_mock.py b/lib/galaxy/app_unittest_utils/galaxy_mock.py index e1870b023974..65d559a851a2 100644 --- a/lib/galaxy/app_unittest_utils/galaxy_mock.py +++ b/lib/galaxy/app_unittest_utils/galaxy_mock.py @@ -94,7 +94,7 @@ class MockApp(di.Container, GalaxyDataTestApp): config: "MockAppConfig" amqp_type: str job_search: Optional[JobSearch] = None - toolbox: ToolBox + _toolbox: ToolBox tool_cache: ToolCache install_model: ModelMapping watchers: ConfigWatchers @@ -110,6 +110,7 @@ def __init__(self, config=None, **kwargs) -> None: super().__init__() config = config or MockAppConfig(**kwargs) GalaxyDataTestApp.__init__(self, config=config, **kwargs) + self.install_model = self.model self[BasicSharedApp] = cast(BasicSharedApp, self) self[MinimalManagerApp] = cast(MinimalManagerApp, self) # type: ignore[type-abstract] self[StructuredApp] = cast(StructuredApp, self) # type: ignore[type-abstract] @@ -153,6 +154,10 @@ def url_for(*args, **kwds): self.url_for = url_for + @property + def toolbox(self) -> ToolBox: + return self._toolbox + def wait_for_toolbox_reload(self, toolbox): # TODO: If the tpm test case passes, does the operation really # need to wait. 
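The hunks above capture the core trick of this patch: `toolbox` becomes a read-only property backed by a private `_toolbox` attribute, so the member can be declared on shared base classes while concrete applications and test mocks keep control of assignment. A minimal sketch of the pattern, using illustrative names (`SimpleToolBox`, `MiniApp`) that are not part of the Galaxy codebase:

class SimpleToolBox:
    """Stand-in for Galaxy's AbstractToolBox."""

class MiniApp:
    _toolbox: SimpleToolBox

    @property
    def toolbox(self) -> SimpleToolBox:
        # Read-only from the outside; tests assign the private attribute
        # directly, mirroring the `app._toolbox = ...` test hunks later
        # in this patch.
        return self._toolbox

app = MiniApp()
app._toolbox = SimpleToolBox()
assert isinstance(app.toolbox, SimpleToolBox)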
diff --git a/lib/galaxy/managers/tool_data.py b/lib/galaxy/managers/tool_data.py index fe4f7f2a215b..b754e0296e60 100644 --- a/lib/galaxy/managers/tool_data.py +++ b/lib/galaxy/managers/tool_data.py @@ -13,7 +13,10 @@ MinimalManagerApp, StructuredApp, ) -from galaxy.tool_util.data import BundleProcessingOptions +from galaxy.tool_util.data import ( + BundleProcessingOptions, + ToolDataTableManager, +) from galaxy.tool_util.data._schema import ( ToolDataDetails, ToolDataEntryList, @@ -23,7 +26,6 @@ TabularToolDataField, TabularToolDataTable, ToolDataTable, - ToolDataTableManager, ) diff --git a/lib/galaxy/model/tool_shed_install/__init__.py b/lib/galaxy/model/tool_shed_install/__init__.py index 8bd5f9169065..d406935c79fb 100644 --- a/lib/galaxy/model/tool_shed_install/__init__.py +++ b/lib/galaxy/model/tool_shed_install/__init__.py @@ -22,12 +22,17 @@ registry, relationship, ) +from typing_extensions import Protocol from galaxy.model.custom_types import ( MutableJSONType, TrimmedString, ) from galaxy.model.orm.now import now +from galaxy.tool_util.toolbox.base import ( + AbstractToolBox, + DynamicToolConfDict, +) from galaxy.util import asbool from galaxy.util.bunch import Bunch from galaxy.util.dictifiable import Dictifiable @@ -48,6 +53,16 @@ class DeclarativeMeta(_DeclarativeMeta, type): from sqlalchemy.orm.decl_api import DeclarativeMeta +class HasToolBox(common_util.HasToolShedRegistry, Protocol): + @property + def tool_dependency_dir(self) -> Optional[str]: + ... + + @property + def toolbox(self) -> AbstractToolBox: + ... + + class Base(metaclass=DeclarativeMeta): __abstract__ = True registry = mapper_registry @@ -203,7 +218,7 @@ def can_deactivate(self): def can_reinstall_or_activate(self): return self.deleted - def get_sharable_url(self, app): + def get_sharable_url(self, app: HasToolBox): return common_util.get_tool_shed_repository_url(app, self.tool_shed, self.owner, self.name) @property @@ -214,7 +229,7 @@ def shed_config_filename(self): def shed_config_filename(self, value): self.metadata_["shed_config_filename"] = os.path.abspath(value) - def get_shed_config_dict(self, app): + def get_shed_config_dict(self, app: HasToolBox) -> DynamicToolConfDict: """ Return the in-memory version of the shed_tool_conf file, which is stored in the config_elems entry in the shed_tool_conf_dict. 
@@ -225,7 +240,7 @@ def get_shed_config_dict(self, app): return shed_config_dict return self.guess_shed_config(app) - def get_tool_relative_path(self, app): + def get_tool_relative_path(self, app: HasToolBox): # This is a somewhat public function, used by data_manager_manual for instance shed_conf_dict = self.get_shed_config_dict(app) tool_path = None @@ -237,7 +252,7 @@ def get_tool_relative_path(self, app): ) return tool_path, relative_path - def guess_shed_config(self, app): + def guess_shed_config(self, app: HasToolBox): tool_ids = [] for tool in self.metadata_.get("tools", []): tool_ids.append(tool.get("guid")) @@ -395,13 +410,13 @@ def missing_tool_dependencies(self): missing_dependencies.append(tool_dependency) return missing_dependencies - def repo_files_directory(self, app): + def repo_files_directory(self, app: HasToolBox): repo_path = self.repo_path(app) if repo_path: return os.path.join(repo_path, self.name) return None - def repo_path(self, app): + def repo_path(self, app: HasToolBox): tool_shed = common_util.remove_protocol_and_port_from_tool_shed_url(self.tool_shed) for shed_tool_conf_dict in app.toolbox.dynamic_confs(include_migrated_tool_conf=True): tool_path = shed_tool_conf_dict["tool_path"] @@ -731,8 +746,9 @@ def can_update(self): def in_error_state(self): return self.status == self.installation_status.ERROR - def installation_directory(self, app): + def installation_directory(self, app: HasToolBox) -> Optional[str]: if self.type == "package": + assert app.tool_dependency_dir return os.path.join( app.tool_dependency_dir, self.name, @@ -742,6 +758,7 @@ def installation_directory(self, app): self.tool_shed_repository.installed_changeset_revision, ) if self.type == "set_environment": + assert app.tool_dependency_dir return os.path.join( app.tool_dependency_dir, "environment_settings", diff --git a/lib/galaxy/queue_worker.py b/lib/galaxy/queue_worker.py index 7de46f1b9a5c..e47ea390a5ea 100644 --- a/lib/galaxy/queue_worker.py +++ b/lib/galaxy/queue_worker.py @@ -185,7 +185,7 @@ def _get_new_toolbox(app, save_integrated_tool_panel=True): app.datatypes_registry.load_external_metadata_tool(new_toolbox) load_lib_tools(new_toolbox) [new_toolbox.register_tool(tool) for tool in new_toolbox.data_manager_tools.values()] - app.toolbox = new_toolbox + app._toolbox = new_toolbox app.toolbox.persist_cache() @@ -195,9 +195,8 @@ def reload_data_managers(app, **kwargs): log.debug("Executing data managers reload on '%s'", app.config.server_name) app._configure_tool_data_tables(from_shed_config=False) reload_tool_data_tables(app) - reload_count = app.data_managers._reload_count app.data_managers = DataManagers(app) - app.data_managers._reload_count = reload_count + 1 + app.data_managers.increment_reload_count() if hasattr(app, "tool_cache"): app.tool_cache.reset_status() if hasattr(app, "watchers"): diff --git a/lib/galaxy/structured_app.py b/lib/galaxy/structured_app.py index d18638c97c09..ba62268c35c6 100644 --- a/lib/galaxy/structured_app.py +++ b/lib/galaxy/structured_app.py @@ -30,6 +30,7 @@ from galaxy.security.idencoding import IdEncodingHelper from galaxy.security.vault import Vault from galaxy.tool_shed.cache import ToolShedRepositoryCache +from galaxy.tool_util.data import ToolDataTableManager from galaxy.tool_util.deps.containers import ContainerFinder from galaxy.tool_util.deps.views import DependencyResolversView from galaxy.tool_util.verify import test_data @@ -48,7 +49,6 @@ from galaxy.tool_shed.galaxy_install.installed_repository_manager import InstalledRepositoryManager from 
galaxy.tools import ToolBox from galaxy.tools.cache import ToolCache - from galaxy.tools.data import ToolDataTableManager from galaxy.tools.error_reports import ErrorReports from galaxy.visualization.genomes import Genomes @@ -67,10 +67,13 @@ class BasicSharedApp(Container): model: SharedModelMapping security: IdEncodingHelper auth_manager: AuthManager - toolbox: "ToolBox" security_agent: Any quota_agent: QuotaAgent + @property + def toolbox(self) -> "ToolBox": + raise NotImplementedError() + class MinimalToolApp(Protocol): is_webapp: bool @@ -150,7 +153,7 @@ class StructuredApp(MinimalManagerApp): webhooks_registry: WebhooksRegistry queue_worker: Any # 'galaxy.queue_worker.GalaxyQueueWorker' data_provider_registry: Any # 'galaxy.visualization.data_providers.registry.DataProviderRegistry' - tool_data_tables: "ToolDataTableManager" + tool_data_tables: ToolDataTableManager tool_cache: "ToolCache" tool_shed_repository_cache: Optional[ToolShedRepositoryCache] watchers: "ConfigWatchers" diff --git a/lib/galaxy/tool_shed/galaxy_install/client.py b/lib/galaxy/tool_shed/galaxy_install/client.py new file mode 100644 index 000000000000..c537b9d1def0 --- /dev/null +++ b/lib/galaxy/tool_shed/galaxy_install/client.py @@ -0,0 +1,69 @@ +import threading +from typing import ( + Any, + Generic, + List, + Optional, + TYPE_CHECKING, + TypeVar, + Union, +) + +from typing_extensions import Protocol + +from galaxy.model.base import ModelMapping +from galaxy.model.tool_shed_install import HasToolBox +from galaxy.security.idencoding import IdEncodingHelper +from galaxy.tool_shed.cache import ToolShedRepositoryCache +from galaxy.tool_util.data import ToolDataTableManager +from galaxy.tool_util.toolbox.base import AbstractToolBox + +if TYPE_CHECKING: + import galaxy.tool_shed.metadata.installed_repository_manger + + +class DataManagerInterface(Protocol): + GUID_TYPE: str = "data_manager" + DEFAULT_VERSION: str = "0.0.1" + + def process_result(self, out_data): + ... + + def write_bundle(self, out) -> None: + ... + + +class DataManagersInterface(Protocol): + @property + def _reload_count(self) -> int: + ... + + def load_manager_from_elem( + self, data_manager_elem, tool_path=None, add_manager=True + ) -> Optional[DataManagerInterface]: + ... + + def get_manager(self, data_manager_id: str) -> Optional[DataManagerInterface]: + ... + + def remove_manager(self, manager_ids: Union[str, List[str]]) -> None: + ... + + +ToolBoxType = TypeVar("ToolBoxType", bound="AbstractToolBox") + + +class InstallationTarget(HasToolBox, Generic[ToolBoxType]): + data_managers: DataManagersInterface + install_model: ModelMapping + model: ModelMapping + security: IdEncodingHelper + config: Any + installed_repository_manager: "galaxy.tool_shed.metadata.installed_repository_manger.InstalledRepositoryManager" + watchers: Any # TODO: interface... + _toolbox_lock: threading.RLock + tool_shed_repository_cache: Optional[ToolShedRepositoryCache] + tool_data_tables: ToolDataTableManager + + def wait_for_toolbox_reload(self, old_toolbox: ToolBoxType) -> None: + ... 
diff --git a/lib/galaxy/tool_shed/galaxy_install/install_manager.py b/lib/galaxy/tool_shed/galaxy_install/install_manager.py index eb81181d5055..7a2aa68a835a 100644 --- a/lib/galaxy/tool_shed/galaxy_install/install_manager.py +++ b/lib/galaxy/tool_shed/galaxy_install/install_manager.py @@ -16,7 +16,7 @@ util, ) from galaxy.model.base import transaction -from galaxy.structured_app import StructuredApp +from galaxy.tool_shed.galaxy_install.client import InstallationTarget from galaxy.tool_shed.galaxy_install.metadata.installed_repository_metadata_manager import ( InstalledRepositoryMetadataManager, ) @@ -46,10 +46,10 @@ class InstallRepositoryManager: - app: StructuredApp + app: InstallationTarget tpm: tool_panel_manager.ToolPanelManager - def __init__(self, app: StructuredApp, tpm: Optional[tool_panel_manager.ToolPanelManager] = None): + def __init__(self, app: InstallationTarget, tpm: Optional[tool_panel_manager.ToolPanelManager] = None): self.app = app self.install_model = self.app.install_model self._view = views.DependencyResolversView(app) diff --git a/lib/galaxy/tool_shed/galaxy_install/installed_repository_manager.py b/lib/galaxy/tool_shed/galaxy_install/installed_repository_manager.py index 4474b8841e8a..76bf5b0b24c1 100644 --- a/lib/galaxy/tool_shed/galaxy_install/installed_repository_manager.py +++ b/lib/galaxy/tool_shed/galaxy_install/installed_repository_manager.py @@ -20,7 +20,7 @@ ToolDependency, ToolShedRepository, ) -from galaxy.structured_app import MinimalManagerApp +from galaxy.tool_shed.galaxy_install.client import InstallationTarget from galaxy.tool_shed.galaxy_install.metadata.installed_repository_metadata_manager import ( InstalledRepositoryMetadataManager, ) @@ -44,14 +44,14 @@ class InstalledRepositoryManager: - app: MinimalManagerApp + app: InstallationTarget _tool_paths: List[str] installed_repository_dicts: List[Dict[str, Any]] repository_dependencies_of_installed_repositories: Dict[RepositoryTupleT, List[RepositoryTupleT]] installed_repository_dependencies_of_installed_repositories: Dict[RepositoryTupleT, List[RepositoryTupleT]] installed_dependent_repositories_of_installed_repositories: Dict[RepositoryTupleT, List[RepositoryTupleT]] - def __init__(self, app: MinimalManagerApp): + def __init__(self, app: InstallationTarget): """ Among other things, keep in in-memory sets of tuples defining installed repositories and tool dependencies along with the relationships between each of them. 
This will allow for quick discovery of those repositories or components that diff --git a/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py b/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py index 3603d150cd2a..f8fa39bf80bb 100644 --- a/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py +++ b/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py @@ -11,7 +11,7 @@ from galaxy import util from galaxy.model.base import transaction from galaxy.model.tool_shed_install import ToolShedRepository -from galaxy.structured_app import MinimalManagerApp +from galaxy.tool_shed.galaxy_install.client import InstallationTarget from galaxy.tool_shed.galaxy_install.tools import tool_panel_manager from galaxy.tool_shed.metadata.metadata_generator import GalaxyMetadataGenerator from galaxy.tool_shed.util.repository_util import ( @@ -30,9 +30,11 @@ class InstalledRepositoryMetadataManager(GalaxyMetadataGenerator): + app: InstallationTarget + def __init__( self, - app: MinimalManagerApp, + app: InstallationTarget, tpm: Optional[tool_panel_manager.ToolPanelManager] = None, repository: Optional[ToolShedRepository] = None, changeset_revision: Optional[str] = None, diff --git a/lib/galaxy/tool_shed/galaxy_install/tools/data_manager.py b/lib/galaxy/tool_shed/galaxy_install/tools/data_manager.py index 163822c290a7..d3cce73744b5 100644 --- a/lib/galaxy/tool_shed/galaxy_install/tools/data_manager.py +++ b/lib/galaxy/tool_shed/galaxy_install/tools/data_manager.py @@ -7,9 +7,12 @@ Dict, List, Optional, - TYPE_CHECKING, ) +from galaxy.tool_shed.galaxy_install.client import ( + DataManagerInterface, + InstallationTarget, +) from galaxy.util import ( Element, etree, @@ -21,9 +24,6 @@ from galaxy.util.tool_shed.xml_util import parse_xml from . 
import tool_panel_manager -if TYPE_CHECKING: - from galaxy.tools.data_manager.manager import DataManager - log = logging.getLogger(__name__) SHED_DATA_MANAGER_CONF_XML = """ @@ -33,9 +33,10 @@ class DataManagerHandler: + app: InstallationTarget root: Optional[Element] = None - def __init__(self, app): + def __init__(self, app: InstallationTarget): self.app = app @property @@ -73,8 +74,8 @@ def install_data_managers( relative_install_dir: StrPath, repository, repository_tools_tups, - ) -> List["DataManager"]: - rval: List[DataManager] = [] + ) -> List["DataManagerInterface"]: + rval: List["DataManagerInterface"] = [] if "data_manager" in metadata_dict: tpm = tool_panel_manager.ToolPanelManager(self.app) repository_tools_by_guid = {} diff --git a/lib/galaxy/tool_shed/galaxy_install/tools/tool_panel_manager.py b/lib/galaxy/tool_shed/galaxy_install/tools/tool_panel_manager.py index 344f50fd6c6c..c68ba7fe4be3 100644 --- a/lib/galaxy/tool_shed/galaxy_install/tools/tool_panel_manager.py +++ b/lib/galaxy/tool_shed/galaxy_install/tools/tool_panel_manager.py @@ -8,7 +8,7 @@ from galaxy.exceptions import RequestParameterInvalidException from galaxy.model.base import transaction -from galaxy.structured_app import MinimalManagerApp +from galaxy.tool_shed.galaxy_install.client import InstallationTarget from galaxy.tool_shed.util.basic_util import strip_path from galaxy.tool_shed.util.repository_util import get_repository_owner from galaxy.tool_shed.util.shed_util_common import get_tool_panel_config_tool_path_install_dir @@ -25,9 +25,9 @@ class ToolPanelManager: - app: MinimalManagerApp + app: InstallationTarget - def __init__(self, app: MinimalManagerApp): + def __init__(self, app: InstallationTarget): self.app = app def add_to_shed_tool_config(self, shed_tool_conf_dict: Dict[str, Any], elem_list: list) -> None: @@ -135,7 +135,7 @@ def add_to_tool_panel( self.app.toolbox.update_shed_config(shed_tool_conf_dict) self.add_to_shed_tool_config(shed_tool_conf_dict, elem_list) - def config_elems_to_xml_file(self, config_elems, config_filename, tool_path, tool_cache_data_dir=None): + def config_elems_to_xml_file(self, config_elems, config_filename, tool_path, tool_cache_data_dir=None) -> None: """ Persist the current in-memory list of config_elems to a file named by the value of config_filename. 
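Several hunks in this series add bare `assert self.repository` (or `assert app.tool_dependency_dir`) lines before first use. These are mypy narrowing guards: the attribute is typed Optional, the assert narrows it to the non-None type for the rest of the method, and a misconfigured caller fails immediately instead of deep inside hg or file handling. A small self-contained illustration; the `Example` class is hypothetical:

from typing import Optional

class Example:
    repository: Optional[str] = None  # stands in for the Optional ORM object

    def describe(self) -> str:
        # Without the assert, mypy flags `.upper()` on Optional[str];
        # with it, self.repository is str for the remainder of the method.
        assert self.repository
        return self.repository.upper()

e = Example()
e.repository = "iuc/bwa"
print(e.describe())  # IUC/BWA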
diff --git a/lib/galaxy/tool_shed/metadata/metadata_generator.py b/lib/galaxy/tool_shed/metadata/metadata_generator.py index 4fd179aee8d5..b10e29148e40 100644 --- a/lib/galaxy/tool_shed/metadata/metadata_generator.py +++ b/lib/galaxy/tool_shed/metadata/metadata_generator.py @@ -8,13 +8,18 @@ List, Optional, Tuple, + Union, ) from typing_extensions import Protocol from galaxy import util from galaxy.model.tool_shed_install import ToolShedRepository -from galaxy.structured_app import MinimalManagerApp +from galaxy.structured_app import BasicSharedApp +from galaxy.tool_shed.galaxy_install.client import ( + DataManagerInterface, + InstallationTarget, +) from galaxy.tool_shed.repository_type import ( REPOSITORY_DEPENDENCY_DEFINITION_FILENAME, TOOL_DEPENDENCY_DEFINITION_FILENAME, @@ -34,7 +39,6 @@ from galaxy.tool_shed.util.repository_util import get_repository_for_dependency_relationship from galaxy.tool_util.loader_directory import looks_like_a_tool from galaxy.tool_util.parser.interface import TestCollectionDef -from galaxy.tools.data_manager.manager import DataManager from galaxy.tools.repositories import ValidationContext from galaxy.util.tool_shed.common_util import ( generate_clone_url_for_installed_repository, @@ -66,7 +70,7 @@ def repo_path(self, app) -> Optional[str]: class BaseMetadataGenerator: - app: MinimalManagerApp + app: Union[BasicSharedApp, InstallationTarget] repository: Optional[RepositoryProtocol] invalid_file_tups: List[InvalidFileT] changeset_revision: Optional[str] @@ -142,8 +146,8 @@ def _generate_data_manager_metadata( continue # FIXME: default behavior is to fall back to tool.name. data_manager_name = data_manager_elem.get("name", data_manager_id) - version = data_manager_elem.get("version", DataManager.DEFAULT_VERSION) - guid = self._generate_guid_for_object(DataManager.GUID_TYPE, data_manager_id, version) + version = data_manager_elem.get("version", DataManagerInterface.DEFAULT_VERSION) + guid = self._generate_guid_for_object(DataManagerInterface.GUID_TYPE, data_manager_id, version) data_tables = [] if tool_file is None: log.error(f'Data Manager entry is missing tool_file attribute in "{data_manager_config_filename}".') @@ -699,7 +703,8 @@ def _check_elem_for_dep(elems): if original_valid_tool_dependencies_dict: # We're generating metadata on an update pulled to a tool shed repository installed # into a Galaxy instance, so handle changes to tool dependencies appropriately. 
- irm = self.app.installed_repository_manager + installation_target = cast(InstallationTarget, self.app) + irm = installation_target.installed_repository_manager ( updated_tool_dependency_names, deleted_tool_dependency_names, @@ -851,7 +856,7 @@ class GalaxyMetadataGenerator(BaseMetadataGenerator): def __init__( self, - app: MinimalManagerApp, + app: InstallationTarget, repository=None, changeset_revision: Optional[str] = None, repository_clone_url: Optional[str] = None, diff --git a/lib/galaxy/tool_shed/tools/data_table_manager.py b/lib/galaxy/tool_shed/tools/data_table_manager.py index c47b034113c1..1a6faf6ff8f4 100644 --- a/lib/galaxy/tool_shed/tools/data_table_manager.py +++ b/lib/galaxy/tool_shed/tools/data_table_manager.py @@ -1,9 +1,13 @@ import logging import os import shutil -from typing import List +from typing import ( + List, + Union, +) -from galaxy.structured_app import StructuredApp +from galaxy.structured_app import BasicSharedApp +from galaxy.tool_shed.galaxy_install.client import InstallationTarget from galaxy.tool_shed.util import hg_util from galaxy.util import etree from galaxy.util.tool_shed import xml_util @@ -12,7 +16,9 @@ class ShedToolDataTableManager: - def __init__(self, app: StructuredApp): + app: Union[BasicSharedApp, InstallationTarget] + + def __init__(self, app: Union[BasicSharedApp, InstallationTarget]): self.app = app def generate_repository_info_elem( diff --git a/lib/galaxy/tools/__init__.py b/lib/galaxy/tools/__init__.py index fc9b86b07d24..405e80035768 100644 --- a/lib/galaxy/tools/__init__.py +++ b/lib/galaxy/tools/__init__.py @@ -3097,7 +3097,7 @@ def exec_after_process(self, app, inp_data, out_data, param_dict, job=None, fina super().exec_after_process(app, inp_data, out_data, param_dict, job=job, **kwds) # process results of tool data_manager_id = job.data_manager_association.data_manager_id - data_manager = self.app.data_managers.get_manager(data_manager_id, None) + data_manager = self.app.data_managers.get_manager(data_manager_id) assert ( data_manager is not None ), f"Invalid data manager ({data_manager_id}) requested. It may have been removed before the job completed." 
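The one-line change to exec_after_process above follows from pinning down the protocol: `DataManagersInterface.get_manager` takes a single id and returns an Optional, so the old dict-style extra `None` default argument had to go. Roughly the shape involved, with simplified stand-in types; `ManagerLookup` and the str values are illustrative:

from typing import Dict, Optional

from typing_extensions import Protocol

class ManagerLookup(Protocol):
    def get_manager(self, manager_id: str) -> Optional[str]:
        ...

class Managers:
    def __init__(self) -> None:
        self._managers: Dict[str, str] = {}

    def get_manager(self, manager_id: str) -> Optional[str]:
        # dict.get already yields None on a miss, so call sites no
        # longer pass an explicit None default.
        return self._managers.get(manager_id)

lookup: ManagerLookup = Managers()
assert lookup.get_manager("unknown") is None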
diff --git a/lib/galaxy/tools/data_manager/manager.py b/lib/galaxy/tools/data_manager/manager.py index aaa7e4d23ee3..a72801096ca6 100644 --- a/lib/galaxy/tools/data_manager/manager.py +++ b/lib/galaxy/tools/data_manager/manager.py @@ -3,13 +3,15 @@ import os from typing import ( Dict, + List, Optional, + Union, ) from typing_extensions import Protocol from galaxy import util -from galaxy.structured_app import StructuredApp +from galaxy.structured_app import MinimalManagerApp from galaxy.tool_util.data import ( BundleProcessingOptions, OutputDataset, @@ -19,20 +21,24 @@ RepoInfo, ) from galaxy.util import Element +from galaxy.tool_shed.galaxy_install.client import ( + DataManagerInterface, + DataManagersInterface, +) log = logging.getLogger(__name__) -class DataManagers: +class DataManagers(DataManagersInterface): data_managers: Dict[str, "DataManager"] managed_data_tables: Dict[str, "DataManager"] - def __init__(self, app: StructuredApp, xml_filename=None): + def __init__(self, app: MinimalManagerApp, xml_filename=None): self.app = app self.data_managers = {} self.managed_data_tables = {} self.tool_path = None - self._reload_count = 0 + self.__reload_count = 0 self.filename = xml_filename or self.app.config.data_manager_config_file for filename in util.listify(self.filename): if not filename: @@ -45,6 +51,13 @@ def __init__(self, app: StructuredApp, xml_filename=None): if exc.errno != errno.ENOENT or self.app.config.is_set("shed_data_manager_config_file"): raise + def increment_reload_count(self) -> None: + self.__reload_count += 1 + + @property + def _reload_count(self) -> int: + return self.__reload_count + def load_from_xml(self, xml_filename, store_tool_path=True) -> None: try: tree = util.parse_xml(xml_filename) @@ -102,7 +115,7 @@ def add_manager(self, data_manager): def get_manager(self, *args, **kwds): return self.data_managers.get(*args, **kwds) - def remove_manager(self, manager_ids): + def remove_manager(self, manager_ids: Union[str, List[str]]) -> None: if not isinstance(manager_ids, list): manager_ids = [manager_ids] for manager_id in manager_ids: diff --git a/lib/tool_shed/metadata/repository_metadata_manager.py b/lib/tool_shed/metadata/repository_metadata_manager.py index 22d44a7fa7f6..ff0d9381f3a3 100644 --- a/lib/tool_shed/metadata/repository_metadata_manager.py +++ b/lib/tool_shed/metadata/repository_metadata_manager.py @@ -15,7 +15,6 @@ from galaxy import util from galaxy.model.base import transaction -from galaxy.structured_app import MinimalManagerApp from galaxy.tool_shed.metadata.metadata_generator import ( BaseMetadataGenerator, HandleResultT, @@ -44,6 +43,7 @@ class ToolShedMetadataGenerator(BaseMetadataGenerator): """A MetadataGenerator building on ToolShed's app and repository constructs.""" + app: ToolShedApp repository: Optional[Repository] # why is mypy making me re-annotate these things from the base class, it didn't @@ -53,7 +53,7 @@ class ToolShedMetadataGenerator(BaseMetadataGenerator): def __init__( self, - app: MinimalManagerApp, + app: ToolShedApp, repository: Optional[Repository] = None, changeset_revision: Optional[str] = None, repository_clone_url: Optional[str] = None, diff --git a/test/unit/app/jobs/test_job_wrapper.py b/test/unit/app/jobs/test_job_wrapper.py index 43d958bc5400..40862720e24a 100644 --- a/test/unit/app/jobs/test_job_wrapper.py +++ b/test/unit/app/jobs/test_job_wrapper.py @@ -53,7 +53,7 @@ def setUp(self): self.model_objects: Dict[Type[Base], Dict[int, Base]] = {Job: {345: job}} self.app.model.session = 
MockContext(self.model_objects) - self.app.toolbox = cast(ToolBox, MockToolbox(MockTool(self))) + self.app._toolbox = cast(ToolBox, MockToolbox(MockTool(self))) self.working_directory = os.path.join(self.test_directory, "working") self.app.object_store = cast(BaseObjectStore, MockObjectStore(self.working_directory)) diff --git a/test/unit/app/tools/test_toolbox.py b/test/unit/app/tools/test_toolbox.py index 9b0e80ce4c23..6b6c780e6b0f 100644 --- a/test/unit/app/tools/test_toolbox.py +++ b/test/unit/app/tools/test_toolbox.py @@ -82,7 +82,7 @@ def assert_integerated_tool_panel(self, exists=True): @property def toolbox(self): if self._toolbox is None: - self.app.toolbox = self._toolbox = SimplifiedToolBox(self) + self.app._toolbox = self._toolbox = SimplifiedToolBox(self) return self._toolbox def setUp(self): diff --git a/test/unit/shed_unit/test_installed_repository_manager.py b/test/unit/shed_unit/test_installed_repository_manager.py index 27b21ef9eea3..58dc9d331b7f 100644 --- a/test/unit/shed_unit/test_installed_repository_manager.py +++ b/test/unit/shed_unit/test_installed_repository_manager.py @@ -25,7 +25,7 @@ def setUp(self): self._init_dynamic_tool_conf() self.app.config.tool_configs = self.config_files self.app.config.manage_dependency_relationships = False - self.app.toolbox = self.toolbox + self.app._toolbox = self.toolbox def _setup_repository(self): return self._repo_install(changeset="1", config_filename=self.config_files[0]) diff --git a/test/unit/workflows/workflow_support.py b/test/unit/workflows/workflow_support.py index f0b6fd064020..05064e722fac 100644 --- a/test/unit/workflows/workflow_support.py +++ b/test/unit/workflows/workflow_support.py @@ -35,7 +35,7 @@ def user(self): class MockApp(galaxy_mock.MockApp): def __init__(self): super().__init__() - self.toolbox = MockToolbox() + self._toolbox = MockToolbox() self.workflow_manager = WorkflowsManager(self) From 7f2bd7215edc3eb32c91993cc0d42e62054e8e86 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Thu, 31 Aug 2023 09:50:18 -0400 Subject: [PATCH 17/73] Better data managers interface? 
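
The queue worker now rebuilds the DataManagers registry on reload rather
than mutating it in place, so the public interface only needs the
read-only _reload_count property and the counter is carried forward
through the constructor. A minimal standalone sketch of the pattern (the
Registry and reload names below are illustrative, not Galaxy API):

    class Registry:
        """Stands in for DataManagers: replaced wholesale on reload."""

        def __init__(self, reload_count: int = 0) -> None:
            self.__reload_count = reload_count

        @property
        def _reload_count(self) -> int:
            return self.__reload_count

    def reload(current: Registry) -> Registry:
        # Carry the old counter forward so observers still see a
        # monotonically increasing reload count.
        return Registry(reload_count=current._reload_count + 1)

    registry = reload(Registry())
    assert registry._reload_count == 1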
--- lib/galaxy/queue_worker.py | 4 ++-- lib/galaxy/tools/data_manager/manager.py | 8 +++----- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/lib/galaxy/queue_worker.py b/lib/galaxy/queue_worker.py index e47ea390a5ea..e133a9ee399b 100644 --- a/lib/galaxy/queue_worker.py +++ b/lib/galaxy/queue_worker.py @@ -195,8 +195,8 @@ def reload_data_managers(app, **kwargs): log.debug("Executing data managers reload on '%s'", app.config.server_name) app._configure_tool_data_tables(from_shed_config=False) reload_tool_data_tables(app) - app.data_managers = DataManagers(app) - app.data_managers.increment_reload_count() + reload_count = app.data_managers._reload_count + 1 + app.data_managers = DataManagers(app, None, reload_count) if hasattr(app, "tool_cache"): app.tool_cache.reset_status() if hasattr(app, "watchers"): diff --git a/lib/galaxy/tools/data_manager/manager.py b/lib/galaxy/tools/data_manager/manager.py index a72801096ca6..a91512aadd6f 100644 --- a/lib/galaxy/tools/data_manager/manager.py +++ b/lib/galaxy/tools/data_manager/manager.py @@ -32,13 +32,14 @@ class DataManagers(DataManagersInterface): data_managers: Dict[str, "DataManager"] managed_data_tables: Dict[str, "DataManager"] + __reload_count: int - def __init__(self, app: MinimalManagerApp, xml_filename=None): + def __init__(self, app: MinimalManagerApp, xml_filename=None, reload_count: Optional[int] = None): self.app = app self.data_managers = {} self.managed_data_tables = {} self.tool_path = None - self.__reload_count = 0 + self.__reload_count = reload_count or 0 self.filename = xml_filename or self.app.config.data_manager_config_file for filename in util.listify(self.filename): if not filename: @@ -51,9 +52,6 @@ def __init__(self, app: MinimalManagerApp, xml_filename=None): if exc.errno != errno.ENOENT or self.app.config.is_set("shed_data_manager_config_file"): raise - def increment_reload_count(self) -> None: - self.__reload_count += 1 - @property def _reload_count(self) -> int: return self.__reload_count From dc047e80a675f341c78c87fa646d0e8f97290a16 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Tue, 20 Sep 2022 18:41:39 -0400 Subject: [PATCH 18/73] More typing around tool validator... 
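
ValidationContext previously stuffed its settings into an untyped Bunch,
which mypy cannot see through; it now gets a dedicated
ValidationContextConfig class so attribute access is checked. A minimal
sketch of that pattern (attribute set trimmed from the real class):

    from typing import Optional

    class ValidationContextConfig:
        # Annotations only: mypy checks every read and write, while
        # instances keep assigning attributes one by one as before.
        tool_data_path: Optional[str]
        shed_tool_data_path: Optional[str]
        interactivetools_enable: bool

    config = ValidationContextConfig()
    config.tool_data_path = "/tmp/tool_data"
    config.shed_tool_data_path = None
    config.interactivetools_enable = False

The RequiredAppT alias plays the same role for ShedToolDataTableManager
and ToolValidator: either a full Galaxy app (BasicSharedApp) or the
lighter InstallationTarget satisfies the attributes they actually touch.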
--- .../tool_shed/tools/data_table_manager.py | 6 +++-- lib/galaxy/tool_shed/tools/tool_validator.py | 7 ++++-- lib/galaxy/tools/repositories.py | 24 ++++++++++++++----- 3 files changed, 27 insertions(+), 10 deletions(-) diff --git a/lib/galaxy/tool_shed/tools/data_table_manager.py b/lib/galaxy/tool_shed/tools/data_table_manager.py index 1a6faf6ff8f4..41f91610c4a7 100644 --- a/lib/galaxy/tool_shed/tools/data_table_manager.py +++ b/lib/galaxy/tool_shed/tools/data_table_manager.py @@ -14,11 +14,13 @@ log = logging.getLogger(__name__) +RequiredAppT = Union[BasicSharedApp, InstallationTarget] + class ShedToolDataTableManager: - app: Union[BasicSharedApp, InstallationTarget] + app: RequiredAppT - def __init__(self, app: Union[BasicSharedApp, InstallationTarget]): + def __init__(self, app: RequiredAppT): self.app = app def generate_repository_info_elem( diff --git a/lib/galaxy/tool_shed/tools/tool_validator.py b/lib/galaxy/tool_shed/tools/tool_validator.py index 9b94813584a2..1d2780c81d3d 100644 --- a/lib/galaxy/tool_shed/tools/tool_validator.py +++ b/lib/galaxy/tool_shed/tools/tool_validator.py @@ -1,6 +1,9 @@ import logging -from galaxy.tool_shed.tools.data_table_manager import ShedToolDataTableManager +from galaxy.tool_shed.tools.data_table_manager import ( + RequiredAppT, + ShedToolDataTableManager, +) from galaxy.tool_shed.util import ( basic_util, hg_util, @@ -17,7 +20,7 @@ class ToolValidator: - def __init__(self, app): + def __init__(self, app: RequiredAppT): self.app = app self.stdtm = ShedToolDataTableManager(self.app) diff --git a/lib/galaxy/tools/repositories.py b/lib/galaxy/tools/repositories.py index c5e17273d0a0..50e848dda7ae 100644 --- a/lib/galaxy/tools/repositories.py +++ b/lib/galaxy/tools/repositories.py @@ -3,22 +3,34 @@ import shutil import tempfile from contextlib import contextmanager +from typing import Optional from galaxy.managers.dbkeys import GenomeBuilds from galaxy.tools.data import ToolDataTableManager from galaxy.util.bunch import Bunch +class ValidationContextConfig: + tool_data_path: Optional[str] + shed_tool_data_path: Optional[str] + tool_data_table_config: str + shed_tool_data_table_config: str + interactivetools_enable: bool + len_file_path: str + builds_file_path: Optional[str] + + class ValidationContext: """Minimal App object for tool validation.""" is_webapp = True + config: ValidationContextConfig def __init__( self, - app_name, - security, + app_name: str, model, + security, tool_data_path, shed_tool_data_path, tool_data_tables=None, @@ -27,9 +39,9 @@ def __init__( biotools_metadata_source=None, ): self.name = app_name - self.security = security self.model = model - self.config = Bunch() + self.security = security + self.config = ValidationContextConfig() self.config.tool_data_path = tool_data_path self.config.shed_tool_data_path = shed_tool_data_path self.temporary_path = tempfile.mkdtemp(prefix="tool_validation_") @@ -67,11 +79,11 @@ def from_app(app, work_dir=None): with ValidationContext( app_name=app.name, security=app.security, - model=app.model, + model=getattr(app, "model", None), tool_data_path=work_dir, shed_tool_data_path=work_dir, tool_data_tables=tool_data_tables, - registry=app.datatypes_registry, + registry=getattr(app, "datatypes_registry", None), hgweb_config_manager=getattr(app, "hgweb_config_manager", None), biotools_metadata_source=getattr(app, "biotools_metadata_source", None), ) as app: From 904a6006c551c7dbdc7ae0399082f02e068a8097 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Thu, 3 Nov 2022 13:29:02 -0400 Subject: [PATCH 
19/73] Additional debugging when handling API errors during repository upload. --- lib/tool_shed/webapp/api/repositories.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/lib/tool_shed/webapp/api/repositories.py b/lib/tool_shed/webapp/api/repositories.py index c5d3806e2b02..b69750ba2a56 100644 --- a/lib/tool_shed/webapp/api/repositories.py +++ b/lib/tool_shed/webapp/api/repositories.py @@ -1082,6 +1082,8 @@ def create_changeset_revision(self, trans, id, payload, **kwd): commit_message, new_repo_alert, ) + upload_message = message + files_removed = util.listify(undesirable_dirs_removed) + util.listify(undesirable_files_removed) if ok: # Update the repository files for browsing. hg_util.update_repository(repo_dir) @@ -1106,6 +1108,11 @@ def create_changeset_revision(self, trans, id, payload, **kwd): if os.path.exists(uploaded_file_name): os.remove(uploaded_file_name) if not ok: - return {"err_msg": message} + return { + "err_msg": message, + "content_alert": content_alert_str, + "files_removed": files_removed, + "upload_message": upload_message, + } else: return {"message": message} From ca1e7e53b9b834d52aea0a911bef13afb97ddddb Mon Sep 17 00:00:00 2001 From: John Chilton Date: Mon, 19 Sep 2022 17:20:25 -0400 Subject: [PATCH 20/73] Standalone app for installing tools... --- .../tool_shed/unittest_utils/__init__.py | 215 ++++++++++++++++++ lib/galaxy/tools/data_manager/manager.py | 9 +- test/unit/app/test_galaxy_install.py | 60 +++++ 3 files changed, 278 insertions(+), 6 deletions(-) create mode 100644 lib/galaxy/tool_shed/unittest_utils/__init__.py create mode 100644 test/unit/app/test_galaxy_install.py diff --git a/lib/galaxy/tool_shed/unittest_utils/__init__.py b/lib/galaxy/tool_shed/unittest_utils/__init__.py new file mode 100644 index 000000000000..7fb28659c83e --- /dev/null +++ b/lib/galaxy/tool_shed/unittest_utils/__init__.py @@ -0,0 +1,215 @@ +import threading +from pathlib import Path +from typing import ( + Any, + Dict, + List, + NamedTuple, + Optional, + Union, +) + +from galaxy.model.migrations import ( + DatabaseStateVerifier, + TSI, +) +from galaxy.model.orm.engine_factory import build_engine +from galaxy.model.tool_shed_install import mapping as install_mapping +from galaxy.security.idencoding import IdEncodingHelper +from galaxy.tool_shed.cache import ToolShedRepositoryCache +from galaxy.tool_shed.galaxy_install.client import ( + DataManagerInterface, + DataManagersInterface, + InstallationTarget, +) +from galaxy.tool_shed.util.repository_util import get_installed_repository +from galaxy.tool_util.data import ToolDataTableManager +from galaxy.tool_util.toolbox.base import AbstractToolBox +from galaxy.tool_util.toolbox.watcher import ( + get_tool_conf_watcher, + get_tool_watcher, +) +from galaxy.util.tool_shed.tool_shed_registry import Registry + + +class ToolShedTarget(NamedTuple): + url: str + name: str + + @property + def as_str(self) -> str: + return f""" + + + +""" + + +EMPTY_TOOL_DATA_TABLE_CONFIG = """ + + +""" + + +class Config: + tool_data_path: str + install_database_connection: str + install_database_engine_options: Dict[str, Any] = {} + update_integrated_tool_panel: bool = True + integrated_tool_panel_config: str + shed_tool_config_file: str + shed_tool_data_path: str + migrated_tools_config: Optional[str] = None + shed_tools_dir: str + edam_panel_views: list = [] + tool_configs: list = [] + shed_tool_data_table_config: str + shed_data_manager_config_file: str + + +class TestTool: + _macro_paths: List[str] = [] + 
params_with_missing_data_table_entry: list = [] + params_with_missing_index_file: list = [] + + def __init__(self, config_file, tool_shed_repository, guid): + self.config_file = config_file + self.tool_shed_repository = tool_shed_repository + self.guid = guid + self.id = guid + self.version = "1.0.0" + self.hidden = False + self._lineage = None + self.name = "test_tool" + + @property + def lineage(self): + return self._lineage + + +class TestToolBox(AbstractToolBox): + def create_tool(self, config_file, tool_cache_data_dir=None, **kwds): + tool = TestTool(config_file, kwds["tool_shed_repository"], kwds["guid"]) + tool._lineage = self._lineage_map.register(tool) # cleanup? + return tool + + def _get_tool_shed_repository(self, tool_shed, name, owner, installed_changeset_revision): + return get_installed_repository( + self.app, + tool_shed=tool_shed, + name=name, + owner=owner, + installed_changeset_revision=installed_changeset_revision, + from_cache=True, + ) + + +class Watchers: + def __init__(self, app): + self.app = app + self.tool_config_watcher = get_tool_conf_watcher( + reload_callback=self.app.reload_toolbox, + tool_cache=None, + ) + self.tool_watcher = get_tool_watcher(self, app.config) + + +class DummyDataManager(DataManagerInterface): + GUID_TYPE: str = "data_manager" + DEFAULT_VERSION: str = "0.0.1" + + def process_result(self, out_data): + return None + + def write_bundle(self, out) -> None: + return None + + +class StandaloneDataManagers(DataManagersInterface): + __reload_count = 0 + + def load_manager_from_elem( + self, data_manager_elem, tool_path=None, add_manager=True + ) -> Optional[DataManagerInterface]: + return DummyDataManager() + + def get_manager(self, data_manager_id: str) -> Optional[DataManagerInterface]: + return None + + def remove_manager(self, manager_ids: Union[str, List[str]]) -> None: + return None + + @property + def _reload_count(self) -> int: + self.__reload_count += 1 + return self.__reload_count + + +class StandaloneInstallationTarget(InstallationTarget): + name: str = "galaxy" + tool_shed_registry: Registry + security: IdEncodingHelper + _toolbox: TestToolBox + _toolbox_lock: threading.RLock = threading.RLock() + tool_shed_repository_cache: Optional[ToolShedRepositoryCache] = None + data_managers = StandaloneDataManagers() + + def __init__( + self, + target_directory: Path, + tool_shed_target: Optional[ToolShedTarget] = None, + ): + tool_root_dir = target_directory / "tools" + config: Config = Config() + install_db_path = str(target_directory / "install.sqlite") + config.tool_data_path = str(target_directory / "tool_data") + config.shed_tool_data_path = config.tool_data_path + config.install_database_connection = f"sqlite:///{install_db_path}?isolation_level=IMMEDIATE" + config.integrated_tool_panel_config = str(target_directory / "integrated.xml") + config.shed_tool_data_table_config = str(target_directory / "shed_tool_data_table_conf.xml") + shed_conf = target_directory / "shed_conf.xml" + shed_data_manager_config_file = target_directory / "shed_data_manager_conf.xml" + config.shed_data_manager_config_file = str(shed_data_manager_config_file) + config.shed_tool_config_file = str(shed_conf) + shed_conf.write_text(f'\n') + (target_directory / "shed_tool_data_table_conf.xml").write_text(EMPTY_TOOL_DATA_TABLE_CONFIG) + self.config = config + install_engine = build_engine(config.install_database_connection, config.install_database_engine_options) + self.security = IdEncodingHelper(id_secret="notasecretfortests") + DatabaseStateVerifier( + install_engine, 
+ TSI, + None, + None, + True, + False, + ).run() + self.install_model = install_mapping.configure_model_mapping(install_engine) + registry_config: Optional[Path] = None + if tool_shed_target: + registry_config = target_directory / "tool_sheds_conf.xml" + with registry_config.open("w") as f: + f.write(tool_shed_target.as_str) + + self.tool_shed_registry = Registry(registry_config) + self.tool_root_dir = tool_root_dir + self.tool_root_dir.mkdir() + config.shed_tools_dir = str(tool_root_dir) + self.watchers = Watchers(self) + self.reload_toolbox() + self.tool_data_tables = ToolDataTableManager( + tool_data_path=self.config.tool_data_path, + config_filename=self.config.shed_tool_data_table_config, + other_config_dict=self.config, + ) + + def reload_toolbox(self): + self._toolbox = TestToolBox( + config_filenames=[self.config.shed_tool_config_file], + tool_root_dir=self.tool_root_dir, + app=self, + ) + + @property + def toolbox(self) -> TestToolBox: + return self._toolbox diff --git a/lib/galaxy/tools/data_manager/manager.py b/lib/galaxy/tools/data_manager/manager.py index a91512aadd6f..622e7189d20f 100644 --- a/lib/galaxy/tools/data_manager/manager.py +++ b/lib/galaxy/tools/data_manager/manager.py @@ -11,7 +11,8 @@ from typing_extensions import Protocol from galaxy import util -from galaxy.structured_app import MinimalManagerApp +from galaxy.structured_app import StructuredApp +from galaxy.tool_shed.galaxy_install.client import DataManagersInterface from galaxy.tool_util.data import ( BundleProcessingOptions, OutputDataset, @@ -21,10 +22,6 @@ RepoInfo, ) from galaxy.util import Element -from galaxy.tool_shed.galaxy_install.client import ( - DataManagerInterface, - DataManagersInterface, -) log = logging.getLogger(__name__) @@ -34,7 +31,7 @@ class DataManagers(DataManagersInterface): managed_data_tables: Dict[str, "DataManager"] __reload_count: int - def __init__(self, app: MinimalManagerApp, xml_filename=None, reload_count: Optional[int] = None): + def __init__(self, app: StructuredApp, xml_filename=None, reload_count: Optional[int] = None): self.app = app self.data_managers = {} self.managed_data_tables = {} diff --git a/test/unit/app/test_galaxy_install.py b/test/unit/app/test_galaxy_install.py new file mode 100644 index 000000000000..f0898a79d348 --- /dev/null +++ b/test/unit/app/test_galaxy_install.py @@ -0,0 +1,60 @@ +"""Test installation using galaxy.tool_shed package. + +It should be able to quickly test installing things from the real tool shed +and from bootstrapped tool sheds. 
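+
+A typical standalone session looks roughly like this (a sketch; see
+test_against_production_shed below for the full assertions):
+
+    install_target = StandaloneInstallationTarget(tmp_path)
+    install_manager = InstallRepositoryManager(install_target)
+    install_manager.install(DEFAULT_TOOL_SHED_URL, "collection_column_join", "iuc", "dfde09461b1e", {})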
+""" +from pathlib import Path +from typing import ( + Any, + Dict, +) + +from galaxy.model.tool_shed_install import ToolShedRepository +from galaxy.tool_shed.galaxy_install.client import InstallationTarget +from galaxy.tool_shed.galaxy_install.install_manager import InstallRepositoryManager +from galaxy.tool_shed.galaxy_install.installed_repository_manager import InstalledRepositoryManager +from galaxy.tool_shed.unittest_utils import StandaloneInstallationTarget +from galaxy.tool_shed.util.repository_util import check_for_updates +from galaxy.util.tool_shed.tool_shed_registry import DEFAULT_TOOL_SHED_URL + + +def test_against_production_shed(tmp_path: Path): + repo_owner = "iuc" + repo_name = "collection_column_join" + repo_revision = "dfde09461b1e" + + install_target: InstallationTarget = StandaloneInstallationTarget(tmp_path) + install_manager = InstallRepositoryManager(install_target) + install_options: Dict[str, Any] = {} + install_manager.install( + DEFAULT_TOOL_SHED_URL, + repo_name, + repo_owner, + repo_revision, # revision 2, a known installable revision + install_options, + ) + with open(tmp_path / "shed_conf.xml") as f: + assert "toolshed.g2.bx.psu.edu/repos/iuc/collection_column_join/collection_column_join/0.0.2" in f.read() + repo_path = tmp_path / "tools" / "toolshed.g2.bx.psu.edu" / "repos" / repo_owner / repo_name / repo_revision + assert repo_path.exists() + + install_model_context = install_target.install_model.context + query = install_model_context.query(ToolShedRepository).where(ToolShedRepository.name == repo_name) + tsr = query.first() + assert tsr + message, status = check_for_updates( + install_target.tool_shed_registry, + install_model_context, + tsr.id, + ) + assert status + + irm = InstalledRepositoryManager(install_target) + errors = irm.uninstall_repository(repository=tsr, remove_from_disk=True) + assert not errors + + with open(tmp_path / "shed_conf.xml") as f: + assert "toolshed.g2.bx.psu.edu/repos/iuc/collection_column_join/collection_column_join/0.0.2" not in f.read() + + repo_path = tmp_path / "tools" / "toolshed.g2.bx.psu.edu" / "repos" / repo_owner / repo_name / repo_revision + assert not repo_path.exists() From 179f59217e4248aa9f964bcdedd685712aa52543 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Thu, 17 Aug 2023 14:49:15 -0400 Subject: [PATCH 21/73] Preparation for creating galaxy tool shed client package... 
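
The toolbox test scaffolding (SimplifiedToolBox, BaseToolBoxTestCase and
friends) moves from test/unit into lib/galaxy/app_unittest_utils so that
shed client tests, and eventually a separate package, can import it as
library code. A downstream test then only needs something like the
following (a hypothetical consumer, mirroring test_load_file below):

    from galaxy.app_unittest_utils.toolbox_support import BaseToolBoxTestCase

    class TestShedToolBox(BaseToolBoxTestCase):
        def test_loads_tool(self):
            # The base class supplies setUp/tearDown, config helpers
            # and the lazily constructed SimplifiedToolBox.
            self._init_tool()
            self._add_config(f"""<toolbox><tool file="{self._tool_path()}"/></toolbox>""")
            assert self.toolbox.get_tool("test_tool") is not None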
--- .../app_unittest_utils/toolbox_support.py | 193 +++++++++++++++++ test/unit/app/tools/test_toolbox.py | 197 +----------------- 2 files changed, 194 insertions(+), 196 deletions(-) create mode 100644 lib/galaxy/app_unittest_utils/toolbox_support.py diff --git a/lib/galaxy/app_unittest_utils/toolbox_support.py b/lib/galaxy/app_unittest_utils/toolbox_support.py new file mode 100644 index 000000000000..6308fb14244f --- /dev/null +++ b/lib/galaxy/app_unittest_utils/toolbox_support.py @@ -0,0 +1,193 @@ +import collections +import json +import logging +import os +import string +from typing import Optional + +from galaxy.app_unittest_utils.tools_support import UsesTools +from galaxy.config_watchers import ConfigWatchers +from galaxy.model import tool_shed_install +from galaxy.model.base import transaction +from galaxy.model.tool_shed_install import mapping +from galaxy.tools import ToolBox +from galaxy.tools.cache import ToolCache +from galaxy.util.unittest import TestCase + +log = logging.getLogger(__name__) + + +CONFIG_TEST_TOOL_VERSION_TEMPLATE = string.Template( + """ + github.com + example + galaxyproject + ${version} + github.com/galaxyproject/example/test_tool/0.${version} + 0.${version} + + """ +) +CONFIG_TEST_TOOL_VERSION_1 = CONFIG_TEST_TOOL_VERSION_TEMPLATE.safe_substitute(dict(version="1")) +CONFIG_TEST_TOOL_VERSION_2 = CONFIG_TEST_TOOL_VERSION_TEMPLATE.safe_substitute(dict(version="2")) + +REPO_TYPE = collections.namedtuple( + "REPO_TYPE", + "tool_shed owner name changeset_revision installed_changeset_revision description status", +) +DEFAULT_TEST_REPO = REPO_TYPE("github.com", "galaxyproject", "example", "1", "1", "description", "OK") + + +class SimplifiedToolBox(ToolBox): + def __init__(self, test_case: "BaseToolBoxTestCase"): + app = test_case.app + app.watchers.tool_config_watcher.reload_callback = lambda: reload_callback(test_case) + # Handle app/config stuff needed by toolbox but not by tools. 
+ app.tool_cache = ToolCache() if not hasattr(app, "tool_cache") else app.tool_cache + config_files = test_case.config_files + tool_root_dir = test_case.test_directory + super().__init__( + config_files, + tool_root_dir, + app, + ) + # Need to start thread now for new reload callback to take effect + self.app.watchers.start() + + +class BaseToolBoxTestCase(TestCase, UsesTools): + _toolbox: Optional[SimplifiedToolBox] = None + + @property + def integrated_tool_panel_path(self): + return os.path.join(self.test_directory, "integrated_tool_panel.xml") + + def assert_integerated_tool_panel(self, exists=True): + does_exist = os.path.exists(self.integrated_tool_panel_path) + if exists: + assert does_exist + else: + assert not does_exist + + @property + def toolbox(self): + if self._toolbox is None: + self.app._toolbox = self._toolbox = SimplifiedToolBox(self) + return self._toolbox + + def setUp(self): + self.reindexed = False + self.setup_app() + install_model = mapping.init("sqlite:///:memory:", create_tables=True) + self.app.tool_cache = ToolCache() + self.app.install_model = install_model + self.app.reindex_tool_search = self.__reindex # type: ignore[assignment] + itp_config = os.path.join(self.test_directory, "integrated_tool_panel.xml") + self.app.config.integrated_tool_panel_config = itp_config + self.app.watchers = ConfigWatchers(self.app) + self._toolbox = None + self.config_files = [] + + def tearDown(self): + self.app.watchers.shutdown() + + def _repo_install(self, changeset, config_filename=None): + metadata = { + "tools": [ + { + "add_to_tool_panel": False, # to have repository.includes_tools_for_display_in_tool_panel=False in InstalledRepositoryManager.activate_repository() + "guid": f"github.com/galaxyproject/example/test_tool/0.{changeset}", + "tool_config": "tool.xml", + } + ], + } + if config_filename: + metadata["shed_config_filename"] = config_filename + repository = tool_shed_install.ToolShedRepository(metadata_=metadata) + repository.tool_shed = DEFAULT_TEST_REPO.tool_shed + repository.owner = DEFAULT_TEST_REPO.owner + repository.name = DEFAULT_TEST_REPO.name + repository.changeset_revision = changeset + repository.installed_changeset_revision = changeset + repository.deleted = False + repository.uninstalled = False + self.app.install_model.context.add(repository) + session = self.app.install_model.context + with transaction(session): + session.commit() + return repository + + def _setup_two_versions(self): + self._repo_install(changeset="1") + version1 = tool_shed_install.ToolVersion() + version1.tool_id = "github.com/galaxyproject/example/test_tool/0.1" + self.app.install_model.context.add(version1) + session = self.app.install_model.context + with transaction(session): + session.commit() + + self._repo_install(changeset="2") + version2 = tool_shed_install.ToolVersion() + version2.tool_id = "github.com/galaxyproject/example/test_tool/0.2" + self.app.install_model.context.add(version2) + session = self.app.install_model.context + with transaction(session): + session.commit() + + version_association = tool_shed_install.ToolVersionAssociation() + version_association.parent_id = version1.id + version_association.tool_id = version2.id + + self.app.install_model.context.add(version_association) + session = self.app.install_model.context + with transaction(session): + session.commit() + + def _setup_two_versions_in_config(self, section=False): + if section: + template = """ +
+<toolbox tool_path="%s">
+<section id="tid" name="TestSection">
+%s
+</section>
+<section id="tid2" name="TestSection2">
+%s
+</section>
+</toolbox>
""" + else: + template = """ + %s + %s +""" + self._add_config(template % (self.test_directory, CONFIG_TEST_TOOL_VERSION_1, CONFIG_TEST_TOOL_VERSION_2)) + + def _add_config(self, content, name="tool_conf.xml"): + is_json = name.endswith(".json") + path = self._tool_conf_path(name=name) + with open(path, "w") as f: + if not is_json or isinstance(content, str): + f.write(content) + else: + json.dump(content, f) + self.config_files.append(path) + + def _init_dynamic_tool_conf(self): + # Add a dynamic tool conf (such as a ToolShed managed one) to list of configs. + self._add_config(f"""""") + + def _tool_conf_path(self, name="tool_conf.xml"): + path = os.path.join(self.test_directory, name) + return path + + def _tool_path(self, name="tool.xml"): + path = os.path.join(self.test_directory, name) + return path + + def __reindex(self): + self.reindexed = True + + +def reload_callback(test_case): + test_case.app.tool_cache.cleanup() + log.debug("Reload callback called, toolbox contains %s", test_case._toolbox._tool_versions_by_id) + test_case._toolbox = test_case.app.toolbox = SimplifiedToolBox(test_case) + log.debug("After callback toolbox contains %s", test_case._toolbox._tool_versions_by_id) diff --git a/test/unit/app/tools/test_toolbox.py b/test/unit/app/tools/test_toolbox.py index 6b6c780e6b0f..f003fb519271 100644 --- a/test/unit/app/tools/test_toolbox.py +++ b/test/unit/app/tools/test_toolbox.py @@ -1,201 +1,21 @@ -import collections -import json import logging -import os -import string import time -from typing import Optional import pytest import routes from galaxy import model -from galaxy.app_unittest_utils.tools_support import UsesTools -from galaxy.config_watchers import ConfigWatchers -from galaxy.model import tool_shed_install +from galaxy.app_unittest_utils.toolbox_support import BaseToolBoxTestCase from galaxy.model.base import transaction -from galaxy.model.tool_shed_install import mapping from galaxy.tool_util.unittest_utils import mock_trans from galaxy.tool_util.unittest_utils.sample_data import ( SIMPLE_MACRO, SIMPLE_TOOL_WITH_MACRO, ) -from galaxy.tools import ToolBox -from galaxy.tools.cache import ToolCache -from galaxy.util.unittest import TestCase log = logging.getLogger(__name__) -CONFIG_TEST_TOOL_VERSION_TEMPLATE = string.Template( - """ - github.com - example - galaxyproject - ${version} - github.com/galaxyproject/example/test_tool/0.${version} - 0.${version} - - """ -) -CONFIG_TEST_TOOL_VERSION_1 = CONFIG_TEST_TOOL_VERSION_TEMPLATE.safe_substitute(dict(version="1")) -CONFIG_TEST_TOOL_VERSION_2 = CONFIG_TEST_TOOL_VERSION_TEMPLATE.safe_substitute(dict(version="2")) - -REPO_TYPE = collections.namedtuple( - "REPO_TYPE", - "tool_shed owner name changeset_revision installed_changeset_revision description status", -) -DEFAULT_TEST_REPO = REPO_TYPE("github.com", "galaxyproject", "example", "1", "1", "description", "OK") - - -class SimplifiedToolBox(ToolBox): - def __init__(self, test_case: "BaseToolBoxTestCase"): - app = test_case.app - app.watchers.tool_config_watcher.reload_callback = lambda: reload_callback(test_case) - # Handle app/config stuff needed by toolbox but not by tools. 
- app.tool_cache = ToolCache() if not hasattr(app, "tool_cache") else app.tool_cache - config_files = test_case.config_files - tool_root_dir = test_case.test_directory - super().__init__( - config_files, - tool_root_dir, - app, - ) - # Need to start thread now for new reload callback to take effect - self.app.watchers.start() - - -class BaseToolBoxTestCase(TestCase, UsesTools): - _toolbox: Optional[SimplifiedToolBox] = None - - @property - def integrated_tool_panel_path(self): - return os.path.join(self.test_directory, "integrated_tool_panel.xml") - - def assert_integerated_tool_panel(self, exists=True): - does_exist = os.path.exists(self.integrated_tool_panel_path) - if exists: - assert does_exist - else: - assert not does_exist - - @property - def toolbox(self): - if self._toolbox is None: - self.app._toolbox = self._toolbox = SimplifiedToolBox(self) - return self._toolbox - - def setUp(self): - self.reindexed = False - self.setup_app() - install_model = mapping.init("sqlite:///:memory:", create_tables=True) - self.app.tool_cache = ToolCache() - self.app.install_model = install_model - self.app.reindex_tool_search = self.__reindex # type: ignore[assignment] - itp_config = os.path.join(self.test_directory, "integrated_tool_panel.xml") - self.app.config.integrated_tool_panel_config = itp_config - self.app.watchers = ConfigWatchers(self.app) - self._toolbox = None - self.config_files = [] - - def tearDown(self): - self.app.watchers.shutdown() - - def _repo_install(self, changeset, config_filename=None): - metadata = { - "tools": [ - { - "add_to_tool_panel": False, # to have repository.includes_tools_for_display_in_tool_panel=False in InstalledRepositoryManager.activate_repository() - "guid": f"github.com/galaxyproject/example/test_tool/0.{changeset}", - "tool_config": "tool.xml", - } - ], - } - if config_filename: - metadata["shed_config_filename"] = config_filename - repository = tool_shed_install.ToolShedRepository(metadata_=metadata) - repository.tool_shed = DEFAULT_TEST_REPO.tool_shed - repository.owner = DEFAULT_TEST_REPO.owner - repository.name = DEFAULT_TEST_REPO.name - repository.changeset_revision = changeset - repository.installed_changeset_revision = changeset - repository.deleted = False - repository.uninstalled = False - self.app.install_model.context.add(repository) - session = self.app.install_model.context - with transaction(session): - session.commit() - return repository - - def _setup_two_versions(self): - self._repo_install(changeset="1") - version1 = tool_shed_install.ToolVersion() - version1.tool_id = "github.com/galaxyproject/example/test_tool/0.1" - self.app.install_model.context.add(version1) - session = self.app.install_model.context - with transaction(session): - session.commit() - - self._repo_install(changeset="2") - version2 = tool_shed_install.ToolVersion() - version2.tool_id = "github.com/galaxyproject/example/test_tool/0.2" - self.app.install_model.context.add(version2) - session = self.app.install_model.context - with transaction(session): - session.commit() - - version_association = tool_shed_install.ToolVersionAssociation() - version_association.parent_id = version1.id - version_association.tool_id = version2.id - - self.app.install_model.context.add(version_association) - session = self.app.install_model.context - with transaction(session): - session.commit() - - def _setup_two_versions_in_config(self, section=False): - if section: - template = """ -
-<toolbox tool_path="%s">
-<section id="tid" name="TestSection">
-%s
-</section>
-<section id="tid2" name="TestSection2">
-%s
-</section>
-</toolbox>
""" - else: - template = """ - %s - %s -""" - self._add_config(template % (self.test_directory, CONFIG_TEST_TOOL_VERSION_1, CONFIG_TEST_TOOL_VERSION_2)) - - def _add_config(self, content, name="tool_conf.xml"): - is_json = name.endswith(".json") - path = self._tool_conf_path(name=name) - with open(path, "w") as f: - if not is_json or isinstance(content, str): - f.write(content) - else: - json.dump(content, f) - self.config_files.append(path) - - def _init_dynamic_tool_conf(self): - # Add a dynamic tool conf (such as a ToolShed managed one) to list of configs. - self._add_config(f"""""") - - def _tool_conf_path(self, name="tool_conf.xml"): - path = os.path.join(self.test_directory, name) - return path - - def _tool_path(self, name="tool.xml"): - path = os.path.join(self.test_directory, name) - return path - - def __reindex(self): - self.reindexed = True - - class TestToolBox(BaseToolBoxTestCase): def test_load_file(self): self._init_tool() @@ -609,18 +429,3 @@ def __verify_get_tool_for_default_lineage(self): default_tool = self.toolbox.get_tool("test_tool") assert default_tool.id == "test_tool" assert default_tool.version == "0.2" - - def __setup_shed_tool_conf(self): - self._add_config("""""") - - self.toolbox # noqa: B018 create toolbox - assert not self.reindexed - - os.remove(self.integrated_tool_panel_path) - - -def reload_callback(test_case): - test_case.app.tool_cache.cleanup() - log.debug("Reload callback called, toolbox contains %s", test_case._toolbox._tool_versions_by_id) - test_case._toolbox = test_case.app.toolbox = SimplifiedToolBox(test_case) - log.debug("After callback toolbox contains %s", test_case._toolbox._tool_versions_by_id) From 8c0141076a9721e46f98428af6cc1018644fd09a Mon Sep 17 00:00:00 2001 From: John Chilton Date: Tue, 18 Oct 2022 15:12:49 -0400 Subject: [PATCH 22/73] Make galaxy login logic in tool shed more uniform. Progress toward a separate backend. 
--- lib/tool_shed/test/base/twilltestcase.py | 6 +++++- .../test/functional/test_1000_install_basic_repository.py | 4 ++-- .../test_1010_install_repository_with_tool_dependencies.py | 4 ++-- ..._1020_install_repository_with_repository_dependencies.py | 4 ++-- ...est_1030_install_repository_with_dependency_revisions.py | 4 ++-- ...t_1040_install_repository_basic_circular_dependencies.py | 3 ++- .../functional/test_1050_circular_dependencies_4_levels.py | 3 ++- lib/tool_shed/test/functional/test_1070_invalid_tool.py | 4 ++-- .../test_1080_advanced_circular_dependency_installation.py | 4 ++-- .../functional/test_1090_repository_dependency_handling.py | 4 ++-- .../test_1100_install_updated_repository_dependencies.py | 5 ++--- ...est_1120_install_repository_with_complex_dependencies.py | 3 ++- ...install_repository_with_invalid_repository_dependency.py | 3 ++- ...est_1140_simple_repository_dependency_multiple_owners.py | 3 ++- lib/tool_shed/test/functional/test_1160_tool_help_images.py | 2 ++ .../functional/test_1170_prior_installation_required.py | 4 ++-- .../test_1180_circular_prior_installation_required.py | 4 ++-- .../test_1190_complex_prior_installation_required.py | 3 ++- .../test_1200_uninstall_and_reinstall_basic_repository.py | 4 ++-- ...uninstall_reinstall_repository_with_tool_dependencies.py | 4 ++-- ...nstall_reinstall_repository_with_dependency_revisions.py | 4 ++-- .../test/functional/test_1300_reset_all_metadata.py | 3 ++- lib/tool_shed/test/functional/test_1410_update_manager.py | 5 ++--- .../functional/test_1430_repair_installed_repository.py | 3 ++- lib/tool_shed/test/functional/test_1460_data_managers.py | 3 ++- .../functional/test_1470_updating_installed_repositories.py | 4 ++-- 26 files changed, 55 insertions(+), 42 deletions(-) diff --git a/lib/tool_shed/test/base/twilltestcase.py b/lib/tool_shed/test/base/twilltestcase.py index 92100a73fce0..34427b808e7a 100644 --- a/lib/tool_shed/test/base/twilltestcase.py +++ b/lib/tool_shed/test/base/twilltestcase.py @@ -61,6 +61,8 @@ class ShedTwillTestCase(ShedApiTestCase): """Class of FunctionalTestCase geared toward HTML interactions using the Twill library.""" + requires_galaxy: bool = False + def setUp(self): super().setUp() # Security helper @@ -75,6 +77,8 @@ def setUp(self): self.tool_data_path = os.environ.get("GALAXY_TEST_TOOL_DATA_PATH") self.shed_tool_conf = os.environ.get("GALAXY_TEST_SHED_TOOL_CONF") self.test_db_util = test_db_util + if self.requires_galaxy: + self._galaxy_login(email=common.admin_email, username=common.admin_username) def check_for_strings(self, strings_displayed=None, strings_not_displayed=None): strings_displayed = strings_displayed or [] @@ -711,7 +715,7 @@ def galaxy_token(self): token = html[(token_quote_start_index + 1) : token_quote_end_index] return token - def galaxy_login( + def _galaxy_login( self, email="test@bx.psu.edu", password="testuser", username="admin-user", redirect="", logout_first=True ): if logout_first: diff --git a/lib/tool_shed/test/functional/test_1000_install_basic_repository.py b/lib/tool_shed/test/functional/test_1000_install_basic_repository.py index 605290d8e95d..61634ee43955 100644 --- a/lib/tool_shed/test/functional/test_1000_install_basic_repository.py +++ b/lib/tool_shed/test/functional/test_1000_install_basic_repository.py @@ -10,11 +10,12 @@ class TestBasicToolShedFeatures(ShedTwillTestCase): """Test installing a basic repository.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts.""" 
self.login(email=common.test_user_1_email, username=common.test_user_1_name) self.login(email=common.admin_email, username=common.admin_username) - self.galaxy_login(email=common.admin_email, username=common.admin_username) def test_0005_ensure_repositories_and_categories_exist(self): """Create the 0000 category and upload the filtering repository to it, if necessary.""" @@ -83,7 +84,6 @@ def test_0005_ensure_repositories_and_categories_exist(self): def test_0010_browse_tool_sheds(self): """Browse the available tool sheds in this Galaxy instance.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self.browse_tool_shed( url=self.url, strings_displayed=["Test 0000 Basic Repository Features 1", "Test 0000 Basic Repository Features 2"], diff --git a/lib/tool_shed/test/functional/test_1010_install_repository_with_tool_dependencies.py b/lib/tool_shed/test/functional/test_1010_install_repository_with_tool_dependencies.py index 370d150b7048..defd12795fd5 100644 --- a/lib/tool_shed/test/functional/test_1010_install_repository_with_tool_dependencies.py +++ b/lib/tool_shed/test/functional/test_1010_install_repository_with_tool_dependencies.py @@ -16,9 +16,10 @@ class TestToolWithToolDependencies(ShedTwillTestCase): """Test installing a repository with tool dependencies.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self.login(email=common.test_user_1_email, username=common.test_user_1_name) self.login(email=common.admin_email, username=common.admin_username) @@ -111,7 +112,6 @@ def test_0005_ensure_repositories_and_categories_exist(self): def test_0010_browse_tool_shed(self): """Browse the available tool sheds in this Galaxy instance and preview the freebayes tool.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self.browse_tool_shed(url=self.url, strings_displayed=[category_name]) category = self.populator.get_category_with_name(category_name) self.browse_category(category, strings_displayed=[repository_name]) diff --git a/lib/tool_shed/test/functional/test_1020_install_repository_with_repository_dependencies.py b/lib/tool_shed/test/functional/test_1020_install_repository_with_repository_dependencies.py index a109ec0b0133..d7a6c90b69b8 100644 --- a/lib/tool_shed/test/functional/test_1020_install_repository_with_repository_dependencies.py +++ b/lib/tool_shed/test/functional/test_1020_install_repository_with_repository_dependencies.py @@ -15,11 +15,12 @@ class TestToolWithRepositoryDependencies(ShedTwillTestCase): """Test installing a repository with repository dependencies.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts.""" self.login(email=common.test_user_1_email, username=common.test_user_1_name) self.login(email=common.admin_email, username=common.admin_username) - self.galaxy_login(email=common.admin_email, username=common.admin_username) def test_0005_ensure_repositories_and_categories_exist(self): """Create the 0020 category and any missing repositories.""" @@ -81,7 +82,6 @@ def test_0005_ensure_repositories_and_categories_exist(self): def test_0010_browse_tool_shed(self): """Browse the available tool sheds in this Galaxy instance and preview the emboss tool.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self.browse_tool_shed(url=self.url, strings_displayed=["Test 0020 Basic Repository Dependencies"]) category = 
self.populator.get_category_with_name("Test 0020 Basic Repository Dependencies") self.browse_category(category, strings_displayed=[emboss_repository_name]) diff --git a/lib/tool_shed/test/functional/test_1030_install_repository_with_dependency_revisions.py b/lib/tool_shed/test/functional/test_1030_install_repository_with_dependency_revisions.py index 8c5464484b7a..9c167b919f20 100644 --- a/lib/tool_shed/test/functional/test_1030_install_repository_with_dependency_revisions.py +++ b/lib/tool_shed/test/functional/test_1030_install_repository_with_dependency_revisions.py @@ -19,11 +19,12 @@ class TestRepositoryWithDependencyRevisions(ShedTwillTestCase): """Test installing a repository with dependency revisions.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts.""" self.login(email=common.test_user_1_email, username=common.test_user_1_name) self.login(email=common.admin_email, username=common.admin_username) - self.galaxy_login(email=common.admin_email, username=common.admin_username) def test_0005_ensure_repositories_and_categories_exist(self): """Create the 0030 category and add repositories to it, if necessary.""" @@ -160,7 +161,6 @@ def test_0005_ensure_repositories_and_categories_exist(self): def test_0010_browse_tool_shed(self): """Browse the available tool sheds in this Galaxy instance and preview the emboss tool.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self.browse_tool_shed(url=self.url, strings_displayed=["Test 0030 Repository Dependency Revisions"]) category = self.populator.get_category_with_name("Test 0030 Repository Dependency Revisions") self.browse_category(category, strings_displayed=[emboss_repository_name]) diff --git a/lib/tool_shed/test/functional/test_1040_install_repository_basic_circular_dependencies.py b/lib/tool_shed/test/functional/test_1040_install_repository_basic_circular_dependencies.py index ff1b9ea7b392..8a21f93adedf 100644 --- a/lib/tool_shed/test/functional/test_1040_install_repository_basic_circular_dependencies.py +++ b/lib/tool_shed/test/functional/test_1040_install_repository_basic_circular_dependencies.py @@ -19,6 +19,8 @@ class TestInstallingCircularDependencies(ShedTwillTestCase): """Verify that the code correctly handles installing repositories with circular dependencies.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts.""" self.login(email=common.test_user_1_email, username=common.test_user_1_name) @@ -122,7 +124,6 @@ def test_0020_create_repository_dependencies(self): def test_0025_install_freebayes_repository(self): """Install freebayes with blank tool panel section, without tool dependencies but with repository dependencies.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self._install_repository( freebayes_repository_name, common.test_user_1_name, diff --git a/lib/tool_shed/test/functional/test_1050_circular_dependencies_4_levels.py b/lib/tool_shed/test/functional/test_1050_circular_dependencies_4_levels.py index 6cc8abc38a20..b03ff14f8fda 100644 --- a/lib/tool_shed/test/functional/test_1050_circular_dependencies_4_levels.py +++ b/lib/tool_shed/test/functional/test_1050_circular_dependencies_4_levels.py @@ -36,6 +36,8 @@ class TestInstallRepositoryCircularDependencies(ShedTwillTestCase): """Verify that the code correctly handles circular dependencies down to n levels.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts.""" 
self.login(email=common.test_user_1_email, username=common.test_user_1_name) @@ -325,7 +327,6 @@ def test_0050_verify_tool_dependencies(self): def test_0055_install_column_repository(self): """Install column_maker with repository dependencies.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self._install_repository( column_repository_name, common.test_user_1_name, diff --git a/lib/tool_shed/test/functional/test_1070_invalid_tool.py b/lib/tool_shed/test/functional/test_1070_invalid_tool.py index d0ce2fa0a196..9457e6026adc 100644 --- a/lib/tool_shed/test/functional/test_1070_invalid_tool.py +++ b/lib/tool_shed/test/functional/test_1070_invalid_tool.py @@ -13,9 +13,10 @@ class TestFreebayesRepository(ShedTwillTestCase): """Test repository with multiple revisions with invalid tools.""" + requires_galaxy = True + def test_0000_create_or_login_admin_user(self): """Create necessary user accounts and login as an admin user.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self.login(email=common.test_user_1_email, username=common.test_user_1_name) self.login(email=common.admin_email, username=common.admin_username) @@ -58,7 +59,6 @@ def test_0005_ensure_existence_of_repository_and_category(self): def test_0010_browse_tool_shed(self): """Browse the available tool sheds in this Galaxy instance and preview the bismark repository.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self.browse_tool_shed(url=self.url, strings_displayed=[category_name]) category = self.populator.get_category_with_name(category_name) self.browse_category(category, strings_displayed=[repository_name]) diff --git a/lib/tool_shed/test/functional/test_1080_advanced_circular_dependency_installation.py b/lib/tool_shed/test/functional/test_1080_advanced_circular_dependency_installation.py index 2db80b7569e0..71af425c3b96 100644 --- a/lib/tool_shed/test/functional/test_1080_advanced_circular_dependency_installation.py +++ b/lib/tool_shed/test/functional/test_1080_advanced_circular_dependency_installation.py @@ -24,9 +24,10 @@ class TestRepositoryDependencies(ShedTwillTestCase): """Testing uninstalling and reinstalling repository dependencies, and setting tool panel sections.""" + requires_galaxy = True + def test_0000_create_or_login_admin_user(self): """Create necessary user accounts and login as an admin user.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self.login(email=common.test_user_1_email, username=common.test_user_1_name) self.login(email=common.admin_email, username=common.admin_username) @@ -119,7 +120,6 @@ def test_0015_upload_dependency_xml_if_needed(self): def test_0020_install_convert_repository(self): """Install convert_chars without repository dependencies into convert_chars tool panel section.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self._install_repository( convert_repository_name, common.test_user_1_name, diff --git a/lib/tool_shed/test/functional/test_1090_repository_dependency_handling.py b/lib/tool_shed/test/functional/test_1090_repository_dependency_handling.py index 7ecbb727afc6..a359f3bb35bc 100644 --- a/lib/tool_shed/test/functional/test_1090_repository_dependency_handling.py +++ b/lib/tool_shed/test/functional/test_1090_repository_dependency_handling.py @@ -22,9 +22,10 @@ class TestRepositoryDependencies(ShedTwillTestCase): """Testing the behavior of repository dependencies with tool panel sections.""" + requires_galaxy = True + def 
test_0000_create_or_login_admin_user(self): """Create necessary user accounts and login as an admin user.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self.login(email=common.test_user_1_email, username=common.test_user_1_name) self.login(email=common.admin_email, username=common.admin_username) @@ -103,7 +104,6 @@ def test_0015_create_and_upload_dependency_files(self): def test_0020_install_repositories(self): """Install column_maker into column_maker tool panel section and install repository dependencies.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self._install_repository( column_repository_name, common.test_user_1_name, diff --git a/lib/tool_shed/test/functional/test_1100_install_updated_repository_dependencies.py b/lib/tool_shed/test/functional/test_1100_install_updated_repository_dependencies.py index 3451407adf4a..bed335900a21 100644 --- a/lib/tool_shed/test/functional/test_1100_install_updated_repository_dependencies.py +++ b/lib/tool_shed/test/functional/test_1100_install_updated_repository_dependencies.py @@ -18,9 +18,10 @@ class TestRepositoryDependencies(ShedTwillTestCase): """Test installing a repository, then updating it to include repository dependencies.""" + requires_galaxy = True + def test_0000_create_or_login_admin_user(self): """Create necessary user accounts and login as an admin user.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self.login(email=common.test_user_1_email, username=common.test_user_1_name) self.login(email=common.admin_email, username=common.admin_username) @@ -77,7 +78,6 @@ def test_0010_create_and_populate_convert_repository(self): def test_0015_install_and_uninstall_column_repository(self): """Install and uninstall the column_maker repository.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self._install_repository( column_repository_name, common.test_user_1_name, @@ -115,7 +115,6 @@ def test_0025_verify_repository_dependency(self): def test_0030_reinstall_column_repository(self): """Reinstall column_maker and verify it installs repository dependencies.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) strings_not_displayed = ["column_maker_1087"] self._assert_has_no_installed_repos_with_names(*strings_not_displayed) self._install_repository( diff --git a/lib/tool_shed/test/functional/test_1120_install_repository_with_complex_dependencies.py b/lib/tool_shed/test/functional/test_1120_install_repository_with_complex_dependencies.py index 50bd7449cfa9..522383132095 100644 --- a/lib/tool_shed/test/functional/test_1120_install_repository_with_complex_dependencies.py +++ b/lib/tool_shed/test/functional/test_1120_install_repository_with_complex_dependencies.py @@ -25,6 +25,8 @@ class TestInstallingComplexRepositoryDependencies(ShedTwillTestCase): """Test features related to installing repositories with complex repository dependencies.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts.""" self.login(email=common.test_user_1_email, username=common.test_user_1_name) @@ -255,7 +257,6 @@ def test_0040_update_tool_repository(self): def test_0045_install_base_repository(self): """Verify installation of the repository with complex repository dependencies.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) tool_repository = self._get_repository_by_name_and_owner(bwa_package_repository_name, common.test_user_1_name) 
preview_strings_displayed = [tool_repository.name, self.get_repository_tip(tool_repository)] self._install_repository( diff --git a/lib/tool_shed/test/functional/test_1130_install_repository_with_invalid_repository_dependency.py b/lib/tool_shed/test/functional/test_1130_install_repository_with_invalid_repository_dependency.py index aa68f5c33f09..3fefe921ce41 100644 --- a/lib/tool_shed/test/functional/test_1130_install_repository_with_invalid_repository_dependency.py +++ b/lib/tool_shed/test/functional/test_1130_install_repository_with_invalid_repository_dependency.py @@ -19,6 +19,8 @@ class TestBasicRepositoryDependencies(ShedTwillTestCase): """Testing emboss 5 with repository dependencies.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts and login as an admin user.""" self.login(email=common.test_user_1_email, username=common.test_user_1_name) @@ -168,7 +170,6 @@ def test_0040_generate_repository_dependency_with_invalid_changeset_revision(sel def test_0045_install_repository_with_invalid_repository_dependency(self): """Install the repository and verify that galaxy detects invalid repository dependencies.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) repository = self._get_repository_by_name_and_owner(emboss_repository_name, common.test_user_1_name) preview_strings_displayed = [ "emboss_0110", diff --git a/lib/tool_shed/test/functional/test_1140_simple_repository_dependency_multiple_owners.py b/lib/tool_shed/test/functional/test_1140_simple_repository_dependency_multiple_owners.py index 354528effcb5..6a437338f5ba 100644 --- a/lib/tool_shed/test/functional/test_1140_simple_repository_dependency_multiple_owners.py +++ b/lib/tool_shed/test/functional/test_1140_simple_repository_dependency_multiple_owners.py @@ -30,6 +30,8 @@ class TestInstallRepositoryMultipleOwners(ShedTwillTestCase): + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts and login as an admin user. @@ -174,7 +176,6 @@ def test_0045_install_blastxml_to_top_descr(self): We are at step 1, Galaxy side. Install blastxml_to_top_descr_0120 to Galaxy, with repository dependencies, so that the datatypes repository is also installed. 
""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self._install_repository( name="blastxml_to_top_descr_0120", owner=common.test_user_1_name, diff --git a/lib/tool_shed/test/functional/test_1160_tool_help_images.py b/lib/tool_shed/test/functional/test_1160_tool_help_images.py index 72717272d19b..632788755e43 100644 --- a/lib/tool_shed/test/functional/test_1160_tool_help_images.py +++ b/lib/tool_shed/test/functional/test_1160_tool_help_images.py @@ -23,6 +23,8 @@ class TestToolHelpImages(ShedTwillTestCase): """Test features related to tool help images.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts.""" self.login(email=common.test_user_1_email, username=common.test_user_1_name) diff --git a/lib/tool_shed/test/functional/test_1170_prior_installation_required.py b/lib/tool_shed/test/functional/test_1170_prior_installation_required.py index 486db3241632..7935a829de67 100644 --- a/lib/tool_shed/test/functional/test_1170_prior_installation_required.py +++ b/lib/tool_shed/test/functional/test_1170_prior_installation_required.py @@ -38,9 +38,10 @@ class TestSimplePriorInstallation(ShedTwillTestCase): """Test features related to datatype converters.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self.login(email=common.test_user_1_email, username=common.test_user_1_name) self.login(email=common.admin_email, username=common.admin_username) @@ -133,7 +134,6 @@ def test_0020_verify_repository_dependency(self): def test_0025_install_column_repository(self): """Install column_maker_0150.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) column_repository = self._get_repository_by_name_and_owner(column_repository_name, common.test_user_1_name) preview_strings_displayed = ["column_maker_0150", self.get_repository_tip(column_repository)] self._install_repository( diff --git a/lib/tool_shed/test/functional/test_1180_circular_prior_installation_required.py b/lib/tool_shed/test/functional/test_1180_circular_prior_installation_required.py index 621688811161..cf151ef687a5 100644 --- a/lib/tool_shed/test/functional/test_1180_circular_prior_installation_required.py +++ b/lib/tool_shed/test/functional/test_1180_circular_prior_installation_required.py @@ -51,9 +51,10 @@ class TestSimplePriorInstallation(ShedTwillTestCase): """Test features related to datatype converters.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self.login(email=common.test_user_1_email, username=common.test_user_1_name) self.login(email=common.admin_email, username=common.admin_username) @@ -214,7 +215,6 @@ def test_0025_verify_repository_dependency(self): def test_0030_install_filtering_repository(self): """Install the filtering_0160 repository.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) filter_repository = self._get_repository_by_name_and_owner(filter_repository_name, common.test_user_1_name) preview_strings_displayed = ["filtering_0160", self.get_repository_tip(filter_repository)] self._install_repository( diff --git a/lib/tool_shed/test/functional/test_1190_complex_prior_installation_required.py b/lib/tool_shed/test/functional/test_1190_complex_prior_installation_required.py index af5b08617a20..7531e600584f 100644 --- 
a/lib/tool_shed/test/functional/test_1190_complex_prior_installation_required.py +++ b/lib/tool_shed/test/functional/test_1190_complex_prior_installation_required.py @@ -36,6 +36,8 @@ class TestComplexPriorInstallation(ShedTwillTestCase): """Test features related to datatype converters.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts.""" self.login(email=common.test_user_1_email, username=common.test_user_1_name) @@ -170,7 +172,6 @@ def test_0025_install_matplotlib_repository(self): This is step 4 - Install package_matplotlib_1_2_0170 with repository dependencies. """ - self.galaxy_login(email=common.admin_email, username=common.admin_username) matplotlib_repository = self._get_repository_by_name_and_owner( matplotlib_repository_name, common.test_user_1_name ) diff --git a/lib/tool_shed/test/functional/test_1200_uninstall_and_reinstall_basic_repository.py b/lib/tool_shed/test/functional/test_1200_uninstall_and_reinstall_basic_repository.py index f896bc68ab5d..d528189f87e0 100644 --- a/lib/tool_shed/test/functional/test_1200_uninstall_and_reinstall_basic_repository.py +++ b/lib/tool_shed/test/functional/test_1200_uninstall_and_reinstall_basic_repository.py @@ -7,11 +7,12 @@ class TestUninstallingAndReinstallingRepositories(ShedTwillTestCase): """Test uninstalling and reinstalling a basic repository.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts.""" self.login(email=common.test_user_1_email, username=common.test_user_1_name) self.login(email=common.admin_email, username=common.admin_username) - self.galaxy_login(email=common.admin_email, username=common.admin_username) def test_0005_ensure_repositories_and_categories_exist(self): """Create the 0000 category and upload the filtering repository to the tool shed, if necessary.""" @@ -77,7 +78,6 @@ def test_0005_ensure_repositories_and_categories_exist(self): def test_0010_install_filtering_repository(self): """Install the filtering repository into the Galaxy instance.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self._install_repository( "filtering_0000", common.test_user_1_name, diff --git a/lib/tool_shed/test/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py b/lib/tool_shed/test/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py index f79935b79f49..6d7c798cdef0 100644 --- a/lib/tool_shed/test/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py +++ b/lib/tool_shed/test/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py @@ -9,9 +9,10 @@ class TestUninstallingAndReinstallingRepositories(ShedTwillTestCase): """Test uninstalling and reinstalling a repository with tool dependencies.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self.login(email=common.test_user_1_email, username=common.test_user_1_name) self.login(email=common.admin_email, username=common.admin_username) @@ -105,7 +106,6 @@ def test_0005_ensure_repositories_and_categories_exist(self): def test_0010_install_freebayes_repository(self): """Install the freebayes repository into the Galaxy instance.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self._install_repository( "freebayes_0010", common.test_user_1_name, diff --git 
a/lib/tool_shed/test/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py b/lib/tool_shed/test/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py index a48611c0f473..bd74f4bf3b68 100644 --- a/lib/tool_shed/test/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py +++ b/lib/tool_shed/test/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py @@ -19,9 +19,10 @@ class TestUninstallingAndReinstallingRepositories(ShedTwillTestCase): """Test uninstalling and reinstalling a repository with repository dependency revisions.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self.login(email=common.test_user_1_email, username=common.test_user_1_name) self.login(email=common.admin_email, username=common.admin_username) @@ -151,7 +152,6 @@ def test_0005_ensure_repositories_and_categories_exist(self): def test_0010_install_emboss_repository(self): """Install the emboss repository into the Galaxy instance.""" global running_standalone - self.galaxy_login(email=common.admin_email, username=common.admin_username) self._install_repository( emboss_repository_name, common.test_user_1_name, diff --git a/lib/tool_shed/test/functional/test_1300_reset_all_metadata.py b/lib/tool_shed/test/functional/test_1300_reset_all_metadata.py index 424f059c9685..26f7db2239fd 100644 --- a/lib/tool_shed/test/functional/test_1300_reset_all_metadata.py +++ b/lib/tool_shed/test/functional/test_1300_reset_all_metadata.py @@ -44,6 +44,8 @@ class TestResetInstalledRepositoryMetadata(ShedTwillTestCase): """Verify that the "Reset selected metadata" feature works.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts.""" self.login(email=common.test_user_1_email, username=common.test_user_1_name) @@ -516,7 +518,6 @@ def test_0035_create_repositories_from_0050_series(self): def test_9900_install_all_missing_repositories(self): """Call the install_repository method to ensure that all required repositories are installed.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self._install_repository("filtering_0000", common.test_user_1_name, category_0000_name) self._install_repository("freebayes_0010", common.test_user_1_name, category_0010_name) self._install_repository("emboss_0020", common.test_user_1_name, category_0020_name) diff --git a/lib/tool_shed/test/functional/test_1410_update_manager.py b/lib/tool_shed/test/functional/test_1410_update_manager.py index 98164c77daa3..bb337c7b05a9 100644 --- a/lib/tool_shed/test/functional/test_1410_update_manager.py +++ b/lib/tool_shed/test/functional/test_1410_update_manager.py @@ -26,6 +26,8 @@ class TestUpdateManager(ShedTwillTestCase): """Test the Galaxy update manager.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts and login as an admin user. @@ -34,7 +36,6 @@ def test_0000_initiate_users(self): """ self.login(email=common.test_user_1_email, username=common.test_user_1_name) self.login(email=common.admin_email, username=common.admin_username) - self.galaxy_login(email=common.admin_email, username=common.admin_username) def test_0005_create_filtering_repository(self): """Create and populate the filtering_1410 repository. 
@@ -70,7 +71,6 @@ def test_0010_install_filtering_repository(self): We are at step 2 - Install filtering_1410 to Galaxy. Install the filtering repository to Galaxy. """ - self.galaxy_login(email=common.admin_email, username=common.admin_username) self._install_repository( repository_name, common.test_user_1_name, category_name, new_tool_panel_section_label="test_1410" ) @@ -111,7 +111,6 @@ def test_0020_check_for_displayed_update(self): """ # Wait 3 seconds, just to be sure we're past hours_between_check. time.sleep(3) - self.galaxy_login(email=common.admin_email, username=common.admin_username) installed_repository = self.test_db_util.get_installed_repository_by_name_owner( repository_name, common.test_user_1_name ) diff --git a/lib/tool_shed/test/functional/test_1430_repair_installed_repository.py b/lib/tool_shed/test/functional/test_1430_repair_installed_repository.py index e4bf404185e6..47f699200129 100644 --- a/lib/tool_shed/test/functional/test_1430_repair_installed_repository.py +++ b/lib/tool_shed/test/functional/test_1430_repair_installed_repository.py @@ -37,6 +37,8 @@ class TestRepairRepository(ShedTwillTestCase): """Test repairing an installed repository.""" + requires_galaxy = True + def test_0000_initiate_users_and_category(self): """Create necessary user accounts and login as an admin user.""" self.login(email=common.admin_email, username=common.admin_username) @@ -123,7 +125,6 @@ def test_0020_install_column_repository(self): handle repository dependencies so that the filter_1430 repository is also installed. Make sure to install the repositories in a specified section of the tool panel. """ - self.galaxy_login(email=common.admin_email, username=common.admin_username) self._install_repository( "column_1430", common.test_user_1_name, diff --git a/lib/tool_shed/test/functional/test_1460_data_managers.py b/lib/tool_shed/test/functional/test_1460_data_managers.py index ec514898e7af..09fb0f984228 100644 --- a/lib/tool_shed/test/functional/test_1460_data_managers.py +++ b/lib/tool_shed/test/functional/test_1460_data_managers.py @@ -30,6 +30,8 @@ class TestDataManagers(ShedTwillTestCase): """Test installing a repository containing a Data Manager.""" + requires_galaxy = True + def test_0000_initiate_users_and_category(self): """Create necessary user accounts and login as an admin user.""" self.login(email=common.admin_email, username=common.admin_username) @@ -71,7 +73,6 @@ def test_0020_install_data_manager_repository(self): This is step 3 - Attempt to install the repository into a galaxy instance, verify that it is installed. 
""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self._install_repository( data_manager_repository_name, common.test_user_1_name, diff --git a/lib/tool_shed/test/functional/test_1470_updating_installed_repositories.py b/lib/tool_shed/test/functional/test_1470_updating_installed_repositories.py index 6a868e4a2654..5c984fa4885c 100644 --- a/lib/tool_shed/test/functional/test_1470_updating_installed_repositories.py +++ b/lib/tool_shed/test/functional/test_1470_updating_installed_repositories.py @@ -29,6 +29,8 @@ class TestUpdateInstalledRepository(ShedTwillTestCase): """Verify that the code correctly handles updating an installed repository, then uninstalling and reinstalling.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts.""" self.login(email=common.test_user_1_email, username=common.test_user_1_name) @@ -63,7 +65,6 @@ def test_0010_install_filtering_to_galaxy(self): This is step 1 - Install a repository into Galaxy. """ - self.galaxy_login(email=common.admin_email, username=common.admin_username) self._install_repository( repository_name, common.test_user_1_name, @@ -101,7 +102,6 @@ def test_0020_get_repository_updates(self): This is step 3 - In Galaxy, get updates to the repository. """ - self.galaxy_login(email=common.admin_email, username=common.admin_username) installed_repository = self.test_db_util.get_installed_repository_by_name_owner( repository_name, common.test_user_1_name ) From 691ba8d0242a2f9342a7e618c7b0b41473bebaac Mon Sep 17 00:00:00 2001 From: John Chilton Date: Wed, 21 Dec 2022 09:48:31 -0500 Subject: [PATCH 23/73] Improved error message for repository_util. --- lib/galaxy/tool_shed/util/repository_util.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lib/galaxy/tool_shed/util/repository_util.py b/lib/galaxy/tool_shed/util/repository_util.py index 674048937359..d13e1b3d2740 100644 --- a/lib/galaxy/tool_shed/util/repository_util.py +++ b/lib/galaxy/tool_shed/util/repository_util.py @@ -229,6 +229,8 @@ def generate_tool_shed_repository_install_dir(repository_clone_url, changeset_re # Now tmp_url is something like: bx.psu.edu:9009/repos/some_username/column items = tmp_url.split("/repos/") tool_shed_url = items[0] + if len(items) == 1: + raise Exception(f"Processing an invalid tool shed clone URL {repository_clone_url} - tmp_url {tmp_url}") repo_path = items[1] tool_shed_url = common_util.remove_port_from_tool_shed_url(tool_shed_url) return "/".join((tool_shed_url, "repos", repo_path, changeset_revision)) From 7cc01d3d95e894f892b2ff79bad42be188b95b50 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Tue, 18 Oct 2022 15:25:20 -0400 Subject: [PATCH 24/73] Setup abstraction in twilltestcase to allow for separate installation target backend. 
--- lib/tool_shed/test/base/twilltestcase.py | 565 +++++++++++------- ...ll_repository_with_dependency_revisions.py | 2 +- ...est_1050_circular_dependencies_4_levels.py | 2 +- .../test/functional/test_1070_invalid_tool.py | 2 +- ...vanced_circular_dependency_installation.py | 2 +- ...est_1090_repository_dependency_handling.py | 8 +- ...install_updated_repository_dependencies.py | 2 +- ...ninstall_and_reinstall_basic_repository.py | 2 +- ...stall_repository_with_tool_dependencies.py | 2 +- ...ll_repository_with_dependency_revisions.py | 2 +- .../functional/test_1410_update_manager.py | 2 +- .../test_1430_repair_installed_repository.py | 2 +- ...st_1470_updating_installed_repositories.py | 4 +- 13 files changed, 351 insertions(+), 246 deletions(-) diff --git a/lib/tool_shed/test/base/twilltestcase.py b/lib/tool_shed/test/base/twilltestcase.py index 34427b808e7a..a7017e2a2719 100644 --- a/lib/tool_shed/test/base/twilltestcase.py +++ b/lib/tool_shed/test/base/twilltestcase.py @@ -7,6 +7,8 @@ import time from json import loads from typing import ( + Any, + Dict, List, Optional, ) @@ -58,6 +60,301 @@ tc.options["equiv_refresh_interval"] = 0 +class ToolShedInstallationClient: + def check_galaxy_repository_tool_panel_section( + self, repository: galaxy_model.ToolShedRepository, expected_tool_panel_section: str + ) -> None: + ... + + def setup(self) -> None: + ... + + def deactivate_repository(self, installed_repository: galaxy_model.ToolShedRepository) -> None: + ... + + def display_installed_jobs_list_page( + self, installed_repository: galaxy_model.ToolShedRepository, data_manager_names=None, strings_displayed=None + ) -> None: + ... + + def installed_repository_extended_info( + self, installed_repository: galaxy_model.ToolShedRepository + ) -> Dict[str, Any]: + ... + + def install_repository( + self, + name: str, + owner: str, + changeset_revision: str, + install_tool_dependencies: bool, + install_repository_dependencies: bool, + new_tool_panel_section_label: Optional[str], + ) -> None: + ... + + def reactivate_repository(self, installed_repository: galaxy_model.ToolShedRepository) -> None: + ... + + def reset_metadata_on_selected_installed_repositories(self, repository_ids: List[str]) -> None: + ... + + def reset_installed_repository_metadata(self, repository: galaxy_model.ToolShedRepository) -> None: + ... + + def uninstall_repository(self, installed_repository: galaxy_model.ToolShedRepository) -> None: + ... + + def update_installed_repository( + self, installed_repository: galaxy_model.ToolShedRepository, verify_no_updates: bool = False + ) -> Dict[str, Any]: + ... + + def get_tool_names(self) -> List[str]: + ... + + +class GalaxyInteractorToolShedInstallationClient(ToolShedInstallationClient): + """A Galaxy API + Database as a installation target for the tool shed.""" + + def __init__(self, testcase): + self.testcase = testcase + + def setup(self): + self._galaxy_login() + + def check_galaxy_repository_tool_panel_section( + self, repository: galaxy_model.ToolShedRepository, expected_tool_panel_section: str + ) -> None: + metadata = repository.metadata_ + assert "tools" in metadata, f"Tools not found in repository metadata: {metadata}" + # If integrated_tool_panel.xml is to be tested, this test method will need to be enhanced to handle tools + # from the same repository in different tool panel sections. Getting the first tool guid is ok, because + # currently all tools contained in a single repository will be loaded into the same tool panel section. 
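+        # An uninstalled or deactivated repository is no longer loaded into the
+        # toolbox, so its panel section has to be read from the stored repository
+        # metadata rather than from the live tools API.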
+ if repository.status in [ + galaxy_model.ToolShedRepository.installation_status.UNINSTALLED, + galaxy_model.ToolShedRepository.installation_status.DEACTIVATED, + ]: + tool_panel_section = self._get_tool_panel_section_from_repository_metadata(metadata) + else: + tool_panel_section = self._get_tool_panel_section_from_api(metadata) + assert ( + tool_panel_section == expected_tool_panel_section + ), f"Expected to find tool panel section *{expected_tool_panel_section}*, but instead found *{tool_panel_section}*\nMetadata: {metadata}\n" + + def deactivate_repository(self, installed_repository: galaxy_model.ToolShedRepository) -> None: + encoded_id = self.testcase.security.encode_id(installed_repository.id) + api_key = get_admin_api_key() + response = requests.delete( + f"{self.testcase.galaxy_url}/api/tool_shed_repositories/{encoded_id}", + data={"remove_from_disk": False, "key": api_key}, + timeout=DEFAULT_SOCKET_TIMEOUT, + ) + assert response.status_code != 403, response.content + + def display_installed_jobs_list_page( + self, installed_repository: galaxy_model.ToolShedRepository, data_manager_names=None, strings_displayed=None + ) -> None: + data_managers = installed_repository.metadata_.get("data_manager", {}).get("data_managers", {}) + if data_manager_names: + if not isinstance(data_manager_names, list): + data_manager_names = [data_manager_names] + for data_manager_name in data_manager_names: + assert ( + data_manager_name in data_managers + ), f"The requested Data Manager '{data_manager_name}' was not found in repository metadata." + else: + data_manager_name = list(data_managers.keys()) + for data_manager_name in data_manager_names: + params = {"id": data_managers[data_manager_name]["guid"]} + self._visit_galaxy_url("/data_manager/jobs_list", params=params) + self.testcase.check_for_strings(strings_displayed) + + def installed_repository_extended_info( + self, installed_repository: galaxy_model.ToolShedRepository + ) -> Dict[str, Any]: + params = {"id": self.testcase.security.encode_id(installed_repository.id)} + self._visit_galaxy_url("/admin_toolshed/manage_repository_json", params=params) + return loads(self.testcase.last_page()) + + def install_repository( + self, + name: str, + owner: str, + changeset_revision: str, + install_tool_dependencies: bool, + install_repository_dependencies: bool, + new_tool_panel_section_label: Optional[str], + ): + payload = { + "tool_shed_url": self.testcase.url, + "name": name, + "owner": owner, + "changeset_revision": changeset_revision, + "install_tool_dependencies": install_tool_dependencies, + "install_repository_dependencies": install_repository_dependencies, + "install_resolver_dependencies": False, + } + if new_tool_panel_section_label: + payload["new_tool_panel_section_label"] = new_tool_panel_section_label + create_response = self.testcase.galaxy_interactor._post( + "tool_shed_repositories/new/install_repository_revision", data=payload, admin=True + ) + assert_status_code_is_ok(create_response) + create_response_object = create_response.json() + if isinstance(create_response_object, dict): + assert "status" in create_response_object + assert "ok" == create_response_object["status"] # repo already installed... 
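+            # A dict response means this revision was already installed; there is
+            # nothing to poll for, so return before waiting on installation.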
+ return + assert isinstance(create_response_object, list) + repository_ids = [repo["id"] for repo in create_response.json()] + log.debug(f"Waiting for the installation of repository IDs: {repository_ids}") + self._wait_for_repository_installation(repository_ids) + + def reactivate_repository(self, installed_repository: galaxy_model.ToolShedRepository) -> None: + params = dict(id=self.testcase.security.encode_id(installed_repository.id)) + url = "/admin_toolshed/restore_repository" + self._visit_galaxy_url(url, params=params) + + def reset_metadata_on_selected_installed_repositories(self, repository_ids: List[str]) -> None: + api_key = get_admin_api_key() + response = requests.post( + f"{self.testcase.galaxy_url}/api/tool_shed_repositories/reset_metadata_on_selected_installed_repositories", + data={"repository_ids": repository_ids, "key": api_key}, + timeout=DEFAULT_SOCKET_TIMEOUT, + ) + assert response.status_code != 403, response.content + + def uninstall_repository(self, installed_repository: galaxy_model.ToolShedRepository) -> None: + encoded_id = self.testcase.security.encode_id(installed_repository.id) + api_key = get_admin_api_key() + response = requests.delete( + f"{self.testcase.galaxy_url}/api/tool_shed_repositories/{encoded_id}", + data={"remove_from_disk": True, "key": api_key}, + timeout=DEFAULT_SOCKET_TIMEOUT, + ) + assert response.status_code != 403, response.content + + def update_installed_repository( + self, installed_repository: galaxy_model.ToolShedRepository, verify_no_updates: bool = False + ) -> Dict[str, Any]: + repository_id = self.testcase.security.encode_id(installed_repository.id) + params = { + "id": repository_id, + } + api_key = get_admin_api_key() + response = requests.get( + f"{self.testcase.galaxy_url}/api/tool_shed_repositories/check_for_updates?key={api_key}", + params=params, + timeout=DEFAULT_SOCKET_TIMEOUT, + ) + response.raise_for_status() + response_dict = response.json() + if verify_no_updates: + assert "message" in response_dict + message = response_dict["message"] + assert "The status has not changed in the tool shed for repository" in message, str(response_dict) + return response_dict + + def reset_installed_repository_metadata(self, repository: galaxy_model.ToolShedRepository) -> None: + encoded_id = self.testcase.security.encode_id(repository.id) + api_key = get_admin_api_key() + response = requests.post( + f"{self.testcase.galaxy_url}/api/tool_shed_repositories/reset_metadata_on_selected_installed_repositories", + data={"repository_ids": [encoded_id], "key": api_key}, + timeout=DEFAULT_SOCKET_TIMEOUT, + ) + assert response.status_code != 403, response.content + + def get_tool_names(self) -> List[str]: + response = self.testcase.galaxy_interactor._get("tools?in_panel=false") + response.raise_for_status() + tool_list = response.json() + return [t["name"] for t in tool_list] + + def _galaxy_login(self, email="test@bx.psu.edu", password="testuser", username="admin-user"): + self._galaxy_logout() + self._create_user_in_galaxy(email=email, password=password, username=username) + params = {"login": email, "password": password, "session_csrf_token": self._galaxy_token()} + self._visit_galaxy_url("/user/login", params=params) + + def _galaxy_logout(self): + self._visit_galaxy_url("/user/logout", params=dict(session_csrf_token=self._galaxy_token())) + + def _create_user_in_galaxy(self, email="test@bx.psu.edu", password="testuser", username="admin-user"): + params = { + "username": username, + "email": email, + "password": password, + "confirm": 
password, + "session_csrf_token": self._galaxy_token(), + } + self._visit_galaxy_url("/user/create", params=params, allowed_codes=[200, 400]) + + def _galaxy_token(self): + self._visit_galaxy_url("/") + html = self.testcase.last_page() + token_def_index = html.find("session_csrf_token") + token_sep_index = html.find(":", token_def_index) + token_quote_start_index = html.find('"', token_sep_index) + token_quote_end_index = html.find('"', token_quote_start_index + 1) + token = html[(token_quote_start_index + 1) : token_quote_end_index] + return token + + def _get_tool_panel_section_from_api(self, metadata): + tool_metadata = metadata["tools"] + tool_guid = quote_plus(tool_metadata[0]["guid"], safe="") + api_url = f"/api/tools/{tool_guid}" + self._visit_galaxy_url(api_url) + tool_dict = loads(self.testcase.last_page()) + tool_panel_section = tool_dict["panel_section_name"] + return tool_panel_section + + def _get_tool_panel_section_from_repository_metadata(self, metadata): + tool_metadata = metadata["tools"] + tool_guid = tool_metadata[0]["guid"] + assert "tool_panel_section" in metadata, f"Tool panel section not found in metadata: {metadata}" + tool_panel_section_metadata = metadata["tool_panel_section"] + # tool_section_dict = dict( tool_config=guids_and_configs[ guid ], + # id=section_id, + # name=section_name, + # version=section_version ) + # This dict is appended to tool_panel_section_metadata[ tool_guid ] + tool_panel_section = tool_panel_section_metadata[tool_guid][0]["name"] + return tool_panel_section + + def _wait_for_repository_installation(self, repository_ids): + final_states = [ + galaxy_model.ToolShedRepository.installation_status.ERROR, + galaxy_model.ToolShedRepository.installation_status.INSTALLED, + ] + # Wait until all repositories are in a final state before returning. This ensures that subsequent tests + # are running against an installed repository, and not one that is still in the process of installing. + if repository_ids: + for repository_id in repository_ids: + galaxy_repository = test_db_util.get_installed_repository_by_id( + self.testcase.security.decode_id(repository_id) + ) + timeout_counter = 0 + while galaxy_repository.status not in final_states: + test_db_util.ga_refresh(galaxy_repository) + timeout_counter = timeout_counter + 1 + # This timeout currently defaults to 10 minutes. + if timeout_counter > repository_installation_timeout: + raise AssertionError( + "Repository installation timed out, %d seconds elapsed, repository state is %s." 
+ % (timeout_counter, galaxy_repository.status) + ) + break + time.sleep(1) + + def _visit_galaxy_url(self, url, params=None, doseq=False, allowed_codes=None): + if allowed_codes is None: + allowed_codes = [200] + url = f"{self.testcase.galaxy_url}{url}" + self.testcase.visit_url(url, params=params, doseq=doseq, allowed_codes=allowed_codes) + + class ShedTwillTestCase(ShedApiTestCase): """Class of FunctionalTestCase geared toward HTML interactions using the Twill library.""" @@ -77,8 +374,9 @@ def setUp(self): self.tool_data_path = os.environ.get("GALAXY_TEST_TOOL_DATA_PATH") self.shed_tool_conf = os.environ.get("GALAXY_TEST_SHED_TOOL_CONF") self.test_db_util = test_db_util + self._installation_client = GalaxyInteractorToolShedInstallationClient(self) if self.requires_galaxy: - self._galaxy_login(email=common.admin_email, username=common.admin_username) + self._installation_client.setup() def check_for_strings(self, strings_displayed=None, strings_not_displayed=None): strings_displayed = strings_displayed or [] @@ -314,23 +612,6 @@ def check_galaxy_repository_db_status(self, repository_name, owner, expected_sta installed_repository.status == expected_status ), f"Status in database is {installed_repository.status}, expected {expected_status}" - def check_galaxy_repository_tool_panel_section(self, repository, expected_tool_panel_section): - metadata = repository.metadata_ - assert "tools" in metadata, f"Tools not found in repository metadata: {metadata}" - # If integrated_tool_panel.xml is to be tested, this test method will need to be enhanced to handle tools - # from the same repository in different tool panel sections. Getting the first tool guid is ok, because - # currently all tools contained in a single repository will be loaded into the same tool panel section. 
- if repository.status in [ - galaxy_model.ToolShedRepository.installation_status.UNINSTALLED, - galaxy_model.ToolShedRepository.installation_status.DEACTIVATED, - ]: - tool_panel_section = self.get_tool_panel_section_from_repository_metadata(metadata) - else: - tool_panel_section = self.get_tool_panel_section_from_api(metadata) - assert ( - tool_panel_section == expected_tool_panel_section - ), f"Expected to find tool panel section *{expected_tool_panel_section}*, but instead found *{tool_panel_section}*\nMetadata: {metadata}\n" - def check_repository_changelog(self, repository: Repository, strings_displayed=None, strings_not_displayed=None): params = {"id": repository.id} self.visit_url("/repository/view_changelog", params=params) @@ -433,6 +714,11 @@ def check_string_count_in_page(self, pattern, min_count, max_count=None): ) raise AssertionError(errmsg) + def check_galaxy_repository_tool_panel_section( + self, repository: galaxy_model.ToolShedRepository, expected_tool_panel_section: str + ) -> None: + self._installation_client.check_galaxy_repository_tool_panel_section(repository, expected_tool_panel_section) + def clone_repository(self, repository: Repository, destination_path: str) -> None: url = f"{self.url}/repos/{repository.owner}/{repository.name}" success, message = hg_util.clone_repository(url, destination_path, self.get_repository_tip(repository)) @@ -511,27 +797,8 @@ def create_repository_dependency( strings_not_displayed=None, ) - def create_user_in_galaxy( - self, cntrller="user", email="test@bx.psu.edu", password="testuser", username="admin-user", redirect="" - ): - params = { - "username": username, - "email": email, - "password": password, - "confirm": password, - "session_csrf_token": self.galaxy_token(), - } - self.visit_galaxy_url("/user/create", params=params, allowed_codes=[200, 400]) - - def deactivate_repository(self, installed_repository, strings_displayed=None, strings_not_displayed=None): - encoded_id = self.security.encode_id(installed_repository.id) - api_key = get_admin_api_key() - response = requests.delete( - f"{self.galaxy_url}/api/tool_shed_repositories/{encoded_id}", - data={"remove_from_disk": False, "key": api_key}, - timeout=DEFAULT_SOCKET_TIMEOUT, - ) - assert response.status_code != 403, response.content + def deactivate_repository(self, installed_repository: galaxy_model.ToolShedRepository) -> None: + self._installation_client.deactivate_repository(installed_repository) def delete_files_from_repository(self, repository: Repository, filenames: List[str]): temp_directory = tempfile.mkdtemp(prefix="toolshedrepowithoutfiles") @@ -569,27 +836,12 @@ def delete_repository(self, repository: Repository) -> None: self.check_for_strings(strings_displayed, strings_not_displayed) def display_installed_jobs_list_page(self, installed_repository, data_manager_names=None, strings_displayed=None): - data_managers = installed_repository.metadata_.get("data_manager", {}).get("data_managers", {}) - if data_manager_names: - if not isinstance(data_manager_names, list): - data_manager_names = [data_manager_names] - for data_manager_name in data_manager_names: - assert ( - data_manager_name in data_managers - ), f"The requested Data Manager '{data_manager_name}' was not found in repository metadata." 
- else: - data_manager_name = list(data_managers.keys()) - for data_manager_name in data_manager_names: - params = {"id": data_managers[data_manager_name]["guid"]} - self.visit_galaxy_url("/data_manager/jobs_list", params=params) - self.check_for_strings(strings_displayed) + self._installation_client.display_installed_jobs_list_page( + installed_repository, data_manager_names, strings_displayed + ) def display_installed_repository_manage_json(self, installed_repository): - params = {"id": self.security.encode_id(installed_repository.id)} - self.visit_galaxy_url("/admin_toolshed/manage_repository_json", params=params) - import json - - return json.loads(self.last_page()) + return self._installation_client.installed_repository_extended_info(installed_repository) def display_manage_repository_page( self, repository: Repository, changeset_revision=None, strings_displayed=None, strings_not_displayed=None @@ -705,28 +957,6 @@ def fetch_repository_metadata(self, repository: Repository, strings_displayed=No self.visit_url(url) self.check_for_strings(strings_displayed, strings_not_displayed) - def galaxy_token(self): - self.visit_galaxy_url("/") - html = self.last_page() - token_def_index = html.find("session_csrf_token") - token_sep_index = html.find(":", token_def_index) - token_quote_start_index = html.find('"', token_sep_index) - token_quote_end_index = html.find('"', token_quote_start_index + 1) - token = html[(token_quote_start_index + 1) : token_quote_end_index] - return token - - def _galaxy_login( - self, email="test@bx.psu.edu", password="testuser", username="admin-user", redirect="", logout_first=True - ): - if logout_first: - self.galaxy_logout() - self.create_user_in_galaxy(email=email, password=password, username=username, redirect=redirect) - params = {"login": email, "password": password, "session_csrf_token": self.galaxy_token()} - self.visit_galaxy_url("/user/login", params=params) - - def galaxy_logout(self): - self.visit_galaxy_url("/user/logout", params=dict(session_csrf_token=self.galaxy_token())) - def generate_complex_dependency_xml(self, filename, filepath, repository_tuples, package, version): file_path = os.path.join(filepath, filename) dependency_entries = [] @@ -946,28 +1176,6 @@ def get_tools_from_repository_metadata(self, repository, include_invalid=False): ) return valid_tools, invalid_tools - def get_tool_panel_section_from_api(self, metadata): - tool_metadata = metadata["tools"] - tool_guid = quote_plus(tool_metadata[0]["guid"], safe="") - api_url = f"/api/tools/{tool_guid}" - self.visit_galaxy_url(api_url) - tool_dict = loads(self.last_page()) - tool_panel_section = tool_dict["panel_section_name"] - return tool_panel_section - - def get_tool_panel_section_from_repository_metadata(self, metadata): - tool_metadata = metadata["tools"] - tool_guid = tool_metadata[0]["guid"] - assert "tool_panel_section" in metadata, f"Tool panel section not found in metadata: {metadata}" - tool_panel_section_metadata = metadata["tool_panel_section"] - # tool_section_dict = dict( tool_config=guids_and_configs[ guid ], - # id=section_id, - # name=section_name, - # version=section_version ) - # This dict is appended to tool_panel_section_metadata[ tool_guid ] - tool_panel_section = tool_panel_section_metadata[tool_guid][0]["name"] - return tool_panel_section - def grant_role_to_user(self, user, role): strings_displayed = [self.security.encode_id(role.id), role.name] strings_not_displayed = [] @@ -1029,30 +1237,14 @@ def _install_repository( # repository_id = repository.id if 
changeset_revision is None: changeset_revision = self.get_repository_tip(repository) - payload = { - "tool_shed_url": self.url, - "name": name, - "owner": owner, - "changeset_revision": changeset_revision, - "install_tool_dependencies": install_tool_dependencies, - "install_repository_dependencies": install_repository_dependencies, - "install_resolver_dependencies": False, - } - if new_tool_panel_section_label: - payload["new_tool_panel_section_label"] = new_tool_panel_section_label - create_response = self.galaxy_interactor._post( - "tool_shed_repositories/new/install_repository_revision", data=payload, admin=True + self._installation_client.install_repository( + name, + owner, + changeset_revision, + install_tool_dependencies, + install_repository_dependencies, + new_tool_panel_section_label, ) - assert_status_code_is_ok(create_response) - create_response_object = create_response.json() - if isinstance(create_response_object, dict): - assert "status" in create_response_object - assert "ok" == create_response_object["status"] # repo already installed... - return - assert isinstance(create_response_object, list) - repository_ids = [repo["id"] for repo in create_response.json()] - log.debug(f"Waiting for the installation of repository IDs: {repository_ids}") - self.wait_for_repository_installation(repository_ids) def load_citable_url( self, @@ -1154,9 +1346,7 @@ def preview_repository_in_tool_shed( self.check_for_strings(strings_displayed, strings_not_displayed) def reactivate_repository(self, installed_repository): - params = dict(id=self.security.encode_id(installed_repository.id)) - url = "/admin_toolshed/restore_repository" - self.visit_galaxy_url(url, params=params) + self._installation_client.reactivate_repository(installed_repository) def reinstall_repository_api( self, @@ -1167,59 +1357,27 @@ def reinstall_repository_api( ): name = installed_repository.name owner = installed_repository.owner - payload = { - "tool_shed_url": self.url, # wish this used tool_shed. - "name": name, - "owner": owner, - "changeset_revision": installed_repository.installed_changeset_revision, - "install_tool_dependencies": install_tool_dependencies, - "install_repository_dependencies": install_repository_dependencies, - "install_resolver_dependencies": False, - } - if new_tool_panel_section_label: - payload["new_tool_panel_section_label"] = new_tool_panel_section_label - create_response = self.galaxy_interactor._post( - "tool_shed_repositories/new/install_repository_revision", data=payload, admin=True + self._installation_client.install_repository( + name, + owner, + installed_repository.installed_changeset_revision, + install_tool_dependencies, + install_repository_dependencies, + new_tool_panel_section_label, ) - assert_status_code_is_ok(create_response) - create_response_object = create_response.json() - if isinstance(create_response_object, dict): - assert "status" in create_response_object - assert "ok" == create_response_object["status"] # repo already installed... 
- return - assert isinstance(create_response_object, list) - repository_ids = [repo["id"] for repo in create_response.json()] - log.debug(f"Waiting for the installation of repository IDs: {repository_ids}") - self.wait_for_repository_installation(repository_ids) def repository_is_new(self, repository: Repository) -> bool: repo = self.get_hg_repo(self.get_repo_path(repository)) tip_ctx = repo[repo.changelog.tip()] return tip_ctx.rev() < 0 - def reset_installed_repository_metadata(self, repository): - encoded_id = self.security.encode_id(repository.id) - api_key = get_admin_api_key() - response = requests.post( - f"{self.galaxy_url}/api/tool_shed_repositories/reset_metadata_on_selected_installed_repositories", - data={"repository_ids": [encoded_id], "key": api_key}, - timeout=DEFAULT_SOCKET_TIMEOUT, - ) - assert response.status_code != 403, response.content - def reset_metadata_on_selected_repositories(self, repository_ids): self.visit_url("/admin/reset_metadata_on_selected_repositories_in_tool_shed") kwd = dict(repository_ids=repository_ids) self.submit_form(button="reset_metadata_on_selected_repositories_button", **kwd) def reset_metadata_on_selected_installed_repositories(self, repository_ids): - api_key = get_admin_api_key() - response = requests.post( - f"{self.galaxy_url}/api/tool_shed_repositories/reset_metadata_on_selected_installed_repositories", - data={"repository_ids": repository_ids, "key": api_key}, - timeout=DEFAULT_SOCKET_TIMEOUT, - ) - assert response.status_code != 403, response.content + self._installation_client.reset_metadata_on_selected_installed_repositories(repository_ids) def reset_repository_metadata(self, repository): params = {"id": repository.id} @@ -1309,34 +1467,13 @@ def undelete_repository(self, repository: Repository) -> None: strings_not_displayed: List[str] = [] self.check_for_strings(strings_displayed, strings_not_displayed) - def uninstall_repository(self, installed_repository: galaxy_model.ToolShedRepository) -> None: - encoded_id = self.security.encode_id(installed_repository.id) - api_key = get_admin_api_key() - response = requests.delete( - f"{self.galaxy_url}/api/tool_shed_repositories/{encoded_id}", - data={"remove_from_disk": True, "key": api_key}, - timeout=DEFAULT_SOCKET_TIMEOUT, - ) - assert response.status_code != 403, response.content + def _uninstall_repository(self, installed_repository: galaxy_model.ToolShedRepository) -> None: + self._installation_client.uninstall_repository(installed_repository) - def update_installed_repository_api(self, installed_repository, verify_no_updates=False): - repository_id = self.security.encode_id(installed_repository.id) - params = { - "id": repository_id, - } - api_key = get_admin_api_key() - response = requests.get( - f"{self.galaxy_url}/api/tool_shed_repositories/check_for_updates?key={api_key}", - params=params, - timeout=DEFAULT_SOCKET_TIMEOUT, - ) - response.raise_for_status() - response_dict = response.json() - if verify_no_updates: - assert "message" in response_dict - message = response_dict["message"] - assert "The status has not changed in the tool shed for repository" in message, str(response_dict) - return response_dict + def update_installed_repository( + self, installed_repository: galaxy_model.ToolShedRepository, verify_no_updates: bool = False + ) -> Dict[str, Any]: + return self._installation_client.update_installed_repository(installed_repository, verify_no_updates=False) def upload_file( self, @@ -1442,7 +1579,7 @@ def verify_installed_repositories(self, installed_repositories=None, 
uninstalled def verify_installed_repository_metadata_unchanged(self, name, owner): installed_repository = test_db_util.get_installed_repository_by_name_owner(name, owner) metadata = installed_repository.metadata_ - self.reset_installed_repository_metadata(installed_repository) + self._installation_client.reset_installed_repository_metadata(installed_repository) new_metadata = installed_repository.metadata_ assert metadata == new_metadata, f"Metadata for installed repository {name} differs after metadata reset." @@ -1581,11 +1718,8 @@ def _assert_is_not_missing_dependency( def _assert_has_valid_tool_with_name(self, tool_name: str) -> None: def assert_has(): - response = self.galaxy_interactor._get("tools?in_panel=false") - response.raise_for_status() - tool_list = response.json() - tool_list = [t for t in tool_list if t["name"] == tool_name] - assert tool_list + tool_names = self._installation_client.get_tool_names() + assert tool_name in tool_names # May need to wait on toolbox reload. wait_on_assertion(assert_has, f"toolbox to contain {tool_name}", 10) @@ -1627,32 +1761,3 @@ def verify_unchanged_repository_metadata(self, repository: Repository): # Python's dict comparison recursively compares sorted key => value pairs and returns true if any key or value differs, # or if the number of keys differs. assert old_metadata == new_metadata, f"Metadata changed after reset on repository {repository.name}." - - def visit_galaxy_url(self, url, params=None, doseq=False, allowed_codes=None): - if allowed_codes is None: - allowed_codes = [200] - url = f"{self.galaxy_url}{url}" - self.visit_url(url, params=params, doseq=doseq, allowed_codes=allowed_codes) - - def wait_for_repository_installation(self, repository_ids): - final_states = [ - galaxy_model.ToolShedRepository.installation_status.ERROR, - galaxy_model.ToolShedRepository.installation_status.INSTALLED, - ] - # Wait until all repositories are in a final state before returning. This ensures that subsequent tests - # are running against an installed repository, and not one that is still in the process of installing. - if repository_ids: - for repository_id in repository_ids: - galaxy_repository = test_db_util.get_installed_repository_by_id(self.security.decode_id(repository_id)) - timeout_counter = 0 - while galaxy_repository.status not in final_states: - test_db_util.ga_refresh(galaxy_repository) - timeout_counter = timeout_counter + 1 - # This timeout currently defaults to 10 minutes. - if timeout_counter > repository_installation_timeout: - raise AssertionError( - "Repository installation timed out, %d seconds elapsed, repository state is %s." 
- % (timeout_counter, galaxy_repository.status) - ) - break - time.sleep(1) diff --git a/lib/tool_shed/test/functional/test_1030_install_repository_with_dependency_revisions.py b/lib/tool_shed/test/functional/test_1030_install_repository_with_dependency_revisions.py index 9c167b919f20..8bdfe650dca0 100644 --- a/lib/tool_shed/test/functional/test_1030_install_repository_with_dependency_revisions.py +++ b/lib/tool_shed/test/functional/test_1030_install_repository_with_dependency_revisions.py @@ -186,7 +186,7 @@ def test_0015_install_emboss_repository(self): ) self._assert_repo_has_tool_with_id(installed_repository, "EMBOSS: antigenic1") self._assert_has_valid_tool_with_name("antigenic") - self.update_installed_repository_api(installed_repository, verify_no_updates=True) + self.update_installed_repository(installed_repository, verify_no_updates=True) def test_0025_verify_installed_repository_metadata(self): """Verify that resetting the metadata on an installed repository does not change the metadata.""" diff --git a/lib/tool_shed/test/functional/test_1050_circular_dependencies_4_levels.py b/lib/tool_shed/test/functional/test_1050_circular_dependencies_4_levels.py index b03ff14f8fda..9b6f558771e2 100644 --- a/lib/tool_shed/test/functional/test_1050_circular_dependencies_4_levels.py +++ b/lib/tool_shed/test/functional/test_1050_circular_dependencies_4_levels.py @@ -404,7 +404,7 @@ def test_0070_uninstall_emboss_repository(self): repository = self.test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name ) - self.uninstall_repository(repository) + self._uninstall_repository(repository) self._assert_has_no_installed_repos_with_names(repository.name) self.test_db_util.ga_refresh(repository) self.check_galaxy_repository_tool_panel_section(repository, "emboss_5_0050") diff --git a/lib/tool_shed/test/functional/test_1070_invalid_tool.py b/lib/tool_shed/test/functional/test_1070_invalid_tool.py index 9457e6026adc..12d4a11fe44b 100644 --- a/lib/tool_shed/test/functional/test_1070_invalid_tool.py +++ b/lib/tool_shed/test/functional/test_1070_invalid_tool.py @@ -81,5 +81,5 @@ def test_0015_install_freebayes_repository(self): assert self.get_installed_repository_for( common.test_user_1, repository_name, installed_repository.installed_changeset_revision ) - self.update_installed_repository_api(installed_repository, verify_no_updates=True) + self.update_installed_repository(installed_repository, verify_no_updates=True) self._assert_repo_has_invalid_tool_in_file(installed_repository, "bismark_bowtie_wrapper.xml") diff --git a/lib/tool_shed/test/functional/test_1080_advanced_circular_dependency_installation.py b/lib/tool_shed/test/functional/test_1080_advanced_circular_dependency_installation.py index 71af425c3b96..a77e3e2c77b5 100644 --- a/lib/tool_shed/test/functional/test_1080_advanced_circular_dependency_installation.py +++ b/lib/tool_shed/test/functional/test_1080_advanced_circular_dependency_installation.py @@ -242,7 +242,7 @@ def test_0060_uninstall_column_repository(self): installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) - self.uninstall_repository(installed_column_repository) + self._uninstall_repository(installed_column_repository) self._assert_has_missing_dependency(installed_convert_repository, "column_maker_0080") def test_0065_reinstall_column_repository(self): diff --git a/lib/tool_shed/test/functional/test_1090_repository_dependency_handling.py 
b/lib/tool_shed/test/functional/test_1090_repository_dependency_handling.py index a359f3bb35bc..df32d9b64479 100644 --- a/lib/tool_shed/test/functional/test_1090_repository_dependency_handling.py +++ b/lib/tool_shed/test/functional/test_1090_repository_dependency_handling.py @@ -123,7 +123,7 @@ def test_0025_uninstall_column_repository(self): installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) - self.uninstall_repository(installed_column_repository) + self._uninstall_repository(installed_column_repository) self.test_db_util.ga_refresh(installed_column_repository) self.check_galaxy_repository_tool_panel_section(installed_column_repository, "column_maker") @@ -131,7 +131,7 @@ def test_0030_uninstall_convert_repository(self): installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) - self.uninstall_repository(installed_convert_repository) + self._uninstall_repository(installed_convert_repository) self.test_db_util.ga_refresh(installed_convert_repository) self.check_galaxy_repository_tool_panel_section(installed_convert_repository, "column_maker") @@ -170,8 +170,8 @@ def test_0040_reinstall_convert_repository(self): # common.test_user_1_name ) # installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( column_repository_name, # common.test_user_1_name ) - # self.uninstall_repository( installed_convert_repository ) - # self.uninstall_repository( installed_column_repository ) + # self._uninstall_repository( installed_convert_repository ) + # self._uninstall_repository( installed_column_repository ) # self.test_db_util.ga_refresh( installed_convert_repository ) # self.test_db_util.ga_refresh( installed_column_repository ) # self.check_galaxy_repository_tool_panel_section( installed_column_repository, 'new_column_maker' ) diff --git a/lib/tool_shed/test/functional/test_1100_install_updated_repository_dependencies.py b/lib/tool_shed/test/functional/test_1100_install_updated_repository_dependencies.py index bed335900a21..f23510d125df 100644 --- a/lib/tool_shed/test/functional/test_1100_install_updated_repository_dependencies.py +++ b/lib/tool_shed/test/functional/test_1100_install_updated_repository_dependencies.py @@ -89,7 +89,7 @@ def test_0015_install_and_uninstall_column_repository(self): installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) - self.uninstall_repository(installed_column_repository) + self._uninstall_repository(installed_column_repository) def test_0020_upload_dependency_xml(self): """Upload a repository_dependencies.xml file to column_maker that specifies convert_chars.""" diff --git a/lib/tool_shed/test/functional/test_1200_uninstall_and_reinstall_basic_repository.py b/lib/tool_shed/test/functional/test_1200_uninstall_and_reinstall_basic_repository.py index d528189f87e0..8daf29980a6f 100644 --- a/lib/tool_shed/test/functional/test_1200_uninstall_and_reinstall_basic_repository.py +++ b/lib/tool_shed/test/functional/test_1200_uninstall_and_reinstall_basic_repository.py @@ -91,7 +91,7 @@ def test_0015_uninstall_filtering_repository(self): installed_repository = self.test_db_util.get_installed_repository_by_name_owner( "filtering_0000", common.test_user_1_name ) - self.uninstall_repository(installed_repository) + self._uninstall_repository(installed_repository) 
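+        # Uninstall now goes through the installation client; the assertion below
+        # verifies the repository is gone from the install target.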
self._assert_has_no_installed_repos_with_names("filtering_0000") def test_0020_reinstall_filtering_repository(self): diff --git a/lib/tool_shed/test/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py b/lib/tool_shed/test/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py index 6d7c798cdef0..55d3f6294267 100644 --- a/lib/tool_shed/test/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py +++ b/lib/tool_shed/test/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py @@ -119,7 +119,7 @@ def test_0015_uninstall_freebayes_repository(self): installed_repository = self.test_db_util.get_installed_repository_by_name_owner( "freebayes_0010", common.test_user_1_name ) - self.uninstall_repository(installed_repository) + self._uninstall_repository(installed_repository) self._assert_has_no_installed_repos_with_names("freebayes_0010") def test_0020_reinstall_freebayes_repository(self): diff --git a/lib/tool_shed/test/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py b/lib/tool_shed/test/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py index bd74f4bf3b68..b9ececb46112 100644 --- a/lib/tool_shed/test/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py +++ b/lib/tool_shed/test/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py @@ -165,7 +165,7 @@ def test_0015_uninstall_emboss_repository(self): installed_repository = self.test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name ) - self.uninstall_repository(installed_repository) + self._uninstall_repository(installed_repository) self._assert_has_no_installed_repos_with_names(emboss_repository_name) def test_0020_reinstall_emboss_repository(self): diff --git a/lib/tool_shed/test/functional/test_1410_update_manager.py b/lib/tool_shed/test/functional/test_1410_update_manager.py index bb337c7b05a9..5c017181da40 100644 --- a/lib/tool_shed/test/functional/test_1410_update_manager.py +++ b/lib/tool_shed/test/functional/test_1410_update_manager.py @@ -114,6 +114,6 @@ def test_0020_check_for_displayed_update(self): installed_repository = self.test_db_util.get_installed_repository_by_name_owner( repository_name, common.test_user_1_name ) - response = self.update_installed_repository_api(installed_repository) + response = self.update_installed_repository(installed_repository) assert response["status"] == "ok" assert "has been updated" in response["message"] diff --git a/lib/tool_shed/test/functional/test_1430_repair_installed_repository.py b/lib/tool_shed/test/functional/test_1430_repair_installed_repository.py index 47f699200129..7635d884a9c6 100644 --- a/lib/tool_shed/test/functional/test_1430_repair_installed_repository.py +++ b/lib/tool_shed/test/functional/test_1430_repair_installed_repository.py @@ -142,5 +142,5 @@ def test_0025_uninstall_filter_repository(self): installed_repository = self.test_db_util.get_installed_repository_by_name_owner( "filter_1430", common.test_user_1_name ) - self.uninstall_repository(installed_repository) + self._uninstall_repository(installed_repository) self._assert_has_no_installed_repos_with_names("filter_1430") diff --git a/lib/tool_shed/test/functional/test_1470_updating_installed_repositories.py b/lib/tool_shed/test/functional/test_1470_updating_installed_repositories.py index 5c984fa4885c..1b8750d08b99 100644 --- 
a/lib/tool_shed/test/functional/test_1470_updating_installed_repositories.py +++ b/lib/tool_shed/test/functional/test_1470_updating_installed_repositories.py @@ -105,7 +105,7 @@ def test_0020_get_repository_updates(self): installed_repository = self.test_db_util.get_installed_repository_by_name_owner( repository_name, common.test_user_1_name ) - self.update_installed_repository_api(installed_repository) + self.update_installed_repository(installed_repository) def test_0025_uninstall_repository(self): """Uninstall the filtering_1470 repository. @@ -115,7 +115,7 @@ def test_0025_uninstall_repository(self): installed_repository = self.test_db_util.get_installed_repository_by_name_owner( repository_name, common.test_user_1_name ) - self.uninstall_repository(installed_repository) + self._uninstall_repository(installed_repository) def test_0030_reinstall_repository(self): """Reinstall the filtering_1470 repository. From d729553b8053ab5b363ff557607c72f7b60726df Mon Sep 17 00:00:00 2001 From: John Chilton Date: Tue, 18 Oct 2022 16:35:17 -0400 Subject: [PATCH 25/73] Abstraction for getting install object that can be swapped out. --- lib/tool_shed/test/base/twilltestcase.py | 57 +++++++++++++++++-- .../test_1000_install_basic_repository.py | 12 ++-- ...stall_repository_with_tool_dependencies.py | 6 +- ...repository_with_repository_dependencies.py | 8 +-- ...ll_repository_with_dependency_revisions.py | 4 +- ..._repository_basic_circular_dependencies.py | 32 +++++------ ...est_1050_circular_dependencies_4_levels.py | 8 +-- .../test/functional/test_1070_invalid_tool.py | 6 +- ...vanced_circular_dependency_installation.py | 52 ++++++++--------- ...est_1090_repository_dependency_handling.py | 14 ++--- ...install_updated_repository_dependencies.py | 2 +- ...ll_repository_with_complex_dependencies.py | 8 +-- ...tory_with_invalid_repository_dependency.py | 2 +- ...e_repository_dependency_multiple_owners.py | 4 +- .../test_1170_prior_installation_required.py | 4 +- ...80_circular_prior_installation_required.py | 20 +++---- ...190_complex_prior_installation_required.py | 6 +- ...ninstall_and_reinstall_basic_repository.py | 16 ++---- ...stall_repository_with_tool_dependencies.py | 16 ++---- ...ll_repository_with_dependency_revisions.py | 8 +-- .../functional/test_1410_update_manager.py | 8 +-- .../test_1430_repair_installed_repository.py | 4 +- .../functional/test_1460_data_managers.py | 4 +- ...st_1470_updating_installed_repositories.py | 16 ++---- 24 files changed, 158 insertions(+), 159 deletions(-) diff --git a/lib/tool_shed/test/base/twilltestcase.py b/lib/tool_shed/test/base/twilltestcase.py index a7017e2a2719..dc3421fc0ea9 100644 --- a/lib/tool_shed/test/base/twilltestcase.py +++ b/lib/tool_shed/test/base/twilltestcase.py @@ -113,6 +113,21 @@ def update_installed_repository( def get_tool_names(self) -> List[str]: ... + def get_installed_repository_by_name_owner( + self, repository_name: str, repository_owner: str + ) -> galaxy_model.ToolShedRepository: + ... + + def get_installed_repositories_by_name_owner( + self, repository_name: str, repository_owner: str + ) -> List[galaxy_model.ToolShedRepository]: + ... + + def get_installed_repository_for( + self, owner: Optional[str] = None, name: Optional[str] = None, changeset: Optional[str] = None + ) -> Optional[Dict[str, Any]]: + ... 
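+    # Repository lookups are part of the installation-target contract too, so a
+    # swapped-in backend can answer them without a Galaxy database.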
+ class GalaxyInteractorToolShedInstallationClient(ToolShedInstallationClient): """A Galaxy API + Database as a installation target for the tool shed.""" @@ -272,6 +287,23 @@ def get_tool_names(self) -> List[str]: tool_list = response.json() return [t["name"] for t in tool_list] + def get_installed_repository_by_name_owner( + self, repository_name: str, repository_owner: str + ) -> galaxy_model.ToolShedRepository: + return test_db_util.get_installed_repository_by_name_owner(repository_name, repository_owner) + + def get_installed_repositories_by_name_owner( + self, repository_name: str, repository_owner: str + ) -> List[galaxy_model.ToolShedRepository]: + return test_db_util.get_installed_repository_by_name_owner( + repository_name, repository_owner, return_multiple=True + ) + + def get_installed_repository_for( + self, owner: Optional[str] = None, name: Optional[str] = None, changeset: Optional[str] = None + ) -> Optional[Dict[str, Any]]: + return self.testcase.get_installed_repository_for(owner=owner, name=name, changeset=changeset) + def _galaxy_login(self, email="test@bx.psu.edu", password="testuser", username="admin-user"): self._galaxy_logout() self._create_user_in_galaxy(email=email, password=password, username=username) @@ -607,7 +639,7 @@ def check_for_valid_tools(self, repository, strings_displayed=None, strings_not_ self.display_manage_repository_page(repository, strings_displayed, strings_not_displayed) def check_galaxy_repository_db_status(self, repository_name, owner, expected_status): - installed_repository = test_db_util.get_installed_repository_by_name_owner(repository_name, owner) + installed_repository = self._get_installed_repository_by_name_owner(repository_name, owner) assert ( installed_repository.status == expected_status ), f"Status in database is {installed_repository.status}, expected {expected_status}" @@ -1570,14 +1602,14 @@ def verify_installed_repositories(self, installed_repositories=None, uninstalled installed_repositories = installed_repositories or [] uninstalled_repositories = uninstalled_repositories or [] for repository_name, repository_owner in installed_repositories: - galaxy_repository = test_db_util.get_installed_repository_by_name_owner(repository_name, repository_owner) + galaxy_repository = self._get_installed_repository_by_name_owner(repository_name, repository_owner) if galaxy_repository: assert ( galaxy_repository.status == "Installed" ), f"Repository {repository_name} should be installed, but is {galaxy_repository.status}" def verify_installed_repository_metadata_unchanged(self, name, owner): - installed_repository = test_db_util.get_installed_repository_by_name_owner(name, owner) + installed_repository = self._get_installed_repository_by_name_owner(name, owner) metadata = installed_repository.metadata_ self._installation_client.reset_installed_repository_metadata(installed_repository) new_metadata = installed_repository.metadata_ @@ -1648,13 +1680,28 @@ def verify_installed_repository_data_table_entries(self, required_data_table_ent # or we know that the repository was not correctly installed! assert found, f"No entry for {required_data_table_entry} in {self.shed_tool_data_table_conf}." 
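+    # Thin wrappers so test code reaches installed repositories through the
+    # installation client rather than querying test_db_util directly: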
+    def _get_installed_repository_by_name_owner(
+        self, repository_name: str, repository_owner: str
+    ) -> galaxy_model.ToolShedRepository:
+        return self._installation_client.get_installed_repository_by_name_owner(repository_name, repository_owner)
+
+    def _get_installed_repositories_by_name_owner(
+        self, repository_name: str, repository_owner: str
+    ) -> List[galaxy_model.ToolShedRepository]:
+        return self._installation_client.get_installed_repositories_by_name_owner(repository_name, repository_owner)
+
+    def _get_installed_repository_for(
+        self, owner: Optional[str] = None, name: Optional[str] = None, changeset: Optional[str] = None
+    ):
+        return self._installation_client.get_installed_repository_for(owner=owner, name=name, changeset=changeset)
+
     def _assert_has_installed_repos_with_names(self, *names):
         for name in names:
-            assert self.get_installed_repository_for(name=name)
+            assert self._get_installed_repository_for(name=name)
 
     def _assert_has_no_installed_repos_with_names(self, *names):
         for name in names:
-            assert not self.get_installed_repository_for(name=name)
+            assert not self._get_installed_repository_for(name=name)
 
     def _assert_has_missing_dependency(
         self, installed_repository: galaxy_model.ToolShedRepository, repository_name: str
diff --git a/lib/tool_shed/test/functional/test_1000_install_basic_repository.py b/lib/tool_shed/test/functional/test_1000_install_basic_repository.py
index 61634ee43955..0b6ec2c89dcc 100644
--- a/lib/tool_shed/test/functional/test_1000_install_basic_repository.py
+++ b/lib/tool_shed/test/functional/test_1000_install_basic_repository.py
@@ -107,19 +107,15 @@ def test_0025_install_filtering_repository(self):
             "Test 0000 Basic Repository Features 1",
             new_tool_panel_section_label="test_1000",
         )
-        installed_repository = self.test_db_util.get_installed_repository_by_name_owner(
-            repo_name, common.test_user_1_name
-        )
+        installed_repository = self._get_installed_repository_by_name_owner(repo_name, common.test_user_1_name)
         changeset = str(installed_repository.installed_changeset_revision)
-        assert self.get_installed_repository_for(common.test_user_1, repo_name, changeset)
+        assert self._get_installed_repository_for(common.test_user_1, repo_name, changeset)
         self._assert_has_valid_tool_with_name("Filter1")
         self._assert_repo_has_tool_with_id(installed_repository, "Filter1")
 
     def test_0030_install_filtering_repository_again(self):
         """Attempt to install the already installed filtering repository."""
-        installed_repository = self.test_db_util.get_installed_repository_by_name_owner(
-            repo_name, common.test_user_1_name
-        )
+        installed_repository = self._get_installed_repository_by_name_owner(repo_name, common.test_user_1_name)
         # Just make sure the repo is still installed. This used to be verified via the
         # monitoring page, but we've removed that page.
self._install_repository( @@ -128,7 +124,7 @@ def test_0030_install_filtering_repository_again(self): "Test 0000 Basic Repository Features 1", ) changeset = str(installed_repository.installed_changeset_revision) - assert self.get_installed_repository_for(common.test_user_1, repo_name, changeset) + assert self._get_installed_repository_for(common.test_user_1, repo_name, changeset) def test_0035_verify_installed_repository_metadata(self): """Verify that resetting the metadata on an installed repository does not change the metadata.""" diff --git a/lib/tool_shed/test/functional/test_1010_install_repository_with_tool_dependencies.py b/lib/tool_shed/test/functional/test_1010_install_repository_with_tool_dependencies.py index defd12795fd5..7348eda4f8a4 100644 --- a/lib/tool_shed/test/functional/test_1010_install_repository_with_tool_dependencies.py +++ b/lib/tool_shed/test/functional/test_1010_install_repository_with_tool_dependencies.py @@ -129,10 +129,8 @@ def test_0015_install_freebayes_repository(self): install_tool_dependencies=False, new_tool_panel_section_label="test_1010", ) - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - repository_name, common.test_user_1_name - ) - assert self.get_installed_repository_for( + installed_repository = self._get_installed_repository_by_name_owner(repository_name, common.test_user_1_name) + assert self._get_installed_repository_for( common.test_user_1, repository_name, installed_repository.installed_changeset_revision ) self._assert_has_valid_tool_with_name("FreeBayes") diff --git a/lib/tool_shed/test/functional/test_1020_install_repository_with_repository_dependencies.py b/lib/tool_shed/test/functional/test_1020_install_repository_with_repository_dependencies.py index d7a6c90b69b8..29b938967af8 100644 --- a/lib/tool_shed/test/functional/test_1020_install_repository_with_repository_dependencies.py +++ b/lib/tool_shed/test/functional/test_1020_install_repository_with_repository_dependencies.py @@ -98,10 +98,10 @@ def test_0015_install_emboss_repository(self): install_tool_dependencies=False, new_tool_panel_section_label="test_1020", ) - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_repository = self._get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name ) - assert self.get_installed_repository_for( + assert self._get_installed_repository_for( common.test_user_1, emboss_repository_name, installed_repository.installed_changeset_revision ) self._assert_has_valid_tool_with_name("antigenic") @@ -113,14 +113,14 @@ def test_0020_verify_installed_repository_metadata(self): def test_0025_deactivate_datatypes_repository(self): """Deactivate the emboss_datatypes repository without removing it from disk.""" - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_repository = self._get_installed_repository_by_name_owner( column_maker_repository_name, common.test_user_1_name ) self.deactivate_repository(installed_repository) def test_0030_reactivate_datatypes_repository(self): """Reactivate the datatypes repository and verify that the datatypes are again present.""" - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_repository = self._get_installed_repository_by_name_owner( column_maker_repository_name, common.test_user_1_name ) self.reactivate_repository(installed_repository) diff --git 
a/lib/tool_shed/test/functional/test_1030_install_repository_with_dependency_revisions.py b/lib/tool_shed/test/functional/test_1030_install_repository_with_dependency_revisions.py index 8bdfe650dca0..9bd1eb5f7826 100644 --- a/lib/tool_shed/test/functional/test_1030_install_repository_with_dependency_revisions.py +++ b/lib/tool_shed/test/functional/test_1030_install_repository_with_dependency_revisions.py @@ -178,10 +178,10 @@ def test_0015_install_emboss_repository(self): install_tool_dependencies=False, new_tool_panel_section_label="test_1030", ) - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_repository = self._get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name ) - assert self.get_installed_repository_for( + assert self._get_installed_repository_for( common.test_user_1, emboss_repository_name, installed_repository.installed_changeset_revision ) self._assert_repo_has_tool_with_id(installed_repository, "EMBOSS: antigenic1") diff --git a/lib/tool_shed/test/functional/test_1040_install_repository_basic_circular_dependencies.py b/lib/tool_shed/test/functional/test_1040_install_repository_basic_circular_dependencies.py index 8a21f93adedf..5dfdcf31fd6c 100644 --- a/lib/tool_shed/test/functional/test_1040_install_repository_basic_circular_dependencies.py +++ b/lib/tool_shed/test/functional/test_1040_install_repository_basic_circular_dependencies.py @@ -134,16 +134,16 @@ def test_0025_install_freebayes_repository(self): def test_0030_uninstall_filtering_repository(self): """Deactivate filtering, verify tool panel section and missing repository dependency.""" - installed_freebayes_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_freebayes_repository = self._get_installed_repository_by_name_owner( freebayes_repository_name, common.test_user_1_name ) - installed_filtering_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_filtering_repository = self._get_installed_repository_by_name_owner( filtering_repository_name, common.test_user_1_name ) - assert self.get_installed_repository_for( + assert self._get_installed_repository_for( common.test_user_1, freebayes_repository_name, installed_freebayes_repository.installed_changeset_revision ) - assert self.get_installed_repository_for( + assert self._get_installed_repository_for( common.test_user_1, filtering_repository_name, installed_filtering_repository.installed_changeset_revision ) self.deactivate_repository(installed_filtering_repository) @@ -153,7 +153,7 @@ def test_0030_uninstall_filtering_repository(self): def test_0035_reactivate_filtering_repository(self): """Reinstall filtering into 'filtering' tool panel section.""" - installed_filtering_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_filtering_repository = self._get_installed_repository_by_name_owner( freebayes_repository_name, common.test_user_1_name ) self.reinstall_repository_api( @@ -162,27 +162,27 @@ def test_0035_reactivate_filtering_repository(self): install_repository_dependencies=True, new_tool_panel_section_label="filtering", ) - installed_freebayes_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_freebayes_repository = self._get_installed_repository_by_name_owner( freebayes_repository_name, common.test_user_1_name ) self._assert_is_not_missing_dependency(installed_freebayes_repository, filtering_repository_name) def test_0040_uninstall_freebayes_repository(self): 
"""Deactivate freebayes, verify tool panel section and missing repository dependency.""" - installed_freebayes_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_freebayes_repository = self._get_installed_repository_by_name_owner( freebayes_repository_name, common.test_user_1_name ) - installed_filtering_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_filtering_repository = self._get_installed_repository_by_name_owner( filtering_repository_name, common.test_user_1_name ) - assert self.get_installed_repository_for( + assert self._get_installed_repository_for( common.test_user_1, freebayes_repository_name, installed_freebayes_repository.installed_changeset_revision ) - assert self.get_installed_repository_for( + assert self._get_installed_repository_for( common.test_user_1, filtering_repository_name, installed_filtering_repository.installed_changeset_revision ) self.deactivate_repository(installed_freebayes_repository) - assert not self.get_installed_repository_for( + assert not self._get_installed_repository_for( common.test_user_1, freebayes_repository_name, installed_freebayes_repository.installed_changeset_revision ) self.test_db_util.ga_refresh(installed_freebayes_repository) @@ -191,20 +191,20 @@ def test_0040_uninstall_freebayes_repository(self): def test_0045_deactivate_filtering_repository(self): """Deactivate filtering, verify tool panel section.""" - installed_filtering_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_filtering_repository = self._get_installed_repository_by_name_owner( filtering_repository_name, common.test_user_1_name ) - installed_freebayes_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_freebayes_repository = self._get_installed_repository_by_name_owner( freebayes_repository_name, common.test_user_1_name ) - assert self.get_installed_repository_for( + assert self._get_installed_repository_for( common.test_user_1, filtering_repository_name, installed_filtering_repository.installed_changeset_revision ) self.deactivate_repository(installed_filtering_repository) - assert not self.get_installed_repository_for( + assert not self._get_installed_repository_for( common.test_user_1, freebayes_repository_name, installed_freebayes_repository.installed_changeset_revision ) - assert not self.get_installed_repository_for( + assert not self._get_installed_repository_for( common.test_user_1, filtering_repository_name, installed_filtering_repository.installed_changeset_revision ) self.test_db_util.ga_refresh(installed_filtering_repository) diff --git a/lib/tool_shed/test/functional/test_1050_circular_dependencies_4_levels.py b/lib/tool_shed/test/functional/test_1050_circular_dependencies_4_levels.py index 9b6f558771e2..90fb8db3e309 100644 --- a/lib/tool_shed/test/functional/test_1050_circular_dependencies_4_levels.py +++ b/lib/tool_shed/test/functional/test_1050_circular_dependencies_4_levels.py @@ -382,9 +382,7 @@ def test_0060_install_emboss_repository(self): def test_0065_deactivate_bismark_repository(self): """Deactivate bismark and verify things are okay.""" - repository = self.test_db_util.get_installed_repository_by_name_owner( - bismark_repository_name, common.test_user_1_name - ) + repository = self._get_installed_repository_by_name_owner(bismark_repository_name, common.test_user_1_name) self.deactivate_repository(repository) # Now we have emboss, bismark, column_maker, and convert_chars installed, filtering and freebayes never 
installed. installed_repositories = [ @@ -401,9 +399,7 @@ def test_0065_deactivate_bismark_repository(self): def test_0070_uninstall_emboss_repository(self): """Uninstall the emboss_5 repository.""" - repository = self.test_db_util.get_installed_repository_by_name_owner( - emboss_repository_name, common.test_user_1_name - ) + repository = self._get_installed_repository_by_name_owner(emboss_repository_name, common.test_user_1_name) self._uninstall_repository(repository) self._assert_has_no_installed_repos_with_names(repository.name) self.test_db_util.ga_refresh(repository) diff --git a/lib/tool_shed/test/functional/test_1070_invalid_tool.py b/lib/tool_shed/test/functional/test_1070_invalid_tool.py index 12d4a11fe44b..8b720697215b 100644 --- a/lib/tool_shed/test/functional/test_1070_invalid_tool.py +++ b/lib/tool_shed/test/functional/test_1070_invalid_tool.py @@ -75,10 +75,8 @@ def test_0015_install_freebayes_repository(self): install_tool_dependencies=False, new_tool_panel_section_label="test_1070", ) - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - repository_name, common.test_user_1_name - ) - assert self.get_installed_repository_for( + installed_repository = self._get_installed_repository_by_name_owner(repository_name, common.test_user_1_name) + assert self._get_installed_repository_for( common.test_user_1, repository_name, installed_repository.installed_changeset_revision ) self.update_installed_repository(installed_repository, verify_no_updates=True) diff --git a/lib/tool_shed/test/functional/test_1080_advanced_circular_dependency_installation.py b/lib/tool_shed/test/functional/test_1080_advanced_circular_dependency_installation.py index a77e3e2c77b5..dd31fb94f8c6 100644 --- a/lib/tool_shed/test/functional/test_1080_advanced_circular_dependency_installation.py +++ b/lib/tool_shed/test/functional/test_1080_advanced_circular_dependency_installation.py @@ -128,13 +128,13 @@ def test_0020_install_convert_repository(self): install_repository_dependencies=False, new_tool_panel_section_label="convert_chars", ) - installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) - installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) - assert self.get_installed_repository_for( + assert self._get_installed_repository_for( common.test_user_1, "convert_chars_0080", installed_convert_repository.installed_changeset_revision ) self._assert_has_installed_repository_dependency( @@ -154,16 +154,16 @@ def test_0025_install_column_repository(self): install_repository_dependencies=True, new_tool_panel_section_label="column_maker", ) - installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) - installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) - assert self.get_installed_repository_for( + assert self._get_installed_repository_for( common.test_user_1, "convert_chars_0080", installed_convert_repository.installed_changeset_revision ) - assert 
self.get_installed_repository_for( + assert self._get_installed_repository_for( common.test_user_1, "column_maker_0080", installed_column_repository.installed_changeset_revision ) self._assert_has_installed_repository_dependency( @@ -172,10 +172,10 @@ def test_0025_install_column_repository(self): def test_0030_deactivate_convert_repository(self): """Deactivate convert_chars, verify that column_maker is installed and missing repository dependencies.""" - installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) - installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) self.deactivate_repository(installed_convert_repository) @@ -183,10 +183,10 @@ def test_0030_deactivate_convert_repository(self): def test_0035_reactivate_convert_repository(self): """Reactivate convert_chars, both convert_chars and column_maker should now show as installed.""" - installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) - installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) self.reactivate_repository(installed_convert_repository) @@ -196,10 +196,10 @@ def test_0035_reactivate_convert_repository(self): def test_0040_deactivate_column_repository(self): """Deactivate column_maker, verify that convert_chars is installed and missing repository dependencies.""" - installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) - installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) self.deactivate_repository(installed_column_repository) @@ -207,7 +207,7 @@ def test_0040_deactivate_column_repository(self): def test_0045_deactivate_convert_repository(self): """Deactivate convert_chars, verify that both convert_chars and column_maker are deactivated.""" - installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) self.deactivate_repository(installed_convert_repository) @@ -215,7 +215,7 @@ def test_0045_deactivate_convert_repository(self): def test_0050_reactivate_column_repository(self): """Reactivate column_maker. 
This should not automatically reactivate convert_chars, so column_maker should be displayed as installed but missing repository dependencies.""" - installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) self.reactivate_repository(installed_column_repository) @@ -223,10 +223,10 @@ def test_0050_reactivate_column_repository(self): def test_0055_reactivate_convert_repository(self): """Activate convert_chars. Both convert_chars and column_maker should now show as installed.""" - installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) - installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) self.reactivate_repository(installed_convert_repository) @@ -236,10 +236,10 @@ def test_0055_reactivate_convert_repository(self): def test_0060_uninstall_column_repository(self): """Uninstall column_maker. Verify that convert_chars is installed and missing repository dependencies.""" - installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) - installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) self._uninstall_repository(installed_column_repository) @@ -247,10 +247,10 @@ def test_0060_uninstall_column_repository(self): def test_0065_reinstall_column_repository(self): """Reinstall column_maker without repository dependencies, verify both convert_chars and column_maker are installed.""" - installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) - installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) self.reinstall_repository_api(installed_column_repository, install_repository_dependencies=False) @@ -260,10 +260,10 @@ def test_0065_reinstall_column_repository(self): def test_0070_uninstall_convert_repository(self): """Uninstall convert_chars, verify column_maker installed but missing repository dependencies.""" - installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) - installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) self.deactivate_repository(installed_convert_repository) @@ -271,7 +271,7 @@ def test_0070_uninstall_convert_repository(self): def test_0075_uninstall_column_repository(self): """Uninstall column_maker, verify that both convert_chars and column_maker are 
uninstalled.""" - installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) self.deactivate_repository(installed_column_repository) @@ -279,10 +279,10 @@ def test_0075_uninstall_column_repository(self): def test_0080_reinstall_convert_repository(self): """Reinstall convert_chars with repository dependencies, verify that this installs both convert_chars and column_maker.""" - installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) - installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) self.reinstall_repository_api(installed_convert_repository, install_repository_dependencies=True) diff --git a/lib/tool_shed/test/functional/test_1090_repository_dependency_handling.py b/lib/tool_shed/test/functional/test_1090_repository_dependency_handling.py index df32d9b64479..02a80ad62895 100644 --- a/lib/tool_shed/test/functional/test_1090_repository_dependency_handling.py +++ b/lib/tool_shed/test/functional/test_1090_repository_dependency_handling.py @@ -112,7 +112,7 @@ def test_0020_install_repositories(self): install_repository_dependencies=True, new_tool_panel_section_label="column_maker", ) - installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) self._assert_has_installed_repos_with_names("convert_chars_1085", "column_maker_1085") @@ -120,7 +120,7 @@ def test_0020_install_repositories(self): def test_0025_uninstall_column_repository(self): """uninstall column_maker, verify same section""" - installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) self._uninstall_repository(installed_column_repository) @@ -128,7 +128,7 @@ def test_0025_uninstall_column_repository(self): self.check_galaxy_repository_tool_panel_section(installed_column_repository, "column_maker") def test_0030_uninstall_convert_repository(self): - installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) self._uninstall_repository(installed_convert_repository) @@ -137,7 +137,7 @@ def test_0030_uninstall_convert_repository(self): def test_0035_reinstall_column_repository(self): """reinstall column_maker into new section 'new_column_maker' (no_changes = false), no dependencies""" - installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) self.reinstall_repository_api( @@ -150,7 +150,7 @@ def test_0035_reinstall_column_repository(self): def test_0040_reinstall_convert_repository(self): """reinstall convert_chars into new section 'new_convert_chars' (no_changes = false), no dependencies""" - installed_convert_repository = 
self.test_db_util.get_installed_repository_by_name_owner( + installed_convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) self.reinstall_repository_api( @@ -166,9 +166,9 @@ def test_0040_reinstall_convert_repository(self): # https://jenkins.galaxyproject.org/job/docker-toolshed/5198/ # def test_0045_uninstall_and_verify_tool_panel_sections( self ): # '''uninstall both and verify tool panel sections''' - # installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( convert_repository_name, + # installed_convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, # common.test_user_1_name ) - # installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( column_repository_name, + # installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, # common.test_user_1_name ) # self._uninstall_repository( installed_convert_repository ) # self._uninstall_repository( installed_column_repository ) diff --git a/lib/tool_shed/test/functional/test_1100_install_updated_repository_dependencies.py b/lib/tool_shed/test/functional/test_1100_install_updated_repository_dependencies.py index f23510d125df..3cd7e9b6b89d 100644 --- a/lib/tool_shed/test/functional/test_1100_install_updated_repository_dependencies.py +++ b/lib/tool_shed/test/functional/test_1100_install_updated_repository_dependencies.py @@ -86,7 +86,7 @@ def test_0015_install_and_uninstall_column_repository(self): install_repository_dependencies=True, new_tool_panel_section_label="column_maker", ) - installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) self._uninstall_repository(installed_column_repository) diff --git a/lib/tool_shed/test/functional/test_1120_install_repository_with_complex_dependencies.py b/lib/tool_shed/test/functional/test_1120_install_repository_with_complex_dependencies.py index 522383132095..8e41e0c5b20b 100644 --- a/lib/tool_shed/test/functional/test_1120_install_repository_with_complex_dependencies.py +++ b/lib/tool_shed/test/functional/test_1120_install_repository_with_complex_dependencies.py @@ -269,16 +269,16 @@ def test_0045_install_base_repository(self): def test_0050_verify_installed_repositories(self): """Verify that the installed repositories are displayed properly.""" - base_repository = self.test_db_util.get_installed_repository_by_name_owner( + base_repository = self._get_installed_repository_by_name_owner( bwa_base_repository_name, common.test_user_1_name ) - tool_repository = self.test_db_util.get_installed_repository_by_name_owner( + tool_repository = self._get_installed_repository_by_name_owner( bwa_package_repository_name, common.test_user_1_name ) - assert self.get_installed_repository_for( + assert self._get_installed_repository_for( common.test_user_1, "bwa_base_repository_0100", base_repository.installed_changeset_revision ) - assert self.get_installed_repository_for( + assert self._get_installed_repository_for( common.test_user_1, "package_bwa_0_5_9_0100", tool_repository.installed_changeset_revision ) self._assert_has_installed_repository_dependency(base_repository, "package_bwa_0_5_9_0100") diff --git a/lib/tool_shed/test/functional/test_1130_install_repository_with_invalid_repository_dependency.py 
b/lib/tool_shed/test/functional/test_1130_install_repository_with_invalid_repository_dependency.py index 3fefe921ce41..d59f45563ce8 100644 --- a/lib/tool_shed/test/functional/test_1130_install_repository_with_invalid_repository_dependency.py +++ b/lib/tool_shed/test/functional/test_1130_install_repository_with_invalid_repository_dependency.py @@ -184,7 +184,7 @@ def test_0045_install_repository_with_invalid_repository_dependency(self): install_repository_dependencies=True, preview_strings_displayed=preview_strings_displayed, ) - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_repository = self._get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name ) json = self.display_installed_repository_manage_json(installed_repository) diff --git a/lib/tool_shed/test/functional/test_1140_simple_repository_dependency_multiple_owners.py b/lib/tool_shed/test/functional/test_1140_simple_repository_dependency_multiple_owners.py index 6a437338f5ba..83b5a6d0c4de 100644 --- a/lib/tool_shed/test/functional/test_1140_simple_repository_dependency_multiple_owners.py +++ b/lib/tool_shed/test/functional/test_1140_simple_repository_dependency_multiple_owners.py @@ -192,8 +192,6 @@ def test_0050_verify_repository_installation(self): are now new datatypes in the registry matching the ones defined in blast_datatypes_0120. Also check that blast_datatypes_0120 is labeled as an installed repository dependency of blastxml_to_top_descr_0120. """ - tool_repository = self.test_db_util.get_installed_repository_by_name_owner( - tool_repository_name, common.test_user_1_name - ) + tool_repository = self._get_installed_repository_by_name_owner(tool_repository_name, common.test_user_1_name) self._assert_has_valid_tool_with_name("BLAST top hit") self._assert_repo_has_tool_with_id(tool_repository, "blastxml_to_top_descr") diff --git a/lib/tool_shed/test/functional/test_1170_prior_installation_required.py b/lib/tool_shed/test/functional/test_1170_prior_installation_required.py index 7935a829de67..2894b96fcecc 100644 --- a/lib/tool_shed/test/functional/test_1170_prior_installation_required.py +++ b/lib/tool_shed/test/functional/test_1170_prior_installation_required.py @@ -147,10 +147,10 @@ def test_0025_install_column_repository(self): def test_0030_verify_installation_order(self): """Verify that convert_chars_0150 was installed before column_maker_0150.""" - column_repository = self.test_db_util.get_installed_repository_by_name_owner( + column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) - convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) # Column maker was selected for installation, so convert chars should have been installed first, as reflected by the update_time field. 
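As the comment above notes, these ordering tests do not read an installation log; they infer install order from each repository row's update_time. A minimal illustration of that assertion style, using an illustrative in-memory record rather than a real ToolShedRepository row:

    from dataclasses import dataclass
    from datetime import datetime, timedelta


    @dataclass
    class RepoRow:
        # Illustrative stand-in for a ToolShedRepository record.
        name: str
        update_time: datetime


    now = datetime.utcnow()
    convert_chars = RepoRow("convert_chars_0150", update_time=now - timedelta(seconds=30))
    column_maker = RepoRow("column_maker_0150", update_time=now)

    # prior_installation_required means the dependency must be installed first,
    # which surfaces as an earlier update_time on its database row.
    assert (
        convert_chars.update_time < column_maker.update_time
    ), f"{convert_chars.name} should have been installed before {column_maker.name}"

The same update_time comparison recurs below for the circular (test_1180) and complex (test_1190) prior-installation cases.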
diff --git a/lib/tool_shed/test/functional/test_1180_circular_prior_installation_required.py b/lib/tool_shed/test/functional/test_1180_circular_prior_installation_required.py index cf151ef687a5..5a19560911ff 100644 --- a/lib/tool_shed/test/functional/test_1180_circular_prior_installation_required.py +++ b/lib/tool_shed/test/functional/test_1180_circular_prior_installation_required.py @@ -228,13 +228,13 @@ def test_0030_install_filtering_repository(self): def test_0035_verify_installation_order(self): """Verify that convert_chars_0160 and column_maker_0160 were installed before filtering_0160.""" - filter_repository = self.test_db_util.get_installed_repository_by_name_owner( + filter_repository = self._get_installed_repository_by_name_owner( filter_repository_name, common.test_user_1_name ) - column_repository = self.test_db_util.get_installed_repository_by_name_owner( + column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) - convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) # Filtering was selected for installation, so convert chars and column maker should have been installed first. @@ -247,13 +247,13 @@ def test_0035_verify_installation_order(self): def test_0040_deactivate_all_repositories(self): """Uninstall convert_chars_0160, column_maker_0160, and filtering_0160.""" - filter_repository = self.test_db_util.get_installed_repository_by_name_owner( + filter_repository = self._get_installed_repository_by_name_owner( filter_repository_name, common.test_user_1_name ) - column_repository = self.test_db_util.get_installed_repository_by_name_owner( + column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) - convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) self.deactivate_repository(filter_repository) @@ -262,7 +262,7 @@ def test_0040_deactivate_all_repositories(self): def test_0045_reactivate_filter_repository(self): """Reinstall the filtering_0160 repository.""" - filter_repository = self.test_db_util.get_installed_repository_by_name_owner( + filter_repository = self._get_installed_repository_by_name_owner( filter_repository_name, common.test_user_1_name ) self.reactivate_repository(filter_repository) @@ -273,13 +273,13 @@ def test_0045_reactivate_filter_repository(self): def test_0050_verify_reinstallation_order(self): """Verify that convert_chars_0160 and column_maker_0160 were reinstalled before filtering_0160.""" # Fixme: this test is not covering any important behavior since repositories were only deactivated and not uninstalled. 
- filter_repository = self.test_db_util.get_installed_repository_by_name_owner( + filter_repository = self._get_installed_repository_by_name_owner( filter_repository_name, common.test_user_1_name ) - column_repository = self.test_db_util.get_installed_repository_by_name_owner( + column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) - convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) # Filtering was selected for reinstallation, so convert chars and column maker should have been installed first. diff --git a/lib/tool_shed/test/functional/test_1190_complex_prior_installation_required.py b/lib/tool_shed/test/functional/test_1190_complex_prior_installation_required.py index 7531e600584f..c3d049890108 100644 --- a/lib/tool_shed/test/functional/test_1190_complex_prior_installation_required.py +++ b/lib/tool_shed/test/functional/test_1190_complex_prior_installation_required.py @@ -193,12 +193,10 @@ def test_0030_verify_installation_order(self): prior_installation_required attribute set. Confirm that this resulted in package_numpy_1_7_0170 being installed before package_matplotlib_1_2_0170. """ - matplotlib_repository = self.test_db_util.get_installed_repository_by_name_owner( + matplotlib_repository = self._get_installed_repository_by_name_owner( matplotlib_repository_name, common.test_user_1_name ) - numpy_repository = self.test_db_util.get_installed_repository_by_name_owner( - numpy_repository_name, common.test_user_1_name - ) + numpy_repository = self._get_installed_repository_by_name_owner(numpy_repository_name, common.test_user_1_name) assert ( matplotlib_repository.update_time > numpy_repository.update_time ), "Error: package_numpy_1_7_0170 shows a later update time than package_matplotlib_1_2_0170" diff --git a/lib/tool_shed/test/functional/test_1200_uninstall_and_reinstall_basic_repository.py b/lib/tool_shed/test/functional/test_1200_uninstall_and_reinstall_basic_repository.py index 8daf29980a6f..ee8ae3610bcb 100644 --- a/lib/tool_shed/test/functional/test_1200_uninstall_and_reinstall_basic_repository.py +++ b/lib/tool_shed/test/functional/test_1200_uninstall_and_reinstall_basic_repository.py @@ -88,17 +88,13 @@ def test_0010_install_filtering_repository(self): def test_0015_uninstall_filtering_repository(self): """Uninstall the filtering repository.""" - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - "filtering_0000", common.test_user_1_name - ) + installed_repository = self._get_installed_repository_by_name_owner("filtering_0000", common.test_user_1_name) self._uninstall_repository(installed_repository) self._assert_has_no_installed_repos_with_names("filtering_0000") def test_0020_reinstall_filtering_repository(self): """Reinstall the filtering repository.""" - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - "filtering_0000", common.test_user_1_name - ) + installed_repository = self._get_installed_repository_by_name_owner("filtering_0000", common.test_user_1_name) self.reinstall_repository_api(installed_repository) self._assert_has_installed_repos_with_names("filtering_0000") self._assert_has_valid_tool_with_name("Filter1") @@ -106,17 +102,13 @@ def test_0020_reinstall_filtering_repository(self): def test_0025_deactivate_filtering_repository(self): """Deactivate the filtering repository without removing it from 
disk.""" - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - "filtering_0000", common.test_user_1_name - ) + installed_repository = self._get_installed_repository_by_name_owner("filtering_0000", common.test_user_1_name) self.deactivate_repository(installed_repository) self._assert_has_no_installed_repos_with_names("filtering_0000") def test_0030_reactivate_filtering_repository(self): """Reactivate the filtering repository and verify that it now shows up in the list of installed repositories.""" - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - "filtering_0000", common.test_user_1_name - ) + installed_repository = self._get_installed_repository_by_name_owner("filtering_0000", common.test_user_1_name) self.reactivate_repository(installed_repository) self._assert_has_installed_repos_with_names("filtering_0000") self._assert_has_valid_tool_with_name("Filter1") diff --git a/lib/tool_shed/test/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py b/lib/tool_shed/test/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py index 55d3f6294267..c18a1ab6fd9e 100644 --- a/lib/tool_shed/test/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py +++ b/lib/tool_shed/test/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py @@ -116,17 +116,13 @@ def test_0010_install_freebayes_repository(self): def test_0015_uninstall_freebayes_repository(self): """Uninstall the freebayes repository.""" - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - "freebayes_0010", common.test_user_1_name - ) + installed_repository = self._get_installed_repository_by_name_owner("freebayes_0010", common.test_user_1_name) self._uninstall_repository(installed_repository) self._assert_has_no_installed_repos_with_names("freebayes_0010") def test_0020_reinstall_freebayes_repository(self): """Reinstall the freebayes repository.""" - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - "freebayes_0010", common.test_user_1_name - ) + installed_repository = self._get_installed_repository_by_name_owner("freebayes_0010", common.test_user_1_name) self.reinstall_repository_api(installed_repository) self._assert_has_installed_repos_with_names("freebayes_0010") self._assert_has_valid_tool_with_name("FreeBayes") @@ -134,17 +130,13 @@ def test_0020_reinstall_freebayes_repository(self): def test_0025_deactivate_freebayes_repository(self): """Deactivate the freebayes repository without removing it from disk.""" - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - "freebayes_0010", common.test_user_1_name - ) + installed_repository = self._get_installed_repository_by_name_owner("freebayes_0010", common.test_user_1_name) self.deactivate_repository(installed_repository) self._assert_has_no_installed_repos_with_names("freebayes_0010") def test_0030_reactivate_freebayes_repository(self): """Reactivate the freebayes repository and verify that it now shows up in the list of installed repositories.""" - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - "freebayes_0010", common.test_user_1_name - ) + installed_repository = self._get_installed_repository_by_name_owner("freebayes_0010", common.test_user_1_name) self.reactivate_repository(installed_repository) self._assert_has_installed_repos_with_names("freebayes_0010") self._assert_has_valid_tool_with_name("FreeBayes") diff 
--git a/lib/tool_shed/test/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py b/lib/tool_shed/test/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py index b9ececb46112..787e3247f3e6 100644 --- a/lib/tool_shed/test/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py +++ b/lib/tool_shed/test/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py @@ -162,7 +162,7 @@ def test_0010_install_emboss_repository(self): def test_0015_uninstall_emboss_repository(self): """Uninstall the emboss repository.""" - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_repository = self._get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name ) self._uninstall_repository(installed_repository) @@ -170,7 +170,7 @@ def test_0015_uninstall_emboss_repository(self): def test_0020_reinstall_emboss_repository(self): """Reinstall the emboss repository.""" - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_repository = self._get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name ) self.reinstall_repository_api(installed_repository) @@ -179,7 +179,7 @@ def test_0020_reinstall_emboss_repository(self): def test_0025_deactivate_emboss_repository(self): """Deactivate the emboss repository without removing it from disk.""" - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_repository = self._get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name ) self.deactivate_repository(installed_repository) @@ -187,7 +187,7 @@ def test_0025_deactivate_emboss_repository(self): def test_0030_reactivate_emboss_repository(self): """Reactivate the emboss repository and verify that it now shows up in the list of installed repositories.""" - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_repository = self._get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name ) self.reactivate_repository(installed_repository) diff --git a/lib/tool_shed/test/functional/test_1410_update_manager.py b/lib/tool_shed/test/functional/test_1410_update_manager.py index 5c017181da40..ba4112d7cd24 100644 --- a/lib/tool_shed/test/functional/test_1410_update_manager.py +++ b/lib/tool_shed/test/functional/test_1410_update_manager.py @@ -74,9 +74,7 @@ def test_0010_install_filtering_repository(self): self._install_repository( repository_name, common.test_user_1_name, category_name, new_tool_panel_section_label="test_1410" ) - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - repository_name, common.test_user_1_name - ) + installed_repository = self._get_installed_repository_by_name_owner(repository_name, common.test_user_1_name) self._assert_has_installed_repos_with_names(repository_name) self._assert_has_valid_tool_with_name("Filter") self._assert_repo_has_tool_with_id(installed_repository, "Filter1") @@ -111,9 +109,7 @@ def test_0020_check_for_displayed_update(self): """ # Wait 3 seconds, just to be sure we're past hours_between_check. 
time.sleep(3) - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - repository_name, common.test_user_1_name - ) + installed_repository = self._get_installed_repository_by_name_owner(repository_name, common.test_user_1_name) response = self.update_installed_repository(installed_repository) assert response["status"] == "ok" assert "has been updated" in response["message"] diff --git a/lib/tool_shed/test/functional/test_1430_repair_installed_repository.py b/lib/tool_shed/test/functional/test_1430_repair_installed_repository.py index 7635d884a9c6..5c0829ede090 100644 --- a/lib/tool_shed/test/functional/test_1430_repair_installed_repository.py +++ b/lib/tool_shed/test/functional/test_1430_repair_installed_repository.py @@ -139,8 +139,6 @@ def test_0025_uninstall_filter_repository(self): This is step 2 - Uninstall the filter_1430 repository. """ - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - "filter_1430", common.test_user_1_name - ) + installed_repository = self._get_installed_repository_by_name_owner("filter_1430", common.test_user_1_name) self._uninstall_repository(installed_repository) self._assert_has_no_installed_repos_with_names("filter_1430") diff --git a/lib/tool_shed/test/functional/test_1460_data_managers.py b/lib/tool_shed/test/functional/test_1460_data_managers.py index 09fb0f984228..e26853817aab 100644 --- a/lib/tool_shed/test/functional/test_1460_data_managers.py +++ b/lib/tool_shed/test/functional/test_1460_data_managers.py @@ -82,9 +82,7 @@ def test_0020_install_data_manager_repository(self): def test_0030_verify_data_manager_tool(self): """Verify that the data_manager_1460 repository is installed and Data Manager tool appears in list in Galaxy.""" - repository = self.test_db_util.get_installed_repository_by_name_owner( - data_manager_repository_name, common.test_user_1_name - ) + repository = self._get_installed_repository_by_name_owner(data_manager_repository_name, common.test_user_1_name) strings_displayed = ["status", "jobs", data_manager_name] self.display_installed_jobs_list_page( repository, data_manager_names=data_manager_name, strings_displayed=strings_displayed diff --git a/lib/tool_shed/test/functional/test_1470_updating_installed_repositories.py b/lib/tool_shed/test/functional/test_1470_updating_installed_repositories.py index 1b8750d08b99..2019b0c8f477 100644 --- a/lib/tool_shed/test/functional/test_1470_updating_installed_repositories.py +++ b/lib/tool_shed/test/functional/test_1470_updating_installed_repositories.py @@ -102,9 +102,7 @@ def test_0020_get_repository_updates(self): This is step 3 - In Galaxy, get updates to the repository. """ - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - repository_name, common.test_user_1_name - ) + installed_repository = self._get_installed_repository_by_name_owner(repository_name, common.test_user_1_name) self.update_installed_repository(installed_repository) def test_0025_uninstall_repository(self): @@ -112,9 +110,7 @@ def test_0025_uninstall_repository(self): This is step 4 - In Galaxy, uninstall the repository. 
""" - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - repository_name, common.test_user_1_name - ) + installed_repository = self._get_installed_repository_by_name_owner(repository_name, common.test_user_1_name) self._uninstall_repository(installed_repository) def test_0030_reinstall_repository(self): @@ -122,9 +118,7 @@ def test_0030_reinstall_repository(self): This is step 5 - In Galaxy, reinstall the repository. """ - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - repository_name, common.test_user_1_name - ) + installed_repository = self._get_installed_repository_by_name_owner(repository_name, common.test_user_1_name) self.reinstall_repository_api(installed_repository) def test_0035_verify_absence_of_ghosts(self): @@ -132,9 +126,7 @@ def test_0035_verify_absence_of_ghosts(self): This is step 6 - Make sure step 5 created no white ghosts. """ - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - repository_name, common.test_user_1_name, return_multiple=True - ) + installed_repository = self._get_installed_repositories_by_name_owner(repository_name, common.test_user_1_name) assert ( len(installed_repository) == 1 ), 'Multiple filtering repositories found in the Galaxy database, possibly indicating a "white ghost" scenario.' From ebf8e2b0c317a924b4bfadd818bb3827a3de1d95 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Wed, 19 Oct 2022 09:10:01 -0400 Subject: [PATCH 26/73] Standalone implementation of tool shed client... --- .github/workflows/toolshed.yaml | 5 +- .../repository_dependency_manager.py | 4 +- .../tool_shed/unittest_utils/__init__.py | 21 +- .../tool_shed/util/dependency_display.py | 21 +- lib/galaxy/tool_util/toolbox/base.py | 9 + .../galaxy/controllers/admin_toolshed.py | 15 +- lib/tool_shed/test/base/test_db_util.py | 49 +- lib/tool_shed/test/base/twilltestcase.py | 419 +++++++++++++++--- ..._repository_basic_circular_dependencies.py | 6 +- ...est_1050_circular_dependencies_4_levels.py | 3 +- ...vanced_circular_dependency_installation.py | 21 +- ...est_1090_repository_dependency_handling.py | 4 +- ...ll_repository_with_complex_dependencies.py | 1 - ...80_circular_prior_installation_required.py | 5 +- .../test_1300_reset_all_metadata.py | 13 +- .../functional/test_1460_data_managers.py | 11 +- 16 files changed, 474 insertions(+), 133 deletions(-) diff --git a/.github/workflows/toolshed.yaml b/.github/workflows/toolshed.yaml index 64f148c0f346..e8fa6e852a48 100644 --- a/.github/workflows/toolshed.yaml +++ b/.github/workflows/toolshed.yaml @@ -21,6 +21,7 @@ jobs: strategy: matrix: python-version: ['3.7'] + test-install-client: ['standalone', 'galaxy_api'] services: postgres: image: postgres:13 @@ -54,9 +55,11 @@ jobs: key: gxy-venv-${{ runner.os }}-${{ steps.full-python-version.outputs.version }}-${{ hashFiles('galaxy root/requirements.txt') }}-toolshed - name: Run tests run: './run_tests.sh -toolshed' + env: + TOOL_SHED_TEST_INSTALL_CLIENT: ${{ matrix.test-install-client }} working-directory: 'galaxy root' - uses: actions/upload-artifact@v3 if: failure() with: - name: Toolshed test results (${{ matrix.python-version }}) + name: Toolshed test results (${{ matrix.python-version }}, ${{ matrix.test-install-client }}) path: 'galaxy root/run_toolshed_tests.html' diff --git a/lib/galaxy/tool_shed/galaxy_install/repository_dependencies/repository_dependency_manager.py b/lib/galaxy/tool_shed/galaxy_install/repository_dependencies/repository_dependency_manager.py index 
cc9a99b8ac6c..79f9c88dae46 100644 --- a/lib/galaxy/tool_shed/galaxy_install/repository_dependencies/repository_dependency_manager.py +++ b/lib/galaxy/tool_shed/galaxy_install/repository_dependencies/repository_dependency_manager.py @@ -16,6 +16,7 @@ ) from galaxy.model.base import transaction +from galaxy.tool_shed.galaxy_install import installed_repository_manager from galaxy.tool_shed.galaxy_install.tools import tool_panel_manager from galaxy.tool_shed.util import repository_util from galaxy.tool_shed.util.container_util import get_components_from_key @@ -268,7 +269,8 @@ def create_repository_dependency_objects( log.info( f"Reactivating deactivated tool_shed_repository '{str(repository_db_record.name)}'." ) - self.app.installed_repository_manager.activate_repository(repository_db_record) + irm = installed_repository_manager.InstalledRepositoryManager(self.app) + irm.activate_repository(repository_db_record) # No additional updates to the database record are necessary. can_update_db_record = False elif repository_db_record.status not in [ diff --git a/lib/galaxy/tool_shed/unittest_utils/__init__.py b/lib/galaxy/tool_shed/unittest_utils/__init__.py index 7fb28659c83e..61062d76164c 100644 --- a/lib/galaxy/tool_shed/unittest_utils/__init__.py +++ b/lib/galaxy/tool_shed/unittest_utils/__init__.py @@ -24,7 +24,11 @@ ) from galaxy.tool_shed.util.repository_util import get_installed_repository from galaxy.tool_util.data import ToolDataTableManager -from galaxy.tool_util.toolbox.base import AbstractToolBox +from galaxy.tool_util.loader_directory import looks_like_a_tool +from galaxy.tool_util.toolbox.base import ( + AbstractToolBox, + NullToolTagManager, +) from galaxy.tool_util.toolbox.watcher import ( get_tool_conf_watcher, get_tool_watcher, @@ -66,6 +70,9 @@ class Config: shed_tool_data_table_config: str shed_data_manager_config_file: str + def get(self, key, default): + return getattr(self, key, default) + class TestTool: _macro_paths: List[str] = [] @@ -103,6 +110,12 @@ def _get_tool_shed_repository(self, tool_shed, name, owner, installed_changeset_ from_cache=True, ) + def _looks_like_a_tool(self, path): + return looks_like_a_tool(path, enable_beta_formats=False) + + def tool_tag_manager(self): + return NullToolTagManager() + class Watchers: def __init__(self, app): @@ -202,6 +215,12 @@ def __init__( config_filename=self.config.shed_tool_data_table_config, other_config_dict=self.config, ) + dependency_dir = target_directory / "_dependencies" + dependency_dir.mkdir() + + @property + def tool_dependency_dir(self) -> Optional[str]: + return None def reload_toolbox(self): self._toolbox = TestToolBox( diff --git a/lib/galaxy/tool_shed/util/dependency_display.py b/lib/galaxy/tool_shed/util/dependency_display.py index 99910d03add4..f4c2ef30a60a 100644 --- a/lib/galaxy/tool_shed/util/dependency_display.py +++ b/lib/galaxy/tool_shed/util/dependency_display.py @@ -2,6 +2,7 @@ import os from galaxy import util +from galaxy.tool_shed.galaxy_install.installed_repository_manager import InstalledRepositoryManager from galaxy.tool_shed.util import utility_container_manager from galaxy.util import UNKNOWN from galaxy.util.tool_shed.common_util import parse_repository_dependency_tuple @@ -198,11 +199,12 @@ def populate_containers_dict_from_repository_metadata(self, repository): """ metadata = repository.metadata_ if metadata: + irm = InstalledRepositoryManager(self.app) # Handle repository dependencies. 
( installed_repository_dependencies, missing_repository_dependencies, - ) = self.app.installed_repository_manager.get_installed_and_missing_repository_dependencies(repository) + ) = irm.get_installed_and_missing_repository_dependencies(repository) # Handle the current repository's tool dependencies. repository_tool_dependencies = metadata.get("tool_dependencies", None) # Make sure to display missing tool dependencies as well. @@ -291,3 +293,20 @@ def build_repository_containers( except Exception as e: log.debug(f"Exception in build_repository_containers: {str(e)}") return containers_dict + + +def build_manage_repository_dict(app, status, repository): + dd = DependencyDisplayer(app) + containers_dict = dd.populate_containers_dict_from_repository_metadata( + repository=repository, + ) + management_dict = { + "status": status, + } + missing_repo_dependencies = containers_dict.get("missing_repository_dependencies", None) + if missing_repo_dependencies: + management_dict["missing_repository_dependencies"] = missing_repo_dependencies.to_dict() + repository_dependencies = containers_dict.get("repository_dependencies", None) + if repository_dependencies: + management_dict["repository_dependencies"] = repository_dependencies.to_dict() + return management_dict diff --git a/lib/galaxy/tool_util/toolbox/base.py b/lib/galaxy/tool_util/toolbox/base.py index 39fc18c68f01..912bc1ea35cd 100644 --- a/lib/galaxy/tool_util/toolbox/base.py +++ b/lib/galaxy/tool_util/toolbox/base.py @@ -120,6 +120,15 @@ def handle_tags(self, tool_id, tool_definition_source): """Parse out tags and persist them.""" +class NullToolTagManager(AbstractToolTagManager): + + def reset_tags(self) -> None: + return None + + def handle_tags(self, tool_id, tool_definition_source) -> None: + return None + + class AbstractToolBox(Dictifiable, ManagesIntegratedToolPanelMixin): """ Abstract container for managing a ToolPanel - containing tools and diff --git a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py index b3dbfff241c6..81b6642e4ee4 100644 --- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py +++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py @@ -153,17 +153,4 @@ def _manage_repository_json(self, trans, **kwd): with transaction(trans.install_model.context): trans.install_model.context.commit() message = "The repository information has been updated." 
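+        # The container-dict assembly removed below now lives in
+        # dependency_display.build_manage_repository_dict, where the new
+        # standalone installation client can reuse it outside this controller.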
- dd = dependency_display.DependencyDisplayer(trans.app) - containers_dict = dd.populate_containers_dict_from_repository_metadata( - repository=repository, - ) - management_dict = { - "status": status, - } - missing_repo_dependencies = containers_dict.get("missing_repository_dependencies", None) - if missing_repo_dependencies: - management_dict["missing_repository_dependencies"] = missing_repo_dependencies.to_dict() - repository_dependencies = containers_dict.get("repository_dependencies", None) - if repository_dependencies: - management_dict["repository_dependencies"] = repository_dependencies.to_dict() - return management_dict + return dependency_display.build_manage_repository_dict(trans.app, status, repository) diff --git a/lib/tool_shed/test/base/test_db_util.py b/lib/tool_shed/test/base/test_db_util.py index 0ae2c8e76828..9081a007a097 100644 --- a/lib/tool_shed/test/base/test_db_util.py +++ b/lib/tool_shed/test/base/test_db_util.py @@ -1,5 +1,8 @@ import logging -from typing import Optional +from typing import ( + List, + Optional, +) from sqlalchemy import ( and_, @@ -47,23 +50,21 @@ def get_all_repositories(): return sa_session().query(model.Repository).all() -def get_all_installed_repositories(actually_installed=False): - if actually_installed: - return ( - install_session() - .query(galaxy.model.tool_shed_install.ToolShedRepository) - .filter( - and_( - galaxy.model.tool_shed_install.ToolShedRepository.table.c.deleted == false(), - galaxy.model.tool_shed_install.ToolShedRepository.table.c.uninstalled == false(), - galaxy.model.tool_shed_install.ToolShedRepository.table.c.status - == galaxy.model.tool_shed_install.ToolShedRepository.installation_status.INSTALLED, - ) +def get_all_installed_repositories(session=None) -> List[galaxy.model.tool_shed_install.ToolShedRepository]: + if session is None: + session = install_session() + return list( + session.query(galaxy.model.tool_shed_install.ToolShedRepository) + .filter( + and_( + galaxy.model.tool_shed_install.ToolShedRepository.table.c.deleted == false(), + galaxy.model.tool_shed_install.ToolShedRepository.table.c.uninstalled == false(), + galaxy.model.tool_shed_install.ToolShedRepository.table.c.status + == galaxy.model.tool_shed_install.ToolShedRepository.installation_status.INSTALLED, ) - .all() ) - else: - return install_session().query(galaxy.model.tool_shed_install.ToolShedRepository).all() + .all() + ) def get_galaxy_repository_by_name_owner_changeset_revision(repository_name, owner, changeset_revision): @@ -90,15 +91,13 @@ def get_installed_repository_by_id(repository_id): ) -def get_installed_repository_by_name_owner(repository_name, owner, return_multiple=False): - query = ( - install_session() - .query(galaxy.model.tool_shed_install.ToolShedRepository) - .filter( - and_( - galaxy.model.tool_shed_install.ToolShedRepository.table.c.name == repository_name, - galaxy.model.tool_shed_install.ToolShedRepository.table.c.owner == owner, - ) +def get_installed_repository_by_name_owner(repository_name, owner, return_multiple=False, session=None): + if session is None: + session = install_session() + query = session.query(galaxy.model.tool_shed_install.ToolShedRepository).filter( + and_( + galaxy.model.tool_shed_install.ToolShedRepository.table.c.name == repository_name, + galaxy.model.tool_shed_install.ToolShedRepository.table.c.owner == owner, ) ) if return_multiple: diff --git a/lib/tool_shed/test/base/twilltestcase.py b/lib/tool_shed/test/base/twilltestcase.py index dc3421fc0ea9..3961f1b00ebb 100644 --- 
a/lib/tool_shed/test/base/twilltestcase.py +++ b/lib/tool_shed/test/base/twilltestcase.py @@ -1,3 +1,4 @@ +import abc import logging import os import shutil @@ -6,6 +7,7 @@ import tempfile import time from json import loads +from pathlib import Path from typing import ( Any, Dict, @@ -25,9 +27,25 @@ hg, ui, ) +from sqlalchemy import ( + and_, + false, +) import galaxy.model.tool_shed_install as galaxy_model +from galaxy.schema.schema import CheckForUpdatesResponse from galaxy.security import idencoding +from galaxy.tool_shed.galaxy_install.install_manager import InstallRepositoryManager +from galaxy.tool_shed.galaxy_install.installed_repository_manager import InstalledRepositoryManager +from galaxy.tool_shed.galaxy_install.metadata.installed_repository_metadata_manager import ( + InstalledRepositoryMetadataManager, +) +from galaxy.tool_shed.unittest_utils import ( + StandaloneInstallationTarget, + ToolShedTarget, +) +from galaxy.tool_shed.util.dependency_display import build_manage_repository_dict +from galaxy.tool_shed.util.repository_util import check_for_updates from galaxy.util import ( DEFAULT_SOCKET_TIMEOUT, smart_str, @@ -60,28 +78,34 @@ tc.options["equiv_refresh_interval"] = 0 -class ToolShedInstallationClient: +class ToolShedInstallationClient(metaclass=abc.ABCMeta): + @abc.abstractmethod def check_galaxy_repository_tool_panel_section( self, repository: galaxy_model.ToolShedRepository, expected_tool_panel_section: str ) -> None: - ... + """ """ + @abc.abstractmethod def setup(self) -> None: - ... + """Setup client interaction.""" + @abc.abstractmethod def deactivate_repository(self, installed_repository: galaxy_model.ToolShedRepository) -> None: - ... + """Deactivate the supplied repository.""" + @abc.abstractmethod def display_installed_jobs_list_page( self, installed_repository: galaxy_model.ToolShedRepository, data_manager_names=None, strings_displayed=None ) -> None: - ... + """If available, check data manager jobs for supplied strings.""" + @abc.abstractmethod def installed_repository_extended_info( self, installed_repository: galaxy_model.ToolShedRepository ) -> Dict[str, Any]: - ... + """""" + @abc.abstractmethod def install_repository( self, name: str, @@ -91,42 +115,67 @@ def install_repository( install_repository_dependencies: bool, new_tool_panel_section_label: Optional[str], ) -> None: - ... + """""" + @abc.abstractmethod def reactivate_repository(self, installed_repository: galaxy_model.ToolShedRepository) -> None: - ... + """""" - def reset_metadata_on_selected_installed_repositories(self, repository_ids: List[str]) -> None: - ... + @abc.abstractmethod + def reset_metadata_on_installed_repositories(self, repositories: List[galaxy_model.ToolShedRepository]) -> None: + """""" + @abc.abstractmethod def reset_installed_repository_metadata(self, repository: galaxy_model.ToolShedRepository) -> None: - ... + """""" + @abc.abstractmethod def uninstall_repository(self, installed_repository: galaxy_model.ToolShedRepository) -> None: - ... + """""" + @abc.abstractmethod def update_installed_repository( self, installed_repository: galaxy_model.ToolShedRepository, verify_no_updates: bool = False ) -> Dict[str, Any]: - ... + """""" + + @abc.abstractproperty + def tool_data_path(self) -> str: + """""" + @abc.abstractproperty + def shed_tool_data_table_conf(self) -> str: + """""" + + @abc.abstractmethod def get_tool_names(self) -> List[str]: - ... 
+ """""" + @abc.abstractmethod def get_installed_repository_by_name_owner( self, repository_name: str, repository_owner: str ) -> galaxy_model.ToolShedRepository: - ... + """""" + @abc.abstractmethod def get_installed_repositories_by_name_owner( self, repository_name: str, repository_owner: str ) -> List[galaxy_model.ToolShedRepository]: - ... + """""" + @abc.abstractmethod def get_installed_repository_for( self, owner: Optional[str] = None, name: Optional[str] = None, changeset: Optional[str] = None ) -> Optional[Dict[str, Any]]: - ... + """""" + + @abc.abstractmethod + def get_all_installed_repositories(self) -> List[galaxy_model.ToolShedRepository]: + """""" + + @abc.abstractmethod + def refresh_tool_shed_repository(self, repo: galaxy_model.ToolShedRepository) -> None: + """""" class GalaxyInteractorToolShedInstallationClient(ToolShedInstallationClient): @@ -150,7 +199,7 @@ def check_galaxy_repository_tool_panel_section( galaxy_model.ToolShedRepository.installation_status.UNINSTALLED, galaxy_model.ToolShedRepository.installation_status.DEACTIVATED, ]: - tool_panel_section = self._get_tool_panel_section_from_repository_metadata(metadata) + tool_panel_section = _get_tool_panel_section_from_repository_metadata(metadata) else: tool_panel_section = self._get_tool_panel_section_from_api(metadata) assert ( @@ -231,7 +280,10 @@ def reactivate_repository(self, installed_repository: galaxy_model.ToolShedRepos url = "/admin_toolshed/restore_repository" self._visit_galaxy_url(url, params=params) - def reset_metadata_on_selected_installed_repositories(self, repository_ids: List[str]) -> None: + def reset_metadata_on_installed_repositories(self, repositories: List[galaxy_model.ToolShedRepository]) -> None: + repository_ids = [] + for repository in repositories: + repository_ids.append(self.testcase.security.encode_id(repository.id)) api_key = get_admin_api_key() response = requests.post( f"{self.testcase.galaxy_url}/api/tool_shed_repositories/reset_metadata_on_selected_installed_repositories", @@ -281,6 +333,14 @@ def reset_installed_repository_metadata(self, repository: galaxy_model.ToolShedR ) assert response.status_code != 403, response.content + @property + def tool_data_path(self): + return os.environ.get("GALAXY_TEST_TOOL_DATA_PATH") + + @property + def shed_tool_data_table_conf(self): + return os.environ.get("TOOL_SHED_TEST_TOOL_DATA_TABLE_CONF") + def get_tool_names(self) -> List[str]: response = self.testcase.galaxy_interactor._get("tools?in_panel=false") response.raise_for_status() @@ -304,6 +364,15 @@ def get_installed_repository_for( ) -> Optional[Dict[str, Any]]: return self.testcase.get_installed_repository_for(owner=owner, name=name, changeset=changeset) + def get_all_installed_repositories(self) -> List[galaxy_model.ToolShedRepository]: + repositories = test_db_util.get_all_installed_repositories() + for repository in repositories: + test_db_util.ga_refresh(repository) + return repositories + + def refresh_tool_shed_repository(self, repo: galaxy_model.ToolShedRepository) -> None: + test_db_util.ga_refresh(repo) + def _galaxy_login(self, email="test@bx.psu.edu", password="testuser", username="admin-user"): self._galaxy_logout() self._create_user_in_galaxy(email=email, password=password, username=username) @@ -342,24 +411,7 @@ def _get_tool_panel_section_from_api(self, metadata): tool_panel_section = tool_dict["panel_section_name"] return tool_panel_section - def _get_tool_panel_section_from_repository_metadata(self, metadata): - tool_metadata = metadata["tools"] - tool_guid = 
tool_metadata[0]["guid"] - assert "tool_panel_section" in metadata, f"Tool panel section not found in metadata: {metadata}" - tool_panel_section_metadata = metadata["tool_panel_section"] - # tool_section_dict = dict( tool_config=guids_and_configs[ guid ], - # id=section_id, - # name=section_name, - # version=section_version ) - # This dict is appended to tool_panel_section_metadata[ tool_guid ] - tool_panel_section = tool_panel_section_metadata[tool_guid][0]["name"] - return tool_panel_section - def _wait_for_repository_installation(self, repository_ids): - final_states = [ - galaxy_model.ToolShedRepository.installation_status.ERROR, - galaxy_model.ToolShedRepository.installation_status.INSTALLED, - ] # Wait until all repositories are in a final state before returning. This ensures that subsequent tests # are running against an installed repository, and not one that is still in the process of installing. if repository_ids: @@ -367,18 +419,7 @@ def _wait_for_repository_installation(self, repository_ids): galaxy_repository = test_db_util.get_installed_repository_by_id( self.testcase.security.decode_id(repository_id) ) - timeout_counter = 0 - while galaxy_repository.status not in final_states: - test_db_util.ga_refresh(galaxy_repository) - timeout_counter = timeout_counter + 1 - # This timeout currently defaults to 10 minutes. - if timeout_counter > repository_installation_timeout: - raise AssertionError( - "Repository installation timed out, %d seconds elapsed, repository state is %s." - % (timeout_counter, galaxy_repository.status) - ) - break - time.sleep(1) + _wait_for_installation(galaxy_repository, test_db_util.ga_refresh) def _visit_galaxy_url(self, url, params=None, doseq=False, allowed_codes=None): if allowed_codes is None: @@ -387,10 +428,186 @@ def _visit_galaxy_url(self, url, params=None, doseq=False, allowed_codes=None): self.testcase.visit_url(url, params=params, doseq=doseq, allowed_codes=allowed_codes) +class StandaloneToolShedInstallationClient(ToolShedInstallationClient): + def __init__(self, testcase): + self.testcase = testcase + self.temp_directory = Path(tempfile.mkdtemp(prefix="toolshedtestinstalltarget")) + tool_shed_target = ToolShedTarget( + self.testcase.url, + "Tool Shed for Testing", + ) + self._installation_target = StandaloneInstallationTarget(self.temp_directory, tool_shed_target=tool_shed_target) + + def setup(self) -> None: + pass + + def check_galaxy_repository_tool_panel_section( + self, repository: galaxy_model.ToolShedRepository, expected_tool_panel_section: str + ) -> None: + metadata = repository.metadata_ + assert "tools" in metadata, f"Tools not found in repository metadata: {metadata}" + # TODO: check actual toolbox if tool is already installed... 
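+        # Unlike the Galaxy-backed client, which can query the live tool panel
+        # through the API, the standalone client only has the repository
+        # metadata recorded at install time to consult here.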
+ tool_panel_section = _get_tool_panel_section_from_repository_metadata(metadata) + assert ( + tool_panel_section == expected_tool_panel_section + ), f"Expected to find tool panel section *{expected_tool_panel_section}*, but instead found *{tool_panel_section}*\nMetadata: {metadata}\n" + + def deactivate_repository(self, installed_repository: galaxy_model.ToolShedRepository) -> None: + irm = InstalledRepositoryManager(app=self._installation_target) + errors = irm.uninstall_repository(repository=installed_repository, remove_from_disk=False) + if errors: + raise Exception( + f"Attempting to uninstall tool dependencies for repository named {installed_repository.name} resulted in errors: {errors}" + ) + + def display_installed_jobs_list_page( + self, installed_repository: galaxy_model.ToolShedRepository, data_manager_names=None, strings_displayed=None + ) -> None: + raise NotImplementedError() + + def installed_repository_extended_info( + self, installed_repository: galaxy_model.ToolShedRepository + ) -> Dict[str, Any]: + self._installation_target.install_model.context.refresh(installed_repository) + return build_manage_repository_dict(self._installation_target, "ok", installed_repository) + + def install_repository( + self, + name: str, + owner: str, + changeset_revision: str, + install_tool_dependencies: bool, + install_repository_dependencies: bool, + new_tool_panel_section_label: Optional[str], + ): + tool_shed_url = self.testcase.url + payload = { + "tool_shed_url": tool_shed_url, + "name": name, + "owner": owner, + "changeset_revision": changeset_revision, + "install_tool_dependencies": install_tool_dependencies, + "install_repository_dependencies": install_repository_dependencies, + "install_resolver_dependencies": False, + } + if new_tool_panel_section_label: + payload["new_tool_panel_section_label"] = new_tool_panel_section_label + irm = InstallRepositoryManager(app=self._installation_target) + installed_tool_shed_repositories = irm.install(str(tool_shed_url), name, owner, changeset_revision, payload) + for installed_tool_shed_repository in installed_tool_shed_repositories or []: + _wait_for_installation( + installed_tool_shed_repository, self._installation_target.install_model.context.refresh + ) + + def reactivate_repository(self, installed_repository: galaxy_model.ToolShedRepository) -> None: + irm = InstalledRepositoryManager(app=self._installation_target) + irm.activate_repository(installed_repository) + + def reset_metadata_on_installed_repositories(self, repositories: List[galaxy_model.ToolShedRepository]) -> None: + for repository in repositories: + irmm = InstalledRepositoryMetadataManager(self._installation_target) + irmm.set_repository(repository) + irmm.reset_all_metadata_on_installed_repository() + + def reset_installed_repository_metadata(self, repository: galaxy_model.ToolShedRepository) -> None: + irmm = InstalledRepositoryMetadataManager(self._installation_target) + irmm.set_repository(repository) + irmm.reset_all_metadata_on_installed_repository() + + def uninstall_repository(self, installed_repository: galaxy_model.ToolShedRepository) -> None: + irm = InstalledRepositoryManager(app=self._installation_target) + errors = irm.uninstall_repository(repository=installed_repository, remove_from_disk=True) + if errors: + raise Exception( + f"Attempting to uninstall tool dependencies for repository named {installed_repository.name} resulted in errors: {errors}" + ) + + def update_installed_repository( + self, installed_repository: galaxy_model.ToolShedRepository, 
verify_no_updates: bool = False + ) -> Dict[str, Any]: + message, status = check_for_updates( + self._installation_target.tool_shed_registry, + self._installation_target.install_model.context, + installed_repository.id, + ) + response = CheckForUpdatesResponse(message=message, status=status) + response_dict = response.dict() + if verify_no_updates: + assert "message" in response_dict + message = response_dict["message"] + assert "The status has not changed in the tool shed for repository" in message, str(response_dict) + return response_dict + + def get_installed_repository_by_name_owner( + self, repository_name: str, repository_owner: str + ) -> galaxy_model.ToolShedRepository: + return test_db_util.get_installed_repository_by_name_owner( + repository_name, repository_owner, session=self._installation_target.install_model.context + ) + + def get_installed_repositories_by_name_owner( + self, repository_name: str, repository_owner: str + ) -> List[galaxy_model.ToolShedRepository]: + return test_db_util.get_installed_repository_by_name_owner( + repository_name, + repository_owner, + return_multiple=True, + session=self._installation_target.install_model.context, + ) + + def get_installed_repository_for( + self, owner: Optional[str] = None, name: Optional[str] = None, changeset: Optional[str] = None + ) -> Optional[Dict[str, Any]]: + clause_list = [] + if name is not None: + clause_list.append(galaxy_model.ToolShedRepository.table.c.name == name) + if owner is not None: + clause_list.append(galaxy_model.ToolShedRepository.table.c.owner == owner) + if changeset is not None: + clause_list.append(galaxy_model.ToolShedRepository.table.c.changeset_revision == changeset) + clause_list.append(galaxy_model.ToolShedRepository.table.c.deleted == false()) + clause_list.append(galaxy_model.ToolShedRepository.table.c.uninstalled == false()) + + query = self._installation_target.install_model.context.query(galaxy_model.ToolShedRepository) + if len(clause_list) > 0: + query = query.filter(and_(*clause_list)) + repository = query.one_or_none() + if repository: + return repository.to_dict() + else: + return None + + def get_all_installed_repositories(self) -> List[galaxy_model.ToolShedRepository]: + repositories = test_db_util.get_all_installed_repositories( + session=self._installation_target.install_model.context + ) + for repository in repositories: + self._installation_target.install_model.context.refresh(repository) + return repositories + + def refresh_tool_shed_repository(self, repo: galaxy_model.ToolShedRepository) -> None: + self._installation_target.install_model.context.refresh(repo) + + @property + def shed_tool_data_table_conf(self): + return self._installation_target.config.shed_tool_data_table_config + + @property + def tool_data_path(self): + return self._installation_target.config.tool_data_path + + def get_tool_names(self) -> List[str]: + tool_names = [] + for _, tool in self._installation_target.toolbox.tools(): + tool_names.append(tool.name) + return tool_names + + class ShedTwillTestCase(ShedApiTestCase): """Class of FunctionalTestCase geared toward HTML interactions using the Twill library.""" requires_galaxy: bool = False + _installation_client = None def setUp(self): super().setUp() @@ -401,14 +618,22 @@ def setUp(self): self.hgweb_config_manager = hgweb_config.hgweb_config_manager self.hgweb_config_manager.hgweb_config_dir = self.hgweb_config_dir self.tool_shed_test_tmp_dir = os.environ.get("TOOL_SHED_TEST_TMP_DIR", None) - self.shed_tool_data_table_conf = 
os.environ.get("TOOL_SHED_TEST_TOOL_DATA_TABLE_CONF") self.file_dir = os.environ.get("TOOL_SHED_TEST_FILE_DIR", None) - self.tool_data_path = os.environ.get("GALAXY_TEST_TOOL_DATA_PATH") self.shed_tool_conf = os.environ.get("GALAXY_TEST_SHED_TOOL_CONF") self.test_db_util = test_db_util - self._installation_client = GalaxyInteractorToolShedInstallationClient(self) - if self.requires_galaxy: - self._installation_client.setup() + if os.environ.get("TOOL_SHED_TEST_INSTALL_CLIENT") == "standalone": + # TODO: once nose is out of the way - try to get away without + # instantiating the unused Galaxy server here. + installation_client_class = StandaloneToolShedInstallationClient + full_stack_galaxy = False + else: + installation_client_class = GalaxyInteractorToolShedInstallationClient + full_stack_galaxy = True + self.full_stack_galaxy = full_stack_galaxy + if self.requires_galaxy and (self.__class__._installation_client is None): + self.__class__._installation_client = installation_client_class(self) + self.__class__._installation_client.setup() + self._installation_client = self.__class__._installation_client def check_for_strings(self, strings_displayed=None, strings_not_displayed=None): strings_displayed = strings_displayed or [] @@ -640,6 +865,7 @@ def check_for_valid_tools(self, repository, strings_displayed=None, strings_not_ def check_galaxy_repository_db_status(self, repository_name, owner, expected_status): installed_repository = self._get_installed_repository_by_name_owner(repository_name, owner) + self._refresh_tool_shed_repository(installed_repository) assert ( installed_repository.status == expected_status ), f"Status in database is {installed_repository.status}, expected {expected_status}" @@ -749,6 +975,7 @@ def check_string_count_in_page(self, pattern, min_count, max_count=None): def check_galaxy_repository_tool_panel_section( self, repository: galaxy_model.ToolShedRepository, expected_tool_panel_section: str ) -> None: + assert self._installation_client self._installation_client.check_galaxy_repository_tool_panel_section(repository, expected_tool_panel_section) def clone_repository(self, repository: Repository, destination_path: str) -> None: @@ -830,6 +1057,7 @@ def create_repository_dependency( ) def deactivate_repository(self, installed_repository: galaxy_model.ToolShedRepository) -> None: + assert self._installation_client self._installation_client.deactivate_repository(installed_repository) def delete_files_from_repository(self, repository: Repository, filenames: List[str]): @@ -868,11 +1096,13 @@ def delete_repository(self, repository: Repository) -> None: self.check_for_strings(strings_displayed, strings_not_displayed) def display_installed_jobs_list_page(self, installed_repository, data_manager_names=None, strings_displayed=None): + assert self._installation_client self._installation_client.display_installed_jobs_list_page( installed_repository, data_manager_names, strings_displayed ) def display_installed_repository_manage_json(self, installed_repository): + assert self._installation_client return self._installation_client.installed_repository_extended_info(installed_repository) def display_manage_repository_page( @@ -1060,6 +1290,10 @@ def generate_temp_path(self, test_script_path, additional_paths=None): os.makedirs(temp_path) return temp_path + def get_all_installed_repositories(self) -> List[galaxy_model.ToolShedRepository]: + assert self._installation_client + return self._installation_client.get_all_installed_repositories() + def get_filename(self, filename, 
filepath=None): if filepath is not None: return os.path.abspath(os.path.join(filepath, filename)) @@ -1269,6 +1503,7 @@ def _install_repository( # repository_id = repository.id if changeset_revision is None: changeset_revision = self.get_repository_tip(repository) + assert self._installation_client self._installation_client.install_repository( name, owner, @@ -1408,8 +1643,9 @@ def reset_metadata_on_selected_repositories(self, repository_ids): kwd = dict(repository_ids=repository_ids) self.submit_form(button="reset_metadata_on_selected_repositories_button", **kwd) - def reset_metadata_on_selected_installed_repositories(self, repository_ids): - self._installation_client.reset_metadata_on_selected_installed_repositories(repository_ids) + def reset_metadata_on_installed_repositories(self, repositories): + assert self._installation_client + self._installation_client.reset_metadata_on_installed_repositories(repositories) def reset_repository_metadata(self, repository): params = {"id": repository.id} @@ -1500,11 +1736,13 @@ def undelete_repository(self, repository: Repository) -> None: self.check_for_strings(strings_displayed, strings_not_displayed) def _uninstall_repository(self, installed_repository: galaxy_model.ToolShedRepository) -> None: + assert self._installation_client self._installation_client.uninstall_repository(installed_repository) def update_installed_repository( self, installed_repository: galaxy_model.ToolShedRepository, verify_no_updates: bool = False ) -> Dict[str, Any]: + assert self._installation_client return self._installation_client.update_installed_repository(installed_repository, verify_no_updates=False) def upload_file( @@ -1610,7 +1848,9 @@ def verify_installed_repositories(self, installed_repositories=None, uninstalled def verify_installed_repository_metadata_unchanged(self, name, owner): installed_repository = self._get_installed_repository_by_name_owner(name, owner) + assert installed_repository metadata = installed_repository.metadata_ + assert self._installation_client self._installation_client.reset_installed_repository_metadata(installed_repository) new_metadata = installed_repository.metadata_ assert metadata == new_metadata, f"Metadata for installed repository {name} differs after metadata reset." @@ -1620,9 +1860,27 @@ def verify_installed_repository_no_tool_panel_section(self, repository): metadata = repository.metadata_ assert "tool_panel_section" not in metadata, f"Tool panel section incorrectly found in metadata: {metadata}" + @property + def shed_tool_data_table_conf(self): + return self._installation_client.shed_tool_data_table_conf + + @property + def tool_data_path(self): + return self._installation_client.tool_data_path + + def _refresh_tool_shed_repository(self, repo: galaxy_model.ToolShedRepository) -> None: + assert self._installation_client + self._installation_client.refresh_tool_shed_repository(repo) + def verify_installed_repository_data_table_entries(self, required_data_table_entries): # The value of the received required_data_table_entries will be something like: [ 'sam_fa_indexes' ] - data_tables, error_message = xml_util.parse_xml(self.shed_tool_data_table_conf) + shed_tool_data_table_conf = self.shed_tool_data_table_conf + data_tables, error_message = xml_util.parse_xml(shed_tool_data_table_conf) + with open(shed_tool_data_table_conf) as f: + shed_tool_data_table_conf_contents = f.read() + assert ( + not error_message + ), f"Failed to parse {shed_tool_data_table_conf} properly. 
File contents [{shed_tool_data_table_conf_contents}]" found = False # With the tool shed, the "path" attribute that is hard-coded into the tool_data_tble_conf.xml # file is ignored. This is because the tool shed requires the directory location to which this @@ -1678,21 +1936,32 @@ def verify_installed_repository_data_table_entries(self, required_data_table_ent break # We better have an entry like: in our parsed data_tables # or we know that the repository was not correctly installed! - assert found, f"No entry for {required_data_table_entry} in {self.shed_tool_data_table_conf}." + if not found: + if required_data_table_entry is None: + raise AssertionError( + f"No tables found in {shed_tool_data_table_conf}. File contents {shed_tool_data_table_conf_contents}" + ) + else: + raise AssertionError( + f"No entry for {required_data_table_entry} in {shed_tool_data_table_conf}. File contents {shed_tool_data_table_conf_contents}" + ) def _get_installed_repository_by_name_owner( self, repository_name: str, repository_owner: str ) -> galaxy_model.ToolShedRepository: + assert self._installation_client return self._installation_client.get_installed_repository_by_name_owner(repository_name, repository_owner) def _get_installed_repositories_by_name_owner( self, repository_name: str, repository_owner: str ) -> List[galaxy_model.ToolShedRepository]: + assert self._installation_client return self._installation_client.get_installed_repositories_by_name_owner(repository_name, repository_owner) def _get_installed_repository_for( self, owner: Optional[str] = None, name: Optional[str] = None, changeset: Optional[str] = None ): + assert self._installation_client return self._installation_client.get_installed_repository_for(owner=owner, name=name, changeset=changeset) def _assert_has_installed_repos_with_names(self, *names): @@ -1731,8 +2000,9 @@ def _assert_has_installed_repository_dependency( changeset: Optional[str] = None, ) -> None: json = self.display_installed_repository_manage_json(installed_repository) - assert "repository_dependencies" in json, ( - "No repository dependencies were defined in %s." % installed_repository.name + assert "repository_dependencies" in json, "No repository dependencies were defined in %s. manage json is %s" % ( + installed_repository.name, + json, ) repository_dependencies = json["repository_dependencies"] found = False @@ -1765,6 +2035,7 @@ def _assert_is_not_missing_dependency( def _assert_has_valid_tool_with_name(self, tool_name: str) -> None: def assert_has(): + assert self._installation_client tool_names = self._installation_client.get_tool_names() assert tool_name in tool_names @@ -1808,3 +2079,33 @@ def verify_unchanged_repository_metadata(self, repository: Repository): # Python's dict comparison recursively compares sorted key => value pairs and returns true if any key or value differs, # or if the number of keys differs. assert old_metadata == new_metadata, f"Metadata changed after reset on repository {repository.name}." + + +def _wait_for_installation(repository: galaxy_model.ToolShedRepository, refresh): + final_states = [ + galaxy_model.ToolShedRepository.installation_status.ERROR, + galaxy_model.ToolShedRepository.installation_status.INSTALLED, + ] + # Wait until all repositories are in a final state before returning. This ensures that subsequent tests + # are running against an installed repository, and not one that is still in the process of installing. 
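+    # The refresh callable is injected because the two installation clients
+    # poll different sessions: test_db_util.ga_refresh for the Galaxy-backed
+    # client and the standalone target's install model context otherwise.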
+ timeout_counter = 0 + while repository.status not in final_states: + refresh(repository) + timeout_counter = timeout_counter + 1 + # This timeout currently defaults to 10 minutes. + if timeout_counter > repository_installation_timeout: + raise AssertionError( + "Repository installation timed out, %d seconds elapsed, repository state is %s." + % (timeout_counter, repository.status) + ) + break + time.sleep(1) + + +def _get_tool_panel_section_from_repository_metadata(metadata): + tool_metadata = metadata["tools"] + tool_guid = tool_metadata[0]["guid"] + assert "tool_panel_section" in metadata, f"Tool panel section not found in metadata: {metadata}" + tool_panel_section_metadata = metadata["tool_panel_section"] + tool_panel_section = tool_panel_section_metadata[tool_guid][0]["name"] + return tool_panel_section diff --git a/lib/tool_shed/test/functional/test_1040_install_repository_basic_circular_dependencies.py b/lib/tool_shed/test/functional/test_1040_install_repository_basic_circular_dependencies.py index 5dfdcf31fd6c..c71ad0c31ca0 100644 --- a/lib/tool_shed/test/functional/test_1040_install_repository_basic_circular_dependencies.py +++ b/lib/tool_shed/test/functional/test_1040_install_repository_basic_circular_dependencies.py @@ -147,7 +147,7 @@ def test_0030_uninstall_filtering_repository(self): common.test_user_1, filtering_repository_name, installed_filtering_repository.installed_changeset_revision ) self.deactivate_repository(installed_filtering_repository) - self.test_db_util.ga_refresh(installed_filtering_repository) + self._refresh_tool_shed_repository(installed_filtering_repository) self._assert_has_missing_dependency(installed_freebayes_repository, filtering_repository_name) self.check_galaxy_repository_db_status(filtering_repository_name, common.test_user_1_name, "Deactivated") @@ -185,7 +185,7 @@ def test_0040_uninstall_freebayes_repository(self): assert not self._get_installed_repository_for( common.test_user_1, freebayes_repository_name, installed_freebayes_repository.installed_changeset_revision ) - self.test_db_util.ga_refresh(installed_freebayes_repository) + self._refresh_tool_shed_repository(installed_filtering_repository) self._assert_has_missing_dependency(installed_filtering_repository, freebayes_repository_name) self.check_galaxy_repository_db_status("freebayes_0040", "user1", "Deactivated") @@ -207,6 +207,6 @@ def test_0045_deactivate_filtering_repository(self): assert not self._get_installed_repository_for( common.test_user_1, filtering_repository_name, installed_filtering_repository.installed_changeset_revision ) - self.test_db_util.ga_refresh(installed_filtering_repository) + self._refresh_tool_shed_repository(installed_freebayes_repository) self._assert_has_missing_dependency(installed_freebayes_repository, filtering_repository_name) self.check_galaxy_repository_db_status(filtering_repository_name, common.test_user_1_name, "Deactivated") diff --git a/lib/tool_shed/test/functional/test_1050_circular_dependencies_4_levels.py b/lib/tool_shed/test/functional/test_1050_circular_dependencies_4_levels.py index 90fb8db3e309..713e7f7e84be 100644 --- a/lib/tool_shed/test/functional/test_1050_circular_dependencies_4_levels.py +++ b/lib/tool_shed/test/functional/test_1050_circular_dependencies_4_levels.py @@ -389,7 +389,6 @@ def test_0065_deactivate_bismark_repository(self): (column_repository_name, common.test_user_1_name), (emboss_repository_name, common.test_user_1_name), (convert_repository_name, common.test_user_1_name), - (bismark_repository_name, 
common.test_user_1_name), ] strings_displayed = ["emboss_0050", "column_maker_0050", "convert_chars_0050"] strings_not_displayed = ["bismark", "filtering_0050", "freebayes_0050"] @@ -402,7 +401,7 @@ def test_0070_uninstall_emboss_repository(self): repository = self._get_installed_repository_by_name_owner(emboss_repository_name, common.test_user_1_name) self._uninstall_repository(repository) self._assert_has_no_installed_repos_with_names(repository.name) - self.test_db_util.ga_refresh(repository) + self._refresh_tool_shed_repository(repository) self.check_galaxy_repository_tool_panel_section(repository, "emboss_5_0050") # Now we have bismark, column_maker, and convert_chars installed, filtering and freebayes never installed, # and emboss uninstalled. diff --git a/lib/tool_shed/test/functional/test_1080_advanced_circular_dependency_installation.py b/lib/tool_shed/test/functional/test_1080_advanced_circular_dependency_installation.py index dd31fb94f8c6..f9352bcb05f3 100644 --- a/lib/tool_shed/test/functional/test_1080_advanced_circular_dependency_installation.py +++ b/lib/tool_shed/test/functional/test_1080_advanced_circular_dependency_installation.py @@ -135,15 +135,18 @@ def test_0020_install_convert_repository(self): column_repository_name, common.test_user_1_name ) assert self._get_installed_repository_for( - common.test_user_1, "convert_chars_0080", installed_convert_repository.installed_changeset_revision - ) - self._assert_has_installed_repository_dependency( - installed_convert_repository, "column_maker_0080", installed_column_repository.installed_changeset_revision - ) - # installed_convert_repository has required_repositories and the following string - # is included when not installing via the API. This distrubs me but we've not installed - # not from the API for a long time so I'm just dropping the check. -John - # "Missing repository dependencies", + common.test_user_1, convert_repository_name, installed_convert_repository.installed_changeset_revision + ) + if self.full_stack_galaxy: + # This branch has been broken since we switched from mako to API for installing... + self._assert_has_installed_repository_dependency( + installed_convert_repository, + column_repository_name, + installed_column_repository.installed_changeset_revision, + ) + else: + # Previous mako had some string checks and such equivalent to this. 
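+            # Without the full Galaxy stack the column_maker dependency is
+            # only recorded as missing, so that is what is asserted here.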
+ self._assert_has_missing_dependency(installed_convert_repository, column_repository_name) def test_0025_install_column_repository(self): """Install column maker with repository dependencies into column_maker tool panel section.""" diff --git a/lib/tool_shed/test/functional/test_1090_repository_dependency_handling.py b/lib/tool_shed/test/functional/test_1090_repository_dependency_handling.py index 02a80ad62895..0a249455275c 100644 --- a/lib/tool_shed/test/functional/test_1090_repository_dependency_handling.py +++ b/lib/tool_shed/test/functional/test_1090_repository_dependency_handling.py @@ -115,6 +115,7 @@ def test_0020_install_repositories(self): installed_convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) + self._refresh_tool_shed_repository(installed_convert_repository) self._assert_has_installed_repos_with_names("convert_chars_1085", "column_maker_1085") self._assert_is_not_missing_dependency(installed_convert_repository, "column_maker_1085") @@ -124,7 +125,6 @@ def test_0025_uninstall_column_repository(self): column_repository_name, common.test_user_1_name ) self._uninstall_repository(installed_column_repository) - self.test_db_util.ga_refresh(installed_column_repository) self.check_galaxy_repository_tool_panel_section(installed_column_repository, "column_maker") def test_0030_uninstall_convert_repository(self): @@ -132,7 +132,7 @@ def test_0030_uninstall_convert_repository(self): convert_repository_name, common.test_user_1_name ) self._uninstall_repository(installed_convert_repository) - self.test_db_util.ga_refresh(installed_convert_repository) + self._refresh_tool_shed_repository(installed_convert_repository) self.check_galaxy_repository_tool_panel_section(installed_convert_repository, "column_maker") def test_0035_reinstall_column_repository(self): diff --git a/lib/tool_shed/test/functional/test_1120_install_repository_with_complex_dependencies.py b/lib/tool_shed/test/functional/test_1120_install_repository_with_complex_dependencies.py index 8e41e0c5b20b..b3fb64a7d6d7 100644 --- a/lib/tool_shed/test/functional/test_1120_install_repository_with_complex_dependencies.py +++ b/lib/tool_shed/test/functional/test_1120_install_repository_with_complex_dependencies.py @@ -263,7 +263,6 @@ def test_0045_install_base_repository(self): bwa_base_repository_name, common.test_user_1_name, category_name, - install_tool_dependencies=True, preview_strings_displayed=preview_strings_displayed, ) diff --git a/lib/tool_shed/test/functional/test_1180_circular_prior_installation_required.py b/lib/tool_shed/test/functional/test_1180_circular_prior_installation_required.py index 5a19560911ff..1b1b29ddf974 100644 --- a/lib/tool_shed/test/functional/test_1180_circular_prior_installation_required.py +++ b/lib/tool_shed/test/functional/test_1180_circular_prior_installation_required.py @@ -283,8 +283,9 @@ def test_0050_verify_reinstallation_order(self): convert_repository_name, common.test_user_1_name ) # Filtering was selected for reinstallation, so convert chars and column maker should have been installed first. 
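+        # test_db_util's install session is only meaningful when the full
+        # Galaxy stack performed the installation, hence the guard added below.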
- for repo in [convert_repository, column_repository, filter_repository]: - self.test_db_util.install_session().refresh(repo) + if self.full_stack_galaxy: + for repo in [convert_repository, column_repository, filter_repository]: + self.test_db_util.install_session().refresh(repo) assert ( filter_repository.update_time > convert_repository.update_time ), "Prior installed convert_chars_0160 shows a later update time than filtering_0160" diff --git a/lib/tool_shed/test/functional/test_1300_reset_all_metadata.py b/lib/tool_shed/test/functional/test_1300_reset_all_metadata.py index 26f7db2239fd..eda3d6dc693b 100644 --- a/lib/tool_shed/test/functional/test_1300_reset_all_metadata.py +++ b/lib/tool_shed/test/functional/test_1300_reset_all_metadata.py @@ -526,14 +526,13 @@ def test_9900_install_all_missing_repositories(self): def test_9905_reset_metadata_on_all_repositories(self): """Reset metadata on all repositories, then verify that it has not changed.""" - repository_metadata = dict() - repositories = self.test_db_util.get_all_installed_repositories(actually_installed=True) + repositories = self.get_all_installed_repositories() + repository_metadata = {} for repository in repositories: - repository_metadata[self.security.encode_id(repository.id)] = repository.metadata_ - self.reset_metadata_on_selected_installed_repositories(list(repository_metadata.keys())) - for repository in repositories: - self.test_db_util.ga_refresh(repository) - old_metadata = repository_metadata[self.security.encode_id(repository.id)] + repository_metadata[repository.id] = repository.metadata_ + self.reset_metadata_on_installed_repositories(repositories) + for repository in self.get_all_installed_repositories(): + old_metadata = repository_metadata[repository.id] # When a repository with tools to be displayed in a tool panel section is deactivated and reinstalled, # the tool panel section remains in the repository metadata. However, when the repository's metadata # is subsequently reset, the tool panel section is removed from the repository metadata. 
While this
diff --git a/lib/tool_shed/test/functional/test_1460_data_managers.py b/lib/tool_shed/test/functional/test_1460_data_managers.py
index e26853817aab..7269e72fd12d 100644
--- a/lib/tool_shed/test/functional/test_1460_data_managers.py
+++ b/lib/tool_shed/test/functional/test_1460_data_managers.py
@@ -77,16 +77,17 @@ def test_0020_install_data_manager_repository(self):
             data_manager_repository_name,
             common.test_user_1_name,
             category_name,
-            install_tool_dependencies=True,
+            install_tool_dependencies=False,
         )
 
     def test_0030_verify_data_manager_tool(self):
         """Verify that the data_manager_1460 repository is installed and Data Manager tool appears in list in Galaxy."""
         repository = self._get_installed_repository_by_name_owner(data_manager_repository_name, common.test_user_1_name)
-        strings_displayed = ["status", "jobs", data_manager_name]
-        self.display_installed_jobs_list_page(
-            repository, data_manager_names=data_manager_name, strings_displayed=strings_displayed
-        )
+        if self.full_stack_galaxy:
+            strings_displayed = ["status", "jobs", data_manager_name]
+            self.display_installed_jobs_list_page(
+                repository, data_manager_names=data_manager_name, strings_displayed=strings_displayed
+            )
 
     def test_0040_verify_data_manager_data_table(self):
         """Verify that the installed repository populated shed_tool_data_table.xml and the sample files."""

From 59cff45fb132829252be8fdf4a7f710339ba395f Mon Sep 17 00:00:00 2001
From: John Chilton
Date: Thu, 23 Feb 2023 11:19:38 -0500
Subject: [PATCH 27/73] Decompose tool shed + schema package.

This commit creates two new packages - below is a description of each
package and why it is being created.

galaxy-schema:

This package contains the pydantic models that power the API. The
purpose of packaging them is to ensure they can be reused by clients
with minimal external dependencies. Reusing the schema in client code
would immediately provide documentation, validation, and static type
checking for Python clients using the Galaxy API.

galaxy-tool-shed:

This package contains the tool shed server code. Spinning the tool shed
out while retaining real Galaxy dependencies and real test coverage has
been a long-term project; more discussion can be found as part of
https://github.com/galaxyproject/galaxy/pull/8830.
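As a rough sketch of the kind of reuse galaxy-schema enables (hypothetical
client code, not part of this commit - the URL and repository id are
placeholders, and the endpoint simply mirrors the check-for-updates call
used elsewhere in this series), the packaged pydantic models give API
consumers runtime validation and typed attributes essentially for free:

    import requests

    from galaxy.schema.schema import CheckForUpdatesResponse

    raw = requests.get(
        "https://galaxy.example.org/api/tool_shed_repositories/check_for_updates",
        params={"id": "0123456789abcdef"},  # placeholder encoded repository id
        timeout=30,
    ).json()
    # pydantic validates the payload at runtime; mypy sees typed attributes.
    response = CheckForUpdatesResponse(**raw)
    print(response.status, response.message)

Nothing in the sketch imports the Galaxy server - galaxy-schema plus its
galaxy-util dependency is enough, which is the point of the decomposition.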
--- lib/galaxy/schema/fields.py | 6 +- lib/galaxy/tool_shed/util/repository_util.py | 6 +- lib/tool_shed/py.typed | 0 lib/tool_shed_client/py.typed | 0 packages/data/galaxy/schema | 1 - packages/data/setup.cfg | 1 + packages/packages_by_dep_dag.txt | 2 + packages/schema/HISTORY.rst | 10 ++ packages/schema/LICENSE | 1 + packages/schema/MANIFEST.in | 1 + packages/schema/Makefile | 1 + packages/schema/README.rst | 14 ++ packages/schema/dev-requirements.txt | 1 + packages/schema/galaxy/__init__.py | 1 + packages/schema/galaxy/py.typed | 0 packages/schema/galaxy/schema | 1 + packages/schema/mypy.ini | 1 + packages/schema/pyproject.toml | 1 + packages/schema/scripts | 1 + packages/schema/setup.cfg | 41 +++++ packages/schema/test-requirements.txt | 1 + packages/schema/tests/__init__.py | 0 packages/schema/tests/schema | 1 + packages/tool_shed/HISTORY.rst | 10 ++ packages/tool_shed/LICENSE | 1 + packages/tool_shed/MANIFEST.in | 1 + packages/tool_shed/Makefile | 1 + packages/tool_shed/README.rst | 14 ++ packages/tool_shed/dev-requirements.txt | 1 + packages/tool_shed/mypy.ini | 1 + packages/tool_shed/pyproject.toml | 1 + packages/tool_shed/scripts | 1 + packages/tool_shed/setup.cfg | 40 +++++ packages/tool_shed/test-requirements.txt | 1 + packages/tool_shed/tests/__init__.py | 0 packages/tool_shed/tests/tool_shed | 1 + packages/{web_apps => tool_shed}/tool_shed | 0 test/unit/schema/__init__.py | 0 test/unit/{data => schema}/test_schema.py | 0 .../test_installed_repository_manager.py | 141 ------------------ .../unit/{shed_unit => tool_shed}/__init__.py | 0 test/unit/tool_shed/model/__init__.py | 0 .../{shed_unit => tool_shed}/test_dbscript.py | 0 .../{shed_unit => tool_shed}/test_hg_util.py | 0 .../test_shed_index.py | 0 .../test_tool_panel_manager.py | 8 +- 46 files changed, 162 insertions(+), 152 deletions(-) create mode 100644 lib/tool_shed/py.typed create mode 100644 lib/tool_shed_client/py.typed delete mode 120000 packages/data/galaxy/schema create mode 100644 packages/schema/HISTORY.rst create mode 120000 packages/schema/LICENSE create mode 100644 packages/schema/MANIFEST.in create mode 120000 packages/schema/Makefile create mode 100644 packages/schema/README.rst create mode 120000 packages/schema/dev-requirements.txt create mode 100644 packages/schema/galaxy/__init__.py create mode 100644 packages/schema/galaxy/py.typed create mode 120000 packages/schema/galaxy/schema create mode 120000 packages/schema/mypy.ini create mode 120000 packages/schema/pyproject.toml create mode 120000 packages/schema/scripts create mode 100644 packages/schema/setup.cfg create mode 100644 packages/schema/test-requirements.txt create mode 100644 packages/schema/tests/__init__.py create mode 120000 packages/schema/tests/schema create mode 100644 packages/tool_shed/HISTORY.rst create mode 120000 packages/tool_shed/LICENSE create mode 100644 packages/tool_shed/MANIFEST.in create mode 120000 packages/tool_shed/Makefile create mode 100644 packages/tool_shed/README.rst create mode 120000 packages/tool_shed/dev-requirements.txt create mode 120000 packages/tool_shed/mypy.ini create mode 120000 packages/tool_shed/pyproject.toml create mode 120000 packages/tool_shed/scripts create mode 100644 packages/tool_shed/setup.cfg create mode 100644 packages/tool_shed/test-requirements.txt create mode 100644 packages/tool_shed/tests/__init__.py create mode 120000 packages/tool_shed/tests/tool_shed rename packages/{web_apps => tool_shed}/tool_shed (100%) create mode 100644 test/unit/schema/__init__.py rename test/unit/{data => 
schema}/test_schema.py (100%) delete mode 100644 test/unit/shed_unit/test_installed_repository_manager.py rename test/unit/{shed_unit => tool_shed}/__init__.py (100%) create mode 100644 test/unit/tool_shed/model/__init__.py rename test/unit/{shed_unit => tool_shed}/test_dbscript.py (100%) rename test/unit/{shed_unit => tool_shed}/test_hg_util.py (100%) rename test/unit/{shed_unit => tool_shed}/test_shed_index.py (100%) rename test/unit/{shed_unit => tool_shed}/test_tool_panel_manager.py (99%) diff --git a/lib/galaxy/schema/fields.py b/lib/galaxy/schema/fields.py index 40eec4f2b12f..687785e6eff8 100644 --- a/lib/galaxy/schema/fields.py +++ b/lib/galaxy/schema/fields.py @@ -3,7 +3,11 @@ from pydantic import Field from typing_extensions import get_args -from galaxy.security.idencoding import IdEncodingHelper +try: + from galaxy.security.idencoding import IdEncodingHelper +except ImportError: + IdEncodingHelper = object # type: ignore[assignment,misc] + ENCODED_DATABASE_ID_PATTERN = re.compile("f?[0-9a-f]+") ENCODED_ID_LENGTH_MULTIPLE = 16 diff --git a/lib/galaxy/tool_shed/util/repository_util.py b/lib/galaxy/tool_shed/util/repository_util.py index d13e1b3d2740..11e9a675039d 100644 --- a/lib/galaxy/tool_shed/util/repository_util.py +++ b/lib/galaxy/tool_shed/util/repository_util.py @@ -21,10 +21,7 @@ ) from sqlalchemy.orm import joinedload -from galaxy import ( - util, - web, -) +from galaxy import util from galaxy.model.base import transaction from galaxy.model.scoped_session import install_model_scoped_session from galaxy.model.tool_shed_install import ToolShedRepository @@ -726,7 +723,6 @@ def repository_was_previously_installed(app, tool_shed_url, repository_name, rep # Get all previous changeset revisions from the tool shed for the repository back to, but excluding, # the previous valid changeset revision to see if it was previously installed using one of them. params = dict( - galaxy_url=web.url_for("/", qualified=True), name=repository_name, owner=repository_owner, changeset_revision=changeset_revision, diff --git a/lib/tool_shed/py.typed b/lib/tool_shed/py.typed new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/lib/tool_shed_client/py.typed b/lib/tool_shed_client/py.typed new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/data/galaxy/schema b/packages/data/galaxy/schema deleted file mode 120000 index 14df7cabc56f..000000000000 --- a/packages/data/galaxy/schema +++ /dev/null @@ -1 +0,0 @@ -../../../lib/galaxy/schema \ No newline at end of file diff --git a/packages/data/setup.cfg b/packages/data/setup.cfg index a0e4228e33f8..02f74386b9d5 100644 --- a/packages/data/setup.cfg +++ b/packages/data/setup.cfg @@ -36,6 +36,7 @@ install_requires = galaxy-objectstore galaxy-util[template] alembic + alembic-utils bdbag bx-python dnspython diff --git a/packages/packages_by_dep_dag.txt b/packages/packages_by_dep_dag.txt index 2975c52c5150..3d9bc1f307aa 100644 --- a/packages/packages_by_dep_dag.txt +++ b/packages/packages_by_dep_dag.txt @@ -1,4 +1,5 @@ util +schema config files job_metrics @@ -17,3 +18,4 @@ web_apps test_base test_driver test_api +tool_shed \ No newline at end of file diff --git a/packages/schema/HISTORY.rst b/packages/schema/HISTORY.rst new file mode 100644 index 000000000000..e947a90a461d --- /dev/null +++ b/packages/schema/HISTORY.rst @@ -0,0 +1,10 @@ +History +------- + +.. to_doc + +--------------------- +21.1.0.dev0 +--------------------- + +* First release. 
diff --git a/packages/schema/LICENSE b/packages/schema/LICENSE new file mode 120000 index 000000000000..1ef648f64b34 --- /dev/null +++ b/packages/schema/LICENSE @@ -0,0 +1 @@ +../../LICENSE.txt \ No newline at end of file diff --git a/packages/schema/MANIFEST.in b/packages/schema/MANIFEST.in new file mode 100644 index 000000000000..12302eb8dff0 --- /dev/null +++ b/packages/schema/MANIFEST.in @@ -0,0 +1 @@ +include *.rst *.txt LICENSE */py.typed diff --git a/packages/schema/Makefile b/packages/schema/Makefile new file mode 120000 index 000000000000..37af8bae5baa --- /dev/null +++ b/packages/schema/Makefile @@ -0,0 +1 @@ +../package.Makefile \ No newline at end of file diff --git a/packages/schema/README.rst b/packages/schema/README.rst new file mode 100644 index 000000000000..aed8193c9a85 --- /dev/null +++ b/packages/schema/README.rst @@ -0,0 +1,14 @@ + +.. image:: https://badge.fury.io/py/galaxy-schema.svg + :target: https://pypi.org/project/galaxy-schema/ + + + +Overview +-------- + +The Galaxy_ API schema objects. + +* Code: https://github.com/galaxyproject/galaxy + +.. _Galaxy: http://galaxyproject.org/ diff --git a/packages/schema/dev-requirements.txt b/packages/schema/dev-requirements.txt new file mode 120000 index 000000000000..467b90d7a232 --- /dev/null +++ b/packages/schema/dev-requirements.txt @@ -0,0 +1 @@ +../package-dev-requirements.txt \ No newline at end of file diff --git a/packages/schema/galaxy/__init__.py b/packages/schema/galaxy/__init__.py new file mode 100644 index 000000000000..2e50d9cce896 --- /dev/null +++ b/packages/schema/galaxy/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore[has-type] diff --git a/packages/schema/galaxy/py.typed b/packages/schema/galaxy/py.typed new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/schema/galaxy/schema b/packages/schema/galaxy/schema new file mode 120000 index 000000000000..baebb232dbd6 --- /dev/null +++ b/packages/schema/galaxy/schema @@ -0,0 +1 @@ +../../../lib/galaxy/schema/ \ No newline at end of file diff --git a/packages/schema/mypy.ini b/packages/schema/mypy.ini new file mode 120000 index 000000000000..141a30f41afc --- /dev/null +++ b/packages/schema/mypy.ini @@ -0,0 +1 @@ +../../mypy.ini \ No newline at end of file diff --git a/packages/schema/pyproject.toml b/packages/schema/pyproject.toml new file mode 120000 index 000000000000..01a3b08b8872 --- /dev/null +++ b/packages/schema/pyproject.toml @@ -0,0 +1 @@ +../package-pyproject.toml \ No newline at end of file diff --git a/packages/schema/scripts b/packages/schema/scripts new file mode 120000 index 000000000000..9aec9dc5a067 --- /dev/null +++ b/packages/schema/scripts @@ -0,0 +1 @@ +../build_scripts \ No newline at end of file diff --git a/packages/schema/setup.cfg b/packages/schema/setup.cfg new file mode 100644 index 000000000000..507f563909ea --- /dev/null +++ b/packages/schema/setup.cfg @@ -0,0 +1,41 @@ +[metadata] +author = Galaxy Project and Community +author_email = galaxy-committers@lists.galaxyproject.org +classifiers = + Development Status :: 5 - Production/Stable + Environment :: Console + Intended Audience :: Developers + License :: OSI Approved :: Academic Free License (AFL) + Natural Language :: English + Operating System :: POSIX + Programming Language :: Python :: 3 + Programming Language :: Python :: 3.7 + Programming Language :: Python :: 3.8 + Programming Language :: Python :: 3.9 + Programming Language :: Python :: 3.10 + Topic :: Software Development + Topic :: Software 
Development :: Code Generators
+    Topic :: Software Development :: Testing
+description = Galaxy API schema objects
+keywords =
+    Galaxy
+license = AFL
+license_files =
+    LICENSE
+long_description = file: README.rst, HISTORY.rst
+long_description_content_type = text/x-rst
+name = galaxy-schema
+url = https://github.com/galaxyproject/galaxy
+version = 23.1.0.dev0
+
+[options]
+include_package_data = True
+install_requires =
+    galaxy-util
+    pydantic[email]
+packages = find:
+python_requires = >=3.7
+
+[options.packages.find]
+exclude =
+    tests*
diff --git a/packages/schema/test-requirements.txt b/packages/schema/test-requirements.txt
new file mode 100644
index 000000000000..e079f8a6038d
--- /dev/null
+++ b/packages/schema/test-requirements.txt
@@ -0,0 +1 @@
+pytest
diff --git a/packages/schema/tests/__init__.py b/packages/schema/tests/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/packages/schema/tests/schema b/packages/schema/tests/schema
new file mode 120000
index 000000000000..f2bc0c0ba5cd
--- /dev/null
+++ b/packages/schema/tests/schema
@@ -0,0 +1 @@
+../../../test/unit/schema/
\ No newline at end of file
diff --git a/packages/tool_shed/HISTORY.rst b/packages/tool_shed/HISTORY.rst
new file mode 100644
index 000000000000..7fccc8db62eb
--- /dev/null
+++ b/packages/tool_shed/HISTORY.rst
@@ -0,0 +1,10 @@
+History
+-------
+
+.. to_doc
+
+---------------------
+23.1.0.dev0
+---------------------
+
+* First release.
diff --git a/packages/tool_shed/LICENSE b/packages/tool_shed/LICENSE
new file mode 120000
index 000000000000..1ef648f64b34
--- /dev/null
+++ b/packages/tool_shed/LICENSE
@@ -0,0 +1 @@
+../../LICENSE.txt
\ No newline at end of file
diff --git a/packages/tool_shed/MANIFEST.in b/packages/tool_shed/MANIFEST.in
new file mode 100644
index 000000000000..12302eb8dff0
--- /dev/null
+++ b/packages/tool_shed/MANIFEST.in
@@ -0,0 +1 @@
+include *.rst *.txt LICENSE */py.typed
diff --git a/packages/tool_shed/Makefile b/packages/tool_shed/Makefile
new file mode 120000
index 000000000000..37af8bae5baa
--- /dev/null
+++ b/packages/tool_shed/Makefile
@@ -0,0 +1 @@
+../package.Makefile
\ No newline at end of file
diff --git a/packages/tool_shed/README.rst b/packages/tool_shed/README.rst
new file mode 100644
index 000000000000..6ae09c66ae5b
--- /dev/null
+++ b/packages/tool_shed/README.rst
@@ -0,0 +1,14 @@
+
+.. image:: https://badge.fury.io/py/galaxy-tool-shed.svg
+   :target: https://pypi.org/project/galaxy-tool-shed/
+
+
+
+Overview
+--------
+
+The Galaxy_ tool shed server.
+
+* Code: https://github.com/galaxyproject/galaxy
+
+.. _Galaxy: http://galaxyproject.org/
diff --git a/packages/tool_shed/dev-requirements.txt b/packages/tool_shed/dev-requirements.txt
new file mode 120000
index 000000000000..467b90d7a232
--- /dev/null
+++ b/packages/tool_shed/dev-requirements.txt
@@ -0,0 +1 @@
+../package-dev-requirements.txt
\ No newline at end of file
diff --git a/packages/tool_shed/mypy.ini b/packages/tool_shed/mypy.ini
new file mode 120000
index 000000000000..141a30f41afc
--- /dev/null
+++ b/packages/tool_shed/mypy.ini
@@ -0,0 +1 @@
+../../mypy.ini
\ No newline at end of file
diff --git a/packages/tool_shed/pyproject.toml b/packages/tool_shed/pyproject.toml
new file mode 120000
index 000000000000..01a3b08b8872
--- /dev/null
+++ b/packages/tool_shed/pyproject.toml
@@ -0,0 +1 @@
+../package-pyproject.toml
\ No newline at end of file
diff --git a/packages/tool_shed/scripts b/packages/tool_shed/scripts
new file mode 120000
index 000000000000..9aec9dc5a067
--- /dev/null
+++ b/packages/tool_shed/scripts
@@ -0,0 +1 @@
+../build_scripts
\ No newline at end of file
diff --git a/packages/tool_shed/setup.cfg b/packages/tool_shed/setup.cfg
new file mode 100644
index 000000000000..4c5cc82377d2
--- /dev/null
+++ b/packages/tool_shed/setup.cfg
@@ -0,0 +1,40 @@
+[metadata]
+author = Galaxy Project and Community
+author_email = galaxy-committers@lists.galaxyproject.org
+classifiers =
+    Development Status :: 5 - Production/Stable
+    Environment :: Console
+    Intended Audience :: Developers
+    License :: OSI Approved :: Academic Free License (AFL)
+    Natural Language :: English
+    Operating System :: POSIX
+    Programming Language :: Python :: 3
+    Programming Language :: Python :: 3.7
+    Programming Language :: Python :: 3.8
+    Programming Language :: Python :: 3.9
+    Programming Language :: Python :: 3.10
+    Topic :: Software Development
+    Topic :: Software Development :: Code Generators
+    Topic :: Software Development :: Testing
+description = Galaxy tool shed server
+keywords =
+    Galaxy
+license = AFL
+license_files =
+    LICENSE
+long_description = file: README.rst, HISTORY.rst
+long_description_content_type = text/x-rst
+name = galaxy-tool-shed
+url = https://github.com/galaxyproject/galaxy
+version = 23.1.0.dev0
+
+[options]
+include_package_data = True
+install_requires =
+    galaxy-webapps
+packages = find:
+python_requires = >=3.7
+
+[options.packages.find]
+exclude =
+    tests*
diff --git a/packages/tool_shed/test-requirements.txt b/packages/tool_shed/test-requirements.txt
new file mode 100644
index 000000000000..e079f8a6038d
--- /dev/null
+++ b/packages/tool_shed/test-requirements.txt
@@ -0,0 +1 @@
+pytest
diff --git a/packages/tool_shed/tests/__init__.py b/packages/tool_shed/tests/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/packages/tool_shed/tests/tool_shed b/packages/tool_shed/tests/tool_shed
new file mode 120000
index 000000000000..8f2e9fcd98e2
--- /dev/null
+++ b/packages/tool_shed/tests/tool_shed
@@ -0,0 +1 @@
+../../../test/unit/tool_shed
\ No newline at end of file
diff --git a/packages/web_apps/tool_shed b/packages/tool_shed/tool_shed
similarity index 100%
rename from packages/web_apps/tool_shed
rename to packages/tool_shed/tool_shed
diff --git a/test/unit/schema/__init__.py b/test/unit/schema/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/test/unit/data/test_schema.py b/test/unit/schema/test_schema.py
similarity index 100%
rename from test/unit/data/test_schema.py
rename to test/unit/schema/test_schema.py
diff --git
a/test/unit/shed_unit/test_installed_repository_manager.py b/test/unit/shed_unit/test_installed_repository_manager.py deleted file mode 100644 index 58dc9d331b7f..000000000000 --- a/test/unit/shed_unit/test_installed_repository_manager.py +++ /dev/null @@ -1,141 +0,0 @@ -import os -from typing import ( - Any, - Dict, -) -from unittest.mock import MagicMock - -from galaxy.tool_shed.galaxy_install.install_manager import InstallRepositoryManager -from galaxy.tool_shed.galaxy_install.installed_repository_manager import InstalledRepositoryManager -from galaxy.tool_shed.galaxy_install.update_repository_manager import UpdateRepositoryManager -from galaxy.tool_shed.util import ( - hg_util, - repository_util, -) -from galaxy.util.tool_shed import common_util -from ..app.tools.test_toolbox import ( - BaseToolBoxTestCase, - DEFAULT_TEST_REPO, -) - - -class ToolShedRepoBaseTestCase(BaseToolBoxTestCase): - def setUp(self): - super().setUp() - self._init_dynamic_tool_conf() - self.app.config.tool_configs = self.config_files - self.app.config.manage_dependency_relationships = False - self.app._toolbox = self.toolbox - - def _setup_repository(self): - return self._repo_install(changeset="1", config_filename=self.config_files[0]) - - -class TestInstallRepositoryManager(ToolShedRepoBaseTestCase): - def setUp(self): - super().setUp() - self.irm = InstallRepositoryManager(self.app) - self.app.config.enable_tool_shed_check = False - self.app.update_repository_manager = UpdateRepositoryManager(self.app) - - def test_tool_shed_repository_install(self): - hg_util.clone_repository = MagicMock(return_value=(True, None)) - repository_util.get_tool_shed_status_for = MagicMock(return_value={"revision_update": "true"}) - self._install_tool_shed_repository(start_status="New", end_status="Installed", changeset_revision="1") - hg_util.clone_repository.assert_called_with( - "github.com/repos/galaxyproject/example", - os.path.abspath(os.path.join("../shed_tools", "github.com/repos/galaxyproject/example/1/example")), - "1", - ) - - def test_tool_shed_repository_update(self): - common_util.get_tool_shed_url_from_tool_shed_registry = MagicMock(return_value="https://github.com") - repository_util.get_tool_shed_status_for = MagicMock(return_value={"revision_update": "false"}) - hg_util.pull_repository = MagicMock() - hg_util.update_repository = MagicMock(return_value=(True, None)) - self._install_tool_shed_repository(start_status="Installed", end_status="Installed", changeset_revision="2") - assert hg_util.pull_repository.call_args[0][0].endswith("github.com/repos/galaxyproject/example/1/example") - assert hg_util.pull_repository.call_args[0][1] == "https://github.com/repos/galaxyproject/example" - assert hg_util.pull_repository.call_args[0][2] == "2" - assert hg_util.update_repository.call_args[0][0].endswith("github.com/repos/galaxyproject/example/1/example") - assert hg_util.update_repository.call_args[0][1] == "2" - - def _install_tool_shed_repository(self, start_status, end_status, changeset_revision): - repository = self._setup_repository() - repository.status = start_status - repo_info_dict: Dict[str, Any] = { - "example": ( - "description", - "github.com/repos/galaxyproject/example", - changeset_revision, - changeset_revision, - "galaxyproject", - [], - [], - ) - } - self.irm.install_tool_shed_repository( - repository, - repo_info_dict, - "section_key", - self.app.config.tool_configs[0], - "../shed_tools", - False, - False, - reinstalling=False, - ) - assert repository.status == end_status - assert 
repository.changeset_revision == changeset_revision - - -class TestInstalledRepositoryManager(ToolShedRepoBaseTestCase): - def setUp(self): - super().setUp() - self.irm = InstalledRepositoryManager(self.app) - - def test_uninstall_repository(self): - repository = self._setup_repository() - assert repository.uninstalled is False - self.irm.uninstall_repository(repository=repository, remove_from_disk=True) - assert repository.uninstalled is True - - def test_deactivate_repository(self): - self._deactivate_repository() - - def test_activate_repository(self): - repository = self._deactivate_repository() - self.irm.activate_repository(repository) - assert repository.status == self.app.install_model.ToolShedRepository.installation_status.INSTALLED - - def test_create_or_update_tool_shed_repository_update(self): - repository = self._setup_repository() - self._create_or_update_tool_shed_repository(repository=repository, changeset_revision="2") - - def test_create_or_update_tool_shed_repository_create(self): - self._create_or_update_tool_shed_repository(repository=None, changeset_revision="2") - - def _create_or_update_tool_shed_repository(self, repository=None, changeset_revision="2"): - if repository is None: - repository = DEFAULT_TEST_REPO - new_repository = repository_util.create_or_update_tool_shed_repository( - app=self.app, - name=repository.name, - description=repository.description, - installed_changeset_revision=repository.installed_changeset_revision, - ctx_rev=repository.changeset_revision, - repository_clone_url=f"https://github.com/galaxyproject/example/test_tool/0.{repository.installed_changeset_revision}", # not needed if owner is given - status=repository.status, - metadata_dict=None, - current_changeset_revision=str(int(repository.changeset_revision) + 1), - owner=repository.owner, - dist_to_shed=False, - ) - assert new_repository.changeset_revision == changeset_revision - - def _deactivate_repository(self): - repository = self._setup_repository() - assert repository.uninstalled is False - self.irm.uninstall_repository(repository=repository, remove_from_disk=False) - assert repository.uninstalled is False - assert repository.status == self.app.install_model.ToolShedRepository.installation_status.DEACTIVATED - return repository diff --git a/test/unit/shed_unit/__init__.py b/test/unit/tool_shed/__init__.py similarity index 100% rename from test/unit/shed_unit/__init__.py rename to test/unit/tool_shed/__init__.py diff --git a/test/unit/tool_shed/model/__init__.py b/test/unit/tool_shed/model/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/unit/shed_unit/test_dbscript.py b/test/unit/tool_shed/test_dbscript.py similarity index 100% rename from test/unit/shed_unit/test_dbscript.py rename to test/unit/tool_shed/test_dbscript.py diff --git a/test/unit/shed_unit/test_hg_util.py b/test/unit/tool_shed/test_hg_util.py similarity index 100% rename from test/unit/shed_unit/test_hg_util.py rename to test/unit/tool_shed/test_hg_util.py diff --git a/test/unit/shed_unit/test_shed_index.py b/test/unit/tool_shed/test_shed_index.py similarity index 100% rename from test/unit/shed_unit/test_shed_index.py rename to test/unit/tool_shed/test_shed_index.py diff --git a/test/unit/shed_unit/test_tool_panel_manager.py b/test/unit/tool_shed/test_tool_panel_manager.py similarity index 99% rename from test/unit/shed_unit/test_tool_panel_manager.py rename to test/unit/tool_shed/test_tool_panel_manager.py index a206bfff60d0..751a7c5f5c4d 100644 --- 
a/test/unit/shed_unit/test_tool_panel_manager.py
+++ b/test/unit/tool_shed/test_tool_panel_manager.py
@@ -1,12 +1,12 @@
 import os
 
-from galaxy.tool_shed.galaxy_install.tools import tool_panel_manager
-from galaxy.util import parse_xml
-from tool_shed.tools import tool_version_manager
-from ..app.tools.test_toolbox import (
+from galaxy.app_unittest_utils.toolbox_support import (
     BaseToolBoxTestCase,
     SimplifiedToolBox,
 )
+from galaxy.tool_shed.galaxy_install.tools import tool_panel_manager
+from galaxy.util import parse_xml
+from tool_shed.tools import tool_version_manager
 
 DEFAULT_GUID = "123456"

From 3c3e31ef8cb4facc8cbfadff0d6394b3b8cad91b Mon Sep 17 00:00:00 2001
From: John Chilton
Date: Sat, 22 Oct 2022 21:11:10 -0400
Subject: [PATCH 28/73] metadata_generator: use TYPE_CHECKING to avoid dependency issues

---
 .../tool_shed/metadata/metadata_generator.py | 17 ++++++++++-------
 .../tool_shed/tools/data_table_manager.py    |  8 ++++++--
 2 files changed, 16 insertions(+), 9 deletions(-)

diff --git a/lib/galaxy/tool_shed/metadata/metadata_generator.py b/lib/galaxy/tool_shed/metadata/metadata_generator.py
index b10e29148e40..9cbaf9e94960 100644
--- a/lib/galaxy/tool_shed/metadata/metadata_generator.py
+++ b/lib/galaxy/tool_shed/metadata/metadata_generator.py
@@ -8,6 +8,7 @@
     List,
     Optional,
     Tuple,
+    TYPE_CHECKING,
     Union,
 )
@@ -15,7 +16,6 @@
 from galaxy import util
 from galaxy.model.tool_shed_install import ToolShedRepository
-from galaxy.structured_app import BasicSharedApp
 from galaxy.tool_shed.galaxy_install.client import (
     DataManagerInterface,
     InstallationTarget,
@@ -46,7 +46,10 @@
     remove_protocol_from_tool_shed_url,
 )
 from galaxy.util.tool_shed.xml_util import parse_xml
-from galaxy.web import url_for
+
+if TYPE_CHECKING:
+    from galaxy.structured_app import BasicSharedApp
+
 
 log = logging.getLogger(__name__)
@@ -70,7 +73,7 @@ def repo_path(self, app) -> Optional[str]:
 
 class BaseMetadataGenerator:
-    app: Union[BasicSharedApp, InstallationTarget]
+    app: Union["BasicSharedApp", InstallationTarget]
     repository: Optional[RepositoryProtocol]
     invalid_file_tups: List[InvalidFileT]
     changeset_revision: Optional[str]
@@ -961,10 +964,10 @@ def handle_repository_elem(self, repository_elem, only_if_compiling_contained_td
             error_message += util.xml_to_string(repository_elem, pretty=True)
             log.error(error_message)
             return repository_dependency_tup, False, error_message
-        if not toolshed:
-            # Default to the current tool shed.
-            toolshed = str(url_for("/", qualified=True)).rstrip("/")
-        repository_dependency_tup[0] = toolshed
+
+        # The tool shed must already be set by the time we reach this Galaxy-side code.
+        assert toolshed
+        toolshed = remove_protocol_from_tool_shed_url(toolshed)
         # We're in Galaxy. We reach here when we're generating the metadata for a tool
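
The idiom this patch adopts is worth spelling out: an import needed only for annotations is placed under TYPE_CHECKING, so static type checkers evaluate it while the runtime never does, and quoting the annotation keeps the name out of runtime scope entirely. A minimal sketch of the pattern, with an illustrative function that is not part of the patch:

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Only type checkers execute this import, so the runtime import graph
        # (and any circular-dependency risk) is unaffected.
        from galaxy.structured_app import BasicSharedApp

    def configure(app: "BasicSharedApp") -> None:
        # The string annotation is resolved lazily by the type checker.
        ...
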
diff --git a/lib/galaxy/tool_shed/tools/data_table_manager.py b/lib/galaxy/tool_shed/tools/data_table_manager.py
index 41f91610c4a7..f2d02a4d96a6 100644
--- a/lib/galaxy/tool_shed/tools/data_table_manager.py
+++ b/lib/galaxy/tool_shed/tools/data_table_manager.py
@@ -3,18 +3,22 @@
 import shutil
 from typing import (
     List,
+    TYPE_CHECKING,
     Union,
 )
 
-from galaxy.structured_app import BasicSharedApp
 from galaxy.tool_shed.galaxy_install.client import InstallationTarget
 from galaxy.tool_shed.util import hg_util
 from galaxy.util import etree
 from galaxy.util.tool_shed import xml_util
 
+if TYPE_CHECKING:
+    from galaxy.structured_app import BasicSharedApp
+
 log = logging.getLogger(__name__)
 
-RequiredAppT = Union[BasicSharedApp, InstallationTarget]
+
+RequiredAppT = Union["BasicSharedApp", InstallationTarget]
 
 
 class ShedToolDataTableManager:

From 43e1574c32ba97055b3e04fdc4fffa92b3b9432c Mon Sep 17 00:00:00 2001
From: John Chilton
Date: Sat, 29 Oct 2022 17:28:57 -0400
Subject: [PATCH 29/73] metadata_generator: mark cleanup_repository_metadata as private

Fix up clean up repository metadata...
---
 lib/tool_shed/metadata/repository_metadata_manager.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/lib/tool_shed/metadata/repository_metadata_manager.py b/lib/tool_shed/metadata/repository_metadata_manager.py
index ff0d9381f3a3..416ff9e97713 100644
--- a/lib/tool_shed/metadata/repository_metadata_manager.py
+++ b/lib/tool_shed/metadata/repository_metadata_manager.py
@@ -299,14 +299,13 @@ def build_repository_ids_select_field(
             repositories_select_field.add_option(option_label, option_value)
         return repositories_select_field
 
-    def clean_repository_metadata(self, changeset_revisions):
+    def _clean_repository_metadata(self, changeset_revisions):
         assert self.repository
         # Delete all repository_metadata records associated with the repository that have
         # a changeset_revision that is not in changeset_revisions. We sometimes see multiple
         # records with the same changeset revision value - no idea how this happens. We'll
         # assume we can delete the older records, so we'll order by update_time descending and
         # delete records that have the same changeset_revision we come across later.
-        changeset_revisions_checked = []
         for repository_metadata in (
             self.sa_session.query(self.app.model.RepositoryMetadata)
             .filter(self.app.model.RepositoryMetadata.table.c.repository_id == self.repository.id)
@@ -316,7 +315,7 @@
             )
         ):
             changeset_revision = repository_metadata.changeset_revision
-            if changeset_revision in changeset_revisions_checked or changeset_revision not in changeset_revisions:
+            if changeset_revision not in changeset_revisions:
                 self.sa_session.delete(repository_metadata)
         session = self.sa_session()
         with transaction(session):
@@ -1117,7 +1116,7 @@ def reset_all_metadata_on_repository_in_tool_shed(self):
             basic_util.remove_dir(work_dir)
         # Delete all repository_metadata records for this repository that do not have a changeset_revision
         # value in changeset_revisions.
-        self.clean_repository_metadata(changeset_revisions)
+        self._clean_repository_metadata(changeset_revisions)
         # Set tool version information for all downloadable changeset revisions. Get the list of changeset
         # revisions from the changelog.
self.reset_all_tool_versions(repo) From fca10e4d80147ec4130fb72a1e22725ccf8038db Mon Sep 17 00:00:00 2001 From: John Chilton Date: Mon, 19 Dec 2022 14:08:09 -0500 Subject: [PATCH 30/73] metadata_generator: Small typing fixes for tool shed metadata_generator --- lib/galaxy/tool_shed/metadata/metadata_generator.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/tool_shed/metadata/metadata_generator.py b/lib/galaxy/tool_shed/metadata/metadata_generator.py index 9cbaf9e94960..39d40555b841 100644 --- a/lib/galaxy/tool_shed/metadata/metadata_generator.py +++ b/lib/galaxy/tool_shed/metadata/metadata_generator.py @@ -85,10 +85,10 @@ class BaseMetadataGenerator: persist: bool def initial_metadata_dict(self) -> Dict[str, Any]: - ... + raise NotImplementedError() def handle_repository_elem(self, repository_elem, only_if_compiling_contained_td=False) -> HandleResultT: - ... + raise NotImplementedError() def _generate_data_manager_metadata( self, repo_dir, data_manager_config_filename, metadata_dict: Dict[str, Any], shed_config_dict=None @@ -855,6 +855,7 @@ def _update_repository_dependencies_metadata( class GalaxyMetadataGenerator(BaseMetadataGenerator): """A MetadataGenerator building on Galaxy's app and repository constructs.""" + app: InstallationTarget repository: Optional[ToolShedRepository] def __init__( From e611a9566af06f064a75a6c58c25dcfb07f8b686 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Mon, 19 Dec 2022 20:24:57 -0500 Subject: [PATCH 31/73] Move generate_clone_url_for_repository_in_tool_shed into tool shed. --- lib/galaxy/util/tool_shed/common_util.py | 12 ------------ lib/tool_shed/util/common_util.py | 15 ++++++++++++++- 2 files changed, 14 insertions(+), 13 deletions(-) diff --git a/lib/galaxy/util/tool_shed/common_util.py b/lib/galaxy/util/tool_shed/common_util.py index 6636897ceea8..b65eab240f8f 100644 --- a/lib/galaxy/util/tool_shed/common_util.py +++ b/lib/galaxy/util/tool_shed/common_util.py @@ -51,17 +51,6 @@ def generate_clone_url_for_installed_repository(app: HasToolShedRegistry, reposi return util.build_url(tool_shed_url, pathspec=["repos", str(repository.owner), str(repository.name)]) -def generate_clone_url_for_repository_in_tool_shed(user, repository) -> str: - """Generate the URL for cloning a repository that is in the tool shed.""" - base_url = url_for("/", qualified=True).rstrip("/") - if user: - protocol, base = base_url.split("://") - username = f"{user.username}@" - return f"{protocol}://{username}{base}/repos/{repository.user.username}/{repository.name}" - else: - return f"{base_url}/repos/{repository.user.username}/{repository.name}" - - def generate_clone_url_from_repo_info_tup(app: HasToolShedRegistry, repo_info_tup) -> str: """Generate the URL for cloning a repository given a tuple of toolshed, name, owner, changeset_revision.""" # Example tuple: ['http://localhost:9009', 'blast_datatypes', 'test', '461a4216e8ab', False] @@ -286,7 +275,6 @@ def remove_protocol_from_tool_shed_url(tool_shed_url: str) -> str: "accumulate_tool_dependencies", "check_tool_tag_set", "generate_clone_url_for_installed_repository", - "generate_clone_url_for_repository_in_tool_shed", "generate_clone_url_from_repo_info_tup", "get_repository_dependencies", "get_protocol_from_tool_shed_url", diff --git a/lib/tool_shed/util/common_util.py b/lib/tool_shed/util/common_util.py index 00ce0d153ff0..7519967fe9e1 100644 --- a/lib/tool_shed/util/common_util.py +++ b/lib/tool_shed/util/common_util.py @@ -1,8 +1,9 @@ +from routes import url_for + from 
galaxy.util.tool_shed.common_util import (
     accumulate_tool_dependencies,
     check_tool_tag_set,
     generate_clone_url_for_installed_repository,
-    generate_clone_url_for_repository_in_tool_shed,
     generate_clone_url_from_repo_info_tup,
     get_protocol_from_tool_shed_url,
     get_repository_dependencies,
@@ -18,6 +19,18 @@
     remove_protocol_from_tool_shed_url,
 )
 
+
+def generate_clone_url_for_repository_in_tool_shed(user, repository) -> str:
+    """Generate the URL for cloning a repository that is in the tool shed."""
+    base_url = url_for("/", qualified=True).rstrip("/")
+    if user:
+        protocol, base = base_url.split("://")
+        username = f"{user.username}@"
+        return f"{protocol}://{username}{base}/repos/{repository.user.username}/{repository.name}"
+    else:
+        return f"{base_url}/repos/{repository.user.username}/{repository.name}"
+
+
 __all__ = (
     "accumulate_tool_dependencies",
     "check_tool_tag_set",

From 89107140148a5f1d07d4cfda8346004a0c45b60c Mon Sep 17 00:00:00 2001
From: John Chilton
Date: Tue, 10 Jan 2023 13:22:08 -0500
Subject: [PATCH 32/73] Allow external shed testing.

---
 lib/tool_shed/test/base/driver.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/lib/tool_shed/test/base/driver.py b/lib/tool_shed/test/base/driver.py
index f8885c50a8e5..7dc778643e81 100644
--- a/lib/tool_shed/test/base/driver.py
+++ b/lib/tool_shed/test/base/driver.py
@@ -43,6 +43,14 @@ class ToolShedTestDriver(driver_util.TestDriver):
 
     def setup(self):
         """Entry point for test driver script."""
+        self.external_shed = bool(os.environ.get("TOOL_SHED_TEST_EXTERNAL", None))
+        if not self.external_shed:
+            self._setup_local()
+        else:
+            # Going to also need to set TOOL_SHED_TEST_HOST.
+            assert os.environ["TOOL_SHED_TEST_HOST"]
+
+    def _setup_local(self):
         # ---- Configuration ------------------------------------------------------
         tool_shed_test_tmp_dir = driver_util.setup_tool_shed_tmp_dir()
         if not os.path.isdir(tool_shed_test_tmp_dir):

From 2fa165781bc83c6d966602328348f17e8b18282d Mon Sep 17 00:00:00 2001
From: John Chilton
Date: Sun, 11 Dec 2022 16:32:50 -0500
Subject: [PATCH 33/73] Use repository manager to capture more of the upload process.

---
 lib/tool_shed/managers/repositories.py        | 42 ++++++++
 .../metadata/repository_metadata_manager.py   | 11 ++-
 lib/tool_shed/util/repository_content_util.py | 77 +++++++++++----
 lib/tool_shed/webapp/api/repositories.py      | 95 ++++---------------
 lib/tool_shed/webapp/api/users.py             | 14 +--
 lib/tool_shed/webapp/controllers/upload.py    | 14 +--
 6 files changed, 132 insertions(+), 121 deletions(-)

diff --git a/lib/tool_shed/managers/repositories.py b/lib/tool_shed/managers/repositories.py
index 4b26be1ad274..77709dd733e4 100644
--- a/lib/tool_shed/managers/repositories.py
+++ b/lib/tool_shed/managers/repositories.py
@@ -11,8 +11,14 @@
 from galaxy.exceptions import (
     InconsistentDatabase,
     InternalServerError,
+    MalformedContents,
     RequestParameterInvalidException,
 )
+from tool_shed.metadata import repository_metadata_manager
+from tool_shed.structured_app import ToolShedApp
+from tool_shed.util import hg_util
+from tool_shed.util.repository_content_util import upload_tar
+from tool_shed.webapp.model import Repository, User
 
 log = logging.getLogger(__name__)
 
@@ -76,3 +82,39 @@ def delete(self, trans, group, undelete=False):
         """
         Mark given group deleted/undeleted based on the flag.
""" + + +def upload_tar_and_set_metadata( + app: ToolShedApp, + host: str, + user: User, + repository: Repository, + uploaded_file, + upload_point, + commit_message: str, +): + repo_dir = repository.repo_path(app) + tip = repository.tip() + (ok, message, _, content_alert_str, _, _,) = upload_tar( + app, + host, + user.username, + repository, + uploaded_file, + upload_point, + commit_message, + ) + if ok: + # Update the repository files for browsing. + hg_util.update_repository(repo_dir) + # Get the new repository tip. + if tip == repository.tip(): + raise MalformedContents("No changes to repository.") + else: + rmm = repository_metadata_manager.RepositoryMetadataManager(app=app, user=user, repository=repository) + _, error_message = rmm.set_repository_metadata_due_to_new_tip(host, content_alert_str=content_alert_str) + if error_message: + raise InternalServerError(error_message) + else: + raise InternalServerError(message) + return message diff --git a/lib/tool_shed/metadata/repository_metadata_manager.py b/lib/tool_shed/metadata/repository_metadata_manager.py index 416ff9e97713..2b1f4282b605 100644 --- a/lib/tool_shed/metadata/repository_metadata_manager.py +++ b/lib/tool_shed/metadata/repository_metadata_manager.py @@ -1042,7 +1042,7 @@ def new_workflow_metadata_required(self, repository_metadata): # repository_metadata table record is not needed. return False - def reset_all_metadata_on_repository_in_tool_shed(self): + def reset_all_metadata_on_repository_in_tool_shed(self, repository_clone_url=None): """Reset all metadata on a single repository in a tool shed.""" assert self.repository log.debug(f"Resetting all metadata on repository: {self.repository.name}") @@ -1062,7 +1062,8 @@ def reset_all_metadata_on_repository_in_tool_shed(self): ctx = repo[changeset] log.debug("Cloning repository changeset revision: %s", str(ctx.rev())) assert self.repository_clone_url - cloned_ok, error_message = hg_util.clone_repository(self.repository_clone_url, work_dir, str(ctx.rev())) + repository_clone_url = repository_clone_url or self.repository_clone_url + cloned_ok, error_message = hg_util.clone_repository(repository_clone_url, work_dir, str(ctx.rev())) if cloned_ok: log.debug("Generating metadata for changeset revision: %s", str(ctx.rev())) self.set_changeset_revision(str(ctx)) @@ -1080,9 +1081,11 @@ def reset_all_metadata_on_repository_in_tool_shed(self): # self.SUBSET - ancestor metadata is a subset of current metadata, so continue from current # self.NOT_EQUAL_AND_NOT_SUBSET - ancestor metadata is neither equal to nor a subset of current # metadata, so persist ancestor metadata. + log.info(f"amd {ancestor_metadata_dict}") comparison = self.compare_changeset_revisions( ancestor_changeset_revision, ancestor_metadata_dict ) + log.info(f"comparison {comparison}") if comparison in [self.NO_METADATA, self.EQUAL, self.SUBSET]: ancestor_changeset_revision = self.changeset_revision ancestor_metadata_dict = self.metadata_dict @@ -1131,6 +1134,7 @@ def reset_all_tool_versions(self, repo): repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( self.app, encoded_repository_id, changeset_revision ) + log.info(f"changeset_is {changeset_revision} with rm {repository_metadata}") if repository_metadata: metadata = repository_metadata.metadata if metadata: @@ -1153,7 +1157,9 @@ def reset_all_tool_versions(self, repo): # first changeset_revision will be the "old_id" in the tool config. 
@@ -1217,9 +1218,11 @@ def reset_metadata_on_selected_repositories(self, **kwd):
             status = "error"
         return message, status
 
-    def set_repository(self, repository):
+    def set_repository(self, repository, repository_clone_url=None):
         super().set_repository(repository)
-        self.repository_clone_url = common_util.generate_clone_url_for_repository_in_tool_shed(self.user, repository)
+        self.repository_clone_url = repository_clone_url or common_util.generate_clone_url_for_repository_in_tool_shed(
+            self.user, repository
+        )
 
     def set_repository_metadata(self, host, content_alert_str="", **kwd):
         """
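
Together with the reset_all_metadata_on_repository_in_tool_shed() change above, this override lets callers supply a clone source directly instead of deriving one via url_for(). A minimal sketch of the intended call pattern, assuming a ToolShedApp and a Repository are already in hand (it mirrors the unit tests added later in this series):

    rmm = repository_metadata_manager.RepositoryMetadataManager(
        app=app,
        user=repository.user,
        repository=repository,
        resetting_all_metadata_on_repository=True,
    )
    # A plain filesystem path works as the clone source here, which is what
    # allows metadata resets to run without a configured web stack.
    rmm.reset_all_metadata_on_repository_in_tool_shed(
        repository_clone_url=repository.repo_path(app=app)
    )
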
diff --git a/lib/tool_shed/util/repository_content_util.py b/lib/tool_shed/util/repository_content_util.py
index 705f8fc3e9a0..418e9ca9a4e7 100644
--- a/lib/tool_shed/util/repository_content_util.py
+++ b/lib/tool_shed/util/repository_content_util.py
@@ -1,36 +1,68 @@
 import os
 import shutil
+import tarfile
+from typing import (
+    Optional,
+    TYPE_CHECKING,
+)
 
 import tool_shed.repository_types.util as rt_util
-from tool_shed.util import (
-    commit_util,
-    xml_util,
+from galaxy.util import checkers
+from tool_shed.dependencies.attribute_handlers import (
+    RepositoryDependencyAttributeHandler,
+    ToolDependencyAttributeHandler,
 )
+from tool_shed.util import xml_util
+from tool_shed.util.commit_util import (
+    ChangeResponseT,
+    check_archive,
+    handle_directory_changes,
+)
+
+if TYPE_CHECKING:
+    from tool_shed.structured_app import ToolShedApp
+    from tool_shed.webapp.model import Repository
+
 
 def upload_tar(
-    trans,
-    rdah,
-    tdah,
-    repository,
-    tar,
+    app: "ToolShedApp",
+    host: str,
+    username: str,
+    repository: "Repository",
     uploaded_file,
     upload_point,
-    remove_repo_files_not_in_tar,
-    commit_message,
-    new_repo_alert,
-):
+    commit_message: str,
+    remove_repo_files_not_in_tar: bool = True,
+    new_repo_alert: bool = False,
+    tar=None,
+    rdah: Optional[RepositoryDependencyAttributeHandler] = None,
+    tdah: Optional[ToolDependencyAttributeHandler] = None,
+) -> ChangeResponseT:
+    if tar is None:
+        isgzip = False
+        isbz2 = False
+        isgzip = checkers.is_gzip(uploaded_file)
+        if not isgzip:
+            isbz2 = checkers.is_bz2(uploaded_file)
+        if isgzip or isbz2:
+            # Open for reading with transparent compression.
+            tar = tarfile.open(uploaded_file, "r:*")
+        else:
+            tar = tarfile.open(uploaded_file)
+
+    rdah = rdah or RepositoryDependencyAttributeHandler(app, unpopulate=False)
+    tdah = tdah or ToolDependencyAttributeHandler(app, unpopulate=False)
     # Upload a tar archive of files.
     undesirable_dirs_removed = 0
     undesirable_files_removed = 0
-    check_results = commit_util.check_archive(repository, tar)
+    check_results = check_archive(repository, tar)
     if check_results.invalid:
         tar.close()
         uploaded_file.close()
         message = "{} Invalid paths were: {}".format(" ".join(check_results.errors), ", ".join(check_results.invalid))
         return False, message, [], "", undesirable_dirs_removed, undesirable_files_removed
     else:
-        repo_dir = repository.repo_path(trans.app)
+        repo_dir = repository.repo_path(app)
         if upload_point is not None:
             full_path = os.path.abspath(os.path.join(repo_dir, upload_point))
         else:
@@ -41,7 +73,10 @@ def upload_tar(
         # Extract the uploaded tar to the load_point within the repository hierarchy.
         tar.extractall(path=full_path, members=check_results.valid)
         tar.close()
-        uploaded_file.close()
+        try:
+            uploaded_file.close()
+        except AttributeError:
+            pass
         for filename in filenames_in_archive:
             uploaded_file_name = os.path.join(full_path, filename)
             if os.path.split(uploaded_file_name)[-1] == rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME:
@@ -49,7 +84,7 @@ def upload_tar(
                 # are missing and if so, set them appropriately.
altered, root_elem, error_message = rdah.handle_tag_attributes(uploaded_file_name) if error_message: - return False, error_message, [], "", [], [] + return False, error_message, [], "", 0, 0 elif altered: tmp_filename = xml_util.create_and_write_tmp_file(root_elem) shutil.move(tmp_filename, uploaded_file_name) @@ -58,14 +93,14 @@ def upload_tar( # attributes are missing and if so, set them appropriately. altered, root_elem, error_message = tdah.handle_tag_attributes(uploaded_file_name) if error_message: - return False, error_message, [], "", [], [] + return False, error_message, [], "", 0, 0 if altered: tmp_filename = xml_util.create_and_write_tmp_file(root_elem) shutil.move(tmp_filename, uploaded_file_name) - return commit_util.handle_directory_changes( - trans.app, - trans.request.host, - trans.user.username, + return handle_directory_changes( + app, + host, + username, repository, full_path, filenames_in_archive, diff --git a/lib/tool_shed/webapp/api/repositories.py b/lib/tool_shed/webapp/api/repositories.py index b69750ba2a56..a0d5df75247e 100644 --- a/lib/tool_shed/webapp/api/repositories.py +++ b/lib/tool_shed/webapp/api/repositories.py @@ -1,7 +1,6 @@ import json import logging import os -import tarfile from collections import namedtuple from io import StringIO from time import strftime @@ -24,11 +23,11 @@ ActionInputError, ConfigDoesNotAllowException, InsufficientPermissionsException, + MessageException, ObjectNotFound, RequestParameterInvalidException, RequestParameterMissingException, ) -from galaxy.util import checkers from galaxy.web import ( expose_api, expose_api_anonymous_and_sessionless, @@ -38,15 +37,13 @@ BaseAPIController, HTTPBadRequest, ) -from tool_shed.dependencies import attribute_handlers +from tool_shed.managers.repositories import upload_tar_and_set_metadata from tool_shed.metadata import repository_metadata_manager from tool_shed.repository_types import util as rt_util from tool_shed.util import ( commit_util, encoding_util, - hg_util, metadata_util, - repository_content_util, repository_util, tool_util, ) @@ -1013,9 +1010,6 @@ def create_changeset_revision(self, trans, id, payload, **kwd): """ # Example URL: http://localhost:9009/api/repositories/f9cad7b01a472135 - rdah = attribute_handlers.RepositoryDependencyAttributeHandler(self.app, unpopulate=False) - tdah = attribute_handlers.ToolDependencyAttributeHandler(self.app, unpopulate=False) - repository = repository_util.get_repository_in_tool_shed(self.app, id) if not ( @@ -1028,10 +1022,7 @@ def create_changeset_revision(self, trans, id, payload, **kwd): "err_msg": "You do not have permission to update this repository.", } - repo_dir = repository.repo_path(self.app) - upload_point = commit_util.get_upload_point(repository, **kwd) - tip = repository.tip() file_data = payload.get("file") # Code stolen from gx's upload_common.py @@ -1046,73 +1037,21 @@ def create_changeset_revision(self, trans, id, payload, **kwd): commit_message = kwd.get("commit_message", "Uploaded") - uploaded_file = open(file_data["local_filename"], "rb") uploaded_file_name = file_data["local_filename"] - - isgzip = False - isbz2 = False - isgzip = checkers.is_gzip(uploaded_file_name) - if not isgzip: - isbz2 = checkers.is_bz2(uploaded_file_name) - if isgzip or isbz2: - # Open for reading with transparent compression. 
- tar = tarfile.open(uploaded_file_name, "r:*") - else: - tar = tarfile.open(uploaded_file_name) - - new_repo_alert = False - remove_repo_files_not_in_tar = True - - ( - ok, - message, - files_to_remove, - content_alert_str, - undesirable_dirs_removed, - undesirable_files_removed, - ) = repository_content_util.upload_tar( - trans, - rdah, - tdah, - repository, - tar, - uploaded_file, - upload_point, - remove_repo_files_not_in_tar, - commit_message, - new_repo_alert, - ) - upload_message = message - files_removed = util.listify(undesirable_dirs_removed) + util.listify(undesirable_files_removed) - if ok: - # Update the repository files for browsing. - hg_util.update_repository(repo_dir) - # Get the new repository tip. - if tip == repository.tip(): - trans.response.status = 400 - message = "No changes to repository." - ok = False - else: - rmm = repository_metadata_manager.RepositoryMetadataManager( - app=self.app, user=trans.user, repository=repository - ) - status, error_message = rmm.set_repository_metadata_due_to_new_tip( - trans.request.host, content_alert_str=content_alert_str, **kwd - ) - if error_message: - ok = False - trans.response.status = 500 - message = error_message - else: - trans.response.status = 500 + try: + message = upload_tar_and_set_metadata( + self.app, + trans.request.host, + trans.user, + repository, + uploaded_file_name, + upload_point, + commit_message, + ) + rval = {"message": message} + except MessageException as e: + trans.response.status = e.status_code + rval = {"err_msg": str(e)} if os.path.exists(uploaded_file_name): os.remove(uploaded_file_name) - if not ok: - return { - "err_msg": message, - "content_alert": content_alert_str, - "files_removed": files_removed, - "upload_message": upload_message, - } - else: - return {"message": message} + return rval diff --git a/lib/tool_shed/webapp/api/users.py b/lib/tool_shed/webapp/api/users.py index 6371b29b92ae..558c2518aadb 100644 --- a/lib/tool_shed/webapp/api/users.py +++ b/lib/tool_shed/webapp/api/users.py @@ -13,6 +13,7 @@ validate_publicname, ) from galaxy.webapps.base.controller import BaseAPIController +from tool_shed.managers.users import create_user log = logging.getLogger(__name__) @@ -50,18 +51,7 @@ def create(self, trans, payload, **kwd): return user_dict def __create_user(self, trans, email, username, password): - user = trans.app.model.User(email=email) - user.set_password_cleartext(password) - user.username = username - if trans.app.config.user_activation_on: - user.active = False - else: - user.active = True # Activation is off, every new user is active by default. 
- trans.sa_session.add(user) - with transaction(trans.sa_session): - trans.sa_session.commit() - trans.app.security_agent.create_private_user_role(user) - return user + return create_user(trans.app, email, username, password) def __get_value_mapper(self, trans): value_mapper = {"id": trans.security.encode_id} diff --git a/lib/tool_shed/webapp/controllers/upload.py b/lib/tool_shed/webapp/controllers/upload.py index ef3ef730dd78..373a12cea534 100644 --- a/lib/tool_shed/webapp/controllers/upload.py +++ b/lib/tool_shed/webapp/controllers/upload.py @@ -131,16 +131,18 @@ def upload(self, trans, **kwd): undesirable_dirs_removed, undesirable_files_removed, ) = repository_content_util.upload_tar( - trans, - rdah, - tdah, + trans.app, + trans.request.host, + trans.user.username, repository, - tar, uploaded_file, upload_point, - remove_repo_files_not_in_tar, commit_message, - new_repo_alert, + remove_repo_files_not_in_tar=remove_repo_files_not_in_tar, + new_repo_alert=new_repo_alert, + rdah=rdah, + tdah=tdah, + tar=tar, ) elif uploaded_directory: ( From 95b9c6c36c765f3ad5bf12ccd1665d940d6e3cc2 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Tue, 13 Dec 2022 16:16:18 -0500 Subject: [PATCH 34/73] Remove unused repository manager code (hard to de-conflict with upload stuff). --- lib/tool_shed/managers/repositories.py | 68 -------------------------- 1 file changed, 68 deletions(-) diff --git a/lib/tool_shed/managers/repositories.py b/lib/tool_shed/managers/repositories.py index 77709dd733e4..e25bd4eee2d8 100644 --- a/lib/tool_shed/managers/repositories.py +++ b/lib/tool_shed/managers/repositories.py @@ -3,16 +3,9 @@ """ import logging -from sqlalchemy.orm.exc import ( - MultipleResultsFound, - NoResultFound, -) - from galaxy.exceptions import ( - InconsistentDatabase, InternalServerError, MalformedContents, - RequestParameterInvalidException, ) from tool_shed.metadata import repository_metadata_manager from tool_shed.structured_app import ToolShedApp @@ -23,67 +16,6 @@ log = logging.getLogger(__name__) -# ============================================================================= -class RepoManager: - """ - Interface/service object for interacting with TS repositories. - """ - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - def get(self, trans, decoded_repo_id): - """ - Get the repo from the DB. - - :param decoded_repo_id: decoded repo id - :type decoded_repo_id: int - - :returns: the requested repo - :rtype: tool_shed.webapp.model.Repository - """ - try: - repo = ( - trans.sa_session.query(trans.app.model.Repository) - .filter(trans.app.model.Repository.table.c.id == decoded_repo_id) - .one() - ) - except MultipleResultsFound: - raise InconsistentDatabase("Multiple repositories found with the same id.") - except NoResultFound: - raise RequestParameterInvalidException("No repository found with the id provided.") - except Exception: - raise InternalServerError("Error loading from the database.") - return repo - - def list_by_owner(self, trans, user_id): - """ - Return a list of of repositories owned by a given TS user from the DB. - - :returns: query that will emit repositories owned by given user - :rtype: sqlalchemy query - """ - query = trans.sa_session.query(trans.app.model.Repository).filter( - trans.app.model.Repository.table.c.user_id == user_id - ) - return query - - def create(self, trans, name, description=""): - """ - Create a new group. 
- """ - - def update(self, trans, group, name=None, description=None): - """ - Update the given group - """ - - def delete(self, trans, group, undelete=False): - """ - Mark given group deleted/undeleted based on the flag. - """ - - def upload_tar_and_set_metadata( app: ToolShedApp, host: str, From 2c5f63a054e3531d1cc8f0c05060bdd68ac2de6d Mon Sep 17 00:00:00 2001 From: John Chilton Date: Sat, 29 Oct 2022 11:37:10 -0400 Subject: [PATCH 35/73] Integration-y unit tests for the core tool shed code. --- lib/tool_shed/managers/users.py | 18 +++ test/unit/tool_shed/_util.py | 145 ++++++++++++++++++ test/unit/tool_shed/conftest.py | 28 ++++ .../test_repository_metadata_manager.py | 43 ++++++ test/unit/tool_shed/test_repository_utils.py | 50 ++++++ 5 files changed, 284 insertions(+) create mode 100644 lib/tool_shed/managers/users.py create mode 100644 test/unit/tool_shed/_util.py create mode 100644 test/unit/tool_shed/conftest.py create mode 100644 test/unit/tool_shed/test_repository_metadata_manager.py create mode 100644 test/unit/tool_shed/test_repository_utils.py diff --git a/lib/tool_shed/managers/users.py b/lib/tool_shed/managers/users.py new file mode 100644 index 000000000000..a6cc02a1dc05 --- /dev/null +++ b/lib/tool_shed/managers/users.py @@ -0,0 +1,18 @@ +from tool_shed.structured_app import ToolShedApp +from tool_shed.webapp.model import User + + +def create_user(app: ToolShedApp, email: str, username: str, password: str) -> User: + sa_session = app.model.context + user = User(email=email) + user.set_password_cleartext(password) + user.username = username + # API was doing this but mypy doesn't think user has an active boolean attribute. + # if app.config.user_activation_on: + # user.active = False + # else: + # user.active = True # Activation is off, every new user is active by default. 
+    sa_session.add(user)
+    sa_session.flush()
+    app.security_agent.create_private_user_role(user)
+    return user
diff --git a/test/unit/tool_shed/_util.py b/test/unit/tool_shed/_util.py
new file mode 100644
index 000000000000..54e080d7cb98
--- /dev/null
+++ b/test/unit/tool_shed/_util.py
@@ -0,0 +1,142 @@
+import os
+import random
+import string
+import tarfile
+from pathlib import Path
+from tempfile import (
+    mkdtemp,
+    NamedTemporaryFile,
+)
+from typing import Optional
+from unittest import mock
+
+import tool_shed.repository_registry
+from galaxy.security.idencoding import IdEncodingHelper
+from galaxy.util import safe_makedirs
+from tool_shed.managers.repositories import upload_tar_and_set_metadata
+from tool_shed.managers.users import create_user
+from tool_shed.repository_types import util as rt_util
+from tool_shed.repository_types.registry import Registry as RepositoryTypesRegistry
+from tool_shed.structured_app import ToolShedApp
+from tool_shed.test.base.populators import repo_tars, TEST_DATA_REPO_FILES
+from tool_shed.util.hgweb_config import hgweb_config_manager
+from tool_shed.util.repository_util import create_repository
+from tool_shed.webapp.model import (
+    mapping,
+    Repository,
+    User,
+)
+
+
+TEST_DATA_FILES = TEST_DATA_REPO_FILES
+TEST_HOST = "localhost"
+TEST_COMMIT_MESSAGE = "Test Commit Message"
+
+
+class TestToolShedConfig:
+    user_activation_on = False
+    file_path: str
+    id_secret: str = "thisistheshedunittestsecret"
+    smtp_server: Optional[str] = None
+
+    def __init__(self, temp_directory):
+        files_path = os.path.join(temp_directory, "files")
+        safe_makedirs(files_path)
+        self.file_path = files_path
+
+    def get(self, key, default):
+        assert key == "admin_users"
+        return "admin@galaxyproject.org"
+
+
+class TestToolShedApp(ToolShedApp):
+    repository_types_registry = RepositoryTypesRegistry()
+    model = mapping.init(
+        "sqlite:///:memory:",
+        create_tables=True,
+    )
+    config: TestToolShedConfig
+    hgweb_config_manager = hgweb_config_manager
+    repository_registry: tool_shed.repository_registry.Registry
+    security: IdEncodingHelper
+    name: str = "ToolShed"
+
+    def __init__(self, temp_directory=None):
+        temp_directory = temp_directory or mkdtemp()
+        hgweb_config_dir = os.path.join(temp_directory, "hgweb")
+        safe_makedirs(hgweb_config_dir)
+        self.hgweb_config_manager.hgweb_config_dir = hgweb_config_dir
+        self.config = TestToolShedConfig(temp_directory)
+        self.security = IdEncodingHelper(id_secret=self.config.id_secret)
+        self.repository_registry = tool_shed.repository_registry.Registry(self)
+
+    @property
+    def security_agent(self):
+        return self.model.security_agent
+
+
+def user_fixture(
+    app: TestToolShedApp, username: str, password: str = "testpassword", email: Optional[str] = None
+) -> User:
+    email = email or f"{username}@galaxyproject.org"
+    return create_user(
+        app,
+        email,
+        username,
+        password,
+    )
+
+
+def repository_fixture(app: TestToolShedApp, user: User, name: str) -> Repository:
+    type = rt_util.UNRESTRICTED
+    description = f"test repo named {name}"
+    long_description = f"test repo named {name} a longer description"
+    repository, message = create_repository(
+        app,
+        name,
+        type,
+        description,
+        long_description,
+        user.id,
+        category_ids=None,
+        remote_repository_url=None,
+        homepage_url=None,
+    )
+    assert "created" in message
+    return repository
+
+
+def _mock_url_for(x, qualified: bool = False):
+    return "shed_unit_test://localhost/"
+
+
+patch_url_for = mock.patch("galaxy.util.tool_shed.common_util.url_for", _mock_url_for)
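
Note the patch target here: mock.patch must name the symbol where it is looked up, in this case the module globals of galaxy.util.tool_shed.common_util, rather than where url_for is defined. A sketch of how the decorator is meant to be applied, with an illustrative test body (the real tests appear below):

    @patch_url_for
    def test_something_url_dependent(shed_app, new_repository):
        # While the test runs, common_util.url_for is _mock_url_for, so clone
        # URLs resolve to "shed_unit_test://localhost/" with no web stack at all.
        ...
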
+
+
+def upload(app: TestToolShedApp, repository: Repository, path: Path, arcname: Optional[str] = None):
+    if path.is_dir():
+        tf = NamedTemporaryFile(delete=False)
+        with tarfile.open(tf.name, "w:gz") as tar:
+            tar.add(str(path), arcname=arcname or path.name)
+        tar_path = tf.name
+    else:
+        tar_path = str(path)
+    return upload_tar_and_set_metadata(
+        app,
+        TEST_HOST,
+        repository.user,
+        repository,
+        tar_path,
+        None,
+        TEST_COMMIT_MESSAGE,
+    )
+
+
+def upload_directories_to_repository(app: TestToolShedApp, repository: Repository, test_data_path: str):
+    paths = repo_tars(test_data_path)
+    for path in paths:
+        upload(app, repository, Path(path), arcname=test_data_path)
+
+
+def random_name(len: int = 10) -> str:
+    return "".join(random.choice(string.ascii_lowercase + string.digits) for _ in range(len))
diff --git a/test/unit/tool_shed/conftest.py b/test/unit/tool_shed/conftest.py
new file mode 100644
index 000000000000..f527ce5847a1
--- /dev/null
+++ b/test/unit/tool_shed/conftest.py
@@ -0,0 +1,28 @@
+import pytest
+
+from tool_shed.webapp.model import (
+    Repository,
+    User,
+)
+from ._util import (
+    random_name,
+    repository_fixture,
+    TestToolShedApp,
+    user_fixture,
+)
+
+
+@pytest.fixture
+def shed_app():
+    app = TestToolShedApp()
+    yield app
+
+
+@pytest.fixture
+def new_user(shed_app: TestToolShedApp) -> User:
+    return user_fixture(shed_app, random_name())
+
+
+@pytest.fixture
+def new_repository(shed_app: TestToolShedApp, new_user: User) -> Repository:
+    return repository_fixture(shed_app, new_user, random_name())
diff --git a/test/unit/tool_shed/test_repository_metadata_manager.py b/test/unit/tool_shed/test_repository_metadata_manager.py
new file mode 100644
index 000000000000..e57fbebc2fca
--- /dev/null
+++ b/test/unit/tool_shed/test_repository_metadata_manager.py
@@ -0,0 +1,43 @@
+from tool_shed.metadata import repository_metadata_manager
+from tool_shed.webapp.model import Repository
+from ._util import (
+    patch_url_for,
+    TestToolShedApp,
+    upload_directories_to_repository,
+)
+
+
+@patch_url_for
+def test_reset_simple(shed_app: TestToolShedApp, new_repository: Repository):
+    upload_directories_to_repository(shed_app, new_repository, "column_maker")
+    assert len(new_repository.downloadable_revisions) == 3
+    assert "2:" in new_repository.revision()
+    rmm = repository_metadata_manager.RepositoryMetadataManager(
+        app=shed_app,
+        user=new_repository.user,
+        repository=new_repository,
+        resetting_all_metadata_on_repository=True,
+        updating_installed_repository=False,
+        persist=False,
+    )
+    repo_path = new_repository.repo_path(app=shed_app)
+    rmm.reset_all_metadata_on_repository_in_tool_shed(repository_clone_url=repo_path)
+    assert len(new_repository.downloadable_revisions) == 3
+
+
+@patch_url_for
+def test_reset_on_repo_with_uninstallable_revisions(shed_app: TestToolShedApp, new_repository: Repository):
+    upload_directories_to_repository(shed_app, new_repository, "column_maker_with_download_gaps")
+    assert len(new_repository.downloadable_revisions) == 3
+    assert "3:" in new_repository.revision()
+    rmm = repository_metadata_manager.RepositoryMetadataManager(
+        app=shed_app,
+        user=new_repository.user,
+        repository=new_repository,
+        resetting_all_metadata_on_repository=True,
+        updating_installed_repository=False,
+        persist=False,
+    )
+    repo_path = new_repository.repo_path(app=shed_app)
+    rmm.reset_all_metadata_on_repository_in_tool_shed(repository_clone_url=repo_path)
+    assert len(new_repository.downloadable_revisions) == 3
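
Because the conftest fixtures compose (shed_app feeds new_user, which feeds new_repository), additional test modules can request a fully provisioned hg repository through a single parameter. A sketch, with an illustrative assertion:

    def test_fixture_composition(new_repository: Repository):
        # pytest builds shed_app and new_user implicitly to satisfy this parameter.
        assert new_repository.user.username
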
diff --git a/test/unit/tool_shed/test_repository_utils.py b/test/unit/tool_shed/test_repository_utils.py
new file mode 100644
index 000000000000..e408929eb494
--- /dev/null
+++ b/test/unit/tool_shed/test_repository_utils.py
@@ -0,0 +1,50 @@
+from tool_shed.util.repository_content_util import upload_tar
+from tool_shed.webapp.model import (
+    Repository,
+    User,
+)
+from ._util import (
+    repository_fixture,
+    TEST_DATA_FILES,
+    TestToolShedApp,
+)
+
+
+def test_create_repository(shed_app: TestToolShedApp, new_user: User):
+    name = "testname"
+    manager = shed_app.hgweb_config_manager
+    entry = None
+    entry_name = f"repos/{new_user.username}/{name}"
+    try:
+        entry = manager.get_entry(entry_name)
+    except Exception:
+        pass
+    assert not entry
+    repository_fixture(shed_app, new_user, name)
+    entry = manager.get_entry(entry_name)
+    assert entry
+
+
+def test_upload_tar(shed_app: TestToolShedApp, new_repository: Repository):
+    tar_resource = TEST_DATA_FILES.joinpath("convert_chars/convert_chars.tar")
+    old_tip = new_repository.tip()
+    upload_ok, _, _, alert, dirs_removed, files_removed = upload_tar(
+        shed_app,
+        "localhost",
+        new_repository.user.username,
+        new_repository,
+        tar_resource,
+        None,
+        "Commit Message",
+    )
+    assert upload_ok
+    assert alert == ""
+    assert dirs_removed == 0
+    assert files_removed == 0
+    new_tip = new_repository.tip()
+    assert old_tip != new_tip
+    changesets = new_repository.get_changesets_for_setting_metadata(shed_app)
+    assert len(changesets) == 1
+    for change in changesets:
+        ctx = new_repository.hg_repo[change]
+        assert str(ctx) == new_tip

From 7ba099ab636500ac7aaa2f092fef6392ec1c6f7e Mon Sep 17 00:00:00 2001
From: John Chilton
Date: Mon, 19 Dec 2022 10:02:45 -0500
Subject: [PATCH 36/73] Fix API uploads to do more work that happens in web uploads.

Allows porting more of the tests to the API.
---
 lib/tool_shed/managers/repositories.py | 22 ++++++++++++++++++++++
 1 file changed, 22 insertions(+)

diff --git a/lib/tool_shed/managers/repositories.py b/lib/tool_shed/managers/repositories.py
index e25bd4eee2d8..da1ebd0f4f1a 100644
--- a/lib/tool_shed/managers/repositories.py
+++ b/lib/tool_shed/managers/repositories.py
@@ -7,7 +7,9 @@
     InternalServerError,
     MalformedContents,
 )
+from galaxy.tool_shed.util import dependency_display
 from tool_shed.metadata import repository_metadata_manager
+from tool_shed.repository_types import util as rt_util
 from tool_shed.structured_app import ToolShedApp
 from tool_shed.util import hg_util
 from tool_shed.util.repository_content_util import upload_tar
@@ -47,6 +49,26 @@
         _, error_message = rmm.set_repository_metadata_due_to_new_tip(host, content_alert_str=content_alert_str)
         if error_message:
             raise InternalServerError(error_message)
+        dd = dependency_display.DependencyDisplayer(app)
+        if str(repository.type) not in [
+            rt_util.REPOSITORY_SUITE_DEFINITION,
+            rt_util.TOOL_DEPENDENCY_DEFINITION,
+        ]:
+            # Provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies
+            # weren't loaded due to a requirement tag mismatch or some other problem. Tool dependency
+            # definitions can define orphan tool dependencies (no relationship to any tools contained in the
+            # repository), so warning messages are important because orphans are always valid. The repository
+            # owner must be warned in case they did not intend to define an orphan dependency, but simply
+            # provided incorrect information (tool shed, name, owner, changeset_revision) for the definition.
+ if repository.metadata_revisions: + # A repository's metadata revisions are order descending by update_time, so the zeroth revision + # will be the tip just after an upload. + metadata_dict = repository.metadata_revisions[0].metadata + else: + metadata_dict = {} + orphan_message = dd.generate_message_for_orphan_tool_dependencies(repository, metadata_dict) + if orphan_message: + message += orphan_message else: raise InternalServerError(message) return message From 8852f0733b99e2902acc7db9faae21000a3f1bf3 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Wed, 2 Nov 2022 13:18:34 -0400 Subject: [PATCH 37/73] Progress toward exercising less of the tool shed upload API. So we can kill it. --- .ci/flake8_ignorelist.txt | 1 + .flake8 | 1 + .isort.cfg | 2 +- lib/tool_shed/test/base/api.py | 15 +- lib/tool_shed/test/base/populators.py | 55 +- lib/tool_shed/test/base/twilltestcase.py | 231 +++---- .../test_0000_basic_repository_features.py | 72 +- ..._0010_repository_with_tool_dependencies.py | 82 +-- ...test_0020_basic_repository_dependencies.py | 24 +- ...st_0030_repository_dependency_revisions.py | 48 +- ...t_0040_repository_circular_dependencies.py | 20 +- ...est_0050_circular_dependencies_4_levels.py | 62 +- .../test/functional/test_0070_invalid_tool.py | 25 +- ...est_0080_advanced_circular_dependencies.py | 20 +- .../test/functional/test_0090_tool_search.py | 50 +- ...st_0100_complex_repository_dependencies.py | 34 +- ..._invalid_simple_repository_dependencies.py | 20 +- ...e_repository_dependency_multiple_owners.py | 20 +- .../functional/test_0140_tool_help_images.py | 10 +- .../test_0150_prior_installation_required.py | 20 +- ...60_circular_prior_installation_required.py | 29 +- ...170_complex_prior_installation_required.py | 33 +- .../test_0300_reset_all_metadata.py | 209 +----- .../functional/test_0310_hg_api_features.py | 21 +- ...test_0420_citable_urls_for_repositories.py | 22 +- .../functional/test_0430_browse_utilities.py | 20 +- ...st_0440_deleting_dependency_definitions.py | 66 +- .../test_0460_upload_to_repository.py | 231 +------ ...st_0470_tool_dependency_repository_type.py | 72 +- ...t_0480_tool_dependency_xml_verification.py | 12 +- .../test_0530_repository_admin_feature.py | 10 +- ...test_0550_metadata_updated_dependencies.py | 51 +- .../test_1000_install_basic_repository.py | 46 +- ...stall_repository_with_tool_dependencies.py | 74 +-- ...repository_with_repository_dependencies.py | 20 +- ...ll_repository_with_dependency_revisions.py | 40 +- ..._repository_basic_circular_dependencies.py | 20 +- ...est_1050_circular_dependencies_4_levels.py | 62 +- .../test/functional/test_1070_invalid_tool.py | 23 +- ...vanced_circular_dependency_installation.py | 20 +- ...est_1090_repository_dependency_handling.py | 20 +- ...install_updated_repository_dependencies.py | 20 +- ...ll_repository_with_complex_dependencies.py | 36 +- ...tory_with_invalid_repository_dependency.py | 20 +- ...e_repository_dependency_multiple_owners.py | 16 +- .../functional/test_1160_tool_help_images.py | 10 +- .../test_1170_prior_installation_required.py | 20 +- ...80_circular_prior_installation_required.py | 30 +- ...190_complex_prior_installation_required.py | 37 +- ...ninstall_and_reinstall_basic_repository.py | 46 +- ...stall_repository_with_tool_dependencies.py | 90 +-- ...ll_repository_with_dependency_revisions.py | 32 +- .../test_1300_reset_all_metadata.py | 179 +---- .../functional/test_1410_update_manager.py | 22 +- .../test_1430_repair_installed_repository.py | 20 +- 
.../functional/test_1460_data_managers.py | 11 +- ...st_1470_updating_installed_repositories.py | 22 +- .../test/test_data/bismark/bismark.tar | Bin 593920 -> 0 bytes .../0470_files/emboss_complex_dependency.tar | Bin 11776 -> 0 bytes .../0480/0}/tool_dependencies.xml | 0 .../bismark/0/bismark_bowtie2_wrapper.xml | 616 ++++++++++++++++++ .../bismark/0/bismark_bowtie_wrapper.xml | 614 +++++++++++++++++ .../bismark/0/bismark_genome_preparation | 492 ++++++++++++++ .../0/bismark_methylation_extractor.py | 186 ++++++ .../0/bismark_methylation_extractor.xml | 306 +++++++++ .../repos/bismark/0/bismark_wrapper.py | 288 ++++++++ .../repos/bismark/0/bowtie2_indices.loc | 37 ++ .../bismark/0/bowtie2_indices.loc.sample | 37 ++ .../bismark/0/tool_data_table_conf.xml.sample | 13 + .../repos/bismark/0/tool_dependencies.xml | 61 ++ .../bismark/1/bismark_bowtie2_wrapper.xml | 616 ++++++++++++++++++ .../bismark/1/bismark_bowtie_wrapper.xml | 614 +++++++++++++++++ .../bismark/1/bismark_genome_preparation | 492 ++++++++++++++ .../1/bismark_methylation_extractor.py | 186 ++++++ .../1}/bismark_methylation_extractor.xml | 0 .../repos/bismark/1/bismark_wrapper.py | 288 ++++++++ .../bismark/1/tool_data_table_conf.xml.sample | 13 + .../repos/bismark/1/tool_dependencies.xml | 61 ++ .../emboss_5_0470/0/emboss_antigenic.xml | 58 ++ .../0/emboss_format_corrector.py | 53 ++ .../emboss_5_0470/0/tool_dependencies.xml | 6 + .../emboss_5_0470/1/emboss_antigenic.xml | 58 ++ .../1/emboss_format_corrector.py | 53 ++ .../emboss_5_0470/1}/tool_dependencies.xml | 0 .../libx11_proto/0}/tool_dependencies.xml | 0 .../libx11_proto/1}/tool_dependencies.xml | 0 .../0}/tool_dependencies.xml | 0 .../1}/tool_dependencies.xml | 0 .../test/test_data/safetar_with_symlink.tar | Bin 0 -> 3072 bytes lib/tool_shed/util/repository_content_util.py | 26 +- lib/tool_shed/webapp/api/repositories.py | 1 - mypy.ini | 2 +- pyproject.toml | 1 + test/unit/webapps/test_tool_validation.py | 22 +- 94 files changed, 5666 insertions(+), 2058 deletions(-) delete mode 100644 lib/tool_shed/test/test_data/bismark/bismark.tar delete mode 100644 lib/tool_shed/test/test_data/emboss/0470_files/emboss_complex_dependency.tar rename lib/tool_shed/test/test_data/{0480_files => repos/0480/0}/tool_dependencies.xml (100%) create mode 100644 lib/tool_shed/test/test_data/repos/bismark/0/bismark_bowtie2_wrapper.xml create mode 100644 lib/tool_shed/test/test_data/repos/bismark/0/bismark_bowtie_wrapper.xml create mode 100755 lib/tool_shed/test/test_data/repos/bismark/0/bismark_genome_preparation create mode 100644 lib/tool_shed/test/test_data/repos/bismark/0/bismark_methylation_extractor.py create mode 100644 lib/tool_shed/test/test_data/repos/bismark/0/bismark_methylation_extractor.xml create mode 100644 lib/tool_shed/test/test_data/repos/bismark/0/bismark_wrapper.py create mode 100755 lib/tool_shed/test/test_data/repos/bismark/0/bowtie2_indices.loc create mode 100755 lib/tool_shed/test/test_data/repos/bismark/0/bowtie2_indices.loc.sample create mode 100644 lib/tool_shed/test/test_data/repos/bismark/0/tool_data_table_conf.xml.sample create mode 100644 lib/tool_shed/test/test_data/repos/bismark/0/tool_dependencies.xml create mode 100644 lib/tool_shed/test/test_data/repos/bismark/1/bismark_bowtie2_wrapper.xml create mode 100644 lib/tool_shed/test/test_data/repos/bismark/1/bismark_bowtie_wrapper.xml create mode 100755 lib/tool_shed/test/test_data/repos/bismark/1/bismark_genome_preparation create mode 100644 
lib/tool_shed/test/test_data/repos/bismark/1/bismark_methylation_extractor.py rename lib/tool_shed/test/test_data/{bismark => repos/bismark/1}/bismark_methylation_extractor.xml (100%) create mode 100644 lib/tool_shed/test/test_data/repos/bismark/1/bismark_wrapper.py create mode 100644 lib/tool_shed/test/test_data/repos/bismark/1/tool_data_table_conf.xml.sample create mode 100644 lib/tool_shed/test/test_data/repos/bismark/1/tool_dependencies.xml create mode 100644 lib/tool_shed/test/test_data/repos/emboss_5_0470/0/emboss_antigenic.xml create mode 100644 lib/tool_shed/test/test_data/repos/emboss_5_0470/0/emboss_format_corrector.py create mode 100644 lib/tool_shed/test/test_data/repos/emboss_5_0470/0/tool_dependencies.xml create mode 100644 lib/tool_shed/test/test_data/repos/emboss_5_0470/1/emboss_antigenic.xml create mode 100644 lib/tool_shed/test/test_data/repos/emboss_5_0470/1/emboss_format_corrector.py rename lib/tool_shed/test/test_data/{emboss/0470_files => repos/emboss_5_0470/1}/tool_dependencies.xml (100%) rename lib/tool_shed/test/test_data/{emboss/libx11_proto/first_tool_dependency => repos/libx11_proto/0}/tool_dependencies.xml (100%) rename lib/tool_shed/test/test_data/{emboss/libx11_proto/second_tool_dependency => repos/libx11_proto/1}/tool_dependencies.xml (100%) rename lib/tool_shed/test/test_data/{emboss/emboss_5_0_0/first_tool_dependency => repos/package_emboss_5_0_0_0470/0}/tool_dependencies.xml (100%) rename lib/tool_shed/test/test_data/{emboss/emboss_5_0_0/second_tool_dependency => repos/package_emboss_5_0_0_0470/1}/tool_dependencies.xml (100%) create mode 100644 lib/tool_shed/test/test_data/safetar_with_symlink.tar diff --git a/.ci/flake8_ignorelist.txt b/.ci/flake8_ignorelist.txt index f1cffc4f5952..d4ced27b5b8d 100644 --- a/.ci/flake8_ignorelist.txt +++ b/.ci/flake8_ignorelist.txt @@ -11,6 +11,7 @@ database doc/build eggs lib/galaxy/web/proxy/js/node_modules +lib/tool_shed/test/test_data/repos static/maps static/scripts test/functional/tools/cwl_tools/v1.?/ diff --git a/.flake8 b/.flake8 index fe463fb975e1..3e086e9d1287 100644 --- a/.flake8 +++ b/.flake8 @@ -7,3 +7,4 @@ # W503 is line breaks before binary operators, which has been reversed in PEP 8. # D** are docstring linting - which we mostly ignore except D302. (Hopefully we will solve more over time). 
ignore = B008,E203,E402,E501,W503,D100,D101,D102,D103,D104,D105,D106,D107,D200,D201,D202,D204,D205,D206,D207,D208,D209,D210,D211,D300,D301,D400,D401,D402,D403,D412,D413 +exclude = lib/tool_shed/test/test_data/repos diff --git a/.isort.cfg b/.isort.cfg index c99585f7f278..ca490cd2930a 100644 --- a/.isort.cfg +++ b/.isort.cfg @@ -10,5 +10,5 @@ profile=black reverse_relative=true skip_gitignore=true # Make isort run faster by skipping database -skip_glob=database/* +skip_glob=database/*,lib/tool_shed/test/test_data/repos/* src_paths=lib diff --git a/lib/tool_shed/test/base/api.py b/lib/tool_shed/test/base/api.py index 3cd998769013..b14c19dd7c15 100644 --- a/lib/tool_shed/test/base/api.py +++ b/lib/tool_shed/test/base/api.py @@ -35,7 +35,7 @@ class ShedBaseTestCase(DrivenFunctionalTestCase): @property def populator(self) -> ToolShedPopulator: if self._populator is None: - self._populator = ToolShedPopulator(self.admin_api_interactor, self.api_interactor) + self._populator = self._get_populator(self.api_interactor) return self._populator @property @@ -50,7 +50,18 @@ def api_interactor(self) -> ShedApiInteractor: password = "testpassword" ensure_user_with_email(self.admin_api_interactor, email, password) user_api_key = self._api_key(email, password) - return ShedApiInteractor(self.url, user_api_key) + return self._api_interactor(user_api_key) + + def _api_interactor_by_credentials(self, email: str, password: str) -> ShedApiInteractor: + ensure_user_with_email(self.admin_api_interactor, email, password) + user_api_key = self._api_key(email, password) + return self._api_interactor(user_api_key) + + def _api_interactor(self, api_key: str) -> ShedApiInteractor: + return ShedApiInteractor(self.url, api_key) + + def _get_populator(self, user_api_interactor) -> ToolShedPopulator: + return ToolShedPopulator(self.admin_api_interactor, user_api_interactor) def _api_key(self, email: str, password: str) -> str: headers = baseauth_headers(email, password) diff --git a/lib/tool_shed/test/base/populators.py b/lib/tool_shed/test/base/populators.py index 01f3f6c0e870..2dd53dc27537 100644 --- a/lib/tool_shed/test/base/populators.py +++ b/lib/tool_shed/test/base/populators.py @@ -81,21 +81,52 @@ def __init__(self, admin_api_interactor: ShedApiInteractor, api_interactor: Shed self._admin_api_interactor = admin_api_interactor self._api_interactor = api_interactor - def setup_test_data_repo(self, test_data_path: str) -> Repository: - prefix = test_data_path.replace("_", "") - category_id = self.new_category(prefix=prefix).id - repository = self.new_repository(category_id, prefix=prefix) - repository_id = repository.id + def setup_bismark_repo(self, repository_id: HasRepositoryId, end: Optional[int] = None): + self.setup_test_data_repo_by_id("bismark", repository_id, assert_ok=False, end=end) + + def setup_test_data_repo_by_id( + self, + test_data_path: str, + repository_id: Optional[HasRepositoryId] = None, + assert_ok=True, + start: int = 0, + end: Optional[int] = None, + ) -> HasRepositoryId: + if repository_id is None: + prefix = test_data_path.replace("_", "") + category_id = self.new_category(prefix=prefix).id + repository = self.new_repository(category_id, prefix=prefix) + repository_id = repository.id + assert repository_id for index, repo_tar in enumerate(repo_tars(test_data_path)): + if index < start: + continue + + if end and index >= end: + break + commit_message = f"Updating {test_data_path} with index {index} with tar {repo_tar}" - response = self.upload_revision( - repository_id, - repo_tar, - 
commit_message=commit_message, - ) - assert response.is_ok + response = self.upload_revision_raw(repository_id, repo_tar, commit_message) + if assert_ok: + api_asserts.assert_status_code_is_ok(response) + assert RepositoryUpdate(__root__=response.json()).is_ok + return repository_id + + def setup_test_data_repo( + self, + test_data_path: str, + repository: Optional[Repository] = None, + assert_ok=True, + start: int = 0, + end: Optional[int] = None, + ) -> Repository: + if repository is None: + prefix = test_data_path.replace("_", "") + category_id = self.new_category(prefix=prefix).id + repository = self.new_repository(category_id, prefix=prefix) + self.setup_test_data_repo_by_id(test_data_path, repository, assert_ok=assert_ok, start=start, end=end) return repository def setup_column_maker_repo(self, prefix=DEFAULT_PREFIX) -> Repository: @@ -152,7 +183,7 @@ def upload_revision_raw( def upload_revision( self, repository: HasRepositoryId, path: Traversable, commit_message: str = DEFAULT_COMMIT_MESSAGE ): - response = self.upload_revision_raw(repository, path, commit_message) + response = self.upload_revision_raw(repository, path, commit_message=commit_message) if response.status_code != 200: response_json = None err_msg = None diff --git a/lib/tool_shed/test/base/twilltestcase.py b/lib/tool_shed/test/base/twilltestcase.py index 3961f1b00ebb..89743aff6877 100644 --- a/lib/tool_shed/test/base/twilltestcase.py +++ b/lib/tool_shed/test/base/twilltestcase.py @@ -1,4 +1,5 @@ import abc +import contextlib import logging import os import shutil @@ -11,6 +12,7 @@ from typing import ( Any, Dict, + Iterator, List, Optional, ) @@ -59,6 +61,8 @@ hgweb_config, xml_util, ) +from tool_shed.test.base.populators import TEST_DATA_REPO_FILES +from tool_shed.util.repository_content_util import tar_open from tool_shed.webapp.model import Repository as DbRepository from tool_shed_client.schema import ( Category, @@ -716,6 +720,12 @@ def last_page(self): def last_url(self): return tc.browser.url + def user_api_interactor(self, email="test@bx.psu.edu", password="testuser"): + return self._api_interactor_by_credentials(email, password) + + def user_populator(self, email="test@bx.psu.edu", password="testuser"): + return self._get_populator(self.user_api_interactor(email=email, password=password)) + def login( self, email="test@bx.psu.edu", password="testuser", username="admin-user", redirect="", logout_first=True ): @@ -1025,7 +1035,7 @@ def create_repository_dependency( repository_names = [] if complex: filename = "tool_dependencies.xml" - self.generate_complex_dependency_xml( + target = self.generate_complex_dependency_xml( filename=filename, filepath=filepath, repository_tuples=repository_tuples, @@ -1037,55 +1047,115 @@ def create_repository_dependency( repository_names.append(name) dependency_description = f"{repository.name} depends on {', '.join(repository_names)}." 
filename = "repository_dependencies.xml"
-            self.generate_simple_dependency_xml(
+            target = self.generate_simple_dependency_xml(
                 repository_tuples=repository_tuples,
                 filename=filename,
                 filepath=filepath,
                 dependency_description=dependency_description,
                 prior_installation_required=prior_installation_required,
             )
-        self.upload_file(
-            repository,
-            filename=filename,
-            filepath=filepath,
-            valid_tools_only=False,
-            uncompress_file=False,
-            remove_repo_files_not_in_tar=False,
-            commit_message=f"Uploaded dependency on {', '.join(repository_names)}.",
-            strings_displayed=None,
-            strings_not_displayed=None,
-        )
+        self.add_file_to_repository(repository, target, filename, strings_displayed=strings_displayed)

     def deactivate_repository(self, installed_repository: galaxy_model.ToolShedRepository) -> None:
         assert self._installation_client
         self._installation_client.deactivate_repository(installed_repository)

-    def delete_files_from_repository(self, repository: Repository, filenames: List[str]):
+    @contextlib.contextmanager
+    def cloned_repo(self, repository: Repository) -> Iterator[str]:
         temp_directory = tempfile.mkdtemp(prefix="toolshedrepowithoutfiles")
         try:
             self.clone_repository(repository, temp_directory)
-            for filename in filenames:
-                to_delete = os.path.join(temp_directory, filename)
-                os.remove(to_delete)
             shutil.rmtree(os.path.join(temp_directory, ".hg"))
-            tf = tempfile.NamedTemporaryFile()
-            with tarfile.open(tf.name, "w:gz") as tar:
-                tar.add(temp_directory, arcname="repo")
-            target = os.path.abspath(tf.name)
-            self.upload_file(
-                repository,
-                filename=os.path.basename(target),
-                filepath=os.path.dirname(target),
-                valid_tools_only=True,
-                uncompress_file=True,
-                remove_repo_files_not_in_tar=True,
-                commit_message="Uploaded revision with deleted files.",
-                strings_displayed=[],
-                strings_not_displayed=[],
-            )
+            contents = os.listdir(temp_directory)
+            if len(contents) == 1 and contents[0] == "repo":
+                yield os.path.join(temp_directory, "repo")
+            else:
+                yield temp_directory
         finally:
             shutil.rmtree(temp_directory)

+    def setup_freebayes_0010_repo(self, repository: Repository):
+        strings_displayed = [
+            "Metadata may have been defined",
+            "This file requires an entry",
+            "tool_data_table_conf",
+        ]
+        self.add_file_to_repository(repository, "freebayes/freebayes.xml", strings_displayed=strings_displayed)
+        strings_displayed = ["Upload a file named sam_fa_indices.loc.sample"]
+        self.add_file_to_repository(repository, "freebayes/tool_data_table_conf.xml.sample", strings_displayed=strings_displayed)
+        self.add_file_to_repository(repository, "freebayes/sam_fa_indices.loc.sample")
+        target = os.path.join("freebayes", "malformed_tool_dependencies", "tool_dependencies.xml")
+        self.add_file_to_repository(
+            repository, target, strings_displayed=["Exception attempting to parse", "invalid element name"]
+        )
+        target = os.path.join("freebayes", "invalid_tool_dependencies", "tool_dependencies.xml")
+        strings_displayed = [
+            "The settings for name, version and type from a contained tool configuration"
+        ]
+        self.add_file_to_repository(repository, target, strings_displayed=strings_displayed)
+        target = os.path.join("freebayes", "tool_dependencies.xml")
+        self.add_file_to_repository(repository, target)
+
+    def add_file_to_repository(
+        self, repository: Repository, source: str, target: Optional[str] = None, strings_displayed=None, commit_message: Optional[str] = None
+    ):
+        with self.cloned_repo(repository) as temp_directory:
+            if target is None:
+                target = os.path.basename(source)
+            full_target =
os.path.join(temp_directory, target) + full_source = TEST_DATA_REPO_FILES.joinpath(source) + shutil.copyfile(str(full_source), full_target) + commit_message = commit_message or "Uploaded revision with added file." + self._upload_dir_to_repository(repository, temp_directory, commit_message=commit_message, strings_displayed=strings_displayed) + + def add_tar_to_repository( + self, repository: Repository, source: str, strings_displayed=None + ): + with self.cloned_repo(repository) as temp_directory: + full_source = TEST_DATA_REPO_FILES.joinpath(source) + tar = tar_open(full_source) + tar.extractall(path=temp_directory) + tar.close() + commit_message = "Uploaded revision with added files from tar." + self._upload_dir_to_repository(repository, temp_directory, commit_message=commit_message, strings_displayed=strings_displayed) + + def commit_tar_to_repository( + self, repository: Repository, source: str, commit_message=None, strings_displayed=None + ): + full_source = TEST_DATA_REPO_FILES.joinpath(source) + assert full_source.is_file(), f"Attempting to upload {full_source} as a tar which is not a file" + populator = self.user_populator() + if strings_displayed is None: + # Just assume this is a valid upload... + populator.upload_revision(repository, full_source, commit_message=commit_message) + else: + response = populator.upload_revision_raw(repository, full_source, commit_message=commit_message) + try: + text = response.json()["message"] + except Exception: + text = response.text + for string_displayed in strings_displayed: + if string_displayed not in text: + raise AssertionError(f"Failed to find {string_displayed} in JSON response {text}") + + def delete_files_from_repository(self, repository: Repository, filenames: List[str]): + with self.cloned_repo(repository) as temp_directory: + for filename in filenames: + to_delete = os.path.join(temp_directory, filename) + os.remove(to_delete) + commit_message = "Uploaded revision with deleted files." + self._upload_dir_to_repository(repository, temp_directory, commit_message=commit_message) + + def _upload_dir_to_repository(self, repository: Repository, target, commit_message, strings_displayed=None): + tf = tempfile.NamedTemporaryFile() + with tarfile.open(tf.name, "w:gz") as tar: + tar.add(target, arcname=".") + target = os.path.abspath(tf.name) + self.commit_tar_to_repository( + repository, target, commit_message=commit_message, strings_displayed=strings_displayed + ) + def delete_repository(self, repository: Repository) -> None: repository_id = repository.id self.visit_url("/admin/browse_repositories") @@ -1241,6 +1311,7 @@ def generate_complex_dependency_xml(self, filename, filepath, repository_tuples, ) # Save the generated xml to the specified location. open(file_path, "w").write(repository_dependency_xml) + return file_path def generate_simple_dependency_xml( self, @@ -1282,6 +1353,7 @@ def generate_simple_dependency_xml( # Save the generated xml to the specified location. 
full_path = os.path.join(filepath, filename) open(full_path, "w").write(repository_dependency_xml) + return full_path def generate_temp_path(self, test_script_path, additional_paths=None): additional_paths = additional_paths or [] @@ -1417,6 +1489,10 @@ def get_repository_tip(self, repository: Repository) -> str: repo = self.get_hg_repo(self.get_repo_path(repository)) return str(repo[repo.changelog.tip()]) + def get_repository_first_revision(self, repository: Repository) -> str: + repo = self.get_hg_repo(self.get_repo_path(repository)) + return str(repo[0]) + def _get_metadata_revision_count(self, repository: Repository) -> int: repostiory_metadata: RepositoryMetadata = self.populator.get_metadata(repository, downloadable_only=False) return len(repostiory_metadata.__root__) @@ -1745,97 +1821,6 @@ def update_installed_repository( assert self._installation_client return self._installation_client.update_installed_repository(installed_repository, verify_no_updates=False) - def upload_file( - self, - repository: Repository, - filename, - filepath, - valid_tools_only, - uncompress_file, - remove_repo_files_not_in_tar, - commit_message, - strings_displayed=None, - strings_not_displayed=None, - ): - if strings_displayed is None: - strings_displayed = [] - if strings_not_displayed is None: - strings_not_displayed = [] - removed_message = "files were removed from the repository" - if remove_repo_files_not_in_tar: - if not self.repository_is_new(repository): - if removed_message not in strings_displayed: - strings_displayed.append(removed_message) - else: - if removed_message not in strings_not_displayed: - strings_not_displayed.append(removed_message) - params = {"repository_id": repository.id} - self.visit_url("/upload/upload", params=params) - if valid_tools_only: - strings_displayed.extend(["has been successfully", "uploaded to the repository."]) - tc.formfile("1", "file_data", self.get_filename(filename, filepath)) - if uncompress_file: - tc.fv(1, "uncompress_file", "Yes") - else: - tc.fv(1, "uncompress_file", "No") - if not self.repository_is_new(repository): - if remove_repo_files_not_in_tar: - tc.fv(1, "remove_repo_files_not_in_tar", "Yes") - else: - tc.fv(1, "remove_repo_files_not_in_tar", "No") - tc.fv(1, "commit_message", commit_message) - tc.submit("upload_button") - self.check_for_strings(strings_displayed, strings_not_displayed) - # Uncomment this if it becomes necessary to wait for an asynchronous process to complete after submitting an upload. 
- # for i in range( 5 ): - # try: - # self.check_for_strings( strings_displayed, strings_not_displayed ) - # break - # except Exception as e: - # if i == 4: - # raise e - # else: - # time.sleep( 1 ) - # continue - - def upload_url( - self, - repository, - url, - filepath, - valid_tools_only, - uncompress_file, - remove_repo_files_not_in_tar, - commit_message, - strings_displayed=None, - strings_not_displayed=None, - ): - removed_message = "files were removed from the repository" - if remove_repo_files_not_in_tar: - if not self.repository_is_new(repository): - if removed_message not in strings_displayed: - strings_displayed.append(removed_message) - else: - if removed_message not in strings_not_displayed: - strings_not_displayed.append(removed_message) - params = {"repository_id": repository.id} - self.visit_url("/upload/upload", params=params) - if valid_tools_only: - strings_displayed.extend(["has been successfully", "uploaded to the repository."]) - tc.fv("1", "url", url) - if uncompress_file: - tc.fv(1, "uncompress_file", "Yes") - else: - tc.fv(1, "uncompress_file", "No") - if not self.repository_is_new(repository): - if remove_repo_files_not_in_tar: - tc.fv(1, "remove_repo_files_not_in_tar", "Yes") - else: - tc.fv(1, "remove_repo_files_not_in_tar", "No") - tc.fv(1, "commit_message", commit_message) - tc.submit("upload_button") - self.check_for_strings(strings_displayed, strings_not_displayed) - def verify_installed_repositories(self, installed_repositories=None, uninstalled_repositories=None): installed_repositories = installed_repositories or [] uninstalled_repositories = uninstalled_repositories or [] diff --git a/lib/tool_shed/test/functional/test_0000_basic_repository_features.py b/lib/tool_shed/test/functional/test_0000_basic_repository_features.py index 1bd1e51948fd..261e9c5a6187 100644 --- a/lib/tool_shed/test/functional/test_0000_basic_repository_features.py +++ b/lib/tool_shed/test/functional/test_0000_basic_repository_features.py @@ -78,17 +78,7 @@ def test_0030_grant_write_access(self): def test_0035_upload_filtering_1_1_0(self): """Upload filtering_1.1.0.tar to the repository""" repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) - self.upload_file( - repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=True, - commit_message="Uploaded filtering 1.1.0", - strings_displayed=[], - strings_not_displayed=[], - ) + self.commit_tar_to_repository(repository, "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0") def test_0040_verify_repository(self): """Display basic repository pages""" @@ -164,17 +154,7 @@ def test_0050_display_repository_tip_file(self): def test_0055_upload_filtering_txt_file(self): """Upload filtering.txt file associated with tool version 1.1.0.""" repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) - self.upload_file( - repository, - filename="filtering/filtering_0000.txt", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded filtering.txt", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_file_to_repository(repository, "filtering/filtering_0000.txt") self.display_manage_repository_page( repository, strings_displayed=["Readme file for filtering 1.1.0"] ) @@ -182,17 +162,7 @@ def test_0055_upload_filtering_txt_file(self): def 
test_0060_upload_filtering_test_data(self): """Upload filtering test data.""" repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) - self.upload_file( - repository, - filename="filtering/filtering_test_data.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded filtering test data", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_tar_to_repository(repository, "filtering/filtering_test_data.tar") self.display_repository_file_contents( repository=repository, filename="1.bed", @@ -205,17 +175,7 @@ def test_0060_upload_filtering_test_data(self): def test_0065_upload_filtering_2_2_0(self): """Upload filtering version 2.2.0""" repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) - self.upload_file( - repository, - filename="filtering/filtering_2.2.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded filtering 2.2.0", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_tar_to_repository(repository, "filtering/filtering_2.2.0.tar") def test_0070_verify_filtering_repository(self): """Verify the new tool versions and repository metadata.""" @@ -245,17 +205,7 @@ def test_0070_verify_filtering_repository(self): def test_0075_upload_readme_txt_file(self): """Upload readme.txt file associated with tool version 2.2.0.""" repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) - self.upload_file( - repository, - filename="readme.txt", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded readme.txt", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_file_to_repository(repository, "readme.txt") self.display_manage_repository_page( repository, strings_displayed=["This is a readme file."] ) @@ -378,17 +328,7 @@ def test_0125_upload_new_readme_file(self): self.login(email=common.test_user_1_email, username=common.test_user_1_name) repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) # Upload readme.txt to the filtering_0000 repository and verify that it is now displayed. 
- self.upload_file( - repository, - filename="filtering/readme.txt", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded new readme.txt with invalid ascii characters.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_file_to_repository(repository, "filtering/readme.txt") self.display_manage_repository_page( repository, strings_displayed=["These characters should not"] ) diff --git a/lib/tool_shed/test/functional/test_0010_repository_with_tool_dependencies.py b/lib/tool_shed/test/functional/test_0010_repository_with_tool_dependencies.py index f4fb76e6acc7..3ec4755caed8 100644 --- a/lib/tool_shed/test/functional/test_0010_repository_with_tool_dependencies.py +++ b/lib/tool_shed/test/functional/test_0010_repository_with_tool_dependencies.py @@ -53,17 +53,8 @@ def test_0010_create_freebayes_repository_and_upload_tool_xml(self): strings_displayed=[], ) assert repository - self.upload_file( - repository, - filename="freebayes/freebayes.xml", - filepath=None, - valid_tools_only=False, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded the tool xml.", - strings_displayed=["Metadata may have been defined", "This file requires an entry", "tool_data_table_conf"], - strings_not_displayed=[], - ) + strings_displayed = ["Metadata may have been defined", "This file requires an entry", "tool_data_table_conf"] + self.add_file_to_repository(repository, "freebayes/freebayes.xml", strings_displayed=strings_displayed) self.display_manage_repository_page( repository, strings_displayed=["Invalid tools"], strings_not_displayed=["Valid tools"] ) @@ -78,17 +69,8 @@ def test_0015_upload_missing_tool_data_table_conf_file(self): Uploading the tool_data_table_conf.xml.sample alone should not make the tool valid, but the error message should change. """ repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) - self.upload_file( - repository, - filename="freebayes/tool_data_table_conf.xml.sample", - filepath=None, - valid_tools_only=False, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded the tool data table sample file.", - strings_displayed=[], - strings_not_displayed=[], - ) + strings_displayed = ["Upload a file named sam_fa_indices.loc.sample"] + self.add_file_to_repository(repository, "freebayes/tool_data_table_conf.xml.sample", strings_displayed=strings_displayed) self.display_manage_repository_page( repository, strings_displayed=["Invalid tools"], strings_not_displayed=["Valid tools"] ) @@ -103,17 +85,7 @@ def test_0020_upload_missing_sample_loc_file(self): Uploading the tool_data_table_conf.xml.sample alone should not make the tool valid, but the error message should change. """ repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) - self.upload_file( - repository, - filename="freebayes/sam_fa_indices.loc.sample", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded tool data table .loc file.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_file_to_repository(repository, "freebayes/sam_fa_indices.loc.sample") def test_0025_upload_malformed_tool_dependency_xml(self): """Upload tool_dependencies.xml with bad characters in the readme tag. 
@@ -122,16 +94,9 @@ def test_0025_upload_malformed_tool_dependency_xml(self): Upload a tool_dependencies.xml file that contains <> in the text of the readme tag. This should show an error message about malformed xml. """ repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) - self.upload_file( - repository, - filename=os.path.join("freebayes", "malformed_tool_dependencies", "tool_dependencies.xml"), - filepath=None, - valid_tools_only=False, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded malformed tool dependency XML.", - strings_displayed=["Exception attempting to parse", "invalid element name"], - strings_not_displayed=[], + target = os.path.join("freebayes", "malformed_tool_dependencies", "tool_dependencies.xml") + self.add_file_to_repository( + repository, target, strings_displayed=["Exception attempting to parse", "invalid element name"] ) def test_0030_upload_invalid_tool_dependency_xml(self): @@ -141,19 +106,11 @@ def test_0030_upload_invalid_tool_dependency_xml(self): This should result in a message about the tool dependency configuration not matching the tool's requirements. """ repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) - self.upload_file( - repository, - filename=os.path.join("freebayes", "invalid_tool_dependencies", "tool_dependencies.xml"), - filepath=None, - valid_tools_only=False, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded invalid tool dependency XML.", - strings_displayed=[ - "The settings for name, version and type from a contained tool configuration" - ], - strings_not_displayed=[], - ) + target = os.path.join("freebayes", "invalid_tool_dependencies", "tool_dependencies.xml") + strings_displayed = [ + "The settings for name, version and type from a contained tool configuration" + ] + self.add_file_to_repository(repository, target, strings_displayed=strings_displayed) def test_0035_upload_valid_tool_dependency_xml(self): """Upload tool_dependencies.xml defining version 0.9.4_9696d0ce8a962f7bb61c4791be5ce44312b81cf8 of the freebayes package. @@ -162,17 +119,8 @@ def test_0035_upload_valid_tool_dependency_xml(self): At this stage, there should be no errors on the upload page, as every missing or invalid file has been corrected. """ repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) - self.upload_file( - repository, - filename=os.path.join("freebayes", "tool_dependencies.xml"), - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded valid tool dependency XML.", - strings_displayed=[], - strings_not_displayed=[], - ) + target = os.path.join("freebayes", "tool_dependencies.xml") + self.add_file_to_repository(repository, target) def test_0040_verify_tool_dependencies(self): """Verify that the uploaded tool_dependencies.xml specifies the correct package versions. 
diff --git a/lib/tool_shed/test/functional/test_0020_basic_repository_dependencies.py b/lib/tool_shed/test/functional/test_0020_basic_repository_dependencies.py index bd4fcd822d3a..47fe9b0a0b84 100644 --- a/lib/tool_shed/test/functional/test_0020_basic_repository_dependencies.py +++ b/lib/tool_shed/test/functional/test_0020_basic_repository_dependencies.py @@ -38,17 +38,7 @@ def test_0010_create_column_maker_repository(self): category=category, strings_displayed=[], ) - self.upload_file( - column_maker_repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.commit_tar_to_repository(column_maker_repository, "column_maker/column_maker.tar") def test_0020_create_emboss_5_repository_and_upload_files(self): """Create and populate the emboss_5_0020 repository.""" @@ -61,17 +51,7 @@ def test_0020_create_emboss_5_repository_and_upload_files(self): category=category, strings_displayed=[], ) - self.upload_file( - repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded emboss.tar", - strings_displayed=[], - strings_not_displayed=[], - ) + self.commit_tar_to_repository(repository, "emboss/emboss.tar") def test_0025_generate_and_upload_repository_dependencies_xml(self): """Generate and upload the repository_dependencies.xml file""" diff --git a/lib/tool_shed/test/functional/test_0030_repository_dependency_revisions.py b/lib/tool_shed/test/functional/test_0030_repository_dependency_revisions.py index 17a403bea00f..a1a54a06efd2 100644 --- a/lib/tool_shed/test/functional/test_0030_repository_dependency_revisions.py +++ b/lib/tool_shed/test/functional/test_0030_repository_dependency_revisions.py @@ -39,16 +39,8 @@ def test_0010_create_emboss_5_repository(self): owner=common.test_user_1_name, category=category, ) - self.upload_file( - repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded tool tarball.", - strings_displayed=[], - strings_not_displayed=[], + self.commit_tar_to_repository( + repository, "emboss/emboss.tar", commit_message="Uploaded tool tarball." 
) def test_0015_create_emboss_6_repository(self): @@ -62,16 +54,8 @@ def test_0015_create_emboss_6_repository(self): owner=common.test_user_1_name, category=category, ) - self.upload_file( - repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded tool tarball.", - strings_displayed=[], - strings_not_displayed=[], + self.commit_tar_to_repository( + repository, "emboss/emboss.tar", commit_message="Uploaded tool tarball.", ) def test_0020_create_dependent_repository(self): @@ -86,16 +70,8 @@ def test_0020_create_dependent_repository(self): category=category, ) if self.repository_is_new(repository): - self.upload_file( - repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded bismark tarball.", - strings_displayed=[], - strings_not_displayed=[], + self.commit_tar_to_repository( + repository, "column_maker/column_maker.tar", commit_message="Uploaded bismark tarball.", ) def test_0025_create_emboss_repository(self): @@ -109,16 +85,8 @@ def test_0025_create_emboss_repository(self): owner=common.test_user_1_name, category=category, ) - self.upload_file( - repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded the tool tarball.", - strings_displayed=[], - strings_not_displayed=[], + self.commit_tar_to_repository( + repository, "emboss/emboss.tar", commit_message="Uploaded the tool tarball.", ) def test_0030_generate_repository_dependencies_for_emboss_5(self): diff --git a/lib/tool_shed/test/functional/test_0040_repository_circular_dependencies.py b/lib/tool_shed/test/functional/test_0040_repository_circular_dependencies.py index c469597b738b..d0382b3de0cc 100644 --- a/lib/tool_shed/test/functional/test_0040_repository_circular_dependencies.py +++ b/lib/tool_shed/test/functional/test_0040_repository_circular_dependencies.py @@ -42,16 +42,10 @@ def test_0010_create_freebayes_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="freebayes/freebayes.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "freebayes/freebayes.tar", commit_message="Uploaded the tool tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0015_create_filtering_repository(self): @@ -66,16 +60,10 @@ def test_0015_create_filtering_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded the tool tarball for filtering 1.1.0.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0020_create_dependency_on_freebayes(self): diff --git a/lib/tool_shed/test/functional/test_0050_circular_dependencies_4_levels.py b/lib/tool_shed/test/functional/test_0050_circular_dependencies_4_levels.py index 1178402c6b2c..6a0609262b8b 100644 --- a/lib/tool_shed/test/functional/test_0050_circular_dependencies_4_levels.py +++ b/lib/tool_shed/test/functional/test_0050_circular_dependencies_4_levels.py @@ -51,16 +51,10 @@ def 
test_0005_create_convert_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="convert_chars/convert_chars.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "convert_chars/convert_chars.tar", commit_message="Uploaded convert_chars tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_create_column_repository(self): @@ -74,16 +68,10 @@ def test_0010_create_column_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0015_create_emboss_datatypes_repository(self): @@ -101,16 +89,10 @@ def test_0020_create_emboss_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0025_create_filtering_repository(self): @@ -124,16 +106,10 @@ def test_0025_create_filtering_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( filtering_repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0030_create_freebayes_repository(self): @@ -147,16 +123,10 @@ def test_0030_create_freebayes_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="freebayes/freebayes.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "freebayes/freebayes.tar", commit_message="Uploaded freebayes tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0035_create_bismark_repository(self): @@ -170,17 +140,7 @@ def test_0035_create_bismark_repository(self): category=category, strings_displayed=[], ) - self.upload_file( - repository, - filename="bismark/bismark.tar", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded bismark tarball.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.user_populator().setup_bismark_repo(repository, end=1) def test_0040_create_and_upload_dependency_definitions(self): column_repository = self._get_repository_by_name_and_owner(column_repository_name, common.test_user_1_name) diff --git a/lib/tool_shed/test/functional/test_0070_invalid_tool.py b/lib/tool_shed/test/functional/test_0070_invalid_tool.py index b3fcd3842d5f..9462bdb3b251 100644 --- a/lib/tool_shed/test/functional/test_0070_invalid_tool.py +++ b/lib/tool_shed/test/functional/test_0070_invalid_tool.py @@ -30,30 +30,9 @@ def test_0005_create_category_and_repository(self): category=category, strings_displayed=[], ) - self.upload_file( - 
repository, - filename="bismark/bismark.tar", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded bismark tarball.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.user_populator().setup_bismark_repo(repository) + invalid_revision = self.get_repository_first_revision(repository) self.display_manage_repository_page(repository, strings_displayed=["Invalid tools"]) - invalid_revision = self.get_repository_tip(repository) - self.upload_file( - repository, - filename="bismark/bismark_methylation_extractor.xml", - filepath=None, - valid_tools_only=False, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded an updated tool xml.", - strings_displayed=[], - strings_not_displayed=[], - ) valid_revision = self.get_repository_tip(repository) tool_guid = f"{self.url.replace('http://', '').rstrip('/')}/repos/user1/bismark_0070/bismark_methylation_extractor/0.7.7.3" tool_metadata_strings_displayed = [ diff --git a/lib/tool_shed/test/functional/test_0080_advanced_circular_dependencies.py b/lib/tool_shed/test/functional/test_0080_advanced_circular_dependencies.py index c00941209860..1d063d2e0f9d 100644 --- a/lib/tool_shed/test/functional/test_0080_advanced_circular_dependencies.py +++ b/lib/tool_shed/test/functional/test_0080_advanced_circular_dependencies.py @@ -35,16 +35,10 @@ def test_0005_create_column_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0005_create_convert_repository(self): @@ -60,16 +54,10 @@ def test_0005_create_convert_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="convert_chars/convert_chars.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "convert_chars/convert_chars.tar", commit_message="Uploaded convert_chars tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0020_create_repository_dependencies(self): diff --git a/lib/tool_shed/test/functional/test_0090_tool_search.py b/lib/tool_shed/test/functional/test_0090_tool_search.py index 566629d42fb5..2ceb5c982509 100644 --- a/lib/tool_shed/test/functional/test_0090_tool_search.py +++ b/lib/tool_shed/test/functional/test_0090_tool_search.py @@ -47,16 +47,10 @@ def test_0005_create_bwa_base_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="bwa/bwa_base.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "bwa/bwa_base.tar", commit_message="Uploaded BWA tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_create_bwa_color_repository(self): @@ -71,16 +65,10 @@ def test_0010_create_bwa_color_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="bwa/bwa_color.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "bwa/bwa_color.tar", commit_message="Uploaded BWA color 
tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0020_create_emboss_repository(self): @@ -94,16 +82,10 @@ def test_0020_create_emboss_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0025_create_filtering_repository(self): @@ -117,16 +99,10 @@ def test_0025_create_filtering_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( filtering_repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0030_create_freebayes_repository(self): @@ -140,16 +116,10 @@ def test_0030_create_freebayes_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="freebayes/freebayes.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "freebayes/freebayes.tar", commit_message="Uploaded freebayes tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0035_create_and_upload_dependency_definitions(self): diff --git a/lib/tool_shed/test/functional/test_0100_complex_repository_dependencies.py b/lib/tool_shed/test/functional/test_0100_complex_repository_dependencies.py index f7841a849445..6ff5a4a431ed 100644 --- a/lib/tool_shed/test/functional/test_0100_complex_repository_dependencies.py +++ b/lib/tool_shed/test/functional/test_0100_complex_repository_dependencies.py @@ -43,17 +43,7 @@ def test_0005_create_bwa_package_repository(self): category=category, strings_displayed=[], ) - self.upload_file( - repository, - filename="bwa/complex/tool_dependencies.xml", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded tool_dependencies.xml.", - strings_displayed=["This repository currently contains a single file named tool_dependencies.xml"], - strings_not_displayed=[], - ) + self.add_file_to_repository(repository, "bwa/complex/tool_dependencies.xml") # Visit the manage repository page for package_bwa_0_5_9_0100. self.display_manage_repository_page( repository, strings_displayed=["Tool dependencies", "will not be", "to this repository"] @@ -73,16 +63,10 @@ def test_0010_create_bwa_base_repository(self): strings_displayed=[], ) # Populate the repository named bwa_base_repository_0100 with a bwa_base tool archive. 
- self.upload_file( + self.commit_tar_to_repository( repository, - filename="bwa/complex/bwa_base.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "bwa/complex/bwa_base.tar", commit_message="Uploaded bwa_base.tar with tool wrapper XML, but without tool dependency XML.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0015_generate_complex_repository_dependency_invalid_shed_url(self): @@ -217,17 +201,7 @@ def test_0040_generate_tool_dependency(self): open(xml_filename, "w").write( open(old_tool_dependency).read().replace("__PATH__", self.get_filename("bwa/complex")) ) - self.upload_file( - tool_repository, - filename=xml_filename, - filepath=new_tool_dependency_path, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded new tool_dependencies.xml.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_file_to_repository(tool_repository, xml_filename, "tool_dependencies.xml") # Verify that the dependency display has been updated as a result of the new tool_dependencies.xml file. repository_tip = self.get_repository_tip(tool_repository) strings_displayed = ["bwa", "0.5.9", "package"] diff --git a/lib/tool_shed/test/functional/test_0110_invalid_simple_repository_dependencies.py b/lib/tool_shed/test/functional/test_0110_invalid_simple_repository_dependencies.py index a2762fe0c007..60b4ad2a0912 100644 --- a/lib/tool_shed/test/functional/test_0110_invalid_simple_repository_dependencies.py +++ b/lib/tool_shed/test/functional/test_0110_invalid_simple_repository_dependencies.py @@ -39,16 +39,10 @@ def test_0010_create_emboss_datatypes_repository_and_upload_tarball(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( column_maker_repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0020_create_emboss_5_repository_and_upload_files(self): @@ -62,16 +56,10 @@ def test_0020_create_emboss_5_repository_and_upload_files(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss tool tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0025_generate_repository_dependency_with_invalid_url(self): diff --git a/lib/tool_shed/test/functional/test_0120_simple_repository_dependency_multiple_owners.py b/lib/tool_shed/test/functional/test_0120_simple_repository_dependency_multiple_owners.py index 670f61f1a25a..71faea2f7758 100644 --- a/lib/tool_shed/test/functional/test_0120_simple_repository_dependency_multiple_owners.py +++ b/lib/tool_shed/test/functional/test_0120_simple_repository_dependency_multiple_owners.py @@ -51,16 +51,10 @@ def test_0005_create_datatypes_repository(self): category=category, strings_displayed=strings_displayed, ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="blast/blast_datatypes.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "blast/blast_datatypes.tar", commit_message="Uploaded 
blast_datatypes tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_verify_datatypes_repository(self): @@ -100,16 +94,10 @@ def test_0015_create_tool_repository(self): category=category, strings_displayed=strings_displayed, ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="blast/blastxml_to_top_descr.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "blast/blastxml_to_top_descr.tar", commit_message="Uploaded blastxml_to_top_descr tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0020_verify_tool_repository(self): diff --git a/lib/tool_shed/test/functional/test_0140_tool_help_images.py b/lib/tool_shed/test/functional/test_0140_tool_help_images.py index 1bc8e1050b8c..bce376d06a6c 100644 --- a/lib/tool_shed/test/functional/test_0140_tool_help_images.py +++ b/lib/tool_shed/test/functional/test_0140_tool_help_images.py @@ -50,16 +50,10 @@ def test_0005_create_htseq_count_repository(self): strings_displayed=[], ) # Upload htseq_count.tar to the repository. - self.upload_file( + self.commit_tar_to_repository( repository, - filename="htseq_count/htseq_count.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, + "htseq_count/htseq_count.tar", commit_message="Uploaded htseq_count.tar.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_load_tool_page(self): diff --git a/lib/tool_shed/test/functional/test_0150_prior_installation_required.py b/lib/tool_shed/test/functional/test_0150_prior_installation_required.py index e8d832eede17..22800d42c359 100644 --- a/lib/tool_shed/test/functional/test_0150_prior_installation_required.py +++ b/lib/tool_shed/test/functional/test_0150_prior_installation_required.py @@ -49,16 +49,10 @@ def test_0005_create_convert_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="convert_chars/convert_chars.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "convert_chars/convert_chars.tar", commit_message="Uploaded convert_chars tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_create_column_repository(self): @@ -72,16 +66,10 @@ def test_0010_create_column_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0015_create_repository_dependency(self): diff --git a/lib/tool_shed/test/functional/test_0160_circular_prior_installation_required.py b/lib/tool_shed/test/functional/test_0160_circular_prior_installation_required.py index 822758a6a712..5ce2a7d0f7d6 100644 --- a/lib/tool_shed/test/functional/test_0160_circular_prior_installation_required.py +++ b/lib/tool_shed/test/functional/test_0160_circular_prior_installation_required.py @@ -48,16 +48,10 @@ def test_0005_create_convert_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="convert_chars/convert_chars.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - 
remove_repo_files_not_in_tar=False, + "convert_chars/convert_chars.tar", commit_message="Uploaded convert_chars tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_create_column_repository(self): @@ -71,16 +65,11 @@ def test_0010_create_column_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", strings_displayed=[], - strings_not_displayed=[], ) def test_0015_create_filtering_repository(self): @@ -94,16 +83,10 @@ def test_0015_create_filtering_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0020_create_repository_dependency(self): diff --git a/lib/tool_shed/test/functional/test_0170_complex_prior_installation_required.py b/lib/tool_shed/test/functional/test_0170_complex_prior_installation_required.py index e37888b4e02f..6b2b78107609 100644 --- a/lib/tool_shed/test/functional/test_0170_complex_prior_installation_required.py +++ b/lib/tool_shed/test/functional/test_0170_complex_prior_installation_required.py @@ -52,16 +52,11 @@ def test_0005_create_matplotlib_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="package_matplotlib/package_matplotlib_1_2.tar", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "package_matplotlib/package_matplotlib_1_2.tar", commit_message="Uploaded matplotlib tool dependency tarball.", - strings_displayed=["This repository currently contains a single file named tool_dependencies.xml"], - strings_not_displayed=[], + strings_displayed=["tool_dependencies.xml"], ) def test_0010_create_numpy_repository(self): @@ -79,16 +74,10 @@ def test_0010_create_numpy_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="package_numpy/package_numpy_1_7.tar", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "package_numpy/package_numpy_1_7.tar", commit_message="Uploaded numpy tool dependency tarball.", - strings_displayed=["This repository currently contains a single file named tool_dependencies.xml"], - strings_not_displayed=[], ) def test_0015_create_complex_repository_dependency(self): @@ -120,17 +109,7 @@ def test_0015_create_complex_repository_dependency(self): new_xml_file = os.path.join(dependency_xml_path, "tool_dependencies.xml") open(new_xml_file, "w").write(original_xml.replace("", processed_xml)) # Upload the generated complex repository dependency XML to the matplotlib repository. 
- self.upload_file( - matplotlib_repository, - filename="tool_dependencies.xml", - filepath=dependency_xml_path, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded complex repository dependency on numpy 1.7.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_file_to_repository(matplotlib_repository, new_xml_file, "tool_dependencies.xml") def test_0020_verify_generated_dependency(self): """Verify that matplotlib now has a package tool dependency and a complex repository dependency. diff --git a/lib/tool_shed/test/functional/test_0300_reset_all_metadata.py b/lib/tool_shed/test/functional/test_0300_reset_all_metadata.py index c920dd5c58ff..f20c29b08cbd 100644 --- a/lib/tool_shed/test/functional/test_0300_reset_all_metadata.py +++ b/lib/tool_shed/test/functional/test_0300_reset_all_metadata.py @@ -65,28 +65,12 @@ def test_0005_create_filtering_repository(self): ) if self.repository_is_new(repository): running_standalone = True - self.upload_file( + self.commit_tar_to_repository( repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="filtering/filtering_2.2.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded filtering 2.2.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], ) + self.add_tar_to_repository(repository, "filtering/filtering_2.2.0.tar") def test_0010_create_freebayes_repository(self): """Create and populate the freebayes_0010 repository.""" @@ -106,50 +90,7 @@ def test_0010_create_freebayes_repository(self): strings_displayed=[], ) if running_standalone: - self.upload_file( - repository, - filename="freebayes/freebayes.xml", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded freebayes.xml.", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="freebayes/tool_data_table_conf.xml.sample", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded tool_data_table_conf.xml.sample", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="freebayes/sam_fa_indices.loc.sample", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded sam_fa_indices.loc.sample", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="freebayes/tool_dependencies.xml", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded tool_dependencies.xml", - strings_displayed=[], - strings_not_displayed=[], - ) + self.setup_freebayes_0010_repo(repository) def test_0015_create_datatypes_0020_repository(self): """Create and populate the column_maker_0020 repository.""" @@ -169,16 +110,10 @@ def test_0015_create_datatypes_0020_repository(self): category=category_0020, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="column_maker/column_maker.tar", - filepath=None, - 
valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0020_create_emboss_0020_repository(self): @@ -199,16 +134,10 @@ def test_0020_create_emboss_0020_repository(self): category=category_0020, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss.tar", - strings_displayed=[], - strings_not_displayed=[], ) def test_0025_create_emboss_datatypes_0030_repository(self): @@ -229,16 +158,10 @@ def test_0025_create_emboss_datatypes_0030_repository(self): category=category_0030, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( column_maker_repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0030_create_emboss_5_repository(self): @@ -259,16 +182,10 @@ def test_0030_create_emboss_5_repository(self): category=category_0030, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( emboss_5_repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss.tar", - strings_displayed=[], - strings_not_displayed=[], ) def test_0035_create_emboss_6_repository(self): @@ -289,16 +206,10 @@ def test_0035_create_emboss_6_repository(self): category=category_0030, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( emboss_6_repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss.tar", - strings_displayed=[], - strings_not_displayed=[], ) def test_0040_create_emboss_0030_repository(self): @@ -319,16 +230,10 @@ def test_0040_create_emboss_0030_repository(self): category=category_0030, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( emboss_repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss.tar", - strings_displayed=[], - strings_not_displayed=[], ) def test_0045_create_repository_dependencies_for_0030(self): @@ -395,16 +300,10 @@ def test_0050_create_freebayes_repository(self): strings_displayed=[], ) if running_standalone: - self.upload_file( + self.commit_tar_to_repository( repository, - filename="freebayes/freebayes.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "freebayes/freebayes.tar", commit_message="Uploaded freebayes tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0055_create_filtering_repository(self): @@ -425,16 +324,10 @@ def test_0055_create_filtering_repository(self): category=category_0040, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - 
valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0060_create_dependency_structure(self): @@ -482,16 +375,10 @@ def test_0065_create_convert_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="convert_chars/convert_chars.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "convert_chars/convert_chars.tar", commit_message="Uploaded convert_chars tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0070_create_column_repository(self): @@ -509,16 +396,10 @@ def test_0070_create_column_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0075_create_emboss_datatypes_repository(self): @@ -539,16 +420,10 @@ def test_0080_create_emboss_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0085_create_filtering_repository(self): @@ -566,16 +441,10 @@ def test_0085_create_filtering_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( filtering_repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0090_create_freebayes_repository(self): @@ -593,16 +462,10 @@ def test_0090_create_freebayes_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="freebayes/freebayes.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "freebayes/freebayes.tar", commit_message="Uploaded freebayes tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0095_create_bismark_repository(self): @@ -620,17 +483,7 @@ def test_0095_create_bismark_repository(self): category=category, strings_displayed=[], ) - self.upload_file( - repository, - filename="bismark/bismark.tar", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded bismark tarball.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.user_populator().setup_bismark_repo(repository, end=1) def test_0100_create_and_upload_dependency_definitions(self): """Create the dependency structure for test 0050.""" diff --git a/lib/tool_shed/test/functional/test_0310_hg_api_features.py b/lib/tool_shed/test/functional/test_0310_hg_api_features.py index c108a2532db5..589d077ef2b5 100644 --- 
a/lib/tool_shed/test/functional/test_0310_hg_api_features.py
+++ b/lib/tool_shed/test/functional/test_0310_hg_api_features.py
@@ -46,27 +46,10 @@ def test_0005_create_filtering_repository(self):
             category=category,
             strings_displayed=[],
         )
-        self.upload_file(
+        self.commit_tar_to_repository(
             repository,
-            filename="filtering/filtering_1.1.0.tar",
-            filepath=None,
-            valid_tools_only=True,
-            uncompress_file=True,
-            remove_repo_files_not_in_tar=True,
+            "filtering/filtering_1.1.0.tar",
             commit_message="Uploaded filtering 1.1.0.",
-            strings_displayed=[],
-            strings_not_displayed=[],
-        )
-        self.upload_file(
-            repository,
-            filename="filtering/filtering_test_data.tar",
-            filepath=None,
-            valid_tools_only=True,
-            uncompress_file=True,
-            remove_repo_files_not_in_tar=False,
-            commit_message="Uploaded filtering test data.",
-            strings_displayed=[],
-            strings_not_displayed=[],
         )

     def test_0010_clone(self):
diff --git a/lib/tool_shed/test/functional/test_0420_citable_urls_for_repositories.py b/lib/tool_shed/test/functional/test_0420_citable_urls_for_repositories.py
index 323ef273d692..31d06c129fe2 100644
--- a/lib/tool_shed/test/functional/test_0420_citable_urls_for_repositories.py
+++ b/lib/tool_shed/test/functional/test_0420_citable_urls_for_repositories.py
@@ -59,16 +59,10 @@ def test_0005_create_repository(self):
             category=category,
             strings_displayed=strings_displayed,
         )
-        self.upload_file(
+        self.commit_tar_to_repository(
             repository,
-            filename="filtering/filtering_2.2.0.tar",
-            filepath=None,
-            valid_tools_only=True,
-            uncompress_file=True,
-            remove_repo_files_not_in_tar=False,
+            "filtering/filtering_2.2.0.tar",
             commit_message="Uploaded filtering 2.2.0 tarball.",
-            strings_displayed=[],
-            strings_not_displayed=[],
         )
         # We'll be checking for this hash later, after uploading another file to the repository, making get_repository_tip() not usable.
         first_changeset_hash = self.get_repository_tip(repository)
@@ -81,17 +75,7 @@ def test_0010_upload_new_file_to_repository(self):
         The repository should now contain two changeset revisions, 0: and 1:.
         """
         repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name)
-        self.upload_file(
-            repository,
-            filename="readme.txt",
-            filepath=None,
-            valid_tools_only=True,
-            uncompress_file=True,
-            remove_repo_files_not_in_tar=False,
-            commit_message="Uploaded readme.txt.",
-            strings_displayed=[],
-            strings_not_displayed=[],
-        )
+        self.add_file_to_repository(repository, "readme.txt")

     def test_0015_load_user_view_page(self):
         """Load the /view/ page and check for strings.
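Taken together, the hunks above show the shape of the replacement upload API. The helper names (commit_tar_to_repository, add_file_to_repository, add_tar_to_repository) come from the patch itself; the parameter lists below are only inferred from the call sites in this series and are assumptions rather than the shipped definitions, which live in lib/tool_shed/test/base/twilltestcase.py. A minimal sketch:

    from typing import List, Optional

    class ShedUploadHelpersSketch:
        """Approximate signatures of the helpers these hunks switch to."""

        def commit_tar_to_repository(
            self,
            repository,  # a Repository model object
            source: str,  # tar path, relative to the shared test-data directory
            commit_message: Optional[str] = None,
            strings_displayed: Optional[List[str]] = None,  # substrings asserted in the response
        ) -> None:
            """Commit the contents of a tarball to the repository as one new revision."""

        def add_file_to_repository(
            self,
            repository,
            source: str,
            target: Optional[str] = None,  # optional name to commit the file under
            commit_message: Optional[str] = None,
        ) -> None:
            """Commit a single file to an existing repository."""

        def add_tar_to_repository(self, repository, source: str) -> None:
            """Layer a further tarball onto a repository that already has revisions."""

Every call site that previously spelled out filepath, valid_tools_only, uncompress_file, and remove_repo_files_not_in_tar now relies on the helper to pick sensible defaults for those.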
diff --git a/lib/tool_shed/test/functional/test_0430_browse_utilities.py b/lib/tool_shed/test/functional/test_0430_browse_utilities.py index 4a817c031199..104d2e0e28b3 100644 --- a/lib/tool_shed/test/functional/test_0430_browse_utilities.py +++ b/lib/tool_shed/test/functional/test_0430_browse_utilities.py @@ -54,16 +54,10 @@ def test_0010_create_emboss_repository(self): category=category, strings_displayed=strings_displayed, ) - self.upload_file( + self.commit_tar_to_repository( emboss_repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss.tar.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0020_create_tool_dependency_repository(self): @@ -85,16 +79,10 @@ def test_0020_create_tool_dependency_repository(self): category=category, strings_displayed=strings_displayed, ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="freebayes/freebayes.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "freebayes/freebayes.tar", commit_message="Uploaded freebayes.tar.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0030_browse_tools(self): diff --git a/lib/tool_shed/test/functional/test_0440_deleting_dependency_definitions.py b/lib/tool_shed/test/functional/test_0440_deleting_dependency_definitions.py index 21313d07287f..1da5abd03751 100644 --- a/lib/tool_shed/test/functional/test_0440_deleting_dependency_definitions.py +++ b/lib/tool_shed/test/functional/test_0440_deleting_dependency_definitions.py @@ -83,16 +83,10 @@ def test_0005_create_column_maker_repository(self): category=category, strings_displayed=strings_displayed, ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column maker tool tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_create_convert_chars_repository(self): @@ -111,16 +105,10 @@ def test_0010_create_convert_chars_repository(self): category=category, strings_displayed=strings_displayed, ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="convert_chars/convert_chars.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "convert_chars/convert_chars.tar", commit_message="Uploaded convert chars tool tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0015_create_dependency_on_convert_chars(self): @@ -213,17 +201,7 @@ def test_0030_create_bwa_package_repository(self): category=category, strings_displayed=strings_displayed, ) - self.upload_file( - repository, - filename="bwa/complex/tool_dependencies.xml", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded package tool dependency definition.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_file_to_repository(repository, "bwa/complex/tool_dependencies.xml") def test_0035_create_bwa_base_repository(self): """Create and populate the bwa_base_0440 repository. 
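In the hunks around here, every single-file upload of bwa/complex/tool_dependencies.xml collapses to add_file_to_repository. One plausible way such a helper can reduce to the tar primitive, shown purely as an illustrative sketch (make_single_file_tar is a hypothetical name, not anything in the patch), is to pack the file into an in-memory tar and reuse the tar upload path:

    import io
    import os
    import tarfile

    def make_single_file_tar(source_path: str, target_name: str = "") -> io.BytesIO:
        """Pack one file into an in-memory tar so single-file commits can reuse the tar upload path."""
        target_name = target_name or os.path.basename(source_path)
        buffer = io.BytesIO()
        with tarfile.open(fileobj=buffer, mode="w") as tar:
            # arcname controls the path recorded inside the tar, i.e. where the
            # file will land in the repository working tree.
            tar.add(source_path, arcname=target_name)
        buffer.seek(0)
        return buffer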
@@ -242,16 +220,10 @@ def test_0035_create_bwa_base_repository(self): category=category, strings_displayed=strings_displayed, ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="bwa/complex/bwa_base.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "bwa/complex/bwa_base.tar", commit_message="Uploaded BWA nucleotide space mapping tool tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0040_create_dependency_on_bwa_package_repository(self): @@ -342,17 +314,7 @@ def test_0055_create_bwa_tool_dependency_repository(self): category=category, strings_displayed=strings_displayed, ) - self.upload_file( - repository, - filename="bwa/complex/tool_dependencies.xml", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded package tool dependency definition.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_file_to_repository(repository, "bwa/complex/tool_dependencies.xml") def test_0060_delete_bwa_tool_dependency_definition(self): """Delete the tool_dependencies.xml file from bwa_tool_dependency_0440. @@ -400,17 +362,7 @@ def test_0065_reupload_bwa_tool_dependency_definition(self): # Record the current tip, so we can verify that it's still not a downloadable revision after tool_dependencies.xml # is re-uploaded and a new downloadable revision is created. old_changeset_revision = self.get_repository_tip(repository) - self.upload_file( - repository, - filename="bwa/complex/tool_dependencies.xml", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded package tool dependency definition.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_file_to_repository(repository, "bwa/complex/tool_dependencies.xml") new_changeset_revision = self.get_repository_tip(repository) # Check that the old changeset revision is still downloadable. 
metadata_record = self._get_repository_metadata_by_changeset_revision(repository, old_changeset_revision) diff --git a/lib/tool_shed/test/functional/test_0460_upload_to_repository.py b/lib/tool_shed/test/functional/test_0460_upload_to_repository.py index 07b43d6f9d28..5c8cb5ea9907 100644 --- a/lib/tool_shed/test/functional/test_0460_upload_to_repository.py +++ b/lib/tool_shed/test/functional/test_0460_upload_to_repository.py @@ -87,16 +87,10 @@ def test_0010_create_bwa_package_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.add_file_to_repository( repository, - filename="bwa/complex/tool_dependencies.xml", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, + "bwa/complex/tool_dependencies.xml", commit_message="Populate package_bwa_0_5_9_0460 with a tool dependency definition.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0015_create_tool_dependency_repositories(self): @@ -134,17 +128,7 @@ def test_0020_populate_complex_dependency_test_1_0460(self): """ repository = self._get_repository_by_name_and_owner("complex_dependency_test_1_0460", common.test_user_1_name) package_repository = self._get_repository_by_name_and_owner("package_bwa_0_5_9_0460", common.test_user_1_name) - self.upload_file( - repository, - filename="0460_files/tool_dependencies.xml", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded complex repository dependency definition.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_file_to_repository(repository, "0460_files/tool_dependencies.xml") changeset_revision = self.get_repository_tip(package_repository) strings_displayed = ["package_bwa_0_5_9_0460", "bwa", "0.5.9", "package", changeset_revision] self.display_manage_repository_page(repository, strings_displayed=strings_displayed) @@ -160,16 +144,10 @@ def test_0025_populate_complex_dependency_test_2_0460(self): """ repository = self._get_repository_by_name_and_owner("complex_dependency_test_2_0460", common.test_user_1_name) package_repository = self._get_repository_by_name_and_owner("package_bwa_0_5_9_0460", common.test_user_1_name) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="0460_files/tool_dependencies_in_root.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=True, + "0460_files/tool_dependencies_in_root.tar", commit_message="Uploaded complex repository dependency definition.", - strings_displayed=[], - strings_not_displayed=[], ) changeset_revision = self.get_repository_tip(package_repository) strings_displayed = ["package_bwa_0_5_9_0460", "bwa", "0.5.9", "package", changeset_revision] @@ -186,16 +164,10 @@ def test_0030_populate_complex_dependency_test_3_0460(self): """ repository = self._get_repository_by_name_and_owner("complex_dependency_test_3_0460", common.test_user_1_name) package_repository = self._get_repository_by_name_and_owner("package_bwa_0_5_9_0460", common.test_user_1_name) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="0460_files/tool_dependencies_in_subfolder.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=True, + "0460_files/tool_dependencies_in_subfolder.tar", commit_message="Uploaded complex repository dependency definition.", - strings_displayed=[], - strings_not_displayed=[], ) changeset_revision = 
self.get_repository_tip(package_repository) strings_displayed = ["package_bwa_0_5_9_0460", "bwa", "0.5.9", "package", changeset_revision] @@ -218,17 +190,7 @@ def test_0035_create_repositories_for_url_upload(self): category=category, strings_displayed=[], ) - self.upload_file( - repository, - filename="0460_files/tool_dependencies.xml", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Populate hg_tool_dependency_0460 with a tool dependency definition.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_file_to_repository(repository, "0460_files/tool_dependencies.xml") repository = self.get_or_create_repository( name="hg_subfolder_tool_dependency_0460", description=bwa_repository_description, @@ -237,68 +199,10 @@ def test_0035_create_repositories_for_url_upload(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="0460_files/tool_dependencies_in_subfolder.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, + "0460_files/tool_dependencies_in_subfolder.tar", commit_message="Populate hg_subfolder_tool_dependency_0460 with a tool dependency definition.", - strings_displayed=[], - strings_not_displayed=[], - ) - - def test_0040_url_upload_to_complex_test(self): - """Populate complex_dependency_test_4_0460. - - This is step 8 - Upload to complex_dependency_test_4_0460 using the url hg:///repos/user1/hg_tool_dependency_0460. - """ - url = f"hg://{self.host}:{self.port}/repos/user1/hg_tool_dependency_0460" - repository = self._get_repository_by_name_and_owner("complex_dependency_test_4_0460", common.test_user_1_name) - package_repository = self._get_repository_by_name_and_owner("package_bwa_0_5_9_0460", common.test_user_1_name) - self.upload_url( - repository, - url=url, - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=True, - commit_message="Uploaded complex repository dependency definition.", - strings_displayed=[], - strings_not_displayed=[], - ) - changeset_revision = self.get_repository_tip(package_repository) - strings_displayed = ["package_bwa_0_5_9_0460", "bwa", "0.5.9", "package", changeset_revision] - self.display_manage_repository_page(repository, strings_displayed=strings_displayed) - self.display_repository_file_contents( - repository, filename="tool_dependencies.xml", strings_displayed=[changeset_revision] - ) - - def test_0045_url_upload_to_complex_test(self): - """Populate complex_dependency_test_4_0460. - - This is step 9 - Upload to complex_dependency_test_5_0460 using the url hg:///repos/user1/hg_subfolder_tool_dependency_0460. 
- """ - url = f"hg://{self.host}:{self.port}/repos/user1/hg_subfolder_tool_dependency_0460" - repository = self._get_repository_by_name_and_owner("complex_dependency_test_5_0460", common.test_user_1_name) - package_repository = self._get_repository_by_name_and_owner("package_bwa_0_5_9_0460", common.test_user_1_name) - self.upload_url( - repository, - url=url, - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=True, - commit_message="Uploaded complex repository dependency definition.", - strings_displayed=[], - strings_not_displayed=[], - ) - changeset_revision = self.get_repository_tip(package_repository) - strings_displayed = ["package_bwa_0_5_9_0460", "bwa", "0.5.9", "package", changeset_revision] - self.display_manage_repository_page(repository, strings_displayed=strings_displayed) - self.display_repository_file_contents( - repository, filename="tool_dependencies.xml", filepath="subfolder", strings_displayed=[changeset_revision] ) def test_0050_create_repositories_for_simple_dependencies(self): @@ -338,17 +242,7 @@ def test_0055_populate_repository_dependency_test_1_0460(self): "repository_dependency_test_1_0460", common.test_user_1_name ) package_repository = self._get_repository_by_name_and_owner(bwa_repository_name, common.test_user_1_name) - self.upload_file( - repository, - filename="0460_files/repository_dependencies.xml", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded complex repository dependency definition.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_file_to_repository(repository, "0460_files/repository_dependencies.xml") changeset_revision = self.get_repository_tip(package_repository) strings_displayed = [bwa_repository_name, "user1", changeset_revision] self.display_manage_repository_page(repository, strings_displayed=strings_displayed) @@ -365,16 +259,10 @@ def test_0060_populate_repository_dependency_test_2_0460(self): "repository_dependency_test_2_0460", common.test_user_1_name ) package_repository = self._get_repository_by_name_and_owner(bwa_repository_name, common.test_user_1_name) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="0460_files/in_root/repository_dependencies_in_root.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=True, + "0460_files/in_root/repository_dependencies_in_root.tar", commit_message="Uploaded complex repository dependency definition.", - strings_displayed=[], - strings_not_displayed=[], ) changeset_revision = self.get_repository_tip(package_repository) strings_displayed = [bwa_repository_name, "user1", changeset_revision] @@ -393,16 +281,10 @@ def test_0065_populate_repository_dependency_test_3_0460(self): "repository_dependency_test_3_0460", common.test_user_1_name ) package_repository = self._get_repository_by_name_and_owner(bwa_repository_name, common.test_user_1_name) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="0460_files/in_subfolder/repository_dependencies_in_subfolder.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=True, + "0460_files/in_subfolder/repository_dependencies_in_subfolder.tar", commit_message="Uploaded complex repository dependency definition.", - strings_displayed=[], - strings_not_displayed=[], ) changeset_revision = self.get_repository_tip(package_repository) strings_displayed = [bwa_repository_name, 
"user1", changeset_revision] @@ -429,17 +311,7 @@ def test_0070_create_repositories_for_url_upload(self): category=category, strings_displayed=[], ) - self.upload_file( - repository, - filename="0460_files/repository_dependencies.xml", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Populate hg_repository_dependency_0460 with a tool dependency definition.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_file_to_repository(repository, "0460_files/repository_dependencies.xml") repository = self.get_or_create_repository( name="hg_subfolder_repository_dependency_0460", description=bwa_repository_description, @@ -448,75 +320,8 @@ def test_0070_create_repositories_for_url_upload(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="0460_files/in_subfolder/repository_dependencies_in_subfolder.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, + "0460_files/in_subfolder/repository_dependencies_in_subfolder.tar", commit_message="Populate hg_subfolder_repository_dependency_0460 with a tool dependency definition.", - strings_displayed=[], - strings_not_displayed=[], - ) - - def test_0075_url_upload_to_complex_test(self): - """Populate repository_dependency_test_4_0460. - - This is step 15 - Upload to repository_dependency_test_4_0460 using the url - hg:///repos/user1/hg_repository_dependency_0460. - """ - url = f"hg://{self.host}:{self.port}/repos/user1/hg_repository_dependency_0460" - repository = self._get_repository_by_name_and_owner( - "repository_dependency_test_4_0460", common.test_user_1_name - ) - package_repository = self._get_repository_by_name_and_owner(bwa_repository_name, common.test_user_1_name) - self.upload_url( - repository, - url=url, - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=True, - commit_message="Uploaded repository dependency definition.", - strings_displayed=[], - strings_not_displayed=[], - ) - changeset_revision = self.get_repository_tip(package_repository) - strings_displayed = [bwa_repository_name, "user1", changeset_revision] - self.display_manage_repository_page(repository, strings_displayed=strings_displayed) - self.display_repository_file_contents( - repository, filename="repository_dependencies.xml", strings_displayed=[changeset_revision] - ) - - def test_0080_url_upload_to_complex_test(self): - """Populate repository_dependency_test_4_0460. - - This is step 16 - Upload to repository_dependency_test_5_0460 using the url - hg:///repos/user1/hg_subfolder_repository_dependency_0460. 
- """ - url = f"hg://{self.host}:{self.port}/repos/user1/hg_subfolder_repository_dependency_0460" - repository = self._get_repository_by_name_and_owner( - "repository_dependency_test_5_0460", common.test_user_1_name - ) - package_repository = self._get_repository_by_name_and_owner(bwa_repository_name, common.test_user_1_name) - self.upload_url( - repository, - url=url, - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=True, - commit_message="Uploaded repository dependency definition.", - strings_displayed=[], - strings_not_displayed=[], - ) - changeset_revision = self.get_repository_tip(package_repository) - strings_displayed = [bwa_repository_name, "user1", changeset_revision] - self.display_manage_repository_page(repository, strings_displayed=strings_displayed) - self.display_repository_file_contents( - repository, - filename="repository_dependencies.xml", - filepath="subfolder", - strings_displayed=[changeset_revision], ) diff --git a/lib/tool_shed/test/functional/test_0470_tool_dependency_repository_type.py b/lib/tool_shed/test/functional/test_0470_tool_dependency_repository_type.py index 8086ac37b12b..79df2baff835 100644 --- a/lib/tool_shed/test/functional/test_0470_tool_dependency_repository_type.py +++ b/lib/tool_shed/test/functional/test_0470_tool_dependency_repository_type.py @@ -90,17 +90,7 @@ def test_0005_create_libx11_repository(self): strings_displayed=[], ) # Upload the tool dependency definition to the package_x11_client_1_5_proto_7_0_0470 repository. - self.upload_file( - repository, - filename="emboss/libx11_proto/first_tool_dependency/tool_dependencies.xml", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Populate package_x11_client_1_5_proto_7_0_0470 with tool dependency definitions.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.user_populator().setup_test_data_repo("libx11_proto", repository, end=1) def test_0010_create_emboss_5_0_0_repository(self): """Create and populate package_emboss_5_0_0_0470. @@ -121,17 +111,7 @@ def test_0010_create_emboss_5_0_0_repository(self): strings_displayed=[], ) # Upload the edited tool dependency definition to the package_emboss_5_0_0 repository. - self.upload_file( - repository, - filename="emboss/emboss_5_0_0/first_tool_dependency/tool_dependencies.xml", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Populate package_emboss_5_0_0_0470 with tool dependency definitions.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.user_populator().setup_test_data_repo("package_emboss_5_0_0_0470", repository, end=1) def test_0015_create_emboss_5_repository(self): """Create and populate emboss_5_0470. @@ -152,17 +132,7 @@ def test_0015_create_emboss_5_repository(self): strings_displayed=[], ) # Populate emboss_5 with tool and dependency definitions. - self.upload_file( - repository, - filename="emboss/0470_files/emboss_complex_dependency.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Populate emboss_5 with tool and dependency definitions.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.user_populator().setup_test_data_repo("emboss_5_0470", repository, end=1) def test_0020_upload_updated_tool_dependency_to_package_x11(self): """Upload a new tool_dependencies.xml to package_x11_client_1_5_proto_7_0_0470. 
@@ -174,17 +144,7 @@ def test_0020_upload_updated_tool_dependency_to_package_x11(self): package_libx11_repository_name, common.test_user_1_name ) # Upload the tool dependency definition to the package_x11_client_1_5_proto_7_0_0470 repository. - self.upload_file( - package_x11_repository, - filename="emboss/libx11_proto/second_tool_dependency/tool_dependencies.xml", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Populate package_x11_client_1_5_proto_7_0_0470 with tool dependency definitions.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.user_populator().setup_test_data_repo("libx11_proto", package_x11_repository, start=1, end=2) count = self._get_metadata_revision_count(package_x11_repository) assert count == 1, ( "package_x11_client_1_5_proto_7_0_0470 has incorrect number of metadata revisions, expected 1 but found %d" @@ -202,16 +162,8 @@ def test_0025_upload_updated_tool_dependency_to_package_emboss(self): package_emboss_repository_name, common.test_user_1_name ) # Populate package_emboss_5_0_0_0470 with updated tool dependency definition. - self.upload_file( - package_emboss_repository, - filename="emboss/emboss_5_0_0/second_tool_dependency/tool_dependencies.xml", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Populate package_emboss_5_0_0_0470 with tool dependency definitions.", - strings_displayed=[], - strings_not_displayed=[], + self.user_populator().setup_test_data_repo( + "package_emboss_5_0_0_0470", package_emboss_repository, start=1, end=2 ) count = self._get_metadata_revision_count(package_emboss_repository) assert count == 2, ( @@ -227,17 +179,7 @@ def test_0030_upload_updated_tool_dependency_to_emboss_5_repository(self): """ emboss_repository = self._get_repository_by_name_and_owner(emboss_repository_name, common.test_user_1_name) # Populate package_emboss_5_0_0_0470 with updated tool dependency definition. 
- self.upload_file( - emboss_repository, - filename="emboss/0470_files/tool_dependencies.xml", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Upload updated complex repository dependency definition to emboss_5_0470.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.user_populator().setup_test_data_repo("emboss_5_0470", emboss_repository, start=1, end=2) count = self._get_metadata_revision_count(emboss_repository) assert count == 2, "package_emboss_5_0_0_0470 has incorrect number of metadata revisions" diff --git a/lib/tool_shed/test/functional/test_0480_tool_dependency_xml_verification.py b/lib/tool_shed/test/functional/test_0480_tool_dependency_xml_verification.py index 45510dd92d5d..48f121ead36b 100644 --- a/lib/tool_shed/test/functional/test_0480_tool_dependency_xml_verification.py +++ b/lib/tool_shed/test/functional/test_0480_tool_dependency_xml_verification.py @@ -48,17 +48,7 @@ def test_0005_create_tool_dependency_repository(self): category=category, strings_displayed=[], ) - self.upload_file( - repository, - filename="0480_files/tool_dependencies.xml", - filepath=None, - valid_tools_only=False, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Populate package_invalid_tool_dependency_xml_1_0_0 with an improperly defined tool dependency.", - strings_displayed=["package cannot be installed because", "missing either an <actions> tag set"], - strings_not_displayed=[], - ) + self.user_populator().setup_test_data_repo("0480", repository, assert_ok=False) def test_0010_populate_tool_dependency_repository(self): """Verify package_invalid_tool_dependency_xml_1_0_0. diff --git a/lib/tool_shed/test/functional/test_0530_repository_admin_feature.py b/lib/tool_shed/test/functional/test_0530_repository_admin_feature.py index 3465af3c582c..19294287a283 100644 --- a/lib/tool_shed/test/functional/test_0530_repository_admin_feature.py +++ b/lib/tool_shed/test/functional/test_0530_repository_admin_feature.py @@ -57,16 +57,10 @@ def test_0005_create_filtering_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_verify_repository_admin_role_exists(self): diff --git a/lib/tool_shed/test/functional/test_0550_metadata_updated_dependencies.py b/lib/tool_shed/test/functional/test_0550_metadata_updated_dependencies.py index 2787292505f1..f622b05ac2ab 100644 --- a/lib/tool_shed/test/functional/test_0550_metadata_updated_dependencies.py +++ b/lib/tool_shed/test/functional/test_0550_metadata_updated_dependencies.py @@ -67,16 +67,9 @@ def test_0005_freebayes_repository(self): strings_displayed=[], ) assert freebayes is not None, f"Error creating freebayes {repositories['freebayes']['name']}" - self.upload_file( + self.commit_tar_to_repository( freebayes, - filename="0550_files/package_freebayes_1_0550.tgz", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded.", - strings_displayed=["has been successfully uploaded to the repository", "contains a single file"], - strings_not_displayed=None, + "0550_files/package_freebayes_1_0550.tgz", ) # Visit the 
manage repository page for package_freebayes_0_5_9_0100. self.display_manage_repository_page( @@ -95,16 +88,10 @@ def test_0010_create_samtools_repository(self): strings_displayed=[], ) assert samtools is not None, f"Error creating samtools {repositories['samtools']['name']}" - self.upload_file( + self.commit_tar_to_repository( samtools, - filename="0550_files/package_samtools_1_0550.tgz", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "0550_files/package_samtools_1_0550.tgz", commit_message="Uploaded samtools 1.0.", - strings_displayed=["has been successfully uncompressed and uploaded to the repository"], - strings_not_displayed=[], ) def test_0015_create_filtering_repository(self): @@ -119,16 +106,10 @@ def test_0015_create_filtering_repository(self): strings_displayed=[], ) assert repository is not None, f"Error creating repository {repositories['filtering']['name']}" - self.upload_file( + self.commit_tar_to_repository( repository, - filename="0550_files/filtering_1.0.tgz", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "0550_files/filtering_1.0.tgz", commit_message="Uploaded filtering 1.0.", - strings_displayed=["has been successfully uncompressed and uploaded to the repository"], - strings_not_displayed=[], ) def test_0020_check_repository_dependency(self): @@ -146,27 +127,15 @@ def test_0025_update_dependent_repositories(self): freebayes = self._get_repository_by_name_and_owner(repositories["freebayes"]["name"], common.test_user_1_name) samtools = self._get_repository_by_name_and_owner(repositories["samtools"]["name"], common.test_user_1_name) filtering = self._get_repository_by_name_and_owner(repositories["filtering"]["name"], common.test_user_1_name) - self.upload_file( + self.commit_tar_to_repository( freebayes, - filename="0550_files/package_freebayes_2_0550.tgz", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "0550_files/package_freebayes_2_0550.tgz", commit_message="Uploaded freebayes 2.0.", - strings_displayed=["has been successfully uncompressed and uploaded to the repository"], - strings_not_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( samtools, - filename="0550_files/package_samtools_2_0550.tgz", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "0550_files/package_samtools_2_0550.tgz", commit_message="Uploaded samtools 2.0.", - strings_displayed=["has been successfully uncompressed and uploaded to the repository"], - strings_not_displayed=[], ) strings_displayed = [ repositories["freebayes"]["name"], diff --git a/lib/tool_shed/test/functional/test_1000_install_basic_repository.py b/lib/tool_shed/test/functional/test_1000_install_basic_repository.py index 0b6ec2c89dcc..81760f008c34 100644 --- a/lib/tool_shed/test/functional/test_1000_install_basic_repository.py +++ b/lib/tool_shed/test/functional/test_1000_install_basic_repository.py @@ -37,50 +37,14 @@ def test_0005_ensure_repositories_and_categories_exist(self): category=category, ) if self.repository_is_new(repository): - self.upload_file( + self.commit_tar_to_repository( repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], - ) 
- self.upload_file( - repository, - filename="filtering/filtering_0000.txt", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded readme for 1.1.0", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="filtering/filtering_2.2.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded filtering 2.2.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="readme.txt", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded readme for 2.2.0", - strings_displayed=[], - strings_not_displayed=[], ) + self.add_file_to_repository(repository, "filtering/filtering_0000.txt") + self.add_tar_to_repository(repository, "filtering/filtering_2.2.0.tar") + self.add_file_to_repository(repository, "readme.txt") def test_0010_browse_tool_sheds(self): """Browse the available tool sheds in this Galaxy instance.""" diff --git a/lib/tool_shed/test/functional/test_1010_install_repository_with_tool_dependencies.py b/lib/tool_shed/test/functional/test_1010_install_repository_with_tool_dependencies.py index 7348eda4f8a4..2df6669aee2f 100644 --- a/lib/tool_shed/test/functional/test_1010_install_repository_with_tool_dependencies.py +++ b/lib/tool_shed/test/functional/test_1010_install_repository_with_tool_dependencies.py @@ -1,5 +1,4 @@ import logging -import os from ..base.twilltestcase import ( common, @@ -37,78 +36,7 @@ def test_0005_ensure_repositories_and_categories_exist(self): category=category, ) if self.repository_is_new(repository): - self.upload_file( - repository, - filename="freebayes/freebayes.xml", - filepath=None, - valid_tools_only=False, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded the tool xml.", - strings_displayed=[ - "Metadata may have been defined", - "This file requires an entry", - "tool_data_table_conf", - ], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="freebayes/tool_data_table_conf.xml.sample", - filepath=None, - valid_tools_only=False, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded the tool data table sample file.", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="freebayes/sam_fa_indices.loc.sample", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded tool data table .loc file.", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename=os.path.join("freebayes", "malformed_tool_dependencies", "tool_dependencies.xml"), - filepath=None, - valid_tools_only=False, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded malformed tool dependency XML.", - strings_displayed=["Exception attempting to parse", "invalid element name"], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename=os.path.join("freebayes", "invalid_tool_dependencies", "tool_dependencies.xml"), - filepath=None, - valid_tools_only=False, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded invalid tool dependency XML.", - strings_displayed=[ - "The settings for name, version and type from a contained tool 
configuration" - ], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename=os.path.join("freebayes", "tool_dependencies.xml"), - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded valid tool dependency XML.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.setup_freebayes_0010_repo(repository) def test_0010_browse_tool_shed(self): """Browse the available tool sheds in this Galaxy instance and preview the freebayes tool.""" diff --git a/lib/tool_shed/test/functional/test_1020_install_repository_with_repository_dependencies.py b/lib/tool_shed/test/functional/test_1020_install_repository_with_repository_dependencies.py index 29b938967af8..fbcfbb28d3be 100644 --- a/lib/tool_shed/test/functional/test_1020_install_repository_with_repository_dependencies.py +++ b/lib/tool_shed/test/functional/test_1020_install_repository_with_repository_dependencies.py @@ -37,16 +37,10 @@ def test_0005_ensure_repositories_and_categories_exist(self): strings_displayed=[], ) if self.repository_is_new(column_maker_repository): - self.upload_file( + self.commit_tar_to_repository( column_maker_repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) emboss_repository = self.get_or_create_repository( name=emboss_repository_name, @@ -56,16 +50,10 @@ def test_0005_ensure_repositories_and_categories_exist(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( emboss_repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss.tar", - strings_displayed=[], - strings_not_displayed=[], ) repository_dependencies_path = self.generate_temp_path("test_1020", additional_paths=["emboss", "5"]) repository_tuple = ( diff --git a/lib/tool_shed/test/functional/test_1030_install_repository_with_dependency_revisions.py b/lib/tool_shed/test/functional/test_1030_install_repository_with_dependency_revisions.py index 9bd1eb5f7826..17cebc758712 100644 --- a/lib/tool_shed/test/functional/test_1030_install_repository_with_dependency_revisions.py +++ b/lib/tool_shed/test/functional/test_1030_install_repository_with_dependency_revisions.py @@ -43,16 +43,10 @@ def test_0005_ensure_repositories_and_categories_exist(self): ) if self.repository_is_new(column_maker_repository): running_standalone = True - self.upload_file( + self.commit_tar_to_repository( column_maker_repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) emboss_5_repository = self.get_or_create_repository( name=emboss_5_repository_name, @@ -62,16 +56,10 @@ def test_0005_ensure_repositories_and_categories_exist(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( emboss_5_repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", 
commit_message="Uploaded tool tarball.", - strings_displayed=[], - strings_not_displayed=[], ) repository_dependencies_path = self.generate_temp_path("test_1030", additional_paths=["emboss", "5"]) column_maker_tuple = ( @@ -93,16 +81,10 @@ def test_0005_ensure_repositories_and_categories_exist(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( emboss_6_repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded tool tarball.", - strings_displayed=[], - strings_not_displayed=[], ) repository_dependencies_path = self.generate_temp_path("test_1030", additional_paths=["emboss", "6"]) column_maker_tuple = ( @@ -124,16 +106,10 @@ def test_0005_ensure_repositories_and_categories_exist(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( emboss_repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded tool tarball.", - strings_displayed=[], - strings_not_displayed=[], ) repository_dependencies_path = self.generate_temp_path("test_1030", additional_paths=["emboss", "5"]) dependency_tuple = ( diff --git a/lib/tool_shed/test/functional/test_1040_install_repository_basic_circular_dependencies.py b/lib/tool_shed/test/functional/test_1040_install_repository_basic_circular_dependencies.py index c71ad0c31ca0..60748c5b2763 100644 --- a/lib/tool_shed/test/functional/test_1040_install_repository_basic_circular_dependencies.py +++ b/lib/tool_shed/test/functional/test_1040_install_repository_basic_circular_dependencies.py @@ -43,16 +43,10 @@ def test_0005_create_freebayes_repository(self): ) if self.repository_is_new(repository): running_standalone = True - self.upload_file( + self.commit_tar_to_repository( repository, - filename="freebayes/freebayes.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "freebayes/freebayes.tar", commit_message="Uploaded the tool tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0015_create_filtering_repository(self): @@ -72,16 +66,10 @@ def test_0015_create_filtering_repository(self): ) if self.repository_is_new(repository): running_standalone = True - self.upload_file( + self.commit_tar_to_repository( repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded the tool tarball for filtering 1.1.0.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0020_create_repository_dependencies(self): diff --git a/lib/tool_shed/test/functional/test_1050_circular_dependencies_4_levels.py b/lib/tool_shed/test/functional/test_1050_circular_dependencies_4_levels.py index 713e7f7e84be..48e7039032a3 100644 --- a/lib/tool_shed/test/functional/test_1050_circular_dependencies_4_levels.py +++ b/lib/tool_shed/test/functional/test_1050_circular_dependencies_4_levels.py @@ -58,16 +58,10 @@ def test_0005_create_convert_repository(self): ) if self.repository_is_new(repository): running_standalone = True - self.upload_file( + self.commit_tar_to_repository( repository, - filename="convert_chars/convert_chars.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - 
remove_repo_files_not_in_tar=False, + "convert_chars/convert_chars.tar", commit_message="Uploaded convert_chars tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_create_column_repository(self): @@ -82,16 +76,10 @@ def test_0010_create_column_repository(self): strings_displayed=[], ) if self.repository_is_new(repository): - self.upload_file( + self.commit_tar_to_repository( repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0015_create_emboss_datatypes_repository(self): @@ -110,16 +98,10 @@ def test_0020_create_emboss_repository(self): strings_displayed=[], ) if self.repository_is_new(repository): - self.upload_file( + self.commit_tar_to_repository( repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0025_create_filtering_repository(self): @@ -134,16 +116,10 @@ def test_0025_create_filtering_repository(self): strings_displayed=[], ) if self.repository_is_new(repository): - self.upload_file( + self.commit_tar_to_repository( repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0030_create_freebayes_repository(self): @@ -158,16 +134,10 @@ def test_0030_create_freebayes_repository(self): strings_displayed=[], ) if self.repository_is_new(repository): - self.upload_file( + self.commit_tar_to_repository( repository, - filename="freebayes/freebayes.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "freebayes/freebayes.tar", commit_message="Uploaded freebayes tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0035_create_bismark_repository(self): @@ -182,17 +152,7 @@ def test_0035_create_bismark_repository(self): strings_displayed=[], ) if self.repository_is_new(repository): - self.upload_file( - repository, - filename="bismark/bismark.tar", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded bismark tarball.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.user_populator().setup_bismark_repo(repository, end=1) def test_0040_create_and_upload_dependency_definitions(self): """Set up the dependency structure.""" diff --git a/lib/tool_shed/test/functional/test_1070_invalid_tool.py b/lib/tool_shed/test/functional/test_1070_invalid_tool.py index 8b720697215b..0f8e08e822e1 100644 --- a/lib/tool_shed/test/functional/test_1070_invalid_tool.py +++ b/lib/tool_shed/test/functional/test_1070_invalid_tool.py @@ -34,28 +34,7 @@ def test_0005_ensure_existence_of_repository_and_category(self): strings_displayed=[], ) if self.repository_is_new(repository): - self.upload_file( - repository, - filename="bismark/bismark.tar", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded bismark tarball.", - 
strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="bismark/bismark_methylation_extractor.xml", - filepath=None, - valid_tools_only=False, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded an updated tool xml.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.user_populator().setup_bismark_repo(repository) def test_0010_browse_tool_shed(self): """Browse the available tool sheds in this Galaxy instance and preview the bismark repository.""" diff --git a/lib/tool_shed/test/functional/test_1080_advanced_circular_dependency_installation.py b/lib/tool_shed/test/functional/test_1080_advanced_circular_dependency_installation.py index f9352bcb05f3..b9f82f4c098c 100644 --- a/lib/tool_shed/test/functional/test_1080_advanced_circular_dependency_installation.py +++ b/lib/tool_shed/test/functional/test_1080_advanced_circular_dependency_installation.py @@ -45,16 +45,10 @@ def test_0005_create_and_populate_column_repository(self): strings_displayed=[], ) if self.repository_is_new(repository): - self.upload_file( + self.commit_tar_to_repository( repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) running_standalone = True @@ -73,16 +67,10 @@ def test_0010_create_and_populate_convert_repository(self): strings_displayed=[], ) if self.repository_is_new(repository): - self.upload_file( + self.commit_tar_to_repository( repository, - filename="convert_chars/convert_chars.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "convert_chars/convert_chars.tar", commit_message="Uploaded convert_chars tarball.", - strings_displayed=[], - strings_not_displayed=[], ) running_standalone = True diff --git a/lib/tool_shed/test/functional/test_1090_repository_dependency_handling.py b/lib/tool_shed/test/functional/test_1090_repository_dependency_handling.py index 0a249455275c..37192cf55b45 100644 --- a/lib/tool_shed/test/functional/test_1090_repository_dependency_handling.py +++ b/lib/tool_shed/test/functional/test_1090_repository_dependency_handling.py @@ -42,16 +42,10 @@ def test_0005_create_and_populate_column_repository(self): strings_displayed=[], ) if self.repository_is_new(repository): - self.upload_file( + self.commit_tar_to_repository( repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_create_and_populate_convert_repository(self): @@ -67,16 +61,10 @@ def test_0010_create_and_populate_convert_repository(self): strings_displayed=[], ) if self.repository_is_new(repository): - self.upload_file( + self.commit_tar_to_repository( repository, - filename="convert_chars/convert_chars.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "convert_chars/convert_chars.tar", commit_message="Uploaded convert_chars tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0015_create_and_upload_dependency_files(self): diff --git 
a/lib/tool_shed/test/functional/test_1100_install_updated_repository_dependencies.py b/lib/tool_shed/test/functional/test_1100_install_updated_repository_dependencies.py index 3cd7e9b6b89d..b824593a34d4 100644 --- a/lib/tool_shed/test/functional/test_1100_install_updated_repository_dependencies.py +++ b/lib/tool_shed/test/functional/test_1100_install_updated_repository_dependencies.py @@ -38,16 +38,10 @@ def test_0005_create_and_populate_column_repository(self): strings_displayed=[], ) if self.repository_is_new(repository): - self.upload_file( + self.commit_tar_to_repository( repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_create_and_populate_convert_repository(self): @@ -64,16 +58,10 @@ def test_0010_create_and_populate_convert_repository(self): strings_displayed=[], ) if self.repository_is_new(repository): - self.upload_file( + self.commit_tar_to_repository( repository, - filename="convert_chars/convert_chars.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "convert_chars/convert_chars.tar", commit_message="Uploaded convert_chars tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0015_install_and_uninstall_column_repository(self): diff --git a/lib/tool_shed/test/functional/test_1120_install_repository_with_complex_dependencies.py b/lib/tool_shed/test/functional/test_1120_install_repository_with_complex_dependencies.py index b3fb64a7d6d7..a4c0fcf55550 100644 --- a/lib/tool_shed/test/functional/test_1120_install_repository_with_complex_dependencies.py +++ b/lib/tool_shed/test/functional/test_1120_install_repository_with_complex_dependencies.py @@ -53,19 +53,7 @@ def test_0005_create_bwa_package_repository(self): open(xml_filename, "w").write( open(old_tool_dependency).read().replace("__PATH__", self.get_filename("bwa/complex")) ) - self.upload_file( - repository, - filename=xml_filename, - filepath=new_tool_dependency_path, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded tool_dependencies.xml.", - strings_displayed=[ - "This repository currently contains a single file named tool_dependencies.xml" - ], - strings_not_displayed=[], - ) + self.add_file_to_repository(repository, xml_filename, "tool_dependencies.xml") self.display_manage_repository_page( repository, strings_displayed=["Tool dependencies", "consider setting its type"] ) @@ -85,16 +73,10 @@ def test_0010_create_bwa_base_repository(self): strings_displayed=[], ) self._get_repository_by_name_and_owner(bwa_package_repository_name, common.test_user_1_name) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="bwa/complex/bwa_base.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "bwa/complex/bwa_base.tar", commit_message="Uploaded bwa_base.tar with tool wrapper XML, but without tool dependency XML.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0015_generate_complex_repository_dependency_invalid_shed_url(self): @@ -237,17 +219,7 @@ def test_0040_update_tool_repository(self): open(xml_filename, "w").write( open(old_tool_dependency).read().replace("__PATH__", self.get_filename("bwa/complex")) ) - self.upload_file( - 
tool_repository, - filename=xml_filename, - filepath=new_tool_dependency_path, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded new tool_dependencies.xml.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_file_to_repository(tool_repository, xml_filename, "tool_dependencies.xml") # Verify that the dependency display has been updated as a result of the new tool_dependencies.xml file. self.display_manage_repository_page( base_repository, diff --git a/lib/tool_shed/test/functional/test_1130_install_repository_with_invalid_repository_dependency.py b/lib/tool_shed/test/functional/test_1130_install_repository_with_invalid_repository_dependency.py index d59f45563ce8..db5139d4247d 100644 --- a/lib/tool_shed/test/functional/test_1130_install_repository_with_invalid_repository_dependency.py +++ b/lib/tool_shed/test/functional/test_1130_install_repository_with_invalid_repository_dependency.py @@ -45,16 +45,10 @@ def test_0010_create_emboss_dependendent_column_maker_repository_and_upload_tarb ) if self.repository_is_new(column_maker_repository): running_standalone = True - self.upload_file( + self.commit_tar_to_repository( column_maker_repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0020_create_emboss_5_repository_and_upload_files(self): @@ -70,16 +64,10 @@ def test_0020_create_emboss_5_repository_and_upload_files(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss tool tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0025_generate_repository_dependency_with_invalid_url(self): diff --git a/lib/tool_shed/test/functional/test_1140_simple_repository_dependency_multiple_owners.py b/lib/tool_shed/test/functional/test_1140_simple_repository_dependency_multiple_owners.py index 83b5a6d0c4de..557f20fa8ed6 100644 --- a/lib/tool_shed/test/functional/test_1140_simple_repository_dependency_multiple_owners.py +++ b/lib/tool_shed/test/functional/test_1140_simple_repository_dependency_multiple_owners.py @@ -60,16 +60,10 @@ def test_0005_create_datatypes_repository(self): strings_displayed=strings_displayed, ) if self.repository_is_new(repository): - self.upload_file( + self.commit_tar_to_repository( repository, - filename="blast/blast_datatypes.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "blast/blast_datatypes.tar", commit_message="Uploaded blast_datatypes tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_verify_datatypes_repository(self): @@ -112,13 +106,13 @@ def test_0015_create_tool_repository(self): ) if self.repository_is_new(repository): running_standalone = True - self.upload_file( + self.commit_tar_to_repository( repository, - filename="blast/blastxml_to_top_descr.tar", + "blast/blastxml_to_top_descr.tar", filepath=None, valid_tools_only=True, uncompress_file=True, - remove_repo_files_not_in_tar=False, + remove_repo_files_not_in_tar=True, commit_message="Uploaded blastxml_to_top_descr tarball.", 
strings_displayed=[], strings_not_displayed=[], diff --git a/lib/tool_shed/test/functional/test_1160_tool_help_images.py b/lib/tool_shed/test/functional/test_1160_tool_help_images.py index 632788755e43..dd46f0dbae92 100644 --- a/lib/tool_shed/test/functional/test_1160_tool_help_images.py +++ b/lib/tool_shed/test/functional/test_1160_tool_help_images.py @@ -49,16 +49,10 @@ def test_0005_create_htseq_count_repository(self): ) if self.repository_is_new(repository): # Upload htseq_count.tar to the repository if it hasn't already been populated. - self.upload_file( + self.commit_tar_to_repository( repository, - filename="htseq_count/htseq_count.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, + "htseq_count/htseq_count.tar", commit_message="Uploaded htseq_count.tar.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_load_tool_page(self): diff --git a/lib/tool_shed/test/functional/test_1170_prior_installation_required.py b/lib/tool_shed/test/functional/test_1170_prior_installation_required.py index 2894b96fcecc..c126a29ca4b8 100644 --- a/lib/tool_shed/test/functional/test_1170_prior_installation_required.py +++ b/lib/tool_shed/test/functional/test_1170_prior_installation_required.py @@ -60,16 +60,10 @@ def test_0005_create_convert_repository(self): ) if self.repository_is_new(repository): running_standalone = True - self.upload_file( + self.commit_tar_to_repository( repository, - filename="convert_chars/convert_chars.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "convert_chars/convert_chars.tar", commit_message="Uploaded convert_chars tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_create_column_repository(self): @@ -85,16 +79,10 @@ def test_0010_create_column_repository(self): strings_displayed=[], ) if running_standalone: - self.upload_file( + self.commit_tar_to_repository( repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0015_create_repository_dependency(self): diff --git a/lib/tool_shed/test/functional/test_1180_circular_prior_installation_required.py b/lib/tool_shed/test/functional/test_1180_circular_prior_installation_required.py index 1b1b29ddf974..755c823211cc 100644 --- a/lib/tool_shed/test/functional/test_1180_circular_prior_installation_required.py +++ b/lib/tool_shed/test/functional/test_1180_circular_prior_installation_required.py @@ -73,16 +73,10 @@ def test_0005_create_convert_repository(self): ) if self.repository_is_new(repository): running_standalone = True - self.upload_file( + self.commit_tar_to_repository( repository, - filename="convert_chars/convert_chars.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "convert_chars/convert_chars.tar", commit_message="Uploaded convert_chars tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_create_column_repository(self): @@ -98,16 +92,10 @@ def test_0010_create_column_repository(self): strings_displayed=[], ) if running_standalone: - self.upload_file( + self.commit_tar_to_repository( repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - 
remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0015_create_filtering_repository(self): @@ -123,16 +111,10 @@ def test_0015_create_filtering_repository(self): strings_displayed=[], ) if running_standalone: - self.upload_file( + self.commit_tar_to_repository( repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0020_create_repository_dependency(self): diff --git a/lib/tool_shed/test/functional/test_1190_complex_prior_installation_required.py b/lib/tool_shed/test/functional/test_1190_complex_prior_installation_required.py index c3d049890108..57714179cd0e 100644 --- a/lib/tool_shed/test/functional/test_1190_complex_prior_installation_required.py +++ b/lib/tool_shed/test/functional/test_1190_complex_prior_installation_required.py @@ -61,18 +61,10 @@ def test_0005_create_matplotlib_repository(self): ) if self.repository_is_new(repository): running_standalone = True - self.upload_file( + self.commit_tar_to_repository( repository, - filename="package_matplotlib/package_matplotlib_1_2.tar", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "package_matplotlib/package_matplotlib_1_2.tar", commit_message="Uploaded matplotlib tool dependency tarball.", - strings_displayed=[ - "This repository currently contains a single file named tool_dependencies.xml" - ], - strings_not_displayed=[], ) def test_0010_create_numpy_repository(self): @@ -92,18 +84,10 @@ def test_0010_create_numpy_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="package_numpy/package_numpy_1_7.tar", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "package_numpy/package_numpy_1_7.tar", commit_message="Uploaded numpy tool dependency tarball.", - strings_displayed=[ - "This repository currently contains a single file named tool_dependencies.xml" - ], - strings_not_displayed=[], ) def test_0015_create_complex_repository_dependency(self): @@ -136,18 +120,7 @@ def test_0015_create_complex_repository_dependency(self): dependency_xml_path = self.generate_temp_path("test_0170", additional_paths=["matplotlib"]) new_xml_file = os.path.join(dependency_xml_path, "tool_dependencies.xml") open(new_xml_file, "w").write(original_xml.replace("", processed_xml)) - # Upload the generated complex repository dependency XML to the matplotlib repository. - self.upload_file( - matplotlib_repository, - filename="tool_dependencies.xml", - filepath=dependency_xml_path, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded complex repository dependency on numpy 1.7.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_file_to_repository(matplotlib_repository, dependency_xml_path, "tool_dependencies.xml") def test_0020_verify_generated_dependency(self): """Verify that matplotlib now has a package tool dependency and a complex repository dependency. 
diff --git a/lib/tool_shed/test/functional/test_1200_uninstall_and_reinstall_basic_repository.py b/lib/tool_shed/test/functional/test_1200_uninstall_and_reinstall_basic_repository.py index ee8ae3610bcb..ca7d95a38507 100644 --- a/lib/tool_shed/test/functional/test_1200_uninstall_and_reinstall_basic_repository.py +++ b/lib/tool_shed/test/functional/test_1200_uninstall_and_reinstall_basic_repository.py @@ -31,50 +31,14 @@ def test_0005_ensure_repositories_and_categories_exist(self): category=category, ) if self.repository_is_new(repository): - self.upload_file( + self.commit_tar_to_repository( repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="filtering/filtering_0000.txt", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded readme for 1.1.0", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="filtering/filtering_2.2.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded filtering 2.2.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="readme.txt", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded readme for 2.2.0", - strings_displayed=[], - strings_not_displayed=[], ) + self.add_file_to_repository(repository, "filtering/filtering_0000.txt") + self.add_tar_to_repository(repository, "filtering/filtering_2.2.0.tar") + self.add_file_to_repository(repository, "readme.txt") def test_0010_install_filtering_repository(self): """Install the filtering repository into the Galaxy instance.""" diff --git a/lib/tool_shed/test/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py b/lib/tool_shed/test/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py index c18a1ab6fd9e..66be32573652 100644 --- a/lib/tool_shed/test/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py +++ b/lib/tool_shed/test/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py @@ -31,78 +31,28 @@ def test_0005_ensure_repositories_and_categories_exist(self): category=category, ) if self.repository_is_new(repository): - self.upload_file( - repository, - filename="freebayes/freebayes.xml", - filepath=None, - valid_tools_only=False, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded the tool xml.", - strings_displayed=[ - "Metadata may have been defined", - "This file requires an entry", - "tool_data_table_conf", - ], - strings_not_displayed=[], + strings_displayed = [ + "Metadata may have been defined", + "This file requires an entry", + "tool_data_table_conf", + ] + self.add_file_to_repository(repository, "freebayes/freebayes.xml", strings_displayed=strings_displayed) + strings_displayed = ["Upload a file named sam_fa_indices.loc.sample"] + self.add_file_to_repository( + repository, "freebayes/tool_data_table_conf.xml.sample", strings_displayed=strings_displayed ) - self.upload_file( - repository, - filename="freebayes/tool_data_table_conf.xml.sample", - 
filepath=None, - valid_tools_only=False, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded the tool data table sample file.", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="freebayes/sam_fa_indices.loc.sample", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded tool data table .loc file.", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename=os.path.join("freebayes", "malformed_tool_dependencies", "tool_dependencies.xml"), - filepath=None, - valid_tools_only=False, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded malformed tool dependency XML.", - strings_displayed=["Exception attempting to parse", "invalid element name"], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename=os.path.join("freebayes", "invalid_tool_dependencies", "tool_dependencies.xml"), - filepath=None, - valid_tools_only=False, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded invalid tool dependency XML.", - strings_displayed=[ - "The settings for name, version and type from a contained tool configuration" - ], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename=os.path.join("freebayes", "tool_dependencies.xml"), - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded valid tool dependency XML.", - strings_displayed=[], - strings_not_displayed=[], + self.add_file_to_repository(repository, "freebayes/sam_fa_indices.loc.sample") + target = os.path.join("freebayes", "malformed_tool_dependencies", "tool_dependencies.xml") + self.add_file_to_repository( + repository, target, strings_displayed=["Exception attempting to parse", "invalid element name"] ) + target = os.path.join("freebayes", "invalid_tool_dependencies", "tool_dependencies.xml") + strings_displayed = [ + "The settings for name, version and type from a contained tool configuration" + ] + self.add_file_to_repository(repository, target, strings_displayed=strings_displayed) + target = os.path.join("freebayes", "tool_dependencies.xml") + self.add_file_to_repository(repository, target) def test_0010_install_freebayes_repository(self): """Install the freebayes repository into the Galaxy instance.""" diff --git a/lib/tool_shed/test/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py b/lib/tool_shed/test/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py index 787e3247f3e6..91720f67bfe1 100644 --- a/lib/tool_shed/test/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py +++ b/lib/tool_shed/test/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py @@ -52,16 +52,10 @@ def test_0005_ensure_repositories_and_categories_exist(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( column_maker_repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded bismark tarball.", - strings_displayed=[], - strings_not_displayed=[], + "column_maker/column_maker.tar", + commit_message="Uploaded column maker tarball.", ) repository_dependencies_path = 
self.generate_temp_path("test_1030", additional_paths=["emboss", "5"]) column_maker_tuple = ( @@ -83,16 +77,10 @@ def test_0005_ensure_repositories_and_categories_exist(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( emboss_6_repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded tool tarball.", - strings_displayed=[], - strings_not_displayed=[], ) repository_dependencies_path = self.generate_temp_path("test_1030", additional_paths=["emboss", "6"]) column_maker_tuple = ( @@ -114,16 +102,10 @@ def test_0005_ensure_repositories_and_categories_exist(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( emboss_repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded tool tarball.", - strings_displayed=[], - strings_not_displayed=[], ) repository_dependencies_path = self.generate_temp_path("test_1030", additional_paths=["emboss", "5"]) dependency_tuple = ( diff --git a/lib/tool_shed/test/functional/test_1300_reset_all_metadata.py b/lib/tool_shed/test/functional/test_1300_reset_all_metadata.py index eda3d6dc693b..4b5571c74146 100644 --- a/lib/tool_shed/test/functional/test_1300_reset_all_metadata.py +++ b/lib/tool_shed/test/functional/test_1300_reset_all_metadata.py @@ -81,28 +81,12 @@ def test_0010_create_repositories_from_0000_series(self): ) if self.repository_is_new(repository): running_standalone = True - self.upload_file( + self.commit_tar_to_repository( repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="filtering/filtering_2.2.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded filtering 2.2.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], ) + self.add_tar_to_repository(repository, "filtering/filtering_2.2.0.tar") def test_0015_create_repositories_from_0010_series(self): """Create repository freebayes_0010.""" @@ -116,50 +100,7 @@ def test_0015_create_repositories_from_0010_series(self): strings_displayed=[], ) if self.repository_is_new(repository): - self.upload_file( - repository, - filename="freebayes/freebayes.xml", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded freebayes.xml.", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="freebayes/tool_data_table_conf.xml.sample", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded tool_data_table_conf.xml.sample", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="freebayes/sam_fa_indices.loc.sample", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded sam_fa_indices.loc.sample", - strings_displayed=[], - strings_not_displayed=[], 
- ) - self.upload_file( - repository, - filename="freebayes/tool_dependencies.xml", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded tool_dependencies.xml", - strings_displayed=[], - strings_not_displayed=[], - ) + self.setup_freebayes_0010_repo(repository) def test_0020_create_repositories_from_0020_series(self): """Create repositories emboss_0020 and column_maker_0020 if necessary.""" @@ -173,16 +114,10 @@ def test_0020_create_repositories_from_0020_series(self): strings_displayed=[], ) if self.repository_is_new(column_maker_repository): - self.upload_file( + self.commit_tar_to_repository( column_maker_repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) repository = self.get_or_create_repository( name="emboss_0020", @@ -192,16 +127,10 @@ def test_0020_create_repositories_from_0020_series(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss.tar", - strings_displayed=[], - strings_not_displayed=[], ) def test_0025_create_repositories_from_0030_series(self): @@ -217,16 +146,10 @@ def test_0025_create_repositories_from_0030_series(self): strings_displayed=[], ) if self.repository_is_new(column_maker_repository): - self.upload_file( + self.commit_tar_to_repository( column_maker_repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded bismark tarball.", - strings_displayed=[], - strings_not_displayed=[], ) emboss_5_repository = self.get_or_create_repository( name="emboss_5_0030", @@ -236,16 +159,10 @@ def test_0025_create_repositories_from_0030_series(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( emboss_5_repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss.tar", - strings_displayed=[], - strings_not_displayed=[], ) repository_dependencies_path = self.generate_temp_path("test_0330", additional_paths=["emboss", "5"]) dependency_tuple = ( @@ -267,16 +184,10 @@ def test_0025_create_repositories_from_0030_series(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( emboss_6_repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss.tar", - strings_displayed=[], - strings_not_displayed=[], ) repository_dependencies_path = self.generate_temp_path("test_0330", additional_paths=["emboss", "6"]) dependency_tuple = ( @@ -298,16 +209,10 @@ def test_0025_create_repositories_from_0030_series(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( emboss_repository, - filename="emboss/emboss.tar", - filepath=None, - 
valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss.tar", - strings_displayed=[], - strings_not_displayed=[], ) repository_dependencies_path = self.generate_temp_path("test_0330", additional_paths=["emboss", "5"]) dependency_tuple = ( @@ -345,16 +250,10 @@ def test_0030_create_repositories_from_0040_series(self): strings_displayed=[], ) if self.repository_is_new(repository): - self.upload_file( + self.commit_tar_to_repository( repository, - filename="freebayes/freebayes.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "freebayes/freebayes.tar", commit_message="Uploaded the tool tarball.", - strings_displayed=[], - strings_not_displayed=[], ) repository = self.get_or_create_repository( name="filtering_0040", @@ -363,17 +262,11 @@ def test_0030_create_repositories_from_0040_series(self): owner=common.test_user_1_name, category=category, strings_displayed=[], - ) - self.upload_file( + ) + self.commit_tar_to_repository( repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded the tool tarball for filtering 1.1.0.", - strings_displayed=[], - strings_not_displayed=[], ) repository = self._get_repository_by_name_and_owner("freebayes_0040", common.test_user_1_name) filtering_repository = self._get_repository_by_name_and_owner("filtering_0040", common.test_user_1_name) @@ -432,38 +325,20 @@ def test_0035_create_repositories_from_0050_series(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( emboss_repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss.tar", - strings_displayed=[], - strings_not_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( freebayes_repository, - filename="freebayes/freebayes.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "freebayes/freebayes.tar", commit_message="Uploaded freebayes tarball.", - strings_displayed=[], - strings_not_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( filtering_repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], ) repository_dependencies_path = self.generate_temp_path("test_0350", additional_paths=["emboss"]) repository_dependencies_path = self.generate_temp_path("test_0350", additional_paths=["filtering"]) diff --git a/lib/tool_shed/test/functional/test_1410_update_manager.py b/lib/tool_shed/test/functional/test_1410_update_manager.py index ba4112d7cd24..7debeaf57c90 100644 --- a/lib/tool_shed/test/functional/test_1410_update_manager.py +++ b/lib/tool_shed/test/functional/test_1410_update_manager.py @@ -53,16 +53,10 @@ def test_0005_create_filtering_repository(self): owner=common.test_user_1_name, category=category, ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - 
-            remove_repo_files_not_in_tar=True,
+            "filtering/filtering_1.1.0.tar",
             commit_message="Uploaded filtering 1.1.0",
-            strings_displayed=[],
-            strings_not_displayed=[],
         )

     def test_0010_install_filtering_repository(self):
@@ -89,17 +83,7 @@ def test_0015_upload_readme_file(self):
         """
         self.login(email=common.test_user_1_email, username=common.test_user_1_name)
         repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name)
-        self.upload_file(
-            repository,
-            filename="readme.txt",
-            filepath=None,
-            valid_tools_only=True,
-            uncompress_file=True,
-            remove_repo_files_not_in_tar=False,
-            commit_message="Uploaded readme.txt",
-            strings_displayed=[],
-            strings_not_displayed=[],
-        )
+        self.add_file_to_repository(repository, "readme.txt")

     def test_0020_check_for_displayed_update(self):
         """Browse installed repositories and verify update.
diff --git a/lib/tool_shed/test/functional/test_1430_repair_installed_repository.py b/lib/tool_shed/test/functional/test_1430_repair_installed_repository.py
index 5c0829ede090..01cbdbea57e5 100644
--- a/lib/tool_shed/test/functional/test_1430_repair_installed_repository.py
+++ b/lib/tool_shed/test/functional/test_1430_repair_installed_repository.py
@@ -62,16 +62,10 @@ def test_0005_create_filter_repository(self):
             category=category,
             strings_displayed=[],
         )
-        self.upload_file(
+        self.commit_tar_to_repository(
             repository,
-            filename="filtering/filtering_1.1.0.tar",
-            filepath=None,
-            valid_tools_only=True,
-            uncompress_file=True,
-            remove_repo_files_not_in_tar=False,
+            "filtering/filtering_1.1.0.tar",
             commit_message="Populate filter_1430 with version 1.1.0.",
-            strings_displayed=[],
-            strings_not_displayed=[],
         )

     def test_0010_create_column_repository(self):
@@ -90,16 +84,10 @@ def test_0010_create_column_repository(self):
             category=category,
             strings_displayed=[],
         )
-        self.upload_file(
+        self.commit_tar_to_repository(
             repository,
-            filename="column_maker/column_maker.tar",
-            filepath=None,
-            valid_tools_only=True,
-            uncompress_file=True,
-            remove_repo_files_not_in_tar=False,
+            "column_maker/column_maker.tar",
             commit_message="Populate column_1430 with tool definitions.",
-            strings_displayed=[],
-            strings_not_displayed=[],
         )

     def test_0015_create_repository_dependency(self):
diff --git a/lib/tool_shed/test/functional/test_1460_data_managers.py b/lib/tool_shed/test/functional/test_1460_data_managers.py
index 7269e72fd12d..42ad60b4ea6d 100644
--- a/lib/tool_shed/test/functional/test_1460_data_managers.py
+++ b/lib/tool_shed/test/functional/test_1460_data_managers.py
@@ -55,17 +55,12 @@ def test_0010_create_data_manager_repository(self):
             category=category,
             strings_displayed=[],
         )
+        assert repository, f"No repository created with name {data_manager_repository_name}"
         # Upload the data manager files to the repository.
-        self.upload_file(
+        self.commit_tar_to_repository(
             repository,
-            filename=data_manager_tar_file,
-            filepath=None,
-            valid_tools_only=True,
-            uncompress_file=True,
-            remove_repo_files_not_in_tar=False,
+            data_manager_tar_file,
             commit_message=f"Populate {data_manager_repository_name} with a data manager configuration.",
-            strings_displayed=[],
-            strings_not_displayed=[],
         )

     def test_0020_install_data_manager_repository(self):
diff --git a/lib/tool_shed/test/functional/test_1470_updating_installed_repositories.py b/lib/tool_shed/test/functional/test_1470_updating_installed_repositories.py
index 2019b0c8f477..fa3ac3cf55be 100644
--- a/lib/tool_shed/test/functional/test_1470_updating_installed_repositories.py
+++ b/lib/tool_shed/test/functional/test_1470_updating_installed_repositories.py
@@ -48,16 +48,10 @@ def test_0005_create_filtering_repository(self):
             category=category,
             strings_displayed=[],
         )
-        self.upload_file(
+        self.commit_tar_to_repository(
             repository,
-            filename="filtering/filtering_1.1.0.tar",
-            filepath=None,
-            valid_tools_only=True,
-            uncompress_file=True,
-            remove_repo_files_not_in_tar=False,
+            "filtering/filtering_1.1.0.tar",
             commit_message="Uploaded filtering 1.1.0 tarball.",
-            strings_displayed=[],
-            strings_not_displayed=[],
         )

     def test_0010_install_filtering_to_galaxy(self):
@@ -85,17 +79,7 @@ def test_0015_update_repository(self):
         """
         self.login(email=common.test_user_1_email, username=common.test_user_1_name)
         repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name)
-        self.upload_file(
-            repository,
-            filename="filtering/readme.txt",
-            filepath=None,
-            valid_tools_only=True,
-            uncompress_file=False,
-            remove_repo_files_not_in_tar=False,
-            commit_message="Uploaded readme.",
-            strings_displayed=[],
-            strings_not_displayed=[],
-        )
+        self.add_file_to_repository(repository, "filtering/readme.txt")

     def test_0020_get_repository_updates(self):
         """Get updates to the installed repository.
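All of the conversions in this series funnel through a small helper surface on the twill test case. The sketch below restates those helper signatures only as they can be inferred from the call sites above; the parameter names, defaults, and types here are assumptions, not the actual definitions, which live in lib/tool_shed/test/base/twilltestcase.py. Note that test_1140 still passes legacy keywords (filepath, valid_tools_only, uncompress_file, remove_repo_files_not_in_tar) through commit_tar_to_repository, so the real helper presumably tolerates extra keyword arguments:

    # Hypothetical sketch, inferred from call sites only -- not the actual helper definitions.
    from typing import List, Optional


    class ShedTwillTestCaseSketch:
        def commit_tar_to_repository(
            self,
            repository,
            source: str,
            commit_message: Optional[str] = None,
            strings_displayed: Optional[List[str]] = None,
            **kwd,
        ) -> None:
            """Upload a tarball from the test-data tree and commit it in one step."""

        def add_file_to_repository(
            self,
            repository,
            source,
            target: Optional[str] = None,
            strings_displayed: Optional[List[str]] = None,
        ) -> None:
            """Commit a single file; target optionally names it inside the repository."""

        def add_tar_to_repository(self, repository, source: str) -> None:
            """Commit a follow-up tarball on top of an already-populated repository."""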
diff --git a/lib/tool_shed/test/test_data/bismark/bismark.tar b/lib/tool_shed/test/test_data/bismark/bismark.tar
deleted file mode 100644
index e24183c72963675839556944d1859a98c2c62d33..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 593920
[593920 bytes of base85-encoded binary data for the deleted bismark.tar omitted]
zU`m~dc(S*l=IuG^8@Zk69Qs|WCU>9=(?zM`o(57S%0!gNp1bflOkYonocrZcao-90C=)!5;k|wqAzqv1b~JLw6Jqf ze0vaca;|g-*)CjP9v_3JSM17S{wjVB=sL{Fgso^y9qW`T0(Ci0HH_3KLCg z(G$l+UA9Alvm-$ZIH-}@g$uJ=TOtbA2|fALpt$B{#ZIH)1pCb{+d4xkIahBFG-jyp ze$nh8u18544*0wNX(vl}9NVJ?uaR4Z&%I~d3iP;1g^^mq*FJN zgyLyf``Xgo5H*)G=Qc)`92JCf4`F=E7}aujU931}}C}w+grNW5r_JZS{-~EGFqN9))>dZzcRX{bL3hWLt@*peis4X4GbM(fl| zMMKqtoXi3FaY~t^3_c(4S$m~OafptwWeg`BKr%FP&iikj9o?i8I)UG*AiMwgZ2&+D>< zCnTBj(iNPda>#n{JYfoBFogl~gtKxOK;hvO#~P{i;uI5 z7}07*4<&>J6DK1Q#u8Q|MOeFx)oqXb5FAWLyJWw4uh?Nmb52g7PK zi_F;v1rwG4B${QkMQRd~#L;3Dz}0DDvqC{M#I4sJ>@#R#v-V(3Evud3h}{gRS-52MDBt(`o z!GZ1N&;hW4PdcW3@aYd=>i>k+DIGZ3qpB#>kcSn+qboQ|L_S}h5Nw$D zU*gCGjCsz&T#6dn8apWdPM?R5sRnspS&O8ti)A$mT0{WL5KEa5rN|&*nW(r%mjS-% z0~Os2x!iE@{M97^#n!N1J=u*SGQZ2=!f4IHqsPZ`7C!aw7M!~d^4Ah4`<}yO1p#M3 zI)#6U8J0{^{k!8rWL+Z%#ODs-g?{$dp3x@@=~1LZ4e&yq&b0aguX`WKd0;DmG}#4D z(6sOBj0cxsuq!8kWl5RiIrz3g1kzE>;|cpCi@Z#KG!tlyFk;)0VFx@-&Il|9q>HaQ z_wQi9mAMZtkPaZJtiF77XSHf)5u@|a9F{UCo2$83$}pl@qb99$9rZ302)?7EaUI2P z4^BJB!{j^ZfCEad3%PQdw9hY)G*81h+8V!v;Y5>3uZE$yLtM7UY1^V#tR@vGIPLD} z`~abOP(gI^h}r|s_Ty9E@s$Z&VpanR;x52=QE^n~Ch<{7u#*C3BM(kdKnuVr!H%gnDc4cASbJ_nwpf`+vSn531 zw$R5tpU=@{4CI1gnl%aZ)HKSCSjyCT&ouICPA#%{iSR+IIyyV+?M!UOxPFx z&A~UvXRkT7e_QNLLxXAG$-aa7*lZ6WuqiD*AB>H z+-@yxFC~BER;*y139bfB$)QS4K?WV5%`4i0;irKtyCQ1_&kv1Kc15zN+)+>gJpoj> zcPD{ymWZp3WKe!XP+7PS`{Wt2Kf%%`w_b#KrH%$8-Qqodq?yE_DW5Gq4j)Bjk3i*lR_5a74)B$Y?32TkpUiXuQh%BlL2Nw~#I}tSv z?oVhda0jMy(&|%E{ZBINul~Hf_vHCc-~O_?s#GUO7Y?2Z#Q@3&N4(JF@&+y^h}meL zP>nuCqGe>{?+VXAqNVkt4ZP4LDg|j+2p{MQK3Ywdww6THTza&G zRLEwgKz@H2F7M-+JiJ`178&=7Cg4L2^-2=Cj>;;S;@qyvAZFaX0#PNTo`!fP*^S8) zKeVi(s>GGv%!1H^6GzS-ToHMDBmBfQa_s=kTz?FbByY9}?1Jb694->+8+kr#B0Vt7 ziR?xQ45_&SLbzg?{Uag-C$u+2XqVJ|9}D76@|EyhokePIT(8s)2nN(mXq?JO?G{Gu zrS`^EOYPUaLKRPB+L6>gUS>DjJ@;yo%---ayNr=JXA9u=3CrxVSCPyft(btC1*l$z zWYL510(%z1AE&?`2;+kY>@x}LGS?zuJqzksSkFHD$hN_Rbr{#)o3LJBxV*Hm9(d10 zVZC7ZD-hPx6DSkbuSiC@5!U;y^TCv96oRnc_?U$CPfg~DB_7J^%w?lBaB6g{doL^R z23_>}ybXl3HV=ERhaDzQ^oebD)mHD@JC6|^J+zX@W7z0@xuR=JaUIEIB58@*zF1U~$7`b+kpWJCMshjjOhvTPS5EB&4Bsb}4PP$Bnps{)M z0)2?|G!AWH%yoH9h^&zOAwi%lQ-{7P)nOe1o7LB~F1%1@H-$ghd#kB%;Cb=Y?6;9A ze9B}w!FqL-nIBT6=zfuX4H=d`#l#OO%#^znHz+I@{Yo=}{S~fWhhmGxIQ)rTyLNmS zd8R_19gS~W&KnCa#aQRM5aI>@%LxL7jP*lc<;}f3EqO+%!(ZE@`1{f0;mvgt7uj-i zourKrtc*8Cg|+B<>dkeM9E>g(XLQAZO_R+9U`jj`sf>FwF??evWI&A@`bbDeP4^oF z+c~C)-ka9DyCsCLnqK&_-RTn9-Mklos1PtXQIOfb(6=zh)ZFVjDf?pAc5nhr^T(oq z$@ATLd0_suv`mpH{~`qMU=kb8XL#iqvCG_hZAv3|Mp z6JPZ)dq?S|pG;ZBWvxbtuDB#{Rtag+Fik-_t@-{tilef0HY_T8%n{lLIv%11DZ&6M z&reY=Ik|UF0(DV8%jIgj;Ll%w>0;RyT@Ngei1ORtGPo64Lx)Y*tjXtt=CG$f+`Csv zzah>lnt)BK9Q4lGf3zm`uC1o*+yUW72Ly%;A*%yINnP}-+%MshjZhe?q5C1GGSVZW zonY8<*0H)XnQ5|9;@Te)jlzH=k~F%uu2vF^J`S71CNrI^z4ngoHpH0Ua&v#}YBMn- z>l=hrvnDnI+_ZM1G7K8zIw4(^4d)g*Pirv>8P=bdF0$-I9lm-mZ0E&30zuQL65VVDCAS%_8G`?+R&*yRPy zfA8Ft&M6uBbZ0~3de4U3z0g20Q=n}YKsRT^sO=7*LOweSdkxqQ2MK~@E8qxHSyxVm z*7-r_WQ0Ns!mF8p75CRhD|J zd?fWfEmb=!6&F`6$xs5{N(nN}#)lE0YbvJP}vBNh>y~bF1Q1mRapNOHZ6o3=|3b~Rp z3H7)_K<9|UW9Ak9C}Zkf%k+^Rm0AWSU7a8(atf*)l+k8hxSCH8jNv)MU_oMqFcJxa zCWs&P#16-kWu@*r#pd>zSN+`=deMh+wXL?(9UpX9; zhR+6#iE?TYBcCL?MtFW%DONelR8s8K9aOy5J66n{Th1*%HDjk}MUS0XCO zlJ3<3@q>uMsRMLMgUK`}=(Jtg5jvIiB{kV8&*&*6lPP<$sb0s4 zh#JLn2;`W0e^SVTz=k2(|zet}>gUdqWL} zgP(N>yPN9!y?0M$6ne-?zuHaIZ~hHjlBRJ}TbfYq*J>l9V{Mn~KxJozVYUh0q6n2# z78a@jelm!w`j~w~Y}fHtmXOLvvy*Oq7yWjimTc65+8}fS>=ogB<)#F_u>N*=-L_Lt)ql31ZWC@he#b;qD7{V ztz}S!GmVQbDPnt-*|`knN9_w(C+M?ufyzOn7H4m(3%^GX{z25@l^hZHVg)|xx>>Hy zk@`pUj{1~A@ugvyv(>*57i7+u5X=|e%8|BXh=hrN0l5&1tyd(x>awikqAeYdK!n-Y 
zIyN{oqoS)h^6`(|8b165d~V3~T)rRc0WaBS@5jBmIv7P&FeJANDrgds6qWAa28pn< zA_~!FCD~pmCVeDzWj*w+u1toO*96N?70Cc&nYB0uF@N^b7y~H?T^wkO^fMhR2pbM_ zNQ98;{Ou+y2W^oimtj3K-d~ut^hIfv%D{Sj-HW0#+w&Hp>tZ46iAzj=9IR)Y1$q*X zs5O}3qoiyDeDcZN2LIQ%TW=)mQVp56*C6l%eet4{?S2sPfQ>l+oH7H;kIxg=?h(1| zNL9!}!gb!C)0bTJ3Si>I8phbD;v7;RBUib~Z2P5rB@_}QV+J8>IqFcrPD`wf1Kjk* zubjDuuFp6UdtB#}nOKTjkx-S16(s6~+f`B+tH=V#r|9b>P z91}AOh^eE+-0v>I8LZQ=UQzex_FzedS^7;l8Pqx^G4c|;qcV+d+o#93-m@GyzjPMM zbearu`eIGkmN|42FTq*lJLLGq?2okP&Cz#8=PND#GiUcXh4D%+n~e!Xl5vW)An`O) zQk75_Xl@tqrl!rr!8T7KgMBm=^{4l$=7Tr-^0%mDj_3lE#8Ubpo8F| z;|~0JwnXvf-T1a3GlP!k9>7TiLr9>XPqV<>HTbsReAaXPrliq8GcdbbH1MD!x&VSr z?*K{JUjc!PxIlO03B_C-Bu%S{qs`~nS~g}r_Tl+-Nr@LoYH+*hB3a7>qga$wRNiIA z;x2I~p%zK56Q2|t!-q;FvzB$y6^QTiDvP>gYLLL5=5~3@-Mg|v79s;NjDi{0Go{;u zjjT|y+j+>O^So}US$|WFCcL}8AKt!aw^=V)>*KGUdpKkshKBS}6buQo#GrG<~8d1t&Wr)OE)R|*VoLeLfm?6_yh+U+lxW!TbqA&xocohz%vW5uhQNdSP zZ@IbpI@c=6hcg93LM;X3s5U`-w`9V6C*CM?&Be?AbQ1|PsdY#r9M%fKbbJ!wsJH}5 zVp%c4R0?5;YY0;DKsMMqBy+VWWJnCguo#4jn8QmVXn~4akd09`LGq*Klq&%OR29p4 zbjQ(G1(BOh=}Nyz#f18Fkq}-3wh|#SVXw~TaQ*`%7iNx)JYIsJK3SN$T*2=#Q*chS zxNM$aPYE7X+HW1MXG(hHhQ54`;MCwgQP_OqOrgtfXWzsbO%Pln&J?~2amK&hxFT`p zCQY$joik-7lZ7d|i1^N@DQ1%58}VrR{LWHL#APSuQE0`6XSW^ceN`V7{qAMAMAGy3 z`fi(2G9lgHO^Bm}K|_9-=tdHkJ7w)L9(fAlVB56EjR*WqkQeeM<0xaWF@S%SL0;tn zG$llX3CtKiQ@qKuoN6~gFw^Y%1kY_c2*%vW!RM@DQy*s6Q^;`Rua9VbBBPzK>*lYg zq%0|zL{3ioIv-kgf91lGsRq%IM`vx8YNjSGBs_;3C+pZ0J5BaS4lqaG|IOaca!=-U zWQdeqgC0ERMyaMOo#;nPznDDR*njemr)&FLJKM?n+S8|+5-}_BQMH5bpg8SU(_9Ws zb2(I1;)w3WRkJsk%u%VbhZyR`c!r$ur()8uPV;^YxiU!bXAN!1=t6G=xkM})To{Zg zWVCb|;oWn@r6C<4yd+&F-NV|xgJxmY{>m!uReMdtX5yD85vhE|Cg3~`@|ye2%f8YW zWqMVZb~lYd)9JK*<%3NF%j$w-5|rcCL6ri^7H%004^L}O0he;pc5!luxEEg)lvX5O zygGa{gh`8Vk*LfHijMbDL9{P6s$e;iYn{tgcjK$JE;VNPzG6*+A8_NV-2s^UfUV5w zVzVlj6EM>H6MwzSiJoY2i9a{QiujIo=1lk4>Avcny`pu^mNlR(T=0jnT^~P5ABOyJ zovm{v(S(!AYdA(O`n@4)l^~6f?=AH4hxEZ2Ozg*H0U|*J-(j+D61Hbr$4gjAWCH5Oz-@>+{9yqTM4LV22Zc9_S+z_Yfom6{_jH@u^ zBZvZo=<$lN6tvB)v{wQVb4mss!DwcHhU$ej_csXUl()ZE(WWk3~b1YWAogJC~-bcCAewphK`7r{cIq3xQXT^Uds z1{9A=93b~%8agTXJaL%s;FE#SPm>`Ooo)gUz=&{B!CyY6$8%I(Jl>Nj#`DZv2a@sd z5B|q7bzkCbPL@^>=z-^)mN{Z&f`a%l`9T!OpxrN?{LJ+fk&b3Hb`iEhjtB%qnuY;O zm;rkt5u)gHuAm8MAt%-?+a++Og1~+i=UpNKMHBG9!=W1E*7tT+R!)KTN}-r%gnO8bvpx5MDou=d&<|2EFHl&OrxB?mOPb5%>|gle3oyS1J-LU{`U1g}Evs?P}?ZFTPm1 zLLI9r9WFafOL*s1h%1-CU zyYVwr@gctFm5gX-fy{NAZ7DO^?`6V4 zT4GAKrbMuQX?cme)4O0?2-Hs9oXH~86);j;%Xsn0T9v@3SaeFilm=5;keN+znhI1? 
za#CefL9dxomYQw~EX2IAV*DW|m{ArB ztfyES1GCSuq;;w{CGG*a<48^keMkHO;FJT ziN;nW-&tC>^IR&$n$u8{IXGo>CKl*hNjM~#L*{?+-6=R#eU;J!} zUQ$!f^fG1v>6SL@qPf^Jy$s>JGjOAfar@w5*11DJj*4MfVC5L*#h_39A69Du!fe*%#K*0uRbV&wC1iexp@x!#5-N|otlvY zv%%!Mu$CqZzI2gDl+pdkH(#tAw;srbt8$@A6_gI>z+4KgvC=bw_xUDni^GW}`WCd5 zc2{@+`tag|s3sKD##%Iv@HRtBIj z$YSlzaz4aWB!P_IC0#6jWu>|`8Vj&x@B|T7gbv_=|6r^8TytBVC&9`N*W3wtTNn)s(FrBaIl85In^ z_?b*sd=1B)KJG-F(Ki}bLdFUO1gGd1zRp`*hcsm_Es*pz@ zj`wjt0Y&kKVw!q}k_4R^m)@#97CNmwd%U)ckvKSRLk&=EFWS?=+vtlx<(ekfVjkCRjl!o!d(C{H#r{ zj${EHjL3nYjD(lSgTYl>uAZRmxx+@<^}S(VCcrhTn-VFXow9=H3eCcK>((uGb*_61 ztOk6q_utt;{Lybf)OF8Nsm9qv4NRENqd{@ilXp9 zw1=`kT&BS&dDZH7S_de)cb4kMJUJwqU#~=)wNk1>#7B+trmB0R}I=7EEwMa~wyjLTG>w+SIKw9Mc99^64F zSNuLH(d6;WizZnS5{^O(B&YBahEDaY@{ZjGBm^lK!{tF)NkO8r5|(c&q_RnlC{-`H zMfuonZccnS#~_MW*ME{>e|3Fre{*$ne{CIt1klcNJ0CM1of2$eu&{<+CpqZsBiemH zhW?mR&SN=gZ7M=z(N4)m0PO&5cTxb*P8!g*|4CL~%BFF#mBn;htVmv@IB~vOS^47S z>N5UaUX)chPmoj2{Rs9F2l5f_C;#qTTtM7o`ld*az{!MprBoiE{m?)!q_AxkXw=a^4*8kx=g}_D^!(OW6=l$Lxf|=Mg*E(QmC2-l01QNgs z9WMAuGPI5z1A`EG*I8?NpV?%v5!gEZw#nbt!f(XKAr#9}rZkdVp!ox#QS3*tHMgiuF3L>tjF6alQ7Kq@l+k5v?(eK+ zmz8Qi^|BnXa{p5SZcg8@49b3pA8M6VENxAnZE~@dy)1J-x>@F5CvZO zUXMF}G`AiND()9a>mhfsVdDd8w_f!i>>?ftLLv@BH3sxYT`&hgS9rkIBLt^F6U8xz zRB~`l*>jFNP*e%U(fWh?C5AW#0 z0kqZu?M)Y2l-)pi*7=P!(E(9N;!rYp9(DoVv(5?P61&5JUz$V)+IBGh5=iygw@J-# zLJzd9N35nzP@{f*>tJx_PHM3f`BBDNG^+NY_Z{1!>GV<2#+}Tf!mHjQ6Ol82D?p^= zK#yKy>NDTH?zB4%!k!w^;TZh&!O9n_hK(^kJKn}~&AQW-`LDi(p7pEy!URo?<<(;p z0N0_i??A+t<#`&(vy`-y(Nyxz&n6Npd>fh1=IWZ8kIjf&qhLnbvojD9v;9ewpOhDX zC9oZYSH`>%hpw0@;!2i#Lrj!tS_7>5DQRGbHeYydmksQMSEk4*W4Tu|6(^ci(uYko zeQE-aoM>MtuthaAMaBoZ(eMMwT&>mJ(fI+5x^ddLtZ+sg6b_>&_l+PY0K5_86dm55 zAZKWp=MjRPJdm4;&oqC9Gb1M{m2nVbAklF+gZRhu`Km=g_Nga76w;zqTB>+sxC=qZ zEn`m8uQDayd3n%Qd6g1jv`VMtCF*3_rODoHYHWJ1)6IaS_y>^F5&wqcHrjsA(6w6v zQ1p3-I}{6!d!xP!&q{iuA;cIoC;;Gfp-B!OuWP5S0~_CuHh>MeQ3u?8A}6@qr~~t= z1D}vKP{iWHK_*W%w+i|GEw9O{)#K%A9slzZxSku1^}(5tZ=Gp6_y^+pXzkbF}rSdr2r$3s@1a@o7qy$%){SRav6BwYyP($b6k?!@cp z1Tz8VmYx-p6gwVYc7zJ3i||NLM*EvaEwlde8#2qBrXP>vh!wX0N?cvhk^*VyI0(X- zq$Qg4Gq;jxa8VBdXXZtRF#4nu{gR^-R$=_IwZA7t6?2(u%Zk<8l({Rc(K{+CIrWzt zpvV&grMUOJ^2~REcH$h%Xwct3hMqhywOKR)a0~C85>*=1eB<&E%8h}qLLwt`s3YEJ z%F}7e0At$Jqro+?&towBT~1a<@48< zcCtoatn0oXZKqJ#3B`56bb_BF7FKc(!aI%JGZucAy7DZ+o_d;gorFFleAG)d`Yk+d z=ntuG&8WQ?nqegGW(mLExLV*^{1{vR6xd7n ztUx@x9_g4vUA2VFKE1`I$mDV3@X^FhvA+l5Qr019*NH^GPf67w*fNn_k+k~06(3<1A0()3-k7rSVOpP_;y`>T z)*Dj47K!Dci%Ax4+27dt6I0D(2N3)}g)d|rHB+8FQ%xr22Xd#HSqY10b0yB8=qM1d zdJBfv<1)c>p@D796pkBiSoxO4ewzvE(^Q(Dj;5RAL8u3t_;MzUF|~?j!iPr#?}gLB z#1S=Nuo)I~4J**||B{kpaO6yq9D^leMh(;;9lJQ3q!LD-BtTqpbiyi(pBvq+eO5Qp zoLX?HS)fP?4vxQK&G`d)k$8Q_Sv%hXwq*N=Adh@T@|Dc`=j7$^mpiM?)e{C`_uCg| zh!%G-<1~9Qu300_SuUwRpwX6b(r&rv$H#plQX5xDk#HIj+sIzq^l{ZqT>M9PwcMv~ zYEJAX3MTuRNg7t09A=?;SzWLdpq{!di7@!1(ILWbkuS%O;MSofo%B@UwO-PKd9j@KKoa6jRyn3r)+78haB_HKBG(?YswTW|8Z-@`Q(pU68k zn#-Mw>Ac53Kn2TxytDh`+U}$1k_q+Iq;{<6|y99I1P9&2a}Ajohud)&Vnlfl(ewHirT7 z!9YxfyMEXowfzM-=YX6}=eTGwOCEKOfm1qAL5NxhoyE;MYV1ggJ$}M>IK18WhP#h8 zuTDbY^uJvD_<2jTySqrh;c@~|WIE||@4bHRNgTpqxTosVk#tj8j__Icq@9vwPe#&Cz{*O1d}UAw~Cj@^*BeBJY^)Q*T@s-A^Z6)Ytbpv&J|f zV?kDTB@p>3_wB^feHks5?n$qxZwq=l!;5}I#hzcJTCyMx`2wg ztuF;6|KWeSeH(MI)S*zGkJ`sAB>GxT{!E^)ZS8J6s%>mP!WYScsdp`dZAcx~&EtOS zM6!V~C*VN`iNcPNMy;t_qj@apwvhST#jR_QiOU>F1CoZgL_efkz_^rdVKyfAC>Di$ zkg*Gh2-izcZwfBf^c_9u^^#>S(|b~3Y{r_UJTHv2n5SbmT4r(6O+k+aCgMl7)xIP| z*0hda!q9@BP~}E$R=5PlfzT8;9HF}FKs=aI{*AYhK^ zk^&*|f5@2MyI}f7W}4BpG~EwA_Bg%A`H2zAN1azFSlc@LaB~DGVeD(S4o@XK<@gj? 
z_Te^VaV0z|rG@C?=J6>41mr>`_rF<}Wa{wG<5%7!WaaZ_=ZMAT8M<`-jvmYmfGNhl9-N$oFsu9PWqy-Pd`GEE{QW49}n({0H- zHRKk0>6pi_mrR$JsRiWol-@G-Txk3NN~BgVK`5L%DATk_%O~rzn&uEe*-+W@#=B~u ztB9KXE*i76TNSz;$EWX*O7)^OJe7l?_ZkHwQ;^qYo~Xs}{SG-JlaPNkKhs*H{4p>v zmGxKZoXACwi_V2)X;lsYR=^@B0&#|VDQ*)NWjsv0_TZ#F^<%?+)POdc zvQgIj0@sNvt1pL0u&u8yFaLb^7lyN0;?FQOlyBhKZoVcxA5v$6R@bO}E9)_oOG3yp zLqz!Jra<`Rr-0h((@w$Fp3cuf_frjGZU=$ye%C$ z*gR`NN^W#n@d}ET!+Purp|(*l1v^1CM)?7fnBUuH#|kv3vh-yKR8B0T+~zO zqExx!Z`3V0x0uWgUw9<)&r~$?Z(Q9_9yOz(AYyCX*$stoKtl#rV)NYIvyug59NNL) z=F~-g24K})MmYlJ@A%#b-y1%-oXpv+E`oy+g22I%laT|&C&^fwxNUbZ#f;<=6=wcr z`;ya8>8s&MYs$X#6nKoZr>J@(6>L1#-bb}6VN$YtZr1tV8t?pi6~3Qq0pZsjoJin6 z|Duan(*L7p{1oAW;Gpjv%nLchDGq=c9?4o$r1is9Y6!aq_sgV%y_D`#D(>B=K7Sq6 z2Z=gd%rSYNmyx)k5(N9#QGXgAUjGSGc#jHD!JxBfKqx*hfk~Ozj}NK?`6VsS zrUm);oaEri`@^U~sdG?u5SWyZqjzuZ83_-S0^ySAQFbuVOX`&{O!p1c256k4?yy5i z*Q`h;--^(Xxjc+h9;N0=6e&HzMZ|4N?P!V(A^()CQHL}$l8lHG)WrD+@1S{11^e%0 zSkuHUPUIH5qBs_dDse7hdm*w`sF20kqRJQN=2gZUT@Z1Gs@E&Z+~|TIy)L*~#}&0$ z9ZX;{Le*Ln69Da^9)_yUjpXpGb3v=}F=F3nV_R+L?jr6E)DCTSIB1_88&hZ<9YG;O z5?j|6ts(CRdc0PplY^z!G-k9iy;F6r8^yDmEZiubc~iKaNqN;9$AUAK7q^&3@tojy z`+jb}+xH)(>Y4TDsmJYm6Q8J>2Y2fw^x2s`ZqXDl2UzcJJZ@ep{m7Kg_o5gB3H~}N z=ZDug{S4lt!kHg)7Jc*M(>85AGb)?*+Q*@5di{y+C_PgDf-bj6qyEdfp`1_ezrw9} z<8pidF1IW3wUu;o#ZES{4BdFyQtHsuUbai9IG?DOEz&hUT6L#5y0DS_rQ!?Ullv;( zqBx;jzh8#~MZ|LBlznZk%k9KTY((zlM`7+Ugta50Q-cU+@fnfrB_{U@r9X9acz!VJAG;H^0(vks!8h7U9MA1@uT&=>D+Gax?O8X z>LfjscIW5{*Ub9e6qm=sb@R8F4gca1-=jKbI&0#`oI;!9?r0#7UU)%p&lN;TzSGdU zYrg_kcK>UR>s?oQA8SI$qHt9SSX6wG+gWy@Mm;6)cSU!froZ!)4zink*D4ZT!W9Z! z@UhuM&l%k)*Iu07g*q>-et8hkF#D>%xN*6AChzB(iJ$Bow@jkjt=!)-+x8V6>k~}d zH^;jrNW8-9?ALj`zb7N^U&GX_M|`klzPB?z4CxQt>%SRZFz9TjJaFp_x9zU&oc9y7 zFum#Sdi&SPB2QkF-sYa?u!q**q@El5s+>_Gx)uPXh0Ci*)G~A--LKkRkq_<4hE_1) z^^aEfO}H{WSmA|cxyx*ql6|ON&$8{X^@Yn>Q{mzM{yhinUH4gk zV@ZiH48ShCrWDTmposrB8&U|5@hAQ@+EM60^4k#1C_0}P^!o-rh=h~vKai$Qq7yU6 zyW4t^VkEg;MVpo7+w)@sVXN ztd{)6?JfUx=9Z5rZbq-WGgslTDdlKU2(pR%mrZ25r7xZeEImF9+J-}QaN0SRRI~q2 z8iS2~Te*FUya*X6hA69i-d=z5M0-lUeURL(=ALejdwNm+bksd>4^Q8mu{2k6+#s2T z)Am?6w71a~OuE~%a?w6vVDnhg2*wK^q3(1Cb*hmA*g7ldOY(Pj4pU9ej+))xuzApK zBE4_Fb;xyV#o%=Ro!>`ov_ekdFpqq@v)*gGeSUG){-ZN|6Pga1$Q<48kIo8vU*B)y ze^GDA=M@w$FG zrh%8U)5sNWamCz<=7lbE^Rh0Zd6jorI8j|~Zmtew^;I`QV@lxZy36KSXD@_7gZ&yL zXPmJPk(;rbXf#B$-Q`1+?SF0wd5A!<`Qz3lBlR_Kx92y@{QBtDcde$??i%a9<-2g z7JS+_W}TZ97uG89uC#Xiwl!eoE1yROM7pQO@)6EqAoiJVSL4hyf z`~hK{sr2e5tqe4a5b>1XGn_#kJmEFyNYcfC$TW6VvIE{AKWX*DZA#kk71&RXnzPKO z7}E+_&kT$w%XZ>glY!|d%gIeK%W%d`F$3{}DV8ytIj306WQwO4(`$ys6il>?iC=1> z)4?*H=zD>vuoFoLExu%A?ngYHSpW4m-Y1Z%@0IlsqWTK0uWHtqB8ql;nBzFeH5Li5*OroP_0XI}?B2-Uum^A1wn z4-Ze<1OFVk9GXy}PKKxU6UBA=?9~Eb3Es4yA(anMO2xkZKI*jnv%}L-_cwde?RA@x zcw;{wwmX32iG4k9{eg-v{u|Pfhi}lB7~CKctAyJ*f>kqdF9rEwU9H2RY`?H>t-+cUxNd-rO;qwJ2Y$^DjU^7WV6}sZW{L0TC~m&Iwzyv$hOl{wKeGd8R**MV))6LIZ}6{xNr8*5x#rqzS%>t z0hIcwogTTzo-J5|M}Bp(wHXFst#*B@5>{aiJvZ>H@xNG;LT=*wFMMjR2BI=1V^569 z_+jk_YXGj}pW(Cr`8fLd*!@gZ6kLh6{f(ct_LH6MWMg-CXP0^`q9stv<~9ETYV`UV zIw*02u7cQLbb*R9$PVl7ji9Gd7xl*msE80Vx#hQb;)-}bKWjsCg#4;X@f#E|lapBE z6M_+-6z@uuH7IUOdAyY;)ZdLwz$-~w`zjfL=$H2W#SI_9q z;XuHrbK}3SU%y^CNHthFa4)X34p&CMtwPM9v0|`_?R9IEJ_2CewwG9k(U>^#e0OJk zV~>veWas%l0JX=T^L+IMYtd{a-Z*4Y1$eCt}Owb z!<`@Zw}=y*DRvq4x@c!%i62y^Vv^#An!&n`EW#->40D}J@tzSdQBouKx@{O1iSbfL zh4t@PA7wB)XEBGfq5ERtg>gX|7Q6!AF{%s(pqT-5W2EV z@4&((xA-Mnyn1Y(r_rYo>~}y3P<0xDoQ8odWVbEYQ5$ajXx>&>HkL;RiE}HV!qm@r zgCr6W^V;s_i)S0#`+LUxH#fF-o^2$Lcb-1l*hPB^=4#`Gm*MOPjL*4alJD^D56rVq z^{It&XK&Elqj~BbhnEV}&33m3ua4`D6d0R< zrGpD$+&X}$I*Mwl8n98M?(p>y;7mZfb!L{RbYwkX+R;APufA5R>o>Im;Am;PBJQIb 
zx!XR5NPxnmX@CbZ=GF2P9=se@Y#g04S}c_ey}abfI^78nu;djIZ<_pt(>#XSN9Ns2 zP&Q3U1g1M=slb=r7kg`)8~5$RUxB6eXD5UFl6;d|#e?K63K)K)y4M3{5Yo?$2dFOf zjT$Hi58fp|e{+fi`VW5LepG5TiYB!BVipjjj@qa=b~dmW5kV8;0W31alW)nOQ=`7~ z5=e7XCY}>UK{7& zuzk{}BY_25brph%4)5K%u>6ue@%-H0)@^Zz2^RJpx==Q)mVCgdvh;8dF1nTa3c_yS z9?@@&)VF)7Z(pXq!S0lKm%r%y&iafE3-wqjSZ1f;&s<}DHLRZ^C?05~JDzkNgvw={ z*t}kQupia#)%`{Vy!JU#=}6%_Db>unV$$PEVnXvs#SRlI9j2iAAC&|tYIxA*)DufT z=S0#KPgHhN>G3>aKNY9q{d8D1)qZN*+ z!hU9^ahd(xtUXwpdq1yeFNKphZwZLhAzoB}Kz4D$e!qA6Z6CZIMg*MtdT%B5^~>~E z-SbonC1UyMTB!KyX!wfuT*bOE{8%aQJ<@O34-jD^+A(~M_Y+^^{d5#tProqDeqM%( zQ~UYR(6EyRWOClgu7ZJ*=Q}y>;&Z>Kt!_QrZmyv%aDcSGwf+1Bj6AB!+mCx2-@n+{ zUWfD#6AM&;-Dhk2u072KPBso=y_$x+1dmrlh!EScv|8lH-yLU_ z^M9v%21x<;NzDx*g7@$ZN;ta;yuQ1nqQl{+;fL&1xX(@J&{mKr!<+3tV3j)T4BKaK zs`_#?p13`zSfZ7Lb0i0EAePCAn_7aKw<}ZfSk|rvnJ|<;Ru^qSP?zXe!G#fkMY98$u_p^LNrXyYR62{>9qUt^I!_Kde1{v9af%1PczG zm-p@|{X0j^GK-959;t^We)9E~`jUGRvyFfBM#3K?)Jh3J*@$osNY!*CQ3-*mXoN!G zG!zXx=k3p^fVml|e~2@2Kbdn$f6`I4(Xzt=GwkV-7=$HuM%XQ+Dk5P?M&cO@Q;4W#=;I&}VUC#;$76PCx1zT>1(|>}`1(tS z9`6}w?*V3Ce;KWM2ACN-M{g5mf#4N6L1o_hZXk6Orzb$+E_>6h+D342)nbBKB2Lnr z2VV}1qaDN_Yd$g4e}e{VZl7T9Q{?(Q*^|Hqa!;iLrR8Ai5K72kZA^U=NnzbvO_U#J z2TU;$CA`dVQxHiRJ;dOWEy`?;K2Plp1vc>5&23t0vqIamtv!fo>rXuEb#>ekPvJvC z9E}w2sQeh3Cf@=p$O)s@`CA=*+$veqcHG#ZotYO5@zfOJ%ohmzmqV~SlA4s@+u|jr zH#Ui;5+7ZPMoYEr*o|-)9S1jPpL|l|E|B<0S*t-ZR ztyeIb^Hz(BE2GqnO1U&?+F-?e%+3N-vg}803o|n^7^t$myHVrP==|Q@c)qc}K=kiUw-2} z&kCK>Xa4Kc>dGDbFY=t{$){jKtZZ(^b?3Wx`t&zXt0HOWzYh{GszCCPJX$Xoj1NM$ zq~;0=GR+xT<|YU<=`2;TyZ8tmPk#q6o@ zRG-}E)9T%7?e0oL7`(Azx4^3Us1RWcu8;d6%CEb$JDfQlU%;h30_|?BJxVs$o_kTn zowtZVD#sl>6k4c1sPas2TrKjjcR`yekfvZJW3WaXUx?Uo|NRK?hVx&*sPrZr~;fRuTvZFQrCu5CMTVjv2e!4>HC1R;QkZxN3$9J(rN z`}JZvG&5ovPQ5zsbvua1Nf4$erw)5LJnI4cNc70)mk|u_N4?iwZX@ZYeH66t1SZ6G z`y#0{9LTxdUZ92i10gV$4vLYX@caSjk-rq%>_W5!{%eTNi4-w0nDyI`MG>lj%!jQr zSn~+Os159Bm^d~8Rx9d&z@SrDJcvbC;_;yBUObormS#Bg6xypGd4MD!)t!X z>)pIt21xlGuvpE;%H5T_&}N3MzpW%&OPDXvnSmtSbY}-)>?#gQbOOYY`L;NVv$hP{ zIj(q;Ee!Srxky7xUGlQK53{xFTj$yBy^iNv0T8h6ag$v}Do^6B*d7&m4jH6+#FdM} zh4~MLxIrNZZ889#tRycaAhMm*9tq0Sc1^7y8n%x9H5#~#l*ibdq4-dc&qShZj^TzP ziFaV}WWrhkW`MwhNI@NM)qV!Wy6F1F!rvWgeifl6wltQ8q~{XsdzRGig6N3_sTCiz zPg}1#z%k;PhbVx9r6Qt&>#LA%2Kcn>4EtZ*g}uP9&h7XKkhbf60E%xa<#|0ELqBwS zF}9D4Y<5ksVP9@+$Z_W{f zR@4Y;wTByf^Z=G|dtQ8iHCzDd;U;{t#Z`$(fHQ{#eb~Mj5@BmKxh+A)O7|a~eYpD3 z(-=TKA8L%=XDe0me8;*W&X_I>amPzUD_}A1<;!W#dfF)KOQV-AfSiLXGoo=j5+P8% z>ihw3#ByTOk`)g}NWYIGrFV?L!1l_?3Na1VcF+&D(W4b3>3By?L$16cq)>MNDA`Ma zpL6@A82Ars+gnect|bp?Hr)Ncq~fIZ`f{@KVjoUn+irKM!X4xV7?1=0fSW6!V#u(> zV+Qc4)m%SG3yeB#8TRK@kT@bHk%#@FbrsvKiKoxkwstojNtvBzYtNqtJIkWR=(#m8 zhD?8ebyaILB$d_bYSo%Kt|EHm^A=-6<(@Hbsc{e8k**t$okm~xlafflYI2de-jEh; zWFB_H`iU_h-ik-~c{P^ZybJ*b;Nq=*SPKGj<6nYRZ>ohLx*m?Qb7T;Ko(BTu%<8g# z{Dh=(IK&18%Mr`pI1mJbB)1YL8;`@JLPIjYYG|>J=0l44Rj*aX4G0!V0pr#z_y5@7 z4BqGuj{@;0hambBn$DpjK;ue&O3j!kMvBTQ}CgW zAV{4f#QU>ePoj6pBBD#|p4dgE0Z#x&T%xZU1Vpw8*_7EN@X}F-Rsi`HUUHxuGsE!O z(%+|_VR$RBqX{$Yw)J*OM7RP?*;7NxsZeq*+Tt#HF*V9C88`wwOCaO3c8aLYLF;5? 
za9~Wji%?-;5ajj_E<16@qMRGX4j?nMArIf3$le-y`Vm;Z>~8mjMkV2g-NX9KQ~{lu`pB^i2|X5ymQ_V~=;A}M zZeeyoA5glm!Sy=I%K_NtiPFt?o~|;?%dN|}mqu*7>^>$xak@+k;{6+V#_xO4P+NsQ zqXWUoe)j|{ey5*pZEtPAc!q$cy$x8W_qP7IkyN4sVQ2g4KX`r;6@v^eHzXY(1&p<= z)f!E0(7NP=7HhNl?TZlrAo3=5TdRVkkQx!J;rfS*iQ|~=ud>#&wV$@0&Abk|yO~E5Nw-5N&?4G+UgHGha;Tf(8q~ePNWnaBQTofXl0nR zhp7{>Bo&aCjYa6@F(X{5h3Fq#)yIWmrz*&zQzRe_JS!=L2Qb!mvnZcZM`i3zEqovF z^zVOFhi<_W(oE>F@C*>*`Ll;fg@$!TNFg}EN!BX+>X~9zvb`72AiMq}*?A1N_)~LS zSTLjVV1<%TqV1V0vM6Pu@4!!d$NRf$+mC8{&o|b$9&fEDPq!ZKuI+A#Yn|bkdPe~^ z{FdDG7qrS;!!`s{lg`r)ZgB!B(w7dgUa%JKmgexNxdY^ociXyg(x~MsrRctf@<)hH zq=VXyyl*&~dV_%kS|FI%T~XC;art&xQySPAGDAXd03e;8RRv7h&)uq>D^JetOS(|nP0uLP#TYP#hX8Quw zq_TAJO0up=EljvW*ja%YgFyC2`)AA3F0@Xs_qvOq6FZNrhFU<3xXy4e>Vv-giS%Nd zA_pASKAhGoR*QC7p0ErQZNCx|^)y&}_Hb+S#m#uDp8w<)$1Odlt4Begic8tvPUmaBEKPo7xRD=`-T1@%!OY?yXe6sjGd| zKvX)?e*;lq(BA-?B!1UG`=TrUYeEwz2!u=z#8h)$NptMhjDqXZUM!3>O&7kWLWznY zeuJZx*(@>}jM+qUjYf`s{XkGviEu~F@kZpYanlT0U0@`~%!MMMtsn9ZDsCBt<+_D% zd&1^K&#|PX^l87p@eBrT9xk3zH>dE^sB})Ox2&jF#cG&DRjx$ z=&!#-UlMv+jaY_QsQ9LETsXyZm^yScaa4B1Iq z@H+!@f?Y-|gH&@6LQ{}|*RZ^5zlc)Tu?_mJ4 z>eLX=H(w679^H5534v2Knaa!I`qTR_7u;Ps#IeB_2UaZ^-CizaCi8p~!@;h8(r=wt zlPyAY)MYN?L{p0*IV-ZSJm;ibD@~;;8BP`G_PYZ<_nKDdEr>;Oh`AsgFGsY28 z6C_t;9zY&sGHl>-2PrO??MjXnJcXD*cen@zF!&+l=3pd&MJD%}Wd;ajUTN$t>{It; z*d2JEktU|H2cIo-$v~)Hr{k}E2G>djztr)|#!B)-=Lo?&_QU>4^6c4|bG>pSF@|KMJK$@{ejZ!y$6{KxAHE}QH-VkNr+3dVim^EBNBEPwa+TdpT55|!#} znx3GAA&XqE8}|b}CG|H=KLm8XFKB=SaMas0eL=729Z9-j&HfN&`u1=m#JN+U>uGw< z?vw8hJcsGREvEo_ou<3YTfR4Utv%{*n%;76`TjKCyxBWO?mkPGIMoD;Ueui?<@@Ll z7%eCkYIuQj?;do9l1b?C)3r?qi*0ZS4Nr&zmS7bzevpvPc>*WH)^8ATX%D%dEbN1i z@X2Gn{I{njXT&W4=aR~+`pTEJ{{Q~ZSD*l^mhI0K~>DIf460Je`$9 zLQe1#-I=^B*+LRbSim)&F|aL3B!6GKd5G5ddlaoS;LcQ^*1?~tMNatpfmgV6%)5Wo z*_h%46Y;~yg-9G8->?$kC^=zw@J@KwJ6Ub|@9zEB=~BP%4EhNRYDRw&m~=mBeDTHI zq>>=&$i{E5k({S)IGzqLEHI*m_D4Xibx8M&h~=u2=mTX-L*(_+$SWHBf{`yafMA3$ zJ}&GhNyCr-CC9g+&0fIYru?hbYE!mkGqkpa{|~=+5gzSFci7=KWBhF99=LuN1H~u4+vB3*&!{@o}BbZ@i5!PF6#`?Bl#A|S1u^YmYNz}9JU?QFMAi0hIn|t6dIq>s!L|XO5Y>#6( zIU4Z>J9*yZzu5Iwu@lAP+(heN&o|T0!L4+rwJ=lK_p8YpiWxi*u@EQ<6cM;2auaVwPnpj% z%WG4>=bspZUx4+w3<+-2Xb9srt?G6#P{AofIA*0#18hmi%Yf3;?lKm5gagAgG?Zb95P)c)~n-a8eEF&uMlmxH?hg}Q$Qq# z^CcJ`k&;JkkC5j235*m`6wIB``0A^9X?smlj#@Z|KZ78rkGlRAoO2h2aSuBuElHLx zWZ^xgbej?9F=_;8qLKe~(avGRFI?$&Bs~=n~J(b96 zu#B5l3(sOx7m+9T7p1aNBZLOz^PuLz49$hs74YVyBEcKrd(4Kfk!)VuBk6_hx@8An zU!*%E-TPwVJaQA`4~Rhw@q%{z<#y-7jWR!xPBY37Ail);H^o{?PrtyCz~j|o0aM@T z0GLv-U#h6i=Inj`w`C+)M!23rau;zFuefnXM`BTd6x1CZp0#^J3HR_BRPHTNdvkX$ zWXpM{!rRrtNmuAE&SU7988@zG#$UxVPER(dH57LB=oF1M6A%FFA|nbq5^E=joH+oO zYIZ0@BnEVV{=_j-gqI`m3~nP}&RK2oI^$}fW`oWlO^9BiMWXcZ?i{*FE2{~x^y9jp zbeA&&<-dNO>{@zON-mr_Rz1Mb5>&1>N2mud+#tzbm>OA+ z^A` z%Kq&od3%XDOO;)jC4RyJXJk^5r5*vHu=DFS3%Oeu?o{0Br(HwipLV(Ij1o5yle5s@ zy$==-6V4)6pkU5ob!l2n9VgBCKh7_TM10@8+gwAXecRQG%PLvM#={_C*!^= z3onNYRrRRRf>EVM3;tz9zzum(;R4<@5#VtgP>h7rTK_|FFYScmxm0y*r@E-rj{geU z;=i(w_^$wWdATYcQgK*AQgf#1>HK|7T%icwzb0Jd5w_+1e{9aVH5)~qz-G=?ISpU; zTm*%jU{t7s!^esO6Psy}qu$z)!wk1uEMzzcEx{$(%o#1`m`KC>obrxIWlGa@TI8N| z961NBY5E)E5G3e;lmvr~kcGo#M+h)6R+nAGG$Z6tEKJ;Yo0<7s-G!+?1@S8c zMXS1B6`MRI2NxOsnM2pY0e}H>Pma)ZeWUtYogHr=Dp%Km~z(#p4-BVWjzk zT-vUsW6yCllp8w`X-h5Cyo#6>6Jg+K!Ek_@Qkc>!$+G}ay!D2_7kMj3Yvvo6&ULUT zfo9lNm}lTFm#A-;+jjOrLKSZ|Pn$a5-NN7{{s3SO1;?p90)Bi3pNH{j(MR5l;=wiMlGX zW?R7Ppwm^?2QZ1SfislCyi%Lskgv{_2iA4ozI zVy+An>4Z%G%A5VnSOEd956Ov|l1%MR(!h&nJ$ygzw_H2}x}}B&tdGt-72=WA0YkWH z9E*b(Rub{lTJVrmA``lB8_O#PXk>tv+=d`S;5UBVd5F#sh3Xj%=pbXbgZOs0YUW$t zZna6&};X{MC#wr<@<%PF2xemT*U6u7~( zE|R#DGEGv7ev>=NpS#Q$+*d}H*j|VZE_;zN$wwW?ITLMkEI)_%H|x=fYgl5`g`7## 
zccD^4418z4ue&%yWUW>W7_3aVvp`s;@c)cp0=n?TJE6sVc3sQ@ll34|UH10!ZJSVWi%LlUctY6u8GgyKMJnbT&`Ai%$ z;`vMkR#X$*ej_eA#Fp(wsyuEf1X_V4#l`x)IpOJMER%uC<_{kZ>x2A|*opRNs;OfB zlZ}(*xCnKpG^as~%w}vTHVkLG(4FUe>vKl}I_)_RVWC+C5NAtMna=U+JNE?eS(yFp zf9Kog49o}?54U*8alk>kD-WFOL_a^V3K`C_;{qP744-6wvA2e!Sx{XOdc}qg<-koV2itKcr9r3nM>+uL0llO ziGj7?;g|{8ta##^5ueT2_4N64+OfnBi07JnZGSVx+JZB>8G+T5IpKp5QAf;JLX-l+ z#LdzhnZaOnnjET&iEHRGlIUsFm7u#fE&7n=NnyIFMHS2jzA2pzW}a5{xkDDwyY#Kg z&S-$0_P=1`{tCS1#r%5$tT{vQC506cPH6wjM_5T{hI$UwYQbhemubfaem-p<519nn z+mLMz4g+-vv7AIh073CQMJtRCxuozh&e14PHYf`qeg_b?IUa)PQq@Q(59Y$G`k z@}W5Y1@2q^yUk;pyCIV(Bo{RB7lIJ#;;fAcN-w1>+qmc;EqT1U1JPTRXnj3%;IdY7 zGDqew)OQN@CaGn0`xX-%m;61Do#L*^6*}4F1X0QTGi4imIr*mKWxZ-CFE`MTZF_z9 zq4bq_MCrY>`!F?zIp{Fvr3L&M9}8Eh&V8=oU9SV@Fl)eD1ttK2Edzk@>vuEb47i>n zyv&OCl=PiL4lZmh7KgN~DCv_fRT;E#kk>6mAmOLcrq)t8QWulBK&R-Rh@=EIyhGf= zD@5+9jgX@!2s#088u&Iw>;z^~3K|IzD*7%crSGPhV?9YRN9LtX zvsz^H_VIElRy!qv8k>TTY<3P>2Tl+zX5ye^yI5+Bl`$m=6a*(3bLc)4Bb8&|o&^bm z@I+fDdx&7%xB?a=OfJ=D%z`-Lb+Mp2F~+Y=@L<{m}*-y*~EIaST8brIldX6gZ!fiwn54XYU{S9zzUHQp97+Rj5oo`B}7RQ zg2K>DBjqYv#KMp9)_^7v$z|Bdr6d|%n+|i$Volyd>@7ycCx^j3y(`f-+fA)9N1=b> zZfa>0HvM3`d5wodyqnj2LU?-iYGfSNL=sWUI%PBYW_JV+G)*y#x2|FhE|B#Y1}6#d z{FNZS4lRIJAcQw)Z!TK0==P#0(lE8t%AN|#K~XS;OYy%H<@A0aD0QCei|K(w$lKUa z4Y%jJzi2~-8;rB)?wOE8y;#IOGAoyYULz6Rt;F;$K-ojgYT+&ryNh*Fu+nMn^z+cu)M_$+PNn14K;Xt0SdDGE-;KctD!-%oAE^oVW0vU{gcz?^ds z8?Sd;Jw6r9mm60)U))lP&Q5KD^Y!HT%!|1Lcb03=0idPh$n zGwDt)FVPwP271fPh*xs=G&{%DUddGey6F;t6Q`RYx7^C*R^lvYTxM&sE6-f&jjp^p zEYy3DvG1lt0C5c|ZI#~D9Y68UlWklNP-lT>@ls$YB{#7+gz^9QT)>1$I-*wLzaaN$ zN(<@B$D|%c=i>>@jy;ixP6oEJ|y0<()fvhgCUA;cete%p{97b{V-I z-3y@=V8}ZbDKO%-qbB9Y_g9eXJWf1tO=pMAL_V3KchdX6d zc}x6Tkg7lI9cf8B3^r(SHTb}*(kVk6pURzSbByu_y*G_I!Ck2L(t-NM@F;e2;;Yn{y`|H$5=&iz6(fw|nJJQLxNEUp^&7DtEbLS7~x$_rwso_I3 zcmA$7tntCkoxd5w=0}}7*s`H?e(oSRUIll;J}opP0Z8!R8d`@B=pi?*o^>=uBA-D- z%X2XY3-_`;=U1{T{rNu#I$PxpMXcmxb$^o|*Vb3rz-5Zp_HJ6S%wu!2WKXoh>vX>h zVBxIcwH+p_tJR9DBiNL5=2+Hu?l3KH1X>WAPUY^(%HrptTW7kua|gSnsiuVSh6X<` zHh-ZyW#=`&%dq6?Ze;}Q=@AFVNyVa$;dL5=T+fnEo?B9M+!_p`H_Nt3n|Rq-jttHq zg28F$*i{=u=>pqcJkN;T{#8XSEMNgb$3O4;--oD7fnt^sb|%02fr|BZSPosTWS)YW z?ngIs5!(B!=J`4Af}mH~V@bkNS(I_)&gsj$zpS44D7TE5;J<#()abC{S{~#u6i~bO z8a}0#50`X_g$N;BvTe#{V0cVI#%0#3)XZYj{O3$Hp@xr;UnX4Dh2;cy_zZE7W)y{| z<@%l_PQk*J>}2F~44b)*hdN@>Jix3FodE<4Kf;A>Q4?-|33i+X2V$oTdzzgU<&h9E z>hon$ z>RiV01^pCAX}LWf3i1{sujp>}!zLACm1K@a1prh`@8IbBHxR%XN#GD-;#Ivw>f#U6 zc<5oB9@yiWen4(Gmm8iIv+O3Dazww2p+kP-GMJ|0p<~In1Na3Lvp!_oQiCN@>DxI~ z%^U;pruH$KhVEqaLz3Fd7`(QVgs@&smFtgKZZwYZa^)CNi3~X?Q5)mzf@#mm&h+T~ z=9^^U${T*->XE&D{JkV)azJa41_uH^7^^S+AVbZ>zyy=ak>>&bCkQGZxDM-K_ z0+3g6W=T5yH-b_O!yDl$x<+{p+x>H?r%u6qbNhuO-m~QS=m7b5lcybECR_oaj0^rU zI0c>@xSsgVW6opG%>zl=(Y0JYt|o27V_+1TeVB*!o|#Qum&gpmY908i0CJeW=Kk$< zQU0&{2IzvR*~@aJ7cH|u2y6AZ+fR|4TzL%EX;IfO%ffk8)!`WM`s3FA6V@PE+x|!L zXj4?Tx*?`V{7+_Nfm4c;55;-zp z>R|l+1A7cs%~Bgu>=1>Y+Niru#(mhk;N)b|@Ur1RdaUJ@+Z(OJQ}+)ceNC zCAl>R%gmpN#E!Tmg5-C@7UU4&gU;Vs!By}+%McJq{Si~FG6|k!XpuKPgBNV746sMe>vVH4>xmoQX;7t8zK>7j^J-lu%bF(ZPw|xsQeK>d!lwb#7*Q6T=5f};sN>T;# zORN-E42WmzOQdPZS;;kkqCu@|vN*%JyDNWNx$DV9xTK_{;X>n}mm$hS5z7H|-Ur|u z5NscTLaKBEgbp2X%+Ph*Md;gcD!lG8=2e{=Zp646} z3u?FQcKf)6Ta_y;2ok*0$m}QsgSjD1%4x5Modu9J1(t&N?CAE=t5z4)GFwYF78VA% z0rB6G=unuJ*8KtRH~_rQI_DjsU6v)$1f0wy=e%oV95}mN=(m65Jlg zo4=%(szpl`WU?(Wi7*YmVl9^V5bksWA9GYzqh(fDeGhPnjtBr8DR{`0U+k@IZm`-3 zk{kAaOMZ47_Lt-vMWiNpG!Fj<$=e#jUeGbh&Ob9sx7T(z zUp(8`-ru`_`*yYuIhObjH&JR#V!1#}9t4yy$i+beN}lkJ`m#KE-r>8-et_h`M52^H z*fW#??Ht<-^0KnJIJA9Z_-wz#&mVTHvTw2o(4dBSC8N>pZ4w1BD@g4ABqj@v%EQGaLJP zof;h&sLWRivX2Hxc!!WhD0pWhpl$iA7htnQ^`7?1$x3p3_GYbFUpfB0icb&a6V|fl 
z>CG3MS7H=UDTbpg4(Kkmg!l_CpnxS_pr(ebpa!C<32e#J$mHY~;oMF#A_ob>VPA|u z#_aO0aLe=78`c;`_L`2IltHCbwR8|96k)w~ND(I}*^pc?SunZs^>Es3?--mcg(6W@ zV5ie%u~dD69NpaPh{|K&AQj~-o87q7W;Z^e&8|bxO#v$YGVNxAZ5ls~U+3pFa7aR) zL^-&C5nx(^Bchsn#iX|iDX4t9sEXGX%HoFxNDL}gw|x&EKw;Ca#h9#UR^?-rW3({R zBE${Y%2UjhG{=#1dy8m{54GP{Yc+DW-!o9;*@I{oWDgI0fD;AU#mOPGTYpfM|Kf}T zyu+qYaC^MAxBq?lf=ET^)0i4gd=jbW%V4_*?swdm@DxQA;snnnm$i><3{2r{&?_IN zR?|A1HQKVF_T8=>yG6IIYtd`vvng71YR#=`vT}aRe~-H@!C{C?Fy;aPYdEnj_C_vj zk2wGc|8(v2FOjfIHSp&wP%1X!Ho-9fI9Pc!`34%2-W(CY?D01(eD0zWg({Y112j#T zzM+JJt>OpamNcax_ab*n@Q1^UFwE!zvP@=}9i~;4o^@+y>>0J=M0UMTYn+Id9IPfq zSnu7_O%MAlDCvO6n^y$W+TQxs7UWx`o4P>j*n%m7?s1ST-xKIQe3n=*#-9weFE6_ii+h;_Haep23#jwbNNjpn<_7syz50u>RdsPN?rFdHeKD_~u% z-(9)8yqv6Ro4XkIh2&0$IV`FDuDqdqC@>A}Zm*g7gXd+Jyb@s;=P>Zbe2nWkZ~f6ZADzeMb6+VYfgI`ZNVE~S!`RJ& ztE#~9lX!(+y<}3Ub^hl${Q!b5_KLwF%XKtd9j_v4lA~Xu7z`xVrrri_C zfrFP1+jYAy+;uPUt z4O%DNyVA zgEv&+0rGR?bY{kJ7(G#KU?@I9 z|JM4Rtv+niRNs@|+Nk9XbNbR!!uCKkvR_yQR~Ld!N5caBkLIu1j%D^#xt6h)Djx&X0>GCf1mGK zYwy}s-PH&iCz zP|p+44RMR^`Q-2m2N1YZaibt%^tK-j7-ux_T1Mg;cKqo`mHWFhjf-I#O~hV1dLCVZMopy zcb5#!*F{-mYHZ>87RO*G2)FYtH=OC1UuG3$cHlm~s;1z&MnIzFYpW*W$#yc3C zalRMtbi|ByEQVwh6E~N&o@O7m60EbFy#t^JQkYLr(%Ktsb-I=cnK~TN&0|k`UbXZ` zZA25lGuNc36xJ^v+BPL51fh#FTNGh>W4z$~?z|vPigxd zHy=uzBoJ^!o$6vJvwnsJ(+S9O;7xrmf>yX3*S)jC({B!eWOQML&UeOF_(B!kVkHNc zP%?VFiO7Slz9ue+8}2R;uJ0+`UG(PZT^ii`k?H7@#8DEvJa{yXr*_*syC*ze;BJC&tE37${Np` zjiGycNyNc;0|~(c8Ff*V`;IKJe6=araheoXh5xI2Li4*d4Ik*<3UkXWjfIFFWzs#D z4qOUGU|w;uI{fbWWPri_)E6$QuiS*gml&sbn*@vA1%fDIzc}Y&qet*DUS;qW5r~LKXQ>J6seAV(`SppLy+L4ERslPmbmnbg8n`$Jn z?OeE89&$e5DF*Ob-*;Qs37WnHz}C;x!;y-zxPdPq7h0t6k7Yp*SZ<;*E~cAQccvRF zQYKEGGzzuqTXTf7%X6;5kX&npD@i@Y+qrkh!ho1uLl6l=CZRWS zAZ`Q%T_%?l<~s;f5$S$;c@akRwgWzGev1W%TU*kyH{0Yy5eang7w4B2TL8)rKq1Mm zYFSdZsv)F@pid=!hooVmvoy7RBD=a#U?=%qW5kHtK!8YyQHTvMpFLwYX~|>%_OyxJ zOB{XZN9#vNm&kocV+2N=?h;C5I{s!KXQJrS9?BnmC{77Hz_3AgO^X09Dyzfu{b}=r zfRPZ77NG_e<@V%U?ECM}?aP1Gc z3TQiixNkyyvC!_V7hn&YCtK7$0!MF}!^UrESCB`052{DnOAX*BZCXqV3a!pXy_Zi? z#X;jn7vMNbk;<`+#c5K41I!qK=nQm=8d1tOT=q+ z(osywTU-E;5?MjzFH*xf;l2sff6A7qR7}=m2%YdWRJP=0H8R`F`ZLrv`kQpD@RmgGo$*5FBn?pt0NyX2d}musa+N3a@yWYXWv14EH2YV}3ovrZ)ETilp-k z(S5389f+Y=H{s$O^lk*kv|oNTJO|hq5bJxIJk$oJEE8QE_ccZ2wk9AqQ?0_091 z5R3D7=igw8f~s%;-2g*}E+T(?``YKF44Z_KCzf zBXIyIq=#kcmIQ45s$u#-h+r^G2QieS%kG70A!IPHOU8mNpi{=U=1?NK2-81PaFmb% z1q;gF01+LxZrPe8n@G0lo>9e!iAOuaV{C&9x^9Q!W(1t)%g)>mkt8I)i_PiAql)Is z>PTn@yPc{^v@wFeb=I^2bKMMZ`eqLui52_-*Rlq@+v$|7xZQ|wj*It#0xU9)FTPmT z7vY8O-nA{vEDL}M=*fA(krHBqi>1w-MS|6#UFW8h;5O zx?x;E2vo9aF6DLFr;wcpk3pPMJjC%X-lnM8sTZny#_^jb8_3l!RG8W(->dFv!JJDI z@=*8Eg|mR+&vvi6=bn7wy6%<7@HFmKMws@+>Yj7E{Z8DuE#t~gAH-E8j}%xgRJMby zG1so4D6nw%6k6=mlWyJ^_4$uk18Uq2+Um5$c%PXL3tIami_#@APQ(a$4x39$(g59? z%r`&6=bR)%pc0QJPS>1p_MpjF3h$4nlYZp#7^-5#P#W1Bzz&Yjrsz)G0S`#DI>$0? 
zOTcM{MJ8B8bRxLzLsA}Ulyejd2i;)%kQ?D-c(g=QQmX~84K!@$)vaO{wh*OY|Rzc?iTdy0IOxn^S5BZ%%UQ6i-DOu_XcOo-i<2=8KoU2&FvPCJ^vbo&5k1t`oK!wTEbh_ssgS|)BCf}Pu(o;u)E~k^EmMP$_HbWhA>7aXkg6TL& z1tk;91R!}73+xijR6K@54z_?A%Gp7cGsYcG@gJjomUiuwvA_`cY@Ta#%fBA{k6fn) zfv{jSqkOmlW^d*G@0uGGL{{K)7?;mT0Xe$(AiI$HW0Mil7=1F58n9FPWO{Wtg>oeW zv9=8Z6=)S)A!8n2S$c(P6dsUmW%784h>^!@(;)>AuMv`gOc0TqKgl z(%n9)71SmTBZ^vB7+_eZf<~czX1UZiCY5wgC~4&6MzURhXisU;zKl`z@z={C5OdeJ zPi>O%Bg$kbm8)s9to=F5rN6Qubuyn2zz*6qf3_rE#4KTHtQirhM|qfI2Az6=0F$$8hfiU$5*gi*qI=3k1 z{W*N;{O?T*RWlNq@gt#6_ry98w|#e`bU0fIbbi{wJ-)TT1&@!@+K0QB77JqRqYm&6 z@gIXQe0NrAFzyd8`AKOG;0qi4heHy9vBnSIu@Xq1aJ_p@J;vrpfN_6lp1SLL0tT;GR2)AtxH!6&^N3W@G&suINxJwr$-)VV2sIP2(^S zO7m#8;S7tR(edGvxo3z7 z0Z`_QBMkQWfFd6v;r^-NmJH=`L`8;LaJ3;T&OqqFjU>qD02Be2HH4)|4iI_9&ZE?T9W+3g7Wl!5 z+Sm}Cl=Bz|E~xoeArbp^a{E2s93EI6c5N;rD^mfWhB-n*seV&TU=4~vd)TDUpOdPB zyVda~(~a07sAV!dgQFq<@y*)znc<}S(hY1DW!ID?wwACyx3MUBwlQsPOR2P1Tz{pU8jJDx(r13jDKZ}HbN z)V5qmCnSslCOSf#9aZmyOo4j!H~HObS412^|HXsuIWi73)D?hlEkA^nkN>bF3j$Ev zr5Z|m{YK5LGM&hoQx_k~qF)gjlsIBi!r>J00M!|LmO;q)YnpIuJ`0D9;q3W9@M4h# zbVGzvKutFY-pZ#;Zh+;S%vUcJt|-_x<10nAox>O^pFmk9#Gz*79AS5f7gvv0;=HpTL)%I1SZL((_=($o&PEoR{F zp1nSPi=FgaQmYgK!x8yRM~A4=N*Dt%Ff^D9a&|~6jvg>c1NkLy+&R4SH@(t5y}hC+ z3sosn$O=rsu|7xXnBHqRVan#mF|shQC4)%!0iF*Z z9TS)c#hp$S!{lwqp>sA(-4not@x;fA!nyy3xMkGk8~?VlvHtw|%8l#^t0+?U>f0@J z=}idQkxD9I$&E-bL(Zy4rJ}lQ#7!Qhx9?5L8w`K!uRndvU-S!oXmUug0}J2Om$&&+ znlPjQp;({LxB8(4!{tD1eR`KqHF>hV>$7`&mV0CF)RCM~u0@aB*K0`sY**XHPA6KZSx5j&Ui>erKv)2*CMD{CQ*L#n+UZ z^qYaVQ#a$;^OZH&F@gHrMs=CSX%UHkin24}=jV*7QRBF+C^Tke(HVb$ks>(k?qbiq zh!GjIwjA0+=w~oY93xLRRtj9w)T7_u)T#j3>sV@iGxY@kqmrbA2ai_Gmr-8=rxz=) z{;V&9@+{xIt1q~v*7ulSxs9(kzJ9aCui(n7_!Z>XqM@y0z~Gg;JIK?9xf4IL?`o|3I>R1B8&^FJ~Y77Jtb|W{1I1iybBn zw>)9I#z(Q|hsntV-0-xJc_1tpj1G5aA3t0uzKqifIKzAhsKbuKe;>`Bec-*5zy(1u zP9YPLv)U@16|XP8s8;b6Qh&gNsgQ+^G3JQFNi?WElgYuSG6tso<;pCz8;dXtEU)Rw zJGBvo2+E*uR?*@6>&5vsp7J=$`Xk!BX*2p^u}=cC)~sMCHRW@`P`qGPw_4gly&i88 zmtX$|?6h>eWEu3BQU9I^7()FeeDqDT)Xuv|;eS+*=pTU8NhysFvrX#_G1c!*tUl$V zu_5^88NsRbM${&${kr+`hNHg0=lWmU%doHK-N-PnXH<+oHYVkWIGiq^;Sf_Wb+#tf zW}glJ=12ao?b^Q>+q8eoq6PQX>*n0p_9sU5JX*9tx-nPd;dzYHHmem6 z#A|1Hfvw3kPODN?-iyA|Pm~{QeEtPyllD%h%s$ffIG;GTt?n0$J?I>@85QzH9I)>R z=uN^O8LQGcG}ff@Jr~2JiHBp~On7>^^XcWQ9i$EX4?4yPbAP5o93S~pkFW{r^%j`% z(5(~Cblx=0QQ)I_FNx%7J(e(}J{*XplqK1qDLuHiOT3}cb2nopJt`Qd{QKDQF!sUj zoq9{NiBxKt_Vc>?{al=gGA?cMVKinmALjef7QEZ;@h-yM=`rY@#}O{G#^mu@UTl2h zWzhNVC9|sjX5-oBNG&pEC{T>9DxsboRdr3FM*x$o&}p9ft%;L0 zKm0;c0@yhx6NtsxO2)}y6cNeT%6?BXQGLLoYd`OhECfrvqF{^5WT5#%ToU6*FD>A3 zqHj;_{*p}GS+C`=ydRaf?A2cK}Uqc~O zG=wf6LP+7OsIL3JdD82nNb9DTj#9c4Ql>4pC|;68e9s|^Tzur~OnNJ6u7Tiw%8Zy<^mUxfvo*t&~8;()0j|1wJvhNCmg5%IB-q6wdI z^+hpRa^okq9^U!J>RErpD3~nrXf|G*eJt;Od%lMY{&20=>lU^~NGh3)pd|C``;BiV zjpCa1aXIiq^I*4^Rp3|)_O|j4pl^1;IstHf-wVz90V=k`^m)aF!qL9|EgGrjc3R>@@JmbmqvER@(5FF-*?Nd^0e(`!(L%`C$kU<01x5G+Zum181uScpo;_pYCM2e9@FlG}1;} zRKxb0_hX<~H(S7~6j`KV&-JlBEQSp7QYz^!q-l_$;Lr&+exiBf>k6#{t5bNd6sb^F z|D`o*Mq>B4WBM~UIu^g|PETl1FFek(Nmtw`G5gr97;+&`esKaJU=by5q$Olf`g}Z9 zQsRaYApv3>Nq%%?bgp6B@W1T8AW@*pGj%vv(0^A5FFC?)>7>^_7lE$78qK#5h_yKi zHP+nNRf13B3A<0@yE4u9AHSz5t6TmjTwT`Z)7nJgWlc)By_nDXg-5MI0j2+6so?3@ ziH|`i8w$GSU%p$c;L{6uj;)}Lb4?;uC^!=+jUio|F*Pn&X;w%(Y7v@bAWo0U!%a9( zx;cNrcCB}SxFt! 
z636L?1|$K${q`81nr)}H$)vOGzAYP<$2*8B$V~NHI%?7qA_I(zWX^gvksbO|()PzN z+16<5e}2XFW1H$)_;P*Ao_jPkE0+{t_MT76c<)^4IQUC4?xvuw1>Ub0_~O^7>u#IH zZ-o@-pBf7f^sQ>_mFB z3ALu}!}>VY!!MQKQ{ZfpEEm6`P#x5$ZkI1evXYD-8h}QM%RZjRAgfowLMeVS>ZCu?Bx1i6|6Qd6W8Aq&wBl@rzhJU#{0Ji)oXJy zwXia*i;u73&w9AvrpVg9L*yCDtN*U?0SsuYoSG47K!N9nW#Zi`mA@gHTpiky#=UI=O*i#5T87tu<# zko58k$_%leHa#0BDHF&^X?RbOczI)nRVDIGv`{yxhqI5y5H`#>sc0ZsV;wCM_ZXfb zoo0^YdxwTiz_9V_^l{7dueF;nZGX1COuQut2#G%jKQjdR8MgzuB=r30ta$u(ydJ+D z+Wtjlc+uDl@A`QPdUmIb`{!@rS*mZxsr zvI06$6ZF=toCLou<)KVgH**9=K+I+Bd}%d8Pi}7IyJNFIsh$yiv|=G4{PO3p80ziX z-!fC1k&EJaJ2~6WOEYeHb~W5>pxy8UY5^SW;Iq*%tf{9 zuLsm!@h^G1Si9QJ85G>mJ3eD`C{{+OnnoHAlbk8H!j8R?))jV<$JEjz;ICt6GtK2t z`h{K_r*hg@1XXrs%Uy?wIQ5p%z*a9cRr^rej4kUFa)&KpcHv0N3R1l#9mDd?86B`H z?14oR=V=OHox|mv4n$Dw5p6guc#nC!#gd*M98fj5evXyqcR1ZMg)B%m)7nyu{Y-u$ z$7lvTgspqZWh82bcv3rNmS)kVR)~hKa73?iimM90P-cYJk;a%$&i(v+wf)4?10STi zYwzB@o2TenZuGD_n%^1hoDbd(j^_uv^Orv?0KWfb(9r^Dp`6u)nHjIMm5^77vWxk? z8OW{Tr69IkJI>gnqa3GKz1*^k`|?Q%Qx%FDR(BF!TfHoWqU;R0nApV?rfK!!J%W~w zed-{>+8{@&woMWayQEeZlwyxNazsftd;VXsIjHsfGn;fp^*Y~SPkue0>a#zIEt3aG zQKJhCLi1IAX|5{Y=uwh@av>RxScH#gb!P?1RW?71{1FBA1RYM(^ zYAsm&v6XDJ;!E5UZcFdO@B);Dtb(Ur+7oykP^xwG_Hf^dIgH-QyP|WmRA=6@VrU}8 zV6F-*aaxp9>iZje=afEYWKe#SL7ze7c9pzg6-V7EhDn{<QxQFf8REV0{c`w@9l z#17?43|>;-N`5JIY`17_Ik%Y)2MBAl29hexr`Eh&u08q=y2IYcf=yH1x<$EGs3(_w z@766tIMdK|t0?oXKIYgAxt8jrI>=4d|7-7*mg?LlJFNfO-nK(ljWqgyC~8B1c?PU5 z<$O}D^aw)sGFFWTD~jHJ;7BXo$L(Ks6Kk|s_jW*5Ut?78O_gpLo>7)iB+3|JwC3S} z>`o|xGQ1j9@4!Q+WfZS&nHADQF>l?RtD8?g`^YOP8}*j4F+ELiX4G{Wn_zGYDy*<} z-F+WYS1KSJ$nf3OsL*b}6$!9Xr)#1_chF|6bNb zSK#=N2A!mvb0t8-vm;uvaWhYM)dRh56*s5iVWZnRa57f96vBPUTPaT%iS|Stke0iDlfVbyw?7x3;IP>?5vE@G-(&m5-TzP@R_p0He z!)Cm{6o@Hw1A{ns7k_nP!7#*|L>;N<6j!45R5HS?TQAKSnz1$pXusv#cngB++JJ-< z!Lx-}Dgq3^i;FO&XR<=c&owUxE4jkOK`SzF!OT3uaT zSzY0i)vdL)Yj6GMtFPi0{qvu1U+SL&{=0AKbpZ zG`5Ykt(C2nHCDO8zx-1FZ1Kn13d`N{ZEUexZO4B2!+vZVj4^o|w#4#3UE8o{w84LS zn=}Md5fZOSZd-|iE8s;K22xm&m#8TOBZ3p=tCRq+>s4mOq!V>aylT5lt!9bUgbTF2qS8&c-x6;^IDz_Xz?Z7TSGg*k=TWa82 zw-tr?m;5z02Lp6wW<2%P*80nhO${<*aFo_DzzZ9+JAfXf=}C z(rhkFhX|u>YoD&(a_5S;IqPTzj&=MT4Nitj^ZP%}>6c}G!D)Fiyv;92#jjC7RATmn z4G4tI=2G)H!s#hHZknT4lalDxJy)`WNSI3{dtGaC*^`ewlF3vBD|BJbZ2OZlLR?tl zMXYx4{_C&ZBMZAAMM% z`7*HMwe$>mciL_$> z_tTC##`u{>z5GAtsFyy&QQHOkk;Y=Cb~nJj1#xk)t-g?`pf$=Nv0049oc_CPgexf9 z0G@AJbvK4ME$gLQ2v-n6Gg;Qu&3Rf7K(^;?U1ln z{lHr-I+9rnHDv{V?(MF8+IAbe9n4kp$NT#4{t0p9sI%03IU1a^burOYiP<9erqC)u z-g~}U^fWx-4h>CR+BPBDi_bbBMx49Kv{dUGTQw}MkX)hXMT)RnPstAPNiLbh3eYqr znVfTS#c}KIS#*Q31QVaCe}=Jl@AmCxUrjgfH~>|SSRh*2TwPyBfyU@qLA%kJVW2`S zhf#vI8BlKDE?Q)qs#UjCSexqJozFYfy*uSp`};9hiNg7uR^7c5)~kAZX1OsQ@#f8H!Jk=VUJQ28w5tE3mp1NRGNdBW7L$@_ zDT>5qSpuePDbv&#X^*Bn$y0VSuzH9cO@bFz61}`O&CJEBkc-pS}3v*{0kcYI=qP}@#Rl(0U|2|<=kh3;>tT`=UPSX^<|dZ);h!61E~ao&ry<&U|D@U-9)k4rCzdZsT`07a zV<S;#u-`b3Ms|7jLmdhmwW88ovPi7h^m7?*=5?Prd?Yd0Kzp9H4@t-{Vh*=ktK) zki(85Yz13NxtTTUd80_CP8=Z?LthWs=rg*yh@Q=M5>w9AXA47W_x=RSFpy-k&v@>w zqx~3!@6$x`xjbPn20!A9c33m#$=;M((Aq`tJhe$wbsGq>&WdidG)q$(8g_X*wZgWI z>*VALJEw*Ynds)u=-yu-fES1)T2)gtXWM zYr3}=FvI^rL~gqB1#G-w4MWJ~NQ8hjg*rILw+s9F07d7<$x|R-iZ#6BYOhb}BrRt! 
zzVM-3qq2~kpFn!{z@N6k$~^L0@c&I%V(f#)KR zc7kxbFp-aRn4(h5FUVfhgF5e)U$}?ExjJ zjWAs~H+ZK`_x+u-+URUvB9SHN`uQn>jG%~D1S(}N=x#6>J`Z9%HK?QD_B$OhqaBMO z8O032KxD*Lf=`^YcL4N2ilbBU6cPsC=#lA~oj1)sB-lsZ_oY8-BbxYKnMmqGn*bps zgb@~Jrba;!rZ>h5-cLHQPp`aEoY)%(>fiCmbYO7H;*=(_ zI}S$_!{$)Bab=~thezpCPwOsild+`ExT`YRQrqD;wm;praB_(NrQ%j<_#w0-?c2hY zKnq%3qa3>jsvlQtS0tZ^8oZrJl7*iW6bHknzxWh^Whcei#E20Jh7#^3PIW@d6yxQy zXNG@`6RH2`X%oAbIMraY>rrW2wm2O8^dm6Aans^?xG68Phdg!`LI{Ll_JCP>r$MFkRZ@R-$PriC$Gy4NIH?eO;{5Aj77k%j}9+BV5Q0J zLd%7A&tHH&6i>FO6@`F#6U3HZK{{1exL11*7pG@c@8wp?{Ho!ce}fY5oL;E9Q19iF zn|af0MEph3cec|oRjNi0--hL3@>ys|E?Eb}uiNUvhG@{Q8lN?AgiQ(@3~)boozNp)oBOd_?yO;m7HG%>D@HPX?HJr zgxF7R2kmziE}BxK@dp5>2$|89(dlsG0*yH?*9E|>x=sRg9=zgVmIZbls7(M9t8DQh z%315@75>x<$)FZXMu$0^j^Y%-Sw`_L!YL@yH1JighUWkqEwsL;$s=K4O1inN66QxR zjG^5PqHS)ZC5ugg6Ukw;IDdEkjUsSRX9iO&CNyax+NrxAiO3V5p{Ak_t+QRBS zjUMYHK``U8-(8)kg-O&}Qm71B`NSwc$Tdj_-Y9LMnJC@x?9P)6sbM)I;IzefpLuv| z)To|7Gov%9rfa-lVhe08ElC4(Z&I(@%~!N1T-8Z3NSSyvq5l>fVGm}%b3C2nl}sL^ z$!m1zu{A^!geh~MR=a?aUBl+1(eD=PG24YGZLu|9T{ssa%oicid-lc-s zu#p|1xj1EoUTmL;I{{@*VB!(61tq7?n`z#R{LiE zXbNwmss?zWLy^gwM3fP+ia(hak`=FH(Y%TgdD)Ee-G`V(M&N;6JjC_q6Jd|Qo}lkB zG|4_^u>7=QlRz+JZ72QRD-=|VGYp^#P9}T^L~;feimA{;9vg=pcV5BV9_{2}8Pd`e z8#AhfInII#MoV!a0Ct_{LW1RA?*xvL!h}A6f?E2~M8_4_j;nLmOsz6CWeYxmM?_9v zkU|kQ+hm`z+tLthQ~FN6I8(&_l=P)-a|;Ao!*(>})EIjXMfrg2JQKEp|6{zBPuk8F z5-*3BlYQxzMwh3|X?#M?2VLg2OfIgd71a8wiGo^KVq4K{<=nMHePeTXqnKEY+R-~# z{m`D$g3%bG>SIhNTPi7|_5C|MvFLZpW^A0$!*<;<_y1kNzHW`V%0J+lutNzS#N$IjO$YW~GNONz3Ot%Y$BnjeG+p57 zo>r<%8+KBpflV=GEObMuF~q+W?cY7HSN4eF@l20Vd*QjQouLCtzmL7u&rV?oD-%aI z3nKQoq{s{TKvgU zcW%K!`*RrF`QMusu4W{%iPJ^to>(UW$hfubbE$E9NH!1IZcTiCI+25XYwiv4buE3k zdug$##y;x6snp;~C|J?`;P7Y`(!(6XjQU)|pSTI9CvzBrVCmf((CyD>qJrD_7jX{Z zzQyss;sQcFwaqq~4MfQj=!e50wn0p-!*{F%(kG#?drm#Z=7@x(YbQ_V`3oMkbl0WH z*Aw9+2#Hd%`0)?}PZ3H~03{nDNx3o$QJ9s{(rgP83hqw9X=5yD5CA$iTf>?-t}`rO zWMvVsiK)uK5De94>zS1AGQnI++3yBZW9v#kv0CydqCR zNeGXnL#T!Y#|#DNy{G9H0{tHYl91sUpbQabau z7Z+y_78Z7ld(*V`bbe%gV1f0)aQ2d_IbR&^>oBOl3ElgGI%z7$bxLBy)q0k^MB` z*nD&1$}yZhZ^&INv4CvobSenaeN1m<_#_GS&QMC(8Bn2CHXj@sg1+Gw zE|?n?G?H$Lm3Qf$y*_%gVfYQH?Ggfm^5>2Y5egI>2T~v=+6_LVu7ME&7>cS))L?hE zV(4J6bWd-vQgA-vx%pM#@s9BlZfrW^;I;eVW7t?|2}!K$p8ntxhbawpr0=pb?KYoY zKJ3IJ3>}g#ETSdsHZobiW6YIiC)4R+DDHNRUGNs=aXQ8Ax-?-y=g#pHTP$nj!v!w@ z^l(AhVvlEjH{l0oq3T&|?-=LFUEALuA5D1pXMVJ}xOnf*o$7b|d;i{D`*(5K|JFzM z?-Innw7htK>F&LIi}!C=i%UFy|992m&z$d*ULz!KaL&^9@ZwE-k;nG;r#!$;`0>r@ z-~S;$4=s`Kkk$=NOh|=t!>~>UzZXiNHZ!GZ?$0krMOQ2E{oT2!;W_d(k$D$Zx5YdU z$ts^D0ecmd-HUxaEZEIsX7?g4M$%*$b&R}8F|3*^#f!i1_aDZX+X_YCK4<_Ta$4uZ zh4uq^+UP#5K#{U?aPe_!MmW6K9-jPkcz$}Khv@~k`g;A<*7mEFt#2MK6wlYi%N`b= z#wf%p{xCRD4TKtkgwuUF``CB*OGYx$3Oy3hDh;JqgI77oAVTfk@_tEUx2i`0DLB=U$I&xMA;$(_Sr{Go3gEhfKa?!3v zKc!Hh8xd>ODb+VpB&HGd99&-OtC)Z;kNfG#JEDC4L&#-NH*@TKf4w-r`l&A(4Thft z8hN3U>J{7@axa#RqkLZyk^&j)$$2}^$Hu+^6-oKa!WWE}RtfbJ3_&;KX7O-Of8R8D z-^xIzlHN_}3)kJd6HA(WG&V$(;I8Kx(uSq|x>W-kj%a|0`hWee?Pb{4^DZdN>zTh5 z!y#5+5-hh`e@Wx#b@SR~ZJ-wG1)lmz?Y=Dgn+i5L{BP!mpg!`|(!|5DZyG!BrzhFiMZ_{Ouy`etqx zyXj|3cVpcA{dW4kF-o!A<;EAfb?<3RZsy7~A|DZzqK_KSHvT9^yx)Yt7hh=5AIXZy zAoM?L91*080f@OG%L9{6a|RL1I9KcGR{b8!Ta$bFk@>fcXPYBg=6%z3WsD$UO>V;0 zIfW92y?=XNlRom`or#k*Km0;c0@z8xvuJ*%Tabf|!>Q5=vraoo4a+|7kSqjkdk>wp zxJ-UB0uJHfy|jR4qbMw(kE&U9aB3a~C(2I>B?4M%7v<#~kWBQ471VF-B2y5(te^1@ z8j{#c5A>{sib9O4A55#Lu6ry}p7eTZLJ<9z5>louwPkY3`Yw2!(+NnTcQ2=!B~IqNSX0Igndb5`T|dFW|7Hp5ZC zyA6)gWYz(W{!u!t_RICsik+qSQ{Pt~Vlx?<^~}wEeGHn7-AsX2v84K`SX8~E`l~T; zPaB}#)z|=C`%1gCBUASe*Mt2}6E{okRt`riY=$vV@rh$Sj2azU8jZ|K{UmiUYxR!% zPS$zpX#P2#n_2^k>Rx8>v@%7Z-pC@0HH&9=E)S0``WVMd1^JiMSumBKRP-%=7gt{> 
z^Qg<2nQ~ay8>`3i6+Q-&MIOz@tKJE@J>TT6Osx@;O7{3$R;*~|Y`=|fCXF(oO>(_+;F=+e;hXw?+bZq9eWAz`YzJ;Z^i8!JYTL%pg|QkWOSq->K~i)5=}MN%Z#EQ)jkb&iw%@!T z1I4VWq17+Zc1443#>13)>nuAsBSWU09dlAEZvnaMGhfSN)gPsO+Rk9fH|E z-gHX?GGs!7^1$spn=iU9Jp1@nPTrXO;siR-!i}_q3`#>EVUQtSQ0#^cApv3>Nq%%) zbgp6A@W1T8AW@*pGZh_P(0?f=jG-zK4*Oj6x&CT2-dA{#23sj;2Xs*rZ%tRr8UWFJnC%H4(( zlWxvmkbxALCc#EjIN1*J<6J6!C-s%;4#$2<9=0TJCLB7(SF3?cLdRR}wkTN4;_}T? zcM5iP+-<$!(YL=E$wHUhb2hFoE*dv8;7k=z#g7Y(cLXKlCI+B(+ zPDjLhsn^ned+eT=X{R>Hbe!!L2R0s2SHC9XRkLUhO&o`Nw100iinuCGc0p6q#mOMe zyKcU)H2ZkzlU{&CB}wPethN7sj628&z0wkI&py80{lY{(X0Wz%IJ)9FU}8%L&?Z;Q zU*?OZ!!Ynh%-4;tEb^Sh=8A3{HrGOimr^?-3>TEU+&x|4P}j)En#aPwE!=I)P^ z!WyAILn$8H9)3!dB+k(V;}v%j&bDu-w)oMDWEQxt4c7a!aF~eG5YW8}%?6sv$ExZj zLK;*|ii+3mQCo^pb%SVcvmtWnCT*Ze&a>g}<14;GS-`6;ADTBENzh`l@B6OwoI`E<$d9G#-=~-)GgH_uH&A5S=u#-bH$|ME8N9o#WSneuLVXV zm;j(6vvzaQ4&p|Q7$I;7w4v*?q?1JA6U1Ly9Ar~})ie4rNoG3cG)?IVXIj$aOGUG@ zHmzHBb6c6ksVnkTZEq~n)eKGwB)FQOA&dQE{?pCLZ4XUhlmgv}iM? zFm4-VNfg%yor&FAC5%Fw4yqN`lO9BruOGoaV)i) zU$VOBQJq#>qE~a}>98>*D_`26oB7&9+k0`1-8Rv(%BI)dHoLr?jI+>2B3@1&%`VYfXn!A#l(@bI zEE4!RkcQdT?w>LRF41|@CxPuXyj_7<>6gOv2<7j~5=gD>on(}_)@*Xiz7z31ayFczob)biycpuS81Y+_}AJ^7@0rYUMAj>1cbODiJuv+ zG#Qu~=+g~%7Peg@dH9d7+5)?NF=XK0KU))QR*3JB`U$lTaN|Fu;BV1N^tX$nps(L9 z4nUZAab#h9Nk3J*w@H-@JWm4R$M05pF-2xvk?*9}(!KiixlWpK(na}==aiS^H=bj> zw}a;x({E^dlUPmz+eX`0WjN`zbS?hb*iBmBq)YT0xA~3Rgsj@)Hc6DtWY*=j1sBhF z*#E|goRY`LuJk|r8&A2U>8xd%);J2wUPhWk{04 zS@jOBHtRFufL1IxgJ0+}jit$}VATE=TDw~+tZbciF~E9Cw^yi68P#S~XTqOX?3C4J zl>T6rN3)jJ?_miciA_dHo}e2#l{fVK-b|#nRlU{zV=LbhA6T<$oP1hHJxtViVpcsi zj6tg2TK}>71Zs}dli8d{jkzd+^|kWGF_-ZrZx?GXtdn5vzg{{$fKsZBK_*s`A|qwdonzC`uue7lEV^AtfopTzokNn zTLz0eYzeaqM_QI*pal`O{Nej*1;MkJy@xu@tb;#Y9b*h#W93-h$BZt8#&+6bPl zDiA^`3aa>`WRnXro7Lm}iRbD4vQF<_up*(GvQroHeKU|xEyB&|RWG*;FJ3+=Vd@=f z?j*dn!V?M#*%@*%v5PBS(#phmC~LdOyPjv46ukfm_P8TQl)ks;{}n~a`pes-EA&Ah zzQdmUdOj6xdotKLAG{qLlaNP+fD4tStny29RryA5d2wm!X4>upPRp!t; zIcgL*EYvjI7pdfccaP}v_TWJUm7&|jfl_jSkV&o zRBHYncaQdGZx8nYBZy0%R5f-1nWJQaRTZ&Yc4XBGI4x=u^!*JQ%v-}g-rQu+XArqv zWl^Y7LO8`RsZ(v>k?xZg=vUT~sm-<@F)(#RO_X%T3$Vlaf3WadwR*AL%el=UDq0S* zQ+mY87SV24{;EAH&AY?E$O3ET!&|qgRRx{ovhUrxW%tg^8Xj}W_1p!ba;Q*5o?ZJ2 zht+@Wozjbi+hm9JU)$Su$SO8Mvv|c0oE%s&g^No$pHzq}qJh08MB~AVIz=B;Xh8RI z`98T86GDS#aKBYyAz6K46jDjI~W`d-UGc4XT$223B4W^ zd5cb`>#5yoX*6RVYG@e8V}~6D z4FON5RQH0`9D;_{ zGLV~jx~t|X1mmB$H)T$hI7Y{Z#FC3@xZij*JP=NS%ioEW7%?@Mbj{yse`LEeq39>7 zxrsLF!WydUI+1p*iyKTpVZJp8e4qFpSlbCj$v4&4C&9r8c&iYIJH+kjI!fGG;Oy!E zLQH3!BYI9L6jp#?JTIsgWzOn{@e0~2+`ml+qR_Ea*$ty?cGeJf*a4A6edx1o+nI#-a8iZY^A?pcazzg4wr?y8E? 
zXSqNT(zX)~3`6<1^vPBKWf1x>X2 z>!NQ%BsBhFYCuvWLKf0hF(JrhVagp`a&sF`_7ZWgOWfHTuo+H(Tl;ilr4|Wc)`Nr5 z@8x#5T~*d+zAElDAK$4eddzDb=7-D8+xY5kRjD66Uqw-w<-5v7y$77v*H)+rZb_yp z_2z%GQ_h4Z?(-tu>v{17x4$4!S<>4Wyckx!8;@>S8f5f#wB>23U-F_aiVitsVC?xg zd}OIfJn=_r32d(a@2^!-fY1n~nsc=ju&hscfG%#EUu)@qM_uP;@9{&wX#uUjkB+qb=8tiD;UHeU2EIBNQ9snDRy(c-llHxHtHEZwf& zys>Asi~i0B+fA;v8;DZAHQzk!Ey?%UJMYBSGn|en?8O9ksyDuvwn`|W(hpQ(3@B}rz5hXm1i|VU%?kAiH-&+S+y`q`*jn zgx>T{Np^#;>C#>^WV;4RT@#L9TU}pSdwoqD#z_|H}urZ!e8)V{L0?Yh{g9uJA9v)IVGNv9`i;w|pC0tXA8xAO5f( z+XiDy-i9r){7=_5>=|wF-`*w-p)Veicuk6)mk6{XP18$px;v8!>y&N++mgs_p$PL; z3dN@lG6EuEiN)7*BaV^cQTh2hjh`zrf_sE}%MXe?qbln%M_6W@Zjwp&C>ZDxk`Bg_ z$k8COAg!WC6p$d_T!1{fpj%Li}!gPo*+P2fWupSk0b5`aW9P9Ww8k`K5=J$V` z(=W^Xf=mBoc$;64ieH;GD%#DeDEWJBwB~bO8`d;Oui+%ot$VH%=O|$=)xUGC$z@MI z_DCjE6|CZfIkW9g&IoZ~$pb;*{pHW9f$NGu`Xa`lgg!PCCF7CwL5L{|SJ3mi3QjqR z{XVaxOoJZT*Kl-q;ZLs6FC-xgKL#;NxR`#JW#nK35IP%rcvl~uUVsCVY=x#gj z!w@`4UlO{AxqV_(Jw~R4Z(%QlUQ3f6p|+20Abs$&j#ho%%#t;Jg55KjS~1~h3vN`O zvhgTC>gCu(TCxB8X-6Gn{LG_X{-1NyOP}GW?SlPCv2jzo8(`mph`QKTAC+imQ2FjvhV@9V$&C&ZDX&Qg4Z9=L2> zOf;1YHFi0LRt!RA27A?WVr|O^4mwE4Yqh&qXq1l=o zfKm$uqLt0n^>tJ*jE)tw8=W~c2ZvFDwi!@v-!7_npGI>gpX%P7W>=42I@P^9danDj)&tzQu)F9g z0j*0+aVhkY!_kAL@|4%BUwk%$_aiBXw@@YyaCrQ|{M5zr%VSFE)-*ueWomX` zooyGYs5hUF4v0Dy&r^F|Nv(GR)V;4zLX1{{&GJk<7x0wcYiQ6vie(}mCUz4-@FE@& z93$HDZ4+8GdG}OFvte|O*^CE)cYI=q&@6e8P{P_fB?MV!Wt1-8h#o@4;))9;7#dU} z>vHxhYY*ujt)G{B*}bhgDU%#tx~zlrG%bo0OtWeh8(;ns&yweV^&|^kyuGB`je7yH zSHmgbzZl!O65TUe2YBmAe(4_4zBxbzMX!4P>F|6W@Emg3Q4Os|N$gwi50B}JUZdV; zNc8O9pU@}_B-u1Go&oA;FGR|Mj{ta|-V)DcgLyId5nr^!`dmFxrbq{guelVFc-{%V zHxb zMP0u_{c7Yu6l_7idiySFW;P7JGLVfhCfWT`hvMbM{?1v$FzkkT==%97f{dVuR|G0$ zF6eGB89onUyo13R=X>!^M-0%9#gM>(Oysl=TZwh|Q8p62hSkja=u|v~EY3H2WV&YO zjkFF4_EFQIr9WyTn)qFrNa{nI03jp74w8T3OM^vTeP`YttrMibl=~GYZF5Q%{q|UgjGTBnw z;W)NG-L`OYi2$WCQ}4hmp&e=87On(Z&;bkO*ga5n%X>)v$?#n57gH;Dkrg@RX*xDV zVA)A=HZfvEkys-fB@C;rNTwJspFJ~*(KwO%kDfLW&FbqQ2o4zs|EU&&F0pCxJlqs- zI7>j!Kpyy$D4-#9H=^hJ)8=XaHI$bh7iIVP1gT7sVR3~t9mcDQs!3;=lmN0rNf`<{PbQvxtTZ3M#NtfeP=rjV>QbGxb;ednZ!b2iCCv0 zy>K>{Rl9eqlN_BXFtbKL$5@ONA+Gjul&W>&+r7M8e%VaP&GPFxMJ77RY3{VIIPbGu5;pZYmNeb1-J86zsMklS&_IRojG8jU{yI7P^eu8dBH8y9HI zak(x4Pw{fGZWR+=@i5B*yAD*ZA87T_fe%s6T0gJwr(OupES3y`0jHxlMY^c*F2X4& z(lqc@uZHIU8!fcHr^zE>V9JV^RtfV{FGiu=4Weysq$P_@ffLCYwK#uw{*5AVP-g~H zEGUR#Mf8Pl-`9F6{WwMDK~UL!%v(sBc~xd}Bk2zrLAZ?RvE>HZp}TMvj) zvaJ*03TKXS&8A1YB~pZ$f}?~CfVQI}6AdJ6w$%$Ued*A0`E_4_&)OTlzO0U0cCgz^ z-LvbgC-jDQB$lIym`Lk?*DimXTf^tDs>oD|{_n44AMKvk7WTP{fAn$>df;t6rlt@& zT{nqVIRb#Y*V^1Xbq3g$wE;Gia(%nsujhf%6{4uBz8)RL<*Ogc=v?-#4=l7APsz9R zO90Ug%HopJ9Edz zg)>S&;a+*sl*YZv=o0rQU##vqx7+W;o!e^V7)TyTaV}uFP+6-iFxRf3=&$4MDYV$> zCuN_`tEcP4SCzOMwAHt2yw5zm1+9Go&5TB01u;&rL2QA|r6p;A?oH}-3y@N@CtTG@ zGSoQnXhQ!jIKm#xUb%QW$19mUM$fcpm1Ck)bR(ED_i3fZ^=N;v`RJOr#d^$k5s*x< z@YRKLA;NqS63xdTo9SQ&tGlz({shoIlyeDEZ?dlMKD@% zn*e_RDIdW&oIYa1_}SnSrT7?>rhvHK>nA`kg_k7TS}ppdx;>#8ZItGROoum9JJs4c zTo6h2g8+v?%9BWBf-a)Qs=Z4Evtc7ULdk1c@-n_ zvKi&O4>603X069@U^xDKBJ2^^6ZAcXCfVln}9vg=pcU~Di9_=cHuGI4aXAYzY8ioB2yJq3#lP8oMb=_{0yLt>J& zY>}`=-L!7NxK+}9`2`BOEs$&wC{vFJsB?tBa|;gIpTpqJ|K7B4H6xKtoGwcD#5xf` z#;xtf>1ZL{#OJ3IImox>-jG{COCRoDigG)bt^=o1rzxQzUF6BqnT7OlszoPob>-HY z<}d`o(z`dH+n>)wH9GMx;vB+#i{pQ#J+;k7!A?h%EP;MF9AX>9)H-~}N+5j_3cKgj zV{DE{NOuV3`3oMkbk{lU>xpm@ghVM>{CJ3grwAn~;HJb`ScSOoY&MaAc3AqbaApu& zsDDXair7QL{tQ*5;}zSsKqkDxqDB-|_r$O=i{Wm= zN5?LZyU#rDz>(zbAuF-Mt}Y$u!R8}VM2gybBzcq!1jIY`r(Af}d|(Vw=@TIQZM+1v zja@#untNLF#bKDkUfoAPoHw|)Cjx|5v##nBvV^x&r&p;%qP8aM1 z9Ab$AGHa>7b?$!^;Le)IYPRZ`&FMfbxk zK|FI!%97H{L8@$m?>^V(bBlBR#d$@Za8`DCe47rTdMb_?3ebB`_bmkaKUDV8t?Io* 
zJrV8q>GG8e>YjHqf;b;&V$%xhO+{5#&iw7g#o2>}g&pJGw2D2QA6XApV0|#0KN()c zS2$(2z^x?AOp{d8h8sxo+F;y96MVXOV_<`Ou>Ozq7~fNxrTpA@mDZzUJ2K`uts8#=3BhOS6c(9&k)zX; z-3c^{=B(TPQ=?4%*XceA zt9%pzp?mINn96Q82aAxAIu7ARt`EmJL_ws>U!QS9bCQ_c)2y<9iH;DZyy_|IyZWH_ZBs&8%W(L_vi0$O3)^ zSCatpDGA@4tQi`oN(2zJT!zB!w)qXq#)-4)o*glR^l8O_xIH{8CE|u)45Y>!GJe(2 zwY-%wlu~vERH&8B2Zx5BZ}^1^=0*k41>F=gRdmmuZ@8=5enV=zgutNuxuZjb0tLr` z6o`ptnmePefe`^13fH@gnlQFv=wPpOPj9eNa6aNWncmbFaeg375@i=TRTfrlb^jPP z7Ft3Q>$;~uxWr*fYa8jiOr_oC)60jQc!Z%t(uGB|gxy9a>vxQ~(stwpFLh5Fio2nP z3*MqU`vc5Xu1Onhb2}8V#j?~qT<`)wR0Se={qfB2{>guET&lLlw&#Oxdz}JvzkR-i zFD@?Lzk9d(o&LS&|1K`yvA_A~4ta@7q~9*yU%Gqm&h7h))#B2wgrBsjYRZ%UR9}7Z`-RKV`NGcO$pW!HD!)Dhx+_-U zaQEUX`#nmqjQp?FcYFTJGu$2z9=sxqvHI~{@BA1m((vY2GuQvWKm6e=Dc+zY1}g0MUAn*{A-Nm@TK0iT)$Oaij!i%xMT2 zIlf!p`sU@gTh+?OU#jm`UcX-1*!s&?*$od9cDKLOOaiY|gGba?`Li0~#n-P_zu}dY zC+pAGxBen7|7?A0e|he*2tK0aKASO#&-&gntjG84r z46WeZyLa;kCztc+0t;Y(PJguUnC0kptPKhNxp?s4Il+a+t(ZCfP+jM!cTmf}vOmNN z{I|dW{{uGazsUKueQ~<&I!JseXUMjq*$4iCC-;`0_~Xl5GtHPtctWyA)s3H2%j*6O z9$=|2L*?n+h_&6Q9zCwUPb;09xh5g=hSnP2=-2cyYmHC&EqxeM)dMNPE`8bD)Pqqx z)%-X;Ty8ENEI&$*#g5~N_{+b9<`@`)gN3v55eTE&Pk&l@z4mWkRbNPS;Zox8vxir= zfD5i^nQ99>ZEAuSBSaD zMW(F(o7dENFoq6!@|yYva%kMg$*ir@AZFP>22N zyQ2XHoVP(zqT|E4eS`82QbZ@qvlB~v zk{x?!)bEGqaR`57rUdlQGC_|Tc2;t+F@@dPYW9`aaDV|0Bpodj$`Z$We zmX4O_N6s;m8PT_p7eytSg2zQiCV&y0kA5E;8XXOXXT7@uJdMU-Ghwn6wbM%uQCO%4 zH|-xPlo|Q~XWb<};YdNV2Kpv>j1!n}C}2_Z(dyCvtwsyild-CAy@(ft@l_>omjM6B z%VY2AeQnAV=6AU03CcnUQZldu)3@jENyL-3#p!E>yjq|1)izSy2n6&2;a^wWM0#=T zJxDZr=G?@Sjl>3O*zv{gFx^+iKN%E{h?OrKl82&`ydE&R*H7M^TwuWrQ%oD-F`bor|ubu@j)5W3x} zH`^Lzc5b%6&wuKX_`}AUwf781zu@HPAzq*XmnUID^$32|K!@cfI=Be)tDyrKyLcML z(ZQ$vd!fT>37|>XP}C3p`(VS`%17|2j+zJcNrrSM?`DMMO@fr&J+UR+5mMVgndQVU2v54TPVekPd| z3Uvr40_!(&*t4AfR^q#?Wme@&y0uvjKEV*rbEk-Ju;8?zWC)+)FFvmw|nIKkScYEpAa2(`hmv92P-GSceH z4Xhhmb2lE}n3D<44F=6Sm*=no(k7`Byo~oWa6)coA{0ORdKTg*Y^njMJfUsM31hjb z#>4GR@ny%R8n5L!uDq$qujfrkO=V*e4dk3oYJ<+E_a_0!EF*zoM&4?~Z^9cISA5l& zmYUWDp{s}x&^fMe%o(}X4E1)NVR`nuYvCqd{wU#`pIBN}sa<^`LpdtdYkZ<2fAvy8 zP6<42sk7notU(j)V0C-Ec7&Dx5dkrxB>P1rF1!*snHdf2Y4j^d7%XgkBQ!4}!;a6y zqp3q--xdt|Yju?-z^A!Dy9n7oZU(LPT=jOg?deBYF>I*q);GOkVyEys6|%=%=i*~haDQY2KEo2`iz>ZwE#tUrF=&6I7o+NG{~ zPhCT0m`xr^1hpdgM(m|0!T?6wM}8I2Msq5^Q4g5Chmnqvnc(%?!q0{Oe7wLw*F>mn zp`s$XtP&E?ZjhmpWI-UoPlt#~#WkO~HbK&J@!BM}HmWor*x=n$>3m3ewn)n}agz}% ztSL^U86%j5`@{;e<3gFa)+?8B^;v7~a?Rm$6*@cEqtv+7SP0R`Q|z&wrdK*0Na}CXiM}xxx@ZMjB`7WQ6zsvGN?a(gsAn*b-~6N>ZDE z=o+G%o@PMQgr_aoP9C%tnm!`fPJTVhfY%TKuQ~1ebTXRWtQUV(su?8Fe>bp=J|gtY%CmbUz|?#*4adIv}($l+SH=gplw`7=@v>>J*#eNlpc9E zwSX&hF2==S; zJE8q;LB1!NKB_-Fvek6Y$C^s|(x4TiE4`kOut48(110c327Op@%aom9AIW?$*%I`* zbqFVONYt;~Nq{5T>-GK6>q{E^>oU-8bYT{nSi2G79B~RSNFgE zJ;ffMdG|=}&caMl)9G5d^@pTSx$!3xW4w(1e!a_2dS8-eSP#EH**d+zXI>}fln0it zd$R1sTe8|c`2J$zT@D*yJgIw9KiND`H})X~n^yk)NrWok)L--XnJj%2#EXn#{!&pZ z6Rbl$he62h+M)1PCR>Z9MaiTHK7waR+)l9&@wURHWuvkhk(Qrbcvwh}xlo_2$51qT z?ir<~-84vi8pcGJ0F0pQ66u9fG?f|U(h|f05Z5KI3m;isL$Z}dB9n-hUFt1-Kl;PM zT&zEA{8Zz*#GX;oMZF~lKr@WP3|m`lsxIGBa1=9`WKP6%id%03&6&6#OLv6bf{ zU?}OougxldQP_X?#9eaQz`{rcrb< z?>ikr*b)7?IDsq)H@-Br>U+XB;wdEBvF{b#@%=(0q+`-fUq#4?zaKlP&xGsU7&$Id z4k>7SnO(`Nin_#NsSF?;Tn;Q~jW$mh8)X5kgsJeme$J|ejC7<(?1qa9#>)X0&DAP! 
[GIT binary patch: base85 payload omitted. The diff header of the next new file (apparently lib/tool_shed/test/test_data/repos/bismark/0/bismark_bowtie2_wrapper.xml, the Bowtie 2 variant of the wrapper added further below) was lost in the garbled data.]
+
+    bisulfite mapper (bowtie2)
+
+
+        SCRIPT_PATH
+        bowtie
+        bowtie2
+
+
+
+    bismark_wrapper.py
+
+    ## Change this to accommodate the number of threads you have available.
+    --num-threads 4
+
+    --bismark_path \$SCRIPT_PATH
+
+    --bowtie2
+
+    ##
+    ## Bismark Genome Preparation, if desired.
+    ##
+
+    ## Handle reference file.
+ #if $refGenomeSource.genomeSource == "history": + --own-file=$refGenomeSource.ownFile + #else: + --indexes-path ${refGenomeSource.index.fields.path} + #end if + + + ## + ## Input parameters + ## + + + #if $singlePaired.sPaired == "single": + --single-paired $singlePaired.input_singles + + #if $singlePaired.input_singles.ext == "fastqillumina": + --phred64-quals + --fastq + #elif $singlePaired.input_singles.ext == "fastqsanger": + --fastq + #elif $singlePaired.input_singles.ext == "fasta": + --fasta + #end if + #else: + --mate-paired + --mate1 $singlePaired.input_mate1 + --mate2 $singlePaired.input_mate2 + + #if $singlePaired.input_mate1.ext == "fastqillumina": + --phred64-quals + --fastq + #elif $singlePaired.input_mate1.ext == "fastqsanger": + --fastq + #elif $singlePaired.input_mate1.ext == "fasta": + --fasta + #end if + + -I $singlePaired.minInsert + -X $singlePaired.maxInsert + #end if + + + ## for now hardcode the value for the required memory per thread in --best mode + --chunkmbs 512 + + + #if $params.settingsType == "custom": + + ## default 20 + --seed-len $params.seed_len + ## default 0 + --seed-mismatches $params.seed_mismatches + ## default 15 + --seed-extention-attempts $params.seed_extention_attempts + ## default 2 + --max-reseed $params.max_reseed + + ## default 70 + ##--maqerr $params.maqerr + + ## default unlimited + #if $params.qupto != 0: + --qupto $params.qupto + #end if + #if $params.skip_reads != 0: + --skip-reads $params.skip_reads + #end if + + ## if set, disable the original behaviour + $params.no_mixed + ## if set, disable the original behaviour + $params.no_discordant + + + ###if str($params.isReportOutput) == "yes": + ## --output-report-file $report_file + ###end if + + #end if + + ## + ## Output parameters. + ## + --output $output + $suppress_header + + #if str( $singlePaired.sPaired ) == "single" + #if $output_unmapped_reads_l + --output-unmapped-reads $output_unmapped_reads_l + #end if + #if $output_suppressed_reads_l + --output-suppressed-reads $output_suppressed_reads_l + #end if + #else + #if $output_unmapped_reads_l and $output_unmapped_reads_r + --output-unmapped-reads-l $output_unmapped_reads_l + --output-unmapped-reads-r $output_unmapped_reads_r + #end if + #if $output_suppressed_reads_l and $output_suppressed_reads_l + --output-suppressed-reads-l $output_suppressed_reads_l + --output-suppressed-reads-r $output_suppressed_reads_r + #end if + #end if + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + (( + params['settingsType'] == "custom" and + params['suppressed_read_file'] is True + )) + + + + + + + + + + + + + + + + + singlePaired['sPaired'] == "paired" + params['settingsType'] == "custom" + params['supressed_read_file'] is True + + + + + + + + + + + + + + + + + + (( + params['settingsType'] == "custom" and + params['unmapped_read_file'] is True + )) + + + + + + + + + + + + + + + + singlePaired['sPaired'] == "paired" + params['settingsType'] == "custom" + params['unmapped_read_file'] is True + + + + + + + + + + + + + + + + + + + + + + +**What it does** + +Bismark_ is a bisulfite mapper and methylation caller. Bismark takes in FastA or FastQ files and aligns the +reads to a specified bisulfite genome. Sequence reads are transformed into a bisulfite converted forward strand +version (C->T conversion) or into a bisulfite treated reverse strand (G->A conversion of the forward strand). 
+Each of these reads is then aligned to a bisulfite treated forward strand index of a reference genome
+(C->T converted) and to a bisulfite treated reverse strand index of the genome (G->A conversion of the
+forward strand; by doing this, alignments will produce the same positions). These 4 instances of Bowtie (1 or 2)
+are run in parallel. The sequence file(s) are then read in again sequence by sequence to pull out the original
+sequence from the genome and determine if there were any protected C's present or not.
+
+.. _Bismark: http://www.bioinformatics.babraham.ac.uk/projects/bismark/
+
+As of version 0.7.0 Bismark will only run 2 alignment threads for OT and OB in parallel; the 4 strand mode can be
+re-enabled by using non_directional mode.
+
+It is developed by Krueger F and Andrews SR at the Babraham Institute. Krueger F, Andrews SR. (2011) Bismark: a flexible aligner and methylation caller for Bisulfite-Seq applications. Bioinformatics, 27, 1571-2.
+
+------
+
+**Know what you are doing**
+
+.. class:: warningmark
+
+There is no such thing (yet) as an automated gearshift in short read mapping. It is all like stick-shift driving in San Francisco. In other words, running this tool with default parameters will probably not give you meaningful results. A way to deal with this is to **understand** the parameters by carefully reading the `documentation`__ and experimenting. Fortunately, Galaxy makes experimenting easy.
+
+ .. __: http://www.bioinformatics.babraham.ac.uk/projects/bismark/
+
+------
+
+**Input formats**
+
+Bismark accepts files in either Sanger FASTQ format (galaxy type *fastqsanger*), Illumina FASTQ format (galaxy type *fastqillumina*) or FASTA format (galaxy type *fasta*). Use the FASTQ Groomer to prepare your files.
+
+------
+
+**A Note on Built-in Reference Genomes**
+
+The default variant for all genomes is "Full", defined as all primary chromosomes (or scaffolds/contigs) including mitochondrial plus associated unmapped, plasmid, and other segments. When only one version of a genome is available in this tool, it represents the default "Full" variant. Some genomes will have more than one variant available. The "Canonical Male" or sometimes simply "Canonical" variant contains the primary chromosomes for a genome. For example a human "Canonical" variant contains chr1-chr22, chrX, chrY, and chrM. The "Canonical Female" variant contains the primary chromosomes excluding chrY.
+
+------
+
+The final output of Bismark is in SAM format by default.
+
+**Outputs**
+
+The output is in SAM format, and has the following columns::
+
+  Column       Description
+  --------     --------------------------------------------------------
+  1  QNAME     seq-ID
+  2  FLAG      this flag tries to take the strand a bisulfite read
+               originated from into account
+               (this is different from ordinary DNA alignment flags!)
+  3  RNAME     chromosome
+  4  POS       start position
+  5  MAPQ      always 255
+  6  CIGAR     extended CIGAR string
+  7  MRNM      Mate Reference sequence NaMe ('=' if same as RNAME)
+  8  MPOS      1-based Mate POSition
+  9  ISIZE     Inferred insert SIZE
+  10 SEQ       query SEQuence on the same strand as the reference
+  11 QUAL      Phred33 scale
+  12 NM-tag    edit distance to the reference
+  13 XX-tag    base-by-base mismatches to the reference.
+               This does not include indels.
+  14 XM-tag    methylation call string
+  15 XR-tag    read conversion state for the alignment
+  16 XG-tag    genome conversion state for the alignment
+
+
+Each read of paired-end alignments is written out in a separate line in the above format.
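+
+As a rough aid (a minimal sketch in Python, not part of Bismark or this wrapper; the function name is made
+up for illustration and it assumes a plain tab-separated SAM line carrying the optional tags listed above),
+the Bismark-specific tags can be pulled out of an alignment line like this::
+
+    # Collect the optional TAG:TYPE:VALUE fields of one SAM alignment line.
+    def bismark_tags(sam_line):
+        fields = sam_line.rstrip("\n").split("\t")
+        tags = {}
+        for field in fields[11:]:  # optional fields follow the 11 mandatory columns
+            name, _type, value = field.split(":", 2)
+            tags[name] = value
+        return tags
+
+    # tags.get("XM") then holds the methylation call string and tags.get("XG")
+    # the genome conversion state of the alignment.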
+
+
+It looks like this (scroll sideways to see the entire example)::
+
+  QNAME FLAG RNAME POS MAPQ CIGAR MRNM MPOS ISIZE SEQ QUAL OPT
+  HWI-EAS91_1_30788AAXX:1:1:1761:343 4 * 0 0 * * 0 0 AAAAAAANNAAAAAAAAAAAAAAAAAAAAAAAAAAACNNANNGAGTNGNNNNNNNGCTTCCCACAGNNCTGG hhhhhhh;;hhhhhhhhhhh^hOhhhhghhhfhhhgh;;h;;hhhh;h;;;;;;;hhhhhhghhhh;;Phhh
+  HWI-EAS91_1_30788AAXX:1:1:1578:331 4 * 0 0 * * 0 0 GTATAGANNAATAAGAAAAAAAAAAATGAAGACTTTCNNANNTCTGNANNNNNNNTCTTTTTTCAGNNGTAG hhhhhhh;;hhhhhhhhhhhhhhhhhhhhhhhhhhhh;;h;;hhhh;h;;;;;;;hhhhhhhhhhh;;hhVh
+
+-------
+
+**Bismark settings**
+
+All of the options have a default value. You can change any of them. If any Bismark function is missing please contact the tool author or your Galaxy admin.
+
+------
+
+**Bismark parameter list**
+
+This is an exhaustive list of Bismark options:
+
+------
+
+**OPTIONS**
+
+
+Input::
+
+  --singles              A comma- or space-separated list of files containing the reads to be aligned (e.g.
+                         lane1.fq,lane2.fq lane3.fq). Reads may be a mix of different lengths. Bismark will
+                         produce one mapping result and one report file per input file.
+
+  -1 mates1              Comma-separated list of files containing the #1 mates (filename usually includes
+                         "_1"), e.g. flyA_1.fq,flyB_1.fq). Sequences specified with this option must
+                         correspond file-for-file and read-for-read with those specified in mates2.
+                         Reads may be a mix of different lengths. Bismark will produce one mapping result
+                         and one report file per paired-end input file pair.
+
+  -2 mates2              Comma-separated list of files containing the #2 mates (filename usually includes
+                         "_2"), e.g. flyA_1.fq,flyB_1.fq). Sequences specified with this option must
+                         correspond file-for-file and read-for-read with those specified in mates1.
+                         Reads may be a mix of different lengths.
+
+  -q/--fastq             The query input files (specified as mate1,mate2 or singles) are FASTQ
+                         files (usually having extension .fq or .fastq). This is the default. See also
+                         --solexa-quals.
+
+  -f/--fasta             The query input files (specified as mate1,mate2 or singles) are FASTA
+                         files (usually having extension .fa, .mfa, .fna or similar). All quality values
+                         are assumed to be 40 on the Phred scale.
+
+  -s/--skip INT          Skip (i.e. do not align) the first INT reads or read pairs from the input.
+
+  -u/--upto INT          Only aligns the first INT reads or read pairs from the input. Default: no limit.
+
+  --phred33-quals        FASTQ qualities are ASCII chars equal to the Phred quality plus 33. Default: on.
+
+  --phred64-quals        FASTQ qualities are ASCII chars equal to the Phred quality plus 64. Default: off.
+
+  --solexa-quals         Convert FASTQ qualities from solexa-scaled (which can be negative) to phred-scaled
+                         (which can't). The formula for conversion is:
+                         phred-qual = 10 * log(1 + 10 ** (solexa-qual/10.0)) / log(10). Used with -q. This
+                         is usually the right option for use with (unconverted) reads emitted by the GA
+                         Pipeline versions prior to 1.3. Works only for Bowtie 1. Default: off.
+
+  --solexa1.3-quals      Same as --phred64-quals. This is usually the right option for use with (unconverted)
+                         reads emitted by GA Pipeline version 1.3 or later. Default: off.
+
+
+Alignment::
+
+  -n/--seedmms INT       The maximum number of mismatches permitted in the "seed", i.e. the first L base pairs
+                         of the read (where L is set with -l/--seedlen). This may be 0, 1, 2 or 3 and the
+                         default is 1. This option is only available for Bowtie 1 (for Bowtie 2 see -N).
+
+  -l/--seedlen           The "seed length"; i.e., the number of bases of the high quality end of the read to
+                         which the -n ceiling applies. The default is 28. Bowtie (and thus Bismark) is faster for
+                         larger values of -l. This option is only available for Bowtie 1 (for Bowtie 2 see -L).
+
+  -e/--maqerr INT        Maximum permitted total of quality values at all mismatched read positions throughout
+                         the entire alignment, not just in the "seed". The default is 70. Like Maq, bowtie rounds
+                         quality values to the nearest 10 and saturates at 30. This value is not relevant for
+                         Bowtie 2.
+
+  --chunkmbs INT         The number of megabytes of memory a given thread is given to store path descriptors in
+                         --best mode. Best-first search must keep track of many paths at once to ensure it is
+                         always extending the path with the lowest cumulative cost. Bowtie tries to minimize the
+                         memory impact of the descriptors, but they can still grow very large in some cases. If
+                         you receive an error message saying that chunk memory has been exhausted in --best mode,
+                         try adjusting this parameter up to dedicate more memory to the descriptors. This value
+                         is not relevant for Bowtie 2. Default: 512.
+
+  -I/--minins INT        The minimum insert size for valid paired-end alignments. E.g. if -I 60 is specified and
+                         a paired-end alignment consists of two 20-bp alignments in the appropriate orientation
+                         with a 20-bp gap between them, that alignment is considered valid (as long as -X is also
+                         satisfied). A 19-bp gap would not be valid in that case. Default: 0.
+
+  -X/--maxins INT        The maximum insert size for valid paired-end alignments. E.g. if -X 100 is specified and
+                         a paired-end alignment consists of two 20-bp alignments in the proper orientation with a
+                         60-bp gap between them, that alignment is considered valid (as long as -I is also satisfied).
+                         A 61-bp gap would not be valid in that case. Default: 500.
+
+
+
+Output::
+
+  --non_directional      The sequencing library was constructed in a non strand-specific manner; alignments to all four
+                         bisulfite strands will be reported. Default: OFF.
+
+                         (The current Illumina protocol for BS-Seq is directional, in which case the strands complementary
+                         to the original strands are merely theoretical and should not exist in reality. Specifying directional
+                         alignments (which is the default) will only run 2 alignment threads to the original top (OT)
+                         or bottom (OB) strands in parallel and report these alignments. This is the recommended option
+                         for strand-specific libraries).
+
+  --sam-no-hd            Suppress SAM header lines (starting with @). This might be useful when very large input files are
+                         split up into several smaller files to run concurrently and the output files are to be merged.
+
+  --quiet                Print nothing besides alignments.
+
+  --vanilla              Performs bisulfite mapping with Bowtie 1 and prints the 'old' output (as in Bismark 0.5.X) instead
+                         of SAM format output.
+
+  -un/--unmapped         Write all reads that could not be aligned to a file in the output directory. Written reads will
+                         appear as they did in the input, without any translation of quality values that may have
+                         taken place within Bowtie or Bismark. Paired-end reads will be written to two parallel files with _1
+                         and _2 inserted in their filenames, i.e. _unmapped_reads_1.txt and _unmapped_reads_2.txt. Reads
+                         with more than one valid alignment with the same number of lowest mismatches (ambiguous mapping)
+                         are also written to _unmapped_reads.txt unless the option --ambiguous is specified as well.
+
+  --ambiguous            Write all reads which produce more than one valid alignment with the same number of lowest
+                         mismatches, or other reads that fail to align uniquely, to a file in the output directory.
+                         Written reads will appear as they did in the input, without any of the translation of quality
+                         values that may have taken place within Bowtie or Bismark. Paired-end reads will be written to two
+                         parallel files with _1 and _2 inserted in their filenames, i.e. _ambiguous_reads_1.txt and
+                         _ambiguous_reads_2.txt. These reads are not written to the file specified with --un.
+
+  -o/--output_dir DIR    Write all output files into this directory. By default the output files will be written into
+                         the same folder as the input file(s). If the specified folder does not exist, Bismark will attempt
+                         to create it first. The path to the output folder can be either relative or absolute.
+
+  --temp_dir DIR         Write temporary files to this directory instead of into the same directory as the input files. If
+                         the specified folder does not exist, Bismark will attempt to create it first. The path to the
+                         temporary folder can be either relative or absolute.
+
+------
+
+Bowtie 2 alignment options::
+
+  -N INT                 Sets the number of mismatches allowed in a seed alignment during multiseed alignment.
+                         Can be set to 0 or 1. Setting this higher makes alignment slower (often much slower)
+                         but increases sensitivity. Default: 0. This option is only available for Bowtie 2 (for
+                         Bowtie 1 see -n).
+
+  -L INT                 Sets the length of the seed substrings to align during multiseed alignment. Smaller values
+                         make alignment slower but more sensitive. Default: the --sensitive preset of Bowtie 2 is
+                         used by default, which sets -L to 20. This option is only available for Bowtie 2 (for
+                         Bowtie 1 see -l).
+
+  --ignore-quals         When calculating a mismatch penalty, always consider the quality value at the mismatched
+                         position to be the highest possible, regardless of the actual value. I.e. input is treated
+                         as though all quality values are high. This is also the default behavior when the input
+                         doesn't specify quality values (e.g. in -f mode). This option is invariable and on by default.
+
+
+Bowtie 2 paired-end options::
+
+  --no-mixed             This option disables Bowtie 2's behavior to try to find alignments for the individual mates if
+                         it cannot find a concordant or discordant alignment for a pair. This option is invariable
+                         and on by default.
+
+  --no-discordant        Normally, Bowtie 2 looks for discordant alignments if it cannot find any concordant alignments.
+                         A discordant alignment is an alignment where both mates align uniquely, but that does not
+                         satisfy the paired-end constraints (--fr/--rf/--ff, -I, -X). This option disables that behavior
+                         and it is on by default.
+
+
+Bowtie 2 effort options::
+
+  -D INT                 Up to INT consecutive seed extension attempts can "fail" before Bowtie 2 moves on, using
+                         the alignments found so far. A seed extension "fails" if it does not yield a new best or a
+                         new second-best alignment. Default: 15.
+
+  -R INT                 INT is the maximum number of times Bowtie 2 will "re-seed" reads with repetitive seeds.
+                         When "re-seeding," Bowtie 2 simply chooses a new set of reads (same length, same number of
+                         mismatches allowed) at different offsets and searches for more alignments. A read is considered
+                         to have repetitive seeds if the total number of seed hits divided by the number of seeds
+                         that aligned at least once is greater than 300. Default: 2.
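+
+For illustration only (a Python sketch of the rule as stated above, not Bowtie 2's actual implementation;
+the function name is invented for this example), the repetitive-seed criterion that -R reacts to amounts to::
+
+    # A read counts as having repetitive seeds when the total number of seed
+    # hits divided by the number of seeds that aligned at least once exceeds 300.
+    def has_repetitive_seeds(total_seed_hits, seeds_aligned_at_least_once, threshold=300.0):
+        if seeds_aligned_at_least_once == 0:
+            return False  # no seed aligned at all, so the ratio is undefined
+        return total_seed_hits / float(seeds_aligned_at_least_once) > threshold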
+
+
+Bowtie 2 Scoring options::
+
+  --score_min "func"     Sets a function governing the minimum alignment score needed for an alignment to be considered
+                         "valid" (i.e. good enough to report). This is a function of read length. For instance, specifying
+                         L,0,-0.2 sets the minimum-score function f to f(x) = 0 + -0.2 * x, where x is the read length.
+                         See also: setting function options at http://bowtie-bio.sourceforge.net/bowtie2. The default is
+                         L,0,-0.2.
+
+
+Bowtie 2 Reporting options::
+
+  --most_valid_alignments INT   This used to be the Bowtie 2 parameter -M. As of Bowtie 2 version 2.0.0 beta7 the option -M is
+                         deprecated. It will be removed in subsequent versions. What used to be called -M mode is still the
+                         default mode, but adjusting the -M setting is deprecated. Use the -D and -R options to adjust the
+                         effort expended to find valid alignments.
+
+                         For reference, this used to be the old (now deprecated) description of -M:
+                         Bowtie 2 searches for at most INT+1 distinct, valid alignments for each read. The search terminates when it
+                         can't find more distinct valid alignments, or when it finds INT+1 distinct alignments, whichever
+                         happens first. Only the best alignment is reported. Information from the other alignments is used to
+                         estimate mapping quality and to set SAM optional fields, such as AS:i and XS:i. Increasing -M makes
+                         Bowtie 2 slower, but increases the likelihood that it will pick the correct alignment for a read that
+                         aligns many places. For reads that have more than INT+1 distinct, valid alignments, Bowtie 2 does not
+                         guarantee that the alignment reported is the best possible in terms of alignment score. -M is
+                         always used and its default value is set to 10.
+
+
+
diff --git a/lib/tool_shed/test/test_data/repos/bismark/0/bismark_bowtie_wrapper.xml b/lib/tool_shed/test/test_data/repos/bismark/0/bismark_bowtie_wrapper.xml
new file mode 100644
index 000000000000..6e4e4def6200
--- /dev/null
+++ b/lib/tool_shed/test/test_data/repos/bismark/0/bismark_bowtie_wrapper.xml
@@ -0,0 +1,614 @@
+
+    bisulfite mapper (bowtie)
+
+
+        SCRIPT_PATH
+        bowtie
+        bowtie2
+
+
+
+    bismark_wrapper.py
+
+    ## Change this to accommodate the number of threads you have available.
+    --num-threads 4
+
+    --bismark_path \$SCRIPT_PATH
+
+    ##
+    ## Bismark Genome Preparation, if desired.
+    ##
+
+    ## Handle reference file.
+ #if $refGenomeSource.genomeSource == "history": + --own-file=$refGenomeSource.ownFile + #else: + --indexes-path ${refGenomeSource.index.fields.path} + #end if + + + ## + ## Input parameters + ## + + + #if $singlePaired.sPaired == "single": + --single-paired $singlePaired.input_singles + + #if $singlePaired.input_singles.ext == "fastqillumina": + --phred64-quals + --fastq + #elif $singlePaired.input_singles.ext == "fastqsanger": + --fastq + #elif $singlePaired.input_singles.ext == "fasta": + --fasta + #end if + #else: + --mate-paired + --mate1 $singlePaired.input_mate1 + --mate2 $singlePaired.input_mate2 + + #if $singlePaired.input_mate1.ext == "fastqillumina": + --phred64-quals + --fastq + #elif $singlePaired.input_mate1.ext == "fastqsanger": + --fastq + #elif $singlePaired.input_mate1.ext == "fasta": + --fasta + #end if + + -I $singlePaired.minInsert + -X $singlePaired.maxInsert + #end if + + + ## for now hardcode the value for the required memory per thread in --best mode + --chunkmbs 512 + + + #if $params.settingsType == "custom": + + ## default 20 + --seed-len $params.seed_len + ## default 0 + --seed-mismatches $params.seed_mismatches + ## default 15 + --seed-extention-attempts $params.seed_extention_attempts + ## default 2 + --max-reseed $params.max_reseed + + ## default 70 + ##--maqerr $params.maqerr + + ## default unlimited + #if $params.qupto != 0: + --qupto $params.qupto + #end if + #if $params.skip_reads != 0: + --skip-reads $params.skip_reads + #end if + + ## if set, disable the original behaviour + $params.no_mixed + ## if set, disable the original behaviour + $params.no_discordant + + + ###if str($params.isReportOutput) == "yes": + ## --output-report-file $report_file + ###end if + + #end if + + ## + ## Output parameters. + ## + --output $output + $suppress_header + + #if str( $singlePaired.sPaired ) == "single" + #if $output_unmapped_reads_l + --output-unmapped-reads $output_unmapped_reads_l + #end if + #if $output_suppressed_reads_l + --output-suppressed-reads $output_suppressed_reads_l + #end if + #else + #if $output_unmapped_reads_l and $output_unmapped_reads_r + --output-unmapped-reads-l $output_unmapped_reads_l + --output-unmapped-reads-r $output_unmapped_reads_r + #end if + #if $output_suppressed_reads_l and $output_suppressed_reads_l + --output-suppressed-reads-l $output_suppressed_reads_l + --output-suppressed-reads-r $output_suppressed_reads_r + #end if + #end if + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + (( + params['settingsType'] == "custom" and + params['suppressed_read_file'] is True + )) + + + + + + + + + + + + + + + + + singlePaired['sPaired'] == "paired" + params['settingsType'] == "custom" + params['supressed_read_file'] is True + + + + + + + + + + + + + + + + + + (( + params['settingsType'] == "custom" and + params['unmapped_read_file'] is True + )) + + + + + + + + + + + + + + + + singlePaired['sPaired'] == "paired" + params['settingsType'] == "custom" + params['unmapped_read_file'] is True + + + + + + + + + + + + + + + + + + + + + + +**What it does** + +Bismark_ is a bisulfite mapper and methylation caller. Bismark takes in FastA or FastQ files and aligns the +reads to a specified bisulfite genome. Sequence reads are transformed into a bisulfite converted forward strand +version (C->T conversion) or into a bisulfite treated reverse strand (G->A conversion of the forward strand). 
+Each of these reads is then aligned to a bisulfite treated forward strand index of a reference genome
+(C->T converted) and to a bisulfite treated reverse strand index of the genome (G->A conversion of the
+forward strand; by doing this, alignments will produce the same positions). These 4 instances of Bowtie (1 or 2)
+are run in parallel. The sequence file(s) are then read in again sequence by sequence to pull out the original
+sequence from the genome and determine if there were any protected C's present or not.
+
+.. _Bismark: http://www.bioinformatics.babraham.ac.uk/projects/bismark/
+
+As of version 0.7.0 Bismark will only run 2 alignment threads for OT and OB in parallel; the 4 strand mode can be
+re-enabled by using non_directional mode.
+
+It is developed by Krueger F and Andrews SR at the Babraham Institute. Krueger F, Andrews SR. (2011) Bismark: a flexible aligner and methylation caller for Bisulfite-Seq applications. Bioinformatics, 27, 1571-2.
+
+------
+
+**Know what you are doing**
+
+.. class:: warningmark
+
+There is no such thing (yet) as an automated gearshift in short read mapping. It is all like stick-shift driving in San Francisco. In other words, running this tool with default parameters will probably not give you meaningful results. A way to deal with this is to **understand** the parameters by carefully reading the `documentation`__ and experimenting. Fortunately, Galaxy makes experimenting easy.
+
+ .. __: http://www.bioinformatics.babraham.ac.uk/projects/bismark/
+
+------
+
+**Input formats**
+
+Bismark accepts files in either Sanger FASTQ format (galaxy type *fastqsanger*), Illumina FASTQ format (galaxy type *fastqillumina*) or FASTA format (galaxy type *fasta*). Use the FASTQ Groomer to prepare your files.
+
+------
+
+**A Note on Built-in Reference Genomes**
+
+The default variant for all genomes is "Full", defined as all primary chromosomes (or scaffolds/contigs) including mitochondrial plus associated unmapped, plasmid, and other segments. When only one version of a genome is available in this tool, it represents the default "Full" variant. Some genomes will have more than one variant available. The "Canonical Male" or sometimes simply "Canonical" variant contains the primary chromosomes for a genome. For example a human "Canonical" variant contains chr1-chr22, chrX, chrY, and chrM. The "Canonical Female" variant contains the primary chromosomes excluding chrY.
+
+------
+
+The final output of Bismark is in SAM format by default.
+
+**Outputs**
+
+The output is in SAM format, and has the following columns::
+
+  Column       Description
+  --------     --------------------------------------------------------
+  1  QNAME     seq-ID
+  2  FLAG      this flag tries to take the strand a bisulfite read
+               originated from into account
+               (this is different from ordinary DNA alignment flags!)
+  3  RNAME     chromosome
+  4  POS       start position
+  5  MAPQ      always 255
+  6  CIGAR     extended CIGAR string
+  7  MRNM      Mate Reference sequence NaMe ('=' if same as RNAME)
+  8  MPOS      1-based Mate POSition
+  9  ISIZE     Inferred insert SIZE
+  10 SEQ       query SEQuence on the same strand as the reference
+  11 QUAL      Phred33 scale
+  12 NM-tag    edit distance to the reference
+  13 XX-tag    base-by-base mismatches to the reference.
+               This does not include indels.
+  14 XM-tag    methylation call string
+  15 XR-tag    read conversion state for the alignment
+  16 XG-tag    genome conversion state for the alignment
+
+
+Each read of paired-end alignments is written out in a separate line in the above format.
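+
+When eyeballing such lines, the two standard SAM flag bits below are often all you need (a minimal Python
+sketch; the extra bisulfite-strand information Bismark encodes in FLAG is described above and is not decoded
+here, and the function name is invented for this example)::
+
+    # Decode the two SAM FLAG bits most useful for a first look at Bismark output.
+    def basic_flag_info(flag):
+        return {
+            "unmapped": bool(flag & 0x4),   # e.g. the FLAG value 4 in the example below
+            "reverse": bool(flag & 0x10),   # read aligned to the reverse strand
+        }
+
+    # basic_flag_info(4) -> {'unmapped': True, 'reverse': False}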
+
+
+It looks like this (scroll sideways to see the entire example)::
+
+  QNAME FLAG RNAME POS MAPQ CIGAR MRNM MPOS ISIZE SEQ QUAL OPT
+  HWI-EAS91_1_30788AAXX:1:1:1761:343 4 * 0 0 * * 0 0 AAAAAAANNAAAAAAAAAAAAAAAAAAAAAAAAAAACNNANNGAGTNGNNNNNNNGCTTCCCACAGNNCTGG hhhhhhh;;hhhhhhhhhhh^hOhhhhghhhfhhhgh;;h;;hhhh;h;;;;;;;hhhhhhghhhh;;Phhh
+  HWI-EAS91_1_30788AAXX:1:1:1578:331 4 * 0 0 * * 0 0 GTATAGANNAATAAGAAAAAAAAAAATGAAGACTTTCNNANNTCTGNANNNNNNNTCTTTTTTCAGNNGTAG hhhhhhh;;hhhhhhhhhhhhhhhhhhhhhhhhhhhh;;h;;hhhh;h;;;;;;;hhhhhhhhhhh;;hhVh
+
+-------
+
+**Bismark settings**
+
+All of the options have a default value. You can change any of them. If any Bismark function is missing please contact the tool author or your Galaxy admin.
+
+------
+
+**Bismark parameter list**
+
+This is an exhaustive list of Bismark options:
+
+------
+
+**OPTIONS**
+
+
+Input::
+
+  --singles              A comma- or space-separated list of files containing the reads to be aligned (e.g.
+                         lane1.fq,lane2.fq lane3.fq). Reads may be a mix of different lengths. Bismark will
+                         produce one mapping result and one report file per input file.
+
+  -1 mates1              Comma-separated list of files containing the #1 mates (filename usually includes
+                         "_1"), e.g. flyA_1.fq,flyB_1.fq). Sequences specified with this option must
+                         correspond file-for-file and read-for-read with those specified in mates2.
+                         Reads may be a mix of different lengths. Bismark will produce one mapping result
+                         and one report file per paired-end input file pair.
+
+  -2 mates2              Comma-separated list of files containing the #2 mates (filename usually includes
+                         "_2"), e.g. flyA_1.fq,flyB_1.fq). Sequences specified with this option must
+                         correspond file-for-file and read-for-read with those specified in mates1.
+                         Reads may be a mix of different lengths.
+
+  -q/--fastq             The query input files (specified as mate1,mate2 or singles) are FASTQ
+                         files (usually having extension .fq or .fastq). This is the default. See also
+                         --solexa-quals.
+
+  -f/--fasta             The query input files (specified as mate1,mate2 or singles) are FASTA
+                         files (usually having extension .fa, .mfa, .fna or similar). All quality values
+                         are assumed to be 40 on the Phred scale.
+
+  -s/--skip INT          Skip (i.e. do not align) the first INT reads or read pairs from the input.
+
+  -u/--upto INT          Only aligns the first INT reads or read pairs from the input. Default: no limit.
+
+  --phred33-quals        FASTQ qualities are ASCII chars equal to the Phred quality plus 33. Default: on.
+
+  --phred64-quals        FASTQ qualities are ASCII chars equal to the Phred quality plus 64. Default: off.
+
+  --solexa-quals         Convert FASTQ qualities from solexa-scaled (which can be negative) to phred-scaled
+                         (which can't). The formula for conversion is:
+                         phred-qual = 10 * log(1 + 10 ** (solexa-qual/10.0)) / log(10). Used with -q. This
+                         is usually the right option for use with (unconverted) reads emitted by the GA
+                         Pipeline versions prior to 1.3. Works only for Bowtie 1. Default: off.
+
+  --solexa1.3-quals      Same as --phred64-quals. This is usually the right option for use with (unconverted)
+                         reads emitted by GA Pipeline version 1.3 or later. Default: off.
+
+
+Alignment::
+
+  -n/--seedmms INT       The maximum number of mismatches permitted in the "seed", i.e. the first L base pairs
+                         of the read (where L is set with -l/--seedlen). This may be 0, 1, 2 or 3 and the
+                         default is 1. This option is only available for Bowtie 1 (for Bowtie 2 see -N).
+
+  -l/--seedlen           The "seed length"; i.e., the number of bases of the high quality end of the read to
+                         which the -n ceiling applies. The default is 28. Bowtie (and thus Bismark) is faster for
+                         larger values of -l. This option is only available for Bowtie 1 (for Bowtie 2 see -L).
+
+  -e/--maqerr INT        Maximum permitted total of quality values at all mismatched read positions throughout
+                         the entire alignment, not just in the "seed". The default is 70. Like Maq, bowtie rounds
+                         quality values to the nearest 10 and saturates at 30. This value is not relevant for
+                         Bowtie 2.
+
+  --chunkmbs INT         The number of megabytes of memory a given thread is given to store path descriptors in
+                         --best mode. Best-first search must keep track of many paths at once to ensure it is
+                         always extending the path with the lowest cumulative cost. Bowtie tries to minimize the
+                         memory impact of the descriptors, but they can still grow very large in some cases. If
+                         you receive an error message saying that chunk memory has been exhausted in --best mode,
+                         try adjusting this parameter up to dedicate more memory to the descriptors. This value
+                         is not relevant for Bowtie 2. Default: 512.
+
+  -I/--minins INT        The minimum insert size for valid paired-end alignments. E.g. if -I 60 is specified and
+                         a paired-end alignment consists of two 20-bp alignments in the appropriate orientation
+                         with a 20-bp gap between them, that alignment is considered valid (as long as -X is also
+                         satisfied). A 19-bp gap would not be valid in that case. Default: 0.
+
+  -X/--maxins INT        The maximum insert size for valid paired-end alignments. E.g. if -X 100 is specified and
+                         a paired-end alignment consists of two 20-bp alignments in the proper orientation with a
+                         60-bp gap between them, that alignment is considered valid (as long as -I is also satisfied).
+                         A 61-bp gap would not be valid in that case. Default: 500.
+
+
+
+Output::
+
+  --non_directional      The sequencing library was constructed in a non strand-specific manner; alignments to all four
+                         bisulfite strands will be reported. Default: OFF.
+
+                         (The current Illumina protocol for BS-Seq is directional, in which case the strands complementary
+                         to the original strands are merely theoretical and should not exist in reality. Specifying directional
+                         alignments (which is the default) will only run 2 alignment threads to the original top (OT)
+                         or bottom (OB) strands in parallel and report these alignments. This is the recommended option
+                         for strand-specific libraries).
+
+  --sam-no-hd            Suppress SAM header lines (starting with @). This might be useful when very large input files are
+                         split up into several smaller files to run concurrently and the output files are to be merged.
+
+  --quiet                Print nothing besides alignments.
+
+  --vanilla              Performs bisulfite mapping with Bowtie 1 and prints the 'old' output (as in Bismark 0.5.X) instead
+                         of SAM format output.
+
+  -un/--unmapped         Write all reads that could not be aligned to a file in the output directory. Written reads will
+                         appear as they did in the input, without any translation of quality values that may have
+                         taken place within Bowtie or Bismark. Paired-end reads will be written to two parallel files with _1
+                         and _2 inserted in their filenames, i.e. _unmapped_reads_1.txt and _unmapped_reads_2.txt. Reads
+                         with more than one valid alignment with the same number of lowest mismatches (ambiguous mapping)
+                         are also written to _unmapped_reads.txt unless the option --ambiguous is specified as well.
+
+  --ambiguous            Write all reads which produce more than one valid alignment with the same number of lowest
+                         mismatches or other reads that fail to align uniquely to a file in the output directory.
+                         Written reads will appear as they did in the input, without any of the translation of quality
+                         values that may have taken place within Bowtie or Bismark. Paired-end reads will be written to two
+                         parallel files with _1 and _2 inserted in their filenames, i.e. _ambiguous_reads_1.txt and
+                         _ambiguous_reads_2.txt. These reads are not written to the file specified with --un.
+
+  -o/--output_dir DIR    Write all output files into this directory. By default the output files will be written into
+                         the same folder as the input file(s). If the specified folder does not exist, Bismark will attempt
+                         to create it first. The path to the output folder can be either relative or absolute.
+
+  --temp_dir DIR         Write temporary files to this directory instead of into the same directory as the input files. If
+                         the specified folder does not exist, Bismark will attempt to create it first. The path to the
+                         temporary folder can be either relative or absolute.
+
+------
+
+Bowtie 2 alignment options::
+
+  -N INT                 Sets the number of mismatches allowed in a seed alignment during multiseed alignment.
+                         Can be set to 0 or 1. Setting this higher makes alignment slower (often much slower)
+                         but increases sensitivity. Default: 0. This option is only available for Bowtie 2 (for
+                         Bowtie 1 see -n).
+
+  -L INT                 Sets the length of the seed substrings to align during multiseed alignment. Smaller values
+                         make alignment slower but more sensitive. Default: the --sensitive preset of Bowtie 2 is
+                         used by default, which sets -L to 20. This option is only available for Bowtie 2 (for
+                         Bowtie 1 see -l).
+
+  --ignore-quals         When calculating a mismatch penalty, always consider the quality value at the mismatched
+                         position to be the highest possible, regardless of the actual value. I.e. input is treated
+                         as though all quality values are high. This is also the default behavior when the input
+                         doesn't specify quality values (e.g. in -f mode). This option is invariable and on by default.
+
+
+Bowtie 2 paired-end options::
+
+  --no-mixed             This option disables Bowtie 2's behavior to try to find alignments for the individual mates if
+                         it cannot find a concordant or discordant alignment for a pair. This option is invariable
+                         and on by default.
+
+  --no-discordant        Normally, Bowtie 2 looks for discordant alignments if it cannot find any concordant alignments.
+                         A discordant alignment is an alignment where both mates align uniquely, but that does not
+                         satisfy the paired-end constraints (--fr/--rf/--ff, -I, -X). This option disables that behavior
+                         and it is on by default.
+
+
+Bowtie 2 effort options::
+
+  -D INT                 Up to INT consecutive seed extension attempts can "fail" before Bowtie 2 moves on, using
+                         the alignments found so far. A seed extension "fails" if it does not yield a new best or a
+                         new second-best alignment. Default: 15.
+
+  -R INT                 INT is the maximum number of times Bowtie 2 will "re-seed" reads with repetitive seeds.
+                         When "re-seeding," Bowtie 2 simply chooses a new set of reads (same length, same number of
+                         mismatches allowed) at different offsets and searches for more alignments. A read is considered
+                         to have repetitive seeds if the total number of seed hits divided by the number of seeds
+                         that aligned at least once is greater than 300. Default: 2.
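+
+For illustration only (this sketch is not part of Bismark or Bowtie 2; the function
+name is invented here), the repetitive-seed criterion that -R responds to can be
+written out in Python, with the ratio and the threshold of 300 taken from the text
+above::
+
+    def has_repetitive_seeds(total_seed_hits, seeds_aligned_at_least_once, threshold=300):
+        # A read counts as having repetitive seeds when the total number of seed
+        # hits divided by the number of seeds that aligned at least once exceeds
+        # the threshold (300, as documented above).
+        if seeds_aligned_at_least_once == 0:
+            return False
+        return float(total_seed_hits) / seeds_aligned_at_least_once > threshold
+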
+ + +Bowtie 2 Scoring options:: + + --score_min "func" Sets a function governing the minimum alignment score needed for an alignment to be considered + "valid" (i.e. good enough to report). This is a function of read length. For instance, specifying + L,0,-0.2 sets the minimum-score function f to f(x) = 0 + -0.2 * x, where x is the read length. + See also: setting function options at http://bowtie-bio.sourceforge.net/bowtie2. The default is + L,0,-0.2. + + +Bowtie 2 Reporting options:: + + --most_valid_alignments INT This used to be the Bowtie 2 parameter -M. As of Bowtie 2 version 2.0.0 beta7 the option -M is + deprecated. It will be removed in subsequent versions. What used to be called -M mode is still the + default mode, but adjusting the -M setting is deprecated. Use the -D and -R options to adjust the + effort expended to find valid alignments. + + For reference, this used to be the old (now deprecated) description of -M: + Bowtie 2 searches for at most INT+1 distinct, valid alignments for each read. The search terminates when it + can't find more distinct valid alignments, or when it finds INT+1 distinct alignments, whichever + happens first. Only the best alignment is reported. Information from the other alignments is used to + estimate mapping quality and to set SAM optional fields, such as AS:i and XS:i. Increasing -M makes + Bowtie 2 slower, but increases the likelihood that it will pick the correct alignment for a read that + aligns many places. For reads that have more than INT+1 distinct, valid alignments, Bowtie 2 does not + guarantee that the alignment reported is the best possible in terms of alignment score. -M is + always used and its default value is set to 10. + + + diff --git a/lib/tool_shed/test/test_data/repos/bismark/0/bismark_genome_preparation b/lib/tool_shed/test/test_data/repos/bismark/0/bismark_genome_preparation new file mode 100755 index 000000000000..1895a296632c --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/bismark/0/bismark_genome_preparation @@ -0,0 +1,492 @@ +#!/usr/bin/perl -- +use strict; +use warnings; +use Cwd; +use File::Path qw(rmtree); +$|++; + + +## This program is Copyright (C) 2010-12, Felix Krueger (felix.krueger@bbsrc.ac.uk) + +## This program is free software: you can redistribute it and/or modify +## it under the terms of the GNU General Public License as published by +## the Free Software Foundation, either version 3 of the License, or +## (at your option) any later version. + +## This program is distributed in the hope that it will be useful, +## but WITHOUT ANY WARRANTY; without even the implied warranty of +## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +## GNU General Public License for more details. + +## You should have received a copy of the GNU General Public License +## along with this program. If not, see . + +use Getopt::Long; +use Cwd; + +my $verbose; +my $help; +my $version; +my $man; +my $path_to_bowtie; +my $multi_fasta; +my $single_fasta; +my $bowtie2; + +my $bismark_version = 'v0.7.7'; + +GetOptions ('verbose' => \$verbose, + 'help' => \$help, + 'man' => \$man, + 'version' => \$version, + 'path_to_bowtie:s' => \$path_to_bowtie, + 'single_fasta' => \$single_fasta, + 'bowtie2' => \$bowtie2, + ); + +my $genome_folder = shift @ARGV; # mandatory +my $CT_dir; +my $GA_dir; + +if ($help or $man){ + print_helpfile(); + exit; +} + +if ($version){ + print << "VERSION"; + + Bismark - Bisulfite Mapper and Methylation Caller. 
+ + Bismark Genome Preparation Version: $bismark_version + Copyright 2010-12 Felix Krueger, Babraham Bioinformatics + www.bioinformatics.babraham.ac.uk/projects/ + +VERSION + exit; +} + +if ($single_fasta){ + print "Writing individual genomes out into single-entry fasta files (one per chromosome)\n\n"; + $multi_fasta = 0; +} +else{ + print "Writing bisulfite genomes out into a single MFA (multi FastA) file\n\n"; + $single_fasta = 0; + $multi_fasta = 1; +} + +my @filenames = create_bisulfite_genome_folders(); + +process_sequence_files (); + +launch_bowtie_indexer(); + +sub launch_bowtie_indexer{ + if ($bowtie2){ + print "Bismark Genome Preparation - Step III: Launching the Bowtie 2 indexer\n"; + } + else{ + print "Bismark Genome Preparation - Step III: Launching the Bowtie (1) indexer\n"; + } + print "Please be aware that this process can - depending on genome size - take up to several hours!\n"; + sleep(5); + + ### if the path to bowtie was specfified explicitely + if ($path_to_bowtie){ + if ($bowtie2){ + $path_to_bowtie =~ s/$/bowtie2-build/; + } + else{ + $path_to_bowtie =~ s/$/bowtie-build/; + } + } + ### otherwise we assume that bowtie-build is in the path + else{ + if ($bowtie2){ + $path_to_bowtie = 'bowtie2-build'; + } + else{ + $path_to_bowtie = 'bowtie-build'; + } + } + + $verbose and print "\n"; + + ### Forking the program to run 2 instances of Bowtie-build or Bowtie2-build (= the Bowtie (1/2) indexer) + my $pid = fork(); + + # parent process + if ($pid){ + sleep(1); + chdir $CT_dir or die "Unable to change directory: $!\n"; + $verbose and warn "Preparing indexing of CT converted genome in $CT_dir\n"; + my @fasta_files = <*.fa>; + my $file_list = join (',',@fasta_files); + $verbose and print "Parent process: Starting to index C->T converted genome with the following command:\n\n"; + $verbose and print "$path_to_bowtie -f $file_list BS_CT\n\n"; + + sleep (11); + exec ("$path_to_bowtie","-f","$file_list","BS_CT"); + } + + # child process + elsif ($pid == 0){ + sleep(2); + chdir $GA_dir or die "Unable to change directory: $!\n"; + $verbose and warn "Preparing indexing of GA converted genome in $GA_dir\n"; + my @fasta_files = <*.fa>; + my $file_list = join (',',@fasta_files); + $verbose and print "Child process: Starting to index G->A converted genome with the following command:\n\n"; + $verbose and print "$path_to_bowtie -f $file_list BS_GA\n\n"; + $verbose and print "(starting in 10 seconds)\n"; + sleep(10); + exec ("$path_to_bowtie","-f","$file_list","BS_GA"); + } + + # if the platform doesn't support the fork command we will run the indexing processes one after the other + else{ + print "Forking process was not successful, therefore performing the indexing sequentially instead\n"; + sleep(10); + + ### moving to CT genome folder + $verbose and warn "Preparing to index CT converted genome in $CT_dir\n"; + chdir $CT_dir or die "Unable to change directory: $!\n"; + my @fasta_files = <*.fa>; + my $file_list = join (',',@fasta_files); + $verbose and print "$file_list\n\n"; + sleep(2); + system ("$path_to_bowtie","-f","$file_list","BS_CT"); + @fasta_files=(); + $file_list= ''; + + ### moving to GA genome folder + $verbose and warn "Preparing to index GA converted genome in $GA_dir\n"; + chdir $GA_dir or die "Unable to change directory: $!\n"; + @fasta_files = <*.fa>; + $file_list = join (',',@fasta_files); + $verbose and print "$file_list\n\n"; + sleep(2); + exec ("$path_to_bowtie","-f","$file_list","BS_GA"); + } +} + + +sub process_sequence_files { + + my 
($total_CT_conversions,$total_GA_conversions) = (0,0);
+  $verbose and print "Bismark Genome Preparation - Step II: Bisulfite converting reference genome\n\n";
+  sleep (3);
+
+  $verbose and print "conversions performed:\n";
+  $verbose and print join("\t",'chromosome','C->T','G->A'),"\n";
+
+
+  ### If someone wants to index a genome which consists of thousands of contig and scaffold files we need to write the genome conversions into an MFA file
+  ### Otherwise the list of comma separated chromosomes we provide for bowtie-build will get too long for the kernel to handle
+  ### This is now the default option
+
+  if ($multi_fasta){
+    ### Here we just use one multi FastA file name, append .CT_conversion or .GA_conversion and print all sequence conversions into these files
+    my $bisulfite_CT_conversion_filename = "$CT_dir/genome_mfa.CT_conversion.fa";
+    open (CT_CONVERT,'>',$bisulfite_CT_conversion_filename) or die "Can't write to file $bisulfite_CT_conversion_filename: $!\n";
+
+    my $bisulfite_GA_conversion_filename = "$GA_dir/genome_mfa.GA_conversion.fa";
+    open (GA_CONVERT,'>',$bisulfite_GA_conversion_filename) or die "Can't write to file $bisulfite_GA_conversion_filename: $!\n";
+  }
+
+  foreach my $filename(@filenames){
+    my ($chromosome_CT_conversions,$chromosome_GA_conversions) = (0,0);
+    open (IN,$filename) or die "Failed to read from sequence file $filename $!\n";
+    # warn "Reading chromosome information from $filename\n\n";
+
+    ### first line needs to be a fastA header
+    my $first_line = <IN>;
+    chomp $first_line;
+
+    ### Extracting chromosome name from the FastA header
+    my $chromosome_name = extract_chromosome_name($first_line);
+
+    ### alternatively, chromosomes can be written out into single-entry FastA files. This will only work for genomes with up to a few hundred chromosomes.
+    unless ($multi_fasta){
+      my $bisulfite_CT_conversion_filename = "$CT_dir/$chromosome_name";
+      $bisulfite_CT_conversion_filename =~ s/$/.CT_conversion.fa/;
+      open (CT_CONVERT,'>',$bisulfite_CT_conversion_filename) or die "Can't write to file $bisulfite_CT_conversion_filename: $!\n";
+
+      my $bisulfite_GA_conversion_filename = "$GA_dir/$chromosome_name";
+      $bisulfite_GA_conversion_filename =~ s/$/.GA_conversion.fa/;
+      open (GA_CONVERT,'>',$bisulfite_GA_conversion_filename) or die "Can't write to file $bisulfite_GA_conversion_filename: $!\n";
+    }
+
+    print CT_CONVERT ">",$chromosome_name,"_CT_converted\n"; # first entry
+    print GA_CONVERT ">",$chromosome_name,"_GA_converted\n"; # first entry
+
+
+    while (<IN>){
+
+      ### in case the line is a new fastA header
+      if ($_ =~ /^>/){
+        ### printing out the stats for the previous chromosome
+        $verbose and print join ("\t",$chromosome_name,$chromosome_CT_conversions,$chromosome_GA_conversions),"\n";
+        ### resetting the chromosome transliteration counters
+        ($chromosome_CT_conversions,$chromosome_GA_conversions) = (0,0);
+
+        ### Extracting chromosome name from the additional FastA header
+        $chromosome_name = extract_chromosome_name($_);
+
+        ### alternatively, chromosomes can be written out into single-entry FastA files. This will only work for genomes with up to a few hundred chromosomes.
+ unless ($multi_fasta){ + my $bisulfite_CT_conversion_filename = "$CT_dir/$chromosome_name"; + $bisulfite_CT_conversion_filename =~ s/$/.CT_conversion.fa/; + open (CT_CONVERT,'>',$bisulfite_CT_conversion_filename) or die "Can't write to file $bisulfite_CT_conversion_filename: $!\n"; + + my $bisulfite_GA_conversion_filename = "$GA_dir/$chromosome_name"; + $bisulfite_GA_conversion_filename =~ s/$/.GA_conversion.fa/; + open (GA_CONVERT,'>',$bisulfite_GA_conversion_filename) or die "Can't write to file $bisulfite_GA_conversion_filename: $!\n"; + } + + print CT_CONVERT ">",$chromosome_name,"_CT_converted\n"; + print GA_CONVERT ">",$chromosome_name,"_GA_converted\n"; + } + + else{ + my $sequence = uc$_; + + ### (I) First replacing all ambiguous sequence characters (such as M,S,R....) by N (G,A,T,C,N and the line endings \r and \n are added to a character group) + + $sequence =~ s/[^ATCGN\n\r]/N/g; + + ### (II) Writing the chromosome out into a C->T converted version (equals forward strand conversion) + + my $CT_sequence = $sequence; + my $CT_transliterations_performed = ($CT_sequence =~ tr/C/T/); # converts all Cs into Ts + $total_CT_conversions += $CT_transliterations_performed; + $chromosome_CT_conversions += $CT_transliterations_performed; + + print CT_CONVERT $CT_sequence; + + ### (III) Writing the chromosome out in a G->A converted version of the forward strand (this is equivalent to reverse- + ### complementing the forward strand and then C->T converting it) + + my $GA_sequence = $sequence; + my $GA_transliterations_performed = ($GA_sequence =~ tr/G/A/); # converts all Gs to As on the forward strand + $total_GA_conversions += $GA_transliterations_performed; + $chromosome_GA_conversions += $GA_transliterations_performed; + + print GA_CONVERT $GA_sequence; + + } + } + $verbose and print join ("\t",$chromosome_name,$chromosome_CT_conversions,$chromosome_GA_conversions),"\n"; + } + close (CT_CONVERT) or die "Failed to close filehandle: $!\n"; + close (GA_CONVERT) or die "Failed to close filehandle: $!\n"; + + + print "\nTotal number of conversions performed:\n"; + print "C->T:\t$total_CT_conversions\n"; + print "G->A:\t$total_GA_conversions\n"; + + warn "\nStep II - Genome bisulfite conversions - completed\n\n\n"; +} + +sub extract_chromosome_name { + + my $header = shift; + + ## Bowtie extracts the first string after the initial > in the FASTA file, so we are doing this as well + + if ($header =~ s/^>//){ + my ($chromosome_name) = split (/\s+/,$header); + return $chromosome_name; + } + else{ + die "The specified chromosome file doesn't seem to be in FASTA format as required! $!\n"; + } +} + +sub create_bisulfite_genome_folders{ + + $verbose and print "Bismark Genome Preparation - Step I: Preparing folders\n\n"; + + # Ensuring a genome folder has been specified + if ($genome_folder){ + unless ($genome_folder =~ /\/$/){ + $genome_folder =~ s/$/\//; + } + $verbose and print "Path to genome folder specified: $genome_folder\n"; + chdir $genome_folder or die "Could't move to directory $genome_folder. Make sure the directory exists! 
$!";
+
+    # making the genome folder path absolute so it won't break if the path was specified relative
+    $genome_folder = getcwd;
+    unless ($genome_folder =~ /\/$/){
+      $genome_folder =~ s/$/\//;
+    }
+  }
+
+  else{
+    $verbose and print "Genome folder was not provided as argument ";
+    while (1){
+      print "Please specify a genome folder to be bisulfite converted:\n";
+      $genome_folder = <STDIN>;
+      chomp $genome_folder;
+
+      # adding a trailing slash unless already present
+      unless ($genome_folder =~ /\/$/){
+        $genome_folder =~ s/$/\//;
+      }
+      if (chdir $genome_folder){
+        last;
+      }
+      else{
+        warn "Couldn't move to directory $genome_folder! $!";
+      }
+    }
+  }
+
+  if ($path_to_bowtie){
+    unless ($path_to_bowtie =~ /\/$/){
+      $path_to_bowtie =~ s/$/\//;
+    }
+    if (chdir $path_to_bowtie){
+      if ($bowtie2){
+        $verbose and print "Path to Bowtie 2 specified: $path_to_bowtie\n";
+      }
+      else{
+        $verbose and print "Path to Bowtie (1) specified: $path_to_bowtie\n";
+      }
+    }
+    else{
+      die "There was an error with the path to bowtie: $!\n";
+    }
+  }
+
+  chdir $genome_folder or die "Couldn't move to directory $genome_folder. Make sure the directory exists! $!";
+
+
+  # Exiting unless there are fastA files in the folder
+  my @filenames = <*.fa>;
+
+  ### if there aren't any genomic files with the extension .fa we will look for files with the extension .fasta
+  unless (@filenames){
+    @filenames = <*.fasta>;
+  }
+
+  unless (@filenames){
+    die "The specified genome folder $genome_folder does not contain any sequence files in FastA format (with .fa or .fasta file extensions)\n";
+  }
+
+  warn "Bisulfite Genome Indexer version $bismark_version (last modified 17 Nov 2011)\n\n";
+  sleep (3);
+
+  # creating a directory inside the genome folder to store the bisulfite genomes unless it already exists
+  my $bisulfite_dir = "${genome_folder}Bisulfite_Genome/";
+  unless (-d $bisulfite_dir){
+    mkdir $bisulfite_dir or die "Unable to create directory $bisulfite_dir $!\n";
+    $verbose and print "Created Bisulfite Genome folder $bisulfite_dir\n";
+  }
+  else{
+    while (1){
+      print "\nA directory called $bisulfite_dir already exists. Bisulfite converted sequences and/or already existing Bowtie (1 or 2) indexes might be overwritten!\nDo you want to continue anyway?\t";
+      my $proceed = <STDIN>;
+      chomp $proceed;
+      if ($proceed =~ /^y/i ){
+        last;
+      }
+      elsif ($proceed =~ /^n/i){
+        die "Terminated by user\n\n";
+      }
+    }
+  }
+
+  ### as of version 0.6.0 the Bismark indexer will no longer delete the Bisulfite_Genome directory if it was present already, since it could store the Bowtie 1 or 2 indexes already
+  # removing any existing files and subfolders in the bisulfite directory (the specified directory won't be deleted)
+  # rmtree($bisulfite_dir, {verbose => 1,keep_root => 1});
+  # unless (-d $bisulfite_dir){ # had to add this after changing remove_tree to rmtree // suggested by Samantha Cooper @ Illumina
+  #   mkdir $bisulfite_dir or die "Unable to create directory $bisulfite_dir $!\n";
+  # }
+  # }
+
+  chdir $bisulfite_dir or die "Unable to move to $bisulfite_dir\n";
+  $CT_dir = "${bisulfite_dir}CT_conversion/";
+  $GA_dir = "${bisulfite_dir}GA_conversion/";
+
+  # creating 2 subdirectories to store a C->T (forward strand conversion) and a G->A (reverse strand conversion)
+  # converted version of the genome
+  unless (-d $CT_dir){
+    mkdir $CT_dir or die "Unable to create directory $CT_dir $!\n";
+    $verbose and print "Created Bisulfite Genome folder $CT_dir\n";
+  }
+  unless (-d $GA_dir){
+    mkdir $GA_dir or die "Unable to create directory $GA_dir $!\n";
+    $verbose and print "Created Bisulfite Genome folder $GA_dir\n";
+  }
+
+  # moving back to the original genome folder
+  chdir $genome_folder or die "Couldn't move to directory $genome_folder $!";
+  # $verbose and print "Moved back to genome folder $genome_folder\n";
+  warn "\nStep I - Prepare genome folders - completed\n\n\n";
+  return @filenames;
+}
+
+sub print_helpfile{
+  print << 'HOW_TO';
+
+
+DESCRIPTION
+
+This script is supposed to convert a specified reference genome into two different bisulfite
+converted versions and index them for alignments with Bowtie 1 (default), or Bowtie 2. The first
+bisulfite genome will have all Cs converted to Ts (C->T), and the other one will have all Gs
+converted to As (G->A). Both bisulfite genomes will be stored in subfolders within the reference
+genome folder. Once the bisulfite conversion has been completed the program will fork and launch
+two simultaneous instances of the bowtie 1 or 2 indexer (bowtie-build or bowtie2-build). Be aware
+that the indexing process can take up to several hours; this will mainly depend on genome size
+and system resources.
+
+
+
+
+The following is a brief description of command line options and arguments to control the
+Bismark Genome Preparation script:
+
+
+USAGE: bismark_genome_preparation [options] <path_to_genome_folder>
+
+
+OPTIONS:
+
+--help/--man           Displays this help file and exits.
+
+--version              Displays version information and exits.
+
+--verbose              Print verbose output for more details or debugging.
+
+--path_to_bowtie       The full path to the Bowtie 1 or Bowtie 2 installation on your system. If
+                       the path is not provided as an option you will be prompted for it.
+
+--bowtie2              This will create bisulfite indexes for Bowtie 2. (Default: Bowtie 1).
+
+--single_fasta         Instruct the Bismark Indexer to write the converted genomes into
+                       single-entry FastA files instead of making one multi-FastA file (MFA)
+                       per chromosome. This might be useful if individual bisulfite converted
+                       chromosomes are needed (e.g. for debugging), however it can cause a
+                       problem with indexing if the number of chromosomes is vast (this is likely
+                       to be in the range of several thousand files; the operating system can
+                       only handle lists up to a certain length, and some newly assembled
+                       genomes may contain 20000-50000 contigs or scaffold files which do exceed
+                       this list length limit).
+
+
+ARGUMENTS:
+
+<path_to_genome_folder>  The path to the folder containing the genome to be bisulfite converted.
+                       At the current time Bismark Genome Preparation expects one or more fastA
+                       files in the folder (with the file extension: .fa or .fasta). If the path
+                       is not provided as an argument you will be prompted for it.
+
+
+
+This script was last modified on 18 Nov 2011.
+HOW_TO
+}
diff --git a/lib/tool_shed/test/test_data/repos/bismark/0/bismark_methylation_extractor.py b/lib/tool_shed/test/test_data/repos/bismark/0/bismark_methylation_extractor.py
new file mode 100644
index 000000000000..cb79d1ecf590
--- /dev/null
+++ b/lib/tool_shed/test/test_data/repos/bismark/0/bismark_methylation_extractor.py
@@ -0,0 +1,186 @@
+#!/usr/bin/env python
+
+import argparse, os, shutil, subprocess, sys, tempfile, fileinput
+import zipfile
+from glob import glob
+
+def stop_err( msg ):
+    sys.stderr.write( "%s\n" % msg )
+    sys.exit()
+
+def zipper(dir, zip_file):
+    zip = zipfile.ZipFile(zip_file, 'w', compression=zipfile.ZIP_DEFLATED)
+    root_len = len(os.path.abspath(dir))
+    for root, dirs, files in os.walk(dir):
+        archive_root = os.path.abspath(root)[root_len:]
+        for f in files:
+            fullpath = os.path.join(root, f)
+            archive_name = os.path.join(archive_root, f)
+            zip.write(fullpath, archive_name, zipfile.ZIP_DEFLATED)
+    zip.close()
+    return zip_file
+
+def __main__():
+    #Parse Command Line
+    parser = argparse.ArgumentParser(description='Wrapper for the bismark methylation caller.')
+
+    # input options
+    parser.add_argument( '--infile', help='Input file in SAM format.'
) + parser.add_argument( '--single-end', dest='single_end', action="store_true" ) + parser.add_argument( '--paired-end', dest='paired_end', action="store_true" ) + + parser.add_argument( '--report-file', dest='report_file' ) + parser.add_argument( '--comprehensive', action="store_true" ) + parser.add_argument( '--merge-non-cpg', dest='merge_non_cpg', action="store_true" ) + parser.add_argument( '--no-overlap', dest='no_overlap', action="store_true" ) + parser.add_argument( '--compress' ) + parser.add_argument( '--ignore-bps', dest='ignore_bps', type=int ) + + # OT - original top strand + parser.add_argument( '--cpg_ot' ) + parser.add_argument( '--chg_ot' ) + parser.add_argument( '--chh_ot' ) + # CTOT - complementary to original top strand + parser.add_argument( '--cpg_ctot' ) + parser.add_argument( '--chg_ctot' ) + parser.add_argument( '--chh_ctot' ) + # OB - original bottom strand + parser.add_argument( '--cpg_ob' ) + parser.add_argument( '--chg_ob' ) + parser.add_argument( '--chh_ob' ) + # CTOT - complementary to original bottom strand + parser.add_argument( '--cpg_ctob' ) + parser.add_argument( '--chg_ctob' ) + parser.add_argument( '--chh_ctob' ) + + parser.add_argument( '--cpg_context' ) + parser.add_argument( '--chg_context' ) + parser.add_argument( '--chh_context' ) + + parser.add_argument( '--non_cpg_context' ) + + parser.add_argument( '--non_cpg_context_ot' ) + parser.add_argument( '--non_cpg_context_ctot' ) + parser.add_argument( '--non_cpg_context_ob' ) + parser.add_argument( '--non_cpg_context_ctob' ) + + args = parser.parse_args() + + + # Build methylation extractor command + output_dir = tempfile.mkdtemp() + cmd = 'bismark_methylation_extractor --no_header -o %s %s %s' + + additional_opts = '' + # Set up all options + if args.single_end: + additional_opts += ' --single-end ' + else: + additional_opts += ' --paired-end ' + if args.no_overlap: + additional_opts += ' --no_overlap ' + if args.ignore_bps: + additional_opts += ' --ignore %s ' % args.ignore_bps + if args.comprehensive: + additional_opts += ' --comprehensive ' + if args.merge_non_cpg: + additional_opts += ' --merge_non_CpG ' + if args.report_file: + additional_opts += ' --report ' + + + # Final command: + cmd = cmd % (output_dir, additional_opts, args.infile) + + # Run + try: + tmp_out = tempfile.NamedTemporaryFile().name + tmp_stdout = open( tmp_out, 'wb' ) + tmp_err = tempfile.NamedTemporaryFile().name + tmp_stderr = open( tmp_err, 'wb' ) + proc = subprocess.Popen( args=cmd, shell=True, cwd=".", stdout=tmp_stdout, stderr=tmp_stderr ) + returncode = proc.wait() + tmp_stderr.close() + # get stderr, allowing for case where it's very large + tmp_stderr = open( tmp_err, 'rb' ) + stderr = '' + buffsize = 1048576 + try: + while True: + stderr += tmp_stderr.read( buffsize ) + if not stderr or len( stderr ) % buffsize != 0: + break + except OverflowError: + pass + tmp_stdout.close() + tmp_stderr.close() + if returncode != 0: + raise Exception, stderr + + # TODO: look for errors in program output. 
+ except Exception, e: + stop_err( 'Error in bismark methylation extractor:\n' + str( e ) ) + + + # collect and copy output files + + if args.compress: + zipper(output_dir, args.compress) + + + if args.cpg_ot: + shutil.move( glob(os.path.join( output_dir, '*CpG_OT_*'))[0], args.cpg_ot ) + if args.chg_ot: + shutil.move( glob(os.path.join( output_dir, '*CHG_OT_*'))[0], args.chg_ot ) + if args.chh_ot: + shutil.move( glob(os.path.join( output_dir, '*CHH_OT_*'))[0], args.chh_ot ) + if args.cpg_ctot: + shutil.move( glob(os.path.join( output_dir, '*CpG_CTOT_*'))[0], args.cpg_ctot ) + if args.chg_ctot: + shutil.move( glob(os.path.join( output_dir, '*CHG_CTOT_*'))[0], args.chg_ctot ) + if args.chh_ctot: + shutil.move( glob(os.path.join( output_dir, '*CHH_CTOT_*'))[0], args.chh_ctot ) + if args.cpg_ob: + shutil.move( glob(os.path.join( output_dir, '*CpG_OB_*'))[0], args.cpg_ob ) + if args.chg_ob: + shutil.move( glob(os.path.join( output_dir, '*CHG_OB_*'))[0], args.chg_ob ) + if args.chh_ob: + shutil.move( glob(os.path.join( output_dir, '*CHH_OB_*'))[0], args.chh_ob ) + if args.cpg_ctob: + shutil.move( glob(os.path.join( output_dir, '*CpG_CTOB_*'))[0], args.cpg_ctob ) + if args.chg_ctob: + shutil.move( glob(os.path.join( output_dir, '*CHG_CTOB_*'))[0], args.chg_ctob ) + if args.chh_ctob: + shutil.move( glob(os.path.join( output_dir, '*CHH_CTOB_*'))[0], args.chh_ctob ) + + # context-dependent methylation output files + if args.cpg_context: + shutil.move( glob(os.path.join( output_dir, '*CpG_context_*'))[0], args.cpg_context ) + if args.chg_context: + shutil.move( glob(os.path.join( output_dir, '*CHG_context_*'))[0], args.chg_context ) + if args.chh_context: + shutil.move( glob(os.path.join( output_dir, '*CHH_context_*'))[0], args.chh_context ) + + if args.non_cpg_context: + shutil.move( glob(os.path.join( output_dir, '*Non_CpG_context_*'))[0], args.non_cpg_context ) + + if args.non_cpg_context_ot: + shutil.move( glob(os.path.join( output_dir, '*Non_CpG_OT_*'))[0], args.non_cpg_context_ot ) + if args.non_cpg_context_ctot: + shutil.move( glob(os.path.join( output_dir, '*Non_CpG_CTOT_*'))[0], args.non_cpg_context_ctot ) + if args.non_cpg_context_ob: + shutil.move( glob(os.path.join( output_dir, '*Non_CpG_OB_*'))[0], args.non_cpg_context_ob ) + if args.non_cpg_context_ctob: + shutil.move( glob(os.path.join( output_dir, '*Non_CpG_CTOB_*'))[0], args.non_cpg_context_ctob ) + + + + if args.report_file: + shutil.move( glob(os.path.join( output_dir, '*_splitting_report*'))[0], args.report_file ) + + + # Clean up temp dirs + if os.path.exists( output_dir ): + shutil.rmtree( output_dir ) + +if __name__=="__main__": __main__() diff --git a/lib/tool_shed/test/test_data/repos/bismark/0/bismark_methylation_extractor.xml b/lib/tool_shed/test/test_data/repos/bismark/0/bismark_methylation_extractor.xml new file mode 100644 index 000000000000..141ec5805d7f --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/bismark/0/bismark_methylation_extractor.xml @@ -0,0 +1,306 @@ + + + methylation extractor + + + SCRIPT_PATH + bowtie + bowtie2 + + + + bismark_methylation_extractor.py + + --infile $input + + --bismark_path \$SCRIPT_PATH + + #if $singlePaired.sPaired == "single": + --single-end + #else: + --paired-end + $no_overlap + #end if + + #if str($ignore_bps) != "0": + --ignore $ignore_bps + #end if + + #if $report: + --report-file $o_report + #end if + + #if $comprehensive: + --comprehensive + #end if + + #if $merge_non_cpg: + --merge-non-cpg + #end if + + #if $compress: + --compress $compressed_output + #else: + #if 
$comprehensive == False and $merge_non_cpg == False: + ##twelfe files + --cpg_ot $cpg_ot + --chg_ot $chg_ot + --chh_ot $chh_ot + --cpg_ctot $cpg_ctot + --chg_ctot $chg_ctot + --chh_ctot $chh_ctot + --cpg_ob $cpg_ob + --chg_ob $chg_ob + --chh_ob $chh_ob + --cpg_ctob $cpg_ctob + --chg_ctob $chg_ctob + --chh_ctob $chh_ctob + #elif $merge_non_cpg and $comprehensive: + ## two files + --non_cpg_context $non_cpg_context + --cpg_context $cpg_context + #elif $comprehensive: + ## three files + --cpg_context $cpg_context + --chg_context $chg_context + --chh_context $chh_context + #elif $merge_non_cpg: + ## eight files + --non_cpg_context_ctot $non_cpg_context_ctot + --non_cpg_context_ot $non_cpg_context_ot + --non_cpg_context_ob $non_cpg_context_ob + --non_cpg_context_ctob $non_cpg_context_ctob + --cpg_ot $cpg_ot + --cpg_ctot $cpg_ctot + --cpg_ob $cpg_ob + --cpg_ctob $cpg_ctob + #end if + ## end compress + #end if + + + + + + + + + + + + + + + + + + + + + + + + + + + ( report is True ) + + + + + ( compress == False and comprehensive == False and merge_non_CpG == False) + + + ( compress == False and comprehensive == False and merge_non_CpG == False) + + + ( compress == False and comprehensive == False and merge_non_CpG == False) + + + ( compress == False and comprehensive == False and merge_non_CpG == False) + + + ( compress == False and comprehensive == False and merge_non_CpG == False) + + + ( compress == False and comprehensive == False and merge_non_CpG == False) + + + + ( compress == False and comprehensive == False and merge_non_CpG == False) + + + ( compress == False and comprehensive == False and merge_non_CpG == False) + + + ( compress == False and comprehensive == False and merge_non_CpG == False) + + + ( compress == False and comprehensive == False and merge_non_CpG == False) + + + ( compress == False and comprehensive == False and merge_non_CpG == False) + + + ( compress == False and comprehensive == False and merge_non_CpG == False) + + + + + ( compress == False and comprehensive) + + + ( compress == False and comprehensive and merge_non_CpG == False) + + + ( compress == False and comprehensive and merge_non_CpG == False) + + + + ( compress == False and comprehensive and merge_non_cpg) + + + + ( compress == False and comprehensive == False and merge_non_cpg) + + + ( compress == False and comprehensive == False and merge_non_cpg) + + + ( compress == False and comprehensive == False and merge_non_cpg) + + + ( compress == False and comprehensive == False and merge_non_cpg) + + + + ( compress ) + + + + + + + + +**What it does** + +The following is a brief description of all options to control the Bismark_ +methylation extractor. The script reads in a bisulfite read alignment results file +produced by the Bismark bisulfite mapper and extracts the methylation information +for individual cytosines. This information is found in the methylation call field +which can contain the following characters: + + + - X = for methylated C in CHG context (was protected) + - x = for not methylated C CHG (was converted) + - H = for methylated C in CHH context (was protected) + - h = for not methylated C in CHH context (was converted) + - Z = for methylated C in CpG context (was protected) + - z = for not methylated C in CpG context (was converted) + - . = for any bases not involving cytosines + + +The methylation extractor outputs result files for cytosines in CpG, CHG and CHH +context (this distinction is actually already made in Bismark itself). 
As the methylation
+information for every C analysed can produce files which easily have tens or even hundreds of
+millions of lines, file sizes can become very large and more difficult to handle. The C
+methylation info additionally splits cytosine methylation calls up into one of the four possible
+strands a given bisulfite read aligned against:
+
+  - OT    = original top strand
+  - CTOT  = complementary to original top strand
+
+  - OB    = original bottom strand
+  - CTOB  = complementary to original bottom strand
+
+Thus, by default twelve individual output files are generated per input file (unless
+--comprehensive is specified, see below). The output files can be imported into a genome
+viewer, such as SeqMonk, and re-combined into a single data group if desired (in fact
+unless the bisulfite reads were generated preserving directionality it doesn't make any
+sense to look at the data in a strand-specific manner). Strand-specific output files can
+optionally be skipped, in which case only three output files for CpG, CHG or CHH context
+will be generated. For both the strand-specific and comprehensive outputs there is also
+the option to merge both non-CpG contexts (CHG and CHH) into one single non-CpG context.
+
+
+.. _Bismark: http://www.bioinformatics.babraham.ac.uk/projects/bismark/
+
+
+It is developed by Krueger F and Andrews SR. at the Babraham Institute. Krueger F, Andrews SR. (2011) Bismark: a flexible aligner and methylation caller for Bisulfite-Seq applications. Bioinformatics, 27, 1571-2.
+
+-------
+
+**Bismark settings**
+
+All of the options have a default value. You can change any of them. If any Bismark function is missing please contact the tool author or your Galaxy admin.
+
+------
+
+**Outputs**
+
+The output files are in the following format (tab delimited)::
+
+
+    Column   Description
+    -------- --------------------------------------------------------
+    1        seq-ID
+    2        strand
+    3        chromosome
+    4        position
+    5        methylation call
+
+
+  * Methylated cytosines receive a '+' orientation,
+  * Unmethylated cytosines receive a '-' orientation.
+
+------
+
+**OPTIONS**
+
+Input::
+
+  -s/--single-end        Input file(s) are Bismark result file(s) generated from single-end
+                         read data. Specifying either --single-end or --paired-end is
+                         mandatory.
+
+  -p/--paired-end        Input file(s) are Bismark result file(s) generated from paired-end
+                         read data. Specifying either --paired-end or --single-end is
+                         mandatory.
+
+  --no_overlap           For paired-end reads it is theoretically possible that read_1 and
+                         read_2 overlap. This option avoids scoring overlapping methylation
+                         calls twice. Whilst this removes a bias towards more methylation calls
+                         towards the center of sequenced fragments it can de facto remove
+                         a good proportion of the data.
+
+  --ignore INT           Ignore the first INT bp at the 5' end of each read when processing the
+                         methylation call string. This can remove e.g. a restriction enzyme site
+                         at the start of each read.
+
+Output::
+
+  --comprehensive        Specifying this option will merge all four possible strand-specific
+                         methylation info into context-dependent output files. The default
+                         contexts are:
+                          - CpG context
+                          - CHG context
+                          - CHH context
+
+  --merge_non_CpG        This will produce two output files (in --comprehensive mode) or eight
+                         strand-specific output files (default) for Cs in
+                          - CpG context
+                          - non-CpG context
+
+  --report               Prints out a short methylation summary as well as the parameters used to run
+                         this script.
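+
+As an illustration of the methylation call characters listed further above (this
+snippet is not part of the tool itself; the function name is invented), a
+per-context tally of a methylation call string could be computed like this::
+
+    def summarize_methylation_calls(call_string):
+        # Upper case = methylated (was protected), lower case = unmethylated
+        # (was converted): Z/z = CpG, X/x = CHG, H/h = CHH, '.' = no cytosine.
+        contexts = {'Z': 'CpG', 'X': 'CHG', 'H': 'CHH'}
+        counts = {'CpG': [0, 0], 'CHG': [0, 0], 'CHH': [0, 0]}
+        for call in call_string:
+            context = contexts.get(call.upper())
+            if context is not None:
+                counts[context][0 if call.isupper() else 1] += 1
+        return counts  # {context: [methylated, unmethylated]}
+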
+ + + + diff --git a/lib/tool_shed/test/test_data/repos/bismark/0/bismark_wrapper.py b/lib/tool_shed/test/test_data/repos/bismark/0/bismark_wrapper.py new file mode 100644 index 000000000000..606fa428bd77 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/bismark/0/bismark_wrapper.py @@ -0,0 +1,288 @@ +#!/usr/bin/env python + +import argparse, os, shutil, subprocess, sys, tempfile, fileinput +import fileinput +from glob import glob + +def stop_err( msg ): + sys.stderr.write( "%s\n" % msg ) + sys.exit() + +def __main__(): + #Parse Command Line + parser = argparse.ArgumentParser(description='Wrapper for the bismark bisulfite mapper.') + parser.add_argument( '-p', '--num-threads', dest='num_threads', + type=int, default=4, help='Use this many threads to align reads. The default is 4.' ) + + parser.add_argument( '--bismark_path', dest='bismark_path', help='Path to the bismark perl scripts' ) + + parser.add_argument( '--bowtie2', action='store_true', default=False, help='Running bismark with bowtie2 and not with bowtie.' ) + + # input options + parser.add_argument( '--own-file', dest='own_file', help='' ) + parser.add_argument( '-D', '--indexes-path', dest='index_path', help='Indexes directory; location of .ebwt and .fa files.' ) + parser.add_argument( '-O', '--output', dest='output' ) + parser.add_argument( '--output-report-file', dest='output_report_file' ) + parser.add_argument( '--suppress-header', dest='suppress_header', action="store_true" ) + + parser.add_argument( '--mate-paired', dest='mate_paired', action='store_true', help='Reads are mate-paired', default=False) + + + parser.add_argument( '-1', '--mate1', dest='mate1', + help='The forward reads file in Sanger FASTQ or FASTA format.' ) + parser.add_argument( '-2', '--mate2', dest='mate2', + help='The reverse reads file in Sanger FASTQ or FASTA format.' ) + + parser.add_argument( '--output-unmapped-reads', dest='output_unmapped_reads', + help='Additional output file with unmapped reads (single-end).' ) + parser.add_argument( '--output-unmapped-reads-l', dest='output_unmapped_reads_l', + help='File name for unmapped reads (left, paired-end).' ) + parser.add_argument( '--output-unmapped-reads-r', dest='output_unmapped_reads_r', + help='File name for unmapped reads (right, paired-end).' ) + + + parser.add_argument( '--output-suppressed-reads', dest='output_suppressed_reads', + help='Additional output file with suppressed reads (single-end).' ) + parser.add_argument( '--output-suppressed-reads-l', dest='output_suppressed_reads_l', + help='File name for suppressed reads (left, paired-end).' ) + parser.add_argument( '--output-suppressed-reads-r', dest='output_suppressed_reads_r', + help='File name for suppressed reads (right, paired-end).' ) + + + parser.add_argument( '--single-paired', dest='single_paired', + help='The single-end reads file in Sanger FASTQ or FASTA format.' 
) + + parser.add_argument( '--fastq', action='store_true', help='Query filetype is in FASTQ format') + parser.add_argument( '--fasta', action='store_true', help='Query filetype is in FASTA format') + parser.add_argument( '--phred64-quals', dest='phred64', action="store_true" ) + + + parser.add_argument( '--skip-reads', dest='skip_reads', type=int ) + parser.add_argument( '--qupto', type=int) + + + # paired end options + parser.add_argument( '-I', '--minins', dest='min_insert' ) + parser.add_argument( '-X', '--maxins', dest='max_insert' ) + parser.add_argument( '--no-mixed', dest='no_mixed', action="store_true" ) + parser.add_argument( '--no-discordant', dest='no_discordant', action="store_true" ) + + #parse general options + # default 20 + parser.add_argument( '--seed-len', dest='seed_len', type=int) + # default 15 + parser.add_argument( '--seed-extention-attempts', dest='seed_extention_attempts', type=int ) + # default 0 + parser.add_argument( '--seed-mismatches', dest='seed_mismatches', type=int ) + # default 2 + parser.add_argument( '--max-reseed', dest='max_reseed', type=int ) + """ + # default 70 + parser.add_argument( '--maqerr', dest='maqerr', type=int ) + """ + + """ + The number of megabytes of memory a given thread is given to store path + descriptors in --best mode. Best-first search must keep track of many paths + at once to ensure it is always extending the path with the lowest cumulative + cost. Bowtie tries to minimize the memory impact of the descriptors, but + they can still grow very large in some cases. If you receive an error message + saying that chunk memory has been exhausted in --best mode, try adjusting + this parameter up to dedicate more memory to the descriptors. Default: 512. + """ + parser.add_argument( '--chunkmbs', type=int, default=512 ) + + args = parser.parse_args() + + # Create bismark index if necessary. + index_dir = "" + if args.own_file: + """ + Create a temporary index with the offered files from the user. + Utilizing the script: bismark_genome_preparation + bismark_genome_preparation --bowtie2 hg19/ + """ + tmp_index_dir = tempfile.mkdtemp() + index_path = os.path.join( tmp_index_dir, '.'.join( os.path.split( args.own_file )[1].split( '.' )[:-1] ) ) + try: + """ + Create a hard link pointing to args.own_file named 'index_path'.fa. 
+ """ + os.symlink( args.own_file, index_path + '.fa' ) + except Exception, e: + if os.path.exists( tmp_index_dir ): + shutil.rmtree( tmp_index_dir ) + stop_err( 'Error in linking the reference database.\n' + str( e ) ) + # bismark_genome_preparation needs the complete path to the folder in which the database is stored + if args.bowtie2: + cmd_index = 'bismark_genome_preparation --bowtie2 %s ' % ( tmp_index_dir ) + else: + cmd_index = 'bismark_genome_preparation %s ' % ( tmp_index_dir ) + if args.bismark_path: + # add the path to the bismark perl scripts, that is needed for galaxy + cmd_index = '%s/%s' % (args.bismark_path, cmd_index) + try: + tmp = tempfile.NamedTemporaryFile( dir=tmp_index_dir ).name + tmp_stderr = open( tmp, 'wb' ) + proc = subprocess.Popen( args=cmd_index, shell=True, cwd=tmp_index_dir, stdout=open(os.devnull, 'wb'), stderr=tmp_stderr.fileno() ) + returncode = proc.wait() + tmp_stderr.close() + # get stderr, allowing for case where it's very large + tmp_stderr = open( tmp, 'rb' ) + stderr = '' + buffsize = 1048576 + try: + while True: + stderr += tmp_stderr.read( buffsize ) + if not stderr or len( stderr ) % buffsize != 0: + break + except OverflowError: + pass + tmp_stderr.close() + if returncode != 0: + raise Exception, stderr + except Exception, e: + if os.path.exists( tmp_index_dir ): + shutil.rmtree( tmp_index_dir ) + stop_err( 'Error indexing reference sequence\n' + str( e ) ) + index_dir = tmp_index_dir + else: + index_dir = args.index_path + + # Build bismark command + tmp_bismark_dir = tempfile.mkdtemp() + output_dir = os.path.join( tmp_bismark_dir, 'results') + cmd = 'bismark %(args)s --temp_dir %(tmp_bismark_dir)s -o %(output_dir)s --quiet %(genome_folder)s %(reads)s' + if args.bismark_path: + # add the path to the bismark perl scripts, that is needed for galaxy + cmd = '%s/%s' % (args.bismark_path, cmd) + + arguments = { + 'genome_folder': index_dir, + 'args': '', + 'tmp_bismark_dir': tmp_bismark_dir, + 'output_dir': output_dir, + } + + additional_opts = '' + # Set up the reads + if args.mate_paired: + # paired-end reads library + reads = '-1 %s ' % ( args.mate1 ) + reads += ' -2 %s ' % ( args.mate2 ) + additional_opts += ' -I %s -X %s ' % (args.min_insert, args.max_insert) + else: + # single paired reads library + reads = ' %s ' % ( args.single_paired ) + + + if not args.bowtie2: + # use bowtie specific options + additional_opts += ' --best ' + if args.seed_mismatches: + # --seedmms + additional_opts += ' -n %s ' % args.seed_mismatches + if args.seed_len: + # --seedlen + additional_opts += ' -l %s ' % args.seed_len + + # alignment options + if args.bowtie2: + additional_opts += ' -p %s --bowtie2 ' % args.num_threads + if args.seed_mismatches: + additional_opts += ' -N %s ' % args.seed_mismatches + if args.seed_len: + additional_opts += ' -L %s ' % args.seed_len + if args.seed_extention_attempts: + additional_opts += ' -D %s ' % args.seed_extention_attempts + if args.max_reseed: + additional_opts += ' -R %s ' % args.max_reseed + if args.no_discordant: + additional_opts += ' --no-discordant ' + if args.no_mixed: + additional_opts += ' --no-mixed ' + """ + if args.maqerr: + additional_opts += ' --maqerr %s ' % args.maqerr + """ + if args.skip_reads: + additional_opts += ' --skip %s ' % args.skip_reads + if args.qupto: + additional_opts += ' --qupto %s ' % args.qupto + if args.phred64: + additional_opts += ' --phred64-quals ' + if args.suppress_header: + additional_opts += ' --sam-no-hd ' + if args.output_unmapped_reads or ( args.output_unmapped_reads_l and 
args.output_unmapped_reads_r): + additional_opts += ' --un ' + if args.output_suppressed_reads or ( args.output_suppressed_reads_l and args.output_suppressed_reads_r): + additional_opts += ' --ambiguous ' + + arguments.update( {'args': additional_opts, 'reads': reads} ) + + # Final command: + cmd = cmd % arguments + + # Run + try: + tmp_out = tempfile.NamedTemporaryFile().name + tmp_stdout = open( tmp_out, 'wb' ) + tmp_err = tempfile.NamedTemporaryFile().name + tmp_stderr = open( tmp_err, 'wb' ) + proc = subprocess.Popen( args=cmd, shell=True, cwd=".", stdout=tmp_stdout, stderr=tmp_stderr ) + returncode = proc.wait() + tmp_stderr.close() + # get stderr, allowing for case where it's very large + tmp_stderr = open( tmp_err, 'rb' ) + stderr = '' + buffsize = 1048576 + try: + while True: + stderr += tmp_stderr.read( buffsize ) + if not stderr or len( stderr ) % buffsize != 0: + break + except OverflowError: + pass + tmp_stdout.close() + tmp_stderr.close() + if returncode != 0: + raise Exception, stderr + + # TODO: look for errors in program output. + except Exception, e: + stop_err( 'Error in bismark:\n' + str( e ) ) + + + # collect and copy output files + """ + if args.output_report_file: + output_report_file = open(args.output_report_file, 'w+') + for line in fileinput.input(glob( os.path.join( output_dir, '*.txt') )): + output_report_file.write(line) + output_report_file.close() + """ + + if args.output_suppressed_reads: + shutil.move( glob(os.path.join( output_dir, '*ambiguous_reads.txt'))[0], args.output_suppressed_reads ) + if args.output_suppressed_reads_l: + shutil.move( glob(os.path.join( output_dir, '*ambiguous_reads_1.txt'))[0], args.output_suppressed_reads_l ) + if args.output_suppressed_reads_r: + shutil.move( glob(os.path.join( output_dir, '*ambiguous_reads_2.txt'))[0], args.output_suppressed_reads_r ) + + if args.output_unmapped_reads: + shutil.move( glob(os.path.join( output_dir, '*unmapped_reads.txt'))[0], args.output_unmapped_reads ) + if args.output_unmapped_reads_l: + shutil.move( glob(os.path.join( output_dir, '*unmapped_reads_1.txt'))[0], args.output_unmapped_reads_l ) + if args.output_unmapped_reads_r: + shutil.move( glob(os.path.join( output_dir, '*unmapped_reads_2.txt'))[0], args.output_unmapped_reads_r ) + + shutil.move( glob( os.path.join( output_dir, '*.sam'))[0] , args.output) + + # Clean up temp dirs + if args.own_file: + if os.path.exists( tmp_index_dir ): + shutil.rmtree( tmp_index_dir ) + if os.path.exists( tmp_bismark_dir ): + shutil.rmtree( tmp_bismark_dir ) + +if __name__=="__main__": __main__() diff --git a/lib/tool_shed/test/test_data/repos/bismark/0/bowtie2_indices.loc b/lib/tool_shed/test/test_data/repos/bismark/0/bowtie2_indices.loc new file mode 100755 index 000000000000..61663caa7c70 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/bismark/0/bowtie2_indices.loc @@ -0,0 +1,37 @@ +# bowtie2_indices.loc.sample +# This is a *.loc.sample file distributed with Galaxy that enables tools +# to use a directory of indexed data files. This one is for Bowtie2 and Tophat2. +# See the wiki: http://wiki.galaxyproject.org/Admin/NGS%20Local%20Setup +# First create these data files and save them in your own data directory structure. +# Then, create a bowtie_indices.loc file to use those indexes with tools. +# Copy this file, save it with the same name (minus the .sample), +# follow the format examples, and store the result in this directory. +# The file should include an one line entry for each index set. 
+# The path points to the "basename" for the set, not a specific file. +# It has four text columns seperated by TABS. +# +# +# +# So, for example, if you had hg18 indexes stored in: +# +# /depot/data2/galaxy/hg19/bowtie2/ +# +# containing hg19 genome and hg19.*.bt2 files, such as: +# -rw-rw-r-- 1 james james 914M Feb 10 18:56 hg19canon.fa +# -rw-rw-r-- 1 james james 914M Feb 10 18:56 hg19canon.1.bt2 +# -rw-rw-r-- 1 james james 683M Feb 10 18:56 hg19canon.2.bt2 +# -rw-rw-r-- 1 james james 3.3K Feb 10 16:54 hg19canon.3.bt2 +# -rw-rw-r-- 1 james james 683M Feb 10 16:54 hg19canon.4.bt2 +# -rw-rw-r-- 1 james james 914M Feb 10 20:45 hg19canon.rev.1.bt2 +# -rw-rw-r-- 1 james james 683M Feb 10 20:45 hg19canon.rev.2.bt2 +# +# then the bowtie2_indices.loc entry could look like this: +# +#hg19 hg19 Human (hg19) /depot/data2/galaxy/hg19/bowtie2/hg19canon +# +#More examples: +# +#mm10 mm10 Mouse (mm10) /depot/data2/galaxy/mm10/bowtie2/mm10 +#dm3 dm3 D. melanogaster (dm3) /depot/data2/galaxy/mm10/bowtie2/dm3 +# +# diff --git a/lib/tool_shed/test/test_data/repos/bismark/0/bowtie2_indices.loc.sample b/lib/tool_shed/test/test_data/repos/bismark/0/bowtie2_indices.loc.sample new file mode 100755 index 000000000000..61663caa7c70 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/bismark/0/bowtie2_indices.loc.sample @@ -0,0 +1,37 @@ +# bowtie2_indices.loc.sample +# This is a *.loc.sample file distributed with Galaxy that enables tools +# to use a directory of indexed data files. This one is for Bowtie2 and Tophat2. +# See the wiki: http://wiki.galaxyproject.org/Admin/NGS%20Local%20Setup +# First create these data files and save them in your own data directory structure. +# Then, create a bowtie_indices.loc file to use those indexes with tools. +# Copy this file, save it with the same name (minus the .sample), +# follow the format examples, and store the result in this directory. +# The file should include an one line entry for each index set. +# The path points to the "basename" for the set, not a specific file. +# It has four text columns seperated by TABS. +# +# +# +# So, for example, if you had hg18 indexes stored in: +# +# /depot/data2/galaxy/hg19/bowtie2/ +# +# containing hg19 genome and hg19.*.bt2 files, such as: +# -rw-rw-r-- 1 james james 914M Feb 10 18:56 hg19canon.fa +# -rw-rw-r-- 1 james james 914M Feb 10 18:56 hg19canon.1.bt2 +# -rw-rw-r-- 1 james james 683M Feb 10 18:56 hg19canon.2.bt2 +# -rw-rw-r-- 1 james james 3.3K Feb 10 16:54 hg19canon.3.bt2 +# -rw-rw-r-- 1 james james 683M Feb 10 16:54 hg19canon.4.bt2 +# -rw-rw-r-- 1 james james 914M Feb 10 20:45 hg19canon.rev.1.bt2 +# -rw-rw-r-- 1 james james 683M Feb 10 20:45 hg19canon.rev.2.bt2 +# +# then the bowtie2_indices.loc entry could look like this: +# +#hg19 hg19 Human (hg19) /depot/data2/galaxy/hg19/bowtie2/hg19canon +# +#More examples: +# +#mm10 mm10 Mouse (mm10) /depot/data2/galaxy/mm10/bowtie2/mm10 +#dm3 dm3 D. melanogaster (dm3) /depot/data2/galaxy/mm10/bowtie2/dm3 +# +# diff --git a/lib/tool_shed/test/test_data/repos/bismark/0/tool_data_table_conf.xml.sample b/lib/tool_shed/test/test_data/repos/bismark/0/tool_data_table_conf.xml.sample new file mode 100644 index 000000000000..9e62763c741d --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/bismark/0/tool_data_table_conf.xml.sample @@ -0,0 +1,13 @@ + + + +
+ value, dbkey, name, path + +
+ + + value, dbkey, name, path + +
+ diff --git a/lib/tool_shed/test/test_data/repos/bismark/0/tool_dependencies.xml b/lib/tool_shed/test/test_data/repos/bismark/0/tool_dependencies.xml new file mode 100644 index 000000000000..776d2aa33442 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/bismark/0/tool_dependencies.xml @@ -0,0 +1,61 @@ + + + + $REPOSITORY_INSTALL_DIR + + + + + http://downloads.sourceforge.net/project/bowtie-bio/bowtie2/2.0.0-beta7/bowtie2-2.0.0-beta7-source.zip + make + + bowtie2 + $INSTALL_DIR/bin + + + bowtie2-align + $INSTALL_DIR/bin + + + bowtie2-build + $INSTALL_DIR/bin + + chmod +x $INSTALL_DIR/bin/bowtie2 + + $INSTALL_DIR/bin + + + + +Compiling bowtie2 requires zlib and libpthread to be present on your system. + + + + + + http://downloads.sourceforge.net/project/bowtie-bio/bowtie/0.12.8/bowtie-0.12.8-src.zip + make + + bowtie + $INSTALL_DIR/bin + + + bowtie-inspect + $INSTALL_DIR/bin + + + bowtie-build + $INSTALL_DIR/bin + + + $INSTALL_DIR/bin + + + + + + + + + + diff --git a/lib/tool_shed/test/test_data/repos/bismark/1/bismark_bowtie2_wrapper.xml b/lib/tool_shed/test/test_data/repos/bismark/1/bismark_bowtie2_wrapper.xml new file mode 100644 index 000000000000..68238f4ddd85 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/bismark/1/bismark_bowtie2_wrapper.xml @@ -0,0 +1,616 @@ + + + bisulfite mapper (bowtie2) + + + SCRIPT_PATH + bowtie + bowtie2 + + + + bismark_wrapper.py + + ## Change this to accommodate the number of threads you have available. + --num-threads 4 + + --bismark_path \$SCRIPT_PATH + + --bowtie2 + + ## + ## Bismark Genome Preparation, if desired. + ## + + ## Handle reference file. + #if $refGenomeSource.genomeSource == "history": + --own-file=$refGenomeSource.ownFile + #else: + --indexes-path ${refGenomeSource.index.fields.path} + #end if + + + ## + ## Input parameters + ## + + + #if $singlePaired.sPaired == "single": + --single-paired $singlePaired.input_singles + + #if $singlePaired.input_singles.ext == "fastqillumina": + --phred64-quals + --fastq + #elif $singlePaired.input_singles.ext == "fastqsanger": + --fastq + #elif $singlePaired.input_singles.ext == "fasta": + --fasta + #end if + #else: + --mate-paired + --mate1 $singlePaired.input_mate1 + --mate2 $singlePaired.input_mate2 + + #if $singlePaired.input_mate1.ext == "fastqillumina": + --phred64-quals + --fastq + #elif $singlePaired.input_mate1.ext == "fastqsanger": + --fastq + #elif $singlePaired.input_mate1.ext == "fasta": + --fasta + #end if + + -I $singlePaired.minInsert + -X $singlePaired.maxInsert + #end if + + + ## for now hardcode the value for the required memory per thread in --best mode + --chunkmbs 512 + + + #if $params.settingsType == "custom": + + ## default 20 + --seed-len $params.seed_len + ## default 0 + --seed-mismatches $params.seed_mismatches + ## default 15 + --seed-extention-attempts $params.seed_extention_attempts + ## default 2 + --max-reseed $params.max_reseed + + ## default 70 + ##--maqerr $params.maqerr + + ## default unlimited + #if $params.qupto != 0: + --qupto $params.qupto + #end if + #if $params.skip_reads != 0: + --skip-reads $params.skip_reads + #end if + + ## if set, disable the original behaviour + $params.no_mixed + ## if set, disable the original behaviour + $params.no_discordant + + + ###if str($params.isReportOutput) == "yes": + ## --output-report-file $report_file + ###end if + + #end if + + ## + ## Output parameters. 
+ ## + --output $output + $suppress_header + + #if str( $singlePaired.sPaired ) == "single" + #if $output_unmapped_reads_l + --output-unmapped-reads $output_unmapped_reads_l + #end if + #if $output_suppressed_reads_l + --output-suppressed-reads $output_suppressed_reads_l + #end if + #else + #if $output_unmapped_reads_l and $output_unmapped_reads_r + --output-unmapped-reads-l $output_unmapped_reads_l + --output-unmapped-reads-r $output_unmapped_reads_r + #end if + #if $output_suppressed_reads_l and $output_suppressed_reads_l + --output-suppressed-reads-l $output_suppressed_reads_l + --output-suppressed-reads-r $output_suppressed_reads_r + #end if + #end if + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + (( + params['settingsType'] == "custom" and + params['suppressed_read_file'] is True + )) + + + + + + + + + + + + + + + + + singlePaired['sPaired'] == "paired" + params['settingsType'] == "custom" + params['supressed_read_file'] is True + + + + + + + + + + + + + + + + + + (( + params['settingsType'] == "custom" and + params['unmapped_read_file'] is True + )) + + + + + + + + + + + + + + + + singlePaired['sPaired'] == "paired" + params['settingsType'] == "custom" + params['unmapped_read_file'] is True + + + + + + + + + + + + + + + + + + + + + + +**What it does** + +Bismark_ is a bisulfite mapper and methylation caller. Bismark takes in FastA or FastQ files and aligns the +reads to a specified bisulfite genome. Sequence reads are transformed into a bisulfite converted forward strand +version (C->T conversion) or into a bisulfite treated reverse strand (G->A conversion of the forward strand). +Each of these reads are then aligned to bisulfite treated forward strand index of a reference genome +(C->T converted) and a bisulfite treated reverse strand index of the genome (G->A conversion of the +forward strand, by doing this alignments will produce the same positions). These 4 instances of Bowtie (1 or 2) +are run in parallel. The sequence file(s) are then read in again sequence by sequence to pull out the original +sequence from the genome and determine if there were any protected C's present or not. + +.. _Bismark: http://www.bioinformatics.babraham.ac.uk/projects/bismark/ + +As of version 0.7.0 Bismark will only run 2 alignment threads for OT and OB in parallel, the 4 strand mode can be +re-enabled by using non_directional mode. + +It is developed by Krueger F and Andrews SR. at the Babraham Institute. Krueger F, Andrews SR. (2011) Bismark: a flexible aligner and methylation caller for Bisulfite-Seq applications. Bioinformatics, 27, 1571-2. + +------ + +**Know what you are doing** + +.. class:: warningmark + +There is no such thing (yet) as an automated gearshift in short read mapping. It is all like stick-shift driving in San Francisco. In other words = running this tool with default parameters will probably not give you meaningful results. A way to deal with this is to **understand** the parameters by carefully reading the `documentation`__ and experimenting. Fortunately, Galaxy makes experimenting easy. + + .. __: http://www.bioinformatics.babraham.ac.uk/projects/bismark/ + +------ + +**Input formats** + +Bismark accepts files in either Sanger FASTQ format (galaxy type *fastqsanger*), Illumina FASTQ format (galaxy type *fastqillumina*) or FASTA format (galaxy type *fasta*). Use the FASTQ Groomer to prepare your files. 
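+
+For illustration (this helper is not part of the wrapper; the function name is
+invented), the difference between the two FASTQ encodings is a constant offset on
+the quality characters, so a Sanger-style (Phred+33) quality string can be derived
+from an Illumina (Phred+64) one like this::
+
+    def phred64_to_phred33(quality_string):
+        # Phred+64 stores a quality Q as chr(Q + 64); Sanger FASTQ (Phred+33)
+        # stores the same Q as chr(Q + 33), i.e. every character shifts down by 31.
+        return ''.join(chr(ord(char) - 31) for char in quality_string)
+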
+
+------
+
+**A Note on Built-in Reference Genomes**
+
+The default variant for all genomes is "Full", defined as all primary chromosomes (or scaffolds/contigs) including mitochondrial plus associated unmapped, plasmid, and other segments. When only one version of a genome is available in this tool, it represents the default "Full" variant. Some genomes will have more than one variant available. The "Canonical Male" or sometimes simply "Canonical" variant contains the primary chromosomes for a genome. For example, a human "Canonical" variant contains chr1-chr22, chrX, chrY, and chrM. The "Canonical Female" variant contains the primary chromosomes excluding chrY.
+
+------
+
+The final output of Bismark is in SAM format by default.
+
+**Outputs**
+
+The output is in SAM format, and has the following columns::
+
+    Column  Description
+  --------  --------------------------------------------------------
+   1 QNAME  seq-ID
+   2 FLAG   this flag tries to take the strand a bisulfite read
+            originated from into account
+            (this is different from ordinary DNA alignment flags!)
+   3 RNAME  chromosome
+   4 POS    start position
+   5 MAPQ   always 255
+   6 CIGAR  extended CIGAR string
+   7 MRNM   Mate Reference sequence NaMe ('=' if same as RNAME)
+   8 MPOS   1-based Mate POSition
+   9 ISIZE  Inferred insert SIZE
+  10 SEQ    query SEQuence on the same strand as the reference
+  11 QUAL   Phred33 scale
+  12 NM-tag edit distance to the reference
+  13 XX-tag base-by-base mismatches to the reference.
+            This does not include indels.
+  14 XM-tag methylation call string
+  15 XR-tag read conversion state for the alignment
+  16 XG-tag genome conversion state for the alignment
+
+
+Each read of paired-end alignments is written out in a separate line in the above format.
+
+
+It looks like this (scroll sideways to see the entire example)::
+
+  QNAME  FLAG  RNAME  POS  MAPQ  CIGAR  MRNM  MPOS  ISIZE  SEQ  QUAL  OPT
+  HWI-EAS91_1_30788AAXX:1:1:1761:343  4  *  0  0  *  *  0  0  AAAAAAANNAAAAAAAAAAAAAAAAAAAAAAAAAAACNNANNGAGTNGNNNNNNNGCTTCCCACAGNNCTGG  hhhhhhh;;hhhhhhhhhhh^hOhhhhghhhfhhhgh;;h;;hhhh;h;;;;;;;hhhhhhghhhh;;Phhh
+  HWI-EAS91_1_30788AAXX:1:1:1578:331  4  *  0  0  *  *  0  0  GTATAGANNAATAAGAAAAAAAAAAATGAAGACTTTCNNANNTCTGNANNNNNNNTCTTTTTTCAGNNGTAG  hhhhhhh;;hhhhhhhhhhhhhhhhhhhhhhhhhhhh;;h;;hhhh;h;;;;;;;hhhhhhhhhhh;;hhVh
+
+------
+
+**Bismark settings**
+
+All of the options have a default value. You can change any of them. If any Bismark function is missing please contact the tool author or your Galaxy admin.
+
+------
+
+**Bismark parameter list**
+
+This is an exhaustive list of Bismark options:
+
+------
+
+**OPTIONS**
+
+
+Input::
+
+  --singles              A comma- or space-separated list of files containing the reads to be aligned (e.g.
+                         lane1.fq,lane2.fq lane3.fq). Reads may be a mix of different lengths. Bismark will
+                         produce one mapping result and one report file per input file.
+
+  -1 mates1              Comma-separated list of files containing the #1 mates (filename usually includes
+                         "_1"), e.g. flyA_1.fq,flyB_1.fq. Sequences specified with this option must
+                         correspond file-for-file and read-for-read with those specified in mates2.
+                         Reads may be a mix of different lengths. Bismark will produce one mapping result
+                         and one report file per paired-end input file pair.
+
+  -2 mates2              Comma-separated list of files containing the #2 mates (filename usually includes
+                         "_2"), e.g. flyA_2.fq,flyB_2.fq. Sequences specified with this option must
+                         correspond file-for-file and read-for-read with those specified in mates1.
+                         Reads may be a mix of different lengths.
+
+  -q/--fastq             The query input files (specified as mate1,mate2 or singles) are FASTQ
+                         files (usually having extension .fq or .fastq). This is the default. See also
+                         --solexa-quals.
+
+  -f/--fasta             The query input files (specified as mate1,mate2 or singles) are FASTA
+                         files (usually having extension .fa, .mfa, .fna or similar). All quality values
+                         are assumed to be 40 on the Phred scale.
+
+  -s/--skip INT          Skip (i.e. do not align) the first INT reads or read pairs from the input.
+
+  -u/--upto INT          Only aligns the first INT reads or read pairs from the input. Default: no limit.
+
+  --phred33-quals        FASTQ qualities are ASCII chars equal to the Phred quality plus 33. Default: on.
+
+  --phred64-quals        FASTQ qualities are ASCII chars equal to the Phred quality plus 64. Default: off.
+
+  --solexa-quals         Convert FASTQ qualities from solexa-scaled (which can be negative) to phred-scaled
+                         (which can't). The formula for conversion is:
+                         phred-qual = 10 * log(1 + 10 ** (solexa-qual/10.0)) / log(10). Used with -q. This
+                         is usually the right option for use with (unconverted) reads emitted by the GA
+                         Pipeline versions prior to 1.3. Works only for Bowtie 1. Default: off.
+
+  --solexa1.3-quals      Same as --phred64-quals. This is usually the right option for use with (unconverted)
+                         reads emitted by GA Pipeline version 1.3 or later. Default: off.
+
+
+Alignment::
+
+  -n/--seedmms INT       The maximum number of mismatches permitted in the "seed", i.e. the first L base pairs
+                         of the read (where L is set with -l/--seedlen). This may be 0, 1, 2 or 3 and the
+                         default is 1. This option is only available for Bowtie 1 (for Bowtie 2 see -N).
+
+  -l/--seedlen           The "seed length"; i.e., the number of bases of the high quality end of the read to
+                         which the -n ceiling applies. The default is 28. Bowtie (and thus Bismark) is faster for
+                         larger values of -l. This option is only available for Bowtie 1 (for Bowtie 2 see -L).
+
+  -e/--maqerr INT        Maximum permitted total of quality values at all mismatched read positions throughout
+                         the entire alignment, not just in the "seed". The default is 70. Like Maq, bowtie rounds
+                         quality values to the nearest 10 and saturates at 30. This value is not relevant for
+                         Bowtie 2.
+
+  --chunkmbs INT         The number of megabytes of memory a given thread is given to store path descriptors in
+                         --best mode. Best-first search must keep track of many paths at once to ensure it is
+                         always extending the path with the lowest cumulative cost. Bowtie tries to minimize the
+                         memory impact of the descriptors, but they can still grow very large in some cases. If
+                         you receive an error message saying that chunk memory has been exhausted in --best mode,
+                         try adjusting this parameter up to dedicate more memory to the descriptors. This value
+                         is not relevant for Bowtie 2. Default: 512.
+
+  -I/--minins INT        The minimum insert size for valid paired-end alignments. E.g. if -I 60 is specified and
+                         a paired-end alignment consists of two 20-bp alignments in the appropriate orientation
+                         with a 20-bp gap between them, that alignment is considered valid (as long as -X is also
+                         satisfied). A 19-bp gap would not be valid in that case. Default: 0.
+
+  -X/--maxins INT        The maximum insert size for valid paired-end alignments. E.g. if -X 100 is specified and
+                         a paired-end alignment consists of two 20-bp alignments in the proper orientation with a
+                         60-bp gap between them, that alignment is considered valid (as long as -I is also satisfied).
+                         A 61-bp gap would not be valid in that case. Default: 500.
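The -I/--minins and -X/--maxins constraints above amount to a simple range check on the outer fragment length (mate + gap + mate). A short sketch of the documented -X example, assuming nothing beyond the text above (the function name ``insert_size_ok`` is illustrative only)::

    def insert_size_ok(fragment_length, min_insert=0, max_insert=500):
        # A paired-end alignment is valid when -I <= fragment length <= -X.
        return min_insert <= fragment_length <= max_insert

    # Two 20-bp mates with a 60-bp gap span 20 + 60 + 20 = 100 bp:
    assert insert_size_ok(100, max_insert=100)       # valid with -X 100
    assert not insert_size_ok(101, max_insert=100)   # a 61-bp gap fails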
+
+
+
+Output::
+
+  --non_directional      The sequencing library was constructed in a non strand-specific manner; alignments to all four
+                         bisulfite strands will be reported. Default: OFF.
+
+                         (The current Illumina protocol for BS-Seq is directional, in which case the strands complementary
+                         to the original strands are merely theoretical and should not exist in reality. Specifying directional
+                         alignments (which is the default) will only run 2 alignment threads to the original top (OT)
+                         or bottom (OB) strands in parallel and report these alignments. This is the recommended option
+                         for strand-specific libraries).
+
+  --sam-no-hd            Suppress SAM header lines (starting with @). This might be useful when very large input files are
+                         split up into several smaller files to run concurrently and the output files are to be merged.
+
+  --quiet                Print nothing besides alignments.
+
+  --vanilla              Performs bisulfite mapping with Bowtie 1 and prints the 'old' output (as in Bismark 0.5.X) instead
+                         of SAM format output.
+
+  -un/--unmapped         Write all reads that could not be aligned to a file in the output directory. Written reads will
+                         appear as they did in the input, without any translation of quality values that may have
+                         taken place within Bowtie or Bismark. Paired-end reads will be written to two parallel files with _1
+                         and _2 inserted in their filenames, i.e. _unmapped_reads_1.txt and _unmapped_reads_2.txt. Reads
+                         with more than one valid alignment with the same number of lowest mismatches (ambiguous mapping)
+                         are also written to _unmapped_reads.txt unless the option --ambiguous is specified as well.
+
+  --ambiguous            Write all reads which produce more than one valid alignment with the same number of lowest
+                         mismatches or other reads that fail to align uniquely to a file in the output directory.
+                         Written reads will appear as they did in the input, without any of the translation of quality
+                         values that may have taken place within Bowtie or Bismark. Paired-end reads will be written to two
+                         parallel files with _1 and _2 inserted in their filenames, i.e. _ambiguous_reads_1.txt and
+                         _ambiguous_reads_2.txt. These reads are not written to the file specified with --un.
+
+  -o/--output_dir DIR    Write all output files into this directory. By default the output files will be written into
+                         the same folder as the input file(s). If the specified folder does not exist, Bismark will attempt
+                         to create it first. The path to the output folder can be either relative or absolute.
+
+  --temp_dir DIR         Write temporary files to this directory instead of into the same directory as the input files. If
+                         the specified folder does not exist, Bismark will attempt to create it first. The path to the
+                         temporary folder can be either relative or absolute.
+
+------
+
+Bowtie 2 alignment options::
+
+  -N INT                 Sets the number of mismatches allowed in a seed alignment during multiseed alignment.
+                         Can be set to 0 or 1. Setting this higher makes alignment slower (often much slower)
+                         but increases sensitivity. Default: 0. This option is only available for Bowtie 2 (for
+                         Bowtie 1 see -n).
+
+  -L INT                 Sets the length of the seed substrings to align during multiseed alignment. Smaller values
+                         make alignment slower but more sensitive. Default: the --sensitive preset of Bowtie 2 is
+                         used by default, which sets -L to 20. This option is only available for Bowtie 2 (for
+                         Bowtie 1 see -l).
+
+  --ignore-quals         When calculating a mismatch penalty, always consider the quality value at the mismatched
+                         position to be the highest possible, regardless of the actual value. I.e. input is treated
+                         as though all quality values are high. This is also the default behavior when the input
+                         doesn't specify quality values (e.g. in -f mode). This option is invariable and on by default.
+
+
+Bowtie 2 paired-end options::
+
+  --no-mixed             This option disables Bowtie 2's behavior to try to find alignments for the individual mates if
+                         it cannot find a concordant or discordant alignment for a pair. This option is invariable
+                         and on by default.
+
+  --no-discordant        Normally, Bowtie 2 looks for discordant alignments if it cannot find any concordant alignments.
+                         A discordant alignment is an alignment where both mates align uniquely, but that does not
+                         satisfy the paired-end constraints (--fr/--rf/--ff, -I, -X). This option disables that behavior
+                         and it is on by default.
+
+
+Bowtie 2 effort options::
+
+  -D INT                 Up to INT consecutive seed extension attempts can "fail" before Bowtie 2 moves on, using
+                         the alignments found so far. A seed extension "fails" if it does not yield a new best or a
+                         new second-best alignment. Default: 15.
+
+  -R INT                 INT is the maximum number of times Bowtie 2 will "re-seed" reads with repetitive seeds.
+                         When "re-seeding," Bowtie 2 simply chooses a new set of reads (same length, same number of
+                         mismatches allowed) at different offsets and searches for more alignments. A read is considered
+                         to have repetitive seeds if the total number of seed hits divided by the number of seeds
+                         that aligned at least once is greater than 300. Default: 2.
+
+
+Bowtie 2 Scoring options::
+
+  --score_min "func"     Sets a function governing the minimum alignment score needed for an alignment to be considered
+                         "valid" (i.e. good enough to report). This is a function of read length. For instance, specifying
+                         L,0,-0.2 sets the minimum-score function f to f(x) = 0 + -0.2 * x, where x is the read length.
+                         See also: setting function options at http://bowtie-bio.sourceforge.net/bowtie2. The default is
+                         L,0,-0.2.
+
+
+Bowtie 2 Reporting options::
+
+  --most_valid_alignments INT This used to be the Bowtie 2 parameter -M. As of Bowtie 2 version 2.0.0 beta7 the option -M is
+                         deprecated. It will be removed in subsequent versions. What used to be called -M mode is still the
+                         default mode, but adjusting the -M setting is deprecated. Use the -D and -R options to adjust the
+                         effort expended to find valid alignments.
+
+                         For reference, this used to be the old (now deprecated) description of -M:
+                         Bowtie 2 searches for at most INT+1 distinct, valid alignments for each read. The search terminates when it
+                         can't find more distinct valid alignments, or when it finds INT+1 distinct alignments, whichever
+                         happens first. Only the best alignment is reported. Information from the other alignments is used to
+                         estimate mapping quality and to set SAM optional fields, such as AS:i and XS:i. Increasing -M makes
+                         Bowtie 2 slower, but increases the likelihood that it will pick the correct alignment for a read that
+                         aligns many places. For reads that have more than INT+1 distinct, valid alignments, Bowtie 2 does not
+                         guarantee that the alignment reported is the best possible in terms of alignment score. -M is
+                         always used and its default value is set to 10.
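The --score_min triple "L,a,b" above defines a linear threshold f(x) = a + b * x in the read length x. A quick worked example for the default L,0,-0.2 (the function name below is illustrative only)::

    def min_alignment_score(read_length, a=0.0, b=-0.2):
        # Bowtie 2's "L,0,-0.2" linear minimum-score function.
        return a + b * read_length

    # A 50-bp read must score at least -10 to be reported:
    assert min_alignment_score(50) == -10.0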
+ + + diff --git a/lib/tool_shed/test/test_data/repos/bismark/1/bismark_bowtie_wrapper.xml b/lib/tool_shed/test/test_data/repos/bismark/1/bismark_bowtie_wrapper.xml new file mode 100644 index 000000000000..6e4e4def6200 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/bismark/1/bismark_bowtie_wrapper.xml @@ -0,0 +1,614 @@ + + + bisulfite mapper (bowtie) + + + SCRIPT_PATH + bowtie + bowtie2 + + + + bismark_wrapper.py + + ## Change this to accommodate the number of threads you have available. + --num-threads 4 + + --bismark_path \$SCRIPT_PATH + + ## + ## Bismark Genome Preparation, if desired. + ## + + ## Handle reference file. + #if $refGenomeSource.genomeSource == "history": + --own-file=$refGenomeSource.ownFile + #else: + --indexes-path ${refGenomeSource.index.fields.path} + #end if + + + ## + ## Input parameters + ## + + + #if $singlePaired.sPaired == "single": + --single-paired $singlePaired.input_singles + + #if $singlePaired.input_singles.ext == "fastqillumina": + --phred64-quals + --fastq + #elif $singlePaired.input_singles.ext == "fastqsanger": + --fastq + #elif $singlePaired.input_singles.ext == "fasta": + --fasta + #end if + #else: + --mate-paired + --mate1 $singlePaired.input_mate1 + --mate2 $singlePaired.input_mate2 + + #if $singlePaired.input_mate1.ext == "fastqillumina": + --phred64-quals + --fastq + #elif $singlePaired.input_mate1.ext == "fastqsanger": + --fastq + #elif $singlePaired.input_mate1.ext == "fasta": + --fasta + #end if + + -I $singlePaired.minInsert + -X $singlePaired.maxInsert + #end if + + + ## for now hardcode the value for the required memory per thread in --best mode + --chunkmbs 512 + + + #if $params.settingsType == "custom": + + ## default 20 + --seed-len $params.seed_len + ## default 0 + --seed-mismatches $params.seed_mismatches + ## default 15 + --seed-extention-attempts $params.seed_extention_attempts + ## default 2 + --max-reseed $params.max_reseed + + ## default 70 + ##--maqerr $params.maqerr + + ## default unlimited + #if $params.qupto != 0: + --qupto $params.qupto + #end if + #if $params.skip_reads != 0: + --skip-reads $params.skip_reads + #end if + + ## if set, disable the original behaviour + $params.no_mixed + ## if set, disable the original behaviour + $params.no_discordant + + + ###if str($params.isReportOutput) == "yes": + ## --output-report-file $report_file + ###end if + + #end if + + ## + ## Output parameters. 
+ ## + --output $output + $suppress_header + + #if str( $singlePaired.sPaired ) == "single" + #if $output_unmapped_reads_l + --output-unmapped-reads $output_unmapped_reads_l + #end if + #if $output_suppressed_reads_l + --output-suppressed-reads $output_suppressed_reads_l + #end if + #else + #if $output_unmapped_reads_l and $output_unmapped_reads_r + --output-unmapped-reads-l $output_unmapped_reads_l + --output-unmapped-reads-r $output_unmapped_reads_r + #end if + #if $output_suppressed_reads_l and $output_suppressed_reads_l + --output-suppressed-reads-l $output_suppressed_reads_l + --output-suppressed-reads-r $output_suppressed_reads_r + #end if + #end if + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + (( + params['settingsType'] == "custom" and + params['suppressed_read_file'] is True + )) + + + + + + + + + + + + + + + + + singlePaired['sPaired'] == "paired" + params['settingsType'] == "custom" + params['supressed_read_file'] is True + + + + + + + + + + + + + + + + + + (( + params['settingsType'] == "custom" and + params['unmapped_read_file'] is True + )) + + + + + + + + + + + + + + + + singlePaired['sPaired'] == "paired" + params['settingsType'] == "custom" + params['unmapped_read_file'] is True + + + + + + + + + + + + + + + + + + + + + + +**What it does** + +Bismark_ is a bisulfite mapper and methylation caller. Bismark takes in FastA or FastQ files and aligns the +reads to a specified bisulfite genome. Sequence reads are transformed into a bisulfite converted forward strand +version (C->T conversion) or into a bisulfite treated reverse strand (G->A conversion of the forward strand). +Each of these reads are then aligned to bisulfite treated forward strand index of a reference genome +(C->T converted) and a bisulfite treated reverse strand index of the genome (G->A conversion of the +forward strand, by doing this alignments will produce the same positions). These 4 instances of Bowtie (1 or 2) +are run in parallel. The sequence file(s) are then read in again sequence by sequence to pull out the original +sequence from the genome and determine if there were any protected C's present or not. + +.. _Bismark: http://www.bioinformatics.babraham.ac.uk/projects/bismark/ + +As of version 0.7.0 Bismark will only run 2 alignment threads for OT and OB in parallel, the 4 strand mode can be +re-enabled by using non_directional mode. + +It is developed by Krueger F and Andrews SR. at the Babraham Institute. Krueger F, Andrews SR. (2011) Bismark: a flexible aligner and methylation caller for Bisulfite-Seq applications. Bioinformatics, 27, 1571-2. + +------ + +**Know what you are doing** + +.. class:: warningmark + +There is no such thing (yet) as an automated gearshift in short read mapping. It is all like stick-shift driving in San Francisco. In other words = running this tool with default parameters will probably not give you meaningful results. A way to deal with this is to **understand** the parameters by carefully reading the `documentation`__ and experimenting. Fortunately, Galaxy makes experimenting easy. + + .. __: http://www.bioinformatics.babraham.ac.uk/projects/bismark/ + +------ + +**Input formats** + +Bismark accepts files in either Sanger FASTQ format (galaxy type *fastqsanger*), Illumina FASTQ format (galaxy type *fastqillumina*) or FASTA format (galaxy type *fasta*). Use the FASTQ Groomer to prepare your files. 
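The C->T and G->A transformations described under "What it does" above are plain character substitutions. A minimal sketch of the conversion idea (a hypothetical helper, shown only to make the strand logic concrete)::

    def bisulfite_convert(seq):
        # Forward-strand C->T conversion, and the G->A conversion that is
        # equivalent to reverse-complementing and then C->T converting.
        seq = seq.upper()
        return seq.replace("C", "T"), seq.replace("G", "A")

    ct, ga = bisulfite_convert("ACGTCG")
    assert ct == "ATGTTG" and ga == "ACATCA"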
+
+------
+
+**A Note on Built-in Reference Genomes**
+
+The default variant for all genomes is "Full", defined as all primary chromosomes (or scaffolds/contigs) including mitochondrial plus associated unmapped, plasmid, and other segments. When only one version of a genome is available in this tool, it represents the default "Full" variant. Some genomes will have more than one variant available. The "Canonical Male" or sometimes simply "Canonical" variant contains the primary chromosomes for a genome. For example, a human "Canonical" variant contains chr1-chr22, chrX, chrY, and chrM. The "Canonical Female" variant contains the primary chromosomes excluding chrY.
+
+------
+
+The final output of Bismark is in SAM format by default.
+
+**Outputs**
+
+The output is in SAM format, and has the following columns::
+
+    Column  Description
+  --------  --------------------------------------------------------
+   1 QNAME  seq-ID
+   2 FLAG   this flag tries to take the strand a bisulfite read
+            originated from into account
+            (this is different from ordinary DNA alignment flags!)
+   3 RNAME  chromosome
+   4 POS    start position
+   5 MAPQ   always 255
+   6 CIGAR  extended CIGAR string
+   7 MRNM   Mate Reference sequence NaMe ('=' if same as RNAME)
+   8 MPOS   1-based Mate POSition
+   9 ISIZE  Inferred insert SIZE
+  10 SEQ    query SEQuence on the same strand as the reference
+  11 QUAL   Phred33 scale
+  12 NM-tag edit distance to the reference
+  13 XX-tag base-by-base mismatches to the reference.
+            This does not include indels.
+  14 XM-tag methylation call string
+  15 XR-tag read conversion state for the alignment
+  16 XG-tag genome conversion state for the alignment
+
+
+Each read of paired-end alignments is written out in a separate line in the above format.
+
+
+It looks like this (scroll sideways to see the entire example)::
+
+  QNAME  FLAG  RNAME  POS  MAPQ  CIGAR  MRNM  MPOS  ISIZE  SEQ  QUAL  OPT
+  HWI-EAS91_1_30788AAXX:1:1:1761:343  4  *  0  0  *  *  0  0  AAAAAAANNAAAAAAAAAAAAAAAAAAAAAAAAAAACNNANNGAGTNGNNNNNNNGCTTCCCACAGNNCTGG  hhhhhhh;;hhhhhhhhhhh^hOhhhhghhhfhhhgh;;h;;hhhh;h;;;;;;;hhhhhhghhhh;;Phhh
+  HWI-EAS91_1_30788AAXX:1:1:1578:331  4  *  0  0  *  *  0  0  GTATAGANNAATAAGAAAAAAAAAAATGAAGACTTTCNNANNTCTGNANNNNNNNTCTTTTTTCAGNNGTAG  hhhhhhh;;hhhhhhhhhhhhhhhhhhhhhhhhhhhh;;h;;hhhh;h;;;;;;;hhhhhhhhhhh;;hhVh
+
+------
+
+**Bismark settings**
+
+All of the options have a default value. You can change any of them. If any Bismark function is missing please contact the tool author or your Galaxy admin.
+
+------
+
+**Bismark parameter list**
+
+This is an exhaustive list of Bismark options:
+
+------
+
+**OPTIONS**
+
+
+Input::
+
+  --singles              A comma- or space-separated list of files containing the reads to be aligned (e.g.
+                         lane1.fq,lane2.fq lane3.fq). Reads may be a mix of different lengths. Bismark will
+                         produce one mapping result and one report file per input file.
+
+  -1 mates1              Comma-separated list of files containing the #1 mates (filename usually includes
+                         "_1"), e.g. flyA_1.fq,flyB_1.fq. Sequences specified with this option must
+                         correspond file-for-file and read-for-read with those specified in mates2.
+                         Reads may be a mix of different lengths. Bismark will produce one mapping result
+                         and one report file per paired-end input file pair.
+
+  -2 mates2              Comma-separated list of files containing the #2 mates (filename usually includes
+                         "_2"), e.g. flyA_2.fq,flyB_2.fq. Sequences specified with this option must
+                         correspond file-for-file and read-for-read with those specified in mates1.
+                         Reads may be a mix of different lengths.
+
+  -q/--fastq             The query input files (specified as mate1,mate2 or singles) are FASTQ
+                         files (usually having extension .fq or .fastq). This is the default. See also
+                         --solexa-quals.
+
+  -f/--fasta             The query input files (specified as mate1,mate2 or singles) are FASTA
+                         files (usually having extension .fa, .mfa, .fna or similar). All quality values
+                         are assumed to be 40 on the Phred scale.
+
+  -s/--skip INT          Skip (i.e. do not align) the first INT reads or read pairs from the input.
+
+  -u/--upto INT          Only aligns the first INT reads or read pairs from the input. Default: no limit.
+
+  --phred33-quals        FASTQ qualities are ASCII chars equal to the Phred quality plus 33. Default: on.
+
+  --phred64-quals        FASTQ qualities are ASCII chars equal to the Phred quality plus 64. Default: off.
+
+  --solexa-quals         Convert FASTQ qualities from solexa-scaled (which can be negative) to phred-scaled
+                         (which can't). The formula for conversion is:
+                         phred-qual = 10 * log(1 + 10 ** (solexa-qual/10.0)) / log(10). Used with -q. This
+                         is usually the right option for use with (unconverted) reads emitted by the GA
+                         Pipeline versions prior to 1.3. Works only for Bowtie 1. Default: off.
+
+  --solexa1.3-quals      Same as --phred64-quals. This is usually the right option for use with (unconverted)
+                         reads emitted by GA Pipeline version 1.3 or later. Default: off.
+
+
+Alignment::
+
+  -n/--seedmms INT       The maximum number of mismatches permitted in the "seed", i.e. the first L base pairs
+                         of the read (where L is set with -l/--seedlen). This may be 0, 1, 2 or 3 and the
+                         default is 1. This option is only available for Bowtie 1 (for Bowtie 2 see -N).
+
+  -l/--seedlen           The "seed length"; i.e., the number of bases of the high quality end of the read to
+                         which the -n ceiling applies. The default is 28. Bowtie (and thus Bismark) is faster for
+                         larger values of -l. This option is only available for Bowtie 1 (for Bowtie 2 see -L).
+
+  -e/--maqerr INT        Maximum permitted total of quality values at all mismatched read positions throughout
+                         the entire alignment, not just in the "seed". The default is 70. Like Maq, bowtie rounds
+                         quality values to the nearest 10 and saturates at 30. This value is not relevant for
+                         Bowtie 2.
+
+  --chunkmbs INT         The number of megabytes of memory a given thread is given to store path descriptors in
+                         --best mode. Best-first search must keep track of many paths at once to ensure it is
+                         always extending the path with the lowest cumulative cost. Bowtie tries to minimize the
+                         memory impact of the descriptors, but they can still grow very large in some cases. If
+                         you receive an error message saying that chunk memory has been exhausted in --best mode,
+                         try adjusting this parameter up to dedicate more memory to the descriptors. This value
+                         is not relevant for Bowtie 2. Default: 512.
+
+  -I/--minins INT        The minimum insert size for valid paired-end alignments. E.g. if -I 60 is specified and
+                         a paired-end alignment consists of two 20-bp alignments in the appropriate orientation
+                         with a 20-bp gap between them, that alignment is considered valid (as long as -X is also
+                         satisfied). A 19-bp gap would not be valid in that case. Default: 0.
+
+  -X/--maxins INT        The maximum insert size for valid paired-end alignments. E.g. if -X 100 is specified and
+                         a paired-end alignment consists of two 20-bp alignments in the proper orientation with a
+                         60-bp gap between them, that alignment is considered valid (as long as -I is also satisfied).
+                         A 61-bp gap would not be valid in that case. Default: 500.
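The --solexa-quals conversion above is a fixed formula; a direct transcription in Python (the function name is illustrative, not part of Bismark)::

    import math

    def solexa_to_phred(solexa_qual):
        # phred-qual = 10 * log(1 + 10 ** (solexa-qual/10.0)) / log(10)
        return 10 * math.log(1 + 10 ** (solexa_qual / 10.0)) / math.log(10)

    # A Solexa quality of -5 corresponds to roughly Phred 1.19:
    assert abs(solexa_to_phred(-5) - 1.19) < 0.01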
+
+
+
+Output::
+
+  --non_directional      The sequencing library was constructed in a non strand-specific manner; alignments to all four
+                         bisulfite strands will be reported. Default: OFF.
+
+                         (The current Illumina protocol for BS-Seq is directional, in which case the strands complementary
+                         to the original strands are merely theoretical and should not exist in reality. Specifying directional
+                         alignments (which is the default) will only run 2 alignment threads to the original top (OT)
+                         or bottom (OB) strands in parallel and report these alignments. This is the recommended option
+                         for strand-specific libraries).
+
+  --sam-no-hd            Suppress SAM header lines (starting with @). This might be useful when very large input files are
+                         split up into several smaller files to run concurrently and the output files are to be merged.
+
+  --quiet                Print nothing besides alignments.
+
+  --vanilla              Performs bisulfite mapping with Bowtie 1 and prints the 'old' output (as in Bismark 0.5.X) instead
+                         of SAM format output.
+
+  -un/--unmapped         Write all reads that could not be aligned to a file in the output directory. Written reads will
+                         appear as they did in the input, without any translation of quality values that may have
+                         taken place within Bowtie or Bismark. Paired-end reads will be written to two parallel files with _1
+                         and _2 inserted in their filenames, i.e. _unmapped_reads_1.txt and _unmapped_reads_2.txt. Reads
+                         with more than one valid alignment with the same number of lowest mismatches (ambiguous mapping)
+                         are also written to _unmapped_reads.txt unless the option --ambiguous is specified as well.
+
+  --ambiguous            Write all reads which produce more than one valid alignment with the same number of lowest
+                         mismatches or other reads that fail to align uniquely to a file in the output directory.
+                         Written reads will appear as they did in the input, without any of the translation of quality
+                         values that may have taken place within Bowtie or Bismark. Paired-end reads will be written to two
+                         parallel files with _1 and _2 inserted in their filenames, i.e. _ambiguous_reads_1.txt and
+                         _ambiguous_reads_2.txt. These reads are not written to the file specified with --un.
+
+  -o/--output_dir DIR    Write all output files into this directory. By default the output files will be written into
+                         the same folder as the input file(s). If the specified folder does not exist, Bismark will attempt
+                         to create it first. The path to the output folder can be either relative or absolute.
+
+  --temp_dir DIR         Write temporary files to this directory instead of into the same directory as the input files. If
+                         the specified folder does not exist, Bismark will attempt to create it first. The path to the
+                         temporary folder can be either relative or absolute.
+
+------
+
+Bowtie 2 alignment options::
+
+  -N INT                 Sets the number of mismatches allowed in a seed alignment during multiseed alignment.
+                         Can be set to 0 or 1. Setting this higher makes alignment slower (often much slower)
+                         but increases sensitivity. Default: 0. This option is only available for Bowtie 2 (for
+                         Bowtie 1 see -n).
+
+  -L INT                 Sets the length of the seed substrings to align during multiseed alignment. Smaller values
+                         make alignment slower but more sensitive. Default: the --sensitive preset of Bowtie 2 is
+                         used by default, which sets -L to 20. This option is only available for Bowtie 2 (for
+                         Bowtie 1 see -l).
+
+  --ignore-quals         When calculating a mismatch penalty, always consider the quality value at the mismatched
+                         position to be the highest possible, regardless of the actual value. I.e. input is treated
+                         as though all quality values are high. This is also the default behavior when the input
+                         doesn't specify quality values (e.g. in -f mode). This option is invariable and on by default.
+
+
+Bowtie 2 paired-end options::
+
+  --no-mixed             This option disables Bowtie 2's behavior to try to find alignments for the individual mates if
+                         it cannot find a concordant or discordant alignment for a pair. This option is invariable
+                         and on by default.
+
+  --no-discordant        Normally, Bowtie 2 looks for discordant alignments if it cannot find any concordant alignments.
+                         A discordant alignment is an alignment where both mates align uniquely, but that does not
+                         satisfy the paired-end constraints (--fr/--rf/--ff, -I, -X). This option disables that behavior
+                         and it is on by default.
+
+
+Bowtie 2 effort options::
+
+  -D INT                 Up to INT consecutive seed extension attempts can "fail" before Bowtie 2 moves on, using
+                         the alignments found so far. A seed extension "fails" if it does not yield a new best or a
+                         new second-best alignment. Default: 15.
+
+  -R INT                 INT is the maximum number of times Bowtie 2 will "re-seed" reads with repetitive seeds.
+                         When "re-seeding," Bowtie 2 simply chooses a new set of reads (same length, same number of
+                         mismatches allowed) at different offsets and searches for more alignments. A read is considered
+                         to have repetitive seeds if the total number of seed hits divided by the number of seeds
+                         that aligned at least once is greater than 300. Default: 2.
+
+
+Bowtie 2 Scoring options::
+
+  --score_min "func"     Sets a function governing the minimum alignment score needed for an alignment to be considered
+                         "valid" (i.e. good enough to report). This is a function of read length. For instance, specifying
+                         L,0,-0.2 sets the minimum-score function f to f(x) = 0 + -0.2 * x, where x is the read length.
+                         See also: setting function options at http://bowtie-bio.sourceforge.net/bowtie2. The default is
+                         L,0,-0.2.
+
+
+Bowtie 2 Reporting options::
+
+  --most_valid_alignments INT This used to be the Bowtie 2 parameter -M. As of Bowtie 2 version 2.0.0 beta7 the option -M is
+                         deprecated. It will be removed in subsequent versions. What used to be called -M mode is still the
+                         default mode, but adjusting the -M setting is deprecated. Use the -D and -R options to adjust the
+                         effort expended to find valid alignments.
+
+                         For reference, this used to be the old (now deprecated) description of -M:
+                         Bowtie 2 searches for at most INT+1 distinct, valid alignments for each read. The search terminates when it
+                         can't find more distinct valid alignments, or when it finds INT+1 distinct alignments, whichever
+                         happens first. Only the best alignment is reported. Information from the other alignments is used to
+                         estimate mapping quality and to set SAM optional fields, such as AS:i and XS:i. Increasing -M makes
+                         Bowtie 2 slower, but increases the likelihood that it will pick the correct alignment for a read that
+                         aligns many places. For reads that have more than INT+1 distinct, valid alignments, Bowtie 2 does not
+                         guarantee that the alignment reported is the best possible in terms of alignment score. -M is
+                         always used and its default value is set to 10.
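The --phred33-quals/--phred64-quals switches documented earlier in this help only change the ASCII offset used to decode quality strings. A small sketch of that decoding (the helper name is hypothetical)::

    def decode_qualities(qual_string, offset=33):
        # offset 33 for --phred33-quals (the default), 64 for --phred64-quals.
        return [ord(c) - offset for c in qual_string]

    assert decode_qualities("I") == [40]              # 'I' is Q40 in Phred+33
    assert decode_qualities("h", offset=64) == [40]   # 'h' is Q40 in Phred+64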
+
+
+
diff --git a/lib/tool_shed/test/test_data/repos/bismark/1/bismark_genome_preparation b/lib/tool_shed/test/test_data/repos/bismark/1/bismark_genome_preparation
new file mode 100755
index 000000000000..1895a296632c
--- /dev/null
+++ b/lib/tool_shed/test/test_data/repos/bismark/1/bismark_genome_preparation
@@ -0,0 +1,492 @@
+#!/usr/bin/perl --
+use strict;
+use warnings;
+use Cwd;
+use File::Path qw(rmtree);
+$|++;
+
+
+## This program is Copyright (C) 2010-12, Felix Krueger (felix.krueger@bbsrc.ac.uk)
+
+## This program is free software: you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation, either version 3 of the License, or
+## (at your option) any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+use Getopt::Long;
+use Cwd;
+
+my $verbose;
+my $help;
+my $version;
+my $man;
+my $path_to_bowtie;
+my $multi_fasta;
+my $single_fasta;
+my $bowtie2;
+
+my $bismark_version = 'v0.7.7';
+
+GetOptions ('verbose' => \$verbose,
+            'help' => \$help,
+            'man' => \$man,
+            'version' => \$version,
+            'path_to_bowtie:s' => \$path_to_bowtie,
+            'single_fasta' => \$single_fasta,
+            'bowtie2' => \$bowtie2,
+           );
+
+my $genome_folder = shift @ARGV; # mandatory
+my $CT_dir;
+my $GA_dir;
+
+if ($help or $man){
+  print_helpfile();
+  exit;
+}
+
+if ($version){
+  print << "VERSION";
+
+          Bismark - Bisulfite Mapper and Methylation Caller.
+
+          Bismark Genome Preparation Version: $bismark_version
+        Copyright 2010-12 Felix Krueger, Babraham Bioinformatics
+              www.bioinformatics.babraham.ac.uk/projects/
+
+VERSION
+  exit;
+}
+
+if ($single_fasta){
+  print "Writing individual genomes out into single-entry fasta files (one per chromosome)\n\n";
+  $multi_fasta = 0;
+}
+else{
+  print "Writing bisulfite genomes out into a single MFA (multi FastA) file\n\n";
+  $single_fasta = 0;
+  $multi_fasta = 1;
+}
+
+my @filenames = create_bisulfite_genome_folders();
+
+process_sequence_files ();
+
+launch_bowtie_indexer();
+
+sub launch_bowtie_indexer{
+  if ($bowtie2){
+    print "Bismark Genome Preparation - Step III: Launching the Bowtie 2 indexer\n";
+  }
+  else{
+    print "Bismark Genome Preparation - Step III: Launching the Bowtie (1) indexer\n";
+  }
+  print "Please be aware that this process can - depending on genome size - take up to several hours!\n";
+  sleep(5);
+
+  ### if the path to bowtie was specified explicitly
+  if ($path_to_bowtie){
+    if ($bowtie2){
+      $path_to_bowtie =~ s/$/bowtie2-build/;
+    }
+    else{
+      $path_to_bowtie =~ s/$/bowtie-build/;
+    }
+  }
+  ### otherwise we assume that bowtie-build is in the path
+  else{
+    if ($bowtie2){
+      $path_to_bowtie = 'bowtie2-build';
+    }
+    else{
+      $path_to_bowtie = 'bowtie-build';
+    }
+  }
+
+  $verbose and print "\n";
+
+  ### Forking the program to run 2 instances of Bowtie-build or Bowtie2-build (= the Bowtie (1/2) indexer)
+  my $pid = fork();
+
+  # parent process
+  if ($pid){
+    sleep(1);
+    chdir $CT_dir or die "Unable to change directory: $!\n";
+    $verbose and warn "Preparing indexing of CT converted genome in $CT_dir\n";
+    my @fasta_files = <*.fa>;
+    my $file_list = join (',',@fasta_files);
+    $verbose and print "Parent process: Starting to index C->T converted genome with the
following command:\n\n";
+    $verbose and print "$path_to_bowtie -f $file_list BS_CT\n\n";
+
+    sleep (11);
+    exec ("$path_to_bowtie","-f","$file_list","BS_CT");
+  }
+
+  # child process
+  elsif ($pid == 0){
+    sleep(2);
+    chdir $GA_dir or die "Unable to change directory: $!\n";
+    $verbose and warn "Preparing indexing of GA converted genome in $GA_dir\n";
+    my @fasta_files = <*.fa>;
+    my $file_list = join (',',@fasta_files);
+    $verbose and print "Child process: Starting to index G->A converted genome with the following command:\n\n";
+    $verbose and print "$path_to_bowtie -f $file_list BS_GA\n\n";
+    $verbose and print "(starting in 10 seconds)\n";
+    sleep(10);
+    exec ("$path_to_bowtie","-f","$file_list","BS_GA");
+  }
+
+  # if the platform doesn't support the fork command we will run the indexing processes one after the other
+  else{
+    print "Forking process was not successful, therefore performing the indexing sequentially instead\n";
+    sleep(10);
+
+    ### moving to CT genome folder
+    $verbose and warn "Preparing to index CT converted genome in $CT_dir\n";
+    chdir $CT_dir or die "Unable to change directory: $!\n";
+    my @fasta_files = <*.fa>;
+    my $file_list = join (',',@fasta_files);
+    $verbose and print "$file_list\n\n";
+    sleep(2);
+    system ("$path_to_bowtie","-f","$file_list","BS_CT");
+    @fasta_files=();
+    $file_list= '';
+
+    ### moving to GA genome folder
+    $verbose and warn "Preparing to index GA converted genome in $GA_dir\n";
+    chdir $GA_dir or die "Unable to change directory: $!\n";
+    @fasta_files = <*.fa>;
+    $file_list = join (',',@fasta_files);
+    $verbose and print "$file_list\n\n";
+    sleep(2);
+    exec ("$path_to_bowtie","-f","$file_list","BS_GA");
+  }
+}
+
+
+sub process_sequence_files {
+
+  my ($total_CT_conversions,$total_GA_conversions) = (0,0);
+  $verbose and print "Bismark Genome Preparation - Step II: Bisulfite converting reference genome\n\n";
+  sleep (3);
+
+  $verbose and print "conversions performed:\n";
+  $verbose and print join("\t",'chromosome','C->T','G->A'),"\n";
+
+
+  ### If someone wants to index a genome which consists of thousands of contig and scaffold files we need to write the genome conversions into an MFA file
+  ### Otherwise the list of comma separated chromosomes we provide for bowtie-build will get too long for the kernel to handle
+  ### This is now the default option
+
+  if ($multi_fasta){
+    ### Here we just use one multi FastA file name, append .CT_conversion or .GA_conversion and print all sequence conversions into these files
+    my $bisulfite_CT_conversion_filename = "$CT_dir/genome_mfa.CT_conversion.fa";
+    open (CT_CONVERT,'>',$bisulfite_CT_conversion_filename) or die "Can't write to file $bisulfite_CT_conversion_filename: $!\n";
+
+    my $bisulfite_GA_conversion_filename = "$GA_dir/genome_mfa.GA_conversion.fa";
+    open (GA_CONVERT,'>',$bisulfite_GA_conversion_filename) or die "Can't write to file $bisulfite_GA_conversion_filename: $!\n";
+  }
+
+  foreach my $filename(@filenames){
+    my ($chromosome_CT_conversions,$chromosome_GA_conversions) = (0,0);
+    open (IN,$filename) or die "Failed to read from sequence file $filename $!\n";
+    # warn "Reading chromosome information from $filename\n\n";
+
+    ### first line needs to be a fastA header
+    my $first_line = <IN>;
+    chomp $first_line;
+
+    ### Extracting chromosome name from the FastA header
+    my $chromosome_name = extract_chromosome_name($first_line);
+
+    ### alternatively, chromosomes can be written out into single-entry FastA files. This will only work for genomes with up to a few hundred chromosomes.
+    unless ($multi_fasta){
+      my $bisulfite_CT_conversion_filename = "$CT_dir/$chromosome_name";
+      $bisulfite_CT_conversion_filename =~ s/$/.CT_conversion.fa/;
+      open (CT_CONVERT,'>',$bisulfite_CT_conversion_filename) or die "Can't write to file $bisulfite_CT_conversion_filename: $!\n";
+
+      my $bisulfite_GA_conversion_filename = "$GA_dir/$chromosome_name";
+      $bisulfite_GA_conversion_filename =~ s/$/.GA_conversion.fa/;
+      open (GA_CONVERT,'>',$bisulfite_GA_conversion_filename) or die "Can't write to file $bisulfite_GA_conversion_filename: $!\n";
+    }
+
+    print CT_CONVERT ">",$chromosome_name,"_CT_converted\n"; # first entry
+    print GA_CONVERT ">",$chromosome_name,"_GA_converted\n"; # first entry
+
+
+    while (<IN>){
+
+      ### in case the line is a new fastA header
+      if ($_ =~ /^>/){
+        ### printing out the stats for the previous chromosome
+        $verbose and print join ("\t",$chromosome_name,$chromosome_CT_conversions,$chromosome_GA_conversions),"\n";
+        ### resetting the chromosome transliteration counters
+        ($chromosome_CT_conversions,$chromosome_GA_conversions) = (0,0);
+
+        ### Extracting chromosome name from the additional FastA header
+        $chromosome_name = extract_chromosome_name($_);
+
+        ### alternatively, chromosomes can be written out into single-entry FastA files. This will only work for genomes with up to a few hundred chromosomes.
+        unless ($multi_fasta){
+          my $bisulfite_CT_conversion_filename = "$CT_dir/$chromosome_name";
+          $bisulfite_CT_conversion_filename =~ s/$/.CT_conversion.fa/;
+          open (CT_CONVERT,'>',$bisulfite_CT_conversion_filename) or die "Can't write to file $bisulfite_CT_conversion_filename: $!\n";
+
+          my $bisulfite_GA_conversion_filename = "$GA_dir/$chromosome_name";
+          $bisulfite_GA_conversion_filename =~ s/$/.GA_conversion.fa/;
+          open (GA_CONVERT,'>',$bisulfite_GA_conversion_filename) or die "Can't write to file $bisulfite_GA_conversion_filename: $!\n";
+        }
+
+        print CT_CONVERT ">",$chromosome_name,"_CT_converted\n";
+        print GA_CONVERT ">",$chromosome_name,"_GA_converted\n";
+      }
+
+      else{
+        my $sequence = uc$_;
+
+        ### (I) First replacing all ambiguous sequence characters (such as M,S,R....)
by N (G,A,T,C,N and the line endings \r and \n are added to a character group)
+
+        $sequence =~ s/[^ATCGN\n\r]/N/g;
+
+        ### (II) Writing the chromosome out into a C->T converted version (equals forward strand conversion)
+
+        my $CT_sequence = $sequence;
+        my $CT_transliterations_performed = ($CT_sequence =~ tr/C/T/); # converts all Cs into Ts
+        $total_CT_conversions += $CT_transliterations_performed;
+        $chromosome_CT_conversions += $CT_transliterations_performed;
+
+        print CT_CONVERT $CT_sequence;
+
+        ### (III) Writing the chromosome out in a G->A converted version of the forward strand (this is equivalent to reverse-
+        ### complementing the forward strand and then C->T converting it)
+
+        my $GA_sequence = $sequence;
+        my $GA_transliterations_performed = ($GA_sequence =~ tr/G/A/); # converts all Gs to As on the forward strand
+        $total_GA_conversions += $GA_transliterations_performed;
+        $chromosome_GA_conversions += $GA_transliterations_performed;
+
+        print GA_CONVERT $GA_sequence;
+
+      }
+    }
+    $verbose and print join ("\t",$chromosome_name,$chromosome_CT_conversions,$chromosome_GA_conversions),"\n";
+  }
+  close (CT_CONVERT) or die "Failed to close filehandle: $!\n";
+  close (GA_CONVERT) or die "Failed to close filehandle: $!\n";
+
+
+  print "\nTotal number of conversions performed:\n";
+  print "C->T:\t$total_CT_conversions\n";
+  print "G->A:\t$total_GA_conversions\n";
+
+  warn "\nStep II - Genome bisulfite conversions - completed\n\n\n";
+}
+
+sub extract_chromosome_name {
+
+  my $header = shift;
+
+  ## Bowtie extracts the first string after the initial > in the FASTA file, so we are doing this as well
+
+  if ($header =~ s/^>//){
+    my ($chromosome_name) = split (/\s+/,$header);
+    return $chromosome_name;
+  }
+  else{
+    die "The specified chromosome file doesn't seem to be in FASTA format as required! $!\n";
+  }
+}
+
+sub create_bisulfite_genome_folders{
+
+  $verbose and print "Bismark Genome Preparation - Step I: Preparing folders\n\n";
+
+  # Ensuring a genome folder has been specified
+  if ($genome_folder){
+    unless ($genome_folder =~ /\/$/){
+      $genome_folder =~ s/$/\//;
+    }
+    $verbose and print "Path to genome folder specified: $genome_folder\n";
+    chdir $genome_folder or die "Couldn't move to directory $genome_folder. Make sure the directory exists! $!";
+
+    # making the genome folder path absolute so it won't break if the path was specified relative
+    $genome_folder = getcwd;
+    unless ($genome_folder =~ /\/$/){
+      $genome_folder =~ s/$/\//;
+    }
+  }
+
+  else{
+    $verbose and print "Genome folder was not provided as argument ";
+    while (1){
+      print "Please specify a genome folder to be bisulfite converted:\n";
+      $genome_folder = <STDIN>;
+      chomp $genome_folder;
+
+      # adding a trailing slash unless already present
+      unless ($genome_folder =~ /\/$/){
+        $genome_folder =~ s/$/\//;
+      }
+      if (chdir $genome_folder){
+        last;
+      }
+      else{
+        warn "Couldn't move to directory $genome_folder! $!";
+      }
+    }
+  }
+
+  if ($path_to_bowtie){
+    unless ($path_to_bowtie =~ /\/$/){
+      $path_to_bowtie =~ s/$/\//;
+    }
+    if (chdir $path_to_bowtie){
+      if ($bowtie2){
+        $verbose and print "Path to Bowtie 2 specified: $path_to_bowtie\n";
+      }
+      else{
+        $verbose and print "Path to Bowtie (1) specified: $path_to_bowtie\n";
+      }
+    }
+    else{
+      die "There was an error with the path to bowtie: $!\n";
+    }
+  }
+
+  chdir $genome_folder or die "Couldn't move to directory $genome_folder. Make sure the directory exists!
$!";
+
+
+  # Exiting unless there are fastA files in the folder
+  my @filenames = <*.fa>;
+
+  ### if there aren't any genomic files with the extension .fa we will look for files with the extension .fasta
+  unless (@filenames){
+    @filenames = <*.fasta>;
+  }
+
+  unless (@filenames){
+    die "The specified genome folder $genome_folder does not contain any sequence files in FastA format (with .fa or .fasta file extensions)\n";
+  }
+
+  warn "Bisulfite Genome Indexer version $bismark_version (last modified 17 Nov 2011)\n\n";
+  sleep (3);
+
+  # creating a directory inside the genome folder to store the bisulfite genomes unless it already exists
+  my $bisulfite_dir = "${genome_folder}Bisulfite_Genome/";
+  unless (-d $bisulfite_dir){
+    mkdir $bisulfite_dir or die "Unable to create directory $bisulfite_dir $!\n";
+    $verbose and print "Created Bisulfite Genome folder $bisulfite_dir\n";
+  }
+  else{
+    while (1){
+      print "\nA directory called $bisulfite_dir already exists. Bisulfite converted sequences and/or already existing Bowtie (1 or 2) indexes might be overwritten!\nDo you want to continue anyway?\t";
+      my $proceed = <STDIN>;
+      chomp $proceed;
+      if ($proceed =~ /^y/i ){
+        last;
+      }
+      elsif ($proceed =~ /^n/i){
+        die "Terminated by user\n\n";
+      }
+    }
+  }
+
+  ### as of version 0.6.0 the Bismark indexer will no longer delete the Bisulfite_Genome directory if it was present already, since it could store the Bowtie 1 or 2 indexes already
+  # removing any existing files and subfolders in the bisulfite directory (the specified directory won't be deleted)
+  # rmtree($bisulfite_dir, {verbose => 1,keep_root => 1});
+  # unless (-d $bisulfite_dir){ # had to add this after changing remove_tree to rmtree // suggested by Samantha Cooper @ Illumina
+  #   mkdir $bisulfite_dir or die "Unable to create directory $bisulfite_dir $!\n";
+  # }
+  # }
+
+  chdir $bisulfite_dir or die "Unable to move to $bisulfite_dir\n";
+  $CT_dir = "${bisulfite_dir}CT_conversion/";
+  $GA_dir = "${bisulfite_dir}GA_conversion/";
+
+  # creating 2 subdirectories to store a C->T (forward strand conversion) and a G->A (reverse strand conversion)
+  # converted version of the genome
+  unless (-d $CT_dir){
+    mkdir $CT_dir or die "Unable to create directory $CT_dir $!\n";
+    $verbose and print "Created Bisulfite Genome folder $CT_dir\n";
+  }
+  unless (-d $GA_dir){
+    mkdir $GA_dir or die "Unable to create directory $GA_dir $!\n";
+    $verbose and print "Created Bisulfite Genome folder $GA_dir\n";
+  }
+
+  # moving back to the original genome folder
+  chdir $genome_folder or die "Couldn't move to directory $genome_folder $!";
+  # $verbose and print "Moved back to genome folder $genome_folder\n";
+  warn "\nStep I - Prepare genome folders - completed\n\n\n";
+  return @filenames;
+}
+
+sub print_helpfile{
+  print << 'HOW_TO';
+
+
+DESCRIPTION
+
+This script is supposed to convert a specified reference genome into two different bisulfite
+converted versions and index them for alignments with Bowtie 1 (default), or Bowtie 2. The first
+bisulfite genome will have all Cs converted to Ts (C->T), and the other one will have all Gs
+converted to As (G->A). Both bisulfite genomes will be stored in subfolders within the reference
+genome folder. Once the bisulfite conversion has been completed the program will fork and launch
+two simultaneous instances of the bowtie 1 or 2 indexer (bowtie-build or bowtie2-build). Be aware
+that the indexing process can take up to several hours; this will mainly depend on genome size
+and system resources.
+
+
+
+
+The following is a brief description of command line options and arguments to control the
+Bismark Genome Preparation script:
+
+
+USAGE: bismark_genome_preparation [options] <path_to_genome_folder>
+
+
+OPTIONS:
+
+--help/--man             Displays this help file and exits.
+
+--version                Displays version information and exits.
+
+--verbose                Print verbose output for more details or debugging.
+
+--path_to_bowtie         The full path to the Bowtie 1 or Bowtie 2 installation on your system. If
+                         the path is not provided as an option you will be prompted for it.
+
+--bowtie2                This will create bisulfite indexes for Bowtie 2. (Default: Bowtie 1).
+
+--single_fasta           Instruct the Bismark Indexer to write the converted genomes into
+                         single-entry FastA files instead of making one multi-FastA file (MFA)
+                         per chromosome. This might be useful if individual bisulfite converted
+                         chromosomes are needed (e.g. for debugging), however it can cause a
+                         problem with indexing if the number of chromosomes is vast (this is likely
+                         to be in the range of several thousand files; the operating system can
+                         only handle lists up to a certain length, and some newly assembled
+                         genomes may contain 20000-50000 contigs or scaffold files which do exceed
+                         this list length limit).
+
+
+ARGUMENTS:
+
+<path_to_genome_folder>  The path to the folder containing the genome to be bisulfite converted.
+                         At the current time Bismark Genome Preparation expects one or more fastA
+                         files in the folder (with the file extension: .fa or .fasta). If the path
+                         is not provided as an argument you will be prompted for it.
+
+
+
+This script was last modified on 18 Nov 2011.
+HOW_TO
+}
diff --git a/lib/tool_shed/test/test_data/repos/bismark/1/bismark_methylation_extractor.py b/lib/tool_shed/test/test_data/repos/bismark/1/bismark_methylation_extractor.py
new file mode 100644
index 000000000000..cb79d1ecf590
--- /dev/null
+++ b/lib/tool_shed/test/test_data/repos/bismark/1/bismark_methylation_extractor.py
@@ -0,0 +1,186 @@
+#!/usr/bin/env python
+
+import argparse, os, shutil, subprocess, sys, tempfile, fileinput
+import zipfile
+from glob import glob
+
+def stop_err( msg ):
+    sys.stderr.write( "%s\n" % msg )
+    sys.exit()
+
+def zipper(dir, zip_file):
+    zip = zipfile.ZipFile(zip_file, 'w', compression=zipfile.ZIP_DEFLATED)
+    root_len = len(os.path.abspath(dir))
+    for root, dirs, files in os.walk(dir):
+        archive_root = os.path.abspath(root)[root_len:]
+        for f in files:
+            fullpath = os.path.join(root, f)
+            archive_name = os.path.join(archive_root, f)
+            zip.write(fullpath, archive_name, zipfile.ZIP_DEFLATED)
+    zip.close()
+    return zip_file
+
+def __main__():
+    #Parse Command Line
+    parser = argparse.ArgumentParser(description='Wrapper for the bismark methylation caller.')
+
+    # input options
+    parser.add_argument( '--infile', help='Input file in SAM format.'
) + parser.add_argument( '--single-end', dest='single_end', action="store_true" ) + parser.add_argument( '--paired-end', dest='paired_end', action="store_true" ) + + parser.add_argument( '--report-file', dest='report_file' ) + parser.add_argument( '--comprehensive', action="store_true" ) + parser.add_argument( '--merge-non-cpg', dest='merge_non_cpg', action="store_true" ) + parser.add_argument( '--no-overlap', dest='no_overlap', action="store_true" ) + parser.add_argument( '--compress' ) + parser.add_argument( '--ignore-bps', dest='ignore_bps', type=int ) + + # OT - original top strand + parser.add_argument( '--cpg_ot' ) + parser.add_argument( '--chg_ot' ) + parser.add_argument( '--chh_ot' ) + # CTOT - complementary to original top strand + parser.add_argument( '--cpg_ctot' ) + parser.add_argument( '--chg_ctot' ) + parser.add_argument( '--chh_ctot' ) + # OB - original bottom strand + parser.add_argument( '--cpg_ob' ) + parser.add_argument( '--chg_ob' ) + parser.add_argument( '--chh_ob' ) + # CTOT - complementary to original bottom strand + parser.add_argument( '--cpg_ctob' ) + parser.add_argument( '--chg_ctob' ) + parser.add_argument( '--chh_ctob' ) + + parser.add_argument( '--cpg_context' ) + parser.add_argument( '--chg_context' ) + parser.add_argument( '--chh_context' ) + + parser.add_argument( '--non_cpg_context' ) + + parser.add_argument( '--non_cpg_context_ot' ) + parser.add_argument( '--non_cpg_context_ctot' ) + parser.add_argument( '--non_cpg_context_ob' ) + parser.add_argument( '--non_cpg_context_ctob' ) + + args = parser.parse_args() + + + # Build methylation extractor command + output_dir = tempfile.mkdtemp() + cmd = 'bismark_methylation_extractor --no_header -o %s %s %s' + + additional_opts = '' + # Set up all options + if args.single_end: + additional_opts += ' --single-end ' + else: + additional_opts += ' --paired-end ' + if args.no_overlap: + additional_opts += ' --no_overlap ' + if args.ignore_bps: + additional_opts += ' --ignore %s ' % args.ignore_bps + if args.comprehensive: + additional_opts += ' --comprehensive ' + if args.merge_non_cpg: + additional_opts += ' --merge_non_CpG ' + if args.report_file: + additional_opts += ' --report ' + + + # Final command: + cmd = cmd % (output_dir, additional_opts, args.infile) + + # Run + try: + tmp_out = tempfile.NamedTemporaryFile().name + tmp_stdout = open( tmp_out, 'wb' ) + tmp_err = tempfile.NamedTemporaryFile().name + tmp_stderr = open( tmp_err, 'wb' ) + proc = subprocess.Popen( args=cmd, shell=True, cwd=".", stdout=tmp_stdout, stderr=tmp_stderr ) + returncode = proc.wait() + tmp_stderr.close() + # get stderr, allowing for case where it's very large + tmp_stderr = open( tmp_err, 'rb' ) + stderr = '' + buffsize = 1048576 + try: + while True: + stderr += tmp_stderr.read( buffsize ) + if not stderr or len( stderr ) % buffsize != 0: + break + except OverflowError: + pass + tmp_stdout.close() + tmp_stderr.close() + if returncode != 0: + raise Exception, stderr + + # TODO: look for errors in program output. 
+ except Exception, e: + stop_err( 'Error in bismark methylation extractor:\n' + str( e ) ) + + + # collect and copy output files + + if args.compress: + zipper(output_dir, args.compress) + + + if args.cpg_ot: + shutil.move( glob(os.path.join( output_dir, '*CpG_OT_*'))[0], args.cpg_ot ) + if args.chg_ot: + shutil.move( glob(os.path.join( output_dir, '*CHG_OT_*'))[0], args.chg_ot ) + if args.chh_ot: + shutil.move( glob(os.path.join( output_dir, '*CHH_OT_*'))[0], args.chh_ot ) + if args.cpg_ctot: + shutil.move( glob(os.path.join( output_dir, '*CpG_CTOT_*'))[0], args.cpg_ctot ) + if args.chg_ctot: + shutil.move( glob(os.path.join( output_dir, '*CHG_CTOT_*'))[0], args.chg_ctot ) + if args.chh_ctot: + shutil.move( glob(os.path.join( output_dir, '*CHH_CTOT_*'))[0], args.chh_ctot ) + if args.cpg_ob: + shutil.move( glob(os.path.join( output_dir, '*CpG_OB_*'))[0], args.cpg_ob ) + if args.chg_ob: + shutil.move( glob(os.path.join( output_dir, '*CHG_OB_*'))[0], args.chg_ob ) + if args.chh_ob: + shutil.move( glob(os.path.join( output_dir, '*CHH_OB_*'))[0], args.chh_ob ) + if args.cpg_ctob: + shutil.move( glob(os.path.join( output_dir, '*CpG_CTOB_*'))[0], args.cpg_ctob ) + if args.chg_ctob: + shutil.move( glob(os.path.join( output_dir, '*CHG_CTOB_*'))[0], args.chg_ctob ) + if args.chh_ctob: + shutil.move( glob(os.path.join( output_dir, '*CHH_CTOB_*'))[0], args.chh_ctob ) + + # context-dependent methylation output files + if args.cpg_context: + shutil.move( glob(os.path.join( output_dir, '*CpG_context_*'))[0], args.cpg_context ) + if args.chg_context: + shutil.move( glob(os.path.join( output_dir, '*CHG_context_*'))[0], args.chg_context ) + if args.chh_context: + shutil.move( glob(os.path.join( output_dir, '*CHH_context_*'))[0], args.chh_context ) + + if args.non_cpg_context: + shutil.move( glob(os.path.join( output_dir, '*Non_CpG_context_*'))[0], args.non_cpg_context ) + + if args.non_cpg_context_ot: + shutil.move( glob(os.path.join( output_dir, '*Non_CpG_OT_*'))[0], args.non_cpg_context_ot ) + if args.non_cpg_context_ctot: + shutil.move( glob(os.path.join( output_dir, '*Non_CpG_CTOT_*'))[0], args.non_cpg_context_ctot ) + if args.non_cpg_context_ob: + shutil.move( glob(os.path.join( output_dir, '*Non_CpG_OB_*'))[0], args.non_cpg_context_ob ) + if args.non_cpg_context_ctob: + shutil.move( glob(os.path.join( output_dir, '*Non_CpG_CTOB_*'))[0], args.non_cpg_context_ctob ) + + + + if args.report_file: + shutil.move( glob(os.path.join( output_dir, '*_splitting_report*'))[0], args.report_file ) + + + # Clean up temp dirs + if os.path.exists( output_dir ): + shutil.rmtree( output_dir ) + +if __name__=="__main__": __main__() diff --git a/lib/tool_shed/test/test_data/bismark/bismark_methylation_extractor.xml b/lib/tool_shed/test/test_data/repos/bismark/1/bismark_methylation_extractor.xml similarity index 100% rename from lib/tool_shed/test/test_data/bismark/bismark_methylation_extractor.xml rename to lib/tool_shed/test/test_data/repos/bismark/1/bismark_methylation_extractor.xml diff --git a/lib/tool_shed/test/test_data/repos/bismark/1/bismark_wrapper.py b/lib/tool_shed/test/test_data/repos/bismark/1/bismark_wrapper.py new file mode 100644 index 000000000000..606fa428bd77 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/bismark/1/bismark_wrapper.py @@ -0,0 +1,288 @@ +#!/usr/bin/env python + +import argparse, os, shutil, subprocess, sys, tempfile, fileinput +import fileinput +from glob import glob + +def stop_err( msg ): + sys.stderr.write( "%s\n" % msg ) + sys.exit() + +def __main__(): + #Parse Command Line 
+ parser = argparse.ArgumentParser(description='Wrapper for the bismark bisulfite mapper.') + parser.add_argument( '-p', '--num-threads', dest='num_threads', + type=int, default=4, help='Use this many threads to align reads. The default is 4.' ) + + parser.add_argument( '--bismark_path', dest='bismark_path', help='Path to the bismark perl scripts' ) + + parser.add_argument( '--bowtie2', action='store_true', default=False, help='Running bismark with bowtie2 and not with bowtie.' ) + + # input options + parser.add_argument( '--own-file', dest='own_file', help='' ) + parser.add_argument( '-D', '--indexes-path', dest='index_path', help='Indexes directory; location of .ebwt and .fa files.' ) + parser.add_argument( '-O', '--output', dest='output' ) + parser.add_argument( '--output-report-file', dest='output_report_file' ) + parser.add_argument( '--suppress-header', dest='suppress_header', action="store_true" ) + + parser.add_argument( '--mate-paired', dest='mate_paired', action='store_true', help='Reads are mate-paired', default=False) + + + parser.add_argument( '-1', '--mate1', dest='mate1', + help='The forward reads file in Sanger FASTQ or FASTA format.' ) + parser.add_argument( '-2', '--mate2', dest='mate2', + help='The reverse reads file in Sanger FASTQ or FASTA format.' ) + + parser.add_argument( '--output-unmapped-reads', dest='output_unmapped_reads', + help='Additional output file with unmapped reads (single-end).' ) + parser.add_argument( '--output-unmapped-reads-l', dest='output_unmapped_reads_l', + help='File name for unmapped reads (left, paired-end).' ) + parser.add_argument( '--output-unmapped-reads-r', dest='output_unmapped_reads_r', + help='File name for unmapped reads (right, paired-end).' ) + + + parser.add_argument( '--output-suppressed-reads', dest='output_suppressed_reads', + help='Additional output file with suppressed reads (single-end).' ) + parser.add_argument( '--output-suppressed-reads-l', dest='output_suppressed_reads_l', + help='File name for suppressed reads (left, paired-end).' ) + parser.add_argument( '--output-suppressed-reads-r', dest='output_suppressed_reads_r', + help='File name for suppressed reads (right, paired-end).' ) + + + parser.add_argument( '--single-paired', dest='single_paired', + help='The single-end reads file in Sanger FASTQ or FASTA format.' 
) + + parser.add_argument( '--fastq', action='store_true', help='Query filetype is in FASTQ format') + parser.add_argument( '--fasta', action='store_true', help='Query filetype is in FASTA format') + parser.add_argument( '--phred64-quals', dest='phred64', action="store_true" ) + + + parser.add_argument( '--skip-reads', dest='skip_reads', type=int ) + parser.add_argument( '--qupto', type=int) + + + # paired end options + parser.add_argument( '-I', '--minins', dest='min_insert' ) + parser.add_argument( '-X', '--maxins', dest='max_insert' ) + parser.add_argument( '--no-mixed', dest='no_mixed', action="store_true" ) + parser.add_argument( '--no-discordant', dest='no_discordant', action="store_true" ) + + #parse general options + # default 20 + parser.add_argument( '--seed-len', dest='seed_len', type=int) + # default 15 + parser.add_argument( '--seed-extention-attempts', dest='seed_extention_attempts', type=int ) + # default 0 + parser.add_argument( '--seed-mismatches', dest='seed_mismatches', type=int ) + # default 2 + parser.add_argument( '--max-reseed', dest='max_reseed', type=int ) + """ + # default 70 + parser.add_argument( '--maqerr', dest='maqerr', type=int ) + """ + + """ + The number of megabytes of memory a given thread is given to store path + descriptors in --best mode. Best-first search must keep track of many paths + at once to ensure it is always extending the path with the lowest cumulative + cost. Bowtie tries to minimize the memory impact of the descriptors, but + they can still grow very large in some cases. If you receive an error message + saying that chunk memory has been exhausted in --best mode, try adjusting + this parameter up to dedicate more memory to the descriptors. Default: 512. + """ + parser.add_argument( '--chunkmbs', type=int, default=512 ) + + args = parser.parse_args() + + # Create bismark index if necessary. + index_dir = "" + if args.own_file: + """ + Create a temporary index with the offered files from the user. + Utilizing the script: bismark_genome_preparation + bismark_genome_preparation --bowtie2 hg19/ + """ + tmp_index_dir = tempfile.mkdtemp() + index_path = os.path.join( tmp_index_dir, '.'.join( os.path.split( args.own_file )[1].split( '.' )[:-1] ) ) + try: + """ + Create a hard link pointing to args.own_file named 'index_path'.fa. 
+ """ + os.symlink( args.own_file, index_path + '.fa' ) + except Exception, e: + if os.path.exists( tmp_index_dir ): + shutil.rmtree( tmp_index_dir ) + stop_err( 'Error in linking the reference database.\n' + str( e ) ) + # bismark_genome_preparation needs the complete path to the folder in which the database is stored + if args.bowtie2: + cmd_index = 'bismark_genome_preparation --bowtie2 %s ' % ( tmp_index_dir ) + else: + cmd_index = 'bismark_genome_preparation %s ' % ( tmp_index_dir ) + if args.bismark_path: + # add the path to the bismark perl scripts, that is needed for galaxy + cmd_index = '%s/%s' % (args.bismark_path, cmd_index) + try: + tmp = tempfile.NamedTemporaryFile( dir=tmp_index_dir ).name + tmp_stderr = open( tmp, 'wb' ) + proc = subprocess.Popen( args=cmd_index, shell=True, cwd=tmp_index_dir, stdout=open(os.devnull, 'wb'), stderr=tmp_stderr.fileno() ) + returncode = proc.wait() + tmp_stderr.close() + # get stderr, allowing for case where it's very large + tmp_stderr = open( tmp, 'rb' ) + stderr = '' + buffsize = 1048576 + try: + while True: + stderr += tmp_stderr.read( buffsize ) + if not stderr or len( stderr ) % buffsize != 0: + break + except OverflowError: + pass + tmp_stderr.close() + if returncode != 0: + raise Exception, stderr + except Exception, e: + if os.path.exists( tmp_index_dir ): + shutil.rmtree( tmp_index_dir ) + stop_err( 'Error indexing reference sequence\n' + str( e ) ) + index_dir = tmp_index_dir + else: + index_dir = args.index_path + + # Build bismark command + tmp_bismark_dir = tempfile.mkdtemp() + output_dir = os.path.join( tmp_bismark_dir, 'results') + cmd = 'bismark %(args)s --temp_dir %(tmp_bismark_dir)s -o %(output_dir)s --quiet %(genome_folder)s %(reads)s' + if args.bismark_path: + # add the path to the bismark perl scripts, that is needed for galaxy + cmd = '%s/%s' % (args.bismark_path, cmd) + + arguments = { + 'genome_folder': index_dir, + 'args': '', + 'tmp_bismark_dir': tmp_bismark_dir, + 'output_dir': output_dir, + } + + additional_opts = '' + # Set up the reads + if args.mate_paired: + # paired-end reads library + reads = '-1 %s ' % ( args.mate1 ) + reads += ' -2 %s ' % ( args.mate2 ) + additional_opts += ' -I %s -X %s ' % (args.min_insert, args.max_insert) + else: + # single paired reads library + reads = ' %s ' % ( args.single_paired ) + + + if not args.bowtie2: + # use bowtie specific options + additional_opts += ' --best ' + if args.seed_mismatches: + # --seedmms + additional_opts += ' -n %s ' % args.seed_mismatches + if args.seed_len: + # --seedlen + additional_opts += ' -l %s ' % args.seed_len + + # alignment options + if args.bowtie2: + additional_opts += ' -p %s --bowtie2 ' % args.num_threads + if args.seed_mismatches: + additional_opts += ' -N %s ' % args.seed_mismatches + if args.seed_len: + additional_opts += ' -L %s ' % args.seed_len + if args.seed_extention_attempts: + additional_opts += ' -D %s ' % args.seed_extention_attempts + if args.max_reseed: + additional_opts += ' -R %s ' % args.max_reseed + if args.no_discordant: + additional_opts += ' --no-discordant ' + if args.no_mixed: + additional_opts += ' --no-mixed ' + """ + if args.maqerr: + additional_opts += ' --maqerr %s ' % args.maqerr + """ + if args.skip_reads: + additional_opts += ' --skip %s ' % args.skip_reads + if args.qupto: + additional_opts += ' --qupto %s ' % args.qupto + if args.phred64: + additional_opts += ' --phred64-quals ' + if args.suppress_header: + additional_opts += ' --sam-no-hd ' + if args.output_unmapped_reads or ( args.output_unmapped_reads_l and 
args.output_unmapped_reads_r): + additional_opts += ' --un ' + if args.output_suppressed_reads or ( args.output_suppressed_reads_l and args.output_suppressed_reads_r): + additional_opts += ' --ambiguous ' + + arguments.update( {'args': additional_opts, 'reads': reads} ) + + # Final command: + cmd = cmd % arguments + + # Run + try: + tmp_out = tempfile.NamedTemporaryFile().name + tmp_stdout = open( tmp_out, 'wb' ) + tmp_err = tempfile.NamedTemporaryFile().name + tmp_stderr = open( tmp_err, 'wb' ) + proc = subprocess.Popen( args=cmd, shell=True, cwd=".", stdout=tmp_stdout, stderr=tmp_stderr ) + returncode = proc.wait() + tmp_stderr.close() + # get stderr, allowing for case where it's very large + tmp_stderr = open( tmp_err, 'rb' ) + stderr = '' + buffsize = 1048576 + try: + while True: + stderr += tmp_stderr.read( buffsize ) + if not stderr or len( stderr ) % buffsize != 0: + break + except OverflowError: + pass + tmp_stdout.close() + tmp_stderr.close() + if returncode != 0: + raise Exception, stderr + + # TODO: look for errors in program output. + except Exception, e: + stop_err( 'Error in bismark:\n' + str( e ) ) + + + # collect and copy output files + """ + if args.output_report_file: + output_report_file = open(args.output_report_file, 'w+') + for line in fileinput.input(glob( os.path.join( output_dir, '*.txt') )): + output_report_file.write(line) + output_report_file.close() + """ + + if args.output_suppressed_reads: + shutil.move( glob(os.path.join( output_dir, '*ambiguous_reads.txt'))[0], args.output_suppressed_reads ) + if args.output_suppressed_reads_l: + shutil.move( glob(os.path.join( output_dir, '*ambiguous_reads_1.txt'))[0], args.output_suppressed_reads_l ) + if args.output_suppressed_reads_r: + shutil.move( glob(os.path.join( output_dir, '*ambiguous_reads_2.txt'))[0], args.output_suppressed_reads_r ) + + if args.output_unmapped_reads: + shutil.move( glob(os.path.join( output_dir, '*unmapped_reads.txt'))[0], args.output_unmapped_reads ) + if args.output_unmapped_reads_l: + shutil.move( glob(os.path.join( output_dir, '*unmapped_reads_1.txt'))[0], args.output_unmapped_reads_l ) + if args.output_unmapped_reads_r: + shutil.move( glob(os.path.join( output_dir, '*unmapped_reads_2.txt'))[0], args.output_unmapped_reads_r ) + + shutil.move( glob( os.path.join( output_dir, '*.sam'))[0] , args.output) + + # Clean up temp dirs + if args.own_file: + if os.path.exists( tmp_index_dir ): + shutil.rmtree( tmp_index_dir ) + if os.path.exists( tmp_bismark_dir ): + shutil.rmtree( tmp_bismark_dir ) + +if __name__=="__main__": __main__() diff --git a/lib/tool_shed/test/test_data/repos/bismark/1/tool_data_table_conf.xml.sample b/lib/tool_shed/test/test_data/repos/bismark/1/tool_data_table_conf.xml.sample new file mode 100644 index 000000000000..9e62763c741d --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/bismark/1/tool_data_table_conf.xml.sample @@ -0,0 +1,13 @@ + + + + + value, dbkey, name, path + +
+ + + value, dbkey, name, path + +
+
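Note: the angle-bracket markup of this sample file did not survive rendering above; only the column lists are visible. A conventional tool data table sample declaring the two "value, dbkey, name, path" tables would look roughly like the following sketch (the table names and .loc paths are assumptions, not taken from the patch):

    <tables>
        <!-- Locations of indexes in the Bowtie mapper format -->
        <table name="bowtie_indexes" comment_char="#">
            <columns>value, dbkey, name, path</columns>
            <file path="tool-data/bowtie_indices.loc" />
        </table>
        <!-- Locations of indexes in the Bowtie2 mapper format -->
        <table name="bowtie2_indexes" comment_char="#">
            <columns>value, dbkey, name, path</columns>
            <file path="tool-data/bowtie2_indices.loc" />
        </table>
    </tables>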
diff --git a/lib/tool_shed/test/test_data/repos/bismark/1/tool_dependencies.xml b/lib/tool_shed/test/test_data/repos/bismark/1/tool_dependencies.xml new file mode 100644 index 000000000000..776d2aa33442 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/bismark/1/tool_dependencies.xml @@ -0,0 +1,61 @@ + + + + $REPOSITORY_INSTALL_DIR + + + + + http://downloads.sourceforge.net/project/bowtie-bio/bowtie2/2.0.0-beta7/bowtie2-2.0.0-beta7-source.zip + make + + bowtie2 + $INSTALL_DIR/bin + + + bowtie2-align + $INSTALL_DIR/bin + + + bowtie2-build + $INSTALL_DIR/bin + + chmod +x $INSTALL_DIR/bin/bowtie2 + + $INSTALL_DIR/bin + + + + +Compiling bowtie2 requires zlib and libpthread to be present on your system. + + + + + + http://downloads.sourceforge.net/project/bowtie-bio/bowtie/0.12.8/bowtie-0.12.8-src.zip + make + + bowtie + $INSTALL_DIR/bin + + + bowtie-inspect + $INSTALL_DIR/bin + + + bowtie-build + $INSTALL_DIR/bin + + + $INSTALL_DIR/bin + + + + + + + + + + diff --git a/lib/tool_shed/test/test_data/repos/emboss_5_0470/0/emboss_antigenic.xml b/lib/tool_shed/test/test_data/repos/emboss_5_0470/0/emboss_antigenic.xml new file mode 100644 index 000000000000..1f3793c6007d --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/emboss_5_0470/0/emboss_antigenic.xml @@ -0,0 +1,58 @@ + + Predicts potentially antigenic regions of a protein sequence, using the method of Kolaskar and Tongaonkar. + emboss + antigenic -sequence $input1 -outfile $out_file1 -minlen $minlen -rformat2 $out_format1 -auto + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + You can view the original documentation here_. + + .. _here: http://emboss.sourceforge.net/apps/release/5.0/emboss/apps/antigenic.html + +------ + +**Citation** + +For the underlying tool, please cite `Rice P, Longden I, Bleasby A. EMBOSS: the European Molecular Biology Open Software Suite. Trends Genet. 2000 Jun;16(6):276-7. <http://www.ncbi.nlm.nih.gov/pubmed/10827456>`_ + +If you use this tool in Galaxy, please cite `Blankenberg D, Taylor J, Schenck I, He J, Zhang Y, Ghent M, Veeraraghavan N, Albert I, Miller W, Makova KD, Hardison RC, Nekrutenko A. A framework for collaborative analysis of ENCODE data: making large-scale analyses biologist-friendly. Genome Res. 2007 Jun;17(6):960-4. <http://www.ncbi.nlm.nih.gov/pubmed/17568012>`_ + + + \ No newline at end of file diff --git a/lib/tool_shed/test/test_data/repos/emboss_5_0470/0/emboss_format_corrector.py b/lib/tool_shed/test/test_data/repos/emboss_5_0470/0/emboss_format_corrector.py new file mode 100644 index 000000000000..3591cd8feaeb --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/emboss_5_0470/0/emboss_format_corrector.py @@ -0,0 +1,53 @@ +#EMBOSS format corrector + +import operator +#from galaxy import datatypes + +#Properly set file formats after job run +def exec_after_process( app, inp_data, out_data, param_dict,tool, stdout, stderr): +#Properly set file formats before job run +#def exec_before_job(trans, inp_data, out_data, param_dict,tool): + #why isn't items an ordered list? + items = out_data.items() + #lets sort it ourselves.... + items = sorted(items, key=operator.itemgetter(0)) + #items is now sorted... 
+ + #normal filetype correction + data_count=1 + for name, data in items: + outputType = param_dict.get( 'out_format'+str(data_count), None ) + #print "data_count",data_count, "name", name, "outputType", outputType + if outputType !=None: + if outputType == 'ncbi': + outputType = "fasta" + elif outputType == 'excel': + outputType = "tabular" + elif outputType == 'text': + outputType = "txt" + data = app.datatypes_registry.change_datatype(data, outputType) + app.model.context.add( data ) + app.model.context.flush() + data_count+=1 + + #html filetype correction + data_count=1 + for name, data in items: + wants_plot = param_dict.get( 'html_out'+str(data_count), None ) + ext = "html" + if wants_plot == "yes": + data = app.datatypes_registry.change_datatype(data, ext) + app.model.context.add( data ) + app.model.context.flush() + data_count+=1 + + #png file correction + data_count=1 + for name, data in items: + wants_plot = param_dict.get( 'plot'+str(data_count), None ) + ext = "png" + if wants_plot == "yes": + data = app.datatypes_registry.change_datatype(data, ext) + app.model.context.add( data ) + app.model.context.flush() + data_count+=1 diff --git a/lib/tool_shed/test/test_data/repos/emboss_5_0470/0/tool_dependencies.xml b/lib/tool_shed/test/test_data/repos/emboss_5_0470/0/tool_dependencies.xml new file mode 100644 index 000000000000..3c9b8f43ec1e --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/emboss_5_0470/0/tool_dependencies.xml @@ -0,0 +1,6 @@ + + + + + + diff --git a/lib/tool_shed/test/test_data/repos/emboss_5_0470/1/emboss_antigenic.xml b/lib/tool_shed/test/test_data/repos/emboss_5_0470/1/emboss_antigenic.xml new file mode 100644 index 000000000000..1f3793c6007d --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/emboss_5_0470/1/emboss_antigenic.xml @@ -0,0 +1,58 @@ + + Predicts potentially antigenic regions of a protein sequence, using the method of Kolaskar and Tongaonkar. + emboss + antigenic -sequence $input1 -outfile $out_file1 -minlen $minlen -rformat2 $out_format1 -auto + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + You can view the original documentation here_. + + .. _here: http://emboss.sourceforge.net/apps/release/5.0/emboss/apps/antigenic.html + +------ + +**Citation** + +For the underlying tool, please cite `Rice P, Longden I, Bleasby A. EMBOSS: the European Molecular Biology Open Software Suite. Trends Genet. 2000 Jun;16(6):276-7. <http://www.ncbi.nlm.nih.gov/pubmed/10827456>`_ + +If you use this tool in Galaxy, please cite `Blankenberg D, Taylor J, Schenck I, He J, Zhang Y, Ghent M, Veeraraghavan N, Albert I, Miller W, Makova KD, Hardison RC, Nekrutenko A. A framework for collaborative analysis of ENCODE data: making large-scale analyses biologist-friendly. Genome Res. 2007 Jun;17(6):960-4. 
<http://www.ncbi.nlm.nih.gov/pubmed/17568012>`_ + + + \ No newline at end of file diff --git a/lib/tool_shed/test/test_data/repos/emboss_5_0470/1/emboss_format_corrector.py b/lib/tool_shed/test/test_data/repos/emboss_5_0470/1/emboss_format_corrector.py new file mode 100644 index 000000000000..3591cd8feaeb --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/emboss_5_0470/1/emboss_format_corrector.py @@ -0,0 +1,53 @@ +#EMBOSS format corrector + +import operator +#from galaxy import datatypes + +#Properly set file formats after job run +def exec_after_process( app, inp_data, out_data, param_dict,tool, stdout, stderr): +#Properly set file formats before job run +#def exec_before_job(trans, inp_data, out_data, param_dict,tool): + #why isn't items an ordered list? + items = out_data.items() + #lets sort it ourselves.... + items = sorted(items, key=operator.itemgetter(0)) + #items is now sorted... + + #normal filetype correction + data_count=1 + for name, data in items: + outputType = param_dict.get( 'out_format'+str(data_count), None ) + #print "data_count",data_count, "name", name, "outputType", outputType + if outputType !=None: + if outputType == 'ncbi': + outputType = "fasta" + elif outputType == 'excel': + outputType = "tabular" + elif outputType == 'text': + outputType = "txt" + data = app.datatypes_registry.change_datatype(data, outputType) + app.model.context.add( data ) + app.model.context.flush() + data_count+=1 + + #html filetype correction + data_count=1 + for name, data in items: + wants_plot = param_dict.get( 'html_out'+str(data_count), None ) + ext = "html" + if wants_plot == "yes": + data = app.datatypes_registry.change_datatype(data, ext) + app.model.context.add( data ) + app.model.context.flush() + data_count+=1 + + #png file correction + data_count=1 + for name, data in items: + wants_plot = param_dict.get( 'plot'+str(data_count), None ) + ext = "png" + if wants_plot == "yes": + data = app.datatypes_registry.change_datatype(data, ext) + app.model.context.add( data ) + app.model.context.flush() + data_count+=1 diff --git a/lib/tool_shed/test/test_data/emboss/0470_files/tool_dependencies.xml b/lib/tool_shed/test/test_data/repos/emboss_5_0470/1/tool_dependencies.xml similarity index 100% rename from lib/tool_shed/test/test_data/emboss/0470_files/tool_dependencies.xml rename to lib/tool_shed/test/test_data/repos/emboss_5_0470/1/tool_dependencies.xml diff --git a/lib/tool_shed/test/test_data/emboss/libx11_proto/first_tool_dependency/tool_dependencies.xml b/lib/tool_shed/test/test_data/repos/libx11_proto/0/tool_dependencies.xml similarity index 100% rename from lib/tool_shed/test/test_data/emboss/libx11_proto/first_tool_dependency/tool_dependencies.xml rename to lib/tool_shed/test/test_data/repos/libx11_proto/0/tool_dependencies.xml diff --git a/lib/tool_shed/test/test_data/emboss/libx11_proto/second_tool_dependency/tool_dependencies.xml b/lib/tool_shed/test/test_data/repos/libx11_proto/1/tool_dependencies.xml similarity index 100% rename from lib/tool_shed/test/test_data/emboss/libx11_proto/second_tool_dependency/tool_dependencies.xml rename to lib/tool_shed/test/test_data/repos/libx11_proto/1/tool_dependencies.xml diff --git a/lib/tool_shed/test/test_data/emboss/emboss_5_0_0/first_tool_dependency/tool_dependencies.xml b/lib/tool_shed/test/test_data/repos/package_emboss_5_0_0_0470/0/tool_dependencies.xml similarity index 100% rename from lib/tool_shed/test/test_data/emboss/emboss_5_0_0/first_tool_dependency/tool_dependencies.xml rename to 
lib/tool_shed/test/test_data/repos/package_emboss_5_0_0_0470/0/tool_dependencies.xml diff --git a/lib/tool_shed/test/test_data/emboss/emboss_5_0_0/second_tool_dependency/tool_dependencies.xml b/lib/tool_shed/test/test_data/repos/package_emboss_5_0_0_0470/1/tool_dependencies.xml similarity index 100% rename from lib/tool_shed/test/test_data/emboss/emboss_5_0_0/second_tool_dependency/tool_dependencies.xml rename to lib/tool_shed/test/test_data/repos/package_emboss_5_0_0_0470/1/tool_dependencies.xml diff --git a/lib/tool_shed/test/test_data/safetar_with_symlink.tar b/lib/tool_shed/test/test_data/safetar_with_symlink.tar new file mode 100644 index 0000000000000000000000000000000000000000..a7810960f691e69a430682cb64c6bd5ae6cd3ad8 GIT binary patch literal 3072 zcmeH`O%4Jf424;Hir!!fwDdf>AjU*7iuboLZimDLW)j;?6KMMLy}Ww8FK%)mqA-JN zIi;R4(3Ei2vKi6Ni TKl}o-%`gH+zz7(DeGzyA%=bao literal 0 HcmV?d00001 diff --git a/lib/tool_shed/util/repository_content_util.py b/lib/tool_shed/util/repository_content_util.py index 418e9ca9a4e7..3f04ff5581da 100644 --- a/lib/tool_shed/util/repository_content_util.py +++ b/lib/tool_shed/util/repository_content_util.py @@ -24,6 +24,20 @@ from tool_shed.webapp.model import Repository +def tar_open(uploaded_file): + isgzip = False + isbz2 = False + isgzip = checkers.is_gzip(uploaded_file) + if not isgzip: + isbz2 = checkers.is_bz2(uploaded_file) + if isgzip or isbz2: + # Open for reading with transparent compression. + tar = tarfile.open(uploaded_file, "r:*") + else: + tar = tarfile.open(uploaded_file) + return tar + + def upload_tar( app: "ToolShedApp", host: str, @@ -39,17 +53,7 @@ def upload_tar( tdah: Optional[ToolDependencyAttributeHandler] = None, ) -> ChangeResponseT: if tar is None: - isgzip = False - isbz2 = False - isgzip = checkers.is_gzip(uploaded_file) - if not isgzip: - isbz2 = checkers.is_bz2(uploaded_file) - if isgzip or isbz2: - # Open for reading with transparent compression. - tar = tarfile.open(uploaded_file, "r:*") - else: - tar = tarfile.open(uploaded_file) - + tar = tar_open(uploaded_file) rdah = rdah or RepositoryDependencyAttributeHandler(app, unpopulate=False) tdah = tdah or ToolDependencyAttributeHandler(app, unpopulate=False) # Upload a tar archive of files. 
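Note: the new tar_open helper above pulls the compression sniffing out of upload_tar so other callers (such as the test helpers later in this series) can open plain, gzip-, or bzip2-compressed tarballs uniformly. A minimal usage sketch, with a hypothetical archive path:

    from tool_shed.util.repository_content_util import tar_open

    # checkers.is_gzip / checkers.is_bz2 decide whether the "r:*" mode
    # (transparent decompression) is needed; plain tars fall through to
    # a bare tarfile.open().
    tar = tar_open("/tmp/column_maker.tar")  # hypothetical path
    try:
        print([member.name for member in tar.getmembers()])
    finally:
        tar.close()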
diff --git a/lib/tool_shed/webapp/api/repositories.py b/lib/tool_shed/webapp/api/repositories.py index a0d5df75247e..16c13f67ad53 100644 --- a/lib/tool_shed/webapp/api/repositories.py +++ b/lib/tool_shed/webapp/api/repositories.py @@ -41,7 +41,6 @@ from tool_shed.metadata import repository_metadata_manager from tool_shed.repository_types import util as rt_util from tool_shed.util import ( - commit_util, encoding_util, metadata_util, repository_util, diff --git a/mypy.ini b/mypy.ini index 6a3b9a641717..fe479abe7da6 100644 --- a/mypy.ini +++ b/mypy.ini @@ -4,7 +4,7 @@ plugins = pydantic.mypy show_error_codes = True ignore_missing_imports = True check_untyped_defs = True -exclude = lib/galaxy/tools/bundled|test/functional +exclude = lib/galaxy/tools/bundled|test/functional|.*tool_shed/test/test_data/repos pretty = True no_implicit_reexport = True no_implicit_optional = True diff --git a/pyproject.toml b/pyproject.toml index 3a74fe4a38ca..6ab9b16296be 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,6 +5,7 @@ include = '\.pyi?$' extend-exclude = ''' ^/( | packages + | lib/tool_shed/test/test_data/repos )/ ''' diff --git a/test/unit/webapps/test_tool_validation.py b/test/unit/webapps/test_tool_validation.py index 02326c67e397..a1f421120ecc 100644 --- a/test/unit/webapps/test_tool_validation.py +++ b/test/unit/webapps/test_tool_validation.py @@ -1,7 +1,5 @@ import os import shutil -import tarfile -import tempfile from contextlib import contextmanager from galaxy.app_unittest_utils.galaxy_mock import MockApp @@ -9,14 +7,15 @@ from galaxy.util import galaxy_directory from tool_shed.tools.tool_validator import ToolValidator -BISMARK_TAR = os.path.join(galaxy_directory(), "lib/tool_shed/test/test_data/bismark/bismark.tar") +BISMARK_DIR = os.path.join(galaxy_directory(), "lib/tool_shed/test/test_data/repos/bismark/0") BOWTIE2_INDICES = os.path.join( galaxy_directory(), "lib/tool_shed/test/test_data/bowtie2_loc_sample/bowtie2_indices.loc.sample" ) def test_validate_valid_tool(): - with get_tool_validator() as tv, setup_bismark() as repo_dir: + repo_dir = BISMARK_DIR + with get_tool_validator() as tv: full_path = os.path.join(repo_dir, "bismark_methylation_extractor.xml") tool, valid, message = tv.load_tool_from_config(repository_id=None, full_path=full_path) assert tool.name == "Bismark" @@ -27,14 +26,16 @@ def test_validate_valid_tool(): def test_tool_validation_denies_allow_codefile(): - with get_tool_validator() as tv, setup_bismark() as repo_dir: + repo_dir = BISMARK_DIR + with get_tool_validator() as tv: full_path = os.path.join(repo_dir, "bismark_methylation_extractor.xml") tool, valid, message = tv.load_tool_from_config(repository_id=None, full_path=full_path) assert tool._allow_code_files is False def test_validate_tool_without_index(): - with get_tool_validator() as tv, setup_bismark() as repo_dir: + repo_dir = BISMARK_DIR + with get_tool_validator() as tv: full_path = os.path.join(repo_dir, "bismark_bowtie2_wrapper.xml") tool, valid, message = tv.load_tool_from_config(repository_id=None, full_path=full_path) assert valid is True @@ -63,15 +64,6 @@ def test_validate_tool_without_index(): assert not tool.params_with_missing_index_file -@contextmanager -def setup_bismark(): - repo_dir = tempfile.mkdtemp() - with tarfile.open(BISMARK_TAR) as archive: - archive.extractall(repo_dir) - yield repo_dir - shutil.rmtree(repo_dir, ignore_errors=True) - - @contextmanager def get_tool_validator(): app = MockApp() From b66a0bc17551ec53d88968e08bd507e7902e831f Mon Sep 17 00:00:00 2001 From: John 
Chilton Date: Mon, 7 Nov 2022 05:18:52 -0500 Subject: [PATCH 38/73] More integration-y unit tests for tool shed... --- lib/tool_shed/managers/repositories.py | 5 +++- lib/tool_shed/test/base/twilltestcase.py | 25 +++++++++++++------ .../test_0000_basic_repository_features.py | 4 ++- ..._0010_repository_with_tool_dependencies.py | 4 ++- ...st_0030_repository_dependency_revisions.py | 16 +++++++----- .../test_1300_reset_all_metadata.py | 2 +- lib/tool_shed/util/repository_content_util.py | 9 +++++-- test/unit/tool_shed/_util.py | 17 +++++++------ test/unit/tool_shed/test_repository_utils.py | 19 +++++++++++--- 9 files changed, 70 insertions(+), 31 deletions(-) diff --git a/lib/tool_shed/managers/repositories.py b/lib/tool_shed/managers/repositories.py index da1ebd0f4f1a..bf2577275045 100644 --- a/lib/tool_shed/managers/repositories.py +++ b/lib/tool_shed/managers/repositories.py @@ -13,7 +13,10 @@ from tool_shed.structured_app import ToolShedApp from tool_shed.util import hg_util from tool_shed.util.repository_content_util import upload_tar -from tool_shed.webapp.model import Repository, User +from tool_shed.webapp.model import ( + Repository, + User, +) log = logging.getLogger(__name__) diff --git a/lib/tool_shed/test/base/twilltestcase.py b/lib/tool_shed/test/base/twilltestcase.py index 89743aff6877..41543fb10b50 100644 --- a/lib/tool_shed/test/base/twilltestcase.py +++ b/lib/tool_shed/test/base/twilltestcase.py @@ -56,12 +56,12 @@ from galaxy_test.base.api_asserts import assert_status_code_is_ok from galaxy_test.base.api_util import get_admin_api_key from galaxy_test.base.populators import wait_on_assertion +from tool_shed.test.base.populators import TEST_DATA_REPO_FILES from tool_shed.util import ( hg_util, hgweb_config, xml_util, ) -from tool_shed.test.base.populators import TEST_DATA_REPO_FILES from tool_shed.util.repository_content_util import tar_open from tool_shed.webapp.model import Repository as DbRepository from tool_shed_client.schema import ( @@ -1082,7 +1082,9 @@ def setup_freebayes_0010_repo(self, repository: Repository): ] self.add_file_to_repository(repository, "freebayes/freebayes.xml", strings_displayed=strings_displayed) strings_displayed = ["Upload a file named sam_fa_indices.loc.sample"] - self.add_file_to_repository(repository, "freebayes/tool_data_table_conf.xml.sample", strings_displayed=strings_displayed) + self.add_file_to_repository( + repository, "freebayes/tool_data_table_conf.xml.sample", strings_displayed=strings_displayed + ) self.add_file_to_repository(repository, "freebayes/sam_fa_indices.loc.sample") target = os.path.join("freebayes", "malformed_tool_dependencies", "tool_dependencies.xml") self.add_file_to_repository( @@ -1098,7 +1100,12 @@ def setup_freebayes_0010_repo(self, repository: Repository): self.add_file_to_repository(repository, target) def add_file_to_repository( - self, repository: Repository, source: str, target: Optional[str] = None, strings_displayed=None, commit_message: Optional[str] = None + self, + repository: Repository, + source: str, + target: Optional[str] = None, + strings_displayed=None, + commit_message: Optional[str] = None, ): with self.cloned_repo(repository) as temp_directory: if target is None: @@ -1107,18 +1114,20 @@ def add_file_to_repository( full_source = TEST_DATA_REPO_FILES.joinpath(source) shutil.copyfile(str(full_source), full_target) commit_message = commit_message or "Uploaded revision with added file." 
- self._upload_dir_to_repository(repository, temp_directory, commit_message=commit_message, strings_displayed=strings_displayed) + self._upload_dir_to_repository( + repository, temp_directory, commit_message=commit_message, strings_displayed=strings_displayed + ) - def add_tar_to_repository( - self, repository: Repository, source: str, strings_displayed=None - ): + def add_tar_to_repository(self, repository: Repository, source: str, strings_displayed=None): with self.cloned_repo(repository) as temp_directory: full_source = TEST_DATA_REPO_FILES.joinpath(source) tar = tar_open(full_source) tar.extractall(path=temp_directory) tar.close() commit_message = "Uploaded revision with added files from tar." - self._upload_dir_to_repository(repository, temp_directory, commit_message=commit_message, strings_displayed=strings_displayed) + self._upload_dir_to_repository( + repository, temp_directory, commit_message=commit_message, strings_displayed=strings_displayed + ) def commit_tar_to_repository( self, repository: Repository, source: str, commit_message=None, strings_displayed=None diff --git a/lib/tool_shed/test/functional/test_0000_basic_repository_features.py b/lib/tool_shed/test/functional/test_0000_basic_repository_features.py index 261e9c5a6187..d6f7092e2a8d 100644 --- a/lib/tool_shed/test/functional/test_0000_basic_repository_features.py +++ b/lib/tool_shed/test/functional/test_0000_basic_repository_features.py @@ -78,7 +78,9 @@ def test_0030_grant_write_access(self): def test_0035_upload_filtering_1_1_0(self): """Upload filtering_1.1.0.tar to the repository""" repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) - self.commit_tar_to_repository(repository, "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0") + self.commit_tar_to_repository( + repository, "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0" + ) def test_0040_verify_repository(self): """Display basic repository pages""" diff --git a/lib/tool_shed/test/functional/test_0010_repository_with_tool_dependencies.py b/lib/tool_shed/test/functional/test_0010_repository_with_tool_dependencies.py index 3ec4755caed8..830e0d0022c7 100644 --- a/lib/tool_shed/test/functional/test_0010_repository_with_tool_dependencies.py +++ b/lib/tool_shed/test/functional/test_0010_repository_with_tool_dependencies.py @@ -70,7 +70,9 @@ def test_0015_upload_missing_tool_data_table_conf_file(self): """ repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) strings_displayed = ["Upload a file named sam_fa_indices.loc.sample"] - self.add_file_to_repository(repository, "freebayes/tool_data_table_conf.xml.sample", strings_displayed=strings_displayed) + self.add_file_to_repository( + repository, "freebayes/tool_data_table_conf.xml.sample", strings_displayed=strings_displayed + ) self.display_manage_repository_page( repository, strings_displayed=["Invalid tools"], strings_not_displayed=["Valid tools"] ) diff --git a/lib/tool_shed/test/functional/test_0030_repository_dependency_revisions.py b/lib/tool_shed/test/functional/test_0030_repository_dependency_revisions.py index a1a54a06efd2..3b9883eb439e 100644 --- a/lib/tool_shed/test/functional/test_0030_repository_dependency_revisions.py +++ b/lib/tool_shed/test/functional/test_0030_repository_dependency_revisions.py @@ -39,9 +39,7 @@ def test_0010_create_emboss_5_repository(self): owner=common.test_user_1_name, category=category, ) - self.commit_tar_to_repository( - repository, 
"emboss/emboss.tar", commit_message="Uploaded tool tarball." - ) + self.commit_tar_to_repository(repository, "emboss/emboss.tar", commit_message="Uploaded tool tarball.") def test_0015_create_emboss_6_repository(self): """Create and populate the emboss_6_0030 repository.""" @@ -55,7 +53,9 @@ def test_0015_create_emboss_6_repository(self): category=category, ) self.commit_tar_to_repository( - repository, "emboss/emboss.tar", commit_message="Uploaded tool tarball.", + repository, + "emboss/emboss.tar", + commit_message="Uploaded tool tarball.", ) def test_0020_create_dependent_repository(self): @@ -71,7 +71,9 @@ def test_0020_create_dependent_repository(self): ) if self.repository_is_new(repository): self.commit_tar_to_repository( - repository, "column_maker/column_maker.tar", commit_message="Uploaded bismark tarball.", + repository, + "column_maker/column_maker.tar", + commit_message="Uploaded bismark tarball.", ) def test_0025_create_emboss_repository(self): @@ -86,7 +88,9 @@ def test_0025_create_emboss_repository(self): category=category, ) self.commit_tar_to_repository( - repository, "emboss/emboss.tar", commit_message="Uploaded the tool tarball.", + repository, + "emboss/emboss.tar", + commit_message="Uploaded the tool tarball.", ) def test_0030_generate_repository_dependencies_for_emboss_5(self): diff --git a/lib/tool_shed/test/functional/test_1300_reset_all_metadata.py b/lib/tool_shed/test/functional/test_1300_reset_all_metadata.py index 4b5571c74146..7115bf47d02f 100644 --- a/lib/tool_shed/test/functional/test_1300_reset_all_metadata.py +++ b/lib/tool_shed/test/functional/test_1300_reset_all_metadata.py @@ -262,7 +262,7 @@ def test_0030_create_repositories_from_0040_series(self): owner=common.test_user_1_name, category=category, strings_displayed=[], - ) + ) self.commit_tar_to_repository( repository, "filtering/filtering_1.1.0.tar", diff --git a/lib/tool_shed/util/repository_content_util.py b/lib/tool_shed/util/repository_content_util.py index 3f04ff5581da..163470942bc5 100644 --- a/lib/tool_shed/util/repository_content_util.py +++ b/lib/tool_shed/util/repository_content_util.py @@ -62,8 +62,13 @@ def upload_tar( check_results = check_archive(repository, tar) if check_results.invalid: tar.close() - uploaded_file.close() - message = "{} Invalid paths were: {}".format(" ".join(check_results.errors), ", ".join(check_results.invalid)) + try: + uploaded_file.close() + except AttributeError: + pass + message = "{} Invalid paths were: {}".format( + " ".join(check_results.errors), ", ".join([i.name for i in check_results.invalid]) + ) return False, message, [], "", undesirable_dirs_removed, undesirable_files_removed else: repo_dir = repository.repo_path(app) diff --git a/test/unit/tool_shed/_util.py b/test/unit/tool_shed/_util.py index 54e080d7cb98..22571813de22 100644 --- a/test/unit/tool_shed/_util.py +++ b/test/unit/tool_shed/_util.py @@ -17,7 +17,10 @@ from tool_shed.repository_types import util as rt_util from tool_shed.repository_types.registry import Registry as RepositoryTypesRegistry from tool_shed.structured_app import ToolShedApp -from tool_shed.test.base.populators import repo_tars, TEST_DATA_REPO_FILES +from tool_shed.test.base.populators import ( + repo_tars, + TEST_DATA_REPO_FILES, +) from tool_shed.util.hgweb_config import hgweb_config_manager from tool_shed.util.repository_util import create_repository from tool_shed.webapp.model import ( @@ -26,7 +29,6 @@ User, ) - TEST_DATA_FILES = TEST_DATA_REPO_FILES TEST_HOST = "localhost" TEST_COMMIT_MESSAGE = "Test Commit Message" @@ 
-50,10 +52,6 @@ def get(self, key, default): class TestToolShedApp(ToolShedApp): repository_types_registry = RepositoryTypesRegistry() - model = mapping.init( - "sqlite:///:memory:", - create_tables=True, - ) config: TestToolShedConfig hgweb_config_manager = hgweb_config_manager repository_registry: tool_shed.repository_registry.Registry @@ -61,6 +59,10 @@ class TestToolShedApp(ToolShedApp): name: str = "ToolShed" def __init__(self, temp_directory=None): + self.model = mapping.init( + "sqlite:///:memory:", + create_tables=True, + ) temp_directory = temp_directory or mkdtemp() hgweb_config_dir = os.path.join(temp_directory, "hgweb") safe_makedirs(hgweb_config_dir) @@ -130,8 +132,7 @@ def upload(app: TestToolShedApp, repository: Repository, path: Path, arcname: Op repository.user, repository, tar_path, - None, - TEST_COMMIT_MESSAGE, + commit_message=TEST_COMMIT_MESSAGE, ) diff --git a/test/unit/tool_shed/test_repository_utils.py b/test/unit/tool_shed/test_repository_utils.py index e408929eb494..5d21058b0e5b 100644 --- a/test/unit/tool_shed/test_repository_utils.py +++ b/test/unit/tool_shed/test_repository_utils.py @@ -26,7 +26,7 @@ def test_create_repository(shed_app: TestToolShedApp, new_user: User): def test_upload_tar(shed_app: TestToolShedApp, new_repository: Repository): - tar_resource = TEST_DATA_FILES.joinpath("convert_chars/convert_chars.tar") + tar_resource = TEST_DATA_FILES.joinpath("column_maker/column_maker.tar") old_tip = new_repository.tip() upload_ok, _, _, alert, dirs_removed, files_removed = upload_tar( shed_app, @@ -34,8 +34,7 @@ def test_upload_tar(shed_app: TestToolShedApp, new_repository: Repository): new_repository.user.username, new_repository, tar_resource, - None, - "Commit Message", + commit_message="Commit Message", ) assert upload_ok assert alert == "" @@ -48,3 +47,17 @@ def test_upload_tar(shed_app: TestToolShedApp, new_repository: Repository): for change in changesets: ctx = new_repository.hg_repo[change] assert str(ctx) == new_tip + + +def test_upload_fails_if_contains_symlink(shed_app: TestToolShedApp, new_repository: Repository): + tar_resource = TEST_DATA_FILES.joinpath("safetar_with_symlink.tar") + upload_ok, message, _, _, _, _ = upload_tar( + shed_app, + "localhost", + new_repository.user.username, + new_repository, + tar_resource, + commit_message="Commit Message", + ) + assert not upload_ok + assert "Invalid paths" in message From e7353b78b97213fc578ad195464dd01e650064ed Mon Sep 17 00:00:00 2001 From: John Chilton Date: Sun, 6 Nov 2022 14:56:03 -0500 Subject: [PATCH 39/73] Remove upload controller - everything should come through the API now. 
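For anyone scripting against the shed: with the controller removed, new revisions are committed through the repository API instead. A rough sketch of the equivalent client-side call, assuming the bioblend/planemo-style changeset_revision endpoint and placeholder credentials (URL, id, and API key below are hypothetical):

    import requests

    base_url = "http://localhost:9009"  # hypothetical Tool Shed URL
    repository_id = "encoded-repo-id"   # hypothetical encoded repository id
    with open("filtering_1.1.0.tar", "rb") as tarball:
        # Post a tarball as a new changeset revision of the repository.
        response = requests.post(
            f"{base_url}/api/repositories/{repository_id}/changeset_revision",
            data={"key": "admin-api-key", "commit_message": "Uploaded filtering 1.1.0"},
            files={"file": tarball},
        )
    response.raise_for_status()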
--- .../test_0000_basic_repository_features.py | 6 +- lib/tool_shed/webapp/controllers/upload.py | 449 ------------------ .../common/repository_actions_menu.mako | 11 - .../webapps/tool_shed/repository/common.mako | 4 - .../webapps/tool_shed/repository/upload.mako | 160 ------- 5 files changed, 3 insertions(+), 627 deletions(-) delete mode 100644 lib/tool_shed/webapp/controllers/upload.py delete mode 100644 lib/tool_shed/webapp/templates/webapps/tool_shed/repository/upload.mako diff --git a/lib/tool_shed/test/functional/test_0000_basic_repository_features.py b/lib/tool_shed/test/functional/test_0000_basic_repository_features.py index d6f7092e2a8d..f5c886f7cd62 100644 --- a/lib/tool_shed/test/functional/test_0000_basic_repository_features.py +++ b/lib/tool_shed/test/functional/test_0000_basic_repository_features.py @@ -132,13 +132,13 @@ def test_0045_alter_repository_states(self): self.display_manage_repository_page( repository, strings_displayed=strings_displayed, - strings_not_displayed=["Upload files", "Reset all repository metadata"], + strings_not_displayed=["Reset all repository metadata"], ) - self.browse_repository(repository, strings_not_displayed=["Upload files"]) + self.browse_repository(repository) self.set_repository_deprecated( repository, strings_displayed=["has been marked as not deprecated"], set_deprecated=False ) - strings_displayed = ["Mark repository as deprecated", "Upload files", "Reset all repository metadata"] + strings_displayed = ["Mark repository as deprecated", "Reset all repository metadata"] self.display_manage_repository_page(repository, strings_displayed=strings_displayed) def test_0050_display_repository_tip_file(self): diff --git a/lib/tool_shed/webapp/controllers/upload.py b/lib/tool_shed/webapp/controllers/upload.py deleted file mode 100644 index 373a12cea534..000000000000 --- a/lib/tool_shed/webapp/controllers/upload.py +++ /dev/null @@ -1,449 +0,0 @@ -import logging -import os -import shutil -import tarfile -import tempfile - -import requests - -from galaxy import ( - util, - web, -) -from galaxy.tool_shed.util import dependency_display -from galaxy.util import checkers -from galaxy.webapps.base.controller import BaseUIController -from tool_shed.dependencies import attribute_handlers -from tool_shed.metadata import repository_metadata_manager -from tool_shed.repository_types import util as rt_util -from tool_shed.tools.data_table_manager import ShedToolDataTableManager -from tool_shed.util import ( - basic_util, - commit_util, - hg_util, - repository_content_util, - repository_util, - shed_util_common as suc, - xml_util, -) -from tool_shed.util.web_util import escape -from tool_shed.webapp.framework.decorators import require_login - -log = logging.getLogger(__name__) - - -class UploadController(BaseUIController): - @web.expose - @require_login("upload", use_panels=True) - def upload(self, trans, **kwd): - message = escape(kwd.get("message", "")) - status = kwd.get("status", "done") - commit_message = escape(kwd.get("commit_message", "Uploaded")) - repository_id = kwd.get("repository_id", "") - repository = repository_util.get_repository_in_tool_shed(trans.app, repository_id) - repo_dir = repository.repo_path(trans.app) - uncompress_file = util.string_as_bool(kwd.get("uncompress_file", "true")) - remove_repo_files_not_in_tar = util.string_as_bool(kwd.get("remove_repo_files_not_in_tar", "true")) - uploaded_file = None - upload_point = commit_util.get_upload_point(repository, **kwd) - tip = repository.tip() - file_data = kwd.get("file_data", "") - url = 
kwd.get("url", "") - # Part of the upload process is sending email notification to those that have registered to - # receive them. One scenario occurs when the first change set is produced for the repository. - # See the suc.handle_email_alerts() method for the definition of the scenarios. - new_repo_alert = repository.is_new() - uploaded_directory = None - if kwd.get("upload_button", False): - if file_data == "" and url == "": - message = "No files were entered on the upload form." - status = "error" - uploaded_file = None - elif url and url.startswith("hg"): - # Use mercurial clone to fetch repository, contents will then be copied over. - uploaded_directory = tempfile.mkdtemp() - repo_url = f"http{url[len('hg'):]}" - cloned_ok, error_message = hg_util.clone_repository(repo_url, uploaded_directory) - if not cloned_ok: - message = f"Error uploading via mercurial clone: {error_message}" - status = "error" - basic_util.remove_dir(uploaded_directory) - uploaded_directory = None - elif url: - valid_url = True - try: - stream = requests.get(url, stream=True, timeout=util.DEFAULT_SOCKET_TIMEOUT) - except Exception as e: - valid_url = False - message = f"Error uploading file via http: {util.unicodify(e)}" - status = "error" - uploaded_file = None - if valid_url: - with tempfile.NamedTemporaryFile(mode="wb", delete=False) as uploaded_file: - uploaded_file_name = uploaded_file.name - for chunk in stream.iter_content(chunk_size=util.CHUNK_SIZE): - if chunk: - uploaded_file.write(chunk) - uploaded_file.flush() - uploaded_file_filename = url.split("/")[-1] - isempty = os.path.getsize(os.path.abspath(uploaded_file_name)) == 0 - elif file_data not in ("", None): - uploaded_file = file_data.file - uploaded_file_name = uploaded_file.name - uploaded_file_filename = os.path.split(file_data.filename)[-1] - isempty = os.path.getsize(os.path.abspath(uploaded_file_name)) == 0 - if uploaded_file or uploaded_directory: - rdah = attribute_handlers.RepositoryDependencyAttributeHandler(trans.app, unpopulate=False) - tdah = attribute_handlers.ToolDependencyAttributeHandler(trans.app, unpopulate=False) - stdtm = ShedToolDataTableManager(trans.app) - ok = True - isgzip = False - isbz2 = False - if uploaded_file: - if uncompress_file: - isgzip = checkers.is_gzip(uploaded_file_name) - if not isgzip: - isbz2 = checkers.is_bz2(uploaded_file_name) - if isempty: - tar = None - istar = False - else: - # Determine what we have - a single file or an archive - try: - if (isgzip or isbz2) and uncompress_file: - # Open for reading with transparent compression. 
- tar = tarfile.open(uploaded_file_name, "r:*") - else: - tar = tarfile.open(uploaded_file_name) - istar = True - except tarfile.ReadError: - tar = None - istar = False - else: - # Uploaded directory - istar = False - if istar: - ( - ok, - message, - files_to_remove, - content_alert_str, - undesirable_dirs_removed, - undesirable_files_removed, - ) = repository_content_util.upload_tar( - trans.app, - trans.request.host, - trans.user.username, - repository, - uploaded_file, - upload_point, - commit_message, - remove_repo_files_not_in_tar=remove_repo_files_not_in_tar, - new_repo_alert=new_repo_alert, - rdah=rdah, - tdah=tdah, - tar=tar, - ) - elif uploaded_directory: - ( - ok, - message, - files_to_remove, - content_alert_str, - undesirable_dirs_removed, - undesirable_files_removed, - ) = self.upload_directory( - trans, - rdah, - tdah, - repository, - uploaded_directory, - upload_point, - remove_repo_files_not_in_tar, - commit_message, - new_repo_alert, - ) - else: - if (isgzip or isbz2) and uncompress_file: - uploaded_file_filename = commit_util.uncompress( - repository, uploaded_file_name, uploaded_file_filename, isgzip=isgzip, isbz2=isbz2 - ) - if ( - repository.type == rt_util.REPOSITORY_SUITE_DEFINITION - and uploaded_file_filename != rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME - ): - ok = False - message = "Repositories of type Repository suite definition can only contain a single file named " - message += "repository_dependencies.xml." - elif ( - repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION - and uploaded_file_filename != rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME - ): - ok = False - message = "Repositories of type Tool dependency definition can only contain a single file named " - message += "tool_dependencies.xml." - if ok: - if upload_point is not None: - full_path = os.path.abspath(os.path.join(repo_dir, upload_point, uploaded_file_filename)) - else: - full_path = os.path.abspath(os.path.join(repo_dir, uploaded_file_filename)) - # Move some version of the uploaded file to the load_point within the repository hierarchy. - if uploaded_file_filename in [rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME]: - # Inspect the contents of the file to see if toolshed or changeset_revision attributes - # are missing and if so, set them appropriately. - altered, root_elem, error_message = rdah.handle_tag_attributes(uploaded_file_name) - if error_message: - ok = False - message = error_message - status = "error" - elif altered: - tmp_filename = xml_util.create_and_write_tmp_file(root_elem) - shutil.move(tmp_filename, full_path) - else: - shutil.move(uploaded_file_name, full_path) - elif uploaded_file_filename in [rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME]: - # Inspect the contents of the file to see if changeset_revision values are - # missing and if so, set them appropriately. - altered, root_elem, error_message = tdah.handle_tag_attributes(uploaded_file_name) - if error_message: - ok = False - message = error_message - status = "error" - if ok: - if altered: - tmp_filename = xml_util.create_and_write_tmp_file(root_elem) - shutil.move(tmp_filename, full_path) - else: - shutil.move(uploaded_file_name, full_path) - else: - shutil.move(uploaded_file_name, full_path) - if ok: - # See if any admin users have chosen to receive email alerts when a repository is updated. - # If so, check every uploaded file to ensure content is appropriate. 
- check_contents = commit_util.check_file_contents_for_email_alerts(trans.app) - if check_contents and os.path.isfile(full_path): - content_alert_str = commit_util.check_file_content_for_html_and_images(full_path) - else: - content_alert_str = "" - hg_util.add_changeset(repo_dir, full_path) - hg_util.commit_changeset( - repo_dir, - full_path_to_changeset=full_path, - username=trans.user.username, - message=commit_message, - ) - if full_path.endswith("tool_data_table_conf.xml.sample"): - # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded - # by parsing the file and adding new entries to the in-memory trans.app.tool_data_tables - # dictionary. - error, error_message = stdtm.handle_sample_tool_data_table_conf_file( - full_path, persist=False - ) - if error: - message = f"{message}
{error_message}" - # See if the content of the change set was valid. - admin_only = len(repository.downloadable_revisions) != 1 - suc.handle_email_alerts( - trans.app, - trans.request.host, - repository, - content_alert_str=content_alert_str, - new_repo_alert=new_repo_alert, - admin_only=admin_only, - ) - if ok: - # Update the repository files for browsing. - hg_util.update_repository(repo_dir) - # Get the new repository tip. - if tip == repository.tip(): - message = "No changes to repository. " - status = "warning" - else: - if (isgzip or isbz2) and uncompress_file: - uncompress_str = " uncompressed and " - else: - uncompress_str = " " - if uploaded_directory: - source_type = "repository" - source = url - else: - source_type = "file" - source = uploaded_file_filename - message = f"The {source_type} {escape(source)} has been successfully{uncompress_str}uploaded to the repository. " - if istar and (undesirable_dirs_removed or undesirable_files_removed): - items_removed = undesirable_dirs_removed + undesirable_files_removed - message += ( - " %d undesirable items (.hg .svn .git directories, .DS_Store, hgrc files, etc) " - % items_removed - ) - message += "were removed from the archive. " - if istar and remove_repo_files_not_in_tar and files_to_remove: - if upload_point is not None: - message += ( - " %d files were removed from the repository relative to the selected upload point '%s'. " - % (len(files_to_remove), upload_point) - ) - else: - message += " %d files were removed from the repository root. " % len(files_to_remove) - rmm = repository_metadata_manager.RepositoryMetadataManager( - app=trans.app, user=trans.user, repository=repository - ) - status, error_message = rmm.set_repository_metadata_due_to_new_tip( - trans.request.host, content_alert_str=content_alert_str, **kwd - ) - if error_message: - message = error_message - kwd["message"] = message - if repository.metadata_revisions: - # A repository's metadata revisions are order descending by update_time, so the zeroth revision - # will be the tip just after an upload. - metadata_dict = repository.metadata_revisions[0].metadata - else: - metadata_dict = {} - dd = dependency_display.DependencyDisplayer(trans.app) - if str(repository.type) not in [ - rt_util.REPOSITORY_SUITE_DEFINITION, - rt_util.TOOL_DEPENDENCY_DEFINITION, - ]: - change_repository_type_message = rt_util.generate_message_for_repository_type_change( - trans.app, repository - ) - if change_repository_type_message: - message += change_repository_type_message - status = "warning" - else: - # Provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies - # weren't loaded due to a requirement tag mismatch or some other problem. Tool dependency - # definitions can define orphan tool dependencies (no relationship to any tools contained in the - # repository), so warning messages are important because orphans are always valid. The repository - # owner must be warned in case they did not intend to define an orphan dependency, but simply - # provided incorrect information (tool shed, name owner, changeset_revision) for the definition. - orphan_message = dd.generate_message_for_orphan_tool_dependencies(repository, metadata_dict) - if orphan_message: - message += orphan_message - status = "warning" - # Handle messaging for invalid tool dependencies. 
- invalid_tool_dependencies_message = dd.generate_message_for_invalid_tool_dependencies(metadata_dict) - if invalid_tool_dependencies_message: - message += invalid_tool_dependencies_message - status = "error" - # Handle messaging for invalid repository dependencies. - invalid_repository_dependencies_message = dd.generate_message_for_invalid_repository_dependencies( - metadata_dict, error_from_tuple=True - ) - if invalid_repository_dependencies_message: - message += invalid_repository_dependencies_message - status = "error" - # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file. - stdtm.reset_tool_data_tables() - if uploaded_directory: - basic_util.remove_dir(uploaded_directory) - trans.response.send_redirect( - web.url_for( - controller="repository", - action="browse_repository", - id=repository_id, - commit_message="Deleted selected files", - message=message, - status=status, - ) - ) - else: - if uploaded_directory: - basic_util.remove_dir(uploaded_directory) - status = "error" - # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file. - stdtm.reset_tool_data_tables() - return trans.fill_template( - "/webapps/tool_shed/repository/upload.mako", - repository=repository, - changeset_revision=tip, - url=url, - commit_message=commit_message, - uncompress_file=uncompress_file, - remove_repo_files_not_in_tar=remove_repo_files_not_in_tar, - message=message, - status=status, - ) - - def upload_directory( - self, - trans, - rdah, - tdah, - repository, - uploaded_directory, - upload_point, - remove_repo_files_not_in_tar, - commit_message, - new_repo_alert, - ): - repo_dir = repository.repo_path(trans.app) - undesirable_dirs_removed = 0 - undesirable_files_removed = 0 - if upload_point is not None: - full_path = os.path.abspath(os.path.join(repo_dir, upload_point)) - else: - full_path = os.path.abspath(repo_dir) - filenames_in_archive = [] - for root, _dirs, files in os.walk(uploaded_directory): - for uploaded_file in files: - relative_path = os.path.normpath(os.path.join(os.path.relpath(root, uploaded_directory), uploaded_file)) - if repository.type == rt_util.REPOSITORY_SUITE_DEFINITION: - ok = os.path.basename(uploaded_file) == rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME - elif repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION: - ok = os.path.basename(uploaded_file) == rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME - else: - ok = os.path.basename(uploaded_file) not in commit_util.UNDESIRABLE_FILES - if ok: - for file_path_item in relative_path.split("/"): - if file_path_item in commit_util.UNDESIRABLE_DIRS: - undesirable_dirs_removed += 1 - ok = False - break - else: - undesirable_files_removed += 1 - if ok: - uploaded_file_name = os.path.abspath(os.path.join(root, uploaded_file)) - if os.path.split(uploaded_file_name)[-1] == rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME: - # Inspect the contents of the file to see if toolshed or changeset_revision - # attributes are missing and if so, set them appropriately. - altered, root_elem, error_message = rdah.handle_tag_attributes(uploaded_file_name) - if error_message: - return False, error_message, [], "", [], [] - elif altered: - tmp_filename = xml_util.create_and_write_tmp_file(root_elem) - shutil.move(tmp_filename, uploaded_file_name) - elif os.path.split(uploaded_file_name)[-1] == rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME: - # Inspect the contents of the file to see if toolshed or changeset_revision - # attributes are missing and if so, set them appropriately. 
- altered, root_elem, error_message = tdah.handle_tag_attributes(uploaded_file_name) - if error_message: - return False, error_message, [], "", [], [] - if altered: - tmp_filename = xml_util.create_and_write_tmp_file(root_elem) - shutil.move(tmp_filename, uploaded_file_name) - repo_path = os.path.join(full_path, relative_path) - repo_basedir = os.path.normpath(os.path.join(repo_path, os.path.pardir)) - if not os.path.exists(repo_basedir): - os.makedirs(repo_basedir) - if os.path.exists(repo_path): - if os.path.isdir(repo_path): - shutil.rmtree(repo_path) - else: - os.remove(repo_path) - shutil.move(os.path.join(uploaded_directory, relative_path), repo_path) - filenames_in_archive.append(relative_path) - return commit_util.handle_directory_changes( - trans.app, - trans.request.host, - trans.user.username, - repository, - full_path, - filenames_in_archive, - remove_repo_files_not_in_tar, - new_repo_alert, - commit_message, - undesirable_dirs_removed, - undesirable_files_removed, - ) diff --git a/lib/tool_shed/webapp/templates/webapps/tool_shed/common/repository_actions_menu.mako b/lib/tool_shed/webapp/templates/webapps/tool_shed/common/repository_actions_menu.mako index 945b0a65f1e2..bfe679f59a53 100644 --- a/lib/tool_shed/webapp/templates/webapps/tool_shed/common/repository_actions_menu.mako +++ b/lib/tool_shed/webapp/templates/webapps/tool_shed/common/repository_actions_menu.mako @@ -58,11 +58,6 @@ else: can_reset_all_metadata = False - if can_push and not is_deprecated: - can_upload = True - else: - can_upload = False - if not is_new and not is_deprecated and trans.user and repository.user != trans.user: can_rate = True else: @@ -97,18 +92,12 @@

    %if is_new: - %if can_upload: - Upload files to repository - %endif %if can_undeprecate: Mark repository as not deprecated %endif %else:
Repository Actions
  • - %if can_upload: - Upload files to repository - %endif %if can_administer: Manage repository %else: diff --git a/lib/tool_shed/webapp/templates/webapps/tool_shed/repository/common.mako b/lib/tool_shed/webapp/templates/webapps/tool_shed/repository/common.mako index 79c69e562e97..1da62ed987fc 100644 --- a/lib/tool_shed/webapp/templates/webapps/tool_shed/repository/common.mako +++ b/lib/tool_shed/webapp/templates/webapps/tool_shed/repository/common.mako @@ -35,10 +35,6 @@ var selKeys = $.map(selNodes, function(node) { return node.data.key; }); - // The following is used only in ~/templates/webapps/tool_shed/repository/upload.mako. - if (document.forms["upload_form"]) { - document.upload_form.upload_point.value = selKeys.slice(-1); - } }, onActivate: function(dtnode) { var cell = $("#file_contents"); diff --git a/lib/tool_shed/webapp/templates/webapps/tool_shed/repository/upload.mako b/lib/tool_shed/webapp/templates/webapps/tool_shed/repository/upload.mako deleted file mode 100644 index 634f957817ed..000000000000 --- a/lib/tool_shed/webapp/templates/webapps/tool_shed/repository/upload.mako +++ /dev/null @@ -1,160 +0,0 @@ -<%namespace file="/message.mako" import="render_msg" /> -<%namespace file="/webapps/tool_shed/repository/common.mako" import="*" /> -<%namespace file="/webapps/tool_shed/common/repository_actions_menu.mako" import="render_tool_shed_repository_actions" /> - -<% - is_new = repository.is_new() -%> - -<%! - def inherit(context): - if context.get('use_panels'): - return '/webapps/tool_shed/base_panels.mako' - else: - return '/base.mako' -%> - -<%inherit file="${inherit(context)}"/> - -<%def name="stylesheets()"> - ${parent.stylesheets()} - ${h.css( "dynatree_skin/ui.dynatree" )} - - -<%def name="javascripts()"> - ${parent.javascripts()} - ${common_javascripts(repository)} - - - -%if message: - ${render_msg( message, status )} -%endif - -${render_tool_shed_repository_actions( repository=repository)} - -
-## (form markup stripped during extraction; only the template's text content is recoverable)
-Upload a single file or tarball. Uploading may take a while, depending upon the size of the file.
-Wait until a message is displayed in your browser after clicking the Upload button below.
-Repository '${repository.name | h}'
-Enter a url to upload your files. In addition to http and ftp urls, urls that point to mercurial repositories (urls that start
-with hg:// or hgs://) are allowed. This mechanism results in the tip revision of an external mercurial repository being added
-to the Tool Shed repository as a single new changeset. The revision history of the originating external mercurial repository is
-not uploaded to the Tool Shed repository.
-<%
-    if uncompress_file:
-        yes_selected = 'selected'
-        no_selected = ''
-    else:
-        yes_selected = ''
-        no_selected = 'selected'
-%>
-Supported compression types are gz and bz2. If Yes is selected, the uploaded file will be uncompressed. However,
-if the uploaded file is an archive that contains compressed files, the contained files will not be uncompressed. For
-example, if the uploaded compressed file is some_file.tar.gz, some_file.tar will be uncompressed and extracted, but if
-some_file.tar contains some_contained_file.gz, the contained file will not be uncompressed.
-%if not is_new:
-    <%
-        if remove_repo_files_not_in_tar:
-            yes_selected = 'selected'
-            no_selected = ''
-        else:
-            yes_selected = ''
-            no_selected = 'selected'
-    %>
-    This selection pertains only to uploaded tar archives, not to single file uploads. If Yes is selected, files
-    that exist in the repository (relative to the root or selected upload point) but that are not in the uploaded archive
-    will be removed from the repository. Otherwise, all existing repository files will remain and the uploaded archive
-    files will be added to the repository.
-%endif
-This is the commit message for the mercurial change set that will be created by this upload.
-%if not repository.is_new():
-    Loading...
-    Select a location within the repository to upload your files by clicking a check box next to the location. The
-    selected location is considered the upload point. If a location is not selected, the upload point will be the
-    repository root.
-%endif
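With the web upload form removed, programmatic upload through the repositories API is the remaining path (PATCH 40 below trims the same endpoint's upload_point handling). A minimal sketch of driving that endpoint with requests — the shed URL, API key, encoded repository id, and tarball name are all placeholders, and the route shape is assumed from create_changeset_revision in lib/tool_shed/webapp/api/repositories.py rather than stated anywhere in this patch series:

    import requests

    TOOL_SHED_URL = "https://toolshed.example.org"  # placeholder shed instance
    ENCODED_REPO_ID = "0123456789abcdef"            # placeholder encoded repository id
    API_KEY = "my-api-key"                          # placeholder API key

    # POST a tarball to the create_changeset_revision API action; on success the
    # controller responds with {"message": ...} describing the outcome.
    with open("my_tool.tar.gz", "rb") as tarball:
        response = requests.post(
            f"{TOOL_SHED_URL}/api/repositories/{ENCODED_REPO_ID}/changeset_revision",
            params={"key": API_KEY},
            data={"commit_message": "New tool version uploaded via the API"},
            files={"file": tarball},
        )
    response.raise_for_status()
    print(response.json()["message"])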
    From 55564b67d7e15f977379149057b82fad84ac9f04 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Sun, 6 Nov 2022 15:30:31 -0500 Subject: [PATCH 40/73] Remove upload_point upload logic in tool shed. Not used by planemo and not tested by any tests. --- lib/tool_shed/managers/repositories.py | 2 -- lib/tool_shed/util/commit_util.py | 22 ------------------- lib/tool_shed/util/repository_content_util.py | 6 +---- lib/tool_shed/webapp/api/repositories.py | 3 --- 4 files changed, 1 insertion(+), 32 deletions(-) diff --git a/lib/tool_shed/managers/repositories.py b/lib/tool_shed/managers/repositories.py index bf2577275045..61625a505de6 100644 --- a/lib/tool_shed/managers/repositories.py +++ b/lib/tool_shed/managers/repositories.py @@ -27,7 +27,6 @@ def upload_tar_and_set_metadata( user: User, repository: Repository, uploaded_file, - upload_point, commit_message: str, ): repo_dir = repository.repo_path(app) @@ -38,7 +37,6 @@ def upload_tar_and_set_metadata( user.username, repository, uploaded_file, - upload_point, commit_message, ) if ok: diff --git a/lib/tool_shed/util/commit_util.py b/lib/tool_shed/util/commit_util.py index c2054bcbb504..83542a7c8db4 100644 --- a/lib/tool_shed/util/commit_util.py +++ b/lib/tool_shed/util/commit_util.py @@ -137,28 +137,6 @@ def get_change_lines_in_file_for_tag(tag, change_dict): return cleaned_lines -def get_upload_point(repository: "Repository", **kwd) -> Optional[str]: - upload_point = kwd.get("upload_point", None) - if upload_point is not None: - # The value of upload_point will be something like: database/community_files/000/repo_12/1.bed - if os.path.exists(upload_point): - if os.path.isfile(upload_point): - # Get the parent directory - upload_point, not_needed = os.path.split(upload_point) - # Now the value of uplaod_point will be something like: database/community_files/000/repo_12/ - upload_point = upload_point.split("repo_%d" % repository.id)[1] - if upload_point: - upload_point = upload_point.lstrip("/") - upload_point = upload_point.rstrip("/") - # Now the value of uplaod_point will be something like: / - if upload_point == "/": - upload_point = None - else: - # Must have been an error selecting something that didn't exist, so default to repository root - upload_point = None - return upload_point - - def handle_bz2(repository: "Repository", uploaded_file_name): with tempfile.NamedTemporaryFile( mode="wb", diff --git a/lib/tool_shed/util/repository_content_util.py b/lib/tool_shed/util/repository_content_util.py index 163470942bc5..3e863f2e5d00 100644 --- a/lib/tool_shed/util/repository_content_util.py +++ b/lib/tool_shed/util/repository_content_util.py @@ -44,7 +44,6 @@ def upload_tar( username: str, repository: "Repository", uploaded_file, - upload_point, commit_message: str, remove_repo_files_not_in_tar: bool = True, new_repo_alert: bool = False, @@ -72,10 +71,7 @@ def upload_tar( return False, message, [], "", undesirable_dirs_removed, undesirable_files_removed else: repo_dir = repository.repo_path(app) - if upload_point is not None: - full_path = os.path.abspath(os.path.join(repo_dir, upload_point)) - else: - full_path = os.path.abspath(repo_dir) + full_path = os.path.abspath(repo_dir) undesirable_files_removed = len(check_results.undesirable_files) undesirable_dirs_removed = len(check_results.undesirable_dirs) filenames_in_archive = [ti.name for ti in check_results.valid] diff --git a/lib/tool_shed/webapp/api/repositories.py b/lib/tool_shed/webapp/api/repositories.py index 16c13f67ad53..28094114fca2 100644 --- 
a/lib/tool_shed/webapp/api/repositories.py +++ b/lib/tool_shed/webapp/api/repositories.py @@ -1021,8 +1021,6 @@ def create_changeset_revision(self, trans, id, payload, **kwd): "err_msg": "You do not have permission to update this repository.", } - upload_point = commit_util.get_upload_point(repository, **kwd) - file_data = payload.get("file") # Code stolen from gx's upload_common.py if isinstance(file_data, cgi_FieldStorage): @@ -1044,7 +1042,6 @@ def create_changeset_revision(self, trans, id, payload, **kwd): trans.user, repository, uploaded_file_name, - upload_point, commit_message, ) rval = {"message": message} From 122abb0a3ce180d84e9c2a30a9c7de2dbed940f4 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Tue, 13 Dec 2022 14:08:42 -0500 Subject: [PATCH 41/73] metadata_generator: no metadata for workflows and datatypes We dropped workflows and datatypes from shed - don't generate metadata based on them. --- .../metadata/repository_metadata_manager.py | 225 +----------------- 1 file changed, 3 insertions(+), 222 deletions(-) diff --git a/lib/tool_shed/metadata/repository_metadata_manager.py b/lib/tool_shed/metadata/repository_metadata_manager.py index 2b1f4282b605..ff83f3a0bede 100644 --- a/lib/tool_shed/metadata/repository_metadata_manager.py +++ b/lib/tool_shed/metadata/repository_metadata_manager.py @@ -330,63 +330,39 @@ def compare_changeset_revisions(self, ancestor_changeset_revision, ancestor_meta # This changeset_revision is an ancestor of self.changeset_revision which is associated # with self.metadata_dict. A new repository_metadata record will be created only # when this method returns the constant value self.NOT_EQUAL_AND_NOT_SUBSET. - ancestor_datatypes = ancestor_metadata_dict.get("datatypes", []) ancestor_tools = ancestor_metadata_dict.get("tools", []) ancestor_guids = [tool_dict["guid"] for tool_dict in ancestor_tools] ancestor_guids.sort() - ancestor_readme_files = ancestor_metadata_dict.get("readme_files", []) ancestor_repository_dependencies_dict = ancestor_metadata_dict.get("repository_dependencies", {}) ancestor_repository_dependencies = ancestor_repository_dependencies_dict.get("repository_dependencies", []) ancestor_tool_dependencies = ancestor_metadata_dict.get("tool_dependencies", {}) - ancestor_workflows = ancestor_metadata_dict.get("workflows", []) ancestor_data_manager = ancestor_metadata_dict.get("data_manager", {}) - current_datatypes = self.metadata_dict.get("datatypes", []) current_tools = self.metadata_dict.get("tools", []) current_guids = [tool_dict["guid"] for tool_dict in current_tools] current_guids.sort() - current_readme_files = self.metadata_dict.get("readme_files", []) current_repository_dependencies_dict = self.metadata_dict.get("repository_dependencies", {}) current_repository_dependencies = current_repository_dependencies_dict.get("repository_dependencies", []) current_tool_dependencies = self.metadata_dict.get("tool_dependencies", {}) - current_workflows = self.metadata_dict.get("workflows", []) current_data_manager = self.metadata_dict.get("data_manager", {}) # Handle case where no metadata exists for either changeset. 
- no_datatypes = not ancestor_datatypes and not current_datatypes - no_readme_files = not ancestor_readme_files and not current_readme_files no_repository_dependencies = not ancestor_repository_dependencies and not current_repository_dependencies no_tool_dependencies = not ancestor_tool_dependencies and not current_tool_dependencies no_tools = not ancestor_guids and not current_guids - no_workflows = not ancestor_workflows and not current_workflows no_data_manager = not ancestor_data_manager and not current_data_manager - if ( - no_datatypes - and no_readme_files - and no_repository_dependencies - and no_tool_dependencies - and no_tools - and no_workflows - and no_data_manager - ): + if no_repository_dependencies and no_tool_dependencies and no_tools and no_data_manager: return self.NO_METADATA - # Uncomment the following if we decide that README files should affect how installable - # repository revisions are defined. See the NOTE in self.compare_readme_files(). - # readme_file_comparision = self.compare_readme_files( ancestor_readme_files, current_readme_files ) repository_dependency_comparison = self.compare_repository_dependencies( ancestor_repository_dependencies, current_repository_dependencies ) tool_dependency_comparison = self.compare_tool_dependencies( ancestor_tool_dependencies, current_tool_dependencies ) - workflow_comparison = self.compare_workflows(ancestor_workflows, current_workflows) - datatype_comparison = self.compare_datatypes(ancestor_datatypes, current_datatypes) data_manager_comparison = self.compare_data_manager(ancestor_data_manager, current_data_manager) # Handle case where all metadata is the same. if ( ancestor_guids == current_guids and repository_dependency_comparison == self.EQUAL and tool_dependency_comparison == self.EQUAL - and workflow_comparison == self.EQUAL - and datatype_comparison == self.EQUAL and data_manager_comparison == self.EQUAL ): return self.EQUAL @@ -394,16 +370,8 @@ def compare_changeset_revisions(self, ancestor_changeset_revision, ancestor_meta # readme_file_is_subset = readme_file_comparision in [ self.EQUAL, self.SUBSET ] repository_dependency_is_subset = repository_dependency_comparison in self.SUBSET_VALUES tool_dependency_is_subset = tool_dependency_comparison in self.SUBSET_VALUES - workflow_dependency_is_subset = workflow_comparison in self.SUBSET_VALUES - datatype_is_subset = datatype_comparison in self.SUBSET_VALUES datamanager_is_subset = data_manager_comparison in self.SUBSET_VALUES - if ( - repository_dependency_is_subset - and tool_dependency_is_subset - and workflow_dependency_is_subset - and datatype_is_subset - and datamanager_is_subset - ): + if repository_dependency_is_subset and tool_dependency_is_subset and datamanager_is_subset: is_subset = True for guid in ancestor_guids: if guid not in current_guids: @@ -442,56 +410,6 @@ def __data_manager_dict_to_tuple_list(metadata_dict): return self.SUBSET return self.NOT_EQUAL_AND_NOT_SUBSET - def compare_datatypes(self, ancestor_datatypes, current_datatypes): - """Determine if ancestor_datatypes is the same as or a subset of current_datatypes.""" - # Each datatype dict looks something like: - # {"dtype": "galaxy.datatypes.images:Image", "extension": "pdf", "mimetype": "application/pdf"} - if len(ancestor_datatypes) <= len(current_datatypes): - for ancestor_datatype in ancestor_datatypes: - # Currently the only way to differentiate datatypes is by name. 
- ancestor_datatype_dtype = ancestor_datatype["dtype"] - ancestor_datatype_extension = ancestor_datatype["extension"] - ancestor_datatype_mimetype = ancestor_datatype.get("mimetype", None) - found_in_current = False - for current_datatype in current_datatypes: - if ( - current_datatype["dtype"] == ancestor_datatype_dtype - and current_datatype["extension"] == ancestor_datatype_extension - and current_datatype.get("mimetype", None) == ancestor_datatype_mimetype - ): - found_in_current = True - break - if not found_in_current: - return self.NOT_EQUAL_AND_NOT_SUBSET - if len(ancestor_datatypes) == len(current_datatypes): - return self.EQUAL - else: - return self.SUBSET - return self.NOT_EQUAL_AND_NOT_SUBSET - - def compare_readme_files(self, ancestor_readme_files, current_readme_files): - """Determine if ancestor_readme_files is equal to or a subset of current_readme_files.""" - # NOTE: Although repository README files are considered a Galaxy utility similar to tools, - # repository dependency definition files, etc., we don't define installable repository revisions - # based on changes to README files. To understand why, consider the following scenario: - # 1. Upload the filtering tool to a new repository - this will result in installable revision 0. - # 2. Upload a README file to the repository - this will move the installable revision from revision - # 0 to revision 1. - # 3. Delete the README file from the repository - this will move the installable revision from - # revision 1 to revision 2. - # The above scenario is the current behavior, and that is why this method is not currently called. - # This method exists only in case we decide to change this current behavior. - # The lists of readme files looks something like: ["database/community_files/000/repo_2/readme.txt"] - if len(ancestor_readme_files) <= len(current_readme_files): - for ancestor_readme_file in ancestor_readme_files: - if ancestor_readme_file not in current_readme_files: - return self.NOT_EQUAL_AND_NOT_SUBSET - if len(ancestor_readme_files) == len(current_readme_files): - return self.EQUAL - else: - return self.SUBSET - return self.NOT_EQUAL_AND_NOT_SUBSET - def compare_repository_dependencies(self, ancestor_repository_dependencies, current_repository_dependencies): """ Determine if ancestor_repository_dependencies is the same as or a subset of @@ -572,38 +490,6 @@ def compare_tool_dependencies(self, ancestor_tool_dependencies, current_tool_dep return self.SUBSET return self.NOT_EQUAL_AND_NOT_SUBSET - def compare_workflows(self, ancestor_workflows, current_workflows): - """ - Determine if ancestor_workflows is the same as current_workflows or if ancestor_workflows - is a subset of current_workflows. - """ - if len(ancestor_workflows) <= len(current_workflows): - for ancestor_workflow_tup in ancestor_workflows: - # ancestor_workflows is a list of tuples where each contained tuple is - # [ , ] - ancestor_workflow_dict = ancestor_workflow_tup[1] - # Currently the only way to differentiate workflows is by name. - ancestor_workflow_name = ancestor_workflow_dict["name"] - num_ancestor_workflow_steps = len(ancestor_workflow_dict["steps"]) - found_in_current = False - for current_workflow_tup in current_workflows: - current_workflow_dict = current_workflow_tup[1] - # Assume that if the name and number of steps are euqal, then the workflows - # are the same. Of course, this may not be true... 
- if ( - current_workflow_dict["name"] == ancestor_workflow_name - and len(current_workflow_dict["steps"]) == num_ancestor_workflow_steps - ): - found_in_current = True - break - if not found_in_current: - return self.NOT_EQUAL_AND_NOT_SUBSET - if len(ancestor_workflows) == len(current_workflows): - return self.EQUAL - else: - return self.SUBSET - return self.NOT_EQUAL_AND_NOT_SUBSET - def create_or_update_repository_metadata(self, changeset_revision, metadata_dict): """Create or update a repository_metadata record in the tool shed.""" has_repository_dependencies = False @@ -770,43 +656,6 @@ def get_query_for_setting_metadata_on_repositories(self, my_writable=False, orde self.app.model.Repository.table.c.deleted == false() ) - def new_datatypes_metadata_required(self, repository_metadata): - """ - Compare the last saved metadata for each datatype in the repository with the new metadata - in self.metadata_dict to determine if a new repository_metadata table record is required - or if the last saved metadata record can be updated for datatypes instead. - """ - # Datatypes are stored in metadata as a list of dictionaries that looks like: - # [{'dtype': 'galaxy.datatypes.data:Text', 'subclass': 'True', 'extension': 'acedb'}] - if "datatypes" in self.metadata_dict: - current_datatypes = self.metadata_dict["datatypes"] - if repository_metadata: - metadata = repository_metadata.metadata - if metadata: - if "datatypes" in metadata: - ancestor_datatypes = metadata["datatypes"] - # The saved metadata must be a subset of the new metadata. - datatype_comparison = self.compare_datatypes(ancestor_datatypes, current_datatypes) - if datatype_comparison == self.NOT_EQUAL_AND_NOT_SUBSET: - return True - else: - return False - else: - # The new metadata includes datatypes, but the stored metadata does not, - # so we can update the stored metadata. - return False - else: - # There is no stored metadata, so we can update the metadata column in the - # repository_metadata table. - return False - else: - # There is no stored repository metadata, so we need to create a new repository_metadata - # table record. - return True - # self.metadata_dict includes no metadata for datatypes, so a new repository_metadata - # table record is not needed. - return False - def new_metadata_required_for_utilities(self): """ This method compares the last stored repository_metadata record associated with self.repository @@ -820,62 +669,13 @@ def new_metadata_required_for_utilities(self): repository_metadata = metadata_util.get_latest_repository_metadata( self.app, self.repository.id, downloadable=False ) - datatypes_required = self.new_datatypes_metadata_required(repository_metadata) - # Uncomment the following if we decide that README files should affect how installable - # repository revisions are defined. See the NOTE in the compare_readme_files() method. 
- # readme_files_required = sewlf.new_readme_files_metadata_required( repository_metadata ) repository_dependencies_required = self.new_repository_dependency_metadata_required(repository_metadata) tools_required = self.new_tool_metadata_required(repository_metadata) tool_dependencies_required = self.new_tool_dependency_metadata_required(repository_metadata) - workflows_required = self.new_workflow_metadata_required(repository_metadata) - if ( - datatypes_required - or repository_dependencies_required - or tools_required - or tool_dependencies_required - or workflows_required - ): + if repository_dependencies_required or tools_required or tool_dependencies_required: return True return False - def new_readme_files_metadata_required(self, repository_metadata): - """ - Compare the last saved metadata for each readme file in the repository with the new metadata - in self.metadata_dict to determine if a new repository_metadata table record is required or - if the last saved metadata record can be updated for readme files instead. - """ - # Repository README files are kind of a special case because they have no effect on reproducibility. - # We'll simply inspect the file names to determine if any that exist in the saved metadata are - # eliminated from the new metadata in self.metadata_dict. - if "readme_files" in self.metadata_dict: - current_readme_files = self.metadata_dict["readme_files"] - if repository_metadata: - metadata = repository_metadata.metadata - if metadata: - if "readme_files" in metadata: - ancestor_readme_files = metadata["readme_files"] - # The saved metadata must be a subset of the new metadata. - readme_file_comparison = self.compare_readme_files(ancestor_readme_files, current_readme_files) - if readme_file_comparison == self.NOT_EQUAL_AND_NOT_SUBSET: - return True - else: - return False - else: - # The new metadata includes readme_files, but the stored metadata does not, so - # we can update the stored metadata. - return False - else: - # There is no stored metadata, so we can update the metadata column in the repository_metadata - # table. - return False - else: - # There is no stored repository metadata, so we need to create a new repository_metadata - # table record. - return True - # self.metadata_dict includes no metadata for readme_files, so a new repository_metadata - # table record is not needed. - return False - def new_repository_dependency_metadata_required(self, repository_metadata): """ Compare the last saved metadata for each repository dependency in the repository @@ -1023,25 +823,6 @@ def new_tool_dependency_metadata_required(self, repository_metadata): # record is not needed. return False - def new_workflow_metadata_required(self, repository_metadata): - """ - Currently everything about an exported workflow except the name is hard-coded, so - there's no real way to differentiate versions of exported workflows. If this changes - at some future time, this method should be enhanced accordingly. - """ - if "workflows" in self.metadata_dict: - if repository_metadata: - # The repository has metadata, so update the workflows value - - # no new record is needed. - return False - else: - # There is no saved repository metadata, so we need to create a - # new repository_metadata table record. - return True - # self.metadata_dict includes no metadata for workflows, so a new - # repository_metadata table record is not needed. 
- return False - def reset_all_metadata_on_repository_in_tool_shed(self, repository_clone_url=None): """Reset all metadata on a single repository in a tool shed.""" assert self.repository From 5a4e950f08a7b0504033725b08d41f4c3a57f554 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Mon, 19 Dec 2022 18:10:31 -0500 Subject: [PATCH 42/73] metadata_generator: Typing has improved to the point that we can annotate this. --- lib/tool_shed/util/commit_util.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/tool_shed/util/commit_util.py b/lib/tool_shed/util/commit_util.py index 83542a7c8db4..6bed88bcea00 100644 --- a/lib/tool_shed/util/commit_util.py +++ b/lib/tool_shed/util/commit_util.py @@ -161,7 +161,7 @@ def handle_bz2(repository: "Repository", uploaded_file_name): def handle_directory_changes( - app, + app: "ToolShedApp", host: str, username: str, repository: "Repository", From cdfdaaf017f338d85d76fc6f758782c9bd876530 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Mon, 19 Dec 2022 09:14:21 -0500 Subject: [PATCH 43/73] Bug fix: data manager changes misaligned... - Data manager changes force a new installable changeset on reset but not when updating repositories. --- .../metadata/repository_metadata_manager.py | 13 ++++++++- .../data_manager_gaps/0/data_manager.xml | 18 ++++++++++++ .../data_manager_gaps/0/data_manager_conf.xml | 16 +++++++++++ .../data_manager_gaps/1/data_manager.xml | 18 ++++++++++++ .../data_manager_gaps/1/data_manager_conf.xml | 17 +++++++++++ .../data_manager_gaps/2/data_manager.xml | 18 ++++++++++++ .../data_manager_gaps/2/data_manager_conf.xml | 28 +++++++++++++++++++ .../test_repository_metadata_manager.py | 18 ++++++++++++ 8 files changed, 145 insertions(+), 1 deletion(-) create mode 100644 lib/tool_shed/test/test_data/repos/data_manager_gaps/0/data_manager.xml create mode 100644 lib/tool_shed/test/test_data/repos/data_manager_gaps/0/data_manager_conf.xml create mode 100644 lib/tool_shed/test/test_data/repos/data_manager_gaps/1/data_manager.xml create mode 100644 lib/tool_shed/test/test_data/repos/data_manager_gaps/1/data_manager_conf.xml create mode 100644 lib/tool_shed/test/test_data/repos/data_manager_gaps/2/data_manager.xml create mode 100644 lib/tool_shed/test/test_data/repos/data_manager_gaps/2/data_manager_conf.xml diff --git a/lib/tool_shed/metadata/repository_metadata_manager.py b/lib/tool_shed/metadata/repository_metadata_manager.py index ff83f3a0bede..67e4e1f54c6a 100644 --- a/lib/tool_shed/metadata/repository_metadata_manager.py +++ b/lib/tool_shed/metadata/repository_metadata_manager.py @@ -672,7 +672,8 @@ def new_metadata_required_for_utilities(self): repository_dependencies_required = self.new_repository_dependency_metadata_required(repository_metadata) tools_required = self.new_tool_metadata_required(repository_metadata) tool_dependencies_required = self.new_tool_dependency_metadata_required(repository_metadata) - if repository_dependencies_required or tools_required or tool_dependencies_required: + data_managers_required = self.new_data_manager_required(repository_metadata) + if repository_dependencies_required or tools_required or tool_dependencies_required or data_managers_required: return True return False @@ -727,6 +728,16 @@ def new_repository_dependency_metadata_required(self, repository_metadata): # record is not needed. 
return False + def new_data_manager_required(self, repository_metadata): + if self.metadata_dict and repository_metadata and repository_metadata.metadata: + return self.compare_data_manager(self.metadata_dict, repository_metadata.metadata) != self.EQUAL + else: + return bool( + repository_metadata + and repository_metadata.metadata + and repository_metadata.metadata.get("data_managers") + ) + def new_tool_metadata_required(self, repository_metadata): """ Compare the last saved metadata for each tool in the repository with the new metadata in diff --git a/lib/tool_shed/test/test_data/repos/data_manager_gaps/0/data_manager.xml b/lib/tool_shed/test/test_data/repos/data_manager_gaps/0/data_manager.xml new file mode 100644 index 000000000000..36891279dc41 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/data_manager_gaps/0/data_manager.xml @@ -0,0 +1,18 @@ + + + {"data_tables": {"testbeta": [{"value": "newvalue", "path": "newvalue.txt"}]}} + + + mkdir $out_file.files_path ; + echo "A new value" > '$out_file.files_path/newvalue.txt'; + cp '$static_test_data' '$out_file'; + exit $exit_code + + + + + + + + + diff --git a/lib/tool_shed/test/test_data/repos/data_manager_gaps/0/data_manager_conf.xml b/lib/tool_shed/test/test_data/repos/data_manager_gaps/0/data_manager_conf.xml new file mode 100644 index 000000000000..81dce17225ff --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/data_manager_gaps/0/data_manager_conf.xml @@ -0,0 +1,16 @@ + + + + + + + + testbeta/${value} + + ${GALAXY_DATA_MANAGER_DATA_PATH}/testbeta/${value}/${path} + abspath + + + + + diff --git a/lib/tool_shed/test/test_data/repos/data_manager_gaps/1/data_manager.xml b/lib/tool_shed/test/test_data/repos/data_manager_gaps/1/data_manager.xml new file mode 100644 index 000000000000..36891279dc41 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/data_manager_gaps/1/data_manager.xml @@ -0,0 +1,18 @@ + + + {"data_tables": {"testbeta": [{"value": "newvalue", "path": "newvalue.txt"}]}} + + + mkdir $out_file.files_path ; + echo "A new value" > '$out_file.files_path/newvalue.txt'; + cp '$static_test_data' '$out_file'; + exit $exit_code + + + + + + + + + diff --git a/lib/tool_shed/test/test_data/repos/data_manager_gaps/1/data_manager_conf.xml b/lib/tool_shed/test/test_data/repos/data_manager_gaps/1/data_manager_conf.xml new file mode 100644 index 000000000000..62ddce2c99f7 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/data_manager_gaps/1/data_manager_conf.xml @@ -0,0 +1,17 @@ + + + + + + + + + testbeta/${value} + + ${GALAXY_DATA_MANAGER_DATA_PATH}/testbeta/${value}/${path} + abspath + + + + + diff --git a/lib/tool_shed/test/test_data/repos/data_manager_gaps/2/data_manager.xml b/lib/tool_shed/test/test_data/repos/data_manager_gaps/2/data_manager.xml new file mode 100644 index 000000000000..36891279dc41 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/data_manager_gaps/2/data_manager.xml @@ -0,0 +1,18 @@ + + + {"data_tables": {"testbeta": [{"value": "newvalue", "path": "newvalue.txt"}]}} + + + mkdir $out_file.files_path ; + echo "A new value" > '$out_file.files_path/newvalue.txt'; + cp '$static_test_data' '$out_file'; + exit $exit_code + + + + + + + + + diff --git a/lib/tool_shed/test/test_data/repos/data_manager_gaps/2/data_manager_conf.xml b/lib/tool_shed/test/test_data/repos/data_manager_gaps/2/data_manager_conf.xml new file mode 100644 index 000000000000..200ae00d8be2 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/data_manager_gaps/2/data_manager_conf.xml @@ -0,0 +1,28 @@ + + + + + + + + 
testbeta/${value} + + ${GALAXY_DATA_MANAGER_DATA_PATH}/testbeta/${value}/${path} + abspath + + + + + + + + + testbeta2/${value} + + ${GALAXY_DATA_MANAGER_DATA_PATH}/testbeta2/${value}/${path} + abspath + + + + + diff --git a/test/unit/tool_shed/test_repository_metadata_manager.py b/test/unit/tool_shed/test_repository_metadata_manager.py index e57fbebc2fca..93cc148c020b 100644 --- a/test/unit/tool_shed/test_repository_metadata_manager.py +++ b/test/unit/tool_shed/test_repository_metadata_manager.py @@ -41,3 +41,21 @@ def test_reset_on_repo_with_uninstallable_revisions(shed_app: TestToolShedApp, n repo_path = new_repository.repo_path(app=shed_app) rmm.reset_all_metadata_on_repository_in_tool_shed(repository_clone_url=repo_path) assert len(new_repository.downloadable_revisions) == 3 + + +@patch_url_for +def test_reset_dm_with_uninstallable_revisions(shed_app: TestToolShedApp, new_repository: Repository): + upload_directories_to_repository(shed_app, new_repository, "data_manager_gaps") + assert len(new_repository.downloadable_revisions) == 1 + assert "2:" in new_repository.revision() + rmm = repository_metadata_manager.RepositoryMetadataManager( + app=shed_app, + user=new_repository.user, + repository=new_repository, + resetting_all_metadata_on_repository=True, + updating_installed_repository=False, + persist=False, + ) + repo_path = new_repository.repo_path(app=shed_app) + rmm.reset_all_metadata_on_repository_in_tool_shed(repository_clone_url=repo_path) + assert len(new_repository.downloadable_revisions) == 2 From 78b978e1886ce43f8d536fc0ad5c718013ce5472 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Thu, 15 Dec 2022 11:14:14 -0500 Subject: [PATCH 44/73] Refactor install manager a bit to isolate interaction. --- .../galaxy_install/install_manager.py | 62 +++++++++++-------- 1 file changed, 36 insertions(+), 26 deletions(-) diff --git a/lib/galaxy/tool_shed/galaxy_install/install_manager.py b/lib/galaxy/tool_shed/galaxy_install/install_manager.py index 7a2aa68a835a..d97a586c08f0 100644 --- a/lib/galaxy/tool_shed/galaxy_install/install_manager.py +++ b/lib/galaxy/tool_shed/galaxy_install/install_manager.py @@ -37,6 +37,7 @@ common_util, encoding_util, ) +from galaxy.util.tool_shed.tool_shed_registry import Registry from tool_shed_client.schema import ( ExtraRepoInfo, RepositoryMetadataInstallInfoDict, @@ -45,6 +46,38 @@ log = logging.getLogger(__name__) +def get_install_info_from_tool_shed( + tool_shed_url: str, tool_shed_registry: Registry, name: str, owner: str, changeset_revision: str +) -> Tuple[RepositoryMetadataInstallInfoDict, ExtraRepoInfo]: + params = dict(name=name, owner=owner, changeset_revision=changeset_revision) + pathspec = ["api", "repositories", "get_repository_revision_install_info"] + try: + raw_text = util.url_get( + tool_shed_url, + auth=tool_shed_registry.url_auth(tool_shed_url), + pathspec=pathspec, + params=params, + ) + except Exception: + message = "Error attempting to retrieve installation information from tool shed " + message += f"{tool_shed_url} for revision {changeset_revision} of repository {name} owned by {owner}" + log.exception(message) + raise exceptions.InternalServerError(message) + if raw_text: + # If successful, the response from get_repository_revision_install_info will be 3 + # dictionaries, a dictionary defining the Repository, a dictionary defining the + # Repository revision (RepositoryMetadata), and a dictionary including the additional + # information required to install the repository. 
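+        # (items[0], the Repository dict, is not used by this helper; only
+        # items[1] and items[2] are returned below, as repository_revision_dict
+        # and repo_info_dict respectively.)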
+ items = json.loads(util.unicodify(raw_text)) + repository_revision_dict: RepositoryMetadataInstallInfoDict = items[1] + repo_info_dict: ExtraRepoInfo = items[2] + else: + message = f"Unable to retrieve installation information from tool shed {tool_shed_url} for revision {changeset_revision} of repository {name} owned by {owner}" + log.warning(message) + raise exceptions.InternalServerError(message) + return repository_revision_dict, repo_info_dict + + class InstallRepositoryManager: app: InstallationTarget tpm: tool_panel_manager.ToolPanelManager @@ -79,32 +112,9 @@ def _get_repository_components_for_installation( def __get_install_info_from_tool_shed( self, tool_shed_url: str, name: str, owner: str, changeset_revision: str ) -> Tuple[RepositoryMetadataInstallInfoDict, List[ExtraRepoInfo]]: - params = dict(name=name, owner=owner, changeset_revision=changeset_revision) - pathspec = ["api", "repositories", "get_repository_revision_install_info"] - try: - raw_text = util.url_get( - tool_shed_url, - auth=self.app.tool_shed_registry.url_auth(tool_shed_url), - pathspec=pathspec, - params=params, - ) - except Exception: - message = "Error attempting to retrieve installation information from tool shed " - message += f"{tool_shed_url} for revision {changeset_revision} of repository {name} owned by {owner}" - log.exception(message) - raise exceptions.InternalServerError(message) - if raw_text: - # If successful, the response from get_repository_revision_install_info will be 3 - # dictionaries, a dictionary defining the Repository, a dictionary defining the - # Repository revision (RepositoryMetadata), and a dictionary including the additional - # information required to install the repository. - items = json.loads(util.unicodify(raw_text)) - repository_revision_dict: RepositoryMetadataInstallInfoDict = items[1] - repo_info_dict: ExtraRepoInfo = items[2] - else: - message = f"Unable to retrieve installation information from tool shed {tool_shed_url} for revision {changeset_revision} of repository {name} owned by {owner}" - log.warning(message) - raise exceptions.InternalServerError(message) + repository_revision_dict, repo_info_dict = get_install_info_from_tool_shed( + tool_shed_url, self.app.tool_shed_registry, name, owner, changeset_revision + ) # Make sure the tool shed returned everything we need for installing the repository. if not repository_revision_dict or not repo_info_dict: invalid_parameter_message = "No information is available for the requested repository revision.\n" From 9e7fafb8a58429f37ae59f484ca9ca9b75e86f5a Mon Sep 17 00:00:00 2001 From: John Chilton Date: Thu, 22 Dec 2022 10:47:55 -0500 Subject: [PATCH 45/73] Remove contact owner functionality from the tool shed - use github please. 
--- lib/tool_shed/test/base/twilltestcase.py | 16 ----- .../test_0000_basic_repository_features.py | 25 +------- lib/tool_shed/util/shed_util_common.py | 14 ----- .../webapp/controllers/repository.py | 58 ------------------- .../common/repository_actions_menu.mako | 8 --- .../tool_shed/repository/contact_owner.mako | 38 ------------ 6 files changed, 3 insertions(+), 156 deletions(-) delete mode 100644 lib/tool_shed/webapp/templates/webapps/tool_shed/repository/contact_owner.mako diff --git a/lib/tool_shed/test/base/twilltestcase.py b/lib/tool_shed/test/base/twilltestcase.py index 41543fb10b50..5df2d26a2ed8 100644 --- a/lib/tool_shed/test/base/twilltestcase.py +++ b/lib/tool_shed/test/base/twilltestcase.py @@ -1760,22 +1760,6 @@ def search_for_valid_tools( tc.submit() self.check_for_strings(strings_displayed, strings_not_displayed) - def send_message_to_repository_owner( - self, - repository: Repository, - message: str, - strings_displayed=None, - strings_not_displayed=None, - post_submit_strings_displayed=None, - post_submit_strings_not_displayed=None, - ) -> None: - params = {"id": repository.id} - self.visit_url("/repository/contact_owner", params=params) - self.check_for_strings(strings_displayed, strings_not_displayed) - tc.fv(1, "message", message) - tc.submit() - self.check_for_strings(post_submit_strings_displayed, post_submit_strings_not_displayed) - def set_form_value(self, form, kwd, field_name, field_value): """ Set the form field field_name to field_value if it exists, and return the provided dict containing that value. If diff --git a/lib/tool_shed/test/functional/test_0000_basic_repository_features.py b/lib/tool_shed/test/functional/test_0000_basic_repository_features.py index f5c886f7cd62..d9092076a404 100644 --- a/lib/tool_shed/test/functional/test_0000_basic_repository_features.py +++ b/lib/tool_shed/test/functional/test_0000_basic_repository_features.py @@ -269,28 +269,9 @@ def test_0100_verify_reserved_username_handling(self): self.check_for_strings(strings_displayed=[error_message]) def test_0105_contact_repository_owner(self): - """Fill out and submit the form to contact the owner of a repository. - - This test should not actually send the email, since functional tests are designed to function without - any external network connection. The embedded tool shed server these tests are running against has been configured - with an SMTP server address that will not and should not resolve correctly. However, since the successful sending of - the email is the last step in the process, this will verify functional correctness of all preceding steps. - """ - self.login(email=common.test_user_2_email, username=common.test_user_2_name) - repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) - message = "This is a test message." - strings_displayed = [ - "Contact the owner of the repository named", - repository.name, - "streamline appropriate communication", - ] - post_submit_strings_displayed = ["An error occurred sending your message by email"] - self.send_message_to_repository_owner( - repository=repository, - message=message, - strings_displayed=strings_displayed, - post_submit_strings_displayed=post_submit_strings_displayed, - ) + """""" + # We no longer implement this. 
+ pass def test_0110_delete_filtering_repository(self): """Delete the filtering_0000 repository and verify that it no longer has any downloadable revisions.""" diff --git a/lib/tool_shed/util/shed_util_common.py b/lib/tool_shed/util/shed_util_common.py index a37243a648d6..749ad9ea8655 100644 --- a/lib/tool_shed/util/shed_util_common.py +++ b/lib/tool_shed/util/shed_util_common.py @@ -87,20 +87,6 @@ ----------------------------------------------------------------------------- """ -contact_owner_template = """ -GALAXY TOOL SHED REPOSITORY MESSAGE ------------------------- - -The user '${username}' sent you the following message regarding your tool shed -repository named '${repository_name}'. You can respond by sending a reply to -the user's email address: ${email}. ------------------------------------------------------------------------------ -${message} ------------------------------------------------------------------------------ -This message was sent from the Galaxy Tool Shed instance hosted on the server -'${host}' -""" - def count_repositories_in_category(app: "ToolShedApp", category_id: str) -> int: sa_session = app.model.session diff --git a/lib/tool_shed/webapp/controllers/repository.py b/lib/tool_shed/webapp/controllers/repository.py index 03ee7e2a6671..16c4bffdfd65 100644 --- a/lib/tool_shed/webapp/controllers/repository.py +++ b/lib/tool_shed/webapp/controllers/repository.py @@ -645,26 +645,6 @@ def browse_valid_repositories(self, trans, **kwd): self.valid_repository_grid.title = title return self.valid_repository_grid(trans, **kwd) - @web.expose - def contact_owner(self, trans, id, **kwd): - message = escape(kwd.get("message", "")) - status = kwd.get("status", "done") - repository = repository_util.get_repository_in_tool_shed(trans.app, id) - metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( - trans.app, id, repository.tip(), metadata_only=True - ) - if trans.user and trans.user.email: - return trans.fill_template( - "/webapps/tool_shed/repository/contact_owner.mako", - repository=repository, - metadata=metadata, - message=message, - status=status, - ) - else: - # Do all we can to eliminate spam. - return trans.show_error_message("You must be logged in to contact the owner of a repository.") - @web.expose def create_galaxy_docker_image(self, trans, **kwd): message = escape(kwd.get("message", "")) @@ -2184,44 +2164,6 @@ def reset_metadata_on_my_writable_repositories_in_tool_shed(self, trans, **kwd): status=status, ) - @web.expose - def send_to_owner(self, trans, id, message=""): - repository = repository_util.get_repository_in_tool_shed(trans.app, id) - if not message: - message = "Enter a message" - status = "error" - elif trans.user and trans.user.email: - smtp_server = trans.app.config.smtp_server - from_address = trans.app.config.email_from - if smtp_server is None or from_address is None: - return trans.show_error_message("Mail is not configured for this Galaxy tool shed instance") - to_address = repository.user.email - # Get the name of the server hosting the tool shed instance. 
- host = trans.request.host - # Build the email message - body = string.Template(suc.contact_owner_template).safe_substitute( - username=trans.user.username, - repository_name=repository.name, - email=trans.user.email, - message=message, - host=host, - ) - subject = f"Regarding your tool shed repository named {repository.name}" - # Send it - try: - util.send_mail(from_address, to_address, subject, body, trans.app.config) - message = "Your message has been sent" - status = "done" - except Exception as e: - message = f"An error occurred sending your message by email: {util.unicodify(e)}" - status = "error" - else: - # Do all we can to eliminate spam. - return trans.show_error_message("You must be logged in to contact the owner of a repository.") - return trans.response.send_redirect( - web.url_for(controller="repository", action="contact_owner", id=id, message=message, status=status) - ) - @web.expose @require_login("set email alerts") def set_email_alerts(self, trans, **kwd): diff --git a/lib/tool_shed/webapp/templates/webapps/tool_shed/common/repository_actions_menu.mako b/lib/tool_shed/webapp/templates/webapps/tool_shed/common/repository_actions_menu.mako index bfe679f59a53..978837fb4fcc 100644 --- a/lib/tool_shed/webapp/templates/webapps/tool_shed/common/repository_actions_menu.mako +++ b/lib/tool_shed/webapp/templates/webapps/tool_shed/common/repository_actions_menu.mako @@ -33,11 +33,6 @@ can_browse_contents = not is_new - if trans.user and trans.user != repository.user: - can_contact_owner = True - else: - can_contact_owner = False - if not is_new and trans.user and ( is_admin or repository.user == trans.user ) and not is_deprecated: can_deprecate = True else: @@ -112,9 +107,6 @@ %if can_rate: Rate repository %endif - %if can_contact_owner: - Contact repository owner - %endif %if can_reset_all_metadata: Reset all repository metadata %endif diff --git a/lib/tool_shed/webapp/templates/webapps/tool_shed/repository/contact_owner.mako b/lib/tool_shed/webapp/templates/webapps/tool_shed/repository/contact_owner.mako deleted file mode 100644 index 3b2d5b8a50ba..000000000000 --- a/lib/tool_shed/webapp/templates/webapps/tool_shed/repository/contact_owner.mako +++ /dev/null @@ -1,38 +0,0 @@ -<%inherit file="/base.mako"/> -<%namespace file="/message.mako" import="render_msg" /> -<%namespace file="/webapps/tool_shed/common/repository_actions_menu.mako" import="render_tool_shed_repository_actions" /> - -<%! - def inherit(context): - if context.get('use_panels'): - return '/webapps/tool_shed/base_panels.mako' - else: - return '/base.mako' -%> -<%inherit file="${inherit(context)}"/> - -${render_tool_shed_repository_actions( repository, metadata=metadata )} - -%if message: - ${render_msg( message, status )} -%endif - -
-## (form markup stripped during extraction; only the template's text content is recoverable)
-Contact the owner of the repository named '${repository.name | h}'
-This feature is intended to streamline appropriate communication between
-Galaxy tool developers and those in the Galaxy community that use them.
-Please don't send messages unnecessarily.
    From 85100edff05ff8ddbb6fa9435c59e49802cd24f7 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Thu, 17 Aug 2023 15:05:45 -0400 Subject: [PATCH 46/73] Tool Shed API Modernization ahead of FastAPI - Use the controller base that allow DI. - Move a bunch functionality we want to port to FastAPI into manage layer. - Some typing enhancements. --- lib/tool_shed/managers/categories.py | 66 +++ lib/tool_shed/managers/repositories.py | 506 +++++++++++++++++- lib/tool_shed/managers/tools.py | 44 ++ lib/tool_shed/managers/users.py | 61 ++- lib/tool_shed/test/functional/api_notes.md | 6 + .../test/functional/test_shed_repositories.py | 2 + lib/tool_shed/webapp/api/categories.py | 94 ++-- lib/tool_shed/webapp/api/configuration.py | 7 +- lib/tool_shed/webapp/api/groups.py | 7 +- lib/tool_shed/webapp/api/repositories.py | 424 ++------------- .../webapp/api/repository_revisions.py | 8 +- lib/tool_shed/webapp/api/tools.py | 47 +- lib/tool_shed/webapp/api/users.py | 51 +- .../webapp/controllers/repository.py | 12 +- lib/tool_shed/webapp/search/tool_search.py | 4 +- lib/tool_shed/webapp/security/__init__.py | 6 +- lib/tool_shed_client/schema/__init__.py | 4 + 17 files changed, 813 insertions(+), 536 deletions(-) create mode 100644 lib/tool_shed/managers/categories.py create mode 100644 lib/tool_shed/managers/tools.py diff --git a/lib/tool_shed/managers/categories.py b/lib/tool_shed/managers/categories.py new file mode 100644 index 000000000000..b9ae0535b038 --- /dev/null +++ b/lib/tool_shed/managers/categories.py @@ -0,0 +1,66 @@ +from typing import ( + Any, + Callable, + Dict, + List, +) + +import tool_shed.util.shed_util_common as suc +from galaxy import ( + exceptions, + web, +) +from galaxy.model.base import transaction +from tool_shed.context import ProvidesUserContext +from tool_shed.structured_app import ToolShedApp +from tool_shed.webapp.model import Category +from tool_shed_client.schema import CreateCategoryRequest + + +class CategoryManager: + def __init__(self, app: ToolShedApp): + self.app = app + + def create(self, trans: ProvidesUserContext, category_request: CreateCategoryRequest) -> Category: + name = category_request.name + description = category_request.description or name + if name: + if suc.get_category_by_name(self.app, name): + raise exceptions.Conflict("A category with that name already exists.") + else: + # Create the category + category = self.app.model.Category(name=name, description=description) + trans.sa_session.add(category) + with transaction(trans.sa_session): + trans.sa_session.commit() + return category + else: + raise exceptions.RequestParameterMissingException('Missing required parameter "name".') + + def index_db(self, trans: ProvidesUserContext, deleted: bool) -> List[Category]: + category_db_objects: List[Category] = [] + if deleted and not trans.user_is_admin: + raise exceptions.AdminRequiredException("Only administrators can query deleted categories.") + for category in ( + trans.sa_session.query(Category).filter(Category.table.c.deleted == deleted).order_by(Category.table.c.name) + ): + category_db_objects.append(category) + return category_db_objects + + def index(self, trans: ProvidesUserContext, deleted: bool) -> List[Dict[str, Any]]: + category_dicts: List[Dict[str, Any]] = [] + for category in self.index_db(trans, deleted): + category_dict = category.to_dict(view="collection", value_mapper=get_value_mapper(trans.app)) + category_dict["url"] = web.url_for( + controller="categories", action="show", id=trans.security.encode_id(category.id) + ) + category_dict[ 
+ "repositories" + ] = self.app.repository_registry.viewable_repositories_and_suites_by_category.get(category.name, 0) + category_dicts.append(category_dict) + return category_dicts + + +def get_value_mapper(app: ToolShedApp) -> Dict[str, Callable]: + value_mapper = {"id": app.security.encode_id} + return value_mapper diff --git a/lib/tool_shed/managers/repositories.py b/lib/tool_shed/managers/repositories.py index 61625a505de6..010232e830b5 100644 --- a/lib/tool_shed/managers/repositories.py +++ b/lib/tool_shed/managers/repositories.py @@ -1,26 +1,530 @@ """ Manager and Serializer for TS repositories. """ +import json import logging +from collections import namedtuple +from time import strftime +from typing import ( + Any, + Callable, + cast, + Dict, + List, + Optional, + Union, +) + +from pydantic import BaseModel +from sqlalchemy import ( + and_, + false, +) +from galaxy import web from galaxy.exceptions import ( + ConfigDoesNotAllowException, InternalServerError, MalformedContents, + ObjectNotFound, + RequestParameterInvalidException, ) from galaxy.tool_shed.util import dependency_display +from galaxy.util import listify +from tool_shed.context import ProvidesUserContext from tool_shed.metadata import repository_metadata_manager from tool_shed.repository_types import util as rt_util from tool_shed.structured_app import ToolShedApp -from tool_shed.util import hg_util +from tool_shed.util import ( + encoding_util, + hg_util, +) +from tool_shed.util.metadata_util import ( + get_all_dependencies, + get_current_repository_metadata_for_changeset_revision, + get_metadata_revisions, + get_next_downloadable_changeset_revision, + get_repository_metadata_by_changeset_revision, +) +from tool_shed.util.readme_util import build_readme_files_dict from tool_shed.util.repository_content_util import upload_tar +from tool_shed.util.repository_util import ( + create_repository as low_level_create_repository, + get_repo_info_dict, + get_repositories_by_category, + get_repository_by_name_and_owner, + get_repository_in_tool_shed, + validate_repository_name, +) +from tool_shed.util.shed_util_common import ( + get_category, + count_repositories_in_category, +) +from tool_shed.util.tool_util import generate_message_for_invalid_tools from tool_shed.webapp.model import ( Repository, User, ) +from tool_shed.webapp.search.repo_search import RepoSearch +from tool_shed_client.schema import ( + CreateRepositoryRequest, + ExtraRepoInfo, + LegacyInstallInfoTuple, + Repository as SchemaRepository, + RepositoryMetadataInstallInfoDict, + ResetMetadataOnRepositoryResponse, +) +from .categories import get_value_mapper as category_value_mapper log = logging.getLogger(__name__) +def search(trans: ProvidesUserContext, q: str, page: int = 1, page_size: int = 10): + """ + Perform the search over TS repositories. + Note that search works over the Whoosh index which you have + to pre-create with scripts/tool_shed/build_ts_whoosh_index.sh manually. + Also TS config option toolshed_search_on has to be True and + whoosh_index_dir has to be specified. + """ + app = trans.app + conf = app.config + if not conf.toolshed_search_on: + raise ConfigDoesNotAllowException("Searching the TS through the API is turned off for this instance.") + if not conf.whoosh_index_dir: + raise ConfigDoesNotAllowException( + "There is no directory for the search index specified. Please contact the administrator." 
+ ) + + search_term = q.strip() + if len(search_term) < 1: + raise RequestParameterInvalidException("The search term has to be at least one character long.") + + repo_search = RepoSearch() + + Boosts = namedtuple( + "Boosts", + [ + "repo_name_boost", + "repo_description_boost", + "repo_long_description_boost", + "repo_homepage_url_boost", + "repo_remote_repository_url_boost", + "categories_boost", + "repo_owner_username_boost", + ], + ) + boosts = Boosts( + float(conf.get("repo_name_boost", 0.9)), + float(conf.get("repo_description_boost", 0.6)), + float(conf.get("repo_long_description_boost", 0.5)), + float(conf.get("repo_homepage_url_boost", 0.3)), + float(conf.get("repo_remote_repository_url_boost", 0.2)), + float(conf.get("categories_boost", 0.5)), + float(conf.get("repo_owner_username_boost", 0.3)), + ) + + results = repo_search.search(trans, search_term, page, page_size, boosts) + results["hostname"] = web.url_for("/", qualified=True) + return results + + +class UpdatesRequest(BaseModel): + name: Optional[str] = None + owner: Optional[str] = None + changeset_revision: str + hexlify: bool = True + + +def check_updates(app: ToolShedApp, request: UpdatesRequest) -> Union[str, Dict[str, Any]]: + name = request.name + owner = request.owner + changeset_revision = request.changeset_revision + hexlify_this = request.hexlify + repository = get_repository_by_name_and_owner( + app, name, owner, eagerload_columns=[Repository.downloadable_revisions] + ) + if repository and repository.downloadable_revisions: + repository_metadata = get_repository_metadata_by_changeset_revision( + app, app.security.encode_id(repository.id), changeset_revision + ) + tool_shed_status_dict = {} + # Handle repository deprecation. + tool_shed_status_dict["repository_deprecated"] = str(repository.deprecated) + tip_revision = repository.downloadable_revisions[0] + # Handle latest installable revision. + if changeset_revision == tip_revision: + tool_shed_status_dict["latest_installable_revision"] = "True" + else: + next_installable_revision = get_next_downloadable_changeset_revision(app, repository, changeset_revision) + if repository_metadata is None: + if next_installable_revision and next_installable_revision != changeset_revision: + tool_shed_status_dict["latest_installable_revision"] = "True" + else: + tool_shed_status_dict["latest_installable_revision"] = "False" + else: + if next_installable_revision and next_installable_revision != changeset_revision: + tool_shed_status_dict["latest_installable_revision"] = "False" + else: + tool_shed_status_dict["latest_installable_revision"] = "True" + # Handle revision updates. + if changeset_revision == tip_revision: + tool_shed_status_dict["revision_update"] = "False" + else: + if repository_metadata is None: + tool_shed_status_dict["revision_update"] = "True" + else: + tool_shed_status_dict["revision_update"] = "False" + # Handle revision upgrades. 
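+        # (A "revision_update" means newer commits exist within the installed
+        # installable revision, whereas a "revision_upgrade" means a newer
+        # installable metadata revision exists beyond the installed one.)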
+        metadata_revisions = [revision[1] for revision in get_metadata_revisions(app, repository)]
+        num_metadata_revisions = len(metadata_revisions)
+        for index, metadata_revision in enumerate(metadata_revisions):
+            if index == num_metadata_revisions:
+                tool_shed_status_dict["revision_upgrade"] = "False"
+                break
+            if metadata_revision == changeset_revision:
+                if num_metadata_revisions - index > 1:
+                    tool_shed_status_dict["revision_upgrade"] = "True"
+                else:
+                    tool_shed_status_dict["revision_upgrade"] = "False"
+                break
+        return (
+            encoding_util.tool_shed_encode(tool_shed_status_dict) if hexlify_this else json.dumps(tool_shed_status_dict)
+        )
+    return encoding_util.tool_shed_encode({}) if hexlify_this else json.dumps({})
+
+
+def guid_to_repository(app: ToolShedApp, tool_id: str) -> Optional[Repository]:
+    # tool_id = remove_protocol_and_user_from_clone_url(tool_id)
+    # A valid GUID looks like <shed>/repos/<owner>/<name>/<tool id>/<version>, so
+    # split off the first four components and leave the tool id/version in `rest`.
+    shed, _, owner, name, rest = tool_id.split("/", 4)
+    clause_list = [
+        and_(
+            app.model.Repository.table.c.deprecated == false(),
+            app.model.Repository.table.c.deleted == false(),
+            app.model.Repository.table.c.name == name,
+            app.model.User.table.c.username == owner,
+            app.model.Repository.table.c.user_id == app.model.User.table.c.id,
+        )
+    ]
+    repository = app.model.context.query(app.model.Repository).filter(*clause_list).first()
+    return repository
+
+
+def index_tool_ids(app: ToolShedApp, tool_ids: List[str]) -> Dict[str, Any]:
+    repository_found = []
+    all_metadata = dict()
+    for tool_id in tool_ids:
+        repository = guid_to_repository(app, tool_id)
+        if not repository:
+            log.warning(f"Repository for tool id {tool_id} does not exist, skipping")
+            continue
+        owner = repository.user.username
+        name = repository.name
+        for changeset, changehash in repository.installable_revisions(app):
+            metadata = get_current_repository_metadata_for_changeset_revision(app, repository, changehash)
+            if metadata is None:
+                continue
+            tools: Optional[List[Dict[str, Any]]] = metadata.metadata.get("tools")
+            if not tools:
+                log.warning(f"Repository {owner}/{name}/{changehash} does not contain valid tools, skipping")
+                continue
+            for tool_metadata in tools:
+                if tool_metadata["guid"] in tool_ids:
+                    repository_found.append("%d:%s" % (int(changeset), changehash))
+                    metadata_dict = metadata.to_dict(
+                        value_mapper={"id": app.security.encode_id, "repository_id": app.security.encode_id}
+                    )
+                    metadata_dict["repository"] = repository.to_dict(value_mapper={"id": app.security.encode_id})
+                    if metadata.has_repository_dependencies:
+                        metadata_dict["repository_dependencies"] = get_all_dependencies(
+                            app, metadata, processed_dependency_links=[]
+                        )
+                    else:
+                        metadata_dict["repository_dependencies"] = []
+                    if metadata.includes_tool_dependencies:
+                        metadata_dict["tool_dependencies"] = repository.get_tool_dependencies(app, changehash)
+                    else:
+                        metadata_dict["tool_dependencies"] = {}
+                    if metadata.includes_tools:
+                        metadata_dict["tools"] = metadata.metadata["tools"]
+                    all_metadata[f"{int(changeset)}:{changehash}"] = metadata_dict
+    if repository_found:
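+        # Report the first matching installable changeset encountered as "current".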
+        all_metadata["current_changeset"] = repository_found[0]
+        # all_metadata[ 'found_changesets' ] = repository_found
+        return all_metadata
+    else:
+        return {}
+
+
+def index_repositories(app: ToolShedApp, name: Optional[str], owner: Optional[str], deleted: bool):
+    clause_list = [
+        and_(
+            app.model.Repository.table.c.deprecated == false(),
+            app.model.Repository.table.c.deleted == deleted,
+        )
+    ]
+    if owner is not None:
+        clause_list.append(
+            and_(
+                app.model.User.table.c.username == owner,
+                app.model.Repository.table.c.user_id == app.model.User.table.c.id,
+            )
+        )
+    if name is not None:
+        clause_list.append(app.model.Repository.table.c.name == name)
+    repositories = []
+    for repository in (
+        app.model.context.query(app.model.Repository).filter(*clause_list).order_by(app.model.Repository.table.c.name)
+    ):
+        repositories.append(repository)
+    return repositories
+
+
+def can_manage_repo(trans: ProvidesUserContext, repository: Repository) -> bool:
+    security_agent = trans.app.security_agent
+    return trans.user_is_admin or security_agent.user_can_administer_repository(trans.user, repository)
+
+
+def can_update_repo(trans: ProvidesUserContext, repository: Repository) -> bool:
+    # Mirrors the permission check previously inlined in the controller: admins,
+    # per-repository administrators, and users with push access may update.
+    app = trans.app
+    return can_manage_repo(trans, repository) or app.security_agent.can_push(app, trans.user, repository)
+
+
+def get_install_info(app: ToolShedApp, name, owner, changeset_revision) -> LegacyInstallInfoTuple:
+    value_mapper = get_value_mapper(app)
+    # Example URL:
+    # http://<xyz>/api/repositories/get_repository_revision_install_info?name=<name>&owner=<owner>&changeset_revision=<changeset_revision>
+    if name and owner and changeset_revision:
+        # Get the repository information.
+        repository = get_repository_by_name_and_owner(
+            app, name, owner, eagerload_columns=[Repository.downloadable_revisions]
+        )
+        if repository is None:
+            log.debug(f"Cannot locate repository {name} owned by {owner}")
+            return {}, {}, {}
+        encoded_repository_id = app.security.encode_id(repository.id)
+        repository_dict: dict = repository.to_dict(view="element", value_mapper=value_mapper)
+        repository_dict["url"] = web.url_for(controller="repositories", action="show", id=encoded_repository_id)
+        # Get the repository_metadata information.
+        repository_metadata = get_repository_metadata_by_changeset_revision(
+            app, encoded_repository_id, changeset_revision
+        )
+        if repository_metadata is None:
+            # The changeset_revision column in the repository_metadata table has been updated with a new
+            # value, so find the changeset_revision to which we need to update.
+            new_changeset_revision = get_next_downloadable_changeset_revision(app, repository, changeset_revision)
+            repository_metadata = get_repository_metadata_by_changeset_revision(
+                app, encoded_repository_id, new_changeset_revision
+            )
+            changeset_revision = new_changeset_revision
+        if repository_metadata is not None:
+            encoded_repository_metadata_id = app.security.encode_id(repository_metadata.id)
+            repository_metadata_dict: RepositoryMetadataInstallInfoDict = cast(
+                RepositoryMetadataInstallInfoDict,
+                repository_metadata.to_dict(view="collection", value_mapper=value_mapper),
+            )
+            repository_metadata_dict["url"] = web.url_for(
+                controller="repository_revisions", action="show", id=encoded_repository_metadata_id
+            )
+            if "tools" in repository_metadata.metadata:
+                repository_metadata_dict["valid_tools"] = repository_metadata.metadata["tools"]
+            # Get the repo_info_dict for installing the repository.
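+            # get_repo_info_dict returns a 6-tuple; only its first element is used in the response here.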
+ repo_info_dict: ExtraRepoInfo + ( + repo_info_dict, + includes_tools, + includes_tool_dependencies, + includes_tools_for_display_in_tool_panel, + has_repository_dependencies, + has_repository_dependencies_only_if_compiling_contained_td, + ) = get_repo_info_dict(app, None, encoded_repository_id, changeset_revision) + return repository_dict, repository_metadata_dict, repo_info_dict + else: + log.debug( + "Unable to locate repository_metadata record for repository id %s and changeset_revision %s", + repository.id, + changeset_revision, + ) + return repository_dict, {}, {} + else: + debug_msg = "Error in the Tool Shed repositories API in get_repository_revision_install_info: " + debug_msg += f"Invalid name {name} or owner {owner} or changeset_revision {changeset_revision} received." + log.debug(debug_msg) + return {}, {}, {} + + +def get_value_mapper(app: ToolShedApp) -> Dict[str, Callable]: + value_mapper = { + "id": app.security.encode_id, + "repository_id": app.security.encode_id, + "user_id": app.security.encode_id, + } + return value_mapper + + +def get_ordered_installable_revisions( + app: ToolShedApp, name: Optional[str], owner: Optional[str], tsr_id: Optional[str] +) -> List[str]: + eagerload_columns = [Repository.downloadable_revisions] + if None not in [name, owner]: + # Get the repository information. + repository = get_repository_by_name_and_owner(app, name, owner, eagerload_columns=eagerload_columns) + if repository is None: + raise ObjectNotFound(f"No repository named {name} found with owner {owner}") + elif tsr_id is not None: + repository = get_repository_in_tool_shed(app, tsr_id, eagerload_columns=eagerload_columns) + else: + error_message = "Error in the Tool Shed repositories API in get_ordered_installable_revisions: " + error_message += "invalid parameters received." 
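+        # Neither name/owner nor tsr_id was supplied; log and return an empty list.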
+        log.debug(error_message)
+        return []
+    return [revision[1] for revision in repository.installable_revisions(app, sort_revisions=True)]
+
+
+def get_repository_metadata_dict(app: ToolShedApp, id: str, recursive: bool, downloadable_only: bool) -> Dict[str, Any]:
+    all_metadata = {}
+    repository = get_repository_in_tool_shed(app, id, eagerload_columns=[Repository.downloadable_revisions])
+    for changeset, changehash in get_metadata_revisions(
+        app, repository, sort_revisions=True, downloadable=downloadable_only
+    ):
+        metadata = get_current_repository_metadata_for_changeset_revision(app, repository, changehash)
+        if metadata is None:
+            continue
+        metadata_dict = metadata.to_dict(
+            value_mapper={"id": app.security.encode_id, "repository_id": app.security.encode_id}
+        )
+        metadata_dict["repository"] = repository.to_dict(value_mapper={"id": app.security.encode_id})
+        if metadata.has_repository_dependencies and recursive:
+            metadata_dict["repository_dependencies"] = get_all_dependencies(
+                app, metadata, processed_dependency_links=[]
+            )
+        else:
+            metadata_dict["repository_dependencies"] = []
+        if metadata.includes_tools:
+            metadata_dict["tools"] = metadata.metadata["tools"]
+        all_metadata[f"{int(changeset)}:{changehash}"] = metadata_dict
+    return all_metadata
+
+
+def readmes(app: ToolShedApp, repository: Repository, changeset_revision: str) -> dict:
+    encoded_repository_id = app.security.encode_id(repository.id)
+    repository_metadata = get_repository_metadata_by_changeset_revision(app, encoded_repository_id, changeset_revision)
+    if repository_metadata:
+        metadata = repository_metadata.metadata
+        if metadata:
+            return build_readme_files_dict(app, repository, changeset_revision, metadata)
+    return {}
+
+
+def reset_metadata_on_repository(trans: ProvidesUserContext, repository_id) -> ResetMetadataOnRepositoryResponse:
+    app: ToolShedApp = trans.app
+
+    def handle_repository(trans, start_time, repository):
+        results = dict(start_time=start_time, repository_status=[])
+        try:
+            rmm = repository_metadata_manager.RepositoryMetadataManager(
+                app=app,
+                user=trans.user,
+                repository=repository,
+                resetting_all_metadata_on_repository=True,
+                updating_installed_repository=False,
+                persist=False,
+            )
+            rmm.reset_all_metadata_on_repository_in_tool_shed()
+            rmm_invalid_file_tups = rmm.get_invalid_file_tups()
+            if rmm_invalid_file_tups:
+                message = generate_message_for_invalid_tools(
+                    app, rmm_invalid_file_tups, repository, None, as_html=False
+                )
+                results["status"] = "warning"
+            else:
+                message = (
+                    f"Successfully reset metadata on repository {repository.name} owned by {repository.user.username}"
+                )
+                results["status"] = "ok"
+        except Exception as e:
+            message = (
+                f"Error resetting metadata on repository {repository.name} owned by {repository.user.username}: {e}"
+            )
+            results["status"] = "error"
+        status = f"{repository.name} : {message}"
+        results["repository_status"].append(status)
+        return results
+
+    if repository_id is None:
+        raise RequestParameterInvalidException("Missing required parameter 'repository_id'.")
+    repository = get_repository_in_tool_shed(app, repository_id)
+    start_time = strftime("%Y-%m-%d %H:%M:%S")
+    log.debug(f"{start_time}...resetting metadata on repository {repository.name}")
+    results = handle_repository(trans, start_time, repository)
+    stop_time = strftime("%Y-%m-%d %H:%M:%S")
+    results["stop_time"] = stop_time
+    return ResetMetadataOnRepositoryResponse(**results)
+
+
+def create_repository(trans: ProvidesUserContext, request: CreateRepositoryRequest) -> Repository:
+    app: ToolShedApp = trans.app
+    user = trans.user
+    assert user
+
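+    # galaxy.util.listify accepts either a comma-separated string or a list of encoded category ids.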
+    category_ids = listify(request.category_ids)
+    name = request.name
+    invalid_message = validate_repository_name(app, name, user)
+    if invalid_message:
+        raise RequestParameterInvalidException(invalid_message)
+
+    repo, _ = low_level_create_repository(
+        app=app,
+        name=name,
+        type=request.type_,
+        description=request.synopsis,
+        long_description=request.description,
+        user_id=user.id,
+        category_ids=category_ids,
+        remote_repository_url=request.remote_repository_url,
+        homepage_url=request.homepage_url,
+    )
+    return repo
+
+
+def to_element_dict(app, repository: Repository, include_categories: bool = False) -> Dict[str, Any]:
+    value_mapper = get_value_mapper(app)
+    repository_dict = repository.to_dict(view="element", value_mapper=value_mapper)
+    if include_categories:
+        repository_dict["category_ids"] = [app.security.encode_id(x.category.id) for x in repository.categories]
+    return repository_dict
+
+
+def repositories_by_category(
+    app: ToolShedApp,
+    category_id: str,
+    page: Optional[int] = None,
+    sort_key: str = "name",
+    sort_order: str = "asc",
+    installable: bool = True,
+):
+    category = get_category(app, category_id)
+    category_dict: Dict[str, Any]
+    if category is None:
+        category_dict = dict(message=f"Unable to locate category record for id {category_id}.", status="error")
+        return category_dict
+    category_dict = category.to_dict(view="element", value_mapper=category_value_mapper(app))
+    category_dict["repository_count"] = count_repositories_in_category(app, category_id)
+    repositories = get_repositories_by_category(
+        app, category.id, installable=installable, sort_order=sort_order, sort_key=sort_key, page=page
+    )
+    category_dict["repositories"] = repositories
+    return category_dict
+
+
+def to_model(app, repository: Repository) -> SchemaRepository:
+    return SchemaRepository(**to_element_dict(app, repository))
+
+
 def upload_tar_and_set_metadata(
     app: ToolShedApp,
     host: str,
diff --git a/lib/tool_shed/managers/tools.py b/lib/tool_shed/managers/tools.py
new file mode 100644
index 000000000000..84dfc9c6d0fc
--- /dev/null
+++ b/lib/tool_shed/managers/tools.py
@@ -0,0 +1,44 @@
+from collections import namedtuple
+
+from galaxy import exceptions
+from tool_shed.context import SessionRequestContext
+from tool_shed.webapp.search.tool_search import ToolSearch
+
+
+def search(trans: SessionRequestContext, q: str, page: int = 1, page_size: int = 10) -> dict:
+    """
+    Perform the search over the TS tools index.
+    Note that the search is performed over the Whoosh index, which must be
+    pre-created manually with scripts/tool_shed/build_ts_whoosh_index.sh.
+    The TS config option toolshed_search_on must be True and
+    whoosh_index_dir must be specified.
+    """
+    app = trans.app
+    conf = app.config
+    if not conf.toolshed_search_on:
+        raise exceptions.ConfigDoesNotAllowException(
+            "Searching the TS through the API is turned off for this instance."
+        )
+    if not conf.whoosh_index_dir:
+        raise exceptions.ConfigDoesNotAllowException(
+            "There is no directory for the search index specified. Please contact the administrator."
+ ) + search_term = q.strip() + if len(search_term) < 1: + raise exceptions.RequestParameterInvalidException("The search term has to be at least one character long.") + + tool_search = ToolSearch() + + Boosts = namedtuple( + "Boosts", ["tool_name_boost", "tool_description_boost", "tool_help_boost", "tool_repo_owner_username_boost"] + ) + boosts = Boosts( + float(conf.get("tool_name_boost", 1.2)), + float(conf.get("tool_description_boost", 0.6)), + float(conf.get("tool_help_boost", 0.4)), + float(conf.get("tool_repo_owner_username_boost", 0.3)), + ) + + results = tool_search.search(trans.app, search_term, page, page_size, boosts) + results["hostname"] = trans.url_builder("/", qualified=True) + return results diff --git a/lib/tool_shed/managers/users.py b/lib/tool_shed/managers/users.py index a6cc02a1dc05..055aaf1945d1 100644 --- a/lib/tool_shed/managers/users.py +++ b/lib/tool_shed/managers/users.py @@ -1,8 +1,35 @@ +from typing import List + +from galaxy.exceptions import RequestParameterInvalidException +from galaxy.model.base import transaction +from galaxy.security.validate_user_input import ( + validate_email, + validate_password, + validate_publicname, +) +from tool_shed.context import ProvidesUserContext from tool_shed.structured_app import ToolShedApp from tool_shed.webapp.model import User +from tool_shed_client.schema import ( + CreateUserRequest, + User as ApiUser, +) + + +def index(app: ToolShedApp, deleted: bool) -> List[ApiUser]: + users: List[ApiUser] = [] + for user in ( + app.model.context.query(app.model.User) + .filter(app.model.User.table.c.deleted == deleted) + .order_by(app.model.User.table.c.username) + ): + users.append(get_api_user(app, user)) + return users def create_user(app: ToolShedApp, email: str, username: str, password: str) -> User: + if username == "repos": + raise RequestParameterInvalidException("Cannot create a tool shed user with the username repos") sa_session = app.model.context user = User(email=email) user.set_password_cleartext(password) @@ -13,6 +40,38 @@ def create_user(app: ToolShedApp, email: str, username: str, password: str) -> U # else: # user.active = True # Activation is off, every new user is active by default. sa_session.add(user) - sa_session.flush() + with transaction(sa_session): + sa_session.commit() app.security_agent.create_private_user_role(user) return user + + +def api_create_user(trans: ProvidesUserContext, request: CreateUserRequest) -> ApiUser: + app = trans.app + message = _validate( + trans, email=request.email, password=request.password, confirm=request.password, username=request.username + ) + if message: + raise RequestParameterInvalidException(message) + user = create_user(app, request.email, request.username, request.password) + return get_api_user(app, user) + + +def get_api_user(app: ToolShedApp, user: User) -> ApiUser: + return ApiUser( + id=app.security.encode_id(user.id), + username=user.username, + ) + + +def _validate(trans: ProvidesUserContext, email: str, password: str, confirm: str, username: str) -> str: + if username in ["repos"]: + return f"The term '{username}' is a reserved word in the Tool Shed, so it cannot be used as a public user name." 
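+    # Join the individual validation messages; an empty string means all checks passed.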
+ message = "\n".join( + ( + validate_email(trans, email), + validate_password(trans, password, confirm), + validate_publicname(trans, username), + ) + ).rstrip() + return message diff --git a/lib/tool_shed/test/functional/api_notes.md b/lib/tool_shed/test/functional/api_notes.md index 0e225c08478c..dc207aa658f0 100644 --- a/lib/tool_shed/test/functional/api_notes.md +++ b/lib/tool_shed/test/functional/api_notes.md @@ -41,4 +41,10 @@ for deletion instead of writing new tests and modernizing the API. | repositories/{repository_id}/changeset_revision | NO | YES | NO | YES | NO | | | POST repositories | NO | YES | NO | YES | NO | | | GET repositories (without search query) | ? | ? |? | True | True | | +| GET /repositories/updates/ | YES | NO | NO | NO | YES | | +Research if searching by tool_ids is used with the repository index API. + +Added in: +- https://github.com/galaxyproject/galaxy/pull/3626/files +- Likely no longer used? diff --git a/lib/tool_shed/test/functional/test_shed_repositories.py b/lib/tool_shed/test/functional/test_shed_repositories.py index f2fba654d2ad..8fefcddea005 100644 --- a/lib/tool_shed/test/functional/test_shed_repositories.py +++ b/lib/tool_shed/test/functional/test_shed_repositories.py @@ -52,6 +52,8 @@ def test_metadata_simple(self): assert not only_revision.malicious def test_index_simple(self): + # Logic and typing is pretty different if given a tool id to search for - this should + # be tested or dropped in v2. populator = self.populator repo = populator.setup_column_maker_repo(prefix="repoforindex") repository_id = repo.id diff --git a/lib/tool_shed/webapp/api/categories.py b/lib/tool_shed/webapp/api/categories.py index c0ca91ef8704..15183b2a62ab 100644 --- a/lib/tool_shed/webapp/api/categories.py +++ b/lib/tool_shed/webapp/api/categories.py @@ -1,13 +1,13 @@ import logging from typing import ( Any, - Callable, Dict, + List, ) import tool_shed.util.shed_util_common as suc +import tool_shed_client.schema from galaxy import ( - exceptions, util, web, ) @@ -17,18 +17,22 @@ expose_api_anonymous_and_sessionless, require_admin, ) -from galaxy.webapps.base.controller import BaseAPIController -from tool_shed.util import repository_util +from galaxy.webapps.galaxy.api import depends +from tool_shed.managers.categories import ( + CategoryManager, + get_value_mapper, +) +from tool_shed.managers.repositories import repositories_by_category +from tool_shed.webapp.model import Category +from . import BaseShedAPIController log = logging.getLogger(__name__) -class CategoriesController(BaseAPIController): +class CategoriesController(BaseShedAPIController): """RESTful controller for interactions with categories in the Tool Shed.""" - def __get_value_mapper(self, trans) -> Dict[str, Callable]: - value_mapper = {"id": trans.security.encode_id} - return value_mapper + category_manager: CategoryManager = depends(CategoryManager) @expose_api @require_admin @@ -46,27 +50,16 @@ def create(self, trans, payload, **kwd): Content-Disposition: form-data; name="description" Category_Description """ category_dict = dict(message="", status="ok") - name = payload.get("name", "") - if name: - description = payload.get("description", "") - if not description: - # Default the description to the name. 
- description = name - if suc.get_category_by_name(self.app, name): - raise exceptions.Conflict("A category with that name already exists.") - else: - # Create the category - category = self.app.model.Category(name=name, description=description) - trans.sa_session.add(category) - with transaction(trans.sa_session): - trans.sa_session.commit() - category_dict = category.to_dict(view="element", value_mapper=self.__get_value_mapper(trans)) - category_dict["message"] = f"Category '{str(category.name)}' has been created" - category_dict["url"] = web.url_for( - controller="categories", action="show", id=trans.security.encode_id(category.id) - ) - else: - raise exceptions.RequestParameterMissingException('Missing required parameter "name".') + request = tool_shed_client.schema.CreateCategoryRequest( + name=payload.get("name"), + description=payload.get("description", ""), + ) + category: Category = self.category_manager.create(trans, request) + category_dict = category.to_dict(view="element", value_mapper=get_value_mapper(trans.app)) + category_dict["message"] = f"Category '{str(category.name)}' has been created" + category_dict["url"] = web.url_for( + controller="categories", action="show", id=trans.security.encode_id(category.id) + ) return category_dict @expose_api_anonymous_and_sessionless @@ -86,24 +79,19 @@ def get_repositories(self, trans, category_id, **kwd): sort_key = kwd.get("sort_key", "name") sort_order = kwd.get("sort_order", "asc") page = kwd.get("page", None) - category = suc.get_category(self.app, category_id) - category_dict: Dict[str, Any] - if category is None: - category_dict = dict(message=f"Unable to locate category record for id {str(id)}.", status="error") - return category_dict - category_dict = category.to_dict(view="element", value_mapper=self.__get_value_mapper(trans)) - category_dict["repository_count"] = suc.count_repositories_in_category(self.app, category_id) - category_dict["url"] = web.url_for( - controller="categories", action="show", id=trans.security.encode_id(category.id) - ) - repositories = repository_util.get_repositories_by_category( - self.app, category.id, installable=installable, sort_order=sort_order, sort_key=sort_key, page=page + category_dict = repositories_by_category( + self.app, + category_id, + page=page, + sort_key=sort_key, + sort_order=sort_order, + installable=installable, ) - category_dict["repositories"] = repositories + category_dict["url"] = web.url_for(controller="categories", action="show", id=category_dict["id"]) return category_dict @expose_api_anonymous_and_sessionless - def index(self, trans, deleted=False, **kwd): + def index(self, trans, deleted=False, **kwd) -> List[Dict[str, Any]]: """ GET /api/categories Return a list of dictionaries that contain information about each Category. 
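A minimal sketch of how the manager entry point used above can be called (illustration only; `list_category_repositories` is a hypothetical wrapper, and `app` is assumed to be the Tool Shed application object):

```python
from tool_shed.managers.repositories import repositories_by_category

def list_category_repositories(app, encoded_category_id: str):
    # Returns the category as a dict with "repository_count" and "repositories"
    # filled in, or a dict with status="error" when the id cannot be resolved.
    return repositories_by_category(
        app, encoded_category_id, page=1, sort_key="name", sort_order="asc", installable=True
    )
```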
@@ -112,24 +100,8 @@ def index(self, trans, deleted=False, **kwd): Example: GET localhost:9009/api/categories """ - category_dicts = [] deleted = util.asbool(deleted) - if deleted and not trans.user_is_admin: - raise exceptions.AdminRequiredException("Only administrators can query deleted categories.") - for category in ( - trans.sa_session.query(self.app.model.Category) - .filter(self.app.model.Category.table.c.deleted == deleted) - .order_by(self.app.model.Category.table.c.name) - ): - category_dict = category.to_dict(view="collection", value_mapper=self.__get_value_mapper(trans)) - category_dict["url"] = web.url_for( - controller="categories", action="show", id=trans.security.encode_id(category.id) - ) - category_dict[ - "repositories" - ] = self.app.repository_registry.viewable_repositories_and_suites_by_category.get(category.name, 0) - category_dicts.append(category_dict) - return category_dicts + return self.category_manager.index(trans, deleted) @expose_api_anonymous_and_sessionless def show(self, trans, id, **kwd): @@ -145,7 +117,7 @@ def show(self, trans, id, **kwd): if category is None: category_dict = dict(message=f"Unable to locate category record for id {str(id)}.", status="error") return category_dict - category_dict = category.to_dict(view="element", value_mapper=self.__get_value_mapper(trans)) + category_dict = category.to_dict(view="element", value_mapper=get_value_mapper(trans.app)) category_dict["url"] = web.url_for( controller="categories", action="show", id=trans.security.encode_id(category.id) ) diff --git a/lib/tool_shed/webapp/api/configuration.py b/lib/tool_shed/webapp/api/configuration.py index fb4495976071..920baeb670b5 100644 --- a/lib/tool_shed/webapp/api/configuration.py +++ b/lib/tool_shed/webapp/api/configuration.py @@ -5,15 +5,12 @@ import logging from galaxy.web import expose_api_anonymous_and_sessionless -from galaxy.webapps.base.controller import BaseAPIController +from . import BaseShedAPIController log = logging.getLogger(__name__) -class ConfigurationController(BaseAPIController): - def __init__(self, app): - super().__init__(app) - +class ConfigurationController(BaseShedAPIController): @expose_api_anonymous_and_sessionless def version(self, trans, **kwds): """ diff --git a/lib/tool_shed/webapp/api/groups.py b/lib/tool_shed/webapp/api/groups.py index 05f46673fc8b..72329e20b787 100644 --- a/lib/tool_shed/webapp/api/groups.py +++ b/lib/tool_shed/webapp/api/groups.py @@ -22,16 +22,17 @@ expose_api_anonymous_and_sessionless, require_admin, ) -from galaxy.webapps.base.controller import BaseAPIController from tool_shed.managers import groups +from tool_shed.structured_app import ToolShedApp +from . 
import BaseShedAPIController log = logging.getLogger(__name__) -class GroupsController(BaseAPIController): +class GroupsController(BaseShedAPIController): """RESTful controller for interactions with groups in the Tool Shed.""" - def __init__(self, app): + def __init__(self, app: ToolShedApp): super().__init__(app) self.group_manager = groups.GroupManager() diff --git a/lib/tool_shed/webapp/api/repositories.py b/lib/tool_shed/webapp/api/repositories.py index 28094114fca2..403d04182191 100644 --- a/lib/tool_shed/webapp/api/repositories.py +++ b/lib/tool_shed/webapp/api/repositories.py @@ -1,7 +1,6 @@ import json import logging import os -from collections import namedtuple from io import StringIO from time import strftime from typing import ( @@ -9,10 +8,6 @@ Dict, ) -from sqlalchemy import ( - and_, - false, -) from webob.compat import cgi_FieldStorage from galaxy import ( @@ -21,7 +16,6 @@ ) from galaxy.exceptions import ( ActionInputError, - ConfigDoesNotAllowException, InsufficientPermissionsException, MessageException, ObjectNotFound, @@ -33,26 +27,41 @@ expose_api_anonymous_and_sessionless, expose_api_raw_anonymous_and_sessionless, ) -from galaxy.webapps.base.controller import ( - BaseAPIController, - HTTPBadRequest, +from galaxy.webapps.base.controller import HTTPBadRequest +from tool_shed.managers.repositories import ( + can_update_repo, + check_updates, + create_repository, + get_install_info, + get_ordered_installable_revisions, + get_repository_metadata_dict, + get_value_mapper, + index_repositories, + index_tool_ids, + reset_metadata_on_repository, + search, + to_element_dict, + UpdatesRequest, + upload_tar_and_set_metadata, ) -from tool_shed.managers.repositories import upload_tar_and_set_metadata from tool_shed.metadata import repository_metadata_manager from tool_shed.repository_types import util as rt_util from tool_shed.util import ( - encoding_util, metadata_util, repository_util, tool_util, ) from tool_shed.webapp import model -from tool_shed.webapp.search.repo_search import RepoSearch +from tool_shed_client.schema import ( + CreateRepositoryRequest, + LegacyInstallInfoTuple, +) +from . import BaseShedAPIController log = logging.getLogger(__name__) -class RepositoriesController(BaseAPIController): +class RepositoriesController(BaseShedAPIController): """RESTful controller for interactions with repositories in the Tool Shed.""" @web.legacy_expose_api @@ -115,28 +124,12 @@ def get_ordered_installable_revisions(self, trans, name=None, owner=None, **kwd) if owner is None: owner = kwd.get("owner", None) tsr_id = kwd.get("tsr_id", None) - eagerload_columns = [model.Repository.downloadable_revisions] - if None not in [name, owner]: - # Get the repository information. - repository = repository_util.get_repository_by_name_and_owner( - self.app, name, owner, eagerload_columns=eagerload_columns - ) - if repository is None: - trans.response.status = 404 - return {"status": "error", "message": f"No repository named {name} found with owner {owner}"} - elif tsr_id is not None: - repository = repository_util.get_repository_in_tool_shed( - self.app, tsr_id, eagerload_columns=eagerload_columns - ) - else: - error_message = "Error in the Tool Shed repositories API in get_ordered_installable_revisions: " - error_message += "invalid parameters received." 
- log.debug(error_message) - return [] - return [revision[1] for revision in repository.installable_revisions(self.app, sort_revisions=True)] + return get_ordered_installable_revisions(self.app, name, owner, tsr_id) @web.legacy_expose_api_anonymous - def get_repository_revision_install_info(self, trans, name, owner, changeset_revision, **kwd): + def get_repository_revision_install_info( + self, trans, name, owner, changeset_revision, **kwd + ) -> LegacyInstallInfoTuple: """ GET /api/repositories/get_repository_revision_install_info @@ -209,65 +202,7 @@ def get_repository_revision_install_info(self, trans, name, owner, changeset_rev } """ - # Example URL: - # http:///api/repositories/get_repository_revision_install_info?name=&owner=&changeset_revision= - if name and owner and changeset_revision: - # Get the repository information. - repository = repository_util.get_repository_by_name_and_owner( - self.app, name, owner, eagerload_columns=[model.Repository.downloadable_revisions] - ) - if repository is None: - log.debug(f"Cannot locate repository {name} owned by {owner}") - return {}, {}, {} - encoded_repository_id = trans.security.encode_id(repository.id) - repository_dict = repository.to_dict(view="element", value_mapper=self.__get_value_mapper(trans)) - repository_dict["url"] = web.url_for(controller="repositories", action="show", id=encoded_repository_id) - # Get the repository_metadata information. - repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( - self.app, encoded_repository_id, changeset_revision - ) - if repository_metadata is None: - # The changeset_revision column in the repository_metadata table has been updated with a new - # value value, so find the changeset_revision to which we need to update. - new_changeset_revision = metadata_util.get_next_downloadable_changeset_revision( - self.app, repository, changeset_revision - ) - repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( - self.app, encoded_repository_id, new_changeset_revision - ) - changeset_revision = new_changeset_revision - if repository_metadata is not None: - encoded_repository_metadata_id = trans.security.encode_id(repository_metadata.id) - repository_metadata_dict = repository_metadata.to_dict( - view="collection", value_mapper=self.__get_value_mapper(trans) - ) - repository_metadata_dict["url"] = web.url_for( - controller="repository_revisions", action="show", id=encoded_repository_metadata_id - ) - if "tools" in repository_metadata.metadata: - repository_metadata_dict["valid_tools"] = repository_metadata.metadata["tools"] - # Get the repo_info_dict for installing the repository. - ( - repo_info_dict, - includes_tools, - includes_tool_dependencies, - includes_tools_for_display_in_tool_panel, - has_repository_dependencies, - has_repository_dependencies_only_if_compiling_contained_td, - ) = repository_util.get_repo_info_dict(self.app, trans.user, encoded_repository_id, changeset_revision) - return repository_dict, repository_metadata_dict, repo_info_dict - else: - log.debug( - "Unable to locate repository_metadata record for repository id %s and changeset_revision %s", - repository.id, - changeset_revision, - ) - return repository_dict, {}, {} - else: - debug_msg = "Error in the Tool Shed repositories API in get_repository_revision_install_info: " - debug_msg += f"Invalid name {name} or owner {owner} or changeset_revision {changeset_revision} received." 
- log.debug(debug_msg) - return {}, {}, {} + return get_install_info(self.app, name, owner, changeset_revision) @web.legacy_expose_api_anonymous def get_installable_revisions(self, trans, **kwd): @@ -292,12 +227,7 @@ def get_installable_revisions(self, trans, **kwd): return repository.installable_revisions(self.app) def __get_value_mapper(self, trans) -> Dict[str, Callable]: - value_mapper = { - "id": trans.security.encode_id, - "repository_id": trans.security.encode_id, - "user_id": trans.security.encode_id, - } - return value_mapper + return get_value_mapper(self.app) @expose_api_raw_anonymous_and_sessionless def index(self, trans, deleted=False, owner=None, name=None, **kwd): @@ -352,7 +282,7 @@ def index(self, trans, deleted=False, owner=None, name=None, **kwd): raise RequestParameterInvalidException('The "page" and "page_size" parameters have to be integers.') return_jsonp = util.asbool(kwd.get("jsonp", False)) callback = kwd.get("callback", "callback") - search_results = self._search(trans, q, page, page_size) + search_results = search(trans, q, page, page_size) if return_jsonp: response = str(f"{callback}({json.dumps(search_results)});") else: @@ -361,134 +291,18 @@ def index(self, trans, deleted=False, owner=None, name=None, **kwd): tool_ids = kwd.get("tool_ids", None) if tool_ids is not None: tool_ids = util.listify(tool_ids) - repository_found = [] - all_metadata = dict() - for tool_id in tool_ids: - # A valid GUID looks like toolshed.g2.bx.psu.edu/repos/bgruening/deeptools/deeptools_computeMatrix/1.1.0 - shed, _, owner, name, tool, version = tool_id.split("/") - clause_list = [ - and_( - self.app.model.Repository.table.c.deprecated == false(), - self.app.model.Repository.table.c.deleted == false(), - self.app.model.Repository.table.c.name == name, - self.app.model.User.table.c.username == owner, - self.app.model.Repository.table.c.user_id == self.app.model.User.table.c.id, - ) + response = index_tool_ids(self.app, tool_ids) + return json.dumps(response) + else: + repositories = index_repositories(self.app, name, owner, deleted) + repository_dicts = [] + for repository in repositories: + repository_dict = repository.to_dict(view="collection", value_mapper=self.__get_value_mapper(trans)) + repository_dict["category_ids"] = [ + trans.security.encode_id(x.category.id) for x in repository.categories ] - repository = trans.sa_session.query(self.app.model.Repository).filter(*clause_list).first() - if not repository: - log.warning(f"Repository {owner}/{name} does not exist, skipping") - continue - for changeset, changehash in repository.installable_revisions(self.app): - metadata = metadata_util.get_current_repository_metadata_for_changeset_revision( - self.app, repository, changehash - ) - tools = metadata.metadata.get("tools") - if not tools: - log.warning(f"Repository {owner}/{name}/{changehash} does not contain valid tools, skipping") - continue - for tool in tools: - if tool["guid"] in tool_ids: - repository_found.append("%d:%s" % (int(changeset), changehash)) - metadata = metadata_util.get_current_repository_metadata_for_changeset_revision( - self.app, repository, changehash - ) - if metadata is None: - continue - metadata_dict = metadata.to_dict( - value_mapper={"id": self.app.security.encode_id, "repository_id": self.app.security.encode_id} - ) - metadata_dict["repository"] = repository.to_dict(value_mapper={"id": self.app.security.encode_id}) - if metadata.has_repository_dependencies: - metadata_dict["repository_dependencies"] = metadata_util.get_all_dependencies( - self.app, 
metadata, processed_dependency_links=[] - ) - else: - metadata_dict["repository_dependencies"] = [] - if metadata.includes_tool_dependencies: - metadata_dict["tool_dependencies"] = repository.get_tool_dependencies(self.app, changehash) - else: - metadata_dict["tool_dependencies"] = {} - if metadata.includes_tools: - metadata_dict["tools"] = metadata.metadata["tools"] - all_metadata[f"{int(changeset)}:{changehash}"] = metadata_dict - if repository_found: - all_metadata["current_changeset"] = repository_found[0] - # all_metadata[ 'found_changesets' ] = repository_found - return json.dumps(all_metadata) - return "{}" - - clause_list = [ - and_( - self.app.model.Repository.table.c.deprecated == false(), - self.app.model.Repository.table.c.deleted == deleted, - ) - ] - if owner is not None: - clause_list.append( - and_( - self.app.model.User.table.c.username == owner, - self.app.model.Repository.table.c.user_id == self.app.model.User.table.c.id, - ) - ) - if name is not None: - clause_list.append(self.app.model.Repository.table.c.name == name) - for repository in ( - trans.sa_session.query(self.app.model.Repository) - .filter(*clause_list) - .order_by(self.app.model.Repository.table.c.name) - ): - repository_dict = repository.to_dict(view="collection", value_mapper=self.__get_value_mapper(trans)) - repository_dict["category_ids"] = [trans.security.encode_id(x.category.id) for x in repository.categories] - repository_dicts.append(repository_dict) - return json.dumps(repository_dicts) - - def _search(self, trans, q, page=1, page_size=10): - """ - Perform the search over TS repositories. - Note that search works over the Whoosh index which you have - to pre-create with scripts/tool_shed/build_ts_whoosh_index.sh manually. - Also TS config option toolshed_search_on has to be True and - whoosh_index_dir has to be specified. - """ - conf = self.app.config - if not conf.toolshed_search_on: - raise ConfigDoesNotAllowException("Searching the TS through the API is turned off for this instance.") - if not conf.whoosh_index_dir: - raise ConfigDoesNotAllowException( - "There is no directory for the search index specified. Please contact the administrator." - ) - search_term = q.strip() - if len(search_term) < 1: - raise RequestParameterInvalidException("The search term has to be at least one character long.") - - repo_search = RepoSearch() - - Boosts = namedtuple( - "Boosts", - [ - "repo_name_boost", - "repo_description_boost", - "repo_long_description_boost", - "repo_homepage_url_boost", - "repo_remote_repository_url_boost", - "categories_boost", - "repo_owner_username_boost", - ], - ) - boosts = Boosts( - float(conf.get("repo_name_boost", 0.9)), - float(conf.get("repo_description_boost", 0.6)), - float(conf.get("repo_long_description_boost", 0.5)), - float(conf.get("repo_homepage_url_boost", 0.3)), - float(conf.get("repo_remote_repository_url_boost", 0.2)), - float(conf.get("categories_boost", 0.5)), - float(conf.get("repo_owner_username_boost", 0.3)), - ) - - results = repo_search.search(trans, search_term, page, page_size, boosts) - results["hostname"] = web.url_for("/", qualified=True) - return results + repository_dicts.append(repository_dict) + return json.dumps(repository_dicts) @web.legacy_expose_api def remove_repository_registry_entry(self, trans, payload, **kwd): @@ -654,46 +468,8 @@ def reset_metadata_on_repository(self, trans, payload, **kwd): The following parameters must be included in the payload. :param repository_id: the encoded id of the repository on which metadata is to be reset. 
""" - - def handle_repository(trans, start_time, repository): - results = dict(start_time=start_time, repository_status=[]) - try: - rmm = repository_metadata_manager.RepositoryMetadataManager( - app=self.app, - user=trans.user, - repository=repository, - resetting_all_metadata_on_repository=True, - updating_installed_repository=False, - persist=False, - ) - rmm.reset_all_metadata_on_repository_in_tool_shed() - rmm_invalid_file_tups = rmm.get_invalid_file_tups() - if rmm_invalid_file_tups: - message = tool_util.generate_message_for_invalid_tools( - self.app, rmm_invalid_file_tups, repository, None, as_html=False - ) - results["status"] = "warning" - else: - message = f"Successfully reset metadata on repository {repository.name} owned by {repository.user.username}" - results["status"] = "ok" - except Exception as e: - message = ( - f"Error resetting metadata on repository {repository.name} owned by {repository.user.username}: {e}" - ) - results["status"] = "error" - status = f"{repository.name} : {message}" - results["repository_status"].append(status) - return results - repository_id = payload.get("repository_id", None) - if repository_id is not None: - repository = repository_util.get_repository_in_tool_shed(self.app, repository_id) - start_time = strftime("%Y-%m-%d %H:%M:%S") - log.debug(f"{start_time}...resetting metadata on repository {repository.name}") - results = handle_repository(trans, start_time, repository) - stop_time = strftime("%Y-%m-%d %H:%M:%S") - results["stop_time"] = stop_time - return results + return reset_metadata_on_repository(trans, repository_id).dict() @expose_api_anonymous_and_sessionless def show(self, trans, id, **kwd): @@ -743,63 +519,13 @@ def updates(self, trans, **kwd): owner = kwd.get("owner", None) changeset_revision = kwd.get("changeset_revision", None) hexlify_this = util.asbool(kwd.get("hexlify", True)) - repository = repository_util.get_repository_by_name_and_owner( - trans.app, name, owner, eagerload_columns=[model.Repository.downloadable_revisions] + request = UpdatesRequest( + name=name, + owner=owner, + changeset_revision=changeset_revision, + hexlify=hexlify_this, ) - if repository and repository.downloadable_revisions: - repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( - trans.app, trans.security.encode_id(repository.id), changeset_revision - ) - tool_shed_status_dict = {} - # Handle repository deprecation. - tool_shed_status_dict["repository_deprecated"] = str(repository.deprecated) - tip_revision = repository.downloadable_revisions[0] - # Handle latest installable revision. - if changeset_revision == tip_revision: - tool_shed_status_dict["latest_installable_revision"] = "True" - else: - next_installable_revision = metadata_util.get_next_downloadable_changeset_revision( - trans.app, repository, changeset_revision - ) - if repository_metadata is None: - if next_installable_revision and next_installable_revision != changeset_revision: - tool_shed_status_dict["latest_installable_revision"] = "True" - else: - tool_shed_status_dict["latest_installable_revision"] = "False" - else: - if next_installable_revision and next_installable_revision != changeset_revision: - tool_shed_status_dict["latest_installable_revision"] = "False" - else: - tool_shed_status_dict["latest_installable_revision"] = "True" - # Handle revision updates. 
- if changeset_revision == tip_revision: - tool_shed_status_dict["revision_update"] = "False" - else: - if repository_metadata is None: - tool_shed_status_dict["revision_update"] = "True" - else: - tool_shed_status_dict["revision_update"] = "False" - # Handle revision upgrades. - metadata_revisions = [ - revision[1] for revision in metadata_util.get_metadata_revisions(trans.app, repository) - ] - num_metadata_revisions = len(metadata_revisions) - for index, metadata_revision in enumerate(metadata_revisions): - if index == num_metadata_revisions: - tool_shed_status_dict["revision_upgrade"] = "False" - break - if metadata_revision == changeset_revision: - if num_metadata_revisions - index > 1: - tool_shed_status_dict["revision_upgrade"] = "True" - else: - tool_shed_status_dict["revision_upgrade"] = "False" - break - return ( - encoding_util.tool_shed_encode(tool_shed_status_dict) - if hexlify_this - else json.dumps(tool_shed_status_dict) - ) - return encoding_util.tool_shed_encode({}) if hexlify_this else json.dumps({}) + return check_updates(trans.app, request) @expose_api_anonymous_and_sessionless def show_tools(self, trans, id, changeset, **kwd): @@ -843,32 +569,7 @@ def metadata(self, trans, id, **kwd): """ recursive = util.asbool(kwd.get("recursive", "True")) downloadable_only = util.asbool(kwd.get("downloadable_only", "True")) - all_metadata = {} - repository = repository_util.get_repository_in_tool_shed( - self.app, id, eagerload_columns=[model.Repository.downloadable_revisions] - ) - for changeset, changehash in metadata_util.get_metadata_revisions( - self.app, repository, sort_revisions=True, downloadable=downloadable_only - ): - metadata = metadata_util.get_current_repository_metadata_for_changeset_revision( - self.app, repository, changehash - ) - if metadata is None: - continue - metadata_dict = metadata.to_dict( - value_mapper={"id": self.app.security.encode_id, "repository_id": self.app.security.encode_id} - ) - metadata_dict["repository"] = repository.to_dict(value_mapper={"id": self.app.security.encode_id}) - if metadata.has_repository_dependencies and recursive: - metadata_dict["repository_dependencies"] = metadata_util.get_all_dependencies( - self.app, metadata, processed_dependency_links=[] - ) - else: - metadata_dict["repository_dependencies"] = [] - if metadata.includes_tools: - metadata_dict["tools"] = metadata.metadata["tools"] - all_metadata[f"{int(changeset)}:{changehash}"] = metadata_dict - return all_metadata + return get_repository_metadata_dict(self.app, id, recursive, downloadable_only) @expose_api def update(self, trans, id, **kwd): @@ -966,31 +667,22 @@ def create(self, trans, **kwd): description = payload.get("description", "") remote_repository_url = payload.get("remote_repository_url", "") homepage_url = payload.get("homepage_url", "") - category_ids = util.listify(payload.get("category_ids[]", "")) repo_type = payload.get("type", rt_util.UNRESTRICTED) if repo_type not in rt_util.types: raise RequestParameterInvalidException("This repository type is not valid") - invalid_message = repository_util.validate_repository_name(self.app, name, trans.user) - if invalid_message: - raise RequestParameterInvalidException(invalid_message) - - repo, message = repository_util.create_repository( - app=self.app, + request = CreateRepositoryRequest( name=name, - type=repo_type, - description=synopsis, - long_description=description, - user_id=trans.user.id, - category_ids=category_ids, + synopsis=synopsis, + description=description, 
remote_repository_url=remote_repository_url, homepage_url=homepage_url, + category_ids=payload.get("category_ids[]", ""), + type_=repo_type, ) - - repository_dict = repo.to_dict(view="element", value_mapper=self.__get_value_mapper(trans)) - repository_dict["category_ids"] = [trans.security.encode_id(x.category.id) for x in repo.categories] - return repository_dict + repo = create_repository(trans, request) + return to_element_dict(self.app, repo, include_categories=True) @web.legacy_expose_api def create_changeset_revision(self, trans, id, payload, **kwd): @@ -1011,11 +703,7 @@ def create_changeset_revision(self, trans, id, payload, **kwd): # Example URL: http://localhost:9009/api/repositories/f9cad7b01a472135 repository = repository_util.get_repository_in_tool_shed(self.app, id) - if not ( - trans.user_is_admin - or self.app.security_agent.user_can_administer_repository(trans.user, repository) - or self.app.security_agent.can_push(self.app, trans.user, repository) - ): + if not can_update_repo(trans, repository): trans.response.status = 400 return { "err_msg": "You do not have permission to update this repository.", diff --git a/lib/tool_shed/webapp/api/repository_revisions.py b/lib/tool_shed/webapp/api/repository_revisions.py index 4c66fbeb3b40..1b413827aced 100644 --- a/lib/tool_shed/webapp/api/repository_revisions.py +++ b/lib/tool_shed/webapp/api/repository_revisions.py @@ -11,19 +11,17 @@ web, ) from galaxy.model.base import transaction -from galaxy.webapps.base.controller import ( - BaseAPIController, - HTTPBadRequest, -) +from galaxy.webapps.base.controller import HTTPBadRequest from tool_shed.util import ( metadata_util, repository_util, ) +from . import BaseShedAPIController log = logging.getLogger(__name__) -class RepositoryRevisionsController(BaseAPIController): +class RepositoryRevisionsController(BaseShedAPIController): """RESTful controller for interactions with tool shed repository revisions.""" def __get_value_mapper(self, trans) -> Dict[str, Callable]: diff --git a/lib/tool_shed/webapp/api/tools.py b/lib/tool_shed/webapp/api/tools.py index fdbdc1043a54..33099f5e2ada 100644 --- a/lib/tool_shed/webapp/api/tools.py +++ b/lib/tool_shed/webapp/api/tools.py @@ -1,25 +1,23 @@ import json import logging -from collections import namedtuple from galaxy import ( exceptions, util, - web, ) from galaxy.web import ( expose_api, expose_api_raw_anonymous_and_sessionless, require_admin, ) -from galaxy.webapps.base.controller import BaseAPIController +from tool_shed.managers.tools import search from tool_shed.util.shed_index import build_index -from tool_shed.webapp.search.tool_search import ToolSearch +from . import BaseShedAPIController log = logging.getLogger(__name__) -class ToolsController(BaseAPIController): +class ToolsController(BaseShedAPIController): """RESTful controller for interactions with tools in the Tool Shed.""" @expose_api @@ -85,46 +83,9 @@ def index(self, trans, **kwd): raise exceptions.RequestParameterInvalidException('The "page" and "page_size" have to be integers.') return_jsonp = util.asbool(kwd.get("jsonp", False)) callback = kwd.get("callback", "callback") - search_results = self._search(trans, q, page, page_size) + search_results = search(trans, q, page, page_size) if return_jsonp: response = str(f"{callback}({json.dumps(search_results)});") else: response = json.dumps(search_results) return response - - def _search(self, trans, q, page=1, page_size=10): - """ - Perform the search over TS tools index. 
- Note that search works over the Whoosh index which you have - to pre-create with scripts/tool_shed/build_ts_whoosh_index.sh manually. - Also TS config option toolshed_search_on has to be True and - whoosh_index_dir has to be specified. - """ - conf = self.app.config - if not conf.toolshed_search_on: - raise exceptions.ConfigDoesNotAllowException( - "Searching the TS through the API is turned off for this instance." - ) - if not conf.whoosh_index_dir: - raise exceptions.ConfigDoesNotAllowException( - "There is no directory for the search index specified. Please contact the administrator." - ) - search_term = q.strip() - if len(search_term) < 1: - raise exceptions.RequestParameterInvalidException("The search term has to be at least one character long.") - - tool_search = ToolSearch() - - Boosts = namedtuple( - "Boosts", ["tool_name_boost", "tool_description_boost", "tool_help_boost", "tool_repo_owner_username_boost"] - ) - boosts = Boosts( - float(conf.get("tool_name_boost", 1.2)), - float(conf.get("tool_description_boost", 0.6)), - float(conf.get("tool_help_boost", 0.4)), - float(conf.get("tool_repo_owner_username_boost", 0.3)), - ) - - results = tool_search.search(trans, search_term, page, page_size, boosts) - results["hostname"] = web.url_for("/", qualified=True) - return results diff --git a/lib/tool_shed/webapp/api/users.py b/lib/tool_shed/webapp/api/users.py index 558c2518aadb..54f1fba3ffe7 100644 --- a/lib/tool_shed/webapp/api/users.py +++ b/lib/tool_shed/webapp/api/users.py @@ -2,23 +2,20 @@ import tool_shed.util.shed_util_common as suc from galaxy import ( - exceptions, util, web, ) -from galaxy.model.base import transaction -from galaxy.security.validate_user_input import ( - validate_email, - validate_password, - validate_publicname, +from tool_shed.managers.users import ( + api_create_user, + index, ) -from galaxy.webapps.base.controller import BaseAPIController -from tool_shed.managers.users import create_user +from tool_shed_client.schema import CreateUserRequest +from . import BaseShedAPIController log = logging.getLogger(__name__) -class UsersController(BaseAPIController): +class UsersController(BaseShedAPIController): """RESTful controller for interactions with users in the Tool Shed.""" @web.expose_api @@ -39,20 +36,18 @@ def create(self, trans, payload, **kwd): email = payload.get("email", "") password = payload.get("password", "") username = payload.get("username", "") - message = self.__validate(trans, email=email, password=password, confirm=password, username=username) - if message: - raise exceptions.RequestParameterInvalidException(message) - # Create the user. - user = self.__create_user(trans, email, username, password) - user_dict = user.to_dict(view="element", value_mapper=self.__get_value_mapper(trans)) + request = CreateUserRequest( + email=email, + username=username, + password=password, + ) + user = api_create_user(trans, request) + user_dict = user.dict() user_dict["message"] = f"User '{str(user.username)}' has been created." 
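+        # "user" here is the ApiUser schema object returned by api_create_user, not the ORM model.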
user_dict["url"] = web.url_for(controller="users", action="show", id=trans.security.encode_id(user.id)) return user_dict - def __create_user(self, trans, email, username, password): - return create_user(trans.app, email, username, password) - def __get_value_mapper(self, trans): value_mapper = {"id": trans.security.encode_id} return value_mapper @@ -66,12 +61,8 @@ def index(self, trans, deleted=False, **kwd): # Example URL: http://localhost:9009/api/users user_dicts = [] deleted = util.asbool(deleted) - for user in ( - trans.sa_session.query(trans.app.model.User) - .filter(trans.app.model.User.table.c.deleted == deleted) - .order_by(trans.app.model.User.table.c.username) - ): - user_dict = user.to_dict(view="collection", value_mapper=self.__get_value_mapper(trans)) + for user in index(trans.app, deleted): + user_dict = user.dict() user_dict["url"] = web.url_for(controller="users", action="show", id=trans.security.encode_id(user.id)) user_dicts.append(user_dict) return user_dicts @@ -94,15 +85,3 @@ def show(self, trans, id, **kwd): user_dict = user.to_dict(view="element", value_mapper=self.__get_value_mapper(trans)) user_dict["url"] = web.url_for(controller="users", action="show", id=trans.security.encode_id(user.id)) return user_dict - - def __validate(self, trans, email, password, confirm, username): - if username in ["repos"]: - return f"The term '{username}' is a reserved word in the Tool Shed, so it cannot be used as a public user name." - message = "\n".join( - ( - validate_email(trans, email), - validate_password(trans, password, confirm), - validate_publicname(trans, username), - ) - ).rstrip() - return message diff --git a/lib/tool_shed/webapp/controllers/repository.py b/lib/tool_shed/webapp/controllers/repository.py index 16c4bffdfd65..b6b67d2bd0f1 100644 --- a/lib/tool_shed/webapp/controllers/repository.py +++ b/lib/tool_shed/webapp/controllers/repository.py @@ -35,6 +35,7 @@ from galaxy.web.legacy_framework import grids from galaxy.webapps.base.controller import BaseUIController from tool_shed.dependencies.repository import relation_builder +from tool_shed.managers.repositories import readmes from tool_shed.metadata import repository_metadata_manager from tool_shed.tools import ( tool_validator, @@ -1182,16 +1183,7 @@ def get_readme_files(self, trans, **kwd): changeset_revision = kwd.get("changeset_revision", None) if repository_name is not None and repository_owner is not None and changeset_revision is not None: repository = repository_util.get_repository_by_name_and_owner(trans.app, repository_name, repository_owner) - if repository: - repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( - trans.app, trans.security.encode_id(repository.id), changeset_revision - ) - if repository_metadata: - metadata = repository_metadata.metadata - if metadata: - return readme_util.build_readme_files_dict( - trans.app, repository, changeset_revision, repository_metadata.metadata - ) + return readmes(trans.app, repository, changeset_revision) return {} @web.json diff --git a/lib/tool_shed/webapp/search/tool_search.py b/lib/tool_shed/webapp/search/tool_search.py index bbe53d7df1d5..bf3617c4cd38 100644 --- a/lib/tool_shed/webapp/search/tool_search.py +++ b/lib/tool_shed/webapp/search/tool_search.py @@ -31,7 +31,7 @@ class ToolSearch: - def search(self, trans, search_term, page, page_size, boosts): + def search(self, app, search_term, page, page_size, boosts): """ Perform the search on the given search_term @@ -39,7 +39,7 @@ def search(self, trans, search_term, page, 
page_size, boosts): :returns results: dictionary containing number of hits, hits themselves and matched terms for each """ - tool_index_dir = os.path.join(trans.app.config.whoosh_index_dir, "tools") + tool_index_dir = os.path.join(app.config.whoosh_index_dir, "tools") index_exists = whoosh.index.exists_in(tool_index_dir) if index_exists: index = whoosh.index.open_dir(tool_index_dir) diff --git a/lib/tool_shed/webapp/security/__init__.py b/lib/tool_shed/webapp/security/__init__.py index b8bbbd30cc6e..a2cea3ade3bf 100644 --- a/lib/tool_shed/webapp/security/__init__.py +++ b/lib/tool_shed/webapp/security/__init__.py @@ -1,5 +1,6 @@ """Tool Shed Security""" import logging +from typing import List from sqlalchemy import ( and_, @@ -239,9 +240,12 @@ def set_entity_user_associations(self, users=None, roles=None, groups=None, dele for group in groups: self.associate_components(user=user, group=group) + def usernames_that_can_push(self, repository) -> List[str]: + return listify(repository.allow_push()) + def can_push(self, app, user, repository): if user: - return user.username in listify(repository.allow_push()) + return user.username in self.usernames_that_can_push(repository) return False def user_can_administer_repository(self, user, repository): diff --git a/lib/tool_shed_client/schema/__init__.py b/lib/tool_shed_client/schema/__init__.py index 870bacd5ea90..2651ff1c81b1 100644 --- a/lib/tool_shed_client/schema/__init__.py +++ b/lib/tool_shed_client/schema/__init__.py @@ -35,6 +35,10 @@ class Repository(BaseModel): create_time: str +class RepositoryRevisionReadmes(BaseModel): + __root__: Dict[str, str] + + class CreateUserRequest(BaseModel): username: str email: str From 8f89935de32174c7541345535daf8f70ef01cf3f Mon Sep 17 00:00:00 2001 From: John Chilton Date: Tue, 20 Dec 2022 11:27:43 -0500 Subject: [PATCH 47/73] metadata_generator: mark private method as internal --- .../metadata/repository_metadata_manager.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/lib/tool_shed/metadata/repository_metadata_manager.py b/lib/tool_shed/metadata/repository_metadata_manager.py index 67e4e1f54c6a..385af54da9ad 100644 --- a/lib/tool_shed/metadata/repository_metadata_manager.py +++ b/lib/tool_shed/metadata/repository_metadata_manager.py @@ -269,13 +269,13 @@ def __init__( self.SUBSET = "subset" self.SUBSET_VALUES = [self.EQUAL, self.SUBSET] - def add_tool_versions(self, id, repository_metadata, changeset_revisions): + def _add_tool_versions(self, id, repository_metadata, changeset_revisions): # Build a dictionary of { 'tool id' : 'parent tool id' } pairs for each tool in repository_metadata. metadata = repository_metadata.metadata tool_versions_dict = {} for tool_dict in metadata.get("tools", []): # We have at least 2 changeset revisions to compare tool guids and tool ids. - parent_id = self.get_parent_id( + parent_id = self._get_parent_id( id, tool_dict["id"], tool_dict["version"], tool_dict["guid"], changeset_revisions ) tool_versions_dict[tool_dict["guid"]] = parent_id @@ -582,7 +582,7 @@ def different_revision_defines_tip_only_repository_dependency(self, rd_tup, repo return isinstance(repository_type_class, TipOnly) return False - def get_parent_id(self, id, old_id, version, guid, changeset_revisions): + def _get_parent_id(self, id, old_id, version, guid, changeset_revisions): parent_id = None # Compare from most recent to oldest. 
changeset_revisions.reverse() @@ -914,9 +914,9 @@ def reset_all_metadata_on_repository_in_tool_shed(self, repository_clone_url=Non self._clean_repository_metadata(changeset_revisions) # Set tool version information for all downloadable changeset revisions. Get the list of changeset # revisions from the changelog. - self.reset_all_tool_versions(repo) + self._reset_all_tool_versions(repo) - def reset_all_tool_versions(self, repo): + def _reset_all_tool_versions(self, repo): """Reset tool version lineage for those changeset revisions that include valid tools.""" assert self.repository encoded_repository_id = self.app.security.encode_id(self.repository.id) @@ -953,7 +953,7 @@ def reset_all_tool_versions(self, repo): else: log.info(f"reset_all... tool_dicts is {tool_dicts}") for tool_dict in tool_dicts: - parent_id = self.get_parent_id( + parent_id = self._get_parent_id( encoded_repository_id, tool_dict["id"], tool_dict["version"], @@ -1099,7 +1099,7 @@ def set_repository_metadata(self, host, content_alert_str="", **kwd): self.app, encoded_id, changeset_revision ): changeset_revisions.append(changeset_revision) - self.add_tool_versions(encoded_id, repository_metadata, changeset_revisions) + self._add_tool_versions(encoded_id, repository_metadata, changeset_revisions) elif len(repo) == 1 and not self.invalid_file_tups: message = "Revision %s includes no Galaxy utilities for which metadata can " % str( self.repository.tip() From bb468a28cdd1a537c8cfc556cc800a6c3afd7d25 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Mon, 26 Dec 2022 19:38:53 -0500 Subject: [PATCH 48/73] Less encoding in toolshed app to adapt to functions... --- .../metadata/repository_metadata_manager.py | 40 +++++++++---------- lib/tool_shed/util/repository_util.py | 8 ++-- 2 files changed, 22 insertions(+), 26 deletions(-) diff --git a/lib/tool_shed/metadata/repository_metadata_manager.py b/lib/tool_shed/metadata/repository_metadata_manager.py index 385af54da9ad..210ccfe03594 100644 --- a/lib/tool_shed/metadata/repository_metadata_manager.py +++ b/lib/tool_shed/metadata/repository_metadata_manager.py @@ -35,6 +35,7 @@ shed_util_common as suc, tool_util, ) +from tool_shed.util.metadata_util import repository_metadata_by_changeset_revision from tool_shed.webapp.model import Repository log = logging.getLogger(__name__) @@ -269,7 +270,7 @@ def __init__( self.SUBSET = "subset" self.SUBSET_VALUES = [self.EQUAL, self.SUBSET] - def _add_tool_versions(self, id, repository_metadata, changeset_revisions): + def _add_tool_versions(self, id: int, repository_metadata, changeset_revisions): # Build a dictionary of { 'tool id' : 'parent tool id' } pairs for each tool in repository_metadata. metadata = repository_metadata.metadata tool_versions_dict = {} @@ -517,8 +518,8 @@ def create_or_update_repository_metadata(self, changeset_revision, metadata_dict else: downloadable = False assert self.repository - repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( - self.app, self.app.security.encode_id(self.repository.id), changeset_revision + repository_metadata = repository_metadata_by_changeset_revision( + self.app.model, self.repository.id, changeset_revision ) if repository_metadata: repository_metadata.metadata = metadata_dict @@ -543,6 +544,7 @@ def create_or_update_repository_metadata(self, changeset_revision, metadata_dict assert repository_metadata # Always set the default values for the following columns. When resetting all metadata # on a repository this will reset the values. 
+ assert repository_metadata repository_metadata.missing_test_components = False self.sa_session.add(repository_metadata) session = self.sa_session() @@ -582,13 +584,13 @@ def different_revision_defines_tip_only_repository_dependency(self, rd_tup, repo return isinstance(repository_type_class, TipOnly) return False - def _get_parent_id(self, id, old_id, version, guid, changeset_revisions): + def _get_parent_id(self, id: int, old_id, version, guid, changeset_revisions): parent_id = None # Compare from most recent to oldest. changeset_revisions.reverse() for changeset_revision in changeset_revisions: - repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( - self.app, id, changeset_revision + repository_metadata = repository_metadata_by_changeset_revision( + self.app.model, id, changeset_revision ) assert repository_metadata metadata = repository_metadata.metadata @@ -919,12 +921,11 @@ def reset_all_metadata_on_repository_in_tool_shed(self, repository_clone_url=Non def _reset_all_tool_versions(self, repo): """Reset tool version lineage for those changeset revisions that include valid tools.""" assert self.repository - encoded_repository_id = self.app.security.encode_id(self.repository.id) changeset_revisions_that_contain_tools = [] for changeset in repo.changelog: changeset_revision = str(repo[changeset]) - repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( - self.app, encoded_repository_id, changeset_revision + repository_metadata = repository_metadata_by_changeset_revision( + self.app.model, self.repository.id, changeset_revision ) log.info(f"changeset_is {changeset_revision} with rm {repository_metadata}") if repository_metadata: @@ -937,8 +938,8 @@ def _reset_all_tool_versions(self, repo): # { 'tool id' : 'parent tool id' } pairs for each tool in each changeset revision. for index, changeset_revision in enumerate(changeset_revisions_that_contain_tools): tool_versions_dict = {} - repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( - self.app, encoded_repository_id, changeset_revision + repository_metadata = repository_metadata_by_changeset_revision( + self.app.model, self.repository.id, changeset_revision ) assert repository_metadata metadata = repository_metadata.metadata @@ -949,12 +950,10 @@ def _reset_all_tool_versions(self, repo): # first changeset_revision will be the "old_id" in the tool config. for tool_dict in tool_dicts: tool_versions_dict[tool_dict["guid"]] = tool_dict["id"] - log.info(f"reset_all... tool_dicts is {tool_dicts}") else: - log.info(f"reset_all... 
tool_dicts is {tool_dicts}") for tool_dict in tool_dicts: parent_id = self._get_parent_id( - encoded_repository_id, + self.repository.id, tool_dict["id"], tool_dict["version"], tool_dict["guid"], @@ -962,9 +961,6 @@ def _reset_all_tool_versions(self, repo): ) tool_versions_dict[tool_dict["guid"]] = parent_id if tool_versions_dict: - print( - f"\n reset_all_tool_versions:: id: {encoded_repository_id} rmi: {repository_metadata.id} d: {tool_versions_dict}\n" - ) repository_metadata.tool_versions = tool_versions_dict self.sa_session.add(repository_metadata) session = self.sa_session() @@ -1032,7 +1028,7 @@ def set_repository_metadata(self, host, content_alert_str="", **kwd): assert self.repository message = "" status = "done" - encoded_id = self.app.security.encode_id(self.repository.id) + repository_id = self.repository.id repo = self.repository.hg_repo self.generate_metadata_for_changeset_revision() if self.metadata_dict: @@ -1052,7 +1048,7 @@ def set_repository_metadata(self, host, content_alert_str="", **kwd): else: # Update the latest stored repository metadata with the contents and attributes of self.metadata_dict. repository_metadata = metadata_util.get_latest_repository_metadata( - self.app, self.repository.id, downloadable=False + self.app, repository_id, downloadable=False ) if repository_metadata: downloadable = metadata_util.is_downloadable(self.metadata_dict) @@ -1095,11 +1091,11 @@ def set_repository_metadata(self, host, content_alert_str="", **kwd): changeset_revisions = [] for changeset in repo.changelog: changeset_revision = str(repo[changeset]) - if metadata_util.get_repository_metadata_by_changeset_revision( - self.app, encoded_id, changeset_revision + if repository_metadata_by_changeset_revision( + self.app.model, repository_id, changeset_revision ): changeset_revisions.append(changeset_revision) - self._add_tool_versions(encoded_id, repository_metadata, changeset_revisions) + self._add_tool_versions(repository_id, repository_metadata, changeset_revisions) elif len(repo) == 1 and not self.invalid_file_tups: message = "Revision %s includes no Galaxy utilities for which metadata can " % str( self.repository.tip() diff --git a/lib/tool_shed/util/repository_util.py b/lib/tool_shed/util/repository_util.py index afd773f05242..835c6c207377 100644 --- a/lib/tool_shed/util/repository_util.py +++ b/lib/tool_shed/util/repository_util.py @@ -58,6 +58,7 @@ from tool_shed.util.metadata_util import ( get_next_downloadable_changeset_revision, get_repository_metadata_by_changeset_revision, + repository_metadata_by_changeset_revision, ) if TYPE_CHECKING: @@ -109,8 +110,8 @@ def create_repo_info_dict( repository = get_repository_by_name_and_owner(app, repository_name, repository_owner) if app.name == "tool_shed": # We're in the tool shed. 
- repository_metadata = get_repository_metadata_by_changeset_revision( - app, app.security.encode_id(repository.id), changeset_revision + repository_metadata = repository_metadata_by_changeset_revision( + app.model, repository.id, changeset_revision ) if repository_metadata: metadata = repository_metadata.metadata @@ -368,8 +369,7 @@ def get_repositories_by_category( repository_dict = repository.to_dict(value_mapper=default_value_mapper) repository_dict["metadata"] = {} for changeset, changehash in repository.installable_revisions(app): - encoded_id = app.security.encode_id(repository.id) - metadata = get_repository_metadata_by_changeset_revision(app, encoded_id, changehash) + metadata = repository_metadata_by_changeset_revision(app.model, repository.id, changehash) assert metadata repository_dict["metadata"][f"{changeset}:{changehash}"] = metadata.to_dict( value_mapper=default_value_mapper From 9db5e9cf006f1339d026c5d7f13c23b78c609477 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Tue, 20 Dec 2022 12:06:10 -0500 Subject: [PATCH 49/73] metadata_generator: refactor helper method out... --- .../metadata/repository_metadata_manager.py | 36 +++++++++---------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/lib/tool_shed/metadata/repository_metadata_manager.py b/lib/tool_shed/metadata/repository_metadata_manager.py index 210ccfe03594..dcbe03a3b11c 100644 --- a/lib/tool_shed/metadata/repository_metadata_manager.py +++ b/lib/tool_shed/metadata/repository_metadata_manager.py @@ -589,9 +589,7 @@ def _get_parent_id(self, id: int, old_id, version, guid, changeset_revisions): # Compare from most recent to oldest. changeset_revisions.reverse() for changeset_revision in changeset_revisions: - repository_metadata = repository_metadata_by_changeset_revision( - self.app.model, id, changeset_revision - ) + repository_metadata = repository_metadata_by_changeset_revision(self.app.model, id, changeset_revision) assert repository_metadata metadata = repository_metadata.metadata tools_dicts = metadata.get("tools", []) @@ -921,18 +919,9 @@ def reset_all_metadata_on_repository_in_tool_shed(self, repository_clone_url=Non def _reset_all_tool_versions(self, repo): """Reset tool version lineage for those changeset revisions that include valid tools.""" assert self.repository - changeset_revisions_that_contain_tools = [] - for changeset in repo.changelog: - changeset_revision = str(repo[changeset]) - repository_metadata = repository_metadata_by_changeset_revision( - self.app.model, self.repository.id, changeset_revision - ) - log.info(f"changeset_is {changeset_revision} with rm {repository_metadata}") - if repository_metadata: - metadata = repository_metadata.metadata - if metadata: - if metadata.get("tools", None): - changeset_revisions_that_contain_tools.append(changeset_revision) + changeset_revisions_that_contain_tools = _get_changeset_revisions_that_contain_tools( + self.app, repo, self.repository + ) # The list of changeset_revisions_that_contain_tools is now filtered to contain only those that # are downloadable and contain tools. If a repository includes tools, build a dictionary of # { 'tool id' : 'parent tool id' } pairs for each tool in each changeset revision. 
@@ -1091,9 +1080,7 @@ def set_repository_metadata(self, host, content_alert_str="", **kwd): changeset_revisions = [] for changeset in repo.changelog: changeset_revision = str(repo[changeset]) - if repository_metadata_by_changeset_revision( - self.app.model, repository_id, changeset_revision - ): + if repository_metadata_by_changeset_revision(self.app.model, repository_id, changeset_revision): changeset_revisions.append(changeset_revision) self._add_tool_versions(repository_id, repository_metadata, changeset_revisions) elif len(repo) == 1 and not self.invalid_file_tups: @@ -1113,3 +1100,16 @@ def set_repository_metadata_due_to_new_tip(self, host, content_alert_str=None, * """Set metadata on the tip of self.repository in the tool shed.""" error_message, status = self.set_repository_metadata(host, content_alert_str=content_alert_str, **kwd) return status, error_message + + +def _get_changeset_revisions_that_contain_tools(app: "ToolShedApp", repo, repository) -> List[str]: + changeset_revisions_that_contain_tools = [] + for changeset in repo.changelog: + changeset_revision = str(repo[changeset]) + repository_metadata = repository_metadata_by_changeset_revision(app.model, repository.id, changeset_revision) + if repository_metadata: + metadata = repository_metadata.metadata + if metadata: + if metadata.get("tools", None): + changeset_revisions_that_contain_tools.append(changeset_revision) + return changeset_revisions_that_contain_tools From 68899ff8fd1ef9b2904f1704daf603e4cb77c1f2 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Fri, 19 May 2023 10:09:22 -0400 Subject: [PATCH 50/73] More format fixes. --- lib/galaxy/tool_shed/metadata/metadata_generator.py | 5 +---- lib/tool_shed/metadata/repository_metadata_manager.py | 10 ++-------- 2 files changed, 3 insertions(+), 12 deletions(-) diff --git a/lib/galaxy/tool_shed/metadata/metadata_generator.py b/lib/galaxy/tool_shed/metadata/metadata_generator.py index 39d40555b841..d797d1eba98c 100644 --- a/lib/galaxy/tool_shed/metadata/metadata_generator.py +++ b/lib/galaxy/tool_shed/metadata/metadata_generator.py @@ -1009,10 +1009,7 @@ def handle_repository_elem(self, repository_elem, only_if_compiling_contained_td # if an installed repository cannot be found. This may not be ideal because the tool # shed may have simply been inaccessible when metadata was being generated for the # installed tool shed repository. - error_message = ( - "Ignoring invalid repository dependency definition for tool shed %s, name %s, owner %s, " - % (toolshed, name, owner) - ) + error_message = f"Ignoring invalid repository dependency definition for tool shed {toolshed}, name {name}, owner {owner}, " error_message += f"changeset revision {changeset_revision}." log.debug(error_message) is_valid = False diff --git a/lib/tool_shed/metadata/repository_metadata_manager.py b/lib/tool_shed/metadata/repository_metadata_manager.py index dcbe03a3b11c..fe930fd6e390 100644 --- a/lib/tool_shed/metadata/repository_metadata_manager.py +++ b/lib/tool_shed/metadata/repository_metadata_manager.py @@ -207,10 +207,7 @@ def handle_repository_elem(self, repository_elem, only_if_compiling_contained_td found = True break if not found: - error_message = ( - "Ignoring repository dependency definition for tool shed %s, name %s, owner %s, " - % (toolshed, name, owner) - ) + error_message = f"Ignoring repository dependency definition for tool shed {toolshed}, name {name}, owner {owner}, " error_message += ( f"changeset revision {changeset_revision} because the changeset revision is invalid. 
" ) @@ -220,10 +217,7 @@ def handle_repository_elem(self, repository_elem, only_if_compiling_contained_td else: # Repository dependencies are currently supported within a single tool shed. error_message = "Repository dependencies are currently supported only within the same tool shed. Ignoring " - error_message += ( - "repository dependency definition for tool shed %s, name %s, owner %s, changeset revision %s. " - % (toolshed, name, owner, changeset_revision) - ) + error_message += f"repository dependency definition for tool shed {toolshed}, name {name}, owner {owner}, changeset revision {changeset_revision}. " log.debug(error_message) is_valid = False return repository_dependency_tup, is_valid, error_message From 688d2375ae64ef7885544ca9cb9d41fa33bb7ba5 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Fri, 17 Mar 2023 10:35:24 -0400 Subject: [PATCH 51/73] repository_metadata_manager.py - linting fix --- lib/tool_shed/metadata/repository_metadata_manager.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/lib/tool_shed/metadata/repository_metadata_manager.py b/lib/tool_shed/metadata/repository_metadata_manager.py index fe930fd6e390..f28aa58da00f 100644 --- a/lib/tool_shed/metadata/repository_metadata_manager.py +++ b/lib/tool_shed/metadata/repository_metadata_manager.py @@ -153,10 +153,8 @@ def handle_repository_elem(self, repository_elem, only_if_compiling_contained_td .one() ) except Exception: - error_message = "Ignoring repository dependency definition for tool shed %s, name %s, owner %s, " % ( - toolshed, - name, - owner, + error_message = ( + f"Ignoring repository dependency definition for tool shed {toolshed}, name {name}, owner {owner}, " ) error_message += f"changeset revision {changeset_revision} because the owner is invalid." log.debug(error_message) From 5b3c34dfda448bd8161049f683d63943d1875c0c Mon Sep 17 00:00:00 2001 From: John Chilton Date: Mon, 19 Dec 2022 20:14:50 -0500 Subject: [PATCH 52/73] Remove admin mako helper that is unused. --- templates/webapps/tool_shed/repository/common.mako | 5 ----- 1 file changed, 5 deletions(-) diff --git a/templates/webapps/tool_shed/repository/common.mako b/templates/webapps/tool_shed/repository/common.mako index 677fcdfb08a7..88761598d423 100644 --- a/templates/webapps/tool_shed/repository/common.mako +++ b/templates/webapps/tool_shed/repository/common.mako @@ -230,11 +230,6 @@ ${ sharable_link } -<%def name="render_clone_str( repository )"><% - from tool_shed.util.common_util import generate_clone_url_for_repository_in_tool_shed - clone_str = generate_clone_url_for_repository_in_tool_shed( trans.user, repository ) - %>hg clone ${ clone_str } - <%def name="render_folder( folder, folder_pad, parent=None, row_counter=None, is_root_folder=False, render_repository_actions_for='tool_shed' )"> <% encoded_id = trans.security.encode_id( folder.id ) From cdf8193b8a489499b803e800c1a347e9d27a6ffe Mon Sep 17 00:00:00 2001 From: John Chilton Date: Tue, 20 Dec 2022 15:50:44 -0500 Subject: [PATCH 53/73] Improve tool shed API client schema. 
--- lib/tool_shed/test/base/populators.py | 4 +++- lib/tool_shed_client/schema/__init__.py | 5 +++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/lib/tool_shed/test/base/populators.py b/lib/tool_shed/test/base/populators.py index 2dd53dc27537..1c4fc2ce5f00 100644 --- a/lib/tool_shed/test/base/populators.py +++ b/lib/tool_shed/test/base/populators.py @@ -17,6 +17,7 @@ from galaxy_test.base import api_asserts from galaxy_test.base.api_util import random_name from tool_shed_client.schema import ( + BuildSearchIndexResponse, Category, CreateCategoryRequest, CreateRepositoryRequest, @@ -214,9 +215,10 @@ def create_repository(self, request: CreateRepositoryRequest) -> Repository: api_asserts.assert_status_code_is_ok(response) return Repository(**response.json()) - def reindex(self): + def reindex(self) -> BuildSearchIndexResponse: index_response = self._admin_api_interactor.put("tools/build_search_index") index_response.raise_for_status() + return BuildSearchIndexResponse(**index_response.json()) def new_category( self, name: Optional[str] = None, description: Optional[str] = None, prefix=DEFAULT_PREFIX diff --git a/lib/tool_shed_client/schema/__init__.py b/lib/tool_shed_client/schema/__init__.py index 2651ff1c81b1..e0cffb690df2 100644 --- a/lib/tool_shed_client/schema/__init__.py +++ b/lib/tool_shed_client/schema/__init__.py @@ -433,3 +433,8 @@ def from_legacy_install_info(legacy_install_info: LegacyInstallInfoTuple) -> Ins metadata_info=metadata_info, repo_info=repo_info, ) + + +class BuildSearchIndexResponse(BaseModel): + repositories_indexed: int + tools_indexed: int From f4031b7a1071b1d14584406b9c42fb2e4f3788b4 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Wed, 28 Dec 2022 15:44:06 -0500 Subject: [PATCH 54/73] Test setup for an allow push API (implement in v1?) 
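The new populator helpers are thin wrappers around a proposed trio of
endpoints - GET, POST and DELETE on
repositories/{encoded_repository_id}/allow_push[/{username}] - matching
the usernames_that_can_push() security method introduced earlier in this
series. In outline the round trip looks like the following (assuming, as
in the new functional test, that repo is a repository owned by the
requesting user and "sharewith" is a registered username):

    populator.allow_user_to_push(repo, "sharewith")
    assert "sharewith" in populator.get_usernames_allowed_to_push(repo)

    populator.disallow_user_to_push(repo, "sharewith")
    assert "sharewith" not in populator.get_usernames_allowed_to_push(repo)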
--- lib/tool_shed/test/base/populators.py | 18 +++++++++++ .../test/functional/test_shed_repositories.py | 32 +++++++++++++++++++ 2 files changed, 50 insertions(+) diff --git a/lib/tool_shed/test/base/populators.py b/lib/tool_shed/test/base/populators.py index 1c4fc2ce5f00..b3eed823b502 100644 --- a/lib/tool_shed/test/base/populators.py +++ b/lib/tool_shed/test/base/populators.py @@ -276,6 +276,24 @@ def repository_index(self, request: Optional[RepositoryIndexRequest]) -> Reposit api_asserts.assert_status_code_is_ok(repository_response) return RepositoryIndexResponse(__root__=repository_response.json()) + def get_usernames_allowed_to_push(self, repository: HasRepositoryId) -> List[str]: + repository_id = self._repository_id(repository) + show_response = self._api_interactor.get(f"repositories/{repository_id}/allow_push") + show_response.raise_for_status() + as_list = show_response.json() + assert isinstance(as_list, list) + return as_list + + def allow_user_to_push(self, repository: HasRepositoryId, username: str) -> None: + repository_id = self._repository_id(repository) + post_response = self._api_interactor.post(f"repositories/{repository_id}/allow_push/{username}") + post_response.raise_for_status() + + def disallow_user_to_push(self, repository: HasRepositoryId, username: str) -> None: + repository_id = self._repository_id(repository) + delete_response = self._api_interactor.delete(f"repositories/{repository_id}/allow_push/{username}") + delete_response.raise_for_status() + def get_metadata(self, repository: HasRepositoryId, downloadable_only=True) -> RepositoryMetadata: repository_id = self._repository_id(repository) metadata_response = self._api_interactor.get( diff --git a/lib/tool_shed/test/functional/test_shed_repositories.py b/lib/tool_shed/test/functional/test_shed_repositories.py index 8fefcddea005..992369bfa3db 100644 --- a/lib/tool_shed/test/functional/test_shed_repositories.py +++ b/lib/tool_shed/test/functional/test_shed_repositories.py @@ -5,6 +5,7 @@ from galaxy.util.compression_utils import CompressedFile from galaxy.util.resources import resource_path from galaxy_test.base import api_asserts +from tool_shed.test.base.api_util import create_user from tool_shed.test.base.populators import repo_tars from ..base.api import ShedApiTestCase @@ -68,6 +69,37 @@ def test_index_simple(self): assert repository.owner == repo.owner assert repository.name == repo.name + def test_allow_push(self): + populator = self.populator + request = { + "email": "sharewith@galaxyproject.org", + "username": "sharewith", + "password": "pAssworD1", + } + create_user(self.admin_api_interactor, request) + request = { + "email": "alsosharewith@galaxyproject.org", + "username": "alsosharewith", + "password": "pAssworD2", + } + create_user(self.admin_api_interactor, request) + + repo = populator.setup_column_maker_repo(prefix="repoforindex") + assert "sharewith" not in populator.get_usernames_allowed_to_push(repo) + assert "alsosharewith" not in populator.get_usernames_allowed_to_push(repo) + + populator.allow_user_to_push(repo, "sharewith") + assert "sharewith" in populator.get_usernames_allowed_to_push(repo) + assert "alsosharewith" not in populator.get_usernames_allowed_to_push(repo) + + populator.allow_user_to_push(repo, "alsosharewith") + assert "sharewith" in populator.get_usernames_allowed_to_push(repo) + assert "alsosharewith" in populator.get_usernames_allowed_to_push(repo) + + populator.disallow_user_to_push(repo, "sharewith") + assert "sharewith" not in 
populator.get_usernames_allowed_to_push(repo) + assert "alsosharewith" in populator.get_usernames_allowed_to_push(repo) + def test_install_info(self): # actually installing requires a whole Galaxy setup and the install manager but # we can test the response validates against the future facing InstallInfo pydandic From 4a5e355900655a67cf95773cc8cfa69c4ada5e1c Mon Sep 17 00:00:00 2001 From: John Chilton Date: Wed, 21 Dec 2022 09:49:53 -0500 Subject: [PATCH 55/73] Reusable tool ID generation helper for TS API tests. --- lib/tool_shed/test/base/populators.py | 22 +++++++++++++++++++ .../test/functional/test_galaxy_install.py | 2 +- 2 files changed, 23 insertions(+), 1 deletion(-) diff --git a/lib/tool_shed/test/base/populators.py b/lib/tool_shed/test/base/populators.py index b3eed823b502..43d4c9eaaa18 100644 --- a/lib/tool_shed/test/base/populators.py +++ b/lib/tool_shed/test/base/populators.py @@ -8,6 +8,7 @@ ) import requests +from typing_extensions import Protocol from galaxy.util.resources import ( files, @@ -72,6 +73,11 @@ def repo_tars(test_data_path: str) -> List[Path]: return tar_paths +class HostsTestToolShed(Protocol): + host: str + port: int + + class ToolShedPopulator: """Utilities for easy fixture creation of tool shed related things.""" @@ -317,6 +323,22 @@ def tool_search(self, search_request: ToolSearchRequest) -> ToolSearchResults: api_asserts.assert_status_code_is_ok(search_response) return ToolSearchResults(**search_response.json()) + def tool_guid( + self, shed_host: HostsTestToolShed, repository: Repository, tool_id: str, tool_version: Optional[str] = None + ) -> str: + owner = repository.owner + name = repository.name + port = shed_host.port + if port in [None, 80, 443]: + host_and_port = shed_host.host + else: + host_and_port = f"{shed_host.host}:{shed_host.port}" + tool_id_base = f"{host_and_port}/repos/{owner}/{name}/{tool_id}" + if tool_version is None: + return tool_id_base + else: + return f"{tool_id_base}/{tool_version}" + def repo_search_query(self, query: str) -> RepositorySearchResults: return self.repo_search(RepositorySearchRequest(q=query)) diff --git a/lib/tool_shed/test/functional/test_galaxy_install.py b/lib/tool_shed/test/functional/test_galaxy_install.py index d46fa0a9b45b..227e41aa49c1 100644 --- a/lib/tool_shed/test/functional/test_galaxy_install.py +++ b/lib/tool_shed/test/functional/test_galaxy_install.py @@ -12,7 +12,7 @@ def test_install_simple_tool(self): self.install_repository(owner, name, latest_install_revision, tool_shed_url=self.url) response = self.galaxy_interactor._get("tools?in_panel=False") response.raise_for_status() - expected_tool = f"{self.host}:{self.port}/repos/{owner}/{name}/Add_a_column1/1.1.0" + expected_tool = populator.tool_guid(self, repository, "Add_a_column1", "1.1.0") tool_ids = [t["id"] for t in response.json()] assert expected_tool in tool_ids, f"Didn't find {expected_tool} in {tool_ids}" From a46425c97172eb0f30576e8b84ff77a39ac78e6f Mon Sep 17 00:00:00 2001 From: John Chilton Date: Fri, 23 Dec 2022 13:10:05 -0500 Subject: [PATCH 56/73] Refactor tool shed interactions for reuse outside of test framework. 
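create_api_key() moves from the test case onto ShedApiInteractor itself,
and the user bootstrapping helpers (create_user, ensure_user_with_email,
email_to_username) move out of base/api.py into base/api_util.py, so that
scripts outside the test framework can drive a Tool Shed given only a URL
and an admin key. A rough sketch of standalone use - the URL and the
admin_api_key value here are placeholder assumptions:

    admin = ShedApiInteractor("http://localhost:9009/", admin_api_key)
    ensure_user_with_email(admin, "dev@example.org", "seekrit")
    # Exchanges email/password for a user key via api/authenticate/baseauth.
    user_key = admin.create_api_key("dev@example.org", "seekrit")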
--- lib/tool_shed/test/base/api.py | 48 +--------------- lib/tool_shed/test/base/api_util.py | 56 +++++++++++++++++++ lib/tool_shed/test/base/populators.py | 5 +- .../test/functional/test_shed_users.py | 8 +-- 4 files changed, 67 insertions(+), 50 deletions(-) diff --git a/lib/tool_shed/test/base/api.py b/lib/tool_shed/test/base/api.py index b14c19dd7c15..ffe4a0b38fad 100644 --- a/lib/tool_shed/test/base/api.py +++ b/lib/tool_shed/test/base/api.py @@ -1,19 +1,14 @@ import os -import re from typing import ( Any, Dict, Optional, ) -from urllib.parse import urljoin import pytest -import requests from galaxy.tool_util.verify.interactor import GalaxyInteractorApi -from galaxy_test.base import api_asserts from galaxy_test.base.api_util import ( - baseauth_headers, get_admin_api_key as get_galaxy_admin_api_key, get_user_api_key as get_galaxy_user_key, TEST_USER, @@ -22,6 +17,7 @@ from galaxy_test.driver.testcase import DrivenFunctionalTestCase from . import driver from .api_util import ( + ensure_user_with_email, get_admin_api_key, get_user_api_key, ShedApiInteractor, @@ -49,12 +45,12 @@ def api_interactor(self) -> ShedApiInteractor: email = TEST_USER password = "testpassword" ensure_user_with_email(self.admin_api_interactor, email, password) - user_api_key = self._api_key(email, password) + user_api_key = self.admin_api_interactor.create_api_key(email, password) return self._api_interactor(user_api_key) def _api_interactor_by_credentials(self, email: str, password: str) -> ShedApiInteractor: ensure_user_with_email(self.admin_api_interactor, email, password) - user_api_key = self._api_key(email, password) + user_api_key = self.admin_api_interactor.create_api_key(email, password) return self._api_interactor(user_api_key) def _api_interactor(self, api_key: str) -> ShedApiInteractor: @@ -63,15 +59,6 @@ def _api_interactor(self, api_key: str) -> ShedApiInteractor: def _get_populator(self, user_api_interactor) -> ToolShedPopulator: return ToolShedPopulator(self.admin_api_interactor, user_api_interactor) - def _api_key(self, email: str, password: str) -> str: - headers = baseauth_headers(email, password) - url = urljoin(self.url, "api/authenticate/baseauth") - auth_response = requests.get(url, headers=headers) - api_asserts.assert_status_code_is(auth_response, 200) - auth_dict = auth_response.json() - api_asserts.assert_has_keys(auth_dict, "api_key") - return auth_dict["api_key"] - def setUp(self): host = os.environ.get("TOOL_SHED_TEST_HOST") assert host @@ -100,35 +87,6 @@ def _get_driver(self, tool_shed_test_driver): self._test_driver = tool_shed_test_driver -def ensure_user_with_email(admin_api_interactor: ShedApiInteractor, email: str, password: Optional[str]): - all_users_response = admin_api_interactor.get("users") - try: - all_users_response.raise_for_status() - except requests.exceptions.HTTPError as e: - raise Exception( - f"Failed to verify user with email [{email}] exists - perhaps you're targetting the wrong Galaxy server or using an incorrect admin API key. 
HTTP error: {e}" - ) - username = email_to_username(email) - all_users = all_users_response.json() - try: - test_user = [user for user in all_users if user["username"] == username][0] - except IndexError: - password = password or "testpass" - data = dict( - remote_user_email=email, - email=email, - password=password, - username=username, - ) - test_user = admin_api_interactor.post("users", json=data).json() - return test_user - - -def email_to_username(email: str) -> str: - """Pattern used for test user generation - does not use the API.""" - return re.sub(r"[^a-z-\d]", "--", email.lower()) - - class ShedGalaxyInteractorApi(GalaxyInteractorApi): def __init__(self, galaxy_url: str): interactor_kwds: Dict[str, Any] = {} diff --git a/lib/tool_shed/test/base/api_util.py b/lib/tool_shed/test/base/api_util.py index 412b92af4dad..4c65f452fc1e 100644 --- a/lib/tool_shed/test/base/api_util.py +++ b/lib/tool_shed/test/base/api_util.py @@ -1,6 +1,9 @@ import os +import re from functools import wraps from typing import ( + Any, + Dict, Callable, Optional, ) @@ -8,6 +11,13 @@ import requests +from galaxy_test.base.api_asserts import ( + assert_has_keys, + assert_status_code_is, + assert_status_code_is_ok, +) +from galaxy_test.base.api_util import baseauth_headers + DEFAULT_TOOL_SHED_BOOTSTRAP_ADMIN_API_KEY = "TEST1234" DEFAULT_TOOL_SHED_USER_API_KEY = None @@ -52,6 +62,15 @@ def __init__(self, url: str, api_key: str): self.url = url self.api_key = api_key + def create_api_key(self, email: str, password: str) -> str: + headers = baseauth_headers(email, password) + url = urljoin(self.url, "api/authenticate/baseauth") + auth_response = requests.get(url, headers=headers) + assert_status_code_is(auth_response, 200) + auth_dict = auth_response.json() + assert_has_keys(auth_dict, "api_key") + return auth_dict["api_key"] + def _append_headers(self, kwd): if "admin" in kwd: key = get_admin_api_key() @@ -66,3 +85,40 @@ def _append_headers(self, kwd): get = decorate_method(requests.get) post = decorate_method(requests.post) put = decorate_method(requests.put) + + +def create_user(admin_interactor: ShedApiInteractor, user_dict: Dict[str, Any], assert_ok=True) -> Dict[str, Any]: + email = user_dict["email"] + if "password" not in user_dict: + user_dict["password"] = "testpass" + if "remote_user_email" not in user_dict: + user_dict["remote_user_email"] = email + response = admin_interactor.post("users", json=user_dict) + if assert_ok: + assert_status_code_is_ok(response) + return response.json() + + +def ensure_user_with_email( + admin_api_interactor: ShedApiInteractor, email: str, password: Optional[str] +) -> Dict[str, Any]: + all_users_response = admin_api_interactor.get("users") + try: + all_users_response.raise_for_status() + except requests.exceptions.HTTPError as e: + raise Exception( + f"Failed to verify user with email [{email}] exists - perhaps you're targeting the wrong ToolShed server or using an incorrect admin API key. 
HTTP error: {e}" + ) + username = email_to_username(email) + all_users = all_users_response.json() + try: + test_user = [user for user in all_users if user["username"] == username][0] + except IndexError: + request = {"email": email, "username": username, "password": password} + test_user = create_user(admin_api_interactor, request, assert_ok=False) + return test_user + + +def email_to_username(email: str) -> str: + """Pattern used for test user generation - does not use the API.""" + return re.sub(r"[^a-z-\d]", "--", email.lower()) diff --git a/lib/tool_shed/test/base/populators.py b/lib/tool_shed/test/base/populators.py index 43d4c9eaaa18..90d140570aa8 100644 --- a/lib/tool_shed/test/base/populators.py +++ b/lib/tool_shed/test/base/populators.py @@ -41,7 +41,10 @@ ToolSearchRequest, ToolSearchResults, ) -from .api_util import ShedApiInteractor +from .api_util import ( + ensure_user_with_email, + ShedApiInteractor, +) HasRepositoryId = Union[str, Repository] diff --git a/lib/tool_shed/test/functional/test_shed_users.py b/lib/tool_shed/test/functional/test_shed_users.py index fdb8c21373a6..2c326cf6a791 100644 --- a/lib/tool_shed/test/functional/test_shed_users.py +++ b/lib/tool_shed/test/functional/test_shed_users.py @@ -7,12 +7,12 @@ CreateUserRequest, User, ) -from ..base.api import ( +from ..base.api import ShedApiTestCase +from ..base.api_util import ( email_to_username, ensure_user_with_email, - ShedApiTestCase, + get_admin_api_key, ) -from ..base.api_util import get_admin_api_key class TestShedUsersApi(ShedApiTestCase): @@ -78,4 +78,4 @@ def test_simple_index_and_user(self): assert show_response.json()["id"] == user_id def _verify_username_password(self, email, password): - self._api_key(email, password) + self.api_interactor.create_api_key(email, password) From 698d58eed49c70676b6463c67205078d66f719a1 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Mon, 7 Nov 2022 10:16:52 -0500 Subject: [PATCH 57/73] WIP: Implement dry_run in upload API. 
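With dry_run enabled, upload_tar clones the repository into a temporary
directory (tempfile.mkdtemp() plus clone_repository) and commits the
uploaded tar there instead of in the real repository, and
handle_directory_changes grows a dry_run flag that suppresses the email
alerts, so an upload can be fully validated without advancing the
repository tip. The new unit tests cover both outcomes; the success case
looks roughly like this (names as in test_upload_dry_run_ok):

    old_tip = new_repository.tip()
    upload_ok, message, _, _, _, _ = upload_tar(
        shed_app,
        "localhost",
        new_repository.user.username,
        new_repository,
        tar_resource,
        commit_message="Commit Message",
        dry_run=True,
    )
    assert upload_ok
    # The real repository is untouched on a dry run.
    assert new_repository.tip() == old_tip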
--- lib/tool_shed/managers/repositories.py | 1 + lib/tool_shed/util/commit_util.py | 18 ++++++---- lib/tool_shed/util/repository_content_util.py | 10 +++++- test/unit/tool_shed/test_repository_utils.py | 35 +++++++++++++++++++ 4 files changed, 57 insertions(+), 7 deletions(-) diff --git a/lib/tool_shed/managers/repositories.py b/lib/tool_shed/managers/repositories.py index 010232e830b5..a6c52d21bf19 100644 --- a/lib/tool_shed/managers/repositories.py +++ b/lib/tool_shed/managers/repositories.py @@ -532,6 +532,7 @@ def upload_tar_and_set_metadata( repository: Repository, uploaded_file, commit_message: str, + dry_run: bool = False, ): repo_dir = repository.repo_path(app) tip = repository.tip() diff --git a/lib/tool_shed/util/commit_util.py b/lib/tool_shed/util/commit_util.py index 6bed88bcea00..e3e3148343c1 100644 --- a/lib/tool_shed/util/commit_util.py +++ b/lib/tool_shed/util/commit_util.py @@ -172,12 +172,13 @@ def handle_directory_changes( commit_message: str, undesirable_dirs_removed: int, undesirable_files_removed: int, + repo_path: Optional[str] = None, + dry_run: bool = False, ) -> ChangeResponseT: - repo_path = repository.repo_path(app) + repo_path = repo_path or repository.repo_path(app) content_alert_str = "" files_to_remove = [] filenames_in_archive = [os.path.normpath(os.path.join(full_path, name)) for name in filenames_in_archive] - print(filenames_in_archive) if remove_repo_files_not_in_tar and not repository.is_new(): # We have a repository that is not new (it contains files), so discover those files that are in the # repository, but not in the uploaded archive. @@ -211,7 +212,6 @@ def handle_directory_changes( # Check file content to ensure it is appropriate. if check_contents and os.path.isfile(filename_in_archive): content_alert_str += check_file_content_for_html_and_images(filename_in_archive) - print(filename_in_archive) hg_util.add_changeset(repo_path, filename_in_archive) if filename_in_archive.endswith("tool_data_table_conf.xml.sample"): # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded @@ -230,9 +230,15 @@ def handle_directory_changes( ) hg_util.commit_changeset(repo_path, full_path_to_changeset=full_path, username=username, message=commit_message) admin_only = len(repository.downloadable_revisions) != 1 - suc.handle_email_alerts( - app, host, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only - ) + if not dry_run: + suc.handle_email_alerts( + app, + host, + repository, + content_alert_str=content_alert_str, + new_repo_alert=new_repo_alert, + admin_only=admin_only, + ) return True, "", files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed diff --git a/lib/tool_shed/util/repository_content_util.py b/lib/tool_shed/util/repository_content_util.py index 3e863f2e5d00..382add32cf37 100644 --- a/lib/tool_shed/util/repository_content_util.py +++ b/lib/tool_shed/util/repository_content_util.py @@ -1,12 +1,14 @@ import os import shutil import tarfile +import tempfile from typing import ( Optional, TYPE_CHECKING, ) import tool_shed.repository_types.util as rt_util +from galaxy.tool_shed.util.hg_util import clone_repository from galaxy.util import checkers from tool_shed.dependencies.attribute_handlers import ( RepositoryDependencyAttributeHandler, @@ -45,6 +47,7 @@ def upload_tar( repository: "Repository", uploaded_file, commit_message: str, + dry_run: bool = False, remove_repo_files_not_in_tar: bool = True, new_repo_alert: bool = False, tar=None, @@ -71,7 
+74,11 @@ def upload_tar( return False, message, [], "", undesirable_dirs_removed, undesirable_files_removed else: repo_dir = repository.repo_path(app) - full_path = os.path.abspath(repo_dir) + if dry_run: + full_path = tempfile.mkdtemp() + clone_repository(repo_dir, full_path) + else: + full_path = os.path.abspath(repo_dir) undesirable_files_removed = len(check_results.undesirable_files) undesirable_dirs_removed = len(check_results.undesirable_dirs) filenames_in_archive = [ti.name for ti in check_results.valid] @@ -114,4 +121,5 @@ def upload_tar( commit_message, undesirable_dirs_removed, undesirable_files_removed, + repo_path=full_path, ) diff --git a/test/unit/tool_shed/test_repository_utils.py b/test/unit/tool_shed/test_repository_utils.py index 5d21058b0e5b..b56ec162451f 100644 --- a/test/unit/tool_shed/test_repository_utils.py +++ b/test/unit/tool_shed/test_repository_utils.py @@ -61,3 +61,38 @@ def test_upload_fails_if_contains_symlink(shed_app: TestToolShedApp, new_reposit ) assert not upload_ok assert "Invalid paths" in message + + +def test_upload_dry_run_ok(shed_app: TestToolShedApp, new_repository: Repository): + tar_resource = TEST_DATA_FILES.joinpath("column_maker/column_maker.tar") + old_tip = new_repository.tip() + upload_ok, _, _, alert, dirs_removed, files_removed = upload_tar( + shed_app, + "localhost", + new_repository.user.username, + new_repository, + tar_resource, + commit_message="Commit Message", + dry_run=True, + ) + assert upload_ok + assert alert == "" + assert dirs_removed == 0 + assert files_removed == 0 + new_tip = new_repository.tip() + assert old_tip == new_tip + + +def test_upload_dry_run_failed(shed_app: TestToolShedApp, new_repository: Repository): + tar_resource = TEST_DATA_FILES.joinpath("safetar_with_symlink.tar") + upload_ok, message, _, _, _, _ = upload_tar( + shed_app, + "localhost", + new_repository.user.username, + new_repository, + tar_resource, + commit_message="Commit Message", + dry_run=True, + ) + assert not upload_ok + assert "Invalid paths" in message From bad135b9d73968b68253ecb06e13904cd420861d Mon Sep 17 00:00:00 2001 From: John Chilton Date: Sun, 26 Feb 2023 10:50:05 -0500 Subject: [PATCH 58/73] Formatting... --- lib/galaxy/tool_util/toolbox/base.py | 1 - lib/tool_shed/test/base/twilltestcase.py | 7 +++---- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/lib/galaxy/tool_util/toolbox/base.py b/lib/galaxy/tool_util/toolbox/base.py index 912bc1ea35cd..24274e56cd8c 100644 --- a/lib/galaxy/tool_util/toolbox/base.py +++ b/lib/galaxy/tool_util/toolbox/base.py @@ -121,7 +121,6 @@ def handle_tags(self, tool_id, tool_definition_source): class NullToolTagManager(AbstractToolTagManager): - def reset_tags(self) -> None: return None diff --git a/lib/tool_shed/test/base/twilltestcase.py b/lib/tool_shed/test/base/twilltestcase.py index 5df2d26a2ed8..b88a8f05728f 100644 --- a/lib/tool_shed/test/base/twilltestcase.py +++ b/lib/tool_shed/test/base/twilltestcase.py @@ -1978,10 +1978,9 @@ def _assert_has_installed_repository_dependency( changeset: Optional[str] = None, ) -> None: json = self.display_installed_repository_manage_json(installed_repository) - assert "repository_dependencies" in json, "No repository dependencies were defined in %s. manage json is %s" % ( - installed_repository.name, - json, - ) + if "repository_dependencies" not in json: + name = installed_repository.name + raise AssertionError(f"No repository dependencies were defined in {name}. 
manage json is {json}") repository_dependencies = json["repository_dependencies"] found = False for folder in repository_dependencies.get("folders"): From d1d629074bb7a5cacc27c5969cff55be526bbb85 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Wed, 18 Jan 2023 14:47:10 -0500 Subject: [PATCH 59/73] ToolShed API 2.0 --- .github/workflows/toolshed.yaml | 2 + .redocly.lint-ignore.yaml | 3 + Makefile | 4 + .../util/tool_shed/tool_shed_registry.py | 20 + lib/galaxy/webapps/galaxy/api/__init__.py | 19 +- lib/tool_shed/context.py | 70 ++- .../dependencies/attribute_handlers.py | 27 +- .../repository/relation_builder.py | 8 +- lib/tool_shed/managers/categories.py | 32 +- lib/tool_shed/managers/repositories.py | 53 +- lib/tool_shed/managers/tools.py | 2 +- .../metadata/repository_metadata_manager.py | 34 +- lib/tool_shed/test/base/api.py | 28 +- lib/tool_shed/test/base/api_util.py | 15 +- lib/tool_shed/test/base/populators.py | 55 ++ lib/tool_shed/test/functional/api_notes.md | 1 - .../functional/test_shed_configuration.py | 8 + .../test/functional/test_shed_repositories.py | 65 ++- .../test/functional/test_shed_users.py | 28 +- .../1/column_maker.xml | 83 +++ .../column_maker_with_readme/1/readme.txt | 1 + lib/tool_shed/util/common_util.py | 29 +- lib/tool_shed/util/repository_content_util.py | 11 +- lib/tool_shed/util/repository_util.py | 25 +- lib/tool_shed/util/shed_util_common.py | 5 +- lib/tool_shed/webapp/api/categories.py | 10 +- lib/tool_shed/webapp/api/repositories.py | 11 +- lib/tool_shed/webapp/api2/__init__.py | 273 ++++++++++ lib/tool_shed/webapp/api2/authenticate.py | 27 + lib/tool_shed/webapp/api2/categories.py | 80 +++ lib/tool_shed/webapp/api2/configuration.py | 24 + lib/tool_shed/webapp/api2/repositories.py | 494 ++++++++++++++++++ lib/tool_shed/webapp/api2/tools.py | 55 ++ lib/tool_shed/webapp/api2/users.py | 130 +++++ lib/tool_shed/webapp/app.py | 9 +- lib/tool_shed/webapp/buildapp.py | 265 +++++----- lib/tool_shed/webapp/controllers/admin.py | 2 +- .../webapp/controllers/repository.py | 8 +- lib/tool_shed/webapp/fast_app.py | 66 ++- lib/tool_shed/webapp/model/__init__.py | 2 + lib/tool_shed_client/schema/__init__.py | 36 +- scripts/dump_openapi_schema.py | 13 +- test/unit/tool_shed/_util.py | 44 +- test/unit/tool_shed/conftest.py | 6 + .../test_repository_metadata_manager.py | 38 +- test/unit/tool_shed/test_repository_utils.py | 25 +- 46 files changed, 1963 insertions(+), 283 deletions(-) create mode 100644 lib/tool_shed/test/functional/test_shed_configuration.py create mode 100644 lib/tool_shed/test/test_data/repos/column_maker_with_readme/1/column_maker.xml create mode 100644 lib/tool_shed/test/test_data/repos/column_maker_with_readme/1/readme.txt create mode 100644 lib/tool_shed/webapp/api2/__init__.py create mode 100644 lib/tool_shed/webapp/api2/authenticate.py create mode 100644 lib/tool_shed/webapp/api2/categories.py create mode 100644 lib/tool_shed/webapp/api2/configuration.py create mode 100644 lib/tool_shed/webapp/api2/repositories.py create mode 100644 lib/tool_shed/webapp/api2/tools.py create mode 100644 lib/tool_shed/webapp/api2/users.py diff --git a/.github/workflows/toolshed.yaml b/.github/workflows/toolshed.yaml index e8fa6e852a48..c9ee6c6a10eb 100644 --- a/.github/workflows/toolshed.yaml +++ b/.github/workflows/toolshed.yaml @@ -22,6 +22,7 @@ jobs: matrix: python-version: ['3.7'] test-install-client: ['standalone', 'galaxy_api'] + shed-api: ['v1', 'v2'] services: postgres: image: postgres:13 @@ -57,6 +58,7 @@ jobs: run: './run_tests.sh -toolshed' env: 
TOOL_SHED_TEST_INSTALL_CLIENT: ${{ matrix.test-install-client }} + TOOL_SHED_API_VERSION: ${{ matrix.shed-api }} working-directory: 'galaxy root' - uses: actions/upload-artifact@v3 if: failure() diff --git a/.redocly.lint-ignore.yaml b/.redocly.lint-ignore.yaml index 01997597cebb..a09ad94d3248 100644 --- a/.redocly.lint-ignore.yaml +++ b/.redocly.lint-ignore.yaml @@ -14,3 +14,6 @@ _schema.yaml: #/paths/~1api~1histories~1{history_id}~1contents~1{history_content_id}~1metadata_file - '#/paths/~1api~1histories~1{history_id}~1contents~1{id}~1validate' - '#/paths/~1api~1histories~1{history_id}~1contents~1{type}s~1{id}' +_shed_schema.yaml: + no-empty-servers: + - '#/openapi' diff --git a/Makefile b/Makefile index 1e5a171796c5..0dd02e253c9d 100644 --- a/Makefile +++ b/Makefile @@ -182,9 +182,11 @@ endif build-api-schema: $(IN_VENV) python scripts/dump_openapi_schema.py _schema.yaml + $(IN_VENV) python scripts/dump_openapi_schema.py --app shed _shed_schema.yaml remove-api-schema: rm _schema.yaml + rm _shed_schema.yaml update-client-api-schema: client-node-deps build-api-schema $(IN_VENV) cd client && node openapi_to_schema.mjs ../_schema.yaml > src/schema/schema.ts && npx prettier --write src/schema/schema.ts @@ -192,7 +194,9 @@ update-client-api-schema: client-node-deps build-api-schema lint-api-schema: build-api-schema $(IN_VENV) npx --yes @redocly/cli lint _schema.yaml + $(IN_VENV) npx --yes @redocly/cli lint _shed_schema.yaml $(IN_VENV) codespell -I .ci/ignore-spelling.txt _schema.yaml + $(IN_VENV) codespell -I .ci/ignore-spelling.txt _shed_schema.yaml $(MAKE) remove-api-schema update-navigation-schema: client-node-deps diff --git a/lib/galaxy/util/tool_shed/tool_shed_registry.py b/lib/galaxy/util/tool_shed/tool_shed_registry.py index b50393d005d9..c1a60aa56dc2 100644 --- a/lib/galaxy/util/tool_shed/tool_shed_registry.py +++ b/lib/galaxy/util/tool_shed/tool_shed_registry.py @@ -5,6 +5,8 @@ Optional, ) +from typing_extensions import Literal + from galaxy.util import parse_xml_string from galaxy.util.path import StrPath from galaxy.util.tool_shed import common_util @@ -20,6 +22,8 @@ """ +API_VERSION = Literal["v1", "v2"] + class AUTH_TUPLE(NamedTuple): username: str @@ -28,11 +32,13 @@ class AUTH_TUPLE(NamedTuple): class Registry: tool_sheds: Dict[str, str] + tool_shed_api_versions: Dict[str, API_VERSION] tool_sheds_auth: Dict[str, Optional[AUTH_TUPLE]] def __init__(self, config: Optional[StrPath] = None): self.tool_sheds = {} self.tool_sheds_auth = {} + self.tool_shed_api_versions = {} if config: # Parse tool_sheds_conf.xml tree, error_message = parse_xml(config) @@ -48,10 +54,17 @@ def __init__(self, config: Optional[StrPath] = None): try: name = elem.get("name", None) url = elem.get("url", None) + version_raw = elem.get("version", "1") + version: API_VERSION + if version_raw == "1": + version = "v1" + else: + version = "v2" username = elem.get("user", None) password = elem.get("pass", None) if name and url: self.tool_sheds[name] = url + self.tool_shed_api_versions[name] = version self.tool_sheds_auth[name] = None log.debug(f"Loaded reference to tool shed: {name}") if name and url and username and password: @@ -75,6 +88,13 @@ def url_auth(self, url: str) -> Optional[AUTH_TUPLE]: log.debug(f"Invalid url '{str(url)}' received by tool shed registry's url_auth method.") return None + def is_legacy(self, url: str) -> bool: + shed_name = self._shed_name_for_url(url) + if shed_name is None: + return True + else: + return self.tool_shed_api_versions[shed_name] == "v1" + def _shed_name_for_url(self, 
url: str) -> Optional[str]: url_sans_protocol = common_util.remove_protocol_from_tool_shed_url(url) for shed_name, shed_url in self.tool_sheds.items(): diff --git a/lib/galaxy/webapps/galaxy/api/__init__.py b/lib/galaxy/webapps/galaxy/api/__init__.py index 15fe9102b9a7..b38ef704a398 100644 --- a/lib/galaxy/webapps/galaxy/api/__init__.py +++ b/lib/galaxy/webapps/galaxy/api/__init__.py @@ -109,7 +109,7 @@ def __init__(self, callable, dep_type): self.galaxy_type_depends = dep_type -def depends(dep_type: Type[T]) -> T: +def depends(dep_type: Type[T], get_app=get_app) -> T: def _do_resolve(request: Request): return get_app().resolve(dep_type) @@ -178,7 +178,10 @@ def __call__(self, name: str, **path_params): query_params = path_params.pop("query_params", None) try: if qualified: - url = str(self.request.url_for(name, **path_params)) + if name == "/": + url = str(self.request.base_url) + else: + url = str(self.request.url_for(name, **path_params)) else: url = self.request.app.url_path_for(name, **path_params) if query_params: @@ -301,9 +304,11 @@ class RestVerb(str, Enum): options = "OPTIONS" -class Router(InferringRouter): +class FrameworkRouter(InferringRouter): """A FastAPI Inferring Router tailored to Galaxy.""" + admin_user_dependency: Any + def wrap_with_alias(self, verb: RestVerb, *args, alias: Optional[str] = None, **kwd): """ Wraps FastAPI methods with additional alias keyword and require_admin handling. @@ -382,9 +387,9 @@ def _handle_galaxy_kwd(self, kwd): require_admin = kwd.pop("require_admin", False) if require_admin: if "dependencies" in kwd: - kwd["dependencies"].append(AdminUserRequired) + kwd["dependencies"].append(self.admin_user_dependency) else: - kwd["dependencies"] = [AdminUserRequired] + kwd["dependencies"] = [self.admin_user_dependency] return kwd @@ -398,6 +403,10 @@ def cbv(self): return cbv(self) +class Router(FrameworkRouter): + admin_user_dependency = AdminUserRequired + + class APIContentTypeRoute(APIRoute): """ Determines endpoint to match using content-type. diff --git a/lib/tool_shed/context.py b/lib/tool_shed/context.py index df4b07a5ccdc..6991bbd112cd 100644 --- a/lib/tool_shed/context.py +++ b/lib/tool_shed/context.py @@ -2,6 +2,7 @@ from typing import Optional from sqlalchemy.orm import scoped_session +from typing_extensions import Protocol from galaxy.security.idencoding import IdEncodingHelper from galaxy.work.context import ( @@ -16,7 +17,7 @@ from tool_shed.webapp.model.mapping import ToolShedModelMapping -class ProvidesAppContext: +class ProvidesAppContext(Protocol): """For transaction-like objects to provide the shed convenience layer for database and event handling. @@ -45,7 +46,7 @@ def model(self) -> ToolShedModelMapping: return self.app.model -class ProvidesUserContext(ProvidesAppContext): +class ProvidesUserContext(ProvidesAppContext, Protocol): """For transaction-like objects to provide Galaxy convenience layer for reasoning about users. @@ -72,7 +73,13 @@ def user_is_bootstrap_admin(self) -> bool: return not self.anonymous and user is not None and user.bootstrap_admin_user -class SessionRequestContext(ProvidesUserContext): +class ProvidesRepositoriesContext(ProvidesUserContext, Protocol): + @abc.abstractproperty + def repositories_hostname(self) -> str: + """Provide access to hostname used by target mercurial server.""" + + +class SessionRequestContext(ProvidesRepositoriesContext, Protocol): @abc.abstractmethod def get_galaxy_session(self) -> Optional[GalaxySession]: ... 
@@ -84,3 +91,60 @@ def request(self) -> GalaxyAbstractRequest: @abc.abstractproperty def response(self) -> GalaxyAbstractResponse: ... + + @abc.abstractmethod + def url_builder(self): + ... + + +class SessionRequestContextImpl(SessionRequestContext): + _app: ToolShedApp + _user: Optional[User] + _galaxy_session: Optional[GalaxySession] + + def __init__( + self, + app: ToolShedApp, + request: GalaxyAbstractRequest, + response: GalaxyAbstractResponse, + user: Optional[User] = None, + galaxy_session: Optional[GalaxySession] = None, + url_builder=None, + ): + self._app = app + self._user = user + self._galaxy_session = galaxy_session + self._url_builder = url_builder + self.__request = request + self.__response = response + + @property + def app(self) -> ToolShedApp: + return self._app + + @property + def url_builder(self): + return self._url_builder + + @property + def user(self) -> Optional[User]: + return self._user + + def get_galaxy_session(self) -> Optional[GalaxySession]: + return self._galaxy_session + + @property + def repositories_hostname(self) -> str: + return str(self.request.base).rstrip("/") + + @property + def host(self): + return self.__request.host + + @property + def request(self) -> GalaxyAbstractRequest: + return self.__request + + @property + def response(self) -> GalaxyAbstractResponse: + return self.__response diff --git a/lib/tool_shed/dependencies/attribute_handlers.py b/lib/tool_shed/dependencies/attribute_handlers.py index 57d312a832e8..ea611b132ab8 100644 --- a/lib/tool_shed/dependencies/attribute_handlers.py +++ b/lib/tool_shed/dependencies/attribute_handlers.py @@ -5,13 +5,13 @@ List, Optional, Tuple, + TYPE_CHECKING, ) from galaxy.util import ( asbool, etree, ) -from galaxy.web import url_for from tool_shed.dependencies.tool import tag_attribute_handler from tool_shed.repository_types.util import ( REPOSITORY_DEPENDENCY_DEFINITION_FILENAME, @@ -24,12 +24,21 @@ xml_util, ) +if TYPE_CHECKING: + from tool_shed.context import ProvidesRepositoriesContext + from tool_shed.structured_app import ToolShedApp + + log = logging.getLogger(__name__) class RepositoryDependencyAttributeHandler: - def __init__(self, app, unpopulate): - self.app = app + trans: "ProvidesRepositoriesContext" + app: "ToolShedApp" + + def __init__(self, trans: "ProvidesRepositoriesContext", unpopulate): + self.trans = trans + self.app = trans.app self.file_name = REPOSITORY_DEPENDENCY_DEFINITION_FILENAME self.unpopulate = unpopulate @@ -111,7 +120,7 @@ def handle_elem(self, elem): # From here on we're populating the toolshed and changeset_revision attributes if necessary. if not toolshed: # Default the setting to the current tool shed. - toolshed = str(url_for("/", qualified=True)).rstrip("/") + toolshed = str(self.trans.url_builder("/", qualified=True)).rstrip("/") elem.attrib["toolshed"] = toolshed altered = True if not changeset_revision: @@ -188,8 +197,12 @@ def handle_tag_attributes(self, config): class ToolDependencyAttributeHandler: - def __init__(self, app, unpopulate): - self.app = app + trans: "ProvidesRepositoriesContext" + app: "ToolShedApp" + + def __init__(self, trans: "ProvidesRepositoriesContext", unpopulate): + self.trans = trans + self.app = trans.app self.file_name = TOOL_DEPENDENCY_DEFINITION_FILENAME self.unpopulate = unpopulate @@ -198,7 +211,7 @@ def handle_tag_attributes(self, tool_dependencies_config): Populate or unpopulate the tooshed and changeset_revision attributes of each tag defined within a tool_dependencies.xml file. 
""" - rdah = RepositoryDependencyAttributeHandler(self.app, self.unpopulate) + rdah = RepositoryDependencyAttributeHandler(self.trans, self.unpopulate) tah = tag_attribute_handler.TagAttributeHandler(self.app, rdah, self.unpopulate) altered = False error_message = "" diff --git a/lib/tool_shed/dependencies/repository/relation_builder.py b/lib/tool_shed/dependencies/repository/relation_builder.py index 1f3c175e5d7d..33811377d031 100644 --- a/lib/tool_shed/dependencies/repository/relation_builder.py +++ b/lib/tool_shed/dependencies/repository/relation_builder.py @@ -16,7 +16,7 @@ class RelationBuilder: - def __init__(self, app, repository, repository_metadata, tool_shed_url): + def __init__(self, app, repository, repository_metadata, tool_shed_url, trans=None): self.all_repository_dependencies = {} self.app = app self.circular_repository_dependencies = [] @@ -25,6 +25,7 @@ def __init__(self, app, repository, repository_metadata, tool_shed_url): self.handled_key_rd_dicts = [] self.key_rd_dicts_to_be_processed = [] self.tool_shed_url = tool_shed_url + self.trans = trans def can_add_to_key_rd_dicts(self, key_rd_dict, key_rd_dicts): """Handle the case where an update to the changeset revision was done.""" @@ -212,7 +213,8 @@ def get_updated_changeset_revisions_for_repository_dependencies(self, key_rd_dic rd_prior_installation_required, rd_only_if_compiling_contained_td, ) = common_util.parse_repository_dependency_tuple(repository_dependency) - if suc.tool_shed_is_this_tool_shed(rd_toolshed): + tool_shed_is_this_tool_shed = suc.tool_shed_is_this_tool_shed(rd_toolshed, trans=self.trans) + if tool_shed_is_this_tool_shed: repository = tool_shed.util.repository_util.get_repository_by_name_and_owner( self.app, rd_name, rd_owner ) @@ -318,7 +320,7 @@ def handle_key_rd_dicts_for_repository(self, current_repository_key, repository_ prior_installation_required, only_if_compiling_contained_td, ) = common_util.parse_repository_dependency_tuple(repository_dependency) - if suc.tool_shed_is_this_tool_shed(toolshed): + if suc.tool_shed_is_this_tool_shed(toolshed, trans=self.trans): required_repository = tool_shed.util.repository_util.get_repository_by_name_and_owner(self.app, name, owner) self.repository = required_repository repository_id = self.app.security.encode_id(required_repository.id) diff --git a/lib/tool_shed/managers/categories.py b/lib/tool_shed/managers/categories.py index b9ae0535b038..9176fef9e978 100644 --- a/lib/tool_shed/managers/categories.py +++ b/lib/tool_shed/managers/categories.py @@ -14,7 +14,10 @@ from tool_shed.context import ProvidesUserContext from tool_shed.structured_app import ToolShedApp from tool_shed.webapp.model import Category -from tool_shed_client.schema import CreateCategoryRequest +from tool_shed_client.schema import ( + Category as CategoryResponse, + CreateCategoryRequest, +) class CategoryManager: @@ -50,16 +53,29 @@ def index_db(self, trans: ProvidesUserContext, deleted: bool) -> List[Category]: def index(self, trans: ProvidesUserContext, deleted: bool) -> List[Dict[str, Any]]: category_dicts: List[Dict[str, Any]] = [] for category in self.index_db(trans, deleted): - category_dict = category.to_dict(view="collection", value_mapper=get_value_mapper(trans.app)) - category_dict["url"] = web.url_for( - controller="categories", action="show", id=trans.security.encode_id(category.id) - ) - category_dict[ - "repositories" - ] = self.app.repository_registry.viewable_repositories_and_suites_by_category.get(category.name, 0) + category_dict = self.to_dict(category) 
category_dicts.append(category_dict) return category_dicts + def to_dict(self, category: Category) -> Dict[str, Any]: + category_dict = category.to_dict(view="collection", value_mapper=get_value_mapper(self.app)) + category_dict["repositories"] = self.app.repository_registry.viewable_repositories_and_suites_by_category.get( + category.name, 0 + ) + category_dict["url"] = web.url_for( + controller="categories", action="show", id=self.app.security.encode_id(category.id) + ) + return category_dict + + def to_model(self, category: Category) -> CategoryResponse: + as_dict = self.to_dict(category) + return CategoryResponse( + id=as_dict["id"], + name=as_dict["name"], + description=as_dict["description"], + repositories=as_dict["repositories"], + ) + def get_value_mapper(app: ToolShedApp) -> Dict[str, Callable]: value_mapper = {"id": app.security.encode_id} diff --git a/lib/tool_shed/managers/repositories.py b/lib/tool_shed/managers/repositories.py index a6c52d21bf19..e7464b114272 100644 --- a/lib/tool_shed/managers/repositories.py +++ b/lib/tool_shed/managers/repositories.py @@ -24,6 +24,7 @@ from galaxy import web from galaxy.exceptions import ( ConfigDoesNotAllowException, + InsufficientPermissionsException, InternalServerError, MalformedContents, ObjectNotFound, @@ -31,7 +32,10 @@ ) from galaxy.tool_shed.util import dependency_display from galaxy.util import listify -from tool_shed.context import ProvidesUserContext +from tool_shed.context import ( + ProvidesRepositoriesContext, + ProvidesUserContext, +) from tool_shed.metadata import repository_metadata_manager from tool_shed.repository_types import util as rt_util from tool_shed.structured_app import ToolShedApp @@ -57,17 +61,18 @@ validate_repository_name, ) from tool_shed.util.shed_util_common import ( - get_category, count_repositories_in_category, + get_category, ) from tool_shed.util.tool_util import generate_message_for_invalid_tools from tool_shed.webapp.model import ( Repository, - User, + RepositoryMetadata, ) from tool_shed.webapp.search.repo_search import RepoSearch from tool_shed_client.schema import ( CreateRepositoryRequest, + DetailedRepository, ExtraRepoInfo, LegacyInstallInfoTuple, Repository as SchemaRepository, @@ -296,7 +301,27 @@ def can_manage_repo(trans: ProvidesUserContext, repository: Repository) -> bool: return trans.user_is_admin or security_agent.user_can_administer_repository(trans.user, repository) -def get_install_info(app: ToolShedApp, name, owner, changeset_revision) -> LegacyInstallInfoTuple: +def can_update_repo(trans: ProvidesUserContext, repository: Repository) -> bool: + app = trans.app + security_agent = app.security_agent + return can_manage_repo(trans, repository) or security_agent.can_push(app, trans.user, repository) + + +def get_repository_metadata_for_management( + trans: ProvidesUserContext, encoded_repository_id: str, changeset_revision: str +) -> RepositoryMetadata: + repository = get_repository_in_tool_shed(trans.app, encoded_repository_id) + if not can_manage_repo(trans, repository): + raise InsufficientPermissionsException("Cannot manage target repository") + revisions = [r for r in repository.metadata_revisions if r.changeset_revision == changeset_revision] + if len(revisions) != 1: + raise ObjectNotFound() + repository_metadata = revisions[0] + return repository_metadata + + +def get_install_info(trans: ProvidesRepositoriesContext, name, owner, changeset_revision) -> LegacyInstallInfoTuple: + app = trans.app value_mapper = get_value_mapper(app) # Example URL: # 
http://<tool_shed_url>/api/repositories/get_repository_revision_install_info?name=<name>&owner=<owner>&changeset_revision=<changeset_revision>
@@ -343,7 +368,7 @@ def get_install_info(app: ToolShedApp, name, owner, changeset_revision) -> Legac
             includes_tools_for_display_in_tool_panel,
             has_repository_dependencies,
             has_repository_dependencies_only_if_compiling_contained_td,
-        ) = get_repo_info_dict(app, None, encoded_repository_id, changeset_revision)
+        ) = get_repo_info_dict(trans, encoded_repository_id, changeset_revision)
         return repository_dict, repository_metadata_dict, repo_info_dict
     else:
         log.debug(
@@ -429,8 +454,7 @@ def handle_repository(trans, start_time, repository):
         results = dict(start_time=start_time, repository_status=[])
         try:
             rmm = repository_metadata_manager.RepositoryMetadataManager(
-                app=app,
-                user=trans.user,
+                trans,
                 repository=repository,
                 resetting_all_metadata_on_repository=True,
                 updating_installed_repository=False,
@@ -525,20 +549,25 @@ def to_model(app, repository: Repository) -> SchemaRepository:
     return SchemaRepository(**to_element_dict(app, repository))
 
 
+def to_detailed_model(app, repository: Repository) -> DetailedRepository:
+    return DetailedRepository(**to_element_dict(app, repository))
+
+
 def upload_tar_and_set_metadata(
-    app: ToolShedApp,
+    trans: ProvidesRepositoriesContext,
     host: str,
-    user: User,
     repository: Repository,
     uploaded_file,
     commit_message: str,
     dry_run: bool = False,
 ):
+    app = trans.app
+    user = trans.user
+    assert user
     repo_dir = repository.repo_path(app)
     tip = repository.tip()
     (ok, message, _, content_alert_str, _, _,) = upload_tar(
-        app,
-        host,
+        trans,
         user.username,
         repository,
         uploaded_file,
@@ -551,7 +580,7 @@
     if tip == repository.tip():
         raise MalformedContents("No changes to repository.")
     else:
-        rmm = repository_metadata_manager.RepositoryMetadataManager(app=app, user=user, repository=repository)
+        rmm = repository_metadata_manager.RepositoryMetadataManager(trans, repository=repository)
         _, error_message = rmm.set_repository_metadata_due_to_new_tip(host, content_alert_str=content_alert_str)
         if error_message:
             raise InternalServerError(error_message)
diff --git a/lib/tool_shed/managers/tools.py b/lib/tool_shed/managers/tools.py
index 84dfc9c6d0fc..bd648d4903a9 100644
--- a/lib/tool_shed/managers/tools.py
+++ b/lib/tool_shed/managers/tools.py
@@ -40,5 +40,5 @@ def search(trans: SessionRequestContext, q: str, page: int = 1, page_size: int =
     )
     results = tool_search.search(trans.app, search_term, page, page_size, boosts)
-    results["hostname"] = trans.url_builder("/", qualified=True)
+    results["hostname"] = trans.repositories_hostname
     return results
diff --git a/lib/tool_shed/metadata/repository_metadata_manager.py b/lib/tool_shed/metadata/repository_metadata_manager.py
index f28aa58da00f..9ec1b29271b2 100644
--- a/lib/tool_shed/metadata/repository_metadata_manager.py
+++ b/lib/tool_shed/metadata/repository_metadata_manager.py
@@ -21,8 +21,8 @@
     InvalidFileT,
 )
 from galaxy.util import inflector
-from galaxy.web import url_for
 from galaxy.web.form_builder import SelectField
+from tool_shed.context import ProvidesRepositoriesContext
 from tool_shed.repository_types import util as rt_util
 from tool_shed.repository_types.metadata import TipOnly
 from tool_shed.structured_app import ToolShedApp
@@ -54,7 +54,7 @@ class ToolShedMetadataGenerator(BaseMetadataGenerator):
 
     def __init__(
         self,
-        app: ToolShedApp,
+        trans: ProvidesRepositoriesContext,
         repository: Optional[Repository] = None,
        changeset_revision: Optional[str] = None,
        repository_clone_url: Optional[str] = None,
@@ -67,7 +67,8 @@ def __init__(
         metadata_dict=None,
         user=None,
     ):
-        self.app = app
+        self.trans = trans
+        self.app = trans.app
         self.user = user
         self.repository = repository
         if changeset_revision is None and self.repository is not None:
@@ -75,9 +76,7 @@
         else:
             self.changeset_revision = changeset_revision
         if repository_clone_url is None and self.repository is not None:
-            self.repository_clone_url = common_util.generate_clone_url_for_repository_in_tool_shed(
-                self.user, self.repository
-            )
+            self.repository_clone_url = common_util.generate_clone_url_for(self.trans, self.repository)
         else:
             self.repository_clone_url = repository_clone_url
         if shed_config_dict is None:
@@ -98,7 +97,7 @@
         self.updating_installed_repository = updating_installed_repository
         self.persist = persist
         self.invalid_file_tups = []
-        self.sa_session = app.model.session
+        self.sa_session = trans.app.model.session
 
     def initial_metadata_dict(self) -> Dict[str, Any]:
         return {}
@@ -141,11 +140,11 @@ def handle_repository_elem(self, repository_elem, only_if_compiling_contained_td
             ]
             if not toolshed:
                 # Default to the current tool shed.
-                toolshed = str(url_for("/", qualified=True)).rstrip("/")
+                toolshed = self.trans.repositories_hostname
                 repository_dependency_tup[0] = toolshed
             toolshed = common_util.remove_protocol_from_tool_shed_url(toolshed)
-            if suc.tool_shed_is_this_tool_shed(toolshed):
+            if suc.tool_shed_is_this_tool_shed(toolshed, trans=self.trans):
                 try:
                     user = (
                         self.sa_session.query(self.app.model.User)
@@ -225,8 +228,7 @@ def handle_repository_elem(self, repository_elem, only_if_compiling_contained_td
 class RepositoryMetadataManager(ToolShedMetadataGenerator):
     def __init__(
         self,
-        app: ToolShedApp,
-        user,
+        trans: ProvidesRepositoriesContext,
         repository=None,
         changeset_revision=None,
         repository_clone_url=None,
@@ -239,7 +241,7 @@
         metadata_dict=None,
     ):
         super().__init__(
-            app,
+            trans,
             repository,
             changeset_revision,
             repository_clone_url,
@@ -250,8 +252,10 @@
             updating_installed_repository,
             persist,
             metadata_dict=metadata_dict,
-            user=user,
+            user=trans.user,
         )
+        app = trans.app
+        user = trans.user
         self.sa_session = app.model.context
         self.app = app
         self.user = user
@@ -997,9 +1001,7 @@ def reset_metadata_on_selected_repositories(self, **kwd):
 
     def set_repository(self, repository, repository_clone_url=None):
         super().set_repository(repository)
-        self.repository_clone_url = repository_clone_url or common_util.generate_clone_url_for_repository_in_tool_shed(
-            self.user, repository
-        )
+        self.repository_clone_url = repository_clone_url or common_util.generate_clone_url_for(self.trans, repository)
 
     def set_repository_metadata(self, host, content_alert_str="", **kwd):
         """
diff --git a/lib/tool_shed/test/base/api.py b/lib/tool_shed/test/base/api.py
index ffe4a0b38fad..0983dd88d3e6 100644
--- a/lib/tool_shed/test/base/api.py
+++ b/lib/tool_shed/test/base/api.py
@@ -1,4 +1,5 @@
 import os
+from functools import wraps
 from typing import (
     Any,
     Dict,
@@ -38,6 +39,12 @@ def populator(self) -> ToolShedPopulator:
     def admin_api_interactor(self) -> ShedApiInteractor:
         return ShedApiInteractor(self.url, get_admin_api_key())
 
+    def _api_interactor_for_key(self, key: str) -> ShedApiInteractor:
+        return self._api_interactor(key)
+
+    def populator_for_key(self, key: str) -> 
ToolShedPopulator: + return self._get_populator(self._api_interactor_for_key(key)) + @property def api_interactor(self) -> ShedApiInteractor: user_api_key = get_user_api_key() @@ -46,7 +53,7 @@ def api_interactor(self) -> ShedApiInteractor: password = "testpassword" ensure_user_with_email(self.admin_api_interactor, email, password) user_api_key = self.admin_api_interactor.create_api_key(email, password) - return self._api_interactor(user_api_key) + return self._api_interactor_for_key(user_api_key) def _api_interactor_by_credentials(self, email: str, password: str) -> ShedApiInteractor: ensure_user_with_email(self.admin_api_interactor, email, password) @@ -96,6 +103,25 @@ def __init__(self, galaxy_url: str): super().__init__(**interactor_kwds) +def make_skip_if_api_version_wrapper(version): + def wrapper(method): + @wraps(method) + def wrapped_method(api_test_case, *args, **kwd): + interactor: ShedApiInteractor = api_test_case.api_interactor + api_version = interactor.api_version + if api_version == version: + raise pytest.skip(f"{version} tool shed API found, skipping test") + return method(api_test_case, *args, **kwd) + + return wrapped_method + + return wrapper + + +skip_if_api_v1 = make_skip_if_api_version_wrapper("v1") +skip_if_api_v2 = make_skip_if_api_version_wrapper("v2") + + class ShedApiTestCase(ShedBaseTestCase, UsesShedApi): _galaxy_interactor: Optional[GalaxyInteractorApi] = None diff --git a/lib/tool_shed/test/base/api_util.py b/lib/tool_shed/test/base/api_util.py index 4c65f452fc1e..81dad57cb181 100644 --- a/lib/tool_shed/test/base/api_util.py +++ b/lib/tool_shed/test/base/api_util.py @@ -3,13 +3,14 @@ from functools import wraps from typing import ( Any, - Dict, Callable, + Dict, Optional, ) from urllib.parse import urljoin import requests +from typing_extensions import Literal from galaxy_test.base.api_asserts import ( assert_has_keys, @@ -85,6 +86,18 @@ def _append_headers(self, kwd): get = decorate_method(requests.get) post = decorate_method(requests.post) put = decorate_method(requests.put) + delete = decorate_method(requests.delete) + + @property + def api_version(self) -> Literal["v1", "v2"]: + config = self.version() + api_version = config.get("api_version", "v1") + return api_version + + def version(self) -> Dict[str, Any]: + response = self.get("version") + response.raise_for_status() + return response.json() def create_user(admin_interactor: ShedApiInteractor, user_dict: Dict[str, Any], assert_ok=True) -> Dict[str, Any]: diff --git a/lib/tool_shed/test/base/populators.py b/lib/tool_shed/test/base/populators.py index 90d140570aa8..dc2cddf1bdc1 100644 --- a/lib/tool_shed/test/base/populators.py +++ b/lib/tool_shed/test/base/populators.py @@ -40,6 +40,7 @@ ResetMetadataOnRepositoryResponse, ToolSearchRequest, ToolSearchResults, + Version, ) from .api_util import ( ensure_user_with_email, @@ -156,6 +157,11 @@ def setup_column_maker_and_get_metadata(self, prefix=DEFAULT_PREFIX) -> Reposito repository = self.setup_column_maker_repo(prefix=prefix) return self.get_metadata(repository) + def get_install_info_for_repository(self, has_repository_id: HasRepositoryId) -> InstallInfo: + repository_id = self._repository_id(has_repository_id) + metadata = self.get_metadata(repository_id, True) + return self.get_install_info(metadata) + def get_install_info(self, repository_metadata: RepositoryMetadata) -> InstallInfo: revision_metadata = repository_metadata.latest_revision repo = revision_metadata.repository @@ -303,6 +309,41 @@ def disallow_user_to_push(self, repository: 
HasRepositoryId, username: str) -> N delete_response = self._api_interactor.delete(f"repositories/{repository_id}/allow_push/{username}") delete_response.raise_for_status() + def set_malicious(self, repository: HasRepositoryId, changeset_revision: str): + repository_id = self._repository_id(repository) + put_response = self._api_interactor.put( + f"repositories/{repository_id}/revisions/{changeset_revision}/malicious" + ) + put_response.raise_for_status() + + def unset_malicious(self, repository: HasRepositoryId, changeset_revision: str): + repository_id = self._repository_id(repository) + delete_response = self._api_interactor.delete( + f"repositories/{repository_id}/revisions/{changeset_revision}/malicious" + ) + delete_response.raise_for_status() + + def tip_is_malicious(self, repository: HasRepositoryId) -> bool: + repository_metadata = self.get_metadata(repository) + revision = repository_metadata.latest_revision + return revision.malicious + + def set_deprecated(self, repository: HasRepositoryId): + repository_id = self._repository_id(repository) + put_response = self._api_interactor.put(f"repositories/{repository_id}/deprecated") + put_response.raise_for_status() + + def unset_deprecated(self, repository: HasRepositoryId): + repository_id = self._repository_id(repository) + delete_response = self._api_interactor.delete(f"repositories/{repository_id}/deprecated") + delete_response.raise_for_status() + + def is_deprecated(self, repository: HasRepositoryId) -> bool: + repository_id = self._repository_id(repository) + repository_response = self._api_interactor.get(f"repositories/{repository_id}") + repository_response.raise_for_status() + return Repository(**repository_response.json()).deprecated + def get_metadata(self, repository: HasRepositoryId, downloadable_only=True) -> RepositoryMetadata: repository_id = self._repository_id(repository) metadata_response = self._api_interactor.get( @@ -318,6 +359,11 @@ def reset_metadata(self, repository: HasRepositoryId) -> ResetMetadataOnReposito api_asserts.assert_status_code_is_ok(reset_response) return ResetMetadataOnRepositoryResponse(**reset_response.json()) + def version(self) -> Version: + version_response = self._admin_api_interactor.get("version") + api_asserts.assert_status_code_is_ok(version_response) + return Version(**version_response.json()) + def tool_search_query(self, query: str) -> ToolSearchResults: return self.tool_search(ToolSearchRequest(q=query)) @@ -350,6 +396,15 @@ def repo_search(self, repo_search_request: RepositorySearchRequest) -> Repositor api_asserts.assert_status_code_is_ok(search_response) return RepositorySearchResults(**search_response.json()) + def delete_api_key(self) -> None: + response = self._api_interactor.delete("users/current/api_key") + response.raise_for_status() + + def create_new_api_key(self) -> str: + response = self._api_interactor.post("users/current/api_key") + response.raise_for_status() + return response.json() + def guid(self, repository: Repository, tool_id: str, tool_version: str) -> str: url = self._api_interactor.url base = url.split("://")[1].split("/")[0] diff --git a/lib/tool_shed/test/functional/api_notes.md b/lib/tool_shed/test/functional/api_notes.md index dc207aa658f0..527a71f4a2e9 100644 --- a/lib/tool_shed/test/functional/api_notes.md +++ b/lib/tool_shed/test/functional/api_notes.md @@ -23,7 +23,6 @@ for deletion instead of writing new tests and modernizing the API. 
- reset_metadata_on_repositories - remove_repository_registry_entry -- get_repository_revision_install_info - get_installable_revisions - The whole Groups API. - The whole Repository Revisions API. diff --git a/lib/tool_shed/test/functional/test_shed_configuration.py b/lib/tool_shed/test/functional/test_shed_configuration.py new file mode 100644 index 000000000000..5e631f2dfeb9 --- /dev/null +++ b/lib/tool_shed/test/functional/test_shed_configuration.py @@ -0,0 +1,8 @@ +from ..base.api import ShedApiTestCase + + +class TestShedConfigurationApi(ShedApiTestCase): + def test_version(self) -> None: + version = self.populator.version() + assert version.version + assert version.version_major diff --git a/lib/tool_shed/test/functional/test_shed_repositories.py b/lib/tool_shed/test/functional/test_shed_repositories.py index 992369bfa3db..2fd4f551a08e 100644 --- a/lib/tool_shed/test/functional/test_shed_repositories.py +++ b/lib/tool_shed/test/functional/test_shed_repositories.py @@ -6,12 +6,21 @@ from galaxy.util.resources import resource_path from galaxy_test.base import api_asserts from tool_shed.test.base.api_util import create_user -from tool_shed.test.base.populators import repo_tars -from ..base.api import ShedApiTestCase +from tool_shed.test.base.populators import ( + HasRepositoryId, + repo_tars, +) +from tool_shed_client.schema import RepositoryRevisionMetadata +from ..base.api import ( + ShedApiTestCase, + skip_if_api_v1, + skip_if_api_v2, +) COLUMN_MAKER_PATH = resource_path(__package__, "../test_data/column_maker/column_maker.tar") +# test_0000 tests commit_message - find a way to test it here class TestShedRepositoriesApi(ShedApiTestCase): def test_create(self): populator = self.populator @@ -69,6 +78,7 @@ def test_index_simple(self): assert repository.owner == repo.owner assert repository.name == repo.name + @skip_if_api_v1 def test_allow_push(self): populator = self.populator request = { @@ -100,6 +110,32 @@ def test_allow_push(self): assert "sharewith" not in populator.get_usernames_allowed_to_push(repo) assert "alsosharewith" in populator.get_usernames_allowed_to_push(repo) + @skip_if_api_v1 + def test_set_malicious(self): + populator = self.populator + repository = populator.setup_column_maker_repo(prefix="repoformalicious") + + only_revision = self._get_only_revision(repository) + assert only_revision.downloadable + assert not only_revision.malicious + + assert not populator.tip_is_malicious(repository) + populator.set_malicious(repository, only_revision.changeset_revision) + assert populator.tip_is_malicious(repository) + populator.unset_malicious(repository, only_revision.changeset_revision) + assert not populator.tip_is_malicious(repository) + + @skip_if_api_v1 + def test_set_deprecated(self): + populator = self.populator + repository = populator.setup_column_maker_repo(prefix="repofordeprecated") + assert not repository.deprecated + assert not populator.is_deprecated(repository) + populator.set_deprecated(repository) + assert populator.is_deprecated(repository) + populator.unset_deprecated(repository) + assert not populator.is_deprecated(repository) + def test_install_info(self): # actually installing requires a whole Galaxy setup and the install manager but # we can test the response validates against the future facing InstallInfo pydandic @@ -157,6 +193,19 @@ def test_repo_tars(self): else: raise AssertionError("Wrong number of repo tars returned...") + @skip_if_api_v1 + def test_readmes(self): + populator = self.populator + repository = 
populator.setup_test_data_repo("column_maker_with_readme") + only_revision = self._get_only_revision(repository) + populator.assert_has_n_installable_revisions(repository, 1) + response = self.api_interactor.get( + f"repositories/{repository.id}/revisions/{only_revision.changeset_revision}/readmes" + ) + api_asserts.assert_status_code_is_ok(response) + readme_dicts = response.json() + assert "readme.txt" in readme_dicts + def test_reset_on_simple_repository(self): populator = self.populator repository = populator.setup_test_data_repo("column_maker") @@ -178,6 +227,7 @@ def test_reset_with_uninstallable_revisions(self): api_asserts.assert_status_code_is_ok(response) populator.assert_has_n_installable_revisions(repository, 3) + @skip_if_api_v2 def test_reset_all(self): populator = self.populator repository = populator.setup_test_data_repo("column_maker_with_download_gaps") @@ -190,3 +240,14 @@ def test_reset_all(self): ) api_asserts.assert_status_code_is_ok(response) populator.assert_has_n_installable_revisions(repository, 3) + + def _get_only_revision(self, repository: HasRepositoryId) -> RepositoryRevisionMetadata: + populator = self.populator + repository_metadata = populator.get_metadata(repository) + metadata_for_revisions = repository_metadata.__root__ + assert len(metadata_for_revisions) == 1 + only_key = list(metadata_for_revisions.keys())[0] + assert only_key.startswith("0:") + only_revision = list(metadata_for_revisions.values())[0] + assert only_revision + return only_revision diff --git a/lib/tool_shed/test/functional/test_shed_users.py b/lib/tool_shed/test/functional/test_shed_users.py index 2c326cf6a791..0fa6fce83e27 100644 --- a/lib/tool_shed/test/functional/test_shed_users.py +++ b/lib/tool_shed/test/functional/test_shed_users.py @@ -7,7 +7,10 @@ CreateUserRequest, User, ) -from ..base.api import ShedApiTestCase +from ..base.api import ( + ShedApiTestCase, + skip_if_api_v1, +) from ..base.api_util import ( email_to_username, ensure_user_with_email, @@ -77,5 +80,24 @@ def test_simple_index_and_user(self): assert show_response.json()["username"] == username assert show_response.json()["id"] == user_id - def _verify_username_password(self, email, password): - self.api_interactor.create_api_key(email, password) + @skip_if_api_v1 + def test_api_key_endpoints(self): + email = "testindexapi@bx.psu.edu" + password = "mycoolpassword123" + ensure_user_with_email(self.admin_api_interactor, email, password) + api_key = self._verify_username_password(email, password) + second_try_api_key = self._verify_username_password(email, password) + assert api_key == second_try_api_key + + user_populator = self.populator_for_key(api_key) + user_populator.delete_api_key() + new_api_key = self._verify_username_password(email, password) + assert api_key != new_api_key + + user_populator = self.populator_for_key(new_api_key) + another_new_api_key = user_populator.create_new_api_key() + assert new_api_key != another_new_api_key + assert new_api_key != api_key + + def _verify_username_password(self, email: str, password: str) -> str: + return self.api_interactor.create_api_key(email, password) diff --git a/lib/tool_shed/test/test_data/repos/column_maker_with_readme/1/column_maker.xml b/lib/tool_shed/test/test_data/repos/column_maker_with_readme/1/column_maker.xml new file mode 100644 index 000000000000..e3f8ff296c69 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/column_maker_with_readme/1/column_maker.xml @@ -0,0 +1,83 @@ + + an expression on every row + + column_maker.py $input $out_file1 "$cond" 
$round ${input.metadata.columns} "${input.metadata.column_types}" + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + .. class:: infomark + +**TIP:** If your data is not TAB delimited, use *Text Manipulation->Convert* + +----- + +**What it does** + +This tool computes an expression for every row of a query and appends the result as a new column (field). + +- Columns are referenced with **c** and a **number**. For example, **c1** refers to the first column of a tab-delimited file + +- **c3-c2** will add a length column to the query if **c2** and **c3** are start and end position + +----- + +**Example** + +If this is your input:: + + chr1 151077881 151077918 2 200 - + chr1 151081985 151082078 3 500 + + +computing "c4*c5" will produce:: + + chr1 151077881 151077918 2 200 - 400.0 + chr1 151081985 151082078 3 500 + 1500.0 + +if, at the same time, "Round result?" is set to **YES** results will look like this:: + + chr1 151077881 151077918 2 200 - 400 + chr1 151081985 151082078 3 500 + 1500 + +You can also use this tool to evaluate expressions. For example, computing "c3>=c2" for Input will result in the following:: + + chr1 151077881 151077918 2 200 - True + chr1 151081985 151082078 3 500 + True + +or computing "type(c2)==type('') for Input will return:: + + chr1 151077881 151077918 2 200 - False + chr1 151081985 151082078 3 500 + False + + + diff --git a/lib/tool_shed/test/test_data/repos/column_maker_with_readme/1/readme.txt b/lib/tool_shed/test/test_data/repos/column_maker_with_readme/1/readme.txt new file mode 100644 index 000000000000..9956da05797d --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/column_maker_with_readme/1/readme.txt @@ -0,0 +1 @@ +This is a readme that will be rendered as rst. diff --git a/lib/tool_shed/util/common_util.py b/lib/tool_shed/util/common_util.py index 7519967fe9e1..b05e34332eb6 100644 --- a/lib/tool_shed/util/common_util.py +++ b/lib/tool_shed/util/common_util.py @@ -1,3 +1,8 @@ +from typing import ( + Optional, + TYPE_CHECKING, +) + from routes import url_for from galaxy.util.tool_shed.common_util import ( @@ -19,16 +24,24 @@ remove_protocol_from_tool_shed_url, ) +if TYPE_CHECKING: + from tool_shed.context import ProvidesRepositoriesContext + from tool_shed.webapp.model import ( + Repository, + User, + ) + + +def generate_clone_url_for(trans: "ProvidesRepositoriesContext", repository: "Repository") -> str: + return generate_clone_url_for_repository_in_tool_shed(trans.user, repository, trans.repositories_hostname) + -def generate_clone_url_for_repository_in_tool_shed(user, repository) -> str: +def generate_clone_url_for_repository_in_tool_shed( + user: Optional["User"], repository: "Repository", hostname: Optional[str] = None +) -> str: """Generate the URL for cloning a repository that is in the tool shed.""" - base_url = url_for("/", qualified=True).rstrip("/") - if user: - protocol, base = base_url.split("://") - username = f"{user.username}@" - return f"{protocol}://{username}{base}/repos/{repository.user.username}/{repository.name}" - else: - return f"{base_url}/repos/{repository.user.username}/{repository.name}" + base_url = hostname or url_for("/", qualified=True).rstrip("/") + return f"{base_url}/repos/{repository.user.username}/{repository.name}" __all__ = ( diff --git a/lib/tool_shed/util/repository_content_util.py b/lib/tool_shed/util/repository_content_util.py index 382add32cf37..10b59af6f5be 100644 --- a/lib/tool_shed/util/repository_content_util.py +++ b/lib/tool_shed/util/repository_content_util.py @@ -22,7 +22,7 @@ ) if 
TYPE_CHECKING: - from tool_shed.structured_app import ToolShedApp + from tool_shed.context import ProvidesRepositoriesContext from tool_shed.webapp.model import Repository @@ -41,8 +41,7 @@ def tar_open(uploaded_file): def upload_tar( - app: "ToolShedApp", - host: str, + trans: "ProvidesRepositoriesContext", username: str, repository: "Repository", uploaded_file, @@ -54,10 +53,12 @@ def upload_tar( rdah: Optional[RepositoryDependencyAttributeHandler] = None, tdah: Optional[ToolDependencyAttributeHandler] = None, ) -> ChangeResponseT: + host = trans.repositories_hostname + app = trans.app if tar is None: tar = tar_open(uploaded_file) - rdah = rdah or RepositoryDependencyAttributeHandler(app, unpopulate=False) - tdah = tdah or ToolDependencyAttributeHandler(app, unpopulate=False) + rdah = rdah or RepositoryDependencyAttributeHandler(trans, unpopulate=False) + tdah = tdah or ToolDependencyAttributeHandler(trans, unpopulate=False) # Upload a tar archive of files. undesirable_dirs_removed = 0 undesirable_files_removed = 0 diff --git a/lib/tool_shed/util/repository_util.py b/lib/tool_shed/util/repository_util.py index 835c6c207377..2f242a5d66f0 100644 --- a/lib/tool_shed/util/repository_util.py +++ b/lib/tool_shed/util/repository_util.py @@ -48,7 +48,7 @@ repository_was_previously_installed, set_repository_attributes, ) -from galaxy.util.tool_shed import common_util +from tool_shed.util.common_util import generate_clone_url_for from tool_shed.util.hg_util import ( changeset2rev, create_hgrc_file, @@ -62,7 +62,10 @@ ) if TYPE_CHECKING: - from tool_shed.context import ProvidesUserContext + from tool_shed.context import ( + ProvidesRepositoriesContext, + ProvidesUserContext, + ) from tool_shed.structured_app import ToolShedApp from tool_shed.webapp.model import Repository @@ -83,6 +86,7 @@ def create_repo_info_dict( repository_metadata=None, tool_dependencies=None, repository_dependencies=None, + trans=None, ): """ Return a dictionary that includes all of the information needed to install a repository into a local @@ -110,15 +114,16 @@ def create_repo_info_dict( repository = get_repository_by_name_and_owner(app, repository_name, repository_owner) if app.name == "tool_shed": # We're in the tool shed. - repository_metadata = repository_metadata_by_changeset_revision( - app.model, repository.id, changeset_revision - ) + repository_metadata = repository_metadata_by_changeset_revision(app.model, repository.id, changeset_revision) if repository_metadata: metadata = repository_metadata.metadata if metadata: - tool_shed_url = web.url_for("/", qualified=True).rstrip("/") + if trans is not None: + tool_shed_url = trans.repositories_hostname + else: + tool_shed_url = web.url_for("/", qualified=True).rstrip("/") rb = tool_shed.dependencies.repository.relation_builder.RelationBuilder( - app, repository, repository_metadata, tool_shed_url + app, repository, repository_metadata, tool_shed_url, trans=trans ) # Get a dictionary of all repositories upon which the contents of the received repository depends. 
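            # (RelationBuilder receives trans so tool_shed_is_this_tool_shed can use
            # trans.repositories_hostname rather than the threadlocal url_for("/"))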
repository_dependencies = rb.get_repository_dependencies_for_changeset_revision() @@ -259,9 +264,10 @@ def get_repository_in_tool_shed(app, id, eagerload_columns=None): return q.get(app.security.decode_id(id)) -def get_repo_info_dict(app: "ToolShedApp", user, repository_id, changeset_revision): +def get_repo_info_dict(trans: "ProvidesRepositoriesContext", repository_id, changeset_revision): + app = trans.app repository = get_repository_in_tool_shed(app, repository_id) - repository_clone_url = common_util.generate_clone_url_for_repository_in_tool_shed(user, repository) + repository_clone_url = generate_clone_url_for(trans, repository) repository_metadata = get_repository_metadata_by_changeset_revision(app, repository_id, changeset_revision) if not repository_metadata: # The received changeset_revision is no longer installable, so get the next changeset_revision @@ -314,6 +320,7 @@ def get_repo_info_dict(app: "ToolShedApp", user, repository_id, changeset_revisi repository_metadata=repository_metadata, tool_dependencies=None, repository_dependencies=None, + trans=trans, ) return ( repo_info_dict, diff --git a/lib/tool_shed/util/shed_util_common.py b/lib/tool_shed/util/shed_util_common.py index 749ad9ea8655..548f8ede40e4 100644 --- a/lib/tool_shed/util/shed_util_common.py +++ b/lib/tool_shed/util/shed_util_common.py @@ -435,10 +435,11 @@ def open_repository_files_folder(app, folder_path, repository_id, is_admin=False return folder_contents -def tool_shed_is_this_tool_shed(toolshed_base_url): +def tool_shed_is_this_tool_shed(toolshed_base_url, trans=None): """Determine if a tool shed is the current tool shed.""" cleaned_toolshed_base_url = common_util.remove_protocol_from_tool_shed_url(toolshed_base_url) - cleaned_tool_shed = common_util.remove_protocol_from_tool_shed_url(str(url_for("/", qualified=True))) + hostname = trans.repositories_hostname if trans else str(url_for("/", qualified=True)) + cleaned_tool_shed = common_util.remove_protocol_from_tool_shed_url(hostname) return cleaned_toolshed_base_url == cleaned_tool_shed diff --git a/lib/tool_shed/webapp/api/categories.py b/lib/tool_shed/webapp/api/categories.py index 15183b2a62ab..f9dba0a0d55d 100644 --- a/lib/tool_shed/webapp/api/categories.py +++ b/lib/tool_shed/webapp/api/categories.py @@ -18,10 +18,7 @@ require_admin, ) from galaxy.webapps.galaxy.api import depends -from tool_shed.managers.categories import ( - CategoryManager, - get_value_mapper, -) +from tool_shed.managers.categories import CategoryManager from tool_shed.managers.repositories import repositories_by_category from tool_shed.webapp.model import Category from . 
import BaseShedAPIController @@ -117,8 +114,5 @@ def show(self, trans, id, **kwd): if category is None: category_dict = dict(message=f"Unable to locate category record for id {str(id)}.", status="error") return category_dict - category_dict = category.to_dict(view="element", value_mapper=get_value_mapper(trans.app)) - category_dict["url"] = web.url_for( - controller="categories", action="show", id=trans.security.encode_id(category.id) - ) + category_dict = self.category_manager.to_dict(category) return category_dict diff --git a/lib/tool_shed/webapp/api/repositories.py b/lib/tool_shed/webapp/api/repositories.py index 403d04182191..4499534c0cdf 100644 --- a/lib/tool_shed/webapp/api/repositories.py +++ b/lib/tool_shed/webapp/api/repositories.py @@ -202,7 +202,7 @@ def get_repository_revision_install_info( } """ - return get_install_info(self.app, name, owner, changeset_revision) + return get_install_info(trans, name, owner, changeset_revision) @web.legacy_expose_api_anonymous def get_installable_revisions(self, trans, **kwd): @@ -378,8 +378,7 @@ def handle_repository(trans, repository, results): log.debug(f"Resetting metadata on repository {repository.name}") try: rmm = repository_metadata_manager.RepositoryMetadataManager( - app=self.app, - user=trans.user, + trans, resetting_all_metadata_on_repository=True, updating_installed_repository=False, repository=repository, @@ -423,8 +422,7 @@ def handle_repository(trans, repository, results): else: my_writable = True rmm = repository_metadata_manager.RepositoryMetadataManager( - app=self.app, - user=trans.user, + trans, resetting_all_metadata_on_repository=True, updating_installed_repository=False, persist=False, @@ -725,9 +723,8 @@ def create_changeset_revision(self, trans, id, payload, **kwd): uploaded_file_name = file_data["local_filename"] try: message = upload_tar_and_set_metadata( - self.app, + trans, trans.request.host, - trans.user, repository, uploaded_file_name, commit_message, diff --git a/lib/tool_shed/webapp/api2/__init__.py b/lib/tool_shed/webapp/api2/__init__.py new file mode 100644 index 000000000000..7f270db14d4a --- /dev/null +++ b/lib/tool_shed/webapp/api2/__init__.py @@ -0,0 +1,273 @@ +from json import JSONDecodeError +from typing import ( + AsyncGenerator, + cast, + List, + Optional, + Type, + TypeVar, +) + +from fastapi import ( + Depends, + HTTPException, + Path, + Query, + Request, + Response, + Security, +) +from fastapi.security import ( + APIKeyCookie, + APIKeyHeader, + APIKeyQuery, +) +from pydantic import BaseModel +from starlette_context import context as request_context + +from galaxy.exceptions import AdminRequiredException +from galaxy.managers.session import GalaxySessionManager +from galaxy.managers.users import UserManager +from galaxy.security.idencoding import IdEncodingHelper +from galaxy.web.framework.decorators import require_admin_message +from galaxy.webapps.galaxy.api import ( + depends as framework_depends, + FrameworkRouter, + GalaxyASGIRequest, + GalaxyASGIResponse, + T, + UrlBuilder, +) +from tool_shed.context import ( + SessionRequestContext, + SessionRequestContextImpl, +) +from tool_shed.structured_app import ToolShedApp +from tool_shed.webapp import app as tool_shed_app_mod +from tool_shed.webapp.model import ( + GalaxySession, + User, +) + + +def get_app() -> ToolShedApp: + if tool_shed_app_mod.app is None: + raise Exception("Failed to initialize the tool shed app correctly for FastAPI") + return cast(ToolShedApp, tool_shed_app_mod.app) + + +async def get_app_with_request_session() -> 
AsyncGenerator[ToolShedApp, None]: + app = get_app() + request_id = request_context.data["X-Request-ID"] + app.model.set_request_id(request_id) + try: + yield app + finally: + app.model.unset_request_id(request_id) + + +DependsOnApp = cast(ToolShedApp, Depends(get_app_with_request_session)) + +api_key_query = APIKeyQuery(name="key", auto_error=False) +api_key_header = APIKeyHeader(name="x-api-key", auto_error=False) +api_key_cookie = APIKeyCookie(name="galaxycommunitysession", auto_error=False) + + +def depends(dep_type: Type[T]) -> T: + return framework_depends(dep_type, get_app=get_app) + + +def get_api_user( + user_manager: UserManager = depends(UserManager), + key: str = Security(api_key_query), + x_api_key: str = Security(api_key_header), +) -> Optional[User]: + api_key = key or x_api_key + if not api_key: + return None + user = user_manager.by_api_key(api_key=api_key) + return user + + +def get_session_manager(app: ToolShedApp = DependsOnApp) -> GalaxySessionManager: + # TODO: find out how to adapt dependency for Galaxy/Report/TS + return GalaxySessionManager(app.model) + + +def get_session( + session_manager=cast(GalaxySessionManager, Depends(get_session_manager)), + security: IdEncodingHelper = depends(IdEncodingHelper), + galaxysession: str = Security(api_key_cookie), +) -> Optional[GalaxySession]: + if galaxysession: + session_key = security.decode_guid(galaxysession) + if session_key: + return session_manager.get_session_from_session_key(session_key) + # TODO: What should we do if there is no session? Since this is the API, maybe nothing is the right choice? + return None + + +def get_user( + galaxy_session=cast(Optional[GalaxySession], Depends(get_session)), + api_user=cast(Optional[User], Depends(get_api_user)), +) -> Optional[User]: + if galaxy_session: + return galaxy_session.user + return api_user + + +def get_trans( + request: Request, + response: Response, + app: ToolShedApp = DependsOnApp, + user=cast(Optional[User], Depends(get_user)), + galaxy_session=cast(Optional[GalaxySession], Depends(get_session)), +) -> SessionRequestContext: + url_builder = UrlBuilder(request) + galaxy_request = GalaxyASGIRequest(request) + galaxy_response = GalaxyASGIResponse(response) + return SessionRequestContextImpl( + app, + galaxy_request, + galaxy_response, + user=user, + galaxy_session=galaxy_session, + url_builder=url_builder, + ) + + +DependsOnTrans: SessionRequestContext = cast(SessionRequestContext, Depends(get_trans)) + + +def get_admin_user(trans: SessionRequestContext = DependsOnTrans): + if not trans.user_is_admin: + raise AdminRequiredException(require_admin_message(trans.app.config, trans.user)) + return trans.user + + +AdminUserRequired = Depends(get_admin_user) + + +class Router(FrameworkRouter): + admin_user_dependency = AdminUserRequired + + +B = TypeVar("B", bound=BaseModel) + + +# async def depend_on_either_json_or_form_data(model: Type[T]): +# return Depends(get_body) + + +def depend_on_either_json_or_form_data(model: Type[B]) -> B: + async def get_body(request: Request): + content_type = request.headers.get("Content-Type") + if content_type is None: + raise HTTPException(status_code=400, detail="No Content-Type provided!") + elif content_type == "application/json": + try: + return model(**await request.json()) + except JSONDecodeError: + raise HTTPException(status_code=400, detail="Invalid JSON data") + elif content_type == "application/x-www-form-urlencoded" or content_type.startswith("multipart/form-data"): + try: + return model(**await request.form()) + except 
Exception:
+            raise HTTPException(status_code=400, detail="Invalid Form data")
+        else:
+            raise HTTPException(status_code=400, detail="Content-Type not supported!")
+
+    return Depends(get_body)
+
+
+UserIdPathParam: str = Path(..., title="User ID", description="The encoded database identifier of the user.")
+
+RequiredRepoOwnerParam: str = Query(
+    title="owner",
+    description="Owner of the target repository.",
+)
+
+RequiredRepoNameParam: str = Query(
+    title="Name",
+    description="Name of the target repository.",
+)
+
+RequiredChangesetParam: str = Query(
+    title="changeset",
+    description="Changeset of the target repository.",
+)
+
+RepositoryIdPathParam: str = Path(
+    ..., title="Repository ID", description="The encoded database identifier of the repository."
+)
+
+ChangesetRevisionPathParam: str = Path(
+    ...,
+    title="Change Revision",
+    description="The changeset revision corresponding to the target revision of the target repository.",
+)
+
+UsernameIdPathParam: str = Path(..., title="Username", description="The target username.")
+
+CommitMessageQueryParam: Optional[str] = Query(
+    default=None,
+    title="Commit Message",
+    description="Set commit message as a query parameter.",
+)
+
+DownloadableQueryParam: bool = Query(
+    default=True,
+    title="downloadable_only",
+    description="Include only downloadable repositories.",
+)
+
+CommitMessage: str = Query(
+    None,
+    title="Commit message",
+    description="A commit message to store with repository update.",
+)
+
+RepositoryIndexQueryParam: Optional[str] = Query(
+    default=None,
+    title="Search Query",
+)
+
+ToolsIndexQueryParam: str = Query(
+    default=...,
+    title="Search Query",
+)
+
+RepositorySearchPageQueryParam: int = Query(
+    default=1,
+    title="Page",
+)
+
+RepositorySearchPageSizeQueryParam: int = Query(
+    default=10,
+    title="Page Size",
+)
+
+RepositoryIndexDeletedQueryParam: Optional[bool] = Query(False, title="Deleted?")
+
+RepositoryIndexOwnerQueryParam: Optional[str] = Query(None, title="Owner")
+
+RepositoryIndexNameQueryParam: Optional[str] = Query(None, title="Name")
+
+RepositoryIndexToolIdsQueryParam: Optional[List[str]] = Query(
+    None, title="Tool IDs", description="List of tool GUIDs to find the repository for"
+)
+
+
+OptionalRepositoryOwnerParam: Optional[str] = Query(None, title="Owner")
+OptionalRepositoryNameParam: Optional[str] = Query(None, title="Name")
+RequiredRepositoryChangesetRevisionParam: str = Query(..., title="Changeset Revision")
+OptionalRepositoryIdParam: Optional[str] = Query(None, title="TSR ID")
+OptionalHexlifyParam: Optional[bool] = Query(True, title="Hexlify response")
+
+CategoryIdPathParam: str = Path(
+    ..., title="Category ID", description="The encoded database identifier of the category."
+)
+CategoryRepositoriesInstallableQueryParam: bool = Query(False, title="Installable?")
+CategoryRepositoriesSortKeyQueryParam: str = Query("name", title="Sort Key")
+CategoryRepositoriesSortOrderQueryParam: str = Query("asc", title="Sort Order")
+CategoryRepositoriesPageQueryParam: Optional[int] = Query(None, title="Page")
diff --git a/lib/tool_shed/webapp/api2/authenticate.py b/lib/tool_shed/webapp/api2/authenticate.py
new file mode 100644
index 000000000000..7e4ea1c4ee2d
--- /dev/null
+++ b/lib/tool_shed/webapp/api2/authenticate.py
@@ -0,0 +1,27 @@
+from fastapi import Request
+
+from galaxy.webapps.galaxy.services.authenticate import (
+    APIKeyResponse,
+    AuthenticationService,
+)
+from . import (
+    depends,
+    Router,
+)
+
+router = Router(tags=["authenticate"])
+
+
+@router.cbv
+class FastAPIAuthenticate:
+    authentication_service: AuthenticationService = depends(AuthenticationService)
+
+    @router.get(
+        "/api/authenticate/baseauth",
+        summary="Returns an API key for the authenticated user based on BaseAuth headers.",
+        operation_id="authenticate__baseauth",
+    )
+    def get_api_key(self, request: Request) -> APIKeyResponse:
+        authorization = request.headers.get("Authorization")
+        auth = {"HTTP_AUTHORIZATION": authorization}
+        return self.authentication_service.get_api_key(auth, request)
diff --git a/lib/tool_shed/webapp/api2/categories.py b/lib/tool_shed/webapp/api2/categories.py
new file mode 100644
index 000000000000..023242469dd1
--- /dev/null
+++ b/lib/tool_shed/webapp/api2/categories.py
@@ -0,0 +1,80 @@
+from typing import (
+    List,
+    Optional,
+)
+
+from fastapi import Body
+
+from tool_shed.context import SessionRequestContext
+from tool_shed.managers.categories import CategoryManager
+from tool_shed.managers.repositories import repositories_by_category
+from tool_shed_client.schema import (
+    Category as CategoryResponse,
+    CreateCategoryRequest,
+    RepositoriesByCategory,
+)
+from . import (
+    CategoryIdPathParam,
+    CategoryRepositoriesInstallableQueryParam,
+    CategoryRepositoriesPageQueryParam,
+    CategoryRepositoriesSortKeyQueryParam,
+    CategoryRepositoriesSortOrderQueryParam,
+    depends,
+    DependsOnTrans,
+    Router,
+)
+
+router = Router(tags=["categories"])
+
+
+@router.cbv
+class FastAPICategories:
+    category_manager: CategoryManager = depends(CategoryManager)
+
+    @router.post(
+        "/api/categories",
+        description="create a category",
+        operation_id="categories__create",
+        require_admin=True,
+    )
+    def create(
+        self, trans: SessionRequestContext = DependsOnTrans, request: CreateCategoryRequest = Body(...)
+    ) -> CategoryResponse:
+        category = self.category_manager.create(trans, request)
+        return self.category_manager.to_model(category)
+
+    @router.get(
+        "/api/categories",
+        description="index category",
+        operation_id="categories__index",
+    )
+    def index(self, trans: SessionRequestContext = DependsOnTrans) -> List[CategoryResponse]:
+        """
+        Return a list of dictionaries that contain information about each Category.
+        """
+        deleted = False
+        categories = self.category_manager.index_db(trans, deleted)
+        return [self.category_manager.to_model(c) for c in categories]
+
+    @router.get(
+        "/api/categories/{encoded_category_id}/repositories",
+        description="display repositories by category",
+        operation_id="categories__repositories",
+    )
+    def repositories(
+        self,
+        trans: SessionRequestContext = DependsOnTrans,
+        encoded_category_id: str = CategoryIdPathParam,
+        installable: bool = CategoryRepositoriesInstallableQueryParam,
+        sort_key: str = CategoryRepositoriesSortKeyQueryParam,
+        sort_order: str = CategoryRepositoriesSortOrderQueryParam,
+        page: Optional[int] = CategoryRepositoriesPageQueryParam,
+    ) -> RepositoriesByCategory:
+        return repositories_by_category(
+            trans.app,
+            encoded_category_id,
+            page=page,
+            sort_key=sort_key,
+            sort_order=sort_order,
+            installable=installable,
+        )
diff --git a/lib/tool_shed/webapp/api2/configuration.py b/lib/tool_shed/webapp/api2/configuration.py
new file mode 100644
index 000000000000..815039150e81
--- /dev/null
+++ b/lib/tool_shed/webapp/api2/configuration.py
@@ -0,0 +1,24 @@
+from tool_shed.structured_app import ToolShedApp
+from tool_shed_client.schema import Version
+from . 
import ( + depends, + Router, +) + +router = Router(tags=["configuration"]) + + +@router.cbv +class FastAPIConfiguration: + app: ToolShedApp = depends(ToolShedApp) + + @router.get( + "/api/version", + operation_id="configuration__version", + ) + def version(self) -> Version: + return Version( + version_major=self.app.config.version_major, + version=self.app.config.version, + api_version="v2", + ) diff --git a/lib/tool_shed/webapp/api2/repositories.py b/lib/tool_shed/webapp/api2/repositories.py new file mode 100644 index 000000000000..9cbf95c91913 --- /dev/null +++ b/lib/tool_shed/webapp/api2/repositories.py @@ -0,0 +1,494 @@ +import os +import shutil +import tempfile +from typing import ( + cast, + IO, + List, + Optional, + Union, +) + +from fastapi import ( + Body, + Depends, + Request, + Response, + status, + UploadFile, +) +from starlette.datastructures import UploadFile as StarletteUploadFile + +from galaxy.exceptions import InsufficientPermissionsException +from galaxy.webapps.galaxy.api import as_form +from tool_shed.context import SessionRequestContext +from tool_shed.managers.repositories import ( + can_manage_repo, + can_update_repo, + check_updates, + create_repository, + get_install_info, + get_ordered_installable_revisions, + get_repository_metadata_dict, + get_repository_metadata_for_management, + index_repositories, + readmes, + reset_metadata_on_repository, + search, + to_detailed_model, + to_model, + UpdatesRequest, + upload_tar_and_set_metadata, +) +from tool_shed.structured_app import ToolShedApp +from tool_shed.util.repository_util import get_repository_in_tool_shed +from tool_shed_client.schema import ( + CreateRepositoryRequest, + DetailedRepository, + from_legacy_install_info, + InstallInfo, + Repository, + RepositoryMetadata, + RepositoryPermissions, + RepositoryRevisionReadmes, + RepositorySearchResults, + RepositoryUpdate, + RepositoryUpdateRequest, + ResetMetadataOnRepositoryRequest, + ResetMetadataOnRepositoryResponse, + ValidRepostiroyUpdateMessage, +) +from . 
import ( + ChangesetRevisionPathParam, + CommitMessageQueryParam, + depend_on_either_json_or_form_data, + depends, + DependsOnTrans, + DownloadableQueryParam, + OptionalHexlifyParam, + OptionalRepositoryIdParam, + OptionalRepositoryNameParam, + OptionalRepositoryOwnerParam, + RepositoryIdPathParam, + RepositoryIndexDeletedQueryParam, + RepositoryIndexNameQueryParam, + RepositoryIndexOwnerQueryParam, + RepositoryIndexQueryParam, + RepositorySearchPageQueryParam, + RepositorySearchPageSizeQueryParam, + RequiredChangesetParam, + RequiredRepoNameParam, + RequiredRepoOwnerParam, + RequiredRepositoryChangesetRevisionParam, + Router, + UsernameIdPathParam, +) + +router = Router(tags=["repositories"]) + +IndexResponse = Union[RepositorySearchResults, List[Repository]] + + +@as_form +class RepositoryUpdateRequestFormData(RepositoryUpdateRequest): + pass + + +@router.cbv +class FastAPIRepositories: + app: ToolShedApp = depends(ToolShedApp) + + @router.get( + "/api/repositories", + description="Get a list of repositories or perform a search.", + operation_id="repositories__index", + ) + def index( + self, + q: Optional[str] = RepositoryIndexQueryParam, + page: Optional[int] = RepositorySearchPageQueryParam, + page_size: Optional[int] = RepositorySearchPageSizeQueryParam, + deleted: Optional[bool] = RepositoryIndexDeletedQueryParam, + owner: Optional[str] = RepositoryIndexOwnerQueryParam, + name: Optional[str] = RepositoryIndexNameQueryParam, + trans: SessionRequestContext = DependsOnTrans, + ) -> IndexResponse: + if q: + assert page is not None + assert page_size is not None + search_results = search(trans, q, page, page_size) + return RepositorySearchResults(**search_results) + # See API notes - was added in https://github.com/galaxyproject/galaxy/pull/3626/files + # but I think is currently unused. So probably we should just drop it until someone + # complains. + # elif params.tool_ids: + # response = index_tool_ids(self.app, params.tool_ids) + # return response + else: + repositories = index_repositories(self.app, name, owner, deleted or False) + return [to_model(self.app, r) for r in repositories] + + @router.get( + "/api/repositories/get_repository_revision_install_info", + description="Get information used by the install client to install this repository.", + operation_id="repositories__legacy_install_info", + ) + def legacy_install_info( + self, + trans: SessionRequestContext = DependsOnTrans, + name: str = RequiredRepoNameParam, + owner: str = RequiredRepoOwnerParam, + changeset_revision: str = RequiredChangesetParam, + ) -> list: + legacy_install_info = get_install_info( + trans, + name, + owner, + changeset_revision, + ) + return list(legacy_install_info) + + @router.get( + "/api/repositories/install_info", + description="Get information used by the install client to install this repository.", + operation_id="repositories__install_info", + ) + def install_info( + self, + trans: SessionRequestContext = DependsOnTrans, + name: str = RequiredRepoNameParam, + owner: str = RequiredRepoOwnerParam, + changeset_revision: str = RequiredChangesetParam, + ) -> InstallInfo: + # A less problematic version of the above API, but I guess we + # need to maintain the older version for older Galaxy API clients + # for... sometime... or forever. 
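+        # from_legacy_install_info (tool_shed_client.schema) repackages the legacy
+        # three-part tuple below into the typed InstallInfo response model.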
+ legacy_install_info = get_install_info( + trans, + name, + owner, + changeset_revision, + ) + return from_legacy_install_info(legacy_install_info) + + @router.get( + "/api/repositories/{encoded_repository_id}/metadata", + description="Get information about repository metadata", + operation_id="repositories__metadata", + # See comment below. + # response_model=RepositoryMetadata, + ) + def metadata( + self, + encoded_repository_id: str = RepositoryIdPathParam, + downloadable_only: bool = DownloadableQueryParam, + ) -> dict: + recursive = True + as_dict = get_repository_metadata_dict(self.app, encoded_repository_id, recursive, downloadable_only) + # fails 1020 if we try to use the model - I guess repository dependencies + # are getting lost + return as_dict + # return _hack_fastapi_4428(as_dict) + + @router.get( + "/api/repositories/get_ordered_installable_revisions", + description="Get an ordered list of the repository changeset revisions that are installable", + operation_id="repositories__get_ordered_installable_revisions", + ) + def get_ordered_installable_revisions( + self, + owner: Optional[str] = OptionalRepositoryOwnerParam, + name: Optional[str] = OptionalRepositoryNameParam, + tsr_id: Optional[str] = OptionalRepositoryIdParam, + ) -> List[str]: + return get_ordered_installable_revisions(self.app, name, owner, tsr_id) + + @router.post( + "/api/repositories/reset_metadata_on_repository", + description="reset metadata on a repository", + operation_id="repositories__reset_legacy", + ) + def reset_metadata_on_repository_legacy( + self, + trans: SessionRequestContext = DependsOnTrans, + request: ResetMetadataOnRepositoryRequest = depend_on_either_json_or_form_data( + ResetMetadataOnRepositoryRequest + ), + ) -> ResetMetadataOnRepositoryResponse: + return reset_metadata_on_repository(trans, request.repository_id) + + @router.post( + "/api/repositories/{encoded_repository_id}/reset_metadata", + description="reset metadata on a repository", + operation_id="repositories__reset", + ) + def reset_metadata_on_repository( + self, + trans: SessionRequestContext = DependsOnTrans, + encoded_repository_id: str = RepositoryIdPathParam, + ) -> ResetMetadataOnRepositoryResponse: + return reset_metadata_on_repository(trans, encoded_repository_id) + + @router.get( + "/api/repositories/updates", + operation_id="repositories__update", + ) + @router.get( + "/api/repositories/updates/", + ) + def updates( + self, + owner: Optional[str] = OptionalRepositoryOwnerParam, + name: Optional[str] = OptionalRepositoryNameParam, + changeset_revision: str = RequiredRepositoryChangesetRevisionParam, + hexlify: Optional[bool] = OptionalHexlifyParam, + ): + request = UpdatesRequest( + name=name, + owner=owner, + changeset_revision=changeset_revision, + hexlify=hexlify, + ) + return Response(content=check_updates(self.app, request)) + + @router.post( + "/api/repositories", + description="create a new repository", + operation_id="repositories__create", + ) + def create( + self, + trans: SessionRequestContext = DependsOnTrans, + request: CreateRepositoryRequest = Body(...), + ) -> Repository: + db_repository = create_repository( + trans, + request, + ) + return to_model(self.app, db_repository) + + @router.get( + "/api/repositories/{encoded_repository_id}", + operation_id="repositories__show", + ) + def show( + self, + encoded_repository_id: str = RepositoryIdPathParam, + ) -> DetailedRepository: + repository = get_repository_in_tool_shed(self.app, encoded_repository_id) + return to_detailed_model(self.app, repository) + + 
@router.get(
+        "/api/repositories/{encoded_repository_id}/permissions",
+        operation_id="repositories__permissions",
+    )
+    def permissions(
+        self,
+        trans: SessionRequestContext = DependsOnTrans,
+        encoded_repository_id: str = RepositoryIdPathParam,
+    ) -> RepositoryPermissions:
+        repository = get_repository_in_tool_shed(self.app, encoded_repository_id)
+        if not can_update_repo(trans, repository):
+            raise InsufficientPermissionsException(
+                "You do not have permission to inspect repository permissions."
+            )
+        return RepositoryPermissions(
+            allow_push=trans.app.security_agent.usernames_that_can_push(repository),
+            can_manage=can_manage_repo(trans, repository),
+            can_push=can_update_repo(trans, repository),
+        )
+
+    @router.get(
+        "/api/repositories/{encoded_repository_id}/allow_push",
+        operation_id="repositories__show_allow_push",
+    )
+    def show_allow_push(
+        self,
+        trans: SessionRequestContext = DependsOnTrans,
+        encoded_repository_id: str = RepositoryIdPathParam,
+    ) -> List[str]:
+        repository = get_repository_in_tool_shed(self.app, encoded_repository_id)
+        if not can_manage_repo(trans, repository):
+            raise InsufficientPermissionsException("You do not have permission to update this repository.")
+        return trans.app.security_agent.usernames_that_can_push(repository)
+
+    @router.post(
+        "/api/repositories/{encoded_repository_id}/allow_push/{username}",
+        operation_id="repositories__add_allow_push",
+    )
+    def add_allow_push(
+        self,
+        trans: SessionRequestContext = DependsOnTrans,
+        encoded_repository_id: str = RepositoryIdPathParam,
+        username: str = UsernameIdPathParam,
+    ) -> List[str]:
+        repository = get_repository_in_tool_shed(self.app, encoded_repository_id)
+        if not can_manage_repo(trans, repository):
+            raise InsufficientPermissionsException("You do not have permission to update this repository.")
+        repository.set_allow_push([username])
+        return trans.app.security_agent.usernames_that_can_push(repository)
+
+    @router.put(
+        "/api/repositories/{encoded_repository_id}/revisions/{changeset_revision}/malicious",
+        operation_id="repositories__set_malicious",
+        status_code=status.HTTP_204_NO_CONTENT,
+    )
+    def set_malicious(
+        self,
+        trans: SessionRequestContext = DependsOnTrans,
+        encoded_repository_id: str = RepositoryIdPathParam,
+        changeset_revision: str = ChangesetRevisionPathParam,
+    ):
+        repository_metadata = get_repository_metadata_for_management(trans, encoded_repository_id, changeset_revision)
+        repository_metadata.malicious = True
+        trans.sa_session.add(repository_metadata)
+        trans.sa_session.flush()
+        return Response(status_code=status.HTTP_204_NO_CONTENT)
+
+    @router.delete(
+        "/api/repositories/{encoded_repository_id}/revisions/{changeset_revision}/malicious",
+        operation_id="repositories__unset_malicious",
+        status_code=status.HTTP_204_NO_CONTENT,
+    )
+    def unset_malicious(
+        self,
+        trans: SessionRequestContext = DependsOnTrans,
+        encoded_repository_id: str = RepositoryIdPathParam,
+        changeset_revision: str = ChangesetRevisionPathParam,
+    ):
+        repository_metadata = get_repository_metadata_for_management(trans, encoded_repository_id, changeset_revision)
+        repository_metadata.malicious = False
+        trans.sa_session.add(repository_metadata)
+        trans.sa_session.flush()
+        return Response(status_code=status.HTTP_204_NO_CONTENT)
+
+    @router.put(
+        "/api/repositories/{encoded_repository_id}/deprecated",
+        operation_id="repositories__set_deprecated",
+        status_code=status.HTTP_204_NO_CONTENT,
+    )
+    def set_deprecated(
+        self,
+        trans: SessionRequestContext = DependsOnTrans,
+
encoded_repository_id: str = RepositoryIdPathParam, + ): + repository = get_repository_in_tool_shed(self.app, encoded_repository_id) + if not can_manage_repo(trans, repository): + raise InsufficientPermissionsException("You do not have permission to update this repository.") + repository.deprecated = True + trans.sa_session.add(repository) + trans.sa_session.flush() + return Response(status_code=status.HTTP_204_NO_CONTENT) + + @router.delete( + "/api/repositories/{encoded_repository_id}/deprecated", + operation_id="repositories__unset_deprecated", + status_code=status.HTTP_204_NO_CONTENT, + ) + def unset_deprecated( + self, + trans: SessionRequestContext = DependsOnTrans, + encoded_repository_id: str = RepositoryIdPathParam, + ): + repository = get_repository_in_tool_shed(self.app, encoded_repository_id) + if not can_manage_repo(trans, repository): + raise InsufficientPermissionsException("You do not have permission to update this repository.") + repository.deprecated = False + trans.sa_session.add(repository) + trans.sa_session.flush() + return Response(status_code=status.HTTP_204_NO_CONTENT) + + @router.delete( + "/api/repositories/{encoded_repository_id}/allow_push/{username}", + operation_id="repositories__remove_allow_push", + ) + def remove_allow_push( + self, + trans: SessionRequestContext = DependsOnTrans, + encoded_repository_id: str = RepositoryIdPathParam, + username: str = UsernameIdPathParam, + ) -> List[str]: + repository = get_repository_in_tool_shed(self.app, encoded_repository_id) + if not can_manage_repo(trans, repository): + raise InsufficientPermissionsException("You do not have permission to update this repository.") + repository.set_allow_push(None, remove_auth=username) + return trans.app.security_agent.usernames_that_can_push(repository) + + @router.post( + "/api/repositories/{encoded_repository_id}/changeset_revision", + description="upload new revision to the repository", + operation_id="repositories__create_revision", + ) + async def create_changeset_revision( + self, + request: Request, + encoded_repository_id: str = RepositoryIdPathParam, + commit_message: Optional[str] = CommitMessageQueryParam, + trans: SessionRequestContext = DependsOnTrans, + files: Optional[List[UploadFile]] = None, + revision_request: RepositoryUpdateRequest = Depends(RepositoryUpdateRequestFormData.as_form), # type: ignore[attr-defined] + ) -> RepositoryUpdate: + try: + # Code stolen from Marius' work in Galaxy's Tools API. 
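+            # Collect the uploaded archive: prefer the typed ``files``
+            # parameter, falling back to scanning the raw multipart form data
+            # below for any file-like fields.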
+ + files2: List[StarletteUploadFile] = cast(List[StarletteUploadFile], files or []) + # FastAPI's UploadFile is a very light wrapper around starlette's UploadFile + if not files2: + data = await request.form() + for value in data.values(): + if isinstance(value, StarletteUploadFile): + files2.append(value) + + repository = get_repository_in_tool_shed(self.app, encoded_repository_id) + + if not can_update_repo(trans, repository): + raise InsufficientPermissionsException("You do not have permission to update this repository.") + + assert trans.user + assert files2 + the_file = files2[0] + with tempfile.NamedTemporaryFile( + dir=trans.app.config.new_file_path, prefix="upload_file_data_", delete=False + ) as dest: + upload_file_like: IO[bytes] = the_file.file + shutil.copyfileobj(upload_file_like, dest) # type: ignore[misc] # https://github.com/python/mypy/issues/15031 + the_file.file.close() + filename = dest.name + try: + message = upload_tar_and_set_metadata( + trans, + trans.request.host, + repository, + filename, + commit_message or revision_request.commit_message or "Uploaded", + ) + return RepositoryUpdate(__root__=ValidRepostiroyUpdateMessage(message=message)) + finally: + if os.path.exists(filename): + os.remove(filename) + except Exception: + import logging + + log = logging.getLogger(__name__) + log.exception("Problem in here...") + raise + + @router.get( + "/api/repositories/{encoded_repository_id}/revisions/{changeset_revision}/readmes", + description="fetch readmes for repository revision", + operation_id="repositories__readmes", + response_model=RepositoryRevisionReadmes, + ) + def get_readmes( + self, + encoded_repository_id: str = RepositoryIdPathParam, + changeset_revision: str = ChangesetRevisionPathParam, + ) -> dict: + repository = get_repository_in_tool_shed(self.app, encoded_repository_id) + return readmes(self.app, repository, changeset_revision) + + +def _hack_fastapi_4428(as_dict) -> dict: + # https://github.com/tiangolo/fastapi/pull/4428#issuecomment-1145429263 + # after pydantic2 swap to really returning the object + return RepositoryMetadata(__root__=as_dict).dict()["__root__"] diff --git a/lib/tool_shed/webapp/api2/tools.py b/lib/tool_shed/webapp/api2/tools.py new file mode 100644 index 000000000000..b93502ed9e8e --- /dev/null +++ b/lib/tool_shed/webapp/api2/tools.py @@ -0,0 +1,55 @@ +from tool_shed.context import SessionRequestContext +from tool_shed.managers.tools import search +from tool_shed.structured_app import ToolShedApp +from tool_shed.util.shed_index import build_index +from tool_shed_client.schema import BuildSearchIndexResponse +from . import ( + depends, + DependsOnTrans, + RepositorySearchPageQueryParam, + RepositorySearchPageSizeQueryParam, + Router, + ToolsIndexQueryParam, +) + +router = Router(tags=["tools"]) + + +@router.cbv +class FastAPITools: + app: ToolShedApp = depends(ToolShedApp) + + @router.get( + "/api/tools", + operation_id="tools__index", + ) + def index( + self, + q: str = ToolsIndexQueryParam, + page: int = RepositorySearchPageQueryParam, + page_size: int = RepositorySearchPageSizeQueryParam, + trans: SessionRequestContext = DependsOnTrans, + ): + search_results = search(trans, q, page, page_size) + return search_results + + @router.put( + "/api/tools/build_search_index", + operation_id="tools__build_search_index", + require_admin=True, + ) + def build_search_index(self) -> BuildSearchIndexResponse: + """Not part of the stable API, just something to simplify + bootstrapping tool sheds, scripting, testing, etc... 
+ """ + config = self.app.config + repos_indexed, tools_indexed = build_index( + config.whoosh_index_dir, + config.file_path, + config.hgweb_config_dir, + config.database_connection, + ) + return BuildSearchIndexResponse( + repositories_indexed=repos_indexed, + tools_indexed=tools_indexed, + ) diff --git a/lib/tool_shed/webapp/api2/users.py b/lib/tool_shed/webapp/api2/users.py new file mode 100644 index 000000000000..3e57735b6718 --- /dev/null +++ b/lib/tool_shed/webapp/api2/users.py @@ -0,0 +1,130 @@ +from typing import ( + List, + Optional, +) + +from fastapi import ( + Body, + Response, + status, +) +from pydantic import BaseModel + +import tool_shed.util.shed_util_common as suc +from galaxy.exceptions import ( + InsufficientPermissionsException, + ObjectNotFound, + RequestParameterInvalidException, +) +from galaxy.managers.api_keys import ApiKeyManager +from tool_shed.context import SessionRequestContext +from tool_shed.managers.users import ( + api_create_user, + get_api_user, + index, +) +from tool_shed_client.schema import ( + CreateUserRequest, + User, +) +from . import ( + depends, + DependsOnTrans, + Router, + UserIdPathParam, +) + +router = Router(tags=["users"]) + + +@router.cbv +class FastAPIUsers: + api_key_manager: ApiKeyManager = depends(ApiKeyManager) + + @router.get( + "/api/users", + description="index users", + operation_id="users__index", + ) + def index(self, trans: SessionRequestContext = DependsOnTrans) -> List[User]: + deleted = False + return index(trans.app, deleted) + + @router.post( + "/api/users", + description="create a user", + operation_id="users__create", + require_admin=True, + ) + def create(self, trans: SessionRequestContext = DependsOnTrans, request: CreateUserRequest = Body(...)) -> User: + return api_create_user(trans, request) + + @router.get( + "/api/users/current", + description="show current user", + operation_id="users__current", + ) + def current(self, trans: SessionRequestContext = DependsOnTrans) -> User: + user = trans.user + assert user + return get_api_user(trans.app, user) + + @router.get( + "/api/users/{encoded_user_id}", + description="show a user", + operation_id="users__show", + ) + def show(self, trans: SessionRequestContext = DependsOnTrans, encoded_user_id: str = UserIdPathParam) -> User: + user = suc.get_user(trans.app, encoded_user_id) + if user is None: + raise ObjectNotFound() + return get_api_user(trans.app, user) + + @router.get( + "/api/users/{encoded_user_id}/api_key", + name="get_or_create_api_key", + summary="Return the user's API key", + operation_id="users__get_or_create_api_key", + ) + def get_or_create_api_key( + self, trans: SessionRequestContext = DependsOnTrans, encoded_user_id: str = UserIdPathParam + ) -> str: + user = self._get_user(trans, encoded_user_id) + return self.api_key_manager.get_or_create_api_key(user) + + @router.post( + "/api/users/{encoded_user_id}/api_key", + summary="Creates a new API key for the user", + operation_id="users__create_api_key", + ) + def create_api_key( + self, trans: SessionRequestContext = DependsOnTrans, encoded_user_id: str = UserIdPathParam + ) -> str: + user = self._get_user(trans, encoded_user_id) + return self.api_key_manager.create_api_key(user).key + + @router.delete( + "/api/users/{encoded_user_id}/api_key", + summary="Delete the current API key of the user", + status_code=status.HTTP_204_NO_CONTENT, + operation_id="users__delete_api_key", + ) + def delete_api_key( + self, + trans: SessionRequestContext = DependsOnTrans, + encoded_user_id: str = UserIdPathParam, + ): + 
user = self._get_user(trans, encoded_user_id) + self.api_key_manager.delete_api_key(user) + return Response(status_code=status.HTTP_204_NO_CONTENT) + + def _get_user(self, trans: SessionRequestContext, encoded_user_id: str): + if encoded_user_id == "current": + user = trans.user + else: + user = suc.get_user(trans.app, encoded_user_id) + if user is None: + raise ObjectNotFound() + if not (trans.user_is_admin or trans.user == user): + raise InsufficientPermissionsException() + return user diff --git a/lib/tool_shed/webapp/app.py b/lib/tool_shed/webapp/app.py index bdd32ef95d86..c71ad3938c68 100644 --- a/lib/tool_shed/webapp/app.py +++ b/lib/tool_shed/webapp/app.py @@ -1,7 +1,10 @@ import logging import sys import time -from typing import Any +from typing import ( + Any, + Optional, +) from sqlalchemy.orm.scoping import scoped_session @@ -108,3 +111,7 @@ def __init__(self, **kwd) -> None: # used for cachebusting -- refactor this into a *SINGLE* UniverseApplication base. self.server_starttime = int(time.time()) log.debug("Tool shed hgweb.config file is: %s", self.hgweb_config_manager.hgweb_config) + + +# Global instance of the universe app. +app: Optional[ToolShedApp] = None diff --git a/lib/tool_shed/webapp/buildapp.py b/lib/tool_shed/webapp/buildapp.py index 7565a7a2d98c..e25859c33be6 100644 --- a/lib/tool_shed/webapp/buildapp.py +++ b/lib/tool_shed/webapp/buildapp.py @@ -13,20 +13,36 @@ import galaxy.webapps.base.webapp from galaxy import util +from galaxy.structured_app import BasicSharedApp from galaxy.util import asbool from galaxy.util.properties import load_app_properties +from galaxy.web import url_for from galaxy.web.framework.middleware.error import ErrorMiddleware from galaxy.web.framework.middleware.request_id import RequestIDMiddleware from galaxy.web.framework.middleware.xforwardedhost import XForwardedHostMiddleware -from galaxy.webapps.base.webapp import build_url_map +from galaxy.webapps.base.webapp import ( + build_url_map, + GalaxyWebTransaction, +) from galaxy.webapps.util import wrap_if_allowed +SHED_API_VERSION = os.environ.get("TOOL_SHED_API_VERSION", "v1") + log = logging.getLogger(__name__) +class ToolShedGalaxyWebTransaction(GalaxyWebTransaction): + @property + def repositories_hostname(self) -> str: + return url_for("/", qualified=True).rstrip("/") + + class CommunityWebApplication(galaxy.webapps.base.webapp.WebApplication): injection_aware: bool = True + def transaction_chooser(self, environ, galaxy_app: BasicSharedApp, session_cookie: str): + return ToolShedGalaxyWebTransaction(environ, galaxy_app, self, session_cookie) + def add_ui_controllers(webapp, app): """ @@ -65,11 +81,15 @@ def app_pair(global_conf, load_app_kwds=None, **kwargs): kwargs = load_app_properties(kwds=kwargs, config_prefix="TOOL_SHED_CONFIG_", **load_app_kwds) if "app" in kwargs: app = kwargs.pop("app") + import tool_shed.webapp.app + + tool_shed.webapp.app.app = app else: try: - from tool_shed.webapp.app import UniverseApplication + import tool_shed.webapp.app - app = UniverseApplication(global_conf=global_conf, **kwargs) + app = tool_shed.webapp.app.UniverseApplication(global_conf=global_conf, **kwargs) + tool_shed.webapp.app.app = app except Exception: import sys import traceback @@ -98,123 +118,128 @@ def app_pair(global_conf, load_app_kwds=None, **kwargs): # Enable 'hg clone' functionality on repos by letting hgwebapp handle the request webapp.add_route("/repos/*path_info", controller="hg", action="handle_request", path_info="/") # Add the web API. 
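+    # With the FastAPI port in progress, the legacy WSGI routes below are only
+    # registered when TOOL_SHED_API_VERSION resolves to "v1" (see the
+    # conditional added below).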
# A good resource for RESTful services - https://routes.readthedocs.io/en/latest/restful.html - webapp.add_api_controllers("tool_shed.webapp.api", app) - webapp.mapper.connect( - "api_key_retrieval", - "/api/authenticate/baseauth/", - controller="authenticate", - action="get_tool_shed_api_key", - conditions=dict(method=["GET"]), - ) - webapp.mapper.connect("group", "/api/groups/", controller="groups", action="index", conditions=dict(method=["GET"])) - webapp.mapper.connect( - "group", "/api/groups/", controller="groups", action="create", conditions=dict(method=["POST"]) - ) - webapp.mapper.connect( - "group", "/api/groups/{encoded_id}", controller="groups", action="show", conditions=dict(method=["GET"]) - ) - webapp.mapper.resource( - "category", - "categories", - controller="categories", - name_prefix="category_", - path_prefix="/api", - parent_resources=dict(member_name="category", collection_name="categories"), - ) - webapp.mapper.connect( - "repositories_in_category", - "/api/categories/{category_id}/repositories", - controller="categories", - action="get_repositories", - conditions=dict(method=["GET"]), - ) - webapp.mapper.connect( - "show_updates_for_repository", - "/api/repositories/updates", - controller="repositories", - action="updates", - conditions=dict(method=["GET"]), - ) - webapp.mapper.resource( - "repository", - "repositories", - controller="repositories", - collection={ - "add_repository_registry_entry": "POST", - "get_repository_revision_install_info": "GET", - "get_ordered_installable_revisions": "GET", - "get_installable_revisions": "GET", - "remove_repository_registry_entry": "POST", - "reset_metadata_on_repositories": "POST", - "reset_metadata_on_repository": "POST", - }, - name_prefix="repository_", - path_prefix="/api", - parent_resources=dict(member_name="repository", collection_name="repositories"), - ) - webapp.mapper.resource( - "repository_revision", - "repository_revisions", - member={"repository_dependencies": "GET", "export": "POST"}, - controller="repository_revisions", - name_prefix="repository_revision_", - path_prefix="/api", - parent_resources=dict(member_name="repository_revision", collection_name="repository_revisions"), - ) - webapp.mapper.resource( - "user", - "users", - controller="users", - name_prefix="user_", - path_prefix="/api", - parent_resources=dict(member_name="user", collection_name="users"), - ) - webapp.mapper.connect( - "update_repository", - "/api/repositories/{id}", - controller="repositories", - action="update", - conditions=dict(method=["PATCH", "PUT"]), - ) - webapp.mapper.connect( - "repository_create_changeset_revision", - "/api/repositories/{id}/changeset_revision", - controller="repositories", - action="create_changeset_revision", - conditions=dict(method=["POST"]), - ) - webapp.mapper.connect( - "repository_get_metadata", - "/api/repositories/{id}/metadata", - controller="repositories", - action="metadata", - conditions=dict(method=["GET"]), - ) - webapp.mapper.connect( - "repository_show_tools", - "/api/repositories/{id}/{changeset}/show_tools", - controller="repositories", - action="show_tools", - conditions=dict(method=["GET"]), - ) - webapp.mapper.connect( - "create_repository", - "/api/repositories", - controller="repositories", - action="create", - conditions=dict(method=["POST"]), - ) - webapp.mapper.connect( - "tools", - "/api/tools/build_search_index", - controller="tools", - action="build_search_index", - conditions=dict(method=["PUT"]), - ) - webapp.mapper.connect("tools", "/api/tools", controller="tools", 
action="index", conditions=dict(method=["GET"])) - webapp.mapper.connect( - "version", "/api/version", controller="configuration", action="version", conditions=dict(method=["GET"]) - ) + if SHED_API_VERSION == "v1": + webapp.add_api_controllers("tool_shed.webapp.api", app) + webapp.mapper.connect( + "api_key_retrieval", + "/api/authenticate/baseauth/", + controller="authenticate", + action="get_tool_shed_api_key", + conditions=dict(method=["GET"]), + ) + webapp.mapper.connect( + "group", "/api/groups/", controller="groups", action="index", conditions=dict(method=["GET"]) + ) + webapp.mapper.connect( + "group", "/api/groups/", controller="groups", action="create", conditions=dict(method=["POST"]) + ) + webapp.mapper.connect( + "group", "/api/groups/{encoded_id}", controller="groups", action="show", conditions=dict(method=["GET"]) + ) + webapp.mapper.resource( + "category", + "categories", + controller="categories", + name_prefix="category_", + path_prefix="/api", + parent_resources=dict(member_name="category", collection_name="categories"), + ) + webapp.mapper.connect( + "repositories_in_category", + "/api/categories/{category_id}/repositories", + controller="categories", + action="get_repositories", + conditions=dict(method=["GET"]), + ) + webapp.mapper.connect( + "show_updates_for_repository", + "/api/repositories/updates", + controller="repositories", + action="updates", + conditions=dict(method=["GET"]), + ) + webapp.mapper.resource( + "repository", + "repositories", + controller="repositories", + collection={ + "add_repository_registry_entry": "POST", + "get_repository_revision_install_info": "GET", + "get_ordered_installable_revisions": "GET", + "get_installable_revisions": "GET", + "remove_repository_registry_entry": "POST", + "reset_metadata_on_repositories": "POST", + "reset_metadata_on_repository": "POST", + }, + name_prefix="repository_", + path_prefix="/api", + parent_resources=dict(member_name="repository", collection_name="repositories"), + ) + webapp.mapper.resource( + "repository_revision", + "repository_revisions", + member={"repository_dependencies": "GET", "export": "POST"}, + controller="repository_revisions", + name_prefix="repository_revision_", + path_prefix="/api", + parent_resources=dict(member_name="repository_revision", collection_name="repository_revisions"), + ) + webapp.mapper.resource( + "user", + "users", + controller="users", + name_prefix="user_", + path_prefix="/api", + parent_resources=dict(member_name="user", collection_name="users"), + ) + webapp.mapper.connect( + "update_repository", + "/api/repositories/{id}", + controller="repositories", + action="update", + conditions=dict(method=["PATCH", "PUT"]), + ) + webapp.mapper.connect( + "repository_create_changeset_revision", + "/api/repositories/{id}/changeset_revision", + controller="repositories", + action="create_changeset_revision", + conditions=dict(method=["POST"]), + ) + webapp.mapper.connect( + "repository_get_metadata", + "/api/repositories/{id}/metadata", + controller="repositories", + action="metadata", + conditions=dict(method=["GET"]), + ) + webapp.mapper.connect( + "repository_show_tools", + "/api/repositories/{id}/{changeset}/show_tools", + controller="repositories", + action="show_tools", + conditions=dict(method=["GET"]), + ) + webapp.mapper.connect( + "create_repository", + "/api/repositories", + controller="repositories", + action="create", + conditions=dict(method=["POST"]), + ) + webapp.mapper.connect( + "tools", + "/api/tools/build_search_index", + controller="tools", + 
action="build_search_index", + conditions=dict(method=["PUT"]), + ) + webapp.mapper.connect( + "tools", "/api/tools", controller="tools", action="index", conditions=dict(method=["GET"]) + ) + webapp.mapper.connect( + "version", "/api/version", controller="configuration", action="version", conditions=dict(method=["GET"]) + ) webapp.finalize_config() # Wrap the webapp in some useful middleware diff --git a/lib/tool_shed/webapp/controllers/admin.py b/lib/tool_shed/webapp/controllers/admin.py index c1e4c17286b8..aec70e430121 100644 --- a/lib/tool_shed/webapp/controllers/admin.py +++ b/lib/tool_shed/webapp/controllers/admin.py @@ -362,7 +362,7 @@ def manage_role_associations(self, trans, **kwd): @web.expose @web.require_admin def reset_metadata_on_selected_repositories_in_tool_shed(self, trans, **kwd): - rmm = repository_metadata_manager.RepositoryMetadataManager(trans.app, trans.user) + rmm = repository_metadata_manager.RepositoryMetadataManager(trans) if "reset_metadata_on_selected_repositories_button" in kwd: message, status = rmm.reset_metadata_on_selected_repositories(**kwd) else: diff --git a/lib/tool_shed/webapp/controllers/repository.py b/lib/tool_shed/webapp/controllers/repository.py index b6b67d2bd0f1..b3c1725e8507 100644 --- a/lib/tool_shed/webapp/controllers/repository.py +++ b/lib/tool_shed/webapp/controllers/repository.py @@ -1235,7 +1235,7 @@ def get_repository_information(self, trans, repository_ids, changeset_revisions, cur_includes_tools_for_display_in_tool_panel, cur_has_repository_dependencies, cur_has_repository_dependencies_only_if_compiling_contained_td, - ) = repository_util.get_repo_info_dict(trans.app, trans.user, repository_id, changeset_revision) + ) = repository_util.get_repo_info_dict(trans, repository_id, changeset_revision) if cur_has_repository_dependencies and not has_repository_dependencies: has_repository_dependencies = True if ( @@ -2119,7 +2119,7 @@ def reset_all_metadata(self, trans, id, **kwd): # This method is called only from the ~/templates/webapps/tool_shed/repository/manage_repository.mako template. 
repository = repository_util.get_repository_in_tool_shed(trans.app, id)
         rmm = repository_metadata_manager.RepositoryMetadataManager(
-            app=trans.app, user=trans.user, repository=repository, resetting_all_metadata_on_repository=True
+            trans, repository=repository, resetting_all_metadata_on_repository=True
         )
         rmm.reset_all_metadata_on_repository_in_tool_shed()
         rmm_metadata_dict = rmm.get_metadata_dict()
@@ -2138,9 +2138,7 @@ def reset_all_metadata(self, trans, id, **kwd):
 
     @web.expose
     def reset_metadata_on_my_writable_repositories_in_tool_shed(self, trans, **kwd):
-        rmm = repository_metadata_manager.RepositoryMetadataManager(
-            trans.app, trans.user, resetting_all_metadata_on_repository=True
-        )
+        rmm = repository_metadata_manager.RepositoryMetadataManager(trans, resetting_all_metadata_on_repository=True)
         if "reset_metadata_on_selected_repositories_button" in kwd:
             message, status = rmm.reset_metadata_on_selected_repositories(**kwd)
         else:
diff --git a/lib/tool_shed/webapp/fast_app.py b/lib/tool_shed/webapp/fast_app.py
index 707dba00041d..e1fab8be007f 100644
--- a/lib/tool_shed/webapp/fast_app.py
+++ b/lib/tool_shed/webapp/fast_app.py
@@ -1,3 +1,8 @@
+from typing import (
+    Any,
+    Dict,
+)
+
 from a2wsgi import WSGIMiddleware
 from fastapi import FastAPI
 
@@ -6,25 +11,72 @@
     add_request_id_middleware,
     include_all_package_routers,
 )
+from galaxy.webapps.openapi.utils import get_openapi
+
+api_tags_metadata = [
+    {
+        "name": "authenticate",
+        "description": "Authentication-related endpoints.",
+    },
+    {
+        "name": "categories",
+        "description": "Category-related endpoints.",
+    },
+    {
+        "name": "repositories",
+        "description": "Repository-related endpoints.",
+    },
+    {
+        "name": "users",
+        "description": "User-related endpoints.",
+    },
+    {"name": "undocumented", "description": "API routes that have not yet been ported to FastAPI."},
+]
 
 
 def initialize_fast_app(gx_webapp, tool_shed_app):
-    app = FastAPI(
-        title="Galaxy Tool Shed API",
-        description=("This API allows you to manage the Tool Shed repositories."),
-        docs_url="/api/docs",
-        redoc_url="/api/redoc",
-    )
+    app = get_fastapi_instance()
     add_exception_handler(app)
     add_request_id_middleware(app)
-    include_all_package_routers(app, "tool_shed.webapp.api")
+    from .buildapp import SHED_API_VERSION
+
+    routes_package = "tool_shed.webapp.api" if SHED_API_VERSION == "v1" else "tool_shed.webapp.api2"
+    include_all_package_routers(app, routes_package)
     wsgi_handler = WSGIMiddleware(gx_webapp)
     tool_shed_app.haltables.append(("WSGI Middleware threadpool", wsgi_handler.executor.shutdown))
     app.mount("/", wsgi_handler)
     return app
 
 
+def get_fastapi_instance() -> FastAPI:
+    return FastAPI(
+        title="Galaxy Tool Shed API",
+        description=("This API allows you to manage the Tool Shed repositories."),
+        docs_url="/api/docs",
+        redoc_url="/api/redoc",
+        openapi_tags=api_tags_metadata,
+        license_info={"name": "MIT", "url": "https://github.com/galaxyproject/galaxy/blob/dev/LICENSE.txt"},
+    )
+
+
+def get_openapi_schema() -> Dict[str, Any]:
+    """
+    Dump the OpenAPI schema without starting a full app and webserver.
+ """ + app = get_fastapi_instance() + include_all_package_routers(app, "tool_shed.webapp.api2") + return get_openapi( + title=app.title, + version=app.version, + openapi_version="3.1.0", + description=app.description, + routes=app.routes, + license_info=app.license_info, + ) + + __all__ = ( "add_request_id_middleware", + "get_openapi_schema", "initialize_fast_app", ) diff --git a/lib/tool_shed/webapp/model/__init__.py b/lib/tool_shed/webapp/model/__init__.py index 1a6761156dd6..f748f7938735 100644 --- a/lib/tool_shed/webapp/model/__init__.py +++ b/lib/tool_shed/webapp/model/__init__.py @@ -399,6 +399,7 @@ class Repository(Base, Dictifiable): "times_downloaded", "deprecated", "create_time", + "update_time", ] dict_element_visible_keys = [ "id", @@ -414,6 +415,7 @@ class Repository(Base, Dictifiable): "times_downloaded", "deprecated", "create_time", + "update_time", ] file_states = Bunch(NORMAL="n", NEEDS_MERGING="m", MARKED_FOR_REMOVAL="r", MARKED_FOR_ADDITION="a", NOT_TRACKED="?") diff --git a/lib/tool_shed_client/schema/__init__.py b/lib/tool_shed_client/schema/__init__.py index e0cffb690df2..91dfd767170c 100644 --- a/lib/tool_shed_client/schema/__init__.py +++ b/lib/tool_shed_client/schema/__init__.py @@ -33,6 +33,17 @@ class Repository(BaseModel): times_downloaded: int deprecated: bool create_time: str + update_time: str + + +class DetailedRepository(Repository): + long_description: Optional[str] + + +class RepositoryPermissions(BaseModel): + allow_push: List[str] + can_manage: bool # can the requesting user manage the repository + can_push: bool class RepositoryRevisionReadmes(BaseModel): @@ -53,6 +64,8 @@ class User(BaseModel): class Category(BaseModel): id: str name: str + description: str + repositories: int class CreateCategoryRequest(BaseModel): @@ -117,10 +130,6 @@ def is_ok(self): return isinstance(self.__root__, ValidRepostiroyUpdateMessage) -class RepositoryDependency(BaseModel): - pass - - class RepositoryTool(BaseModel): pass @@ -128,8 +137,8 @@ class RepositoryTool(BaseModel): class RepositoryRevisionMetadata(BaseModel): id: str repository: Repository - repository_dependencies: List[RepositoryDependency] - tools: Optional[List[RepositoryTool]] + repository_dependencies: List["RepositoryDependency"] + tools: Optional[List["RepositoryTool"]] repository_id: str numeric_revision: int changeset_revision: str @@ -145,6 +154,15 @@ class RepositoryRevisionMetadata(BaseModel): includes_workflows: Optional[bool] +class RepositoryDependency(RepositoryRevisionMetadata): + # This only needs properties for tests it seems? + # e.g. 
test_0550_metadata_updated_dependencies.py + pass + + +RepositoryRevisionMetadata.update_forward_refs() + + class RepositoryMetadata(BaseModel): __root__: Dict[str, RepositoryRevisionMetadata] @@ -438,3 +456,9 @@ def from_legacy_install_info(legacy_install_info: LegacyInstallInfoTuple) -> Ins class BuildSearchIndexResponse(BaseModel): repositories_indexed: int tools_indexed: int + + +class Version(BaseModel): + version_major: str + version: str + api_version: str = "v1" diff --git a/scripts/dump_openapi_schema.py b/scripts/dump_openapi_schema.py index 3215a9b73e51..c112733513fa 100644 --- a/scripts/dump_openapi_schema.py +++ b/scripts/dump_openapi_schema.py @@ -28,8 +28,17 @@ class YamlDumper(yaml.SafeDumper): @click.command("Write openapi schema to path") @click.argument("schema_path", type=click.Path(dir_okay=False, writable=True), required=False) -def write_open_api_schema(schema_path): - openapi_schema = get_openapi_schema() +@click.option("--app", type=click.Choice(["gx", "shed"]), required=False, default="gx") +def write_open_api_schema(schema_path, app: str): + if app == "shed": + # Importing this causes the Galaxy schema to generate + # in a different fashion and causes a diff in downstream + # typescript generation for instance. So delay this. + from tool_shed.webapp.fast_app import get_openapi_schema as get_openapi_schema_shed + + openapi_schema = get_openapi_schema_shed() + else: + openapi_schema = get_openapi_schema() if schema_path: if schema_path.endswith((".yml", ".yaml")): with open(schema_path, "w") as f: diff --git a/test/unit/tool_shed/_util.py b/test/unit/tool_shed/_util.py index 22571813de22..ec93408ce421 100644 --- a/test/unit/tool_shed/_util.py +++ b/test/unit/tool_shed/_util.py @@ -12,6 +12,7 @@ import tool_shed.repository_registry from galaxy.security.idencoding import IdEncodingHelper from galaxy.util import safe_makedirs +from tool_shed.context import ProvidesRepositoriesContext from tool_shed.managers.repositories import upload_tar_and_set_metadata from tool_shed.managers.users import create_user from tool_shed.repository_types import util as rt_util @@ -88,7 +89,32 @@ def user_fixture( ) -def repository_fixture(app: TestToolShedApp, user: User, name: str) -> Repository: +class ProvidesRepositoriesImpl(ProvidesRepositoriesContext): + def __init__(self, app: TestToolShedApp, user: User): + self._app = app + self._user = user + + @property + def app(self) -> ToolShedApp: + return self._app + + @property + def user(self) -> User: + return self._user + + @property + def repositories_hostname(self) -> str: + return "shed_unit_test://localhost" + + +def provides_repositories_fixture( + app: TestToolShedApp, + user: User, +): + return ProvidesRepositoriesImpl(app, user) + + +def repository_fixture(app: ToolShedApp, user: User, name: str) -> Repository: type = rt_util.UNRESTRICTED description = f"test repo named {name}" long_description = f"test repo named {name} a longer description" @@ -116,7 +142,12 @@ def _mock_url_for(x, qualified: bool = False): patch_url_for = mock.patch("galaxy.util.tool_shed.common_util.url_for", _mock_url_for) -def upload(app: TestToolShedApp, repository: Repository, path: Path, arcname: Optional[str] = None): +def upload( + provides_repositories: ProvidesRepositoriesContext, + repository: Repository, + path: Path, + arcname: Optional[str] = None, +): if path.is_dir(): tf = NamedTemporaryFile(delete=False) with tarfile.open(tf.name, "w:gz") as tar: @@ -127,19 +158,20 @@ def upload(app: TestToolShedApp, repository: Repository, path: Path, 
arcname: Op
     else:
         tar_path = str(path)
     return upload_tar_and_set_metadata(
-        app,
+        provides_repositories,
         TEST_HOST,
-        repository.user,
         repository,
         tar_path,
         commit_message=TEST_COMMIT_MESSAGE,
     )
 
 
-def upload_directories_to_repository(app: TestToolShedApp, repository: Repository, test_data_path: str):
+def upload_directories_to_repository(
+    provides_repositories: ProvidesRepositoriesContext, repository: Repository, test_data_path: str
+):
     paths = repo_tars(test_data_path)
     for path in paths:
-        upload(app, repository, Path(path), arcname=test_data_path)
+        upload(provides_repositories, repository, Path(path), arcname=test_data_path)
 
 
 def random_name(len: int = 10) -> str:
diff --git a/test/unit/tool_shed/conftest.py b/test/unit/tool_shed/conftest.py
index f527ce5847a1..44bdbd58be00 100644
--- a/test/unit/tool_shed/conftest.py
+++ b/test/unit/tool_shed/conftest.py
@@ -5,6 +5,8 @@
     User,
 )
+from tool_shed.context import ProvidesRepositoriesContext
 from ._util import (
+    provides_repositories_fixture,
     random_name,
     repository_fixture,
     TestToolShedApp,
@@ -26,3 +28,8 @@ def new_repository(shed_app: TestToolShedApp, new_user: User) -> Repository:
     return repository_fixture(shed_app, new_user, random_name())
+
+
+@pytest.fixture
+def provides_repositories(shed_app: TestToolShedApp, new_user: User) -> ProvidesRepositoriesContext:
+    return provides_repositories_fixture(shed_app, new_user)
diff --git a/test/unit/tool_shed/test_repository_metadata_manager.py b/test/unit/tool_shed/test_repository_metadata_manager.py
index 93cc148c020b..da24e64ff158 100644
--- a/test/unit/tool_shed/test_repository_metadata_manager.py
+++ b/test/unit/tool_shed/test_repository_metadata_manager.py
@@ -1,20 +1,16 @@
+from tool_shed.context import ProvidesRepositoriesContext
 from tool_shed.metadata import repository_metadata_manager
 from tool_shed.webapp.model import Repository
-from ._util import (
-    patch_url_for,
-    TestToolShedApp,
-    upload_directories_to_repository,
-)
+from ._util import upload_directories_to_repository
 
 
-@patch_url_for
-def test_reset_simple(shed_app: TestToolShedApp, new_repository: Repository):
-    upload_directories_to_repository(shed_app, new_repository, "column_maker")
+def test_reset_simple(provides_repositories: ProvidesRepositoriesContext, new_repository: Repository):
+    shed_app = provides_repositories.app
+    upload_directories_to_repository(provides_repositories, new_repository, "column_maker")
     assert len(new_repository.downloadable_revisions) == 3
     assert "2:" in new_repository.revision()
     rmm = repository_metadata_manager.RepositoryMetadataManager(
-        app=shed_app,
-        user=new_repository.user,
+        provides_repositories,
         repository=new_repository,
         resetting_all_metadata_on_repository=True,
         updating_installed_repository=False,
@@ -25,14 +21,15 @@ def test_reset_simple(shed_app: TestToolShedApp, new_repository: Repository):
     assert len(new_repository.downloadable_revisions) == 3
 
 
-@patch_url_for
-def test_reset_on_repo_with_uninstallable_revisions(shed_app: TestToolShedApp, new_repository: Repository):
-    upload_directories_to_repository(shed_app, new_repository, "column_maker_with_download_gaps")
+def test_reset_on_repo_with_uninstallable_revisions(
+    provides_repositories: ProvidesRepositoriesContext, new_repository: Repository
+):
+    shed_app = provides_repositories.app
+    upload_directories_to_repository(provides_repositories, new_repository, "column_maker_with_download_gaps")
     assert len(new_repository.downloadable_revisions) == 3
     assert "3:" in new_repository.revision()
     rmm =
repository_metadata_manager.RepositoryMetadataManager( - app=shed_app, - user=new_repository.user, + provides_repositories, repository=new_repository, resetting_all_metadata_on_repository=True, updating_installed_repository=False, @@ -43,14 +40,15 @@ def test_reset_on_repo_with_uninstallable_revisions(shed_app: TestToolShedApp, n assert len(new_repository.downloadable_revisions) == 3 -@patch_url_for -def test_reset_dm_with_uninstallable_revisions(shed_app: TestToolShedApp, new_repository: Repository): - upload_directories_to_repository(shed_app, new_repository, "data_manager_gaps") +def test_reset_dm_with_uninstallable_revisions( + provides_repositories: ProvidesRepositoriesContext, new_repository: Repository +): + shed_app = provides_repositories.app + upload_directories_to_repository(provides_repositories, new_repository, "data_manager_gaps") assert len(new_repository.downloadable_revisions) == 1 assert "2:" in new_repository.revision() rmm = repository_metadata_manager.RepositoryMetadataManager( - app=shed_app, - user=new_repository.user, + provides_repositories, repository=new_repository, resetting_all_metadata_on_repository=True, updating_installed_repository=False, diff --git a/test/unit/tool_shed/test_repository_utils.py b/test/unit/tool_shed/test_repository_utils.py index b56ec162451f..d23388b8a897 100644 --- a/test/unit/tool_shed/test_repository_utils.py +++ b/test/unit/tool_shed/test_repository_utils.py @@ -1,3 +1,4 @@ +from tool_shed.context import ProvidesRepositoriesContext from tool_shed.util.repository_content_util import upload_tar from tool_shed.webapp.model import ( Repository, @@ -25,12 +26,11 @@ def test_create_repository(shed_app: TestToolShedApp, new_user: User): assert entry -def test_upload_tar(shed_app: TestToolShedApp, new_repository: Repository): +def test_upload_tar(provides_repositories: ProvidesRepositoriesContext, new_repository: Repository): tar_resource = TEST_DATA_FILES.joinpath("column_maker/column_maker.tar") old_tip = new_repository.tip() upload_ok, _, _, alert, dirs_removed, files_removed = upload_tar( - shed_app, - "localhost", + provides_repositories, new_repository.user.username, new_repository, tar_resource, @@ -42,18 +42,19 @@ def test_upload_tar(shed_app: TestToolShedApp, new_repository: Repository): assert files_removed == 0 new_tip = new_repository.tip() assert old_tip != new_tip - changesets = new_repository.get_changesets_for_setting_metadata(shed_app) + changesets = new_repository.get_changesets_for_setting_metadata(provides_repositories.app) assert len(changesets) == 1 for change in changesets: ctx = new_repository.hg_repo[change] assert str(ctx) == new_tip -def test_upload_fails_if_contains_symlink(shed_app: TestToolShedApp, new_repository: Repository): +def test_upload_fails_if_contains_symlink( + provides_repositories: ProvidesRepositoriesContext, new_repository: Repository +): tar_resource = TEST_DATA_FILES.joinpath("safetar_with_symlink.tar") upload_ok, message, _, _, _, _ = upload_tar( - shed_app, - "localhost", + provides_repositories, new_repository.user.username, new_repository, tar_resource, @@ -63,12 +64,11 @@ def test_upload_fails_if_contains_symlink(shed_app: TestToolShedApp, new_reposit assert "Invalid paths" in message -def test_upload_dry_run_ok(shed_app: TestToolShedApp, new_repository: Repository): +def test_upload_dry_run_ok(provides_repositories: ProvidesRepositoriesContext, new_repository: Repository): tar_resource = TEST_DATA_FILES.joinpath("column_maker/column_maker.tar") old_tip = new_repository.tip() upload_ok, _, 
_, alert, dirs_removed, files_removed = upload_tar( - shed_app, - "localhost", + provides_repositories, new_repository.user.username, new_repository, tar_resource, @@ -83,11 +83,10 @@ def test_upload_dry_run_ok(shed_app: TestToolShedApp, new_repository: Repository assert old_tip == new_tip -def test_upload_dry_run_failed(shed_app: TestToolShedApp, new_repository: Repository): +def test_upload_dry_run_failed(provides_repositories: ProvidesRepositoriesContext, new_repository: Repository): tar_resource = TEST_DATA_FILES.joinpath("safetar_with_symlink.tar") upload_ok, message, _, _, _, _ = upload_tar( - shed_app, - "localhost", + provides_repositories, new_repository.user.username, new_repository, tar_resource, From 0772a51ce6d40bad47dd5c60561f6eeff997f628 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Thu, 2 Feb 2023 15:51:31 -0500 Subject: [PATCH 60/73] black formatting fix --- lib/tool_shed/managers/repositories.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/lib/tool_shed/managers/repositories.py b/lib/tool_shed/managers/repositories.py index e7464b114272..f72e61fa1a31 100644 --- a/lib/tool_shed/managers/repositories.py +++ b/lib/tool_shed/managers/repositories.py @@ -566,13 +566,21 @@ def upload_tar_and_set_metadata( assert user repo_dir = repository.repo_path(app) tip = repository.tip() - (ok, message, _, content_alert_str, _, _,) = upload_tar( + tar_response = upload_tar( trans, user.username, repository, uploaded_file, commit_message, ) + ( + ok, + message, + _, + content_alert_str, + _, + _, + ) = tar_response if ok: # Update the repository files for browsing. hg_util.update_repository(repo_dir) From b04c3264ab86f399e8bc5b491e61bf4ac902329c Mon Sep 17 00:00:00 2001 From: John Chilton Date: Thu, 22 Dec 2022 09:59:32 -0500 Subject: [PATCH 61/73] Refactor tool shed tests for injection of browser. 
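
Test cases now talk to an abstract ShedBrowser interface instead of calling
twill directly, and a pytest fixture injects the concrete implementation. A
minimal sketch of how a conftest could provide the "shed_browser" fixture the
test case expects (the fixture body here is illustrative, not necessarily the
exact wiring added by this commit):

    import pytest

    from .browser import ShedBrowser
    from .twillbrowser import TwillShedBrowser

    @pytest.fixture
    def shed_browser() -> ShedBrowser:
        # Swap in a non-twill ShedBrowser implementation here to drive the
        # same test suite through a different browser backend.
        return TwillShedBrowser()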
---
 lib/tool_shed/test/base/browser.py | 49 ++++
 lib/tool_shed/test/base/twillbrowser.py | 150 +++++++++++++
 lib/tool_shed/test/base/twilltestcase.py | 210 +++++++-----------
 .../test_0000_basic_repository_features.py | 3 +-
 .../tool_shed/repository/find_tools.mako | 2 +-
 5 files changed, 279 insertions(+), 135 deletions(-)
 create mode 100644 lib/tool_shed/test/base/browser.py
 create mode 100644 lib/tool_shed/test/base/twillbrowser.py

diff --git a/lib/tool_shed/test/base/browser.py b/lib/tool_shed/test/base/browser.py
new file mode 100644
index 000000000000..559ce7468cf3
--- /dev/null
+++ b/lib/tool_shed/test/base/browser.py
@@ -0,0 +1,49 @@
+import abc
+from typing import (
+    List,
+    Union,
+)
+
+FormValueType = Union[str, bool]
+
+
+class ShedBrowser(metaclass=abc.ABCMeta):
+    @abc.abstractmethod
+    def visit_url(self, url: str, allowed_codes: List[int]) -> str:
+        """Navigate to the supplied URL."""
+
+    @abc.abstractmethod
+    def page_content(self) -> str:
+        """Return the current page content."""
+
+    @abc.abstractmethod
+    def check_page_for_string(self, patt: str) -> None:
+        """Looks for 'patt' in the current browser page"""
+
+    @abc.abstractmethod
+    def check_string_not_in_page(self, patt: str) -> None:
+        """Looks for 'patt' not being in the current browser page"""
+
+    @abc.abstractmethod
+    def fill_form_value(self, form_name: str, control_name: str, value: FormValueType):
+        """Fill in a form value."""
+
+    @abc.abstractmethod
+    def submit_form(self, form_no=-1, button="runtool_btn", form=None, **kwd):
+        """Submit the target button."""
+
+    @abc.abstractmethod
+    def submit_form_with_name(self, form_name: str, button="runtool_btn", **kwd):
+        """Submit the target button."""
+
+    @abc.abstractproperty
+    def is_twill(self) -> bool:
+        """Return whether this is a twill browser."""
+
+    @abc.abstractmethod
+    def edit_repository_categories(self, categories_to_add: List[str], categories_to_remove: List[str]) -> None:
+        """Select some new categories and then restore the component."""
+
+    @abc.abstractmethod
+    def grant_users_access(self, usernames: List[str]) -> None:
+        """Select users to grant access to."""
diff --git a/lib/tool_shed/test/base/twillbrowser.py b/lib/tool_shed/test/base/twillbrowser.py
new file mode 100644
index 000000000000..65cf8c48eda3
--- /dev/null
+++ b/lib/tool_shed/test/base/twillbrowser.py
@@ -0,0 +1,150 @@
+import tempfile
+from typing import (
+    Dict,
+    List,
+)
+
+import twill.commands as tc
+from twill.browser import FormElement
+
+from galaxy.util import smart_str
+from .browser import (
+    FormValueType,
+    ShedBrowser,
+)
+
+tc.options["equiv_refresh_interval"] = 0
+
+
+def visit_url(url: str, allowed_codes: List[int]) -> str:
+    new_url = tc.go(url)
+    return_code = tc.browser.code
+    assert return_code in allowed_codes, "Invalid HTTP return code {}, allowed codes: {}".format(
+        return_code,
+        ", ".join(str(code) for code in allowed_codes),
+    )
+    return new_url
+
+
+def page_content() -> str:
+    return tc.browser.html
+
+
+class TwillShedBrowser(ShedBrowser):
+    def visit_url(self, url: str, allowed_codes: List[int]) -> str:
+        return visit_url(url, allowed_codes=allowed_codes)
+
+    def page_content(self) -> str:
+        """
+        Return the last visited page (usually HTML, but can be binary data as
+        well).
+ """ + return page_content() + + def check_page_for_string(self, patt: str) -> None: + page = self.page_content() + if page.find(patt) == -1: + fname = self.write_temp_file(page) + errmsg = f"no match to '{patt}'\npage content written to '{fname}'\npage: [[{page}]]" + raise AssertionError(errmsg) + + def check_string_not_in_page(self, patt: str) -> None: + page = self.page_content() + if page.find(patt) != -1: + fname = self.write_temp_file(page) + errmsg = f"string ({patt}) incorrectly displayed in page.\npage content written to '{fname}'" + raise AssertionError(errmsg) + + def write_temp_file(self, content, suffix=".html"): + with tempfile.NamedTemporaryFile(suffix=suffix, prefix="twilltestcase-", delete=False) as fh: + fh.write(smart_str(content)) + return fh.name + + def submit_form_with_name(self, form_name: str, button="runtool_btn", **kwd): + forms_by_name: Dict[str, FormElement] = {f.get("name"): f for f in self._show_forms()} + form = forms_by_name[form_name] + self._submit_form(form, button, **kwd) + + def _show_forms(self) -> List[FormElement]: + """Shows form, helpful for debugging new tests""" + return tc.browser.forms + + def submit_form(self, form_no=-1, button="runtool_btn", form=None, **kwd): + if form is None: + try: + form = self._show_forms()[form_no] + except IndexError: + raise ValueError("No form to submit found") + self._submit_form(form, button, **kwd) + + def _submit_form(self, form, button, **kwd): + controls = {c.name: c for c in form.inputs} + form_name = form.get("name") + for control_name, control_value in kwd.items(): + if control_name not in controls: + continue # these cannot be handled safely - cause the test to barf out + if not isinstance(control_value, list): + control_value = [str(control_value)] + control = controls[control_name] + control_type = getattr(control, "type", None) + if control_type in ( + "text", + "textfield", + "submit", + "password", + "TextareaElement", + "checkbox", + "radio", + None, + ): + for cv in control_value: + tc.fv(form_name, control.name, cv) + else: + # Add conditions for other control types here when necessary. 
+ pass + tc.submit(button) + + def fill_form_value(self, form_name: str, control_name: str, value: FormValueType): + value = str(value) + tc.fv(form_name, control_name, value) + + def edit_repository_categories(self, categories_to_add: List[str], categories_to_remove: List[str]) -> None: + """Select some new categories and then restore the component.""" + strings_displayed = [] + strings_not_displayed = [] + for category in categories_to_add: + self.fill_form_value("categories", "category_id", f"+{category}") + strings_displayed.append(f"selected>{category}") + for category in categories_to_remove: + self.fill_form_value("categories", "category_id", f"-{category}") + strings_not_displayed.append(f"selected>{category}") + self.submit_form_with_name("categories", "manage_categories_button") + self._check_for_strings(strings_displayed, strings_not_displayed) + + strings_displayed = [] + strings_not_displayed = [] + for category in categories_to_remove: + self.fill_form_value("categories", "category_id", f"+{category}") + strings_displayed.append(f"selected>{category}") + for category in categories_to_add: + self.fill_form_value("categories", "category_id", f"-{category}") + strings_not_displayed.append(f"selected>{category}") + self.submit_form_with_name("categories", "manage_categories_button") + self._check_for_strings(strings_displayed, strings_not_displayed) + + def grant_users_access(self, usernames: List[str]): + for username in usernames: + self.fill_form_value("user_access", "allow_push", f"+{username}") + self.submit_form_with_name("user_access", "user_access_button") + + @property + def is_twill(self) -> bool: + return True + + def _check_for_strings(self, strings_displayed: List[str], strings_not_displayed: List[str]): + if strings_displayed: + for check_str in strings_displayed: + self.check_page_for_string(check_str) + if strings_not_displayed: + for check_str in strings_not_displayed: + self.check_string_not_in_page(check_str) diff --git a/lib/tool_shed/test/base/twilltestcase.py b/lib/tool_shed/test/base/twilltestcase.py index b88a8f05728f..312091e7a113 100644 --- a/lib/tool_shed/test/base/twilltestcase.py +++ b/lib/tool_shed/test/base/twilltestcase.py @@ -22,8 +22,8 @@ urlparse, ) +import pytest import requests -import twill.commands as tc from mercurial import ( commands, hg, @@ -51,7 +51,6 @@ from galaxy.util import ( DEFAULT_SOCKET_TIMEOUT, smart_str, - unicodify, ) from galaxy_test.base.api_asserts import assert_status_code_is_ok from galaxy_test.base.api_util import get_admin_api_key @@ -74,12 +73,16 @@ test_db_util, ) from .api import ShedApiTestCase +from .browser import ShedBrowser +from .twillbrowser import ( + page_content, + visit_url, +) # Set a 10 minute timeout for repository installation. 
repository_installation_timeout = 600 log = logging.getLogger(__name__) -tc.options["equiv_refresh_interval"] = 0 class ToolShedInstallationClient(metaclass=abc.ABCMeta): @@ -398,7 +401,7 @@ def _create_user_in_galaxy(self, email="test@bx.psu.edu", password="testuser", u def _galaxy_token(self): self._visit_galaxy_url("/") - html = self.testcase.last_page() + html = page_content() token_def_index = html.find("session_csrf_token") token_sep_index = html.find(":", token_def_index) token_quote_start_index = html.find('"', token_sep_index) @@ -411,7 +414,7 @@ def _get_tool_panel_section_from_api(self, metadata): tool_guid = quote_plus(tool_metadata[0]["guid"], safe="") api_url = f"/api/tools/{tool_guid}" self._visit_galaxy_url(api_url) - tool_dict = loads(self.testcase.last_page()) + tool_dict = loads(page_content()) tool_panel_section = tool_dict["panel_section_name"] return tool_panel_section @@ -425,11 +428,12 @@ def _wait_for_repository_installation(self, repository_ids): ) _wait_for_installation(galaxy_repository, test_db_util.ga_refresh) - def _visit_galaxy_url(self, url, params=None, doseq=False, allowed_codes=None): + def _visit_galaxy_url(self, url, params=None, allowed_codes=None): if allowed_codes is None: allowed_codes = [200] url = f"{self.testcase.galaxy_url}{url}" - self.testcase.visit_url(url, params=params, doseq=doseq, allowed_codes=allowed_codes) + url = self.testcase.join_url_and_params(url, params) + return visit_url(url, allowed_codes) class StandaloneToolShedInstallationClient(ToolShedInstallationClient): @@ -607,11 +611,13 @@ def get_tool_names(self) -> List[str]: return tool_names +@pytest.mark.usefixtures("shed_browser") class ShedTwillTestCase(ShedApiTestCase): """Class of FunctionalTestCase geared toward HTML interactions using the Twill library.""" requires_galaxy: bool = False _installation_client = None + __browser: Optional[ShedBrowser] = None def setUp(self): super().setUp() @@ -639,6 +645,15 @@ def setUp(self): self.__class__._installation_client.setup() self._installation_client = self.__class__._installation_client + @pytest.fixture(autouse=True) + def inject_shed_browser(self, shed_browser: ShedBrowser): + self.__browser = shed_browser + + @property + def _browser(self) -> ShedBrowser: + assert self.__browser + return self.__browser + def check_for_strings(self, strings_displayed=None, strings_not_displayed=None): strings_displayed = strings_displayed or [] strings_not_displayed = strings_not_displayed or [] @@ -660,19 +675,11 @@ def check_page(self, strings_displayed, strings_displayed_count, strings_not_dis def check_page_for_string(self, patt): """Looks for 'patt' in the current browser page""" - page = unicodify(self.last_page()) - if page.find(patt) == -1: - fname = self.write_temp_file(page) - errmsg = f"no match to '{patt}'\npage content written to '{fname}'\npage: [[{page}]]" - raise AssertionError(errmsg) + self._browser.check_page_for_string(patt) def check_string_not_in_page(self, patt): """Checks to make sure 'patt' is NOT in the page.""" - page = self.last_page() - if page.find(patt) != -1: - fname = self.write_temp_file(page) - errmsg = f"string ({patt}) incorrectly displayed in page.\npage content written to '{fname}'" - raise AssertionError(errmsg) + self._browser.check_string_not_in_page(patt) # Functions associated with user accounts @@ -681,12 +688,12 @@ def create(self, cntrller="user", email="test@bx.psu.edu", password="testuser", # can't find form fields (and hence user can't be logged in). 
params = dict(cntrller=cntrller, use_panels=False) self.visit_url("/user/create", params) - tc.fv("registration", "email", email) - tc.fv("registration", "redirect", redirect) - tc.fv("registration", "password", password) - tc.fv("registration", "confirm", password) - tc.fv("registration", "username", username) - tc.submit("create_user_button") + self._browser.fill_form_value("registration", "email", email) + self._browser.fill_form_value("registration", "redirect", redirect) + self._browser.fill_form_value("registration", "password", password) + self._browser.fill_form_value("registration", "confirm", password) + self._browser.fill_form_value("registration", "username", username) + self._browser.submit_form_with_name("registration", "create_user_button") previously_created = False username_taken = False invalid_username = False @@ -715,10 +722,7 @@ def last_page(self): Return the last visited page (usually HTML, but can binary data as well). """ - return tc.browser.html - - def last_url(self): - return tc.browser.url + return self._browser.page_content() def user_api_interactor(self, email="test@bx.psu.edu", password="testuser"): return self._api_interactor_by_credentials(email, password) @@ -727,7 +731,12 @@ def user_populator(self, email="test@bx.psu.edu", password="testuser"): return self._get_populator(self.user_api_interactor(email=email, password=password)) def login( - self, email="test@bx.psu.edu", password="testuser", username="admin-user", redirect="", logout_first=True + self, + email: str = "test@bx.psu.edu", + password: str = "testuser", + username: str = "admin-user", + redirect: str = "", + logout_first: bool = True, ): # Clear cookies. if logout_first: @@ -748,69 +757,37 @@ def logout(self): self.visit_url("/user/logout") self.check_page_for_string("You have been logged out") - def showforms(self): - """Shows form, helpful for debugging new tests""" - return tc.browser.forms - def submit_form(self, form_no=-1, button="runtool_btn", form=None, **kwd): """Populates and submits a form from the keyword arguments.""" # An HTMLForm contains a sequence of Controls. Supported control classes are: # TextControl, FileControl, ListControl, RadioControl, CheckboxControl, SelectControl, # SubmitControl, ImageControl - if form is None: - try: - form = self.showforms()[form_no] - except IndexError: - raise ValueError("No form to submit found") - controls = {c.name: c for c in form.inputs} - form_name = form.get("name") - for control_name, control_value in kwd.items(): - if control_name not in controls: - continue # these cannot be handled safely - cause the test to barf out - if not isinstance(control_value, list): - control_value = [str(control_value)] - control = controls[control_name] - control_type = getattr(control, "type", None) - if control_type in ( - "text", - "textfield", - "submit", - "password", - "TextareaElement", - "checkbox", - "radio", - None, - ): - for cv in control_value: - tc.fv(form_name, control.name, cv) - else: - # Add conditions for other control types here when necessary. 
- pass - tc.submit(button) + self._browser.submit_form(form_no, button, form, **kwd) - def visit_url(self, url, params=None, doseq=False, allowed_codes=None): - if allowed_codes is None: - allowed_codes = [200] + def join_url_and_params(self, url: str, params, query=None) -> str: if params is None: params = dict() + if query is None: + query = urlparse(url).query + if query: + for query_parameter in query.split("&"): + key, value = query_parameter.split("=") + params[key] = value + if params: + url += f"?{urlencode(params)}" + return url + + def visit_url(self, url: str, params=None, allowed_codes: Optional[List[int]] = None) -> str: parsed_url = urlparse(url) if len(parsed_url.netloc) == 0: url = f"http://{self.host}:{self.port}{parsed_url.path}" else: url = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}" - if parsed_url.query: - for query_parameter in parsed_url.query.split("&"): - key, value = query_parameter.split("=") - params[key] = value - if params: - url += f"?{urlencode(params, doseq=doseq)}" - new_url = tc.go(url) - return_code = tc.browser.code - assert return_code in allowed_codes, "Invalid HTTP return code {}, allowed codes: {}".format( - return_code, - ", ".join(str(code) for code in allowed_codes), - ) - return new_url + url = self.join_url_and_params(url, params, query=parsed_url.query) + if allowed_codes is None: + allowed_codes = [200] + + return self._browser.visit_url(url, allowed_codes=allowed_codes) def write_temp_file(self, content, suffix=".html"): with tempfile.NamedTemporaryFile(suffix=suffix, prefix="twilltestcase-", delete=False) as fh: @@ -1220,33 +1197,15 @@ def display_repository_file_contents( self.check_for_strings(strings_displayed, strings_not_displayed) def edit_repository_categories( - self, repository: Repository, categories_to_add=None, categories_to_remove=None, restore_original=True + self, + repository: Repository, + categories_to_add: List[str], + categories_to_remove: List[str], + restore_original=True, ) -> None: - categories_to_add = categories_to_add or [] - categories_to_remove = categories_to_remove or [] params = {"id": repository.id} self.visit_url("/repository/manage_repository", params=params) - strings_displayed = [] - strings_not_displayed = [] - for category in categories_to_add: - tc.fv("2", "category_id", f"+{category}") - strings_displayed.append(f"selected>{category}") - for category in categories_to_remove: - tc.fv("2", "category_id", f"-{category}") - strings_not_displayed.append(f"selected>{category}") - tc.submit("manage_categories_button") - self.check_for_strings(strings_displayed, strings_not_displayed) - if restore_original: - strings_displayed = [] - strings_not_displayed = [] - for category in categories_to_remove: - tc.fv("2", "category_id", f"+{category}") - strings_displayed.append(f"selected>{category}") - for category in categories_to_add: - tc.fv("2", "category_id", f"-{category}") - strings_not_displayed.append(f"selected>{category}") - tc.submit("manage_categories_button") - self.check_for_strings(strings_displayed, strings_not_displayed) + self._browser.edit_repository_categories(categories_to_add, categories_to_remove) def edit_repository_information(self, repository: Repository, revert=True, **kwd): params = {"id": repository.id} @@ -1260,17 +1219,21 @@ def edit_repository_information(self, repository: Repository, revert=True, **kwd strings_displayed = [] for input_elem_name in ["repo_name", "description", "long_description", "repository_type"]: if input_elem_name in kwd: - 
tc.fv("edit_repository", input_elem_name, kwd[input_elem_name]) + self._browser.fill_form_value("edit_repository", input_elem_name, kwd[input_elem_name]) strings_displayed.append(self.escape_html(kwd[input_elem_name])) - tc.submit("edit_repository_button") - self.check_for_strings(strings_displayed) + self._browser.submit_form_with_name("edit_repository", "edit_repository_button") + # TODO: come back to this (and similar conditional below), the problem is check + # for strings isn't working with with textboxes I think? + if self._browser.is_twill: + self.check_for_strings(strings_displayed) if revert: strings_displayed = [] for input_elem_name in ["repo_name", "description", "long_description"]: - tc.fv("edit_repository", input_elem_name, original_information[input_elem_name]) + self._browser.fill_form_value("edit_repository", input_elem_name, original_information[input_elem_name]) strings_displayed.append(self.escape_html(original_information[input_elem_name])) - tc.submit("edit_repository_button") - self.check_for_strings(strings_displayed) + self._browser.submit_form_with_name("edit_repository", "edit_repository_button") + if self._browser.is_twill: + self.check_for_strings(strings_displayed) def enable_email_alerts(self, repository: Repository, strings_displayed=None, strings_not_displayed=None) -> None: repository_id = repository.id @@ -1562,9 +1525,7 @@ def grant_write_access( usernames = usernames or [] self.display_manage_repository_page(repository) self.check_for_strings(strings_displayed, strings_not_displayed) - for username in usernames: - tc.fv("user_access", "allow_push", f"+{username}") - tc.submit("user_access_button") + self._browser.grant_users_access(usernames) self.check_for_strings(post_submit_strings_displayed, post_submit_strings_not_displayed) def _install_repository( @@ -1755,28 +1716,11 @@ def search_for_valid_tools( params["galaxy_url"] = self.galaxy_url for field_name, search_string in search_fields.items(): self.visit_url("/repository/find_tools", params=params) - tc.fv("1", "exact_matches", exact_matches) - tc.fv("1", field_name, search_string) - tc.submit() + self._browser.fill_form_value("find_tools", "exact_matches", exact_matches) + self._browser.fill_form_value("find_tools", field_name, search_string) + self._browser.submit_form_with_name("find_tools", "find_tools_submit") self.check_for_strings(strings_displayed, strings_not_displayed) - def set_form_value(self, form, kwd, field_name, field_value): - """ - Set the form field field_name to field_value if it exists, and return the provided dict containing that value. If - the field does not exist in the provided form, return a dict without that index. 
- """ - form_id = form.attrib.get("id") - controls = [control for control in form.inputs if str(control.name) == field_name] - if len(controls) > 0: - log.debug(f"Setting field {field_name} of form {form_id} to {field_value}.") - tc.formvalue(form_id, field_name, str(field_value)) - kwd[field_name] = str(field_value) - else: - if field_name in kwd: - log.debug("No field %s in form %s, discarding from return value.", field_name, form_id) - del kwd[field_name] - return kwd - def set_repository_deprecated( self, repository: Repository, set_deprecated=True, strings_displayed=None, strings_not_displayed=None ): @@ -1788,8 +1732,8 @@ def set_repository_malicious( self, repository: Repository, set_malicious=True, strings_displayed=None, strings_not_displayed=None ) -> None: self.display_manage_repository_page(repository) - tc.fv("malicious", "malicious", set_malicious) - tc.submit("malicious_button") + self._browser.fill_form_value("malicious", "malicious", set_malicious) + self._browser.submit_form_with_name("malicious", "malicious_button") self.check_for_strings(strings_displayed, strings_not_displayed) def tip_has_metadata(self, repository: Repository) -> bool: diff --git a/lib/tool_shed/test/functional/test_0000_basic_repository_features.py b/lib/tool_shed/test/functional/test_0000_basic_repository_features.py index d9092076a404..029f2987866c 100644 --- a/lib/tool_shed/test/functional/test_0000_basic_repository_features.py +++ b/lib/tool_shed/test/functional/test_0000_basic_repository_features.py @@ -353,7 +353,8 @@ def test_0140_view_invalid_changeset(self): repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) encoded_repository_id = repository.id strings_displayed = ["Invalid+changeset+revision"] - self.visit_url( + view_repo_url = ( f"/repository/view_repository?id={encoded_repository_id}&changeset_revision=nonsensical_changeset" ) + self.visit_url(view_repo_url) self.check_for_strings(strings_displayed=strings_displayed, strings_not_displayed=[]) diff --git a/lib/tool_shed/webapp/templates/webapps/tool_shed/repository/find_tools.mako b/lib/tool_shed/webapp/templates/webapps/tool_shed/repository/find_tools.mako index 6a49ea0a0414..a9ea1825c304 100644 --- a/lib/tool_shed/webapp/templates/webapps/tool_shed/repository/find_tools.mako +++ b/lib/tool_shed/webapp/templates/webapps/tool_shed/repository/find_tools.mako @@ -55,7 +55,7 @@
    [hunk body lost: the old and new <form> markup changed at this line of find_tools.mako was stripped during text extraction and is not recoverable]
    From 24b3abeb3e1794a0a2f1189a193141c4b1d4454a Mon Sep 17 00:00:00 2001 From: John Chilton Date: Fri, 23 Dec 2022 22:50:00 -0500 Subject: [PATCH 62/73] Script for bootstrapping the tool shed. --- lib/tool_shed/test/base/api_util.py | 4 + lib/tool_shed/test/base/populators.py | 17 +- scripts/bootstrap_test_shed.py | 249 ++++++++++++++++++++++++++ 3 files changed, 267 insertions(+), 3 deletions(-) create mode 100644 scripts/bootstrap_test_shed.py diff --git a/lib/tool_shed/test/base/api_util.py b/lib/tool_shed/test/base/api_util.py index 81dad57cb181..e90a495aa69b 100644 --- a/lib/tool_shed/test/base/api_util.py +++ b/lib/tool_shed/test/base/api_util.py @@ -99,6 +99,10 @@ def version(self) -> Dict[str, Any]: response.raise_for_status() return response.json() + @property + def hg_url_base(self): + return self.url + def create_user(admin_interactor: ShedApiInteractor, user_dict: Dict[str, Any], assert_ok=True) -> Dict[str, Any]: email = user_dict["email"] diff --git a/lib/tool_shed/test/base/populators.py b/lib/tool_shed/test/base/populators.py index dc2cddf1bdc1..7e2f93c3316f 100644 --- a/lib/tool_shed/test/base/populators.py +++ b/lib/tool_shed/test/base/populators.py @@ -132,16 +132,24 @@ def setup_test_data_repo( assert_ok=True, start: int = 0, end: Optional[int] = None, + category_id: Optional[str] = None, ) -> Repository: if repository is None: prefix = test_data_path.replace("_", "") - category_id = self.new_category(prefix=prefix).id + if category_id is None: + category_id = self.new_category(prefix=prefix).id repository = self.new_repository(category_id, prefix=prefix) self.setup_test_data_repo_by_id(test_data_path, repository, assert_ok=assert_ok, start=start, end=end) return repository - def setup_column_maker_repo(self, prefix=DEFAULT_PREFIX) -> Repository: - category_id = self.new_category(prefix=prefix).id + def setup_column_maker_repo( + self, + prefix=DEFAULT_PREFIX, + category_id: Optional[str] = None, + ) -> Repository: + if category_id is None: + category_id = self.new_category(prefix=prefix).id + assert category_id repository = self.new_repository(category_id, prefix=prefix) repository_id = repository.id assert repository_id @@ -410,6 +418,9 @@ def guid(self, repository: Repository, tool_id: str, tool_version: str) -> str: base = url.split("://")[1].split("/")[0] return f"{base}/repos/{repository.owner}/{repository.name}/{tool_id}/{tool_version}" + def new_user(self, username: str, password: str): + return ensure_user_with_email(self._admin_api_interactor, username, password) + def _repository_id(self, has_id: HasRepositoryId) -> str: if isinstance(has_id, Repository): return has_id.id diff --git a/scripts/bootstrap_test_shed.py b/scripts/bootstrap_test_shed.py new file mode 100644 index 000000000000..40f257ded9e4 --- /dev/null +++ b/scripts/bootstrap_test_shed.py @@ -0,0 +1,249 @@ +"""Script to bootstrap a tool shed server for development. + +- Create categories. +- Create some users. 
+- Create some repositories +""" + +import argparse +import os +import subprocess +import sys +import tempfile +from typing import ( + Any, + Dict, + List, + Optional, +) + +import requests + +sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, "lib"))) + + +from galaxy.tool_shed.util.hg_util import clone_repository +from tool_shed.test.base.api import ensure_user_with_email +from tool_shed.test.base.api_util import ( + create_user, + ShedApiInteractor, +) +from tool_shed.test.base.populators import ToolShedPopulator +from tool_shed_client.schema import ( + Category, + CreateRepositoryRequest, +) + +DESCRIPTION = "Script to bootstrap a tool shed server for development" +DEFAULT_USER = "jmchilton@gmail.com" +DEFAULT_USER_PASSWORD = "password123" # it is safe because of the 123 + +TEST_CATEGORY_NAME = "Testing Category" +TEST_CATEGORY_DESCRIPTION = "A longer description of the testing category" + +MAIN_SHED_URL = "https://toolshed.g2.bx.psu.edu/" +MAIN_SHED_API = f"{MAIN_SHED_URL}/api" + +CATEGORIES_TO_COPY = ["Data Export", "Climate Analysis", "Materials science"] + + +def main(argv: List[str]) -> None: + arg_parser = _arg_parser() + namespace = arg_parser.parse_args(argv) + populator = init_populator(namespace) + + category = populator.new_category_if_needed( + {"name": "Invalid Test Tools", "description": "A contains a repository with invalid tools."} + ) + populator.setup_bismark_repo(category_id=category.id) + + category = populator.new_category_if_needed({"name": "Test Category", "description": "A longer test description."}) + mirror_main_categories(populator) + mirror_main_users(populator) + + populator.new_user_if_needed({"email": "bob@bobsdomain.com"}) + populator.new_user_if_needed({"email": "alice@alicesdomain.com"}) + populator.new_user_if_needed({"email": "thirduser@threeis.com"}) + + populator.setup_column_maker_repo(prefix="bootstrap", category_id=category.id) + populator.setup_column_maker_repo(prefix="bootstrap2", category_id=category.id) + + main_categories = get_main_categories() + for category in main_categories: + category_id = category["id"] + category_name = category["name"] + if category_name in CATEGORIES_TO_COPY: + local_category = populator.get_category_with_name(category_name) + repos = get_main_repositories_for_category(category_id) + for repo in repos: + mirror_main_repository(populator, repo, local_category.id) + + +def get_main_categories() -> List[Dict[str, Any]]: + main_categories_endpoint = f"{MAIN_SHED_API}/categories" + main_categories = requests.get(main_categories_endpoint).json() + return main_categories + + +def get_main_users() -> List[Dict[str, Any]]: + main_users_endpoint = f"{MAIN_SHED_API}/users" + main_users = requests.get(main_users_endpoint).json() + return main_users + + +def get_main_repositories_for_category(category_id) -> List[Dict[str, Any]]: + main_category_repos_endpoint = f"{MAIN_SHED_API}/categories/{category_id}/repositories" + main_repos_for_category_response = requests.get(main_category_repos_endpoint) + main_repos_for_category = main_repos_for_category_response.json() + assert "repositories" in main_repos_for_category + return main_repos_for_category["repositories"] + + +class RemoteToolShedPopulator(ToolShedPopulator): + """Extend the tool shed populator with some state tracking... + + ... tailored toward bootstrapping dev instances instead of + for tests. 
+ """ + + _categories_by_name: Optional[Dict[str, Category]] = None + _users_by_username: Optional[Dict[str, Dict[str, Any]]] = None + _populators_by_username: Dict[str, "RemoteToolShedPopulator"] = {} + + def __init__(self, admin_interactor: ShedApiInteractor, user_interactor: ShedApiInteractor): + super().__init__(admin_interactor, user_interactor) + + def populator_for_user(self, username): + if username not in self._populators_by_username: + user = self.users_by_username[username] + assert user + mock_email = f"{username}@galaxyproject.org" + password = "testpass" + api_key = self._admin_api_interactor.create_api_key(mock_email, password) + user_interactor = ShedApiInteractor(self._admin_api_interactor.url, api_key) + self._populators_by_username[username] = RemoteToolShedPopulator( + self._admin_api_interactor, user_interactor + ) + return self._populators_by_username[username] + + @property + def categories_by_name(self) -> Dict[str, Category]: + if self._categories_by_name is None: + categories = self.get_categories() + self._categories_by_name = {c.name: c for c in categories} + return self._categories_by_name + + @property + def users_by_username(self) -> Dict[str, Dict[str, Any]]: + if self._users_by_username is None: + users_response = self._api_interactor.get("users") + if users_response.status_code == 400: + error_response = users_response.json() + raise Exception(str(error_response)) + users_response.raise_for_status() + users = users_response.json() + self._users_by_username = {u["username"]: u for u in users} + return self._users_by_username + + def new_category_if_needed(self, as_json: Dict[str, Any]) -> Category: + name = as_json["name"] + description = as_json["description"] + if name in self.categories_by_name: + return self.categories_by_name[name] + return self.new_category(name, description) + + def new_user_if_needed(self, as_json) -> Dict[str, Any]: + if "username" not in as_json: + email = as_json["email"] + as_json["username"] = email.split("@", 1)[0] + username = as_json["username"] + if username in self.users_by_username: + return self.users_by_username[username] + if "email" not in as_json: + mock_email = f"{username}@galaxyproject.org" + as_json["email"] = mock_email + request = {"username": as_json["username"], "email": as_json["email"]} + user = create_user(self._admin_api_interactor, request) + self.users_by_username[username] = user + return user + + +def mirror_main_categories(populator: RemoteToolShedPopulator): + main_categories = get_main_categories() + for category in main_categories: + populator.new_category_if_needed(category) + + +def mirror_main_users(populator: RemoteToolShedPopulator): + main_users = get_main_users() + for user in main_users: + populator.new_user_if_needed(user) + + +def mirror_main_repository(populator: RemoteToolShedPopulator, repository: Dict[str, Any], category_id: str): + # TODO: mirror the user + as_dict = repository.copy() + as_dict["category_ids"] = category_id + as_dict["synopsis"] = repository["description"] + request = CreateRepositoryRequest(**as_dict) + username = repository["owner"] + user_populator = populator.populator_for_user(username) + new_repository = None + try: + new_repository = user_populator.create_repository(request) + except AssertionError as e: + # if the problem is just a repository already + # exists, continue + err_msg = str(e) + if "already own" not in err_msg: + raise + if new_repository: + name = repository["name"] + clone_url = f"{MAIN_SHED_URL}/repos/{username}/{name}" + temp_dir = 
tempfile.mkdtemp() + clone_repository(clone_url, temp_dir) + url_base = populator._api_interactor.hg_url_base + prefix, rest = url_base.split("://", 1) + target = f"{prefix}://{username}@{rest}/repos/{username}/{name}" + try: + _push_to(temp_dir, target) + except Exception as e: + print(f"Problem cloning repository {e}, continuing bootstrap though...") + pass + populator.reset_metadata(new_repository) + + +def _push_to(repo_path: str, repo_target: str) -> None: + subprocess.check_output(["hg", "push", repo_target], cwd=repo_path) + + +def init_populator(namespace) -> RemoteToolShedPopulator: + admin_interactor = ShedApiInteractor( + namespace.shed_url, + namespace.admin_key, + ) + if namespace.user_key is None: + ensure_user_with_email(admin_interactor, DEFAULT_USER, DEFAULT_USER_PASSWORD) + user_key = admin_interactor.create_api_key(DEFAULT_USER, DEFAULT_USER_PASSWORD) + else: + user_key = namespace.user_key + + user_interactor = ShedApiInteractor(namespace.shed_url, user_key) + return RemoteToolShedPopulator( + admin_interactor, + user_interactor, + ) + + +def _arg_parser() -> argparse.ArgumentParser: + parser = argparse.ArgumentParser(description=DESCRIPTION) + parser.add_argument("-u", "--shed-url", default="http://localhost:9009", help="Tool Shed URL") + parser.add_argument("-a", "--admin-key", default="tsadminkey", help="Tool Shed Admin API Key") + parser.add_argument( + "-k", "--user-key", default=None, help="Tool Shed User API Key (will create a new user if unspecified)" + ) + return parser + + +if __name__ == "__main__": + main(sys.argv[1:]) From dd58bfcfe6b6f02bbc2beefc1e65add0efc32739 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Tue, 20 Dec 2022 14:21:55 -0500 Subject: [PATCH 63/73] Implement the GA4GH TRS API in the new tool shed. --- .../schemas/tool_shed_config_schema.yml | 52 +++++ lib/galaxy/webapps/base/api.py | 9 +- lib/tool_shed/managers/trs.py | 153 +++++++++++++ .../test/functional/test_shed_tools.py | 38 ++- lib/tool_shed/webapp/api2/tools.py | 68 ++++++ lib/tool_shed_client/schema/gen.sh | 13 ++ lib/tool_shed_client/schema/trs.py | 216 ++++++++++++++++++ .../schema/trs_service_info.py | 87 +++++++ lib/tool_shed_client/trs_util.py | 24 ++ pyproject.toml | 2 + test/unit/tool_shed/test_trs_tool.py | 21 ++ 11 files changed, 681 insertions(+), 2 deletions(-) create mode 100644 lib/tool_shed/managers/trs.py create mode 100755 lib/tool_shed_client/schema/gen.sh create mode 100644 lib/tool_shed_client/schema/trs.py create mode 100644 lib/tool_shed_client/schema/trs_service_info.py create mode 100644 lib/tool_shed_client/trs_util.py create mode 100644 test/unit/tool_shed/test_trs_tool.py diff --git a/lib/galaxy/config/schemas/tool_shed_config_schema.yml b/lib/galaxy/config/schemas/tool_shed_config_schema.yml index 116f0521f483..358215d70316 100644 --- a/lib/galaxy/config/schemas/tool_shed_config_schema.yml +++ b/lib/galaxy/config/schemas/tool_shed_config_schema.yml @@ -342,6 +342,58 @@ mapping: desc: | Address to join mailing list + ga4gh_service_id: + type: str + required: false + desc: | + Service ID for GA4GH services (exposed via the service-info endpoint for the Galaxy DRS API). + If unset, one will be generated using the URL the target API requests are made against. 
+ + For more information on GA4GH service definitions - check out + https://github.com/ga4gh-discovery/ga4gh-service-registry + and https://editor.swagger.io/?url=https://raw.githubusercontent.com/ga4gh-discovery/ga4gh-service-registry/develop/service-registry.yaml + + This value should likely reflect your service's URL. For instance for usegalaxy.org + this value should be org.usegalaxy. Particular Galaxy implementations will treat this + value as a prefix and append the service type to this ID. For instance for the DRS + service "id" (available via the DRS API) for the above configuration value would be + org.usegalaxy.drs. + + ga4gh_service_organization_name: + type: str + required: false + desc: | + Service name for host organization (exposed via the service-info endpoint for the Galaxy DRS API). + If unset, one will be generated using ga4gh_service_id. + + For more information on GA4GH service definitions - check out + https://github.com/ga4gh-discovery/ga4gh-service-registry + and https://editor.swagger.io/?url=https://raw.githubusercontent.com/ga4gh-discovery/ga4gh-service-registry/develop/service-registry.yaml + + ga4gh_service_organization_url: + type: str + required: False + desc: | + Organization URL for host organization (exposed via the service-info endpoint for the Galaxy DRS API). + If unset, one will be generated using the URL the target API requests are made against. + + For more information on GA4GH service definitions - check out + https://github.com/ga4gh-discovery/ga4gh-service-registry + and https://editor.swagger.io/?url=https://raw.githubusercontent.com/ga4gh-discovery/ga4gh-service-registry/develop/service-registry.yaml + + ga4gh_service_environment: + type: str + required: False + desc: | + Service environment (exposed via the service-info endpoint for the Galaxy DRS API) for + implemented GA4GH services. + + Suggested values are prod, test, dev, staging. + + For more information on GA4GH service definitions - check out + https://github.com/ga4gh-discovery/ga4gh-service-registry + and https://editor.swagger.io/?url=https://raw.githubusercontent.com/ga4gh-discovery/ga4gh-service-registry/develop/service-registry.yaml + use_heartbeat: type: bool default: true diff --git a/lib/galaxy/webapps/base/api.py b/lib/galaxy/webapps/base/api.py index 2835a2340faf..73056bc88a09 100644 --- a/lib/galaxy/webapps/base/api.py +++ b/lib/galaxy/webapps/base/api.py @@ -165,7 +165,14 @@ def get_error_response_for_request(request: Request, exc: MessageException) -> J if "ga4gh" in path: # When serving GA4GH APIs use limited exceptions to conform their expected # error schema. Tailored to DRS currently. 
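# For reference, the two GA4GH error envelopes produced by the branch below
# differ only in field names (status and message values illustrative):
#   DRS: {"status_code": 404, "msg": "Repository not found"}
#   TRS: {"code": 404, "message": "Repository not found"}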
- content = {"status_code": status_code, "msg": error_dict["err_msg"]} + message = error_dict["err_msg"] + if "drs" in path: + content = {"status_code": status_code, "msg": message} + elif "trs" in path: + content = {"code": status_code, "message": message} + else: + # unknown schema - just yield the most useful error message + content = error_dict else: content = error_dict diff --git a/lib/tool_shed/managers/trs.py b/lib/tool_shed/managers/trs.py new file mode 100644 index 000000000000..c36488bdb361 --- /dev/null +++ b/lib/tool_shed/managers/trs.py @@ -0,0 +1,153 @@ +from typing import ( + Any, + cast, + Dict, + List, + Optional, + Tuple, +) + +from starlette.datastructures import URL + +from galaxy.exceptions import ObjectNotFound +from galaxy.util.tool_shed.common_util import remove_protocol_and_user_from_clone_url +from galaxy.version import VERSION +from tool_shed.context import ProvidesRepositoriesContext +from tool_shed.structured_app import ToolShedApp +from tool_shed.util.metadata_util import get_current_repository_metadata_for_changeset_revision +from tool_shed.webapp.model import ( + Repository, + RepositoryMetadata, +) +from tool_shed_client.schema.trs import ( + DescriptorType, + Tool, + ToolClass, + ToolVersion, +) +from tool_shed_client.schema.trs_service_info import ( + Organization, + Service, + ServiceType, +) +from tool_shed_client.trs_util import decode_identifier +from .repositories import guid_to_repository + +TRS_SERVICE_NAME = "Tool Shed TRS API" +TRS_SERVICE_DESCRIPTION = "Serves tool shed repository tools according to the GA4GH TRS specification" + + +def service_info(app: ToolShedApp, request_url: URL): + components = request_url.components + hostname = components.hostname + assert hostname + default_organization_id = ".".join(reversed(hostname.split("."))) + config = app.config + organization_id = cast(str, config.ga4gh_service_id or default_organization_id) + organization_name = cast(str, config.ga4gh_service_organization_name or organization_id) + organization_url = cast(str, config.ga4gh_service_organization_url or f"{components.scheme}://{components.netloc}") + + organization = Organization( + url=organization_url, + name=organization_name, + ) + service_type = ServiceType( + group="org.ga4gh", + artifact="trs", + version="2.1.0", + ) + environment = config.ga4gh_service_environment + extra_kwds = {} + if environment: + extra_kwds["environment"] = environment + return Service( + id=organization_id + ".trs", + name=TRS_SERVICE_NAME, + description=TRS_SERVICE_DESCRIPTION, + organization=organization, + type=service_type, + version=VERSION, + **extra_kwds, + ) + + +def tool_classes() -> List[ToolClass]: + return [ToolClass(id="galaxy_tool", name="Galaxy Tool", description="Galaxy XML Tools")] + + +def trs_tool_id_to_repository(trans: ProvidesRepositoriesContext, trs_tool_id: str) -> Repository: + guid = decode_identifier(trans.repositories_hostname, trs_tool_id) + guid = remove_protocol_and_user_from_clone_url(guid) + return guid_to_repository(trans.app, guid) + + +def get_repository_metadata_by_tool_version( + app: ToolShedApp, repository: Repository, tool_id: str +) -> Dict[str, RepositoryMetadata]: + versions = {} + for _, changeset in repository.installable_revisions(app): + metadata = get_current_repository_metadata_for_changeset_revision(app, repository, changeset) + tools: Optional[List[Dict[str, Any]]] = metadata.metadata.get("tools") + if not tools: + continue + for tool_metadata in tools: + if tool_metadata["id"] != tool_id: + continue + 
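                # a matching tool id: index this changeset's metadata record
                # under the tool's version string, building e.g. (illustrative)
                #   {"1.1.0": <RepositoryMetadata rev 2>, "1.2.0": <RepositoryMetadata rev 5>}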
versions[tool_metadata["version"]] = metadata + return versions + + +def get_tools_for(repository_metadata: RepositoryMetadata) -> List[Dict[str, Any]]: + tools: Optional[List[Dict[str, Any]]] = repository_metadata.metadata.get("tools") + assert tools + return tools + + +def trs_tool_id_to_repository_metadata( + trans: ProvidesRepositoriesContext, trs_tool_id: str +) -> Optional[Tuple[Repository, Dict[str, RepositoryMetadata]]]: + tool_guid = decode_identifier(trans.repositories_hostname, trs_tool_id) + tool_guid = remove_protocol_and_user_from_clone_url(tool_guid) + _, tool_id = tool_guid.rsplit("/", 1) + repository = guid_to_repository(trans.app, tool_guid) + app = trans.app + versions: Dict[str, RepositoryMetadata] = get_repository_metadata_by_tool_version(app, repository, tool_id) + if not versions: + return None + + return repository, versions + + +def get_tool(trans: ProvidesRepositoriesContext, trs_tool_id: str) -> Tool: + guid = decode_identifier(trans.repositories_hostname, trs_tool_id) + guid = remove_protocol_and_user_from_clone_url(guid) + repo_metadata = trs_tool_id_to_repository_metadata(trans, trs_tool_id) + if not repo_metadata: + raise ObjectNotFound() + repository, metadata_by_version = repo_metadata + + repo_owner = repository.user.username + aliases: List[str] = [guid] + hostname = remove_protocol_and_user_from_clone_url(trans.repositories_hostname) + url = f"https://{hostname}/repos/{repo_owner}/{repository.name}" + + versions: List[ToolVersion] = [] + for tool_version_str, _ in metadata_by_version.items(): + version_url = url # TODO: + tool_version = ToolVersion( + author=[repo_owner], + containerfile=False, + descriptor_type=[DescriptorType.GALAXY], + id=tool_version_str, + url=version_url, + verified=False, + ) + versions.append(tool_version) + return Tool( + aliases=aliases, + id=trs_tool_id, + url=url, + toolclass=tool_classes()[0], + organization=repo_owner, + versions=versions, + ) diff --git a/lib/tool_shed/test/functional/test_shed_tools.py b/lib/tool_shed/test/functional/test_shed_tools.py index 103cfeecfbeb..8d91ce088b73 100644 --- a/lib/tool_shed/test/functional/test_shed_tools.py +++ b/lib/tool_shed/test/functional/test_shed_tools.py @@ -1,4 +1,12 @@ -from ..base.api import ShedApiTestCase +from tool_shed_client.schema.trs import ( + Tool, + ToolClass, +) +from tool_shed_client.trs_util import encode_identifier +from ..base.api import ( + ShedApiTestCase, + skip_if_api_v1, +) class TestShedToolsApi(ShedApiTestCase): @@ -32,3 +40,31 @@ def test_tool_search(self): # but if this tool has been installed a bunch by other tests - it might not be. 
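        # (find_search_hit presumably comes back empty in that case, so the
        # assertion below is where such a collision would surface)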
tool_search_hit = response.find_search_hit(repository) assert tool_search_hit + + @skip_if_api_v1 + def test_trs_service_info(self): + service_info = self.api_interactor.get("ga4gh/trs/v2/service-info") + service_info.raise_for_status() + + @skip_if_api_v1 + def test_trs_tool_classes(self): + classes_response = self.api_interactor.get("ga4gh/trs/v2/toolClasses") + classes_response.raise_for_status() + classes = classes_response.json() + assert isinstance(classes, list) + assert len(classes) == 1 + class0 = classes[0] + assert ToolClass(**class0) + + @skip_if_api_v1 + def test_trs_tool_list(self): + populator = self.populator + repository = populator.setup_column_maker_repo(prefix="toolstrsindex") + tool_id = populator.tool_guid(self, repository, "Add_a_column1") + tool_shed_base, encoded_tool_id = encode_identifier(tool_id) + print(encoded_tool_id) + url = f"ga4gh/trs/v2/tools/{encoded_tool_id}" + print(url) + tool_response = self.api_interactor.get(url) + tool_response.raise_for_status() + assert Tool(**tool_response.json()) diff --git a/lib/tool_shed/webapp/api2/tools.py b/lib/tool_shed/webapp/api2/tools.py index b93502ed9e8e..0d8c2f2d5524 100644 --- a/lib/tool_shed/webapp/api2/tools.py +++ b/lib/tool_shed/webapp/api2/tools.py @@ -1,8 +1,27 @@ +import logging +from typing import List + +from fastapi import ( + Path, + Request, +) + from tool_shed.context import SessionRequestContext from tool_shed.managers.tools import search +from tool_shed.managers.trs import ( + get_tool, + service_info, + tool_classes, +) from tool_shed.structured_app import ToolShedApp from tool_shed.util.shed_index import build_index from tool_shed_client.schema import BuildSearchIndexResponse +from tool_shed_client.schema.trs import ( + Tool, + ToolClass, + ToolVersion, +) +from tool_shed_client.schema.trs_service_info import Service from . import ( depends, DependsOnTrans, @@ -12,8 +31,16 @@ ToolsIndexQueryParam, ) +log = logging.getLogger(__name__) + router = Router(tags=["tools"]) +TOOL_ID_PATH_PARAM: str = Path( + ..., + title="GA4GH TRS Tool ID", + description="See also https://ga4gh.github.io/tool-registry-service-schemas/DataModel/#trs-tool-and-trs-tool-version-ids", +) + @router.cbv class FastAPITools: @@ -53,3 +80,44 @@ def build_search_index(self) -> BuildSearchIndexResponse: repositories_indexed=repos_indexed, tools_indexed=tools_indexed, ) + + @router.get("/api/ga4gh/trs/v2/service-info", operation_id="tools_trs_service_info") + def service_info(self, request: Request) -> Service: + return service_info(self.app, request.url) + + @router.get("/api/ga4gh/trs/v2/toolClasses", operation_id="tools__trs_tool_classes") + def tool_classes(self) -> List[ToolClass]: + return tool_classes() + + @router.get( + "/api/ga4gh/trs/v2/tools", + operation_id="tools__trs_index", + ) + def trs_index( + self, + ): + # we probably want to be able to query the database at the + # tool level and such to do this right? 
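        # Until then this endpoint is a stub that advertises no tools; the
        # per-tool routes below (tools/{tool_id} and tools/{tool_id}/versions)
        # do resolve real repository tools.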
+ return [] + + @router.get( + "/api/ga4gh/trs/v2/tools/{tool_id}", + operation_id="tools__trs_get", + ) + def trs_get( + self, + trans: SessionRequestContext = DependsOnTrans, + tool_id: str = TOOL_ID_PATH_PARAM, + ) -> Tool: + return get_tool(trans, tool_id) + + @router.get( + "/api/ga4gh/trs/v2/tools/{tool_id}/versions", + operation_id="tools__trs_get_versions", + ) + def trs_get_versions( + self, + trans: SessionRequestContext = DependsOnTrans, + tool_id: str = TOOL_ID_PATH_PARAM, + ) -> List[ToolVersion]: + return get_tool(trans, tool_id).versions diff --git a/lib/tool_shed_client/schema/gen.sh b/lib/tool_shed_client/schema/gen.sh new file mode 100755 index 000000000000..d6ee47360c55 --- /dev/null +++ b/lib/tool_shed_client/schema/gen.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +# must be run from a virtualenv with... +# https://github.com/koxudaxi/datamodel-code-generator +#for model in AccessMethod Checksum DrsObject Error AccessURL ContentsObject DrsService +#do +# datamodel-codegen --url "https://raw.githubusercontent.com/ga4gh/tool-registry-service-schemas/develop/openapi/ga4gh-tool-discovery.yaml" --output "$model.py" +#one + +#datamodel-codegen --url "https://raw.githubusercontent.com/ga4gh-discovery/ga4gh-service-info/v1.0.0/service-info.yaml#/components/schemas/Service" --output Service.py + +datamodel-codegen --url "https://raw.githubusercontent.com/ga4gh-discovery/ga4gh-service-info/v1.0.0/service-info.yaml#/paths/~1service-info" --output trs_service_info.py +datamodel-codegen --url "https://raw.githubusercontent.com/ga4gh/tool-registry-service-schemas/develop/openapi/openapi.yaml" --output trs.py diff --git a/lib/tool_shed_client/schema/trs.py b/lib/tool_shed_client/schema/trs.py new file mode 100644 index 000000000000..fceff90e76f7 --- /dev/null +++ b/lib/tool_shed_client/schema/trs.py @@ -0,0 +1,216 @@ +# generated by datamodel-codegen: +# filename: https://raw.githubusercontent.com/ga4gh/tool-registry-service-schemas/develop/openapi/openapi.yaml +# timestamp: 2022-12-20T21:01:58+00:00 + +from __future__ import annotations + +from enum import Enum +from typing import ( + Dict, + List, + Optional, + Union, +) + +from pydantic import ( + BaseModel, + Field, +) + + +class Checksum(BaseModel): + checksum: str = Field(..., description="The hex-string encoded checksum for the data. ") + type: str = Field( + ..., + description="The digest method used to create the checksum.\nThe value (e.g. `sha-256`) SHOULD be listed as `Hash Name String` in the https://github.com/ga4gh-discovery/ga4gh-checksum/blob/master/hash-alg.csv[GA4GH Checksum Hash Algorithm Registry].\nOther values MAY be used, as long as implementors are aware of the issues discussed in https://tools.ietf.org/html/rfc6920#section-9.4[RFC6920].\nGA4GH may provide more explicit guidance for use of non-IANA-registered algorithms in the future.", + ) + + +class FileType(Enum): + TEST_FILE = "TEST_FILE" + PRIMARY_DESCRIPTOR = "PRIMARY_DESCRIPTOR" + SECONDARY_DESCRIPTOR = "SECONDARY_DESCRIPTOR" + CONTAINERFILE = "CONTAINERFILE" + OTHER = "OTHER" + + +class ToolFile(BaseModel): + path: Optional[str] = Field( + None, + description="Relative path of the file. 
A descriptor's path can be used with the GA4GH .../{type}/descriptor/{relative_path} endpoint.", + ) + file_type: Optional[FileType] = None + checksum: Optional[Checksum] = None + + +class ToolClass(BaseModel): + id: Optional[str] = Field(None, description="The unique identifier for the class.") + name: Optional[str] = Field(None, description="A short friendly name for the class.") + description: Optional[str] = Field( + None, description="A longer explanation of what this class is and what it can accomplish." + ) + + +class ImageType(Enum): + Docker = "Docker" + Singularity = "Singularity" + Conda = "Conda" + + +class DescriptorType(Enum): + CWL = "CWL" + WDL = "WDL" + NFL = "NFL" + GALAXY = "GALAXY" + SMK = "SMK" + + +class DescriptorTypeVersion(BaseModel): + __root__: str = Field( + ..., + description="The language version for a given descriptor type. The version should correspond to the actual declared version of the descriptor. For example, tools defined in CWL could have a version of `v1.0.2` whereas WDL tools may have a version of `1.0` or `draft-2`", + ) + + +class DescriptorTypeWithPlain(Enum): + CWL = "CWL" + WDL = "WDL" + NFL = "NFL" + GALAXY = "GALAXY" + SMK = "SMK" + PLAIN_CWL = "PLAIN_CWL" + PLAIN_WDL = "PLAIN_WDL" + PLAIN_NFL = "PLAIN_NFL" + PLAIN_GALAXY = "PLAIN_GALAXY" + PLAIN_SMK = "PLAIN_SMK" + + +class FileWrapper(BaseModel): + content: Optional[str] = Field( + None, description="The content of the file itself. One of url or content is required." + ) + checksum: Optional[List[Checksum]] = Field( + None, + description="A production (immutable) tool version is required to have a hashcode. Not required otherwise, but might be useful to detect changes. ", + example=[{"checksum": "ea2a5db69bd20a42976838790bc29294df3af02b", "type": "sha1"}], + ) + image_type: Optional[Union[ImageType, DescriptorType]] = Field( + None, description="Optionally return additional information on the type of file this is" + ) + url: Optional[str] = Field( + None, + description="Optional url to the underlying content, should include version information, and can include a git hash. Note that this URL should resolve to the raw unwrapped content that would otherwise be available in content. One of url or content is required.", + example={ + "descriptorfile": { + "url": "https://raw.githubusercontent.com/ICGC-TCGA-PanCancer/pcawg_delly_workflow/ea2a5db69bd20a42976838790bc29294df3af02b/delly_docker/Delly.cwl" + }, + "containerfile": { + "url": "https://raw.githubusercontent.com/ICGC-TCGA-PanCancer/pcawg_delly_workflow/c83478829802b4d36374870843821abe1b625a71/delly_docker/Dockerfile" + }, + }, + ) + + +class Error(BaseModel): + code: int + message: Optional[str] = "Internal Server Error" + + +class ImageData(BaseModel): + registry_host: Optional[str] = Field( + None, + description="A docker registry or a URL to a Singularity registry. Used along with image_name to locate a specific image.", + example=["registry.hub.docker.com"], + ) + image_name: Optional[str] = Field( + None, + description="Used in conjunction with a registry_url if provided to locate images.", + example=["quay.io/seqware/seqware_full/1.1", "ubuntu:latest"], + ) + size: Optional[int] = Field(None, description="Size of the container in bytes.") + updated: Optional[str] = Field(None, description="Last time the container was updated.") + checksum: Optional[List[Checksum]] = Field( + None, + description="A production (immutable) tool version is required to have a hashcode. Not required otherwise, but might be useful to detect changes. 
This exposes the hashcode for specific image versions to verify that the container version pulled is actually the version that was indexed by the registry.", + example=[{"checksum": "77af4d6b9913e693e8d0b4b294fa62ade6054e6b2f1ffb617ac955dd63fb0182", "type": "sha256"}], + ) + image_type: Optional[ImageType] = None + + +class ToolVersion(BaseModel): + author: Optional[List[str]] = Field( + None, + description="Contact information for the author of this version of the tool in the registry. (More complex authorship information is handled by the descriptor).", + ) + name: Optional[str] = Field(None, description="The name of the version.") + url: str = Field( + ..., + description="The URL for this tool version in this registry.", + example="http://agora.broadinstitute.org/tools/123456/versions/1", + ) + id: str = Field( + ..., description="An identifier of the version of this tool for this particular tool registry.", example="v1" + ) + is_production: Optional[bool] = Field( + None, + description="This version of a tool is guaranteed to not change over time (for example, a tool built from a tag in git as opposed to a branch). A production quality tool is required to have a checksum", + ) + images: Optional[List[ImageData]] = Field( + None, + description="All known docker images (and versions/hashes) used by this tool. If the tool has to evaluate any of the docker images strings at runtime, those ones cannot be reported here.", + ) + descriptor_type: Optional[List[DescriptorType]] = Field( + None, description="The type (or types) of descriptors available." + ) + descriptor_type_version: Optional[Dict[str, List[DescriptorTypeVersion]]] = Field( + None, + description="A map providing information about the language versions used in this tool. The keys should be the same values used in the `descriptor_type` field, and the value should be an array of all the language versions used for the given `descriptor_type`. Depending on the `descriptor_type` (e.g. CWL) multiple version values may be used in a single tool.", + example='{\n "WDL": ["1.0", "1.0"],\n "CWL": ["v1.0.2"],\n "NFL": ["DSL2"]\n}\n', + ) + containerfile: Optional[bool] = Field( + None, + description="Reports if this tool has a containerfile available. (For Docker-based tools, this would indicate the presence of a Dockerfile)", + ) + meta_version: Optional[str] = Field( + None, + description="The version of this tool version in the registry. Iterates when fields like the description, author, etc. are updated.", + ) + verified: Optional[bool] = Field( + None, description="Reports whether this tool has been verified by a specific organization or individual." + ) + verified_source: Optional[List[str]] = Field( + None, description="Source of metadata that can support a verified tool, such as an email or URL." 
+ ) + signed: Optional[bool] = Field(None, description="Reports whether this version of the tool has been signed.") + included_apps: Optional[List[str]] = Field( + None, + description="An array of IDs for the applications that are stored inside this tool.", + example=["https://bio.tools/tool/mytum.de/SNAP2/1", "https://bio.tools/bioexcel_seqqc"], + ) + + +class Tool(BaseModel): + url: str = Field( + ..., + description="The URL for this tool in this registry.", + example="http://agora.broadinstitute.org/tools/123456", + ) + id: str = Field(..., description="A unique identifier of the tool, scoped to this registry.", example=123456) + aliases: Optional[List[str]] = Field( + None, + description="Support for this parameter is optional for tool registries that support aliases.\nA list of strings that can be used to identify this tool which could be straight up URLs. \nThis can be used to expose alternative ids (such as GUIDs) for a tool\nfor registries. Can be used to match tools across registries.", + ) + organization: str = Field(..., description="The organization that published the image.") + name: Optional[str] = Field(None, description="The name of the tool.") + toolclass: ToolClass + description: Optional[str] = Field(None, description="The description of the tool.") + meta_version: Optional[str] = Field( + None, + description="The version of this tool in the registry. Iterates when fields like the description, author, etc. are updated.", + ) + has_checker: Optional[bool] = Field(None, description="Whether this tool has a checker tool associated with it.") + checker_url: Optional[str] = Field( + None, + description="Optional url to the checker tool that will exit successfully if this tool produced the expected result given test data.", + ) + versions: List[ToolVersion] = Field(..., description="A list of versions for this tool.") diff --git a/lib/tool_shed_client/schema/trs_service_info.py b/lib/tool_shed_client/schema/trs_service_info.py new file mode 100644 index 000000000000..68b2f04287b6 --- /dev/null +++ b/lib/tool_shed_client/schema/trs_service_info.py @@ -0,0 +1,87 @@ +# generated by datamodel-codegen: +# filename: https://raw.githubusercontent.com/ga4gh-discovery/ga4gh-service-info/v1.0.0/service-info.yaml#/paths/~1service-info +# timestamp: 2022-12-20T21:01:57+00:00 + +from __future__ import annotations + +from datetime import datetime +from typing import Optional + +from pydantic import ( + AnyUrl, + BaseModel, + Field, +) + + +class Organization(BaseModel): + name: str = Field( + ..., description="Name of the organization responsible for the service", example="My organization" + ) + url: AnyUrl = Field( + ..., description="URL of the website of the organization (RFC 3986 format)", example="https://example.com" + ) + + +class ServiceType(BaseModel): + group: str = Field( + ..., + description="Namespace in reverse domain name format. Use `org.ga4gh` for implementations compliant with official GA4GH specifications. For services with custom APIs not standardized by GA4GH, or implementations diverging from official GA4GH specifications, use a different namespace (e.g. your organization's reverse domain name).", + example="org.ga4gh", + ) + artifact: str = Field( + ..., + description="Name of the API or GA4GH specification implemented. Official GA4GH types should be assigned as part of standards approval process. Custom artifacts are supported.", + example="beacon", + ) + version: str = Field( + ..., + description="Version of the API or specification. 
GA4GH specifications use semantic versioning.", + example="1.0.0", + ) + + +class Service(BaseModel): + id: str = Field( + ..., + description="Unique ID of this service. Reverse domain name notation is recommended, though not required. The identifier should attempt to be globally unique so it can be used in downstream aggregator services e.g. Service Registry.", + example="org.ga4gh.myservice", + ) + name: str = Field(..., description="Name of this service. Should be human readable.", example="My project") + type: ServiceType + description: Optional[str] = Field( + None, + description="Description of the service. Should be human readable and provide information about the service.", + example="This service provides...", + ) + organization: Organization = Field(..., description="Organization providing the service") + contactUrl: Optional[AnyUrl] = Field( + None, + description="URL of the contact for the provider of this service, e.g. a link to a contact form (RFC 3986 format), or an email (RFC 2368 format).", + example="mailto:support@example.com", + ) + documentationUrl: Optional[AnyUrl] = Field( + None, + description="URL of the documentation of this service (RFC 3986 format). This should help someone learn how to use your service, including any specifics required to access data, e.g. authentication.", + example="https://docs.myservice.example.com", + ) + createdAt: Optional[datetime] = Field( + None, + description="Timestamp describing when the service was first deployed and available (RFC 3339 format)", + example="2019-06-04T12:58:19Z", + ) + updatedAt: Optional[datetime] = Field( + None, + description="Timestamp describing when the service was last updated (RFC 3339 format)", + example="2019-06-04T12:58:19Z", + ) + environment: Optional[str] = Field( + None, + description="Environment the service is running in. Use this to distinguish between production, development and testing/staging deployments. Suggested values are prod, test, dev, staging. However this is advised and not enforced.", + example="test", + ) + version: str = Field( + ..., + description="Version of the service being described. Semantic versioning is recommended, but other identifiers, such as dates or commit hashes, are also allowed. The version should be changed whenever the service is updated.", + example="1.0.0", + ) diff --git a/lib/tool_shed_client/trs_util.py b/lib/tool_shed_client/trs_util.py new file mode 100644 index 000000000000..a9d46238b96a --- /dev/null +++ b/lib/tool_shed_client/trs_util.py @@ -0,0 +1,24 @@ +from typing import NamedTuple + + +class EncodedIdentifier(NamedTuple): + tool_shed_base: str + encoded_id: str + + +# TRS specified encoding/decoding according to... +# https://datatracker.ietf.org/doc/html/rfc3986#section-2.4 +# Failed to get whole tool shed IDs working with FastAPI +# - https://github.com/tiangolo/fastapi/issues/791#issuecomment-742799299 +# - urllib.parse.quote(identifier, safe='') will produce the URL fragements but +# but FastAPI eat them. 
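# Worked example of the "~" scheme implemented below (hypothetical shed URL):
#   encode_identifier("https://shed.example/repos/iuc/bismark/bismark_bowtie2")
#     -> EncodedIdentifier("https://shed.example", "iuc~bismark~bismark_bowtie2")
#   decode_identifier("https://shed.example", "iuc~bismark~bismark_bowtie2")
#     -> "https://shed.example/repos/iuc/bismark/bismark_bowtie2"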
+ + +def decode_identifier(tool_shed_base: str, quoted_tool_id: str) -> str: + suffix = "/".join(quoted_tool_id.split("~")) + return f"{tool_shed_base}/repos/{suffix}" + + +def encode_identifier(identifier: str) -> EncodedIdentifier: + base, rest = identifier.split("/repos/", 1) + return EncodedIdentifier(base, "~".join(rest.split("/"))) diff --git a/pyproject.toml b/pyproject.toml index 6ab9b16296be..e99a973160f1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -202,3 +202,5 @@ relative-imports-order = "closest-to-furthest" # Don't check some pyupgrade rules on generated files "lib/galaxy/schema/bco/*" = ["UP006", "UP007"] "lib/galaxy/schema/drs/*" = ["UP006", "UP007"] +"lib/tool_shed_client/schema/trs.py" = ["UP006", "UP007"] +"lib/tool_shed_client/schema/trs_service_info.py" = ["UP006", "UP007"] diff --git a/test/unit/tool_shed/test_trs_tool.py b/test/unit/tool_shed/test_trs_tool.py new file mode 100644 index 000000000000..79aef77bca4f --- /dev/null +++ b/test/unit/tool_shed/test_trs_tool.py @@ -0,0 +1,21 @@ +from tool_shed.context import ProvidesRepositoriesContext +from tool_shed.managers.trs import get_tool +from tool_shed.webapp.model import Repository +from tool_shed_client.schema.trs import Tool +from ._util import upload_directories_to_repository + + +def test_get_tool(provides_repositories: ProvidesRepositoriesContext, new_repository: Repository): + upload_directories_to_repository(provides_repositories, new_repository, "column_maker") + owner = new_repository.user.username + name = new_repository.name + encoded_id = f"{owner}~{name}~Add_a_column1" + tool: Tool = get_tool(provides_repositories, encoded_id) + assert tool + assert tool.organization == owner + assert tool.id == encoded_id + assert tool.aliases + assert tool.aliases[0] == f"localhost/repos/{owner}/{name}/Add_a_column1" + + tool_versions = tool.versions + assert len(tool_versions) == 3 From b5fb7c04e606fbf54fbab1c20f03680c6f50f739 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Tue, 24 Jan 2023 17:40:30 -0500 Subject: [PATCH 64/73] Category test stuff... --- lib/tool_shed/test/base/twilltestcase.py | 11 +++++------ .../test_0050_circular_dependencies_4_levels.py | 2 ++ 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/lib/tool_shed/test/base/twilltestcase.py b/lib/tool_shed/test/base/twilltestcase.py index 312091e7a113..fc5e8fedd0dd 100644 --- a/lib/tool_shed/test/base/twilltestcase.py +++ b/lib/tool_shed/test/base/twilltestcase.py @@ -986,13 +986,12 @@ def commit_and_push(self, repository, hgrepo, options, username, password): raise def create_category(self, **kwd) -> Category: - category = self.populator.get_category_with_name(kwd["name"]) + category_name = kwd["name"] + category = self.populator.get_category_with_name(category_name) if category is None: - params = {"operation": "create"} - self.visit_url("/admin/manage_categories", params=params) - self.submit_form(button="create_category_button", **kwd) - category = self.populator.get_category_with_name(kwd["name"]) - assert category + # not recreating this functionality in the UI I don't think? 
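            # fall back to the typed populator API rather than driving an
            # admin form through the browser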
+ category = self.populator.new_category(category_name) + return category return category def create_repository_dependency( diff --git a/lib/tool_shed/test/functional/test_0050_circular_dependencies_4_levels.py b/lib/tool_shed/test/functional/test_0050_circular_dependencies_4_levels.py index 6a0609262b8b..bbddfa0c219e 100644 --- a/lib/tool_shed/test/functional/test_0050_circular_dependencies_4_levels.py +++ b/lib/tool_shed/test/functional/test_0050_circular_dependencies_4_levels.py @@ -68,6 +68,8 @@ def test_0010_create_column_repository(self): category=category, strings_displayed=[], ) + assert repository + assert repository.id self.commit_tar_to_repository( repository, "column_maker/column_maker.tar", From c98fa8c981ba09929c2068d46b3dcdf8cd3c98aa Mon Sep 17 00:00:00 2001 From: John Chilton Date: Tue, 24 Jan 2023 17:39:03 -0500 Subject: [PATCH 65/73] Category reworking.. --- lib/tool_shed/webapp/api/categories.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/lib/tool_shed/webapp/api/categories.py b/lib/tool_shed/webapp/api/categories.py index f9dba0a0d55d..b0eb31cf4cca 100644 --- a/lib/tool_shed/webapp/api/categories.py +++ b/lib/tool_shed/webapp/api/categories.py @@ -11,7 +11,6 @@ util, web, ) -from galaxy.model.base import transaction from galaxy.web import ( expose_api, expose_api_anonymous_and_sessionless, @@ -52,11 +51,8 @@ def create(self, trans, payload, **kwd): description=payload.get("description", ""), ) category: Category = self.category_manager.create(trans, request) - category_dict = category.to_dict(view="element", value_mapper=get_value_mapper(trans.app)) + category_dict = self.category_manager.to_dict(category) category_dict["message"] = f"Category '{str(category.name)}' has been created" - category_dict["url"] = web.url_for( - controller="categories", action="show", id=trans.security.encode_id(category.id) - ) return category_dict @expose_api_anonymous_and_sessionless From 0d42ad7967c51e677f26d43c5210fbf90928c3c0 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Tue, 24 Jan 2023 17:43:43 -0500 Subject: [PATCH 66/73] bismark populator improvement... --- lib/tool_shed/test/base/populators.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/lib/tool_shed/test/base/populators.py b/lib/tool_shed/test/base/populators.py index 7e2f93c3316f..9b63e1f7a559 100644 --- a/lib/tool_shed/test/base/populators.py +++ b/lib/tool_shed/test/base/populators.py @@ -92,8 +92,16 @@ def __init__(self, admin_api_interactor: ShedApiInteractor, api_interactor: Shed self._admin_api_interactor = admin_api_interactor self._api_interactor = api_interactor - def setup_bismark_repo(self, repository_id: HasRepositoryId, end: Optional[int] = None): - self.setup_test_data_repo_by_id("bismark", repository_id, assert_ok=False, end=end) + def setup_bismark_repo( + self, + repository_id: Optional[HasRepositoryId] = None, + end: Optional[int] = None, + category_id: Optional[str] = None, + ) -> HasRepositoryId: + if repository_id is None: + category_id = category_id or self.new_category(prefix="testbismark").id + repository_id = self.new_repository(category_id, prefix="testbismark") + return self.setup_test_data_repo_by_id("bismark", repository_id, assert_ok=False, end=end) def setup_test_data_repo_by_id( self, From 06667470b6192110b968dd3f1ec5c9d398da7eef Mon Sep 17 00:00:00 2001 From: John Chilton Date: Thu, 2 Feb 2023 11:35:03 -0500 Subject: [PATCH 67/73] Modernized framework to replace tool shed functional tests. 
Goals: - Replace direct database access with pydantic typed API requests. - Replace Twill with playwright. - Eliminate access to the Galaxy UI in the tests - the tests should assume API access from the installing party. pytest-playwright requires small bits of #13909. This is a worthy project that could let us remove a bunch of deprecated stuff from the Galaxy admin controllers and a bunch of mako stuff that is unused and could be used to test PRs like #14609 (which prompted me to do this) but I'm anxious about growing emotionally attached to code I want to remove and I'm worried about losing track of which helpers are required for Planemo/Emphemeris/Galaxy and which helpers are just being used to test the tool shed. --- .github/workflows/toolshed.yaml | 18 ++- lib/galaxy/dependencies/dev-requirements.txt | 1 + lib/tool_shed/test/base/playwrightbrowser.py | 153 ++++++++++++++++++ lib/tool_shed/test/base/twilltestcase.py | 14 +- lib/tool_shed/test/functional/conftest.py | 46 ++++++ .../test_0000_basic_repository_features.py | 71 +++++--- pyproject.toml | 1 + 7 files changed, 277 insertions(+), 27 deletions(-) create mode 100644 lib/tool_shed/test/base/playwrightbrowser.py create mode 100644 lib/tool_shed/test/functional/conftest.py diff --git a/.github/workflows/toolshed.yaml b/.github/workflows/toolshed.yaml index c9ee6c6a10eb..cc54bf43c067 100644 --- a/.github/workflows/toolshed.yaml +++ b/.github/workflows/toolshed.yaml @@ -22,7 +22,12 @@ jobs: matrix: python-version: ['3.7'] test-install-client: ['standalone', 'galaxy_api'] - shed-api: ['v1', 'v2'] + # v1 is mostly working... + shed-api: ['v1'] + # lets get twill working with twill then try to + # make progress on the playwright + # shed-browser: ['twill', 'playwright'] + shed-browser: ['playwright'] services: postgres: image: postgres:13 @@ -54,11 +59,22 @@ jobs: with: path: 'galaxy root/.venv' key: gxy-venv-${{ runner.os }}-${{ steps.full-python-version.outputs.version }}-${{ hashFiles('galaxy root/requirements.txt') }}-toolshed + key: gxy-venv-${{ runner.os }}-${{ steps.full-python-version.outputs.version }}-${{ hashFiles('galaxy_root/requirements.txt') }}-toolshed + - name: Install dependencies + run: ./scripts/common_startup.sh --skip-client-build + working-directory: 'galaxy root' + - name: Build Frontend + run: '. .venv/bin/activate && cd lib/tool_shed/webapp/frontend && yarn && make client' + working-directory: 'galaxy root' + - name: Install playwright + run: '. 
.venv/bin/activate && playwright install'
+        working-directory: 'galaxy root'
       - name: Run tests
         run: './run_tests.sh -toolshed'
         env:
           TOOL_SHED_TEST_INSTALL_CLIENT: ${{ matrix.test-install-client }}
           TOOL_SHED_API_VERSION: ${{ matrix.shed-api }}
+          TOOL_SHED_TEST_BROWSER: ${{ matrix.shed-browser }}
         working-directory: 'galaxy root'
       - uses: actions/upload-artifact@v3
         if: failure()
diff --git a/lib/galaxy/dependencies/dev-requirements.txt b/lib/galaxy/dependencies/dev-requirements.txt
index 2ef5e06b919b..6d417c6fe1fa 100644
--- a/lib/galaxy/dependencies/dev-requirements.txt
+++ b/lib/galaxy/dependencies/dev-requirements.txt
@@ -105,6 +105,7 @@ pytest-httpserver==1.0.6 ; python_version >= "3.7" and python_version < "3.12"
 pytest-json-report==1.5.0 ; python_version >= "3.7" and python_version < "3.12"
 pytest-metadata==3.0.0 ; python_version >= "3.7" and python_version < "3.12"
 pytest-mock==3.11.1 ; python_version >= "3.7" and python_version < "3.12"
+pytest-playwright==0.3.0 ; python_version >= "3.7" and python_version < "3.12"
 pytest-postgresql==4.1.1 ; python_version >= "3.7" and python_version < "3.12"
 pytest-shard==0.1.2 ; python_version >= "3.7" and python_version < "3.12"
 pytest==7.4.2 ; python_version >= "3.7" and python_version < "3.12"
diff --git a/lib/tool_shed/test/base/playwrightbrowser.py b/lib/tool_shed/test/base/playwrightbrowser.py
new file mode 100644
index 000000000000..6d00f794c69b
--- /dev/null
+++ b/lib/tool_shed/test/base/playwrightbrowser.py
@@ -0,0 +1,153 @@
+import time
+from typing import List
+
+from playwright.sync_api import (
+    expect,
+    Locator,
+    Page,
+)
+
+from .browser import (
+    FormValueType,
+    ShedBrowser,
+)
+
+
+class PlaywrightShedBrowser(ShedBrowser):
+    _page: Page
+
+    def __init__(self, page: Page):
+        self._page = page
+
+    def visit_url(self, url: str, allowed_codes: List[int]) -> str:
+        response = self._page.goto(url)
+        assert response is not None
+        return_code = response.status
+        assert return_code in allowed_codes, "Invalid HTTP return code {}, allowed codes: {}".format(
+            return_code,
+            ", ".join(str(code) for code in allowed_codes),
+        )
+        return response.url
+
+    def page_content(self) -> str:
+        self._page.wait_for_load_state("networkidle")
+        return self._page.content()
+
+    def check_page_for_string(self, patt: str) -> None:
+        """Looks for 'patt' in the current browser page"""
+        patt = patt.replace("<b>", "").replace("</b>", "")
+        expect(self._page.locator("body")).to_contain_text(patt)
+
+    def check_string_not_in_page(self, patt: str) -> None:
+        patt = patt.replace("<b>", "").replace("</b>", "")
+        expect(self._page.locator("body")).not_to_contain_text(patt)
+
+    def xcheck_page_for_string(self, patt: str) -> None:
+        page = self.page_content()
+        if page.find(patt) == -1:
+            fname = self.write_temp_file(page)
+            errmsg = f"no match to '{patt}'\npage content written to '{fname}'\npage: [[{page}]]"
+            raise AssertionError(errmsg)
+
+    def xcheck_string_not_in_page(self, patt: str) -> None:
+        page = self.page_content()
+        if page.find(patt) != -1:
+            fname = self.write_temp_file(page)
+            errmsg = f"string ({patt}) incorrectly displayed in page.\npage content written to '{fname}'"
+            raise AssertionError(errmsg)
+
+    def write_temp_file(self, content, suffix=".html"):
+        import tempfile
+
+        from galaxy.util import smart_str
+
+        with tempfile.NamedTemporaryFile(suffix=suffix, prefix="twilltestcase-", delete=False) as fh:
+            fh.write(smart_str(content))
+        return fh.name
+
+    def show_forms(self) -> Locator:
+        """Show all forms; helpful for debugging new tests"""
+        return 
self._page.locator("form") + + def submit_form_with_name(self, form_name: str, button="runtool_btn", **kwd): + form = self._form_with_name(form_name) + self._submit_form(form, button, **kwd) + + def submit_form(self, form_no=-1, button="runtool_btn", form=None, **kwd): + """Populates and submits a form from the keyword arguments.""" + # An HTMLForm contains a sequence of Controls. Supported control classes are: + # TextControl, FileControl, ListControl, RadioControl, CheckboxControl, SelectControl, + # SubmitControl, ImageControl + if form is None: + try: + form = self.show_forms().nth(form_no) + except IndexError: + raise ValueError("No form to submit found") + self._submit_form(form, button, **kwd) + + def _submit_form(self, form: Locator, button="runtool_btn", **kwd): + for control_name, control_value in kwd.items(): + self._fill_form_value(form, control_name, control_value) + input = self._page.locator(f"[name='{button}']") + if input.count(): + input.click() + else: + submit_input = form.locator("input[type=submit]") + submit_input.click() + time.sleep(0.25) + # tc.submit(button) + + def _form_with_name(self, name: str) -> Locator: + forms = self.show_forms() + count = forms.count() + for i in range(count): + nth_form = self.show_forms().nth(i) + if nth_form.get_attribute("name") == name: + return nth_form + raise KeyError(f"No form with name [{name}]") + + def fill_form_value(self, form_name: str, control_name: str, value: FormValueType): + form: Locator = self._form_with_name(form_name) + self._fill_form_value(form, control_name, value) + + def _fill_form_value(self, form: Locator, control_name: str, value: FormValueType): + input_i = form.locator(f"input[name='{control_name}']") + input_t = form.locator(f"textarea[name='{control_name}']") + input_s = form.locator(f"select[name='{control_name}']") + if input_i.count(): + if control_name in ["redirect"]: + input_i.input_value = value + else: + if isinstance(value, bool): + if value and not input_i.is_checked(): + input_i.check() + elif not value and input_i.is_checked(): + input_i.uncheck() + else: + input_i.fill(value) + if input_t.count(): + input_t.fill(value) + if input_s.count(): + input_s.select_option(value) + + def edit_repository_categories(self, categories_to_add: List[str], categories_to_remove: List[str]) -> None: + multi_select = "form[name='categories'] select[name='category_id']" + select_locator = self._page.locator(multi_select) + select_locator.evaluate("node => node.selectedOptions = []") + select_locator.select_option(label=categories_to_add) + self.submit_form_with_name("categories", "manage_categories_button") + + select_locator.evaluate("node => node.selectedOptions = []") + select_locator.select_option(label=categories_to_remove) + self.submit_form_with_name("categories", "manage_categories_button") + + def grant_users_access(self, usernames: List[str]): + multi_select = "form[name='user_access'] select[name='allow_push']" + select_locator = self._page.locator(multi_select) + select_locator.evaluate("node => node.selectedOptions = []") + select_locator.select_option(label=usernames) + self.submit_form_with_name("user_access", "user_access_button") + + @property + def is_twill(self) -> bool: + return False diff --git a/lib/tool_shed/test/base/twilltestcase.py b/lib/tool_shed/test/base/twilltestcase.py index fc5e8fedd0dd..5eca1b0543f2 100644 --- a/lib/tool_shed/test/base/twilltestcase.py +++ b/lib/tool_shed/test/base/twilltestcase.py @@ -239,14 +239,18 @@ def display_installed_jobs_list_page( for 
data_manager_name in data_manager_names: params = {"id": data_managers[data_manager_name]["guid"]} self._visit_galaxy_url("/data_manager/jobs_list", params=params) - self.testcase.check_for_strings(strings_displayed) + content = page_content() + for expected in strings_displayed: + if content.find(expected) == -1: + raise AssertionError(f"Failed to find pattern {expected} in {content}") def installed_repository_extended_info( self, installed_repository: galaxy_model.ToolShedRepository ) -> Dict[str, Any]: params = {"id": self.testcase.security.encode_id(installed_repository.id)} self._visit_galaxy_url("/admin_toolshed/manage_repository_json", params=params) - return loads(self.testcase.last_page()) + json = page_content() + return loads(json) def install_repository( self, @@ -654,6 +658,12 @@ def _browser(self) -> ShedBrowser: assert self.__browser return self.__browser + def _escape_page_content_if_needed(self, content: str) -> str: + # if twill browser is being used - replace spaces with " " + if self._browser.is_twill: + content = content.replace(" ", " ") + return content + def check_for_strings(self, strings_displayed=None, strings_not_displayed=None): strings_displayed = strings_displayed or [] strings_not_displayed = strings_not_displayed or [] diff --git a/lib/tool_shed/test/functional/conftest.py b/lib/tool_shed/test/functional/conftest.py new file mode 100644 index 000000000000..9798b868a212 --- /dev/null +++ b/lib/tool_shed/test/functional/conftest.py @@ -0,0 +1,46 @@ +import os +from typing import ( + Any, + Dict, + Generator, +) + +import pytest +from playwright.sync_api import ( + Browser, + BrowserContext, +) +from typing_extensions import Literal + +from ..base.browser import ShedBrowser +from ..base.playwrightbrowser import PlaywrightShedBrowser +from ..base.twillbrowser import TwillShedBrowser + +DEFAULT_BROWSER: Literal["twill", "playwright"] = "playwright" + + +def twill_browser() -> Generator[ShedBrowser, None, None]: + yield TwillShedBrowser() + + +def playwright_browser(class_context: BrowserContext) -> Generator[ShedBrowser, None, None]: + page = class_context.new_page() + yield PlaywrightShedBrowser(page) + + +if os.environ.get("TOOL_SHED_TEST_BROWSER", DEFAULT_BROWSER) == "twill": + shed_browser = pytest.fixture(scope="class")(twill_browser) +else: + shed_browser = pytest.fixture(scope="class")(playwright_browser) + + +@pytest.fixture(scope="class") +def class_context( + browser: Browser, + browser_context_args: Dict, + pytestconfig: Any, + request: pytest.FixtureRequest, +) -> Generator[BrowserContext, None, None]: + from pytest_playwright.pytest_playwright import context + + yield from context.__pytest_wrapped__.obj(browser, browser_context_args, pytestconfig, request) diff --git a/lib/tool_shed/test/functional/test_0000_basic_repository_features.py b/lib/tool_shed/test/functional/test_0000_basic_repository_features.py index 029f2987866c..bb34b1aca3fb 100644 --- a/lib/tool_shed/test/functional/test_0000_basic_repository_features.py +++ b/lib/tool_shed/test/functional/test_0000_basic_repository_features.py @@ -108,10 +108,16 @@ def test_0040_verify_repository(self): self.browse_repository( repository, strings_displayed=[f"Repository '{repository.name}' revision", "(repository tip)"] ) + strings = ["Uploaded filtering 1.1.0"] + if self._browser.is_twill: + # this appears in a link - it isn't how one would check this + # in playwright. But also we're testing the mercurial page + # here so this is probably a questionable check overall. 
+ strings += [latest_changeset_revision] self.display_repository_clone_page( common.test_user_1_name, repository_name, - strings_displayed=["Uploaded filtering 1.1.0", latest_changeset_revision], + strings_displayed=strings, ) def test_0045_alter_repository_states(self): @@ -145,33 +151,40 @@ def test_0050_display_repository_tip_file(self): """Display the contents of filtering.xml in the repository tip revision""" repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) assert repository - self.display_repository_file_contents( - repository=repository, - filename="filtering.xml", - filepath=None, - strings_displayed=["1.1.0"], - strings_not_displayed=[], - ) + if self._browser.is_twill: + # probably not porting this functionality - just test + # with Twill for older UI and drop when that is all dropped + self.display_repository_file_contents( + repository=repository, + filename="filtering.xml", + filepath=None, + strings_displayed=["1.1.0"], + strings_not_displayed=[], + ) def test_0055_upload_filtering_txt_file(self): """Upload filtering.txt file associated with tool version 1.1.0.""" repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) self.add_file_to_repository(repository, "filtering/filtering_0000.txt") + expected = self._escape_page_content_if_needed("Readme file for filtering 1.1.0") self.display_manage_repository_page( - repository, strings_displayed=["Readme file for filtering 1.1.0"] + repository, strings_displayed=[expected] ) def test_0060_upload_filtering_test_data(self): """Upload filtering test data.""" repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) self.add_tar_to_repository(repository, "filtering/filtering_test_data.tar") - self.display_repository_file_contents( - repository=repository, - filename="1.bed", - filepath="test-data", - strings_displayed=[], - strings_not_displayed=[], - ) + if self._browser.is_twill: + # probably not porting this functionality - just test + # with Twill for older UI and drop when that is all dropped + self.display_repository_file_contents( + repository=repository, + filename="1.bed", + filepath="test-data", + strings_displayed=[], + strings_not_displayed=[], + ) self.check_repository_metadata(repository, tip_only=True) def test_0065_upload_filtering_2_2_0(self): @@ -208,15 +221,17 @@ def test_0075_upload_readme_txt_file(self): """Upload readme.txt file associated with tool version 2.2.0.""" repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) self.add_file_to_repository(repository, "readme.txt") + content = self._escape_page_content_if_needed("This is a readme file.") self.display_manage_repository_page( - repository, strings_displayed=["This is a readme file."] + repository, strings_displayed=[content] ) # Verify that there is a different readme file for each metadata revision. 
+ readme_content = self._escape_page_content_if_needed("Readme file for filtering 1.1.0") self.display_manage_repository_page( repository, strings_displayed=[ - "Readme file for filtering 1.1.0", - "This is a readme file.", + readme_content, + content, ], ) @@ -225,8 +240,9 @@ def test_0080_delete_readme_txt_file(self): repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) self.delete_files_from_repository(repository, filenames=["readme.txt"]) self.check_count_of_metadata_revisions_associated_with_repository(repository, metadata_count=2) + readme_content = self._escape_page_content_if_needed("Readme file for filtering 1.1.0") self.display_manage_repository_page( - repository, strings_displayed=["Readme file for filtering 1.1.0"] + repository, strings_displayed=[readme_content] ) def test_0085_search_for_valid_filter_tool(self): @@ -278,11 +294,14 @@ def test_0110_delete_filtering_repository(self): repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) self.login(email=common.admin_email, username=common.admin_username) self.delete_repository(repository) + metadata = self._populator.get_metadata(repository, downloadable_only=False) + for _, value in metadata.__root__.items(): + assert not value.downloadable # Explicitly reload all metadata revisions from the database, to ensure that we have the current status of the downloadable flag. # for metadata_revision in repository.metadata_revisions: # self.test_db_util.refresh(metadata_revision) # Marking a repository as deleted should result in no metadata revisions being downloadable. - assert True not in [metadata.downloadable for metadata in self._db_repository(repository).metadata_revisions] + # assert True not in [metadata.downloadable for metadata in self._db_repository(repository).metadata_revisions] def test_0115_undelete_filtering_repository(self): """Undelete the filtering_0000 repository and verify that it now has two downloadable revisions.""" @@ -312,8 +331,9 @@ def test_0125_upload_new_readme_file(self): repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) # Upload readme.txt to the filtering_0000 repository and verify that it is now displayed. self.add_file_to_repository(repository, "filtering/readme.txt") + content = self._escape_page_content_if_needed("These characters should not") self.display_manage_repository_page( - repository, strings_displayed=["These characters should not"] + repository, strings_displayed=[content] ) def test_0130_verify_handling_of_invalid_characters(self): @@ -331,13 +351,14 @@ def test_0130_verify_handling_of_invalid_characters(self): break # Check for the changeset revision, repository name, owner username, 'repos' in the clone url, and the captured # unicode decoding error message. 
+        content = self._escape_page_content_if_needed("These characters should not")
         strings_displayed = [
             "%d:%s" % (revision_number, revision_hash),
             "filtering_0000",
             "user1",
             "repos",
             "added:",
-            "+These characters should not",
+            f"+{content}",
         ]
         self.load_changeset_in_tool_shed(repository_id, changeset_revision, strings_displayed=strings_displayed)
@@ -352,9 +373,11 @@ def test_0140_view_invalid_changeset(self):
         """View repository using an invalid changeset"""
         repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name)
         encoded_repository_id = repository.id
+        assert encoded_repository_id
         strings_displayed = ["Invalid+changeset+revision"]
         view_repo_url = (
             f"/repository/view_repository?id={encoded_repository_id}&changeset_revision=nonsensical_changeset"
         )
         self.visit_url(view_repo_url)
-        self.check_for_strings(strings_displayed=strings_displayed, strings_not_displayed=[])
+        if self._browser.is_twill:
+            self.check_for_strings(strings_displayed=strings_displayed, strings_not_displayed=[])
diff --git a/pyproject.toml b/pyproject.toml
index e99a973160f1..19571905886c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -147,6 +147,7 @@ pytest-httpserver = "*"
 python-irodsclient = "!=1.1.2" # https://github.com/irods/python-irodsclient/issues/356
 pytest-json-report = "*"
 pytest-mock = "*"
+pytest-playwright = "*"
 pytest-postgresql = "!=3.0.0" # https://github.com/ClearcodeHQ/pytest-postgresql/issues/426
 pytest-shard = "*"
 responses = "*"

From 04e75635f55684e8e53fc3517ed98243d1ec1376 Mon Sep 17 00:00:00 2001
From: John Chilton
Date: Tue, 3 Jan 2023 09:53:54 -0500
Subject: [PATCH 68/73] Option config_hg_for_dev...

---
 lib/galaxy/config/schemas/tool_shed_config_schema.yml | 7 +++++++
 lib/tool_shed/util/hg_util.py                         | 6 +++++-
 test/unit/tool_shed/_util.py                          | 1 +
 3 files changed, 13 insertions(+), 1 deletion(-)

diff --git a/lib/galaxy/config/schemas/tool_shed_config_schema.yml b/lib/galaxy/config/schemas/tool_shed_config_schema.yml
index 358215d70316..8e52afa04d9b 100644
--- a/lib/galaxy/config/schemas/tool_shed_config_schema.yml
+++ b/lib/galaxy/config/schemas/tool_shed_config_schema.yml
@@ -31,6 +31,13 @@ mapping:
       Where the hgweb.config file is stored.
       The default is the Galaxy installation directory.
 
+  config_hg_for_dev:
+    type: str
+    required: false
+    desc: |
+      Allow pushing to mercurial repositories directly
+      and without authentication.
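+  # A sketch of intended usage (an illustrative assumption, not part of the
+  # option's documentation): enable this only for local development, e.g. in
+  # tool_shed.yml:
+  #
+  #   config_hg_for_dev: true
+  #
+  # With this set, create_hgrc_file() in lib/tool_shed/util/hg_util.py (see
+  # the hg_util.py diff below) writes "allow_push = *" into each repository's
+  # hgrc rather than restricting pushes to the repository owner.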
+ file_path: type: str default: database/community_files diff --git a/lib/tool_shed/util/hg_util.py b/lib/tool_shed/util/hg_util.py index f2c81c34d285..965013846c4f 100644 --- a/lib/tool_shed/util/hg_util.py +++ b/lib/tool_shed/util/hg_util.py @@ -85,7 +85,11 @@ def create_hgrc_file(app, repository): fp.write("default = .\n") fp.write("default-push = .\n") fp.write("[web]\n") - fp.write(f"allow_push = {repository.user.username}\n") + if app.config.config_hg_for_dev: + allow_push = "*" + else: + allow_push = repository.user.username + fp.write(f"allow_push = {allow_push}\n") fp.write(f"name = {repository.name}\n") fp.write("push_ssl = false\n") fp.write("[extensions]\n") diff --git a/test/unit/tool_shed/_util.py b/test/unit/tool_shed/_util.py index ec93408ce421..d57c1ed39d7b 100644 --- a/test/unit/tool_shed/_util.py +++ b/test/unit/tool_shed/_util.py @@ -40,6 +40,7 @@ class TestToolShedConfig: file_path: str id_secret: str = "thisistheshedunittestsecret" smtp_server: Optional[str] = None + config_hg_for_dev = False def __init__(self, temp_directory): files_path = os.path.join(temp_directory, "files") From e2e0d443126d724cc32f161845e9f0194019e783 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Wed, 11 Jan 2023 09:34:09 -0500 Subject: [PATCH 69/73] DEBUG lint openapi CI --- .github/workflows/lint_openapi_schema.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/lint_openapi_schema.yml b/.github/workflows/lint_openapi_schema.yml index c41e1489d933..02cfb35e640d 100644 --- a/.github/workflows/lint_openapi_schema.yml +++ b/.github/workflows/lint_openapi_schema.yml @@ -51,6 +51,9 @@ jobs: - name: Build typescript schema run: make update-client-api-schema working-directory: 'galaxy root' + - name: Diff... + run: git diff + working-directory: 'galaxy root' - name: Check for changes run: | if [[ `git status --porcelain` ]]; then From 7dc0b1378b2c286bab3b3fdaad8e11f96a28a707 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Tue, 26 Sep 2023 13:48:46 -0400 Subject: [PATCH 70/73] WIP: modern tool shed frontend --- .github/workflows/toolshed.yaml | 25 +- .vscode/shed.code-snippets | 41 + Makefile | 1 + .../dependencies/pinned-requirements.txt | 2 + lib/galaxy/managers/api_keys.py | 13 +- lib/galaxy/managers/users.py | 4 +- lib/galaxy/webapps/base/webapp.py | 41 +- lib/galaxy/webapps/galaxy/api/__init__.py | 50 + lib/galaxy/webapps/galaxy/controllers/user.py | 2 +- lib/galaxy/work/context.py | 25 + lib/tool_shed/context.py | 28 + lib/tool_shed/managers/repositories.py | 1 + lib/tool_shed/test/base/playwrightbrowser.py | 21 + lib/tool_shed/test/base/twilltestcase.py | 153 +- .../test_0000_basic_repository_features.py | 47 +- ..._0010_repository_with_tool_dependencies.py | 11 +- ...test_0020_basic_repository_dependencies.py | 2 + ...st_0030_repository_dependency_revisions.py | 2 + ...t_0040_repository_circular_dependencies.py | 2 + ...est_0050_circular_dependencies_4_levels.py | 7 +- .../test/functional/test_0070_invalid_tool.py | 2 +- ...st_0100_complex_repository_dependencies.py | 18 +- ...e_repository_dependency_multiple_owners.py | 16 +- .../functional/test_0140_tool_help_images.py | 2 + ...170_complex_prior_installation_required.py | 7 +- ...test_0420_citable_urls_for_repositories.py | 77 +- .../functional/test_0430_browse_utilities.py | 3 + .../test_0460_upload_to_repository.py | 79 +- .../test_0530_repository_admin_feature.py | 2 + ...test_0550_metadata_updated_dependencies.py | 12 +- ...stall_repository_with_tool_dependencies.py | 9 +- 
...repository_with_repository_dependencies.py | 9 +- ...ll_repository_with_dependency_revisions.py | 9 +- ...est_1050_circular_dependencies_4_levels.py | 18 +- ...e_repository_dependency_multiple_owners.py | 5 +- .../functional/test_1160_tool_help_images.py | 8 +- ...190_complex_prior_installation_required.py | 7 +- .../test/functional/test_frontend_login.py | 76 + .../test/functional/test_shed_graphql.py | 21 + .../test/functional/test_shed_repositories.py | 8 + lib/tool_shed/util/metadata_util.py | 1 + lib/tool_shed/webapp/api2/__init__.py | 86 +- lib/tool_shed/webapp/api2/repositories.py | 15 + lib/tool_shed/webapp/api2/users.py | 212 +- lib/tool_shed/webapp/fast_app.py | 132 +- lib/tool_shed/webapp/frontend/.eslintignore | 7 + lib/tool_shed/webapp/frontend/.eslintrc.js | 29 + lib/tool_shed/webapp/frontend/.prettierrc | 5 + lib/tool_shed/webapp/frontend/Makefile | 22 + lib/tool_shed/webapp/frontend/README.md | 27 + lib/tool_shed/webapp/frontend/codegen.ts | 16 + lib/tool_shed/webapp/frontend/index.html | 13 + lib/tool_shed/webapp/frontend/package.json | 54 + lib/tool_shed/webapp/frontend/src/App.vue | 51 + lib/tool_shed/webapp/frontend/src/apiUtil.ts | 19 + lib/tool_shed/webapp/frontend/src/apollo.ts | 25 + .../src/components/ComponentShowcase.vue | 15 + .../components/ComponentShowcaseExample.vue | 21 + .../src/components/ConfigFileContents.vue | 36 + .../frontend/src/components/ErrorBanner.vue | 38 + .../frontend/src/components/LoadingDiv.vue | 32 + .../frontend/src/components/LoginForm.vue | 38 + .../frontend/src/components/LoginPage.vue | 17 + .../src/components/ManagePushAccess.vue | 42 + .../frontend/src/components/ModalForm.vue | 23 + .../frontend/src/components/PageContainer.vue | 14 + .../RecentlyCreatedRepositories.vue | 39 + .../RecentlyUpdatedRepositories.vue | 39 + .../frontend/src/components/RegisterPage.vue | 82 + .../src/components/RegistrationSuccess.vue | 22 + .../src/components/RepositoriesForOwner.vue | 67 + .../src/components/RepositoriesGrid.vue | 160 + .../components/RepositoriesGridInterface.ts | 36 + .../src/components/RepositoryActions.vue | 45 + .../src/components/RepositoryCreation.vue | 41 + .../src/components/RepositoryExplore.vue | 71 + .../src/components/RepositoryHealth.vue | 28 + .../src/components/RepositoryLink.vue | 29 + .../src/components/RepositoryLinks.vue | 41 + .../src/components/RepositoryTool.vue | 25 + .../src/components/RepositoryUpdate.vue | 25 + .../src/components/RevisionActions.vue | 62 + .../src/components/RevisionSelect.vue | 56 + .../frontend/src/components/SelectUser.vue | 62 + .../frontend/src/components/ShedToolbar.vue | 117 + .../frontend/src/components/UtcDate.vue | 32 + .../src/components/pages/AdminControls.vue | 23 + .../src/components/pages/ChangePassword.vue | 50 + .../pages/CitableRepositoryPage.vue | 44 + .../components/pages/ComponentsShowcase.vue | 60 + .../src/components/pages/HelpPage.vue | 20 + .../src/components/pages/LandingPage.vue | 25 + .../src/components/pages/ManageApiKey.vue | 84 + .../pages/RepositoriesByCategories.vue | 44 + .../pages/RepositoriesByCategory.vue | 85 + .../components/pages/RepositoriesByOwner.vue | 15 + .../components/pages/RepositoriesByOwners.vue | 36 + .../components/pages/RepositoriesBySearch.vue | 88 + .../src/components/pages/RepositoryPage.vue | 277 ++ .../webapp/frontend/src/constants.ts | 13 + .../frontend/src/gql/fragment-masking.ts | 50 + lib/tool_shed/webapp/frontend/src/gql/gql.ts | 98 + .../webapp/frontend/src/gql/graphql.ts | 821 +++++ .../webapp/frontend/src/gql/index.ts | 2 + 
.../webapp/frontend/src/gqlFragements.ts | 27 + lib/tool_shed/webapp/frontend/src/main.ts | 24 + .../webapp/frontend/src/modelWrapper.ts | 15 + .../webapp/frontend/src/quasar-variables.sass | 15 + lib/tool_shed/webapp/frontend/src/router.ts | 13 + lib/tool_shed/webapp/frontend/src/routes.ts | 113 + .../webapp/frontend/src/schema/fetcher.ts | 20 + .../webapp/frontend/src/schema/index.ts | 3 + .../webapp/frontend/src/schema/schema.ts | 2061 ++++++++++++ .../webapp/frontend/src/schema/types.ts | 5 + .../webapp/frontend/src/shims-vue.d.ts | 6 + .../webapp/frontend/src/stores/auth.store.ts | 54 + .../frontend/src/stores/categories.store.ts | 33 + .../webapp/frontend/src/stores/index.ts | 4 + .../frontend/src/stores/repository.store.ts | 111 + .../webapp/frontend/src/stores/users.store.ts | 22 + lib/tool_shed/webapp/frontend/src/util.ts | 48 + .../webapp/frontend/src/vite-env.d.ts | 1 + .../webapp/frontend/static/favicon.ico | Bin 0 -> 15086 bytes lib/tool_shed/webapp/frontend/tsconfig.json | 22 + lib/tool_shed/webapp/frontend/vite.config.ts | 23 + lib/tool_shed/webapp/graphql-schema.json | 2990 +++++++++++++++++ lib/tool_shed/webapp/graphql/__init__.py | 0 lib/tool_shed/webapp/graphql/schema.py | 244 ++ lib/tool_shed_client/schema/__init__.py | 22 +- packages/test_driver/setup.cfg | 2 + pyproject.toml | 2 + run_tool_shed.sh | 1 + scripts/bootstrap_test_shed.py | 2 + test/unit/tool_shed/_util.py | 26 +- test/unit/tool_shed/test_graphql.py | 331 ++ 135 files changed, 10783 insertions(+), 227 deletions(-) create mode 100644 .vscode/shed.code-snippets create mode 100644 lib/tool_shed/test/functional/test_frontend_login.py create mode 100644 lib/tool_shed/test/functional/test_shed_graphql.py create mode 100644 lib/tool_shed/webapp/frontend/.eslintignore create mode 100644 lib/tool_shed/webapp/frontend/.eslintrc.js create mode 100644 lib/tool_shed/webapp/frontend/.prettierrc create mode 100644 lib/tool_shed/webapp/frontend/Makefile create mode 100644 lib/tool_shed/webapp/frontend/README.md create mode 100644 lib/tool_shed/webapp/frontend/codegen.ts create mode 100644 lib/tool_shed/webapp/frontend/index.html create mode 100644 lib/tool_shed/webapp/frontend/package.json create mode 100644 lib/tool_shed/webapp/frontend/src/App.vue create mode 100644 lib/tool_shed/webapp/frontend/src/apiUtil.ts create mode 100644 lib/tool_shed/webapp/frontend/src/apollo.ts create mode 100644 lib/tool_shed/webapp/frontend/src/components/ComponentShowcase.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/ComponentShowcaseExample.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/ConfigFileContents.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/ErrorBanner.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/LoadingDiv.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/LoginForm.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/LoginPage.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/ManagePushAccess.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/ModalForm.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/PageContainer.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/RecentlyCreatedRepositories.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/RecentlyUpdatedRepositories.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/RegisterPage.vue create mode 100644 
lib/tool_shed/webapp/frontend/src/components/RegistrationSuccess.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/RepositoriesForOwner.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/RepositoriesGrid.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/RepositoriesGridInterface.ts create mode 100644 lib/tool_shed/webapp/frontend/src/components/RepositoryActions.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/RepositoryCreation.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/RepositoryExplore.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/RepositoryHealth.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/RepositoryLink.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/RepositoryLinks.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/RepositoryTool.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/RepositoryUpdate.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/RevisionActions.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/RevisionSelect.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/SelectUser.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/ShedToolbar.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/UtcDate.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/pages/AdminControls.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/pages/ChangePassword.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/pages/CitableRepositoryPage.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/pages/ComponentsShowcase.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/pages/HelpPage.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/pages/LandingPage.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/pages/ManageApiKey.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesByCategories.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesByCategory.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesByOwner.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesByOwners.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesBySearch.vue create mode 100644 lib/tool_shed/webapp/frontend/src/components/pages/RepositoryPage.vue create mode 100644 lib/tool_shed/webapp/frontend/src/constants.ts create mode 100644 lib/tool_shed/webapp/frontend/src/gql/fragment-masking.ts create mode 100644 lib/tool_shed/webapp/frontend/src/gql/gql.ts create mode 100644 lib/tool_shed/webapp/frontend/src/gql/graphql.ts create mode 100644 lib/tool_shed/webapp/frontend/src/gql/index.ts create mode 100644 lib/tool_shed/webapp/frontend/src/gqlFragements.ts create mode 100644 lib/tool_shed/webapp/frontend/src/main.ts create mode 100644 lib/tool_shed/webapp/frontend/src/modelWrapper.ts create mode 100644 lib/tool_shed/webapp/frontend/src/quasar-variables.sass create mode 100644 lib/tool_shed/webapp/frontend/src/router.ts create mode 100644 lib/tool_shed/webapp/frontend/src/routes.ts create mode 100644 lib/tool_shed/webapp/frontend/src/schema/fetcher.ts create mode 100644 lib/tool_shed/webapp/frontend/src/schema/index.ts create mode 100644 
lib/tool_shed/webapp/frontend/src/schema/schema.ts create mode 100644 lib/tool_shed/webapp/frontend/src/schema/types.ts create mode 100644 lib/tool_shed/webapp/frontend/src/shims-vue.d.ts create mode 100644 lib/tool_shed/webapp/frontend/src/stores/auth.store.ts create mode 100644 lib/tool_shed/webapp/frontend/src/stores/categories.store.ts create mode 100644 lib/tool_shed/webapp/frontend/src/stores/index.ts create mode 100644 lib/tool_shed/webapp/frontend/src/stores/repository.store.ts create mode 100644 lib/tool_shed/webapp/frontend/src/stores/users.store.ts create mode 100644 lib/tool_shed/webapp/frontend/src/util.ts create mode 100644 lib/tool_shed/webapp/frontend/src/vite-env.d.ts create mode 100644 lib/tool_shed/webapp/frontend/static/favicon.ico create mode 100644 lib/tool_shed/webapp/frontend/tsconfig.json create mode 100644 lib/tool_shed/webapp/frontend/vite.config.ts create mode 100644 lib/tool_shed/webapp/graphql-schema.json create mode 100644 lib/tool_shed/webapp/graphql/__init__.py create mode 100644 lib/tool_shed/webapp/graphql/schema.py create mode 100644 test/unit/tool_shed/test_graphql.py diff --git a/.github/workflows/toolshed.yaml b/.github/workflows/toolshed.yaml index cc54bf43c067..f6c06663da3c 100644 --- a/.github/workflows/toolshed.yaml +++ b/.github/workflows/toolshed.yaml @@ -20,14 +20,23 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ['3.7'] - test-install-client: ['standalone', 'galaxy_api'] - # v1 is mostly working... - shed-api: ['v1'] - # lets get twill working with twill then try to - # make progress on the playwright - # shed-browser: ['twill', 'playwright'] - shed-browser: ['playwright'] + include: + - test-install-client: 'galaxy_api' + python-version: '3.7' + shed-api: 'v1' + shed-browser: 'twill' + - test-install-client: 'standalone' + python-version: '3.8' + shed-api: 'v1' + shed-browser: 'twill' + - test-install-client: 'galaxy_api' + python-version: '3.9' + shed-api: 'v2' + shed-browser: 'playwright' + - test-install-client: 'standalone' + python-version: '3.10' + shed-api: 'v2' + shed-browser: 'playwright' services: postgres: image: postgres:13 diff --git a/.vscode/shed.code-snippets b/.vscode/shed.code-snippets new file mode 100644 index 000000000000..f90a37c71313 --- /dev/null +++ b/.vscode/shed.code-snippets @@ -0,0 +1,41 @@ +{ + "shedcomp": { + "prefix": "shed_component", + "body": [ + "", + "" + ], + "description": "outline of a tool shed component" + }, + "shedpage": { + "prefix": "shed_page", + "body": [ + "", + "" + ], + "description": "outline of a tool shed page" + }, + "shedfetcher": { + "prefix": "shed_fetcher", + "body": [ + "import { fetcher } from \"@/schema\"", + "const fetcher = fetcher.path(\"$1\").method(\"get\").create()" + ], + "description": "Import shed fetcher and instantiate with a path" + }, + "shedrouter": { + "prefix": "shed_router", + "body": [ + "import router from \"@/router\"" + ] + } +} \ No newline at end of file diff --git a/Makefile b/Makefile index 0dd02e253c9d..f20548df435e 100644 --- a/Makefile +++ b/Makefile @@ -190,6 +190,7 @@ remove-api-schema: update-client-api-schema: client-node-deps build-api-schema $(IN_VENV) cd client && node openapi_to_schema.mjs ../_schema.yaml > src/schema/schema.ts && npx prettier --write src/schema/schema.ts + $(IN_VENV) cd client && node openapi_to_schema.mjs ../_shed_schema.yaml > ../lib/tool_shed/webapp/frontend/src/schema/schema.ts && npx prettier --write ../lib/tool_shed/webapp/frontend/src/schema/schema.ts $(MAKE) remove-api-schema lint-api-schema: 
build-api-schema diff --git a/lib/galaxy/dependencies/pinned-requirements.txt b/lib/galaxy/dependencies/pinned-requirements.txt index 4b851aed081a..dcc9ccb09e8c 100644 --- a/lib/galaxy/dependencies/pinned-requirements.txt +++ b/lib/galaxy/dependencies/pinned-requirements.txt @@ -73,6 +73,7 @@ fsspec==2023.1.0 ; python_version >= "3.7" and python_version < "3.12" future==0.18.3 ; python_version >= "3.7" and python_version < "3.12" galaxy-sequence-utils==1.1.5 ; python_version >= "3.7" and python_version < "3.12" galaxy2cwl==0.1.4 ; python_version >= "3.7" and python_version < "3.12" +graphene-sqlalchemy==3.0.0b3 ; python_version >= "3.7" and python_version < "3.12" gravity==1.0.3 ; python_version >= "3.7" and python_version < "3.12" greenlet==2.0.2 ; python_version >= "3.7" and (platform_machine == "aarch64" or platform_machine == "ppc64le" or platform_machine == "x86_64" or platform_machine == "amd64" or platform_machine == "AMD64" or platform_machine == "win32" or platform_machine == "WIN32") and python_version < "3.12" gunicorn==21.2.0 ; python_version >= "3.7" and python_version < "3.12" @@ -179,6 +180,7 @@ sqlalchemy==1.4.49 ; python_version >= "3.7" and python_version < "3.12" sqlitedict==2.1.0 ; python_version >= "3.7" and python_version < "3.12" sqlparse==0.4.4 ; python_version >= "3.7" and python_version < "3.12" starlette-context==0.3.5 ; python_version >= "3.7" and python_version < "3.12" +starlette_graphene3==0.6.0 ; python_version >= "3.7" and python_version < "3.12" starlette==0.27.0 ; python_version >= "3.7" and python_version < "3.12" supervisor==4.2.5 ; python_version >= "3.7" and python_version < "3.12" svgwrite==1.4.3 ; python_version >= "3.7" and python_version < "3.12" diff --git a/lib/galaxy/managers/api_keys.py b/lib/galaxy/managers/api_keys.py index dbc5121670a2..b986abde5756 100644 --- a/lib/galaxy/managers/api_keys.py +++ b/lib/galaxy/managers/api_keys.py @@ -3,22 +3,27 @@ select, update, ) +from typing_extensions import Protocol from galaxy.model.base import transaction from galaxy.structured_app import BasicSharedApp +class IsUserModel(Protocol): + id: str + + class ApiKeyManager: def __init__(self, app: BasicSharedApp): self.app = app self.session = self.app.model.context - def get_api_key(self, user): + def get_api_key(self, user: IsUserModel): APIKeys = self.app.model.APIKeys stmt = select(APIKeys).filter_by(user_id=user.id, deleted=False).order_by(APIKeys.create_time.desc()).limit(1) return self.session.scalars(stmt).first() - def create_api_key(self, user): + def create_api_key(self, user: IsUserModel): guid = self.app.security.get_new_guid() new_key = self.app.model.APIKeys() new_key.user_id = user.id @@ -28,7 +33,7 @@ def create_api_key(self, user): self.session.commit() return new_key - def get_or_create_api_key(self, user) -> str: + def get_or_create_api_key(self, user: IsUserModel) -> str: # Logic Galaxy has always used - but it would appear to have a race # condition. Worth fixing? Would kind of need a message queue to fix # in multiple process mode. 
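A minimal runnable sketch of the structural typing introduced above (StubUser
and key_owner_id are illustrative assumptions, not part of the change): any
object exposing a compatible `id` attribute satisfies IsUserModel, so distinct
Galaxy and Tool Shed user models can both be handed to ApiKeyManager without
sharing a base class.

from typing_extensions import Protocol


class IsUserModel(Protocol):
    id: str


class StubUser:
    """Hypothetical stand-in for a SQLAlchemy-mapped user model."""

    def __init__(self, id: str):
        self.id = id


def key_owner_id(user: IsUserModel) -> str:
    # Structural typing: StubUser is accepted without inheriting from anything.
    return user.id


assert key_owner_id(StubUser("abc123")) == "abc123"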
@@ -36,7 +41,7 @@ def get_or_create_api_key(self, user) -> str: key = api_key.key if api_key else self.create_api_key(user).key return key - def delete_api_key(self, user) -> None: + def delete_api_key(self, user: IsUserModel) -> None: """Marks the current user API key as deleted.""" # Before it was possible to create multiple API keys for the same user although they were not considered valid # So all non-deleted keys are marked as deleted for backward compatibility diff --git a/lib/galaxy/managers/users.py b/lib/galaxy/managers/users.py index 69e74c3bae42..f90644a5d5e2 100644 --- a/lib/galaxy/managers/users.py +++ b/lib/galaxy/managers/users.py @@ -455,7 +455,9 @@ def change_password(self, trans, password=None, confirm=None, token=None, id=Non trans.sa_session.add(token_result) return user, "Password has been changed. Token has been invalidated." else: - user = self.by_id(self.app.security.decode_id(id)) + if not isinstance(id, int): + id = self.app.security.decode_id(id) + user = self.by_id(id) if user: message = self.app.auth_manager.check_change_password(user, current, trans.request) if message: diff --git a/lib/galaxy/webapps/base/webapp.py b/lib/galaxy/webapps/base/webapp.py index 1c9774efa025..8fc17986afec 100644 --- a/lib/galaxy/webapps/base/webapp.py +++ b/lib/galaxy/webapps/base/webapp.py @@ -736,21 +736,9 @@ def __create_new_session(self, prev_galaxy_session=None, user_for_new_session=No Caller is responsible for flushing the returned session. """ - session_key = self.security.get_new_guid() - galaxy_session = self.app.model.GalaxySession( - session_key=session_key, - is_valid=True, - remote_host=self.request.remote_host, - remote_addr=self.request.remote_addr, - referer=self.request.headers.get("Referer", None), + return create_new_session( + self, prev_galaxy_session=prev_galaxy_session, user_for_new_session=user_for_new_session ) - if prev_galaxy_session: - # Invalidated an existing session for some reason, keep track - galaxy_session.prev_session_id = prev_galaxy_session.id - if user_for_new_session: - # The new session should be associated with the user - galaxy_session.user = user_for_new_session - return galaxy_session @property def cookie_path(self): @@ -1110,6 +1098,31 @@ def qualified_url_for_path(self, path): return url_for(path, qualified=True) +def create_new_session(trans, prev_galaxy_session=None, user_for_new_session=None): + """ + Create a new GalaxySession for this request, possibly with a connection + to a previous session (in `prev_galaxy_session`) and an existing user + (in `user_for_new_session`). + + Caller is responsible for flushing the returned session. 
+ """ + session_key = trans.security.get_new_guid() + galaxy_session = trans.app.model.GalaxySession( + session_key=session_key, + is_valid=True, + remote_host=trans.request.remote_host, + remote_addr=trans.request.remote_addr, + referer=trans.request.headers.get("Referer", None), + ) + if prev_galaxy_session: + # Invalidated an existing session for some reason, keep track + galaxy_session.prev_session_id = prev_galaxy_session.id + if user_for_new_session: + # The new session should be associated with the user + galaxy_session.user = user_for_new_session + return galaxy_session + + def default_url_path(path): return os.path.abspath(os.path.join(os.path.dirname(__file__), path)) diff --git a/lib/galaxy/webapps/galaxy/api/__init__.py b/lib/galaxy/webapps/galaxy/api/__init__.py index b38ef704a398..b6a44a1f43ac 100644 --- a/lib/galaxy/webapps/galaxy/api/__init__.py +++ b/lib/galaxy/webapps/galaxy/api/__init__.py @@ -47,6 +47,7 @@ NoMatchFound, ) from starlette.types import Scope +from typing_extensions import Literal try: from starlette_context import context as request_context @@ -200,6 +201,8 @@ class GalaxyASGIRequest(GalaxyAbstractRequest): Implements the GalaxyAbstractRequest interface to provide access to some properties of the request commonly used.""" + __request: Request + def __init__(self, request: Request): self.__request = request self.__environ: Optional[MutableMapping[str, Any]] = None @@ -232,6 +235,28 @@ def environ(self) -> MutableMapping[str, Any]: self.__environ = build_environ(self.__request.scope, None) # type: ignore[arg-type] return self.__environ + @property + def headers(self): + return self.__request.headers + + @property + def remote_host(self) -> str: + # was available in wsgi and is used create_new_session + return self.host + + @property + def remote_addr(self) -> Optional[str]: + # was available in wsgi and is used create_new_session + # not sure what to do here... + return None + + @property + def is_secure(self) -> bool: + return self.__request.url.scheme == "https" + + def get_cookie(self, name): + return self.__request.cookies.get(name) + class GalaxyASGIResponse(GalaxyAbstractResponse): """Wrapper around Starlette/FastAPI Response object. 
@@ -246,6 +271,31 @@ def __init__(self, response: Response): def headers(self): return self.__response.headers + def set_cookie( + self, + key: str, + value: str = "", + max_age: Optional[int] = None, + expires: Optional[int] = None, + path: str = "/", + domain: Optional[str] = None, + secure: bool = False, + httponly: bool = False, + samesite: Optional[Literal["lax", "strict", "none"]] = "lax", + ) -> None: + """Set a cookie.""" + self.__response.set_cookie( + key, + value, + max_age=max_age, + expires=expires, + path=path, + domain=domain, + secure=secure, + httponly=httponly, + samesite=samesite, + ) + DependsOnUser = cast(Optional[User], Depends(get_user)) diff --git a/lib/galaxy/webapps/galaxy/controllers/user.py b/lib/galaxy/webapps/galaxy/controllers/user.py index 7fc9b8c1e017..52587ee2d66e 100644 --- a/lib/galaxy/webapps/galaxy/controllers/user.py +++ b/lib/galaxy/webapps/galaxy/controllers/user.py @@ -189,7 +189,7 @@ def __validate_login(self, trans, payload=None, **kwd): message, status = self.resend_activation_email(trans, user.email, user.username) return self.message_exception(trans, message, sanitize=False) else: # activation is OFF - pw_expires = trans.app.config.password_expiration_period + pw_expires = getattr(trans.app.config, "password_expiration_period", None) if pw_expires and user.last_password_change < datetime.today() - pw_expires: # Password is expired, we don't log them in. return { diff --git a/lib/galaxy/work/context.py b/lib/galaxy/work/context.py index 310779e504e3..8a1206018c0a 100644 --- a/lib/galaxy/work/context.py +++ b/lib/galaxy/work/context.py @@ -4,6 +4,8 @@ Optional, ) +from typing_extensions import Literal + from galaxy.managers.context import ProvidesHistoryContext from galaxy.model import ( GalaxySession, @@ -85,6 +87,14 @@ def base(self) -> str: def host(self) -> str: """The host address.""" + @abc.abstractproperty + def is_secure(self) -> bool: + """Was this a secure (https) request.""" + + @abc.abstractmethod + def get_cookie(self, name): + """Return cookie.""" + class GalaxyAbstractResponse: """Abstract interface to provide access to some response utilities.""" @@ -102,6 +112,21 @@ def set_content_type(self, content_type: str): def get_content_type(self): return self.headers.get("content-type", None) + @abc.abstractmethod + def set_cookie( + self, + key: str, + value: str = "", + max_age: Optional[int] = None, + expires: Optional[int] = None, + path: str = "/", + domain: Optional[str] = None, + secure: bool = False, + httponly: bool = False, + samesite: Optional[Literal["lax", "strict", "none"]] = "lax", + ) -> None: + """Set a cookie.""" + class SessionRequestContext(WorkRequestContext): """Like WorkRequestContext, but provides access to request.""" diff --git a/lib/tool_shed/context.py b/lib/tool_shed/context.py index 6991bbd112cd..107be8c8ed48 100644 --- a/lib/tool_shed/context.py +++ b/lib/tool_shed/context.py @@ -84,6 +84,10 @@ class SessionRequestContext(ProvidesRepositoriesContext, Protocol): def get_galaxy_session(self) -> Optional[GalaxySession]: ... + @abc.abstractmethod + def set_galaxy_session(self, galaxy_session: GalaxySession): + ... + @abc.abstractproperty def request(self) -> GalaxyAbstractRequest: ... @@ -96,6 +100,10 @@ def response(self) -> GalaxyAbstractResponse: def url_builder(self): ... + @abc.abstractproperty + def session_csrf_token(self) -> str: + ... 
+ class SessionRequestContextImpl(SessionRequestContext): _app: ToolShedApp @@ -133,6 +141,11 @@ def user(self) -> Optional[User]: def get_galaxy_session(self) -> Optional[GalaxySession]: return self._galaxy_session + def set_galaxy_session(self, galaxy_session: GalaxySession): + self._galaxy_session = galaxy_session + if galaxy_session.user: + self._user = galaxy_session.user + @property def repositories_hostname(self) -> str: return str(self.request.base).rstrip("/") @@ -148,3 +161,18 @@ def request(self) -> GalaxyAbstractRequest: @property def response(self) -> GalaxyAbstractResponse: return self.__response + + # Following three things added v2.0 frontend + @property + def session_csrf_token(self): + token = "" + if self._galaxy_session: + token = self.security.encode_id(self._galaxy_session.id, kind="csrf") + return token + + @property + def galaxy_session(self) -> Optional[GalaxySession]: + return self._galaxy_session + + def log_event(self, str): + pass diff --git a/lib/tool_shed/managers/repositories.py b/lib/tool_shed/managers/repositories.py index f72e61fa1a31..805f716164c5 100644 --- a/lib/tool_shed/managers/repositories.py +++ b/lib/tool_shed/managers/repositories.py @@ -433,6 +433,7 @@ def get_repository_metadata_dict(app: ToolShedApp, id: str, recursive: bool, dow metadata_dict["repository_dependencies"] = [] if metadata.includes_tools: metadata_dict["tools"] = metadata.metadata["tools"] + metadata_dict["invalid_tools"] = metadata.metadata.get("invalid_tools", []) all_metadata[f"{int(changeset)}:{changehash}"] = metadata_dict return all_metadata diff --git a/lib/tool_shed/test/base/playwrightbrowser.py b/lib/tool_shed/test/base/playwrightbrowser.py index 6d00f794c69b..d29529cece24 100644 --- a/lib/tool_shed/test/base/playwrightbrowser.py +++ b/lib/tool_shed/test/base/playwrightbrowser.py @@ -13,6 +13,13 @@ ) +class Locators: + toolbar_login = ".toolbar-login" + toolbar_logout = ".toolbar-logout" + login_submit_button = '[name="login_button"]' + register_link = ".register-link" + + class PlaywrightShedBrowser(ShedBrowser): _page: Page @@ -151,3 +158,17 @@ def grant_users_access(self, usernames: List[str]): @property def is_twill(self) -> bool: return False + + def logout_if_logged_in(self, assert_logged_out=True): + self._page.wait_for_selector(f"{Locators.toolbar_login}, {Locators.toolbar_logout}") + logout_locator = self._page.locator(Locators.toolbar_logout) + if logout_locator.is_visible(): + logout_locator.click() + if assert_logged_out: + self.expect_not_logged_in() + + def expect_not_logged_in(self): + expect(self._page.locator(Locators.toolbar_logout)).not_to_be_visible() + + def expect_logged_in(self): + expect(self._page.locator(Locators.toolbar_logout)).to_be_visible() diff --git a/lib/tool_shed/test/base/twilltestcase.py b/lib/tool_shed/test/base/twilltestcase.py index 5eca1b0543f2..8c15af3dc210 100644 --- a/lib/tool_shed/test/base/twilltestcase.py +++ b/lib/tool_shed/test/base/twilltestcase.py @@ -29,6 +29,7 @@ hg, ui, ) +from playwright.sync_api import Page from sqlalchemy import ( and_, false, @@ -74,6 +75,7 @@ ) from .api import ShedApiTestCase from .browser import ShedBrowser +from .playwrightbrowser import PlaywrightShedBrowser from .twillbrowser import ( page_content, visit_url, @@ -692,39 +694,52 @@ def check_string_not_in_page(self, patt): self._browser.check_string_not_in_page(patt) # Functions associated with user accounts + def _submit_register_form(self, email: str, password: str, username: str, redirect: Optional[str] = None): + 
self._browser.fill_form_value("registration", "email", email) + if redirect is not None: + self._browser.fill_form_value("registration", "redirect", redirect) + self._browser.fill_form_value("registration", "password", password) + self._browser.fill_form_value("registration", "confirm", password) + self._browser.fill_form_value("registration", "username", username) + self._browser.submit_form_with_name("registration", "create_user_button") + + @property + def invalid_tools_labels(self) -> str: + return "Invalid Tools" if self.is_v2 else "Invalid tools" def create(self, cntrller="user", email="test@bx.psu.edu", password="testuser", username="admin-user", redirect=""): # HACK: don't use panels because late_javascripts() messes up the twill browser and it # can't find form fields (and hence user can't be logged in). params = dict(cntrller=cntrller, use_panels=False) self.visit_url("/user/create", params) - self._browser.fill_form_value("registration", "email", email) - self._browser.fill_form_value("registration", "redirect", redirect) - self._browser.fill_form_value("registration", "password", password) - self._browser.fill_form_value("registration", "confirm", password) - self._browser.fill_form_value("registration", "username", username) - self._browser.submit_form_with_name("registration", "create_user_button") + self._submit_register_form( + email, + password, + username, + redirect, + ) previously_created = False username_taken = False invalid_username = False - try: - self.check_page_for_string("Created new user account") - except AssertionError: + if not self.is_v2: try: - # May have created the account in a previous test run... - self.check_page_for_string(f"User with email '{email}' already exists.") - previously_created = True + self.check_page_for_string("Created new user account") except AssertionError: try: - self.check_page_for_string("Public name is taken; please choose another") - username_taken = True + # May have created the account in a previous test run... + self.check_page_for_string(f"User with email '{email}' already exists.") + previously_created = True except AssertionError: - # Note that we're only checking if the usr name is >< 4 chars here... try: - self.check_page_for_string("Public name must be at least 4 characters in length") - invalid_username = True + self.check_page_for_string("Public name is taken; please choose another") + username_taken = True except AssertionError: - pass + # Note that we're only checking if the usr name is >< 4 chars here... + try: + self.check_page_for_string("Public name must be at least 4 characters in length") + invalid_username = True + except AssertionError: + pass return previously_created, username_taken, invalid_username def last_page(self): @@ -748,6 +763,11 @@ def login( redirect: str = "", logout_first: bool = True, ): + if self.is_v2: + # old version had a logout URL, this one needs to check + # page if logged in + self.visit_url("/") + # Clear cookies. if logout_first: self.logout() @@ -755,7 +775,8 @@ def login( previously_created, username_taken, invalid_username = self.create( email=email, password=password, username=username, redirect=redirect ) - if previously_created: + # v2 doesn't log you in on account creation... so force a login here + if previously_created or self.is_v2: # The acount has previously been created, so just login. # HACK: don't use panels because late_javascripts() messes up the twill browser and it # can't find form fields (and hence user can't be logged in). 
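A condensed, runnable sketch of the v1/v2 login decision implemented above
(FakeTestCase is an illustrative assumption, not code from this patch): v1
logs the user in as a side effect of account creation, while the v2 frontend
never does, so v2 always proceeds to an explicit /user/login submission.

class FakeTestCase:
    def __init__(self, api_version: str, previously_created: bool):
        self.api_version = api_version
        self.previously_created = previously_created

    @property
    def is_v2(self) -> bool:
        return self.api_version == "v2"

    def needs_explicit_login(self) -> bool:
        # Mirrors `if previously_created or self.is_v2:` in login() above.
        return self.previously_created or self.is_v2


assert not FakeTestCase("v1", previously_created=False).needs_explicit_login()
assert FakeTestCase("v1", previously_created=True).needs_explicit_login()
assert FakeTestCase("v2", previously_created=False).needs_explicit_login()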
@@ -763,9 +784,27 @@
         self.visit_url("/user/login", params=params)
         self.submit_form(button="login_button", login=email, redirect=redirect, password=password)
 
+    @property
+    def is_v2(self) -> bool:
+        return self.api_interactor.api_version == "v2"
+
+    @property
+    def _playwright_browser(self) -> PlaywrightShedBrowser:
+        # make sure self.is_v2
+        browser = self._browser
+        assert isinstance(browser, PlaywrightShedBrowser)
+        return browser
+
+    @property
+    def _page(self) -> Page:
+        return self._playwright_browser._page
+
     def logout(self):
-        self.visit_url("/user/logout")
-        self.check_page_for_string("You have been logged out")
+        if self.is_v2:
+            self._playwright_browser.logout_if_logged_in()
+        else:
+            self.visit_url("/user/logout")
+            self.check_page_for_string("You have been logged out")
 
     def submit_form(self, form_no=-1, button="runtool_btn", form=None, **kwd):
         """Populates and submits a form from the keyword arguments."""
@@ -816,12 +855,15 @@ def assign_admin_role(self, repository: Repository, user):
         self.check_for_strings(strings_displayed=["Role", "has been associated"])
 
     def browse_category(self, category: Category, strings_displayed=None, strings_not_displayed=None):
-        params = {
-            "sort": "name",
-            "operation": "valid_repositories_by_category",
-            "id": category.id,
-        }
-        self.visit_url("/repository/browse_valid_categories", params=params)
+        if self.is_v2:
+            self.visit_url(f"/repositories_by_category/{category.id}")
+        else:
+            params = {
+                "sort": "name",
+                "operation": "valid_repositories_by_category",
+                "id": category.id,
+            }
+            self.visit_url("/repository/browse_valid_categories", params=params)
         self.check_for_strings(strings_displayed, strings_not_displayed)
 
     def browse_repository(self, repository: Repository, strings_displayed=None, strings_not_displayed=None):
@@ -835,7 +877,10 @@ def browse_repository_dependencies(self, strings_displayed=None, strings_not_dis
         self.check_for_strings(strings_displayed, strings_not_displayed)
 
     def browse_tool_shed(self, url, strings_displayed=None, strings_not_displayed=None):
-        url = "/repository/browse_valid_categories"
+        if self.is_v2:
+            url = "/repositories_by_category"
+        else:
+            url = "/repository/browse_valid_categories"
         self.visit_url(url)
         self.check_for_strings(strings_displayed, strings_not_displayed)
@@ -875,12 +920,14 @@ def check_repository_changelog(self, repository: Repository, strings_displayed=N
     def check_repository_dependency(
         self, repository: Repository, depends_on_repository, depends_on_changeset_revision=None, changeset_revision=None
     ):
-        strings_displayed = [depends_on_repository.name, depends_on_repository.owner]
-        if depends_on_changeset_revision:
-            strings_displayed.append(depends_on_changeset_revision)
-        self.display_manage_repository_page(
-            repository, changeset_revision=changeset_revision, strings_displayed=strings_displayed
-        )
+        if not self.is_v2:
+            # v2 doesn't display repository dependencies, they are deprecated
+            strings_displayed = [depends_on_repository.name, depends_on_repository.owner]
+            if depends_on_changeset_revision:
+                strings_displayed.append(depends_on_changeset_revision)
+            self.display_manage_repository_page(
+                repository, changeset_revision=changeset_revision, strings_displayed=strings_displayed
+            )
 
     def check_repository_metadata(self, repository: Repository, tip_only=True):
         if tip_only:
@@ -1176,7 +1223,10 @@ def display_manage_repository_page(
         params = {"id": repository.id}
         if changeset_revision:
             params["changeset_revision"] = changeset_revision
-        
self.visit_url("/repository/manage_repository", params=params) + url = "/repository/manage_repository" + if self.is_v2: + url = f"/repositories/{repository.id}" + self.visit_url(url, params=params) self.check_for_strings(strings_displayed, strings_not_displayed) def display_repository_clone_page( @@ -1592,17 +1642,20 @@ def load_citable_url( url += f"/{changeset_revision}" self.visit_url(url) self.check_for_strings(strings_displayed, strings_not_displayed) - # Now load the page that should be displayed inside the iframe and check for strings. - if encoded_repository_id: - params = {"id": encoded_repository_id, "operation": "view_or_manage_repository"} - if changeset_revision: - params["changeset_revision"] = changeset_revision - self.visit_url("/repository/view_repository", params=params) - self.check_for_strings(strings_displayed_in_iframe, strings_not_displayed_in_iframe) - elif encoded_user_id: - params = {"user_id": encoded_user_id, "operation": "repositories_by_user"} - self.visit_url("/repository/browse_repositories", params=params) + if self.is_v2: self.check_for_strings(strings_displayed_in_iframe, strings_not_displayed_in_iframe) + else: + # Now load the page that should be displayed inside the iframe and check for strings. + if encoded_repository_id: + params = {"id": encoded_repository_id, "operation": "view_or_manage_repository"} + if changeset_revision: + params["changeset_revision"] = changeset_revision + self.visit_url("/repository/view_repository", params=params) + self.check_for_strings(strings_displayed_in_iframe, strings_not_displayed_in_iframe) + elif encoded_user_id: + params = {"user_id": encoded_user_id, "operation": "repositories_by_user"} + self.visit_url("/repository/browse_repositories", params=params) + self.check_for_strings(strings_displayed_in_iframe, strings_not_displayed_in_iframe) def load_changeset_in_tool_shed( self, repository_id, changeset_revision, strings_displayed=None, strings_not_displayed=None @@ -1694,9 +1747,13 @@ def repository_is_new(self, repository: Repository) -> bool: return tip_ctx.rev() < 0 def reset_metadata_on_selected_repositories(self, repository_ids): - self.visit_url("/admin/reset_metadata_on_selected_repositories_in_tool_shed") - kwd = dict(repository_ids=repository_ids) - self.submit_form(button="reset_metadata_on_selected_repositories_button", **kwd) + if self.is_v2: + for repository_id in repository_ids: + self.populator.reset_metadata(repository_id) + else: + self.visit_url("/admin/reset_metadata_on_selected_repositories_in_tool_shed") + kwd = dict(repository_ids=repository_ids) + self.submit_form(button="reset_metadata_on_selected_repositories_button", **kwd) def reset_metadata_on_installed_repositories(self, repositories): assert self._installation_client diff --git a/lib/tool_shed/test/functional/test_0000_basic_repository_features.py b/lib/tool_shed/test/functional/test_0000_basic_repository_features.py index bb34b1aca3fb..c34727257c17 100644 --- a/lib/tool_shed/test/functional/test_0000_basic_repository_features.py +++ b/lib/tool_shed/test/functional/test_0000_basic_repository_features.py @@ -1,5 +1,6 @@ import logging +from ..base.api import skip_if_api_v2 from ..base.twilltestcase import ( common, ShedTwillTestCase, @@ -21,6 +22,9 @@ def test_0000_initiate_users(self): self.login(email=common.test_user_2_email, username=common.test_user_2_name) self.login(email=common.admin_email, username=common.admin_username) + @skip_if_api_v2 + # no replicating the functionality in tool shed 2.0, use Planemo + # to create 
repositories. def test_0005_create_repository_without_categories(self): """Verify that a repository cannot be created unless at least one category has been defined.""" strings_displayed = ["No categories have been configured in this instance of the Galaxy Tool Shed"] @@ -69,6 +73,7 @@ def test_0025_change_repository_category(self): categories_to_remove=["Test 0000 Basic Repository Features 1"], ) + @skip_if_api_v2 def test_0030_grant_write_access(self): """Grant write access to another user""" repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) @@ -120,6 +125,7 @@ def test_0040_verify_repository(self): strings_displayed=strings, ) + @skip_if_api_v2 def test_0045_alter_repository_states(self): """Test toggling the malicious and deprecated repository flags.""" repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) @@ -147,13 +153,14 @@ def test_0045_alter_repository_states(self): strings_displayed = ["Mark repository as deprecated", "Reset all repository metadata"] self.display_manage_repository_page(repository, strings_displayed=strings_displayed) + @skip_if_api_v2 + # probably not porting this functionality - just test + # with Twill for older UI and drop when that is all dropped def test_0050_display_repository_tip_file(self): """Display the contents of filtering.xml in the repository tip revision""" repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) assert repository if self._browser.is_twill: - # probably not porting this functionality - just test - # with Twill for older UI and drop when that is all dropped self.display_repository_file_contents( repository=repository, filename="filtering.xml", @@ -167,9 +174,7 @@ def test_0055_upload_filtering_txt_file(self): repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) self.add_file_to_repository(repository, "filtering/filtering_0000.txt") expected = self._escape_page_content_if_needed("Readme file for filtering 1.1.0") - self.display_manage_repository_page( - repository, strings_displayed=[expected] - ) + self.display_manage_repository_page(repository, strings_displayed=[expected]) def test_0060_upload_filtering_test_data(self): """Upload filtering test data.""" @@ -197,7 +202,10 @@ def test_0070_verify_filtering_repository(self): repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) tip = self.get_repository_tip(repository) self.check_for_valid_tools(repository) - strings_displayed = ["Select a revision"] + if self.is_v2: + strings_displayed = [] + else: + strings_displayed = ["Select a revision"] self.display_manage_repository_page(repository, strings_displayed=strings_displayed) self.check_count_of_metadata_revisions_associated_with_repository(repository, metadata_count=2) tool_guid = f"{self.url.replace('http://', '').rstrip('/')}/repos/user1/filtering_0000/Filter1/2.2.0" @@ -222,9 +230,7 @@ def test_0075_upload_readme_txt_file(self): repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) self.add_file_to_repository(repository, "readme.txt") content = self._escape_page_content_if_needed("This is a readme file.") - self.display_manage_repository_page( - repository, strings_displayed=[content] - ) + self.display_manage_repository_page(repository, strings_displayed=[content]) # Verify that there is a different readme file for each metadata revision. 
readme_content = self._escape_page_content_if_needed("Readme file for filtering 1.1.0")
         self.display_manage_repository_page(
@@ -241,10 +247,9 @@ def test_0080_delete_readme_txt_file(self):
         self.delete_files_from_repository(repository, filenames=["readme.txt"])
         self.check_count_of_metadata_revisions_associated_with_repository(repository, metadata_count=2)
         readme_content = self._escape_page_content_if_needed("Readme file for filtering 1.1.0")
-        self.display_manage_repository_page(
-            repository, strings_displayed=[readme_content]
-        )
+        self.display_manage_repository_page(repository, strings_displayed=[readme_content])

+    @skip_if_api_v2  # not re-implemented in the UI, there are API tests though
     def test_0085_search_for_valid_filter_tool(self):
         """Search for the filtering tool by tool ID, name, and version."""
         repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name)
@@ -279,16 +284,20 @@ def test_0100_verify_reserved_username_handling(self):
         self.login(email="baduser@bx.psu.edu", username="repos")
         test_user_1 = self.test_db_util.get_user("baduser@bx.psu.edu")
         assert test_user_1 is None, 'Creating user with public name "repos" succeeded.'
-        error_message = (
-            "The term 'repos' is a reserved word in the Tool Shed, so it cannot be used as a public user name."
-        )
-        self.check_for_strings(strings_displayed=[error_message])
+        if not self.is_v2:
+            # we no longer use this terminology, but the above test case ensures
+            # the important thing and caught a bug in v2
+            error_message = (
+                "The term 'repos' is a reserved word in the Tool Shed, so it cannot be used as a public user name."
+            )
+            self.check_for_strings(strings_displayed=[error_message])

     def test_0105_contact_repository_owner(self):
         """"""
         # We no longer implement this.
         pass

+    @skip_if_api_v2  # v2 doesn't implement deleting repositories
     def test_0110_delete_filtering_repository(self):
         """Delete the filtering_0000 repository and verify that it no longer has any downloadable revisions."""
         repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name)
@@ -303,6 +312,7 @@ def test_0110_delete_filtering_repository(self):
         # Marking a repository as deleted should result in no metadata revisions being downloadable.
         # assert True not in [metadata.downloadable for metadata in self._db_repository(repository).metadata_revisions]

+    @skip_if_api_v2  # v2 doesn't implement deleting repositories
     def test_0115_undelete_filtering_repository(self):
         """Undelete the filtering_0000 repository and verify that it now has two downloadable revisions."""
         repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name)
@@ -316,6 +326,7 @@ def test_0115_undelete_filtering_repository(self):
         assert True in [metadata.downloadable for metadata in self._db_repository(repository).metadata_revisions]
         assert len(self._db_repository(repository).downloadable_revisions) == 2

+    @skip_if_api_v2  # not re-implementing in tool shed 2.0
     def test_0120_enable_email_notifications(self):
         """Enable email notifications for test user 2 on filtering_0000."""
         # Log in as test_user_2
@@ -332,9 +343,7 @@ def test_0125_upload_new_readme_file(self):
         # Upload readme.txt to the filtering_0000 repository and verify that it is now displayed.
 self.add_file_to_repository(repository, "filtering/readme.txt")
         content = self._escape_page_content_if_needed("These characters should not")
-        self.display_manage_repository_page(
-            repository, strings_displayed=[content]
-        )
+        self.display_manage_repository_page(repository, strings_displayed=[content])

     def test_0130_verify_handling_of_invalid_characters(self):
         """Load the above changeset in the change log and confirm that there is no server error displayed."""
diff --git a/lib/tool_shed/test/functional/test_0010_repository_with_tool_dependencies.py b/lib/tool_shed/test/functional/test_0010_repository_with_tool_dependencies.py
index 830e0d0022c7..4b9d27ae19d7 100644
--- a/lib/tool_shed/test/functional/test_0010_repository_with_tool_dependencies.py
+++ b/lib/tool_shed/test/functional/test_0010_repository_with_tool_dependencies.py
@@ -1,5 +1,6 @@
 import os

+from ..base.api import skip_if_api_v2
 from ..base.twilltestcase import (
     common,
     ShedTwillTestCase,
@@ -55,8 +56,11 @@ def test_0010_create_freebayes_repository_and_upload_tool_xml(self):
         assert repository
         strings_displayed = ["Metadata may have been defined", "This file requires an entry", "tool_data_table_conf"]
         self.add_file_to_repository(repository, "freebayes/freebayes.xml", strings_displayed=strings_displayed)
+        if self.is_v2:
+            # oops... not good, right?
+            self.populator.reset_metadata(repository)
         self.display_manage_repository_page(
-            repository, strings_displayed=["Invalid tools"], strings_not_displayed=["Valid tools"]
+            repository, strings_displayed=[self.invalid_tools_labels], strings_not_displayed=["Valid tools"]
         )
         tip = self.get_repository_tip(repository)
         strings_displayed = ["requires an entry", "tool_data_table_conf.xml"]
@@ -74,7 +78,7 @@ def test_0015_upload_missing_tool_data_table_conf_file(self):
             repository, "freebayes/tool_data_table_conf.xml.sample", strings_displayed=strings_displayed
         )
         self.display_manage_repository_page(
-            repository, strings_displayed=["Invalid tools"], strings_not_displayed=["Valid tools"]
+            repository, strings_displayed=[self.invalid_tools_labels], strings_not_displayed=["Valid tools"]
         )
         tip = self.get_repository_tip(repository)
         strings_displayed = ["refers to a file", "sam_fa_indices.loc"]
@@ -124,6 +128,7 @@ def test_0035_upload_valid_tool_dependency_xml(self):
         target = os.path.join("freebayes", "tool_dependencies.xml")
         self.add_file_to_repository(repository, target)

+    @skip_if_api_v2
     def test_0040_verify_tool_dependencies(self):
         """Verify that the uploaded tool_dependencies.xml specifies the correct package versions.
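As an aside, the tool GUIDs these tests assert on (for example in test_0070_verify_filtering_repository above) are just the shed host plus the repos path; a small hypothetical helper, not part of this patch, makes the construction explicit (the host below is illustrative):

def tool_guid(shed_url: str, owner: str, name: str, tool_id: str, version: str) -> str:
    # Mirrors the inline f-string used by the tests:
    # f"{self.url.replace('http://', '').rstrip('/')}/repos/user1/filtering_0000/Filter1/2.2.0"
    host = shed_url.replace("http://", "").replace("https://", "").rstrip("/")
    return f"{host}/repos/{owner}/{name}/{tool_id}/{version}"


assert (
    tool_guid("http://localhost:9009", "user1", "filtering_0000", "Filter1", "2.2.0")
    == "localhost:9009/repos/user1/filtering_0000/Filter1/2.2.0"
)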
@@ -132,7 +137,7 @@ def test_0040_verify_tool_dependencies(self): """ repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) strings_displayed = ["freebayes", "0.9.4_9696d0ce8a9", "samtools", "0.1.18", "Valid tools", "package"] - strings_not_displayed = ["Invalid tools"] + strings_not_displayed = [self.invalid_tools_labels] self.display_manage_repository_page( repository, strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed ) diff --git a/lib/tool_shed/test/functional/test_0020_basic_repository_dependencies.py b/lib/tool_shed/test/functional/test_0020_basic_repository_dependencies.py index 47fe9b0a0b84..f4fce544de59 100644 --- a/lib/tool_shed/test/functional/test_0020_basic_repository_dependencies.py +++ b/lib/tool_shed/test/functional/test_0020_basic_repository_dependencies.py @@ -1,3 +1,4 @@ +from ..base.api import skip_if_api_v2 from ..base.twilltestcase import ( common, ShedTwillTestCase, @@ -70,6 +71,7 @@ def test_0025_generate_and_upload_repository_dependencies_xml(self): repository=repository, repository_tuples=[repository_tuple], filepath=repository_dependencies_path ) + @skip_if_api_v2 def test_0030_verify_emboss_5_dependencies(self): """Verify that the emboss_5 repository now depends on the emboss_datatypes repository with correct name, owner, and changeset revision.""" repository = self._get_repository_by_name_and_owner(emboss_repository_name, common.test_user_1_name) diff --git a/lib/tool_shed/test/functional/test_0030_repository_dependency_revisions.py b/lib/tool_shed/test/functional/test_0030_repository_dependency_revisions.py index 3b9883eb439e..8436d84d8794 100644 --- a/lib/tool_shed/test/functional/test_0030_repository_dependency_revisions.py +++ b/lib/tool_shed/test/functional/test_0030_repository_dependency_revisions.py @@ -1,3 +1,4 @@ +from ..base.api import skip_if_api_v2 from ..base.twilltestcase import ( common, ShedTwillTestCase, @@ -161,6 +162,7 @@ def test_0045_generate_repository_dependency_on_emboss_6(self): repository=emboss_repository, repository_tuples=[emboss_tuple], filepath=repository_dependencies_path ) + @skip_if_api_v2 def test_0050_verify_repository_dependency_revisions(self): """Verify that different metadata revisions of the emboss repository have different repository dependencies.""" repository = self._get_repository_by_name_and_owner(emboss_repository_name, common.test_user_1_name) diff --git a/lib/tool_shed/test/functional/test_0040_repository_circular_dependencies.py b/lib/tool_shed/test/functional/test_0040_repository_circular_dependencies.py index d0382b3de0cc..c1a5f0de3315 100644 --- a/lib/tool_shed/test/functional/test_0040_repository_circular_dependencies.py +++ b/lib/tool_shed/test/functional/test_0040_repository_circular_dependencies.py @@ -1,3 +1,4 @@ +from ..base.api import skip_if_api_v2 from ..base.twilltestcase import ( common, ShedTwillTestCase, @@ -129,6 +130,7 @@ def test_0035_verify_repository_metadata(self): for repository in [freebayes_repository, filtering_repository]: self.verify_unchanged_repository_metadata(repository) + @skip_if_api_v2 def test_0040_verify_tool_dependencies(self): """Verify that freebayes displays tool dependencies.""" repository = self._get_repository_by_name_and_owner(freebayes_repository_name, common.test_user_1_name) diff --git a/lib/tool_shed/test/functional/test_0050_circular_dependencies_4_levels.py b/lib/tool_shed/test/functional/test_0050_circular_dependencies_4_levels.py index bbddfa0c219e..c774cf278cf3 100644 --- 
a/lib/tool_shed/test/functional/test_0050_circular_dependencies_4_levels.py +++ b/lib/tool_shed/test/functional/test_0050_circular_dependencies_4_levels.py @@ -1,3 +1,4 @@ +from ..base.api import skip_if_api_v2 from ..base.twilltestcase import ( common, ShedTwillTestCase, @@ -245,9 +246,11 @@ def test_0045_verify_repository_dependencies(self): self.check_repository_dependency(filtering_repository, emboss_repository) for repository in [bismark_repository, emboss_repository, column_repository]: self.check_repository_dependency(freebayes_repository, repository) - strings_displayed = ["freebayes_0050 depends on freebayes_0050, emboss_0050, column_maker_0050."] - self.display_manage_repository_page(freebayes_repository, strings_displayed=strings_displayed) + if not self.is_v2: + strings_displayed = ["freebayes_0050 depends on freebayes_0050, emboss_0050, column_maker_0050."] + self.display_manage_repository_page(freebayes_repository, strings_displayed=strings_displayed) + @skip_if_api_v2 def test_0050_verify_tool_dependencies(self): """Check that freebayes and emboss display tool dependencies.""" freebayes_repository = self._get_repository_by_name_and_owner( diff --git a/lib/tool_shed/test/functional/test_0070_invalid_tool.py b/lib/tool_shed/test/functional/test_0070_invalid_tool.py index 9462bdb3b251..df577f7ea4d1 100644 --- a/lib/tool_shed/test/functional/test_0070_invalid_tool.py +++ b/lib/tool_shed/test/functional/test_0070_invalid_tool.py @@ -32,7 +32,7 @@ def test_0005_create_category_and_repository(self): ) self.user_populator().setup_bismark_repo(repository) invalid_revision = self.get_repository_first_revision(repository) - self.display_manage_repository_page(repository, strings_displayed=["Invalid tools"]) + self.display_manage_repository_page(repository, strings_displayed=[self.invalid_tools_labels]) valid_revision = self.get_repository_tip(repository) tool_guid = f"{self.url.replace('http://', '').rstrip('/')}/repos/user1/bismark_0070/bismark_methylation_extractor/0.7.7.3" tool_metadata_strings_displayed = [ diff --git a/lib/tool_shed/test/functional/test_0100_complex_repository_dependencies.py b/lib/tool_shed/test/functional/test_0100_complex_repository_dependencies.py index 6ff5a4a431ed..fb7625860558 100644 --- a/lib/tool_shed/test/functional/test_0100_complex_repository_dependencies.py +++ b/lib/tool_shed/test/functional/test_0100_complex_repository_dependencies.py @@ -1,6 +1,7 @@ import logging import os +from ..base.api import skip_if_api_v2 from ..base.twilltestcase import ( common, ShedTwillTestCase, @@ -44,10 +45,11 @@ def test_0005_create_bwa_package_repository(self): strings_displayed=[], ) self.add_file_to_repository(repository, "bwa/complex/tool_dependencies.xml") - # Visit the manage repository page for package_bwa_0_5_9_0100. - self.display_manage_repository_page( - repository, strings_displayed=["Tool dependencies", "will not be", "to this repository"] - ) + if not self.is_v2: + # Visit the manage repository page for package_bwa_0_5_9_0100. 
+ self.display_manage_repository_page( + repository, strings_displayed=["Tool dependencies", "will not be", "to this repository"] + ) def test_0010_create_bwa_base_repository(self): """Create and populate bwa_base_0100.""" @@ -183,10 +185,12 @@ def test_0035_generate_complex_repository_dependency(self): version="0.5.9", ) self.check_repository_dependency(base_repository, depends_on_repository=tool_repository) - self.display_manage_repository_page( - base_repository, strings_displayed=["bwa", "0.5.9", "package", changeset_revision] - ) + if not self.is_v2: + self.display_manage_repository_page( + base_repository, strings_displayed=["bwa", "0.5.9", "package", changeset_revision] + ) + @skip_if_api_v2 def test_0040_generate_tool_dependency(self): """Generate and upload a new tool_dependencies.xml file that specifies an arbitrary file on the filesystem, and verify that bwa_base depends on the new changeset revision.""" # The base_repository named bwa_base_repository_0100 is the dependent repository. diff --git a/lib/tool_shed/test/functional/test_0120_simple_repository_dependency_multiple_owners.py b/lib/tool_shed/test/functional/test_0120_simple_repository_dependency_multiple_owners.py index 71faea2f7758..71cd3322379f 100644 --- a/lib/tool_shed/test/functional/test_0120_simple_repository_dependency_multiple_owners.py +++ b/lib/tool_shed/test/functional/test_0120_simple_repository_dependency_multiple_owners.py @@ -65,16 +65,8 @@ def test_0010_verify_datatypes_repository(self): the datatypes that are defined in datatypes_conf.xml. """ repository = self._get_repository_by_name_and_owner(datatypes_repository_name, common.test_user_2_name) - strings_displayed = [ - "BlastXml", - "BlastNucDb", - "BlastProtDb", - "application/xml", - "text/html", - "blastxml", - "blastdbn", - "blastdbp", - ] + # v2 rightfully doesn't display anything about datatypes... + strings_displayed = ["Galaxy datatypes for the BLAST top hit"] self.display_manage_repository_page(repository, strings_displayed=strings_displayed) def test_0015_create_tool_repository(self): @@ -108,7 +100,9 @@ def test_0020_verify_tool_repository(self): """ repository = self._get_repository_by_name_and_owner(tool_repository_name, common.test_user_1_name) strings_displayed = ["blastxml_to_top_descr_0120", "BLAST top hit descriptions", "Make a table from BLAST XML"] - strings_displayed.extend(["0.0.1", "Valid tools"]) + strings_displayed.append("0.0.1") + if not self.is_v2: + strings_displayed.append("Valid tools") self.display_manage_repository_page(repository, strings_displayed=strings_displayed) def test_0025_create_repository_dependency(self): diff --git a/lib/tool_shed/test/functional/test_0140_tool_help_images.py b/lib/tool_shed/test/functional/test_0140_tool_help_images.py index bce376d06a6c..1a8247747700 100644 --- a/lib/tool_shed/test/functional/test_0140_tool_help_images.py +++ b/lib/tool_shed/test/functional/test_0140_tool_help_images.py @@ -1,5 +1,6 @@ import logging +from ..base.api import skip_if_api_v2 from ..base.twilltestcase import ( common, ShedTwillTestCase, @@ -56,6 +57,7 @@ def test_0005_create_htseq_count_repository(self): commit_message="Uploaded htseq_count.tar.", ) + @skip_if_api_v2 def test_0010_load_tool_page(self): """Load the tool page and check for the image. 
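The skip_if_api_v2 guard imported throughout these modules lives in lib/tool_shed/test/base/api.py alongside skip_if_api_v1; roughly, the idea is a conditional unittest skip, sketched below with an assumed TOOL_SHED_API_VERSION environment variable (the real decorator keys off the test's API interactor and may be implemented differently):

import os
import unittest


def skip_if_api_v2_sketch(method):
    # Skip a test method when the suite targets the v2 tool shed API.
    return unittest.skipIf(
        os.environ.get("TOOL_SHED_API_VERSION", "v1") == "v2",
        "not implemented in tool shed 2.0",
    )(method)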
diff --git a/lib/tool_shed/test/functional/test_0170_complex_prior_installation_required.py b/lib/tool_shed/test/functional/test_0170_complex_prior_installation_required.py index 6b2b78107609..d0d1b3b1bad4 100644 --- a/lib/tool_shed/test/functional/test_0170_complex_prior_installation_required.py +++ b/lib/tool_shed/test/functional/test_0170_complex_prior_installation_required.py @@ -125,6 +125,7 @@ def test_0020_verify_generated_dependency(self): ) changeset_revision = self.get_repository_tip(numpy_repository) self.check_repository_dependency(matplotlib_repository, depends_on_repository=numpy_repository) - self.display_manage_repository_page( - matplotlib_repository, strings_displayed=["numpy", "1.7", "package", changeset_revision] - ) + if not self.is_v2: + self.display_manage_repository_page( + matplotlib_repository, strings_displayed=["numpy", "1.7", "package", changeset_revision] + ) diff --git a/lib/tool_shed/test/functional/test_0420_citable_urls_for_repositories.py b/lib/tool_shed/test/functional/test_0420_citable_urls_for_repositories.py index 31d06c129fe2..0a46eec3fe7b 100644 --- a/lib/tool_shed/test/functional/test_0420_citable_urls_for_repositories.py +++ b/lib/tool_shed/test/functional/test_0420_citable_urls_for_repositories.py @@ -9,7 +9,7 @@ repository_name = "filtering_0420" repository_description = "Galaxy filtering tool for test 0420" -repository_long_description = "Long description of Galaxy filtering tool for test 0410" +repository_long_description = "Long description of Galaxy filtering tool for test 0420" first_changeset_hash = "" @@ -88,9 +88,12 @@ def test_0015_load_user_view_page(self): # Since twill does not load the contents of an iframe, we need to check that the iframe has been generated correctly, # then directly load the url that the iframe should be loading and check for the expected strings. # The iframe should point to /repository/browse_repositories?user_id=&operation=repositories_by_user - strings_displayed = ["/repository/browse_repositories", encoded_user_id, "operation=repositories_by_user"] - strings_displayed.append(encoded_user_id) - strings_displayed_in_iframe = ["user1", "filtering_0420", "Galaxy filtering tool for test 0420"] + if self.is_v2: + strings_displayed = [] + else: + strings_displayed = ["/repository/browse_repositories", encoded_user_id, "operation=repositories_by_user"] + strings_displayed.append(encoded_user_id) + strings_displayed_in_iframe = ["user1", "filtering_0420", repository_description] self.load_citable_url( username="user1", repository_name=None, @@ -115,11 +118,19 @@ def test_0020_load_repository_view_page(self): # Since twill does not load the contents of an iframe, we need to check that the iframe has been generated correctly, # then directly load the url that the iframe should be loading and check for the expected strings. 
 # The iframe should point to /repository/view_repository?id=
-        strings_displayed = ["/repository", "view_repository", "id=", encoded_repository_id]
-        strings_displayed_in_iframe = ["user1", "filtering_0420", "Galaxy filtering tool for test 0420"]
+        if self.is_v2:
+            strings_displayed = []
+        else:
+            strings_displayed = ["/repository", "view_repository", "id=", encoded_repository_id]
+        strings_displayed_in_iframe = [
+            "user1",
+            "filtering_0420",
+            self._escape_page_content_if_needed(repository_long_description),
+        ]
         strings_displayed_in_iframe.append(self.get_repository_tip(repository))
-        strings_displayed_in_iframe.append("Link to this repository:")
-        strings_displayed_in_iframe.append(f"{self.url}/view/user1/filtering_0420")
+        if not self.is_v2:
+            strings_displayed_in_iframe.append("Link to this repository:")
+            strings_displayed_in_iframe.append(f"{self.url}/view/user1/filtering_0420")
         self.load_citable_url(
             username="user1",
             repository_name="filtering_0420",
@@ -145,15 +156,19 @@ def test_0025_load_view_page_for_previous_revision(self):
         # Since twill does not load the contents of an iframe, we need to check that the iframe has been generated correctly,
         # then directly load the url that the iframe should be loading and check for the expected strings.
         # The iframe should point to /repository/view_repository?id=
-        strings_displayed = ["/repository", "view_repository", f"id={encoded_repository_id}"]
+        if self.is_v2:
+            strings_displayed = []
+        else:
+            strings_displayed = ["/repository", "view_repository", f"id={encoded_repository_id}"]
         strings_displayed_in_iframe = [
             "user1",
             "filtering_0420",
-            "Galaxy filtering tool for test 0420",
+            self._escape_page_content_if_needed(repository_long_description),
             first_changeset_hash,
         ]
-        strings_displayed_in_iframe.append("Link to this repository revision:")
-        strings_displayed_in_iframe.append(f"{self.url}/view/user1/filtering_0420/{first_changeset_hash}")
+        if not self.is_v2:
+            strings_displayed_in_iframe.append("Link to this repository revision:")
+            strings_displayed_in_iframe.append(f"{self.url}/view/user1/filtering_0420/{first_changeset_hash}")
         strings_not_displayed_in_iframe = []
         self.load_citable_url(
             username="user1",
@@ -173,13 +188,16 @@ def test_0030_load_sharable_url_with_invalid_changeset_revision(self):
         encoded_user_id = self.security.encode_id(test_user_1.id)
         encoded_repository_id = repository.id
         invalid_changeset_hash = "invalid"
-        # Since twill does not load the contents of an iframe, we need to check that the iframe has been generated correctly,
-        # then directly load the url that the iframe should be loading and check for the expected strings.
-        # The iframe should point to /repository/view_repository?id=&status=error
-        strings_displayed = ["/repository", "view_repository", f"id={encoded_repository_id}"]
-        strings_displayed.extend(
-            ["The+change+log", "does+not+include+revision", invalid_changeset_hash, "status=error"]
-        )
+        if not self.is_v2:
+            # Since twill does not load the contents of an iframe, we need to check that the iframe has been generated correctly,
+            # then directly load the url that the iframe should be loading and check for the expected strings.
+ # The iframe should point to /repository/view_repository?id=&status=error + strings_displayed = ["/repository", "view_repository", f"id={encoded_repository_id}"] + strings_displayed.extend( + ["The+change+log", "does+not+include+revision", invalid_changeset_hash, "status=error"] + ) + else: + strings_displayed = ["The change log does not include revision " + invalid_changeset_hash] self.load_citable_url( username="user1", repository_name="filtering_0420", @@ -200,12 +218,16 @@ def test_0035_load_sharable_url_with_invalid_repository_name(self): # Since twill does not load the contents of an iframe, we need to check that the iframe has been generated correctly, # then directly load the url that the iframe should be loading and check for the expected strings. # The iframe should point to /repository/browse_repositories?user_id=&operation=repositories_by_user - strings_displayed = ["/repository", "browse_repositories", "user1"] - strings_displayed.extend( - ["list+of+repositories+owned", "does+not+include+one+named", "%21%21invalid%21%21", "status=error"] - ) - strings_displayed_in_iframe = ["user1", "filtering_0420"] - strings_displayed_in_iframe.append("Repositories Owned by user1") + if not self.is_v2: + strings_displayed = ["/repository", "browse_repositories", "user1"] + strings_displayed.extend( + ["list+of+repositories+owned", "does+not+include+one+named", "%21%21invalid%21%21", "status=error"] + ) + strings_displayed_in_iframe = ["user1", "filtering_0420"] + strings_displayed_in_iframe.append("Repositories Owned by user1") + else: + strings_displayed = ["Repository user1/!!invalid!! is not found"] + strings_displayed_in_iframe = [] self.load_citable_url( username="user1", repository_name="!!invalid!!", @@ -222,7 +244,10 @@ def test_0040_load_sharable_url_with_invalid_owner(self): We are at step 8. Visit the following url and check for appropriate strings: /view/!!invalid!! """ - strings_displayed = ["The tool shed", self.url, "contains no repositories owned by", "!!invalid!!"] + if not self.is_v2: + strings_displayed = ["The tool shed", self.url, "contains no repositories owned by", "!!invalid!!"] + else: + strings_displayed = ["No repositories found"] self.load_citable_url( username="!!invalid!!", repository_name=None, diff --git a/lib/tool_shed/test/functional/test_0430_browse_utilities.py b/lib/tool_shed/test/functional/test_0430_browse_utilities.py index 104d2e0e28b3..0202c5baf2ca 100644 --- a/lib/tool_shed/test/functional/test_0430_browse_utilities.py +++ b/lib/tool_shed/test/functional/test_0430_browse_utilities.py @@ -1,5 +1,6 @@ import logging +from ..base.api import skip_if_api_v2 from ..base.twilltestcase import ( common, ShedTwillTestCase, @@ -85,6 +86,7 @@ def test_0020_create_tool_dependency_repository(self): commit_message="Uploaded freebayes.tar.", ) + @skip_if_api_v2 def test_0030_browse_tools(self): """Load the page to browse tools. @@ -96,6 +98,7 @@ def test_0030_browse_tools(self): strings_displayed = ["EMBOSS", "antigenic1", "5.0.0", changeset_revision, "user1", "emboss_0430"] self.browse_tools(strings_displayed=strings_displayed) + @skip_if_api_v2 def test_0040_browse_tool_dependencies(self): """Browse tool dependencies and look for the right versions of freebayes and samtools. 
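A note on the test_0420 assertions above: the v1 checks match URL-encoded fragments embedded in redirect URLs, while the v2 checks match rendered page text. The two forms are related by ordinary percent-encoding, e.g.:

from urllib.parse import quote

# "!!invalid!!" appears as %21%21invalid%21%21 when embedded in a v1
# redirect URL, which is why the old assertions look so different.
assert quote("!!invalid!!", safe="") == "%21%21invalid%21%21"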
diff --git a/lib/tool_shed/test/functional/test_0460_upload_to_repository.py b/lib/tool_shed/test/functional/test_0460_upload_to_repository.py index 5c8cb5ea9907..d1c04840ef8e 100644 --- a/lib/tool_shed/test/functional/test_0460_upload_to_repository.py +++ b/lib/tool_shed/test/functional/test_0460_upload_to_repository.py @@ -129,12 +129,13 @@ def test_0020_populate_complex_dependency_test_1_0460(self): repository = self._get_repository_by_name_and_owner("complex_dependency_test_1_0460", common.test_user_1_name) package_repository = self._get_repository_by_name_and_owner("package_bwa_0_5_9_0460", common.test_user_1_name) self.add_file_to_repository(repository, "0460_files/tool_dependencies.xml") - changeset_revision = self.get_repository_tip(package_repository) - strings_displayed = ["package_bwa_0_5_9_0460", "bwa", "0.5.9", "package", changeset_revision] - self.display_manage_repository_page(repository, strings_displayed=strings_displayed) - self.display_repository_file_contents( - repository, filename="tool_dependencies.xml", strings_displayed=[changeset_revision] - ) + if not self.is_v2: + changeset_revision = self.get_repository_tip(package_repository) + strings_displayed = ["package_bwa_0_5_9_0460", "bwa", "0.5.9", "package", changeset_revision] + self.display_manage_repository_page(repository, strings_displayed=strings_displayed) + self.display_repository_file_contents( + repository, filename="tool_dependencies.xml", strings_displayed=[changeset_revision] + ) def test_0025_populate_complex_dependency_test_2_0460(self): """Populate complex_dependency_test_2_0460. @@ -149,12 +150,13 @@ def test_0025_populate_complex_dependency_test_2_0460(self): "0460_files/tool_dependencies_in_root.tar", commit_message="Uploaded complex repository dependency definition.", ) - changeset_revision = self.get_repository_tip(package_repository) - strings_displayed = ["package_bwa_0_5_9_0460", "bwa", "0.5.9", "package", changeset_revision] - self.display_manage_repository_page(repository, strings_displayed=strings_displayed) - self.display_repository_file_contents( - repository, filename="tool_dependencies.xml", strings_displayed=[changeset_revision] - ) + if not self.is_v2: + changeset_revision = self.get_repository_tip(package_repository) + strings_displayed = ["package_bwa_0_5_9_0460", "bwa", "0.5.9", "package", changeset_revision] + self.display_manage_repository_page(repository, strings_displayed=strings_displayed) + self.display_repository_file_contents( + repository, filename="tool_dependencies.xml", strings_displayed=[changeset_revision] + ) def test_0030_populate_complex_dependency_test_3_0460(self): """Populate complex_dependency_test_3_0460. 
@@ -170,11 +172,15 @@ def test_0030_populate_complex_dependency_test_3_0460(self): commit_message="Uploaded complex repository dependency definition.", ) changeset_revision = self.get_repository_tip(package_repository) - strings_displayed = ["package_bwa_0_5_9_0460", "bwa", "0.5.9", "package", changeset_revision] - self.display_manage_repository_page(repository, strings_displayed=strings_displayed) - self.display_repository_file_contents( - repository, filename="tool_dependencies.xml", filepath="subfolder", strings_displayed=[changeset_revision] - ) + if not self.is_v2: + strings_displayed = ["package_bwa_0_5_9_0460", "bwa", "0.5.9", "package", changeset_revision] + self.display_manage_repository_page(repository, strings_displayed=strings_displayed) + self.display_repository_file_contents( + repository, + filename="tool_dependencies.xml", + filepath="subfolder", + strings_displayed=[changeset_revision], + ) def test_0035_create_repositories_for_url_upload(self): """Create and populate hg_tool_dependency_0460 and hg_subfolder_tool_dependency_0460. @@ -244,11 +250,12 @@ def test_0055_populate_repository_dependency_test_1_0460(self): package_repository = self._get_repository_by_name_and_owner(bwa_repository_name, common.test_user_1_name) self.add_file_to_repository(repository, "0460_files/repository_dependencies.xml") changeset_revision = self.get_repository_tip(package_repository) - strings_displayed = [bwa_repository_name, "user1", changeset_revision] - self.display_manage_repository_page(repository, strings_displayed=strings_displayed) - self.display_repository_file_contents( - repository, filename="repository_dependencies.xml", strings_displayed=[changeset_revision] - ) + if not self.is_v2: + strings_displayed = [bwa_repository_name, "user1", changeset_revision] + self.display_manage_repository_page(repository, strings_displayed=strings_displayed) + self.display_repository_file_contents( + repository, filename="repository_dependencies.xml", strings_displayed=[changeset_revision] + ) def test_0060_populate_repository_dependency_test_2_0460(self): """Populate repository_dependency_test_2_0460. @@ -265,11 +272,12 @@ def test_0060_populate_repository_dependency_test_2_0460(self): commit_message="Uploaded complex repository dependency definition.", ) changeset_revision = self.get_repository_tip(package_repository) - strings_displayed = [bwa_repository_name, "user1", changeset_revision] - self.display_manage_repository_page(repository, strings_displayed=strings_displayed) - self.display_repository_file_contents( - repository, filename="repository_dependencies.xml", strings_displayed=[changeset_revision] - ) + if not self.is_v2: + strings_displayed = [bwa_repository_name, "user1", changeset_revision] + self.display_manage_repository_page(repository, strings_displayed=strings_displayed) + self.display_repository_file_contents( + repository, filename="repository_dependencies.xml", strings_displayed=[changeset_revision] + ) def test_0065_populate_repository_dependency_test_3_0460(self): """Populate repository_dependency_test_3_0460. 
@@ -287,14 +295,15 @@ def test_0065_populate_repository_dependency_test_3_0460(self): commit_message="Uploaded complex repository dependency definition.", ) changeset_revision = self.get_repository_tip(package_repository) - strings_displayed = [bwa_repository_name, "user1", changeset_revision] - self.display_manage_repository_page(repository, strings_displayed=strings_displayed) - self.display_repository_file_contents( - repository, - filename="repository_dependencies.xml", - filepath="subfolder", - strings_displayed=[changeset_revision], - ) + if not self.is_v2: + strings_displayed = [bwa_repository_name, "user1", changeset_revision] + self.display_manage_repository_page(repository, strings_displayed=strings_displayed) + self.display_repository_file_contents( + repository, + filename="repository_dependencies.xml", + filepath="subfolder", + strings_displayed=[changeset_revision], + ) def test_0070_create_repositories_for_url_upload(self): """Create and populate hg_repository_dependency_0460 and hg_subfolder_repository_dependency_0460. diff --git a/lib/tool_shed/test/functional/test_0530_repository_admin_feature.py b/lib/tool_shed/test/functional/test_0530_repository_admin_feature.py index 19294287a283..348f70281e88 100644 --- a/lib/tool_shed/test/functional/test_0530_repository_admin_feature.py +++ b/lib/tool_shed/test/functional/test_0530_repository_admin_feature.py @@ -1,5 +1,6 @@ import logging +from ..base.api import skip_if_api_v2 from ..base.twilltestcase import ( common, ShedTwillTestCase, @@ -98,6 +99,7 @@ def test_0020_rename_repository(self): repository = self._get_repository_by_name_and_owner("renamed_filtering_0530", common.test_user_1_name) assert repository.name == "renamed_filtering_0530", "Repository was not renamed to renamed_filtering_0530." + @skip_if_api_v2 def test_0030_verify_access_denied(self): """Make sure a non-admin user can't modify the repository. diff --git a/lib/tool_shed/test/functional/test_0550_metadata_updated_dependencies.py b/lib/tool_shed/test/functional/test_0550_metadata_updated_dependencies.py index f622b05ac2ab..8c5b08cec3f9 100644 --- a/lib/tool_shed/test/functional/test_0550_metadata_updated_dependencies.py +++ b/lib/tool_shed/test/functional/test_0550_metadata_updated_dependencies.py @@ -71,10 +71,11 @@ def test_0005_freebayes_repository(self): freebayes, "0550_files/package_freebayes_1_0550.tgz", ) - # Visit the manage repository page for package_freebayes_0_5_9_0100. - self.display_manage_repository_page( - freebayes, strings_displayed=["Tool dependencies", "will not be", "to this repository"] - ) + if not self.is_v2: + # Visit the manage repository page for package_freebayes_0_5_9_0100. 
+ self.display_manage_repository_page( + freebayes, strings_displayed=["Tool dependencies", "will not be", "to this repository"] + ) def test_0010_create_samtools_repository(self): """Create and populate the package_samtools_0550 repository.""" @@ -118,7 +119,8 @@ def test_0020_check_repository_dependency(self): samtools = self._get_repository_by_name_and_owner(repositories["samtools"]["name"], common.test_user_1_name) filtering = self._get_repository_by_name_and_owner(repositories["filtering"]["name"], common.test_user_1_name) strings_displayed = [freebayes.id, samtools.id] - self.display_manage_repository_page(filtering, strings_displayed=strings_displayed) + if not self.is_v2: + self.display_manage_repository_page(filtering, strings_displayed=strings_displayed) def test_0025_update_dependent_repositories(self): """ diff --git a/lib/tool_shed/test/functional/test_1010_install_repository_with_tool_dependencies.py b/lib/tool_shed/test/functional/test_1010_install_repository_with_tool_dependencies.py index 2df6669aee2f..976d2c354ecc 100644 --- a/lib/tool_shed/test/functional/test_1010_install_repository_with_tool_dependencies.py +++ b/lib/tool_shed/test/functional/test_1010_install_repository_with_tool_dependencies.py @@ -43,10 +43,11 @@ def test_0010_browse_tool_shed(self): self.browse_tool_shed(url=self.url, strings_displayed=[category_name]) category = self.populator.get_category_with_name(category_name) self.browse_category(category, strings_displayed=[repository_name]) - strings_displayed = [repository_name, "Valid tools", "Tool dependencies"] - self.preview_repository_in_tool_shed( - repository_name, common.test_user_1_name, strings_displayed=strings_displayed - ) + if not self.is_v2: + strings_displayed = [repository_name, "Valid tools", "Tool dependencies"] + self.preview_repository_in_tool_shed( + repository_name, common.test_user_1_name, strings_displayed=strings_displayed + ) def test_0015_install_freebayes_repository(self): """Install the freebayes repository without installing tool dependencies.""" diff --git a/lib/tool_shed/test/functional/test_1020_install_repository_with_repository_dependencies.py b/lib/tool_shed/test/functional/test_1020_install_repository_with_repository_dependencies.py index fbcfbb28d3be..b965011e7ca0 100644 --- a/lib/tool_shed/test/functional/test_1020_install_repository_with_repository_dependencies.py +++ b/lib/tool_shed/test/functional/test_1020_install_repository_with_repository_dependencies.py @@ -73,9 +73,12 @@ def test_0010_browse_tool_shed(self): self.browse_tool_shed(url=self.url, strings_displayed=["Test 0020 Basic Repository Dependencies"]) category = self.populator.get_category_with_name("Test 0020 Basic Repository Dependencies") self.browse_category(category, strings_displayed=[emboss_repository_name]) - self.preview_repository_in_tool_shed( - emboss_repository_name, common.test_user_1_name, strings_displayed=[emboss_repository_name, "Valid tools"] - ) + if not self.is_v2: + self.preview_repository_in_tool_shed( + emboss_repository_name, + common.test_user_1_name, + strings_displayed=[emboss_repository_name, "Valid tools"], + ) def test_0015_install_emboss_repository(self): """Install the emboss repository without installing tool dependencies.""" diff --git a/lib/tool_shed/test/functional/test_1030_install_repository_with_dependency_revisions.py b/lib/tool_shed/test/functional/test_1030_install_repository_with_dependency_revisions.py index 17cebc758712..3cc894c038cb 100644 --- 
a/lib/tool_shed/test/functional/test_1030_install_repository_with_dependency_revisions.py +++ b/lib/tool_shed/test/functional/test_1030_install_repository_with_dependency_revisions.py @@ -140,9 +140,12 @@ def test_0010_browse_tool_shed(self): self.browse_tool_shed(url=self.url, strings_displayed=["Test 0030 Repository Dependency Revisions"]) category = self.populator.get_category_with_name("Test 0030 Repository Dependency Revisions") self.browse_category(category, strings_displayed=[emboss_repository_name]) - self.preview_repository_in_tool_shed( - emboss_repository_name, common.test_user_1_name, strings_displayed=[emboss_repository_name, "Valid tools"] - ) + if not self.is_v2: + self.preview_repository_in_tool_shed( + emboss_repository_name, + common.test_user_1_name, + strings_displayed=[emboss_repository_name, "Valid tools"], + ) def test_0015_install_emboss_repository(self): """Install the emboss repository without installing tool dependencies.""" diff --git a/lib/tool_shed/test/functional/test_1050_circular_dependencies_4_levels.py b/lib/tool_shed/test/functional/test_1050_circular_dependencies_4_levels.py index 48e7039032a3..e57633296faa 100644 --- a/lib/tool_shed/test/functional/test_1050_circular_dependencies_4_levels.py +++ b/lib/tool_shed/test/functional/test_1050_circular_dependencies_4_levels.py @@ -269,7 +269,8 @@ def test_0045_verify_repository_dependencies(self): strings_displayed = [ f"{freebayes_repository.name} depends on {', '.join(repo.name for repo in freebayes_dependencies)}." ] - self.display_manage_repository_page(freebayes_repository, strings_displayed=strings_displayed) + if not self.is_v2: + self.display_manage_repository_page(freebayes_repository, strings_displayed=strings_displayed) def test_0050_verify_tool_dependencies(self): """Check that freebayes and emboss display tool dependencies.""" @@ -277,13 +278,14 @@ def test_0050_verify_tool_dependencies(self): freebayes_repository_name, common.test_user_1_name ) emboss_repository = self._get_repository_by_name_and_owner(emboss_repository_name, common.test_user_1_name) - self.display_manage_repository_page( - freebayes_repository, - strings_displayed=["freebayes", "0.9.4_9696d0ce8a9", "samtools", "0.1.18", "Tool dependencies"], - ) - self.display_manage_repository_page( - emboss_repository, strings_displayed=["Tool dependencies", "emboss", "5.0.0", "package"] - ) + if not self.is_v2: + self.display_manage_repository_page( + freebayes_repository, + strings_displayed=["freebayes", "0.9.4_9696d0ce8a9", "samtools", "0.1.18", "Tool dependencies"], + ) + self.display_manage_repository_page( + emboss_repository, strings_displayed=["Tool dependencies", "emboss", "5.0.0", "package"] + ) def test_0055_install_column_repository(self): """Install column_maker with repository dependencies.""" diff --git a/lib/tool_shed/test/functional/test_1140_simple_repository_dependency_multiple_owners.py b/lib/tool_shed/test/functional/test_1140_simple_repository_dependency_multiple_owners.py index 557f20fa8ed6..12067ac1bce8 100644 --- a/lib/tool_shed/test/functional/test_1140_simple_repository_dependency_multiple_owners.py +++ b/lib/tool_shed/test/functional/test_1140_simple_repository_dependency_multiple_owners.py @@ -84,7 +84,8 @@ def test_0010_verify_datatypes_repository(self): "blastdbn", "blastdbp", ] - self.display_manage_repository_page(repository, strings_displayed=strings_displayed) + if not self.is_v2: + self.display_manage_repository_page(repository, strings_displayed=strings_displayed) def 
test_0015_create_tool_repository(self): """Create and populate the blastxml_to_top_descr_0120 repository @@ -126,7 +127,7 @@ def test_0020_verify_tool_repository(self): """ repository = self._get_repository_by_name_and_owner(tool_repository_name, common.test_user_1_name) strings_displayed = ["blastxml_to_top_descr_0120", "BLAST top hit descriptions", "Make a table from BLAST XML"] - strings_displayed.extend(["0.0.1", "Valid tools"]) + strings_displayed.extend(["0.0.1"]) self.display_manage_repository_page(repository, strings_displayed=strings_displayed) def test_0025_create_repository_dependency(self): diff --git a/lib/tool_shed/test/functional/test_1160_tool_help_images.py b/lib/tool_shed/test/functional/test_1160_tool_help_images.py index dd46f0dbae92..add92d62dc55 100644 --- a/lib/tool_shed/test/functional/test_1160_tool_help_images.py +++ b/lib/tool_shed/test/functional/test_1160_tool_help_images.py @@ -69,6 +69,8 @@ def test_0010_load_tool_page(self): # should be the tool that contains a link to the image. repository_metadata = self._db_repository(repository).metadata_revisions[0].metadata tool_path = repository_metadata["tools"][0]["tool_config"] - self.load_display_tool_page( - repository, tool_path, changeset_revision, strings_displayed=[image_path], strings_not_displayed=[] - ) + # V2 is not going to have this page right? So... do we need this test at all or that route? Likely not? + if self._browser.is_twill and not self.is_v2: + self.load_display_tool_page( + repository, tool_path, changeset_revision, strings_displayed=[image_path], strings_not_displayed=[] + ) diff --git a/lib/tool_shed/test/functional/test_1190_complex_prior_installation_required.py b/lib/tool_shed/test/functional/test_1190_complex_prior_installation_required.py index 57714179cd0e..b49ae8998ca2 100644 --- a/lib/tool_shed/test/functional/test_1190_complex_prior_installation_required.py +++ b/lib/tool_shed/test/functional/test_1190_complex_prior_installation_required.py @@ -136,9 +136,10 @@ def test_0020_verify_generated_dependency(self): ) changeset_revision = self.get_repository_tip(numpy_repository) self.check_repository_dependency(matplotlib_repository, depends_on_repository=numpy_repository) - self.display_manage_repository_page( - matplotlib_repository, strings_displayed=["numpy", "1.7", "package", changeset_revision] - ) + if not self.is_v2: + self.display_manage_repository_page( + matplotlib_repository, strings_displayed=["numpy", "1.7", "package", changeset_revision] + ) def test_0025_install_matplotlib_repository(self): """Install the package_matplotlib_1_2_0170 repository. 
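Two new test modules follow: a Playwright-driven frontend login suite and a GraphQL smoke test. As a rough standalone sketch of the GraphQL call exercised by test_shed_graphql.py below (the URL and key are placeholders; only the x-api-key header name is taken from elsewhere in this series, and the exact route prefix is an assumption):

import requests

response = requests.post(
    "http://localhost:9009/api/graphql/",  # placeholder tool shed URL
    json={"query": "query { categories { name } }"},
    headers={"x-api-key": "<admin api key>"},  # header name used by the v2 API layer
)
response.raise_for_status()
# Each category is returned as {"name": ...} under data.categories.
print([c["name"] for c in response.json()["data"]["categories"]])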
diff --git a/lib/tool_shed/test/functional/test_frontend_login.py b/lib/tool_shed/test/functional/test_frontend_login.py new file mode 100644 index 000000000000..1e752c37e934 --- /dev/null +++ b/lib/tool_shed/test/functional/test_frontend_login.py @@ -0,0 +1,76 @@ +from playwright.sync_api import ( + expect, + Page, +) + +from galaxy_test.base.api_util import random_name +from ..base.api import skip_if_api_v1 +from ..base.playwrightbrowser import ( + Locators, + PlaywrightShedBrowser, +) +from ..base.twilltestcase import ShedTwillTestCase + + +class PlaywrightTestCase(ShedTwillTestCase): + @property + def _playwright_browser(self) -> PlaywrightShedBrowser: + browser = self._browser + assert isinstance(browser, PlaywrightShedBrowser) + return browser + + @property + def _page(self) -> Page: + return self._playwright_browser._page + + +TEST_PASSWORD = "testpass" + + +class TestFrontendLogin(PlaywrightTestCase): + @skip_if_api_v1 + def test_register(self): + self.visit_url("/") + page = self._page + expect(page.locator(Locators.toolbar_login)).to_be_visible() + page.click(Locators.toolbar_login) + expect(page.locator(Locators.login_submit_button)).to_be_visible() + expect(page.locator(Locators.register_link)).to_be_visible() + page.click(Locators.register_link) + user = random_name(prefix="shduser") + self._submit_register_form( + f"{user}@galaxyproject.org", + TEST_PASSWORD, + user, + ) + expect(page.locator(Locators.login_submit_button)).to_be_visible() + + @skip_if_api_v1 + def test_create(self): + user = random_name(prefix="shduser") + self.create( + email=f"{user}@galaxyproject.org", + password=TEST_PASSWORD, + username=user, + ) + + @skip_if_api_v1 + def test_logout(self): + self._create_and_login() + self._playwright_browser.expect_logged_in() + self._playwright_browser.logout_if_logged_in() + self._playwright_browser.expect_not_logged_in() + + @skip_if_api_v1 + def test_change_password(self): + self._create_and_login() + + def _create_and_login(self): + user = random_name(prefix="shduser") + email = f"{user}@galaxyproject.org" + self.create( + email=email, + password=TEST_PASSWORD, + username=user, + ) + self.login(email, TEST_PASSWORD, username=user, redirect=None) diff --git a/lib/tool_shed/test/functional/test_shed_graphql.py b/lib/tool_shed/test/functional/test_shed_graphql.py new file mode 100644 index 000000000000..c427732872d8 --- /dev/null +++ b/lib/tool_shed/test/functional/test_shed_graphql.py @@ -0,0 +1,21 @@ +from galaxy_test.base.api_asserts import assert_status_code_is_ok +from ..base.api import ( + ShedApiTestCase, + skip_if_api_v1, +) + + +class TestShedGraphqlApi(ShedApiTestCase): + @skip_if_api_v1 + def test_graphql_query(self): + populator = self.populator + category = populator.new_category(prefix="testcreate") + json = {"query": r"query { categories { name } }"} + response = self.api_interactor.post("graphql/", json=json) + assert_status_code_is_ok(response) + result = response.json() + assert "data" in result + data = result["data"] + assert "categories" in data + categories = data["categories"] + assert category.name in [c["name"] for c in categories] diff --git a/lib/tool_shed/test/functional/test_shed_repositories.py b/lib/tool_shed/test/functional/test_shed_repositories.py index 2fd4f551a08e..90e9b8134059 100644 --- a/lib/tool_shed/test/functional/test_shed_repositories.py +++ b/lib/tool_shed/test/functional/test_shed_repositories.py @@ -61,6 +61,14 @@ def test_metadata_simple(self): assert only_revision.downloadable assert not only_revision.malicious + def 
test_metadata_invalid_tools(self): + populator = self.populator + repository = populator.setup_bismark_repo() + repository_metadata = populator.get_metadata(repository) + assert repository_metadata + for _, value in repository_metadata.__root__.items(): + assert value.invalid_tools + def test_index_simple(self): # Logic and typing is pretty different if given a tool id to search for - this should # be tested or dropped in v2. diff --git a/lib/tool_shed/util/metadata_util.py b/lib/tool_shed/util/metadata_util.py index 47d82928f0b6..c41b62b7a7c0 100644 --- a/lib/tool_shed/util/metadata_util.py +++ b/lib/tool_shed/util/metadata_util.py @@ -45,6 +45,7 @@ def get_all_dependencies(app, metadata_entry, processed_dependency_links=None): dependency_dict["repository"] = repository.to_dict(value_mapper=value_mapper) if dependency_metadata.includes_tools: dependency_dict["tools"] = dependency_metadata.metadata["tools"] + dependency_dict["invalid_tools"] = dependency_metadata.metadata.get("invalid_tools", []) dependency_dict["repository_dependencies"] = [] if dependency_dict["includes_tool_dependencies"]: dependency_dict["tool_dependencies"] = repository.get_tool_dependencies( diff --git a/lib/tool_shed/webapp/api2/__init__.py b/lib/tool_shed/webapp/api2/__init__.py index 7f270db14d4a..6961c8407b93 100644 --- a/lib/tool_shed/webapp/api2/__init__.py +++ b/lib/tool_shed/webapp/api2/__init__.py @@ -1,3 +1,4 @@ +import logging from json import JSONDecodeError from typing import ( AsyncGenerator, @@ -29,7 +30,9 @@ from galaxy.managers.session import GalaxySessionManager from galaxy.managers.users import UserManager from galaxy.security.idencoding import IdEncodingHelper +from galaxy.util import unicodify from galaxy.web.framework.decorators import require_admin_message +from galaxy.webapps.base.webapp import create_new_session from galaxy.webapps.galaxy.api import ( depends as framework_depends, FrameworkRouter, @@ -49,6 +52,8 @@ User, ) +log = logging.getLogger(__name__) + def get_app() -> ToolShedApp: if tool_shed_app_mod.app is None: @@ -67,10 +72,11 @@ async def get_app_with_request_session() -> AsyncGenerator[ToolShedApp, None]: DependsOnApp = cast(ToolShedApp, Depends(get_app_with_request_session)) +AUTH_COOKIE_NAME = "galaxycommunitysession" api_key_query = APIKeyQuery(name="key", auto_error=False) api_key_header = APIKeyHeader(name="x-api-key", auto_error=False) -api_key_cookie = APIKeyCookie(name="galaxycommunitysession", auto_error=False) +api_key_cookie = APIKeyCookie(name=AUTH_COOKIE_NAME, auto_error=False) def depends(dep_type: Type[T]) -> T: @@ -218,7 +224,7 @@ async def get_body(request: Request): DownloadableQueryParam: bool = Query( default=True, title="downloadable_only", - description="Include only downable repositories.", + description="Include only downloadable repositories.", ) CommitMessage: str = Query( @@ -271,3 +277,79 @@ async def get_body(request: Request): CategoryRepositoriesSortKeyQueryParam: str = Query("name", title="Sort Key") CategoryRepositoriesSortOrderQueryParam: str = Query("asc", title="Sort Order") CategoryRepositoriesPageQueryParam: Optional[int] = Query(None, title="Page") + + +def ensure_valid_session(trans: SessionRequestContext) -> None: + """ + Ensure that a valid Galaxy session exists and is available as + trans.session (part of initialization) + """ + app = trans.app + mapping = app.model + session_manager = GalaxySessionManager(mapping) + sa_session = app.model.context + request = trans.request + # Try to load an existing session + secure_id = 
request.get_cookie(AUTH_COOKIE_NAME)
+    galaxy_session = None
+    prev_galaxy_session = None
+    user_for_new_session = None
+    invalidate_existing_session = False
+    # Track whether the session has changed so we can avoid calling flush
+    # in the most common case (session exists and is valid).
+    galaxy_session_requires_flush = False
+    if secure_id:
+        session_key: Optional[str] = app.security.decode_guid(secure_id)
+        if session_key:
+            # We do NOT catch exceptions here, if the database is down the request should fail,
+            # and we should not generate a new session.
+            galaxy_session = session_manager.get_session_from_session_key(session_key=session_key)
+        if not galaxy_session:
+            session_key = None
+
+    if galaxy_session is not None and galaxy_session.user is not None and galaxy_session.user.deleted:
+        invalidate_existing_session = True
+        log.warning(f"User '{galaxy_session.user.email}' is marked deleted, invalidating session")
+    # Do we need to invalidate the session for some reason?
+    if invalidate_existing_session:
+        assert galaxy_session
+        prev_galaxy_session = galaxy_session
+        prev_galaxy_session.is_valid = False
+        galaxy_session = None
+    # No relevant cookies, or couldn't find, or invalid, so create a new session
+    if galaxy_session is None:
+        galaxy_session = create_new_session(trans, prev_galaxy_session, user_for_new_session)
+        galaxy_session_requires_flush = True
+        trans.set_galaxy_session(galaxy_session)
+        set_auth_cookie(trans, galaxy_session)
+    else:
+        trans.set_galaxy_session(galaxy_session)
+    # Do we need to flush the session?
+    if galaxy_session_requires_flush:
+        sa_session.add(galaxy_session)
+        # FIXME: If prev_session is a proper relation this would not
+        # be needed.
+        if prev_galaxy_session:
+            sa_session.add(prev_galaxy_session)
+        sa_session.flush()
+
+
+def set_auth_cookie(trans: SessionRequestContext, session):
+    cookie_name = AUTH_COOKIE_NAME
+    set_cookie(trans, trans.app.security.encode_guid(session.session_key), cookie_name)
+
+
+def set_cookie(trans: SessionRequestContext, value: str, key, path="/", age=90) -> None:
+    """Convenience method for setting a session cookie"""
+    # In wsgi we were setting both max_age and expires, but
+    # all browsers support max_age now.
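+    # (With the default age=90 the max_age below works out to 3600 * 24 * 90 = 7,776,000
+    # seconds; httponly keeps the session id out of document.cookie for the frontend.)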
+ domain: Optional[str] = trans.app.config.cookie_domain + trans.response.set_cookie( + key, + unicodify(value), + path=path, + max_age=3600 * 24 * age, # 90 days + httponly=True, + secure=trans.request.is_secure, + domain=domain, + ) diff --git a/lib/tool_shed/webapp/api2/repositories.py b/lib/tool_shed/webapp/api2/repositories.py index 9cbf95c91913..776e4064a7ab 100644 --- a/lib/tool_shed/webapp/api2/repositories.py +++ b/lib/tool_shed/webapp/api2/repositories.py @@ -190,6 +190,21 @@ def metadata( return as_dict # return _hack_fastapi_4428(as_dict) + @router.get( + "/api_internal/repositories/{encoded_repository_id}/metadata", + description="Get information about repository metadata", + operation_id="repositories__internal_metadata", + response_model=RepositoryMetadata, + ) + def metadata_internal( + self, + encoded_repository_id: str = RepositoryIdPathParam, + downloadable_only: bool = DownloadableQueryParam, + ) -> dict: + recursive = True + as_dict = get_repository_metadata_dict(self.app, encoded_repository_id, recursive, downloadable_only) + return _hack_fastapi_4428(as_dict) + @router.get( "/api/repositories/get_ordered_installable_revisions", description="Get an ordered list of the repository changeset revisions that are installable", diff --git a/lib/tool_shed/webapp/api2/users.py b/lib/tool_shed/webapp/api2/users.py index 3e57735b6718..8873d8b9800f 100644 --- a/lib/tool_shed/webapp/api2/users.py +++ b/lib/tool_shed/webapp/api2/users.py @@ -1,3 +1,4 @@ +import logging from typing import ( List, Optional, @@ -9,6 +10,10 @@ status, ) from pydantic import BaseModel +from sqlalchemy import ( + and_, + true, +) import tool_shed.util.shed_util_common as suc from galaxy.exceptions import ( @@ -17,12 +22,16 @@ RequestParameterInvalidException, ) from galaxy.managers.api_keys import ApiKeyManager +from galaxy.managers.users import UserManager +from galaxy.webapps.base.webapp import create_new_session from tool_shed.context import SessionRequestContext from tool_shed.managers.users import ( api_create_user, get_api_user, index, ) +from tool_shed.structured_app import ToolShedApp +from tool_shed.webapp.model import User as SaUser from tool_shed_client.schema import ( CreateUserRequest, User, @@ -30,15 +39,64 @@ from . 
import (
    depends,
    DependsOnTrans,
+    ensure_valid_session,
    Router,
+    set_auth_cookie,
    UserIdPathParam,
)

router = Router(tags=["users"])

+log = logging.getLogger(__name__)
+
+
+class UiRegisterRequest(BaseModel):
+    email: str
+    username: str
+    password: str
+    bear_field: str
+
+
+class HasCsrfToken(BaseModel):
+    session_csrf_token: str
+
+
+class UiLoginRequest(HasCsrfToken):
+    login: str
+    password: str
+
+
+class UiLogoutRequest(HasCsrfToken):
+    logout_all: bool = False
+
+
+class UiLoginResponse(BaseModel):
+    pass
+
+
+class UiLogoutResponse(BaseModel):
+    pass
+
+
+class UiRegisterResponse(BaseModel):
+    email: str
+    activation_sent: bool = False
+    activation_error: bool = False
+    contact_email: Optional[str] = None
+
+
+class UiChangePasswordRequest(BaseModel):
+    current: str
+    password: str
+
+
+INVALID_LOGIN_OR_PASSWORD = "Invalid login or password"
+

@router.cbv
class FastAPIUsers:
+    app: ToolShedApp = depends(ToolShedApp)
+    user_manager: UserManager = depends(UserManager)
    api_key_manager: ApiKeyManager = depends(ApiKeyManager)

    @router.get(
@@ -66,7 +124,9 @@ def create(self, trans: SessionRequestContext = DependsOnTrans, request: CreateU
    )
    def current(self, trans: SessionRequestContext = DependsOnTrans) -> User:
        user = trans.user
-        assert user
+        if not user:
+            raise ObjectNotFound()
+
        return get_api_user(trans.app, user)

    @router.get(
@@ -128,3 +188,153 @@ def _get_user(self, trans: SessionRequestContext, encoded_user_id: str):
        if not (trans.user_is_admin or trans.user == user):
            raise InsufficientPermissionsException()
        return user
+
+    @router.post(
+        "/api_internal/register",
+        description="register a user",
+        operation_id="users__internal_register",
+    )
+    def register(
+        self, trans: SessionRequestContext = DependsOnTrans, request: UiRegisterRequest = Body(...)
+    ) -> UiRegisterResponse:
+        honeypot_field = request.bear_field
+        if honeypot_field != "":
+            message = "You've been flagged as a possible bot. If you are not, please try registering again and fill the form out carefully."
+            raise RequestParameterInvalidException(message)
+
+        username = request.username
+        if username == "repos":
+            raise RequestParameterInvalidException("Cannot create a user with the username 'repos'")
+        self.user_manager.create(email=request.email, username=username, password=request.password)
+        if self.app.config.user_activation_on:
+            is_activation_sent = self.user_manager.send_activation_email(trans, request.email, username)
+            if is_activation_sent:
+                return UiRegisterResponse(email=request.email, activation_sent=True)
+            else:
+                return UiRegisterResponse(
+                    email=request.email,
+                    activation_sent=False,
+                    activation_error=True,
+                    contact_email=self.app.config.error_email_to,
+                )
+        else:
+            return UiRegisterResponse(email=request.email)
+
+    @router.put(
+        "/api_internal/change_password",
+        description="change a user's password",
+        operation_id="users__internal_change_password",
+        status_code=status.HTTP_204_NO_CONTENT,
+    )
+    def change_password(
+        self, trans: SessionRequestContext = DependsOnTrans, request: UiChangePasswordRequest = Body(...)
+    ):
+        password = request.password
+        current = request.current
+        if trans.user is None:
+            raise InsufficientPermissionsException("Must be logged in to use this functionality")
+        user_id = trans.user.id
+        token = None
+        user, message = self.user_manager.change_password(
+            trans, password=password, current=current, token=token, confirm=password, id=user_id
+        )
+        if not user:
+            raise RequestParameterInvalidException(message)
+        return Response(status_code=status.HTTP_204_NO_CONTENT)
+
+    @router.put(
+        "/api_internal/login",
+        description="login to web UI",
+        operation_id="users__internal_login",
+    )
+    def internal_login(
+        self, trans: SessionRequestContext = DependsOnTrans, request: UiLoginRequest = Body(...)
+    ) -> UiLoginResponse:
+        log.info(f"top of internal_login {trans.session_csrf_token}")
+        ensure_csrf_token(trans, request)
+        login = request.login
+        password = request.password
+        user = self.user_manager.get_user_by_identity(login)
+        if user is None:
+            raise InsufficientPermissionsException(INVALID_LOGIN_OR_PASSWORD)
+        elif user.deleted:
+            message = (
+                "This account has been marked deleted, contact your local Galaxy administrator to restore the account."
+            )
+            if trans.app.config.error_email_to is not None:
+                message += f" Contact: {trans.app.config.error_email_to}."
+            raise InsufficientPermissionsException(message)
+        elif not trans.app.auth_manager.check_password(user, password, trans.request):
+            raise InsufficientPermissionsException(INVALID_LOGIN_OR_PASSWORD)
+        else:
+            handle_user_login(trans, user)
+        return UiLoginResponse()
+
+    @router.put(
+        "/api_internal/logout",
+        description="logout of web UI",
+        operation_id="users__internal_logout",
+    )
+    def internal_logout(
+        self, trans: SessionRequestContext = DependsOnTrans, request: UiLogoutRequest = Body(...)
+    ) -> UiLogoutResponse:
+        ensure_csrf_token(trans, request)
+        handle_user_logout(trans, logout_all=request.logout_all)
+        return UiLogoutResponse()
+
+
+def ensure_csrf_token(trans: SessionRequestContext, request: HasCsrfToken):
+    session_csrf_token = request.session_csrf_token
+    if not trans.session_csrf_token:
+        ensure_valid_session(trans)
+    message = None
+    if not session_csrf_token:
+        message = "No session token set, denying request."
+    elif session_csrf_token != trans.session_csrf_token:
+        log.info(f"{session_csrf_token} != {trans.session_csrf_token}")
+        message = "Wrong session token found, denying request."
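+    # Classic synchronizer-token check: the CSRF token minted server-side when the page
+    # was rendered must round-trip in the request body and match the session's token.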
+ if message: + raise InsufficientPermissionsException(message) + + +def handle_user_login(trans: SessionRequestContext, user: SaUser) -> None: + trans.app.security_agent.create_user_role(user, trans.app) + # Set the previous session + prev_galaxy_session = trans.get_galaxy_session() + if prev_galaxy_session: + prev_galaxy_session.is_valid = False + # Define a new current_session + new_session = create_new_session(trans, prev_galaxy_session, user) + trans.set_galaxy_session(new_session) + trans.sa_session.add_all((prev_galaxy_session, new_session)) + trans.sa_session.flush() + set_auth_cookie(trans, new_session) + + +def handle_user_logout(trans, logout_all=False): + """ + Logout the current user: + - invalidate the current session + - create a new session with no user associated + """ + prev_galaxy_session = trans.get_galaxy_session() + if prev_galaxy_session: + prev_galaxy_session.is_valid = False + new_session = create_new_session(trans, prev_galaxy_session, None) + trans.set_galaxy_session(new_session) + trans.sa_session.add_all((prev_galaxy_session, new_session)) + trans.sa_session.flush() + + galaxy_user_id = prev_galaxy_session.user_id + if logout_all and galaxy_user_id is not None: + for other_galaxy_session in trans.sa_session.query(trans.app.model.GalaxySession).filter( + and_( + trans.app.model.GalaxySession.table.c.user_id == galaxy_user_id, + trans.app.model.GalaxySession.table.c.is_valid == true(), + trans.app.model.GalaxySession.table.c.id != prev_galaxy_session.id, + ) + ): + other_galaxy_session.is_valid = False + trans.sa_session.add(other_galaxy_session) + trans.sa_session.flush() + set_auth_cookie(trans, new_session) diff --git a/lib/tool_shed/webapp/fast_app.py b/lib/tool_shed/webapp/fast_app.py index e1fab8be007f..18af5dcb7708 100644 --- a/lib/tool_shed/webapp/fast_app.py +++ b/lib/tool_shed/webapp/fast_app.py @@ -1,10 +1,27 @@ +import logging +import os +from pathlib import Path from typing import ( Any, + cast, Dict, + Optional, ) from a2wsgi import WSGIMiddleware -from fastapi import FastAPI +from fastapi import ( + Depends, + FastAPI, +) +from fastapi.responses import ( + HTMLResponse, + RedirectResponse, +) +from fastapi.staticfiles import StaticFiles +from starlette_graphene3 import ( + GraphQLApp, + make_graphiql_handler, +) from galaxy.webapps.base.api import ( add_exception_handler, @@ -12,6 +29,14 @@ include_all_package_routers, ) from galaxy.webapps.openapi.utils import get_openapi +from tool_shed.structured_app import ToolShedApp +from tool_shed.webapp.api2 import ( + ensure_valid_session, + get_trans, +) +from tool_shed.webapp.graphql.schema import schema + +log = logging.getLogger(__name__) api_tags_metadata = [ { @@ -33,6 +58,90 @@ {"name": "undocumented", "description": "API routes that have not yet been ported to FastAPI."}, ] +# Set this if asset handling should be sent to vite. 
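+# (During development the served index.html is rewritten on the fly to pull modules
+# from the vite dev server instead of the prebuilt dist/ bundle; see frontend_controller below.)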
+# Run vite with: +# yarn dev +# Start tool shed with: +# TOOL_SHED_VITE_PORT=4040 TOOL_SHED_API_VERSION=v2 ./run_tool_shed.sh +TOOL_SHED_VITE_PORT: Optional[str] = os.environ.get("TOOL_SHED_VITE_PORT", None) +TOOL_SHED_USE_HMR: bool = TOOL_SHED_VITE_PORT is not None +FRONTEND = Path(__file__).parent.resolve() / "frontend" +FRONTEND_DIST = FRONTEND / "dist" + + +def frontend_controller(app): + shed_entry_point = "main.ts" + vite_runtime = "@vite/client" + + def index(trans=Depends(get_trans)): + if TOOL_SHED_USE_HMR: + index = FRONTEND / "index.html" + index_html = index.read_text() + index_html = index_html.replace( + f"""""", + f"""""", + ) + else: + index = FRONTEND_DIST / "index.html" + index_html = index.read_text() + ensure_valid_session(trans) + cookie = trans.session_csrf_token + r: HTMLResponse = cast(HTMLResponse, trans.response) + r.set_cookie("session_csrf_token", cookie) + return index_html + + return app, index + + +def redirect_route(app, from_url: str, to_url: str): + @app.get(from_url) + def redirect(): + return RedirectResponse(to_url) + + +def frontend_route(controller, path): + app, index = controller + app.get(path, response_class=HTMLResponse)(index) + + +def mount_graphql(app: FastAPI, tool_shed_app: ToolShedApp): + context = { + "session": tool_shed_app.model.context, + "security": tool_shed_app.security, + } + g_app = GraphQLApp(schema, on_get=make_graphiql_handler(), context_value=context, root_value=context) + app.mount("/graphql", g_app) + app.mount("/api/graphql", g_app) + + +FRONT_END_ROUTES = [ + "/", + "/admin", + "/login", + "/register", + "/logout_success", + "/login_success", + "/registration_success", + "/help", + "/repositories_by_search", + "/repositories_by_category", + "/repositories_by_category/{category_id}", + "/repositories_by_owner", + "/repositories_by_owner/{username}", + "/repositories/{repository_id}", + "/repositories_search", + "/_component_showcase", + "/user/api_key", + "/user/change_password", + "/view/{username}", + "/view/{username}/{repository_name}", + "/view/{username}/{repository_name}/{changeset_revision}", +] +LEGACY_ROUTES = { + "/user/create": "/register", # for twilltestcase + "/user/login": "/login", # for twilltestcase +} + def initialize_fast_app(gx_webapp, tool_shed_app): app = get_fastapi_instance() @@ -40,6 +149,27 @@ def initialize_fast_app(gx_webapp, tool_shed_app): add_request_id_middleware(app) from .buildapp import SHED_API_VERSION + def mount_static(directory: Path): + name = directory.name + if directory.exists(): + app.mount(f"/{name}", StaticFiles(directory=directory), name=name) + + if SHED_API_VERSION == "v2": + controller = frontend_controller(app) + for route in FRONT_END_ROUTES: + frontend_route(controller, route) + + for from_route, to_route in LEGACY_ROUTES.items(): + redirect_route(app, from_route, to_route) + + mount_graphql(app, tool_shed_app) + + mount_static(FRONTEND / "static") + if TOOL_SHED_USE_HMR: + mount_static(FRONTEND / "node_modules") + else: + mount_static(FRONTEND_DIST / "assets") + routes_package = "tool_shed.webapp.api" if SHED_API_VERSION == "v1" else "tool_shed.webapp.api2" include_all_package_routers(app, routes_package) wsgi_handler = WSGIMiddleware(gx_webapp) diff --git a/lib/tool_shed/webapp/frontend/.eslintignore b/lib/tool_shed/webapp/frontend/.eslintignore new file mode 100644 index 000000000000..b22c816bfd5e --- /dev/null +++ b/lib/tool_shed/webapp/frontend/.eslintignore @@ -0,0 +1,7 @@ +# don't ever lint node_modules +node_modules +# don't lint build output (make sure it's 
set to your correct build folder name) +dist + +# Ignore codegen aritfacts +src/gql/*.ts diff --git a/lib/tool_shed/webapp/frontend/.eslintrc.js b/lib/tool_shed/webapp/frontend/.eslintrc.js new file mode 100644 index 000000000000..7343e0cc14e1 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/.eslintrc.js @@ -0,0 +1,29 @@ +module.exports = { + root: true, + parser: "vue-eslint-parser", + parserOptions: { + parser: "@typescript-eslint/parser", + // project: ['./tsconfig.json'], + }, + extends: [ + "plugin:vue/strongly-recommended", + "eslint:recommended", + "@vue/typescript/recommended", + "prettier", + "plugin:vuejs-accessibility/recommended", + "plugin:@typescript-eslint/eslint-recommended", + "plugin:@typescript-eslint/recommended", + // More goodies.. + // "plugin:@typescript-eslint/recommended-requiring-type-checking", + ], + plugins: ["@typescript-eslint", "prettier", "vuejs-accessibility"], + rules: { + "prettier/prettier": "error", + // not needed for vue 3 + "vue/no-multiple-template-root": "off", + // upgrade warnings for common John problems + "@typescript-eslint/no-unused-vars": "error", + "vue/require-default-prop": "error", + "vue/v-slot-style": "error", + }, +} diff --git a/lib/tool_shed/webapp/frontend/.prettierrc b/lib/tool_shed/webapp/frontend/.prettierrc new file mode 100644 index 000000000000..0fe7f46213c9 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/.prettierrc @@ -0,0 +1,5 @@ +{ + "tabWidth": 4, + "printWidth": 120, + "semi": false +} diff --git a/lib/tool_shed/webapp/frontend/Makefile b/lib/tool_shed/webapp/frontend/Makefile new file mode 100644 index 000000000000..b520b0be6844 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/Makefile @@ -0,0 +1,22 @@ +GALAXY_ROOT=../../../.. + +client: + yarn build + +dev: + yarn dev-all + +format: + yarn format + +lint: + yarn typecheck && yarn lint + +# These next two tasks don't really belong here, but they do demonstrate +# how to get a test server running and populated with some initial data +# for the new tool shed frontend. +run_test_backend: + cd $(GALAXY_ROOT); TOOL_SHED_CONFIG_OVERRIDE_BOOTSTRAP_ADMIN_API_KEY=tsadminkey TOOL_SHED_VITE_PORT=4040 TOOL_SHED_API_VERSION=v2 ./run_tool_shed.sh + +bootstrap_test_backend: + cd $(GALAXY_ROOT); . .venv/bin/activate; python scripts/bootstrap_test_shed.py diff --git a/lib/tool_shed/webapp/frontend/README.md b/lib/tool_shed/webapp/frontend/README.md new file mode 100644 index 000000000000..a797a275d079 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/README.md @@ -0,0 +1,27 @@ +# Vue 3 + Typescript + Vite + +This template should help get you started developing with Vue 3 and Typescript in Vite. + +## Recommended IDE Setup + +[VSCode](https://code.visualstudio.com/) + [Vetur](https://marketplace.visualstudio.com/items?itemName=octref.vetur). Make sure to enable `vetur.experimental.templateInterpolationService` in settings! 
+ +### If Using ` + + diff --git a/lib/tool_shed/webapp/frontend/package.json b/lib/tool_shed/webapp/frontend/package.json new file mode 100644 index 000000000000..ab8b86cc6b26 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/package.json @@ -0,0 +1,54 @@ +{ + "name": "galaxy-tool-shed", + "license": "MIT", + "version": "0.2.0", + "scripts": { + "dev": "vite --port 4040 --strict-port", + "build": "vue-tsc --noEmit && vite build", + "graphql": "graphql-codegen --watch", + "dev-all": "concurrently --kill-others \"npm run dev\" \"npm run graphql\"", + "format": "prettier --write src", + "typecheck": "vue-tsc --noEmit", + "lint": "eslint src --ext .ts,.vue" + }, + "devDependencies": { + "@graphql-codegen/cli": "^2.16.1", + "@graphql-codegen/client-preset": "^1.2.3", + "@quasar/vite-plugin": "^1.0.4", + "@types/node": "^16.6.1", + "@typescript-eslint/eslint-plugin": "^5.47.1", + "@typescript-eslint/parser": "^5.47.1", + "@vitejs/plugin-vue": "^1.6.0", + "@vue/compiler-sfc": "^3.2.6", + "@vue/eslint-config-typescript": "^11.0.2", + "concurrently": "^7.6.0", + "eslint": "^8.30.0", + "eslint-config-prettier": "^8.5.0", + "eslint-plugin-prettier": "^4.2.1", + "eslint-plugin-vue": "^9.8.0", + "eslint-plugin-vuejs-accessibility": "^2.0.0", + "prettier": "^2.8.1", + "sass": "^1.32.0", + "typescript": "^4.3.2", + "vite": "^4.4.9", + "vue-eslint-parser": "^9.1.0", + "vue-tsc": "^1.0.16" + }, + "dependencies": { + "@apollo/client": "^3.7.3", + "@quasar/extras": "^1.12.4", + "@vue/apollo-composable": "^4.0.0-beta.1", + "@vue/apollo-option": "^4.0.0-alpha.20", + "axios": "^1.2.1", + "date-fns": "^2.29.3", + "date-fns-tz": "^1.3.7", + "e": "^0.2.2", + "graphql": "^16.6.0", + "graphql-tag": "^2.12.6", + "openapi-typescript-fetch": "^1.1.3", + "pinia": "^2.0.28", + "quasar": "^2.5.0", + "vue": "^3.2.6", + "vue-router": "4" + } +} diff --git a/lib/tool_shed/webapp/frontend/src/App.vue b/lib/tool_shed/webapp/frontend/src/App.vue new file mode 100644 index 000000000000..a11ce54f175e --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/App.vue @@ -0,0 +1,51 @@ + + + + + diff --git a/lib/tool_shed/webapp/frontend/src/apiUtil.ts b/lib/tool_shed/webapp/frontend/src/apiUtil.ts new file mode 100644 index 000000000000..f14774767f97 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/apiUtil.ts @@ -0,0 +1,19 @@ +import axios from "axios" +import { RawAxiosRequestConfig } from "axios" +import { components } from "@/schema" + +type User = components["schemas"]["User"] + +export async function getCurrentUser(): Promise { + const conf: RawAxiosRequestConfig = {} + conf.validateStatus = (status: number) => { + const valid = status == 200 || status == 404 + return valid + } + const { data: user, status } = await axios.get("/api/users/current", conf) + if (status == 404) { + return null + } else { + return user as User + } +} diff --git a/lib/tool_shed/webapp/frontend/src/apollo.ts b/lib/tool_shed/webapp/frontend/src/apollo.ts new file mode 100644 index 000000000000..572617f52fc8 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/apollo.ts @@ -0,0 +1,25 @@ +import { createApolloProvider } from "@vue/apollo-option" +import { ApolloClient, InMemoryCache, DefaultOptions } from "@apollo/client/core" + +const defaultOptions: DefaultOptions = { + watchQuery: { + fetchPolicy: "no-cache", + errorPolicy: "ignore", + }, + query: { + fetchPolicy: "no-cache", + errorPolicy: "all", + }, +} + +export const apolloClient = new ApolloClient({ + uri: "/api/graphql/", + cache: new InMemoryCache(), + defaultOptions: defaultOptions, +}) + 
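+// "no-cache" keeps grids in sync with server-side writes at the cost of refetching;
+// errorPolicy "all" returns both partial data and GraphQL errors so components can
+// render whatever did arrive.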
+export const apolloClientProvider = createApolloProvider({ + defaultClient: apolloClient, +}) + +// npx apollo schema:download --endpoint=http://localhost:9009/graphql/ graphql-schema.json diff --git a/lib/tool_shed/webapp/frontend/src/components/ComponentShowcase.vue b/lib/tool_shed/webapp/frontend/src/components/ComponentShowcase.vue new file mode 100644 index 000000000000..4d07d9cbe897 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/ComponentShowcase.vue @@ -0,0 +1,15 @@ + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/ComponentShowcaseExample.vue b/lib/tool_shed/webapp/frontend/src/components/ComponentShowcaseExample.vue new file mode 100644 index 000000000000..d57104208e6c --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/ComponentShowcaseExample.vue @@ -0,0 +1,21 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/ConfigFileContents.vue b/lib/tool_shed/webapp/frontend/src/components/ConfigFileContents.vue new file mode 100644 index 000000000000..1720ccf24208 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/ConfigFileContents.vue @@ -0,0 +1,36 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/ErrorBanner.vue b/lib/tool_shed/webapp/frontend/src/components/ErrorBanner.vue new file mode 100644 index 000000000000..7dfa1aff16e1 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/ErrorBanner.vue @@ -0,0 +1,38 @@ + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/LoadingDiv.vue b/lib/tool_shed/webapp/frontend/src/components/LoadingDiv.vue new file mode 100644 index 000000000000..907c40210c82 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/LoadingDiv.vue @@ -0,0 +1,32 @@ + + + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/LoginForm.vue b/lib/tool_shed/webapp/frontend/src/components/LoginForm.vue new file mode 100644 index 000000000000..bc1342c5418e --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/LoginForm.vue @@ -0,0 +1,38 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/LoginPage.vue b/lib/tool_shed/webapp/frontend/src/components/LoginPage.vue new file mode 100644 index 000000000000..65e7403bde93 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/LoginPage.vue @@ -0,0 +1,17 @@ + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/ManagePushAccess.vue b/lib/tool_shed/webapp/frontend/src/components/ManagePushAccess.vue new file mode 100644 index 000000000000..538c8aeb32f9 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/ManagePushAccess.vue @@ -0,0 +1,42 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/ModalForm.vue b/lib/tool_shed/webapp/frontend/src/components/ModalForm.vue new file mode 100644 index 000000000000..77a34dc8d324 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/ModalForm.vue @@ -0,0 +1,23 @@ + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/PageContainer.vue b/lib/tool_shed/webapp/frontend/src/components/PageContainer.vue new file mode 100644 index 000000000000..ae392f503046 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/PageContainer.vue @@ -0,0 +1,14 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/RecentlyCreatedRepositories.vue b/lib/tool_shed/webapp/frontend/src/components/RecentlyCreatedRepositories.vue new file mode 100644 index 000000000000..934f4e385985 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RecentlyCreatedRepositories.vue @@ -0,0 +1,39 @@ + + + 
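These dashboard panels drive the relayRepositories GraphQL query (its generated form appears in src/gql/gql.ts below). As a minimal sketch, the same query can be exercised directly against the mounted endpoint with Python's requests; the URL is an assumption for a local dev shed, borrowed from the apollo.ts schema-download comment above:

import requests

SHED_URL = "http://localhost:9009"  # assumption: local dev instance, adjust as needed
QUERY = """
query recentRepositoryUpdates {
    relayRepositories(first: 10, sort: UPDATE_TIME_DESC) {
        edges {
            node {
                name
                updateTime
                user { username }
            }
        }
    }
}
"""

# The GraphQL app is mounted at both /graphql and /api/graphql (see fast_app.py above).
response = requests.post(f"{SHED_URL}/api/graphql", json={"query": QUERY})
response.raise_for_status()
for edge in response.json()["data"]["relayRepositories"]["edges"]:
    node = edge["node"]
    print(node["user"]["username"], node["name"], node["updateTime"])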
diff --git a/lib/tool_shed/webapp/frontend/src/components/RecentlyUpdatedRepositories.vue b/lib/tool_shed/webapp/frontend/src/components/RecentlyUpdatedRepositories.vue new file mode 100644 index 000000000000..44454558a79b --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RecentlyUpdatedRepositories.vue @@ -0,0 +1,39 @@ + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/RegisterPage.vue b/lib/tool_shed/webapp/frontend/src/components/RegisterPage.vue new file mode 100644 index 000000000000..a66cf394f529 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RegisterPage.vue @@ -0,0 +1,82 @@ + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/RegistrationSuccess.vue b/lib/tool_shed/webapp/frontend/src/components/RegistrationSuccess.vue new file mode 100644 index 000000000000..2d6eaac69728 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RegistrationSuccess.vue @@ -0,0 +1,22 @@ + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/RepositoriesForOwner.vue b/lib/tool_shed/webapp/frontend/src/components/RepositoriesForOwner.vue new file mode 100644 index 000000000000..28fe8e1f8b35 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RepositoriesForOwner.vue @@ -0,0 +1,67 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/RepositoriesGrid.vue b/lib/tool_shed/webapp/frontend/src/components/RepositoriesGrid.vue new file mode 100644 index 000000000000..7cba807d69db --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RepositoriesGrid.vue @@ -0,0 +1,160 @@ + + + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/RepositoriesGridInterface.ts b/lib/tool_shed/webapp/frontend/src/components/RepositoriesGridInterface.ts new file mode 100644 index 000000000000..5b9ca505d151 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RepositoriesGridInterface.ts @@ -0,0 +1,36 @@ +import { useFragment } from "@/gql/fragment-masking" +import { RepositoryListItemFragment } from "@/gqlFragements" + +export interface RepositoryGridItem { + id: string + name: string + owner: string + index: number + update_time: string + description: string | null + homepage_url: string | null | undefined + remote_repository_url: string | null | undefined +} + +export type OnScroll = () => Promise + +/* eslint-disable @typescript-eslint/no-explicit-any */ +export function nodeToRow(node: any, index: number): RepositoryGridItem { + /* Adapt CQL results to RepositoryGridItem interface consumed by the + component. 
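+       Relay edges expose nullable nodes, hence the explicit null guard below.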
*/ + if (node == null) { + throw Error("Problem with server response") + } + + const fragment = useFragment(RepositoryListItemFragment, node) + return { + id: fragment.encodedId, + index: index, + name: fragment.name as string, // TODO: fix schema.py so this is nonnull + owner: fragment.user.username, + description: fragment.description || null, + homepage_url: fragment.homepageUrl || null, + remote_repository_url: fragment.remoteRepositoryUrl || null, + update_time: fragment.updateTime, + } +} diff --git a/lib/tool_shed/webapp/frontend/src/components/RepositoryActions.vue b/lib/tool_shed/webapp/frontend/src/components/RepositoryActions.vue new file mode 100644 index 000000000000..ccd55676efbd --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RepositoryActions.vue @@ -0,0 +1,45 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/RepositoryCreation.vue b/lib/tool_shed/webapp/frontend/src/components/RepositoryCreation.vue new file mode 100644 index 000000000000..8e41a409c1bd --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RepositoryCreation.vue @@ -0,0 +1,41 @@ + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/RepositoryExplore.vue b/lib/tool_shed/webapp/frontend/src/components/RepositoryExplore.vue new file mode 100644 index 000000000000..994ca74089af --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RepositoryExplore.vue @@ -0,0 +1,71 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/RepositoryHealth.vue b/lib/tool_shed/webapp/frontend/src/components/RepositoryHealth.vue new file mode 100644 index 000000000000..0d3f0924f9b5 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RepositoryHealth.vue @@ -0,0 +1,28 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/RepositoryLink.vue b/lib/tool_shed/webapp/frontend/src/components/RepositoryLink.vue new file mode 100644 index 000000000000..706ee2143cc2 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RepositoryLink.vue @@ -0,0 +1,29 @@ + + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/RepositoryLinks.vue b/lib/tool_shed/webapp/frontend/src/components/RepositoryLinks.vue new file mode 100644 index 000000000000..6a69ebe9e4fa --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RepositoryLinks.vue @@ -0,0 +1,41 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/RepositoryTool.vue b/lib/tool_shed/webapp/frontend/src/components/RepositoryTool.vue new file mode 100644 index 000000000000..996c4400f28d --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RepositoryTool.vue @@ -0,0 +1,25 @@ + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/RepositoryUpdate.vue b/lib/tool_shed/webapp/frontend/src/components/RepositoryUpdate.vue new file mode 100644 index 000000000000..e479176d30d6 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RepositoryUpdate.vue @@ -0,0 +1,25 @@ + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/RevisionActions.vue b/lib/tool_shed/webapp/frontend/src/components/RevisionActions.vue new file mode 100644 index 000000000000..525934331e1e --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RevisionActions.vue @@ -0,0 +1,62 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/RevisionSelect.vue b/lib/tool_shed/webapp/frontend/src/components/RevisionSelect.vue new file mode 100644 index 000000000000..eb559931236d --- /dev/null +++ 
b/lib/tool_shed/webapp/frontend/src/components/RevisionSelect.vue @@ -0,0 +1,56 @@ + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/SelectUser.vue b/lib/tool_shed/webapp/frontend/src/components/SelectUser.vue new file mode 100644 index 000000000000..351418bc895c --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/SelectUser.vue @@ -0,0 +1,62 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/ShedToolbar.vue b/lib/tool_shed/webapp/frontend/src/components/ShedToolbar.vue new file mode 100644 index 000000000000..631d2349f8ac --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/ShedToolbar.vue @@ -0,0 +1,117 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/UtcDate.vue b/lib/tool_shed/webapp/frontend/src/components/UtcDate.vue new file mode 100644 index 000000000000..03a3a0443bed --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/UtcDate.vue @@ -0,0 +1,32 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/pages/AdminControls.vue b/lib/tool_shed/webapp/frontend/src/components/pages/AdminControls.vue new file mode 100644 index 000000000000..14744ddb64d8 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/pages/AdminControls.vue @@ -0,0 +1,23 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/pages/ChangePassword.vue b/lib/tool_shed/webapp/frontend/src/components/pages/ChangePassword.vue new file mode 100644 index 000000000000..de69706dec34 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/pages/ChangePassword.vue @@ -0,0 +1,50 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/pages/CitableRepositoryPage.vue b/lib/tool_shed/webapp/frontend/src/components/pages/CitableRepositoryPage.vue new file mode 100644 index 000000000000..bbf717fecde4 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/pages/CitableRepositoryPage.vue @@ -0,0 +1,44 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/pages/ComponentsShowcase.vue b/lib/tool_shed/webapp/frontend/src/components/pages/ComponentsShowcase.vue new file mode 100644 index 000000000000..65786575671a --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/pages/ComponentsShowcase.vue @@ -0,0 +1,60 @@ + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/pages/HelpPage.vue b/lib/tool_shed/webapp/frontend/src/components/pages/HelpPage.vue new file mode 100644 index 000000000000..adb927f6331c --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/pages/HelpPage.vue @@ -0,0 +1,20 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/pages/LandingPage.vue b/lib/tool_shed/webapp/frontend/src/components/pages/LandingPage.vue new file mode 100644 index 000000000000..5e04f26eeb58 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/pages/LandingPage.vue @@ -0,0 +1,25 @@ + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/pages/ManageApiKey.vue b/lib/tool_shed/webapp/frontend/src/components/pages/ManageApiKey.vue new file mode 100644 index 000000000000..74d7c55980f9 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/pages/ManageApiKey.vue @@ -0,0 +1,84 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesByCategories.vue b/lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesByCategories.vue new file mode 100644 index 000000000000..54e7addc4e53 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesByCategories.vue @@ -0,0 +1,44 @@ + + diff 
--git a/lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesByCategory.vue b/lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesByCategory.vue new file mode 100644 index 000000000000..b57b82ecc98a --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesByCategory.vue @@ -0,0 +1,85 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesByOwner.vue b/lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesByOwner.vue new file mode 100644 index 000000000000..765e7e64d7ec --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesByOwner.vue @@ -0,0 +1,15 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesByOwners.vue b/lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesByOwners.vue new file mode 100644 index 000000000000..8b91b25fce02 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesByOwners.vue @@ -0,0 +1,36 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesBySearch.vue b/lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesBySearch.vue new file mode 100644 index 000000000000..a2a87875cc5f --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesBySearch.vue @@ -0,0 +1,88 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/pages/RepositoryPage.vue b/lib/tool_shed/webapp/frontend/src/components/pages/RepositoryPage.vue new file mode 100644 index 000000000000..6e31d63fc2d0 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/pages/RepositoryPage.vue @@ -0,0 +1,277 @@ + + + diff --git a/lib/tool_shed/webapp/frontend/src/constants.ts b/lib/tool_shed/webapp/frontend/src/constants.ts new file mode 100644 index 000000000000..b3a88733c8c6 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/constants.ts @@ -0,0 +1,13 @@ +export const UPDATING_WITH_PLANEMO_URL = + "https://planemo.readthedocs.io/en/latest/publishing.html#updating-a-repository" + +export const EPHEMERIS_TRAINING = + "https://training.galaxyproject.org/training-material/topics/admin/tutorials/tool-management/tutorial.html" + +export const AUTH_FORM_INPUT_PROPS = { + square: true, + clearable: false, + // choose filled or outlined or neither I think? + outlined: true, + filled: false, +} diff --git a/lib/tool_shed/webapp/frontend/src/gql/fragment-masking.ts b/lib/tool_shed/webapp/frontend/src/gql/fragment-masking.ts new file mode 100644 index 000000000000..a1f5d6e8ef75 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/gql/fragment-masking.ts @@ -0,0 +1,50 @@ +import { ResultOf, TypedDocumentNode as DocumentNode } from "@graphql-typed-document-node/core" + +export type FragmentType> = TDocumentType extends DocumentNode< + infer TType, + any +> + ? TType extends { " $fragmentName"?: infer TKey } + ? TKey extends string + ? 
{ " $fragmentRefs"?: { [key in TKey]: TType } } + : never + : never + : never + +// return non-nullable if `fragmentType` is non-nullable +export function useFragment( + _documentNode: DocumentNode, + fragmentType: FragmentType> +): TType +// return nullable if `fragmentType` is nullable +export function useFragment( + _documentNode: DocumentNode, + fragmentType: FragmentType> | null | undefined +): TType | null | undefined +// return array of non-nullable if `fragmentType` is array of non-nullable +export function useFragment( + _documentNode: DocumentNode, + fragmentType: ReadonlyArray>> +): ReadonlyArray +// return array of nullable if `fragmentType` is array of nullable +export function useFragment( + _documentNode: DocumentNode, + fragmentType: ReadonlyArray>> | null | undefined +): ReadonlyArray | null | undefined +export function useFragment( + _documentNode: DocumentNode, + fragmentType: + | FragmentType> + | ReadonlyArray>> + | null + | undefined +): TType | ReadonlyArray | null | undefined { + return fragmentType as any +} + +export function makeFragmentData>( + data: FT, + _fragment: F +): FragmentType { + return data as FragmentType +} diff --git a/lib/tool_shed/webapp/frontend/src/gql/gql.ts b/lib/tool_shed/webapp/frontend/src/gql/gql.ts new file mode 100644 index 000000000000..0793f6959859 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/gql/gql.ts @@ -0,0 +1,98 @@ +/* eslint-disable */ +import * as types from "./graphql" +import { TypedDocumentNode as DocumentNode } from "@graphql-typed-document-node/core" + +/** + * Map of all GraphQL operations in the project. + * + * This map has several performance disadvantages: + * 1. It is not tree-shakeable, so it will include all operations in the project. + * 2. It is not minifiable, so the string of a GraphQL query will be multiple times inside the bundle. + * 3. It does not support dead code elimination, so it will add unused operations. + * + * Therefore it is highly recommended to use the babel-plugin for production. 
+ */ +const documents = { + "\n query recentlyCreatedRepositories {\n relayRepositories(first: 10, sort: UPDATE_TIME_DESC) {\n edges {\n node {\n ...RepositoryCreationItem\n }\n }\n }\n }\n": + types.RecentlyCreatedRepositoriesDocument, + "\n query recentRepositoryUpdates {\n relayRepositories(first: 10, sort: UPDATE_TIME_DESC) {\n edges {\n node {\n ...RepositoryUpdateItem\n }\n }\n }\n }\n": + types.RecentRepositoryUpdatesDocument, + "\n query repositoriesByOwner($username: String, $cursor: String) {\n relayRepositoriesForOwner(username: $username, sort: UPDATE_TIME_DESC, first: 10, after: $cursor) {\n edges {\n cursor\n node {\n ...RepositoryListItemFragment\n }\n }\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n }\n": + types.RepositoriesByOwnerDocument, + "\n fragment RepositoryCreationItem on RelayRepository {\n encodedId\n name\n user {\n username\n }\n createTime\n }\n": + types.RepositoryCreationItemFragmentDoc, + "\n query repositoriesByCategory($categoryId: String, $cursor: String) {\n relayRepositoriesForCategory(encodedId: $categoryId, sort: UPDATE_TIME_DESC, first: 10, after: $cursor) {\n edges {\n cursor\n node {\n ...RepositoryListItemFragment\n }\n }\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n }\n": + types.RepositoriesByCategoryDocument, + "\n fragment RepositoryListItemFragment on RelayRepository {\n encodedId\n name\n user {\n username\n }\n description\n type\n updateTime\n homepageUrl\n remoteRepositoryUrl\n }\n": + types.RepositoryListItemFragmentFragmentDoc, + "\n fragment RepositoryUpdateItem on RelayRepository {\n encodedId\n name\n user {\n username\n }\n updateTime\n }\n": + types.RepositoryUpdateItemFragmentDoc, +} + +/** + * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. + * + * + * @example + * ```ts + * const query = gql(`query GetUser($id: ID!) { user(id: $id) { name } }`); + * ``` + * + * The query argument is unknown! + * Please regenerate the types. + */ +export function graphql(source: string): unknown + +/** + * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. + */ +export function graphql( + source: "\n query recentlyCreatedRepositories {\n relayRepositories(first: 10, sort: UPDATE_TIME_DESC) {\n edges {\n node {\n ...RepositoryCreationItem\n }\n }\n }\n }\n" +): (typeof documents)["\n query recentlyCreatedRepositories {\n relayRepositories(first: 10, sort: UPDATE_TIME_DESC) {\n edges {\n node {\n ...RepositoryCreationItem\n }\n }\n }\n }\n"] +/** + * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. + */ +export function graphql( + source: "\n query recentRepositoryUpdates {\n relayRepositories(first: 10, sort: UPDATE_TIME_DESC) {\n edges {\n node {\n ...RepositoryUpdateItem\n }\n }\n }\n }\n" +): (typeof documents)["\n query recentRepositoryUpdates {\n relayRepositories(first: 10, sort: UPDATE_TIME_DESC) {\n edges {\n node {\n ...RepositoryUpdateItem\n }\n }\n }\n }\n"] +/** + * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. 
+ */ +export function graphql( + source: "\n query repositoriesByOwner($username: String, $cursor: String) {\n relayRepositoriesForOwner(username: $username, sort: UPDATE_TIME_DESC, first: 10, after: $cursor) {\n edges {\n cursor\n node {\n ...RepositoryListItemFragment\n }\n }\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n }\n" +): (typeof documents)["\n query repositoriesByOwner($username: String, $cursor: String) {\n relayRepositoriesForOwner(username: $username, sort: UPDATE_TIME_DESC, first: 10, after: $cursor) {\n edges {\n cursor\n node {\n ...RepositoryListItemFragment\n }\n }\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n }\n"] +/** + * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. + */ +export function graphql( + source: "\n fragment RepositoryCreationItem on RelayRepository {\n encodedId\n name\n user {\n username\n }\n createTime\n }\n" +): (typeof documents)["\n fragment RepositoryCreationItem on RelayRepository {\n encodedId\n name\n user {\n username\n }\n createTime\n }\n"] +/** + * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. + */ +export function graphql( + source: "\n query repositoriesByCategory($categoryId: String, $cursor: String) {\n relayRepositoriesForCategory(encodedId: $categoryId, sort: UPDATE_TIME_DESC, first: 10, after: $cursor) {\n edges {\n cursor\n node {\n ...RepositoryListItemFragment\n }\n }\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n }\n" +): (typeof documents)["\n query repositoriesByCategory($categoryId: String, $cursor: String) {\n relayRepositoriesForCategory(encodedId: $categoryId, sort: UPDATE_TIME_DESC, first: 10, after: $cursor) {\n edges {\n cursor\n node {\n ...RepositoryListItemFragment\n }\n }\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n }\n"] +/** + * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. + */ +export function graphql( + source: "\n fragment RepositoryListItemFragment on RelayRepository {\n encodedId\n name\n user {\n username\n }\n description\n type\n updateTime\n homepageUrl\n remoteRepositoryUrl\n }\n" +): (typeof documents)["\n fragment RepositoryListItemFragment on RelayRepository {\n encodedId\n name\n user {\n username\n }\n description\n type\n updateTime\n homepageUrl\n remoteRepositoryUrl\n }\n"] +/** + * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. + */ +export function graphql( + source: "\n fragment RepositoryUpdateItem on RelayRepository {\n encodedId\n name\n user {\n username\n }\n updateTime\n }\n" +): (typeof documents)["\n fragment RepositoryUpdateItem on RelayRepository {\n encodedId\n name\n user {\n username\n }\n updateTime\n }\n"] + +export function graphql(source: string) { + return (documents as any)[source] ?? {} +} + +export type DocumentType> = TDocumentNode extends DocumentNode< + infer TType, + any +> + ? 
TType + : never diff --git a/lib/tool_shed/webapp/frontend/src/gql/graphql.ts b/lib/tool_shed/webapp/frontend/src/gql/graphql.ts new file mode 100644 index 000000000000..f6c3433eb96b --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/gql/graphql.ts @@ -0,0 +1,821 @@ +/* eslint-disable */ +import { TypedDocumentNode as DocumentNode } from "@graphql-typed-document-node/core" +export type Maybe = T | null +export type InputMaybe = Maybe +export type Exact = { [K in keyof T]: T[K] } +export type MakeOptional = Omit & { [SubKey in K]?: Maybe } +export type MakeMaybe = Omit & { [SubKey in K]: Maybe } +/** All built-in and custom scalars, mapped to their actual values */ +export type Scalars = { + ID: string + String: string + Boolean: boolean + Int: number + Float: number + /** + * The `DateTime` scalar type represents a DateTime + * value as specified by + * [iso8601](https://en.wikipedia.org/wiki/ISO_8601). + */ + DateTime: any +} + +/** An object with an ID */ +export type Node = { + /** The ID of the object */ + id: Scalars["ID"] +} + +/** The Relay compliant `PageInfo` type, containing data necessary to paginate this connection. */ +export type PageInfo = { + __typename?: "PageInfo" + /** When paginating forwards, the cursor to continue. */ + endCursor?: Maybe + /** When paginating forwards, are there more items? */ + hasNextPage: Scalars["Boolean"] + /** When paginating backwards, are there more items? */ + hasPreviousPage: Scalars["Boolean"] + /** When paginating backwards, the cursor to continue. */ + startCursor?: Maybe +} + +export type Query = { + __typename?: "Query" + categories?: Maybe>> + node?: Maybe + relayCategories?: Maybe + relayRepositories?: Maybe + relayRepositoriesForCategory?: Maybe + relayRepositoriesForOwner?: Maybe + relayRevisions?: Maybe + relayUsers?: Maybe + repositories?: Maybe>> + revisions?: Maybe>> + users?: Maybe>> +} + +export type QueryNodeArgs = { + id: Scalars["ID"] +} + +export type QueryRelayCategoriesArgs = { + after?: InputMaybe + before?: InputMaybe + first?: InputMaybe + last?: InputMaybe + sort?: InputMaybe>> +} + +export type QueryRelayRepositoriesArgs = { + after?: InputMaybe + before?: InputMaybe + first?: InputMaybe + last?: InputMaybe + sort?: InputMaybe>> +} + +export type QueryRelayRepositoriesForCategoryArgs = { + after?: InputMaybe + before?: InputMaybe + encodedId?: InputMaybe + first?: InputMaybe + id?: InputMaybe + last?: InputMaybe + sort?: InputMaybe>> +} + +export type QueryRelayRepositoriesForOwnerArgs = { + after?: InputMaybe + before?: InputMaybe + first?: InputMaybe + last?: InputMaybe + sort?: InputMaybe>> + username?: InputMaybe +} + +export type QueryRelayRevisionsArgs = { + after?: InputMaybe + before?: InputMaybe + first?: InputMaybe + last?: InputMaybe + sort?: InputMaybe>> +} + +export type QueryRelayUsersArgs = { + after?: InputMaybe + before?: InputMaybe + first?: InputMaybe + last?: InputMaybe + sort?: InputMaybe>> +} + +export type RelayCategory = Node & { + __typename?: "RelayCategory" + createTime?: Maybe + deleted?: Maybe + description?: Maybe + encodedId: Scalars["String"] + id: Scalars["ID"] + name: Scalars["String"] + repositories?: Maybe>> + updateTime?: Maybe +} + +export type RelayCategoryConnection = { + __typename?: "RelayCategoryConnection" + /** Contains the nodes in this connection. */ + edges: Array> + /** Pagination data for this connection. */ + pageInfo: PageInfo +} + +/** A Relay edge containing a `RelayCategory` and its cursor. 
*/ +export type RelayCategoryEdge = { + __typename?: "RelayCategoryEdge" + /** A cursor for use in pagination */ + cursor: Scalars["String"] + /** The item at the end of the edge */ + node?: Maybe +} + +/** An enumeration. */ +export enum RelayCategorySortEnum { + CreateTimeAsc = "CREATE_TIME_ASC", + CreateTimeDesc = "CREATE_TIME_DESC", + DeletedAsc = "DELETED_ASC", + DeletedDesc = "DELETED_DESC", + DescriptionAsc = "DESCRIPTION_ASC", + DescriptionDesc = "DESCRIPTION_DESC", + IdAsc = "ID_ASC", + IdDesc = "ID_DESC", + NameAsc = "NAME_ASC", + NameDesc = "NAME_DESC", + UpdateTimeAsc = "UPDATE_TIME_ASC", + UpdateTimeDesc = "UPDATE_TIME_DESC", +} + +export type RelayRepository = Node & { + __typename?: "RelayRepository" + categories?: Maybe>> + createTime?: Maybe + description?: Maybe + encodedId: Scalars["String"] + homepageUrl?: Maybe + id: Scalars["ID"] + longDescription?: Maybe + name: Scalars["String"] + remoteRepositoryUrl?: Maybe + type?: Maybe + updateTime?: Maybe + user: SimpleUser +} + +export type RelayRepositoryConnection = { + __typename?: "RelayRepositoryConnection" + /** Contains the nodes in this connection. */ + edges: Array> + /** Pagination data for this connection. */ + pageInfo: PageInfo +} + +/** A Relay edge containing a `RelayRepository` and its cursor. */ +export type RelayRepositoryEdge = { + __typename?: "RelayRepositoryEdge" + /** A cursor for use in pagination */ + cursor: Scalars["String"] + /** The item at the end of the edge */ + node?: Maybe +} + +export type RelayRepositoryMetadata = Node & { + __typename?: "RelayRepositoryMetadata" + changesetRevision: Scalars["String"] + createTime?: Maybe + downloadable?: Maybe + encodedId: Scalars["String"] + id: Scalars["ID"] + malicious?: Maybe + numericRevision?: Maybe + repository: SimpleRepository + updateTime?: Maybe +} + +export type RelayRepositoryMetadataConnection = { + __typename?: "RelayRepositoryMetadataConnection" + /** Contains the nodes in this connection. */ + edges: Array> + /** Pagination data for this connection. */ + pageInfo: PageInfo +} + +/** A Relay edge containing a `RelayRepositoryMetadata` and its cursor. */ +export type RelayRepositoryMetadataEdge = { + __typename?: "RelayRepositoryMetadataEdge" + /** A cursor for use in pagination */ + cursor: Scalars["String"] + /** The item at the end of the edge */ + node?: Maybe +} + +/** An enumeration. */ +export enum RelayRepositoryMetadataSortEnum { + IdAsc = "ID_ASC", + IdDesc = "ID_DESC", +} + +/** An enumeration. */ +export enum RelayRepositorySortEnum { + CreateTimeAsc = "CREATE_TIME_ASC", + CreateTimeDesc = "CREATE_TIME_DESC", + DescriptionAsc = "DESCRIPTION_ASC", + DescriptionDesc = "DESCRIPTION_DESC", + HomepageUrlAsc = "HOMEPAGE_URL_ASC", + HomepageUrlDesc = "HOMEPAGE_URL_DESC", + IdAsc = "ID_ASC", + IdDesc = "ID_DESC", + LongDescriptionAsc = "LONG_DESCRIPTION_ASC", + LongDescriptionDesc = "LONG_DESCRIPTION_DESC", + NameAsc = "NAME_ASC", + NameDesc = "NAME_DESC", + RemoteRepositoryUrlAsc = "REMOTE_REPOSITORY_URL_ASC", + RemoteRepositoryUrlDesc = "REMOTE_REPOSITORY_URL_DESC", + TypeAsc = "TYPE_ASC", + TypeDesc = "TYPE_DESC", + UpdateTimeAsc = "UPDATE_TIME_ASC", + UpdateTimeDesc = "UPDATE_TIME_DESC", +} + +export type RelayUser = Node & { + __typename?: "RelayUser" + encodedId: Scalars["String"] + id: Scalars["ID"] + username: Scalars["String"] +} + +export type RelayUserConnection = { + __typename?: "RelayUserConnection" + /** Contains the nodes in this connection. */ + edges: Array> + /** Pagination data for this connection. 
*/ + pageInfo: PageInfo +} + +/** A Relay edge containing a `RelayUser` and its cursor. */ +export type RelayUserEdge = { + __typename?: "RelayUserEdge" + /** A cursor for use in pagination */ + cursor: Scalars["String"] + /** The item at the end of the edge */ + node?: Maybe +} + +/** An enumeration. */ +export enum RelayUserSortEnum { + IdAsc = "ID_ASC", + IdDesc = "ID_DESC", + UsernameAsc = "USERNAME_ASC", + UsernameDesc = "USERNAME_DESC", +} + +export type SimpleCategory = { + __typename?: "SimpleCategory" + createTime?: Maybe + deleted?: Maybe + description?: Maybe + encodedId: Scalars["String"] + id: Scalars["ID"] + name: Scalars["String"] + repositories?: Maybe>> + updateTime?: Maybe +} + +export type SimpleRepository = { + __typename?: "SimpleRepository" + categories?: Maybe>> + createTime?: Maybe + description?: Maybe + downloadableRevisions?: Maybe>> + encodedId: Scalars["String"] + homepageUrl?: Maybe + id: Scalars["ID"] + longDescription?: Maybe + metadataRevisions?: Maybe>> + name: Scalars["String"] + remoteRepositoryUrl?: Maybe + type?: Maybe + updateTime?: Maybe + user: SimpleUser +} + +export type SimpleRepositoryMetadata = { + __typename?: "SimpleRepositoryMetadata" + changesetRevision: Scalars["String"] + createTime?: Maybe + downloadable?: Maybe + encodedId: Scalars["String"] + id: Scalars["ID"] + malicious?: Maybe + numericRevision?: Maybe + repository: SimpleRepository + updateTime?: Maybe +} + +export type SimpleUser = { + __typename?: "SimpleUser" + encodedId: Scalars["String"] + id: Scalars["ID"] + username: Scalars["String"] +} + +export type RecentlyCreatedRepositoriesQueryVariables = Exact<{ [key: string]: never }> + +export type RecentlyCreatedRepositoriesQuery = { + __typename?: "Query" + relayRepositories?: { + __typename?: "RelayRepositoryConnection" + edges: Array<{ + __typename?: "RelayRepositoryEdge" + node?: + | ({ __typename?: "RelayRepository" } & { + " $fragmentRefs"?: { RepositoryCreationItemFragment: RepositoryCreationItemFragment } + }) + | null + } | null> + } | null +} + +export type RecentRepositoryUpdatesQueryVariables = Exact<{ [key: string]: never }> + +export type RecentRepositoryUpdatesQuery = { + __typename?: "Query" + relayRepositories?: { + __typename?: "RelayRepositoryConnection" + edges: Array<{ + __typename?: "RelayRepositoryEdge" + node?: + | ({ __typename?: "RelayRepository" } & { + " $fragmentRefs"?: { RepositoryUpdateItemFragment: RepositoryUpdateItemFragment } + }) + | null + } | null> + } | null +} + +export type RepositoriesByOwnerQueryVariables = Exact<{ + username?: InputMaybe + cursor?: InputMaybe +}> + +export type RepositoriesByOwnerQuery = { + __typename?: "Query" + relayRepositoriesForOwner?: { + __typename?: "RelayRepositoryConnection" + edges: Array<{ + __typename?: "RelayRepositoryEdge" + cursor: string + node?: + | ({ __typename?: "RelayRepository" } & { + " $fragmentRefs"?: { RepositoryListItemFragmentFragment: RepositoryListItemFragmentFragment } + }) + | null + } | null> + pageInfo: { __typename?: "PageInfo"; endCursor?: string | null; hasNextPage: boolean } + } | null +} + +export type RepositoryCreationItemFragment = { + __typename?: "RelayRepository" + encodedId: string + name: string + createTime?: any | null + user: { __typename?: "SimpleUser"; username: string } +} & { " $fragmentName"?: "RepositoryCreationItemFragment" } + +export type RepositoriesByCategoryQueryVariables = Exact<{ + categoryId?: InputMaybe + cursor?: InputMaybe +}> + +export type RepositoriesByCategoryQuery = { + __typename?: "Query" + 
relayRepositoriesForCategory?: { + __typename?: "RelayRepositoryConnection" + edges: Array<{ + __typename?: "RelayRepositoryEdge" + cursor: string + node?: + | ({ __typename?: "RelayRepository" } & { + " $fragmentRefs"?: { RepositoryListItemFragmentFragment: RepositoryListItemFragmentFragment } + }) + | null + } | null> + pageInfo: { __typename?: "PageInfo"; endCursor?: string | null; hasNextPage: boolean } + } | null +} + +export type RepositoryListItemFragmentFragment = { + __typename?: "RelayRepository" + encodedId: string + name: string + description?: string | null + type?: string | null + updateTime?: any | null + homepageUrl?: string | null + remoteRepositoryUrl?: string | null + user: { __typename?: "SimpleUser"; username: string } +} & { " $fragmentName"?: "RepositoryListItemFragmentFragment" } + +export type RepositoryUpdateItemFragment = { + __typename?: "RelayRepository" + encodedId: string + name: string + updateTime?: any | null + user: { __typename?: "SimpleUser"; username: string } +} & { " $fragmentName"?: "RepositoryUpdateItemFragment" } + +export const RepositoryCreationItemFragmentDoc = { + kind: "Document", + definitions: [ + { + kind: "FragmentDefinition", + name: { kind: "Name", value: "RepositoryCreationItem" }, + typeCondition: { kind: "NamedType", name: { kind: "Name", value: "RelayRepository" } }, + selectionSet: { + kind: "SelectionSet", + selections: [ + { kind: "Field", name: { kind: "Name", value: "encodedId" } }, + { kind: "Field", name: { kind: "Name", value: "name" } }, + { + kind: "Field", + name: { kind: "Name", value: "user" }, + selectionSet: { + kind: "SelectionSet", + selections: [{ kind: "Field", name: { kind: "Name", value: "username" } }], + }, + }, + { kind: "Field", name: { kind: "Name", value: "createTime" } }, + ], + }, + }, + ], +} as unknown as DocumentNode +export const RepositoryListItemFragmentFragmentDoc = { + kind: "Document", + definitions: [ + { + kind: "FragmentDefinition", + name: { kind: "Name", value: "RepositoryListItemFragment" }, + typeCondition: { kind: "NamedType", name: { kind: "Name", value: "RelayRepository" } }, + selectionSet: { + kind: "SelectionSet", + selections: [ + { kind: "Field", name: { kind: "Name", value: "encodedId" } }, + { kind: "Field", name: { kind: "Name", value: "name" } }, + { + kind: "Field", + name: { kind: "Name", value: "user" }, + selectionSet: { + kind: "SelectionSet", + selections: [{ kind: "Field", name: { kind: "Name", value: "username" } }], + }, + }, + { kind: "Field", name: { kind: "Name", value: "description" } }, + { kind: "Field", name: { kind: "Name", value: "type" } }, + { kind: "Field", name: { kind: "Name", value: "updateTime" } }, + { kind: "Field", name: { kind: "Name", value: "homepageUrl" } }, + { kind: "Field", name: { kind: "Name", value: "remoteRepositoryUrl" } }, + ], + }, + }, + ], +} as unknown as DocumentNode +export const RepositoryUpdateItemFragmentDoc = { + kind: "Document", + definitions: [ + { + kind: "FragmentDefinition", + name: { kind: "Name", value: "RepositoryUpdateItem" }, + typeCondition: { kind: "NamedType", name: { kind: "Name", value: "RelayRepository" } }, + selectionSet: { + kind: "SelectionSet", + selections: [ + { kind: "Field", name: { kind: "Name", value: "encodedId" } }, + { kind: "Field", name: { kind: "Name", value: "name" } }, + { + kind: "Field", + name: { kind: "Name", value: "user" }, + selectionSet: { + kind: "SelectionSet", + selections: [{ kind: "Field", name: { kind: "Name", value: "username" } }], + }, + }, + { kind: "Field", name: { kind: 
"Name", value: "updateTime" } }, + ], + }, + }, + ], +} as unknown as DocumentNode +export const RecentlyCreatedRepositoriesDocument = { + kind: "Document", + definitions: [ + { + kind: "OperationDefinition", + operation: "query", + name: { kind: "Name", value: "recentlyCreatedRepositories" }, + selectionSet: { + kind: "SelectionSet", + selections: [ + { + kind: "Field", + name: { kind: "Name", value: "relayRepositories" }, + arguments: [ + { + kind: "Argument", + name: { kind: "Name", value: "first" }, + value: { kind: "IntValue", value: "10" }, + }, + { + kind: "Argument", + name: { kind: "Name", value: "sort" }, + value: { kind: "EnumValue", value: "UPDATE_TIME_DESC" }, + }, + ], + selectionSet: { + kind: "SelectionSet", + selections: [ + { + kind: "Field", + name: { kind: "Name", value: "edges" }, + selectionSet: { + kind: "SelectionSet", + selections: [ + { + kind: "Field", + name: { kind: "Name", value: "node" }, + selectionSet: { + kind: "SelectionSet", + selections: [ + { + kind: "FragmentSpread", + name: { kind: "Name", value: "RepositoryCreationItem" }, + }, + ], + }, + }, + ], + }, + }, + ], + }, + }, + ], + }, + }, + ...RepositoryCreationItemFragmentDoc.definitions, + ], +} as unknown as DocumentNode +export const RecentRepositoryUpdatesDocument = { + kind: "Document", + definitions: [ + { + kind: "OperationDefinition", + operation: "query", + name: { kind: "Name", value: "recentRepositoryUpdates" }, + selectionSet: { + kind: "SelectionSet", + selections: [ + { + kind: "Field", + name: { kind: "Name", value: "relayRepositories" }, + arguments: [ + { + kind: "Argument", + name: { kind: "Name", value: "first" }, + value: { kind: "IntValue", value: "10" }, + }, + { + kind: "Argument", + name: { kind: "Name", value: "sort" }, + value: { kind: "EnumValue", value: "UPDATE_TIME_DESC" }, + }, + ], + selectionSet: { + kind: "SelectionSet", + selections: [ + { + kind: "Field", + name: { kind: "Name", value: "edges" }, + selectionSet: { + kind: "SelectionSet", + selections: [ + { + kind: "Field", + name: { kind: "Name", value: "node" }, + selectionSet: { + kind: "SelectionSet", + selections: [ + { + kind: "FragmentSpread", + name: { kind: "Name", value: "RepositoryUpdateItem" }, + }, + ], + }, + }, + ], + }, + }, + ], + }, + }, + ], + }, + }, + ...RepositoryUpdateItemFragmentDoc.definitions, + ], +} as unknown as DocumentNode +export const RepositoriesByOwnerDocument = { + kind: "Document", + definitions: [ + { + kind: "OperationDefinition", + operation: "query", + name: { kind: "Name", value: "repositoriesByOwner" }, + variableDefinitions: [ + { + kind: "VariableDefinition", + variable: { kind: "Variable", name: { kind: "Name", value: "username" } }, + type: { kind: "NamedType", name: { kind: "Name", value: "String" } }, + }, + { + kind: "VariableDefinition", + variable: { kind: "Variable", name: { kind: "Name", value: "cursor" } }, + type: { kind: "NamedType", name: { kind: "Name", value: "String" } }, + }, + ], + selectionSet: { + kind: "SelectionSet", + selections: [ + { + kind: "Field", + name: { kind: "Name", value: "relayRepositoriesForOwner" }, + arguments: [ + { + kind: "Argument", + name: { kind: "Name", value: "username" }, + value: { kind: "Variable", name: { kind: "Name", value: "username" } }, + }, + { + kind: "Argument", + name: { kind: "Name", value: "sort" }, + value: { kind: "EnumValue", value: "UPDATE_TIME_DESC" }, + }, + { + kind: "Argument", + name: { kind: "Name", value: "first" }, + value: { kind: "IntValue", value: "10" }, + }, + { + kind: "Argument", + name: { 
kind: "Name", value: "after" }, + value: { kind: "Variable", name: { kind: "Name", value: "cursor" } }, + }, + ], + selectionSet: { + kind: "SelectionSet", + selections: [ + { + kind: "Field", + name: { kind: "Name", value: "edges" }, + selectionSet: { + kind: "SelectionSet", + selections: [ + { kind: "Field", name: { kind: "Name", value: "cursor" } }, + { + kind: "Field", + name: { kind: "Name", value: "node" }, + selectionSet: { + kind: "SelectionSet", + selections: [ + { + kind: "FragmentSpread", + name: { kind: "Name", value: "RepositoryListItemFragment" }, + }, + ], + }, + }, + ], + }, + }, + { + kind: "Field", + name: { kind: "Name", value: "pageInfo" }, + selectionSet: { + kind: "SelectionSet", + selections: [ + { kind: "Field", name: { kind: "Name", value: "endCursor" } }, + { kind: "Field", name: { kind: "Name", value: "hasNextPage" } }, + ], + }, + }, + ], + }, + }, + ], + }, + }, + ...RepositoryListItemFragmentFragmentDoc.definitions, + ], +} as unknown as DocumentNode +export const RepositoriesByCategoryDocument = { + kind: "Document", + definitions: [ + { + kind: "OperationDefinition", + operation: "query", + name: { kind: "Name", value: "repositoriesByCategory" }, + variableDefinitions: [ + { + kind: "VariableDefinition", + variable: { kind: "Variable", name: { kind: "Name", value: "categoryId" } }, + type: { kind: "NamedType", name: { kind: "Name", value: "String" } }, + }, + { + kind: "VariableDefinition", + variable: { kind: "Variable", name: { kind: "Name", value: "cursor" } }, + type: { kind: "NamedType", name: { kind: "Name", value: "String" } }, + }, + ], + selectionSet: { + kind: "SelectionSet", + selections: [ + { + kind: "Field", + name: { kind: "Name", value: "relayRepositoriesForCategory" }, + arguments: [ + { + kind: "Argument", + name: { kind: "Name", value: "encodedId" }, + value: { kind: "Variable", name: { kind: "Name", value: "categoryId" } }, + }, + { + kind: "Argument", + name: { kind: "Name", value: "sort" }, + value: { kind: "EnumValue", value: "UPDATE_TIME_DESC" }, + }, + { + kind: "Argument", + name: { kind: "Name", value: "first" }, + value: { kind: "IntValue", value: "10" }, + }, + { + kind: "Argument", + name: { kind: "Name", value: "after" }, + value: { kind: "Variable", name: { kind: "Name", value: "cursor" } }, + }, + ], + selectionSet: { + kind: "SelectionSet", + selections: [ + { + kind: "Field", + name: { kind: "Name", value: "edges" }, + selectionSet: { + kind: "SelectionSet", + selections: [ + { kind: "Field", name: { kind: "Name", value: "cursor" } }, + { + kind: "Field", + name: { kind: "Name", value: "node" }, + selectionSet: { + kind: "SelectionSet", + selections: [ + { + kind: "FragmentSpread", + name: { kind: "Name", value: "RepositoryListItemFragment" }, + }, + ], + }, + }, + ], + }, + }, + { + kind: "Field", + name: { kind: "Name", value: "pageInfo" }, + selectionSet: { + kind: "SelectionSet", + selections: [ + { kind: "Field", name: { kind: "Name", value: "endCursor" } }, + { kind: "Field", name: { kind: "Name", value: "hasNextPage" } }, + ], + }, + }, + ], + }, + }, + ], + }, + }, + ...RepositoryListItemFragmentFragmentDoc.definitions, + ], +} as unknown as DocumentNode diff --git a/lib/tool_shed/webapp/frontend/src/gql/index.ts b/lib/tool_shed/webapp/frontend/src/gql/index.ts new file mode 100644 index 000000000000..f2b0e1a9ae69 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/gql/index.ts @@ -0,0 +1,2 @@ +export * from "./fragment-masking" +export * from "./gql" diff --git a/lib/tool_shed/webapp/frontend/src/gqlFragements.ts 
b/lib/tool_shed/webapp/frontend/src/gqlFragements.ts new file mode 100644 index 000000000000..4eca9379d0da --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/gqlFragements.ts @@ -0,0 +1,27 @@ +import { graphql } from "@/gql" + +export const RepositoryListItemFragment = graphql(/* GraphQL */ ` + fragment RepositoryListItemFragment on RelayRepository { + encodedId + name + user { + username + } + description + type + updateTime + homepageUrl + remoteRepositoryUrl + } +`) + +export const UpdateFragment = graphql(/* GraphQL */ ` + fragment RepositoryUpdateItem on RelayRepository { + encodedId + name + user { + username + } + updateTime + } +`) diff --git a/lib/tool_shed/webapp/frontend/src/main.ts b/lib/tool_shed/webapp/frontend/src/main.ts new file mode 100644 index 000000000000..4130acc25b72 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/main.ts @@ -0,0 +1,24 @@ +import { createApp } from "vue" +import { Quasar, Notify, Cookies } from "quasar" +import App from "./App.vue" +// ( + props: TProps, + emit: (event: string, value: TProps[TKey]) => void, + name: TKey = "modelValue" as TKey +): WritableComputedRef { + return computed({ + get: () => props[name], + set: (value: TProps[TKey]) => { + emit("update:modelValue", value) + }, + }) +} diff --git a/lib/tool_shed/webapp/frontend/src/quasar-variables.sass b/lib/tool_shed/webapp/frontend/src/quasar-variables.sass new file mode 100644 index 000000000000..42043a4e6e4f --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/quasar-variables.sass @@ -0,0 +1,15 @@ +$primary : #2c3143 +$secondary : #dee2e6 + +// really struggling to create constrast in visually +// appealing ways... some failed experiments +// #ffdb58 // #a6c9e1 +$accent : #63a0ca + +$dark : #a3aac4 +$dark-page : #121212 + +$positive : #66cc66 +$negative : #e31a1e +$info : #2077b3 +$warning : #fe7f02 diff --git a/lib/tool_shed/webapp/frontend/src/router.ts b/lib/tool_shed/webapp/frontend/src/router.ts new file mode 100644 index 000000000000..61518786abd9 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/router.ts @@ -0,0 +1,13 @@ +import { createRouter, createWebHistory } from "vue-router" +import routes from "@/routes" + +const router = createRouter({ + history: createWebHistory(), + routes: routes, +}) + +export function goToRepository(id: string) { + router.push(`/repositories/${id}`) +} + +export default router diff --git a/lib/tool_shed/webapp/frontend/src/routes.ts b/lib/tool_shed/webapp/frontend/src/routes.ts new file mode 100644 index 000000000000..5dc76636fd81 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/routes.ts @@ -0,0 +1,113 @@ +import AdminControls from "@/components/pages/AdminControls.vue" +import LandingPage from "@/components/pages/LandingPage.vue" +import LoginPage from "@/components/LoginPage.vue" +import RegisterPage from "@/components/RegisterPage.vue" +import RegistrationSuccess from "@/components/RegistrationSuccess.vue" +import HelpPage from "@/components/pages/HelpPage.vue" +import RepositoriesByCategories from "@/components/pages/RepositoriesByCategories.vue" +import RepositoriesByOwners from "@/components/pages/RepositoriesByOwners.vue" +import RepositoriesByOwner from "@/components/pages/RepositoriesByOwner.vue" +import RepositoriesBySearch from "@/components/pages/RepositoriesBySearch.vue" +import RepositoriesByCategory from "@/components/pages/RepositoriesByCategory.vue" +import ComponentsShowcase from "@/components/pages/ComponentsShowcase.vue" +import RepositoryPage from "@/components/pages/RepositoryPage.vue" +import ManageApiKey 
from "@/components/pages/ManageApiKey.vue" +import ChangePassword from "@/components/pages/ChangePassword.vue" +import CitableRepositoryPage from "@/components/pages/CitableRepositoryPage.vue" + +import type { RouteRecordRaw } from "vue-router" + +const routes: Array = [ + { + path: "/", + component: LandingPage, + }, + { + path: "/register", + component: RegisterPage, + }, + { + path: "/login", + component: LoginPage, + }, + { + path: "/registration_success", + component: RegistrationSuccess, + }, + { + path: "/login_success", + component: LandingPage, + props: { message: "Login successful!" }, + }, + { + path: "/logout_success", + component: LandingPage, + props: { message: "Logout successful!" }, + }, + { + path: "/help", + component: HelpPage, + }, + { + path: "/admin", + component: AdminControls, + }, + { + path: "/_component_showcase", + component: ComponentsShowcase, + }, + { + path: "/repositories_by_search", + component: RepositoriesBySearch, + }, + { + path: "/repositories_by_category", + component: RepositoriesByCategories, + }, + { + path: "/repositories_by_owner", + component: RepositoriesByOwners, + }, + { + path: "/repositories_by_owner/:username", + component: RepositoriesByOwner, + props: true, + }, + { + path: "/repositories_by_category/:categoryId", + component: RepositoriesByCategory, + props: true, + }, + { + path: "/repositories/:repositoryId", + component: RepositoryPage, + props: true, + }, + { + path: "/user/api_key", + component: ManageApiKey, + }, + { + path: "/user/change_password", + component: ChangePassword, + }, + // legacy style access - was thought of as a citable URL + // so lets keep this path. + { + path: "/view/:username", + component: RepositoriesByOwner, + props: true, + }, + { + path: "/view/:username/:repositoryName", + component: CitableRepositoryPage, + props: true, + }, + { + path: "/view/:username/:repositoryName/:changesetRevision", + component: CitableRepositoryPage, + props: true, + }, +] + +export default routes diff --git a/lib/tool_shed/webapp/frontend/src/schema/fetcher.ts b/lib/tool_shed/webapp/frontend/src/schema/fetcher.ts new file mode 100644 index 000000000000..bb88f1cd5cbd --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/schema/fetcher.ts @@ -0,0 +1,20 @@ +import { Fetcher } from "openapi-typescript-fetch" +import type { paths } from "./schema" + +/* +import type { Middleware } from "openapi-typescript-fetch"; +import { rethrowSimple } from "@/utils/simple-error"; +const rethrowSimpleMiddleware: Middleware = async (url, init, next) => { + try { + const response = await next(url, init); + return response; + } catch (e) { + rethrowSimple(e); + } +}; + +use: [rethrowSimpleMiddleware] +*/ + +export const fetcher = Fetcher.for() +fetcher.configure({ baseUrl: "" }) diff --git a/lib/tool_shed/webapp/frontend/src/schema/index.ts b/lib/tool_shed/webapp/frontend/src/schema/index.ts new file mode 100644 index 000000000000..f334fdb0d2a2 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/schema/index.ts @@ -0,0 +1,3 @@ +export type { components, operations, paths } from "./schema" +export { fetcher } from "./fetcher" +export type { RepositoryTool, RevisionMetadata } from "./types" diff --git a/lib/tool_shed/webapp/frontend/src/schema/schema.ts b/lib/tool_shed/webapp/frontend/src/schema/schema.ts new file mode 100644 index 000000000000..5b2011c4db4f --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/schema/schema.ts @@ -0,0 +1,2061 @@ +/** + * This file was auto-generated by openapi-typescript. 
+ * Do not make direct changes to the file. + */ + +export interface paths { + "/api/authenticate/baseauth": { + /** Returns returns an API key for authenticated user based on BaseAuth headers. */ + get: operations["authenticate__baseauth"] + } + "/api/categories": { + /** + * Index + * @description index category + */ + get: operations["categories__index"] + /** + * Create + * @description create a category + */ + post: operations["categories__create"] + } + "/api/categories/{encoded_category_id}/repositories": { + /** + * Repositories + * @description display repositories by category + */ + get: operations["categories__repositories"] + } + "/api/ga4gh/trs/v2/service-info": { + /** Service Info */ + get: operations["tools_trs_service_info"] + } + "/api/ga4gh/trs/v2/toolClasses": { + /** Tool Classes */ + get: operations["tools__trs_tool_classes"] + } + "/api/ga4gh/trs/v2/tools": { + /** Trs Index */ + get: operations["tools__trs_index"] + } + "/api/ga4gh/trs/v2/tools/{tool_id}": { + /** Trs Get */ + get: operations["tools__trs_get"] + } + "/api/ga4gh/trs/v2/tools/{tool_id}/versions": { + /** Trs Get Versions */ + get: operations["tools__trs_get_versions"] + } + "/api/repositories": { + /** + * Index + * @description Get a list of repositories or perform a search. + */ + get: operations["repositories__index"] + /** + * Create + * @description create a new repository + */ + post: operations["repositories__create"] + } + "/api/repositories/get_ordered_installable_revisions": { + /** + * Get Ordered Installable Revisions + * @description Get an ordered list of the repository changeset revisions that are installable + */ + get: operations["repositories__get_ordered_installable_revisions"] + } + "/api/repositories/get_repository_revision_install_info": { + /** + * Legacy Install Info + * @description Get information used by the install client to install this repository. + */ + get: operations["repositories__legacy_install_info"] + } + "/api/repositories/install_info": { + /** + * Install Info + * @description Get information used by the install client to install this repository. 
+ */ + get: operations["repositories__install_info"] + } + "/api/repositories/reset_metadata_on_repository": { + /** + * Reset Metadata On Repository Legacy + * @description reset metadata on a repository + */ + post: operations["repositories__reset_legacy"] + } + "/api/repositories/updates": { + /** Updates */ + get: operations["repositories__update"] + } + "/api/repositories/{encoded_repository_id}": { + /** Show */ + get: operations["repositories__show"] + } + "/api/repositories/{encoded_repository_id}/allow_push": { + /** Show Allow Push */ + get: operations["repositories__show_allow_push"] + } + "/api/repositories/{encoded_repository_id}/allow_push/{username}": { + /** Add Allow Push */ + post: operations["repositories__add_allow_push"] + /** Remove Allow Push */ + delete: operations["repositories__remove_allow_push"] + } + "/api/repositories/{encoded_repository_id}/changeset_revision": { + /** + * Create Changeset Revision + * @description upload new revision to the repository + */ + post: operations["repositories__create_revision"] + } + "/api/repositories/{encoded_repository_id}/deprecated": { + /** Set Deprecated */ + put: operations["repositories__set_deprecated"] + /** Unset Deprecated */ + delete: operations["repositories__unset_deprecated"] + } + "/api/repositories/{encoded_repository_id}/metadata": { + /** + * Metadata + * @description Get information about repository metadata + */ + get: operations["repositories__metadata"] + } + "/api/repositories/{encoded_repository_id}/permissions": { + /** Permissions */ + get: operations["repositories__permissions"] + } + "/api/repositories/{encoded_repository_id}/reset_metadata": { + /** + * Reset Metadata On Repository + * @description reset metadata on a repository + */ + post: operations["repositories__reset"] + } + "/api/repositories/{encoded_repository_id}/revisions/{changeset_revision}/malicious": { + /** Set Malicious */ + put: operations["repositories__set_malicious"] + /** Unset Malicious */ + delete: operations["repositories__unset_malicious"] + } + "/api/repositories/{encoded_repository_id}/revisions/{changeset_revision}/readmes": { + /** + * Get Readmes + * @description fetch readmes for repository revision + */ + get: operations["repositories__readmes"] + } + "/api/tools": { + /** Index */ + get: operations["tools__index"] + } + "/api/tools/build_search_index": { + /** + * Build Search Index + * @description Not part of the stable API, just something to simplify + * bootstrapping tool sheds, scripting, testing, etc... 
+ */ + put: operations["tools__build_search_index"] + } + "/api/users": { + /** + * Index + * @description index users + */ + get: operations["users__index"] + /** + * Create + * @description create a user + */ + post: operations["users__create"] + } + "/api/users/current": { + /** + * Current + * @description show current user + */ + get: operations["users__current"] + } + "/api/users/{encoded_user_id}": { + /** + * Show + * @description show a user + */ + get: operations["users__show"] + } + "/api/users/{encoded_user_id}/api_key": { + /** Return the user's API key */ + get: operations["users__get_or_create_api_key"] + /** Creates a new API key for the user */ + post: operations["users__create_api_key"] + /** Delete the current API key of the user */ + delete: operations["users__delete_api_key"] + } + "/api/version": { + /** Version */ + get: operations["configuration__version"] + } + "/api_internal/change_password": { + /** + * Change Password + * @description reset a user + */ + put: operations["users__internal_change_password"] + } + "/api_internal/login": { + /** + * Internal Login + * @description login to web UI + */ + put: operations["users__internal_login"] + } + "/api_internal/logout": { + /** + * Internal Logout + * @description logout of web UI + */ + put: operations["users__internal_logout"] + } + "/api_internal/register": { + /** + * Register + * @description register a user + */ + post: operations["users__internal_register"] + } + "/api_internal/repositories/{encoded_repository_id}/metadata": { + /** + * Metadata Internal + * @description Get information about repository metadata + */ + get: operations["repositories__internal_metadata"] + } +} + +export type webhooks = Record + +export interface components { + schemas: { + /** APIKeyResponse */ + APIKeyResponse: { + /** Api Key */ + api_key: string + } + /** Body_repositories__create_revision */ + Body_repositories__create_revision: { + /** Commit Message */ + commit_message?: Record + /** Files */ + files?: string[] + } + /** BuildSearchIndexResponse */ + BuildSearchIndexResponse: { + /** Repositories Indexed */ + repositories_indexed: number + /** Tools Indexed */ + tools_indexed: number + } + /** Category */ + Category: { + /** Description */ + description: string + /** Id */ + id: string + /** Name */ + name: string + /** Repositories */ + repositories: number + } + /** Checksum */ + Checksum: { + /** + * Checksum + * @description The hex-string encoded checksum for the data. + */ + checksum: string + /** + * Type + * @description The digest method used to create the checksum. + * The value (e.g. `sha-256`) SHOULD be listed as `Hash Name String` in the https://github.com/ga4gh-discovery/ga4gh-checksum/blob/master/hash-alg.csv[GA4GH Checksum Hash Algorithm Registry]. + * Other values MAY be used, as long as implementors are aware of the issues discussed in https://tools.ietf.org/html/rfc6920#section-9.4[RFC6920]. + * GA4GH may provide more explicit guidance for use of non-IANA-registered algorithms in the future. 
+ */ + type: string + } + /** CreateCategoryRequest */ + CreateCategoryRequest: { + /** Description */ + description?: string + /** Name */ + name: string + } + /** CreateRepositoryRequest */ + CreateRepositoryRequest: { + /** Category IDs */ + "category_ids[]": string + /** Description */ + description?: string + /** Homepage Url */ + homepage_url?: string + /** Name */ + name: string + /** Remote Repository Url */ + remote_repository_url?: string + /** Synopsis */ + synopsis: string + /** + * Type + * @default unrestricted + * @enum {string} + */ + type?: "repository_suite_definition" | "tool_dependency_definition" | "unrestricted" + } + /** CreateUserRequest */ + CreateUserRequest: { + /** Email */ + email: string + /** Password */ + password: string + /** Username */ + username: string + } + /** + * DescriptorType + * @description An enumeration. + * @enum {unknown} + */ + DescriptorType: "CWL" | "WDL" | "NFL" | "GALAXY" | "SMK" + /** + * DescriptorTypeVersion + * @description The language version for a given descriptor type. The version should correspond to the actual declared version of the descriptor. For example, tools defined in CWL could have a version of `v1.0.2` whereas WDL tools may have a version of `1.0` or `draft-2` + */ + DescriptorTypeVersion: string + /** DetailedRepository */ + DetailedRepository: { + /** Create Time */ + create_time: string + /** Deleted */ + deleted: boolean + /** Deprecated */ + deprecated: boolean + /** Description */ + description: string + /** Homepage Url */ + homepage_url?: string + /** Id */ + id: string + /** Long Description */ + long_description?: string + /** Name */ + name: string + /** Owner */ + owner: string + /** Private */ + private: boolean + /** Remote Repository Url */ + remote_repository_url?: string + /** Times Downloaded */ + times_downloaded: number + /** Type */ + type: string + /** Update Time */ + update_time: string + /** User Id */ + user_id: string + } + /** FailedRepositoryUpdateMessage */ + FailedRepositoryUpdateMessage: { + /** Err Msg */ + err_msg: string + } + /** HTTPValidationError */ + HTTPValidationError: { + /** Detail */ + detail?: components["schemas"]["ValidationError"][] + } + /** ImageData */ + ImageData: { + /** + * Checksum + * @description A production (immutable) tool version is required to have a hashcode. Not required otherwise, but might be useful to detect changes. This exposes the hashcode for specific image versions to verify that the container version pulled is actually the version that was indexed by the registry. + * @example [ + * { + * "checksum": "77af4d6b9913e693e8d0b4b294fa62ade6054e6b2f1ffb617ac955dd63fb0182", + * "type": "sha256" + * } + * ] + */ + checksum?: components["schemas"]["Checksum"][] + /** + * Image Name + * @description Used in conjunction with a registry_url if provided to locate images. + * @example [ + * "quay.io/seqware/seqware_full/1.1", + * "ubuntu:latest" + * ] + */ + image_name?: string + image_type?: components["schemas"]["ImageType"] + /** + * Registry Host + * @description A docker registry or a URL to a Singularity registry. Used along with image_name to locate a specific image. + * @example [ + * "registry.hub.docker.com" + * ] + */ + registry_host?: string + /** + * Size + * @description Size of the container in bytes. + */ + size?: number + /** + * Updated + * @description Last time the container was updated. + */ + updated?: string + } + /** + * ImageType + * @description An enumeration. 
+ * @enum {unknown} + */ + ImageType: "Docker" | "Singularity" | "Conda" + /** InstallInfo */ + InstallInfo: { + metadata_info?: components["schemas"]["RepositoryMetadataInstallInfo"] + repo_info?: components["schemas"]["RepositoryExtraInstallInfo"] + } + /** Organization */ + Organization: { + /** + * Name + * @description Name of the organization responsible for the service + * @example My organization + */ + name: string + /** + * Url + * Format: uri + * @description URL of the website of the organization (RFC 3986 format) + * @example https://example.com + */ + url: string + } + /** RepositoriesByCategory */ + RepositoriesByCategory: { + /** Description */ + description: string + /** Id */ + id: string + /** Name */ + name: string + /** Repositories */ + repositories: components["schemas"]["Repository"][] + /** Repository Count */ + repository_count: number + } + /** Repository */ + Repository: { + /** Create Time */ + create_time: string + /** Deleted */ + deleted: boolean + /** Deprecated */ + deprecated: boolean + /** Description */ + description: string + /** Homepage Url */ + homepage_url?: string + /** Id */ + id: string + /** Name */ + name: string + /** Owner */ + owner: string + /** Private */ + private: boolean + /** Remote Repository Url */ + remote_repository_url?: string + /** Times Downloaded */ + times_downloaded: number + /** Type */ + type: string + /** Update Time */ + update_time: string + /** User Id */ + user_id: string + } + /** RepositoryDependency */ + RepositoryDependency: { + /** Changeset Revision */ + changeset_revision: string + /** Downloadable */ + downloadable: boolean + /** Has Repository Dependencies */ + has_repository_dependencies: boolean + /** Id */ + id: string + /** Includes Datatypes */ + includes_datatypes?: boolean + /** Includes Tool Dependencies */ + includes_tool_dependencies?: boolean + /** Includes Tools */ + includes_tools: boolean + /** Includes Tools For Display In Tool Panel */ + includes_tools_for_display_in_tool_panel: boolean + /** Includes Workflows */ + includes_workflows?: boolean + /** Invalid Tools */ + invalid_tools: string[] + /** Malicious */ + malicious: boolean + /** Missing Test Components */ + missing_test_components: boolean + /** Numeric Revision */ + numeric_revision: number + repository: components["schemas"]["Repository"] + /** Repository Dependencies */ + repository_dependencies: components["schemas"]["RepositoryDependency"][] + /** Repository Id */ + repository_id: string + /** Tools */ + tools?: components["schemas"]["RepositoryTool"][] + } + /** RepositoryExtraInstallInfo */ + RepositoryExtraInstallInfo: { + /** Changeset Revision */ + changeset_revision: string + /** Ctx Rev */ + ctx_rev: string + /** Description */ + description: string + /** Name */ + name: string + /** Repository Clone Url */ + repository_clone_url: string + /** Repository Dependencies */ + repository_dependencies?: Record + /** Repository Owner */ + repository_owner: string + } + /** RepositoryMetadata */ + RepositoryMetadata: { + [key: string]: components["schemas"]["RepositoryRevisionMetadata"] | undefined + } + /** RepositoryMetadataInstallInfo */ + RepositoryMetadataInstallInfo: { + /** Changeset Revision */ + changeset_revision: string + /** Downloadable */ + downloadable: boolean + /** Has Repository Dependencies */ + has_repository_dependencies: boolean + /** Id */ + id: string + /** Includes Tools */ + includes_tools: boolean + /** Includes Tools For Display In Tool Panel */ + includes_tools_for_display_in_tool_panel: boolean + /** 
Malicious */ + malicious: boolean + /** Repository Id */ + repository_id: string + /** Url */ + url: string + /** Valid Tools */ + valid_tools: components["schemas"]["ValidToolDict"][] + } + /** RepositoryPermissions */ + RepositoryPermissions: { + /** Allow Push */ + allow_push: string[] + /** Can Manage */ + can_manage: boolean + /** Can Push */ + can_push: boolean + } + /** RepositoryRevisionMetadata */ + RepositoryRevisionMetadata: { + /** Changeset Revision */ + changeset_revision: string + /** Downloadable */ + downloadable: boolean + /** Has Repository Dependencies */ + has_repository_dependencies: boolean + /** Id */ + id: string + /** Includes Datatypes */ + includes_datatypes?: boolean + /** Includes Tool Dependencies */ + includes_tool_dependencies?: boolean + /** Includes Tools */ + includes_tools: boolean + /** Includes Tools For Display In Tool Panel */ + includes_tools_for_display_in_tool_panel: boolean + /** Includes Workflows */ + includes_workflows?: boolean + /** Invalid Tools */ + invalid_tools: string[] + /** Malicious */ + malicious: boolean + /** Missing Test Components */ + missing_test_components: boolean + /** Numeric Revision */ + numeric_revision: number + repository: components["schemas"]["Repository"] + /** Repository Dependencies */ + repository_dependencies: components["schemas"]["RepositoryDependency"][] + /** Repository Id */ + repository_id: string + /** Tools */ + tools?: components["schemas"]["RepositoryTool"][] + } + /** RepositoryRevisionReadmes */ + RepositoryRevisionReadmes: { + [key: string]: string | undefined + } + /** RepositorySearchHit */ + RepositorySearchHit: { + repository: components["schemas"]["RepositorySearchResult"] + /** Score */ + score: number + } + /** RepositorySearchResult */ + RepositorySearchResult: { + /** Approved */ + approved: boolean + /** Categories */ + categories: string + /** Description */ + description: string + /** Full Last Updated */ + full_last_updated: string + /** Homepage Url */ + homepage_url?: string + /** Id */ + id: string + /** Last Update */ + last_update?: string + /** Long Description */ + long_description?: string + /** Name */ + name: string + /** Remote Repository Url */ + remote_repository_url?: string + /** Repo Lineage */ + repo_lineage: string + /** Repo Owner Username */ + repo_owner_username: string + /** Times Downloaded */ + times_downloaded: number + } + /** RepositorySearchResults */ + RepositorySearchResults: { + /** Hits */ + hits: components["schemas"]["RepositorySearchHit"][] + /** Hostname */ + hostname: string + /** Page */ + page: string + /** Page Size */ + page_size: string + /** Total Results */ + total_results: string + } + /** RepositoryTool */ + RepositoryTool: { + /** Description */ + description: string + /** Guid */ + guid: string + /** Id */ + id: string + /** Name */ + name: string + /** Requirements */ + requirements: Record[] + /** Tool Config */ + tool_config: string + /** Tool Type */ + tool_type: string + /** Version */ + version: string + } + /** RepositoryUpdate */ + RepositoryUpdate: + | components["schemas"]["ValidRepostiroyUpdateMessage"] + | components["schemas"]["FailedRepositoryUpdateMessage"] + /** ResetMetadataOnRepositoryResponse */ + ResetMetadataOnRepositoryResponse: { + /** Repository Status */ + repository_status: string[] + /** Start Time */ + start_time: string + /** Status */ + status: string + /** Stop Time */ + stop_time: string + } + /** Service */ + Service: { + /** + * Contacturl + * Format: uri + * @description URL of the contact for the 
provider of this service, e.g. a link to a contact form (RFC 3986 format), or an email (RFC 2368 format). + * @example mailto:support@example.com + */ + contactUrl?: string + /** + * Createdat + * Format: date-time + * @description Timestamp describing when the service was first deployed and available (RFC 3339 format) + * @example 2019-06-04T12:58:19Z + */ + createdAt?: string + /** + * Description + * @description Description of the service. Should be human readable and provide information about the service. + * @example This service provides... + */ + description?: string + /** + * Documentationurl + * Format: uri + * @description URL of the documentation of this service (RFC 3986 format). This should help someone learn how to use your service, including any specifics required to access data, e.g. authentication. + * @example https://docs.myservice.example.com + */ + documentationUrl?: string + /** + * Environment + * @description Environment the service is running in. Use this to distinguish between production, development and testing/staging deployments. Suggested values are prod, test, dev, staging. However this is advised and not enforced. + * @example test + */ + environment?: string + /** + * Id + * @description Unique ID of this service. Reverse domain name notation is recommended, though not required. The identifier should attempt to be globally unique so it can be used in downstream aggregator services e.g. Service Registry. + * @example org.ga4gh.myservice + */ + id: string + /** + * Name + * @description Name of this service. Should be human readable. + * @example My project + */ + name: string + /** + * Organization + * @description Organization providing the service + */ + organization: components["schemas"]["Organization"] + type: components["schemas"]["ServiceType"] + /** + * Updatedat + * Format: date-time + * @description Timestamp describing when the service was last updated (RFC 3339 format) + * @example 2019-06-04T12:58:19Z + */ + updatedAt?: string + /** + * Version + * @description Version of the service being described. Semantic versioning is recommended, but other identifiers, such as dates or commit hashes, are also allowed. The version should be changed whenever the service is updated. + * @example 1.0.0 + */ + version: string + } + /** ServiceType */ + ServiceType: { + /** + * Artifact + * @description Name of the API or GA4GH specification implemented. Official GA4GH types should be assigned as part of standards approval process. Custom artifacts are supported. + * @example beacon + */ + artifact: string + /** + * Group + * @description Namespace in reverse domain name format. Use `org.ga4gh` for implementations compliant with official GA4GH specifications. For services with custom APIs not standardized by GA4GH, or implementations diverging from official GA4GH specifications, use a different namespace (e.g. your organization's reverse domain name). + * @example org.ga4gh + */ + group: string + /** + * Version + * @description Version of the API or specification. GA4GH specifications use semantic versioning. + * @example 1.0.0 + */ + version: string + } + /** Tool */ + Tool: { + /** + * Aliases + * @description Support for this parameter is optional for tool registries that support aliases. + * A list of strings that can be used to identify this tool which could be straight up URLs. + * This can be used to expose alternative ids (such as GUIDs) for a tool + * for registries. Can be used to match tools across registries. 
+ */ + aliases?: string[] + /** + * Checker Url + * @description Optional url to the checker tool that will exit successfully if this tool produced the expected result given test data. + */ + checker_url?: string + /** + * Description + * @description The description of the tool. + */ + description?: string + /** + * Has Checker + * @description Whether this tool has a checker tool associated with it. + */ + has_checker?: boolean + /** + * Id + * @description A unique identifier of the tool, scoped to this registry. + * @example 123456 + */ + id: string + /** + * Meta Version + * @description The version of this tool in the registry. Iterates when fields like the description, author, etc. are updated. + */ + meta_version?: string + /** + * Name + * @description The name of the tool. + */ + name?: string + /** + * Organization + * @description The organization that published the image. + */ + organization: string + toolclass: components["schemas"]["ToolClass"] + /** + * Url + * @description The URL for this tool in this registry. + * @example http://agora.broadinstitute.org/tools/123456 + */ + url: string + /** + * Versions + * @description A list of versions for this tool. + */ + versions: components["schemas"]["ToolVersion"][] + } + /** ToolClass */ + ToolClass: { + /** + * Description + * @description A longer explanation of what this class is and what it can accomplish. + */ + description?: string + /** + * Id + * @description The unique identifier for the class. + */ + id?: string + /** + * Name + * @description A short friendly name for the class. + */ + name?: string + } + /** ToolVersion */ + ToolVersion: { + /** + * Author + * @description Contact information for the author of this version of the tool in the registry. (More complex authorship information is handled by the descriptor). + */ + author?: string[] + /** + * Containerfile + * @description Reports if this tool has a containerfile available. (For Docker-based tools, this would indicate the presence of a Dockerfile) + */ + containerfile?: boolean + /** @description The type (or types) of descriptors available. */ + descriptor_type?: components["schemas"]["DescriptorType"][] + /** + * Descriptor Type Version + * @description A map providing information about the language versions used in this tool. The keys should be the same values used in the `descriptor_type` field, and the value should be an array of all the language versions used for the given `descriptor_type`. Depending on the `descriptor_type` (e.g. CWL) multiple version values may be used in a single tool. + * @example { + * "WDL": ["1.0", "1.0"], + * "CWL": ["v1.0.2"], + * "NFL": ["DSL2"] + * } + */ + descriptor_type_version?: { + [key: string]: components["schemas"]["DescriptorTypeVersion"][] | undefined + } + /** + * Id + * @description An identifier of the version of this tool for this particular tool registry. + * @example v1 + */ + id: string + /** + * Images + * @description All known docker images (and versions/hashes) used by this tool. If the tool has to evaluate any of the docker images strings at runtime, those ones cannot be reported here. + */ + images?: components["schemas"]["ImageData"][] + /** + * Included Apps + * @description An array of IDs for the applications that are stored inside this tool. 
+ * @example [ + * "https://bio.tools/tool/mytum.de/SNAP2/1", + * "https://bio.tools/bioexcel_seqqc" + * ] + */ + included_apps?: string[] + /** + * Is Production + * @description This version of a tool is guaranteed to not change over time (for example, a tool built from a tag in git as opposed to a branch). A production quality tool is required to have a checksum + */ + is_production?: boolean + /** + * Meta Version + * @description The version of this tool version in the registry. Iterates when fields like the description, author, etc. are updated. + */ + meta_version?: string + /** + * Name + * @description The name of the version. + */ + name?: string + /** + * Signed + * @description Reports whether this version of the tool has been signed. + */ + signed?: boolean + /** + * Url + * @description The URL for this tool version in this registry. + * @example http://agora.broadinstitute.org/tools/123456/versions/1 + */ + url: string + /** + * Verified + * @description Reports whether this tool has been verified by a specific organization or individual. + */ + verified?: boolean + /** + * Verified Source + * @description Source of metadata that can support a verified tool, such as an email or URL. + */ + verified_source?: string[] + } + /** UiChangePasswordRequest */ + UiChangePasswordRequest: { + /** Current */ + current: string + /** Password */ + password: string + } + /** UiLoginRequest */ + UiLoginRequest: { + /** Login */ + login: string + /** Password */ + password: string + /** Session Csrf Token */ + session_csrf_token: string + } + /** UiLoginResponse */ + UiLoginResponse: Record + /** UiLogoutRequest */ + UiLogoutRequest: { + /** + * Logout All + * @default false + */ + logout_all?: boolean + /** Session Csrf Token */ + session_csrf_token: string + } + /** UiLogoutResponse */ + UiLogoutResponse: Record + /** UiRegisterRequest */ + UiRegisterRequest: { + /** Bear Field */ + bear_field: string + /** Email */ + email: string + /** Password */ + password: string + /** Username */ + username: string + } + /** UiRegisterResponse */ + UiRegisterResponse: { + /** + * Activation Error + * @default false + */ + activation_error?: boolean + /** + * Activation Sent + * @default false + */ + activation_sent?: boolean + /** Contact Email */ + contact_email?: string + /** Email */ + email: string + } + /** User */ + User: { + /** Id */ + id: string + /** Username */ + username: string + } + /** ValidRepostiroyUpdateMessage */ + ValidRepostiroyUpdateMessage: { + /** Message */ + message: string + } + /** ValidToolDict */ + ValidToolDict: { + /** Add To Tool Panel */ + add_to_tool_panel: boolean + /** Description */ + description: string + /** Guid */ + guid: string + /** Id */ + id: string + /** Name */ + name: string + /** Requirements */ + requirements: Record[] + /** Tests */ + tests: Record[] + /** Tool Config */ + tool_config: string + /** Tool Type */ + tool_type: string + /** Version */ + version: string + /** Version String Cmd */ + version_string_cmd: string + } + /** ValidationError */ + ValidationError: { + /** Location */ + loc: string[] + /** Message */ + msg: string + /** Error Type */ + type: string + } + /** Version */ + Version: { + /** + * Api Version + * @default v1 + */ + api_version?: string + /** Version */ + version: string + /** Version Major */ + version_major: string + } + } + responses: never + parameters: never + requestBodies: never + headers: never + pathItems: never +} + +export type external = Record + +export interface operations { + authenticate__baseauth: { + 
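// The operation shapes below are consumed through the typed fetcher declared
// in src/schema/fetcher.ts. A minimal usage sketch, assuming
// openapi-typescript-fetch's path/method/create chain; the query parameters
// and response body types are inferred from the paths interface above, and
// the owner value is purely illustrative:
//
//   import { fetcher } from "@/schema"
//
//   const indexRepositories = fetcher.path("/api/repositories").method("get").create()
//   const { data } = await indexRepositories({ owner: "some-owner" })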
/** Returns returns an API key for authenticated user based on BaseAuth headers. */ + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["APIKeyResponse"] + } + } + } + } + categories__index: { + /** + * Index + * @description index category + */ + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["Category"][] + } + } + } + } + categories__create: { + /** + * Create + * @description create a category + */ + requestBody: { + content: { + "application/json": components["schemas"]["CreateCategoryRequest"] + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["Category"] + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + categories__repositories: { + /** + * Repositories + * @description display repositories by category + */ + parameters: { + query?: { + installable?: boolean + sort_key?: string + sort_order?: string + page?: number + } + /** @description The encoded database identifier of the category. */ + path: { + encoded_category_id: string + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["RepositoriesByCategory"] + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + tools_trs_service_info: { + /** Service Info */ + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["Service"] + } + } + } + } + tools__trs_tool_classes: { + /** Tool Classes */ + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["ToolClass"][] + } + } + } + } + tools__trs_index: { + /** Trs Index */ + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": Record + } + } + } + } + tools__trs_get: { + /** Trs Get */ + parameters: { + /** @description See also https://ga4gh.github.io/tool-registry-service-schemas/DataModel/#trs-tool-and-trs-tool-version-ids */ + path: { + tool_id: string + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["Tool"] + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + tools__trs_get_versions: { + /** Trs Get Versions */ + parameters: { + /** @description See also https://ga4gh.github.io/tool-registry-service-schemas/DataModel/#trs-tool-and-trs-tool-version-ids */ + path: { + tool_id: string + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["ToolVersion"][] + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + repositories__index: { + /** + * Index + * @description Get a list of repositories or perform a search. 
+ */ + parameters?: { + query?: { + q?: string + page?: number + page_size?: number + deleted?: boolean + owner?: string + name?: string + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": + | components["schemas"]["RepositorySearchResults"] + | components["schemas"]["Repository"][] + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + repositories__create: { + /** + * Create + * @description create a new repository + */ + requestBody: { + content: { + "application/json": components["schemas"]["CreateRepositoryRequest"] + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["Repository"] + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + repositories__get_ordered_installable_revisions: { + /** + * Get Ordered Installable Revisions + * @description Get an ordered list of the repository changeset revisions that are installable + */ + parameters?: { + query?: { + owner?: string + name?: string + tsr_id?: string + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": string[] + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + repositories__legacy_install_info: { + /** + * Legacy Install Info + * @description Get information used by the install client to install this repository. + */ + parameters: { + /** @description Name of the target repository. */ + /** @description Owner of the target repository. */ + /** @description Changeset of the target repository. */ + query: { + name: string + owner: string + changeset_revision: string + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": Record[] + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + repositories__install_info: { + /** + * Install Info + * @description Get information used by the install client to install this repository. + */ + parameters: { + /** @description Name of the target repository. */ + /** @description Owner of the target repository. */ + /** @description Changeset of the target repository. 
*/
+            query: {
+                name: string
+                owner: string
+                changeset_revision: string
+            }
+        }
+        responses: {
+            /** @description Successful Response */
+            200: {
+                content: {
+                    "application/json": components["schemas"]["InstallInfo"]
+                }
+            }
+            /** @description Validation Error */
+            422: {
+                content: {
+                    "application/json": components["schemas"]["HTTPValidationError"]
+                }
+            }
+        }
+    }
+    repositories__reset_legacy: {
+        /**
+         * Reset Metadata On Repository Legacy
+         * @description reset metadata on a repository
+         */
+        responses: {
+            /** @description Successful Response */
+            200: {
+                content: {
+                    "application/json": components["schemas"]["ResetMetadataOnRepositoryResponse"]
+                }
+            }
+        }
+    }
+    repositories__update: {
+        /** Updates */
+        parameters: {
+            query: {
+                owner?: string
+                name?: string
+                changeset_revision: string
+                hexlify?: boolean
+            }
+        }
+        responses: {
+            /** @description Successful Response */
+            200: {
+                content: {
+                    "application/json": Record<string, never>
+                }
+            }
+            /** @description Validation Error */
+            422: {
+                content: {
+                    "application/json": components["schemas"]["HTTPValidationError"]
+                }
+            }
+        }
+    }
+    repositories__show: {
+        /** Show */
+        parameters: {
+            /** @description The encoded database identifier of the repository. */
+            path: {
+                encoded_repository_id: string
+            }
+        }
+        responses: {
+            /** @description Successful Response */
+            200: {
+                content: {
+                    "application/json": components["schemas"]["DetailedRepository"]
+                }
+            }
+            /** @description Validation Error */
+            422: {
+                content: {
+                    "application/json": components["schemas"]["HTTPValidationError"]
+                }
+            }
+        }
+    }
+    repositories__show_allow_push: {
+        /** Show Allow Push */
+        parameters: {
+            /** @description The encoded database identifier of the repository. */
+            path: {
+                encoded_repository_id: string
+            }
+        }
+        responses: {
+            /** @description Successful Response */
+            200: {
+                content: {
+                    "application/json": string[]
+                }
+            }
+            /** @description Validation Error */
+            422: {
+                content: {
+                    "application/json": components["schemas"]["HTTPValidationError"]
+                }
+            }
+        }
+    }
+    repositories__add_allow_push: {
+        /** Add Allow Push */
+        parameters: {
+            /** @description The encoded database identifier of the repository. */
+            /** @description The target username. */
+            path: {
+                encoded_repository_id: string
+                username: string
+            }
+        }
+        responses: {
+            /** @description Successful Response */
+            200: {
+                content: {
+                    "application/json": string[]
+                }
+            }
+            /** @description Validation Error */
+            422: {
+                content: {
+                    "application/json": components["schemas"]["HTTPValidationError"]
+                }
+            }
+        }
+    }
+    repositories__remove_allow_push: {
+        /** Remove Allow Push */
+        parameters: {
+            /** @description The encoded database identifier of the repository. */
+            /** @description The target username. */
+            path: {
+                encoded_repository_id: string
+                username: string
+            }
+        }
+        responses: {
+            /** @description Successful Response */
+            200: {
+                content: {
+                    "application/json": string[]
+                }
+            }
+            /** @description Validation Error */
+            422: {
+                content: {
+                    "application/json": components["schemas"]["HTTPValidationError"]
+                }
+            }
+        }
+    }
+    repositories__create_revision: {
+        /**
+         * Create Changeset Revision
+         * @description upload new revision to the repository
+         */
+        parameters: {
+            /** @description Set commit message as a query parameter. */
+            query?: {
+                commit_message?: string
+            }
+            /** @description The encoded database identifier of the repository. */
+            path: {
+                encoded_repository_id: string
+            }
+        }
+        requestBody?: {
+            content: {
+                "multipart/form-data": components["schemas"]["Body_repositories__create_revision"]
+            }
+        }
+        responses: {
+            /** @description Successful Response */
+            200: {
+                content: {
+                    "application/json": components["schemas"]["RepositoryUpdate"]
+                }
+            }
+            /** @description Validation Error */
+            422: {
+                content: {
+                    "application/json": components["schemas"]["HTTPValidationError"]
+                }
+            }
+        }
+    }
+    repositories__set_deprecated: {
+        /** Set Deprecated */
+        parameters: {
+            /** @description The encoded database identifier of the repository. */
+            path: {
+                encoded_repository_id: string
+            }
+        }
+        responses: {
+            /** @description Successful Response */
+            204: never
+            /** @description Validation Error */
+            422: {
+                content: {
+                    "application/json": components["schemas"]["HTTPValidationError"]
+                }
+            }
+        }
+    }
+    repositories__unset_deprecated: {
+        /** Unset Deprecated */
+        parameters: {
+            /** @description The encoded database identifier of the repository. */
+            path: {
+                encoded_repository_id: string
+            }
+        }
+        responses: {
+            /** @description Successful Response */
+            204: never
+            /** @description Validation Error */
+            422: {
+                content: {
+                    "application/json": components["schemas"]["HTTPValidationError"]
+                }
+            }
+        }
+    }
+    repositories__metadata: {
+        /**
+         * Metadata
+         * @description Get information about repository metadata
+         */
+        parameters: {
+            /** @description Include only downloadable repositories. */
+            query?: {
+                downloadable_only?: boolean
+            }
+            /** @description The encoded database identifier of the repository. */
+            path: {
+                encoded_repository_id: string
+            }
+        }
+        responses: {
+            /** @description Successful Response */
+            200: {
+                content: {
+                    "application/json": Record<string, never>
+                }
+            }
+            /** @description Validation Error */
+            422: {
+                content: {
+                    "application/json": components["schemas"]["HTTPValidationError"]
+                }
+            }
+        }
+    }
+    repositories__permissions: {
+        /** Permissions */
+        parameters: {
+            /** @description The encoded database identifier of the repository. */
+            path: {
+                encoded_repository_id: string
+            }
+        }
+        responses: {
+            /** @description Successful Response */
+            200: {
+                content: {
+                    "application/json": components["schemas"]["RepositoryPermissions"]
+                }
+            }
+            /** @description Validation Error */
+            422: {
+                content: {
+                    "application/json": components["schemas"]["HTTPValidationError"]
+                }
+            }
+        }
+    }
+    repositories__reset: {
+        /**
+         * Reset Metadata On Repository
+         * @description reset metadata on a repository
+         */
+        parameters: {
+            /** @description The encoded database identifier of the repository. */
+            path: {
+                encoded_repository_id: string
+            }
+        }
+        responses: {
+            /** @description Successful Response */
+            200: {
+                content: {
+                    "application/json": components["schemas"]["ResetMetadataOnRepositoryResponse"]
+                }
+            }
+            /** @description Validation Error */
+            422: {
+                content: {
+                    "application/json": components["schemas"]["HTTPValidationError"]
+                }
+            }
+        }
+    }
+    repositories__set_malicious: {
+        /** Set Malicious */
+        parameters: {
+            /** @description The encoded database identifier of the repository. */
+            /** @description The changeset revision corresponding to the target revision of the target repository. */
+            path: {
+                encoded_repository_id: string
+                changeset_revision: string
+            }
+        }
+        responses: {
+            /** @description Successful Response */
+            204: never
+            /** @description Validation Error */
+            422: {
+                content: {
+                    "application/json": components["schemas"]["HTTPValidationError"]
+                }
+            }
+        }
+    }
+    repositories__unset_malicious: {
+        /** Unset Malicious */
+        parameters: {
+            /** @description The encoded database identifier of the repository. */
+            /** @description The changeset revision corresponding to the target revision of the target repository. */
+            path: {
+                encoded_repository_id: string
+                changeset_revision: string
+            }
+        }
+        responses: {
+            /** @description Successful Response */
+            204: never
+            /** @description Validation Error */
+            422: {
+                content: {
+                    "application/json": components["schemas"]["HTTPValidationError"]
+                }
+            }
+        }
+    }
+    repositories__readmes: {
+        /**
+         * Get Readmes
+         * @description fetch readmes for repository revision
+         */
+        parameters: {
+            /** @description The encoded database identifier of the repository. */
+            /** @description The changeset revision corresponding to the target revision of the target repository. */
+            path: {
+                encoded_repository_id: string
+                changeset_revision: string
+            }
+        }
+        responses: {
+            /** @description Successful Response */
+            200: {
+                content: {
+                    "application/json": components["schemas"]["RepositoryRevisionReadmes"]
+                }
+            }
+            /** @description Validation Error */
+            422: {
+                content: {
+                    "application/json": components["schemas"]["HTTPValidationError"]
+                }
+            }
+        }
+    }
+    tools__index: {
+        /** Index */
+        parameters: {
+            query: {
+                q: string
+                page?: number
+                page_size?: number
+            }
+        }
+        responses: {
+            /** @description Successful Response */
+            200: {
+                content: {
+                    "application/json": Record<string, never>
+                }
+            }
+            /** @description Validation Error */
+            422: {
+                content: {
+                    "application/json": components["schemas"]["HTTPValidationError"]
+                }
+            }
+        }
+    }
+    tools__build_search_index: {
+        /**
+         * Build Search Index
+         * @description Not part of the stable API, just something to simplify
+         * bootstrapping tool sheds, scripting, testing, etc...
+         */
+        responses: {
+            /** @description Successful Response */
+            200: {
+                content: {
+                    "application/json": components["schemas"]["BuildSearchIndexResponse"]
+                }
+            }
+        }
+    }
+    users__index: {
+        /**
+         * Index
+         * @description index users
+         */
+        responses: {
+            /** @description Successful Response */
+            200: {
+                content: {
+                    "application/json": components["schemas"]["User"][]
+                }
+            }
+        }
+    }
+    users__create: {
+        /**
+         * Create
+         * @description create a user
+         */
+        requestBody: {
+            content: {
+                "application/json": components["schemas"]["CreateUserRequest"]
+            }
+        }
+        responses: {
+            /** @description Successful Response */
+            200: {
+                content: {
+                    "application/json": components["schemas"]["User"]
+                }
+            }
+            /** @description Validation Error */
+            422: {
+                content: {
+                    "application/json": components["schemas"]["HTTPValidationError"]
+                }
+            }
+        }
+    }
+    users__current: {
+        /**
+         * Current
+         * @description show current user
+         */
+        responses: {
+            /** @description Successful Response */
+            200: {
+                content: {
+                    "application/json": components["schemas"]["User"]
+                }
+            }
+        }
+    }
+    users__show: {
+        /**
+         * Show
+         * @description show a user
+         */
+        parameters: {
+            /** @description The encoded database identifier of the user. */
+            path: {
+                encoded_user_id: string
+            }
+        }
+        responses: {
+            /** @description Successful Response */
+            200: {
+                content: {
+                    "application/json": components["schemas"]["User"]
+                }
+            }
+            /** @description Validation Error */
+            422: {
+                content: {
+                    "application/json": components["schemas"]["HTTPValidationError"]
+                }
+            }
+        }
+    }
+    users__get_or_create_api_key: {
+        /** Return the user's API key */
+        parameters: {
+            /** @description The encoded database identifier of the user. */
+            path: {
+                encoded_user_id: string
+            }
+        }
+        responses: {
+            /** @description Successful Response */
+            200: {
+                content: {
+                    "application/json": string
+                }
+            }
+            /** @description Validation Error */
+            422: {
+                content: {
+                    "application/json": components["schemas"]["HTTPValidationError"]
+                }
+            }
+        }
+    }
+    users__create_api_key: {
+        /** Creates a new API key for the user */
+        parameters: {
+            /** @description The encoded database identifier of the user. */
+            path: {
+                encoded_user_id: string
+            }
+        }
+        responses: {
+            /** @description Successful Response */
+            200: {
+                content: {
+                    "application/json": string
+                }
+            }
+            /** @description Validation Error */
+            422: {
+                content: {
+                    "application/json": components["schemas"]["HTTPValidationError"]
+                }
+            }
+        }
+    }
+    users__delete_api_key: {
+        /** Delete the current API key of the user */
+        parameters: {
+            /** @description The encoded database identifier of the user. */
+            path: {
+                encoded_user_id: string
+            }
+        }
+        responses: {
+            /** @description Successful Response */
+            204: never
+            /** @description Validation Error */
+            422: {
+                content: {
+                    "application/json": components["schemas"]["HTTPValidationError"]
+                }
+            }
+        }
+    }
+    configuration__version: {
+        /** Version */
+        responses: {
+            /** @description Successful Response */
+            200: {
+                content: {
+                    "application/json": components["schemas"]["Version"]
+                }
+            }
+        }
+    }
+    users__internal_change_password: {
+        /**
+         * Change Password
+         * @description reset a user
+         */
+        requestBody: {
+            content: {
+                "application/json": components["schemas"]["UiChangePasswordRequest"]
+            }
+        }
+        responses: {
+            /** @description Successful Response */
+            204: never
+            /** @description Validation Error */
+            422: {
+                content: {
+                    "application/json": components["schemas"]["HTTPValidationError"]
+                }
+            }
+        }
+    }
+    users__internal_login: {
+        /**
+         * Internal Login
+         * @description login to web UI
+         */
+        requestBody: {
+            content: {
+                "application/json": components["schemas"]["UiLoginRequest"]
+            }
+        }
+        responses: {
+            /** @description Successful Response */
+            200: {
+                content: {
+                    "application/json": components["schemas"]["UiLoginResponse"]
+                }
+            }
+            /** @description Validation Error */
+            422: {
+                content: {
+                    "application/json": components["schemas"]["HTTPValidationError"]
+                }
+            }
+        }
+    }
+    users__internal_logout: {
+        /**
+         * Internal Logout
+         * @description logout of web UI
+         */
+        requestBody: {
+            content: {
+                "application/json": components["schemas"]["UiLogoutRequest"]
+            }
+        }
+        responses: {
+            /** @description Successful Response */
+            200: {
+                content: {
+                    "application/json": components["schemas"]["UiLogoutResponse"]
+                }
+            }
+            /** @description Validation Error */
+            422: {
+                content: {
+                    "application/json": components["schemas"]["HTTPValidationError"]
+                }
+            }
+        }
+    }
+    users__internal_register: {
+        /**
+         * Register
+         * @description register a user
+         */
+        requestBody: {
+            content: {
+                "application/json": components["schemas"]["UiRegisterRequest"]
+            }
+        }
+        responses: {
+            /** @description Successful Response */
+            200: {
+                content: {
+                    "application/json": components["schemas"]["UiRegisterResponse"]
+                }
+            }
+            /** @description Validation Error */
+            422: {
+                content: {
+                    "application/json": components["schemas"]["HTTPValidationError"]
+                }
+            }
+        }
+    }
+    repositories__internal_metadata: {
+        /**
+         * Metadata Internal
+         * @description Get information about repository metadata
+         */
+        parameters: {
+            /** @description Include only downloadable repositories. */
+            query?: {
+                downloadable_only?: boolean
+            }
+            /** @description The encoded database identifier of the repository. */
+            path: {
+                encoded_repository_id: string
+            }
+        }
+        responses: {
+            /** @description Successful Response */
+            200: {
+                content: {
+                    "application/json": components["schemas"]["RepositoryMetadata"]
+                }
+            }
+            /** @description Validation Error */
+            422: {
+                content: {
+                    "application/json": components["schemas"]["HTTPValidationError"]
+                }
+            }
+        }
+    }
+}
diff --git a/lib/tool_shed/webapp/frontend/src/schema/types.ts b/lib/tool_shed/webapp/frontend/src/schema/types.ts
new file mode 100644
index 000000000000..2e328c9cb591
--- /dev/null
+++ b/lib/tool_shed/webapp/frontend/src/schema/types.ts
@@ -0,0 +1,5 @@
+import type { components } from "./schema"
+
+export type Repository = components["schemas"]["Repository"]
+export type RevisionMetadata = components["schemas"]["RepositoryRevisionMetadata"]
+export type RepositoryTool = components["schemas"]["RepositoryTool"]
diff --git a/lib/tool_shed/webapp/frontend/src/shims-vue.d.ts b/lib/tool_shed/webapp/frontend/src/shims-vue.d.ts
new file mode 100644
index 000000000000..bae47cae845d
--- /dev/null
+++ b/lib/tool_shed/webapp/frontend/src/shims-vue.d.ts
@@ -0,0 +1,6 @@
+declare module "*.vue" {
+    import { DefineComponent } from "vue"
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any, @typescript-eslint/ban-types
+    const component: DefineComponent<{}, {}, any>
+    export default component
+}
diff --git a/lib/tool_shed/webapp/frontend/src/stores/auth.store.ts b/lib/tool_shed/webapp/frontend/src/stores/auth.store.ts
new file mode 100644
index 000000000000..9392f9d60ba4
--- /dev/null
+++ b/lib/tool_shed/webapp/frontend/src/stores/auth.store.ts
@@ -0,0 +1,54 @@
+import { defineStore } from "pinia"
+import { ensureCookie, notifyOnCatch } from "@/util"
+import { getCurrentUser } from "@/apiUtil"
+
+import { fetcher } from "@/schema"
+
+const loginFetcher = fetcher.path("/api_internal/login").method("put").create()
+const logoutFetcher = fetcher.path("/api_internal/logout").method("put").create()
+
+export const useAuthStore = defineStore({
+    id: "auth",
+    state: () => ({
+        // initialize state from local storage to enable user to stay logged in
+        user: JSON.parse(localStorage.getItem("user") || "null"),
+        returnUrl: null,
+    }),
+    actions: {
+        async setup() {
+            const user = await getCurrentUser()
+            this.user = user
+            // store user details and jwt in local storage to keep user logged in between page refreshes
+            localStorage.setItem("user", user ? JSON.stringify(user) : "null")
+        },
+        async login(username: string, password: string) {
+            const token = ensureCookie("session_csrf_token")
+            loginFetcher({
+                login: username,
+                password: password,
+                session_csrf_token: token,
+            })
+                .then(async () => {
+                    // We need to do this outside the router to get updated
+                    // cookies and hence csrf token.
+                    window.location.href = "/login_success"
+                })
+                .catch(notifyOnCatch)
+        },
+        async logout() {
+            const token = ensureCookie("session_csrf_token")
+            logoutFetcher({
+                session_csrf_token: token,
+            })
+                .then(async () => {
+                    this.user = null
+                    localStorage.removeItem("user")
+                    // We need to do this outside the router to get updated
+                    // cookies and hence csrf token.
+                    window.location.href = "/logout_success"
+                })
+                .catch(notifyOnCatch)
+        },
+    },
+})
diff --git a/lib/tool_shed/webapp/frontend/src/stores/categories.store.ts b/lib/tool_shed/webapp/frontend/src/stores/categories.store.ts
new file mode 100644
index 000000000000..4038ad835f67
--- /dev/null
+++ b/lib/tool_shed/webapp/frontend/src/stores/categories.store.ts
@@ -0,0 +1,33 @@
+import { defineStore } from "pinia"
+
+import { fetcher, components } from "@/schema"
+const categoriesFetcher = fetcher.path("/api/categories").method("get").create()
+type Category = components["schemas"]["Category"]
+
+export const useCategoriesStore = defineStore({
+    id: "categories",
+    state: () => ({
+        categories: [] as Category[],
+        loading: true,
+    }),
+    actions: {
+        async getAll() {
+            this.loading = true
+            const { data: categories } = await categoriesFetcher({})
+            this.categories = categories
+            this.loading = false
+        },
+    },
+    getters: {
+        byId(state) {
+            return (categoryId: string) => {
+                for (const category of state.categories) {
+                    if (category.id == categoryId) {
+                        return category
+                    }
+                }
+                return null
+            }
+        },
+    },
+})
diff --git a/lib/tool_shed/webapp/frontend/src/stores/index.ts b/lib/tool_shed/webapp/frontend/src/stores/index.ts
new file mode 100644
index 000000000000..bb94b71fd9fb
--- /dev/null
+++ b/lib/tool_shed/webapp/frontend/src/stores/index.ts
@@ -0,0 +1,4 @@
+export { useAuthStore } from "./auth.store"
+export { useCategoriesStore } from "./categories.store"
+export { useRepositoryStore } from "./repository.store"
+export { useUsersStore } from "./users.store"
diff --git a/lib/tool_shed/webapp/frontend/src/stores/repository.store.ts b/lib/tool_shed/webapp/frontend/src/stores/repository.store.ts
new file mode 100644
index 000000000000..7d5f7223c12c
--- /dev/null
+++ b/lib/tool_shed/webapp/frontend/src/stores/repository.store.ts
@@ -0,0 +1,111 @@
+import { defineStore } from "pinia"
+
+import { fetcher, components } from "@/schema"
+const repositoryFetcher = fetcher.path("/api/repositories/{encoded_repository_id}").method("get").create()
+const repositoryMetadataFetcher = fetcher
+    .path("/api_internal/repositories/{encoded_repository_id}/metadata")
+    .method("get")
+    .create()
+const repositoryPermissionsFetcher = fetcher
+    .path("/api/repositories/{encoded_repository_id}/permissions")
+    .method("get")
+    .create()
+const repositoryPermissionsAdder = fetcher
+    .path("/api/repositories/{encoded_repository_id}/allow_push/{username}")
+    .method("post")
+    .create()
+const repositoryPermissionsRemover = fetcher
+    .path("/api/repositories/{encoded_repository_id}/allow_push/{username}")
+    .method("delete")
+    .create()
+const repositoryInstallInfoFetcher = fetcher.path("/api/repositories/install_info").method("get").create()
+
+type DetailedRepository = components["schemas"]["DetailedRepository"]
+type InstallInfo = components["schemas"]["InstallInfo"]
+type RepositoryMetadata = components["schemas"]["RepositoryMetadata"]
+type RepositoryPermissions = components["schemas"]["RepositoryPermissions"]
+
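+// One repository is tracked at a time: setId() records the encoded id and
+// refresh() repopulates the repository details, revision metadata, install
+// info, and (when the request is authorized) push permissions. A rough usage
+// sketch from a component (illustrative only):
+//
+//     const store = useRepositoryStore()
+//     await store.setId(encodedId)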
+export const useRepositoryStore = defineStore({
+    id: "repository",
+    state: () => ({
+        repositoryId: null as string | null,
+        repository: null as DetailedRepository | null,
+        repositoryMetadata: null as RepositoryMetadata | null,
+        repositoryInstallInfo: null as InstallInfo | null,
+        repositoryPermissions: null as RepositoryPermissions | null,
+        loading: true as boolean,
+        empty: false as boolean,
+    }),
+    actions: {
+        async allowPush(username: string) {
+            if (this.repositoryId == null) {
+                throw Error("Logic problem in repository store")
+            }
+            const params = {
+                encoded_repository_id: this.repositoryId,
+                username: username,
+            }
+            await repositoryPermissionsAdder(params)
+            const { data: _repositoryPermissions } = await repositoryPermissionsFetcher(params)
+            this.repositoryPermissions = _repositoryPermissions
+        },
+        async disallowPush(username: string) {
+            if (this.repositoryId == null) {
+                throw Error("Logic problem in repository store")
+            }
+            const params = {
+                encoded_repository_id: this.repositoryId,
+                username: username,
+            }
+            await repositoryPermissionsRemover(params)
+            const { data: _repositoryPermissions } = await repositoryPermissionsFetcher(params)
+            this.repositoryPermissions = _repositoryPermissions
+        },
+        async setId(repositoryId: string) {
+            this.repositoryId = repositoryId
+            await this.refresh()
+        },
+        async refresh() {
+            if (!this.repositoryId) {
+                return
+            }
+            this.loading = true
+            const params = { encoded_repository_id: this.repositoryId }
+            const metadataParams = { encoded_repository_id: this.repositoryId, downloadable_only: false }
+            const [{ data: repository }, { data: repositoryMetadata }] = await Promise.all([
+                repositoryFetcher(params),
+                repositoryMetadataFetcher(metadataParams),
+            ])
+            this.repository = repository
+            this.repositoryMetadata = repositoryMetadata
+            let repositoryPermissions = {
+                can_manage: false,
+                can_push: false,
+                allow_push: [] as string[],
+            }
+            try {
+                const { data: _repositoryPermissions } = await repositoryPermissionsFetcher(params)
+                repositoryPermissions = _repositoryPermissions
+                this.repositoryPermissions = repositoryPermissions
+            } catch (e) {
+                // Anonymous or unauthorized users cannot read permissions; keep the defaults above.
+            }
+            const latestMetadata = Object.values(repositoryMetadata)[0]
+            if (!latestMetadata) {
+                this.empty = true
+            } else {
+                if (this.empty) {
+                    this.empty = false
+                }
+                const installParams = {
+                    name: repository.name,
+                    owner: repository.owner,
+                    changeset_revision: latestMetadata.changeset_revision,
+                }
+                const { data: repositoryInstallInfo } = await repositoryInstallInfoFetcher(installParams)
+                this.repositoryInstallInfo = repositoryInstallInfo
+            }
+            this.loading = false
+        },
+    },
+})
diff --git a/lib/tool_shed/webapp/frontend/src/stores/users.store.ts b/lib/tool_shed/webapp/frontend/src/stores/users.store.ts
new file mode 100644
index 000000000000..13cb403f801f
--- /dev/null
+++ b/lib/tool_shed/webapp/frontend/src/stores/users.store.ts
@@ -0,0 +1,22 @@
+import { defineStore } from "pinia"
+
+import { fetcher, components } from "@/schema"
+const usersFetcher = fetcher.path("/api/users").method("get").create()
+
+type User = components["schemas"]["User"]
+
+export const useUsersStore = defineStore({
+    id: "users",
+    state: () => ({
+        users: [] as User[],
+        loading: true,
+    }),
+    actions: {
+        async getAll() {
+            this.loading = true
+            const { data: users } = await usersFetcher({})
+            this.users = users
+            this.loading = false
+        },
+    },
+})
diff --git a/lib/tool_shed/webapp/frontend/src/util.ts b/lib/tool_shed/webapp/frontend/src/util.ts
new file mode 100644
index 000000000000..fec1486bd948
--- /dev/null
+++ b/lib/tool_shed/webapp/frontend/src/util.ts
@@ -0,0 +1,48 @@
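+// Small shared helpers for the Tool Shed frontend: cookie access, Quasar
+// notifications, clipboard support, and unwrapping API error payloads.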
+import { copyToClipboard, Notify, Cookies } from "quasar"
+import type { QNotifyCreateOptions } from "quasar"
+import { type LocationQueryValue } from "vue-router"
+import { ApiError } from "openapi-typescript-fetch"
+
+export function getCookie(name: string): string | null {
+    return Cookies.get(name)
+}
+
+export function ensureCookie(name: string): string {
+    const cookie = getCookie(name)
+    if (cookie == null) {
+        notify("An important cookie was not set by the tool shed server, this may result in serious problems.")
+        throw Error(`Cookie ${name} not set`)
+    }
+    return cookie
+}
+
+export function notify(notification: string, type: string | null = null) {
+    const opts: QNotifyCreateOptions = {
+        message: notification,
+    }
+    if (type) {
+        opts.type = type
+    }
+    Notify.create(opts)
+}
+
+export async function copyAndNotify(value: string, notification: string) {
+    await copyToClipboard(value)
+    notify(notification)
+}
+
+export function errorMessage(e: Error): string {
+    if (e instanceof ApiError) {
+        return e.data.err_msg
+    } else {
+        return JSON.stringify(e)
+    }
+}
+
+export function queryParamToString(param: LocationQueryValue | LocationQueryValue[]): string | null {
+    return Array.isArray(param) ? param[0] : param
+}
+
+export function notifyOnCatch(e: Error) {
+    notify(errorMessage(e))
+}
diff --git a/lib/tool_shed/webapp/frontend/src/vite-env.d.ts b/lib/tool_shed/webapp/frontend/src/vite-env.d.ts
new file mode 100644
index 000000000000..11f02fe2a006
--- /dev/null
+++ b/lib/tool_shed/webapp/frontend/src/vite-env.d.ts
@@ -0,0 +1 @@
+/// <reference types="vite/client" />
diff --git a/lib/tool_shed/webapp/frontend/static/favicon.ico b/lib/tool_shed/webapp/frontend/static/favicon.ico
new file mode 100644
index 0000000000000000000000000000000000000000..cf52fdcad290c89a022d9d05ecb5a569dfcfac77
GIT binary patch
literal 15086
zcmeI(32ak!9Ki8^w{Gi%9k2so;0lZ#og5>CA%{ae5Qq@j+BcYhQHcc61Qah$BO;PW
zG@>RZa)=lr3kDCAV=zPngA0cTK@<>C5=28Z98;ive?3T7`r55;Wgzb*pZ(wQzxw~b
zCI?
zSW|1sU%kodQC(xruda#ri6ayip;r&brro7jNNWKnCK2^KsPdDaUzS zz;S$qYP^ES;leHGh=efdC68m+fz?=qX>g(+a^OktEHAfIK=LZySl7i-vwMn%)LG(h z-aCwNAbKGWH=;G-##UBlU?Emw#aK;yr^@ME>fv%Ot8zIPfgTOLkj9Q5;nwjh`}0Dl zR*GXr)2EKnv`LtW89`*RT%gQ!>_yDN{MrGkn1JebJ_Ou#97LAnh)_j`-ms^89N3; z?oXaYx3|5A&A(;~glYlD#=8DRpY58CCw!(7PQgS>KqW?F1n$7CC`Abz$U`TzLpqX+ z)_>jLei5o%|32==ufZMQAJ$Le5O!lLKEk_r0}JpB9>F+_#9)-5E3%O2&p%Q~+}9CU z{yDZ!;2`R-1#7Vai!mQhV>0f;Jt%{}{7lx(@*_38?>KE0-2T%4e?a`8A8KizMVK!teGYN9ujmA*O7(@C#-%+zy z#wY2xY>cB1Q+gWH{$faKHli3QvD+lsgBvpKfxOY%u1 zOn}uVo-od*!8ng+UjlB&7*t+H(=^6EdMY_Kyh?K=>(xKfe#&cy?;&G?3r%#9_`g9O zc~LE-?SO*_9FsI<{RC-?qbi(4q-}qlm*3LuFJr||84v%A!%cLA_&Z2%H_-nUS4Bkz z-&Z;!r->v@cBRV|XQX^SLp$U~i}E`+plO}SE449oqU|yv-vN%{Xfu)TS5Gyve52=| z-o0W`Bg;2>{a@?lw{-i<^XD7Lv*+JtBG07-jp=``6DtN{6h<}!$!8ShcWq4hW}NG{ z1>B*Qx$$myu-e>9F>6(sI~0F}T4mw5x{UvLU?_t&rbyNOxHM0pJ1a&cb1_@MYyqaCi7U@VqDvA7p^ zqYQn~9eL=0Hn35CgSm>PbUSI=Lgt48=k8?vBF;eOkbc1rka+S=sRke8eJsbDkatY; zFauK|^E>0=#0U(5ynE`2Zs-UpI~lR4zq~^`4VeQyiUW}Pe+j9-%!Nt)Wo}C9|1zZh zGAAbWe;88#3P}BLgVetV%=-IIGTK7#{4v|#JbxnIio%>f{`-$yzjFVH_WF(W{U?>W z$-9;Fs1ILc0_XUQYzY$Nc7K*^!LK<^@onk`MGBuYg>St!8CSgt_1*>ovKQ*&MPiI2@f z-ZEFUA9{0sM)&_o-X*cAz2{wrS^w~TXSRQslg~W=BVPCB`4j&9$>JQ6xqhSfNxk$Z z$-76GDi}IM-d3Kw<(ktu3p1P9Eb@McGFls|q?!1tZ2|gLe(N!TG2DF2yTV?e?3u7f z+d!nv#&^)W{$jmf-s@EvsDG@-KaoDa)PMXnUjK1s{af_>H`~8u??2ab`?u!)CBNyH zXMl|3uC!g0IVjrukECPF)`~fftd_v|t_{!p|K+@tkv=ignk`_qz}4FV-ro$#|2N6f vgz&uYf1(KINV;f4opO&Tl`Hiz8hWx1@wraXC1u|HDaccfp-``!Nc;Z-0h#g+ literal 0 HcmV?d00001 diff --git a/lib/tool_shed/webapp/frontend/tsconfig.json b/lib/tool_shed/webapp/frontend/tsconfig.json new file mode 100644 index 000000000000..7dad262e0c00 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/tsconfig.json @@ -0,0 +1,22 @@ +{ + "compilerOptions": { + "target": "esnext", + "module": "esnext", + "strict": true, + "jsx": "preserve", + "moduleResolution": "node", + "skipLibCheck": true, + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "forceConsistentCasingInFileNames": true, + "useDefineForClassFields": true, + "sourceMap": true, + "baseUrl": ".", + "paths": { + "@/*": ["src/*"] + }, + "lib": ["esnext", "dom", "dom.iterable", "scripthost"] + }, + "include": ["src/**/*.ts", "src/**/*.tsx", "src/**/*.vue", "tests/**/*.ts", "tests/**/*.tsx"], + "exclude": ["node_modules"] +} diff --git a/lib/tool_shed/webapp/frontend/vite.config.ts b/lib/tool_shed/webapp/frontend/vite.config.ts new file mode 100644 index 000000000000..ccf3705b8553 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/vite.config.ts @@ -0,0 +1,23 @@ +import { fileURLToPath } from 'url' +import { defineConfig } from 'vite' +import vue from '@vitejs/plugin-vue' +import { quasar, transformAssetUrls } from '@quasar/vite-plugin' + +// https://vitejs.dev/config/ +export default defineConfig({ + plugins: [ + vue({ + template: { transformAssetUrls }, + }), + + quasar({ + sassVariables: 'src/quasar-variables.sass', + }), + ], + build: {}, + resolve: { + alias: { + '@': fileURLToPath(new URL('./src', import.meta.url)), + }, + }, +}) diff --git a/lib/tool_shed/webapp/graphql-schema.json b/lib/tool_shed/webapp/graphql-schema.json new file mode 100644 index 000000000000..d51019220a41 --- /dev/null +++ b/lib/tool_shed/webapp/graphql-schema.json @@ -0,0 +1,2990 @@ +{ + "__schema": { + "queryType": { + "name": "Query" + }, + "mutationType": null, + "subscriptionType": null, + "types": [ + { + "kind": "OBJECT", + "name": "Query", + "description": null, + "fields": [ + { + "name": "users", + "description": null, + "args": [], + "type": { + "kind": "LIST", 
+ "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SimpleUser", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "repositories", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SimpleRepository", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "categories", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SimpleCategory", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "revisions", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SimpleRepositoryMetadata", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "node", + "description": null, + "args": [ + { + "name": "id", + "description": "The ID of the object", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "INTERFACE", + "name": "Node", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "relayUsers", + "description": null, + "args": [ + { + "name": "sort", + "description": null, + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "RelayUserSortEnum", + "ofType": null + } + }, + "defaultValue": "[ID_ASC]" + }, + { + "name": "before", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "after", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "first", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "last", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "RelayUserConnection", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "relayRepositoriesForCategory", + "description": null, + "args": [ + { + "name": "sort", + "description": null, + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "RelayRepositorySortEnum", + "ofType": null + } + }, + "defaultValue": "[ID_ASC]" + }, + { + "name": "before", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "after", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "first", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "last", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "RelayRepositoryConnection", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "relayRepositories", + "description": null, + "args": [ + { + "name": "sort", + "description": null, + "type": { + "kind": "LIST", + 
"name": null, + "ofType": { + "kind": "ENUM", + "name": "RelayRepositorySortEnum", + "ofType": null + } + }, + "defaultValue": "[ID_ASC]" + }, + { + "name": "before", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "after", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "first", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "last", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "RelayRepositoryConnection", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "relayCategories", + "description": null, + "args": [ + { + "name": "sort", + "description": null, + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "RelayCategorySortEnum", + "ofType": null + } + }, + "defaultValue": "[ID_ASC]" + }, + { + "name": "before", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "after", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "first", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "last", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "RelayCategoryConnection", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "relayRevisions", + "description": null, + "args": [ + { + "name": "sort", + "description": null, + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "RelayRepositoryMetadataSortEnum", + "ofType": null + } + }, + "defaultValue": "[ID_ASC]" + }, + { + "name": "before", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "after", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "first", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "last", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "RelayRepositoryMetadataConnection", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "SimpleUser", + "description": null, + "fields": [ + { + "name": "id", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "username", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + 
"deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "SCALAR", + "name": "ID", + "description": "The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as `\"4\"`) or integer (such as `4`) input value will be accepted as an ID.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "SCALAR", + "name": "String", + "description": "The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "SimpleRepository", + "description": null, + "fields": [ + { + "name": "id", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "createTime", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "updateTime", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "name", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "type", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "remoteRepositoryUrl", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "homepageUrl", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "description", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "longDescription", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "categories", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SimpleCategory", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "user", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SimpleUser", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "metadataRevisions", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { 
+ "kind": "OBJECT", + "name": "SimpleRepositoryMetadata", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "downloadableRevisions", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SimpleRepositoryMetadata", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "SCALAR", + "name": "DateTime", + "description": "The `DateTime` scalar type represents a DateTime\nvalue as specified by\n[iso8601](https://en.wikipedia.org/wiki/ISO_8601).", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "SimpleCategory", + "description": null, + "fields": [ + { + "name": "id", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "createTime", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "updateTime", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "name", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "description", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "deleted", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "repositories", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SimpleRepository", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "SCALAR", + "name": "Boolean", + "description": "The `Boolean` scalar type represents `true` or `false`.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "SimpleRepositoryMetadata", + "description": null, + "fields": [ + { + "name": "id", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "createTime", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "updateTime", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "repository", + "description": null, + "args": [], + "type": { + "kind": 
"NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SimpleRepository", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "changesetRevision", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "numericRevision", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "malicious", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "downloadable", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "SCALAR", + "name": "Int", + "description": "The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "INTERFACE", + "name": "Node", + "description": "An object with an ID", + "fields": [ + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": [ + { + "kind": "OBJECT", + "name": "RelayUser", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "RelayRepository", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "RelayCategory", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "RelayRepositoryMetadata", + "ofType": null + } + ] + }, + { + "kind": "ENUM", + "name": "RelayUserSortEnum", + "description": "An enumeration.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "ID_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ID_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "USERNAME_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "USERNAME_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RelayUserConnection", + "description": null, + "fields": [ + { + "name": "pageInfo", + "description": "Pagination data for this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "PageInfo", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "edges", + "description": "Contains the nodes in this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "RelayUserEdge", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + 
"enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "PageInfo", + "description": "The Relay compliant `PageInfo` type, containing data necessary to paginate this connection.", + "fields": [ + { + "name": "hasNextPage", + "description": "When paginating forwards, are there more items?", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "hasPreviousPage", + "description": "When paginating backwards, are there more items?", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "startCursor", + "description": "When paginating backwards, the cursor to continue.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "endCursor", + "description": "When paginating forwards, the cursor to continue.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RelayUserEdge", + "description": "A Relay edge containing a `RelayUser` and its cursor.", + "fields": [ + { + "name": "node", + "description": "The item at the end of the edge", + "args": [], + "type": { + "kind": "OBJECT", + "name": "RelayUser", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cursor", + "description": "A cursor for use in pagination", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RelayUser", + "description": null, + "fields": [ + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "username", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "Node", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "RelayRepositorySortEnum", + "description": "An enumeration.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "ID_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ID_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "CREATE_TIME_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "CREATE_TIME_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "UPDATE_TIME_ASC", + "description": null, + "isDeprecated": false, + 
"deprecationReason": null + }, + { + "name": "UPDATE_TIME_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "NAME_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "NAME_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "TYPE_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "TYPE_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "REMOTE_REPOSITORY_URL_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "REMOTE_REPOSITORY_URL_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "HOMEPAGE_URL_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "HOMEPAGE_URL_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "DESCRIPTION_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "DESCRIPTION_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "LONG_DESCRIPTION_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "LONG_DESCRIPTION_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RelayRepositoryConnection", + "description": null, + "fields": [ + { + "name": "pageInfo", + "description": "Pagination data for this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "PageInfo", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "edges", + "description": "Contains the nodes in this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "RelayRepositoryEdge", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RelayRepositoryEdge", + "description": "A Relay edge containing a `RelayRepository` and its cursor.", + "fields": [ + { + "name": "node", + "description": "The item at the end of the edge", + "args": [], + "type": { + "kind": "OBJECT", + "name": "RelayRepository", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cursor", + "description": "A cursor for use in pagination", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RelayRepository", + "description": null, + "fields": [ + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "createTime", + "description": null, + 
"args": [], + "type": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "updateTime", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "name", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "type", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "remoteRepositoryUrl", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "homepageUrl", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "description", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "longDescription", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "categories", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SimpleCategory", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "user", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SimpleUser", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "Node", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "RelayCategorySortEnum", + "description": "An enumeration.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "ID_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ID_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "CREATE_TIME_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "CREATE_TIME_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "UPDATE_TIME_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "UPDATE_TIME_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "NAME_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "NAME_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "DESCRIPTION_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "DESCRIPTION_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "DELETED_ASC", + "description": null, + 
"isDeprecated": false, + "deprecationReason": null + }, + { + "name": "DELETED_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RelayCategoryConnection", + "description": null, + "fields": [ + { + "name": "pageInfo", + "description": "Pagination data for this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "PageInfo", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "edges", + "description": "Contains the nodes in this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "RelayCategoryEdge", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RelayCategoryEdge", + "description": "A Relay edge containing a `RelayCategory` and its cursor.", + "fields": [ + { + "name": "node", + "description": "The item at the end of the edge", + "args": [], + "type": { + "kind": "OBJECT", + "name": "RelayCategory", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cursor", + "description": "A cursor for use in pagination", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RelayCategory", + "description": null, + "fields": [ + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "createTime", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "updateTime", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "name", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "description", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "deleted", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "repositories", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SimpleRepository", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "Node", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": 
"ENUM", + "name": "RelayRepositoryMetadataSortEnum", + "description": "An enumeration.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "ID_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ID_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RelayRepositoryMetadataConnection", + "description": null, + "fields": [ + { + "name": "pageInfo", + "description": "Pagination data for this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "PageInfo", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "edges", + "description": "Contains the nodes in this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "RelayRepositoryMetadataEdge", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RelayRepositoryMetadataEdge", + "description": "A Relay edge containing a `RelayRepositoryMetadata` and its cursor.", + "fields": [ + { + "name": "node", + "description": "The item at the end of the edge", + "args": [], + "type": { + "kind": "OBJECT", + "name": "RelayRepositoryMetadata", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cursor", + "description": "A cursor for use in pagination", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RelayRepositoryMetadata", + "description": null, + "fields": [ + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "createTime", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "updateTime", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "repository", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SimpleRepository", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "changesetRevision", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "numericRevision", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "malicious", + "description": null, + "args": [], + "type": { + "kind": 
"SCALAR", + "name": "Boolean", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "downloadable", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "Node", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "__Schema", + "description": "A GraphQL Schema defines the capabilities of a GraphQL server. It exposes all available types and directives on the server, as well as the entry points for query, mutation, and subscription operations.", + "fields": [ + { + "name": "types", + "description": "A list of all types supported by this server.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "queryType", + "description": "The type that query operations will be rooted at.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "mutationType", + "description": "If this server supports mutation, the type that mutation operations will be rooted at.", + "args": [], + "type": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "subscriptionType", + "description": "If this server support subscription, the type that subscription operations will be rooted at.", + "args": [], + "type": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "directives", + "description": "A list of all directives supported by this server.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Directive", + "ofType": null + } + } + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "__Type", + "description": "The fundamental unit of any GraphQL Schema is the type. There are many kinds of types in GraphQL as represented by the `__TypeKind` enum.\n\nDepending on the kind of a type, certain fields describe information about that type. Scalar types provide no information beyond a name and description, while Enum types provide their values. Object and Interface types provide the fields they describe. Abstract types, Union and Interface, provide the Object types possible at runtime. 
List and NonNull types compose other types.", + "fields": [ + { + "name": "kind", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "__TypeKind", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "name", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "description", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "fields", + "description": null, + "args": [ + { + "name": "includeDeprecated", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "defaultValue": "false" + } + ], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Field", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "interfaces", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "possibleTypes", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "enumValues", + "description": null, + "args": [ + { + "name": "includeDeprecated", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "defaultValue": "false" + } + ], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__EnumValue", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "inputFields", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__InputValue", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ofType", + "description": null, + "args": [], + "type": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "__TypeKind", + "description": "An enum describing what kind of type a given `__Type` is.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "SCALAR", + "description": "Indicates this type is a scalar.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "OBJECT", + "description": "Indicates this type is an object. `fields` and `interfaces` are valid fields.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "INTERFACE", + "description": "Indicates this type is an interface. 
`fields` and `possibleTypes` are valid fields.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "UNION", + "description": "Indicates this type is a union. `possibleTypes` is a valid field.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ENUM", + "description": "Indicates this type is an enum. `enumValues` is a valid field.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "INPUT_OBJECT", + "description": "Indicates this type is an input object. `inputFields` is a valid field.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "LIST", + "description": "Indicates this type is a list. `ofType` is a valid field.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "NON_NULL", + "description": "Indicates this type is a non-null. `ofType` is a valid field.", + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "__Field", + "description": "Object and Interface types are described by a list of Fields, each of which has a name, potentially a list of arguments, and a return type.", + "fields": [ + { + "name": "name", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "description", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "args", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__InputValue", + "ofType": null + } + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "type", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "isDeprecated", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "deprecationReason", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "__InputValue", + "description": "Arguments provided to Fields or Directives and the input fields of an InputObject are represented as Input Values which describe their type and optionally a default value.", + "fields": [ + { + "name": "name", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "description", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "type", + "description": null, + 
"args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "defaultValue", + "description": "A GraphQL-formatted string representing the default value for this input value.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "__EnumValue", + "description": "One possible value for a given Enum. Enum values are unique values, not a placeholder for a string or numeric value. However an Enum value is returned in a JSON response as a string.", + "fields": [ + { + "name": "name", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "description", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "isDeprecated", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "deprecationReason", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "__Directive", + "description": "A Directive provides a way to describe alternate runtime execution and type validation behavior in a GraphQL document.\n\nIn some cases, you need to provide options to alter GraphQL's execution behavior in ways field arguments will not suffice, such as conditionally including or skipping a field. 
Directives provide this by describing additional information to the executor.", + "fields": [ + { + "name": "name", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "description", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "locations", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "__DirectiveLocation", + "ofType": null + } + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "args", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__InputValue", + "ofType": null + } + } + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "__DirectiveLocation", + "description": "A Directive can be adjacent to many parts of the GraphQL language, a __DirectiveLocation describes one such possible adjacencies.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "QUERY", + "description": "Location adjacent to a query operation.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "MUTATION", + "description": "Location adjacent to a mutation operation.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "SUBSCRIPTION", + "description": "Location adjacent to a subscription operation.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "FIELD", + "description": "Location adjacent to a field.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "FRAGMENT_DEFINITION", + "description": "Location adjacent to a fragment definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "FRAGMENT_SPREAD", + "description": "Location adjacent to a fragment spread.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "INLINE_FRAGMENT", + "description": "Location adjacent to an inline fragment.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "VARIABLE_DEFINITION", + "description": "Location adjacent to a variable definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "SCHEMA", + "description": "Location adjacent to a schema definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "SCALAR", + "description": "Location adjacent to a scalar definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "OBJECT", + "description": "Location adjacent to an object type definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "FIELD_DEFINITION", + "description": "Location adjacent to a field definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ARGUMENT_DEFINITION", + "description": "Location adjacent to an argument definition.", + 
"isDeprecated": false, + "deprecationReason": null + }, + { + "name": "INTERFACE", + "description": "Location adjacent to an interface definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "UNION", + "description": "Location adjacent to a union definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ENUM", + "description": "Location adjacent to an enum definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ENUM_VALUE", + "description": "Location adjacent to an enum value definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "INPUT_OBJECT", + "description": "Location adjacent to an input object type definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "INPUT_FIELD_DEFINITION", + "description": "Location adjacent to an input object field definition.", + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + } + ], + "directives": [ + { + "name": "include", + "description": "Directs the executor to include this field or fragment only when the `if` argument is true.", + "locations": [ + "FIELD", + "FRAGMENT_SPREAD", + "INLINE_FRAGMENT" + ], + "args": [ + { + "name": "if", + "description": "Included when true.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "defaultValue": null + } + ] + }, + { + "name": "skip", + "description": "Directs the executor to skip this field or fragment when the `if` argument is true.", + "locations": [ + "FIELD", + "FRAGMENT_SPREAD", + "INLINE_FRAGMENT" + ], + "args": [ + { + "name": "if", + "description": "Skipped when true.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "defaultValue": null + } + ] + }, + { + "name": "deprecated", + "description": "Marks an element of a GraphQL schema as no longer supported.", + "locations": [ + "FIELD_DEFINITION", + "ARGUMENT_DEFINITION", + "INPUT_FIELD_DEFINITION", + "ENUM_VALUE" + ], + "args": [ + { + "name": "reason", + "description": "Explains why this element was deprecated, usually also including a suggestion for how to access supported similar data. 
Formatted using the Markdown syntax, as specified by [CommonMark](https://commonmark.org/).",
+            "type": {
+              "kind": "SCALAR",
+              "name": "String",
+              "ofType": null
+            },
+            "defaultValue": "\"No longer supported\""
+          }
+        ]
+      },
+      {
+        "name": "specifiedBy",
+        "description": "Exposes a URL that specifies the behaviour of this scalar.",
+        "locations": [
+          "SCALAR"
+        ],
+        "args": [
+          {
+            "name": "url",
+            "description": "The URL that specifies the behaviour of this scalar.",
+            "type": {
+              "kind": "NON_NULL",
+              "name": null,
+              "ofType": {
+                "kind": "SCALAR",
+                "name": "String",
+                "ofType": null
+              }
+            },
+            "defaultValue": null
+          }
+        ]
+      }
+    ]
+  }
+}
\ No newline at end of file
diff --git a/lib/tool_shed/webapp/graphql/__init__.py b/lib/tool_shed/webapp/graphql/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/lib/tool_shed/webapp/graphql/schema.py b/lib/tool_shed/webapp/graphql/schema.py
new file mode 100644
index 000000000000..af07fb37ebc9
--- /dev/null
+++ b/lib/tool_shed/webapp/graphql/schema.py
@@ -0,0 +1,244 @@
+import graphene
+from graphene import relay
+from graphene_sqlalchemy import (
+    SQLAlchemyConnectionField,
+    SQLAlchemyObjectType,
+)
+from graphene_sqlalchemy.converter import (
+    convert_sqlalchemy_hybrid_property_type,
+    convert_sqlalchemy_type,
+)
+from graphql import GraphQLResolveInfo
+from sqlalchemy.orm import scoped_session
+from typing_extensions import TypedDict
+
+from galaxy.model.custom_types import TrimmedString
+from galaxy.security.idencoding import IdEncodingHelper
+from tool_shed.webapp.model import (
+    Category as SaCategory,
+    Repository as SaRepository,
+    RepositoryCategoryAssociation,
+    RepositoryMetadata as SaRepositoryMetadata,
+    User as SaUser,
+)
+
+USER_FIELDS = (
+    "id",
+    "username",
+)
+
+CATEGORY_FIELDS = (
+    "id",
+    "create_time",
+    "update_time",
+    "name",
+    "description",
+    "deleted",
+)
+
+REPOSITORY_FIELDS = (
+    "id",
+    "create_time",
+    "update_time",
+    "name",
+    "type",
+    "remote_repository_url",
+    "homepage_url",
+    "description",
+    "long_description",
+)
+
+REPOSITORY_METADATA_FIELDS = (
+    "id",
+    "create_time",
+    "update_time",
+    "changeset_revision",
+    "numeric_revision",
+    "metadata",
+    "tool_versions",
+    "malicious",
+    "downloadable",
+)
+
+
+class InfoDict(TypedDict):
+    session: scoped_session
+    security: IdEncodingHelper
+
+
+# Map these Galaxy-isms to Graphene for cleaner interfaces.
+@convert_sqlalchemy_type.register(TrimmedString)
+def convert_sqlalchemy_type_trimmed_string(*args, **kwd):
+    return graphene.String
+
+
+@convert_sqlalchemy_hybrid_property_type.register(lambda t: t == TrimmedString)
+def convert_sqlalchemy_hybrid_property_type_trimmed_string(arg):
+    return graphene.String
+
+
+class HasIdMixin:
+    id = graphene.NonNull(graphene.ID)
+    encoded_id = graphene.NonNull(graphene.String)
+
+    def resolve_encoded_id(self: SQLAlchemyObjectType, info):
+        return info.context["security"].encode_id(self.id)
+
+
+class UserMixin(HasIdMixin):
+    username = graphene.NonNull(graphene.String)
+
+
+class RelayUser(SQLAlchemyObjectType, UserMixin):
+    class Meta:
+        model = SaUser
+        only_fields = USER_FIELDS
+        interfaces = (relay.Node,)
+
+
+class SimpleUser(SQLAlchemyObjectType, UserMixin):
+    class Meta:
+        model = SaUser
+        only_fields = USER_FIELDS
+
+
+class CategoryQueryMixin(HasIdMixin):
+    name = graphene.NonNull(graphene.String)
+    repositories = graphene.List(lambda: SimpleRepository)
+
+    def resolve_repositories(self, info: InfoDict):
+        return [a.repository for a in self.repositories]
+
+
+class SimpleCategory(SQLAlchemyObjectType, CategoryQueryMixin):
+    class Meta:
+        model = SaCategory
+        only_fields = CATEGORY_FIELDS
+
+
+class RelayCategory(SQLAlchemyObjectType, CategoryQueryMixin):
+    class Meta:
+        model = SaCategory
+        only_fields = CATEGORY_FIELDS
+        interfaces = (relay.Node,)
+
+
+class RepositoryMixin(HasIdMixin):
+    name = graphene.NonNull(graphene.String)
+
+
+class RelayRepository(SQLAlchemyObjectType, RepositoryMixin):
+    class Meta:
+        model = SaRepository
+        only_fields = REPOSITORY_FIELDS
+        interfaces = (relay.Node,)
+
+    categories = graphene.List(SimpleCategory)
+    user = graphene.NonNull(SimpleUser)
+
+
+class RevisionQueryMixin(HasIdMixin):
+    # I think because this model is imperatively mapped, the fields are not
+    # auto-populated here, so we need to be a bit more explicit.
+    create_time = graphene.DateTime()
+    update_time = graphene.DateTime()
+    repository = graphene.NonNull(lambda: SimpleRepository)
+    changeset_revision = graphene.NonNull(graphene.String)
+    numeric_revision = graphene.Int()
+    malicious = graphene.Boolean()
+    downloadable = graphene.Boolean()
+
+
+class SimpleRepositoryMetadata(SQLAlchemyObjectType, RevisionQueryMixin):
+    class Meta:
+        model = SaRepositoryMetadata
+        only_fields = REPOSITORY_METADATA_FIELDS
+
+
+class SimpleRepository(SQLAlchemyObjectType, RepositoryMixin):
+    class Meta:
+        model = SaRepository
+        only_fields = REPOSITORY_FIELDS
+
+    categories = graphene.List(SimpleCategory)
+    user = graphene.NonNull(SimpleUser)
+    metadata_revisions = graphene.List(lambda: SimpleRepositoryMetadata)
+    downloadable_revisions = graphene.List(lambda: SimpleRepositoryMetadata)
+
+
+class RelayRepositoryMetadata(SQLAlchemyObjectType, RevisionQueryMixin):
+    class Meta:
+        model = SaRepositoryMetadata
+        only_fields = REPOSITORY_METADATA_FIELDS
+        interfaces = (relay.Node,)
+
+
+class RepositoriesForCategoryField(SQLAlchemyConnectionField):
+    def __init__(self):
+        super().__init__(RelayRepository.connection, id=graphene.Int(), encoded_id=graphene.String())
+
+    @classmethod
+    def get_query(cls, model, info: GraphQLResolveInfo, sort=None, **args):
+        repository_query = super().get_query(model, info, sort=sort, **args)
+        context: InfoDict = info.root_value
+        query_id = args.get("id")
+        if not query_id:
+            encoded_id = args.get("encoded_id")
+            assert encoded_id, f"Invalid encodedId found {encoded_id} in args {args}"
+            query_id =
context["security"].decode_id(encoded_id) + if query_id: + rval = repository_query.join( + RepositoryCategoryAssociation, + SaRepository.id == RepositoryCategoryAssociation.repository_id, + ).filter(RepositoryCategoryAssociation.category_id == query_id) + return rval + else: + return repository_query + + +class RepositoriesForOwnerField(SQLAlchemyConnectionField): + def __init__(self): + super().__init__(RelayRepository.connection, username=graphene.String()) + + @classmethod + def get_query(cls, model, info: GraphQLResolveInfo, sort=None, **args): + repository_query = super().get_query(model, info, sort=sort, **args) + username = args.get("username") + rval = repository_query.join( + SaUser, + ).filter(SaUser.username == username) + return rval + + +class Query(graphene.ObjectType): + users = graphene.List(SimpleUser) + repositories = graphene.List(SimpleRepository) + categories = graphene.List(SimpleCategory) + revisions = graphene.List(SimpleRepositoryMetadata) + + node = relay.Node.Field() + relay_users = SQLAlchemyConnectionField(RelayUser.connection) + relay_repositories_for_category = RepositoriesForCategoryField() + relay_repositories_for_owner = RepositoriesForOwnerField() + relay_repositories = SQLAlchemyConnectionField(RelayRepository.connection) + relay_categories = SQLAlchemyConnectionField(RelayCategory.connection) + relay_revisions = SQLAlchemyConnectionField(RelayRepositoryMetadata.connection) + + def resolve_users(self, info: InfoDict): + query = SimpleUser.get_query(info) + return query.all() + + def resolve_repositories(self, info: InfoDict): + query = SimpleRepository.get_query(info) + return query.all() + + def resolve_categories(self, info: InfoDict): + query = SimpleCategory.get_query(info) + return query.all() + + def resolve_revisions(self, info: InfoDict): + query = SimpleRepositoryMetadata.get_query(info) + return query.all() + + +schema = graphene.Schema(query=Query, types=[SimpleCategory]) diff --git a/lib/tool_shed_client/schema/__init__.py b/lib/tool_shed_client/schema/__init__.py index 91dfd767170c..2e7e1d7a9014 100644 --- a/lib/tool_shed_client/schema/__init__.py +++ b/lib/tool_shed_client/schema/__init__.py @@ -131,7 +131,19 @@ def is_ok(self): class RepositoryTool(BaseModel): - pass + # Added back in post v2 in order for the frontend to render + # tool descriptions on the repository page. 
+ description: str + guid: str + id: str + name: str + requirements: list + tool_config: str + tool_type: str + version: str + # add_to_tool_panel: bool + # tests: list + # version_string_cmd: Optional[str] class RepositoryRevisionMetadata(BaseModel): @@ -139,6 +151,7 @@ class RepositoryRevisionMetadata(BaseModel): repository: Repository repository_dependencies: List["RepositoryDependency"] tools: Optional[List["RepositoryTool"]] + invalid_tools: List[str] # added for rendering list of invalid tools in 2.0 frontend repository_id: str numeric_revision: int changeset_revision: str @@ -240,7 +253,10 @@ class RepositoryIndexRequest(BaseModel): deleted: str = "false" -class RepositoriesByCategory(Category): +class RepositoriesByCategory(BaseModel): + id: str + name: str + description: str repository_count: int repositories: List[Repository] @@ -424,7 +440,7 @@ def from_legacy_dict(as_dict: RepositoryMetadataInstallInfoDict) -> "RepositoryM malicious=as_dict["malicious"], repository_id=as_dict["repository_id"], url=as_dict["url"], - valid_tools=ValidTool.from_legacy_list(as_dict["valid_tools"]), + valid_tools=ValidTool.from_legacy_list(as_dict.get("valid_tools", [])), ) diff --git a/packages/test_driver/setup.cfg b/packages/test_driver/setup.cfg index 699fd1706699..cd64afd37a8c 100644 --- a/packages/test_driver/setup.cfg +++ b/packages/test_driver/setup.cfg @@ -40,6 +40,8 @@ install_requires = galaxy-util galaxy-web-apps pytest + graphene-sqlalchemy==3.0.0b3 # these are only needed by tool shed - which we've split out but the test driver loads + starlette-graphene3 packages = find: python_requires = >=3.7 diff --git a/pyproject.toml b/pyproject.toml index 19571905886c..4794d781629d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -62,6 +62,7 @@ fastapi-utils = "*" fs = "*" future = "*" galaxy_sequence_utils = "*" +graphene-sqlalchemy = "3.0.0b3" # need a beta release to be compat. 
with starlette plugin gravity = ">=1.0" gunicorn = "*" gxformat2 = "*" @@ -112,6 +113,7 @@ sqlitedict = "*" sqlparse = "*" starlette = "*" starlette-context = "*" +starlette-graphene3 = "*" svgwrite = "*" tifffile = "*" tuswsgi = "*" diff --git a/run_tool_shed.sh b/run_tool_shed.sh index ff88729db125..adff4ba9cdb6 100755 --- a/run_tool_shed.sh +++ b/run_tool_shed.sh @@ -3,6 +3,7 @@ cd "$(dirname "$0")" +export GALAXY_SKIP_CLIENT_BUILD=1 TOOL_SHED_PID=${TOOL_SHED_PID:-tool_shed_webapp.pid} TOOL_SHED_LOG=${TOOL_SHED_LOG:-tool_shed_webapp.log} PID_FILE=$TOOL_SHED_PID diff --git a/scripts/bootstrap_test_shed.py b/scripts/bootstrap_test_shed.py index 40f257ded9e4..c9ab2555cfe3 100644 --- a/scripts/bootstrap_test_shed.py +++ b/scripts/bootstrap_test_shed.py @@ -56,6 +56,7 @@ def main(argv: List[str]) -> None: {"name": "Invalid Test Tools", "description": "A contains a repository with invalid tools."} ) populator.setup_bismark_repo(category_id=category.id) + populator.setup_test_data_repo("0010", category_id=category.id) category = populator.new_category_if_needed({"name": "Test Category", "description": "A longer test description."}) mirror_main_categories(populator) @@ -65,6 +66,7 @@ def main(argv: List[str]) -> None: populator.new_user_if_needed({"email": "alice@alicesdomain.com"}) populator.new_user_if_needed({"email": "thirduser@threeis.com"}) + populator.setup_test_data_repo("column_maker_with_readme", category_id=category.id) populator.setup_column_maker_repo(prefix="bootstrap", category_id=category.id) populator.setup_column_maker_repo(prefix="bootstrap2", category_id=category.id) diff --git a/test/unit/tool_shed/_util.py b/test/unit/tool_shed/_util.py index d57c1ed39d7b..d59991bca0f1 100644 --- a/test/unit/tool_shed/_util.py +++ b/test/unit/tool_shed/_util.py @@ -7,7 +7,11 @@ mkdtemp, NamedTemporaryFile, ) -from typing import Optional +from typing import ( + Any, + Dict, + Optional, +) import tool_shed.repository_registry from galaxy.security.idencoding import IdEncodingHelper @@ -25,10 +29,12 @@ from tool_shed.util.hgweb_config import hgweb_config_manager from tool_shed.util.repository_util import create_repository from tool_shed.webapp.model import ( + Category, mapping, Repository, User, ) +from tool_shed_client.schema import CreateCategoryRequest TEST_DATA_FILES = TEST_DATA_REPO_FILES TEST_HOST = "localhost" @@ -78,9 +84,7 @@ def security_agent(self): return self.model.security_agent -def user_fixture( - app: TestToolShedApp, username: str, password: str = "testpassword", email: Optional[str] = None -) -> User: +def user_fixture(app: ToolShedApp, username: str, password: str = "testpassword", email: Optional[str] = None) -> User: email = email or f"{username}@galaxyproject.org" return create_user( app, @@ -115,10 +119,13 @@ def provides_repositories_fixture( return ProvidesRepositoriesImpl(app, user) -def repository_fixture(app: ToolShedApp, user: User, name: str) -> Repository: +def repository_fixture(app: ToolShedApp, user: User, name: str, category: Optional[Category] = None) -> Repository: type = rt_util.UNRESTRICTED description = f"test repo named {name}" long_description = f"test repo named {name} a longer description" + category_ids = [] + if category: + category_ids.append(app.security.encode_id(category.id)) repository, message = create_repository( app, name, @@ -126,7 +133,7 @@ def repository_fixture(app: ToolShedApp, user: User, name: str) -> Repository: description, long_description, user.id, - category_ids=None, + category_ids=category_ids, remote_repository_url=None, 
homepage_url=None, ) @@ -177,3 +184,10 @@ def upload_directories_to_repository( def random_name(len: int = 10) -> str: return "".join(random.choice(string.ascii_lowercase + string.digits) for _ in range(len)) + + +def create_category(provides_repositories: ProvidesRepositoriesContext, create: Dict[str, Any]) -> Category: + from tool_shed.managers.categories import CategoryManager + + request = CreateCategoryRequest(**create) + return CategoryManager(provides_repositories.app).create(provides_repositories, request) diff --git a/test/unit/tool_shed/test_graphql.py b/test/unit/tool_shed/test_graphql.py new file mode 100644 index 000000000000..111dbf6fccd8 --- /dev/null +++ b/test/unit/tool_shed/test_graphql.py @@ -0,0 +1,331 @@ +from typing import ( + Callable, + List, + Optional, + Tuple, +) + +from graphql.execution import ExecutionResult + +from tool_shed.context import ( + ProvidesRepositoriesContext, + ProvidesUserContext, +) +from tool_shed.webapp.graphql.schema import schema +from tool_shed.webapp.model import ( + Category, + Repository, + RepositoryCategoryAssociation, +) +from ._util import ( + create_category, + repository_fixture, + upload_directories_to_repository, + user_fixture, +) + + +def relay_query(query_name: str, params: Optional[str], node_def: str) -> str: + params_call = f"({params})" if params else "" + return f""" +query {{ + {query_name}{params_call} {{ + edges {{ + cursor + node {{ + {node_def} + }} + }} + pageInfo {{ + endCursor + hasNextPage + }} + }} +}} +""" + + +class PageInfo: + def __init__(self, result: dict): + assert "pageInfo" in result + self.info = result["pageInfo"] + + @property + def end_cursor(self) -> str: + return self.info["endCursor"] + + @property + def has_next_page(self) -> bool: + return self.info["hasNextPage"] + + +def relay_result(result: ExecutionResult) -> Tuple[list, PageInfo]: + data = result.data + assert data + data_values = data.values() + query_result = list(data_values)[0] + return query_result["edges"], PageInfo(query_result) + + +QueryExecutor = Callable[[str], ExecutionResult] + + +def query_execution_builder_for_trans(trans: ProvidesRepositoriesContext) -> QueryExecutor: + cv = context_value(trans) + + def e(query: str) -> ExecutionResult: + return schema.execute(query, context_value=cv, root_value=cv) + + return e + + +def context_value(trans: ProvidesUserContext): + return { + "session": trans.app.model.context, + "security": trans.security, + } + + +def test_simple_repositories(provides_repositories: ProvidesRepositoriesContext, new_repository: Repository): + e = query_execution_builder_for_trans(provides_repositories) + repositories_query = """ + query { + repositories { + id + encodedId + name + categories { + name + } + user { + username + } + } + } + """ + result = e(repositories_query) + _assert_no_errors(result) + repos = _assert_result_data_has_key(result, "repositories") + repository_names = [r["name"] for r in repos] + assert new_repository.name in repository_names + + +def attach_category(provides_repositories: ProvidesRepositoriesContext, repository: Repository, category: Category): + assoc = RepositoryCategoryAssociation( + repository=repository, + category=category, + ) + provides_repositories.sa_session.add(assoc) + provides_repositories.sa_session.flush() + + +def test_relay_repos_by_category(provides_repositories: ProvidesRepositoriesContext, new_repository: Repository): + name = new_repository.name + category = create_category(provides_repositories, {"name": "test_graphql_relay_categories_1"}) + user = 
provides_repositories.user + assert user + uc1 = repository_fixture(provides_repositories.app, user, "uc1") + uc2 = repository_fixture(provides_repositories.app, user, "uc2") + + other_user = user_fixture(provides_repositories.app, "otherusernamec") + ouc1 = repository_fixture(provides_repositories.app, other_user, "ouc1") + ouc2 = repository_fixture(provides_repositories.app, other_user, "ouc2") + + category_id = category.id + e = query_execution_builder_for_trans(provides_repositories) + + names = repository_names(e, "relayRepositoriesForCategory", f"id: {category_id}") + assert len(names) == 0 + + encoded_id = provides_repositories.security.encode_id(category_id) + names = repository_names(e, "relayRepositoriesForCategory", f'encodedId: "{encoded_id}"') + assert len(names) == 0 + attach_category(provides_repositories, new_repository, category) + + names = repository_names(e, "relayRepositoriesForCategory", f'encodedId: "{encoded_id}"') + assert len(names) == 1 + assert name in names + + names = repository_names(e, "relayRepositoriesForCategory", f"id: {category_id}") + assert len(names) == 1 + assert name in names + + attach_category(provides_repositories, uc1, category) + attach_category(provides_repositories, ouc1, category) + names = repository_names(e, "relayRepositoriesForCategory", f'encodedId: "{encoded_id}"') + assert len(names) == 3, names + assert "uc1" in names, names + assert "ouc1" in names, names + + category2 = create_category(provides_repositories, {"name": "test_graphql_relay_categories_2"}) + attach_category(provides_repositories, uc2, category2) + attach_category(provides_repositories, ouc2, category2) + + names = repository_names(e, "relayRepositoriesForCategory", f'encodedId: "{encoded_id}"') + assert len(names) == 3, names + assert "uc1" in names, names + assert "ouc1" in names, names + + encoded_id_2 = provides_repositories.security.encode_id(category2.id) + names = repository_names(e, "relayRepositoriesForCategory", f'encodedId: "{encoded_id_2}"') + assert len(names) == 2, names + assert "uc2" in names, names + assert "ouc2" in names, names + + +def test_simple_categories(provides_repositories: ProvidesRepositoriesContext, new_repository: Repository): + assert provides_repositories.user + + category = create_category(provides_repositories, {"name": "test_graphql"}) + e = query_execution_builder_for_trans(provides_repositories) + result = e( + """ + query { + categories { + name + encodedId + } + } +""" + ) + _assert_no_errors(result) + categories = _assert_result_data_has_key(result, "categories") + category_names = [c["name"] for c in categories] + assert "test_graphql" in category_names + encoded_id = [c["encodedId"] for c in categories if c["name"] == "test_graphql"][0] + assert encoded_id == provides_repositories.security.encode_id(category.id) + + repository_fixture(provides_repositories.app, provides_repositories.user, "foo1", category=category) + result = e( + """ + query { + categories { + name + repositories { + name + } + } + } +""" + ) + _assert_no_errors(result) + categories = _assert_result_data_has_key(result, "categories") + repositories = [c["repositories"] for c in categories if c["name"] == "test_graphql"][0] + assert repositories + repository_names = [r["name"] for r in repositories] + assert "foo1" in repository_names + + +def test_simple_revisions(provides_repositories: ProvidesRepositoriesContext, new_repository: Repository): + upload_directories_to_repository(provides_repositories, new_repository, "column_maker_with_download_gaps") + e = 
query_execution_builder_for_trans(provides_repositories)
+    # (id: "1")
+    query = """
+    query {
+        revisions {
+            id
+            encodedId
+            createTime
+            repository {
+                name
+            }
+            changesetRevision
+            numericRevision
+            downloadable
+        }
+    }
+"""
+
+    result = e(query)
+    _assert_no_errors(result)
+
+
+def test_relay(provides_repositories: ProvidesRepositoriesContext, new_repository: Repository):
+    assert provides_repositories.user
+    repository_fixture(provides_repositories.app, provides_repositories.user, "foo1")
+    repository_fixture(provides_repositories.app, provides_repositories.user, "f002")
+    repository_fixture(provides_repositories.app, provides_repositories.user, "cow")
+    repository_fixture(provides_repositories.app, provides_repositories.user, "u3")
+
+    e = query_execution_builder_for_trans(provides_repositories)
+    q1 = relay_query("relayRepositories", "sort: NAME_ASC first: 2", "encodedId, name, type, createTime")
+    result = e(q1)
+    _assert_no_errors(result)
+    edges, page_info = relay_result(result)
+    has_next_page = page_info.has_next_page
+    assert has_next_page
+
+    last_cursor = edges[-1]["cursor"]
+    q2 = relay_query("relayRepositories", f'sort: NAME_ASC first: 2 after: "{last_cursor}"', "name, type, createTime")
+    result = e(q2)
+    _assert_no_errors(result)
+    edges, page_info = relay_result(result)
+    has_next_page = page_info.has_next_page
+
+
+def test_relay_by_owner(provides_repositories: ProvidesRepositoriesContext, new_repository: Repository):
+    user = provides_repositories.user
+    assert user
+    repository_fixture(provides_repositories.app, user, "u1")
+    repository_fixture(provides_repositories.app, user, "u2")
+    repository_fixture(provides_repositories.app, user, "u3")
+    repository_fixture(provides_repositories.app, user, "u4")
+    other_user = user_fixture(provides_repositories.app, "otherusername")
+    repository_fixture(provides_repositories.app, other_user, "ou1")
+    repository_fixture(provides_repositories.app, other_user, "ou2")
+    repository_fixture(provides_repositories.app, other_user, "ou3")
+    repository_fixture(provides_repositories.app, other_user, "ou4")
+
+    e = query_execution_builder_for_trans(provides_repositories)
+    names = repository_names(e, "relayRepositoriesForOwner", f'username: "{user.username}"')
+    assert "u1" in names
+    assert "ou1" not in names
+
+    names = repository_names(e, "relayRepositoriesForOwner", 'username: "otherusername"')
+    assert "ou4" in names
+    assert "u4" not in names
+
+
+def repository_names(e: QueryExecutor, field: str, base_variables: str) -> List[str]:
+    edges = walk_relay(e, field, base_variables, "name")
+    return [edge["node"]["name"] for edge in edges]
+
+
+def walk_relay(e: QueryExecutor, field: str, base_variables: str, fragment: str):
+    variables = f"{base_variables} first: 2"
+    query = relay_query(field, variables, fragment)
+    result: ExecutionResult = e(query)
+    _assert_no_errors(result, query)
+    all_edges, page_info = relay_result(result)
+    has_next_page = page_info.has_next_page
+    while has_next_page:
+        variables = f'{base_variables} first: 2 after: "{page_info.end_cursor}"'
+        query = relay_query(field, variables, fragment)
+        result = e(query)
+        _assert_no_errors(result, query)
+        these_edges, page_info = relay_result(result)
+        if len(these_edges) == 0:
+            # I was using .options instead of .join and such with the queries
+            # and this would break. The queries are better now anyway, but be
+            # careful with new queries - there potentially are still bugs
+            # around this.
+            assert not page_info.has_next_page
+            break
+        all_edges.extend(these_edges)
+        has_next_page = page_info.has_next_page
+    return all_edges
+
+
+def _assert_result_data_has_key(result: ExecutionResult, key: str):
+    data = result.data
+    assert data
+    assert key in data
+    return data[key]
+
+
+def _assert_no_errors(result: ExecutionResult, query=None):
+    if result.errors is not None:
+        message = f"Found unexpected GraphQL errors {str(result.errors)}"
+        if query is not None:
+            message = f"{message} in query {query}"
+        raise AssertionError(message)

From 990f3dcf136ac3ce86cc14ecc3a1e53f71ee9985 Mon Sep 17 00:00:00 2001
From: John Chilton
Date: Thu, 31 Aug 2023 10:52:01 -0400
Subject: [PATCH 71/73] More typing?

---
 lib/galaxy/tool_shed/galaxy_install/client.py   | 8 ++++++--
 lib/galaxy/tool_shed/unittest_utils/__init__.py | 9 ++++++---
 lib/galaxy/tools/__init__.py                    | 8 ++++----
 lib/galaxy/tools/data_manager/manager.py        | 2 +-
 4 files changed, 17 insertions(+), 10 deletions(-)

diff --git a/lib/galaxy/tool_shed/galaxy_install/client.py b/lib/galaxy/tool_shed/galaxy_install/client.py
index c537b9d1def0..92125ba6d78d 100644
--- a/lib/galaxy/tool_shed/galaxy_install/client.py
+++ b/lib/galaxy/tool_shed/galaxy_install/client.py
@@ -1,6 +1,7 @@
 import threading
 from typing import (
     Any,
+    Dict,
     Generic,
     List,
     Optional,
@@ -15,7 +16,10 @@
 from galaxy.model.tool_shed_install import HasToolBox
 from galaxy.security.idencoding import IdEncodingHelper
 from galaxy.tool_shed.cache import ToolShedRepositoryCache
-from galaxy.tool_util.data import ToolDataTableManager
+from galaxy.tool_util.data import (
+    OutputDataset,
+    ToolDataTableManager,
+)
 from galaxy.tool_util.toolbox.base import AbstractToolBox

 if TYPE_CHECKING:
@@ -29,7 +33,7 @@ class DataManagerInterface(Protocol):
     def process_result(self, out_data):
         ...

-    def write_bundle(self, out) -> None:
+    def write_bundle(self, out: Dict[str, OutputDataset]) -> Dict[str, OutputDataset]:
         ...
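For reference, a minimal object satisfying the revised protocol might look like the following sketch (the class name here is hypothetical and not part of this patch; the DummyDataManager updated below follows the same shape):

    from typing import Dict

    from galaxy.tool_util.data import OutputDataset


    class NoopDataManager:
        """Hypothetical data manager conforming to DataManagerInterface."""

        def process_result(self, out_data) -> None:
            return None

        def write_bundle(self, out: Dict[str, OutputDataset]) -> Dict[str, OutputDataset]:
            # Nothing to bundle here, so return an empty mapping of
            # bundle path -> output dataset.
            return {}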
diff --git a/lib/galaxy/tool_shed/unittest_utils/__init__.py b/lib/galaxy/tool_shed/unittest_utils/__init__.py index 61062d76164c..2785fbca8857 100644 --- a/lib/galaxy/tool_shed/unittest_utils/__init__.py +++ b/lib/galaxy/tool_shed/unittest_utils/__init__.py @@ -23,7 +23,10 @@ InstallationTarget, ) from galaxy.tool_shed.util.repository_util import get_installed_repository -from galaxy.tool_util.data import ToolDataTableManager +from galaxy.tool_util.data import ( + OutputDataset, + ToolDataTableManager, +) from galaxy.tool_util.loader_directory import looks_like_a_tool from galaxy.tool_util.toolbox.base import ( AbstractToolBox, @@ -134,8 +137,8 @@ class DummyDataManager(DataManagerInterface): def process_result(self, out_data): return None - def write_bundle(self, out) -> None: - return None + def write_bundle(self, out) -> Dict[str, OutputDataset]: + return {} class StandaloneDataManagers(DataManagersInterface): diff --git a/lib/galaxy/tools/__init__.py b/lib/galaxy/tools/__init__.py index 405e80035768..3d03e0348d51 100644 --- a/lib/galaxy/tools/__init__.py +++ b/lib/galaxy/tools/__init__.py @@ -3108,10 +3108,10 @@ def exec_after_process(self, app, inp_data, out_data, param_dict, job=None, fina pass elif data_manager_mode == "bundle": for bundle_path, dataset in data_manager.write_bundle(out_data).items(): - dataset = cast(model.HistoryDatasetAssociation, dataset) - dataset.dataset.object_store.update_from_file( - dataset.dataset, - extra_dir=dataset.dataset.extra_files_path_name, + hda = cast(model.HistoryDatasetAssociation, dataset) + hda.dataset.object_store.update_from_file( + hda.dataset, + extra_dir=hda.dataset.extra_files_path_name, file_name=bundle_path, alt_name=os.path.basename(bundle_path), create=True, diff --git a/lib/galaxy/tools/data_manager/manager.py b/lib/galaxy/tools/data_manager/manager.py index 622e7189d20f..52f79ca54250 100644 --- a/lib/galaxy/tools/data_manager/manager.py +++ b/lib/galaxy/tools/data_manager/manager.py @@ -244,7 +244,7 @@ def process_result(self, out_data: Dict[str, OutputDataset]) -> None: def write_bundle( self, out_data: Dict[str, OutputDataset], - ): + ) -> Dict[str, OutputDataset]: tool_data_tables = self.data_managers.app.tool_data_tables return tool_data_tables.write_bundle( out_data, From bb4558102ff28a61ceb099b1c03863cf0ae6c4df Mon Sep 17 00:00:00 2001 From: John Chilton Date: Tue, 26 Sep 2023 14:08:03 -0400 Subject: [PATCH 72/73] Remove incorrect auto-generated readme for frontend. --- lib/tool_shed/webapp/frontend/README.md | 27 ------------------------- 1 file changed, 27 deletions(-) delete mode 100644 lib/tool_shed/webapp/frontend/README.md diff --git a/lib/tool_shed/webapp/frontend/README.md b/lib/tool_shed/webapp/frontend/README.md deleted file mode 100644 index a797a275d079..000000000000 --- a/lib/tool_shed/webapp/frontend/README.md +++ /dev/null @@ -1,27 +0,0 @@ -# Vue 3 + Typescript + Vite - -This template should help get you started developing with Vue 3 and Typescript in Vite. - -## Recommended IDE Setup - -[VSCode](https://code.visualstudio.com/) + [Vetur](https://marketplace.visualstudio.com/items?itemName=octref.vetur). Make sure to enable `vetur.experimental.templateInterpolationService` in settings! - -### If Using `
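Taken together, the new GraphQL schema can be exercised directly with graphene along these lines; this is a sketch only, assuming a trans-like object exposing `app.model.context` and `security` as the unit tests above construct, and the queried username is illustrative:

    from tool_shed.webapp.graphql.schema import schema

    # Both context_value and root_value carry the session/security mapping,
    # mirroring context_value() in test/unit/tool_shed/test_graphql.py.
    cv = {"session": trans.app.model.context, "security": trans.security}
    result = schema.execute(
        """
        query {
            relayRepositoriesForOwner(username: "someowner", first: 2) {
                edges {
                    cursor
                    node {
                        name
                        encodedId
                    }
                }
                pageInfo {
                    endCursor
                    hasNextPage
                }
            }
        }
        """,
        context_value=cv,
        root_value=cv,
    )
    assert result.errors is None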