diff --git a/lib/galaxy/__init__.py b/lib/galaxy/__init__.py index 70b3b112b715..20a19480162c 100644 --- a/lib/galaxy/__init__.py +++ b/lib/galaxy/__init__.py @@ -4,4 +4,4 @@ from pkgutil import extend_path -__path__ = extend_path(__path__, __name__) # type: ignore[has-type] +__path__ = extend_path(__path__, __name__) diff --git a/lib/galaxy/app.py b/lib/galaxy/app.py index f45c9aa83abc..c659097036e0 100644 --- a/lib/galaxy/app.py +++ b/lib/galaxy/app.py @@ -687,7 +687,7 @@ def __init__(self, **kwargs) -> None: self.watchers = self._register_singleton(ConfigWatchers) self._configure_toolbox() # Load Data Manager - self.data_managers = self._register_singleton(DataManagers) # type: ignore[type-abstract] + self.data_managers = self._register_singleton(DataManagers) # Load the update repository manager. self.update_repository_manager = self._register_singleton( UpdateRepositoryManager, UpdateRepositoryManager(self) diff --git a/lib/galaxy/app_unittest_utils/toolbox_support.py b/lib/galaxy/app_unittest_utils/toolbox_support.py index 6308fb14244f..1189e4580b84 100644 --- a/lib/galaxy/app_unittest_utils/toolbox_support.py +++ b/lib/galaxy/app_unittest_utils/toolbox_support.py @@ -81,7 +81,7 @@ def setUp(self): install_model = mapping.init("sqlite:///:memory:", create_tables=True) self.app.tool_cache = ToolCache() self.app.install_model = install_model - self.app.reindex_tool_search = self.__reindex # type: ignore[assignment] + self.app.reindex_tool_search = self.__reindex # type: ignore[method-assign] itp_config = os.path.join(self.test_directory, "integrated_tool_panel.xml") self.app.config.integrated_tool_panel_config = itp_config self.app.watchers = ConfigWatchers(self.app) diff --git a/lib/galaxy/authnz/custos_authnz.py b/lib/galaxy/authnz/custos_authnz.py index 1c75f278e438..a216b3239c14 100644 --- a/lib/galaxy/authnz/custos_authnz.py +++ b/lib/galaxy/authnz/custos_authnz.py @@ -35,7 +35,7 @@ try: import pkce except ImportError: - pkce = None # type: ignore[assignment] + pkce = None # type: ignore[assignment, unused-ignore] log = logging.getLogger(__name__) STATE_COOKIE_NAME = "galaxy-oidc-state" diff --git a/lib/galaxy/authnz/managers.py b/lib/galaxy/authnz/managers.py index 0eaf0a9e59a1..35211f9fe57e 100644 --- a/lib/galaxy/authnz/managers.py +++ b/lib/galaxy/authnz/managers.py @@ -22,7 +22,10 @@ string_as_bool, unicodify, ) -from galaxy.util.resources import files +from galaxy.util.resources import ( + as_file, + resource_path, +) from .custos_authnz import ( CustosAuthFactory, KEYCLOAK_BACKENDS, @@ -35,7 +38,7 @@ Strategy, ) -OIDC_BACKEND_SCHEMA = files("galaxy.authnz.xsd") / "oidc_backends_config.xsd" +OIDC_BACKEND_SCHEMA = resource_path(__package__, "xsd/oidc_backends_config.xsd") log = logging.getLogger(__name__) @@ -108,7 +111,8 @@ def _parse_oidc_backends_config(self, config_file): self.oidc_backends_config = {} self.oidc_backends_implementation = {} try: - tree = parse_xml(config_file, schemafname=OIDC_BACKEND_SCHEMA) + with as_file(OIDC_BACKEND_SCHEMA) as oidc_backend_schema_path: + tree = parse_xml(config_file, schemafname=oidc_backend_schema_path) root = tree.getroot() if root.tag != "OIDC": raise etree.ParseError( diff --git a/lib/galaxy/config/__init__.py b/lib/galaxy/config/__init__.py index 6670611637e6..c69009d47315 100644 --- a/lib/galaxy/config/__init__.py +++ b/lib/galaxy/config/__init__.py @@ -51,6 +51,10 @@ read_properties_from_file, running_from_source, ) +from galaxy.util.resources import ( + as_file, + resource_path, +) from galaxy.util.themes import flatten_theme 
from ..version import ( VERSION_MAJOR, @@ -60,18 +64,13 @@ if TYPE_CHECKING: from galaxy.model import User -if sys.version_info >= (3, 9): - from importlib.resources import files -else: - from importlib_resources import files - log = logging.getLogger(__name__) DEFAULT_LOCALE_FORMAT = "%a %b %e %H:%M:%S %Y" ISO_DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S" GALAXY_APP_NAME = "galaxy" -GALAXY_SCHEMAS_PATH = files("galaxy.config") / "schemas" +GALAXY_SCHEMAS_PATH = resource_path(__package__, "schemas") GALAXY_CONFIG_SCHEMA_PATH = GALAXY_SCHEMAS_PATH / "config_schema.yml" REPORTS_CONFIG_SCHEMA_PATH = GALAXY_SCHEMAS_PATH / "reports_config_schema.yml" TOOL_SHED_CONFIG_SCHEMA_PATH = GALAXY_SCHEMAS_PATH / "tool_shed_config_schema.yml" @@ -193,7 +192,7 @@ def configure_logging(config, facts=None): logging.config.dictConfig(logging_conf) -def find_root(kwargs): +def find_root(kwargs) -> str: return os.path.abspath(kwargs.get("root_dir", ".")) @@ -238,6 +237,7 @@ class BaseAppConfiguration(HasDynamicProperties): add_sample_file_to_defaults: Set[str] = set() # for these options, add sample config files to their defaults listify_options: Set[str] = set() # values for these options are processed as lists of values object_store_store_by: str + shed_tools_dir: str def __init__(self, **kwargs): self._preprocess_kwargs(kwargs) @@ -835,7 +835,8 @@ def _process_config(self, kwargs: Dict[str, Any]) -> None: self.cookie_path = kwargs.get("cookie_path") if not running_from_source and kwargs.get("tool_path") is None: try: - self.tool_path = str(files("galaxy.tools") / "bundled") + with as_file(resource_path("galaxy.tools", "bundled")) as path: + self.tool_path = os.fspath(path) except ModuleNotFoundError: # Might not be a full galaxy installation self.tool_path = self._in_root_dir(self.tool_path) diff --git a/lib/galaxy/config/config_manage.py b/lib/galaxy/config/config_manage.py index 4f0da4d9ddf6..a1ae2de306a9 100644 --- a/lib/galaxy/config/config_manage.py +++ b/lib/galaxy/config/config_manage.py @@ -46,6 +46,7 @@ nice_config_parser, NicerConfigParser, ) +from galaxy.util.resources import Traversable from galaxy.util.yaml_util import ( ordered_dump, ordered_load, @@ -72,7 +73,7 @@ class App(NamedTuple): default_port: str expected_app_factories: List[str] destination: str - schema_path: str + schema_path: Traversable @property def app_name(self) -> str: @@ -219,21 +220,21 @@ class OptionValue(NamedTuple): "8080", ["galaxy.web.buildapp:app_factory"], "config/galaxy.yml", - str(GALAXY_CONFIG_SCHEMA_PATH), + GALAXY_CONFIG_SCHEMA_PATH, ) SHED_APP = App( ["tool_shed_wsgi.ini", "config/tool_shed.ini"], "9009", ["tool_shed.webapp.buildapp:app_factory"], "config/tool_shed.yml", - str(TOOL_SHED_CONFIG_SCHEMA_PATH), + TOOL_SHED_CONFIG_SCHEMA_PATH, ) REPORTS_APP = App( ["reports_wsgi.ini", "config/reports.ini"], "9001", ["galaxy.webapps.reports.buildapp:app_factory"], "config/reports.yml", - str(REPORTS_CONFIG_SCHEMA_PATH), + REPORTS_CONFIG_SCHEMA_PATH, ) APPS = {"galaxy": GALAXY_APP, "tool_shed": SHED_APP, "reports": REPORTS_APP} diff --git a/lib/galaxy/config/schema.py b/lib/galaxy/config/schema.py index 10659cb76d60..ca05d887c9f4 100644 --- a/lib/galaxy/config/schema.py +++ b/lib/galaxy/config/schema.py @@ -1,6 +1,7 @@ import logging from galaxy.exceptions import ConfigurationError +from galaxy.util.resources import Traversable from galaxy.util.yaml_util import ordered_load log = logging.getLogger(__name__) @@ -35,15 +36,15 @@ def get_app_option(self, name): class AppSchema(Schema): - def __init__(self, schema_path, app_name): 
+ def __init__(self, schema_path: Traversable, app_name: str): self.raw_schema = self._read_schema(schema_path) self.description = self.raw_schema.get("desc", None) app_schema = self.raw_schema["mapping"][app_name]["mapping"] self._preprocess(app_schema) super().__init__(app_schema) - def _read_schema(self, path): - with open(path) as f: + def _read_schema(self, path: Traversable): + with path.open() as f: return ordered_load(f) def _preprocess(self, app_schema): diff --git a/lib/galaxy/datatypes/binary.py b/lib/galaxy/datatypes/binary.py index f0c09f54221c..679cc6da19db 100644 --- a/lib/galaxy/datatypes/binary.py +++ b/lib/galaxy/datatypes/binary.py @@ -4367,7 +4367,7 @@ class HexrdImagesNpz(Npz): >>> fname = get_test_fname('hexrd.images.npz') >>> HexrdImagesNpz().sniff(fname) True - >>> fname = get_test_fname('eta_ome.npz') + >>> fname = get_test_fname('hexrd.eta_ome.npz') >>> HexrdImagesNpz().sniff(fname) False """ diff --git a/lib/galaxy/datatypes/constructive_solid_geometry.py b/lib/galaxy/datatypes/constructive_solid_geometry.py index da87ae493802..73973abb65a5 100644 --- a/lib/galaxy/datatypes/constructive_solid_geometry.py +++ b/lib/galaxy/datatypes/constructive_solid_geometry.py @@ -142,7 +142,7 @@ def display_peek(self, dataset: DatasetProtocol) -> str: return f"Ply file ({nice_size(dataset.get_size())})" -class PlyAscii(Ply, data.Text): # type: ignore[misc] +class PlyAscii(Ply, data.Text): """ >>> from galaxy.datatypes.sniff import get_test_fname >>> fname = get_test_fname('test.plyascii') @@ -160,7 +160,7 @@ def __init__(self, **kwd): data.Text.__init__(self, **kwd) -class PlyBinary(Ply, Binary): # type: ignore[misc] +class PlyBinary(Ply, Binary): file_ext = "plybinary" subtype = "binary" @@ -477,7 +477,7 @@ def display_peek(self, dataset: DatasetProtocol) -> str: return f"Vtk file ({nice_size(dataset.get_size())})" -class VtkAscii(Vtk, data.Text): # type: ignore[misc] +class VtkAscii(Vtk, data.Text): """ >>> from galaxy.datatypes.sniff import get_test_fname >>> fname = get_test_fname('test.vtkascii') @@ -495,7 +495,7 @@ def __init__(self, **kwd): data.Text.__init__(self, **kwd) -class VtkBinary(Vtk, Binary): # type: ignore[misc] +class VtkBinary(Vtk, Binary): """ >>> from galaxy.datatypes.sniff import get_test_fname >>> fname = get_test_fname('test.vtkbinary') diff --git a/lib/galaxy/datatypes/media.py b/lib/galaxy/datatypes/media.py index 53b743485de5..5be3dea9fbb0 100644 --- a/lib/galaxy/datatypes/media.py +++ b/lib/galaxy/datatypes/media.py @@ -193,8 +193,6 @@ class Mp4(Video): >>> from galaxy.datatypes.sniff import sniff_with_cls >>> sniff_with_cls(Mp4, 'video_1.mp4') True - >>> sniff_with_cls(Mp4, 'audio_1.mp4') - False """ file_ext = "mp4" diff --git a/lib/galaxy/datatypes/sniff.py b/lib/galaxy/datatypes/sniff.py index 07c9026bdeec..12a6550bdf6d 100644 --- a/lib/galaxy/datatypes/sniff.py +++ b/lib/galaxy/datatypes/sniff.py @@ -49,8 +49,9 @@ def get_test_fname(fname): """Returns test data filename""" - path, name = os.path.split(__file__) + path = os.path.dirname(__file__) full_path = os.path.join(path, "test", fname) + assert os.path.isfile(full_path), f"{full_path} is not a file" return full_path diff --git a/lib/galaxy/jobs/runners/aws.py b/lib/galaxy/jobs/runners/aws.py index ab42b38da0e8..ee0284fe0f23 100644 --- a/lib/galaxy/jobs/runners/aws.py +++ b/lib/galaxy/jobs/runners/aws.py @@ -521,10 +521,10 @@ def parse_destination_params(self, params): check_required = [] parsed_params = {} for k, spec in self.DESTINATION_PARAMS_SPEC.items(): - value = params.get(k, 
spec.get("default")) # type: ignore[attr-defined] - if spec.get("required") and not value: # type: ignore[attr-defined] + value = params.get(k, spec.get("default")) + if spec.get("required") and not value: check_required.append(k) - mapper = spec.get("map") # type: ignore[attr-defined] + mapper = spec.get("map") parsed_params[k] = mapper(value) # type: ignore[operator] if check_required: raise AWSBatchRunnerException( diff --git a/lib/galaxy/jobs/runners/util/kill.py b/lib/galaxy/jobs/runners/util/kill.py index 14a641ef16a1..7a26aa099e38 100644 --- a/lib/galaxy/jobs/runners/util/kill.py +++ b/lib/galaxy/jobs/runners/util/kill.py @@ -11,7 +11,7 @@ ) except ImportError: """Don't make psutil a strict requirement, but use if available.""" - Process = None # type: ignore + Process = None def kill_pid(pid: int, use_psutil: bool = True): diff --git a/lib/galaxy/managers/history_contents.py b/lib/galaxy/managers/history_contents.py index 4496f6dc9be2..c7b57233f09c 100644 --- a/lib/galaxy/managers/history_contents.py +++ b/lib/galaxy/managers/history_contents.py @@ -413,7 +413,7 @@ def _contained_id_map(self, id_list): component_class = self.contained_class stmt = ( select(component_class) - .where(component_class.id.in_(id_list)) # type: ignore[attr-defined] + .where(component_class.id.in_(id_list)) .options(undefer(component_class._metadata)) .options(joinedload(component_class.dataset).joinedload(model.Dataset.actions)) .options(joinedload(component_class.tags)) # type: ignore[attr-defined] diff --git a/lib/galaxy/managers/users.py b/lib/galaxy/managers/users.py index 0429e37007a0..eea8ca6d4d0e 100644 --- a/lib/galaxy/managers/users.py +++ b/lib/galaxy/managers/users.py @@ -313,7 +313,7 @@ def by_api_key(self, api_key: str, sa_session=None): def by_oidc_access_token(self, access_token: str): if hasattr(self.app, "authnz_manager") and self.app.authnz_manager: - user = self.app.authnz_manager.match_access_token_to_user(self.app.model.session, access_token) # type: ignore[attr-defined] + user = self.app.authnz_manager.match_access_token_to_user(self.app.model.session, access_token) return user else: return None diff --git a/lib/galaxy/managers/workflows.py b/lib/galaxy/managers/workflows.py index fb01a39d4e1e..95b2eaab2a9b 100644 --- a/lib/galaxy/managers/workflows.py +++ b/lib/galaxy/managers/workflows.py @@ -538,7 +538,7 @@ def build_invocations_query( for inv in trans.sa_session.scalars(stmt) if self.check_security(trans, inv, check_ownership=True, check_accessible=False) ] - return invocations, total_matches # type:ignore[return-value] + return invocations, total_matches MissingToolsT = List[Tuple[str, str, Optional[str], str]] diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index ae15250f485e..ae7da2d3a062 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -759,12 +759,12 @@ class User(Base, Dictifiable, RepresentById): back_populates="user", order_by=lambda: desc(History.update_time), cascade_backrefs=False # type: ignore[has-type] ) active_histories: Mapped[List["History"]] = relationship( - primaryjoin=(lambda: (History.user_id == User.id) & (not_(History.deleted)) & (not_(History.archived))), # type: ignore[has-type] + primaryjoin=(lambda: (History.user_id == User.id) & (not_(History.deleted)) & (not_(History.archived))), viewonly=True, order_by=lambda: desc(History.update_time), # type: ignore[has-type] ) galaxy_sessions: Mapped[List["GalaxySession"]] = relationship( - back_populates="user", order_by=lambda: 
desc(GalaxySession.update_time), cascade_backrefs=False # type: ignore[has-type] + back_populates="user", order_by=lambda: desc(GalaxySession.update_time), cascade_backrefs=False ) quotas: Mapped[List["UserQuotaAssociation"]] = relationship(back_populates="user") quota_source_usages: Mapped[List["UserQuotaSourceUsage"]] = relationship(back_populates="user") @@ -772,25 +772,23 @@ class User(Base, Dictifiable, RepresentById): stored_workflow_menu_entries: Mapped[List["StoredWorkflowMenuEntry"]] = relationship( primaryjoin=( lambda: (StoredWorkflowMenuEntry.user_id == User.id) - & (StoredWorkflowMenuEntry.stored_workflow_id == StoredWorkflow.id) # type: ignore[has-type] - & not_(StoredWorkflow.deleted) # type: ignore[has-type] + & (StoredWorkflowMenuEntry.stored_workflow_id == StoredWorkflow.id) + & not_(StoredWorkflow.deleted) ), back_populates="user", cascade="all, delete-orphan", collection_class=ordering_list("order_index"), ) _preferences: Mapped[Dict[str, "UserPreference"]] = relationship(collection_class=attribute_keyed_dict("name")) - values: Mapped[List["FormValues"]] = relationship( - primaryjoin=(lambda: User.form_values_id == FormValues.id) # type: ignore[has-type] - ) + values: Mapped[List["FormValues"]] = relationship(primaryjoin=(lambda: User.form_values_id == FormValues.id)) # Add type hint (will this work w/SA?) api_keys: Mapped[List["APIKeys"]] = relationship( back_populates="user", order_by=lambda: desc(APIKeys.create_time), primaryjoin=( lambda: and_( - User.id == APIKeys.user_id, # type: ignore[attr-defined] - not_(APIKeys.deleted == true()), # type: ignore[has-type] + User.id == APIKeys.user_id, + not_(APIKeys.deleted == true()), ) ), ) @@ -798,7 +796,7 @@ class User(Base, Dictifiable, RepresentById): roles: Mapped[List["UserRoleAssociation"]] = relationship(back_populates="user") stored_workflows: Mapped[List["StoredWorkflow"]] = relationship( back_populates="user", - primaryjoin=(lambda: User.id == StoredWorkflow.user_id), # type: ignore[has-type] + primaryjoin=(lambda: User.id == StoredWorkflow.user_id), cascade_backrefs=False, ) all_notifications: Mapped[List["UserNotificationAssociation"]] = relationship( @@ -807,9 +805,9 @@ class User(Base, Dictifiable, RepresentById): non_private_roles: Mapped[List["UserRoleAssociation"]] = relationship( viewonly=True, primaryjoin=( - lambda: (User.id == UserRoleAssociation.user_id) # type: ignore[has-type] - & (UserRoleAssociation.role_id == Role.id) # type: ignore[has-type] - & not_(Role.name == User.email) # type: ignore[has-type] + lambda: (User.id == UserRoleAssociation.user_id) + & (UserRoleAssociation.role_id == Role.id) + & not_(Role.name == User.email) ), ) @@ -2512,9 +2510,9 @@ class ImplicitlyCreatedDatasetCollectionInput(Base, RepresentById): input_dataset_collection: Mapped[Optional["HistoryDatasetCollectionAssociation"]] = relationship( primaryjoin=( - lambda: HistoryDatasetCollectionAssociation.id # type: ignore[has-type] + lambda: HistoryDatasetCollectionAssociation.id == ImplicitlyCreatedDatasetCollectionInput.input_dataset_collection_id - ), # type: ignore[has-type] + ), ) def __init__(self, name, input_dataset_collection): @@ -2578,7 +2576,7 @@ class PostJobAction(Base, RepresentById): action_arguments: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) workflow_step: Mapped[Optional["WorkflowStep"]] = relationship( back_populates="post_job_actions", - primaryjoin=(lambda: WorkflowStep.id == PostJobAction.workflow_step_id), # type: ignore[has-type] + primaryjoin=(lambda: WorkflowStep.id == 
PostJobAction.workflow_step_id), ) def __init__(self, action_type, workflow_step=None, output_name=None, action_arguments=None): @@ -3073,12 +3071,12 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable primaryjoin=( lambda: ( and_( - HistoryDatasetCollectionAssociation.history_id == History.id, # type: ignore[has-type] - not_(HistoryDatasetCollectionAssociation.deleted), # type: ignore[has-type, arg-type] + HistoryDatasetCollectionAssociation.history_id == History.id, + not_(HistoryDatasetCollectionAssociation.deleted), # type: ignore[arg-type] ) ) ), - order_by=lambda: asc(HistoryDatasetCollectionAssociation.hid), # type: ignore[has-type] + order_by=lambda: asc(HistoryDatasetCollectionAssociation.hid), viewonly=True, ) visible_datasets: Mapped[List["HistoryDatasetAssociation"]] = relationship( @@ -3095,12 +3093,12 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable visible_dataset_collections: Mapped[List["HistoryDatasetCollectionAssociation"]] = relationship( primaryjoin=( lambda: and_( - HistoryDatasetCollectionAssociation.history_id == History.id, # type: ignore[has-type] - not_(HistoryDatasetCollectionAssociation.deleted), # type: ignore[has-type, arg-type] - HistoryDatasetCollectionAssociation.visible, # type: ignore[has-type, arg-type] + HistoryDatasetCollectionAssociation.history_id == History.id, + not_(HistoryDatasetCollectionAssociation.deleted), # type: ignore[arg-type] + HistoryDatasetCollectionAssociation.visible, # type: ignore[arg-type] ) ), - order_by=lambda: asc(HistoryDatasetCollectionAssociation.hid), # type: ignore[has-type] + order_by=lambda: asc(HistoryDatasetCollectionAssociation.hid), viewonly=True, ) tags: Mapped[List["HistoryTagAssociation"]] = relationship( @@ -3110,7 +3108,7 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable order_by=lambda: HistoryAnnotationAssociation.id, back_populates="history" ) ratings: Mapped[List["HistoryRatingAssociation"]] = relationship( - order_by=lambda: HistoryRatingAssociation.id, # type: ignore[has-type] + order_by=lambda: HistoryRatingAssociation.id, back_populates="history", ) default_permissions: Mapped[List["DefaultHistoryPermissions"]] = relationship(back_populates="history") @@ -4005,7 +4003,7 @@ class Dataset(Base, StorableObject, Serializable): lambda: and_( Dataset.id == HistoryDatasetAssociation.dataset_id, # type: ignore[attr-defined] HistoryDatasetAssociation.deleted == false(), # type: ignore[has-type] - HistoryDatasetAssociation.purged == false(), # type: ignore[attr-defined, arg-type] + HistoryDatasetAssociation.purged == false(), # type: ignore[arg-type] ) ), viewonly=True, @@ -4014,7 +4012,7 @@ class Dataset(Base, StorableObject, Serializable): primaryjoin=( lambda: and_( Dataset.id == HistoryDatasetAssociation.dataset_id, # type: ignore[attr-defined] - HistoryDatasetAssociation.purged == true(), # type: ignore[attr-defined, arg-type] + HistoryDatasetAssociation.purged == true(), # type: ignore[arg-type] ) ), viewonly=True, @@ -6113,7 +6111,7 @@ class LibraryInfoAssociation(Base, RepresentById): primaryjoin=lambda: LibraryInfoAssociation.form_definition_id == FormDefinition.id ) info: Mapped[Optional["FormValues"]] = relationship( - primaryjoin=lambda: LibraryInfoAssociation.form_values_id == FormValues.id # type: ignore[has-type] + primaryjoin=lambda: LibraryInfoAssociation.form_values_id == FormValues.id ) def __init__(self, library, form_definition, info, inheritable=False): @@ -6143,7 +6141,7 @@ class 
LibraryFolderInfoAssociation(Base, RepresentById): primaryjoin=(lambda: LibraryFolderInfoAssociation.form_definition_id == FormDefinition.id) ) info: Mapped[Optional["FormValues"]] = relationship( - primaryjoin=(lambda: LibraryFolderInfoAssociation.form_values_id == FormValues.id) # type: ignore[has-type] + primaryjoin=(lambda: LibraryFolderInfoAssociation.form_values_id == FormValues.id) ) def __init__(self, folder, form_definition, info, inheritable=False): @@ -6177,7 +6175,7 @@ class LibraryDatasetDatasetInfoAssociation(Base, RepresentById): primaryjoin=(lambda: LibraryDatasetDatasetInfoAssociation.form_definition_id == FormDefinition.id), ) info: Mapped[Optional["FormValues"]] = relationship( - primaryjoin=(lambda: LibraryDatasetDatasetInfoAssociation.form_values_id == FormValues.id) # type: ignore[has-type] + primaryjoin=(lambda: LibraryDatasetDatasetInfoAssociation.form_values_id == FormValues.id) ) def __init__(self, library_dataset_dataset_association, form_definition, info): @@ -6311,9 +6309,9 @@ class DatasetCollection(Base, Dictifiable, UsesAnnotations, Serializable): update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) elements: Mapped[List["DatasetCollectionElement"]] = relationship( - primaryjoin=(lambda: DatasetCollection.id == DatasetCollectionElement.dataset_collection_id), # type: ignore[has-type] + primaryjoin=(lambda: DatasetCollection.id == DatasetCollectionElement.dataset_collection_id), back_populates="collection", - order_by=lambda: DatasetCollectionElement.element_index, # type: ignore[has-type] + order_by=lambda: DatasetCollectionElement.element_index, ) dict_collection_visible_keys = ["id", "collection_type"] @@ -6811,7 +6809,7 @@ class HistoryDatasetCollectionAssociation( back_populates="history_dataset_collection", ) ratings: Mapped[List["HistoryDatasetCollectionRatingAssociation"]] = relationship( - order_by=lambda: HistoryDatasetCollectionRatingAssociation.id, # type: ignore[has-type] + order_by=lambda: HistoryDatasetCollectionRatingAssociation.id, back_populates="dataset_collection", ) creating_job_associations: Mapped[List["JobToOutputDatasetCollectionAssociation"]] = relationship(viewonly=True) @@ -7166,7 +7164,7 @@ class LibraryDatasetCollectionAssociation(Base, DatasetCollectionInstance, Repre back_populates="dataset_collection", ) ratings: Mapped[List["LibraryDatasetCollectionRatingAssociation"]] = relationship( - order_by=lambda: LibraryDatasetCollectionRatingAssociation.id, # type: ignore[has-type] + order_by=lambda: LibraryDatasetCollectionRatingAssociation.id, back_populates="dataset_collection", ) @@ -7489,14 +7487,14 @@ class StoredWorkflow(Base, HasTags, Dictifiable, RepresentById): workflows: Mapped[List["Workflow"]] = relationship( back_populates="stored_workflow", cascade="all, delete-orphan", - primaryjoin=(lambda: StoredWorkflow.id == Workflow.stored_workflow_id), # type: ignore[has-type] - order_by=lambda: -Workflow.id, # type: ignore[has-type] + primaryjoin=(lambda: StoredWorkflow.id == Workflow.stored_workflow_id), + order_by=lambda: -Workflow.id, cascade_backrefs=False, ) latest_workflow = relationship( "Workflow", post_update=True, - primaryjoin=(lambda: StoredWorkflow.latest_workflow_id == Workflow.id), # type: ignore[has-type] + primaryjoin=(lambda: StoredWorkflow.latest_workflow_id == Workflow.id), lazy=False, ) tags: Mapped[List["StoredWorkflowTagAssociation"]] = relationship( @@ -7518,7 +7516,7 @@ class StoredWorkflow(Base, HasTags, Dictifiable, RepresentById): back_populates="stored_workflow", ) 
ratings: Mapped[List["StoredWorkflowRatingAssociation"]] = relationship( - order_by=lambda: StoredWorkflowRatingAssociation.id, # type: ignore[has-type] + order_by=lambda: StoredWorkflowRatingAssociation.id, back_populates="stored_workflow", ) users_shared_with: Mapped[List["StoredWorkflowUserShareAssociation"]] = relationship( @@ -7657,20 +7655,20 @@ class Workflow(Base, Dictifiable, RepresentById): steps = relationship( "WorkflowStep", back_populates="workflow", - primaryjoin=(lambda: Workflow.id == WorkflowStep.workflow_id), # type: ignore[has-type] - order_by=lambda: asc(WorkflowStep.order_index), # type: ignore[has-type] + primaryjoin=(lambda: Workflow.id == WorkflowStep.workflow_id), + order_by=lambda: asc(WorkflowStep.order_index), cascade="all, delete-orphan", lazy=False, ) comments: Mapped[List["WorkflowComment"]] = relationship( back_populates="workflow", - primaryjoin=(lambda: Workflow.id == WorkflowComment.workflow_id), # type: ignore[has-type] + primaryjoin=(lambda: Workflow.id == WorkflowComment.workflow_id), cascade="all, delete-orphan", lazy=False, ) parent_workflow_steps = relationship( "WorkflowStep", - primaryjoin=(lambda: Workflow.id == WorkflowStep.subworkflow_id), # type: ignore[has-type] + primaryjoin=(lambda: Workflow.id == WorkflowStep.subworkflow_id), back_populates="subworkflow", cascade_backrefs=False, ) @@ -9064,8 +9062,6 @@ class WorkflowInvocationStep(Base, Dictifiable, Serializable): select(WorkflowStep.order_index).where(WorkflowStep.id == workflow_step_id).scalar_subquery() ) - subworkflow_invocation_id = None - dict_collection_visible_keys = [ "id", "update_time", @@ -9607,7 +9603,7 @@ class FormDefinition(Base, Dictifiable, RepresentById): layout: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) form_definition_current: Mapped["FormDefinitionCurrent"] = relationship( back_populates="forms", - primaryjoin=(lambda: FormDefinitionCurrent.id == FormDefinition.form_definition_current_id), # type: ignore[has-type] + primaryjoin=(lambda: FormDefinitionCurrent.id == FormDefinition.form_definition_current_id), ) # The following form_builder classes are supported by the FormDefinition class. @@ -10145,12 +10141,12 @@ class Page(Base, HasTags, Dictifiable, RepresentById): user: Mapped["User"] = relationship() revisions: Mapped[List["PageRevision"]] = relationship( cascade="all, delete-orphan", - primaryjoin=(lambda: Page.id == PageRevision.page_id), # type: ignore[has-type] + primaryjoin=(lambda: Page.id == PageRevision.page_id), back_populates="page", ) latest_revision: Mapped[Optional["PageRevision"]] = relationship( post_update=True, - primaryjoin=(lambda: Page.latest_revision_id == PageRevision.id), # type: ignore[has-type] + primaryjoin=(lambda: Page.latest_revision_id == PageRevision.id), lazy=False, ) tags: Mapped[List["PageTagAssociation"]] = relationship( @@ -10160,13 +10156,11 @@ class Page(Base, HasTags, Dictifiable, RepresentById): order_by=lambda: PageAnnotationAssociation.id, back_populates="page" ) ratings: Mapped[List["PageRatingAssociation"]] = relationship( - order_by=lambda: PageRatingAssociation.id, # type: ignore[has-type] + order_by=lambda: PageRatingAssociation.id, back_populates="page", ) users_shared_with: Mapped[List["PageUserShareAssociation"]] = relationship(back_populates="page") - average_rating = None - # Set up proxy so that # Page.users_shared_with # returns a list of users that page is shared with. 
@@ -10282,7 +10276,7 @@ class Visualization(Base, HasTags, Dictifiable, RepresentById): back_populates="visualization", ) ratings: Mapped[List["VisualizationRatingAssociation"]] = relationship( - order_by=lambda: VisualizationRatingAssociation.id, # type: ignore[has-type] + order_by=lambda: VisualizationRatingAssociation.id, back_populates="visualization", ) users_shared_with: Mapped[List["VisualizationUserShareAssociation"]] = relationship(back_populates="visualization") @@ -11317,7 +11311,7 @@ def __repr__(self): deferred=True, ) -Page.average_rating = column_property( # type:ignore[assignment] +Page.average_rating = column_property( select(func.avg(PageRatingAssociation.rating)).where(PageRatingAssociation.page_id == Page.id).scalar_subquery(), deferred=True, ) @@ -11340,7 +11334,7 @@ def __repr__(self): select(func.count(WorkflowStep.id)).where(Workflow.id == WorkflowStep.workflow_id).scalar_subquery(), deferred=True ) -WorkflowInvocationStep.subworkflow_invocation_id = column_property( # type:ignore[assignment] +WorkflowInvocationStep.subworkflow_invocation_id = column_property( select(WorkflowInvocationToSubworkflowInvocationAssociation.subworkflow_invocation_id) .where( and_( diff --git a/lib/galaxy/model/migrations/alembic/env.py b/lib/galaxy/model/migrations/alembic/env.py index 5c3ec11f8298..8ec793787d54 100644 --- a/lib/galaxy/model/migrations/alembic/env.py +++ b/lib/galaxy/model/migrations/alembic/env.py @@ -58,7 +58,7 @@ def _run_migrations_invoked_via_script(run_migrations: Callable[[str], None]) -> if revision_str: if len(revision_str) > 1: log.error("Please run the commmand for one revision at a time") - revision_str = revision_str[0] # type: ignore[union-attr] + revision_str = revision_str[0] if revision_str.startswith(f"{GXY}@"): url = urls[GXY] diff --git a/lib/galaxy/model/store/__init__.py b/lib/galaxy/model/store/__init__.py index 967295d1202f..dbca1f250f33 100644 --- a/lib/galaxy/model/store/__init__.py +++ b/lib/galaxy/model/store/__init__.py @@ -2175,7 +2175,7 @@ def export_history( # Write collections' attributes (including datasets list) to file. stmt_hdca = ( select(model.HistoryDatasetCollectionAssociation) - .where(model.HistoryDatasetCollectionAssociation.history == history) # type:ignore[arg-type] + .where(model.HistoryDatasetCollectionAssociation.history == history) .where(model.HistoryDatasetCollectionAssociation.deleted == expression.false()) ) collections = sa_session.scalars(stmt_hdca) @@ -2190,7 +2190,7 @@ def export_history( self.export_collection(collection, include_deleted=include_deleted) # Write datasets' attributes to file. 
- actions_backref = model.Dataset.actions # type: ignore[attr-defined] + actions_backref = model.Dataset.actions stmt_hda = ( select(model.HistoryDatasetAssociation) diff --git a/lib/galaxy/objectstore/rucio.py b/lib/galaxy/objectstore/rucio.py index c89957efc4fa..ebc596903752 100644 --- a/lib/galaxy/objectstore/rucio.py +++ b/lib/galaxy/objectstore/rucio.py @@ -1,21 +1,8 @@ import hashlib -from typing import Optional - -from .caching import ( - CacheTarget, - enable_cache_monitor, - InProcessCacheMonitor, - parse_caching_config_dict_from_xml, -) - -try: - from ..authnz.util import provider_name_to_backend -except ImportError: - provider_name_to_backend = None # type: ignore[misc,assignment] - import logging import os import shutil +from typing import Optional try: import rucio.common @@ -30,6 +17,11 @@ except ImportError: Client = None +try: + from galaxy.authnz.util import provider_name_to_backend +except ImportError: + provider_name_to_backend = None # type: ignore[assignment, unused-ignore] + from galaxy.exceptions import ( ObjectInvalid, ObjectNotFound, @@ -41,7 +33,13 @@ unlink, ) from galaxy.util.path import safe_relpath -from ..objectstore import ConcreteObjectStore +from . import ConcreteObjectStore +from .caching import ( + CacheTarget, + enable_cache_monitor, + InProcessCacheMonitor, + parse_caching_config_dict_from_xml, +) log = logging.getLogger(__name__) diff --git a/lib/galaxy/objectstore/rucio_extra_clients.py b/lib/galaxy/objectstore/rucio_extra_clients.py index 38f6309f5d59..36ff04981b3f 100644 --- a/lib/galaxy/objectstore/rucio_extra_clients.py +++ b/lib/galaxy/objectstore/rucio_extra_clients.py @@ -1,11 +1,10 @@ import copy import logging import time -from abc import ABC try: from rucio.client.uploadclient import UploadClient - from rucio.common.exception import ( # type: ignore + from rucio.common.exception import ( InputValidationError, NoFilesUploaded, NotAllFilesUploaded, @@ -14,7 +13,7 @@ from rucio.common.utils import generate_uuid from rucio.rse import rsemanager as rsemgr except ImportError: - UploadClient = ABC + UploadClient = object class DeleteClient(UploadClient): diff --git a/lib/galaxy/schema/fetch_data.py b/lib/galaxy/schema/fetch_data.py index f37d9bfb23f7..2603b1471ea5 100644 --- a/lib/galaxy/schema/fetch_data.py +++ b/lib/galaxy/schema/fetch_data.py @@ -269,4 +269,4 @@ class FetchDataPayload(BaseDataPayload): class FetchDataFormPayload(BaseDataPayload): - targets: Union[Json[Targets], Targets] # type: ignore[type-arg] # https://github.com/samuelcolvin/pydantic/issues/2990 + targets: Union[Json[Targets], Targets] diff --git a/lib/galaxy/tool_shed/metadata/metadata_generator.py b/lib/galaxy/tool_shed/metadata/metadata_generator.py index b0827bdbf8b0..4c439ada7a99 100644 --- a/lib/galaxy/tool_shed/metadata/metadata_generator.py +++ b/lib/galaxy/tool_shed/metadata/metadata_generator.py @@ -101,7 +101,7 @@ def _generate_data_manager_metadata( repo_path = self.repository.repo_path(self.app) if hasattr(self.repository, "repo_files_directory"): # Galaxy Side. - repo_files_directory = self.repository.repo_files_directory(self.app) # type: ignore[attr-defined] + repo_files_directory = self.repository.repo_files_directory(self.app) repo_dir = repo_files_directory else: # Tool Shed side. 
diff --git a/lib/galaxy/tool_util/ontologies/ontology_data.py b/lib/galaxy/tool_util/ontologies/ontology_data.py index edd3a308f651..9903031c1790 100644 --- a/lib/galaxy/tool_util/ontologies/ontology_data.py +++ b/lib/galaxy/tool_util/ontologies/ontology_data.py @@ -9,7 +9,7 @@ from galaxy.tool_util.biotools import BiotoolsMetadataSource from galaxy.tool_util.parser import ToolSource -from galaxy.util.resources import files +from galaxy.util.resources import resource_string def _multi_dict_mapping(content: str) -> Dict[str, List[str]]: @@ -23,10 +23,9 @@ def _multi_dict_mapping(content: str) -> Dict[str, List[str]]: def _read_ontology_data_text(filename: str) -> str: - return files(PACKAGE).joinpath(filename).read_text() + return resource_string(__package__, filename) -PACKAGE = "galaxy.tool_util.ontologies" BIOTOOLS_MAPPING_FILENAME = "biotools_mappings.tsv" EDAM_OPERATION_MAPPING_FILENAME = "edam_operation_mappings.tsv" EDAM_TOPIC_MAPPING_FILENAME = "edam_topic_mappings.tsv" diff --git a/lib/galaxy/tool_util/parser/output_actions.py b/lib/galaxy/tool_util/parser/output_actions.py index 13ae26266296..f4d8e3fe1825 100644 --- a/lib/galaxy/tool_util/parser/output_actions.py +++ b/lib/galaxy/tool_util/parser/output_actions.py @@ -11,7 +11,7 @@ try: from galaxy.util.template import fill_template except ImportError: - fill_template = None # type: ignore[assignment] + fill_template = None # type: ignore[assignment, unused-ignore] log = logging.getLogger(__name__) @@ -324,7 +324,7 @@ def apply_action(self, output_dataset, other_values) -> None: else: # fallback when Cheetah not available, equivalent to how this was handled prior 23.0 # definitely not needed for CWL tool parsing - log.warning("Cheetah not installed, falling back to legacy 'apply_action' behavior.") # type: ignore[unreachable] + log.warning("Cheetah not installed, falling back to legacy 'apply_action' behavior.") # type: ignore[unreachable, unused-ignore] value = self.default if value is not None: setattr(output_dataset.metadata, self.name, value) diff --git a/lib/galaxy/tool_util/toolbox/watcher.py b/lib/galaxy/tool_util/toolbox/watcher.py index b885ea964e27..05db57fd3040 100644 --- a/lib/galaxy/tool_util/toolbox/watcher.py +++ b/lib/galaxy/tool_util/toolbox/watcher.py @@ -4,15 +4,8 @@ try: from watchdog.events import FileSystemEventHandler - from watchdog.observers import Observer - from watchdog.observers.polling import PollingObserver - - can_watch = True except ImportError: - Observer = None # type:ignore[assignment, misc] - FileSystemEventHandler = object # type:ignore[assignment, misc] - PollingObserver = None # type:ignore[assignment, misc] - can_watch = False + FileSystemEventHandler = object # type:ignore[assignment, misc, unused-ignore] from galaxy.util.hash_util import md5_hash_file from galaxy.util.watcher import ( diff --git a/lib/galaxy/tool_util/verify/__init__.py b/lib/galaxy/tool_util/verify/__init__.py index 8bed1f449fc5..ec84f23ae345 100644 --- a/lib/galaxy/tool_util/verify/__init__.py +++ b/lib/galaxy/tool_util/verify/__init__.py @@ -31,11 +31,11 @@ try: from PIL import Image except ImportError: - Image = None # type: ignore[assignment] + Image = None # type: ignore[assignment, unused-ignore] try: import tifffile except ImportError: - tifffile = None # type: ignore[assignment] + tifffile = None # type: ignore[assignment, unused-ignore] from galaxy.tool_util.parser.util import ( diff --git a/lib/galaxy/tools/parameters/basic.py b/lib/galaxy/tools/parameters/basic.py index 4c53ffa4fcec..6db073aa3887 100644 --- 
a/lib/galaxy/tools/parameters/basic.py +++ b/lib/galaxy/tools/parameters/basic.py @@ -2178,7 +2178,7 @@ def from_json(self, value, trans, other_values=None): elif isinstance(value, HistoryDatasetCollectionAssociation) or isinstance(value, DatasetCollectionElement): rval.append(value) else: - rval.append(session.get(HistoryDatasetAssociation, int(value))) # type:ignore[arg-type] + rval.append(session.get(HistoryDatasetAssociation, int(value))) dataset_matcher_factory = get_dataset_matcher_factory(trans) dataset_matcher = dataset_matcher_factory.dataset_matcher(self, other_values) for v in rval: diff --git a/lib/galaxy/util/__init__.py b/lib/galaxy/util/__init__.py index a49e5c1ddd1a..814e9fdd9c6b 100644 --- a/lib/galaxy/util/__init__.py +++ b/lib/galaxy/util/__init__.py @@ -34,6 +34,7 @@ from email.mime.text import MIMEText from hashlib import md5 from os.path import relpath +from pathlib import Path from typing import ( Any, cast, @@ -61,7 +62,7 @@ remap, ) from requests.adapters import HTTPAdapter -from requests.packages.urllib3.util.retry import Retry # type: ignore[import-untyped] +from requests.packages.urllib3.util.retry import Retry # type: ignore[import-untyped, unused-ignore] from typing_extensions import ( Literal, Self, @@ -116,7 +117,7 @@ def XML(text: Union[str, bytes]) -> Element: except ImportError: LXML_AVAILABLE = False - import xml.etree.ElementTree as etree # type: ignore[assignment,no-redef] + import xml.etree.ElementTree as etree # type: ignore[no-redef] from xml.etree.ElementTree import ( # type: ignore[assignment] # noqa: F401 Element, ElementTree, @@ -134,7 +135,7 @@ def XML(text: Union[str, bytes]) -> Element: from .rst_to_html import rst_to_html # noqa: F401 try: - shlex_join = shlex.join # type: ignore[attr-defined] + shlex_join = shlex.join # type: ignore[attr-defined, unused-ignore] except AttributeError: # Python < 3.8 def shlex_join(split_command): @@ -178,9 +179,10 @@ def str_removeprefix(s: str, prefix: str): """ if sys.version_info >= (3, 9): return s.removeprefix(prefix) - if s.startswith(prefix): # type: ignore[unreachable] + elif s.startswith(prefix): return s[len(prefix) :] - return s + else: + return s def remove_protocol_from_url(url): @@ -1132,7 +1134,7 @@ def commaify(amount): @overload -def unicodify( # type: ignore[misc] +def unicodify( # type: ignore[overload-overlap] value: Literal[None], encoding: str = DEFAULT_ENCODING, error: str = "replace", @@ -1736,25 +1738,23 @@ def safe_str_cmp(a, b): return rv == 0 -# Don't use these two directly, prefer method version that "works" with packaged Galaxy. -galaxy_root_path = os.path.join(__path__[0], os.pardir, os.pardir, os.pardir) # type: ignore[name-defined] -galaxy_samples_path = os.path.join(__path__[0], os.pardir, "config", "sample") # type: ignore[name-defined] +# Don't use this directly, prefer method version that "works" with packaged Galaxy. 
+galaxy_root_path = Path(__file__).parent.parent.parent.parent -def galaxy_directory(): - path = galaxy_root_path +def galaxy_directory() -> str: if in_packages(): - path = os.path.join(galaxy_root_path, "..") + # This will work only when running pytest from /packages// + cwd = Path.cwd() + path = cwd.parent.parent + else: + path = galaxy_root_path return os.path.abspath(path) -def in_packages(): - # Normalize first; otherwise basename will be `..` - return os.path.basename(os.path.normpath(galaxy_root_path)) == "packages" - - -def galaxy_samples_directory(): - return os.path.join(galaxy_directory(), "lib", "galaxy", "config", "sample") +def in_packages() -> bool: + galaxy_lib_path = Path(__file__).parent.parent.parent + return galaxy_lib_path.name != "lib" def config_directories_from_setting(directories_setting, galaxy_root=galaxy_root_path): diff --git a/lib/galaxy/util/bool_expressions.py b/lib/galaxy/util/bool_expressions.py index 2b721158fc0c..aa529a10d970 100644 --- a/lib/galaxy/util/bool_expressions.py +++ b/lib/galaxy/util/bool_expressions.py @@ -81,7 +81,7 @@ def __str__(self): return f"({sep.join(map(str, self.args))})" def __bool__(self): - return self.evalop(bool(a) for a in self.args) # type: ignore[misc,call-arg] + return self.evalop(bool(a) for a in self.args) __nonzero__ = __bool__ diff --git a/lib/galaxy/util/image_util.py b/lib/galaxy/util/image_util.py index 2cf405ae6390..3b11a50d2fda 100644 --- a/lib/galaxy/util/image_util.py +++ b/lib/galaxy/util/image_util.py @@ -10,7 +10,7 @@ try: from PIL import Image except ImportError: - Image = None # type: ignore[assignment] + Image = None # type: ignore[assignment, unused-ignore] log = logging.getLogger(__name__) diff --git a/lib/galaxy/util/watcher.py b/lib/galaxy/util/watcher.py index 5fefcfb9c499..d57188280c72 100644 --- a/lib/galaxy/util/watcher.py +++ b/lib/galaxy/util/watcher.py @@ -13,9 +13,9 @@ can_watch = True except ImportError: - Observer = None # type:ignore[assignment] - FileSystemEventHandler = object # type:ignore[assignment, misc] - PollingObserver = None # type:ignore[assignment, misc] + Observer = None # type:ignore[assignment, unused-ignore] + FileSystemEventHandler = object # type:ignore[assignment,misc, unused-ignore] + PollingObserver = None # type:ignore[assignment, misc, unused-ignore] can_watch = False from galaxy.util.hash_util import md5_hash_file diff --git a/lib/galaxy/util/yaml_util.py b/lib/galaxy/util/yaml_util.py index 0ad96de335a3..c1a2fc410792 100644 --- a/lib/galaxy/util/yaml_util.py +++ b/lib/galaxy/util/yaml_util.py @@ -8,7 +8,7 @@ try: from yaml import CSafeLoader as SafeLoader except ImportError: - from yaml import SafeLoader # type: ignore[misc, assignment] + from yaml import SafeLoader # type: ignore[assignment] log = logging.getLogger(__name__) diff --git a/lib/galaxy/visualization/genomes.py b/lib/galaxy/visualization/genomes.py index 50672c8227b4..066785feae7a 100644 --- a/lib/galaxy/visualization/genomes.py +++ b/lib/galaxy/visualization/genomes.py @@ -269,8 +269,8 @@ def get_dbkeys(self, user: Optional[User], chrom_info=False): dbkeys = [] # Add user's custom keys to dbkeys. - if user and user.preferences and "dbkeys" in user.preferences: # type:ignore[unreachable] - user_keys_dict = loads(user.preferences["dbkeys"]) # type:ignore[unreachable] + if user and user.preferences and "dbkeys" in user.preferences: + user_keys_dict = loads(user.preferences["dbkeys"]) dbkeys.extend([(attributes["name"], key) for key, attributes in user_keys_dict.items()]) # Add app keys to dbkeys. 
diff --git a/lib/galaxy/webapps/__init__.py b/lib/galaxy/webapps/__init__.py index 265603ef3dea..7d9ae84e5cf5 100644 --- a/lib/galaxy/webapps/__init__.py +++ b/lib/galaxy/webapps/__init__.py @@ -2,4 +2,4 @@ from pkgutil import extend_path -__path__ = extend_path(__path__, __name__) # type: ignore[has-type] +__path__ = extend_path(__path__, __name__) diff --git a/lib/galaxy/webapps/base/webapp.py b/lib/galaxy/webapps/base/webapp.py index 8ab8494bf4fd..0029bb70a7e6 100644 --- a/lib/galaxy/webapps/base/webapp.py +++ b/lib/galaxy/webapps/base/webapp.py @@ -8,6 +8,7 @@ import re import socket import time +from contextlib import ExitStack from http.cookies import CookieError from typing import ( Any, @@ -51,6 +52,10 @@ safe_makedirs, unicodify, ) +from galaxy.util.resources import ( + as_file, + resource_path, +) from galaxy.util.sanitize_html import sanitize_html from galaxy.version import VERSION from galaxy.web.framework import ( @@ -60,12 +65,6 @@ ) from galaxy.web.framework.middleware.static import CacheableStaticURLParser as Static -try: - from importlib.resources import files # type: ignore[attr-defined] -except ImportError: - # Python < 3.9 - from importlib_resources import files # type: ignore[no-redef] - log = logging.getLogger(__name__) @@ -183,16 +182,19 @@ def create_mako_template_lookup(self, galaxy_app, name): base_package = ( "tool_shed.webapp" if galaxy_app.name == "tool_shed" else "galaxy.webapps.base" ) # reports has templates in galaxy package - base_template_path = files(base_package) / "templates" - # First look in webapp specific directory - if name is not None: - paths.append(base_template_path / "webapps" / name) - # Then look in root directory - paths.append(base_template_path) - # Create TemplateLookup with a small cache - return mako.lookup.TemplateLookup( - directories=paths, module_directory=galaxy_app.config.template_cache_path, collection_size=500 - ) + base_template_path = resource_path(base_package, "templates") + with ExitStack() as stack: + # First look in webapp specific directory + if name is not None: + path = stack.enter_context(as_file(base_template_path / "webapps" / name)) + paths.append(path) + # Then look in root directory + path = stack.enter_context(as_file(base_template_path)) + paths.append(path) + # Create TemplateLookup with a small cache + return mako.lookup.TemplateLookup( + directories=paths, module_directory=galaxy_app.config.template_cache_path, collection_size=500 + ) def handle_controller_exception(self, e, trans, method, **kwargs): if isinstance(e, TypeError): @@ -343,7 +345,7 @@ def __init__( self._ensure_valid_session(session_cookie) if hasattr(self.app, "authnz_manager") and self.app.authnz_manager: - self.app.authnz_manager.refresh_expiring_oidc_tokens(self) # type: ignore[attr-defined] + self.app.authnz_manager.refresh_expiring_oidc_tokens(self) if self.galaxy_session: # When we've authenticated by session, we have to check the diff --git a/lib/galaxy/webapps/openapi/utils.py b/lib/galaxy/webapps/openapi/utils.py index 369da1d4039d..88ae97e43c0b 100644 --- a/lib/galaxy/webapps/openapi/utils.py +++ b/lib/galaxy/webapps/openapi/utils.py @@ -117,4 +117,4 @@ def get_openapi( output["webhooks"] = webhook_paths if tags: output["tags"] = tags - return jsonable_encoder(OpenAPI(**output), by_alias=True, exclude_none=True) # type: ignore + return jsonable_encoder(OpenAPI(**output), by_alias=True, exclude_none=True) diff --git a/lib/galaxy/workflow/modules.py b/lib/galaxy/workflow/modules.py index 68d9d9e9e5e3..e1fcaceeefba 100644 --- 
a/lib/galaxy/workflow/modules.py +++ b/lib/galaxy/workflow/modules.py @@ -1385,14 +1385,14 @@ def get_inputs(self): def restrict_options(self, step, connections: Iterable[WorkflowStepConnection], default_value): try: - static_options = [] # type:ignore[var-annotated] + static_options = [] # Retrieve possible runtime options for 'select' type inputs for connection in connections: # Well this isn't a great assumption... assert connection.input_step module = connection.input_step.module assert isinstance(module, (ToolModule, SubWorkflowModule)) - if isinstance(module, ToolModule): # type:ignore[unreachable] + if isinstance(module, ToolModule): assert module.tool tool_inputs = module.tool.inputs # may not be set, but we're catching the Exception below. @@ -2551,7 +2551,7 @@ def inject_all(self, workflow: Workflow, param_map=None, ignore_tool_missing_exc def compute_runtime_state(self, step: WorkflowStep, step_args=None): assert step.module, "module must be injected before computing runtime state" - state, step_errors = step.module.compute_runtime_state(self.trans, step, step_args) # type:ignore[unreachable] + state, step_errors = step.module.compute_runtime_state(self.trans, step, step_args) step.state = state # Fix any missing parameters diff --git a/lib/galaxy_test/api/_framework.py b/lib/galaxy_test/api/_framework.py index d467860d7232..642f56412bd8 100644 --- a/lib/galaxy_test/api/_framework.py +++ b/lib/galaxy_test/api/_framework.py @@ -17,7 +17,7 @@ except ImportError: # Galaxy libraries and galaxy test driver not available, just assume we're # targetting a remote Galaxy. - GalaxyTestDriver = None # type: ignore[misc,assignment] + GalaxyTestDriver = None # type: ignore[assignment, misc, unused-ignore] class ApiTestCase(FunctionalTestCase, UsesApiTestCaseMixin, UsesCeleryTasks): diff --git a/lib/galaxy_test/selenium/framework.py b/lib/galaxy_test/selenium/framework.py index a75f5567115f..310c88c88053 100644 --- a/lib/galaxy_test/selenium/framework.py +++ b/lib/galaxy_test/selenium/framework.py @@ -536,7 +536,7 @@ def setup_shared_state(self): NavigatesGalaxyMixin = object -class UsesLibraryAssertions(NavigatesGalaxyMixin): # type:ignore[valid-type, misc] +class UsesLibraryAssertions(NavigatesGalaxyMixin): @retry_assertion_during_transitions def assert_num_displayed_items_is(self, n): num_displayed = self.num_displayed_items() @@ -546,7 +546,7 @@ def num_displayed_items(self) -> int: return len(self.libraries_table_elements()) -class UsesHistoryItemAssertions(NavigatesGalaxyMixin): # type:ignore[valid-type, misc] +class UsesHistoryItemAssertions(NavigatesGalaxyMixin): def assert_item_peek_includes(self, hid, expected): item_body = self.history_panel_item_component(hid=hid) peek_text = item_body.peek.wait_for_text() @@ -584,7 +584,7 @@ def assert_item_hid_text(self, hid): ) -class UsesWorkflowAssertions(NavigatesGalaxyMixin): # type:ignore[valid-type, misc] +class UsesWorkflowAssertions(NavigatesGalaxyMixin): @retry_assertion_during_transitions def _assert_showing_n_workflows(self, n): if (actual_count := len(self.workflow_card_elements())) != n: diff --git a/lib/tool_shed/test/base/playwrightbrowser.py b/lib/tool_shed/test/base/playwrightbrowser.py index 4c82a0d7183c..01949ff1eb96 100644 --- a/lib/tool_shed/test/base/playwrightbrowser.py +++ b/lib/tool_shed/test/base/playwrightbrowser.py @@ -132,7 +132,7 @@ def _fill_form_value(self, form: Locator, control_name: str, value: FormValueTyp input_s = form.locator(f"select[name='{control_name}']") if input_i.count(): if control_name in 
["redirect"]: - input_i.input_value = value # type:ignore[method-assign, assignment] + input_i.input_value = value # type:ignore[assignment, unused-ignore] else: if isinstance(value, bool): if value and not input_i.is_checked(): @@ -142,9 +142,9 @@ def _fill_form_value(self, form: Locator, control_name: str, value: FormValueTyp else: input_i.fill(value) if input_t.count(): - input_t.fill(value) # type:ignore[arg-type] + input_t.fill(value) # type:ignore[arg-type, unused-ignore] if input_s.count(): - input_s.select_option(value) # type:ignore[arg-type] + input_s.select_option(value) # type:ignore[arg-type, unused-ignore] def edit_repository_categories(self, categories_to_add: List[str], categories_to_remove: List[str]) -> None: multi_select = "form[name='categories'] select[name='category_id']" diff --git a/lib/tool_shed/test/base/populators.py b/lib/tool_shed/test/base/populators.py index 32a3f97b86b7..460addd27e17 100644 --- a/lib/tool_shed/test/base/populators.py +++ b/lib/tool_shed/test/base/populators.py @@ -2,6 +2,7 @@ from pathlib import Path from tempfile import NamedTemporaryFile from typing import ( + Iterator, List, Optional, Union, @@ -11,7 +12,7 @@ from typing_extensions import Protocol from galaxy.util.resources import ( - files, + as_file, resource_path, Traversable, ) @@ -50,31 +51,27 @@ HasRepositoryId = Union[str, Repository] DEFAULT_PREFIX = "repofortest" -COLUMN_MAKER_PATH = resource_path(__package__, "../test_data/column_maker/column_maker.tar") -COLUMN_MAKER_1_1_1_PATH = resource_path(__package__, "../test_data/column_maker/column_maker.tar") +TEST_DATA_REPO_FILES = resource_path(__package__, "../test_data") +COLUMN_MAKER_PATH = TEST_DATA_REPO_FILES.joinpath("column_maker/column_maker.tar") +COLUMN_MAKER_1_1_1_PATH = TEST_DATA_REPO_FILES.joinpath("column_maker/column_maker_1.1.1.tar") DEFAULT_COMMIT_MESSAGE = "a test commit message" -TEST_DATA_REPO_FILES = files("tool_shed.test.test_data") -def repo_files(test_data_path: str) -> List[Path]: +def repo_files(test_data_path: str) -> Iterator[Path]: repos = TEST_DATA_REPO_FILES.joinpath(f"repos/{test_data_path}") - paths = sorted(Path(str(x)) for x in repos.iterdir()) - return paths + for child in sorted(_.name for _ in repos.iterdir()): + with as_file(repos.joinpath(child)) as path: + yield path -def repo_tars(test_data_path: str) -> List[Path]: - tar_paths = [] +def repo_tars(test_data_path: str) -> Iterator[Path]: for path in repo_files(test_data_path): - if path.is_dir(): - prefix = f"shedtest_{test_data_path}_{path.name}_" - tf = NamedTemporaryFile(delete=False, prefix=prefix) + assert path.is_dir() + prefix = f"shedtest_{test_data_path}_{path.name}_" + with NamedTemporaryFile(prefix=prefix) as tf: with tarfile.open(tf.name, "w:gz") as tar: tar.add(str(path.absolute()), arcname=test_data_path or path.name) - tar_path = tf.name - else: - tar_path = str(path) - tar_paths.append(Path(tar_path)) - return tar_paths + yield Path(tf.name) class HostsTestToolShed(Protocol): diff --git a/lib/tool_shed/test/base/twillbrowser.py b/lib/tool_shed/test/base/twillbrowser.py index a73cdf85b299..b023bec47dc4 100644 --- a/lib/tool_shed/test/base/twillbrowser.py +++ b/lib/tool_shed/test/base/twillbrowser.py @@ -5,7 +5,7 @@ ) import twill.commands as tc -from twill.browser import FormElement # type:ignore[attr-defined] +from twill.browser import FormElement # type:ignore[attr-defined, unused-ignore] from galaxy.util import smart_str from .browser import ( @@ -19,13 +19,13 @@ def visit_url(url: str, allowed_codes: List[int]) -> str: - 
new_url = tc.go(url) # type:ignore[func-returns-value] + tc.go(url) return_code = tc.browser.code assert return_code in allowed_codes, "Invalid HTTP return code {}, allowed codes: {}".format( return_code, ", ".join(str(code) for code in allowed_codes), ) - return new_url + return url def page_content() -> str: diff --git a/lib/tool_shed/webapp/model/__init__.py b/lib/tool_shed/webapp/model/__init__.py index e6fe9da4d4b8..a0b1a178cdd5 100644 --- a/lib/tool_shed/webapp/model/__init__.py +++ b/lib/tool_shed/webapp/model/__init__.py @@ -115,12 +115,12 @@ class User(Base, Dictifiable): purged: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) active_repositories = relationship( "Repository", - primaryjoin=(lambda: (Repository.user_id == User.id) & (not_(Repository.deleted))), # type: ignore[has-type] + primaryjoin=(lambda: (Repository.user_id == User.id) & (not_(Repository.deleted))), back_populates="user", - order_by=lambda: desc(Repository.name), # type: ignore[has-type] + order_by=lambda: desc(Repository.name), ) galaxy_sessions = relationship( - "GalaxySession", back_populates="user", order_by=lambda: desc(GalaxySession.update_time) # type: ignore[has-type] + "GalaxySession", back_populates="user", order_by=lambda: desc(GalaxySession.update_time) ) api_keys = relationship("APIKeys", back_populates="user", order_by=lambda: desc(APIKeys.create_time)) reset_tokens = relationship("PasswordResetToken", back_populates="user") @@ -134,9 +134,9 @@ class User(Base, Dictifiable): "UserRoleAssociation", viewonly=True, primaryjoin=( - lambda: (User.id == UserRoleAssociation.user_id) # type: ignore[has-type] - & (UserRoleAssociation.role_id == Role.id) # type: ignore[has-type] - & not_(Role.name == User.email) # type: ignore[has-type] + lambda: (User.id == UserRoleAssociation.user_id) + & (UserRoleAssociation.role_id == Role.id) + & not_(Role.name == User.email) ), ) @@ -379,7 +379,7 @@ class Repository(Base, Dictifiable): user = relationship("User", back_populates="active_repositories") downloadable_revisions = relationship( "RepositoryMetadata", - primaryjoin=lambda: (Repository.id == RepositoryMetadata.repository_id) & (RepositoryMetadata.downloadable == true()), # type: ignore[attr-defined,has-type] + primaryjoin=lambda: (Repository.id == RepositoryMetadata.repository_id) & (RepositoryMetadata.downloadable == true()), # type: ignore[has-type] viewonly=True, order_by=lambda: desc(RepositoryMetadata.update_time), # type: ignore[attr-defined] ) diff --git a/mypy.ini b/mypy.ini index d10c673b7d45..0ec6ae269986 100644 --- a/mypy.ini +++ b/mypy.ini @@ -4,15 +4,17 @@ show_error_codes = True ignore_missing_imports = True check_untyped_defs = True exclude = (?x)( - ^lib/galaxy/tools/bundled - | ^test/functional - | .*tool_shed/test/test_data/repos + ^build/ + | ^lib/galaxy/tools/bundled/ + | ^test/functional/ + | .*tool_shed/test/test_data/repos/ ) pretty = True no_implicit_reexport = True no_implicit_optional = True strict_equality = True warn_unreachable = True +warn_unused_ignores = True platform = linux # green list - work on growing these please! 
diff --git a/packages/app/galaxy/__init__.py b/packages/app/galaxy/__init__.py deleted file mode 100644 index 2e50d9cce896..000000000000 --- a/packages/app/galaxy/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore[has-type] diff --git a/packages/app/galaxy/__init__.py b/packages/app/galaxy/__init__.py new file mode 120000 index 000000000000..6b4f2177e02a --- /dev/null +++ b/packages/app/galaxy/__init__.py @@ -0,0 +1 @@ +../../package.__init__.py \ No newline at end of file diff --git a/packages/auth/galaxy/__init__.py b/packages/auth/galaxy/__init__.py deleted file mode 100644 index 2e50d9cce896..000000000000 --- a/packages/auth/galaxy/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore[has-type] diff --git a/packages/auth/galaxy/__init__.py b/packages/auth/galaxy/__init__.py new file mode 120000 index 000000000000..6b4f2177e02a --- /dev/null +++ b/packages/auth/galaxy/__init__.py @@ -0,0 +1 @@ +../../package.__init__.py \ No newline at end of file diff --git a/packages/config/MANIFEST.in b/packages/config/MANIFEST.in index 2adbec36b850..41466cf0b424 100644 --- a/packages/config/MANIFEST.in +++ b/packages/config/MANIFEST.in @@ -1,3 +1,4 @@ include *.rst *.txt LICENSE */py.typed include galaxy/config/schemas/*.yml include galaxy/config/sample/*.sample* +graft galaxy/config/templates/ diff --git a/packages/config/galaxy/__init__.py b/packages/config/galaxy/__init__.py deleted file mode 100644 index 2e50d9cce896..000000000000 --- a/packages/config/galaxy/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore[has-type] diff --git a/packages/config/galaxy/__init__.py b/packages/config/galaxy/__init__.py new file mode 120000 index 000000000000..6b4f2177e02a --- /dev/null +++ b/packages/config/galaxy/__init__.py @@ -0,0 +1 @@ +../../package.__init__.py \ No newline at end of file diff --git a/packages/data/galaxy/__init__.py b/packages/data/galaxy/__init__.py deleted file mode 100644 index 2e50d9cce896..000000000000 --- a/packages/data/galaxy/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore[has-type] diff --git a/packages/data/galaxy/__init__.py b/packages/data/galaxy/__init__.py new file mode 120000 index 000000000000..6b4f2177e02a --- /dev/null +++ b/packages/data/galaxy/__init__.py @@ -0,0 +1 @@ +../../package.__init__.py \ No newline at end of file diff --git a/packages/data/setup.cfg b/packages/data/setup.cfg index 3e3e0e085e5c..d1404b141164 100644 --- a/packages/data/setup.cfg +++ b/packages/data/setup.cfg @@ -34,6 +34,7 @@ include_package_data = True install_requires = galaxy-files galaxy-objectstore + galaxy-schema galaxy-tool-util galaxy-util[template] alembic diff --git a/packages/files/galaxy/__init__.py b/packages/files/galaxy/__init__.py deleted file mode 100644 index 2e50d9cce896..000000000000 --- a/packages/files/galaxy/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore[has-type] diff --git a/packages/files/galaxy/__init__.py b/packages/files/galaxy/__init__.py new file mode 120000 index 000000000000..6b4f2177e02a --- /dev/null +++ b/packages/files/galaxy/__init__.py @@ -0,0 +1 @@ +../../package.__init__.py \ No newline at end of file diff --git a/packages/job_execution/galaxy/__init__.py b/packages/job_execution/galaxy/__init__.py deleted 
file mode 100644 index 2e50d9cce896..000000000000 --- a/packages/job_execution/galaxy/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore[has-type] diff --git a/packages/job_execution/galaxy/__init__.py b/packages/job_execution/galaxy/__init__.py new file mode 120000 index 000000000000..6b4f2177e02a --- /dev/null +++ b/packages/job_execution/galaxy/__init__.py @@ -0,0 +1 @@ +../../package.__init__.py \ No newline at end of file diff --git a/packages/job_metrics/galaxy/__init__.py b/packages/job_metrics/galaxy/__init__.py deleted file mode 100644 index 2e50d9cce896..000000000000 --- a/packages/job_metrics/galaxy/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore[has-type] diff --git a/packages/job_metrics/galaxy/__init__.py b/packages/job_metrics/galaxy/__init__.py new file mode 120000 index 000000000000..6b4f2177e02a --- /dev/null +++ b/packages/job_metrics/galaxy/__init__.py @@ -0,0 +1 @@ +../../package.__init__.py \ No newline at end of file diff --git a/packages/navigation/galaxy/__init__.py b/packages/navigation/galaxy/__init__.py deleted file mode 100644 index 8db66d3d0f0f..000000000000 --- a/packages/navigation/galaxy/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/packages/navigation/galaxy/__init__.py b/packages/navigation/galaxy/__init__.py new file mode 120000 index 000000000000..6b4f2177e02a --- /dev/null +++ b/packages/navigation/galaxy/__init__.py @@ -0,0 +1 @@ +../../package.__init__.py \ No newline at end of file diff --git a/packages/objectstore/galaxy/__init__.py b/packages/objectstore/galaxy/__init__.py deleted file mode 100644 index 2e50d9cce896..000000000000 --- a/packages/objectstore/galaxy/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore[has-type] diff --git a/packages/objectstore/galaxy/__init__.py b/packages/objectstore/galaxy/__init__.py new file mode 120000 index 000000000000..6b4f2177e02a --- /dev/null +++ b/packages/objectstore/galaxy/__init__.py @@ -0,0 +1 @@ +../../package.__init__.py \ No newline at end of file diff --git a/packages/package.__init__.py b/packages/package.__init__.py new file mode 100644 index 000000000000..b948ee6512da --- /dev/null +++ b/packages/package.__init__.py @@ -0,0 +1,3 @@ +from pkgutil import extend_path + +__path__ = extend_path(__path__, __name__) # noqa: F821 diff --git a/packages/schema/galaxy/__init__.py b/packages/schema/galaxy/__init__.py deleted file mode 100644 index 2e50d9cce896..000000000000 --- a/packages/schema/galaxy/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore[has-type] diff --git a/packages/schema/galaxy/__init__.py b/packages/schema/galaxy/__init__.py new file mode 120000 index 000000000000..6b4f2177e02a --- /dev/null +++ b/packages/schema/galaxy/__init__.py @@ -0,0 +1 @@ +../../package.__init__.py \ No newline at end of file diff --git a/packages/selenium/galaxy/__init__.py b/packages/selenium/galaxy/__init__.py deleted file mode 100644 index 8db66d3d0f0f..000000000000 --- a/packages/selenium/galaxy/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/packages/selenium/galaxy/__init__.py b/packages/selenium/galaxy/__init__.py new file mode 120000 index 000000000000..6b4f2177e02a --- /dev/null +++ 
b/packages/selenium/galaxy/__init__.py @@ -0,0 +1 @@ +../../package.__init__.py \ No newline at end of file diff --git a/packages/test.sh b/packages/test.sh index ae69d9b20516..0d1d1655d941 100755 --- a/packages/test.sh +++ b/packages/test.sh @@ -49,11 +49,11 @@ while read -r package_dir || [ -n "$package_dir" ]; do # https://stackoverflow. # Install extras (if needed) if [ "$package_dir" = "util" ]; then - pip install -e '.[template,jstree]' + pip install '.[template,jstree]' elif [ "$package_dir" = "tool_util" ]; then - pip install -e '.[cwl,mulled,edam,extended-assertions]' + pip install '.[cwl,mulled,edam,extended-assertions]' else - pip install -e '.' + pip install . fi pip install -r test-requirements.txt diff --git a/packages/test_api/galaxy_test/__init__.py b/packages/test_api/galaxy_test/__init__.py deleted file mode 100644 index 8db66d3d0f0f..000000000000 --- a/packages/test_api/galaxy_test/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/packages/test_api/galaxy_test/__init__.py b/packages/test_api/galaxy_test/__init__.py new file mode 120000 index 000000000000..6b4f2177e02a --- /dev/null +++ b/packages/test_api/galaxy_test/__init__.py @@ -0,0 +1 @@ +../../package.__init__.py \ No newline at end of file diff --git a/packages/test_base/galaxy_test/__init__.py b/packages/test_base/galaxy_test/__init__.py deleted file mode 100644 index 8db66d3d0f0f..000000000000 --- a/packages/test_base/galaxy_test/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/packages/test_base/galaxy_test/__init__.py b/packages/test_base/galaxy_test/__init__.py new file mode 120000 index 000000000000..6b4f2177e02a --- /dev/null +++ b/packages/test_base/galaxy_test/__init__.py @@ -0,0 +1 @@ +../../package.__init__.py \ No newline at end of file diff --git a/packages/test_driver/galaxy_test/__init__.py b/packages/test_driver/galaxy_test/__init__.py deleted file mode 100644 index 8db66d3d0f0f..000000000000 --- a/packages/test_driver/galaxy_test/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/packages/test_driver/galaxy_test/__init__.py b/packages/test_driver/galaxy_test/__init__.py new file mode 120000 index 000000000000..6b4f2177e02a --- /dev/null +++ b/packages/test_driver/galaxy_test/__init__.py @@ -0,0 +1 @@ +../../package.__init__.py \ No newline at end of file diff --git a/packages/test_selenium/galaxy_test/__init__.py b/packages/test_selenium/galaxy_test/__init__.py deleted file mode 100644 index 8db66d3d0f0f..000000000000 --- a/packages/test_selenium/galaxy_test/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/packages/test_selenium/galaxy_test/__init__.py b/packages/test_selenium/galaxy_test/__init__.py new file mode 120000 index 000000000000..6b4f2177e02a --- /dev/null +++ b/packages/test_selenium/galaxy_test/__init__.py @@ -0,0 +1 @@ +../../package.__init__.py \ No newline at end of file diff --git a/packages/tool_util/galaxy/__init__.py b/packages/tool_util/galaxy/__init__.py deleted file mode 100644 index 2e50d9cce896..000000000000 --- a/packages/tool_util/galaxy/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore[has-type] diff --git a/packages/tool_util/galaxy/__init__.py b/packages/tool_util/galaxy/__init__.py new file mode 120000 index 
000000000000..6b4f2177e02a --- /dev/null +++ b/packages/tool_util/galaxy/__init__.py @@ -0,0 +1 @@ +../../package.__init__.py \ No newline at end of file diff --git a/packages/tours/galaxy/__init__.py b/packages/tours/galaxy/__init__.py deleted file mode 100644 index 8db66d3d0f0f..000000000000 --- a/packages/tours/galaxy/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/packages/tours/galaxy/__init__.py b/packages/tours/galaxy/__init__.py new file mode 120000 index 000000000000..6b4f2177e02a --- /dev/null +++ b/packages/tours/galaxy/__init__.py @@ -0,0 +1 @@ +../../package.__init__.py \ No newline at end of file diff --git a/packages/util/galaxy/__init__.py b/packages/util/galaxy/__init__.py deleted file mode 100644 index 2e50d9cce896..000000000000 --- a/packages/util/galaxy/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore[has-type] diff --git a/packages/util/galaxy/__init__.py b/packages/util/galaxy/__init__.py new file mode 120000 index 000000000000..6b4f2177e02a --- /dev/null +++ b/packages/util/galaxy/__init__.py @@ -0,0 +1 @@ +../../package.__init__.py \ No newline at end of file diff --git a/packages/web_apps/galaxy/__init__.py b/packages/web_apps/galaxy/__init__.py deleted file mode 100644 index 2e50d9cce896..000000000000 --- a/packages/web_apps/galaxy/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore[has-type] diff --git a/packages/web_apps/galaxy/__init__.py b/packages/web_apps/galaxy/__init__.py new file mode 120000 index 000000000000..6b4f2177e02a --- /dev/null +++ b/packages/web_apps/galaxy/__init__.py @@ -0,0 +1 @@ +../../package.__init__.py \ No newline at end of file diff --git a/packages/web_framework/galaxy/__init__.py b/packages/web_framework/galaxy/__init__.py deleted file mode 100644 index 2e50d9cce896..000000000000 --- a/packages/web_framework/galaxy/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore[has-type] diff --git a/packages/web_framework/galaxy/__init__.py b/packages/web_framework/galaxy/__init__.py new file mode 120000 index 000000000000..6b4f2177e02a --- /dev/null +++ b/packages/web_framework/galaxy/__init__.py @@ -0,0 +1 @@ +../../package.__init__.py \ No newline at end of file diff --git a/packages/web_stack/galaxy/__init__.py b/packages/web_stack/galaxy/__init__.py deleted file mode 100644 index 2e50d9cce896..000000000000 --- a/packages/web_stack/galaxy/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore[has-type] diff --git a/packages/web_stack/galaxy/__init__.py b/packages/web_stack/galaxy/__init__.py new file mode 120000 index 000000000000..6b4f2177e02a --- /dev/null +++ b/packages/web_stack/galaxy/__init__.py @@ -0,0 +1 @@ +../../package.__init__.py \ No newline at end of file diff --git a/test/integration/test_celery_user_rate_limit.py b/test/integration/test_celery_user_rate_limit.py index 3267a2dd5f53..61f2f207735d 100644 --- a/test/integration/test_celery_user_rate_limit.py +++ b/test/integration/test_celery_user_rate_limit.py @@ -89,7 +89,7 @@ def _test_mock_pass_user_id_task(self, num_users: int, num_calls: int, tasks_per results: Dict[int, List[AsyncResult]] = {} for user in users: user_results: List[AsyncResult] = [] - for _ in range(num_calls): # type: ignore + for _ in range(num_calls): 
user_results.append(mock_user_id_task.delay(task_user_id=user)) results[user] = user_results # Collect results of each call diff --git a/test/unit/app/jobs/test_job_configuration.py b/test/unit/app/jobs/test_job_configuration.py index 56ba267fb11c..dc8e19c8a07e 100644 --- a/test/unit/app/jobs/test_job_configuration.py +++ b/test/unit/app/jobs/test_job_configuration.py @@ -2,6 +2,10 @@ import os import shutil import tempfile +from typing import ( + Dict, + Optional, +) from unittest import mock from pykwalify.core import Core @@ -9,9 +13,11 @@ from galaxy.config import GALAXY_SCHEMAS_PATH from galaxy.job_metrics import JobMetrics from galaxy.jobs import JobConfiguration -from galaxy.util import ( - galaxy_directory, - galaxy_samples_directory, +from galaxy.util import galaxy_directory +from galaxy.util.path import StrPath +from galaxy.util.resources import ( + as_file, + resource_path, ) from galaxy.util.unittest import TestCase from galaxy.web_stack import ApplicationStack @@ -19,9 +25,10 @@ # File would be slightly more readable if contents were embedded directly, but # there are advantages to testing the documentation/examples. -SIMPLE_JOB_CONF = os.path.join(galaxy_samples_directory(), "job_conf.xml.sample_basic") -ADVANCED_JOB_CONF = os.path.join(galaxy_samples_directory(), "job_conf.xml.sample_advanced") -ADVANCED_JOB_CONF_YAML = os.path.join(galaxy_samples_directory(), "job_conf.sample.yml") +GALAXY_SAMPLES_DIRECTORY = resource_path("galaxy.config", "sample") +SIMPLE_JOB_CONF = GALAXY_SAMPLES_DIRECTORY / "job_conf.xml.sample_basic" +ADVANCED_JOB_CONF = GALAXY_SAMPLES_DIRECTORY / "job_conf.xml.sample_advanced" +ADVANCED_JOB_CONF_YAML = GALAXY_SAMPLES_DIRECTORY / "job_conf.sample.yml" CONDITIONAL_RUNNER_JOB_CONF = os.path.join(os.path.dirname(__file__), "conditional_runners_job_conf.xml") HANDLER_TEMPLATE_JOB_CONF = os.path.join(os.path.dirname(__file__), "handler_template_job_conf.xml") @@ -45,7 +52,8 @@ def setUp(self): track_jobs_in_database=True, server_name="main", ) - self._write_config_from(SIMPLE_JOB_CONF) + with as_file(SIMPLE_JOB_CONF) as path: + self._write_config_from(path) self._app = None self._application_stack = None self._job_configuration = None @@ -91,7 +99,7 @@ def _with_handlers_config(self, assign_with=None, default=None, handlers=None, b self._job_configuration_base_pools = base_pools self._write_config_from(HANDLER_TEMPLATE_JOB_CONF, template=template) - def _write_config_from(self, path, template=None): + def _write_config_from(self, path: StrPath, template: Optional[Dict[str, str]] = None) -> None: template = template or {} try: contents = open(path).read() @@ -118,9 +126,11 @@ def _write_config(self, contents): def _with_advanced_config(self): if self.extension == "xml": - self._write_config_from(ADVANCED_JOB_CONF) + trav = ADVANCED_JOB_CONF else: - self._write_config_from(ADVANCED_JOB_CONF_YAML) + trav = ADVANCED_JOB_CONF_YAML + with as_file(trav) as path: + self._write_config_from(path) class TestSimpleJobConfXmlParser(BaseJobConfXmlParserTestCase): @@ -357,17 +367,18 @@ class TestAdvancedJobConfYamlParser(TestAdvancedJobConfXmlParser): def test_yaml_advanced_validation(): schema = GALAXY_SCHEMAS_PATH / "job_config_schema.yml" integration_tests_dir = os.path.join(galaxy_directory(), "test", "integration") - valid_files = [ - ADVANCED_JOB_CONF_YAML, - os.path.join(integration_tests_dir, "delay_job_conf.yml"), - os.path.join(integration_tests_dir, "embedded_pulsar_metadata_job_conf.yml"), - os.path.join(integration_tests_dir, "io_injection_job_conf.yml"), - 
os.path.join(integration_tests_dir, "resubmission_job_conf.yml"), - os.path.join(integration_tests_dir, "resubmission_default_job_conf.yml"), - ] - for valid_file in valid_files: - c = Core( - source_file=valid_file, - schema_files=[str(schema)], - ) - c.validate() + with as_file(ADVANCED_JOB_CONF_YAML) as advanced_job_conf_yaml_path, as_file(schema) as schema_path: + valid_files = [ + os.fspath(advanced_job_conf_yaml_path), + os.path.join(integration_tests_dir, "delay_job_conf.yml"), + os.path.join(integration_tests_dir, "embedded_pulsar_metadata_job_conf.yml"), + os.path.join(integration_tests_dir, "io_injection_job_conf.yml"), + os.path.join(integration_tests_dir, "resubmission_job_conf.yml"), + os.path.join(integration_tests_dir, "resubmission_default_job_conf.yml"), + ] + for valid_file in valid_files: + c = Core( + source_file=valid_file, + schema_files=[os.fspath(schema_path)], + ) + c.validate() diff --git a/test/unit/app/jobs/test_job_wrapper.py b/test/unit/app/jobs/test_job_wrapper.py index 6c914c3efafa..0a2976c0d2d5 100644 --- a/test/unit/app/jobs/test_job_wrapper.py +++ b/test/unit/app/jobs/test_job_wrapper.py @@ -66,7 +66,7 @@ def tearDown(self): @contextmanager def _prepared_wrapper(self): wrapper = self._wrapper() - wrapper._get_tool_evaluator = lambda *args, **kwargs: MockEvaluator(wrapper.app, wrapper.tool, wrapper.get_job(), wrapper.working_directory) # type: ignore[assignment] + wrapper._get_tool_evaluator = lambda *args, **kwargs: MockEvaluator(wrapper.app, wrapper.tool, wrapper.get_job(), wrapper.working_directory) # type: ignore[method-assign] wrapper.prepare() yield wrapper diff --git a/test/unit/app/managers/test_HistoryManager.py b/test/unit/app/managers/test_HistoryManager.py index c3627c87d166..021e348fc019 100644 --- a/test/unit/app/managers/test_HistoryManager.py +++ b/test/unit/app/managers/test_HistoryManager.py @@ -88,7 +88,7 @@ def test_base(self): name_first_then_time = ( model.History.name, sqlalchemy.desc(model.History.create_time), - ) # type:ignore[var-annotated] + ) assert self.history_manager.list(order_by=name_first_then_time) == [history2, history1, history3] def test_copy(self): diff --git a/test/unit/app/managers/test_markdown_export.py b/test/unit/app/managers/test_markdown_export.py index 72f5b900a5d7..3470061b359f 100644 --- a/test/unit/app/managers/test_markdown_export.py +++ b/test/unit/app/managers/test_markdown_export.py @@ -46,15 +46,15 @@ def _new_invocation(self): @contextmanager def _expect_get_history(self, history): - self.app.history_manager.get_accessible.return_value = history # type: ignore[attr-defined,union-attr] + self.app.history_manager.get_accessible.return_value = history yield - self.app.history_manager.get_accessible.assert_called_once_with(history.id, self.trans.user) # type: ignore[attr-defined,union-attr] + self.app.history_manager.get_accessible.assert_called_once_with(history.id, self.trans.user) @contextmanager def _expect_get_hda(self, hda, hda_id=1): - self.app.hda_manager.get_accessible.return_value = hda # type: ignore[attr-defined,union-attr] + self.app.hda_manager.get_accessible.return_value = hda yield - self.app.hda_manager.get_accessible.assert_called_once_with(hda.id, self.trans.user) # type: ignore[attr-defined,union-attr] + self.app.hda_manager.get_accessible.assert_called_once_with(hda.id, self.trans.user) def _new_pair_collection(self): hda_forward = self._new_hda(contents="Forward dataset.") @@ -222,7 +222,7 @@ def test_history_collection_paired(self): hdca.collection = self._new_pair_collection() 
hdca.id = 1 - self.trans.app.dataset_collection_manager.get_dataset_collection_instance.return_value = hdca # type: ignore[attr-defined,union-attr] + self.trans.app.dataset_collection_manager.get_dataset_collection_instance.return_value = hdca example = """# Example ```galaxy history_dataset_collection_display(history_dataset_collection_id=1) @@ -243,7 +243,7 @@ def test_workflow_export(self): stored_workflow.latest_workflow = workflow workflow_step_0 = model.WorkflowStep() workflow.steps = [workflow_step_0] - self.trans.app.workflow_manager.get_stored_accessible_workflow.return_value = stored_workflow # type: ignore[attr-defined,union-attr] + self.trans.app.workflow_manager.get_stored_accessible_workflow.return_value = stored_workflow example = """# Example ```galaxy workflow_display(workflow_id=1) @@ -278,7 +278,7 @@ def test_generate_invocation_time(self): ``` """ invocation = self._new_invocation() - self.app.workflow_manager.get_invocation.side_effect = [invocation] # type: ignore[attr-defined,union-attr] + self.app.workflow_manager.get_invocation.side_effect = [invocation] result = self._to_basic(example) expectedtime = invocation.create_time.strftime("%Y-%m-%d, %H:%M:%S") assert f"\n {expectedtime}" in result @@ -356,7 +356,7 @@ def test_ready_export_two_datasets(self): history_dataset_display(history_dataset_id=2) ``` """ - self.app.hda_manager.get_accessible.side_effect = [hda, hda2] # type: ignore[attr-defined,union-attr] + self.app.hda_manager.get_accessible.side_effect = [hda, hda2] export_markdown, extra_data = self._ready_export(example) assert "history_datasets" in extra_data assert len(extra_data["history_datasets"]) == 2 @@ -369,7 +369,7 @@ def test_export_dataset_collection_paired(self): hdca.history_id = 1 hdca.collection_id = hdca.collection.id - self.trans.app.dataset_collection_manager.get_dataset_collection_instance.return_value = hdca # type: ignore[attr-defined,union-attr] + self.trans.app.dataset_collection_manager.get_dataset_collection_instance.return_value = hdca example = """# Example ```galaxy history_dataset_collection_display(history_dataset_collection_id=1) @@ -404,7 +404,7 @@ def test_generate_time(self): def test_get_invocation_time(self): invocation = self._new_invocation() - self.app.workflow_manager.get_invocation.side_effect = [invocation] # type: ignore[attr-defined,union-attr] + self.app.workflow_manager.get_invocation.side_effect = [invocation] example = """# Example ```galaxy invocation_time(invocation_id=1) diff --git a/test/unit/app/queue_worker/conftest.py b/test/unit/app/queue_worker/conftest.py index 6adec5acd709..4e8fa6b0be4f 100644 --- a/test/unit/app/queue_worker/conftest.py +++ b/test/unit/app/queue_worker/conftest.py @@ -7,7 +7,7 @@ try: import psycopg except ImportError: - psycopg = None # type: ignore[assignment] + psycopg = None # type: ignore[assignment, unused-ignore] try: import psycopg2 diff --git a/test/unit/test_celery.py b/test/unit/app/test_celery.py similarity index 100% rename from test/unit/test_celery.py rename to test/unit/app/test_celery.py diff --git a/test/unit/app/test_dbscript.py b/test/unit/app/test_dbscript.py index ee0decf538c1..4b68c60d51d4 100644 --- a/test/unit/app/test_dbscript.py +++ b/test/unit/app/test_dbscript.py @@ -24,10 +24,11 @@ from galaxy.model.unittest_utils.model_testing_utils import ( # noqa: F401 - url_factory is a fixture we have to import explicitly url_factory, ) -from galaxy.util import in_packages -from galaxy.util.resources import resource_path - -pytestmark = 
pytest.mark.skipif(in_packages(), reason="Running from packages") +from galaxy.util.resources import ( + as_file, + resource_path, + Traversable, +) DbUrl = NewType("DbUrl", str) @@ -42,19 +43,21 @@ @pytest.fixture(scope="session") -def migrations_dir(): +def migrations_dir() -> Traversable: """[galaxy-root]/lib/galaxy/model/migrations/""" return resource_path("galaxy.model", "migrations") @pytest.fixture(scope="session") -def alembic_env_dir(migrations_dir) -> str: +def alembic_env_dir(migrations_dir: Traversable) -> Traversable: """[galaxy-root]/lib/galaxy/model/migrations/alembic/""" return migrations_dir / "alembic" @pytest.fixture(params=["one database", "two databases"]) -def config(url_factory, alembic_env_dir, alembic_config_text, tmp_directory, monkeypatch, request): # noqa: F811 +def config( + url_factory, alembic_env_dir: Traversable, alembic_config_text, tmp_directory, monkeypatch, request # noqa: F811 +): """ Construct Config object for staging; setup staging env. """ @@ -69,7 +72,8 @@ def config(url_factory, alembic_env_dir, alembic_config_text, tmp_directory, mon # Copy production alembic.ini to staging location config_file_path = os.path.join(tmp_directory, "alembic.ini") - update_config_for_staging(alembic_config_text, alembic_env_dir, version_locations, gxy_dburl) + with as_file(alembic_env_dir) as alembic_env_dir_path: + update_config_for_staging(alembic_config_text, os.fspath(alembic_env_dir_path), version_locations, gxy_dburl) write_to_file(config_file_path, alembic_config_text) alembic_cfg = Config(config_file_path) diff --git a/test/unit/app/tools/test_parameter_validation.py b/test/unit/app/tools/test_parameter_validation.py index 2e0bf0ffe857..4ef91328a595 100644 --- a/test/unit/app/tools/test_parameter_validation.py +++ b/test/unit/app/tools/test_parameter_validation.py @@ -1,16 +1,18 @@ -from galaxy.datatypes.sniff import get_test_fname +import os.path + from galaxy.model import ( Dataset, History, HistoryDatasetAssociation, ) +from galaxy.util import galaxy_directory from .util import BaseParameterTestCase -# def get_test_fname(fname): -# """Returns test data filename""" -# path, name = os.path.split(__file__) -# full_path = os.path.join(path, "test", fname) -# return full_path + +def get_test_data_path(name: str): + path = os.path.join(galaxy_directory(), "test-data", name) + assert os.path.isfile(path), f"{path} is not a file" + return path class TestParameterValidation(BaseParameterTestCase): @@ -259,11 +261,11 @@ def test_DatasetEmptyValidator(self): hist = History() with sa_session.begin(): sa_session.add(hist) - empty_dataset = Dataset(external_filename=get_test_fname("empty.txt")) + empty_dataset = Dataset(external_filename=get_test_data_path("empty.txt")) empty_hda = hist.add_dataset( HistoryDatasetAssociation(id=1, extension="interval", dataset=empty_dataset, sa_session=sa_session) ) - full_dataset = Dataset(external_filename=get_test_fname("1.json")) + full_dataset = Dataset(external_filename=get_test_data_path("1.interval")) full_hda = hist.add_dataset( HistoryDatasetAssociation(id=2, extension="interval", dataset=full_dataset, sa_session=sa_session) ) @@ -346,7 +348,7 @@ def test_MetadataValidator(self): extension="bed", create_dataset=True, sa_session=sa_session, - dataset=Dataset(external_filename=get_test_fname("1.bed")), + dataset=Dataset(external_filename=get_test_data_path("1.bed")), ) ) hda.state = Dataset.states.OK diff --git a/test/unit/config/test_load_config.py b/test/unit/config/test_load_config.py index 7ab70339437f..dda85e9552cf 100644 
--- a/test/unit/config/test_load_config.py +++ b/test/unit/config/test_load_config.py @@ -1,3 +1,5 @@ +from pathlib import Path + import pytest from galaxy.config import BaseAppConfiguration @@ -25,7 +27,7 @@ def get_schema(app_mapping): @pytest.fixture def mock_init(monkeypatch): - monkeypatch.setattr(BaseAppConfiguration, "_load_schema", lambda a: AppSchema(None, "_")) + monkeypatch.setattr(BaseAppConfiguration, "_load_schema", lambda a: AppSchema(Path("no path"), "_")) monkeypatch.setattr(AppSchema, "_read_schema", lambda a, b: get_schema(MOCK_SCHEMA)) diff --git a/test/unit/config/test_path_graph.py b/test/unit/config/test_path_graph.py index f05bc347771f..02f33787081f 100644 --- a/test/unit/config/test_path_graph.py +++ b/test/unit/config/test_path_graph.py @@ -1,3 +1,5 @@ +from pathlib import Path + import pytest from galaxy.config import BaseAppConfiguration @@ -47,7 +49,7 @@ def test_basecase(monkeypatch): }, } monkeypatch.setattr(AppSchema, "_read_schema", lambda a, b: get_schema(mock_schema)) - monkeypatch.setattr(BaseAppConfiguration, "_load_schema", lambda a: AppSchema(None, "_")) + monkeypatch.setattr(BaseAppConfiguration, "_load_schema", lambda a: AppSchema(Path("no path"), "_")) config = BaseAppConfiguration() assert config.component1_path0 == "value0" @@ -73,7 +75,7 @@ def test_resolves_to_invalid_property(monkeypatch): monkeypatch.setattr(AppSchema, "_read_schema", lambda a, b: get_schema(mock_schema)) with pytest.raises(ConfigurationError): - AppSchema(None, "_").validate_path_resolution_graph() + AppSchema(Path("no path"), "_").validate_path_resolution_graph() def test_path_resolution_cycle(monkeypatch): @@ -98,7 +100,7 @@ def test_path_resolution_cycle(monkeypatch): monkeypatch.setattr(AppSchema, "_read_schema", lambda a, b: get_schema(mock_schema)) with pytest.raises(ConfigurationError): - AppSchema(None, "_").validate_path_resolution_graph() + AppSchema(Path("no path"), "_").validate_path_resolution_graph() def test_path_invalid_type(monkeypatch): @@ -117,7 +119,7 @@ def test_path_invalid_type(monkeypatch): monkeypatch.setattr(AppSchema, "_read_schema", lambda a, b: get_schema(mock_schema)) with pytest.raises(ConfigurationError): - AppSchema(None, "_").validate_path_resolution_graph() + AppSchema(Path("no path"), "_").validate_path_resolution_graph() def test_resolves_to_invalid_type(monkeypatch): @@ -136,7 +138,7 @@ def test_resolves_to_invalid_type(monkeypatch): monkeypatch.setattr(AppSchema, "_read_schema", lambda a, b: get_schema(mock_schema)) with pytest.raises(ConfigurationError): - AppSchema(None, "_").validate_path_resolution_graph() + AppSchema(Path("no path"), "_").validate_path_resolution_graph() def test_resolves_with_empty_component(monkeypatch): @@ -157,7 +159,7 @@ def test_resolves_with_empty_component(monkeypatch): }, } monkeypatch.setattr(AppSchema, "_read_schema", lambda a, b: get_schema(mock_schema)) - monkeypatch.setattr(BaseAppConfiguration, "_load_schema", lambda a: AppSchema(None, "_")) + monkeypatch.setattr(BaseAppConfiguration, "_load_schema", lambda a: AppSchema(Path("no path"), "_")) config = BaseAppConfiguration() assert config.path0 == "value0" diff --git a/test/unit/config/test_path_resolves_to.py b/test/unit/config/test_path_resolves_to.py index eb280c3f8524..d3252147dffb 100644 --- a/test/unit/config/test_path_resolves_to.py +++ b/test/unit/config/test_path_resolves_to.py @@ -1,3 +1,5 @@ +from pathlib import Path + import pytest from galaxy.config import BaseAppConfiguration @@ -46,7 +48,7 @@ def get_schema(app_mapping): @pytest.fixture 
def mock_init(monkeypatch): - monkeypatch.setattr(BaseAppConfiguration, "_load_schema", lambda a: AppSchema(None, "_")) + monkeypatch.setattr(BaseAppConfiguration, "_load_schema", lambda a: AppSchema(Path("no path"), "_")) monkeypatch.setattr(AppSchema, "_read_schema", lambda a, b: get_schema(MOCK_SCHEMA)) monkeypatch.setattr(BaseAppConfiguration, "deprecated_dirs", MOCK_DEPRECATED_DIRS) monkeypatch.setattr(BaseAppConfiguration, "listify_options", {"path4"}) diff --git a/test/unit/config/test_reload_config.py b/test/unit/config/test_reload_config.py index 30211768fbeb..dae560a743b2 100644 --- a/test/unit/config/test_reload_config.py +++ b/test/unit/config/test_reload_config.py @@ -1,3 +1,5 @@ +from pathlib import Path + import pytest from galaxy import config @@ -23,7 +25,7 @@ def get_schema(app_mapping): @pytest.fixture def mock_init(monkeypatch): - monkeypatch.setattr(BaseAppConfiguration, "_load_schema", lambda a: AppSchema(None, "_")) + monkeypatch.setattr(BaseAppConfiguration, "_load_schema", lambda a: AppSchema(Path("no path"), "_")) monkeypatch.setattr(AppSchema, "_read_schema", lambda a, b: get_schema(MOCK_SCHEMA)) diff --git a/test/unit/config/test_schema.py b/test/unit/config/test_schema.py index 820f74300e25..63ae4676f2c3 100644 --- a/test/unit/config/test_schema.py +++ b/test/unit/config/test_schema.py @@ -1,3 +1,5 @@ +from pathlib import Path + from galaxy.config.schema import AppSchema from galaxy.util.yaml_util import ( ordered_load, @@ -46,7 +48,7 @@ def mock_init(self, stream): monkeypatch.setattr(AppSchema, "_read_schema", mock_read_schema) monkeypatch.setattr(OrderedLoader, "__init__", mock_init) - schema = AppSchema("no path", "mockgalaxy") + schema = AppSchema(Path("no path"), "mockgalaxy") data = ordered_load(MOCK_YAML) assert schema.description == data["desc"] diff --git a/test/unit/security/fixtures/vault_conf_custos.yml b/test/unit/data/security/fixtures/vault_conf_custos.yml similarity index 100% rename from test/unit/security/fixtures/vault_conf_custos.yml rename to test/unit/data/security/fixtures/vault_conf_custos.yml diff --git a/test/unit/security/fixtures/vault_conf_database.yml b/test/unit/data/security/fixtures/vault_conf_database.yml similarity index 100% rename from test/unit/security/fixtures/vault_conf_database.yml rename to test/unit/data/security/fixtures/vault_conf_database.yml diff --git a/test/unit/security/fixtures/vault_conf_database_invalid_keys.yml b/test/unit/data/security/fixtures/vault_conf_database_invalid_keys.yml similarity index 100% rename from test/unit/security/fixtures/vault_conf_database_invalid_keys.yml rename to test/unit/data/security/fixtures/vault_conf_database_invalid_keys.yml diff --git a/test/unit/security/fixtures/vault_conf_database_rotated.yml b/test/unit/data/security/fixtures/vault_conf_database_rotated.yml similarity index 100% rename from test/unit/security/fixtures/vault_conf_database_rotated.yml rename to test/unit/data/security/fixtures/vault_conf_database_rotated.yml diff --git a/test/unit/security/fixtures/vault_conf_hashicorp.yml b/test/unit/data/security/fixtures/vault_conf_hashicorp.yml similarity index 100% rename from test/unit/security/fixtures/vault_conf_hashicorp.yml rename to test/unit/data/security/fixtures/vault_conf_hashicorp.yml diff --git a/test/unit/security/test_vault.py b/test/unit/data/security/test_vault.py similarity index 93% rename from test/unit/security/test_vault.py rename to test/unit/data/security/test_vault.py index a64d3179cdfd..ef94d21cb773 100644 --- 
a/test/unit/security/test_vault.py +++ b/test/unit/data/security/test_vault.py @@ -32,23 +32,23 @@ class VaultTestBase(TestCase): def test_read_write_secret(self): self.vault.write_secret("my/test/secret", "hello world") - assert self.vault.read_secret("my/test/secret") == "hello world" # type: ignore + assert self.vault.read_secret("my/test/secret") == "hello world" def test_overwrite_secret(self): self.vault.write_secret("my/new/secret", "hello world") self.vault.write_secret("my/new/secret", "hello overwritten") - assert self.vault.read_secret("my/new/secret") == "hello overwritten" # type: ignore + assert self.vault.read_secret("my/new/secret") == "hello overwritten" def test_valid_paths(self): - with self.assertRaises(InvalidVaultKeyException): # type: ignore + with self.assertRaises(InvalidVaultKeyException): self.vault.write_secret("", "hello world") - with self.assertRaises(InvalidVaultKeyException): # type: ignore + with self.assertRaises(InvalidVaultKeyException): self.vault.write_secret("my//new/secret", "hello world") - with self.assertRaises(InvalidVaultKeyException): # type: ignore + with self.assertRaises(InvalidVaultKeyException): self.vault.write_secret("my/ /new/secret", "hello world") # leading and trailing slashes should be ignored self.vault.write_secret("/my/new/secret with space/", "hello overwritten") - assert self.vault.read_secret("my/new/secret with space") == "hello overwritten" # type: ignore + assert self.vault.read_secret("my/new/secret with space") == "hello overwritten" VAULT_CONF_HASHICORP = os.path.join(os.path.dirname(__file__), "fixtures/vault_conf_hashicorp.yml") @@ -94,7 +94,7 @@ def test_rotate_keys(self): vault.write_secret("my/rotated/secret", "hello rotated") # should succeed after rotation - app.config.vault_config_file = VAULT_CONF_DATABASE_ROTATED # type: ignore + app.config.vault_config_file = VAULT_CONF_DATABASE_ROTATED # type: ignore[attr-defined] vault = VaultFactory.from_app(app) assert vault.read_secret("my/rotated/secret") == "hello rotated" @@ -105,7 +105,7 @@ def test_wrong_keys(self): vault.write_secret("my/incorrect/secret", "hello incorrect") # should fail because decryption keys are the wrong - app.config.vault_config_file = VAULT_CONF_DATABASE_INVALID # type: ignore + app.config.vault_config_file = VAULT_CONF_DATABASE_INVALID # type: ignore[attr-defined] vault = VaultFactory.from_app(app) with self.assertRaises(InvalidToken): vault.read_secret("my/incorrect/secret") diff --git a/test/unit/files/file_sources_conf.yml b/test/unit/files/file_sources_conf.yml deleted file mode 120000 index b021797364f4..000000000000 --- a/test/unit/files/file_sources_conf.yml +++ /dev/null @@ -1 +0,0 @@ -../../../integration/file_sources_conf.yml \ No newline at end of file diff --git a/test/unit/job_execution/test_job_io.py b/test/unit/job_execution/test_job_io.py index df9ec27cf1e9..0f9d9c00dab0 100644 --- a/test/unit/job_execution/test_job_io.py +++ b/test/unit/job_execution/test_job_io.py @@ -29,7 +29,7 @@ class FileSourcesMockApp(GalaxyDataTestApp): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.file_sources = ConfiguredFileSources(ConfiguredFileSourcesConfig.from_app_config(self.config)) # type: ignore[assignment] + self.file_sources = ConfiguredFileSources(ConfiguredFileSourcesConfig.from_app_config(self.config)) @pytest.fixture diff --git a/test/unit/tool_shed/test_dbscript.py b/test/unit/tool_shed/test_dbscript.py index a0f20a5bef8b..065c5a621334 100644 --- a/test/unit/tool_shed/test_dbscript.py +++ 
b/test/unit/tool_shed/test_dbscript.py @@ -16,11 +16,8 @@ from galaxy.model.unittest_utils.model_testing_utils import ( # noqa: F401 - url_factory is a fixture we have to import explicitly url_factory, ) -from galaxy.util import in_packages from galaxy.util.resources import resource_path -pytestmark = pytest.mark.skipif(in_packages(), reason="Running from packages") - DbUrl = NewType("DbUrl", str) BASE_ID = "ts0" diff --git a/test/unit/tool_util/toolbox/test_watcher.py b/test/unit/tool_util/toolbox/test_watcher.py index f735c693706d..39c696954f96 100644 --- a/test/unit/tool_util/toolbox/test_watcher.py +++ b/test/unit/tool_util/toolbox/test_watcher.py @@ -8,9 +8,10 @@ from galaxy.tool_util.toolbox import watcher from galaxy.util import bunch +from galaxy.util.watcher import can_watch -@pytest.mark.skipif(not watcher.can_watch, reason="watchdog not available") +@pytest.mark.skipif(not can_watch, reason="watchdog not available") def test_watcher(): with __test_directory() as t: tool_path = path.join(t, "test.xml") @@ -29,7 +30,7 @@ def test_watcher(): assert tool_watcher.observer is None -@pytest.mark.skipif(not watcher.can_watch, reason="watchdog not available") +@pytest.mark.skipif(not can_watch, reason="watchdog not available") def test_tool_conf_watcher(): callback = CallbackRecorder() conf_watcher = watcher.get_tool_conf_watcher(callback.call) diff --git a/test/unit/util/test_utils.py b/test/unit/util/test_utils.py index 34d7cb95093b..81d448dd4a1a 100644 --- a/test/unit/util/test_utils.py +++ b/test/unit/util/test_utils.py @@ -3,6 +3,7 @@ import tempfile from enum import Enum from io import StringIO +from pathlib import Path from typing import Dict import pytest @@ -104,25 +105,14 @@ def test_safe_loads(): def test_in_packages(monkeypatch): - monkeypatch.setattr(util, "galaxy_root_path", "a/b") - assert not util.in_packages() - - monkeypatch.setattr(util, "galaxy_root_path", "a/b/packages") - assert util.in_packages() + util_path = Path(util.__file__).parent + assert util.in_packages() == (not str(util_path).endswith("lib/galaxy/util")) def test_galaxy_directory(monkeypatch): - monkeypatch.setattr(util, "galaxy_root_path", "a/b") # a/b - path1 = util.galaxy_directory() - - monkeypatch.setattr(util, "galaxy_root_path", "a/b/c/..") # a/b - path2 = util.galaxy_directory() - - monkeypatch.setattr(util, "galaxy_root_path", "a/b/packages/c/..") # a/b/packages - path3 = util.galaxy_directory() - - assert path1 == path2 == path3 - assert os.path.isabs(path1) + galaxy_dir = util.galaxy_directory() + assert os.path.isabs(galaxy_dir) + assert os.path.isfile(os.path.join(galaxy_dir, "run.sh")) def test_listify() -> None: diff --git a/tools/filters/gff/gff_filter_by_attribute.py b/tools/filters/gff/gff_filter_by_attribute.py index f2acf71c1f29..540d4d173912 100644 --- a/tools/filters/gff/gff_filter_by_attribute.py +++ b/tools/filters/gff/gff_filter_by_attribute.py @@ -170,7 +170,7 @@ def check_expression(text): statements = module.body if not len(statements) == 1: - return False # type: ignore[unreachable] + return False expression = statements[0] if expression.__class__.__name__ != "Expr": return False
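A recurring pattern in this patch is the move away from building filesystem paths to packaged data (via galaxy_samples_directory(), files(), or hard-coded joins) toward keeping an importlib.resources-style Traversable and materializing a real path with as_file() only for the duration of its use; the repo_files()/repo_tars() rewrite, the job_conf sample constants in test_job_configuration.py, and the alembic_env_dir fixture above all follow it. Below is a minimal sketch of the idea, written directly against the stdlib importlib.resources API on the assumption that the galaxy.util.resources helpers (resource_path, as_file, Traversable) are thin wrappers over the same primitives; the package and directory names are illustrative:

from importlib.resources import as_file, files
from pathlib import Path
from typing import Iterator

def packaged_children(package: str, relative: str) -> Iterator[Path]:
    """Yield each child of a packaged data directory as a real filesystem path."""
    root = files(package).joinpath(relative)
    for name in sorted(child.name for child in root.iterdir()):
        # as_file() extracts the resource to a temporary location when the
        # package is shipped zipped; the path is only guaranteed to exist
        # inside the with block, which is why repo_files() above became a
        # generator yielding paths one at a time instead of returning a list.
        with as_file(root.joinpath(name)) as path:
            yield path

Any work that needs the extracted path on disk has to happen before the consumer advances the iterator, since advancing leaves the with block and may remove the temporary copy.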