
Commit

Merge pull request galaxyproject#17365 from mvdbeek/warn_unreachable
Enable ``warn_unreachable`` mypy option
mvdbeek authored Feb 12, 2024
2 parents 54f661c + d092e02 commit 818c147
Showing 35 changed files with 119 additions and 93 deletions.
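The mypy configuration change that actually switches the flag on is not visible in the files excerpted below, but in mypy this is the `warn_unreachable = True` option (or `--warn-unreachable` on the command line), which makes mypy report statements it can prove will never execute. A minimal, hypothetical sketch of the kind of code it flags:

```python
from typing import Optional


def describe(value: Optional[str]) -> str:
    if value is None:
        return "missing"
    if value is None:
        # With warn_unreachable enabled, mypy reports this block:
        # error: Statement is unreachable  [unreachable]
        return "still missing"
    return value
```

Most of the hunks below are one of two reactions to such reports: widen an annotation so a guard becomes legitimately reachable, or delete (or explicitly suppress) the dead branch.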
8 changes: 4 additions & 4 deletions .pre-commit-config.yaml.sample
@@ -1,10 +1,10 @@
repos:
- repo: https://github.com/psf/black
rev: 22.10.0
rev: 24.1.1
hooks:
- id: black
- repo: https://github.com/pycqa/flake8
rev: 5.0.4
rev: 7.0.0
hooks:
- id: flake8
- repo: https://github.com/pre-commit/mirrors-prettier
@@ -16,7 +16,7 @@ repos:
additional_dependencies:
- [email protected] # Workaround. See https://github.com/pre-commit/mirrors-prettier/issues/29
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.3.0 # Use the ref you want to point at
rev: v4.5.0 # Use the ref you want to point at
hooks:
- id: trailing-whitespace
- id: check-merge-conflict
Expand All @@ -30,7 +30,7 @@ repos:
- id: shell-lint
args: [--format=json]
- repo: https://github.com/python-jsonschema/check-jsonschema
rev: 0.27.3
rev: 0.27.4
hooks:
- id: check-github-workflows
- repo: local
2 changes: 1 addition & 1 deletion lib/galaxy/datatypes/blast.py
@@ -261,7 +261,7 @@ def merge(split_files: List[str], output_file: str) -> None:
raise NotImplementedError("Merging BLAST databases is non-trivial (do this via makeblastdb?)")

@classmethod
def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Dict) -> None:
def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Optional[Dict]) -> None:
"""Split a BLAST database (not implemented for now)."""
if split_params is None:
return None
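This `Dict` → `Optional[Dict]` widening in `blast.py` recurs in every datatype `split()` below (`data.py`, `molecules.py`, `msa.py`, `sequence.py`, `spaln.py`). The method bodies begin with `if split_params is None: return None`, and with the parameter annotated as a plain `Dict` that guard is dead code under `warn_unreachable`; widening the annotation keeps the guard meaningful. A stripped-down sketch (class name hypothetical):

```python
from typing import Callable, Dict, List, Optional


class SplittableData:
    @classmethod
    def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Optional[Dict]) -> None:
        # With `split_params: Dict`, mypy --warn-unreachable would flag this
        # branch as unreachable; Optional[Dict] makes the None case possible.
        if split_params is None:
            return None
        raise NotImplementedError("Splitting this datatype is not supported.")
```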
2 changes: 1 addition & 1 deletion lib/galaxy/datatypes/data.py
@@ -1110,7 +1110,7 @@ def set_peek(self, dataset: DatasetProtocol, **kwd) -> None:
dataset.blurb = "file purged from disk"

@classmethod
def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Dict) -> None:
def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Optional[Dict]) -> None:
"""
Split the input files by line.
"""
11 changes: 6 additions & 5 deletions lib/galaxy/datatypes/molecules.py
@@ -5,6 +5,7 @@
Callable,
Dict,
List,
Optional,
)

from galaxy.datatypes import metadata
@@ -261,7 +262,7 @@ def set_meta(self, dataset: DatasetProtocol, overwrite: bool = True, **kwd) -> N
dataset.metadata.number_of_molecules = count_special_lines(r"^\$\$\$\$$", dataset.get_file_name())

@classmethod
def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Dict) -> None:
def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Optional[Dict]) -> None:
"""
Split the input files by molecule records.
"""
@@ -344,7 +345,7 @@ def set_meta(self, dataset: DatasetProtocol, overwrite: bool = True, **kwd) -> N
dataset.metadata.number_of_molecules = count_special_lines("@<TRIPOS>MOLECULE", dataset.get_file_name())

@classmethod
def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Dict) -> None:
def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Optional[Dict]) -> None:
"""
Split the input files by molecule records.
"""
@@ -430,7 +431,7 @@ def set_meta(self, dataset: DatasetProtocol, overwrite: bool = True, **kwd) -> N
dataset.metadata.number_of_molecules = count_special_lines("^#", dataset.get_file_name(), invert=True)

@classmethod
def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Dict) -> None:
def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Optional[Dict]) -> None:
"""
Split the input files by fingerprint records.
"""
@@ -554,7 +555,7 @@ def merge(split_files: List[str], output_file: str) -> None:
raise NotImplementedError("Merging Fastsearch indices is not supported.")

@classmethod
def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Dict) -> None:
def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Optional[Dict]) -> None:
"""Splitting Fastsearch indices is not supported."""
if split_params is None:
return None
@@ -1631,7 +1632,7 @@ def sniff_prefix(self, file_prefix: FilePrefix) -> bool:
return True

@classmethod
def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Dict) -> None:
def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Optional[Dict]) -> None:
"""
Split the input files by molecule records.
"""
3 changes: 2 additions & 1 deletion lib/galaxy/datatypes/msa.py
@@ -6,6 +6,7 @@
Callable,
Dict,
List,
Optional,
)

from galaxy.datatypes.binary import Binary
@@ -205,7 +206,7 @@ def set_meta(self, dataset: DatasetProtocol, overwrite: bool = True, **kwd) -> N
)

@classmethod
def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Dict) -> None:
def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Optional[Dict]) -> None:
"""
Split the input files by model records.
8 changes: 4 additions & 4 deletions lib/galaxy/datatypes/sequence.py
@@ -217,7 +217,7 @@ def get_subdir(idx):
return directories

@classmethod
def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Dict) -> None:
def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Optional[Dict]) -> None:
"""Split a generic sequence file (not sensible or possible, see subclasses)."""
if split_params is None:
return None
@@ -335,7 +335,7 @@ class Alignment(data.Text):
)

@classmethod
def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Dict) -> None:
def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Optional[Dict]) -> None:
"""Split a generic alignment file (not sensible or possible, see subclasses)."""
if split_params is None:
return None
@@ -405,7 +405,7 @@ def sniff_prefix(self, file_prefix: FilePrefix) -> bool:
return False

@classmethod
def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Dict) -> None:
def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Optional[Dict]) -> None:
"""Split a FASTA file sequence by sequence.
Note that even if split_mode="number_of_parts", the actual number of
@@ -792,7 +792,7 @@ def display_data(
return Sequence.display_data(self, trans, dataset, preview, filename, to_ext, **kwd)

@classmethod
def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Dict) -> None:
def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Optional[Dict]) -> None:
"""
FASTQ files are split on cluster boundaries, in increments of 4 lines
"""
2 changes: 1 addition & 1 deletion lib/galaxy/datatypes/spaln.py
@@ -172,7 +172,7 @@ def merge(split_files: List[str], output_file: str) -> None:
raise NotImplementedError("Merging spaln databases is not possible")

@classmethod
def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Dict) -> None:
def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Optional[Dict]) -> None:
"""Split a spaln database (not implemented)."""
if split_params is None:
return None
1 change: 0 additions & 1 deletion lib/galaxy/datatypes/text.py
@@ -1113,7 +1113,6 @@ def _looks_like_yaml(self, file_prefix: FilePrefix) -> bool:
return True
except yaml.YAMLError:
return False
return False


@build_sniff_from_prefix
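The deleted line in `_looks_like_yaml` was a trailing `return False` after a `try`/`except` in which every path already returns, so mypy flags it as unreachable. A simplified sketch of the pattern (not the actual Galaxy implementation, which sniffs only a file prefix):

```python
import yaml


def looks_like_yaml(text: str) -> bool:
    try:
        yaml.safe_load(text)
        return True
    except yaml.YAMLError:
        return False
    # A trailing `return False` here can never run: the try body returns,
    # the handler returns, and any other exception propagates.
```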
3 changes: 0 additions & 3 deletions lib/galaxy/jobs/runners/state_handlers/_safe_eval.py
@@ -1,5 +1,4 @@
from ast import (
Module,
parse,
walk,
)
@@ -127,8 +126,6 @@ def _check_expression(text, allowed_variables=None):
except SyntaxError:
return False

if not isinstance(module, Module):
return False
statements = module.body
if not len(statements) == 1:
return False
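`ast.parse` is annotated in typeshed as returning `ast.Module` for the default `mode="exec"`, so the defensive `isinstance(module, Module)` check could never be false as far as mypy is concerned, and its body was flagged as unreachable. A hedged sketch of the surviving logic (function name hypothetical):

```python
from ast import parse


def is_single_statement(text: str) -> bool:
    """Return True if ``text`` parses to exactly one top-level statement."""
    try:
        module = parse(text)
    except SyntaxError:
        return False
    # parse() already returns ast.Module here, so an isinstance guard against
    # it would be dead code under warn_unreachable.
    return len(module.body) == 1
```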
2 changes: 1 addition & 1 deletion lib/galaxy/selenium/context.py
@@ -41,7 +41,7 @@ def screenshot(self, label: str):
return target

@abstractmethod
def _screenshot_path(self, label: str, extension=".png") -> str:
def _screenshot_path(self, label: str, extension=".png") -> Optional[str]:
"""Path to store screenshots in."""


7 changes: 2 additions & 5 deletions lib/galaxy/tool_util/cwl/parser.py
@@ -293,6 +293,7 @@ class ExpressionToolProxy(CommandLineToolProxy):

class JobProxy:
def __init__(self, tool_proxy, input_dict, output_dict, job_directory):
assert RuntimeContext is not None, "cwltool is not installed, cannot run CWL jobs"
self._tool_proxy = tool_proxy
self._input_dict = input_dict
self._output_dict = output_dict
@@ -327,12 +328,8 @@ def _ensure_cwl_job_initialized(self):
beta_relaxed_fmt_check=beta_relaxed_fmt_check,
)

args = []
args = [RuntimeContext(job_args)]
kwargs: Dict[str, str] = {}
if RuntimeContext is not None:
args.append(RuntimeContext(job_args))
else:
kwargs = job_args
self._cwl_job = next(self._tool_proxy._tool.job(self._input_dict, self._output_callback, *args, **kwargs))
self._is_command_line_job = hasattr(self._cwl_job, "command_line")

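`RuntimeContext` is an optional `cwltool` import that can be `None` at runtime when the dependency is missing, but mypy evidently types the name as the class itself (the import-time `= None` fallback is presumably silenced), so the old `if RuntimeContext is not None:` / `else:` fallback had an unreachable `else` branch. The fix keeps a runtime safeguard as an `assert` in `__init__` and constructs the context unconditionally. A sketch of that shape; the import path and the `type: ignore` placement are my assumptions:

```python
from typing import Any, Dict

try:
    from cwltool.context import RuntimeContext  # optional dependency
except ImportError:
    RuntimeContext = None  # type: ignore  # mypy keeps typing the name as the class


class JobProxy:
    def __init__(self, job_args: Dict[str, Any]) -> None:
        # Runtime safeguard: without cwltool this fails loudly here instead of
        # with an obscure error when RuntimeContext(...) is called later.
        assert RuntimeContext is not None, "cwltool is not installed, cannot run CWL jobs"
        self._runtime_context = RuntimeContext(job_args)
```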
11 changes: 10 additions & 1 deletion lib/galaxy/tool_util/data/__init__.py
@@ -26,6 +26,7 @@
Dict,
List,
Optional,
overload,
Set,
Tuple,
Type,
@@ -878,7 +879,15 @@ def to_dict(self, view: str = "collection", value_mapper: Optional[Dict[str, Cal
return rval


def _expand_here_template(content: str, here: Optional[str]) -> str:
@overload
def _expand_here_template(content: str, here: Optional[str]) -> str: ...


@overload
def _expand_here_template(content: None, here: Optional[str]) -> None: ...


def _expand_here_template(content: Optional[str], here: Optional[str]) -> Optional[str]:
if here and content:
content = string.Template(content).safe_substitute({"__HERE__": here})
return content
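The pair of `@overload` stubs preserves precision for callers of `_expand_here_template`: a `str` argument yields `str`, a `None` argument yields `None`, while the single runtime implementation handles both. A usage sketch under that assumption (paths hypothetical):

```python
from typing import Optional, overload
import string


@overload
def expand_here_template(content: str, here: Optional[str]) -> str: ...


@overload
def expand_here_template(content: None, here: Optional[str]) -> None: ...


def expand_here_template(content: Optional[str], here: Optional[str]) -> Optional[str]:
    if here and content:
        content = string.Template(content).safe_substitute({"__HERE__": here})
    return content


expanded: str = expand_here_template("__HERE__/tool-data/table.loc", "/srv/galaxy")  # str in, str out
nothing: None = expand_here_template(None, "/srv/galaxy")  # None in, None out
```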
5 changes: 0 additions & 5 deletions lib/galaxy/tool_util/deps/conda_compat.py
@@ -6,7 +6,6 @@
"""

import os
from collections.abc import Hashable

import yaml

@@ -31,10 +30,6 @@ def __init__(self, func):
self.cache = {}

def __call__(self, *args):
if not isinstance(args, Hashable):
# uncacheable. a list, for instance.
# better to not cache than blow up.
return self.func(*args)
if args in self.cache:
return self.cache[args]
else:
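The guard deleted from the memoization helper never did what it appeared to do: `*args` is always packed into a `tuple`, and `isinstance(args, Hashable)` inspects the tuple type only, not whether its *elements* are hashable, so the `not isinstance(...)` branch is unreachable for mypy and ineffective at runtime. A simplified sketch of the decorator without the guard; names are illustrative:

```python
from typing import Any, Callable, Dict, Tuple


class memoized:
    """Cache a function's return value per argument tuple."""

    def __init__(self, func: Callable) -> None:
        self.func = func
        self.cache: Dict[Tuple[Any, ...], Any] = {}

    def __call__(self, *args: Any) -> Any:
        # `args` is always a tuple, so an isinstance(args, Hashable) check can
        # never fail; unhashable *elements* would still raise TypeError below.
        if args in self.cache:
            return self.cache[args]
        value = self.func(*args)
        self.cache[args] = value
        return value
```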
2 changes: 1 addition & 1 deletion lib/galaxy/tool_util/deps/docker_util.py
@@ -110,7 +110,7 @@ def build_docker_run_command(
auto_rm: bool = DEFAULT_AUTO_REMOVE,
set_user: Optional[str] = DEFAULT_SET_USER,
host: Optional[str] = DEFAULT_HOST,
guest_ports: Union[bool, List[str]] = False,
guest_ports: Union[bool, str, List[str]] = False,
container_name: Optional[str] = None,
) -> str:
env_directives = env_directives or []
4 changes: 3 additions & 1 deletion lib/galaxy/tool_util/linters/xml_order.py
@@ -4,6 +4,8 @@
https://github.com/galaxy-iuc/standards.
"""

from typing import Optional

# https://github.com/galaxy-iuc/standards
# https://github.com/galaxy-iuc/standards/pull/7/files
TAG_ORDER = [
@@ -54,7 +56,7 @@ def lint_xml_order(tool_xml, lint_ctx):
tag_ordering = TAG_ORDER

last_tag = None
last_key = None
last_key: Optional[int] = None
for elem in tool_root:
tag = elem.tag
if tag in tag_ordering:
2 changes: 1 addition & 1 deletion lib/galaxy/tool_util/parser/output_actions.py
@@ -324,7 +324,7 @@ def apply_action(self, output_dataset, other_values) -> None:
else:
# fallback when Cheetah not available, equivalent to how this was handled prior 23.0
# definitely not needed for CWL tool parsing
log.warning("Cheetah not installed, falling back to legacy 'apply_action' behavior.")
log.warning("Cheetah not installed, falling back to legacy 'apply_action' behavior.") # type: ignore[unreachable]
value = self.default
if value is not None:
setattr(output_dataset.metadata, self.name, value)
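This hunk shows the other side of the option: the Cheetah-less fallback is genuinely reachable at runtime (when the optional dependency is missing) but dead code as far as mypy's view of the imports goes, so the report is silenced on that one line with `# type: ignore[unreachable]` instead of restructuring the code. A hypothetical sketch of the same technique, where annotations and runtime reality disagree:

```python
import logging

log = logging.getLogger(__name__)


def normalize_name(name: str) -> str:
    # The annotation says `name` is always a str, so mypy flags this branch as
    # unreachable under warn_unreachable; untyped legacy callers can still pass
    # None at runtime, so the guard stays and the report is suppressed.
    if name is None:
        log.warning("normalize_name received None, falling back to empty string.")  # type: ignore[unreachable]
        return ""
    return name.strip().lower()
```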
3 changes: 0 additions & 3 deletions lib/galaxy/tool_util/toolbox/base.py
@@ -763,9 +763,6 @@ def has_tool(self, tool_id: str, tool_version: Optional[str] = None, exact: bool

def is_missing_shed_tool(self, tool_id: str) -> bool:
"""Confirm that the tool ID does reference a shed tool and is not installed."""
if tool_id is None:
# This is not a tool ID.
return False
if "repos" not in tool_id:
# This is not a shed tool.
return False
8 changes: 3 additions & 5 deletions lib/galaxy/tool_util/verify/__init__.py
@@ -39,7 +39,7 @@
def verify(
item_label: str,
output_content: bytes,
attributes: Dict[str, Any],
attributes: Optional[Dict[str, Any]],
filename: Optional[str] = None,
get_filecontent: Optional[Callable[[str], bytes]] = None,
get_filename: Optional[Callable[[str], str]] = None,
@@ -51,6 +51,7 @@ def verify(
Throw an informative assertion error if any of these tests fail.
"""
attributes = attributes or {}
if get_filename is None:
get_filecontent_: Callable[[str], bytes]
if get_filecontent is None:
@@ -67,7 +68,7 @@ def get_filename(filename: str) -> str:

# Check assertions...
assertions = attributes.get("assert_list", None)
if attributes is not None and assertions is not None:
if assertions is not None:
try:
verify_assertions(output_content, attributes["assert_list"], attributes.get("decompress", False))
except AssertionError as err:
@@ -95,9 +96,6 @@ def get_filename(filename: str) -> str:
errmsg += unicodify(err)
raise AssertionError(errmsg)

if attributes is None:
attributes = {}

# expected object might be None, so don't pull unless available
has_expected_object = "object" in attributes
if has_expected_object:
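`verify()` now accepts `Optional[Dict[str, Any]]` and normalizes it once at the top (`attributes = attributes or {}`). After that, mypy knows `attributes` is always a dict, which is exactly why the old trailing `if attributes is None: attributes = {}` block was unreachable and why the `attributes is not None and` clause became redundant. A trimmed, hypothetical sketch of the shape:

```python
from typing import Any, Dict, Optional


def verify(item_label: str, output_content: bytes, attributes: Optional[Dict[str, Any]]) -> None:
    # Normalize once; from here on `attributes` is a plain dict, so any later
    # `if attributes is None:` block would be flagged as unreachable.
    attributes = attributes or {}
    assertions = attributes.get("assert_list")
    if assertions is not None:
        print(f"{item_label}: checking {len(assertions)} assertions against {len(output_content)} bytes of output")
```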
3 changes: 2 additions & 1 deletion lib/galaxy/tools/__init__.py
@@ -789,7 +789,7 @@ def __init__(
# tool_data_table_conf.xml entries exist.
self.input_params: List[ToolParameter] = []
# Attributes of tools installed from Galaxy tool sheds.
self.tool_shed = None
self.tool_shed: Optional[str] = None
self.repository_name = None
self.repository_owner = None
self.changeset_revision = None
@@ -1659,6 +1659,7 @@ def populate_resource_parameters(self, tool_source):
def populate_tool_shed_info(self, tool_shed_repository):
if tool_shed_repository:
self.tool_shed = tool_shed_repository.tool_shed
assert self.tool_shed
self.repository_name = tool_shed_repository.name
self.repository_owner = tool_shed_repository.owner
self.changeset_revision = tool_shed_repository.changeset_revision
10 changes: 0 additions & 10 deletions lib/galaxy/tools/cache.py
@@ -271,23 +271,13 @@ def __init__(self, path: str, modtime: Optional[float] = None, lazy_hash: bool =
self.hash # noqa: B018

def modtime_less_than(self, other_modtime: float):
if self._modtime is None:
# For the purposes of the tool cache,
# if we haven't seen the modtime we consider it not equal
return True
return self._modtime < other_modtime

def hash_equals(self, other_hash: Optional[str]):
if self._tool_hash is None or other_hash is None:
# For the purposes of the tool cache,
# if we haven't seen the hash yet we consider it not equal
return False
return self.hash == other_hash

@property
def modtime(self) -> float:
if self._modtime is None:
self._modtime = os.path.getmtime(self.path)
return self._modtime

@modtime.setter
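The `cache.py` deletions follow the same theme: by the time `modtime_less_than`, `hash_equals`, and the `modtime` property run, mypy can prove the backing attributes are populated, so the `is None` fallbacks are dead code and were removed rather than kept as false reassurance. A generic sketch of the resulting shape (simplified; it assumes the modification time is resolved eagerly in `__init__`, which may differ from Galaxy's actual class):

```python
import os


class CachedToolFile:
    """Simplified sketch of a cache entry whose modtime is resolved eagerly."""

    def __init__(self, path: str) -> None:
        self.path = path
        self._modtime: float = os.path.getmtime(path)

    def modtime_less_than(self, other_modtime: float) -> bool:
        # No `if self._modtime is None` guard: the attribute is always a float.
        return self._modtime < other_modtime
```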