diff --git a/client/src/api/schema/schema.ts b/client/src/api/schema/schema.ts
index a89d13317519..482ec3912ddb 100644
--- a/client/src/api/schema/schema.ts
+++ b/client/src/api/schema/schema.ts
@@ -19398,7 +19398,7 @@ export interface operations {
* @description Allows remote job running mechanisms to get a fresh OIDC token that can be used on remote side to authorize user. It is not meant to represent part of Galaxy's stable, user facing API
*/
parameters: {
- /** @description A key used to authenticate this request as acting onbehalf or a job runner for the specified job */
+ /** @description A key used to authenticate this request as acting on behalf of a job runner for the specified job */
/** @description OIDC provider name */
query: {
job_key: string;
diff --git a/lib/galaxy/authnz/managers.py b/lib/galaxy/authnz/managers.py
index f7de71b2b201..edf13d19beb6 100644
--- a/lib/galaxy/authnz/managers.py
+++ b/lib/galaxy/authnz/managers.py
@@ -95,8 +95,9 @@ def _parse_oidc_config(self, config_file):
func = getattr(builtins, child.get("Type"))
except AttributeError:
log.error(
- "The value of attribute `Type`, `{}`, is not a valid built-in type;" " skipping this node"
- ).format(child.get("Type"))
+ "The value of attribute `Type`, `%s`, is not a valid built-in type; skipping this node",
+ child.get("Type"),
+ )
continue
self.oidc_config[child.get("Property")] = func(child.get("Value"))
except ImportError:
diff --git a/lib/galaxy/celery/base_task.py b/lib/galaxy/celery/base_task.py
index 6737aba0231c..94bb8d75956f 100644
--- a/lib/galaxy/celery/base_task.py
+++ b/lib/galaxy/celery/base_task.py
@@ -87,7 +87,7 @@ def calculate_task_start_time( # type: ignore
update_stmt = (
update(CeleryUserRateLimit)
.where(CeleryUserRateLimit.user_id == user_id)
- .values(last_scheduled_time=text("greatest(last_scheduled_time + ':interval second', " ":now) "))
+ .values(last_scheduled_time=text("greatest(last_scheduled_time + ':interval second', :now)"))
.returning(CeleryUserRateLimit.last_scheduled_time)
)
result = sa_session.execute(update_stmt, {"interval": task_interval_secs, "now": now}).all()
diff --git a/lib/galaxy/datatypes/molecules.py b/lib/galaxy/datatypes/molecules.py
index a4cc26b549d2..193c32d65d6d 100644
--- a/lib/galaxy/datatypes/molecules.py
+++ b/lib/galaxy/datatypes/molecules.py
@@ -877,15 +877,15 @@ def get_matcher(self) -> re.Pattern:
"""
pat = (
r"(ATOM|HETATM)\s+"
- + r"(\d+)\s+"
- + r"([A-Z0-9]+)\s+"
- + r"([A-Z0-9]+)\s+"
- + r"(([A-Z]?)\s+)?"
- + r"([-+]?\d*\.\d+|\d+)\s+"
- + r"([-+]?\d*\.\d+|\d+)\s+"
- + r"([-+]?\d*\.\d+|\d+)\s+"
- + r"([-+]?\d*\.\d+|\d+)\s+"
- + r"([-+]?\d*\.\d+|\d+)\s+"
+ r"(\d+)\s+"
+ r"([A-Z0-9]+)\s+"
+ r"([A-Z0-9]+)\s+"
+ r"(([A-Z]?)\s+)?"
+ r"([-+]?\d*\.\d+|\d+)\s+"
+ r"([-+]?\d*\.\d+|\d+)\s+"
+ r"([-+]?\d*\.\d+|\d+)\s+"
+ r"([-+]?\d*\.\d+|\d+)\s+"
+ r"([-+]?\d*\.\d+|\d+)\s+"
)
return re.compile(pat)
diff --git a/lib/galaxy/datatypes/qiime2.py b/lib/galaxy/datatypes/qiime2.py
index deaaf6a3da47..c9aa533ae8c2 100644
--- a/lib/galaxy/datatypes/qiime2.py
+++ b/lib/galaxy/datatypes/qiime2.py
@@ -275,7 +275,7 @@ def _get_versions(path, uuid):
framework_version = framework_version_line.split(":")[1].strip()
return version, framework_version
except Exception:
- raise ValueError("Archive does not contain a correctly formatted" " VERSION file.")
+ raise ValueError("Archive does not contain a correctly formatted VERSION file.")
def _open_file_in_archive(zip_path, path, uuid):
diff --git a/lib/galaxy/datatypes/tabular.py b/lib/galaxy/datatypes/tabular.py
index b9b79f046511..5b4e1d523e12 100644
--- a/lib/galaxy/datatypes/tabular.py
+++ b/lib/galaxy/datatypes/tabular.py
@@ -1223,7 +1223,8 @@ def __init__(self, **kwd):
"DESC",
"SRAS",
"PRAS",
- "PART_CHROM" "PART_CONTIG",
+ "PART_CHROM",
+ "PART_CONTIG",
"PART_OFFSET",
"PART_STRAND",
"FILT",
diff --git a/lib/galaxy/jobs/__init__.py b/lib/galaxy/jobs/__init__.py
index 44547a86bcf8..04d4c57e4c76 100644
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -1331,7 +1331,7 @@ def clear_working_directory(self):
job = self.get_job()
if not os.path.exists(self.working_directory):
log.warning(
- "(%s): Working directory clear requested but %s does " "not exist", self.job_id, self.working_directory
+ "(%s): Working directory clear requested but %s does not exist", self.job_id, self.working_directory
)
return
@@ -1535,7 +1535,7 @@ def change_state(self, state, info=False, flush=True, job=None):
if job.state in model.Job.terminal_states:
log.warning(
- "(%s) Ignoring state change from '%s' to '%s' for job " "that is already terminal",
+ "(%s) Ignoring state change from '%s' to '%s' for job that is already terminal",
job.id,
job.state,
state,
diff --git a/lib/galaxy/jobs/dynamic_tool_destination.py b/lib/galaxy/jobs/dynamic_tool_destination.py
index 9e61e9adb88f..19a3d25776d7 100755
--- a/lib/galaxy/jobs/dynamic_tool_destination.py
+++ b/lib/galaxy/jobs/dynamic_tool_destination.py
@@ -1773,11 +1773,13 @@ def get_typo_correction(typo_str, word_set, max_dist):
"--check-config",
dest="check_config",
nargs="?",
- help="Use this option to validate tool_destinations.yml."
- + " Optionally, provide the path to the tool_destinations.yml"
- + " that you would like to check, and/or the path to the related"
- + " job_conf.xml. Default: galaxy/config/tool_destinations.yml"
- + "and galaxy/config/job_conf.xml",
+ help=(
+ "Use this option to validate tool_destinations.yml."
+ " Optionally, provide the path to the tool_destinations.yml"
+ " that you would like to check, and/or the path to the related"
+ " job_conf.xml. Default: galaxy/config/tool_destinations.yml"
+ " and galaxy/config/job_conf.xml"
+ ),
)
parser.add_argument("-j", "--job-config", dest="job_config")
diff --git a/lib/galaxy/jobs/runners/aws.py b/lib/galaxy/jobs/runners/aws.py
index 57b8dae427fa..9831cc42950e 100644
--- a/lib/galaxy/jobs/runners/aws.py
+++ b/lib/galaxy/jobs/runners/aws.py
@@ -399,7 +399,7 @@ def stop_job(self, job_wrapper):
log.debug(msg.format(name=job_name))
def recover(self, job, job_wrapper):
- msg = "(name!r/runner!r) is still in {state!s} state, adding to" " the runner monitor queue"
+ msg = "(name!r/runner!r) is still in {state!s} state, adding to the runner monitor queue"
job_id = job.get_job_runner_external_id()
job_name = self.JOB_NAME_PREFIX + job_wrapper.get_id_tag()
ajs = AsynchronousJobState(files_dir=job_wrapper.working_directory, job_wrapper=job_wrapper)
diff --git a/lib/galaxy/jobs/runners/chronos.py b/lib/galaxy/jobs/runners/chronos.py
index 3bed4dc79a67..bb96c1b429dc 100644
--- a/lib/galaxy/jobs/runners/chronos.py
+++ b/lib/galaxy/jobs/runners/chronos.py
@@ -178,7 +178,7 @@ def stop_job(self, job_wrapper):
LOGGER.error(msg.format(name=job_name))
def recover(self, job, job_wrapper):
- msg = "(name!r/runner!r) is still in {state!s} state, adding to" " the runner monitor queue"
+ msg = "(name!r/runner!r) is still in {state!s} state, adding to the runner monitor queue"
job_id = job.get_job_runner_external_id()
ajs = AsynchronousJobState(
files_dir=job_wrapper.working_directory,
diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py
index 93ee001aed73..ded3c626d14d 100644
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -3252,7 +3252,7 @@ def add_dataset(self, dataset, parent_id=None, genome_build=None, set_hid=True,
elif not isinstance(dataset, (HistoryDatasetAssociation, HistoryDatasetCollectionAssociation)):
raise TypeError(
"You can only add Dataset and HistoryDatasetAssociation instances to a history"
- + f" ( you tried to add {str(dataset)} )."
+ f" ( you tried to add {str(dataset)} )."
)
is_dataset = is_hda(dataset)
if parent_id:
diff --git a/lib/galaxy/objectstore/pithos.py b/lib/galaxy/objectstore/pithos.py
index 4d04b64246c3..60a710f1542d 100644
--- a/lib/galaxy/objectstore/pithos.py
+++ b/lib/galaxy/objectstore/pithos.py
@@ -83,7 +83,7 @@ def parse_config_xml(config_xml):
log.error(msg)
raise Exception(msg)
except Exception:
- log.exception("Malformed PithosObjectStore Configuration XML -- " "unable to continue")
+ log.exception("Malformed PithosObjectStore Configuration XML, unable to continue")
raise
return r
@@ -325,7 +325,7 @@ def _size(self, obj, **kwargs) -> int:
try:
return os.path.getsize(self._get_cache_path(path))
except OSError as ex:
- log.warning(f"Could not get size of file {path} in local cache," f"will try Pithos. Error: {ex}")
+ log.warning("Could not get size of file %s in local cache, will try Pithos. Error: %s", path, ex)
try:
file = self.pithos.get_object_info(path)
except ClientError as ce:
@@ -408,7 +408,7 @@ def _update_from_file(self, obj, **kwargs):
if kwargs.get("create"):
self._create(obj, **kwargs)
if not self._exists(obj, **kwargs):
- raise ObjectNotFound(f"objectstore.update_from_file, object does not exist: {obj}, " f"kwargs: {kwargs}")
+ raise ObjectNotFound(f"objectstore.update_from_file, object does not exist: {obj}, kwargs: {kwargs}")
path = self._construct_path(obj, **kwargs)
cache_path = self._get_cache_path(path)
diff --git a/lib/galaxy/objectstore/rucio.py b/lib/galaxy/objectstore/rucio.py
index ebc596903752..1d9c3d48b8d7 100644
--- a/lib/galaxy/objectstore/rucio.py
+++ b/lib/galaxy/objectstore/rucio.py
@@ -594,7 +594,7 @@ def _register_file(self, rel_path, file_name):
file_name = self._get_cache_path(rel_path)
if not os.path.islink(file_name):
raise ObjectInvalid(
- "rucio objectstore._register_file, rucio_register_only " "is set, but file in cache is not a link "
+ "rucio objectstore._register_file, rucio_register_only is set, but file in cache is not a link "
)
if os.path.islink(file_name):
file_name = os.readlink(file_name)
diff --git a/lib/galaxy/schema/schema.py b/lib/galaxy/schema/schema.py
index 5badff543962..95b6f125f0f5 100644
--- a/lib/galaxy/schema/schema.py
+++ b/lib/galaxy/schema/schema.py
@@ -1563,14 +1563,14 @@ class CreateHistoryPayload(Model):
default=None,
title="History ID",
description=(
- "The encoded ID of the history to copy. " "Provide this value only if you want to copy an existing history."
+ "The encoded ID of the history to copy. Provide this value only if you want to copy an existing history."
),
)
all_datasets: Optional[bool] = Field(
default=True,
title="All Datasets",
description=(
- "Whether to copy also deleted HDAs/HDCAs. Only applies when " "providing a `history_id` to copy from."
+ "Whether to copy also deleted HDAs/HDCAs. Only applies when providing a `history_id` to copy from."
),
)
archive_source: Optional[str] = Field(
@@ -3411,7 +3411,7 @@ class ShareWithPayload(Model):
...,
title="User Identifiers",
description=(
- "A collection of encoded IDs (or email addresses) of users " "that this resource will be shared with."
+ "A collection of encoded IDs (or email addresses) of users that this resource will be shared with."
),
)
share_option: Optional[SharingOptions] = Field(
diff --git a/lib/galaxy/tool_util/verify/test_data.py b/lib/galaxy/tool_util/verify/test_data.py
index 68bf408543e1..5aa8f4763c16 100644
--- a/lib/galaxy/tool_util/verify/test_data.py
+++ b/lib/galaxy/tool_util/verify/test_data.py
@@ -11,11 +11,11 @@
)
UPDATE_TEMPLATE = Template(
- "git --work-tree $dir --git-dir $dir/.git fetch && " "git --work-tree $dir --git-dir $dir/.git merge origin/master"
+ "git --work-tree $dir --git-dir $dir/.git fetch && git --work-tree $dir --git-dir $dir/.git merge origin/master"
)
UPDATE_FAILED_TEMPLATE = Template(
- "Warning failed to update test repository $dir - " "update stdout was [$stdout] and stderr was [$stderr]."
+ "Warning failed to update test repository $dir - update stdout was [$stdout] and stderr was [$stderr]."
)
diff --git a/lib/galaxy/tools/data_fetch.py b/lib/galaxy/tools/data_fetch.py
index 19e272ca4b6b..a6786c725dc9 100644
--- a/lib/galaxy/tools/data_fetch.py
+++ b/lib/galaxy/tools/data_fetch.py
@@ -305,7 +305,7 @@ def _resolve_item_with_primary(item):
if datatype.dataset_content_needs_grooming(path):
err_msg = (
"The uploaded files need grooming, so change your Copy data into Galaxy? selection to be "
- + "Copy files into Galaxy instead of Link to files without copying into Galaxy so grooming can be performed."
+ "Copy files into Galaxy instead of Link to files without copying into Galaxy so grooming can be performed."
)
raise UploadProblemException(err_msg)
diff --git a/lib/galaxy/tools/search/__init__.py b/lib/galaxy/tools/search/__init__.py
index cc77ad0125a1..3fcce2caab42 100644
--- a/lib/galaxy/tools/search/__init__.py
+++ b/lib/galaxy/tools/search/__init__.py
@@ -233,7 +233,7 @@ def build_index(self, tool_cache, toolbox, index_help: bool = True) -> None:
# Add tool document to index (or overwrite if existing)
writer.update_document(**add_doc_kwds)
- log.debug(f"Toolbox index of panel {self.panel_view_id}" f" finished {execution_timer}")
+ log.debug("Toolbox index of panel %s finished %s", self.panel_view_id, execution_timer)
def _get_tools_to_remove(self, tool_cache) -> list:
"""Return list of tool IDs to be removed from index."""
diff --git a/lib/galaxy/util/custom_logging/fluent_log.py b/lib/galaxy/util/custom_logging/fluent_log.py
index ed9ede536e92..fde60ec00ae5 100644
--- a/lib/galaxy/util/custom_logging/fluent_log.py
+++ b/lib/galaxy/util/custom_logging/fluent_log.py
@@ -12,7 +12,7 @@
FluentSender = None
-FLUENT_IMPORT_MESSAGE = "The Python fluent package is required to use this " "feature, please install it"
+FLUENT_IMPORT_MESSAGE = "The Python fluent package is required to use this feature, please install it"
class FluentTraceLogger:
diff --git a/lib/galaxy/webapps/galaxy/api/cloudauthz.py b/lib/galaxy/webapps/galaxy/api/cloudauthz.py
index dc5a1ecf540a..e3e1ced53061 100644
--- a/lib/galaxy/webapps/galaxy/api/cloudauthz.py
+++ b/lib/galaxy/webapps/galaxy/api/cloudauthz.py
@@ -122,9 +122,9 @@ def create(self, trans, payload, **kwargs):
description = payload.get("description", "")
if not isinstance(config, dict):
- log.debug(msg_template.format(f"invalid config type `{type(config)}`, expect `dict`"))
+ log.debug(msg_template.format(f"invalid config type `{type(config)}`, expected `dict`"))
raise RequestParameterInvalidException(
- "Invalid type for the required `config` variable; expect `dict` " f"but received `{type(config)}`."
+ f"Invalid type for the required `config` variable; expected `dict` but received `{type(config)}`."
)
if authn_id:
try:
diff --git a/lib/galaxy/webapps/galaxy/api/job_tokens.py b/lib/galaxy/webapps/galaxy/api/job_tokens.py
index 8853f33d62bd..713e130a8a59 100644
--- a/lib/galaxy/webapps/galaxy/api/job_tokens.py
+++ b/lib/galaxy/webapps/galaxy/api/job_tokens.py
@@ -39,7 +39,7 @@ def get_token(
job_id: EncodedDatabaseIdField,
job_key: str = Query(
description=(
- "A key used to authenticate this request as acting on" "behalf or a job runner for the specified job"
+ "A key used to authenticate this request as acting on behalf of a job runner for the specified job"
),
),
provider: str = Query(
diff --git a/lib/galaxy/webapps/galaxy/api/remote_files.py b/lib/galaxy/webapps/galaxy/api/remote_files.py
index 4edfe28cc6bc..5cc6d938b7d9 100644
--- a/lib/galaxy/webapps/galaxy/api/remote_files.py
+++ b/lib/galaxy/webapps/galaxy/api/remote_files.py
@@ -37,7 +37,7 @@
TargetQueryParam: str = Query(
default=RemoteFilesTarget.ftpdir,
title="Target source",
- description=("The source to load datasets from." " Possible values: ftpdir, userdir, importdir"),
+ description=("The source to load datasets from. Possible values: ftpdir, userdir, importdir"),
)
FormatQueryParam: Optional[RemoteFilesFormat] = Query(
@@ -54,7 +54,7 @@
default=None,
title="Recursive",
description=(
- "Whether to recursively lists all sub-directories." " This will be `True` by default depending on the `target`."
+ "Whether to recursively list all sub-directories. This will be `True` by default depending on the `target`."
),
)
diff --git a/lib/galaxy/webapps/galaxy/controllers/visualization.py b/lib/galaxy/webapps/galaxy/controllers/visualization.py
index 3928d2e6f02d..87a0c3947c55 100644
--- a/lib/galaxy/webapps/galaxy/controllers/visualization.py
+++ b/lib/galaxy/webapps/galaxy/controllers/visualization.py
@@ -19,7 +19,6 @@
UsesItemRatings,
)
from galaxy.structured_app import StructuredApp
-from galaxy.util import unicodify
from galaxy.util.sanitize_html import sanitize_html
from galaxy.visualization.genomes import GenomeRegion
from galaxy.webapps.base.controller import (
@@ -263,9 +262,8 @@ def _handle_plugin_error(self, trans, visualization_name, exception):
raise exception
return trans.show_error_message(
"There was an error rendering the visualization. "
- + "Contact your Galaxy administrator if the problem persists."
- + "<br/>Details: "
- + unicodify(exception),
+ "Contact your Galaxy administrator if the problem persists."
+ f"<br/>Details: {exception}",
use_panels=False,
)
diff --git a/lib/galaxy/webapps/galaxy/services/library_folders.py b/lib/galaxy/webapps/galaxy/services/library_folders.py
index 36e74004210b..bc560c849238 100644
--- a/lib/galaxy/webapps/galaxy/services/library_folders.py
+++ b/lib/galaxy/webapps/galaxy/services/library_folders.py
@@ -218,7 +218,7 @@ def set_permissions(
trans.app.security_agent.set_all_library_permissions(trans, folder, permissions)
else:
raise RequestParameterInvalidException(
- 'The mandatory parameter "action" has an invalid value.' 'Allowed values are: "set_permissions"'
+ 'The mandatory parameter "action" has an invalid value. Allowed values are: "set_permissions"'
)
current_permissions = self.folder_manager.get_current_roles(trans, folder)
return LibraryFolderCurrentPermissions(**current_permissions)
diff --git a/pyproject.toml b/pyproject.toml
index 0ebb92ddf93d..aaa26de5e38f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -191,8 +191,9 @@ exclude = [
[tool.ruff.lint]
# Enable: pycodestyle errors (E), Pyflakes (F), flake8-bugbear (B),
-# flake8-comprehensions (C4), flake8-logging-format (G) and pyupgrade (UP)
-select = ["E", "F", "B", "C4", "G", "UP"]
+# flake8-comprehensions (C4), flake8-logging-format (G),
+# flake8-implicit-str-concat (ISC) and pyupgrade (UP)
+select = ["E", "F", "B", "C4", "G", "ISC", "UP"]
# Exceptions:
# B008 Do not perform function calls in argument defaults (for FastAPI Depends and Body)
# B9 flake8-bugbear opinionated warnings
diff --git a/scripts/api/upload_to_history.py b/scripts/api/upload_to_history.py
index e3c71e091444..5823143cf097 100755
--- a/scripts/api/upload_to_history.py
+++ b/scripts/api/upload_to_history.py
@@ -12,7 +12,7 @@
except ImportError:
print(
"Could not import the requests module. See http://docs.python-requests.org/en/latest/"
- + " or install with 'pip install requests'"
+ " or install with 'pip install requests'"
)
raise
@@ -46,7 +46,7 @@ def upload_file(base_url, api_key, history_id, filepath, **kwargs):
if len(sys.argv) < 5:
print(
"history_upload.py \n"
- + " (where galaxy base url is just the root url where your Galaxy is served; e.g. 'localhost:8080')"
+ " (where galaxy base url is just the root url where your Galaxy is served; e.g. 'localhost:8080')"
)
sys.exit(1)
diff --git a/scripts/cleanup_datasets/admin_cleanup_datasets.py b/scripts/cleanup_datasets/admin_cleanup_datasets.py
index 5c78ff0382b3..624fb3fac98d 100755
--- a/scripts/cleanup_datasets/admin_cleanup_datasets.py
+++ b/scripts/cleanup_datasets/admin_cleanup_datasets.py
@@ -93,7 +93,7 @@ def main():
help="config file (legacy, use --config instead)",
)
parser.add_argument("-d", "--days", dest="days", action="store", type=int, help="number of days (60)", default=60)
- parser.add_argument("--tool_id", default=None, help="Text to match against tool_id" "Default: match all")
+ parser.add_argument("--tool_id", default=None, help="Text to match against tool_id. Default: match all")
parser.add_argument(
"--template",
default=None,
@@ -121,10 +121,10 @@ def main():
default=False,
)
parser.add_argument(
- "--smtp", default=None, help="SMTP Server to use to send email. " "Default: [read from galaxy config file]"
+ "--smtp", default=None, help="SMTP Server to use to send email. Default: [read from galaxy config file]"
)
parser.add_argument(
- "--fromaddr", default=None, help="From address to use to send email. " "Default: [read from galaxy config file]"
+ "--fromaddr", default=None, help="From address to use to send email. Default: [read from galaxy config file]"
)
populate_config_args(parser)
@@ -138,14 +138,12 @@ def main():
if args.smtp is not None:
app_properties["smtp_server"] = args.smtp
if app_properties.get("smtp_server") is None:
- parser.error("SMTP Server must be specified as an option (--smtp) " "or in the config file (smtp_server)")
+ parser.error("SMTP Server must be specified as an option (--smtp) or in the config file (smtp_server)")
if args.fromaddr is not None:
app_properties["email_from"] = args.fromaddr
if app_properties.get("email_from") is None:
- parser.error(
- "From address must be specified as an option " "(--fromaddr) or in the config file " "(email_from)"
- )
+ parser.error("From address must be specified as an option (--fromaddr) or in the config file (email_from)")
scriptdir = os.path.dirname(os.path.abspath(__file__))
template_file = args.template
@@ -262,7 +260,7 @@ def administrative_delete_datasets(
# Mark the HistoryDatasetAssociation as deleted
hda.deleted = True
app.sa_session.add(hda)
- print("Marked HistoryDatasetAssociation id %d as " "deleted" % hda.id)
+ print("Marked HistoryDatasetAssociation id %d as deleted" % hda.id)
session = app.sa_session()
with transaction(session):
session.commit()
@@ -270,7 +268,7 @@ def administrative_delete_datasets(
emailtemplate = Template(filename=template_file)
for email, dataset_list in user_notifications.items():
msgtext = emailtemplate.render(email=email, datasets=dataset_list, cutoff=cutoff_days)
- subject = "Galaxy Server Cleanup " "- %d datasets DELETED" % len(dataset_list)
+ subject = "Galaxy Server Cleanup - %d datasets DELETED" % len(dataset_list)
fromaddr = config.email_from
print()
print(f"From: {fromaddr}")
diff --git a/scripts/rst2html.py b/scripts/rst2html.py
index 5e825c5badd2..2b82025acc77 100755
--- a/scripts/rst2html.py
+++ b/scripts/rst2html.py
@@ -22,6 +22,6 @@
publish_cmdline,
)
-description = "Generates (X)HTML documents from standalone reStructuredText " "sources. " + default_description
+description = "Generates (X)HTML documents from standalone reStructuredText sources. " + default_description
publish_cmdline(writer_name="html", description=description)
diff --git a/scripts/runtime_stats.py b/scripts/runtime_stats.py
index 9e6c9e5277aa..49a08a5d3e87 100755
--- a/scripts/runtime_stats.py
+++ b/scripts/runtime_stats.py
@@ -82,20 +82,20 @@ def parse_arguments():
"--like",
action="store_true",
default=False,
- help="Use SQL `LIKE` operator to find " "a shed-installed tool using the tool's " '"short" id',
+ help='Use SQL `LIKE` operator to find a shed-installed tool using the tool\'s "short" id',
)
populate_config_args(parser)
parser.add_argument("-d", "--debug", action="store_true", default=False, help="Print extra info")
parser.add_argument("-m", "--min", type=int, default=-1, help="Ignore runtimes less than MIN seconds")
parser.add_argument("-M", "--max", type=int, default=-1, help="Ignore runtimes greater than MAX seconds")
- parser.add_argument("-u", "--user", help="Return stats for only this user (id, email, " "or username)")
+ parser.add_argument("-u", "--user", help="Return stats for only this user (id, email, or username)")
parser.add_argument(
"-s", "--source", default="metrics", help="Runtime data source (SOURCES: {})".format(", ".join(DATA_SOURCES))
)
args = parser.parse_args()
if args.like and "/" in args.tool_id:
- print("ERROR: Do not use --like with a tool shed tool id (the tool " "id should not contain `/` characters)")
+ print("ERROR: Do not use --like with a tool shed tool id (the tool id should not contain `/` characters)")
sys.exit(2)
args.source = args.source.lower()
diff --git a/scripts/tool_shed/deprecate_repositories_without_metadata.py b/scripts/tool_shed/deprecate_repositories_without_metadata.py
index 6ab395d0354b..42a2e840886c 100644
--- a/scripts/tool_shed/deprecate_repositories_without_metadata.py
+++ b/scripts/tool_shed/deprecate_repositories_without_metadata.py
@@ -106,8 +106,8 @@ def send_mail_to_owner(app, owner, email, repositories_deprecated, days=14):
subject = f"Regarding your tool shed repositories at {url}"
message_body_template = (
"The tool shed automated repository checker has discovered that one or more of your repositories hosted "
- + "at this tool shed url ${url} have remained empty for over ${days} days, so they have been marked as deprecated. If you have plans "
- + "for these repositories, you can mark them as un-deprecated at any time."
+ "at this tool shed url ${url} have remained empty for over ${days} days, so they have been marked as deprecated. If you have plans "
+ "for these repositories, you can mark them as un-deprecated at any time."
)
message_template = string.Template(message_body_template)
body = "\n".join(textwrap.wrap(message_template.safe_substitute(days=days, url=url), width=95))
diff --git a/test/unit/app/managers/test_HDAManager.py b/test/unit/app/managers/test_HDAManager.py
index 226b1e8d70d9..a868c7ce5ca3 100644
--- a/test/unit/app/managers/test_HDAManager.py
+++ b/test/unit/app/managers/test_HDAManager.py
@@ -221,8 +221,7 @@ def test_accessible(self):
assert accessible == item1
self.log(
- "after setting a dataset to private (one user) permissions, "
- + "access should be not allowed for other users"
+ "after setting a dataset to private (one user) permissions, access should be not allowed for other users"
)
with self.assertRaises(exceptions.ItemAccessibilityException):
self.hda_manager.get_accessible(
@@ -233,7 +232,7 @@ def test_accessible(self):
self.log(
"a copy of a restricted dataset in another users history should be inaccessible even to "
- + "the histories owner"
+ "the histories owner"
)
history2 = self.history_manager.create(name="history2", user=non_owner)
self.trans.set_history(history2)
@@ -300,7 +299,7 @@ def test_anon_accessibility(self):
self.log(
"anonymous users should not be able to access datasets within their own histories if "
- + "permissions do not allow"
+ "permissions do not allow"
)
assert not self.hda_manager.is_accessible(item1, anon_user)
with self.assertRaises(exceptions.ItemAccessibilityException):
@@ -308,7 +307,7 @@ def test_anon_accessibility(self):
self.log(
"those users with access permissions should still be allowed access to datasets "
- + "within anon users' histories"
+ "within anon users' histories"
)
assert self.hda_manager.is_accessible(item1, dataset_owner)
diff --git a/test/unit/app/test_custom_templates.py b/test/unit/app/test_custom_templates.py
index e3a2f20136d4..2da4cfef8217 100644
--- a/test/unit/app/test_custom_templates.py
+++ b/test/unit/app/test_custom_templates.py
@@ -14,7 +14,7 @@
"""
DEFAULT_TEMPLATE_HEADER_SUBSTRING = "The following variables are available"
DEFAULT_TEMPLATE_OUTPUT_SUBSTRING = ""
-CUSTOM_TEMPLATE_OUTPUT = "This is my custom template!\n" f"Name: {CONTEXT['name']}"
+CUSTOM_TEMPLATE_OUTPUT = f"This is my custom template!\nName: {CONTEXT['name']}"
def test_it_can_render_a_default_template(tmp_path):
diff --git a/test/unit/data/datatypes/test_qiime2.py b/test/unit/data/datatypes/test_qiime2.py
index 8b57cf4431bc..9bfddc4e0037 100644
--- a/test/unit/data/datatypes/test_qiime2.py
+++ b/test/unit/data/datatypes/test_qiime2.py
@@ -147,7 +147,7 @@ def test_strip_properties_single():
def test_strip_properties_double():
- double_expression = 'FeatureData[Taxonomy % Properties("SILVIA"), ' 'DistanceMatrix % Axes("ASV", "ASV")]'
+ double_expression = 'FeatureData[Taxonomy % Properties("SILVIA"), DistanceMatrix % Axes("ASV", "ASV")]'
stripped_expression = "FeatureData[Taxonomy, DistanceMatrix]"
reconstructed_expression = _strip_properties(double_expression)
@@ -156,7 +156,7 @@ def test_strip_properties_double():
def test_strip_properties_nested():
- nested_expression = "Tuple[FeatureData[Taxonomy % " 'Properties("SILVIA")] % Axes("ASV", "ASV")]'
+ nested_expression = 'Tuple[FeatureData[Taxonomy % Properties("SILVIA")] % Axes("ASV", "ASV")]'
stripped_expression = "Tuple[FeatureData[Taxonomy]]"
reconstructed_expression = _strip_properties(nested_expression)
@@ -177,7 +177,7 @@ def test_strip_properties_complex():
def test_strip_properties_keeps_different_binop():
- expression_with_different_binop = 'FeatureData[Taxonomy % Properties("SILVIA"), ' "Taxonomy & Properties]"
+ expression_with_different_binop = 'FeatureData[Taxonomy % Properties("SILVIA"), Taxonomy & Properties]'
stripped_expression = "FeatureData[Taxonomy, Taxonomy & Properties]"
reconstructed_expression = _strip_properties(expression_with_different_binop)
diff --git a/tools/data_export/send.py b/tools/data_export/send.py
index fe4ce0348828..87270b1d8fad 100644
--- a/tools/data_export/send.py
+++ b/tools/data_export/send.py
@@ -102,7 +102,7 @@ def parse_args(args):
"--object_label",
type=str,
required=True,
- help="The label of the object created on the cloud-based storage for " "the data to be persisted.",
+ help="The label of the object created on the cloud-based storage for the data to be persisted.",
)
parser.add_argument(
@@ -110,7 +110,7 @@ def parse_args(args):
"--filename",
type=str,
required=True,
- help="The (absolute) filename of the data to be persisted on the " "cloud-based storage.",
+ help="The (absolute) filename of the data to be persisted on the cloud-based storage.",
)
parser.add_argument(
diff --git a/tools/data_source/upload.py b/tools/data_source/upload.py
index 823fee7cc9bf..d6c6b108650e 100644
--- a/tools/data_source/upload.py
+++ b/tools/data_source/upload.py
@@ -155,7 +155,7 @@ def add_file(dataset, registry, output_path: str) -> Dict[str, str]:
if datatype.dataset_content_needs_grooming(dataset.path):
err_msg = (
"The uploaded files need grooming, so change your Copy data into Galaxy? selection to be "
- + "Copy files into Galaxy instead of Link to files without copying into Galaxy so grooming can be performed."
+ "Copy files into Galaxy instead of Link to files without copying into Galaxy so grooming can be performed."
)
raise UploadProblemException(err_msg)
if not link_data_only: