Fix new flake8 errors
nsoranzo committed Oct 10, 2023
1 parent 0b6bd9c commit 6d9787b
Showing 12 changed files with 21 additions and 22 deletions.
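
The changes fall into three groups: exact type comparisons (type(x) == SomeClass) rewritten as isinstance() checks (pycodestyle E721), whitespace added around operators inside f-string replacement fields (E225/E226, which flake8 only began seeing inside f-strings once Python 3.12 started tokenizing them per PEP 701, presumably the "new" errors of the commit title), and one targeted noqa for a pyflakes F811 false positive. One hunk also fixes a genuine message bug along the way; short notes and sketches follow the relevant hunks below.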
2 changes: 1 addition & 1 deletion lib/galaxy/jobs/__init__.py
@@ -1674,7 +1674,7 @@ def _set_object_store_ids_full(self, job):
             # the outputs and set them accordingly
             object_store_id_overrides = {o: preferred_outputs_object_store_id for o in output_names}
 
-            def split_object_stores(output_name):
+            def split_object_stores(output_name):  # noqa: F811 https://github.com/PyCQA/pyflakes/issues/783
                 if "|__part__|" in output_name:
                     output_name = output_name.split("|__part__|", 1)[0]
                 if output_name in output_names:
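
Pyflakes reports F811 ("redefinition of unused name") whenever a function name is bound more than once in the same scope, even when the two definitions sit in sibling branches that can never both execute, which is the false positive the linked issue tracks. A minimal sketch of the kind of pattern that triggers it, with hypothetical names, not the Galaxy code itself:

    def make_splitter(use_override: bool):
        if use_override:
            def split(name):
                return "override"
        if not use_override:
            def split(name):  # noqa: F811 -- pyflakes cannot tell the branches are exclusive
                return "default"
        return split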
7 changes: 3 additions & 4 deletions lib/galaxy/managers/workflows.py
@@ -1428,13 +1428,12 @@ def _workflow_to_dict_export(self, trans, stored=None, workflow=None, internal=F
                 if name:
                     input_dicts.append({"name": name, "description": annotation_str})
                 for name, val in step_state.items():
-                    input_type = type(val)
-                    if input_type == RuntimeValue:
+                    if isinstance(val, RuntimeValue):
                         input_dicts.append({"name": name, "description": f"runtime parameter for tool {module.get_name()}"})
-                    elif input_type == dict:
+                    elif isinstance(val, dict):
                         # Input type is described by a dict, e.g. indexed parameters.
                         for partval in val.values():
-                            if type(partval) == RuntimeValue:
+                            if isinstance(partval, RuntimeValue):
                                 input_dicts.append(
                                     {"name": name, "description": f"runtime parameter for tool {module.get_name()}"}
                                 )
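
Recent pycodestyle releases promoted type(x) == SomeClass comparisons to an E721 error ("do not compare types, for exact checks use is and is not, for instance checks use isinstance()"). The isinstance() rewrite used here, and again in the security.py, tools/actions, upload_common.py, dictifiable.py, and library_contents.py hunks below, is not purely cosmetic: isinstance() also accepts subclasses, which is presumably the intended semantics at these call sites. A quick illustration:

    import datetime

    class Timestamp(datetime.datetime):
        """Hypothetical subclass, purely for illustration."""

    ts = Timestamp(2023, 10, 10)
    print(type(ts) == datetime.datetime)      # False: exact type match only
    print(isinstance(ts, datetime.datetime))  # True: subclasses match too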
10 changes: 5 additions & 5 deletions lib/galaxy/model/security.py
@@ -608,21 +608,21 @@ def __active_folders_have_accessible_library_datasets(self, trans, folder, user,
         return False
 
     def can_access_library_item(self, roles, item, user):
-        if type(item) == self.model.Library:
+        if isinstance(item, self.model.Library):
             return self.can_access_library(roles, item)
-        elif type(item) == self.model.LibraryFolder:
+        elif isinstance(item, self.model.LibraryFolder):
             return (
                 self.can_access_library(roles, item.parent_library) and self.check_folder_contents(user, roles, item)[0]
             )
-        elif type(item) == self.model.LibraryDataset:
+        elif isinstance(item, self.model.LibraryDataset):
             return self.can_access_library(roles, item.folder.parent_library) and self.can_access_dataset(
                 roles, item.library_dataset_dataset_association.dataset
             )
-        elif type(item) == self.model.LibraryDatasetDatasetAssociation:
+        elif isinstance(item, self.model.LibraryDatasetDatasetAssociation):
             return self.can_access_library(
                 roles, item.library_dataset.folder.parent_library
             ) and self.can_access_dataset(roles, item.dataset)
-        elif type(item) == self.model.LibraryDatasetCollectionAssociation:
+        elif isinstance(item, self.model.LibraryDatasetCollectionAssociation):
             return self.can_access_library(roles, item.folder.parent_library)
         else:
             log.warning(f"Unknown library item type: {type(item)}")
@@ -241,7 +241,7 @@ def container_testing(args=None):
             for error in container["errors"]:
                 f.write(
                     "\n\t\t\tCOMMAND: {}\n\t\t\t\tERROR:{}".format(
-                        error.get("command", f"import{error.get('import', 'nothing found')}"), error["output"]
+                        error.get("command", f"import {error.get('import', 'nothing found')}"), error["output"]
                     )
                 )
         f.write("\n\tNO TEST AVAILABLE:")
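
Unlike the surrounding style fixes, this hunk repairs a user-visible message: without the space, the fallback text ran the word "import" into whatever followed it. A sketch with hypothetical values:

    error = {"import": "numpy", "output": "ImportError: ..."}
    # before the fix: "importnumpy"
    # after the fix:  "import numpy"
    print(error.get("command", f"import {error.get('import', 'nothing found')}"))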
2 changes: 1 addition & 1 deletion lib/galaxy/tool_util/verify/script.py
@@ -362,7 +362,7 @@ def build_case_references(
             filtered_test_references.append(test_reference)
     if log is not None:
         log.info(
-            f"Skipping {len(test_references)-len(filtered_test_references)} out of {len(test_references)} tests."
+            f"Skipping {len(test_references) - len(filtered_test_references)} out of {len(test_references)} tests."
         )
     test_references = filtered_test_references
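
This hunk and the remaining whitespace fixes all add spaces around operators inside f-string replacement fields. Before Python 3.12, an f-string reached the linter as a single opaque token, so pycodestyle could not see inside it; PEP 701 made the interpreter tokenize f-strings like ordinary code, so rules such as E225 and E226 (missing whitespace around operators) started firing inside the braces. That is presumably what "new flake8 errors" refers to. For example:

    done, total = 3, 10
    # Flagged as E226 by flake8 running on Python 3.12:
    msg = f"Skipping {total-done} out of {total} tests."
    # Clean:
    msg = f"Skipping {total - done} out of {total} tests."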
4 changes: 2 additions & 2 deletions lib/galaxy/tools/actions/__init__.py
@@ -757,7 +757,7 @@ def _remap_job_on_rerun(self, trans, galaxy_session, rerun_remap_job_id, current
             assert (
                 old_job.user_id == trans.user.id
             ), f"({old_job.id}/{current_job.id}): Old user id ({old_job.user_id}) does not match rerun user id ({trans.user.id})"
-        elif trans.user is None and type(galaxy_session) == trans.model.GalaxySession:
+        elif trans.user is None and isinstance(galaxy_session, trans.model.GalaxySession):
             assert (
                 old_job.session_id == galaxy_session.id
             ), f"({old_job.id}/{current_job.id}): Old session id ({old_job.session_id}) does not match rerun session id ({galaxy_session.id})"
@@ -847,7 +847,7 @@ def _new_job_for_session(self, trans, tool, history):
         if hasattr(trans, "get_galaxy_session"):
             galaxy_session = trans.get_galaxy_session()
             # If we're submitting from the API, there won't be a session.
-            if type(galaxy_session) == trans.model.GalaxySession:
+            if isinstance(galaxy_session, trans.model.GalaxySession):
                 job.session_id = model.cached_id(galaxy_session)
         if trans.user is not None:
             job.user_id = model.cached_id(trans.user)
2 changes: 1 addition & 1 deletion lib/galaxy/tools/actions/upload_common.py
@@ -393,7 +393,7 @@ def create_job(trans, params, tool, json_file_path, outputs, folder=None, histor
     trans.sa_session.add(job)
     job.galaxy_version = trans.app.config.version_major
     galaxy_session = trans.get_galaxy_session()
-    if type(galaxy_session) == trans.model.GalaxySession:
+    if isinstance(galaxy_session, trans.model.GalaxySession):
        job.session_id = galaxy_session.id
    if trans.user is not None:
        job.user_id = trans.user.id
2 changes: 1 addition & 1 deletion lib/galaxy/tools/parameters/validation.py
@@ -409,7 +409,7 @@ def from_element(cls, param, elem):
         message = elem.get("message")
         negate = elem.get("negate", "false")
         if not message:
-            message = f"The selected dataset is {'non-' if negate == 'true' else ''}empty, this tool expects {'non-' if negate=='false' else ''}empty files."
+            message = f"The selected dataset is {'non-' if negate == 'true' else ''}empty, this tool expects {'non-' if negate == 'false' else ''}empty files."
         return cls(message, negate)
 
     def validate(self, value, trans=None):
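
The double negation in this message is easy to misread; spelled out, the two possible renderings are:

    for negate in ("true", "false"):
        print(f"The selected dataset is {'non-' if negate == 'true' else ''}empty, this tool expects {'non-' if negate == 'false' else ''}empty files.")
    # negate == "true":  "...is non-empty, this tool expects empty files."
    # negate == "false": "...is empty, this tool expects non-empty files."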
4 changes: 2 additions & 2 deletions lib/galaxy/util/dictifiable.py
@@ -38,9 +38,9 @@ def get_value(key, item):
             assert value_mapper is not None
             if key in value_mapper:
                 return value_mapper[key](item)
-            if type(item) == datetime.datetime:
+            if isinstance(item, datetime.datetime):
                 return item.isoformat()
-            elif type(item) == uuid.UUID:
+            elif isinstance(item, uuid.UUID):
                 return str(item)
             # Leaving this for future reference, though we may want a more
             # generic way to handle special type mappings going forward.
2 changes: 1 addition & 1 deletion lib/galaxy/webapps/galaxy/api/library_contents.py
@@ -312,7 +312,7 @@ def create(self, trans, library_id, payload, **kwd):
                 trans.sa_session.add(meta_i)
             with transaction(trans.sa_session):
                 trans.sa_session.commit()
-            if type(v) == trans.app.model.LibraryDatasetDatasetAssociation:
+            if isinstance(v, trans.app.model.LibraryDatasetDatasetAssociation):
                 v = v.library_dataset
             encoded_id = trans.security.encode_id(v.id)
             if create_type == "folder":
2 changes: 1 addition & 1 deletion lib/galaxy_test/api/test_folder_contents.py
@@ -275,7 +275,7 @@ def test_index_order_by(self, history_id):
                 history_id,
                 folder_id,
                 name,
-                content=f"{'0'*dataset_sizes[index]}",
+                content=f"{'0' * dataset_sizes[index]}",
                 ldda_message=ldda_messages[index],
                 file_type=file_types[index],
             )
4 changes: 2 additions & 2 deletions test/integration/test_storage_cleaner.py
@@ -164,7 +164,7 @@ def _create_histories_with(
             history_ids.append(history_id)
             # Create a dataset with content equal to the expected size of the history
             if history_data.size:
-                self.dataset_populator.new_dataset(history_id, content=f"{'0'*(history_data.size-1)}\n")
+                self.dataset_populator.new_dataset(history_id, content=f"{'0' * (history_data.size - 1)}\n")
         if wait_for_histories:
             for history_id in history_ids:
                 self.dataset_populator.wait_for_history(history_id)
@@ -176,7 +176,7 @@ def _create_datasets_in_history_with(
         dataset_ids = []
         for dataset_data in test_datasets:
             dataset = self.dataset_populator.new_dataset(
-                history_id, name=dataset_data.name, content=f"{'0'*(dataset_data.size-1)}\n"
+                history_id, name=dataset_data.name, content=f"{'0' * (dataset_data.size - 1)}\n"
             )
             dataset_ids.append(dataset["id"])
         if wait_for_history:
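
In both helpers the content is size - 1 zero characters plus a trailing newline, so each uploaded dataset is exactly size bytes. The f-string wrapper around the multiplication is redundant (plain concatenation would do), but the commit limits itself to the spacing fix. A sketch:

    size = 8
    content = f"{'0' * (size - 1)}\n"
    assert len(content) == size  # 7 zeros plus the trailing newline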
