
Commit

fix rebase error
bernt-matthias committed Jun 6, 2024
1 parent 545cda2 commit 7bb8a3f
Showing 1 changed file with 0 additions and 41 deletions.
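The diff below drops nested merge-conflict markers (<<<<<<< HEAD, =======, >>>>>>> ...) and leftover debug log.error calls that an earlier rebase had accidentally committed; only the HEAD side survives, which stores datasets under prefixed_name and registers the old prefix + input.name key via set_legacy_alias. Stray markers like these can be caught before committing with git diff --check, or with a small scan such as the following sketch (the find_conflict_markers helper and the command-line usage are illustrative, not part of Galaxy):

import re
import sys
from pathlib import Path

# Matches the three marker shapes git leaves behind:
# "<<<<<<< <label>", "=======", and ">>>>>>> <label>".
CONFLICT_MARKER = re.compile(r"^(<{7}(\s|$)|={7}$|>{7}(\s|$))")

def find_conflict_markers(path: Path) -> list[tuple[int, str]]:
    """Return (line_number, line) pairs that look like conflict markers."""
    hits = []
    for lineno, line in enumerate(path.read_text(errors="replace").splitlines(), start=1):
        if CONFLICT_MARKER.match(line):
            hits.append((lineno, line.rstrip()))
    return hits

if __name__ == "__main__":
    # e.g. python check_conflicts.py lib/galaxy/tools/actions/__init__.py
    status = 0
    for arg in sys.argv[1:]:
        for lineno, line in find_conflict_markers(Path(arg)):
            print(f"{arg}:{lineno}: {line}")
            status = 1
    sys.exit(status)

Note that a bare ======= line can also appear legitimately (e.g. as an RST heading underline), so a scan like this is best used as a warning, not a hard gate.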
41 changes: 0 additions & 41 deletions lib/galaxy/tools/actions/__init__.py
@@ -211,24 +211,9 @@ def process_dataset(data, formats=None):
                             i
                         ] = conversion_data.id  # a more robust way to determine JSONable value is desired
                     else:
-<<<<<<< HEAD
-<<<<<<< HEAD
-<<<<<<< HEAD
                         input_datasets[prefixed_name] = process_dataset(value)
                         input_datasets.set_legacy_alias(new_key=prefixed_name, old_key=prefix + input.name)
-=======
-                        log.error(f"ADD DTP !list input.name {input.name}")
-                        input_datasets[prefix + input.name] = process_dataset(value)
-                        log.error(f"==> input_datasets {input_datasets}")
->>>>>>> f578c6ad55 (more debug)
-=======
-                        # log.error(f"ADD DTP !list input.name {input.name}")
-                        input_datasets[prefix + input.name] = process_dataset(value)
-                        # log.error(f"==> input_datasets {input_datasets}")
->>>>>>> 26d94faf82 (fix linter problems and commend log statements)
-=======
-                        input_datasets[prefix + input.name] = process_dataset(value)
->>>>>>> 83b785ba3f (remove log messages)
                     conversions = []
                     for conversion_name, conversion_extensions, conversion_datatypes in input.conversions:
                         new_data = process_dataset(input_datasets[prefixed_name], conversion_datatypes)
@@ -291,26 +276,11 @@ def process_dataset(data, formats=None):
                             processed_dataset = process_dataset(v)
                             if processed_dataset is not v:
                                 processed_dataset_dict[v] = processed_dataset
-<<<<<<< HEAD
-<<<<<<< HEAD
-<<<<<<< HEAD
                             input_datasets[prefixed_name + str(i + 1)] = processed_dataset or v
                             input_datasets.set_legacy_alias(
                                 new_key=prefixed_name + str(i + 1), old_key=prefix + input.name + str(i + 1)
                             )
-=======
-                            log.error(f"ADD DCTP input.name {input.name} i+1 {i+1}")
-                            input_datasets[prefix + input.name + str(i + 1)] = processed_dataset or v
-                            log.error(f"==> input_datasets {input_datasets}")
->>>>>>> f578c6ad55 (more debug)
-=======
-                            # log.error(f"ADD DCTP input.name {input.name} i+1 {i+1}")
-                            input_datasets[prefix + input.name + str(i + 1)] = processed_dataset or v
-                            # log.error(f"==> input_datasets {input_datasets}")
->>>>>>> 26d94faf82 (fix linter problems and commend log statements)
-=======
-                            input_datasets[prefix + input.name + str(i + 1)] = processed_dataset or v
->>>>>>> 83b785ba3f (remove log messages)
                     if conversion_required:
                         collection_type_description = (
                             trans.app.dataset_collection_manager.collection_type_descriptions.for_collection_type(
@@ -527,17 +497,6 @@ def execute(
         async_tool = tool.tool_type == "data_source_async"

         def handle_output(name, output, hidden=None):
-<<<<<<< HEAD
-<<<<<<< HEAD
-=======
-            # log.error(f"handle_output name {name}")
-            # log.error(f"handle_output output {output}")
-=======
->>>>>>> 83b785ba3f (remove log messages)
-            if output.parent:
-                parent_to_child_pairs.append((output.parent, name))
-                child_dataset_names.add(name)
->>>>>>> 26d94faf82 (fix linter problems and commend log statements)
             if async_tool and name in incoming:
                 # HACK: output data has already been created as a result of the async controller
                 dataid = incoming[name]
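For readers unfamiliar with the set_legacy_alias calls retained above: the resolved code stores each dataset under a new prefixed_name key while keeping the old prefix + input.name key resolvable. A rough sketch of that pattern follows (LegacyAliasDict is invented here for illustration; Galaxy's actual class may differ):

# Hypothetical sketch of the legacy-alias pattern suggested by
# input_datasets.set_legacy_alias(new_key=..., old_key=...) in the diff.
class LegacyAliasDict(dict):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._aliases = {}  # old key -> new key

    def set_legacy_alias(self, new_key, old_key):
        self._aliases[old_key] = new_key

    def __getitem__(self, key):
        # Fall back to the aliased new key for callers still using old names.
        if key not in self and key in self._aliases:
            key = self._aliases[key]
        return super().__getitem__(key)

input_datasets = LegacyAliasDict()
input_datasets["input1|dataset"] = "hda_42"  # stored under the new prefixed key (example values)
input_datasets.set_legacy_alias(new_key="input1|dataset", old_key="input1dataset")
assert input_datasets["input1dataset"] == "hda_42"  # old-style lookup still resolves

Routing old keys through __getitem__ lets existing callers keep working during a key-naming migration without storing each value twice.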
