
Commit cf15040

Remove controller endpoint associated with the copy dataset mako
guerler committed Mar 5, 2024
1 parent b52872d · commit cf15040
Showing 1 changed file with 0 additions and 146 deletions.
lib/galaxy/webapps/galaxy/controllers/dataset.py (146 changes: 0 additions & 146 deletions)
@@ -892,152 +892,6 @@ def purge_async(self, trans, dataset_id, filename):
else:
raise Exception(message)

@web.expose
def copy_datasets(
self,
trans,
source_history=None,
source_content_ids="",
target_history_id=None,
target_history_ids="",
new_history_name="",
do_copy=False,
**kwd,
):
user = trans.get_user()
if source_history is not None:
decoded_source_history_id = self.decode_id(source_history)
history = self.history_manager.get_owned(
decoded_source_history_id, trans.user, current_history=trans.history
)
current_history = trans.get_history()
else:
history = current_history = trans.get_history()
refresh_frames = []
if source_content_ids:
if not isinstance(source_content_ids, list):
source_content_ids = source_content_ids.split(",")
encoded_dataset_collection_ids = [
s[len("dataset_collection|") :] for s in source_content_ids if s.startswith("dataset_collection|")
]
encoded_dataset_ids = [s[len("dataset|") :] for s in source_content_ids if s.startswith("dataset|")]
decoded_dataset_collection_ids = set(map(self.decode_id, encoded_dataset_collection_ids))
decoded_dataset_ids = set(map(self.decode_id, encoded_dataset_ids))
else:
decoded_dataset_collection_ids = []
decoded_dataset_ids = []
if new_history_name:
target_history_ids = []
else:
if target_history_id:
target_history_ids = [self.decode_id(target_history_id)]
elif target_history_ids:
if not isinstance(target_history_ids, list):
target_history_ids = target_history_ids.split(",")
target_history_ids = list({self.decode_id(h) for h in target_history_ids if h})
else:
target_history_ids = []
done_msg = error_msg = ""
new_history = None
if do_copy:
invalid_contents = 0
if not (decoded_dataset_ids or decoded_dataset_collection_ids) or not (
target_history_ids or new_history_name
):
error_msg = "You must provide both source datasets and target histories. "
else:
if new_history_name:
new_history = trans.app.model.History()
new_history.name = new_history_name
new_history.user = user
trans.sa_session.add(new_history)
with transaction(trans.sa_session):
trans.sa_session.commit()
target_history_ids.append(new_history.id)
if user:
target_histories = [
hist
for hist in map(trans.sa_session.query(trans.app.model.History).get, target_history_ids)
if hist is not None and hist.user == user
]
else:
target_histories = [history]
if len(target_histories) != len(target_history_ids):
error_msg = (
error_msg
+ "You do not have permission to add datasets to %i requested histories. "
% (len(target_history_ids) - len(target_histories))
)
source_contents = list(
map(trans.sa_session.query(trans.app.model.HistoryDatasetAssociation).get, decoded_dataset_ids)
)
source_contents.extend(
map(
trans.sa_session.query(trans.app.model.HistoryDatasetCollectionAssociation).get,
decoded_dataset_collection_ids,
)
)
source_contents.sort(key=lambda content: content.hid)
for content in source_contents:
if content is None:
error_msg = f"{error_msg}You tried to copy a dataset that does not exist. "
invalid_contents += 1
elif content.history != history:
error_msg = f"{error_msg}You tried to copy a dataset which is not in your current history. "
invalid_contents += 1
else:
for hist in target_histories:
if content.history_content_type == "dataset":
copy = content.copy(flush=False)
hist.stage_addition(copy)
else:
copy = content.copy(element_destination=hist)
if user:
copy.copy_tags_from(user, content)
for hist in target_histories:
hist.add_pending_items()
with transaction(trans.sa_session):
trans.sa_session.commit()
if current_history in target_histories:
refresh_frames = ["history"]
hist_names_str = ", ".join(
'<a href="{}" target="_top">{}</a>'.format(
url_for(
controller="history", action="switch_to_history", hist_id=trans.security.encode_id(hist.id)
),
escape(hist.name),
)
for hist in target_histories
)
num_source = len(source_content_ids) - invalid_contents
num_target = len(target_histories)
done_msg = "%i %s copied to %i %s: %s." % (
num_source,
inflector.cond_plural(num_source, "dataset"),
num_target,
inflector.cond_plural(num_target, "history"),
hist_names_str,
)
trans.sa_session.refresh(history)
source_contents = history.active_contents
target_histories = [history]
if user:
target_histories = user.active_histories
return trans.fill_template(
"/dataset/copy_view.mako",
source_history=history,
current_history=current_history,
source_content_ids=source_content_ids,
target_history_id=target_history_id,
target_history_ids=target_history_ids,
source_contents=source_contents,
target_histories=target_histories,
new_history_name=new_history_name,
done_msg=done_msg,
error_msg=error_msg,
refresh_frames=refresh_frames,
)

def _copy_datasets(self, trans, dataset_ids, target_histories, imported=False):
"""Helper method for copying datasets."""
user = trans.get_user()
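For context, here is a minimal sketch of how a client could have invoked the removed endpoint before this commit. The route /dataset/copy_datasets is assumed from Galaxy's legacy controller/action routing for the @web.expose method shown above; the base URL, encoded ids, and session cookie are placeholders, while the parameter names and the "dataset|" / "dataset_collection|" prefix convention come from the deleted code itself.

import requests

GALAXY_URL = "https://galaxy.example.org"  # placeholder Galaxy instance

payload = {
    # Comma-separated content ids, each prefixed with "dataset|" or
    # "dataset_collection|" plus an encoded id (see the prefix-splitting
    # logic in the deleted method above).
    "source_content_ids": "dataset|<encoded-dataset-id>",
    # Comma-separated encoded target history ids; alternatively a
    # new_history_name could be supplied instead.
    "target_history_ids": "<encoded-history-id>",
    "new_history_name": "",
    # Without a truthy do_copy, the endpoint only rendered the
    # copy_view.mako form instead of performing the copy.
    "do_copy": "True",
}

# The legacy endpoint relied on the browser session rather than an API key,
# so a placeholder session cookie is passed here.
response = requests.post(
    f"{GALAXY_URL}/dataset/copy_datasets",
    data=payload,
    cookies={"galaxysession": "<session-cookie>"},
)
print(response.status_code)

Note that only this mako-rendering endpoint is removed: the private _copy_datasets helper visible as unchanged context below the deleted block stays in place.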
