Allow setting tags on targets & contents in data fetch API.
jmchilton committed Jul 14, 2018
1 parent bc90d0a commit 80ede7e
Showing 7 changed files with 52 additions and 13 deletions.
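In short, a fetch API payload may now carry a "tags" list on each target (applied to the created collection) and on each element within a target (applied to the individual datasets). A minimal sketch of such a payload, modeled on the tests further down; the history ID and the way file payloads are attached to the request are placeholders elided here:

payload = {
    "history_id": history_id,  # placeholder: encoded ID of an existing history
    "targets": [{
        "destination": {"type": "hdca"},
        "collection_type": "list",
        "name": "Test upload",
        "tags": ["name:collection1"],  # applied to the new collection instance
        "elements": [{
            "src": "files",  # file payload attached to the request, as in the tests
            "dbkey": "hg19",
            "info": "my cool bed",
            "tags": ["name:data1", "group:condition:treated"],  # applied to the dataset
        }],
    }],
}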
lib/galaxy/managers/collections.py (10 additions & 3 deletions)
@@ -46,11 +46,11 @@ def __init__(self, app):
         self.tag_manager = tags.GalaxyTagManager(app.model.context)
         self.ldda_manager = lddas.LDDAManager(app)

-    def precreate_dataset_collection_instance(self, trans, parent, name, structure, implicit_inputs=None, implicit_output_name=None):
+    def precreate_dataset_collection_instance(self, trans, parent, name, structure, implicit_inputs=None, implicit_output_name=None, tags=[]):
         # TODO: prebuild all required HIDs and send them in so no need to flush in between.
         dataset_collection = self.precreate_dataset_collection(structure, allow_unitialized_element=implicit_output_name is not None)
         instance = self._create_instance_for_collection(
-            trans, parent, name, dataset_collection, implicit_inputs=implicit_inputs, implicit_output_name=implicit_output_name, flush=False
+            trans, parent, name, dataset_collection, implicit_inputs=implicit_inputs, implicit_output_name=implicit_output_name, flush=False, tags=tags
         )
         return instance

@@ -148,7 +148,14 @@ def _create_instance_for_collection(self, trans, parent, name, dataset_collectio
             log.exception(message)
             raise MessageException(message)

-        tags = self._append_tags(dataset_collection_instance, implicit_inputs, tags)
+        # Tags may be coming in as a dictionary or tag model objects if copying them from other
+        # existing Galaxy objects or as a list of strings if the tags are coming from user supplied
+        # values.
+        if tags and isinstance(tags, list):
+            assert implicit_inputs is None, implicit_inputs
+            tags = self.tag_manager.add_tags_from_list(trans.user, dataset_collection_instance, tags)
+        else:
+            tags = self._append_tags(dataset_collection_instance, implicit_inputs, tags)
         return self.__persist(dataset_collection_instance, flush=flush)

     def create_dataset_collection(self, trans, collection_type, element_identifiers=None, elements=None,
lib/galaxy/tools/actions/upload.py (4 additions & 2 deletions)
@@ -121,7 +121,8 @@ def _precreate_fetched_hdas(trans, history, target, outputs):
         uploaded_dataset = Bunch(
             type='file', name=name, file_type=file_type, dbkey=dbkey
         )
-        data = upload_common.new_upload(trans, '', uploaded_dataset, library_bunch=None, history=history)
+        tag_list = item.get("tags", [])
+        data = upload_common.new_upload(trans, '', uploaded_dataset, library_bunch=None, history=history, tag_list=tag_list)
         outputs.append(data)
         item["object_id"] = data.id

@@ -136,11 +137,12 @@ def _precreate_fetched_collection_instance(trans, history, target, outputs):
     if not name:
         return

+    tags = target.get("tags", [])
     collections_service = trans.app.dataset_collections_service
     collection_type_description = collections_service.collection_type_descriptions.for_collection_type(collection_type)
     structure = UninitializedTree(collection_type_description)
     hdca = collections_service.precreate_dataset_collection_instance(
-        trans, history, name, structure=structure
+        trans, history, name, structure=structure, tags=tags
     )
     outputs.append(hdca)
     # Following flushed needed for an ID.
lib/galaxy/tools/actions/upload_common.py (8 additions & 3 deletions)
@@ -274,11 +274,16 @@ def __new_library_upload(trans, cntrller, uploaded_dataset, library_bunch, state
     return ldda


-def new_upload(trans, cntrller, uploaded_dataset, library_bunch=None, history=None, state=None):
+def new_upload(trans, cntrller, uploaded_dataset, library_bunch=None, history=None, state=None, tag_list=[]):
     if library_bunch:
-        return __new_library_upload(trans, cntrller, uploaded_dataset, library_bunch, state)
+        upload_target_dataset_instance = __new_library_upload(trans, cntrller, uploaded_dataset, library_bunch, state)
     else:
-        return __new_history_upload(trans, uploaded_dataset, history=history, state=state)
+        upload_target_dataset_instance = __new_history_upload(trans, uploaded_dataset, history=history, state=state)
+
+    if tag_list:
+        trans.app.tag_handler.add_tags_from_list(trans.user, upload_target_dataset_instance, tag_list)
+
+    return upload_target_dataset_instance


 def get_uploaded_datasets(trans, cntrller, params, dataset_upload_inputs, library_bunch=None, history=None):
lib/galaxy/tools/data_fetch.py (3 additions & 0 deletions)
@@ -95,6 +95,7 @@ def _resolve_src(item):
         dbkey = item.get("dbkey", "?")
         requested_ext = item.get("ext", "auto")
         info = item.get("info", None)
+        tags = item.get("tags", [])
         object_id = item.get("object_id", None)
         link_data_only = upload_config.link_data_only
         if "link_data_only" in item:
@@ -146,6 +147,8 @@ def _resolve_src(item):
             rval["info"] = info
         if object_id is not None:
             rval["object_id"] = object_id
+        if tags:
+            rval["tags"] = tags
         return rval

     elements = elements_tree_map(_resolve_src, items)
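The effect of these two hunks is a straight pass-through: an element's "tags" survive from the request item into the resolved element description that the downstream collection code consumes. A rough sketch, assuming an item shaped like the ones in the tests below:

item = {"src": "files", "dbkey": "hg19", "info": "my cool bed", "tags": ["name:data1"]}
# After _resolve_src(item), the returned rval now also carries the tags, roughly:
# {..., "dbkey": "hg19", "info": "my cool bed", "tags": ["name:data1"]}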
lib/galaxy/tools/parameters/output_collect.py (11 additions & 1 deletion)
@@ -407,7 +407,7 @@ def populate_collection_elements(self, collection, root_collection_builder, file
             dataset_name = fields_match.name or designation

             link_data = discovered_file.match.link_data
-
+            tag_list = discovered_file.match.tag_list
             dataset = self.create_dataset(
                 ext=ext,
                 designation=designation,
@@ -417,6 +417,7 @@
                 filename=filename,
                 metadata_source_name=metadata_source_name,
                 link_data=link_data,
+                tag_list=tag_list,
             )
             log.debug(
                 "(%s) Created dynamic collection dataset for path [%s] with element identifier [%s] for output [%s] %s",
@@ -473,6 +474,7 @@ def create_dataset(
         library_folder=None,
         link_data=False,
         primary_data=None,
+        tag_list=[],
     ):
         app = self.app
         sa_session = self.sa_session
@@ -493,6 +495,10 @@
             metadata_source = self.inp_data[metadata_source_name]

         sa_session.flush()
+
+        if tag_list:
+            app.tag_handler.add_tags_from_list(self.job.user, primary_data, tag_list)
+
         # Move data from temp location to dataset location
         if not link_data:
             app.object_store.update_from_file(primary_data.dataset, file_name=filename, create=True)
@@ -873,6 +879,10 @@ def visible(self):
     def link_data(self):
         return bool(self.as_dict.get("link_data_only", False))

+    @property
+    def tag_list(self):
+        return self.as_dict.get("tags", [])
+
     @property
     def object_id(self):
         return self.as_dict.get("object_id", None)
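On the collection side, the new tag_list property simply reads the "tags" key of the dictionary describing a discovered element, and create_dataset applies it through the app's tag handler. A sketch of the backing dictionary, with hypothetical values:

as_dict = {"name": "4.bed", "object_id": 42, "tags": ["name:data1"]}
tag_list = as_dict.get("tags", [])  # what the new tag_list property returns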
test/api/test_dataset_collections.py (9 additions & 2 deletions)
@@ -189,12 +189,13 @@ def test_enforces_unique_names(self):
         self._assert_status_code_is(create_response, 400)

     def test_upload_collection(self):
-        elements = [{"src": "files", "dbkey": "hg19", "info": "my cool bed"}]
+        elements = [{"src": "files", "dbkey": "hg19", "info": "my cool bed", "tags": ["name:data1", "group:condition:treated", "machine:illumina"]}]
         targets = [{
             "destination": {"type": "hdca"},
             "elements": elements,
             "collection_type": "list",
             "name": "Test upload",
+            "tags": ["name:collection1"]
         }]
         payload = {
             "history_id": self.history_id,
@@ -204,10 +205,16 @@
         self.dataset_populator.fetch(payload)
         hdca = self._assert_one_collection_created_in_history()
         self.assertEquals(hdca["name"], "Test upload")
+        hdca_tags = hdca["tags"]
+        assert len(hdca_tags) == 1
+        assert "name:collection1" in hdca_tags
         assert len(hdca["elements"]) == 1, hdca
         element0 = hdca["elements"][0]
         assert element0["element_identifier"] == "4.bed"
-        assert element0["object"]["file_size"] == 61
+        dataset0 = element0["object"]
+        assert dataset0["file_size"] == 61
+        dataset_tags = dataset0["tags"]
+        assert len(dataset_tags) == 3, dataset0

     def test_upload_nested(self):
         elements = [{"name": "samp1", "elements": [{"src": "files", "dbkey": "hg19", "info": "my cool bed"}]}]
test/api/test_tools_upload.py (7 additions & 2 deletions)
@@ -227,10 +227,15 @@ def test_upload_dbkey(self):
         datasets = run_response.json()["outputs"]
         assert datasets[0].get("genome_build") == "hg19", datasets[0]

-    def test_fetch_dbkey(self):
+    def test_fetch_metadata(self):
         table = ONE_TO_SIX_WITH_SPACES
-        details = self._upload_and_get_details(table, api='fetch', dbkey="hg19")
+        details = self._upload_and_get_details(table, api='fetch', dbkey="hg19", info="cool upload", tags=["name:data", "group:type:paired-end"])
         assert details.get("genome_build") == "hg19"
+        assert details.get("misc_info") == "cool upload", details
+        tags = details.get("tags")
+        assert len(tags) == 2, details
+        assert "group:type:paired-end" in tags
+        assert "name:data" in tags

     def test_upload_multiple_files_1(self):
         with self.dataset_populator.test_history() as history_id:
