Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Integrate tagging into the rule builder. #6500

Merged
merged 2 commits into from
Jul 18, 2018
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
37 changes: 37 additions & 0 deletions client/galaxy/scripts/components/RuleCollectionBuilder.vue
Original file line number Diff line number Diff line change
Expand Up @@ -343,6 +343,10 @@
<option v-for="(col, index) in genomes" :value="col['id']"">{{ col["text"] }}</option>
</select2>
</div>
<label v-if="showAddNameTag">
{{ l("Add nametag for name") }}:
</label>
<input type="checkbox" v-model="addNameTag" v-if="showAddNameTag"/>
<div class="rule-footer-name-group" v-if="showCollectionNameInput">
<b-input class="collection-name"
:placeholder="namePlaceholder" :title="namePlaceholder" v-b-tooltip.hover v-model="collectionName" />
Expand Down Expand Up @@ -932,6 +936,7 @@ export default {
genomes: [],
genome: null,
hideSourceItems: this.defaultHideSourceItems,
addNameTag: false,
orientation: orientation
};
},
Expand Down Expand Up @@ -1007,6 +1012,12 @@ export default {
!this.mappingAsDict.collection_name
);
},
showAddNameTag() {
return (
this.importType == "collections" &&
this.elementsType != "collection_contents"
);
},
titleFinish() {
if (this.elementsType == "datasets" || this.elementsType == "library_datasets") {
return _l("Create new collection from specified rules and datasets");
Expand Down Expand Up @@ -1464,6 +1475,9 @@ export default {
collection_type: collectionType,
name: collectionName
};
if (this.addNameTag) {
target["tags"] = ["name:" + collectionName];
}
targets.push(target);
}
} else {
Expand Down Expand Up @@ -1739,6 +1753,29 @@ export default {
const info = data[dataIndex][infoColumn];
res["info"] = info;
}
const tags = [];
if (mappingAsDict.tags) {
const tagColumns = mappingAsDict.tags.columns;
for (var tagColumn of tagColumns) {
const tag = data[dataIndex][tagColumn];
tags.push(tag);
}
}
if (mappingAsDict.group_tags) {
const groupTagColumns = mappingAsDict.group_tags.columns;
for (var groupTagColumn of groupTagColumns) {
const tag = data[dataIndex][groupTagColumn];
tags.push("group:" + tag);
}
}
if (mappingAsDict.name_tag) {
const nameTagColumn = mappingAsDict.name_tag.columns[0];
const nameTag = data[dataIndex][nameTagColumn];
tags.push("name:" + nameTag);
}
if (tags.length > 0) {
res["tags"] = tags;
}
return res;
}
},
Expand Down
24 changes: 24 additions & 0 deletions client/galaxy/scripts/mvc/rules/rule-definitions.js
Original file line number Diff line number Diff line change
Expand Up @@ -726,6 +726,30 @@ const MAPPING_TARGETS = {
modes: ["raw", "ftp", "datasets", "library_datasets"],
importType: "collections"
},
name_tag: {
label: _l("Name Tag"),
help: _l(
"Add a name tag or hash tag based on the specified column value for imported datasets."
),
importType: "datasets",
modes: ["raw", "ftp"]
},
tags: {
multiple: true,
label: _l("General Purpose Tag(s)"),
help: _l(
"Add a general purpose tag based on the specified column value, use : to separate key-value pairs if desired. These tags are not propagated to derived datasets the way name and group tags are."
),
modes: ["raw", "ftp", "library_datasets"],
},
group_tags: {
multiple: true,
label: _l("Group Tag(s)"),
help: _l(
"Add a group tag based on the specified column value, use : to separate key-value pairs. These tags are propagated to derived datasets and may be useful for factorial experiments."
),
modes: ["raw", "ftp", "library_datasets"],
},
name: {
label: _l("Name"),
importType: "datasets"
Expand Down
13 changes: 10 additions & 3 deletions lib/galaxy/managers/collections.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,11 +46,11 @@ def __init__(self, app):
self.tag_manager = tags.GalaxyTagManager(app.model.context)
self.ldda_manager = lddas.LDDAManager(app)

def precreate_dataset_collection_instance(self, trans, parent, name, structure, implicit_inputs=None, implicit_output_name=None, tags=None):
    """Create (but do not fully populate) a dataset collection instance.

    :param parent: history or folder that will own the new instance
    :param structure: collection structure description; may contain
        uninitialized elements when ``implicit_output_name`` is set
    :param tags: optional tags for the new instance — a list of strings for
        user-supplied tags, or a mapping/model objects when copied from
        existing Galaxy objects (handled downstream in
        ``_create_instance_for_collection``)
    :returns: the new, unflushed collection instance
    """
    # TODO: prebuild all required HIDs and send them in so no need to flush in between.
    dataset_collection = self.precreate_dataset_collection(structure, allow_unitialized_element=implicit_output_name is not None)
    instance = self._create_instance_for_collection(
        trans, parent, name, dataset_collection, implicit_inputs=implicit_inputs, implicit_output_name=implicit_output_name, flush=False, tags=tags
    )
    return instance

Expand Down Expand Up @@ -148,7 +148,14 @@ def _create_instance_for_collection(self, trans, parent, name, dataset_collectio
log.exception(message)
raise MessageException(message)

tags = self._append_tags(dataset_collection_instance, implicit_inputs, tags)
# Tags may be coming in as a dictionary or tag model objects if copying them from other
# existing Galaxy objects or as a list of strings if the tags are coming from user supplied
# values.
if isinstance(tags, list):
assert implicit_inputs is None, implicit_inputs
tags = self.tag_manager.add_tags_from_list(trans.user, dataset_collection_instance, tags)
else:
tags = self._append_tags(dataset_collection_instance, implicit_inputs, tags)
return self.__persist(dataset_collection_instance, flush=flush)

def create_dataset_collection(self, trans, collection_type, element_identifiers=None, elements=None,
Expand Down
6 changes: 4 additions & 2 deletions lib/galaxy/tools/actions/upload.py
Original file line number Diff line number Diff line change
Expand Up @@ -121,7 +121,8 @@ def _precreate_fetched_hdas(trans, history, target, outputs):
uploaded_dataset = Bunch(
type='file', name=name, file_type=file_type, dbkey=dbkey
)
data = upload_common.new_upload(trans, '', uploaded_dataset, library_bunch=None, history=history)
tag_list = item.get("tags", [])
data = upload_common.new_upload(trans, '', uploaded_dataset, library_bunch=None, history=history, tag_list=tag_list)
outputs.append(data)
item["object_id"] = data.id

Expand All @@ -136,11 +137,12 @@ def _precreate_fetched_collection_instance(trans, history, target, outputs):
if not name:
return

tags = target.get("tags", [])
collections_service = trans.app.dataset_collections_service
collection_type_description = collections_service.collection_type_descriptions.for_collection_type(collection_type)
structure = UninitializedTree(collection_type_description)
hdca = collections_service.precreate_dataset_collection_instance(
trans, history, name, structure=structure
trans, history, name, structure=structure, tags=tags
)
outputs.append(hdca)
# Following flush is needed so the new HDCA has a database ID.
Expand Down
11 changes: 8 additions & 3 deletions lib/galaxy/tools/actions/upload_common.py
Original file line number Diff line number Diff line change
Expand Up @@ -274,11 +274,16 @@ def __new_library_upload(trans, cntrller, uploaded_dataset, library_bunch, state
return ldda


def new_upload(trans, cntrller, uploaded_dataset, library_bunch=None, history=None, state=None, tag_list=None):
    """Create the dataset instance backing an upload and optionally tag it.

    Routes to a library (LDDA) upload when ``library_bunch`` is supplied,
    otherwise to a history (HDA) upload. ``tag_list``, when provided, is a
    list of tag strings applied to the newly created dataset instance.

    :returns: the created dataset instance
    """
    if library_bunch:
        upload_target_dataset_instance = __new_library_upload(trans, cntrller, uploaded_dataset, library_bunch, state)
    else:
        upload_target_dataset_instance = __new_history_upload(trans, uploaded_dataset, history=history, state=state)

    if tag_list:
        # Tags are user-supplied strings (e.g. from a fetch API payload).
        trans.app.tag_handler.add_tags_from_list(trans.user, upload_target_dataset_instance, tag_list)

    return upload_target_dataset_instance


def get_uploaded_datasets(trans, cntrller, params, dataset_upload_inputs, library_bunch=None, history=None):
Expand Down
3 changes: 3 additions & 0 deletions lib/galaxy/tools/data_fetch.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,7 @@ def _resolve_src(item):
dbkey = item.get("dbkey", "?")
requested_ext = item.get("ext", "auto")
info = item.get("info", None)
tags = item.get("tags", [])
object_id = item.get("object_id", None)
link_data_only = upload_config.link_data_only
if "link_data_only" in item:
Expand Down Expand Up @@ -146,6 +147,8 @@ def _resolve_src(item):
rval["info"] = info
if object_id is not None:
rval["object_id"] = object_id
if tags:
rval["tags"] = tags
return rval

elements = elements_tree_map(_resolve_src, items)
Expand Down
12 changes: 11 additions & 1 deletion lib/galaxy/tools/parameters/output_collect.py
Original file line number Diff line number Diff line change
Expand Up @@ -407,7 +407,7 @@ def populate_collection_elements(self, collection, root_collection_builder, file
dataset_name = fields_match.name or designation

link_data = discovered_file.match.link_data

tag_list = discovered_file.match.tag_list
dataset = self.create_dataset(
ext=ext,
designation=designation,
Expand All @@ -417,6 +417,7 @@ def populate_collection_elements(self, collection, root_collection_builder, file
filename=filename,
metadata_source_name=metadata_source_name,
link_data=link_data,
tag_list=tag_list,
)
log.debug(
"(%s) Created dynamic collection dataset for path [%s] with element identifier [%s] for output [%s] %s",
Expand Down Expand Up @@ -473,6 +474,7 @@ def create_dataset(
library_folder=None,
link_data=False,
primary_data=None,
tag_list=[],
):
app = self.app
sa_session = self.sa_session
Expand All @@ -493,6 +495,10 @@ def create_dataset(
metadata_source = self.inp_data[metadata_source_name]

sa_session.flush()

if tag_list:
app.tag_handler.add_tags_from_list(self.job.user, primary_data, tag_list)

# Move data from temp location to dataset location
if not link_data:
app.object_store.update_from_file(primary_data.dataset, file_name=filename, create=True)
Expand Down Expand Up @@ -873,6 +879,10 @@ def visible(self):
def link_data(self):
    # Truthy when the incoming item requested "link_data_only" behavior;
    # a missing key coerces to False.
    flag = self.as_dict.get("link_data_only")
    return bool(flag)

@property
def tag_list(self):
    """Tags requested for this discovered item; empty list when unspecified."""
    try:
        return self.as_dict["tags"]
    except KeyError:
        return []

@property
def object_id(self):
    """Pre-assigned object id for this item, or None when not provided."""
    try:
        return self.as_dict["object_id"]
    except KeyError:
        return None
Expand Down
11 changes: 9 additions & 2 deletions test/api/test_dataset_collections.py
Original file line number Diff line number Diff line change
Expand Up @@ -189,12 +189,13 @@ def test_enforces_unique_names(self):
self._assert_status_code_is(create_response, 400)

def test_upload_collection(self):
elements = [{"src": "files", "dbkey": "hg19", "info": "my cool bed"}]
elements = [{"src": "files", "dbkey": "hg19", "info": "my cool bed", "tags": ["name:data1", "group:condition:treated", "machine:illumina"]}]
targets = [{
"destination": {"type": "hdca"},
"elements": elements,
"collection_type": "list",
"name": "Test upload",
"tags": ["name:collection1"]
}]
payload = {
"history_id": self.history_id,
Expand All @@ -204,10 +205,16 @@ def test_upload_collection(self):
self.dataset_populator.fetch(payload)
hdca = self._assert_one_collection_created_in_history()
self.assertEquals(hdca["name"], "Test upload")
hdca_tags = hdca["tags"]
assert len(hdca_tags) == 1
assert "name:collection1" in hdca_tags
assert len(hdca["elements"]) == 1, hdca
element0 = hdca["elements"][0]
assert element0["element_identifier"] == "4.bed"
assert element0["object"]["file_size"] == 61
dataset0 = element0["object"]
assert dataset0["file_size"] == 61
dataset_tags = dataset0["tags"]
assert len(dataset_tags) == 3, dataset0

def test_upload_nested(self):
elements = [{"name": "samp1", "elements": [{"src": "files", "dbkey": "hg19", "info": "my cool bed"}]}]
Expand Down
9 changes: 7 additions & 2 deletions test/api/test_tools_upload.py
Original file line number Diff line number Diff line change
Expand Up @@ -227,10 +227,15 @@ def test_upload_dbkey(self):
datasets = run_response.json()["outputs"]
assert datasets[0].get("genome_build") == "hg19", datasets[0]

def test_fetch_metadata(self):
    # Verify the fetch API applies dbkey, info, and tags supplied with the upload.
    table = ONE_TO_SIX_WITH_SPACES
    details = self._upload_and_get_details(table, api='fetch', dbkey="hg19", info="cool upload", tags=["name:data", "group:type:paired-end"])
    assert details.get("genome_build") == "hg19"
    assert details.get("misc_info") == "cool upload", details
    tags = details.get("tags")
    assert len(tags) == 2, details
    assert "group:type:paired-end" in tags
    assert "name:data" in tags

def test_upload_multiple_files_1(self):
with self.dataset_populator.test_history() as history_id:
Expand Down