diff --git a/client/galaxy/scripts/components/RuleCollectionBuilder.vue b/client/galaxy/scripts/components/RuleCollectionBuilder.vue
index cac85f619f20..60d5d58dff88 100644
--- a/client/galaxy/scripts/components/RuleCollectionBuilder.vue
+++ b/client/galaxy/scripts/components/RuleCollectionBuilder.vue
@@ -26,7 +26,7 @@
v-bind:class="{ 'flex-column-container': vertical }" v-if="ruleView == 'normal'">
-
+
@@ -49,6 +49,16 @@
+
+
+
@@ -275,6 +285,7 @@
-
+
this.colHeaders[idx]);
- } else {
- columnNames = [this.colHeaders[this.columns]];
- }
- if (columnNames.length == 2) {
- return "columns " + columnNames[0] + " and " + columnNames[1];
- } else if (columnNames.length > 2) {
- return (
- "columns " + columnNames.slice(0, -1).join(", ") + ", and " + columnNames[columnNames.length - 1]
- );
- } else {
- return "column " + columnNames[0];
- }
+ return RuleDefs.columnDisplay(this.columns, this.colHeaders);
}
}
};
@@ -801,24 +798,50 @@ const RuleModalFooter = {
export default {
data: function() {
- let mapping;
- if (this.elementsType == "ftp") {
- mapping = [{ type: "ftp_path", columns: [0] }];
- } else if (this.elementsType == "datasets") {
- mapping = [{ type: "list_identifiers", columns: [1] }];
+ let orientation = "vertical";
+ let mapping, rules;
+ if (this.initialRules) {
+ mapping = this.initialRules.mapping.slice();
+ rules = this.initialRules.rules.slice();
} else {
- mapping = [];
+ if (this.elementsType == "ftp") {
+ mapping = [{ type: "ftp_path", columns: [0] }];
+ } else if (this.elementsType == "datasets") {
+ mapping = [{ type: "list_identifiers", columns: [1] }];
+ } else {
+ mapping = [];
+ }
+ rules = [];
+ if (this.elementsType == "collection_contents") {
+ if (this.initialElements !== null) {
+ const collectionType = this.initialElements.collection_type;
+ const collectionTypeRanks = collectionType.split(":");
+ for (let index in collectionTypeRanks) {
+ rules.push({
+ type: "add_column_metadata",
+ value: "identifier" + index
+ });
+ }
+ } else {
+ orientation = "horizontal";
+ // just assume a list is given by default.
+ rules.push({
+ type: "add_column_metadata",
+ value: "identifier0"
+ });
+ }
+ }
}
return {
- rules: [],
+ rules: rules,
colHeadersPerRule: [],
mapping: mapping,
state: "build", // 'build', 'error', 'wait',
ruleView: "normal", // 'normal' or 'source'
ruleSource: "",
+ ruleSourceJson: null,
ruleSourceError: null,
errorMessage: "",
- hasRuleErrors: false,
jaggedData: false,
waitingJobState: "new",
titleReset: _l("Undo all reordering and discards"),
@@ -849,6 +872,7 @@ export default {
addColumnRegexReplacement: null,
addColumnRegexGroupCount: null,
addColumnRegexType: "global",
+ addColumnMetadataValue: 0,
addColumnConcatenateTarget0: 0,
addColumnConcatenateTarget1: 0,
addColumnRownumStart: 1,
@@ -884,12 +908,11 @@ export default {
genomes: [],
genome: null,
hideSourceItems: this.defaultHideSourceItems,
- orientation: "vertical"
+ orientation: orientation
};
},
props: {
initialElements: {
- type: Array,
required: true
},
importType: {
@@ -908,6 +931,16 @@ export default {
required: false,
type: Function
},
+ // required if elementsType is "collection_contents" - hook into tool form to update
+ // rule parameter
+ saveRulesFn: {
+ required: false,
+ type: Function
+ },
+ initialRules: {
+ required: false,
+ type: Object
+ },
defaultHideSourceItems: {
type: Boolean,
required: false,
@@ -931,7 +964,7 @@ export default {
computed: {
exisistingDatasets() {
const elementsType = this.elementsType;
- return elementsType === "datasets" || elementsType === "library_datasets";
+ return elementsType === "datasets" || elementsType === "collection_contents" || elementsType === "library_datasets";
},
showFileTypeSelector() {
return !this.exisistingDatasets && !this.mappingAsDict.file_type;
@@ -940,11 +973,17 @@ export default {
return !this.exisistingDatasets && !this.mappingAsDict.dbkey;
},
showCollectionNameInput() {
- return this.importType == "collections" && !this.mappingAsDict.collection_name;
+ return (
+ this.importType == "collections" &&
+ this.elementsType != "collection_contents" &&
+ !this.mappingAsDict.collection_name
+ );
},
titleFinish() {
if (this.elementsType == "datasets" || this.elementsType == "library_datasets") {
return _l("Create new collection from specified rules and datasets");
+ } else if (this.elementsType == "collection_contents") {
+ return _l("Save rules and return to tool form");
} else {
return _l("Upload collection using specified rules");
}
@@ -959,6 +998,8 @@ export default {
finishButtonTitle() {
if (this.elementsType == "datasets" || this.elementsType == "library_datasets") {
return _l("Create");
+ } else if (this.elementsType == "collection_contents") {
+ return _l("Save");
} else {
return _l("Upload");
}
@@ -1012,50 +1053,48 @@ export default {
return targets;
},
hotData() {
- let data, sources;
+ let data, sources, columns;
if (this.elementsType == "datasets") {
data = this.initialElements.map(el => [el["hid"], el["name"]]);
sources = this.initialElements.slice();
+ columns = ["new", "new"];
} else if (this.elementsType == "library_datasets") {
data = this.initialElements.map(el => [el["name"]]);
sources = this.initialElements.slice();
+ columns = ["new"];
+ } else if (this.elementsType == "collection_contents") {
+ const collection = this.initialElements;
+ if (collection) {
+ const obj = this.populateElementsFromCollectionDescription(
+ collection.elements,
+ collection.collection_type
+ );
+ data = obj.data;
+ sources = obj.sources;
+ columns = [];
+ } else {
+ data = [];
+ sources = [];
+ columns = [];
+ }
} else {
data = this.initialElements.slice();
sources = data.map(el => null);
- }
-
- let hasRuleError = false;
- this.colHeadersPerRule = [];
- for (var ruleIndex in this.rules) {
- const ruleHeaders = this.colHeadersFor(data);
- this.colHeadersPerRule[ruleIndex] = ruleHeaders;
-
- const rule = this.rules[ruleIndex];
- rule.error = null;
- rule.warn = null;
- if (hasRuleError) {
- rule.warn = _l("Skipped due to previous errors.");
- continue;
- }
- var ruleType = rule.type;
- const ruleDef = RULES[ruleType];
- const res = ruleDef.apply(rule, data, sources);
- if (res.error) {
- hasRuleError = true;
- rule.error = res.error;
- } else {
- if (res.warn) {
- rule.warn = res.warn;
+ columns = [];
+ if (this.initialElements) {
+ for (var columnIndex in this.initialElements[0]) {
+ columns.push("new");
}
- data = res.data || data;
- sources = res.sources || sources;
}
}
- return { data, sources };
+
+ this.colHeadersPerRule = [];
+ return RuleDefs.applyRules(data, sources, columns, this.rules, this.colHeadersPerRule);
},
colHeaders() {
const data = this.hotData["data"];
- return this.colHeadersFor(data);
+ const columns = this.hotData["columns"];
+ return RuleDefs.colHeadersFor(data, columns);
},
colHeadersDisplay() {
const formattedHeaders = [];
@@ -1087,6 +1126,29 @@ export default {
}
return asDict;
},
+ metadataOptions() {
+ const metadataOptions = {};
+ if (this.elementsType == "collection_contents") {
+ let collectionType;
+ if (this.initialElements) {
+ collectionType = this.initialElements.collection_type;
+ } else {
+ // give a bunch of different options if not constrained with given input
+ collectionType = "list:list:list:paired";
+ }
+ const collectionTypeRanks = collectionType.split(":");
+ for (let index in collectionTypeRanks) {
+ const collectionTypeRank = collectionTypeRanks[index];
+ if (collectionTypeRank == "list") {
+ // TODO: drop the numeral at the end if only flat list
+ metadataOptions["identifier" + index] = _l("List Identifier ") + (parseInt(index) + 1);
+ } else {
+ metadataOptions["identifier" + index] = _l("Paired Identifier");
+ }
+ }
+ }
+ return metadataOptions;
+ },
collectionType() {
let identifierColumns = [];
if (this.mappingAsDict.list_identifiers) {
@@ -1094,14 +1156,18 @@ export default {
}
let collectionType = identifierColumns.map(col => "list").join(":");
if (this.mappingAsDict.paired_identifier) {
- collectionType += ":paired";
+ if (collectionType) {
+ collectionType += ":paired";
+ } else {
+ collectionType = "paired";
+ }
}
return collectionType;
},
validInput() {
const identifierColumns = this.identifierColumns();
const mappingAsDict = this.mappingAsDict;
- const buildingCollection = identifierColumns.length > 0;
+ const buildingCollection = identifierColumns.length > 0 && this.elementsType != "collection_contents";
let valid = true;
if (buildingCollection && !mappingAsDict.collection_name) {
@@ -1179,12 +1245,15 @@ export default {
rules: this.rules,
mapping: this.mapping
};
- if (this.extension !== UploadUtils.DEFAULT_EXTENSION) {
- asJson.extension = this.extension;
- }
- if (this.genome !== UploadUtils.DEFAULT_GENOME) {
- asJson.genome = this.genome;
+ if (!this.exisistingDatasets) {
+ if (this.extension !== UploadUtils.DEFAULT_EXTENSION) {
+ asJson.extension = this.extension;
+ }
+ if (this.genome !== UploadUtils.DEFAULT_GENOME) {
+ asJson.genome = this.genome;
+ }
}
+ this.ruleSourceJson = asJson;
this.ruleSource = JSON.stringify(asJson, replacer, " ");
this.ruleSourceError = null;
},
@@ -1220,14 +1289,6 @@ export default {
this.mapping = asJson.mapping;
}
},
- handleColumnMapping() {},
- colHeadersFor(data) {
- if (data.length == 0) {
- return [];
- } else {
- return data[0].map((el, i) => String.fromCharCode(65 + i));
- }
- },
addIdentifier(identifier) {
const multiple = this.mappingTargets()[identifier].multiple;
// If multiple selection, pop open a new column selector in edit mode.
@@ -1313,13 +1374,21 @@ export default {
const collectionType = this.collectionType;
if (this.elementsType == "datasets" || this.elementsType == "library_datasets") {
const elements = this.creationElementsFromDatasets();
- new AjaxQueue.AjaxQueue(
- _.map(elements, (elements, name) => {
- return () => {
- const response = this.creationFn(elements, collectionType, name, this.hideSourceItems);
- return response;
- };
- })).done(this.oncreate).fail(this.renderFetchError);
+ if (this.state !== "error") {
+ new AjaxQueue.AjaxQueue(
+ _.map(elements, (elements, name) => {
+ return () => {
+ const response = this.creationFn(elements, collectionType, name, this.hideSourceItems);
+ return response;
+ };
+ })).done(this.oncreate).fail(this.renderFetchError);
+ }
+ } else if (this.elementsType == "collection_contents") {
+ this.resetSource();
+ if (this.state !== "error") {
+ this.saveRulesFn(this.ruleSourceJson);
+ this.oncreate();
+ }
} else {
const historyId = Galaxy.currHistoryPanel.model.id;
let elements, targets;
@@ -1522,6 +1591,35 @@ export default {
return datasets;
},
+ populateElementsFromCollectionDescription(elements, collectionType, parentIdentifiers_) {
+ const parentIdentifiers = parentIdentifiers_ ? parentIdentifiers_ : [];
+ let data = [];
+ let sources = [];
+ for (let element of elements) {
+ const elementObject = element.object;
+ const identifiers = parentIdentifiers.concat([element.element_identifier]);
+ const collectionTypeLevelSepIndex = collectionType.indexOf(":");
+ if (collectionTypeLevelSepIndex === -1) {
+ // Flat collection at this depth.
+ // sources are the elements
+ // TOOD: right thing is probably this: data.push([]);
+ data.push([]);
+ sources.push({ identifiers: identifiers, dataset: elementObject });
+ } else {
+ const restCollectionType = collectionType.slice(collectionTypeLevelSepIndex + 1);
+ let elementObj = this.populateElementsFromCollectionDescription(
+ elementObject.elements,
+ restCollectionType,
+ identifiers
+ );
+ const elementData = elementObj.data;
+ const elementSources = elementObj.sources;
+ data = data.concat(elementData);
+ sources = sources.concat(elementSources);
+ }
+ }
+ return { data, sources };
+ },
highlightColumn(n) {
const headerSelection = $(`.htCore > thead > tr > th:nth-child(${n + 1})`);
headerSelection.addClass("ht__highlight");
@@ -1578,39 +1676,41 @@ export default {
}
},
created() {
- let columnCount = null;
- if (this.elementsType == "datasets") {
- for (let element of this.initialElements) {
- if (element.history_content_type == "dataset_collection") {
- this.errorMessage =
- "This component can only be used with datasets, you have specified one or more collections.";
- this.state = "error";
+ if (this.elementsType !== "collection_contents") {
+ let columnCount = null;
+ if (this.elementsType == "datasets") {
+ for (let element of this.initialElements) {
+ if (element.history_content_type == "dataset_collection") {
+ this.errorMessage =
+ "This component can only be used with datasets, you have specified one or more collections.";
+ this.state = "error";
+ }
}
- }
- } else {
- for (let row of this.initialElements) {
- if (columnCount == null) {
- columnCount = row.length;
- } else {
- if (columnCount != row.length) {
- this.jaggedData = true;
- break;
+ } else {
+ for (let row of this.initialElements) {
+ if (columnCount == null) {
+ columnCount = row.length;
+ } else {
+ if (columnCount != row.length) {
+ this.jaggedData = true;
+ break;
+ }
}
}
}
+ UploadUtils.getUploadDatatypes(
+ extensions => {
+ this.extensions = extensions;
+ this.extension = UploadUtils.DEFAULT_EXTENSION;
+ },
+ false,
+ UploadUtils.AUTO_EXTENSION
+ );
+ UploadUtils.getUploadGenomes(genomes => {
+ this.genomes = genomes;
+ this.genome = UploadUtils.DEFAULT_GENOME;
+ }, UploadUtils.DEFAULT_GENOME);
}
- UploadUtils.getUploadDatatypes(
- extensions => {
- this.extensions = extensions;
- this.extension = UploadUtils.DEFAULT_EXTENSION;
- },
- false,
- UploadUtils.AUTO_EXTENSION
- );
- UploadUtils.getUploadGenomes(genomes => {
- this.genomes = genomes;
- this.genome = UploadUtils.DEFAULT_GENOME;
- }, UploadUtils.DEFAULT_GENOME);
},
watch: {
addColumnRegexType: function(val) {
@@ -1668,6 +1768,10 @@ export default {
.rule-column.horizontal {
height: 150px;
}
+.rules-container-full {
+ width: 100%;
+ height: 400px;
+}
.rules-container {
border: 1px dashed #ccc;
padding: 5px;
diff --git a/client/galaxy/scripts/components/RulesDisplay.vue b/client/galaxy/scripts/components/RulesDisplay.vue
new file mode 100644
index 000000000000..3e83b0960313
--- /dev/null
+++ b/client/galaxy/scripts/components/RulesDisplay.vue
@@ -0,0 +1,119 @@
+
+
+
+
diff --git a/client/galaxy/scripts/mocha/tests/rules_tests.js b/client/galaxy/scripts/mocha/tests/rules_tests.js
index 3d0efc8b6c48..b97f81130faa 100644
--- a/client/galaxy/scripts/mocha/tests/rules_tests.js
+++ b/client/galaxy/scripts/mocha/tests/rules_tests.js
@@ -11,44 +11,29 @@ function applyRules(rules, data, sources) {
columns.push("new");
}
}
- for (var ruleIndex in rules) {
- const rule = rules[ruleIndex];
- rule.error = null;
- rule.warn = null;
-
- var ruleType = rule.type;
- const ruleDef = RULES[ruleType];
- const res = ruleDef.apply(rule, data, sources, columns);
- if (res.error) {
- throw res.error;
- } else {
- if (res.warn) {
- rule.warn = res.warn;
- }
- data = res.data || data;
- sources = res.sources || sources;
- columns = res.columns || columns;
- }
- }
- return { data, sources, columns };
+ return RuleDefs.applyRules(data, sources, columns, rules);
}
function itShouldConform(specTestCase, i) {
it("should pass conformance test case " + i, function() {
chai.assert.property(specTestCase, "rules");
- chai.assert.property(specTestCase, "initial");
- chai.assert.property(specTestCase, "final");
+ if (specTestCase.initial) {
+ chai.assert.property(specTestCase, "final");
- const rules = specTestCase.rules;
- const initial = specTestCase.initial;
- const expectedFinal = specTestCase.final;
+ const rules = specTestCase.rules;
+ const initial = specTestCase.initial;
+ const expectedFinal = specTestCase.final;
- const final = applyRules(rules, initial.data, initial.sources);
- const finalData = final.data;
- const finalSources = final.sources;
- chai.assert.deepEqual(finalData, expectedFinal.data);
- if (expectedFinal.sources !== undefined) {
- chai.assert.deepEqual(finalSources, expectedFinal.sources);
+ const final = applyRules(rules, initial.data, initial.sources);
+ const finalData = final.data;
+ const finalSources = final.sources;
+ chai.assert.deepEqual(finalData, expectedFinal.data);
+ if (expectedFinal.sources !== undefined) {
+ chai.assert.deepEqual(finalSources, expectedFinal.sources);
+ }
+ } else {
+ chai.assert(specTestCase.error);
+ // TODO: test these...
}
});
}
diff --git a/client/galaxy/scripts/mvc/collection/list-collection-creator.js b/client/galaxy/scripts/mvc/collection/list-collection-creator.js
index 3657cd773568..e1ba11498b87 100644
--- a/client/galaxy/scripts/mvc/collection/list-collection-creator.js
+++ b/client/galaxy/scripts/mvc/collection/list-collection-creator.js
@@ -1069,6 +1069,8 @@ var ruleBasedCollectionCreatorModal = function _ruleBasedCollectionCreatorModal(
let title;
if (importType == "datasets") {
title = _l("Build Rules for Uploading Datasets");
+ } else if (elementsType == "collection_contents") {
+ title = _l("Build Rules for Applying to Existing Collection");
} else if (elementsType == "datasets" || elementsType == "library_datasets") {
title = _l("Build Rules for Creating Collection(s)");
} else {
@@ -1090,7 +1092,9 @@ var ruleBasedCollectionCreatorModal = function _ruleBasedCollectionCreatorModal(
creationFn: options.creationFn,
oncancel: options.oncancel,
oncreate: options.oncreate,
- defaultHideSourceItems: options.defaultHideSourceItems
+ defaultHideSourceItems: options.defaultHideSourceItems,
+ saveRulesFn: options.saveRulesFn,
+ initialRules: options.initialRules
}
}).$mount(vm);
return deferred;
@@ -1128,7 +1132,8 @@ function createListCollection(contents, defaultHideSourceItems) {
function createCollectionViaRules(selection, defaultHideSourceItems) {
let elements, elementsType, importType;
- if (!selection.selectionType) {
+ const selectionType = selection.selectionType;
+ if (!selectionType) {
// Have HDAs from the history panel.
elements = selection.toJSON();
elementsType = "datasets";
diff --git a/client/galaxy/scripts/mvc/form/form-input.js b/client/galaxy/scripts/mvc/form/form-input.js
index 5ac5032cf09e..649e0fa54871 100644
--- a/client/galaxy/scripts/mvc/form/form-input.js
+++ b/client/galaxy/scripts/mvc/form/form-input.js
@@ -85,11 +85,13 @@ export default Backbone.View.extend({
// render visibility
this.$el[this.model.get("hidden") ? "hide" : "show"]();
// render preview view for collapsed fields
+ // allow at least newlines to render properly after escape
+        const html = _.escape(this.model.get("text_value")).replace(/\n/g, "<br>");
this.$preview[
(this.field.collapsed && this.model.get("collapsible_preview")) || this.model.get("disabled")
? "show"
: "hide"
- ]().html(_.escape(this.model.get("text_value")));
+ ]().html(html);
// render error messages
var error_text = this.model.get("error_text");
this.$error[error_text ? "show" : "hide"]();
@@ -106,7 +108,14 @@ export default Backbone.View.extend({
style: this.model.get("style")
});
// render collapsible options
- if (!this.model.get("disabled") && this.model.get("collapsible_value") !== undefined) {
+ const workflowRuntimeCompatible =
+ this.field.workflowRuntimeCompatible === undefined ? true : this.field.workflowRuntimeCompatible;
+ if (
+ workflowRuntimeCompatible &&
+ !this.model.get("disabled") &&
+ this.model.get("collapsible_value") !== undefined
+    ) {
var collapsible_state = this.field.collapsed ? "enable" : "disable";
this.$title_text.hide();
this.$collapsible.show();
diff --git a/client/galaxy/scripts/mvc/form/form-parameters.js b/client/galaxy/scripts/mvc/form/form-parameters.js
index e1869f0996be..49fd767dce29 100644
--- a/client/galaxy/scripts/mvc/form/form-parameters.js
+++ b/client/galaxy/scripts/mvc/form/form-parameters.js
@@ -7,6 +7,7 @@ import SelectContent from "mvc/ui/ui-select-content";
import SelectLibrary from "mvc/ui/ui-select-library";
import SelectFtp from "mvc/ui/ui-select-ftp";
import SelectGenomeSpace from "mvc/ui/ui-select-genomespace";
+import RulesEdit from "mvc/ui/ui-rules-edit";
import ColorPicker from "mvc/ui/ui-color-picker";
// create form view
export default Backbone.Model.extend({
@@ -30,6 +31,7 @@ export default Backbone.Model.extend({
library_data: "_fieldLibrary",
ftpfile: "_fieldFtp",
upload: "_fieldUpload",
+ rules: "_fieldRulesEdit",
genomespacefile: "_fieldGenomeSpace"
},
@@ -215,13 +217,20 @@ export default Backbone.Model.extend({
/** GenomeSpace file select field
*/
_fieldGenomeSpace: function(input_def) {
- var self = this;
return new SelectGenomeSpace.View({
id: `field-${input_def.id}`,
onchange: input_def.onchange
});
},
+ _fieldRulesEdit: function(input_def) {
+ return new RulesEdit.View({
+ id: `field-${input_def.id}`,
+ onchange: input_def.onchange,
+ target: input_def.target
+ });
+ },
+
/** Upload file field */
_fieldUpload: function(input_def) {
return new Ui.Upload({
diff --git a/client/galaxy/scripts/mvc/form/form-view.js b/client/galaxy/scripts/mvc/form/form-view.js
index 1c707c756a1d..d53179f93158 100644
--- a/client/galaxy/scripts/mvc/form/form-view.js
+++ b/client/galaxy/scripts/mvc/form/form-view.js
@@ -25,10 +25,13 @@ export default Backbone.View.extend({
var self = this;
this.data.matchModel(new_model, (node, input_id) => {
var input = self.input_list[input_id];
+ var field = self.field_list[input_id];
+ if (field.refreshDefinition) {
+ field.refreshDefinition(node);
+ }
if (input && input.options) {
if (!_.isEqual(input.options, node.options)) {
input.options = node.options;
- var field = self.field_list[input_id];
if (field.update) {
var new_options = [];
if (["data", "data_collection", "drill_down"].indexOf(input.type) != -1) {
diff --git a/client/galaxy/scripts/mvc/rules/rule-definitions.js b/client/galaxy/scripts/mvc/rules/rule-definitions.js
index 7071d674af28..6f416d07d70f 100644
--- a/client/galaxy/scripts/mvc/rules/rule-definitions.js
+++ b/client/galaxy/scripts/mvc/rules/rule-definitions.js
@@ -1,6 +1,8 @@
import _l from "utils/localization";
import pyre from "pyre-to-regexp";
+const NEW_COLUMN = "new";
+
const multiColumnsToString = function(targetColumns, colHeaders) {
if (targetColumns.length == 0) {
return `no columns`;
@@ -13,6 +15,16 @@ const multiColumnsToString = function(targetColumns, colHeaders) {
}
};
/**
 * Return a copy of `columns` with the entries at the numeric indices listed in
 * `targetColumns` removed.
 *
 * @param {Array} columns - current column-kind markers (e.g. "new")
 * @param {number[]} targetColumns - numeric indices of columns to drop
 * @returns {Array} new array; the input is not mutated
 */
const removeColumns = function(columns, targetColumns) {
    // BUG FIX: the previous implementation iterated with `for...in`, which
    // yields *string* indices ("0", "1", ...). `targetColumns` holds numbers,
    // and Array#indexOf compares with strict equality, so no index ever
    // matched and no column was ever removed. `filter` supplies a numeric
    // index, restoring the intended behavior.
    return columns.filter((column, index) => targetColumns.indexOf(index) === -1);
};
+
const applyRegex = function(regex, target, data, replacement, groupCount) {
let regExp;
try {
@@ -71,13 +83,16 @@ const RULES = {
save: (component, rule) => {
rule.target_column = component.addColumnBasenameTarget;
},
- apply: (rule, data, sources) => {
+ apply: (rule, data, sources, columns) => {
// https://github.com/kgryte/regex-basename-posix/blob/master/lib/index.js
//const re = /^(?:\/?|)(?:[\s\S]*?)((?:\.{1,2}|[^\/]+?|)(?:\.[^.\/]*|))(?:[\/]*)$/;
// https://stackoverflow.com/questions/8376525/get-value-of-a-string-after-a-slash-in-javascript
const re = "[^/]*$";
const target = rule.target_column;
- return applyRegex(re, target, data);
+ const rval = applyRegex(re, target, data);
+ columns.push(NEW_COLUMN);
+ rval.columns = columns;
+ return rval;
}
},
add_column_rownum: {
@@ -95,7 +110,7 @@ const RULES = {
save: (component, rule) => {
rule.start = component.addColumnRownumStart;
},
- apply: (rule, data, sources) => {
+ apply: (rule, data, sources, columns) => {
let rownum = rule.start;
function newRow(row) {
const newRow = row.slice();
@@ -104,7 +119,8 @@ const RULES = {
return newRow;
}
data = data.map(newRow);
- return { data };
+ columns.push(NEW_COLUMN);
+ return { data, columns };
}
},
add_column_value: {
@@ -122,7 +138,7 @@ const RULES = {
save: (component, rule) => {
rule.value = component.addColumnValue;
},
- apply: (rule, data, sources) => {
+ apply: (rule, data, sources, columns) => {
const addValue = rule.value;
function newRow(row) {
const newRow = row.slice();
@@ -130,7 +146,36 @@ const RULES = {
return newRow;
}
data = data.map(newRow);
- return { data };
+ columns.push(NEW_COLUMN);
+ return { data, columns };
+ }
+ },
+ add_column_metadata: {
+ title: _l("Add Column from Metadata"),
+ display: (rule, colHeaders) => {
+ return `Add column for ${rule.value}.`;
+ },
+ init: (component, rule) => {
+ if (!rule) {
+ component.addColumnMetadataValue = null;
+ } else {
+ component.addColumnMetadataValue = rule.value;
+ }
+ },
+ save: (component, rule) => {
+ rule.value = component.addColumnMetadataValue;
+ },
+ apply: (rule, data, sources, columns) => {
+ const ruleValue = rule.value;
+ const identifierIndex = parseInt(ruleValue.substring("identifier".length));
+ function newRow(row, index) {
+ const newRow = row.slice();
+ newRow.push(sources[index]["identifiers"][identifierIndex]);
+ return newRow;
+ }
+ data = data.map(newRow);
+ columns.push(NEW_COLUMN);
+ return { data, columns };
}
},
add_column_regex: {
@@ -168,9 +213,12 @@ const RULES = {
rule.group_count = component.addColumnRegexGroupCount;
}
},
- apply: (rule, data, sources) => {
+ apply: (rule, data, sources, columns) => {
const target = rule.target_column;
- return applyRegex(rule.expression, target, data, rule.replacement, rule.group_count);
+ const rval = applyRegex(rule.expression, target, data, rule.replacement, rule.group_count);
+ columns.push(NEW_COLUMN);
+ rval.columns = columns;
+ return rval;
}
},
add_column_concatenate: {
@@ -193,7 +241,7 @@ const RULES = {
rule.target_column_0 = component.addColumnConcatenateTarget0;
rule.target_column_1 = component.addColumnConcatenateTarget1;
},
- apply: (rule, data, sources) => {
+ apply: (rule, data, sources, columns) => {
const target0 = rule.target_column_0;
const target1 = rule.target_column_1;
function newRow(row) {
@@ -202,7 +250,8 @@ const RULES = {
return newRow;
}
data = data.map(newRow);
- return { data };
+ columns.push(NEW_COLUMN);
+ return { data, columns };
}
},
add_column_substr: {
@@ -241,7 +290,7 @@ const RULES = {
rule.length = component.addColumnSubstrLength;
rule.substr_type = component.addColumnSubstrType;
},
- apply: (rule, data, sources) => {
+ apply: (rule, data, sources, columns) => {
const target = rule.target_column;
const length = rule.length;
const type = rule.substr_type;
@@ -269,6 +318,7 @@ const RULES = {
return newRow;
}
data = data.map(newRow);
+ columns.push(NEW_COLUMN);
-            return { data };
+            return { data, columns };
}
},
@@ -288,7 +338,7 @@ const RULES = {
save: (component, rule) => {
rule.target_columns = component.removeColumnTargets;
},
- apply: (rule, data, sources) => {
+ apply: (rule, data, sources, columns) => {
const targets = rule.target_columns;
function newRow(row) {
const newRow = [];
@@ -300,7 +350,8 @@ const RULES = {
return newRow;
}
data = data.map(newRow);
- return { data };
+ columns = removeColumns(columns, targets);
+ return { data, columns };
}
},
add_filter_regex: {
@@ -326,7 +377,7 @@ const RULES = {
rule.expression = component.addFilterRegexExpression;
rule.invert = component.addFilterRegexInvert;
},
- apply: (rule, data, sources) => {
+ apply: (rule, data, sources, columns) => {
const regex = String(rule.expression);
var regExp;
try {
@@ -376,7 +427,7 @@ const RULES = {
rule.which = component.addFilterCountWhich;
rule.invert = component.addFilterCountInvert;
},
- apply: (rule, data, sources) => {
+ apply: (rule, data, sources, columns) => {
const count = rule.count;
const invert = rule.invert;
const which = rule.which;
@@ -413,7 +464,7 @@ const RULES = {
rule.target_column = component.addFilterEmptyTarget;
rule.invert = component.addFilterEmptyInvert;
},
- apply: (rule, data, sources) => {
+ apply: (rule, data, sources, columns) => {
const target = rule.target_column;
const invert = rule.invert;
const filterFunction = function(el, index) {
@@ -446,7 +497,7 @@ const RULES = {
rule.value = component.addFilterMatchesValue;
rule.invert = component.addFilterMatchesInvert;
},
- apply: (rule, data, sources) => {
+ apply: (rule, data, sources, columns) => {
const target = rule.target_column;
const invert = rule.invert;
const value = rule.value;
@@ -482,7 +533,7 @@ const RULES = {
rule.value = component.addFilterCompareValue;
rule.compare_type = component.addFilterCompareType;
},
- apply: (rule, data, sources) => {
+ apply: (rule, data, sources, columns) => {
const target = rule.target_column;
const compare_type = rule.compare_type;
const value = rule.value;
@@ -524,7 +575,7 @@ const RULES = {
rule.target_column = component.addSortingTarget;
rule.numeric = component.addSortingNumeric;
},
- apply: (rule, data, sources) => {
+ apply: (rule, data, sources, columns) => {
const target = rule.target_column;
const numeric = rule.numeric;
@@ -577,7 +628,7 @@ const RULES = {
rule.target_column_0 = component.swapColumnsTarget0;
rule.target_column_1 = component.swapColumnsTarget1;
},
- apply: (rule, data, sources) => {
+ apply: (rule, data, sources, columns) => {
const target0 = rule.target_column_0;
const target1 = rule.target_column_1;
function newRow(row) {
@@ -587,7 +638,10 @@ const RULES = {
return newRow;
}
data = data.map(newRow);
- return { data };
+ const tempColumn = columns[target0];
+ columns[target0] = columns[target1];
+ columns[target1] = tempColumn;
+ return { data, columns };
}
},
split_columns: {
@@ -608,7 +662,7 @@ const RULES = {
rule.target_columns_0 = component.splitColumnsTargets0;
rule.target_columns_1 = component.splitColumnsTargets1;
},
- apply: (rule, data, sources) => {
+ apply: (rule, data, sources, columns) => {
const targets0 = rule.target_columns_0;
const targets1 = rule.target_columns_1;
@@ -628,10 +682,10 @@ const RULES = {
}
return [newRow0, newRow1];
};
-
data = flatMap(splitRow, data);
- sources = flatMap(src => [src, src], data);
- return { data, sources };
+ sources = flatMap(src => [src, src], sources);
+ columns = removeColumns(columns, targets0);
+ return { data, sources, columns };
}
}
};
@@ -659,6 +713,7 @@ const MAPPING_TARGETS = {
help: _l(
"If this is set, all rows with the same collection name will be joined into a collection and it is possible to create multiple collections at once."
),
+ modes: ["raw", "ftp", "datasets", "library_datasets"],
importType: "collections"
},
name: {
@@ -696,7 +751,69 @@ const MAPPING_TARGETS = {
}
};
/**
 * Render a human readable description of one or more column selections,
 * e.g. "column A", "columns A and B", or "columns A, B, and C".
 *
 * @param {number|number[]} columns - one column index or a list of indices
 * @param {string[]} colHeaders - display headers indexed by column
 * @returns {string}
 */
const columnDisplay = function(columns, colHeaders) {
    // Array.isArray is the reliable array check; the previous
    // `typeof columns == "object"` test would also match null and any
    // non-array object and then throw on `.map`.
    const columnNames = Array.isArray(columns) ? columns.map(idx => colHeaders[idx]) : [colHeaders[columns]];
    if (columnNames.length == 2) {
        return "columns " + columnNames[0] + " and " + columnNames[1];
    } else if (columnNames.length > 2) {
        return "columns " + columnNames.slice(0, -1).join(", ") + ", and " + columnNames[columnNames.length - 1];
    } else {
        return "column " + columnNames[0];
    }
};
+
/**
 * Compute spreadsheet-style headers ("A", "B", "C", ...) for the current
 * table. With no data rows yet, fall back to the tracked column list when one
 * is available; otherwise there are no headers.
 *
 * @param {Array[]} data - table rows; the first row fixes the column count
 * @param {Array|null} columns - column-kind markers, used only when data is empty
 * @returns {string[]}
 */
const colHeadersFor = function(data, columns) {
    const letterAt = (el, i) => String.fromCharCode(65 + i);
    if (data.length > 0) {
        return data[0].map(letterAt);
    }
    return columns ? columns.map(letterAt) : [];
};
+
/**
 * Run `rules` in order over the table, threading `data`, `sources` and
 * `columns` through each rule's `apply`. Validation state is written back
 * onto each rule object (`rule.error` / `rule.warn`), and every rule after
 * the first failing one is skipped with a warning. When `colHeadersPerRule`
 * is supplied it is filled in place with the headers visible to each rule.
 *
 * @param {Array[]} data - table rows
 * @param {Array} sources - per-row source descriptors (may be null entries)
 * @param {Array} columns - per-column kind markers
 * @param {Array} rules - rule objects (mutated: error/warn set per rule)
 * @param {Array} [colHeadersPerRule] - optional out-param, indexed by rule
 * @returns {{data: Array, sources: Array, columns: Array}} final table state
 */
const applyRules = function(data, sources, columns, rules, colHeadersPerRule) {
    // The previous version redeclared this parameter with `var`, which is
    // redundant; a plain default assignment is enough.
    colHeadersPerRule = colHeadersPerRule || [];
    let hasRuleError = false;
    for (let ruleIndex in rules) {
        colHeadersPerRule[ruleIndex] = colHeadersFor(data, columns);
        const rule = rules[ruleIndex];
        rule.error = null;
        rule.warn = null;
        if (hasRuleError) {
            // Once one rule fails, later rules would operate on bad data.
            rule.warn = _l("Skipped due to previous errors.");
            continue;
        }
        const ruleDef = RULES[rule.type];
        const res = ruleDef.apply(rule, data, sources, columns);
        if (res.error) {
            hasRuleError = true;
            rule.error = res.error;
        } else {
            if (res.warn) {
                rule.warn = res.warn;
            }
            // Rules may return any subset of {data, sources, columns};
            // missing pieces carry forward unchanged.
            data = res.data || data;
            sources = res.sources || sources;
            columns = res.columns || columns;
        }
    }
    return { data, sources, columns };
};
+
export default {
+ applyRules: applyRules,
+ columnDisplay: columnDisplay,
+ colHeadersFor: colHeadersFor,
RULES: RULES,
MAPPING_TARGETS: MAPPING_TARGETS
};
diff --git a/client/galaxy/scripts/mvc/tool/tool-form.js b/client/galaxy/scripts/mvc/tool/tool-form.js
index 5dec9427bffb..a31848e03181 100644
--- a/client/galaxy/scripts/mvc/tool/tool-form.js
+++ b/client/galaxy/scripts/mvc/tool/tool-form.js
@@ -280,6 +280,17 @@ var View = Backbone.View.extend({
this.form.highlight(input_id);
return false;
}
+ if (input_field.validate) {
+ // wish there was a way to just reset this input field
+ const reset = () => {
+ this.form.trigger("reset");
+ };
+ const validateObject = input_field.validate(reset);
+ if (!validateObject.valid) {
+ this.form.highlight(input_id, validateObject.message);
+ return false;
+ }
+ }
if (input_value && input_value.batch) {
var n = input_value.values.length;
var src = n > 0 && input_value.values[0] && input_value.values[0].src;
diff --git a/client/galaxy/scripts/mvc/ui/ui-rules-edit.js b/client/galaxy/scripts/mvc/ui/ui-rules-edit.js
new file mode 100644
index 000000000000..9d6c3330a586
--- /dev/null
+++ b/client/galaxy/scripts/mvc/ui/ui-rules-edit.js
@@ -0,0 +1,169 @@
+import axios from "axios";
+import _l from "utils/localization";
+import Utils from "utils/utils";
+import Ui from "mvc/ui/ui-misc";
+import Vue from "vue";
+import ListCollectionCreator from "mvc/collection/list-collection-creator";
+import RulesDisplay from "components/RulesDisplay.vue";
+
+/**
+ * Bridge rule based builder and the tool form.
+ */
+var View = Backbone.View.extend({
+ // initialize
+ initialize: function(options) {
+ // link this
+ this.options = options;
+ this.target = options.target;
+ this.reset = null;
+ const view = this;
+
+ // create insert new list element button
+ this.browse_button = new Ui.ButtonIcon({
+ title: _l("Edit"),
+ icon: "fa fa-edit",
+ tooltip: _l("Edit Rules"),
+ onclick: () => {
+ if (view.target) {
+ view._fetcCollectionAndEdit();
+ } else {
+ view._showRuleEditor(null);
+ }
+ }
+ });
+
+ // add change event. fires on trigger
+ this.on("change", () => {
+ if (options.onchange) {
+ options.onchange(this.value());
+ }
+ });
+
+ // create elements
+ this.setElement(this._template(options));
+ this.$(".ui-rules-edit-button").append(this.browse_button.$el);
+ var rulesDisplayInstance = Vue.extend(RulesDisplay);
+ var vm = document.createElement("div");
+ this.$(".ui-rules-preview").append(vm);
+ this.instance = new rulesDisplayInstance({
+ propsData: {
+ initialRules: {
+ rules: [],
+ mapping: []
+ }
+ }
+ });
+ this.instance.$mount(vm);
+ this.workflowRuntimeCompatible = false;
+ },
+
+ _fetcCollectionAndEdit: function() {
+ const view = this;
+ const url = `${Galaxy.root}api/dataset_collections/${view.target.id}?instance_type=history`;
+ axios
+ .get(url)
+ .then(response => this._showCollection(response))
+ .catch(view._renderFetchError);
+ },
+
+ _showCollection: function(response) {
+ const elements = response.data;
+ this._showRuleEditor(elements);
+ },
+
+ _showRuleEditor: function(elements) {
+ const elementsType = "collection_contents";
+ const importType = "collections";
+ let value = this._value;
+ const options = {
+ saveRulesFn: rules => this._handleRulesSave(rules),
+ initialRules: value
+ };
+ ListCollectionCreator.ruleBasedCollectionCreatorModal(elements, elementsType, importType, options).done(
+ () => {}
+ );
+ },
+
+ _handleRulesSave: function(rules) {
+ this._setValue(rules);
+ },
+
+ _renderFetchError: function(e) {
+ console.log(e);
+ console.log("problem fetching collection");
+ },
+
+ /** Main Template */
+ _template: function(options) {
+ return `
+
+
+
+
+ `;
+ },
+
+ /** Return/Set currently selected genomespace filename */
+ value: function(new_value) {
+ // check if new_value is defined
+ if (new_value !== undefined) {
+ this._setValue(new_value);
+ } else {
+ return this._getValue();
+ }
+ },
+
+ // update
+ refreshDefinition: function(input_def) {
+        this.target = input_def.target;
+ // refresh
+ this._refresh();
+ },
+
+ // refresh
+ _refresh: function() {},
+
+ // get value
+ _getValue: function() {
+ return this._value;
+ },
+
+ // set value
+ _setValue: function(new_value) {
+ if (new_value) {
+ if (typeof new_value == "string") {
+ new_value = JSON.parse(new_value);
+ }
+ this._value = new_value;
+ this.trigger("change");
+ this.instance.inputRules = new_value;
+ if (this.reset) {
+ this.reset();
+ this.reset = null;
+ }
+ }
+ },
+
+ validate: function(reset) {
+ const value = this._value;
+ let message = null;
+ if (!value || value.rules.length === 0) {
+ message = "No rules defined, define at least one rule.";
+ } else if (value.mapping.length === 0) {
+ message = "No collection identifiers defined, specify at least one collection identifier.";
+ } else {
+ for (let rule of value.rules) {
+ if (rule.error) {
+ message = "One or more rules in error.";
+ break;
+ }
+ }
+ }
+ this.reset = reset;
+ return { valid: !message, message: message };
+ }
+});
+
+export default {
+ View: View
+};
diff --git a/client/galaxy/scripts/mvc/workflow/workflow-terminals.js b/client/galaxy/scripts/mvc/workflow/workflow-terminals.js
index aa6ef7451ed3..31393f29de1d 100644
--- a/client/galaxy/scripts/mvc/workflow/workflow-terminals.js
+++ b/client/galaxy/scripts/mvc/workflow/workflow-terminals.js
@@ -1,5 +1,7 @@
// TODO; tie into Galaxy state?
window.workflow_globals = window.workflow_globals || {};
+import * as Toastr from "libs/toastr";
+
function CollectionTypeDescription(collectionType) {
this.collectionType = collectionType;
this.isCollection = true;
@@ -524,6 +526,7 @@ var OutputCollectionTerminal = Terminal.extend({
if (newCollectionType.collectionType != this.collectionType.collectionType) {
_.each(this.connectors, connector => {
// TODO: consider checking if connection valid before removing...
+ Toastr.warning("Destroying a connection because collection type has changed.");
connector.destroy();
});
}
diff --git a/config/tool_conf.xml.main b/config/tool_conf.xml.main
index ebc8c585bda8..d242c8e5c0bc 100644
--- a/config/tool_conf.xml.main
+++ b/config/tool_conf.xml.main
@@ -33,6 +33,7 @@
+
diff --git a/config/tool_conf.xml.sample b/config/tool_conf.xml.sample
index 47558354254e..8fe8fc2693fa 100644
--- a/config/tool_conf.xml.sample
+++ b/config/tool_conf.xml.sample
@@ -35,6 +35,7 @@
+
diff --git a/lib/galaxy/app.py b/lib/galaxy/app.py
index 53440d9222c1..fbb4374064a2 100644
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -94,7 +94,6 @@ def __init__(self, **kwargs):
self._configure_security()
# Tag handler
self.tag_handler = GalaxyTagManager(self.model.context)
- # Dataset Collection Plugins
self.dataset_collections_service = DatasetCollectionManager(self)
self.history_manager = HistoryManager(self)
self.dependency_resolvers_view = DependencyResolversView(self)
diff --git a/lib/galaxy/dataset_collections/type_description.py b/lib/galaxy/dataset_collections/type_description.py
index ee76a4609b5a..e002247fab6f 100644
--- a/lib/galaxy/dataset_collections/type_description.py
+++ b/lib/galaxy/dataset_collections/type_description.py
@@ -39,6 +39,8 @@ class CollectionTypeDescription(object):
'list'
>>> nested_type_description.effective_collection_type_description(paired_type_description).collection_type
'list'
+ >>> nested_type_description.child_collection_type()
+ 'paired'
"""
def __init__(self, collection_type, collection_type_description_factory):
@@ -46,6 +48,14 @@ def __init__(self, collection_type, collection_type_description_factory):
self.collection_type_description_factory = collection_type_description_factory
self.__has_subcollections = self.collection_type.find(":") > 0
+ def child_collection_type(self):
+ rank_collection_type = self.rank_collection_type()
+ return self.collection_type[len(rank_collection_type) + 1:]
+
+ def child_collection_type_description(self):
+ child_collection_type = self.child_collection_type()
+ return self.collection_type_description_factory.for_collection_type(child_collection_type)
+
def effective_collection_type_description(self, subcollection_type):
effective_collection_type = self.effective_collection_type(subcollection_type)
return self.collection_type_description_factory.for_collection_type(effective_collection_type)
diff --git a/lib/galaxy/managers/collections.py b/lib/galaxy/managers/collections.py
index cf507c3c2185..9ec8cceb673c 100644
--- a/lib/galaxy/managers/collections.py
+++ b/lib/galaxy/managers/collections.py
@@ -162,11 +162,15 @@ def create_dataset_collection(self, trans, collection_type, element_identifiers=
raise RequestParameterInvalidException(ERROR_NO_COLLECTION_TYPE)
collection_type_description = self.collection_type_descriptions.for_collection_type(collection_type)
-
+ has_subcollections = collection_type_description.has_subcollections()
# If we have elements, this is an internal request, don't need to load
# objects from identifiers.
if elements is None:
elements = self._element_identifiers_to_elements(trans, collection_type_description, element_identifiers)
+ else:
+ if has_subcollections:
+ # Nested collection - recursively create collections as needed.
+ self.__recursively_create_collections_for_elements(trans, elements)
# else if elements is set, it better be an ordered dict!
if elements is not self.ELEMENTS_UNINITIALIZED:
@@ -184,7 +188,7 @@ def create_dataset_collection(self, trans, collection_type, element_identifiers=
def _element_identifiers_to_elements(self, trans, collection_type_description, element_identifiers):
if collection_type_description.has_subcollections():
# Nested collection - recursively create collections and update identifiers.
- self.__recursively_create_collections(trans, element_identifiers)
+ self.__recursively_create_collections_for_identifiers(trans, element_identifiers)
new_collection = False
for element_identifier in element_identifiers:
if element_identifier.get("src") == "new_collection" and element_identifier.get('collection_type') == '':
@@ -310,10 +314,10 @@ def __persist(self, dataset_collection_instance, flush=True):
context.flush()
return dataset_collection_instance
- def __recursively_create_collections(self, trans, element_identifiers):
+ def __recursively_create_collections_for_identifiers(self, trans, element_identifiers):
for index, element_identifier in enumerate(element_identifiers):
try:
- if not element_identifier["src"] == "new_collection":
+ if element_identifier.get("src", None) != "new_collection":
# not a new collection, keep moving...
continue
except KeyError:
@@ -331,6 +335,30 @@ def __recursively_create_collections(self, trans, element_identifiers):
return element_identifiers
+ def __recursively_create_collections_for_elements(self, trans, elements):
+ if elements is self.ELEMENTS_UNINITIALIZED:
+ return
+
+ new_elements = odict.odict()
+ for key, element in elements.items():
+ if isinstance(element, model.DatasetCollection):
+ continue
+
+ if element.get("src", None) != "new_collection":
+ continue
+
+ # element is a dict with src new_collection and
+ # and odict of named elements
+ collection_type = element.get("collection_type", None)
+ sub_elements = element["elements"]
+ collection = self.create_dataset_collection(
+ trans=trans,
+ collection_type=collection_type,
+ elements=sub_elements,
+ )
+ new_elements[key] = collection
+ elements.update(new_elements)
+
def __load_elements(self, trans, element_identifiers):
elements = odict.odict()
for element_identifier in element_identifiers:
@@ -397,6 +425,77 @@ def get_dataset_collection(self, trans, encoded_id):
collection = trans.sa_session.query(trans.app.model.DatasetCollection).get(collection_id)
return collection
+ def apply_rules(self, hdca, rule_set, handle_dataset):
+ hdca_collection = hdca.collection
+ collection_type = hdca_collection.collection_type
+ elements = hdca_collection.elements
+ collection_type_description = self.collection_type_descriptions.for_collection_type(collection_type)
+ initial_data, initial_sources = self.__init_rule_data(elements, collection_type_description)
+ data, sources = rule_set.apply(initial_data, initial_sources)
+
+ collection_type = rule_set.collection_type
+ collection_type_description = self.collection_type_descriptions.for_collection_type(collection_type)
+ elements = self._build_elements_from_rule_data(collection_type_description, rule_set, data, sources, handle_dataset)
+ return elements
+
+ def _build_elements_from_rule_data(self, collection_type_description, rule_set, data, sources, handle_dataset):
+ identifier_columns = rule_set.identifier_columns
+ elements = odict.odict()
+ for data_index, row_data in enumerate(data):
+ # For each row, find place in depth for this element.
+ collection_type_at_depth = collection_type_description
+ elements_at_depth = elements
+
+ for i, identifier_column in enumerate(identifier_columns):
+ identifier = row_data[identifier_column]
+
+ if i + 1 == len(identifier_columns):
+ # At correct final position in nested structure for this dataset.
+ if collection_type_at_depth.collection_type == "paired":
+ if identifier.lower() in ["f", "1", "r1", "forward"]:
+ identifier = "forward"
+ elif identifier.lower() in ["r", "2", "r2", "reverse"]:
+ identifier = "reverse"
+ else:
+ raise Exception("Unknown indicator of paired status encountered - only values of F, R, 1, 2, R1, R2, forward, or reverse are allowed.")
+
+ elements_at_depth[identifier] = handle_dataset(sources[data_index]["dataset"])
+ else:
+ collection_type_at_depth = collection_type_at_depth.child_collection_type_description()
+ found = False
+ if identifier in elements_at_depth:
+ elements_at_depth = elements_at_depth[identifier]["elements"]
+ found = True
+
+ if not found:
+ sub_collection = {}
+ sub_collection["src"] = "new_collection"
+ sub_collection["collection_type"] = collection_type_at_depth.collection_type
+ sub_collection["elements"] = odict.odict()
+ elements_at_depth[identifier] = sub_collection
+ elements_at_depth = sub_collection["elements"]
+
+ return elements
+
+ def __init_rule_data(self, elements, collection_type_description, parent_identifiers=None):
+ parent_identifiers = parent_identifiers or []
+ data, sources = [], []
+ for element in elements:
+ element_object = element.element_object
+ identifiers = parent_identifiers + [element.element_identifier]
+ if not element.is_collection:
+ data.append([])
+ sources.append({"identifiers": identifiers, "dataset": element_object})
+ else:
+            child_collection_type_description = collection_type_description.child_collection_type_description()
+            element_data, element_sources = self.__init_rule_data(
+                element_object.elements, child_collection_type_description, identifiers
+            )
+ data.extend(element_data)
+ sources.extend(element_sources)
+
+ return data, sources
+
def __get_history_collection_instance(self, trans, id, check_ownership=False, check_accessible=True):
instance_id = int(trans.app.security.decode_id(id))
collection_instance = trans.sa_session.query(trans.app.model.HistoryDatasetCollectionAssociation).get(instance_id)
diff --git a/lib/galaxy/tools/__init__.py b/lib/galaxy/tools/__init__.py
index 888b2206787a..2f1204ceafb8 100755
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -80,6 +80,7 @@
from galaxy.util.json import json_fix
from galaxy.util.json import safe_loads
from galaxy.util.odict import odict
+from galaxy.util.rules_dsl import RuleSet
from galaxy.util.template import fill_template
from galaxy.version import VERSION_MAJOR
from galaxy.web import url_for
@@ -2653,6 +2654,29 @@ def add_copied_value_to_new_elements(new_label, dce_object):
)
+class ApplyRulesTool(DatabaseOperationTool):
+ tool_type = 'apply_rules'
+
+ def produce_outputs(self, trans, out_data, output_collections, incoming, history, **kwds):
+ log.info(incoming)
+ hdca = incoming["input"]
+ rule_set = RuleSet(incoming["rules"])
+
+ def copy_dataset(dataset):
+ copied_dataset = dataset.copy()
+ copied_dataset.visible = False
+ history.add_dataset(copied_dataset, set_hid=True)
+ return copied_dataset
+
+ new_elements = self.app.dataset_collections_service.apply_rules(
+ hdca, rule_set, copy_dataset
+ )
+ log.info(new_elements)
+ output_collections.create_collection(
+ next(iter(self.outputs.values())), "output", collection_type=rule_set.collection_type, elements=new_elements
+ )
+
+
class TagFromFileTool(DatabaseOperationTool):
tool_type = 'tag_from_file'
diff --git a/lib/galaxy/tools/actions/__init__.py b/lib/galaxy/tools/actions/__init__.py
index 51717788ca6e..7038f5a7e432 100644
--- a/lib/galaxy/tools/actions/__init__.py
+++ b/lib/galaxy/tools/actions/__init__.py
@@ -775,10 +775,10 @@ def __init__(self, trans, history, tool, tool_action, input_collections, dataset
self.out_collection_instances = {}
self.tags = tags
- def create_collection(self, output, name, **element_kwds):
+ def create_collection(self, output, name, collection_type=None, **element_kwds):
input_collections = self.input_collections
collections_manager = self.trans.app.dataset_collections_service
- collection_type = output.structure.collection_type
+ collection_type = collection_type or output.structure.collection_type
if collection_type is None:
collection_type_source = output.structure.collection_type_source
if collection_type_source is None:
@@ -791,15 +791,21 @@ def create_collection(self, output, name, **element_kwds):
collection_type = input_collections[collection_type_source].collection.collection_type
if "elements" in element_kwds:
+ def check_elements(elements):
+ if hasattr(elements, "items"): # else it is ELEMENTS_UNINITIALIZED object.
+ for value in elements.values():
+ # Either a HDA (if) or a DatasetCollection or a recursive dict.
+ if getattr(value, "history_content_type", None) == "dataset":
+ assert value.history is not None
+ elif hasattr(value, "dataset_instances"):
+ for dataset in value.dataset_instances:
+ assert dataset.history is not None
+ else:
+ assert value["src"] == "new_collection"
+ check_elements(value["elements"])
+
elements = element_kwds["elements"]
- if hasattr(elements, "items"): # else it is ELEMENTS_UNINITIALIZED object.
- for value in elements.values():
- # Either a HDA (if) or a DatasetCollection (the else)
- if getattr(value, "history_content_type", None) == "dataset":
- assert value.history is not None
- else:
- for dataset in value.dataset_instances:
- assert dataset.history is not None
+ check_elements(elements)
if self.dataset_collection_elements is not None:
dc = collections_manager.create_dataset_collection(
diff --git a/lib/galaxy/tools/apply_rules.xml b/lib/galaxy/tools/apply_rules.xml
new file mode 100644
index 000000000000..5a0117db0b52
--- /dev/null
+++ b/lib/galaxy/tools/apply_rules.xml
@@ -0,0 +1,36 @@
+
+
+
+
+
+
+
+
+
+
+
+ `__ (this link is to documentation that is pretty rough still).
+
+.. class:: infomark
+
+This tool will create new history datasets from your collection but your quota usage will not increase.
+
+ ]]>
+
diff --git a/lib/galaxy/tools/parameters/basic.py b/lib/galaxy/tools/parameters/basic.py
index ba8d7f7c3778..892a69785402 100644
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -4,6 +4,7 @@
from __future__ import print_function
import cgi
+import json
import logging
import os
import os.path
@@ -23,6 +24,7 @@
from galaxy.util.bunch import Bunch
from galaxy.util.dictifiable import Dictifiable
from galaxy.util.expressions import ExpressionContext
+from galaxy.util.rules_dsl import RuleSet
from galaxy.web import url_for
from . import validation
from .dataset_matcher import (
@@ -2111,6 +2113,68 @@ def to_dict(self, trans, other_values=None):
return d
+class BaseJsonToolParameter(ToolParameter):
+ """
+ Class of parameter that tries to keep values as close to JSON as possible.
+ In particular value_to_basic is overloaded to prevent params_to_strings from
+ double encoding JSON and to_python using loads to produce values.
+ """
+
+ def value_to_basic(self, value, app, use_security=False):
+ if is_runtime_value(value):
+ return {'__class__': 'RuntimeValue'}
+ return value
+
+ def to_json(self, value, app, use_security):
+ """Convert a value to a string representation suitable for persisting"""
+ return json.dumps(value)
+
+ def to_python(self, value, app):
+ """Convert a value created with to_json back to an object representation"""
+ return json.loads(value)
+
+
+class RulesListToolParameter(BaseJsonToolParameter):
+ """
+ Parameter that allows for the creation of a list of rules using the Galaxy rules DSL.
+ """
+
+ def __init__(self, tool, input_source, context=None):
+ input_source = ensure_input_source(input_source)
+ BaseJsonToolParameter.__init__(self, tool, input_source)
+ self.data_ref = input_source.get("data_ref", None)
+
+ def to_dict(self, trans, other_values={}):
+ d = ToolParameter.to_dict(self, trans)
+ target_name = self.data_ref
+ if target_name in other_values:
+ target = other_values[target_name]
+ if not is_runtime_value(target):
+ d["target"] = {
+ "src": "hdca" if hasattr(target, "collection") else "hda",
+ "id": trans.app.security.encode_id(target.id),
+ }
+ return d
+
+ def validate(self, value, trans=None):
+ super(RulesListToolParameter, self).validate(value, trans=trans)
+ if not isinstance(value, dict):
+ raise ValueError("No rules specified for rules parameter.")
+
+ if "rules" not in value:
+ raise ValueError("No rules specified for rules parameter")
+ mappings = value.get("mapping", None)
+ if not mappings:
+ raise ValueError("No column definitions defined for rules parameter.")
+
+ def to_text(self, value):
+ if value:
+ rule_set = RuleSet(value)
+ return rule_set.display
+ else:
+ return ""
+
+
parameter_types = dict(
text=TextToolParameter,
integer=IntegerToolParameter,
@@ -2129,6 +2193,7 @@ def to_dict(self, trans, other_values=None):
data=DataToolParameter,
data_collection=DataCollectionToolParameter,
library_data=LibraryDatasetToolParameter,
+ rules=RulesListToolParameter,
drill_down=DrillDownSelectToolParameter
)
diff --git a/lib/galaxy/tools/parser/output_objects.py b/lib/galaxy/tools/parser/output_objects.py
index 2b0784ecc507..216d5df60f5b 100644
--- a/lib/galaxy/tools/parser/output_objects.py
+++ b/lib/galaxy/tools/parser/output_objects.py
@@ -175,19 +175,21 @@ def __init__(
self,
collection_type,
collection_type_source=None,
+ collection_type_from_rules=None,
structured_like=None,
dataset_collector_descriptions=None,
):
self.collection_type = collection_type
self.collection_type_source = collection_type_source
+ self.collection_type_from_rules = collection_type_from_rules
self.structured_like = structured_like
self.dataset_collector_descriptions = dataset_collector_descriptions
if collection_type and collection_type_source:
raise ValueError("Cannot set both type and type_source on collection output.")
- if collection_type is None and structured_like is None and dataset_collector_descriptions is None and collection_type_source is None:
- raise ValueError("Output collection types must be specify type of structured_like")
- if dataset_collector_descriptions and structured_like:
- raise ValueError("Cannot specify dynamic structure (discovered_datasets) and structured_like attribute.")
+ if collection_type is None and structured_like is None and dataset_collector_descriptions is None and collection_type_source is None and collection_type_from_rules is None:
+ raise ValueError("Output collection types must specify source of collection type information (e.g. structured_like or type_source).")
+ if dataset_collector_descriptions and (structured_like or collection_type_source or collection_type_from_rules):
+ raise ValueError("Cannot specify dynamic structure (discovered_datasets) and collection type source attributes such as structured_like or type_source.")
self.dynamic = dataset_collector_descriptions is not None
def collection_prototype(self, inputs, type_registry):
diff --git a/lib/galaxy/tools/parser/xml.py b/lib/galaxy/tools/parser/xml.py
index fde2b532935c..a2abee93450d 100644
--- a/lib/galaxy/tools/parser/xml.py
+++ b/lib/galaxy/tools/parser/xml.py
@@ -260,6 +260,7 @@ def _parse(data_elem, **kwds):
default_format = collection_elem.get("format", "data")
collection_type = collection_elem.get("type", None)
collection_type_source = collection_elem.get("type_source", None)
+ collection_type_from_rules = collection_elem.get("type_from_rules", None)
structured_like = collection_elem.get("structured_like", None)
inherit_format = False
inherit_metadata = False
@@ -276,6 +277,7 @@ def _parse(data_elem, **kwds):
structure = ToolOutputCollectionStructure(
collection_type=collection_type,
collection_type_source=collection_type_source,
+ collection_type_from_rules=collection_type_from_rules,
structured_like=structured_like,
dataset_collector_descriptions=dataset_collector_descriptions,
)
diff --git a/lib/galaxy/util/rules_dsl.py b/lib/galaxy/util/rules_dsl.py
new file mode 100644
index 000000000000..30830ddfe619
--- /dev/null
+++ b/lib/galaxy/util/rules_dsl.py
@@ -0,0 +1,553 @@
+import abc
+import itertools
+import re
+
+import six
+from six.moves import map
+
+
+def _ensure_rule_contains_keys(rule, keys):
+ for key, instance_class in keys.items():
+ if key not in rule:
+ raise ValueError("Rule of type [%s] does not contain key [%s]." % (rule["type"], key))
+ value = rule[key]
+ if not isinstance(value, instance_class):
+ raise ValueError("Rule of type [%s] does not contain correct value type for key [%s]." % (rule["type"], key))
+
+
+def _ensure_key_value_in(rule, key, values):
+ value = rule[key]
+ if value not in values:
+ raise ValueError("Invalid value [%s] for [%s] encountered." % (value, key))
+
+
+def _ensure_valid_pattern(expression):
+ re.compile(expression)
+
+
+def apply_regex(regex, target, data, replacement=None, group_count=None):
+ pattern = re.compile(regex)
+
+ def new_row(row):
+ source = row[target]
+ if replacement is None:
+ match = pattern.search(source)
+ if not match:
+ raise Exception("Problem apply regular expression [%s] to [%s]." % (regex, source))
+
+            if group_count:
+                if len(match.groups()) != group_count:
+                    raise Exception("Problem applying regular expression.")
+
+                result = row + list(match.groups())
+ else:
+ result = row + [match.group(0)]
+ else:
+ result = row + [pattern.search(source).expand(replacement)]
+
+ return result
+
+ new_data = list(map(new_row, data))
+ return new_data
+
+
+@six.add_metaclass(abc.ABCMeta)
+class BaseRuleDefinition(object):
+
+ @abc.abstractproperty
+ def rule_type(self):
+ """Short string describing type of rule (plugin class) to use."""
+
+ @abc.abstractmethod
+ def validate_rule(self, rule):
+ """Validate dictified rule definition of this type."""
+
+ @abc.abstractmethod
+ def apply(self, rule, data, sources):
+ """Apply validated, dictified rule definition to supplied data."""
+
+
+class AddColumnMetadataRuleDefinition(BaseRuleDefinition):
+ rule_type = "add_column_metadata"
+
+ def validate_rule(self, rule):
+ _ensure_rule_contains_keys(rule, {"value": six.string_types})
+
+ def apply(self, rule, data, sources):
+ rule_value = rule["value"]
+ identifier_index = int(rule_value[len("identifier"):])
+
+ new_rows = []
+ for index, row in enumerate(data):
+ new_rows.append(row + [sources[index]["identifiers"][identifier_index]])
+
+ return new_rows, sources
+
+
+class AddColumnConcatenateRuleDefinition(BaseRuleDefinition):
+ rule_type = "add_column_concatenate"
+
+ def validate_rule(self, rule):
+ _ensure_rule_contains_keys(rule, {"target_column_0": int, "target_column_1": int})
+
+ def apply(self, rule, data, sources):
+ column_0 = rule["target_column_0"]
+ column_1 = rule["target_column_1"]
+
+ new_rows = []
+ for index, row in enumerate(data):
+ new_rows.append(row + [row[column_0] + row[column_1]])
+
+ return new_rows, sources
+
+
+class AddColumnBasenameRuleDefinition(BaseRuleDefinition):
+ rule_type = "add_column_basename"
+
+ def validate_rule(self, rule):
+ _ensure_rule_contains_keys(rule, {"target_column": int})
+
+ def apply(self, rule, data, sources):
+ column = rule["target_column"]
+ re = r"[^/]*$"
+ return apply_regex(re, column, data), sources
+
+
+class AddColumnRegexRuleDefinition(BaseRuleDefinition):
+ rule_type = "add_column_regex"
+
+ def validate_rule(self, rule):
+ _ensure_rule_contains_keys(rule, {"target_column": int, "expression": six.string_types})
+ _ensure_valid_pattern(rule["expression"])
+
+ def apply(self, rule, data, sources):
+ target = rule["target_column"]
+ expression = rule["expression"]
+ replacement = rule.get("replacement")
+        group_count = rule.get("group_count")
+
+ return apply_regex(expression, target, data, replacement, group_count), sources
+
+
+class AddColumnRownumRuleDefinition(BaseRuleDefinition):
+ rule_type = "add_column_rownum"
+
+ def validate_rule(self, rule):
+ _ensure_rule_contains_keys(rule, {"start": int})
+
+ def apply(self, rule, data, sources):
+ start = rule["start"]
+
+ new_rows = []
+ for index, row in enumerate(data):
+ new_rows.append(row + ["%d" % (index + start)])
+
+ return new_rows, sources
+
+
+class AddColumnValueRuleDefinition(BaseRuleDefinition):
+ rule_type = "add_column_value"
+
+ def validate_rule(self, rule):
+ _ensure_rule_contains_keys(rule, {"value": six.string_types})
+
+ def apply(self, rule, data, sources):
+ value = rule["value"]
+
+ new_rows = []
+ for index, row in enumerate(data):
+ new_rows.append(row + [str(value)])
+
+ return new_rows, sources
+
+
+class AddColumnSubstrRuleDefinition(BaseRuleDefinition):
+ rule_type = "add_column_substr"
+
+ def validate_rule(self, rule):
+ _ensure_rule_contains_keys(rule, {
+ "target_column": int,
+ "length": int,
+ "substr_type": six.string_types,
+ })
+ _ensure_key_value_in(rule, "substr_type", ["keep_prefix", "drop_prefix", "keep_suffix", "drop_suffix"])
+
+ def apply(self, rule, data, sources):
+ target = rule["target_column"]
+ length = rule["length"]
+ substr_type = rule["substr_type"]
+
+ def new_row(row):
+ original_value = row[target]
+ start = 0
+ end = len(original_value)
+
+ if substr_type == "keep_prefix":
+ end = length
+ elif substr_type == "drop_prefix":
+ start = length
+ elif substr_type == "keep_suffix":
+ start = end - length
+ if start < 0:
+ start = 0
+ else:
+ end = end - length
+ if end < 0:
+ end = 0
+
+ return row + [original_value[start:end]]
+
+ return list(map(new_row, data)), sources
+
+
+class RemoveColumnsRuleDefinition(BaseRuleDefinition):
+ rule_type = "remove_columns"
+
+ def validate_rule(self, rule):
+ _ensure_rule_contains_keys(rule, {
+ "target_columns": list,
+ })
+
+ def apply(self, rule, data, sources):
+ target_columns = rule["target_columns"]
+
+ def new_row(row):
+ new = []
+ for index, val in enumerate(row):
+ if index not in target_columns:
+ new.append(val)
+ return new
+
+ return list(map(new_row, data)), sources
+
+
+def _filter_index(func, iterable):
+ result = []
+ for index, x in enumerate(iterable):
+ if func(index):
+ result.append(x)
+
+ return result
+
+
+class AddFilterRegexRuleDefinition(BaseRuleDefinition):
+ rule_type = "add_filter_regex"
+
+ def validate_rule(self, rule):
+ _ensure_rule_contains_keys(rule, {
+ "target_column": int,
+ "invert": bool,
+ "expression": six.string_types,
+ })
+ _ensure_valid_pattern(rule["expression"])
+
+ def apply(self, rule, data, sources):
+ target_column = rule["target_column"]
+ invert = rule["invert"]
+ regex = rule["expression"]
+
+ def _filter(index):
+ row = data[index]
+ val = row[target_column]
+ pattern = re.compile(regex)
+ return not invert if pattern.search(val) else invert
+
+ return _filter_index(_filter, data), _filter_index(_filter, sources)
+
+
+class AddFilterCountRuleDefinition(BaseRuleDefinition):
+ rule_type = "add_filter_count"
+
+ def validate_rule(self, rule):
+ _ensure_rule_contains_keys(rule, {
+ "count": int,
+ "invert": bool,
+ "which": six.string_types,
+ })
+ _ensure_key_value_in(rule, "which", ["first", "last"])
+
+ def apply(self, rule, data, sources):
+ num_rows = len(data)
+ invert = rule["invert"]
+ n = rule["count"]
+ which = rule["which"]
+
+ def _filter(index):
+ if which == "first":
+ matches = index >= n
+ else:
+ matches = index < (num_rows - n)
+ return not invert if matches else invert
+
+ return _filter_index(_filter, data), _filter_index(_filter, sources)
+
+
class AddFilterEmptyRuleDefinition(BaseRuleDefinition):
    """Filter rows on whether the target column is empty (``invert`` flips keep/drop)."""
    rule_type = "add_filter_empty"

    def validate_rule(self, rule):
        _ensure_rule_contains_keys(rule, {
            "target_column": int,
            "invert": bool
        })

    def apply(self, rule, data, sources):
        invert = rule["invert"]
        target_column = rule["target_column"]

        def _filter(index):
            # Inspect the target column of the row at ``index``. The previous
            # code read ``len(data[target_column])`` - indexing the table by
            # column number and ignoring the row index - so it tested the
            # length of an unrelated whole row instead of this row's cell.
            return not invert if len(data[index][target_column]) == 0 else invert

        return _filter_index(_filter, data), _filter_index(_filter, sources)
+
+
class AddFilterMatchesRuleDefinition(BaseRuleDefinition):
    """Keep rows whose target column equals ``value`` exactly (drop them when inverted)."""
    rule_type = "add_filter_matches"

    def validate_rule(self, rule):
        _ensure_rule_contains_keys(rule, {
            "target_column": int,
            "invert": bool,
            "value": six.string_types,
        })

    def apply(self, rule, data, sources):
        invert = rule["invert"]
        target_column = rule["target_column"]
        value = rule["value"]

        def _filter(index):
            # Exact equality; XOR with invert flips the keep/drop decision.
            matched = data[index][target_column] == value
            return matched != invert

        return _filter_index(_filter, data), _filter_index(_filter, sources)
+
+
class AddFilterCompareRuleDefinition(BaseRuleDefinition):
    """Keep rows whose float-coerced target column compares against ``value``.

    NOTE(review): unlike the other filters this rule exposes no "invert"
    flag - presumably intentional, since the complementary operator is
    always available.
    """
    rule_type = "add_filter_compare"

    def validate_rule(self, rule):
        _ensure_rule_contains_keys(rule, {
            "target_column": int,
            "value": int,
            "compare_type": six.string_types,
        })
        # Restricting compare_type here guarantees the dispatch in apply()
        # always hits one of the four branches.
        _ensure_key_value_in(rule, "compare_type", ["less_than", "less_than_equal", "greater_than", "greater_than_equal"])

    def apply(self, rule, data, sources):
        target_column = rule["target_column"]
        value = rule["value"]
        compare_type = rule["compare_type"]

        def _filter(index):
            # Cell text is coerced to float; non-numeric cells raise ValueError.
            target_value = float(data[index][target_column])
            if compare_type == "less_than":
                return target_value < value
            elif compare_type == "less_than_equal":
                return target_value <= value
            elif compare_type == "greater_than":
                return target_value > value
            elif compare_type == "greater_than_equal":
                return target_value >= value

        return _filter_index(_filter, data), _filter_index(_filter, sources)
+
+
class SortRuleDefinition(BaseRuleDefinition):
    """Sort rows (and their parallel sources) on one column, optionally numerically."""
    rule_type = "sort"

    def validate_rule(self, rule):
        _ensure_rule_contains_keys(rule, {
            "target_column": int,
            "numeric": bool,
        })

    def apply(self, rule, data, sources):
        target = rule["target_column"]
        numeric = rule["numeric"]

        # Pair each row with its source so both are reordered together.
        sortable = zip(data, sources)

        def sort_key(pair):
            # pair is (row, source); order on the target column, coerced to
            # float for numeric sorts (non-numeric cells raise ValueError).
            val = pair[0][target]
            if numeric:
                val = float(val)
            return val

        # Use a key function: the previous code passed a cmp function
        # positionally to sorted(), which is a TypeError on Python 3.
        sorted_data = sorted(sortable, key=sort_key)

        new_data = []
        new_sources = []

        for (row, source) in sorted_data:
            new_data.append(row)
            new_sources.append(source)

        return new_data, new_sources
+
+
class SwapColumnsRuleDefinition(BaseRuleDefinition):
    """Exchange the contents of two columns in every row."""
    rule_type = "swap_columns"

    def validate_rule(self, rule):
        _ensure_rule_contains_keys(rule, {
            "target_column_0": int,
            "target_column_1": int,
        })

    def apply(self, rule, data, sources):
        col_a = rule["target_column_0"]
        col_b = rule["target_column_1"]

        def new_row(row):
            # Copy first so the caller's row object is left untouched.
            swapped = list(row)
            swapped[col_a], swapped[col_b] = row[col_b], row[col_a]
            return swapped

        return [new_row(row) for row in data], sources
+
+
class SplitColumnsRuleDefinition(BaseRuleDefinition):
    """Split every row in two: one row keeps the ``target_columns_0`` cells,
    the other the ``target_columns_1`` cells, and columns claimed by neither
    side are duplicated into both rows."""
    rule_type = "split_columns"

    def validate_rule(self, rule):
        _ensure_rule_contains_keys(rule, {
            "target_columns_0": list,
            "target_columns_1": list,
        })

    def apply(self, rule, data, sources):
        columns_0 = rule["target_columns_0"]
        columns_1 = rule["target_columns_1"]

        def split_row(row):
            row_0, row_1 = [], []
            for index, el in enumerate(row):
                if index in columns_0:
                    targets = (row_0,)
                elif index in columns_1:
                    targets = (row_1,)
                else:
                    # unclaimed columns land in both output rows
                    targets = (row_0, row_1)
                for target in targets:
                    target.append(el)
            return [row_0, row_1]

        # Every input row becomes two output rows, so each source doubles too.
        return flat_map(split_row, data), flat_map(lambda x: [x, x], sources)
+
+
def flat_map(f, items):
    # Apply ``f`` (which must return an iterable) to each item and
    # concatenate all results into one flat list.
    return [element for item in items for element in f(item)]
+
+
class RuleSet(object):
    """A parsed set of rules plus the column "mapping" that describes how the
    final table's columns are interpreted (identifiers, paired state, ...)."""

    def __init__(self, rule_set_as_dict):
        self.raw_rules = rule_set_as_dict["rules"]
        # "mapping" is optional - its absence simply means no column semantics.
        self.raw_mapping = rule_set_as_dict.get("mapping", [])

    @property
    def rules(self):
        # Raw rule dictionaries, in application order.
        return self.raw_rules

    def _rules_with_definitions(self):
        # Pair each raw rule with its registered definition instance; unknown
        # rule types raise KeyError (has_errors relies on that).
        for rule in self.raw_rules:
            yield (rule, RULES_DEFINITIONS[rule["type"]])

    def apply(self, data, sources):
        # Validate then run each rule in order, threading (data, sources) through.
        for rule, definition in self._rules_with_definitions():
            definition.validate_rule(rule)
            data, sources = definition.apply(rule, data, sources)
        return data, sources

    @property
    def has_errors(self):
        # True when any rule has an unknown type or fails validation. The
        # broad except is deliberate: any validation failure counts as errors.
        try:
            for rule, definition in self._rules_with_definitions():
                definition.validate_rule(rule)
        except Exception:
            return True
        return False

    @property
    def mapping_as_dict(self):
        # Index mapping entries by their "type"; a later duplicate wins.
        return dict((mapping["type"], mapping) for mapping in self.raw_mapping)

    # Rest of this is generic, things here are Galaxy collection specific;
    # think about a subclass of RuleSet for collection creation.
    @property
    def identifier_columns(self):
        # All list identifier columns, then the paired identifier column if mapped.
        mapping_as_dict = self.mapping_as_dict
        columns = []
        if "list_identifiers" in mapping_as_dict:
            columns.extend(mapping_as_dict["list_identifiers"]["columns"])
        if "paired_identifier" in mapping_as_dict:
            columns.append(mapping_as_dict["paired_identifier"]["columns"][0])
        return columns

    @property
    def collection_type(self):
        # One "list" rank per list identifier column, plus a trailing "paired"
        # rank when a paired identifier column is mapped (e.g. "list:paired").
        mapping_as_dict = self.mapping_as_dict
        list_columns = mapping_as_dict.get("list_identifiers", {"columns": []})["columns"]
        collection_type = ":".join("list" for _ in list_columns)
        if "paired_identifier" in mapping_as_dict:
            collection_type = (collection_type + ":paired") if collection_type else "paired"
        return collection_type

    @property
    def display(self):
        # Human-readable summary of the rules and column definitions.
        parts = ["Rules:\n"]
        parts.extend("- %s\n" % rule for rule in self.raw_rules)
        parts.append("Column Definitions:\n")
        parts.extend("- %s\n" % mapping for mapping in self.raw_mapping)
        return "".join(parts)
+
+
# All known rule definition classes: add-column rules, then filter rules,
# then structural (sort/swap/split) rules.
RULES_DEFINITION_CLASSES = [
    AddColumnMetadataRuleDefinition,
    AddColumnConcatenateRuleDefinition,
    AddColumnBasenameRuleDefinition,
    AddColumnRegexRuleDefinition,
    AddColumnRownumRuleDefinition,
    AddColumnValueRuleDefinition,
    AddColumnSubstrRuleDefinition,
    RemoveColumnsRuleDefinition,
    AddFilterRegexRuleDefinition,
    AddFilterCountRuleDefinition,
    AddFilterEmptyRuleDefinition,
    AddFilterMatchesRuleDefinition,
    AddFilterCompareRuleDefinition,
    SortRuleDefinition,
    SwapColumnsRuleDefinition,
    SplitColumnsRuleDefinition,
]
# Registry consulted by RuleSet: rule_type -> singleton definition instance.
RULES_DEFINITIONS = dict((cls.rule_type, cls()) for cls in RULES_DEFINITION_CLASSES)
diff --git a/lib/galaxy/webapps/galaxy/api/workflows.py b/lib/galaxy/webapps/galaxy/api/workflows.py
index 84562919a4f8..84cf05f4a459 100644
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -663,8 +663,6 @@ def invoke(self, trans, workflow_id, payload, **kwd):
Schedule the workflow specified by `workflow_id` to run.
"""
- # /usage is awkward in this context but is consistent with the rest of
- # this module. Would prefer to redo it all to use /invocation(s).
# Get workflow + accessibility check.
stored_workflow = self.__get_stored_accessible_workflow(trans, workflow_id)
workflow = stored_workflow.latest_workflow
diff --git a/lib/galaxy/workflow/modules.py b/lib/galaxy/workflow/modules.py
index 73a92b9486c4..71749c6b2c98 100644
--- a/lib/galaxy/workflow/modules.py
+++ b/lib/galaxy/workflow/modules.py
@@ -41,6 +41,7 @@
from galaxy.util.bunch import Bunch
from galaxy.util.json import safe_loads
from galaxy.util.odict import odict
+from galaxy.util.rules_dsl import RuleSet
from tool_shed.util import common_util
log = logging.getLogger(__name__)
@@ -718,7 +719,17 @@ def get_data_outputs(self):
extra_kwds = {}
if tool_output.collection:
extra_kwds["collection"] = True
- extra_kwds["collection_type"] = tool_output.structure.collection_type
+ collection_type = tool_output.structure.collection_type
+ if not collection_type and tool_output.structure.collection_type_from_rules:
+ rule_param = tool_output.structure.collection_type_from_rules
+ if rule_param in self.state.inputs:
+ rule_json_str = self.state.inputs[rule_param]
+ if rule_json_str: # initialized to None...
+ rules = rule_json_str
+ if rules:
+ rule_set = RuleSet(rules)
+ collection_type = rule_set.collection_type
+ extra_kwds["collection_type"] = collection_type
extra_kwds["collection_type_source"] = tool_output.structure.collection_type_source
formats = ['input'] # TODO: fix
elif tool_output.format_source is not None:
diff --git a/test/api/test_tools.py b/test/api/test_tools.py
index 5d516122a585..ff6cf71ba13f 100644
--- a/test/api/test_tools.py
+++ b/test/api/test_tools.py
@@ -3,10 +3,12 @@
import os
from base import api
+from base import rules_test_data
from base.populators import (
DatasetCollectionPopulator,
DatasetPopulator,
LibraryPopulator,
+ load_data_dict,
skip_without_tool,
)
@@ -256,7 +258,6 @@ def test_zip_list_inputs(self):
def test_filter_failed(self):
with self.dataset_populator.test_history() as history_id:
- history_id = self.dataset_populator.new_history()
ok_hdca_id = self.dataset_collection_populator.create_list_in_history(history_id, contents=["0", "1", "0", "1"]).json()["id"]
response = self.dataset_populator.run_exit_code_from_file(history_id, ok_hdca_id)
@@ -282,6 +283,31 @@ def get_state(dce):
filtered_states = [get_state(_) for _ in filtered_hdca["elements"]]
assert filtered_states == [u"ok", u"ok"], filtered_states
+ def _apply_rules_and_check(self, example):
+ with self.dataset_populator.test_history() as history_id:
+ inputs, _, _ = load_data_dict(history_id, {"input": example["test_data"]}, self.dataset_populator, self.dataset_collection_populator)
+ hdca = inputs["input"]
+ inputs = {
+ "input": {"src": "hdca", "id": hdca["id"]},
+ "rules": example["rules"]
+ }
+ self.dataset_populator.wait_for_history(history_id)
+ response = self._run("__APPLY_RULES__", history_id, inputs, assert_ok=True)
+ output_collections = response["output_collections"]
+ self.assertEquals(len(output_collections), 1)
+ output_hid = output_collections[0]["hid"]
+ output_hdca = self.dataset_populator.get_history_collection_details(history_id, hid=output_hid, wait=False)
+ example["check"](output_hdca, self.dataset_populator)
+
+ def test_apply_rules_1(self):
+ self._apply_rules_and_check(rules_test_data.EXAMPLE_1)
+
+ def test_apply_rules_2(self):
+ self._apply_rules_and_check(rules_test_data.EXAMPLE_2)
+
+ def test_apply_rules_3(self):
+ self._apply_rules_and_check(rules_test_data.EXAMPLE_3)
+
@skip_without_tool("multi_select")
def test_multi_select_as_list(self):
with self.dataset_populator.test_history() as history_id:
diff --git a/test/api/test_workflows.py b/test/api/test_workflows.py
index 44098d516f20..66602131626b 100644
--- a/test/api/test_workflows.py
+++ b/test/api/test_workflows.py
@@ -10,6 +10,7 @@
from requests import delete, put
from base import api # noqa: I100,I202
+from base import rules_test_data # noqa: I100
from base.populators import ( # noqa: I100
DatasetCollectionPopulator,
DatasetPopulator,
@@ -24,6 +25,7 @@
WORKFLOW_WITH_DYNAMIC_OUTPUT_COLLECTION,
WORKFLOW_WITH_OUTPUT_COLLECTION,
WORKFLOW_WITH_OUTPUT_COLLECTION_MAPPING,
+ WORKFLOW_WITH_RULES_1,
)
from galaxy.exceptions import error_codes # noqa: I201
from galaxy.tools.verify.test_data import TestDataResolver
@@ -872,6 +874,13 @@ def test_workflow_run_zip_collections(self):
content = self.dataset_populator.get_history_dataset_content(history_id)
self.assertEqual(content.strip(), "samp1\t10.0\nsamp2\t20.0\nsamp1\t20.0\nsamp2\t40.0")
+ @skip_without_tool("__APPLY_RULES__")
+ def test_workflow_run_apply_rules(self):
+ with self.dataset_populator.test_history() as history_id:
+ self._run_jobs(WORKFLOW_WITH_RULES_1, history_id=history_id, wait=True, assert_ok=True)
+ output_content = self.dataset_populator.get_history_collection_details(history_id, hid=6)
+ rules_test_data.check_example_2(output_content, self.dataset_populator)
+
def test_filter_failed_mapping(self):
with self.dataset_populator.test_history() as history_id:
summary = self._run_jobs("""
diff --git a/test/base/data/rules_dsl_spec.yml b/test/base/data/rules_dsl_spec.yml
index 4dd6653191b9..ba03490a346e 100644
--- a/test/base/data/rules_dsl_spec.yml
+++ b/test/base/data/rules_dsl_spec.yml
@@ -356,4 +356,50 @@
final:
data: [["bark", "Dog"], ["meow", "cat"], ["moo", "cow"]]
sources: [3, 2, 1]
-
+- rules:
+ - type: swap_columns
+ target_column_0: 0
+ target_column_1: 1
+ initial:
+ data: [["moo", "cow"], ["meow", "cat"], ["bark", "Dog"]]
+ sources: [1, 2, 3]
+ final:
+ data: [["cow", "moo"], ["cat", "meow"], ["Dog", "bark"]]
+ sources: [1, 2, 3]
+- rules:
+ - type: split_columns
+ target_columns_0: [0]
+ target_columns_1: [1]
+ initial:
+ data: [["moo", "cow", "A"], ["meow", "cat", "B"], ["bark", "Dog", "C"]]
+ sources: [1, 2, 3]
+ final:
+ data: [["moo", "A"], ["cow", "A"], ["meow", "B"], ["cat", "B"], ["bark", "C"], ["Dog", "C"]]
+ sources: [1, 1, 2, 2, 3, 3]
+- rules:
+ - type: invalid_rule_type
+ error: true
+- rules:
+ - type: add_filter_compare
+ target_column: 0
+ value: 13
+ compare_type: invalid_compare_type
+ error: true
+- rules:
+ - type: add_column_concatenate
+ target_column: 0
+ error: true
+- rules:
+ - type: add_column_basename
+ target_column_0: 0
+ error: true
+- rules:
+ - type: add_column_regex
+ target_column: 0
+ regex: '(o)+'
+ error: true
+- rules:
+ - type: add_column_regex
+ target_column: 0
+ expression: '(o+'
+ error: true
diff --git a/test/base/rules_test_data.py b/test/base/rules_test_data.py
new file mode 100644
index 000000000000..3fb3eb2bb39e
--- /dev/null
+++ b/test/base/rules_test_data.py
@@ -0,0 +1,132 @@
+# Common test data for rule testing meant to be shared between API and Selenium tests.
+from pkg_resources import resource_string
+
+RULES_DSL_SPEC_STR = resource_string(__name__, "data/rules_dsl_spec.yml")
+
+
def check_example_1(hdca, dataset_populator):
    # A flat list of two elements. The hid check asserts the first dataset is
    # a newly created output (presumably beyond the uploaded inputs' hids -
    # TODO confirm against the caller's history layout).
    assert hdca["collection_type"] == "list"
    assert hdca["element_count"] == 2
    first_hda = hdca["elements"][0]["object"]
    assert first_hda["hid"] > 3
+
+
def check_example_2(hdca, dataset_populator):
    # A nested list:list with two outer elements; the first outer element is
    # itself a list collection whose first element is a plain dataset (hda).
    assert hdca["collection_type"] == "list:list"
    assert hdca["element_count"] == 2
    outer_element = hdca["elements"][0]
    assert outer_element["element_type"] == "dataset_collection"
    inner_collection = outer_element["object"]
    assert inner_collection["collection_type"] == "list"
    assert inner_collection["elements"][0]["element_type"] == "hda"
+
+
def check_example_3(hdca, dataset_populator):
    # Flattening produces a flat list whose identifiers are concatenated
    # outer+inner identifiers - e.g. "test0" + "forward" == "test0forward".
    assert hdca["collection_type"] == "list"
    assert hdca["element_count"] == 2
    assert hdca["elements"][0]["element_identifier"] == "test0forward"
+
+
# EXAMPLE_1: map a single identifier-metadata column onto a flat list collection.
EXAMPLE_1 = {
    "rules": {
        "rules": [
            {
                "type": "add_column_metadata",
                "value": "identifier0",
            }
        ],
        "mapping": [
            {
                "type": "list_identifiers",
                "columns": [0],
            }
        ],
    },
    "test_data": {
        "type": "list",
        "elements": [
            {
                "identifier": "i1",
                "content": "0"
            },
            {
                "identifier": "i2",
                "content": "1"
            },
        ]
    },
    "check": check_example_1,
    "output_hid": 6,
}


# EXAMPLE_2: map the same identifier onto two columns, producing a nested
# list:list collection.
EXAMPLE_2 = {
    "rules": {
        "rules": [
            {
                "type": "add_column_metadata",
                "value": "identifier0",
            },
            {
                "type": "add_column_metadata",
                "value": "identifier0",
            }
        ],
        "mapping": [
            {
                "type": "list_identifiers",
                "columns": [0, 1],
            }
        ],
    },
    "test_data": {
        "type": "list",
        "elements": [
            {
                "identifier": "i1",
                "content": "0"
            },
            {
                "identifier": "i2",
                "content": "1"
            },
        ]
    },
    "check": check_example_2,
    "output_hid": 6,
}

# Flatten: concatenate the outer and inner identifiers of a list:paired
# collection and map the result onto a flat list.
EXAMPLE_3 = {
    "rules": {
        "rules": [
            {
                "type": "add_column_metadata",
                "value": "identifier0",
            },
            {
                "type": "add_column_metadata",
                "value": "identifier1",
            },
            {
                "type": "add_column_concatenate",
                "target_column_0": 0,
                "target_column_1": 1,
            }
        ],
        "mapping": [
            {
                "type": "list_identifiers",
                "columns": [2],
            }
        ],
    },
    "test_data": {
        "type": "list:paired",
    },
    "check": check_example_3,
    "output_hid": 7,
}
diff --git a/test/base/workflow_fixtures.py b/test/base/workflow_fixtures.py
index f38ec362fce3..71eef6081588 100644
--- a/test/base/workflow_fixtures.py
+++ b/test/base/workflow_fixtures.py
@@ -130,6 +130,82 @@
"""
+WORKFLOW_WITH_RULES_1 = """
+class: GalaxyWorkflow
+inputs:
+ - type: collection
+ label: input_c
+steps:
+ - label: apply
+ tool_id: __APPLY_RULES__
+ state:
+ input:
+ $link: input_c
+ rules:
+ rules:
+ - type: add_column_metadata
+ value: identifier0
+ - type: add_column_metadata
+ value: identifier0
+ mapping:
+ - type: list_identifiers
+ columns: [0, 1]
+ - tool_id: random_lines1
+ label: random_lines
+ state:
+ num_lines: 1
+ input:
+ $link: apply#output
+ seed_source:
+ seed_source_selector: set_seed
+ seed: asdf
+test_data:
+ input_c:
+ type: list
+ elements:
+ - identifier: i1
+ content: "0"
+ - identifier: i2
+ content: "1"
+"""
+
+
+WORKFLOW_WITH_RULES_2 = """
+class: GalaxyWorkflow
+inputs:
+ - type: collection
+ label: input_c
+steps:
+ - label: apply
+ tool_id: __APPLY_RULES__
+ state:
+ input:
+ $link: input_c
+ rules:
+ rules:
+ - type: add_column_metadata
+ value: identifier0
+ - type: add_column_metadata
+ value: identifier0
+ mapping:
+ - type: list_identifiers
+ columns: [0, 1]
+ - tool_id: collection_creates_list
+ label: copy_list
+ state:
+ input1:
+ $link: apply#output
+test_data:
+ input_c:
+ type: list
+ elements:
+ - identifier: i1
+ content: "0"
+ - identifier: i2
+ content: "1"
+"""
+
+
WORKFLOW_NESTED_SIMPLE = """
class: GalaxyWorkflow
inputs:
diff --git a/test/functional/tools/samples_tool_conf.xml b/test/functional/tools/samples_tool_conf.xml
index b1cf03ed24df..c47985e9ee95 100644
--- a/test/functional/tools/samples_tool_conf.xml
+++ b/test/functional/tools/samples_tool_conf.xml
@@ -185,5 +185,6 @@
+
diff --git a/test/galaxy_selenium/navigates_galaxy.py b/test/galaxy_selenium/navigates_galaxy.py
index 7dd8fd75f998..275e65b0adc3 100644
--- a/test/galaxy_selenium/navigates_galaxy.py
+++ b/test/galaxy_selenium/navigates_galaxy.py
@@ -766,6 +766,19 @@ def rule_builder_set_mapping(self, mapping_type, column_label, screenshot_name=N
self.screenshot(screenshot_name)
rule_builder.mapping_ok.wait_for_and_click()
+ def rule_builder_set_source(self, json):
+ rule_builder = self.components.rule_builder
+ rule_builder.view_source.wait_for_and_click()
+ self.rule_builder_enter_source_text(json)
+ rule_builder.main_button_ok.wait_for_and_click()
+ rule_builder.view_source.wait_for_visible()
+
+ def rule_builder_enter_source_text(self, json):
+ rule_builder = self.components.rule_builder
+ text_area_elem = rule_builder.source.wait_for_visible()
+ text_area_elem.clear()
+ text_area_elem.send_keys(json)
+
def workflow_editor_click_option(self, option_label):
self.workflow_editor_click_options()
menu_element = self.workflow_editor_options_menu_element()
@@ -1005,11 +1018,20 @@ def tagging_add(self, tags, auto_closes=True, parent_selector=""):
def workflow_run_submit(self):
self.wait_for_and_click_selector("button.btn-primary")
- def tool_open(self, tool_id):
- self.wait_for_and_click_selector('a[href$="tool_runner?tool_id=%s"]' % tool_id)
+ def tool_open(self, tool_id, outer=False):
+ if outer:
+ tool_link = self.components.tool_panel.outer_tool_link(tool_id=tool_id)
+ else:
+ tool_link = self.components.tool_panel.tool_link(tool_id=tool_id)
+ tool_link.wait_for_and_click()
def tool_parameter_div(self, expanded_parameter_id):
- return self.wait_for_selector("div.ui-form-element[tour_id$='%s']" % expanded_parameter_id)
+ return self.components.tool_form.parameter_div(parameter=expanded_parameter_id).wait_for_visible()
+
+ def tool_parameter_edit_rules(self, expanded_parameter_id="rules"):
+ rules_div_element = self.tool_parameter_div("rules")
+ edit_button_element = rules_div_element.find_element_by_css_selector("i.fa-edit")
+ edit_button_element.click()
def tool_set_value(self, expanded_parameter_id, value, expected_type=None, test_data_resolver=None):
div_element = self.tool_parameter_div(expanded_parameter_id)
diff --git a/test/galaxy_selenium/navigation.yml b/test/galaxy_selenium/navigation.yml
index 6f4e315a801b..0903cf5e0a1c 100644
--- a/test/galaxy_selenium/navigation.yml
+++ b/test/galaxy_selenium/navigation.yml
@@ -159,6 +159,12 @@ history_panel:
new_name: 'Unnamed history'
new_size: '(empty)'
+tool_panel:
+
+ selectors:
+ tool_link: 'a[href$$="tool_runner?tool_id=${tool_id}"]'
+ outer_tool_link: '.toolTitleNoSection a[href$$="tool_runner?tool_id=${tool_id}"]'
+
multi_history_view:
selectors:
@@ -215,6 +221,7 @@ tool_form:
reference: '.citations-formatted .formatted-reference'
show_bibtex: 'button.citations-to-bibtex'
bibtex_area: '.citations-bibtex textarea'
+ parameter_div: 'div.ui-form-element[tour_id="${parameter}"]'
labels:
generate_tour: 'Generate Tour'
diff --git a/test/selenium_tests/test_tool_form.py b/test/selenium_tests/test_tool_form.py
index 0ba330b0f432..0690ecea1303 100644
--- a/test/selenium_tests/test_tool_form.py
+++ b/test/selenium_tests/test_tool_form.py
@@ -1,4 +1,7 @@
-from base.populators import flakey
+import json
+
+from base import rules_test_data
+from base.populators import flakey, load_data_dict
from galaxy_selenium.navigates_galaxy import retry_call_during_transitions
from .framework import (
@@ -153,3 +156,45 @@ def _run_environment_test_tool(self, inttest_value="42"):
self.tool_open("environment_variables")
self.tool_set_value("inttest", inttest_value)
self.tool_form_execute()
+
+
class LoggedInToolFormTestCase(SeleniumTestCase):
    # Rule-builder tool form tests require a logged-in user.
    ensure_registered = True

    @selenium_test
    def test_run_apply_rules_1(self):
        self._apply_rules_and_check(rules_test_data.EXAMPLE_1)
        self.screenshot("tool_apply_rules_example_1_final")

    @selenium_test
    def test_run_apply_rules_2(self):
        self._apply_rules_and_check(rules_test_data.EXAMPLE_2)
        self.screenshot("tool_apply_rules_example_2_final")

    @selenium_test
    def test_run_apply_rules_3(self):
        self._apply_rules_and_check(rules_test_data.EXAMPLE_3)
        self.screenshot("tool_apply_rules_example_3_final")

    def _apply_rules_and_check(self, example):
        # Upload the example's test data, open __APPLY_RULES__, paste the
        # example's rules JSON into the rule builder, execute, and verify the
        # output collection with example["check"].
        rule_builder = self.components.rule_builder

        self.home()
        history_id = self.current_history_id()
        # Return value intentionally discarded - load_data_dict is called for
        # its upload side effect (the unused `inputs` binding was removed).
        load_data_dict(history_id, {"input": example["test_data"]}, self.dataset_populator, self.dataset_collection_populator)
        self.dataset_populator.wait_for_history(history_id)
        self.home()
        self.tool_open("__APPLY_RULES__", outer=True)  # may appear twice in panel, grab top-level link
        self.screenshot("tool_apply_rules_landing")
        self.tool_parameter_edit_rules()
        rule_builder._.wait_for_visible()
        self.screenshot("tool_apply_rules_builder_landing")
        self.rule_builder_set_source(json.dumps(example["rules"]))
        self.screenshot("tool_apply_rules_after")
        rule_builder.main_button_ok.wait_for_and_click()
        self.tool_form_execute()
        output_hid = example["output_hid"]
        self.history_panel_wait_for_hid_ok(output_hid)
        output_hdca = self.dataset_populator.get_history_collection_details(history_id, hid=output_hid, wait=False)
        example["check"](output_hdca, self.dataset_populator)
diff --git a/test/selenium_tests/test_uploads.py b/test/selenium_tests/test_uploads.py
index 24787b419615..5dcd472c25a7 100644
--- a/test/selenium_tests/test_uploads.py
+++ b/test/selenium_tests/test_uploads.py
@@ -246,11 +246,8 @@ def test_rules_example_5_matching_collections(self):
rule_builder = self.components.rule_builder
rule_builder.view_source.wait_for_and_click()
- text_area_elem = rule_builder.source.wait_for_visible()
-
content = self._read_rules_test_data_file("uniprot.json")
- text_area_elem.clear()
- text_area_elem.send_keys(content)
+ self.rule_builder_enter_source_text(content)
self.screenshot("rules_example_5_2_source")
rule_builder.main_button_ok.wait_for_and_click()
rule_builder.view_source.wait_for_visible()
diff --git a/test/selenium_tests/test_workflow_editor.py b/test/selenium_tests/test_workflow_editor.py
index d7b0799afffe..fa6a653375a9 100644
--- a/test/selenium_tests/test_workflow_editor.py
+++ b/test/selenium_tests/test_workflow_editor.py
@@ -1,3 +1,5 @@
+import json
+
from base.workflow_fixtures import (
WORKFLOW_NESTED_SIMPLE,
WORKFLOW_SIMPLE_CAT_TWICE,
@@ -5,6 +7,8 @@
WORKFLOW_WITH_INVALID_STATE,
WORKFLOW_WITH_OLD_TOOL_VERSION,
WORKFLOW_WITH_OUTPUT_COLLECTION,
+ WORKFLOW_WITH_RULES_1,
+ WORKFLOW_WITH_RULES_2,
)
from .framework import (
@@ -146,6 +150,71 @@ def test_rendering_simple_nested_workflow(self):
self.workflow_editor_maximize_center_pane()
self.screenshot("workflow_editor_simple_nested")
+ @selenium_test
+ def test_rendering_rules_workflow_1(self):
+ self.open_in_workflow_editor(WORKFLOW_WITH_RULES_1)
+ rule_output = "apply#output"
+ random_lines_input = "random_lines#input"
+
+ self.workflow_editor_maximize_center_pane()
+ self.screenshot("workflow_editor_rules_1")
+ self.assert_connected(rule_output, random_lines_input)
+ self.assert_input_mapped(random_lines_input)
+ self.workflow_editor_destroy_connection(random_lines_input)
+ self.assert_not_connected(rule_output, random_lines_input)
+ self.assert_input_not_mapped(random_lines_input)
+ self.workflow_editor_connect(rule_output, random_lines_input)
+ self.assert_connected(rule_output, random_lines_input)
+ self.assert_input_mapped(random_lines_input)
+
+ @selenium_test
+ def test_rendering_rules_workflow_2(self):
+ self.open_in_workflow_editor(WORKFLOW_WITH_RULES_2)
+ self.workflow_editor_maximize_center_pane(collapse_right=False)
+
+ editor = self.components.workflow_editor
+ rule_builder = self.components.rule_builder
+
+ rule_output = "apply#output"
+ copy_list_input = "copy_list#input1"
+
+ apply_node = editor.node._(label="apply")
+
+ self.assert_connected(rule_output, copy_list_input)
+ self.assert_input_mapped(copy_list_input)
+ self.workflow_editor_destroy_connection(copy_list_input)
+ self.assert_not_connected(rule_output, copy_list_input)
+ self.assert_input_not_mapped(copy_list_input)
+ self.workflow_editor_connect(rule_output, copy_list_input)
+ self.assert_connected(rule_output, copy_list_input)
+ self.assert_input_mapped(copy_list_input)
+
+ apply_node.title.wait_for_and_click()
+ self.screenshot("workflow_editor_rules_2_form")
+ self.tool_parameter_edit_rules()
+ rule_builder._.wait_for_visible()
+ self.screenshot("workflow_editor_rules_2_builder")
+ new_rules = dict(
+ rules=[{"type": "add_column_metadata", "value": "identifier0"}],
+ mapping=[{"type": "list_identifiers", "columns": [0]}],
+ )
+ self.rule_builder_set_source(json.dumps(new_rules))
+ rule_builder.main_button_ok.wait_for_and_click()
+ apply_node.title.wait_for_and_click()
+ self.sleep_for(self.wait_types.UX_RENDER)
+ # screenshot should have async warning about connection removed
+ self.screenshot("workflow_editor_rules_2_after_change")
+ self.assert_input_not_mapped(copy_list_input)
+ # changing output collection type remove outbound connections, so this
+ # this needs to be re-connected. Remove this re-connection if we upgrade
+ # the workflow editor to try to re-establish the connection with different
+ # mapping.
+ self.workflow_editor_connect(rule_output, copy_list_input)
+ self.assert_connected(rule_output, copy_list_input)
+ # Regardless - this rules now say to connect a list to a list instead of a list
+ # to a list:list, so there should be no mapping anymore even after connected.
+ self.assert_input_not_mapped(copy_list_input)
+
@selenium_test
def test_save_as(self):
name = self.workflow_upload_yaml_with_random_name(WORKFLOW_SIMPLE_CAT_TWICE)
@@ -196,9 +265,11 @@ def workflow_editor_save_and_close(self):
self.workflow_editor_click_option("Save")
self.workflow_editor_click_option("Close")
- def workflow_editor_maximize_center_pane(self):
- self.components._.left_panel_collapse.wait_for_and_click()
- self.components._.right_panel_collapse.wait_for_and_click()
+ def workflow_editor_maximize_center_pane(self, collapse_left=True, collapse_right=True):
+ if collapse_left:
+ self.components._.left_panel_collapse.wait_for_and_click()
+ if collapse_right:
+ self.components._.right_panel_collapse.wait_for_and_click()
self.sleep_for(self.wait_types.UX_RENDER)
def workflow_editor_connect(self, source, sink, screenshot_partial=None):
diff --git a/test/selenium_tests/test_workflow_run.py b/test/selenium_tests/test_workflow_run.py
index b37cf29aab62..02aecd981ea5 100644
--- a/test/selenium_tests/test_workflow_run.py
+++ b/test/selenium_tests/test_workflow_run.py
@@ -1,9 +1,11 @@
import yaml
+from base import rules_test_data
from base.populators import load_data_dict
from base.workflow_fixtures import (
WORKFLOW_SIMPLE_CAT_TWICE,
WORKFLOW_WITH_DYNAMIC_OUTPUT_COLLECTION,
WORKFLOW_WITH_OLD_TOOL_VERSION,
+ WORKFLOW_WITH_RULES_1,
)
from .framework import (
@@ -55,6 +57,18 @@ def test_execution_with_multiple_inputs(self):
content = self.dataset_populator.get_history_dataset_content(history_id, hid=7)
self.assertEqual("10.0\n30.0\n20.0\n40.0\n", content)
+ @selenium_test
+ @managed_history
+ def test_execution_with_rules(self):
+ history_id, inputs = self.workflow_run_setup_inputs(WORKFLOW_WITH_RULES_1)
+ self.open_in_workflow_run(WORKFLOW_WITH_RULES_1)
+ self.workflow_run_specify_inputs(inputs)
+ self.screenshot("workflow_run_rules")
+ self.workflow_run_submit()
+ self.history_panel_wait_for_hid_ok(6, allowed_force_refreshes=1)
+ output_content = self.dataset_populator.get_history_collection_details(history_id, hid=6)
+ rules_test_data.check_example_2(output_content, self.dataset_populator)
+
def open_in_workflow_run(self, yaml_content):
name = self.workflow_upload_yaml_with_random_name(yaml_content)
self.workflow_run_with_name(name)
diff --git a/test/unit/test_rule_utils.py b/test/unit/test_rule_utils.py
new file mode 100644
index 000000000000..249ec2341f52
--- /dev/null
+++ b/test/unit/test_rule_utils.py
@@ -0,0 +1,22 @@
+import yaml
+from base import rules_test_data
+
+from galaxy.util import rules_dsl
+
+
def test_rules():
    # Drive every case in the rules DSL spec: either apply the rules and
    # compare against the expected final state, or assert validation errors.
    for test_case in yaml.safe_load(rules_test_data.RULES_DSL_SPEC_STR):
        rule_set = rules_dsl.RuleSet(test_case)
        if "initial" in test_case:
            initial = test_case["initial"]
            final_data, final_sources = rule_set.apply(initial["data"], initial["sources"])
            expected_final = test_case["final"]
            # Compare row counts first - zip() would silently truncate and
            # hide missing or extra rows.
            assert len(final_data) == len(expected_final["data"])
            for final_row, expected_final_row in zip(final_data, expected_final["data"]):
                msg = "%s != %s" % (final_row, expected_final_row)
                assert len(final_row) == len(expected_final_row), msg
                for final_val, expected_final_val in zip(final_row, expected_final_row):
                    assert final_val == expected_final_val, msg
            # Sources track which dataset each row came from - previously
            # computed but never checked.
            if "sources" in expected_final:
                assert final_sources == expected_final["sources"]
        elif "error" in test_case:
            assert rule_set.has_errors, "rule [%s] does not contain errors" % test_case
        else:
            raise Exception("Problem with test case definition [%s]." % test_case)
diff --git a/test/unit/tools/test_parsing.py b/test/unit/tools/test_parsing.py
index abc966e3b5ad..7cae5aad0733 100644
--- a/test/unit/tools/test_parsing.py
+++ b/test/unit/tools/test_parsing.py
@@ -425,6 +425,22 @@ def test_hidden(self):
assert not self._tool_source.parse_hidden()
class ApplyRulesToolLoaderTestCase(BaseLoaderTestCase):
    # Parse the built-in apply_rules tool and sanity check its type and outputs.
    source_file_name = os.path.join(os.getcwd(), "lib/galaxy/tools/apply_rules.xml")
    source_contents = None

    def test_tool_type(self):
        # The tool must load the dedicated ApplyRulesTool class and tool type.
        tool_module = self._tool_source.parse_tool_module()
        assert tool_module[0] == "galaxy.tools"
        assert tool_module[1] == "ApplyRulesTool"
        assert self._tool_source.parse_tool_type() == "apply_rules_to_collection"

    def test_outputs(self):
        # One regular output and one collection output are declared.
        outputs, output_collections = self._tool_source.parse_outputs(object())
        assert len(outputs) == 1
        assert len(output_collections) == 1
+
+
class SpecialToolLoaderTestCase(BaseLoaderTestCase):
source_file_name = os.path.join(os.getcwd(), "lib/galaxy/tools/imp_exp/exp_history_to_archive.xml")
source_contents = None