diff --git a/client/src/components/TagsMultiselect/StatelessTags.test.js b/client/src/components/TagsMultiselect/StatelessTags.test.js
index 5468a153a8e9..09b0226b61b2 100644
--- a/client/src/components/TagsMultiselect/StatelessTags.test.js
+++ b/client/src/components/TagsMultiselect/StatelessTags.test.js
@@ -1,9 +1,10 @@
 import { mount } from "@vue/test-utils";
 import { useToast } from "composables/toast";
-import { useUserTags } from "composables/user";
 import { getLocalVue } from "tests/jest/helpers";
 import { computed } from "vue";
 
+import { useUserTagsStore } from "@/stores/userTagsStore";
+
 import StatelessTags from "./StatelessTags";
 
 const autocompleteTags = ["#named_user_tag", "abc", "my_tag"];
@@ -17,9 +18,9 @@ const mountWithProps = (props) => {
     });
 };
 
-jest.mock("composables/user");
+jest.mock("@/stores/userTagsStore");
 const addLocalTagMock = jest.fn((tag) => tag);
-useUserTags.mockReturnValue({
+useUserTagsStore.mockReturnValue({
     userTags: computed(() => autocompleteTags),
     addLocalTag: addLocalTagMock,
 });
diff --git a/client/src/components/TagsMultiselect/StatelessTags.vue b/client/src/components/TagsMultiselect/StatelessTags.vue
index a00f45bafcda..71a944340753 100644
--- a/client/src/components/TagsMultiselect/StatelessTags.vue
+++ b/client/src/components/TagsMultiselect/StatelessTags.vue
@@ -8,8 +8,8 @@ import Multiselect from "vue-multiselect";
 
 import { useToast } from "@/composables/toast";
 import { useMultiselect } from "@/composables/useMultiselect";
-import { useUserTags } from "@/composables/user";
 import { useUid } from "@/composables/utils/uid";
+import { useUserTagsStore } from "@/stores/userTagsStore";
 
 import Tag from "./Tag.vue";
 
@@ -39,7 +39,7 @@ const emit = defineEmits<{
 
 library.add(faTags, faCheck, faTimes, faPlus);
 
-const { userTags, addLocalTag } = useUserTags();
+const { userTags, addLocalTag } = useUserTagsStore();
 const { warning } = useToast();
 
 function onAddTag(tag: string) {
diff --git a/client/src/components/Workflow/Editor/Attributes.test.js b/client/src/components/Workflow/Editor/Attributes.test.js
index 544cbe7281d3..2f477bca6b9f 100644
--- a/client/src/components/Workflow/Editor/Attributes.test.js
+++ b/client/src/components/Workflow/Editor/Attributes.test.js
@@ -1,8 +1,9 @@
 import { createLocalVue, mount } from "@vue/test-utils";
-import { useUserTags } from "composables/user";
 import { isDate } from "date-fns";
 import { computed } from "vue";
 
+import { useUserTagsStore } from "@/stores/userTagsStore";
+
 import Attributes from "./Attributes";
 import { UntypedParameters } from "./modules/parameters";
 
@@ -16,8 +17,8 @@ const TEST_VERSIONS = [
 ];
 
 const autocompleteTags = ["#named_uer_tag", "abc", "my_tag"];
-jest.mock("composables/user");
-useUserTags.mockReturnValue({
+jest.mock("@/stores/userTagsStore");
+useUserTagsStore.mockReturnValue({
     userTags: computed(() => autocompleteTags),
     addLocalTag: jest.fn(),
 });
diff --git a/client/src/components/Workflow/WorkflowList.test.js b/client/src/components/Workflow/WorkflowList.test.js
index 6bb58612251a..4b07236eb091 100644
--- a/client/src/components/Workflow/WorkflowList.test.js
+++ b/client/src/components/Workflow/WorkflowList.test.js
@@ -2,13 +2,14 @@ import { createTestingPinia } from "@pinia/testing";
 import { mount } from "@vue/test-utils";
 import axios from "axios";
 import MockAdapter from "axios-mock-adapter";
-import { useUserTags } from "composables/user";
 import { formatDistanceToNow, parseISO } from "date-fns";
 import flushPromises from "flush-promises";
 import { PiniaVuePlugin } from "pinia";
 import { getLocalVue, wait } from "tests/jest/helpers";
 import { computed } from "vue";
 
+import { useUserTagsStore } from "@/stores/userTagsStore";
+
 import Tag from "../TagsMultiselect/Tag";
 import Workflows from "../Workflow/WorkflowList";
 
@@ -16,8 +17,8 @@ const localVue = getLocalVue();
 localVue.use(PiniaVuePlugin);
 
 const autocompleteTags = ["#named_user_tags", "abc", "my_tag"];
-jest.mock("composables/user");
-useUserTags.mockReturnValue({
+jest.mock("@/stores/userTagsStore");
+useUserTagsStore.mockReturnValue({
     userTags: computed(() => autocompleteTags),
     addLocalTag: jest.fn(),
 });
diff --git a/client/src/composables/user.ts b/client/src/composables/user.ts
index ecc6bddadcfa..9182934a2a7a 100644
--- a/client/src/composables/user.ts
+++ b/client/src/composables/user.ts
@@ -1,5 +1,4 @@
-import { storeToRefs } from "pinia";
-import { computed, ref } from "vue";
+import { computed } from "vue";
 
 import { useUserStore } from "@/stores/userStore";
 
@@ -14,27 +13,3 @@ export function useCurrentTheme() {
         setCurrentTheme,
     };
 }
-
-// temporarily stores tags which have not yet been fetched from the backend
-const localTags = ref([]);
-
-/**
- * Keeps tracks of the tags the current user has used.
- */
-export function useUserTags() {
-    const { currentUser } = storeToRefs(useUserStore());
-    const userTags = computed(() => {
-        let tags: string[];
-        if (currentUser.value && !currentUser.value.isAnonymous) {
-            tags = [...currentUser.value.tags_used, ...localTags.value];
-        } else {
-            tags = localTags.value;
-        }
-        const tagSet = new Set(tags);
-        return Array.from(tagSet).map((tag) => tag.replace(/^name:/, "#"));
-    });
-    const addLocalTag = (tag: string) => {
-        localTags.value.push(tag);
-    };
-    return { userTags, addLocalTag };
-}
diff --git a/client/src/stores/userTagsStore.ts b/client/src/stores/userTagsStore.ts
new file mode 100644
index 000000000000..47f717ddea0c
--- /dev/null
+++ b/client/src/stores/userTagsStore.ts
@@ -0,0 +1,27 @@
+import { defineStore, storeToRefs } from "pinia";
+import { computed, ref } from "vue";
+
+import { useUserStore } from "./userStore";
+
+export const useUserTagsStore = defineStore("userTagsStore", () => {
+    const localTags = ref([]);
+
+    const { currentUser } = storeToRefs(useUserStore());
+
+    const userTags = computed(() => {
+        let tags: string[];
+        if (currentUser.value && !currentUser.value.isAnonymous) {
+            tags = [...(currentUser.value.tags_used ?? []), ...localTags.value];
+        } else {
+            tags = localTags.value;
+        }
+        const tagSet = new Set(tags);
+        return Array.from(tagSet).map((tag) => tag.replace(/^name:/, "#"));
+    });
+
+    const addLocalTag = (tag: string) => {
+        localTags.value.push(tag);
+    };
+
+    return { userTags, addLocalTag };
+});
diff --git a/lib/galaxy/config/sample/datatypes_conf.xml.sample b/lib/galaxy/config/sample/datatypes_conf.xml.sample
index 6d757aee96cf..dfae056f4cb9 100644
--- a/lib/galaxy/config/sample/datatypes_conf.xml.sample
+++ b/lib/galaxy/config/sample/datatypes_conf.xml.sample
@@ -217,6 +217,9 @@
+
+
+
diff --git a/lib/galaxy/config/sample/tool_conf.xml.sample b/lib/galaxy/config/sample/tool_conf.xml.sample
index 31b0bb4e6ef5..57810365641a 100644
--- a/lib/galaxy/config/sample/tool_conf.xml.sample
+++ b/lib/galaxy/config/sample/tool_conf.xml.sample
@@ -79,6 +79,7 @@
+
diff --git a/lib/galaxy/jobs/handler.py b/lib/galaxy/jobs/handler.py
index cf84845cb640..644f1153809b 100644
--- a/lib/galaxy/jobs/handler.py
+++ b/lib/galaxy/jobs/handler.py
@@ -37,7 +37,10 @@
 )
 from galaxy.jobs.mapper import JobNotReadyException
 from galaxy.managers.jobs import get_jobs_to_check_at_startup
-from galaxy.model.base import transaction
+from galaxy.model.base import (
+    check_database_connection,
+    transaction,
+)
 from galaxy.structured_app import MinimalManagerApp
 from galaxy.util import unicodify
 from galaxy.util.custom_logging import get_logger
@@ -400,6 +403,7 @@ def __handle_waiting_jobs(self):
         the waiting queue. If the job has dependencies with errors, it is marked as having errors and removed from
         the queue. If the job belongs to an inactive user it is ignored. Otherwise, the job is dispatched.
         """
+        check_database_connection(self.sa_session)
         # Pull all new jobs from the queue at once
         jobs_to_check = []
         resubmit_jobs = []
diff --git a/lib/galaxy/jobs/runners/pulsar.py b/lib/galaxy/jobs/runners/pulsar.py
index 003af5a4d3c7..3f60d04a77a0 100644
--- a/lib/galaxy/jobs/runners/pulsar.py
+++ b/lib/galaxy/jobs/runners/pulsar.py
@@ -50,6 +50,7 @@
     AsynchronousJobState,
     JobState,
 )
+from galaxy.model.base import check_database_connection
 from galaxy.tool_util.deps import dependencies
 from galaxy.util import (
     galaxy_directory,
@@ -273,6 +274,7 @@ def url_to_destination(self, url):
         return JobDestination(runner="pulsar", params=url_to_destination_params(url))
 
     def check_watched_item(self, job_state):
+        check_database_connection(self.app.model.session())
         if self.use_mq:
             # Might still need to check pod IPs.
             job_wrapper = job_state.job_wrapper
@@ -971,6 +973,7 @@ def __async_update(self, full_status):
         galaxy_job_id = None
         remote_job_id = None
         try:
+            check_database_connection(self.sa_session)
             remote_job_id = full_status["job_id"]
             if len(remote_job_id) == 32:
                 # It is a UUID - assign_ids = uuid in destination params...
diff --git a/lib/galaxy/managers/base.py b/lib/galaxy/managers/base.py
index 7cc3510bbeab..cbeb236b11d8 100644
--- a/lib/galaxy/managers/base.py
+++ b/lib/galaxy/managers/base.py
@@ -54,7 +54,10 @@
     model,
 )
 from galaxy.model import tool_shed_install
-from galaxy.model.base import transaction
+from galaxy.model.base import (
+    check_database_connection,
+    transaction,
+)
 from galaxy.schema import ValueFilterQueryParams
 from galaxy.schema.storage_cleaner import (
     CleanableItemsSummary,
@@ -310,6 +313,7 @@ def _one_with_recast_errors(self, query: Query) -> Query:
         :raises exceptions.ObjectNotFound: if no model is found
         :raises exceptions.InconsistentDatabase: if more than one model is found
         """
+        check_database_connection(self.session())
         # overridden to raise serializable errors
         try:
             return query.one()
diff --git a/lib/galaxy/model/base.py b/lib/galaxy/model/base.py
index 5e98e0625ed2..249ddb4fde93 100644
--- a/lib/galaxy/model/base.py
+++ b/lib/galaxy/model/base.py
@@ -59,6 +59,19 @@ def transaction(session: Union[scoped_session, Session, "SessionlessContext"]):
         yield
 
 
+def check_database_connection(session):
+    """
+    In the event of a database disconnect, if there exists an active database
+    transaction, that transaction becomes invalidated. Accessing the database
+    will raise sqlalchemy.exc.PendingRollbackError. This handles this situation
+    by rolling back the invalidated transaction.
+    Ref: https://docs.sqlalchemy.org/en/14/errors.html#can-t-reconnect-until-invalid-transaction-is-rolled-back
+    """
+    if session and session.connection().invalidated:
+        log.error("Database transaction rolled back due to invalid state.")
+        session.rollback()
+
+
 # TODO: Refactor this to be a proper class, not a bunch.
 class ModelMapping(Bunch):
     def __init__(self, model_modules, engine):
diff --git a/lib/galaxy/tool_util/toolbox/views/static.py b/lib/galaxy/tool_util/toolbox/views/static.py
index 2eba8eef3f4b..7972cc84e835 100644
--- a/lib/galaxy/tool_util/toolbox/views/static.py
+++ b/lib/galaxy/tool_util/toolbox/views/static.py
@@ -105,7 +105,7 @@ def definition_with_items_to_panel(definition, allow_sections: bool = True, item
                     f"Failed to find matching section for (id, name) = ({section_def.id}, {section_def.name})"
                 )
                 continue
-            section = closest_section.copy()
+            section = closest_section.copy(merge_tools=True)
             if section_def.id is not None:
                 section.id = section_def.id
             if section_def.name is not None:
diff --git a/lib/galaxy/workflow/modules.py b/lib/galaxy/workflow/modules.py
index 2ff8fe290e0b..dc82b1d8c70d 100644
--- a/lib/galaxy/workflow/modules.py
+++ b/lib/galaxy/workflow/modules.py
@@ -126,6 +126,8 @@ class ConditionalStepWhen(BooleanToolParameter):
 
 def to_cwl(value, hda_references, step):
     element_identifier = None
+    if isinstance(value, model.HistoryDatasetCollectionAssociation):
+        value = value.collection
     if isinstance(value, model.DatasetCollectionElement) and value.hda:
         element_identifier = value.element_identifier
         value = value.hda
@@ -155,14 +157,13 @@ def to_cwl(value, hda_references, step):
             properties, value.dataset.created_from_basename or element_identifier or value.name
         )
         return properties
-    elif hasattr(value, "collection"):
-        collection = value.collection
-        if collection.collection_type == "list":
-            return [to_cwl(dce, hda_references=hda_references, step=step) for dce in collection.dataset_elements]
+    elif isinstance(value, model.DatasetCollection):
+        if value.collection_type == "list":
+            return [to_cwl(dce, hda_references=hda_references, step=step) for dce in value.dataset_elements]
         else:
             # Could be record or nested lists
             rval = {}
-            for element in collection.elements:
+            for element in value.elements:
                 rval[element.element_identifier] = to_cwl(
                     element.element_object, hda_references=hda_references, step=step
                 )
diff --git a/test/unit/workflows/test_modules.py b/test/unit/workflows/test_modules.py
index 27955d3d4f96..8e0df5f81624 100644
--- a/test/unit/workflows/test_modules.py
+++ b/test/unit/workflows/test_modules.py
@@ -261,6 +261,19 @@ def test_to_cwl():
     assert hda_references == hdas
 
 
+def test_to_cwl_nested_collection():
+    hda = model.HistoryDatasetAssociation(create_dataset=True, flush=False)
+    hda.dataset.state = model.Dataset.states.OK
+    dc_inner = model.DatasetCollection(collection_type="list")
+    model.DatasetCollectionElement(collection=dc_inner, element_identifier="inner", element=hda)
+    dc_outer = model.DatasetCollection(collection_type="list:list")
+    model.DatasetCollectionElement(collection=dc_outer, element_identifier="outer", element=dc_inner)
+    hdca = model.HistoryDatasetCollectionAssociation(name="the collection", collection=dc_outer)
+    result = modules.to_cwl(hdca, [], model.WorkflowStep())
+    assert result["outer"][0]["class"] == "File"
+    assert result["outer"][0]["basename"] == "inner"
+
+
 class MapOverTestCase(NamedTuple):
     data_input: str
     step_input_def: Union[str, List[str]]
diff --git a/tools/stats/filtering_1_1_0.xml b/tools/stats/filtering_1_1_0.xml
new file mode 100644
index 000000000000..1a20cf40c947
--- /dev/null
+++ b/tools/stats/filtering_1_1_0.xml
@@ -0,0 +1,103 @@
+
+    data on any column using simple expressions
+
+        operation_0335
+
+
+        python '$__tool_directory__/filtering.py' '$input' '$out_file1' '$inputs' ${input.metadata.columns} "${input.metadata.column_types}" $header_lines
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. class:: warningmark
+
+Double equal signs, ==, must be used as *"equal to"* (e.g., **c1 == 'chr22'**)
+
+.. class:: infomark
+
+**TIP:** Attempting to apply a filtering condition may throw exceptions if the data type (e.g., string, integer) in every line of the columns being filtered is not appropriate for the condition (e.g., attempting certain numerical calculations on strings). If an exception is thrown when applying the condition to a line, that line is skipped as invalid for the filter condition. The number of invalid skipped lines is documented in the resulting history item as a "Condition/data issue".
+
+.. class:: infomark
+
+**TIP:** If your data is not TAB delimited, use *Text Manipulation->Convert*
+
+-----
+
+**Syntax**
+
+The filter tool allows you to restrict the dataset using simple conditional statements.
+
+- Columns are referenced with **c** and a **number**. For example, **c1** refers to the first column of a tab-delimited file
+- Make sure that multi-character operators contain no white space ( e.g., **<=** is valid while **< =** is not valid )
+- When using 'equal-to' operator **double equal sign '==' must be used** ( e.g., **c1=='chr1'** )
+- Non-numerical values must be included in single or double quotes ( e.g., **c6=='+'** )
+- Filtering condition can include logical operators, but **make sure operators are all lower case** ( e.g., **(c1!='chrX' and c1!='chrY') or not c6=='+'** )
+
+-----
+
+**Example**
+
+- **c1=='chr1'** selects lines in which the first column is chr1
+- **c3-c2<100*c4** selects lines where subtracting column 3 from column 2 is less than the value of column 4 times 100
+- **len(c2.split(',')) < 4** will select lines where the second column has less than four comma separated elements
+- **c2>=1** selects lines in which the value of column 2 is greater than or equal to 1
+- Numbers should not contain commas - **c2<=44,554,350** will not work, but **c2<=44554350** will
+- Some words in the data can be used, but must be single or double quoted ( e.g., **c3=='exon'** )
+
+
+
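
Note on the check_database_connection() call sites added above (job handler loop, Pulsar runner, manager base): a database disconnect invalidates whatever transaction the session had open, and every later use of that session raises sqlalchemy.exc.PendingRollbackError until the transaction is rolled back. The snippet below is not part of the diff; it is a minimal, self-contained sketch of that failure mode under SQLAlchemy 1.4+, using an in-memory SQLite engine as an illustrative stand-in for Galaxy's database and inlining the same guard the diff adds as galaxy.model.base.check_database_connection().

    from sqlalchemy import create_engine, text
    from sqlalchemy.orm import Session

    engine = create_engine("sqlite://")  # illustrative stand-in for Galaxy's database

    with Session(engine) as session:
        session.execute(text("SELECT 1"))  # opens a transaction and checks out a connection
        session.connection().invalidate()  # simulate the server dropping that connection

        # Same logic the diff adds as check_database_connection(session): an invalidated
        # connection means the current transaction is unusable, so roll it back before
        # issuing any further queries.
        if session and session.connection().invalidated:
            session.rollback()

        session.execute(text("SELECT 1"))  # the session is usable again

Running the guard at the top of each polling loop or lookup means a transient disconnect costs one rolled-back transaction instead of repeated PendingRollbackError failures on every subsequent query.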