From de34121ae1f74f72ce587af075b32c94a0fea04a Mon Sep 17 00:00:00 2001 From: Matthias Bernt Date: Mon, 4 Mar 2024 16:03:30 +0100 Subject: [PATCH 001/669] add some admin facing docs on data tables --- doc/source/admin/data_tables.md | 93 +++++++++++++++++++++++++++++++++ doc/source/admin/index.rst | 1 + 2 files changed, 94 insertions(+) create mode 100644 doc/source/admin/data_tables.md diff --git a/doc/source/admin/data_tables.md b/doc/source/admin/data_tables.md new file mode 100644 index 000000000000..a92291eef0cc --- /dev/null +++ b/doc/source/admin/data_tables.md @@ -0,0 +1,93 @@ +# Tool data + +Galaxy stores tool data in the path defined by `tool_data_path` (by default `tool-data/`). +It's possible to to separate tool data of shed installed tools by setting (`shed_tool_data_path`). + +Tool data consists of: + +1. the actual data +2. a tool data table +3. tool data config files + +## Tool data + +This is the actual data that is stored by default in `tool_data_path`. It may be favorable to store the +actual tool data in a separate folder. For manually managed tool data this can be achieved by simply +storing the data in another folder. For data that is added by data managers this can be achieved by +setting `galaxy_data_manager_data_path`. + +## Tool data tables + +In order to make tool data usable from Galaxy tools so called tool data tables are used. +Those are tabular (by default tab separated) files with the extension `.loc`. +Besides the actual paths the entries can contain, e.g. IDs, names, or other +that can be used in tools to select reference data. The paths should be given as absolute paths, +but can also be given relative to the Galaxy root dir. +By default tool data tables are installed in `tool_data_path` (where also built-in tool data tables +are stored). By setting `shed_tool_data_path` this can be separated. 
+ +## Tool data table config + +The tool data tables that should be used in a Galaxy instance are configured +using tool data table config files. In addition these files contain some +metadata. + +Tool data table config files are XML files listing tool data table configurations: + +```xml + + .... + +``` + +A tool data table configuration looks like this + +```xml + + value, dbkey, name, path + +
+``` + +- `table`: `name`, `comment_char` (default `"#"`), `separator` (default `"\t"`), `allow_duplicate_entries` (default `True`), `empty_field_value` (default `""`) +- `columns`: a comma separated list of column names +- `file`: `path` (alternatively `url`, `from_config`) + +Tool data table definitions for tools installed from a toolshed have an additional +element `tool_shed_repository` and sub-tags `tool_shed` +`repository_name`, `repository_owner`, `installed_changeset_revision`, e.g.: + +```xml + + value, name, date, path + + + toolshed.g2.bx.psu.edu + plasmidfinder + iuc + 7075b7a5441b + +
+``` + +The file path points to a data table (i.e. a `.loc` file) and can be given +relative (to the `tool_data_path`) or absolute. If a given relative path does +not exist also the base name is checked (many tools use something like +`tool-data/xyz.loc` and store example `loc` files in a `tool-data/` directory in +the tool repository). +Currently also `.loc.sample` may be used in case the specified `.loc` is absent. + +Tool data table config files: + +- `tool_data_table_config_path`: by default `tool_data_table_conf.xml` in Galaxy's `config/` directory. +- `shed_tool_data_table_config`: by default `shed_tool_data_table_conf.xml` in +Galaxy's `config/` directory. This file lists all tool data tables of tools +installed from a toolshed. Note that the entries are versioned, i.e. there is a +separate entry for each tool and tool version. These content of the tool data +tables are merged when they are loaded. + +When a new tool is installed that uses a data table a new entry is added to +`shed_tool_data_table_config` and a `.loc` file is placed in a versioned +subdirectory in `tool_data_path` (in a subdirectory that has the name of the +toolshed). By default thus is `tool-data/toolshed.g2.bx.psu.edu/`. Note that +these directories will also contain tool data config files, but they are unused. 
diff --git a/doc/source/admin/index.rst b/doc/source/admin/index.rst index 2ae8d1af72a0..61ed7db63ba4 100644 --- a/doc/source/admin/index.rst +++ b/doc/source/admin/index.rst @@ -20,6 +20,7 @@ This documentation is in the midst of being ported and unified based on resource job_metrics authentication tool_panel + data_tables mq dependency_resolvers container_resolvers From f15426c56b83436effd93bfe54f5612561dfdc6e Mon Sep 17 00:00:00 2001 From: M Bernt Date: Tue, 5 Mar 2024 08:28:56 +0100 Subject: [PATCH 002/669] Improve wording Co-authored-by: Nicola Soranzo --- doc/source/admin/data_tables.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/doc/source/admin/data_tables.md b/doc/source/admin/data_tables.md index a92291eef0cc..8af597814119 100644 --- a/doc/source/admin/data_tables.md +++ b/doc/source/admin/data_tables.md @@ -1,7 +1,7 @@ # Tool data Galaxy stores tool data in the path defined by `tool_data_path` (by default `tool-data/`). -It's possible to to separate tool data of shed installed tools by setting (`shed_tool_data_path`). +It's possible to separate tool data of ToolShed-installed tools by setting `shed_tool_data_path`. Tool data consists of: @@ -20,13 +20,13 @@ setting `galaxy_data_manager_data_path`. In order to make tool data usable from Galaxy tools so called tool data tables are used. Those are tabular (by default tab separated) files with the extension `.loc`. -Besides the actual paths the entries can contain, e.g. IDs, names, or other +Besides the actual paths, the entries can contain IDs, names, or other information that can be used in tools to select reference data. The paths should be given as absolute paths, but can also be given relative to the Galaxy root dir. By default tool data tables are installed in `tool_data_path` (where also built-in tool data tables are stored). By setting `shed_tool_data_path` this can be separated. 
-## Tool data table config +## Tool data table config files The tool data tables that should be used in a Galaxy instance are configured using tool data table config files. In addition these files contain some @@ -89,5 +89,5 @@ tables are merged when they are loaded. When a new tool is installed that uses a data table a new entry is added to `shed_tool_data_table_config` and a `.loc` file is placed in a versioned subdirectory in `tool_data_path` (in a subdirectory that has the name of the -toolshed). By default thus is `tool-data/toolshed.g2.bx.psu.edu/`. Note that -these directories will also contain tool data config files, but they are unused. +toolshed). By default this is `tool-data/toolshed.g2.bx.psu.edu/`. Note that +these directories will also contain tool data table config files, but they are unused. From c964d89ba3195abb6d70d215ba5b85f29dd38bd4 Mon Sep 17 00:00:00 2001 From: Matthias Bernt Date: Wed, 6 Mar 2024 13:15:21 +0100 Subject: [PATCH 003/669] try to use terms properly loc files and tool data tables should now be used as they should be --- doc/source/admin/data_tables.md | 53 ++++++++++++++++++++------------- 1 file changed, 33 insertions(+), 20 deletions(-) diff --git a/doc/source/admin/data_tables.md b/doc/source/admin/data_tables.md index a92291eef0cc..9b88787fcd0d 100644 --- a/doc/source/admin/data_tables.md +++ b/doc/source/admin/data_tables.md @@ -6,8 +6,19 @@ It's possible to to separate tool data of shed installed tools by setting (`shed Tool data consists of: 1. the actual data -2. a tool data table -3. tool data config files +2. one or more so called `loc` files +3. entries in a tool data table (config) file + + +## History + +In order to understand the naming and structure of these three components it might be of advantage +to look in the history. Tool data was organized in tabular `loc` that contained metadata and paths +of the data. 
Those files were were installed with the tool and could be accessed with the +[`from_file`](https://docs.galaxyproject.org/en/master/dev/schema.html#from-file) mechanism from tools. +Since each tool version had it's own `loc` file the maintenance was difficult. With tool data tables +an additional abstraction layer was introduced that is used from tools via +[`from_datatable`](https://docs.galaxyproject.org/en/master/dev/schema.html#from-data-table). ## Tool data @@ -16,20 +27,20 @@ actual tool data in a separate folder. For manually managed tool data this can b storing the data in another folder. For data that is added by data managers this can be achieved by setting `galaxy_data_manager_data_path`. -## Tool data tables +## `loc` files -In order to make tool data usable from Galaxy tools so called tool data tables are used. +In order to make tool data usable from Galaxy tools so called `loc` files are used. Those are tabular (by default tab separated) files with the extension `.loc`. -Besides the actual paths the entries can contain, e.g. IDs, names, or other +Besides the actual paths the entries can contain, e.g. IDs, names, or other metadata that can be used in tools to select reference data. The paths should be given as absolute paths, but can also be given relative to the Galaxy root dir. -By default tool data tables are installed in `tool_data_path` (where also built-in tool data tables +By default `loc` files are installed in `tool_data_path` (where also built-in `loc` files are stored). By setting `shed_tool_data_path` this can be separated. -## Tool data table config +## Tool data tables -The tool data tables that should be used in a Galaxy instance are configured -using tool data table config files. In addition these files contain some +The tool data tables that should be used in a Galaxy instance are listed +in tool data table config files. In addition these contain some metadata. 
Tool data table config files are XML files listing tool data table configurations: @@ -40,7 +51,7 @@ Tool data table config files are XML files listing tool data table configuration ``` -A tool data table configuration looks like this +An entry for a tool data looks like this ```xml @@ -53,7 +64,7 @@ A tool data table configuration looks like this - `columns`: a comma separated list of column names - `file`: `path` (alternatively `url`, `from_config`) -Tool data table definitions for tools installed from a toolshed have an additional +Tool data table entries for tools installed from a toolshed have an additional element `tool_shed_repository` and sub-tags `tool_shed` `repository_name`, `repository_owner`, `installed_changeset_revision`, e.g.: @@ -70,24 +81,26 @@ element `tool_shed_repository` and sub-tags `tool_shed`
``` -The file path points to a data table (i.e. a `.loc` file) and can be given -relative (to the `tool_data_path`) or absolute. If a given relative path does -not exist also the base name is checked (many tools use something like -`tool-data/xyz.loc` and store example `loc` files in a `tool-data/` directory in -the tool repository). +The file path points to a `loc` file and can be given relative (to the +`tool_data_path`) or absolute. If a given relative path does not exist also the +base name is checked (many tools use something like `tool-data/xyz.loc` and +store example `loc` files in a `tool-data/` directory in the tool repository). Currently also `.loc.sample` may be used in case the specified `.loc` is absent. -Tool data table config files: +Galaxy uses two tool data table config files: - `tool_data_table_config_path`: by default `tool_data_table_conf.xml` in Galaxy's `config/` directory. - `shed_tool_data_table_config`: by default `shed_tool_data_table_conf.xml` in Galaxy's `config/` directory. This file lists all tool data tables of tools installed from a toolshed. Note that the entries are versioned, i.e. there is a -separate entry for each tool and tool version. These content of the tool data -tables are merged when they are loaded. +separate entry for each tool and tool version. + +The tool data table config files can (and do) contain multiple entries for the same data table +(identified by the same name). These content of the corresponding `loc` files are merged when +they are loaded. When a new tool is installed that uses a data table a new entry is added to -`shed_tool_data_table_config` and a `.loc` file is placed in a versioned +`shed_tool_data_table_config` and a `loc` file is placed in a versioned subdirectory in `tool_data_path` (in a subdirectory that has the name of the toolshed). By default thus is `tool-data/toolshed.g2.bx.psu.edu/`. Note that these directories will also contain tool data config files, but they are unused. 
From 101087f9535aba7ee7265ac26b67698b51b06256 Mon Sep 17 00:00:00 2001 From: Alireza Heidari Date: Fri, 8 Mar 2024 16:37:54 +0100 Subject: [PATCH 004/669] =?UTF-8?q?=F0=9F=9B=A0=EF=B8=8F:=20refactor=20dat?= =?UTF-8?q?asets=20states=20to=20typescript=20`client/src/mvc/dataset/stat?= =?UTF-8?q?es`?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- client/src/mvc/dataset/{states.js => states.ts} | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) rename client/src/mvc/dataset/{states.js => states.ts} (95%) diff --git a/client/src/mvc/dataset/states.js b/client/src/mvc/dataset/states.ts similarity index 95% rename from client/src/mvc/dataset/states.js rename to client/src/mvc/dataset/states.ts index 4e0702c9cdbb..dd5b37d7fb07 100644 --- a/client/src/mvc/dataset/states.js +++ b/client/src/mvc/dataset/states.ts @@ -2,7 +2,7 @@ /** Map of possible HDA/collection/job states to their string equivalents. * A port of galaxy.model.Dataset.states. */ -var STATES = { +const STATES = { // NOT ready states /** is uploading and not ready */ UPLOAD: "upload", @@ -34,6 +34,9 @@ var STATES = { DEFERRED: "deferred", /** the tool producing this dataset failed */ ERROR: "error", + + READY_STATES: [] as string[], + NOT_READY_STATES: [] as string[], }; STATES.READY_STATES = [ From 830da7bf740f72511652dcf8ec37088035392cf4 Mon Sep 17 00:00:00 2001 From: Alireza Heidari Date: Fri, 8 Mar 2024 16:41:00 +0100 Subject: [PATCH 005/669] =?UTF-8?q?=F0=9F=9B=A0=EF=B8=8F:=20refactor=20`Un?= =?UTF-8?q?pairedDatasetElementView`=20to=20use=20composition=20API=20and?= =?UTF-8?q?=20`typeScript`?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../UnpairedDatasetElementView.vue | 50 ++++++------------- 1 file changed, 15 insertions(+), 35 deletions(-) diff --git a/client/src/components/Collections/UnpairedDatasetElementView.vue b/client/src/components/Collections/UnpairedDatasetElementView.vue index 
b02575a2bb05..91494a082fea 100644 --- a/client/src/components/Collections/UnpairedDatasetElementView.vue +++ b/client/src/components/Collections/UnpairedDatasetElementView.vue @@ -1,41 +1,21 @@ + + - - - - From 4a99f97ae7352513a103d0ccb96b922f9274769e Mon Sep 17 00:00:00 2001 From: Alireza Heidari Date: Fri, 8 Mar 2024 16:42:35 +0100 Subject: [PATCH 006/669] =?UTF-8?q?=F0=9F=9B=A0=EF=B8=8F:=20refactor=20`Pa?= =?UTF-8?q?iredElementView`=20to=20use=20composition=20API=20and=20`typeSc?= =?UTF-8?q?ript`?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../Collections/PairedElementView.vue | 94 ++++++++++--------- 1 file changed, 49 insertions(+), 45 deletions(-) diff --git a/client/src/components/Collections/PairedElementView.vue b/client/src/components/Collections/PairedElementView.vue index 8c0a3998d11d..181bfb67ece9 100644 --- a/client/src/components/Collections/PairedElementView.vue +++ b/client/src/components/Collections/PairedElementView.vue @@ -1,58 +1,62 @@ + + - From 2e71ae701b5ac6ec5bb5f288ebb2a40de4cda43d Mon Sep 17 00:00:00 2001 From: Alireza Heidari Date: Fri, 8 Mar 2024 16:45:02 +0100 Subject: [PATCH 007/669] =?UTF-8?q?=F0=9F=9B=A0=EF=B8=8F:=20refactor=20`Li?= =?UTF-8?q?stCollectionCreator`=20to=20use=20composition=20API=20and=20`ty?= =?UTF-8?q?peScript`?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../Collections/ListCollectionCreator.vue | 671 ++++++++++-------- 1 file changed, 365 insertions(+), 306 deletions(-) diff --git a/client/src/components/Collections/ListCollectionCreator.vue b/client/src/components/Collections/ListCollectionCreator.vue index 7f8896e221e7..e6cb37846cba 100644 --- a/client/src/components/Collections/ListCollectionCreator.vue +++ b/client/src/components/Collections/ListCollectionCreator.vue @@ -1,67 +1,340 @@ + + + - + - - - From 315767eaa20a7783b1bcf7438e5971ac4aacf582 Mon Sep 17 00:00:00 2001 From: Alireza Heidari Date: Mon, 11 Mar 
2024 16:31:14 +0100 Subject: [PATCH 009/669] =?UTF-8?q?=F0=9F=9B=A0=EF=B8=8F:=20import=20missi?= =?UTF-8?q?ng=20icon=20to=20`PairedElementView`?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/components/Collections/PairedElementView.vue | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/client/src/components/Collections/PairedElementView.vue b/client/src/components/Collections/PairedElementView.vue index 181bfb67ece9..364832a22db2 100644 --- a/client/src/components/Collections/PairedElementView.vue +++ b/client/src/components/Collections/PairedElementView.vue @@ -1,10 +1,15 @@ + + - + - - From 8630e3b343d318f856933c4b6282d9bf5dceafdc Mon Sep 17 00:00:00 2001 From: mvdbeek Date: Thu, 4 Apr 2024 18:38:35 +0200 Subject: [PATCH 352/669] Fix lost reports when switching workflow versions We serialize workflow.reports, but we were only setting it on the workflow instance if the markdown editor emits it. Fixes a part of https://github.com/galaxyproject/galaxy/issues/17903 --- client/src/components/Workflow/Editor/Index.vue | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/client/src/components/Workflow/Editor/Index.vue b/client/src/components/Workflow/Editor/Index.vue index a283e5b863b2..b656ca1e69f8 100644 --- a/client/src/components/Workflow/Editor/Index.vue +++ b/client/src/components/Workflow/Editor/Index.vue @@ -145,7 +145,7 @@ 0; From 95e26c34bd94bdef1e6262e0b18d991ae6197c35 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 4 Apr 2024 17:21:15 +0000 Subject: [PATCH 353/669] Bump undici from 5.28.3 to 5.28.4 in /client Bumps [undici](https://github.com/nodejs/undici) from 5.28.3 to 5.28.4. - [Release notes](https://github.com/nodejs/undici/releases) - [Commits](https://github.com/nodejs/undici/compare/v5.28.3...v5.28.4) --- updated-dependencies: - dependency-name: undici dependency-type: indirect ... 
Signed-off-by: dependabot[bot] --- client/yarn.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/client/yarn.lock b/client/yarn.lock index 375a92d59564..c0010d4f10fc 100644 --- a/client/yarn.lock +++ b/client/yarn.lock @@ -11346,9 +11346,9 @@ undertaker@^1.2.1: undertaker-registry "^1.0.0" undici@^5.12.0: - version "5.28.3" - resolved "https://registry.yarnpkg.com/undici/-/undici-5.28.3.tgz#a731e0eff2c3fcfd41c1169a869062be222d1e5b" - integrity sha512-3ItfzbrhDlINjaP0duwnNsKpDQk3acHI3gVJ1z4fmwMK31k5G9OVIAMLSIaP6w4FaGkaAkN6zaQO9LUvZ1t7VA== + version "5.28.4" + resolved "https://registry.yarnpkg.com/undici/-/undici-5.28.4.tgz#6b280408edb6a1a604a9b20340f45b422e373068" + integrity sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g== dependencies: "@fastify/busboy" "^2.0.0" From ad6af8d1b299dcf8f4d58dba43fe530c182ff160 Mon Sep 17 00:00:00 2001 From: Ahmed Awan Date: Thu, 4 Apr 2024 15:33:55 -0500 Subject: [PATCH 354/669] [24.0] Do not save workflow on Run without user confirmation This adds a modal when the user runs a workflow with changes, that asks for user confirmation on whether to proceed without saving changes or to save changes and then proceed. Currently, we _always_ save the workflow when it is run (even if there are no changes at all). 
Fixes https://github.com/galaxyproject/galaxy/issues/17903 --- .../src/components/Workflow/Editor/Index.vue | 20 +++- .../Workflow/Editor/SaveChangesModal.vue | 97 +++++++++++++++++++ 2 files changed, 113 insertions(+), 4 deletions(-) create mode 100644 client/src/components/Workflow/Editor/SaveChangesModal.vue diff --git a/client/src/components/Workflow/Editor/Index.vue b/client/src/components/Workflow/Editor/Index.vue index a283e5b863b2..ffb147416fc1 100644 --- a/client/src/components/Workflow/Editor/Index.vue +++ b/client/src/components/Workflow/Editor/Index.vue @@ -13,6 +13,7 @@ @onRefactor="onRefactor" @onShow="hideModal" /> + import { Toast } from "composables/toast"; import { storeToRefs } from "pinia"; -import Vue, { computed, onUnmounted, ref, unref } from "vue"; +import Vue, { computed, nextTick, onUnmounted, ref, unref } from "vue"; import { getUntypedWorkflowParameters } from "@/components/Workflow/Editor/modules/parameters"; import { ConfirmDialog } from "@/composables/confirmDialog"; @@ -192,6 +193,7 @@ import WorkflowLint from "./Lint.vue"; import MessagesModal from "./MessagesModal.vue"; import WorkflowOptions from "./Options.vue"; import RefactorConfirmationModal from "./RefactorConfirmationModal.vue"; +import SaveChangesModal from "./SaveChangesModal.vue"; import StateUpgradeModal from "./StateUpgradeModal.vue"; import WorkflowGraph from "./WorkflowGraph.vue"; import MarkdownEditor from "@/components/Markdown/MarkdownEditor.vue"; @@ -204,6 +206,7 @@ export default { components: { MarkdownEditor, FlexPanel, + SaveChangesModal, StateUpgradeModal, ToolPanel, FormDefault, @@ -337,6 +340,8 @@ export default { showSaveAsModal: false, transform: { x: 0, y: 0, k: 1 }, graphOffset: { left: 0, top: 0, width: 0, height: 0 }, + showSaveChangesModal: false, + navUrl: "", }; }, computed: { @@ -677,14 +682,21 @@ export default { const runUrl = `/workflows/run?id=${this.id}`; this.onNavigate(runUrl); }, - async onNavigate(url) { + async onNavigate(url, forceSave 
= false, ignoreChanges = false) { if (this.isNewTempWorkflow) { await this.onCreate(); - } else { - await this.onSave(true); + } else if (this.hasChanges && !forceSave && !ignoreChanges) { + // if there are changes, prompt user to save or discard or cancel + this.navUrl = url; + this.showSaveChangesModal = true; + return; + } else if (forceSave) { + // when forceSave is true, save the workflow before navigating + await this.onSave(); } this.hasChanges = false; + await nextTick(); this.$router.push(url); }, onSave(hideProgress = false) { diff --git a/client/src/components/Workflow/Editor/SaveChangesModal.vue b/client/src/components/Workflow/Editor/SaveChangesModal.vue new file mode 100644 index 000000000000..9a2b5bbda2d0 --- /dev/null +++ b/client/src/components/Workflow/Editor/SaveChangesModal.vue @@ -0,0 +1,97 @@ + + + From a6ddaac915da6f073f8b4ec04e11ad1073ad51ad Mon Sep 17 00:00:00 2001 From: davelopez <46503462+davelopez@users.noreply.github.com> Date: Fri, 5 Apr 2024 11:31:11 +0200 Subject: [PATCH 355/669] Run make config-rebuild --- doc/source/admin/galaxy_options.rst | 41 +++++++++++-- lib/galaxy/config/sample/galaxy.yml.sample | 67 +++++++++++++--------- 2 files changed, 75 insertions(+), 33 deletions(-) diff --git a/doc/source/admin/galaxy_options.rst b/doc/source/admin/galaxy_options.rst index 2100c81f8cae..f3021dc25d48 100644 --- a/doc/source/admin/galaxy_options.rst +++ b/doc/source/admin/galaxy_options.rst @@ -1453,7 +1453,7 @@ This option has no effect if the file specified by object_store_config_file exists. Otherwise, if this option is set, it overrides any other objectstore settings. 
- The syntax, available instrumenters, and documentation of their + The syntax, available storage plugins, and documentation of their options is explained in detail in the object store sample configuration file, `object_store_conf.sample.yml` :Default: ``None`` @@ -2606,8 +2606,20 @@ :Description: The upload store is a temporary directory in which files uploaded - by the tus middleware or server will be placed. Defaults to - new_file_path if not set. + by the tus middleware or server for user uploads will be placed. + Defaults to new_file_path if not set. +:Default: ``None`` +:Type: str + + +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +``tus_upload_store_job_files`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +:Description: + The upload store is a temporary directory in which files uploaded + by the tus middleware or server for remote job files (Pulsar) will + be placed. Defaults to tus_upload_store if not set. :Default: ``None`` :Type: str @@ -4030,6 +4042,23 @@ :Type: str +~~~~~~~~~~~~~~~~~~~~~ +``oidc_scope_prefix`` +~~~~~~~~~~~~~~~~~~~~~ + +:Description: + Sets the prefix for OIDC scopes specific to this Galaxy instance. + If an API call is made against this Galaxy instance using an OIDC + bearer token, any scopes must be prefixed with this value e.g. + https://galaxyproject.org/api. More concretely, to request all + permissions that the user has, the scope would have to be + specified as ":*". e.g "https://galaxyproject.org/api:*". + Currently, only * is recognised as a valid scope, and future + iterations may provide more fine-grained scopes. 
+:Default: ``https://galaxyproject.org/api`` +:Type: str + + ~~~~~~~~~~~~~~~~~~~~ ``auth_config_file`` ~~~~~~~~~~~~~~~~~~~~ @@ -5439,9 +5468,9 @@ :Type: str -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -``help_forum_tool_panel_integration_enabled`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +``enable_help_forum_tool_panel_integration`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Description: Enable the integration of the Galaxy Help Forum in the tool panel. diff --git a/lib/galaxy/config/sample/galaxy.yml.sample b/lib/galaxy/config/sample/galaxy.yml.sample index 96c2a84e0bda..21b325bdb490 100644 --- a/lib/galaxy/config/sample/galaxy.yml.sample +++ b/lib/galaxy/config/sample/galaxy.yml.sample @@ -1,21 +1,21 @@ # Galaxy is configured by default to be usable in a single-user development # environment. To tune the application for a multi-user production # environment, see the documentation at: -# +# # https://docs.galaxyproject.org/en/master/admin/production.html -# +# # Throughout this sample configuration file, except where stated otherwise, # uncommented values override the default if left unset, whereas commented # values are set to the default value. Relative paths are relative to the root # Galaxy directory. -# +# # Examples of many of these options are explained in more detail in the Galaxy # Community Hub. -# +# # https://galaxyproject.org/admin/config -# +# # Config hackers are encouraged to check there before asking for help. -# +# # Configuration for Gravity process manager. # ``uwsgi:`` section will be ignored if Galaxy is started via Gravity commands (e.g ``./run.sh``, ``galaxy`` or ``galaxyctl``). gravity: @@ -181,7 +181,7 @@ gravity: # enable: false # gx-it-proxy version - # version: '>=0.0.5' + # version: '>=0.0.6' # Public-facing IP of the proxy # ip: localhost @@ -248,6 +248,14 @@ gravity: # Must match ``tus_upload_store`` setting in ``galaxy:`` section. 
# upload_dir: + # Value of tusd -hooks-httpd option + # + # the default of is suitable for using tusd for Galaxy uploads and should not be changed unless you are using tusd for + # other purposes such as Pulsar staging. + # + # The value of galaxy_infrastructure_url is automatically prepended if the option starts with a `/` + # hooks_http: /api/upload/hooks + # Comma-separated string of enabled tusd hooks. # # Leave at the default value to require authorization at upload creation time. @@ -333,15 +341,9 @@ gravity: # names. # environment: {} - # Configure dynamic handlers in this section. Below is a simple example + # Configure dynamic handlers in this section. # See https://docs.galaxyproject.org/en/latest/admin/scaling.html#dynamically-defined-handlers for details. - #handlers: - # handler: - # processes: 3 - # pools: - # - job-handlers - # - workflow-schedulers - + # handlers: {} galaxy: # The directory that will be prepended to relative paths in options @@ -1030,7 +1032,7 @@ galaxy: # This option has no effect if the file specified by # object_store_config_file exists. Otherwise, if this option is set, # it overrides any other objectstore settings. - # The syntax, available instrumenters, and documentation of their + # The syntax, available storage plugins, and documentation of their # options is explained in detail in the object store sample # configuration file, `object_store_conf.sample.yml` #object_store_config: null @@ -1067,6 +1069,13 @@ galaxy: # for that object store entry. #object_store_cache_size: -1 + # Set this to true to indicate in the UI that a user's object store + # selection isn't simply a "preference" that job destinations often + # respect but in fact will always be respected. This should be set to + # true to simplify the UI as long as job destinations never override + # 'object_store_id's for a jobs. 
+ #object_store_always_respect_user_selection: false + # What Dataset attribute is used to reference files in an ObjectStore # implementation, this can be 'uuid' or 'id'. The default will depend # on how the object store is configured, starting with 20.05 Galaxy @@ -1304,9 +1313,6 @@ galaxy: # The value of this option will be resolved with respect to # . #interactivetools_map: interactivetools_map.sqlite - # Note: the following config should still be used due to lack of - # support of data_dir resolution in gx-it-proxy and gravity: - #interactivetools_map: database/interactivetools_map.sqlite # Prefix to use in the formation of the subdomain or path for # interactive tools @@ -1547,10 +1553,15 @@ galaxy: #nginx_upload_job_files_path: null # The upload store is a temporary directory in which files uploaded by - # the tus middleware or server will be placed. Defaults to - # new_file_path if not set. + # the tus middleware or server for user uploads will be placed. + # Defaults to new_file_path if not set. #tus_upload_store: null + # The upload store is a temporary directory in which files uploaded by + # the tus middleware or server for remote job files (Pulsar) will be + # placed. Defaults to tus_upload_store if not set. + #tus_upload_store_job_files: null + # Galaxy can upload user files in chunks without using nginx. Enable # the chunk uploader by specifying a chunk size larger than 0. The # chunk size is specified in bytes (default: 10MB). @@ -2189,12 +2200,13 @@ galaxy: # . #oidc_backends_config_file: oidc_backends_config.xml - # Sets the prefix for OIDC scopes specific to this Galaxy instance. - # If an API call is made against this Galaxy instance using an OIDC bearer token, - # any scopes must be prefixed with this value e.g. https://galaxyproject.org/api. - # More concretely, to request all permissions that the user has, the scope - # would have to be specified as ":*". e.g "https://galaxyproject.org/api:*". 
- # Currently, only * is recognised as a valid scope, and future iterations may + # Sets the prefix for OIDC scopes specific to this Galaxy instance. If + # an API call is made against this Galaxy instance using an OIDC + # bearer token, any scopes must be prefixed with this value e.g. + # https://galaxyproject.org/api. More concretely, to request all + # permissions that the user has, the scope would have to be specified + # as ":*". e.g "https://galaxyproject.org/api:*". Currently, + # only * is recognised as a valid scope, and future iterations may # provide more fine-grained scopes. #oidc_scope_prefix: https://galaxyproject.org/api @@ -2897,3 +2909,4 @@ galaxy: # Enable the integration of the Galaxy Help Forum in the tool panel. # This requires the help_forum_api_url to be set. #enable_help_forum_tool_panel_integration: false + From 81e9a910633d63c6db23f042f3b8230e7aa63ac1 Mon Sep 17 00:00:00 2001 From: mvdbeek Date: Fri, 5 Apr 2024 12:13:07 +0200 Subject: [PATCH 356/669] Always discard session after __handle_waiting_jobs is done --- lib/galaxy/jobs/handler.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/lib/galaxy/jobs/handler.py b/lib/galaxy/jobs/handler.py index 008a4eee0be4..1df3503ffb96 100644 --- a/lib/galaxy/jobs/handler.py +++ b/lib/galaxy/jobs/handler.py @@ -395,6 +395,8 @@ def __monitor_step(self): self.__handle_waiting_jobs() except StopSignalException: pass + finally: + self.sa_session.remove() log.trace(monitor_step_timer.to_str()) def __handle_waiting_jobs(self): @@ -583,9 +585,6 @@ def __handle_waiting_jobs(self): with transaction(self.sa_session): self.sa_session.commit() - # Done with the session - self.sa_session.remove() - def __filter_jobs_with_invalid_input_states(self, jobs): """ Takes list of jobs and filters out jobs whose input datasets are in invalid state and From 0bb2925a053d58466fedf9418f07f04be75f68c0 Mon Sep 17 00:00:00 2001 From: mvdbeek Date: Fri, 5 Apr 2024 13:33:09 +0200 Subject: [PATCH 357/669] Make 
sure step removal also resets mapOver state Fixes https://github.com/galaxyproject/galaxy/issues/17906 --- client/src/stores/workflowStepStore.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/client/src/stores/workflowStepStore.ts b/client/src/stores/workflowStepStore.ts index c696f3141ad9..e1ee49f479df 100644 --- a/client/src/stores/workflowStepStore.ts +++ b/client/src/stores/workflowStepStore.ts @@ -340,6 +340,7 @@ export const useWorkflowStepStore = defineScopedStore("workflowStepStore", (work del(steps.value, stepId.toString()); del(stepExtraInputs.value, stepId); + del(stepMapOver.value, stepId.toString()); } return { From bd846d9b2ee6f9adcfbe47239430515e9de70cf7 Mon Sep 17 00:00:00 2001 From: mvdbeek Date: Fri, 5 Apr 2024 13:49:31 +0200 Subject: [PATCH 358/669] Adjust update_cwl_conformance_tests.sh for removed branch --- scripts/update_cwl_conformance_tests.sh | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/scripts/update_cwl_conformance_tests.sh b/scripts/update_cwl_conformance_tests.sh index 2a71267c554e..4cc2cdc1386d 100755 --- a/scripts/update_cwl_conformance_tests.sh +++ b/scripts/update_cwl_conformance_tests.sh @@ -17,11 +17,7 @@ for version in $VERSIONS; do conformance_filepath=conformance_tests.yaml tests_dir=tests fi - if [ "$version" = '1.2' ]; then - branch=1.2.1_proposed - else - branch=main - fi + branch=main wget "https://github.com/common-workflow-language/${repo_name}/archive/${branch}.zip" unzip ${branch}.zip rm -rf "${DEST_DIR}/v${version}" From 7b8866495f8a52dc40195033d462749249917b3e Mon Sep 17 00:00:00 2001 From: Laila Los <44241786+ElectronicBlueberry@users.noreply.github.com> Date: Fri, 5 Apr 2024 15:21:41 +0200 Subject: [PATCH 359/669] fix computed setter infinite loop --- client/src/components/User/UserActivityBarSettings.vue | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/client/src/components/User/UserActivityBarSettings.vue b/client/src/components/User/UserActivityBarSettings.vue 
index 878f7377ea3d..5b892b9a79e6 100644 --- a/client/src/components/User/UserActivityBarSettings.vue +++ b/client/src/components/User/UserActivityBarSettings.vue @@ -11,13 +11,13 @@ const enableActivityBar: WritableComputedRef = computed({ get: () => { return userStore.showActivityBar; }, - set: () => { + set: (toggle) => { // always toggle tool side panel when enabling activity bar if (userStore.toggledSideBar !== "tools") { userStore.toggleSideBar("tools"); } // toggle activity bar - userStore.toggleActivityBar(); + userStore.showActivityBar = toggle; }, }); From d43b826666db22c19332b8741cb3bcb932c1248c Mon Sep 17 00:00:00 2001 From: davelopez <46503462+davelopez@users.noreply.github.com> Date: Fri, 5 Apr 2024 11:31:11 +0200 Subject: [PATCH 360/669] Run make config-rebuild --- doc/source/admin/galaxy_options.rst | 41 +++++++++++-- lib/galaxy/config/sample/galaxy.yml.sample | 67 +++++++++++++--------- 2 files changed, 75 insertions(+), 33 deletions(-) diff --git a/doc/source/admin/galaxy_options.rst b/doc/source/admin/galaxy_options.rst index 2100c81f8cae..f3021dc25d48 100644 --- a/doc/source/admin/galaxy_options.rst +++ b/doc/source/admin/galaxy_options.rst @@ -1453,7 +1453,7 @@ This option has no effect if the file specified by object_store_config_file exists. Otherwise, if this option is set, it overrides any other objectstore settings. - The syntax, available instrumenters, and documentation of their + The syntax, available storage plugins, and documentation of their options is explained in detail in the object store sample configuration file, `object_store_conf.sample.yml` :Default: ``None`` @@ -2606,8 +2606,20 @@ :Description: The upload store is a temporary directory in which files uploaded - by the tus middleware or server will be placed. Defaults to - new_file_path if not set. + by the tus middleware or server for user uploads will be placed. + Defaults to new_file_path if not set. 
+:Default: ``None`` +:Type: str + + +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +``tus_upload_store_job_files`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +:Description: + The upload store is a temporary directory in which files uploaded + by the tus middleware or server for remote job files (Pulsar) will + be placed. Defaults to tus_upload_store if not set. :Default: ``None`` :Type: str @@ -4030,6 +4042,23 @@ :Type: str +~~~~~~~~~~~~~~~~~~~~~ +``oidc_scope_prefix`` +~~~~~~~~~~~~~~~~~~~~~ + +:Description: + Sets the prefix for OIDC scopes specific to this Galaxy instance. + If an API call is made against this Galaxy instance using an OIDC + bearer token, any scopes must be prefixed with this value e.g. + https://galaxyproject.org/api. More concretely, to request all + permissions that the user has, the scope would have to be + specified as ":*". e.g "https://galaxyproject.org/api:*". + Currently, only * is recognised as a valid scope, and future + iterations may provide more fine-grained scopes. +:Default: ``https://galaxyproject.org/api`` +:Type: str + + ~~~~~~~~~~~~~~~~~~~~ ``auth_config_file`` ~~~~~~~~~~~~~~~~~~~~ @@ -5439,9 +5468,9 @@ :Type: str -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -``help_forum_tool_panel_integration_enabled`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +``enable_help_forum_tool_panel_integration`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Description: Enable the integration of the Galaxy Help Forum in the tool panel. diff --git a/lib/galaxy/config/sample/galaxy.yml.sample b/lib/galaxy/config/sample/galaxy.yml.sample index 96c2a84e0bda..21b325bdb490 100644 --- a/lib/galaxy/config/sample/galaxy.yml.sample +++ b/lib/galaxy/config/sample/galaxy.yml.sample @@ -1,21 +1,21 @@ # Galaxy is configured by default to be usable in a single-user development # environment. 
To tune the application for a multi-user production # environment, see the documentation at: -# +# # https://docs.galaxyproject.org/en/master/admin/production.html -# +# # Throughout this sample configuration file, except where stated otherwise, # uncommented values override the default if left unset, whereas commented # values are set to the default value. Relative paths are relative to the root # Galaxy directory. -# +# # Examples of many of these options are explained in more detail in the Galaxy # Community Hub. -# +# # https://galaxyproject.org/admin/config -# +# # Config hackers are encouraged to check there before asking for help. -# +# # Configuration for Gravity process manager. # ``uwsgi:`` section will be ignored if Galaxy is started via Gravity commands (e.g ``./run.sh``, ``galaxy`` or ``galaxyctl``). gravity: @@ -181,7 +181,7 @@ gravity: # enable: false # gx-it-proxy version - # version: '>=0.0.5' + # version: '>=0.0.6' # Public-facing IP of the proxy # ip: localhost @@ -248,6 +248,14 @@ gravity: # Must match ``tus_upload_store`` setting in ``galaxy:`` section. # upload_dir: + # Value of tusd -hooks-httpd option + # + # the default of is suitable for using tusd for Galaxy uploads and should not be changed unless you are using tusd for + # other purposes such as Pulsar staging. + # + # The value of galaxy_infrastructure_url is automatically prepended if the option starts with a `/` + # hooks_http: /api/upload/hooks + # Comma-separated string of enabled tusd hooks. # # Leave at the default value to require authorization at upload creation time. @@ -333,15 +341,9 @@ gravity: # names. # environment: {} - # Configure dynamic handlers in this section. Below is a simple example + # Configure dynamic handlers in this section. # See https://docs.galaxyproject.org/en/latest/admin/scaling.html#dynamically-defined-handlers for details. 
- #handlers: - # handler: - # processes: 3 - # pools: - # - job-handlers - # - workflow-schedulers - + # handlers: {} galaxy: # The directory that will be prepended to relative paths in options @@ -1030,7 +1032,7 @@ galaxy: # This option has no effect if the file specified by # object_store_config_file exists. Otherwise, if this option is set, # it overrides any other objectstore settings. - # The syntax, available instrumenters, and documentation of their + # The syntax, available storage plugins, and documentation of their # options is explained in detail in the object store sample # configuration file, `object_store_conf.sample.yml` #object_store_config: null @@ -1067,6 +1069,13 @@ galaxy: # for that object store entry. #object_store_cache_size: -1 + # Set this to true to indicate in the UI that a user's object store + # selection isn't simply a "preference" that job destinations often + # respect but in fact will always be respected. This should be set to + # true to simplify the UI as long as job destinations never override + # 'object_store_id's for a jobs. + #object_store_always_respect_user_selection: false + # What Dataset attribute is used to reference files in an ObjectStore # implementation, this can be 'uuid' or 'id'. The default will depend # on how the object store is configured, starting with 20.05 Galaxy @@ -1304,9 +1313,6 @@ galaxy: # The value of this option will be resolved with respect to # . #interactivetools_map: interactivetools_map.sqlite - # Note: the following config should still be used due to lack of - # support of data_dir resolution in gx-it-proxy and gravity: - #interactivetools_map: database/interactivetools_map.sqlite # Prefix to use in the formation of the subdomain or path for # interactive tools @@ -1547,10 +1553,15 @@ galaxy: #nginx_upload_job_files_path: null # The upload store is a temporary directory in which files uploaded by - # the tus middleware or server will be placed. Defaults to - # new_file_path if not set. 
+ # the tus middleware or server for user uploads will be placed. + # Defaults to new_file_path if not set. #tus_upload_store: null + # The upload store is a temporary directory in which files uploaded by + # the tus middleware or server for remote job files (Pulsar) will be + # placed. Defaults to tus_upload_store if not set. + #tus_upload_store_job_files: null + # Galaxy can upload user files in chunks without using nginx. Enable # the chunk uploader by specifying a chunk size larger than 0. The # chunk size is specified in bytes (default: 10MB). @@ -2189,12 +2200,13 @@ galaxy: # . #oidc_backends_config_file: oidc_backends_config.xml - # Sets the prefix for OIDC scopes specific to this Galaxy instance. - # If an API call is made against this Galaxy instance using an OIDC bearer token, - # any scopes must be prefixed with this value e.g. https://galaxyproject.org/api. - # More concretely, to request all permissions that the user has, the scope - # would have to be specified as ":*". e.g "https://galaxyproject.org/api:*". - # Currently, only * is recognised as a valid scope, and future iterations may + # Sets the prefix for OIDC scopes specific to this Galaxy instance. If + # an API call is made against this Galaxy instance using an OIDC + # bearer token, any scopes must be prefixed with this value e.g. + # https://galaxyproject.org/api. More concretely, to request all + # permissions that the user has, the scope would have to be specified + # as ":*". e.g "https://galaxyproject.org/api:*". Currently, + # only * is recognised as a valid scope, and future iterations may # provide more fine-grained scopes. #oidc_scope_prefix: https://galaxyproject.org/api @@ -2897,3 +2909,4 @@ galaxy: # Enable the integration of the Galaxy Help Forum in the tool panel. # This requires the help_forum_api_url to be set. 
#enable_help_forum_tool_panel_integration: false + From eed755a3f72f5d1cea05a085c16f56ac1f10a8c5 Mon Sep 17 00:00:00 2001 From: davelopez <46503462+davelopez@users.noreply.github.com> Date: Fri, 5 Apr 2024 16:16:03 +0200 Subject: [PATCH 361/669] Reorder enable_celery_tasks in config schema This is the main flag and it makes sense to set it before the rest of the configuration. --- doc/source/admin/galaxy_options.rst | 24 ++++++++++----------- lib/galaxy/config/sample/galaxy.yml.sample | 10 ++++----- lib/galaxy/config/schemas/config_schema.yml | 17 ++++++++------- 3 files changed, 26 insertions(+), 25 deletions(-) diff --git a/doc/source/admin/galaxy_options.rst b/doc/source/admin/galaxy_options.rst index f3021dc25d48..04ad0c5a1d5b 100644 --- a/doc/source/admin/galaxy_options.rst +++ b/doc/source/admin/galaxy_options.rst @@ -5145,6 +5145,18 @@ :Type: str +~~~~~~~~~~~~~~~~~~~~~~~ +``enable_celery_tasks`` +~~~~~~~~~~~~~~~~~~~~~~~ + +:Description: + Offload long-running tasks to a Celery task queue. Activate this + only if you have setup a Celery worker for Galaxy. For details, + see https://docs.galaxyproject.org/en/master/admin/production.html +:Default: ``false`` +:Type: bool + + ~~~~~~~~~~~~~~~ ``celery_conf`` ~~~~~~~~~~~~~~~ @@ -5166,18 +5178,6 @@ :Type: any -~~~~~~~~~~~~~~~~~~~~~~~ -``enable_celery_tasks`` -~~~~~~~~~~~~~~~~~~~~~~~ - -:Description: - Offload long-running tasks to a Celery task queue. Activate this - only if you have setup a Celery worker for Galaxy. For details, - see https://docs.galaxyproject.org/en/master/admin/production.html -:Default: ``false`` -:Type: bool - - ~~~~~~~~~~~~~~~~~~~~~~~~~~ ``celery_user_rate_limit`` ~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/lib/galaxy/config/sample/galaxy.yml.sample b/lib/galaxy/config/sample/galaxy.yml.sample index 21b325bdb490..532f582920ed 100644 --- a/lib/galaxy/config/sample/galaxy.yml.sample +++ b/lib/galaxy/config/sample/galaxy.yml.sample @@ -2757,6 +2757,11 @@ galaxy: # commented out line below). 
#amqp_internal_connection: sqlalchemy+sqlite:///./database/control.sqlite?isolation_level=IMMEDIATE + # Offload long-running tasks to a Celery task queue. Activate this + # only if you have setup a Celery worker for Galaxy. For details, see + # https://docs.galaxyproject.org/en/master/admin/production.html + #enable_celery_tasks: false + # Configuration options passed to Celery. # To refer to a task by name, use the template `galaxy.foo` where # `foo` is the function name of the task defined in the @@ -2774,11 +2779,6 @@ galaxy: # galaxy.fetch_data: galaxy.external # galaxy.set_job_metadata: galaxy.external - # Offload long-running tasks to a Celery task queue. Activate this - # only if you have setup a Celery worker for Galaxy. For details, see - # https://docs.galaxyproject.org/en/master/admin/production.html - #enable_celery_tasks: false - # If set to a non-0 value, upper limit on number of tasks that can be # executed per user per second. #celery_user_rate_limit: 0.0 diff --git a/lib/galaxy/config/schemas/config_schema.yml b/lib/galaxy/config/schemas/config_schema.yml index ea25af4fc4c4..04e0e41c6f5b 100644 --- a/lib/galaxy/config/schemas/config_schema.yml +++ b/lib/galaxy/config/schemas/config_schema.yml @@ -3755,6 +3755,15 @@ mapping: will automatically create and use a separate sqlite database located in your /database folder (indicated in the commented out line below). + enable_celery_tasks: + type: bool + default: false + required: false + desc: | + Offload long-running tasks to a Celery task queue. + Activate this only if you have setup a Celery worker for Galaxy. + For details, see https://docs.galaxyproject.org/en/master/admin/production.html + celery_conf: type: any required: false @@ -3776,14 +3785,6 @@ mapping: For details, see Celery documentation at https://docs.celeryq.dev/en/stable/userguide/configuration.html. - enable_celery_tasks: - type: bool - default: false - required: false - desc: | - Offload long-running tasks to a Celery task queue. 
- Activate this only if you have setup a Celery worker for Galaxy. - For details, see https://docs.galaxyproject.org/en/master/admin/production.html celery_user_rate_limit: type: float From 9fb43e58b2802edaefefe33033f206f52e6848ee Mon Sep 17 00:00:00 2001 From: davelopez <46503462+davelopez@users.noreply.github.com> Date: Fri, 5 Apr 2024 17:08:28 +0200 Subject: [PATCH 362/669] Update production docs to mention Celery --- doc/source/admin/production.md | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/doc/source/admin/production.md b/doc/source/admin/production.md index e5aa2a814129..d7237abfb834 100644 --- a/doc/source/admin/production.md +++ b/doc/source/admin/production.md @@ -179,3 +179,20 @@ Finally, if you are using Galaxy <= release_2014.06.02, we recommend that you in ### Make the proxy handle uploads and downloads By default, Galaxy receives file uploads as a stream from the proxy server and then writes this file to disk. Likewise, it sends files as a stream to the proxy server. This occupies the GIL in that Galaxy process and will decrease responsiveness for other operations in that process. To solve this problem, you can configure your proxy server to serve downloads directly, involving Galaxy only for the task of authorizing that the user has permission to read the dataset. If using nginx as the proxy, you can configure it to receive uploaded files and write them to disk itself, only notifying Galaxy of the upload once it's completed. All the details on how to configure these can be found on the [Apache](apache.md) and [nginx](nginx.md) proxy instruction pages. + +### Use Celery for asynchronous tasks + +Galaxy can use [Celery](https://docs.celeryq.dev/en/stable/index.html) to handle asynchronous tasks. This is useful for offloading tasks that are usually time-consuming and that would otherwise block the Galaxy process. 
Some use cases include: + +- Setting metadata on datasets +- Purging datasets +- Exporting histories or other data +- Running periodic tasks + +The list of tasks that are currently handled by `Celery` can be found in `lib/galaxy/celery/tasks.py`. + +To enable Celery in your instance you need to follow some additional steps: + +- Set `enable_celery_tasks: true` in the Galaxy config. +- Configure the `backend` under `celery_conf` to store the results of the tasks. For example, you can use [`redis` as the backend](https://docs.celeryq.dev/en/stable/getting-started/backends-and-brokers/redis.html#broker-redis). If you are using `redis`, make sure to install the `redis` dependency in your Galaxy environment with `pip install redis`. You can find more information on how to configure other backends in the [Celery documentation](https://docs.celeryq.dev/en/stable/userguide/tasks.html#task-result-backends). +- Configure one or more workers to handle the tasks. You can find more information on how to configure workers in the [Celery documentation](https://docs.celeryq.dev/en/stable/userguide/workers.html). If you are using [Gravity](https://github.com/galaxyproject/gravity) it will simplify the process of setting up Celery workers. 
From 2f4bc1a60091f0e91253c7d4da29d87b79968db4 Mon Sep 17 00:00:00 2001 From: davelopez <46503462+davelopez@users.noreply.github.com> Date: Fri, 5 Apr 2024 17:09:28 +0200 Subject: [PATCH 363/669] Update config docs to mention the celery result_backend --- doc/source/admin/galaxy_options.rst | 9 ++++++--- lib/galaxy/config/sample/galaxy.yml.sample | 8 ++++++-- lib/galaxy/config/schemas/config_schema.yml | 8 ++++++-- 3 files changed, 18 insertions(+), 7 deletions(-) diff --git a/doc/source/admin/galaxy_options.rst b/doc/source/admin/galaxy_options.rst index 04ad0c5a1d5b..441d5d2d85ad 100644 --- a/doc/source/admin/galaxy_options.rst +++ b/doc/source/admin/galaxy_options.rst @@ -5151,8 +5151,11 @@ :Description: Offload long-running tasks to a Celery task queue. Activate this - only if you have setup a Celery worker for Galaxy. For details, - see https://docs.galaxyproject.org/en/master/admin/production.html + only if you have setup a Celery worker for Galaxy and you have + configured the `celery_conf` option below. Specifically, you need + to set the `result_backend` option in the `celery_conf` option to + a valid Celery result backend URL. For details, see + https://docs.galaxyproject.org/en/master/admin/production.html#use-celery-for-asynchronous-tasks :Default: ``false`` :Type: bool @@ -5174,7 +5177,7 @@ disabled on a per-task basis at this time.) For details, see Celery documentation at https://docs.celeryq.dev/en/stable/userguide/configuration.html. 
-:Default: ``{'task_routes': {'galaxy.fetch_data': 'galaxy.external', 'galaxy.set_job_metadata': 'galaxy.external'}}`` +:Default: ``{'result_backend': 'redis://127.0.0.1:6379/0', 'task_routes': {'galaxy.fetch_data': 'galaxy.external', 'galaxy.set_job_metadata': 'galaxy.external'}}`` :Type: any diff --git a/lib/galaxy/config/sample/galaxy.yml.sample b/lib/galaxy/config/sample/galaxy.yml.sample index 532f582920ed..fffe8446af47 100644 --- a/lib/galaxy/config/sample/galaxy.yml.sample +++ b/lib/galaxy/config/sample/galaxy.yml.sample @@ -2758,8 +2758,11 @@ galaxy: #amqp_internal_connection: sqlalchemy+sqlite:///./database/control.sqlite?isolation_level=IMMEDIATE # Offload long-running tasks to a Celery task queue. Activate this - # only if you have setup a Celery worker for Galaxy. For details, see - # https://docs.galaxyproject.org/en/master/admin/production.html + # only if you have setup a Celery worker for Galaxy and you have + # configured the `celery_conf` option below. Specifically, you need to + # set the `result_backend` option in the `celery_conf` option to a + # valid Celery result backend URL. For details, see + # https://docs.galaxyproject.org/en/master/admin/production.html#use-celery-for-asynchronous-tasks #enable_celery_tasks: false # Configuration options passed to Celery. @@ -2775,6 +2778,7 @@ galaxy: # For details, see Celery documentation at # https://docs.celeryq.dev/en/stable/userguide/configuration.html. #celery_conf: + # result_backend: redis://127.0.0.1:6379/0 # task_routes: # galaxy.fetch_data: galaxy.external # galaxy.set_job_metadata: galaxy.external diff --git a/lib/galaxy/config/schemas/config_schema.yml b/lib/galaxy/config/schemas/config_schema.yml index 04e0e41c6f5b..8178826bbbf7 100644 --- a/lib/galaxy/config/schemas/config_schema.yml +++ b/lib/galaxy/config/schemas/config_schema.yml @@ -3761,13 +3761,17 @@ mapping: required: false desc: | Offload long-running tasks to a Celery task queue. 
- Activate this only if you have setup a Celery worker for Galaxy. - For details, see https://docs.galaxyproject.org/en/master/admin/production.html + Activate this only if you have setup a Celery worker for Galaxy and you have + configured the `celery_conf` option below. Specifically, you need to set the + `result_backend` option in the `celery_conf` option to a valid Celery result + backend URL. + For details, see https://docs.galaxyproject.org/en/master/admin/production.html#use-celery-for-asynchronous-tasks celery_conf: type: any required: false default: + result_backend: redis://127.0.0.1:6379/0 task_routes: 'galaxy.fetch_data': 'galaxy.external' 'galaxy.set_job_metadata': 'galaxy.external' From 06c5762a5cf30dde0cd99ccddb51a07f85814523 Mon Sep 17 00:00:00 2001 From: Martin Cech Date: Fri, 5 Apr 2024 09:45:34 -0700 Subject: [PATCH 364/669] adjust release notes to use the same languag3 as interface --- doc/source/releases/24.0_announce.rst | 2 +- doc/source/releases/24.0_announce_user.rst | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/doc/source/releases/24.0_announce.rst b/doc/source/releases/24.0_announce.rst index 34c63d9757dd..1773e2c1223a 100644 --- a/doc/source/releases/24.0_announce.rst +++ b/doc/source/releases/24.0_announce.rst @@ -16,7 +16,7 @@ Highlights **image_diff.** For tool developers, image_diff, a new comparison method for test output verification using images has been added. Unlike previously used comparison methods, image_diff is specifically tailored for single-channel and multi-channel image data (e.g. RGB). The difference of a pair of images is quantified as the pixel-wise distance between the images, for which different metrics can be used. A pair of images is considered to be equal in terms of the specified metric, if the distance between the images computed with respect to the metric is not above a given threshold. For more details, see `the original pull request `__). 
-Other notable improvements include consolidating resource grids for histories, visualizations and pages into tab views; the addition of a new UI feature for "relocating" a dataset to a new object store; and, for tool developers, a new comparison method for test output verification using images. Check out the `24.0 user release notes <24.0_announce_user.html>`__ for all the details. +Other notable improvements include consolidating resource grids for histories, visualizations and pages into tab views; the addition of a new UI feature for "relocating" a dataset to a different storage location; and, for tool developers, a new comparison method for test output verification using images. Check out the `24.0 user release notes <24.0_announce_user.html>`__ for all the details. Are you an admin? See the Administration Notes below, and check out `some admin relevant PRs `__. diff --git a/doc/source/releases/24.0_announce_user.rst b/doc/source/releases/24.0_announce_user.rst index f48ba67eb1ec..56569265996d 100644 --- a/doc/source/releases/24.0_announce_user.rst +++ b/doc/source/releases/24.0_announce_user.rst @@ -30,9 +30,9 @@ My workflows: list view New select component for selecting a large amount of options ----------------------------------------------------------- -This new component addresses the need of selecting a very large number of options at once. The component features a list based layout which can be filtered using plain-text or a regular expression. Options can be added individually, in bulk via the filter value, or by first highlighting a range. +This new component addresses the need of selecting a very large number of options at once. The component features a list based layout which can be filtered using plain-text or a regular expression. Options can be added individually, in bulk via the filter value, or by first highlighting a range. -The component is fully keyboard accessible. All methods of selection and highlighting work via keyboard. 
The options are not selectable individually with tab, but can be scrolled through using the arrow-keys. The hover hint adapts when a keyboard is used. +The component is fully keyboard accessible. All methods of selection and highlighting work via keyboard. The options are not selectable individually with tab, but can be scrolled through using the arrow-keys. The hover hint adapts when a keyboard is used. The size of the table can be increased to allow for seeing more options at once.The options in the list are limited to 1000 for performance reasons, but this can be temporarily increased in steps of 500, when reaching the end of a list. @@ -47,7 +47,7 @@ The component can be chosen when multiple options are available; a local prefere Multiple item drag and drop and keyboard navigation ----------------------------------------------------------- -This new feature allows users to select multiple history items and drag and drop them into other histories or even tool forms! Users can also navigate their history using the arrow keys; multiple history items can be selected with the Shift + ArrowUp/ArrowDown key combination. +This new feature allows users to select multiple history items and drag and drop them into other histories or even tool forms! Users can also navigate their history using the arrow keys; multiple history items can be selected with the Shift + ArrowUp/ArrowDown key combination. Drag and drop: @@ -73,7 +73,7 @@ Your histories, visualisations, and pages are now consolidated into an easier to Move datasets between storage locations ----------------------------------------------------------- -Relocate a dataset to a new object store with ease using this new UI feature! The example below uses an instance with four object stores defined, but only three of them declaring the same "device" ID. 
Clicking on the dataset information and scrolling to storage details has a "Relocate" option if the dataset is "safe" to relocate and there are valid targets to relocate it to. The UI utilizes the same visual language used for describing attributes of the storage and exposing admin provided details. This example also shows what the buttons look like with quota enabled object stores and object stores without quota enabled. +Relocate a dataset to a different storage location with ease using this new UI feature! The example below uses an instance with four storage locations defined, but only three of them declaring the same "device" ID (set by the administrator). Clicking on the dataset information and scrolling to storage details has a "Relocate" option if the dataset is "safe" to relocate and there are valid targets to relocate it to. The UI utilizes the same visual language used for describing attributes of the storage and exposing admin provided details. This example also shows what the buttons look like for storage locations with and without quota enabled. .. 
raw:: html From 8e75d32ced3586e7e638769d73943f8014a67f87 Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 8 Mar 2024 14:51:16 -0500 Subject: [PATCH 365/669] Use DeclarativeBase as base class mypy bug workaround no longer needed as we are no longer specifying a metaclass --- lib/galaxy/model/__init__.py | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index a226b2f33f89..a1e4c9ef314c 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -106,6 +106,7 @@ from sqlalchemy.orm import ( aliased, column_property, + DeclarativeBase, deferred, joinedload, Mapped, @@ -119,6 +120,7 @@ from sqlalchemy.orm.attributes import flag_modified from sqlalchemy.orm.collections import attribute_keyed_dict from sqlalchemy.sql import exists +from sqlalchemy.sql.expression import FromClause from typing_extensions import ( Literal, Protocol, @@ -224,23 +226,15 @@ if TYPE_CHECKING: - # Workaround for https://github.com/python/mypy/issues/14182 - from sqlalchemy.orm import DeclarativeMeta as _DeclarativeMeta - - class DeclarativeMeta(_DeclarativeMeta, type): - pass - from galaxy.datatypes.data import Data from galaxy.tools import DefaultToolState from galaxy.workflow.modules import WorkflowModule class _HasTable: - table: Table - __table__: Table + table: FromClause + __table__: FromClause else: - from sqlalchemy.orm import DeclarativeMeta - _HasTable = object @@ -252,7 +246,7 @@ def get_uuid(uuid: Optional[Union[UUID, str]] = None) -> UUID: return UUID(str(uuid)) -class Base(_HasTable, metaclass=DeclarativeMeta): +class Base(_HasTable, DeclarativeBase): __abstract__ = True metadata = MetaData(naming_convention=NAMING_CONVENTION) mapper_registry.metadata = metadata From dcf2d843049adedf0c7ba56578aad332d3557b95 Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 4 Apr 2024 23:51:38 -0400 Subject: [PATCH 366/669] Fix method resolution order for base model class --- 
lib/galaxy/model/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index a1e4c9ef314c..5cfd17dc200f 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -246,7 +246,7 @@ def get_uuid(uuid: Optional[Union[UUID, str]] = None) -> UUID: return UUID(str(uuid)) -class Base(_HasTable, DeclarativeBase): +class Base(DeclarativeBase, _HasTable): __abstract__ = True metadata = MetaData(naming_convention=NAMING_CONVENTION) mapper_registry.metadata = metadata From 09fee6aaad34fe34e6c2c76daab35bf89b95a681 Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 8 Mar 2024 15:04:46 -0500 Subject: [PATCH 367/669] Remove future, autocommit args from session and engine creation --- lib/galaxy/jobs/handler.py | 1 - lib/galaxy/model/base.py | 2 +- lib/galaxy/model/database_utils.py | 2 +- lib/galaxy/model/migrations/__init__.py | 4 ++-- lib/galaxy/model/migrations/alembic/env.py | 2 +- lib/galaxy/model/migrations/scripts.py | 6 +++--- lib/galaxy/model/orm/engine_factory.py | 4 ++-- .../model/unittest_utils/migration_scripts_testing_utils.py | 2 +- lib/galaxy/model/unittest_utils/model_testing_utils.py | 4 ++-- lib/tool_shed/webapp/model/migrations/__init__.py | 2 +- lib/tool_shed/webapp/model/migrations/alembic/env.py | 2 +- scripts/check_model.py | 2 +- scripts/update_shed_config_path.py | 4 ++-- test/unit/data/model/conftest.py | 2 +- 14 files changed, 19 insertions(+), 20 deletions(-) diff --git a/lib/galaxy/jobs/handler.py b/lib/galaxy/jobs/handler.py index 008a4eee0be4..2ed1a19580fa 100644 --- a/lib/galaxy/jobs/handler.py +++ b/lib/galaxy/jobs/handler.py @@ -123,7 +123,6 @@ def __init__( self.self_handler_tags = self_handler_tags self.max_grab = max_grab self.handler_tags = handler_tags - self._grab_conn_opts = {"autocommit": False} self._grab_query = None self._supports_returning = self.app.application_stack.supports_returning() diff --git a/lib/galaxy/model/base.py 
b/lib/galaxy/model/base.py index 0258824f54f8..59bda7d7a671 100644 --- a/lib/galaxy/model/base.py +++ b/lib/galaxy/model/base.py @@ -81,7 +81,7 @@ def check_database_connection(session): class ModelMapping(Bunch): def __init__(self, model_modules, engine): self.engine = engine - self._SessionLocal = sessionmaker(autoflush=False, autocommit=False, future=True) + self._SessionLocal = sessionmaker(autoflush=False) versioned_session(self._SessionLocal) context = scoped_session(self._SessionLocal, scopefunc=self.request_scopefunc) # For backward compatibility with "context.current" diff --git a/lib/galaxy/model/database_utils.py b/lib/galaxy/model/database_utils.py index 403be8d1242f..15fdf283a7b3 100644 --- a/lib/galaxy/model/database_utils.py +++ b/lib/galaxy/model/database_utils.py @@ -45,7 +45,7 @@ def create_database(db_url, database=None, encoding="utf8", template=None): @contextmanager def sqlalchemy_engine(url): - engine = create_engine(url, future=True) + engine = create_engine(url) try: yield engine finally: diff --git a/lib/galaxy/model/migrations/__init__.py b/lib/galaxy/model/migrations/__init__.py index 61c367c98085..da0a1927c867 100644 --- a/lib/galaxy/model/migrations/__init__.py +++ b/lib/galaxy/model/migrations/__init__.py @@ -124,10 +124,10 @@ def verify_databases_via_script( ) -> None: # This function serves a use case when an engine has not been created yet # (e.g. when called from a script). 
- gxy_engine = create_engine(gxy_config.url, future=True) + gxy_engine = create_engine(gxy_config.url) tsi_engine = None if tsi_config.url and tsi_config.url != gxy_config.url: - tsi_engine = create_engine(tsi_config.url, future=True) + tsi_engine = create_engine(tsi_config.url) verify_databases( gxy_engine, diff --git a/lib/galaxy/model/migrations/alembic/env.py b/lib/galaxy/model/migrations/alembic/env.py index cf9cb6b36a81..5c3ec11f8298 100644 --- a/lib/galaxy/model/migrations/alembic/env.py +++ b/lib/galaxy/model/migrations/alembic/env.py @@ -116,7 +116,7 @@ def _configure_and_run_migrations_offline(url: str) -> None: def _configure_and_run_migrations_online(url) -> None: - engine = create_engine(url, future=True) + engine = create_engine(url) with engine.connect() as connection: context.configure(connection=connection, target_metadata=target_metadata) with context.begin_transaction(): diff --git a/lib/galaxy/model/migrations/scripts.py b/lib/galaxy/model/migrations/scripts.py index 207928653a0a..6315c83ecd23 100644 --- a/lib/galaxy/model/migrations/scripts.py +++ b/lib/galaxy/model/migrations/scripts.py @@ -59,7 +59,7 @@ def verify_database_is_initialized(db_url: str) -> None: if not database_exists(db_url): raise DatabaseDoesNotExistError(db_url) - engine = create_engine(db_url, future=True) + engine = create_engine(db_url) try: db_state = DatabaseStateCache(engine=engine) if db_state.is_database_empty() or db_state.contains_only_kombu_tables(): @@ -161,7 +161,7 @@ def get_gxy_db_version(self, gxy_db_url=None): """ db_url = gxy_db_url or self.gxy_db_url try: - engine = create_engine(db_url, future=True) + engine = create_engine(db_url) version = self._get_gxy_alembic_db_version(engine) if not version: version = self._get_gxy_sam_db_version(engine) @@ -197,7 +197,7 @@ def _rename_arg(self, argv, old_name, new_name) -> None: def _upgrade(self, db_url, model): try: - engine = create_engine(db_url, future=True) + engine = create_engine(db_url) am = 
get_alembic_manager(engine) am.upgrade(model) finally: diff --git a/lib/galaxy/model/orm/engine_factory.py b/lib/galaxy/model/orm/engine_factory.py index 374c53219c73..3effa9ed8e00 100644 --- a/lib/galaxy/model/orm/engine_factory.py +++ b/lib/galaxy/model/orm/engine_factory.py @@ -106,9 +106,9 @@ def after_cursor_execute(conn, cursor, statement, parameters, context, executema set_sqlite_connect_args(engine_options, url) if url.startswith("sqlite://") and url not in ("sqlite:///:memory:", "sqlite://"): - engine = create_engine(url, **engine_options, poolclass=NullPool, future=True) + engine = create_engine(url, **engine_options, poolclass=NullPool) else: - engine = create_engine(url, **engine_options, future=True) + engine = create_engine(url, **engine_options) # Prevent sharing connection across fork: https://docs.sqlalchemy.org/en/14/core/pooling.html#using-connection-pools-with-multiprocessing-or-os-fork register_after_fork(engine, lambda e: e.dispose()) diff --git a/lib/galaxy/model/unittest_utils/migration_scripts_testing_utils.py b/lib/galaxy/model/unittest_utils/migration_scripts_testing_utils.py index 0fbfaf947de3..b4704e1394e8 100644 --- a/lib/galaxy/model/unittest_utils/migration_scripts_testing_utils.py +++ b/lib/galaxy/model/unittest_utils/migration_scripts_testing_utils.py @@ -76,7 +76,7 @@ def get_db_heads(config: Config) -> Tuple[str, ...]: """Return revision ids (version heads) stored in the database.""" dburl = config.get_main_option("sqlalchemy.url") assert dburl - engine = create_engine(dburl, future=True) + engine = create_engine(dburl) with engine.connect() as conn: context = MigrationContext.configure(conn) heads = context.get_current_heads() diff --git a/lib/galaxy/model/unittest_utils/model_testing_utils.py b/lib/galaxy/model/unittest_utils/model_testing_utils.py index 960045510abb..869cc40888e8 100644 --- a/lib/galaxy/model/unittest_utils/model_testing_utils.py +++ b/lib/galaxy/model/unittest_utils/model_testing_utils.py @@ -67,7 +67,7 @@ 
def drop_existing_database(url: DbUrl) -> Iterator[None]: @contextmanager def disposing_engine(url: DbUrl) -> Iterator[Engine]: """Context manager for engine that disposes of its connection pool on exit.""" - engine = create_engine(url, future=True) + engine = create_engine(url) try: yield engine finally: @@ -233,7 +233,7 @@ def _drop_postgres_database(url: DbUrl) -> None: def _drop_database(connection_url, database_name): - engine = create_engine(connection_url, isolation_level="AUTOCOMMIT", future=True) + engine = create_engine(connection_url, isolation_level="AUTOCOMMIT") preparer = IdentifierPreparer(engine.dialect) database_name = preparer.quote(database_name) stmt = text(f"DROP DATABASE IF EXISTS {database_name}") diff --git a/lib/tool_shed/webapp/model/migrations/__init__.py b/lib/tool_shed/webapp/model/migrations/__init__.py index 8cf1165661c5..eae748a507d5 100644 --- a/lib/tool_shed/webapp/model/migrations/__init__.py +++ b/lib/tool_shed/webapp/model/migrations/__init__.py @@ -46,7 +46,7 @@ def __init__(self) -> None: def verify_database(url, engine_options=None) -> None: engine_options = engine_options or {} - engine = create_engine(url, **engine_options, future=True) + engine = create_engine(url, **engine_options) verifier = DatabaseStateVerifier(engine) verifier.run() engine.dispose() diff --git a/lib/tool_shed/webapp/model/migrations/alembic/env.py b/lib/tool_shed/webapp/model/migrations/alembic/env.py index df344a900e28..4c05fabaf5d7 100644 --- a/lib/tool_shed/webapp/model/migrations/alembic/env.py +++ b/lib/tool_shed/webapp/model/migrations/alembic/env.py @@ -62,7 +62,7 @@ def _configure_and_run_migrations_offline(url: str) -> None: def _configure_and_run_migrations_online(url) -> None: - engine = create_engine(url, future=True) + engine = create_engine(url) with engine.connect() as connection: context.configure(connection=connection, target_metadata=target_metadata) with context.begin_transaction(): diff --git a/scripts/check_model.py 
b/scripts/check_model.py index 96cb66e9f562..f08544f6c1fa 100644 --- a/scripts/check_model.py +++ b/scripts/check_model.py @@ -49,7 +49,7 @@ def load_indexes(metadata): # create EMPTY metadata, then load from database db_url = get_config(sys.argv)["db_url"] metadata = MetaData() - engine = create_engine(db_url, future=True) + engine = create_engine(db_url) metadata.reflect(bind=engine) indexes_in_db = load_indexes(metadata) diff --git a/scripts/update_shed_config_path.py b/scripts/update_shed_config_path.py index faa7b8871f2c..d3619a099704 100644 --- a/scripts/update_shed_config_path.py +++ b/scripts/update_shed_config_path.py @@ -46,9 +46,9 @@ def create_database(config_file): exit(1) # Initialize the database connection. - engine = create_engine(database_connection, future=True) + engine = create_engine(database_connection) MetaData(bind=engine) - install_session = scoped_session(sessionmaker(bind=engine, autoflush=False, autocommit=True)) + install_session = scoped_session(sessionmaker(bind=engine, autoflush=False)) model = mapping.init(database_connection) return install_session, model diff --git a/test/unit/data/model/conftest.py b/test/unit/data/model/conftest.py index 95908e2c8c1c..4d8728e9f197 100644 --- a/test/unit/data/model/conftest.py +++ b/test/unit/data/model/conftest.py @@ -30,7 +30,7 @@ def sqlite_memory_url(): @pytest.fixture(scope="module") def engine(): db_uri = "sqlite:///:memory:" - return create_engine(db_uri, future=True) + return create_engine(db_uri) @pytest.fixture From ae432bd375c5793ddb4fed9483a31b18ae19ebd3 Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 8 Mar 2024 16:09:02 -0500 Subject: [PATCH 368/669] Remove fix for model constructors (fixed in SA2.0) --- lib/galaxy/model/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 5cfd17dc200f..d831e4340fdf 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -251,7 +251,6 @@ class 
Base(DeclarativeBase, _HasTable): metadata = MetaData(naming_convention=NAMING_CONVENTION) mapper_registry.metadata = metadata registry = mapper_registry - __init__ = mapper_registry.constructor @classmethod def __declare_last__(cls): From ee067008f1df008770dbf7bbd470f88331656296 Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 8 Mar 2024 16:33:57 -0500 Subject: [PATCH 369/669] Add missing type hints to mapped_column in the model --- lib/galaxy/model/__init__.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index d831e4340fdf..be21ac134849 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -2979,7 +2979,7 @@ class HistoryAudit(Base): __tablename__ = "history_audit" __table_args__ = (PrimaryKeyConstraint(sqlite_on_conflict="IGNORE"),) - history_id = mapped_column(Integer, ForeignKey("history.id"), primary_key=True, nullable=False) + history_id: Mapped[int] = mapped_column(Integer, ForeignKey("history.id"), primary_key=True, nullable=False) update_time: Mapped[datetime] = mapped_column(DateTime, default=now, primary_key=True, nullable=False) # This class should never be instantiated. 
@@ -9478,7 +9478,7 @@ class WorkflowInvocationOutputDatasetCollectionAssociation(Base, Dictifiable, Se dataset_collection_id: Mapped[Optional[int]] = mapped_column( Integer, ForeignKey("history_dataset_collection_association.id", name="fk_wiodca_dci"), index=True ) - workflow_output_id = mapped_column(Integer, ForeignKey("workflow_output.id", name="fk_wiodca_woi"), index=True) + workflow_output_id: Mapped[int] = mapped_column(Integer, ForeignKey("workflow_output.id", name="fk_wiodca_woi"), index=True) workflow_invocation = relationship("WorkflowInvocation", back_populates="output_dataset_collections") workflow_step = relationship("WorkflowStep") @@ -11106,7 +11106,7 @@ class CeleryUserRateLimit(Base): __tablename__ = "celery_user_rate_limit" - user_id = mapped_column(Integer, ForeignKey("galaxy_user.id", ondelete="CASCADE"), primary_key=True) + user_id: Mapped[int] = mapped_column(Integer, ForeignKey("galaxy_user.id", ondelete="CASCADE"), primary_key=True) last_scheduled_time: Mapped[datetime] = mapped_column(DateTime, nullable=False) def __repr__(self): From 61c3dc6f29717698cfe3dec66dd0fac966da2bf9 Mon Sep 17 00:00:00 2001 From: davelopez <46503462+davelopez@users.noreply.github.com> Date: Sat, 6 Apr 2024 12:42:15 +0200 Subject: [PATCH 370/669] Consolidate HDA and HDCA type names in client --- client/src/api/datasets.ts | 8 +++----- client/src/api/index.ts | 8 ++++---- .../src/components/Collections/ListCollectionCreator.vue | 4 ++-- .../src/components/Collections/PairCollectionCreator.vue | 4 ++-- client/src/components/Dataset/DatasetList.vue | 3 ++- client/src/components/Dataset/DatasetName.vue | 2 +- .../components/History/Content/Dataset/DatasetActions.vue | 4 ++-- .../History/Content/Dataset/DatasetDownload.vue | 4 ++-- client/src/stores/datasetStore.ts | 6 +++--- client/src/stores/historyItemsStore.ts | 4 ++-- 10 files changed, 23 insertions(+), 24 deletions(-) diff --git a/client/src/api/datasets.ts b/client/src/api/datasets.ts index 
703a9d30c190..8bdae1e2c012 100644 --- a/client/src/api/datasets.ts +++ b/client/src/api/datasets.ts @@ -1,13 +1,11 @@ import type { FetchArgType } from "openapi-typescript-fetch"; -import { DatasetDetails } from "@/api"; +import { HDADetailed } from "@/api"; import { components, fetcher } from "@/api/schema"; import { withPrefix } from "@/utils/redirect"; export const datasetsFetcher = fetcher.path("/api/datasets").method("get").create(); -export type HDASummary = components["schemas"]["HDASummary"]; - type GetDatasetsApiOptions = FetchArgType; type GetDatasetsQuery = Pick; // custom interface for how we use getDatasets @@ -42,11 +40,11 @@ export const fetchDataset = fetcher.path("/api/datasets/{dataset_id}").method("g export const fetchDatasetStorage = fetcher.path("/api/datasets/{dataset_id}/storage").method("get").create(); -export async function fetchDatasetDetails(params: { id: string }): Promise { +export async function fetchDatasetDetails(params: { id: string }): Promise { const { data } = await fetchDataset({ dataset_id: params.id, view: "detailed" }); // We know that the server will return a DatasetDetails object because of the view parameter // but the type system doesn't, so we have to cast it. - return data as unknown as DatasetDetails; + return data as unknown as HDADetailed; } const updateDataset = fetcher.path("/api/datasets/{dataset_id}").method("put").create(); diff --git a/client/src/api/index.ts b/client/src/api/index.ts index 0d17530014d2..7c3a88636903 100644 --- a/client/src/api/index.ts +++ b/client/src/api/index.ts @@ -82,12 +82,12 @@ export type HistoryContentItemBase = components["schemas"]["EncodedHistoryConten /** * Contains summary information about a HistoryDatasetAssociation. */ -export type DatasetSummary = components["schemas"]["HDASummary"]; +export type HDASummary = components["schemas"]["HDASummary"]; /** * Contains additional details about a HistoryDatasetAssociation. 
*/ -export type DatasetDetails = components["schemas"]["HDADetailed"]; +export type HDADetailed = components["schemas"]["HDADetailed"]; /** * Contains storage (object store, quota, etc..) details for a dataset. @@ -97,7 +97,7 @@ export type DatasetStorageDetails = components["schemas"]["DatasetStorageDetails /** * Represents a HistoryDatasetAssociation with either summary or detailed information. */ -export type DatasetEntry = DatasetSummary | DatasetDetails; +export type DatasetEntry = HDASummary | HDADetailed; /** * Contains summary information about a DCE (DatasetCollectionElement). @@ -179,7 +179,7 @@ export function isCollectionElement(element: DCESummary): element is DCECollecti /** * Returns true if the given dataset entry is an instance of DatasetDetails. */ -export function hasDetails(entry: DatasetEntry): entry is DatasetDetails { +export function hasDetails(entry: DatasetEntry): entry is HDADetailed { return "peek" in entry; } diff --git a/client/src/components/Collections/ListCollectionCreator.vue b/client/src/components/Collections/ListCollectionCreator.vue index e6cb37846cba..6198fa24cd5c 100644 --- a/client/src/components/Collections/ListCollectionCreator.vue +++ b/client/src/components/Collections/ListCollectionCreator.vue @@ -8,7 +8,7 @@ import { BAlert, BButton } from "bootstrap-vue"; import { computed, onMounted, ref } from "vue"; import draggable from "vuedraggable"; -import { type HDCADetailed } from "@/api"; +import type { HDASummary, HDCADetailed, HDCASummary } from "@/api"; import STATES from "@/mvc/dataset/states"; import localize from "@/utils/localization"; @@ -116,7 +116,7 @@ function _validateElements() { } /** describe what is wrong with a particular element if anything */ -function _isElementInvalid(element: HDCADetailed) { +function _isElementInvalid(element: HDASummary | HDCASummary) { if (element.history_content_type === "dataset_collection") { return localize("is a collection, this is not allowed"); } diff --git 
a/client/src/components/Collections/PairCollectionCreator.vue b/client/src/components/Collections/PairCollectionCreator.vue index 154d82c2cfcc..7ef5a92935c9 100644 --- a/client/src/components/Collections/PairCollectionCreator.vue +++ b/client/src/components/Collections/PairCollectionCreator.vue @@ -2,7 +2,7 @@ import { BAlert } from "bootstrap-vue"; import { computed, onMounted, ref } from "vue"; -import { type HDCADetailed } from "@/api"; +import type { HDASummary, HDCADetailed, HDCASummary } from "@/api"; import STATES from "@/mvc/dataset/states"; import localize from "@/utils/localization"; @@ -81,7 +81,7 @@ function _validateElements() { } /** describe what is wrong with a particular element if anything */ -function _isElementInvalid(element: HDCADetailed) { +function _isElementInvalid(element: HDASummary | HDCASummary) { if (element.history_content_type === "dataset_collection") { return localize("is a collection, this is not allowed"); } diff --git a/client/src/components/Dataset/DatasetList.vue b/client/src/components/Dataset/DatasetList.vue index 014004214ff0..cec54b0c7429 100644 --- a/client/src/components/Dataset/DatasetList.vue +++ b/client/src/components/Dataset/DatasetList.vue @@ -3,7 +3,8 @@ import { BAlert, BTable } from "bootstrap-vue"; import { storeToRefs } from "pinia"; import { computed, onMounted, ref } from "vue"; -import { copyDataset, getDatasets, HDASummary } from "@/api/datasets"; +import { HDASummary } from "@/api"; +import { copyDataset, getDatasets } from "@/api/datasets"; import { updateTags } from "@/api/tags"; import { useHistoryStore } from "@/stores/historyStore"; diff --git a/client/src/components/Dataset/DatasetName.vue b/client/src/components/Dataset/DatasetName.vue index 99a1e335a65d..71e629011e3a 100644 --- a/client/src/components/Dataset/DatasetName.vue +++ b/client/src/components/Dataset/DatasetName.vue @@ -5,7 +5,7 @@ import { FontAwesomeIcon } from "@fortawesome/vue-fontawesome"; import { BLink } from "bootstrap-vue"; 
import { computed } from "vue"; -import { HDASummary } from "@/api/datasets"; +import { HDASummary } from "@/api"; library.add(faCaretDown, faCopy, faEye, faTimesCircle, faPause); diff --git a/client/src/components/History/Content/Dataset/DatasetActions.vue b/client/src/components/History/Content/Dataset/DatasetActions.vue index 8bc15069c15b..fbdd789c368a 100644 --- a/client/src/components/History/Content/Dataset/DatasetActions.vue +++ b/client/src/components/History/Content/Dataset/DatasetActions.vue @@ -14,7 +14,7 @@ import { BButton } from "bootstrap-vue"; import { computed } from "vue"; import { useRouter } from "vue-router/composables"; -import { type DatasetDetails } from "@/api"; +import { type HDADetailed } from "@/api"; import { copy as sendToClipboard } from "@/utils/clipboard"; import localize from "@/utils/localization"; import { absPath, prependPath } from "@/utils/redirect"; @@ -26,7 +26,7 @@ import DatasetDownload from "@/components/History/Content/Dataset/DatasetDownloa library.add(faBug, faChartBar, faInfoCircle, faLink, faQuestion, faRedo, faSitemap); interface Props { - item: DatasetDetails; + item: HDADetailed; writable: boolean; showHighlight: boolean; itemUrls: ItemUrls; diff --git a/client/src/components/History/Content/Dataset/DatasetDownload.vue b/client/src/components/History/Content/Dataset/DatasetDownload.vue index 89e5069f5b41..2aa377505819 100644 --- a/client/src/components/History/Content/Dataset/DatasetDownload.vue +++ b/client/src/components/History/Content/Dataset/DatasetDownload.vue @@ -5,13 +5,13 @@ import { FontAwesomeIcon } from "@fortawesome/vue-fontawesome"; import { BButton, BDropdown, BDropdownItem } from "bootstrap-vue"; import { computed } from "vue"; -import { type DatasetDetails } from "@/api"; +import { type HDADetailed } from "@/api"; import { prependPath } from "@/utils/redirect"; library.add(faSave); interface Props { - item: DatasetDetails; + item: HDADetailed; } const props = defineProps(); diff --git 
a/client/src/stores/datasetStore.ts b/client/src/stores/datasetStore.ts index 8c4582505294..811e7a88be39 100644 --- a/client/src/stores/datasetStore.ts +++ b/client/src/stores/datasetStore.ts @@ -1,14 +1,14 @@ import { defineStore } from "pinia"; import { computed, set } from "vue"; -import type { DatasetDetails, DatasetEntry, HistoryContentItemBase } from "@/api"; +import type { DatasetEntry, HDADetailed, HistoryContentItemBase } from "@/api"; import { fetchDataset } from "@/api/datasets"; import { ApiResponse } from "@/api/schema"; import { useKeyedCache } from "@/composables/keyedCache"; -async function fetchDatasetDetails(params: { id: string }): Promise> { +async function fetchDatasetDetails(params: { id: string }): Promise> { const response = await fetchDataset({ dataset_id: params.id, view: "detailed" }); - return response as unknown as ApiResponse; + return response as unknown as ApiResponse; } export const useDatasetStore = defineStore("datasetStore", () => { diff --git a/client/src/stores/historyItemsStore.ts b/client/src/stores/historyItemsStore.ts index 5dcf46ee56df..45f14891b991 100644 --- a/client/src/stores/historyItemsStore.ts +++ b/client/src/stores/historyItemsStore.ts @@ -8,13 +8,13 @@ import { reverse } from "lodash"; import { defineStore } from "pinia"; import { computed, ref, set } from "vue"; -import type { DatasetSummary, HDCASummary } from "@/api"; +import type { HDASummary, HDCASummary } from "@/api"; import { HistoryFilters } from "@/components/History/HistoryFilters"; import { mergeArray } from "@/store/historyStore/model/utilities"; import { ActionSkippedError, LastQueue } from "@/utils/lastQueue"; import { urlData } from "@/utils/url"; -export type HistoryItem = DatasetSummary | HDCASummary; +export type HistoryItem = HDASummary | HDCASummary; const limit = 100; From 2b1e7966de30f75ead000d3e130418e72ece001b Mon Sep 17 00:00:00 2001 From: davelopez <46503462+davelopez@users.noreply.github.com> Date: Sat, 6 Apr 2024 12:50:11 +0200 
Subject: [PATCH 371/669] Consolidate HistoryItemSummary type and imports --- client/src/api/index.ts | 5 +++ .../Collections/ListCollectionCreator.vue | 4 +- .../Collections/PairCollectionCreator.vue | 4 +- .../History/CurrentHistory/HistoryPanel.vue | 38 +++++++++---------- .../History/Multiple/MultipleViewList.vue | 8 ++-- client/src/stores/historyItemsStore.ts | 14 +++---- 6 files changed, 38 insertions(+), 35 deletions(-) diff --git a/client/src/api/index.ts b/client/src/api/index.ts index 7c3a88636903..6ee22a2590b2 100644 --- a/client/src/api/index.ts +++ b/client/src/api/index.ts @@ -89,6 +89,11 @@ export type HDASummary = components["schemas"]["HDASummary"]; */ export type HDADetailed = components["schemas"]["HDADetailed"]; +/** + * Represents either an HDA or an HDCA with minimal information. + */ +export type HistoryItemSummary = HDASummary | HDCASummary; + /** * Contains storage (object store, quota, etc..) details for a dataset. */ diff --git a/client/src/components/Collections/ListCollectionCreator.vue b/client/src/components/Collections/ListCollectionCreator.vue index 6198fa24cd5c..74bf917e6b6a 100644 --- a/client/src/components/Collections/ListCollectionCreator.vue +++ b/client/src/components/Collections/ListCollectionCreator.vue @@ -8,7 +8,7 @@ import { BAlert, BButton } from "bootstrap-vue"; import { computed, onMounted, ref } from "vue"; import draggable from "vuedraggable"; -import type { HDASummary, HDCADetailed, HDCASummary } from "@/api"; +import type { HDCADetailed, HistoryItemSummary } from "@/api"; import STATES from "@/mvc/dataset/states"; import localize from "@/utils/localization"; @@ -116,7 +116,7 @@ function _validateElements() { } /** describe what is wrong with a particular element if anything */ -function _isElementInvalid(element: HDASummary | HDCASummary) { +function _isElementInvalid(element: HistoryItemSummary) { if (element.history_content_type === "dataset_collection") { return localize("is a collection, this is not allowed"); 
} diff --git a/client/src/components/Collections/PairCollectionCreator.vue b/client/src/components/Collections/PairCollectionCreator.vue index 7ef5a92935c9..15597d9ddda7 100644 --- a/client/src/components/Collections/PairCollectionCreator.vue +++ b/client/src/components/Collections/PairCollectionCreator.vue @@ -2,7 +2,7 @@ import { BAlert } from "bootstrap-vue"; import { computed, onMounted, ref } from "vue"; -import type { HDASummary, HDCADetailed, HDCASummary } from "@/api"; +import type { HDCADetailed, HistoryItemSummary } from "@/api"; import STATES from "@/mvc/dataset/states"; import localize from "@/utils/localization"; @@ -81,7 +81,7 @@ function _validateElements() { } /** describe what is wrong with a particular element if anything */ -function _isElementInvalid(element: HDASummary | HDCASummary) { +function _isElementInvalid(element: HistoryItemSummary) { if (element.history_content_type === "dataset_collection") { return localize("is a collection, this is not allowed"); } diff --git a/client/src/components/History/CurrentHistory/HistoryPanel.vue b/client/src/components/History/CurrentHistory/HistoryPanel.vue index ad742f154c4d..759efea9a90b 100644 --- a/client/src/components/History/CurrentHistory/HistoryPanel.vue +++ b/client/src/components/History/CurrentHistory/HistoryPanel.vue @@ -3,7 +3,7 @@ import { BAlert } from "bootstrap-vue"; import { storeToRefs } from "pinia"; import { computed, onMounted, type Ref, ref, set as VueSet, unref, watch } from "vue"; -import type { HistorySummaryExtended } from "@/api"; +import type { HistoryItemSummary, HistorySummaryExtended } from "@/api"; import { copyDataset } from "@/api/datasets"; import ExpandedItems from "@/components/History/Content/ExpandedItems"; import SelectedItems from "@/components/History/Content/SelectedItems"; @@ -13,7 +13,7 @@ import { Toast } from "@/composables/toast"; import { useActiveElement } from "@/composables/useActiveElement"; import { startWatchingHistory } from 
"@/store/historyStore/model/watchHistory"; import { useEventStore } from "@/stores/eventStore"; -import { type HistoryItem, useHistoryItemsStore } from "@/stores/historyItemsStore"; +import { useHistoryItemsStore } from "@/stores/historyItemsStore"; import { useHistoryStore } from "@/stores/historyStore"; import { type Alias, getOperatorForAlias } from "@/utils/filtering"; import { setDrag } from "@/utils/setDrag"; @@ -219,11 +219,11 @@ function dragSameHistory() { function getDragData() { const eventStore = useEventStore(); const multiple = eventStore.multipleDragData; - let data: HistoryItem[] | undefined; + let data: HistoryItemSummary[] | undefined; let historyId: string | undefined; try { if (multiple) { - const dragData = eventStore.getDragData() as Record; + const dragData = eventStore.getDragData() as Record; // set historyId to the first history_id in the multiple drag data const firstItem = Object.values(dragData)[0]; if (firstItem) { @@ -231,7 +231,7 @@ function getDragData() { } data = Object.values(dragData); } else { - data = [eventStore.getDragData() as HistoryItem]; + data = [eventStore.getDragData() as HistoryItemSummary]; if (data[0]) { historyId = data[0].history_id; } @@ -242,7 +242,7 @@ function getDragData() { return { data, sameHistory: historyId === props.history.id, multiple }; } -function getHighlight(item: HistoryItem) { +function getHighlight(item: HistoryItemSummary) { if (unref(isLoading)) { return undefined; } @@ -263,11 +263,11 @@ function getHighlight(item: HistoryItem) { return "input"; } -function hasMatches(items: HistoryItem[]) { +function hasMatches(items: HistoryItemSummary[]) { return !!items && items.length > 0; } -function isDataset(item: HistoryItem) { +function isDataset(item: HistoryItemSummary) { return item.history_content_type === "dataset"; } @@ -288,7 +288,7 @@ async function loadHistoryItems() { } } -async function onDelete(item: HistoryItem, recursive = false) { +async function onDelete(item: HistoryItemSummary, 
recursive = false) { isLoading.value = true; setInvisible(item); @@ -300,7 +300,7 @@ async function onDelete(item: HistoryItem, recursive = false) { } } -function onHideSelection(selectedItems: HistoryItem[]) { +function onHideSelection(selectedItems: HistoryItemSummary[]) { for (const item of selectedItems) { setInvisible(item); } @@ -310,7 +310,7 @@ function onScroll(newOffset: number) { offsetQueryParam.value = newOffset; } -async function onUndelete(item: HistoryItem) { +async function onUndelete(item: HistoryItemSummary) { setInvisible(item); isLoading.value = true; @@ -322,7 +322,7 @@ async function onUndelete(item: HistoryItem) { } } -async function onUnhide(item: HistoryItem) { +async function onUnhide(item: HistoryItemSummary) { setInvisible(item); isLoading.value = true; @@ -342,11 +342,11 @@ function reloadContents() { startWatchingHistory(); } -function setInvisible(item: HistoryItem) { +function setInvisible(item: HistoryItemSummary) { VueSet(unref(invisibleHistoryItems), item.hid, true); } -function onTagChange(item: HistoryItem, newTags: string[]) { +function onTagChange(item: HistoryItemSummary, newTags: string[]) { item.tags = newTags; } @@ -424,11 +424,11 @@ function updateFilterValue(filterKey: string, newValue: any) { filterText.value = filterClass.setFilterValue(currentFilterText, filterKey, newValue); } -function getItemKey(item: HistoryItem) { +function getItemKey(item: HistoryItemSummary) { return item.type_id; } -function itemUniqueKey(item: HistoryItem) { +function itemUniqueKey(item: HistoryItemSummary) { return `${item.history_content_type}-${item.id}`; } @@ -448,7 +448,7 @@ onMounted(async () => { } }); -function arrowNavigate(item: HistoryItem, eventKey: string) { +function arrowNavigate(item: HistoryItemSummary, eventKey: string) { let nextItem = null; if (eventKey === "ArrowDown") { nextItem = historyItems.value[historyItems.value.indexOf(item) + 1]; @@ -463,9 +463,9 @@ function arrowNavigate(item: HistoryItem, eventKey: string) { } 
function setItemDragstart( - item: HistoryItem, + item: HistoryItemSummary, itemIsSelected: boolean, - selectedItems: Map, + selectedItems: Map, selectionSize: number, event: DragEvent ) { diff --git a/client/src/components/History/Multiple/MultipleViewList.vue b/client/src/components/History/Multiple/MultipleViewList.vue index cff77a6284a3..ea7d8da600cc 100644 --- a/client/src/components/History/Multiple/MultipleViewList.vue +++ b/client/src/components/History/Multiple/MultipleViewList.vue @@ -6,12 +6,12 @@ import { computed, type Ref, ref } from "vue"; //@ts-ignore missing typedefs import VirtualList from "vue-virtual-scroll-list"; +import { HistoryItemSummary } from "@/api"; import { copyDataset } from "@/api/datasets"; import { useAnimationFrameResizeObserver } from "@/composables/sensors/animationFrameResizeObserver"; import { useAnimationFrameScroll } from "@/composables/sensors/animationFrameScroll"; import { Toast } from "@/composables/toast"; import { useEventStore } from "@/stores/eventStore"; -import type { HistoryItem } from "@/stores/historyItemsStore"; import { useHistoryStore } from "@/stores/historyStore"; import localize from "@/utils/localization"; import { errorMessageAsString } from "@/utils/simple-error"; @@ -75,12 +75,12 @@ async function onDrop(evt: any) { } processingDrop.value = true; showDropZone.value = false; - let data: HistoryItem[] | undefined; + let data: HistoryItemSummary[] | undefined; let originalHistoryId: string | undefined; const multiple = eventStore.multipleDragData; try { if (multiple) { - const dragData = eventStore.getDragData() as Record; + const dragData = eventStore.getDragData() as Record; // set originalHistoryId to the first history_id in the multiple drag data const firstItem = Object.values(dragData)[0]; if (firstItem) { @@ -88,7 +88,7 @@ async function onDrop(evt: any) { } data = Object.values(dragData); } else { - data = [eventStore.getDragData() as HistoryItem]; + data = [eventStore.getDragData() as 
HistoryItemSummary]; if (data[0]) { originalHistoryId = data[0].history_id; } diff --git a/client/src/stores/historyItemsStore.ts b/client/src/stores/historyItemsStore.ts index 45f14891b991..8b5dc1aca807 100644 --- a/client/src/stores/historyItemsStore.ts +++ b/client/src/stores/historyItemsStore.ts @@ -8,21 +8,19 @@ import { reverse } from "lodash"; import { defineStore } from "pinia"; import { computed, ref, set } from "vue"; -import type { HDASummary, HDCASummary } from "@/api"; +import type { HistoryItemSummary } from "@/api"; import { HistoryFilters } from "@/components/History/HistoryFilters"; import { mergeArray } from "@/store/historyStore/model/utilities"; import { ActionSkippedError, LastQueue } from "@/utils/lastQueue"; import { urlData } from "@/utils/url"; -export type HistoryItem = HDASummary | HDCASummary; - const limit = 100; -type ExpectedReturn = { stats: { total_matches: number }; contents: HistoryItem[] }; +type ExpectedReturn = { stats: { total_matches: number }; contents: HistoryItemSummary[] }; const queue = new LastQueue(1000, true); export const useHistoryItemsStore = defineStore("historyItemsStore", () => { - const items = ref>({}); + const items = ref>({}); const itemKey = ref("hid"); const totalMatchesCount = ref(undefined); const lastCheckedTime = ref(new Date()); @@ -37,7 +35,7 @@ export const useHistoryItemsStore = defineStore("historyItemsStore", () => { (filter: [string, string]) => !filter[0].includes("related") ); const relatedHid = HistoryFilters.getFilterValue(filterText, "related"); - const filtered = itemArray.filter((item: HistoryItem) => { + const filtered = itemArray.filter((item: HistoryItemSummary) => { if (!item) { return false; } @@ -74,12 +72,12 @@ export const useHistoryItemsStore = defineStore("historyItemsStore", () => { } } - function saveHistoryItems(historyId: string, payload: HistoryItem[], relatedHid = null) { + function saveHistoryItems(historyId: string, payload: HistoryItemSummary[], relatedHid = null) { // 
merges incoming payload into existing state mergeArray(historyId, payload, items.value, itemKey.value); // if related filter is included, set keys in state if (relatedHid) { - payload.forEach((item: HistoryItem) => { + payload.forEach((item: HistoryItemSummary) => { // current `item.hid` is related to item with hid = `relatedHid` const relationKey = `${historyId}-${relatedHid}-${item.hid}`; set(relatedItems.value, relationKey, true); From a8e88d5fa71a3aab99b595351c3d57143d91dbb3 Mon Sep 17 00:00:00 2001 From: davelopez <46503462+davelopez@users.noreply.github.com> Date: Sat, 6 Apr 2024 12:51:14 +0200 Subject: [PATCH 372/669] Remove unused leftover `_uid` --- client/src/components/Collections/ListCollectionCreator.vue | 2 +- client/src/components/Collections/PairCollectionCreator.vue | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/client/src/components/Collections/ListCollectionCreator.vue b/client/src/components/Collections/ListCollectionCreator.vue index 74bf917e6b6a..5798187943f2 100644 --- a/client/src/components/Collections/ListCollectionCreator.vue +++ b/client/src/components/Collections/ListCollectionCreator.vue @@ -93,7 +93,7 @@ function _elementsSetUp() { function _ensureElementIds() { workingElements.value.forEach((element) => { if (!Object.prototype.hasOwnProperty.call(element, "id")) { - element.id = element._uid as string; + console.warn("Element missing id", element); } }); diff --git a/client/src/components/Collections/PairCollectionCreator.vue b/client/src/components/Collections/PairCollectionCreator.vue index 15597d9ddda7..2520a3a022f8 100644 --- a/client/src/components/Collections/PairCollectionCreator.vue +++ b/client/src/components/Collections/PairCollectionCreator.vue @@ -58,7 +58,7 @@ function _elementsSetUp() { function _ensureElementIds() { workingElements.value.forEach((element) => { if (!Object.prototype.hasOwnProperty.call(element, "id")) { - element.id = element._uid as string; + console.warn("Element missing id", 
element); } }); From 14fc332c24b6c3bb00b66695b17bdcfcc31bcb12 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Sat, 6 Apr 2024 14:00:26 +0100 Subject: [PATCH 373/669] Unpin sphinx-rtd-theme now that 2.0.0 was released --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 1829b50f0e5f..155bcbc30cdf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -158,7 +158,7 @@ selenium = "*" urllib3 = "<2" # Necessary for poetry for resolve deps for Python 3.8+ seletools = "*" Sphinx = ">=4.0" # support docutils 0.17 -sphinx-rtd-theme = ">=1.0.0,<2" # https://github.com/python-poetry/poetry/issues/8194 +sphinx-rtd-theme = "*" statsd = "*" testfixtures = "*" tuspy = "*" From 1aa66a80cf4eaa615ab248b37f95dcbb17d911c4 Mon Sep 17 00:00:00 2001 From: galaxybot Date: Sat, 6 Apr 2024 03:11:51 +0000 Subject: [PATCH 374/669] Update Python dependencies --- lib/galaxy/dependencies/dev-requirements.txt | 20 ++++++++--------- .../dependencies/pinned-lint-requirements.txt | 2 +- .../dependencies/pinned-requirements.txt | 22 +++++++++---------- .../pinned-typecheck-requirements.txt | 4 ++-- 4 files changed, 24 insertions(+), 24 deletions(-) diff --git a/lib/galaxy/dependencies/dev-requirements.txt b/lib/galaxy/dependencies/dev-requirements.txt index 419d7da4aace..ac0461e41e4f 100644 --- a/lib/galaxy/dependencies/dev-requirements.txt +++ b/lib/galaxy/dependencies/dev-requirements.txt @@ -33,11 +33,11 @@ darker==2.1.0 ; python_version >= "3.8" and python_version < "3.13" darkgraylib==1.1.1 ; python_version >= "3.8" and python_version < "3.13" defusedxml==0.7.1 ; python_version >= "3.8" and python_version < "3.13" deprecated==1.2.14 ; python_version >= "3.8" and python_version < "3.13" -docutils==0.18.1 ; python_version >= "3.8" and python_version < "3.13" +docutils==0.20.1 ; python_version >= "3.8" and python_version < "3.13" exceptiongroup==1.2.0 ; python_version >= "3.8" and python_version < "3.11" filelock==3.13.3 ; 
python_version >= "3.8" and python_version < "3.13" fluent-logger==0.11.0 ; python_version >= "3.8" and python_version < "3.13" -fonttools==4.50.0 ; python_version >= "3.8" and python_version < "3.13" +fonttools==4.51.0 ; python_version >= "3.8" and python_version < "3.13" frozenlist==1.4.1 ; python_version >= "3.8" and python_version < "3.13" galaxy-release-util==0.1.7 ; python_version >= "3.8" and python_version < "3.13" graylint==1.0.1 ; python_version >= "3.8" and python_version < "3.13" @@ -53,12 +53,12 @@ iniconfig==2.0.0 ; python_version >= "3.8" and python_version < "3.13" isodate==0.6.1 ; python_version >= "3.8" and python_version < "3.13" isort==5.13.2 ; python_version >= "3.8" and python_version < "3.13" jaraco-classes==3.4.0 ; python_version >= "3.8" and python_version < "3.13" -jaraco-context==4.3.0 ; python_version >= "3.8" and python_version < "3.13" +jaraco-context==5.1.0 ; python_version >= "3.8" and python_version < "3.13" jaraco-functools==4.0.0 ; python_version >= "3.8" and python_version < "3.13" jeepney==0.8.0 ; python_version >= "3.8" and python_version < "3.13" and sys_platform == "linux" jinja2==3.1.3 ; python_version >= "3.8" and python_version < "3.13" junit-xml==1.9 ; python_version >= "3.8" and python_version < "3.13" -keyring==25.0.0 ; python_version >= "3.8" and python_version < "3.13" +keyring==25.1.0 ; python_version >= "3.8" and python_version < "3.13" kiwisolver==1.4.5 ; python_version >= "3.8" and python_version < "3.13" kombu==5.3.6 ; python_version >= "3.8" and python_version < "3.13" lxml==4.9.4 ; python_version >= "3.8" and python_version < "3.13" @@ -66,7 +66,7 @@ markdown-it-py==3.0.0 ; python_version >= "3.8" and python_version < "3.13" markdown-it-reporter==0.0.2 ; python_version >= "3.8" and python_version < "3.13" markupsafe==2.1.5 ; python_version >= "3.8" and python_version < "3.13" matplotlib==3.7.5 ; python_version >= "3.8" and python_version < "3.9" -matplotlib==3.8.3 ; python_version >= "3.9" and python_version < 
"3.13" +matplotlib==3.8.4 ; python_version >= "3.9" and python_version < "3.13" mdit-py-plugins==0.4.0 ; python_version >= "3.8" and python_version < "3.13" mdurl==0.1.2 ; python_version >= "3.8" and python_version < "3.13" mirakuru==2.5.2 ; python_version >= "3.8" and python_version < "3.13" @@ -82,7 +82,7 @@ numpy==1.26.4 ; python_version >= "3.9" and python_version < "3.13" outcome==1.3.0.post0 ; python_version >= "3.8" and python_version < "3.13" packaging==24.0 ; python_version >= "3.8" and python_version < "3.13" pathspec==0.12.1 ; python_version >= "3.8" and python_version < "3.13" -pillow==10.2.0 ; python_version >= "3.8" and python_version < "3.13" +pillow==10.3.0 ; python_version >= "3.8" and python_version < "3.13" pkce==1.0.3 ; python_version >= "3.8" and python_version < "3.13" pkginfo==1.10.0 ; python_version >= "3.8" and python_version < "3.13" platformdirs==4.2.0 ; python_version >= "3.8" and python_version < "3.13" @@ -132,7 +132,7 @@ ruamel-yaml-clib==0.2.8 ; platform_python_implementation == "CPython" and python ruamel-yaml==0.18.6 ; python_version >= "3.8" and python_version < "3.13" schema-salad==8.5.20240311110950 ; python_version >= "3.8" and python_version < "3.13" scipy==1.10.1 ; python_version >= "3.8" and python_version < "3.9" -scipy==1.12.0 ; python_version >= "3.9" and python_version < "3.13" +scipy==1.13.0 ; python_version >= "3.9" and python_version < "3.13" secretstorage==3.3.3 ; python_version >= "3.8" and python_version < "3.13" and sys_platform == "linux" selenium==4.19.0 ; python_version >= "3.8" and python_version < "3.13" seletools==1.4.0 ; python_version >= "3.8" and python_version < "3.13" @@ -141,7 +141,7 @@ six==1.16.0 ; python_version >= "3.8" and python_version < "3.13" sniffio==1.3.1 ; python_version >= "3.8" and python_version < "3.13" snowballstemmer==2.2.0 ; python_version >= "3.8" and python_version < "3.13" sortedcontainers==2.4.0 ; python_version >= "3.8" and python_version < "3.13" -sphinx-rtd-theme==1.3.0 ; 
python_version >= "3.8" and python_version < "3.13" +sphinx-rtd-theme==2.0.0 ; python_version >= "3.8" and python_version < "3.13" sphinx==7.1.2 ; python_version >= "3.8" and python_version < "3.13" sphinxcontrib-applehelp==1.0.4 ; python_version >= "3.8" and python_version < "3.13" sphinxcontrib-devhelp==1.0.2 ; python_version >= "3.8" and python_version < "3.13" @@ -161,14 +161,14 @@ trio==0.25.0 ; python_version >= "3.8" and python_version < "3.13" tuspy==1.0.3 ; python_version >= "3.8" and python_version < "3.13" twill==3.2.3 ; python_version >= "3.8" and python_version < "3.13" twine==5.0.0 ; python_version >= "3.8" and python_version < "3.13" -typing-extensions==4.10.0 ; python_version >= "3.8" and python_version < "3.13" +typing-extensions==4.11.0 ; python_version >= "3.8" and python_version < "3.13" tzdata==2024.1 ; python_version >= "3.8" and python_version < "3.13" urllib3==1.26.18 ; python_version >= "3.8" and python_version < "3.13" urllib3[socks]==1.26.18 ; python_version >= "3.8" and python_version < "3.13" vine==5.1.0 ; python_version >= "3.8" and python_version < "3.13" watchdog==4.0.0 ; python_version >= "3.8" and python_version < "3.13" wcwidth==0.2.13 ; python_version >= "3.8" and python_version < "3.13" -werkzeug==3.0.1 ; python_version >= "3.8" and python_version < "3.13" +werkzeug==3.0.2 ; python_version >= "3.8" and python_version < "3.13" wrapt==1.16.0 ; python_version >= "3.8" and python_version < "3.13" wsproto==1.2.0 ; python_version >= "3.8" and python_version < "3.13" yarl==1.9.4 ; python_version >= "3.8" and python_version < "3.13" diff --git a/lib/galaxy/dependencies/pinned-lint-requirements.txt b/lib/galaxy/dependencies/pinned-lint-requirements.txt index ab54c17d8670..d45d101010d4 100644 --- a/lib/galaxy/dependencies/pinned-lint-requirements.txt +++ b/lib/galaxy/dependencies/pinned-lint-requirements.txt @@ -4,4 +4,4 @@ flake8-bugbear==24.2.6 mccabe==0.7.0 pycodestyle==2.11.1 pyflakes==3.2.0 -ruff==0.3.4 +ruff==0.3.5 diff --git 
a/lib/galaxy/dependencies/pinned-requirements.txt b/lib/galaxy/dependencies/pinned-requirements.txt index 571c648afc85..5181edef346e 100644 --- a/lib/galaxy/dependencies/pinned-requirements.txt +++ b/lib/galaxy/dependencies/pinned-requirements.txt @@ -2,7 +2,7 @@ a2wsgi==1.10.4 ; python_version >= "3.8" and python_version < "3.13" adal==1.2.7 ; python_version >= "3.8" and python_version < "3.13" -aiobotocore==2.12.1 ; python_version >= "3.8" and python_version < "3.13" +aiobotocore==2.12.2 ; python_version >= "3.8" and python_version < "3.13" aiodataloader==0.4.0 ; python_version >= "3.8" and python_version < "3.13" aiofiles==23.2.1 ; python_version >= "3.8" and python_version < "3.13" aiohttp==3.9.3 ; python_version >= "3.8" and python_version < "3.13" @@ -31,7 +31,7 @@ beaker==1.12.1 ; python_version >= "3.8" and python_version < "3.13" billiard==4.2.0 ; python_version >= "3.8" and python_version < "3.13" bioblend==1.2.0 ; python_version >= "3.8" and python_version < "3.13" bleach==6.1.0 ; python_version >= "3.8" and python_version < "3.13" -boltons==23.1.1 ; python_version >= "3.8" and python_version < "3.13" +boltons==24.0.0 ; python_version >= "3.8" and python_version < "3.13" boto==2.49.0 ; python_version >= "3.8" and python_version < "3.13" botocore==1.34.51 ; python_version >= "3.8" and python_version < "3.13" bx-python==0.11.0 ; python_version >= "3.8" and python_version < "3.13" @@ -61,12 +61,12 @@ deprecation==2.1.0 ; python_version >= "3.8" and python_version < "3.13" dictobj==0.4 ; python_version >= "3.8" and python_version < "3.13" dnspython==2.6.1 ; python_version >= "3.8" and python_version < "3.13" docopt==0.6.2 ; python_version >= "3.8" and python_version < "3.13" -docutils==0.18.1 ; python_version >= "3.8" and python_version < "3.13" +docutils==0.20.1 ; python_version >= "3.8" and python_version < "3.13" dparse==0.6.3 ; python_version >= "3.8" and python_version < "3.13" edam-ontology==1.25.2 ; python_version >= "3.8" and python_version < "3.13" 
email-validator==2.1.1 ; python_version >= "3.8" and python_version < "3.13" exceptiongroup==1.2.0 ; python_version >= "3.8" and python_version < "3.11" -fastapi==0.110.0 ; python_version >= "3.8" and python_version < "3.13" +fastapi==0.110.1 ; python_version >= "3.8" and python_version < "3.13" filelock==3.13.3 ; python_version >= "3.8" and python_version < "3.13" frozenlist==1.4.1 ; python_version >= "3.8" and python_version < "3.13" fs==2.4.16 ; python_version >= "3.8" and python_version < "3.13" @@ -79,7 +79,7 @@ graphene==3.3 ; python_version >= "3.8" and python_version < "3.13" graphql-core==3.2.3 ; python_version >= "3.8" and python_version < "3.13" graphql-relay==3.2.0 ; python_version >= "3.8" and python_version < "3.13" gravity==1.0.6 ; python_version >= "3.8" and python_version < "3.13" -greenlet==3.0.3 ; python_version >= "3.8" and (platform_machine == "aarch64" or platform_machine == "ppc64le" or platform_machine == "x86_64" or platform_machine == "amd64" or platform_machine == "AMD64" or platform_machine == "win32" or platform_machine == "WIN32") and python_version < "3.13" +greenlet==3.0.3 ; python_version >= "3.8" and python_version < "3.13" and (platform_machine == "aarch64" or platform_machine == "ppc64le" or platform_machine == "x86_64" or platform_machine == "amd64" or platform_machine == "AMD64" or platform_machine == "win32" or platform_machine == "WIN32") gunicorn==21.2.0 ; python_version >= "3.8" and python_version < "3.13" gxformat2==0.18.0 ; python_version >= "3.8" and python_version < "3.13" h11==0.14.0 ; python_version >= "3.8" and python_version < "3.13" @@ -122,10 +122,10 @@ oyaml==1.0 ; python_version >= "3.8" and python_version < "3.13" packaging==24.0 ; python_version >= "3.8" and python_version < "3.13" paramiko==3.4.0 ; python_version >= "3.8" and python_version < "3.13" parsley==1.3 ; python_version >= "3.8" and python_version < "3.13" -paste==3.8.0 ; python_version >= "3.8" and python_version < "3.13" +paste==3.9.0 ; 
python_version >= "3.8" and python_version < "3.13" pastedeploy==3.1.0 ; python_version >= "3.8" and python_version < "3.13" pebble==5.0.7 ; python_version >= "3.8" and python_version < "3.13" -pillow==10.2.0 ; python_version >= "3.8" and python_version < "3.13" +pillow==10.3.0 ; python_version >= "3.8" and python_version < "3.13" pkgutil-resolve-name==1.3.10 ; python_version >= "3.8" and python_version < "3.9" promise==2.3 ; python_version >= "3.8" and python_version < "3.13" prompt-toolkit==3.0.43 ; python_version >= "3.8" and python_version < "3.13" @@ -182,12 +182,12 @@ sniffio==1.3.1 ; python_version >= "3.8" and python_version < "3.13" social-auth-core==4.5.3 ; python_version >= "3.8" and python_version < "3.13" sortedcontainers==2.4.0 ; python_version >= "3.8" and python_version < "3.13" spython==0.3.13 ; python_version >= "3.8" and python_version < "3.13" -sqlalchemy==2.0.25 ; python_version >= "3.8" and python_version < "3.13" +sqlalchemy==2.0.29 ; python_version >= "3.8" and python_version < "3.13" sqlitedict==2.1.0 ; python_version >= "3.8" and python_version < "3.13" sqlparse==0.4.4 ; python_version >= "3.8" and python_version < "3.13" starlette-context==0.3.6 ; python_version >= "3.8" and python_version < "3.13" starlette-graphene3==0.6.0 ; python_version >= "3.8" and python_version < "3.13" -starlette==0.36.3 ; python_version >= "3.8" and python_version < "3.13" +starlette==0.37.2 ; python_version >= "3.8" and python_version < "3.13" supervisor==4.2.5 ; python_version >= "3.8" and python_version < "3.13" svgwrite==1.4.3 ; python_version >= "3.8" and python_version < "3.13" tenacity==8.2.3 ; python_version >= "3.8" and python_version < "3.13" @@ -198,10 +198,10 @@ tornado==6.4 ; python_version >= "3.8" and python_version < "3.13" tqdm==4.66.2 ; python_version >= "3.8" and python_version < "3.13" tuspy==1.0.3 ; python_version >= "3.8" and python_version < "3.13" tuswsgi==0.5.5 ; python_version >= "3.8" and python_version < "3.13" 
-typing-extensions==4.10.0 ; python_version >= "3.8" and python_version < "3.13" +typing-extensions==4.11.0 ; python_version >= "3.8" and python_version < "3.13" tzdata==2024.1 ; python_version >= "3.8" and python_version < "3.13" tzlocal==5.2 ; python_version >= "3.8" and python_version < "3.13" -ubiquerg==0.7.0 ; python_version >= "3.8" and python_version < "3.13" +ubiquerg==0.8.0 ; python_version >= "3.8" and python_version < "3.13" urllib3==1.26.18 ; python_version >= "3.8" and python_version < "3.13" uvicorn==0.29.0 ; python_version >= "3.8" and python_version < "3.13" uvloop==0.19.0 ; python_version >= "3.8" and python_version < "3.13" diff --git a/lib/galaxy/dependencies/pinned-typecheck-requirements.txt b/lib/galaxy/dependencies/pinned-typecheck-requirements.txt index 191f63a7fb18..a39a18f06f07 100644 --- a/lib/galaxy/dependencies/pinned-typecheck-requirements.txt +++ b/lib/galaxy/dependencies/pinned-typecheck-requirements.txt @@ -12,7 +12,7 @@ types-bleach==6.1.0.20240331 ; python_version >= "3.8" and python_version < "3.1 types-boto==2.49.18.20240205 ; python_version >= "3.8" and python_version < "3.13" types-contextvars==2.4.7.3 ; python_version >= "3.8" and python_version < "3.13" types-dataclasses==0.6.6 ; python_version >= "3.8" and python_version < "3.13" -types-docutils==0.20.0.20240331 ; python_version >= "3.8" and python_version < "3.13" +types-docutils==0.20.0.20240406 ; python_version >= "3.8" and python_version < "3.13" types-html5lib==1.1.11.20240228 ; python_version >= "3.8" and python_version < "3.13" types-markdown==3.6.0.20240316 ; python_version >= "3.8" and python_version < "3.13" types-paramiko==3.4.0.20240311 ; python_version >= "3.8" and python_version < "3.13" @@ -22,4 +22,4 @@ types-pyyaml==6.0.12.20240311 ; python_version >= "3.8" and python_version < "3. 
types-requests==2.31.0.6 ; python_version >= "3.8" and python_version < "3.13" types-six==1.16.21.20240311 ; python_version >= "3.8" and python_version < "3.13" types-urllib3==1.26.25.14 ; python_version >= "3.8" and python_version < "3.13" -typing-extensions==4.10.0 ; python_version >= "3.8" and python_version < "3.13" +typing-extensions==4.11.0 ; python_version >= "3.8" and python_version < "3.13" From 29e9cc327672ac6a6c472b2501a010a36b7782b0 Mon Sep 17 00:00:00 2001 From: mvdbeek Date: Mon, 8 Apr 2024 11:26:06 +0200 Subject: [PATCH 375/669] For workflow run form for worflows with null rename PJA Fixes: ``` AttributeError: 'NoneType' object has no attribute 'values' File "galaxy/web/framework/decorators.py", line 346, in decorator rval = func(self, trans, *args, **kwargs) File "galaxy/webapps/galaxy/api/workflows.py", line 357, in workflow_dict ret_dict = self.workflow_contents_manager.workflow_to_dict( File "galaxy/managers/workflows.py", line 907, in workflow_to_dict wf_dict = self._workflow_to_dict_run(trans, stored, workflow=workflow, history=history or trans.history) File "galaxy/managers/workflows.py", line 1025, in _workflow_to_dict_run step_model["replacement_parameters"] = step.module.get_informal_replacement_parameters(step) File "galaxy/workflow/modules.py", line 2388, in get_informal_replacement_parameters for argument in pja.action_arguments.values(): ``` from https://sentry.galaxyproject.org/share/issue/954ebe7b658f401a8a8e40987d43a91d/ --- lib/galaxy/workflow/modules.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/lib/galaxy/workflow/modules.py b/lib/galaxy/workflow/modules.py index 6af779177993..ba0692c77d52 100644 --- a/lib/galaxy/workflow/modules.py +++ b/lib/galaxy/workflow/modules.py @@ -2381,13 +2381,14 @@ def __to_pja(self, key, value, step): action_arguments = None return PostJobAction(value["action_type"], step, output_name, action_arguments) - def get_informal_replacement_parameters(self, step) -> List[str]: 
+ def get_informal_replacement_parameters(self, step: WorkflowStep) -> List[str]: """Return a list of replacement parameters.""" replacement_parameters = set() for pja in step.post_job_actions: - for argument in pja.action_arguments.values(): - for match in re.findall(r"\$\{(.+?)\}", unicodify(argument)): - replacement_parameters.add(match) + if action_arguments := pja.action_arguments: + for argument in action_arguments.values(): + for match in re.findall(r"\$\{(.+?)\}", unicodify(argument)): + replacement_parameters.add(match) return list(replacement_parameters) From 741e93e73b7aae06ff4d7f0bf3008df3430f4c88 Mon Sep 17 00:00:00 2001 From: Laila Los <44241786+ElectronicBlueberry@users.noreply.github.com> Date: Thu, 7 Mar 2024 16:12:39 +0100 Subject: [PATCH 376/669] add undo-redo store --- client/src/stores/undoRedoStore/index.ts | 54 +++++++++++++++++++ .../stores/undoRedoStore/undoRedoAction.ts | 22 ++++++++ 2 files changed, 76 insertions(+) create mode 100644 client/src/stores/undoRedoStore/index.ts create mode 100644 client/src/stores/undoRedoStore/undoRedoAction.ts diff --git a/client/src/stores/undoRedoStore/index.ts b/client/src/stores/undoRedoStore/index.ts new file mode 100644 index 000000000000..0dfb6e046e0b --- /dev/null +++ b/client/src/stores/undoRedoStore/index.ts @@ -0,0 +1,54 @@ +import { ref } from "vue"; + +import { defineScopedStore } from "@/stores/scopedStore"; + +import { UndoRedoAction } from "./undoRedoAction"; + +export const useUndoRedoStore = defineScopedStore("undoRedoStore", () => { + const undoActionStack = ref([]); + const redoActionStack = ref([]); + const maxUndoActions = ref(100); + + function undo() { + const action = undoActionStack.value.pop(); + + if (action) { + action.undo(); + redoActionStack.value.push(action); + } + } + + function redo() { + const action = redoActionStack.value.pop(); + + if (action) { + action.redo(); + undoActionStack.value.push(action); + } + } + + function applyAction(action: UndoRedoAction) { + 
action.run(); + clearRedoStack(); + undoActionStack.value.push(action); + + while (undoActionStack.value.length > maxUndoActions.value && undoActionStack.value.length > 0) { + const action = undoActionStack.value.shift(); + action?.destroy(); + } + } + + function clearRedoStack() { + redoActionStack.value.forEach((action) => action.destroy()); + redoActionStack.value = []; + } + + return { + undoActionStack, + redoActionStack, + maxUndoActions, + undo, + redo, + applyAction, + }; +}); diff --git a/client/src/stores/undoRedoStore/undoRedoAction.ts b/client/src/stores/undoRedoStore/undoRedoAction.ts new file mode 100644 index 000000000000..ad2899c412d7 --- /dev/null +++ b/client/src/stores/undoRedoStore/undoRedoAction.ts @@ -0,0 +1,22 @@ +export class UndoRedoAction { + onRun?: () => void; + onUndo?: () => void; + onRedo?: () => void; + onDestroy?: () => void; + + run() { + this.onRun ? this.onRun() : null; + } + + undo() { + this.onUndo ? this.onUndo() : null; + } + + redo() { + this.onRedo ? this.onRedo() : this.run(); + } + + destroy() { + this.onDestroy ? 
this.onDestroy() : null; + } +} From b6ae7a87cafa41cf3b04e6331a10fedcda0ccd6e Mon Sep 17 00:00:00 2001 From: Laila Los <44241786+ElectronicBlueberry@users.noreply.github.com> Date: Fri, 8 Mar 2024 10:05:37 +0100 Subject: [PATCH 377/669] undo-redo adding comments --- .../src/components/Workflow/Editor/Index.vue | 9 ++++- .../Workflow/Editor/Tools/ToolBar.vue | 4 +-- .../Workflow/Editor/Tools/useToolLogic.ts | 33 ++++++++++++++++--- client/src/composables/workflowStores.ts | 6 ++++ client/src/stores/undoRedoStore/index.ts | 8 +++-- .../stores/undoRedoStore/undoRedoAction.ts | 32 +++++++++++++----- 6 files changed, 74 insertions(+), 18 deletions(-) diff --git a/client/src/components/Workflow/Editor/Index.vue b/client/src/components/Workflow/Editor/Index.vue index 64502b77a927..284e94d96448 100644 --- a/client/src/components/Workflow/Editor/Index.vue +++ b/client/src/components/Workflow/Editor/Index.vue @@ -168,6 +168,7 @@ diff --git a/client/src/stores/undoRedoStore/index.ts b/client/src/stores/undoRedoStore/index.ts index 66523b1d5e2b..173de34744a8 100644 --- a/client/src/stores/undoRedoStore/index.ts +++ b/client/src/stores/undoRedoStore/index.ts @@ -1,4 +1,4 @@ -import { ref } from "vue"; +import { computed, ref } from "vue"; import { defineScopedStore } from "@/stores/scopedStore"; @@ -14,6 +14,7 @@ export const useUndoRedoStore = defineScopedStore("undoRedoStore", () => { const maxUndoActions = ref(100); function undo() { + flushLazyAction(); const action = undoActionStack.value.pop(); if (action !== undefined) { @@ -32,6 +33,7 @@ export const useUndoRedoStore = defineScopedStore("undoRedoStore", () => { } function applyAction(action: UndoRedoAction) { + flushLazyAction(); action.run(); clearRedoStack(); undoActionStack.value.push(action); @@ -60,6 +62,52 @@ export const useUndoRedoStore = defineScopedStore("undoRedoStore", () => { return new FactoryAction((action) => applyAction(action)); } + let lazyActionTimeout: ReturnType | undefined = undefined; + + /** 
action which is currently queued to run */ + const pendingLazyAction = ref(null); + + /** + * Queues an action to be applied after a delay. + * The action is applied immediately, should another action be applied, or be queued. + * You can read the `pendingLazyAction` state, or `isQueued`, to find out if the action was applied. + * + * `flushLazyAction` runs the pending lazy action immediately. + * + * `setLazyActionTimeout` can be used to extend the timeout. + * + * @param action action to queue + * @param timeout when to run the action in milliseconds. default to 1000 milliseconds + */ + function applyLazyAction(action: UndoRedoAction, timeout = 1000) { + flushLazyAction(); + clearRedoStack(); + pendingLazyAction.value = action; + lazyActionTimeout = setTimeout(() => flushLazyAction(), timeout); + } + + function clearLazyAction() { + clearTimeout(lazyActionTimeout); + pendingLazyAction.value = null; + } + + function flushLazyAction() { + clearTimeout(lazyActionTimeout); + + if (pendingLazyAction.value) { + const action = pendingLazyAction.value; + clearLazyAction(); + applyAction(action); + } + } + + function setLazyActionTimeout(timeout: number) { + clearTimeout(lazyActionTimeout); + lazyActionTimeout = setTimeout(() => flushLazyAction(), timeout); + } + + const isQueued = computed(() => (action?: UndoRedoAction | null) => pendingLazyAction.value === action); + return { undoActionStack, redoActionStack, @@ -68,6 +116,12 @@ export const useUndoRedoStore = defineScopedStore("undoRedoStore", () => { redo, applyAction, action, + applyLazyAction, + clearLazyAction, + flushLazyAction, + setLazyActionTimeout, + isQueued, + pendingLazyAction, }; }); From ecd3740b4b7e42e02c3bf3c664cb72b3e4266ba2 Mon Sep 17 00:00:00 2001 From: Laila Los <44241786+ElectronicBlueberry@users.noreply.github.com> Date: Fri, 8 Mar 2024 16:07:50 +0100 Subject: [PATCH 381/669] add undo-redo for frame movement --- .../Workflow/Editor/Actions/commentActions.ts | 85 +++++++++++++++++-- 
.../Workflow/Editor/Comments/FrameComment.vue | 46 ++++------ client/src/stores/undoRedoStore/README.md | 69 +++++++++++++++ client/src/stores/workflowStepStore.ts | 2 + 4 files changed, 165 insertions(+), 37 deletions(-) diff --git a/client/src/components/Workflow/Editor/Actions/commentActions.ts b/client/src/components/Workflow/Editor/Actions/commentActions.ts index d57efd220e1e..264aa82a3b86 100644 --- a/client/src/components/Workflow/Editor/Actions/commentActions.ts +++ b/client/src/components/Workflow/Editor/Actions/commentActions.ts @@ -5,6 +5,7 @@ import type { WorkflowCommentColor, WorkflowCommentStore, } from "@/stores/workflowEditorCommentStore"; +import { Step, WorkflowStepStore } from "@/stores/workflowStepStore"; class CommentAction extends UndoRedoAction { protected store: WorkflowCommentStore; @@ -64,7 +65,6 @@ class MutateCommentAction extends UndoRedoActio private commentId: number; private startData: WorkflowComment[K]; private endData: WorkflowComment[K]; - private ran = false; protected applyDataCallback: (commentId: number, data: WorkflowComment[K]) => void; constructor( @@ -82,17 +82,12 @@ class MutateCommentAction extends UndoRedoActio } updateData(data: WorkflowComment[K]) { - if (this.ran) { - throw new Error("data of a mutation action can not be changed once the action was run"); - } else { - this.endData = data; - this.applyDataCallback(this.commentId, this.endData); - } + this.endData = data; + this.applyDataCallback(this.commentId, this.endData); } - run() { + redo() { this.applyDataCallback(this.commentId, this.endData); - this.ran = true; } undo() { @@ -120,3 +115,75 @@ export class ChangeSizeAction extends MutateCommentAction<"size"> { super(comment, "size", size, callback); } } + +type StepWithPosition = Step & { position: NonNullable }; + +export class MoveMultipleAction extends UndoRedoAction { + private commentStore; + private stepStore; + private comments; + private steps; + + private stepStartOffsets = new Map(); + private 
commentStartOffsets = new Map(); + + private positionFrom; + private positionTo; + + constructor( + commentStore: WorkflowCommentStore, + stepStore: WorkflowStepStore, + comments: WorkflowComment[], + steps: StepWithPosition[], + position: { x: number; y: number } + ) { + super(); + this.commentStore = commentStore; + this.stepStore = stepStore; + this.comments = [...comments]; + this.steps = [...steps]; + + this.steps.forEach((step) => { + this.stepStartOffsets.set(step.id, [step.position.left - position.x, step.position.top - position.y]); + }); + + this.comments.forEach((comment) => { + this.commentStartOffsets.set(comment.id, [ + comment.position[0] - position.x, + comment.position[1] - position.y, + ]); + }); + + this.positionFrom = { ...position }; + this.positionTo = { ...position }; + } + + changePosition(position: { x: number; y: number }) { + this.setPosition(position); + this.positionTo = { ...position }; + } + + private setPosition(position: { x: number; y: number }) { + this.steps.forEach((step) => { + const stepPosition = { left: 0, top: 0 }; + const offset = this.stepStartOffsets.get(step.id) ?? [0, 0]; + stepPosition.left = position.x + offset[0]; + stepPosition.top = position.y + offset[1]; + this.stepStore.updateStep({ ...step, position: stepPosition }); + }); + + this.comments.forEach((comment) => { + const offset = this.commentStartOffsets.get(comment.id) ?? 
[0, 0]; + const commentPosition = [position.x + offset[0], position.y + offset[1]] as [number, number]; + this.commentStore.changePosition(comment.id, commentPosition); + }); + } + + undo() { + this.setPosition(this.positionFrom); + } + + redo() { + this.setPosition(this.positionTo); + } +} diff --git a/client/src/components/Workflow/Editor/Comments/FrameComment.vue b/client/src/components/Workflow/Editor/Comments/FrameComment.vue index 8e316ddf8c96..496eb28625a3 100644 --- a/client/src/components/Workflow/Editor/Comments/FrameComment.vue +++ b/client/src/components/Workflow/Editor/Comments/FrameComment.vue @@ -13,6 +13,7 @@ import { useWorkflowStores } from "@/composables/workflowStores"; import type { FrameWorkflowComment, WorkflowComment, WorkflowCommentColor } from "@/stores/workflowEditorCommentStore"; import type { Step } from "@/stores/workflowStepStore"; +import { MoveMultipleAction } from "../Actions/commentActions"; import { brighterColors, darkenedColors } from "./colors"; import { useResizable } from "./useResizable"; import { selectAllText } from "./utilities"; @@ -66,7 +67,11 @@ function getInnerText() { } function saveText() { - emit("change", { ...props.comment.data, title: getInnerText() }); + const text = getInnerText(); + + if (text !== props.comment.data.title) { + emit("change", { ...props.comment.data, title: text }); + } } const showColorSelector = ref(false); @@ -91,7 +96,7 @@ function onSetColor(color: WorkflowCommentColor) { emit("set-color", color); } -const { stateStore, stepStore, commentStore } = useWorkflowStores(); +const { stateStore, stepStore, commentStore, undoRedoStore } = useWorkflowStores(); function getStepsInBounds(bounds: AxisAlignedBoundingBox) { const steps: StepWithPosition[] = []; @@ -139,8 +144,8 @@ type StepWithPosition = Step & { position: NonNullable }; let stepsInBounds: StepWithPosition[] = []; let commentsInBounds: WorkflowComment[] = []; -const stepStartOffsets = new Map(); -const commentStartOffsets = new 
Map(); + +let lazyAction: MoveMultipleAction | null = null; function getAABB() { const aabb = new AxisAlignedBoundingBox(); @@ -157,39 +162,25 @@ function onDragStart() { stepsInBounds = getStepsInBounds(aabb); commentsInBounds = getCommentsInBounds(aabb); - stepsInBounds.forEach((step) => { - stepStartOffsets.set(step.id, [step.position.left - aabb.x, step.position.top - aabb.y]); - }); + commentsInBounds.push(props.comment); - commentsInBounds.forEach((comment) => { - commentStartOffsets.set(comment.id, [comment.position[0] - aabb.x, comment.position[1] - aabb.y]); - }); + lazyAction = new MoveMultipleAction(commentStore, stepStore, commentsInBounds, stepsInBounds, aabb); + undoRedoStore.applyLazyAction(lazyAction); } function onDragEnd() { saveText(); stepsInBounds = []; commentsInBounds = []; - stepStartOffsets.clear(); - commentStartOffsets.clear(); + undoRedoStore.flushLazyAction(); } function onMove(position: { x: number; y: number }) { - stepsInBounds.forEach((step) => { - const stepPosition = { left: 0, top: 0 }; - const offset = stepStartOffsets.get(step.id) ?? [0, 0]; - stepPosition.left = position.x + offset[0]; - stepPosition.top = position.y + offset[1]; - stepStore.updateStep({ ...step, position: stepPosition }); - }); - - commentsInBounds.forEach((comment) => { - const offset = commentStartOffsets.get(comment.id) ?? [0, 0]; - const commentPosition = [position.x + offset[0], position.y + offset[1]] as [number, number]; - commentStore.changePosition(comment.id, commentPosition); - }); - - emit("move", [position.x, position.y]); + if (lazyAction && undoRedoStore.isQueued(lazyAction)) { + lazyAction.changePosition(position); + } else { + onDragStart(); + } } function onDoubleClick() { @@ -273,7 +264,6 @@ onMounted(() => { class="draggable-pan" @move="onMove" @mouseup="onDragEnd" - @start="onDragStart" @pan-by="(p) => emit('pan-by', p)" />
diff --git a/client/src/stores/undoRedoStore/README.md b/client/src/stores/undoRedoStore/README.md
index 79639ae8af00..b157b3077852 100644
--- a/client/src/stores/undoRedoStore/README.md
+++ b/client/src/stores/undoRedoStore/README.md
@@ -71,3 +71,72 @@ Classes offer the advantage that they can be defined in another file, and easily
 They also make it easier to store and keep track of the state required by the action.
 
 The inline factory is good for short, simple actions that need little to no state.
+
+## Lazy Actions
+
+Sometimes many similar events happen in a short time frame, and we do not want to save them all as individual actions.
+One example for this may be entering text. Having every individual letter as an undo action is not practical.
+
+This is where lazy actions come in. They can be applied by calling `undoRedoStore.applyLazyAction`.
+
+When calling this function, the actual applying of the action is delayed, and as long as it hasn't entered the undo stack, we can mutate the action.
+
+In order to check if an action is still waiting to be applied, we can use `undoRedoStore.isQueued`.
+As long as this check returns true, it is safe to mutate the action.
+
+Applying any action, or applying a new lazy action, will apply the currently pending action and push it to the undo stack.
+Lazy actions can also be run immediately, canceled, or have their time delay extended.
+
+Due to the additional complexity introduced by mutating action state, it is not recommended to use lazy actions together with the factory API.
+
+Here is an example of a lazy action in action.
+
+```ts
+class ChangeCommentPositionAction extends UndoRedoAction {
+    private store: WorkflowCommentStore;
+    private commentId: number;
+    private startPosition: Position;
+    private endPosition: Position;
+
+    constructor(
+        store: WorkflowCommentStore,
+        comment: WorkflowComment,
+        position: Position
+    ) {
+        super();
+        this.store = store;
+        this.commentId = comment.id;
+        this.startPosition = structuredClone(position);
+        this.endPosition = structuredClone(position);
+        this.store.changePosition(this.commentId, position);
+    }
+
+    updatePosition(position: Position) {
+        this.endPosition = position;
+        this.store.changePosition(this.commentId, position);
+    }
+
+    redo() {
+        this.store.changePosition(this.commentId, this.endPosition);
+    }
+
+    undo() {
+        this.store.changePosition(this.commentId, this.startPosition);
+    }
+}
+```
+
+In this example, we would call `updatePosition` as long as the action hasn't been applied to the undo stack.
+
+```ts
+let lazyAction: ChangeCommentPositionAction | null = null;
+
+function onCommentChangePosition(position: Position) {
+    if (lazyAction && undoRedoStore.isQueued(lazyAction)) {
+        lazyAction.changePosition(position);
+    } else {
+        lazyAction = new ChangeCommentPositionAction(commentStore, comment, position);
+        undoRedoStore.applyLazyAction(lazyAction);
+    }
+}
+```
diff --git a/client/src/stores/workflowStepStore.ts b/client/src/stores/workflowStepStore.ts
index e1ee49f479df..9bd995ea19f7 100644
--- a/client/src/stores/workflowStepStore.ts
+++ b/client/src/stores/workflowStepStore.ts
@@ -137,6 +137,8 @@ interface StepInputMapOver {
     [index: number]: { [index: string]: CollectionTypeDescriptor };
 }
 
+export type WorkflowStepStore = ReturnType<typeof useWorkflowStepStore>;
+
 export const useWorkflowStepStore = defineScopedStore("workflowStepStore", (workflowId) => {
     const steps = ref({});
     const stepMapOver = ref<{ [index: number]: CollectionTypeDescriptor }>({});
From 5f1c80a25ad9ceb700121dcf34b2a1c034df951e Mon Sep 17 00:00:00 2001
From: Laila Los 
<44241786+ElectronicBlueberry@users.noreply.github.com> Date: Fri, 8 Mar 2024 16:12:52 +0100 Subject: [PATCH 382/669] reduce number of change events emitted --- .../components/Workflow/Editor/Comments/MarkdownComment.vue | 2 +- .../src/components/Workflow/Editor/Comments/TextComment.vue | 6 +++++- client/src/stores/undoRedoStore/README.md | 2 +- 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/client/src/components/Workflow/Editor/Comments/MarkdownComment.vue b/client/src/components/Workflow/Editor/Comments/MarkdownComment.vue index b052dbdc508c..ac4841b3c293 100644 --- a/client/src/components/Workflow/Editor/Comments/MarkdownComment.vue +++ b/client/src/components/Workflow/Editor/Comments/MarkdownComment.vue @@ -107,7 +107,7 @@ function onSetColor(color: WorkflowCommentColor) { function onTextChange() { const element = markdownTextarea.value; - if (element) { + if (element && element.value !== props.comment.data.text) { emit("change", { text: element.value }); } } diff --git a/client/src/components/Workflow/Editor/Comments/TextComment.vue b/client/src/components/Workflow/Editor/Comments/TextComment.vue index d3ede0bcfe3c..8139921c69c4 100644 --- a/client/src/components/Workflow/Editor/Comments/TextComment.vue +++ b/client/src/components/Workflow/Editor/Comments/TextComment.vue @@ -65,7 +65,11 @@ function getInnerText() { } function saveText() { - emit("change", { ...props.comment.data, text: getInnerText() }); + const text = getInnerText(); + + if (text !== props.comment.data.text) { + emit("change", { ...props.comment.data, text }); + } } function toggleBold() { diff --git a/client/src/stores/undoRedoStore/README.md b/client/src/stores/undoRedoStore/README.md index b157b3077852..e82817fb0246 100644 --- a/client/src/stores/undoRedoStore/README.md +++ b/client/src/stores/undoRedoStore/README.md @@ -133,7 +133,7 @@ let lazyAction: ChangeCommentPositionAction | null = null; function onCommentChangePosition(position: Position) { if (lazyAction && 
undoRedoStore.isQueued(lazyAction)) {
-        lazyAction.changePosition(position);
+        lazyAction.updatePosition(position);
     } else {
         lazyAction = new ChangeCommentPositionAction(commentStore, comment, position);
         undoRedoStore.applyLazyAction(lazyAction);
From b6d4311025d7ae3c403ac356170938fa8b413c68 Mon Sep 17 00:00:00 2001
From: Laila Los <44241786+ElectronicBlueberry@users.noreply.github.com>
Date: Fri, 8 Mar 2024 16:29:11 +0100
Subject: [PATCH 383/669] add tips and tricks section to documentation

---
 client/src/stores/undoRedoStore/README.md | 22 ++++++++++++++++++++++
 1 file changed, 22 insertions(+)

diff --git a/client/src/stores/undoRedoStore/README.md b/client/src/stores/undoRedoStore/README.md
index e82817fb0246..04287025f481 100644
--- a/client/src/stores/undoRedoStore/README.md
+++ b/client/src/stores/undoRedoStore/README.md
@@ -140,3 +140,25 @@ function onCommentChangePosition(position: Position) {
     }
 }
 ```
+
+## Tips and Tricks
+
+**Do not rely on data which may be deleted by an Action**
+
+For example, relying on a Vue component instance, or any object instance for that matter, may break
+if the object gets deleted and re-created.
+
+Instead treat all data as if it were serializable.
+
+**Operate on stores as much as you can**
+
+The safest thing to mutate from an Undo-Redo Action is a store, since they are singletons by nature,
+and you can be fairly certain that they will be around as long as your UndoRedoStore instance will be.
+
+**Use shallow and deep copies to avoid state mutation**
+
+Accidentally mutating the state of an action once it is applied breaks the undo-redo stack.
+Undoing or redoing an action may then no longer yield the same results.
+
+Using shallow copies (`{ ...object }; [ ...array ];`) or deep copies (`structuredClone(object)`),
+avoids accidental mutation.
From 6836d7b301ff5065ad51ec3a24739eea03eaa676 Mon Sep 17 00:00:00 2001 From: Laila Los <44241786+ElectronicBlueberry@users.noreply.github.com> Date: Mon, 11 Mar 2024 12:20:27 +0100 Subject: [PATCH 384/669] add workflow undo actions --- .../Workflow/Editor/Actions/commentActions.ts | 2 +- .../Editor/Actions/workflowActions.ts | 48 ++++++++++ .../src/components/Workflow/Editor/Index.vue | 90 +++++++++++++++---- 3 files changed, 122 insertions(+), 18 deletions(-) create mode 100644 client/src/components/Workflow/Editor/Actions/workflowActions.ts diff --git a/client/src/components/Workflow/Editor/Actions/commentActions.ts b/client/src/components/Workflow/Editor/Actions/commentActions.ts index 264aa82a3b86..49c9b70b3f68 100644 --- a/client/src/components/Workflow/Editor/Actions/commentActions.ts +++ b/client/src/components/Workflow/Editor/Actions/commentActions.ts @@ -5,7 +5,7 @@ import type { WorkflowCommentColor, WorkflowCommentStore, } from "@/stores/workflowEditorCommentStore"; -import { Step, WorkflowStepStore } from "@/stores/workflowStepStore"; +import type { Step, WorkflowStepStore } from "@/stores/workflowStepStore"; class CommentAction extends UndoRedoAction { protected store: WorkflowCommentStore; diff --git a/client/src/components/Workflow/Editor/Actions/workflowActions.ts b/client/src/components/Workflow/Editor/Actions/workflowActions.ts new file mode 100644 index 000000000000..a493c99a8906 --- /dev/null +++ b/client/src/components/Workflow/Editor/Actions/workflowActions.ts @@ -0,0 +1,48 @@ +import { UndoRedoAction, UndoRedoStore } from "@/stores/undoRedoStore"; + +export class LazySetValueAction extends UndoRedoAction { + setValueHandler; + fromValue; + toValue; + + constructor(fromValue: T, toValue: T, setValueHandler: (value: T) => void) { + super(); + this.fromValue = structuredClone(fromValue); + this.toValue = structuredClone(toValue); + this.setValueHandler = setValueHandler; + this.setValueHandler(toValue); + } + + changeValue(value: T) { + 
this.toValue = structuredClone(value); + this.setValueHandler(this.toValue); + } + + undo() { + this.setValueHandler(this.fromValue); + } + + redo() { + this.setValueHandler(this.toValue); + } +} + +export class SetValueActionHandler { + undoRedoStore; + setValueHandler; + lazyAction: LazySetValueAction | null = null; + + constructor(undoRedoStore: UndoRedoStore, setValueHandler: (value: T) => void) { + this.undoRedoStore = undoRedoStore; + this.setValueHandler = setValueHandler; + } + + set(from: T, to: T) { + if (this.lazyAction && this.undoRedoStore.isQueued(this.lazyAction)) { + this.lazyAction.changeValue(to); + } else { + this.lazyAction = new LazySetValueAction(from, to, this.setValueHandler); + this.undoRedoStore.applyLazyAction(this.lazyAction); + } + } +} diff --git a/client/src/components/Workflow/Editor/Index.vue b/client/src/components/Workflow/Editor/Index.vue index 284e94d96448..b344e0c88c44 100644 --- a/client/src/components/Workflow/Editor/Index.vue +++ b/client/src/components/Workflow/Editor/Index.vue @@ -115,18 +115,18 @@ :id="id" :tags="tags" :parameters="parameters" - :annotation-current.sync="annotation" :annotation="annotation" - :name-current.sync="name" :name="name" :version="version" :versions="versions" :license="license" :creator="creator" @onVersion="onVersion" - @onTags="onTags" + @onTags="setTags" @onLicense="onLicense" - @onCreator="onCreator" /> + @onCreator="onCreator" + @update:nameCurrent="setName" + @update:annotationCurrent="setAnnotation" /> (name.value = value)); + /** user set name. queues an undo/redo action */ + function setName(newName) { + if (name.value !== newName) { + setNameActionHandler.set(name.value, newName); + } + } + + const report = ref({}); + const setReportActionHandler = new SetValueActionHandler( + undoRedoStore, + (value) => (report.value = structuredClone(value)) + ); + /** user set report. 
queues an undo/redo action */ + function setReport(newReport) { + setReportActionHandler.set(report.value, newReport); + } + + const license = ref(null); + const setLicenseHandler = new SetValueActionHandler(undoRedoStore, (value) => (license.value = value)); + /** user set license. queues an undo/redo action */ + function setLicense(newLicense) { + if (license.value !== newLicense) { + setLicenseHandler.set(license.value, newLicense); + } + } + + const creator = ref(null); + const setCreatorHandler = new SetValueActionHandler(undoRedoStore, (value) => (creator.value = value)); + /** user set creator. queues an undo/redo action */ + function setCreator(newCreator) { + setCreatorHandler.set(creator.value, newCreator); + } + + const annotation = ref(null); + const setAnnotationHandler = new SetValueActionHandler(undoRedoStore, (value) => (annotation.value = value)); + /** user set annotation. queues an undo/redo action */ + function setAnnotation(newAnnotation) { + if (annotation.value !== newAnnotation) { + setAnnotationHandler.set(annotation.value, newAnnotation); + } + } + + const tags = ref([]); + const setTagsHandler = new SetValueActionHandler( + undoRedoStore, + (value) => (tags.value = structuredClone(value)) + ); + /** user set tags. 
queues an undo/redo action */ + function setTags(newTags) { + setTagsHandler.set(tags.value, newTags); + } + const { comments } = storeToRefs(commentStore); const { getStepIndex, steps } = storeToRefs(stepStore); const { activeNodeId } = storeToRefs(stateStore); @@ -303,6 +358,18 @@ export default { return { id, + name, + setName, + report, + setReport, + license, + setLicense, + creator, + setCreator, + annotation, + setAnnotation, + tags, + setTags, connectionStore, hasChanges, hasInvalidConnections, @@ -326,13 +393,7 @@ export default { markdownText: null, versions: [], parameters: null, - report: {}, labels: {}, - license: null, - creator: null, - annotation: null, - name: "Unnamed Workflow", - tags: this.workflowTags, services: null, stateMessages: [], insertedStateMessages: [], @@ -832,21 +893,16 @@ export default { } } }, - onTags(tags) { - if (this.tags != tags) { - this.tags = tags; - } - }, onLicense(license) { if (this.license != license) { this.hasChanges = true; - this.license = license; + this.setLicense(license); } }, onCreator(creator) { if (this.creator != creator) { this.hasChanges = true; - this.creator = creator; + this.setCreator(creator); } }, onActiveNode(nodeId) { From c4d63126d5e1b55ae583657741bf6f35882d8a81 Mon Sep 17 00:00:00 2001 From: Laila Los <44241786+ElectronicBlueberry@users.noreply.github.com> Date: Mon, 11 Mar 2024 12:22:28 +0100 Subject: [PATCH 385/669] make tags in attributes reactive --- client/src/components/Workflow/Editor/Attributes.vue | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/client/src/components/Workflow/Editor/Attributes.vue b/client/src/components/Workflow/Editor/Attributes.vue index 062635e65679..c0d7311e06ee 100644 --- a/client/src/components/Workflow/Editor/Attributes.vue +++ b/client/src/components/Workflow/Editor/Attributes.vue @@ -47,7 +47,7 @@
Tags - +
Apply tags to make it easy to search for and find items with the same tag.
@@ -117,7 +117,6 @@ export default { return { message: null, messageVariant: null, - tagsCurrent: this.tags, versionCurrent: this.version, annotationCurrent: this.annotation, nameCurrent: this.name, @@ -186,9 +185,8 @@ export default { }, methods: { onTags(tags) { - this.tagsCurrent = tags; this.onAttributes({ tags }); - this.$emit("onTags", this.tagsCurrent); + this.$emit("onTags", tags); }, onVersion() { this.$emit("onVersion", this.versionCurrent); From d5f86b71ac44fffb9ff5c83ba8a88f71f8c2e823 Mon Sep 17 00:00:00 2001 From: Laila Los <44241786+ElectronicBlueberry@users.noreply.github.com> Date: Mon, 11 Mar 2024 14:14:01 +0100 Subject: [PATCH 386/669] add basic step actions --- .../Workflow/Editor/Actions/stepActions.ts | 83 +++++++++++++++++++ .../src/components/Workflow/Editor/Index.vue | 28 ++----- client/src/stores/undoRedoStore/index.ts | 7 ++ client/src/stores/workflowStepStore.ts | 11 +++ 4 files changed, 109 insertions(+), 20 deletions(-) create mode 100644 client/src/components/Workflow/Editor/Actions/stepActions.ts diff --git a/client/src/components/Workflow/Editor/Actions/stepActions.ts b/client/src/components/Workflow/Editor/Actions/stepActions.ts new file mode 100644 index 000000000000..a0381157732c --- /dev/null +++ b/client/src/components/Workflow/Editor/Actions/stepActions.ts @@ -0,0 +1,83 @@ +import { UndoRedoAction, UndoRedoStore } from "@/stores/undoRedoStore"; +import type { Step, WorkflowStepStore } from "@/stores/workflowStepStore"; + +class LazyMutateStepAction extends UndoRedoAction { + key: K; + fromValue: Step[K]; + toValue: Step[K]; + stepId; + stepStore; + + constructor(stepStore: WorkflowStepStore, stepId: number, key: K, fromValue: Step[K], toValue: Step[K]) { + super(); + this.stepStore = stepStore; + this.stepId = stepId; + this.key = key; + this.fromValue = fromValue; + this.toValue = toValue; + + this.stepStore.updateStepValue(this.stepId, this.key, this.toValue); + } + + changeValue(value: Step[K]) { + this.toValue = value; + 
this.stepStore.updateStepValue(this.stepId, this.key, this.toValue); + } + + undo() { + this.stepStore.updateStepValue(this.stepId, this.key, this.fromValue); + } + + redo() { + this.stepStore.updateStepValue(this.stepId, this.key, this.toValue); + } +} + +export function useStepActions(stepStore: WorkflowStepStore, undoRedoStore: UndoRedoStore) { + /** + * If the pending action is a `LazyMutateStepAction` and matches the step id and field key, returns it. + * Otherwise returns `null` + */ + function actionForIdAndKey(id: number, key: keyof Step) { + const pendingAction = undoRedoStore.pendingLazyAction; + + if (pendingAction instanceof LazyMutateStepAction && pendingAction.stepId === id && pendingAction.key === key) { + return pendingAction; + } else { + return null; + } + } + + /** + * Mutates a queued lazy action, if a matching one exists, + * otherwise creates a new lazy action ans queues it. + */ + function changeValueOrCreateAction(step: Step, key: K, value: Step[K]) { + const actionForKey = actionForIdAndKey(step.id, key); + + if (actionForKey) { + actionForKey.changeValue(value); + } else { + const action = new LazyMutateStepAction(stepStore, step.id, key, step[key], value); + undoRedoStore.applyLazyAction(action); + } + } + + function setPosition(step: Step, position: NonNullable) { + changeValueOrCreateAction(step, "position", position); + } + + function setAnnotation(step: Step, annotation: Step["annotation"]) { + changeValueOrCreateAction(step, "annotation", annotation); + } + + function setLabel(step: Step, label: Step["label"]) { + changeValueOrCreateAction(step, "label", label); + } + + return { + setPosition, + setAnnotation, + setLabel, + }; +} diff --git a/client/src/components/Workflow/Editor/Index.vue b/client/src/components/Workflow/Editor/Index.vue index b344e0c88c44..8eec2b4bf5a8 100644 --- a/client/src/components/Workflow/Editor/Index.vue +++ b/client/src/components/Workflow/Editor/Index.vue @@ -186,6 +186,7 @@ import { LastQueue } from 
"@/utils/lastQueue"; import { errorMessageAsString } from "@/utils/simple-error"; import { Services } from "../services"; +import { useStepActions } from "./Actions/stepActions"; import { SetValueActionHandler } from "./Actions/workflowActions"; import { defaultPosition } from "./composables/useDefaultStepPosition"; import { fromSimple } from "./modules/model"; @@ -349,6 +350,7 @@ export default { stepStore.$reset(); stateStore.$reset(); commentStore.$reset(); + undoRedoStore.$reset(); } onUnmounted(() => { @@ -356,6 +358,8 @@ export default { emit("update:confirmation", false); }); + const stepActions = useStepActions(stepStore, undoRedoStore); + return { id, name, @@ -385,6 +389,7 @@ export default { stateStore, resetStores, initialLoading, + stepActions, }; }, data() { @@ -472,8 +477,7 @@ export default { this.stepStore.updateStep(step); }, onUpdateStepPosition(stepId, position) { - const step = { ...this.steps[stepId], position }; - this.onUpdateStep(step); + this.stepActions.setPosition(this.steps[stepId], position); }, onConnect(connection) { this.connectionStore.addConnection(connection); @@ -656,8 +660,7 @@ export default { this.showInPanel = "attributes"; }, onAnnotation(nodeId, newAnnotation) { - const step = { ...this.steps[nodeId], annotation: newAnnotation }; - this.onUpdateStep(step); + this.stepActions.setAnnotation(this.steps[nodeId], newAnnotation); }, async routeToWorkflow(id) { // map scoped stores to existing stores, before updating the id @@ -724,22 +727,7 @@ export default { this.onReportUpdate(newMarkdown); }, onLabel(nodeId, newLabel) { - const step = { ...this.steps[nodeId], label: newLabel }; - const oldLabel = this.steps[nodeId].label; - this.onUpdateStep(step); - const stepType = this.steps[nodeId].type; - const isInput = ["data_input", "data_collection_input", "parameter_input"].indexOf(stepType) >= 0; - const labelType = isInput ? "input" : "step"; - const labelTypeTitle = isInput ? 
"Input" : "Step"; - const newMarkdown = replaceLabel(this.markdownText, labelType, oldLabel, newLabel); - if (newMarkdown !== this.markdownText) { - this.debouncedToast(`${labelTypeTitle} label updated in workflow report.`, 1500); - } - this.onReportUpdate(newMarkdown); - }, - debouncedToast(message, delay) { - clearTimeout(this.debounceTimer); - this.debounceTimer = setTimeout(() => Toast.success(message), delay); + this.stepActions.setLabel(this.steps[nodeId], newLabel); }, onScrollTo(stepId) { this.scrollToId = stepId; diff --git a/client/src/stores/undoRedoStore/index.ts b/client/src/stores/undoRedoStore/index.ts index 173de34744a8..9e8483b4f926 100644 --- a/client/src/stores/undoRedoStore/index.ts +++ b/client/src/stores/undoRedoStore/index.ts @@ -13,6 +13,12 @@ export const useUndoRedoStore = defineScopedStore("undoRedoStore", () => { const redoActionStack = ref([]); const maxUndoActions = ref(100); + function $reset() { + undoActionStack.value.forEach((action) => action.destroy()); + undoActionStack.value = []; + clearRedoStack(); + } + function undo() { flushLazyAction(); const action = undoActionStack.value.pop(); @@ -122,6 +128,7 @@ export const useUndoRedoStore = defineScopedStore("undoRedoStore", () => { setLazyActionTimeout, isQueued, pendingLazyAction, + $reset, }; }); diff --git a/client/src/stores/workflowStepStore.ts b/client/src/stores/workflowStepStore.ts index 9bd995ea19f7..15cf99b7afe4 100644 --- a/client/src/stores/workflowStepStore.ts +++ b/client/src/stores/workflowStepStore.ts @@ -248,6 +248,16 @@ export const useWorkflowStepStore = defineScopedStore("workflowStepStore", (work stepExtraInputs.value[step.id] = findStepExtraInputs(step); } + function updateStepValue(stepId: number, key: K, value: Step[K]) { + const step = steps.value[stepId]; + assertDefined(step); + + const partialStep: Partial = {}; + partialStep[key] = value; + + updateStep({ ...step, ...partialStep }); + } + function changeStepMapOver(stepId: number, mapOver: 
CollectionTypeDescriptor) { set(stepMapOver.value, stepId, mapOver); } @@ -361,6 +371,7 @@ export const useWorkflowStepStore = defineScopedStore("workflowStepStore", (work addStep, insertNewStep, updateStep, + updateStepValue, changeStepMapOver, resetStepInputMapOver, changeStepInputMapOver, From 0d41f6d7a27d28e37dd5ec43e55d6e4b8513d4aa Mon Sep 17 00:00:00 2001 From: Laila Los <44241786+ElectronicBlueberry@users.noreply.github.com> Date: Mon, 11 Mar 2024 16:49:01 +0100 Subject: [PATCH 387/669] show attributes when undoing/redoing them --- .../Editor/Actions/workflowActions.ts | 13 +++- .../src/components/Workflow/Editor/Index.vue | 71 +++++++++++++------ 2 files changed, 61 insertions(+), 23 deletions(-) diff --git a/client/src/components/Workflow/Editor/Actions/workflowActions.ts b/client/src/components/Workflow/Editor/Actions/workflowActions.ts index a493c99a8906..c00988b68cb8 100644 --- a/client/src/components/Workflow/Editor/Actions/workflowActions.ts +++ b/client/src/components/Workflow/Editor/Actions/workflowActions.ts @@ -2,14 +2,17 @@ import { UndoRedoAction, UndoRedoStore } from "@/stores/undoRedoStore"; export class LazySetValueAction extends UndoRedoAction { setValueHandler; + showAttributesCallback; fromValue; toValue; - constructor(fromValue: T, toValue: T, setValueHandler: (value: T) => void) { + constructor(fromValue: T, toValue: T, setValueHandler: (value: T) => void, showCanvasCallback: () => void) { super(); this.fromValue = structuredClone(fromValue); this.toValue = structuredClone(toValue); this.setValueHandler = setValueHandler; + this.showAttributesCallback = showCanvasCallback; + this.setValueHandler(toValue); } @@ -19,10 +22,12 @@ export class LazySetValueAction extends UndoRedoAction { } undo() { + this.showAttributesCallback(); this.setValueHandler(this.fromValue); } redo() { + this.showAttributesCallback(); this.setValueHandler(this.toValue); } } @@ -30,18 +35,20 @@ export class LazySetValueAction extends UndoRedoAction { export class 
SetValueActionHandler { undoRedoStore; setValueHandler; + showAttributesCallback; lazyAction: LazySetValueAction | null = null; - constructor(undoRedoStore: UndoRedoStore, setValueHandler: (value: T) => void) { + constructor(undoRedoStore: UndoRedoStore, setValueHandler: (value: T) => void, showCanvasCallback: () => void) { this.undoRedoStore = undoRedoStore; this.setValueHandler = setValueHandler; + this.showAttributesCallback = showCanvasCallback; } set(from: T, to: T) { if (this.lazyAction && this.undoRedoStore.isQueued(this.lazyAction)) { this.lazyAction.changeValue(to); } else { - this.lazyAction = new LazySetValueAction(from, to, this.setValueHandler); + this.lazyAction = new LazySetValueAction(from, to, this.setValueHandler, this.showAttributesCallback); this.undoRedoStore.applyLazyAction(this.lazyAction); } } diff --git a/client/src/components/Workflow/Editor/Index.vue b/client/src/components/Workflow/Editor/Index.vue index 8eec2b4bf5a8..1afb176c767d 100644 --- a/client/src/components/Workflow/Editor/Index.vue +++ b/client/src/components/Workflow/Editor/Index.vue @@ -111,7 +111,7 @@ @onUpdateStep="onUpdateStep" @onSetData="onSetData" /> (name.value = value)); + const setNameActionHandler = new SetValueActionHandler( + undoRedoStore, + (value) => (name.value = value), + showAttributes + ); /** user set name. queues an undo/redo action */ function setName(newName) { if (name.value !== newName) { @@ -274,9 +294,12 @@ export default { } const report = ref({}); + + // TODO: move report undo redo to report editor const setReportActionHandler = new SetValueActionHandler( undoRedoStore, - (value) => (report.value = structuredClone(value)) + (value) => (report.value = structuredClone(value)), + showAttributes ); /** user set report. 
queues an undo/redo action */ function setReport(newReport) { @@ -284,7 +307,11 @@ export default { } const license = ref(null); - const setLicenseHandler = new SetValueActionHandler(undoRedoStore, (value) => (license.value = value)); + const setLicenseHandler = new SetValueActionHandler( + undoRedoStore, + (value) => (license.value = value), + showAttributes + ); /** user set license. queues an undo/redo action */ function setLicense(newLicense) { if (license.value !== newLicense) { @@ -293,14 +320,22 @@ export default { } const creator = ref(null); - const setCreatorHandler = new SetValueActionHandler(undoRedoStore, (value) => (creator.value = value)); + const setCreatorHandler = new SetValueActionHandler( + undoRedoStore, + (value) => (creator.value = value), + showAttributes + ); /** user set creator. queues an undo/redo action */ function setCreator(newCreator) { setCreatorHandler.set(creator.value, newCreator); } const annotation = ref(null); - const setAnnotationHandler = new SetValueActionHandler(undoRedoStore, (value) => (annotation.value = value)); + const setAnnotationHandler = new SetValueActionHandler( + undoRedoStore, + (value) => (annotation.value = value), + showAttributes + ); /** user set annotation. queues an undo/redo action */ function setAnnotation(newAnnotation) { if (annotation.value !== newAnnotation) { @@ -311,7 +346,8 @@ export default { const tags = ref([]); const setTagsHandler = new SetValueActionHandler( undoRedoStore, - (value) => (tags.value = structuredClone(value)) + (value) => (tags.value = structuredClone(value)), + showAttributes ); /** user set tags. 
queues an undo/redo action */ function setTags(newTags) { @@ -363,6 +399,11 @@ export default { return { id, name, + isCanvas, + parameters, + ensureParametersSet, + showInPanel, + showAttributes, setName, report, setReport, @@ -394,10 +435,9 @@ export default { }, data() { return { - isCanvas: true, + markdownConfig: null, markdownText: null, versions: [], - parameters: null, labels: {}, services: null, stateMessages: [], @@ -409,7 +449,6 @@ export default { messageBody: null, messageIsError: false, version: this.initialVersion, - showInPanel: "attributes", saveAsName: null, saveAsAnnotation: null, showSaveAsModal: false, @@ -421,7 +460,7 @@ export default { }; }, computed: { - showAttributes() { + attributesVisible() { return this.showInPanel == "attributes"; }, showLint() { @@ -650,11 +689,6 @@ export default { }); }); }, - onAttributes() { - this._ensureParametersSet(); - this.stateStore.activeNodeId = null; - this.showInPanel = "attributes"; - }, onWorkflowTextEditor() { this.stateStore.activeNodeId = null; this.showInPanel = "attributes"; @@ -740,7 +774,7 @@ export default { this.highlightId = null; }, onLint() { - this._ensureParametersSet(); + this.ensureParametersSet(); this.stateStore.activeNodeId = null; this.showInPanel = "lint"; }, @@ -813,9 +847,6 @@ export default { this._loadCurrent(this.id, version); } }, - _ensureParametersSet() { - this.parameters = getUntypedWorkflowParameters(this.steps); - }, _insertStep(contentId, name, type) { if (!this.isCanvas) { this.isCanvas = true; From 9c63cd538d0b1001ec592f220dc546903d32ee01 Mon Sep 17 00:00:00 2001 From: Laila Los <44241786+ElectronicBlueberry@users.noreply.github.com> Date: Mon, 11 Mar 2024 17:21:39 +0100 Subject: [PATCH 388/669] switch to node on node attribute edit --- .../Workflow/Editor/Actions/stepActions.ts | 14 +++++++++++++- client/src/components/Workflow/Editor/Index.vue | 8 ++++---- client/src/stores/workflowEditorStateStore.ts | 2 ++ 3 files changed, 19 insertions(+), 5 deletions(-) diff 
--git a/client/src/components/Workflow/Editor/Actions/stepActions.ts b/client/src/components/Workflow/Editor/Actions/stepActions.ts index a0381157732c..d9a7fc8a1b7f 100644 --- a/client/src/components/Workflow/Editor/Actions/stepActions.ts +++ b/client/src/components/Workflow/Editor/Actions/stepActions.ts @@ -1,4 +1,5 @@ import { UndoRedoAction, UndoRedoStore } from "@/stores/undoRedoStore"; +import { WorkflowStateStore } from "@/stores/workflowEditorStateStore"; import type { Step, WorkflowStepStore } from "@/stores/workflowStepStore"; class LazyMutateStepAction extends UndoRedoAction { @@ -7,6 +8,7 @@ class LazyMutateStepAction extends UndoRedoAction { toValue: Step[K]; stepId; stepStore; + onUndoRedo?: () => void; constructor(stepStore: WorkflowStepStore, stepId: number, key: K, fromValue: Step[K], toValue: Step[K]) { super(); @@ -26,14 +28,20 @@ class LazyMutateStepAction extends UndoRedoAction { undo() { this.stepStore.updateStepValue(this.stepId, this.key, this.fromValue); + this.onUndoRedo?.(); } redo() { this.stepStore.updateStepValue(this.stepId, this.key, this.toValue); + this.onUndoRedo?.(); } } -export function useStepActions(stepStore: WorkflowStepStore, undoRedoStore: UndoRedoStore) { +export function useStepActions( + stepStore: WorkflowStepStore, + undoRedoStore: UndoRedoStore, + stateStore: WorkflowStateStore +) { /** * If the pending action is a `LazyMutateStepAction` and matches the step id and field key, returns it. 
* Otherwise returns `null` @@ -60,6 +68,10 @@ export function useStepActions(stepStore: WorkflowStepStore, undoRedoStore: Undo } else { const action = new LazyMutateStepAction(stepStore, step.id, key, step[key], value); undoRedoStore.applyLazyAction(action); + + action.onUndoRedo = () => { + stateStore.activeNodeId = step.id; + }; } } diff --git a/client/src/components/Workflow/Editor/Index.vue b/client/src/components/Workflow/Editor/Index.vue index 1afb176c767d..daf377616bf4 100644 --- a/client/src/components/Workflow/Editor/Index.vue +++ b/client/src/components/Workflow/Editor/Index.vue @@ -81,7 +81,7 @@ @onReport="onReport" @onLayout="onLayout" @onEdit="onEdit" - @onAttributes="onAttributes" + @onAttributes="showAttributes" @onLint="onLint" @onUpgrade="onUpgrade" />
@@ -135,7 +135,7 @@ :license="license" :steps="steps" :datatypes-mapper="datatypesMapper" - @onAttributes="onAttributes" + @onAttributes="showAttributes" @onHighlight="onHighlight" @onUnhighlight="onUnhighlight" @onRefactor="onAttemptRefactor" @@ -394,7 +394,7 @@ export default { emit("update:confirmation", false); }); - const stepActions = useStepActions(stepStore, undoRedoStore); + const stepActions = useStepActions(stepStore, undoRedoStore, stateStore); return { id, @@ -731,7 +731,7 @@ export default { nameValidate() { if (!this.name) { Toast.error("Please provide a name for your workflow."); - this.onAttributes(); + this.showAttributes(); return false; } return true; diff --git a/client/src/stores/workflowEditorStateStore.ts b/client/src/stores/workflowEditorStateStore.ts index 731153b1ce1b..1f8f3f6743f8 100644 --- a/client/src/stores/workflowEditorStateStore.ts +++ b/client/src/stores/workflowEditorStateStore.ts @@ -28,6 +28,8 @@ type OutputTerminalPositions = { [index: number]: { [index: string]: OutputTermi type StepPosition = { [index: number]: UnwrapRef }; type StepLoadingState = { [index: number]: { loading?: boolean; error?: string } }; +export type WorkflowStateStore = ReturnType; + export const useWorkflowStateStore = defineScopedStore("workflowStateStore", () => { const inputTerminals = ref({}); const outputTerminals = ref({}); From 516f204cf808ff48c3809cc866e00ab9571e7f24 Mon Sep 17 00:00:00 2001 From: Laila Los <44241786+ElectronicBlueberry@users.noreply.github.com> Date: Mon, 11 Mar 2024 17:43:24 +0100 Subject: [PATCH 389/669] add set data action --- .../Workflow/Editor/Actions/stepActions.ts | 45 +++++++++++++++++++ .../src/components/Workflow/Editor/Index.vue | 2 +- 2 files changed, 46 insertions(+), 1 deletion(-) diff --git a/client/src/components/Workflow/Editor/Actions/stepActions.ts b/client/src/components/Workflow/Editor/Actions/stepActions.ts index d9a7fc8a1b7f..145fbffa187c 100644 --- 
a/client/src/components/Workflow/Editor/Actions/stepActions.ts +++ b/client/src/components/Workflow/Editor/Actions/stepActions.ts @@ -1,6 +1,7 @@ import { UndoRedoAction, UndoRedoStore } from "@/stores/undoRedoStore"; import { WorkflowStateStore } from "@/stores/workflowEditorStateStore"; import type { Step, WorkflowStepStore } from "@/stores/workflowStepStore"; +import { assertDefined } from "@/utils/assertions"; class LazyMutateStepAction extends UndoRedoAction { key: K; @@ -37,6 +38,44 @@ class LazyMutateStepAction extends UndoRedoAction { } } +export class SetDataAction extends UndoRedoAction { + stepStore; + stateStore; + stepId; + fromPartial: Partial = {}; + toPartial: Partial = {}; + + constructor(stepStore: WorkflowStepStore, stateStore: WorkflowStateStore, from: Step, to: Step) { + super(); + this.stepStore = stepStore; + this.stateStore = stateStore; + this.stepId = from.id; + + Object.entries(from).forEach(([key, value]) => { + const otherValue = to[key as keyof Step] as any; + + if (JSON.stringify(value) !== JSON.stringify(otherValue)) { + this.fromPartial[key as keyof Step] = structuredClone(value); + this.toPartial[key as keyof Step] = structuredClone(otherValue); + } + }); + } + + run() { + const step = this.stepStore.getStep(this.stepId); + assertDefined(step); + this.stateStore.activeNodeId = this.stepId; + this.stepStore.updateStep({ ...step, ...this.toPartial }); + } + + undo() { + const step = this.stepStore.getStep(this.stepId); + assertDefined(step); + this.stateStore.activeNodeId = this.stepId; + this.stepStore.updateStep({ ...step, ...this.fromPartial }); + } +} + export function useStepActions( stepStore: WorkflowStepStore, undoRedoStore: UndoRedoStore, @@ -87,9 +126,15 @@ export function useStepActions( changeValueOrCreateAction(step, "label", label); } + function setData(from: Step, to: Step) { + const action = new SetDataAction(stepStore, stateStore, from, to); + undoRedoStore.applyAction(action); + } + return { setPosition, 
setAnnotation, setLabel, + setData, }; } diff --git a/client/src/components/Workflow/Editor/Index.vue b/client/src/components/Workflow/Editor/Index.vue index daf377616bf4..093879c62e0a 100644 --- a/client/src/components/Workflow/Editor/Index.vue +++ b/client/src/components/Workflow/Editor/Index.vue @@ -750,7 +750,7 @@ export default { tool_version: data.tool_version, errors: data.errors, }; - this.onUpdateStep(step); + this.stepActions.setData(this.steps[stepId], step); }); }, onOutputLabel(oldValue, newValue) { From b97b9e79c896e2e5c2e15a145c6f8bb491fbf685 Mon Sep 17 00:00:00 2001 From: Laila Los <44241786+ElectronicBlueberry@users.noreply.github.com> Date: Mon, 11 Mar 2024 18:11:04 +0100 Subject: [PATCH 390/669] remove unused emit --- client/src/components/Workflow/Editor/Forms/FormTool.vue | 2 +- client/src/components/Workflow/Editor/Index.vue | 4 ---- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/client/src/components/Workflow/Editor/Forms/FormTool.vue b/client/src/components/Workflow/Editor/Forms/FormTool.vue index 919cbad7a7b0..dce544f66a11 100644 --- a/client/src/components/Workflow/Editor/Forms/FormTool.vue +++ b/client/src/components/Workflow/Editor/Forms/FormTool.vue @@ -89,7 +89,7 @@ export default { required: true, }, }, - emits: ["onSetData", "onUpdateStep", "onChangePostJobActions", "onAnnotation", "onLabel", "onOutputLabel"], + emits: ["onSetData", "onChangePostJobActions", "onAnnotation", "onLabel"], setup(props, { emit }) { const { stepId, annotation, label, stepInputs, stepOutputs, configForm, postJobActions } = useStepProps( toRef(props, "step") diff --git a/client/src/components/Workflow/Editor/Index.vue b/client/src/components/Workflow/Editor/Index.vue index 093879c62e0a..96ddf806e838 100644 --- a/client/src/components/Workflow/Editor/Index.vue +++ b/client/src/components/Workflow/Editor/Index.vue @@ -96,8 +96,6 @@ @onChangePostJobActions="onChangePostJobActions" @onAnnotation="onAnnotation" @onLabel="onLabel" - 
@onOutputLabel="onOutputLabel" - @onUpdateStep="onUpdateStep" @onSetData="onSetData" /> Date: Tue, 12 Mar 2024 11:43:24 +0100 Subject: [PATCH 391/669] fix form undo reactivity --- .../Workflow/Editor/Actions/stepActions.ts | 11 ++++++ .../Workflow/Editor/Forms/FormDefault.vue | 34 +++++++++++++------ .../Workflow/Editor/Forms/FormTool.vue | 16 +++++++-- client/src/stores/refreshFromStore.ts | 26 ++++++++++++++ 4 files changed, 75 insertions(+), 12 deletions(-) create mode 100644 client/src/stores/refreshFromStore.ts diff --git a/client/src/components/Workflow/Editor/Actions/stepActions.ts b/client/src/components/Workflow/Editor/Actions/stepActions.ts index 145fbffa187c..b5933779e55f 100644 --- a/client/src/components/Workflow/Editor/Actions/stepActions.ts +++ b/client/src/components/Workflow/Editor/Actions/stepActions.ts @@ -1,3 +1,4 @@ +import { useRefreshFromStore } from "@/stores/refreshFromStore"; import { UndoRedoAction, UndoRedoStore } from "@/stores/undoRedoStore"; import { WorkflowStateStore } from "@/stores/workflowEditorStateStore"; import type { Step, WorkflowStepStore } from "@/stores/workflowStepStore"; @@ -44,6 +45,7 @@ export class SetDataAction extends UndoRedoAction { stepId; fromPartial: Partial = {}; toPartial: Partial = {}; + refreshForm: () => void; constructor(stepStore: WorkflowStepStore, stateStore: WorkflowStateStore, from: Step, to: Step) { super(); @@ -59,6 +61,9 @@ export class SetDataAction extends UndoRedoAction { this.toPartial[key as keyof Step] = structuredClone(otherValue); } }); + + const { refresh } = useRefreshFromStore(); + this.refreshForm = refresh; } run() { @@ -73,6 +78,12 @@ export class SetDataAction extends UndoRedoAction { assertDefined(step); this.stateStore.activeNodeId = this.stepId; this.stepStore.updateStep({ ...step, ...this.fromPartial }); + this.refreshForm(); + } + + redo() { + this.run(); + this.refreshForm(); } } diff --git a/client/src/components/Workflow/Editor/Forms/FormDefault.vue 
b/client/src/components/Workflow/Editor/Forms/FormDefault.vue index 72e786d3b6d3..e846b86b1568 100644 --- a/client/src/components/Workflow/Editor/Forms/FormDefault.vue +++ b/client/src/components/Workflow/Editor/Forms/FormDefault.vue @@ -43,6 +43,7 @@
@@ -59,11 +60,13 @@ diff --git a/client/src/components/Workflow/Editor/Forms/FormTool.vue b/client/src/components/Workflow/Editor/Forms/FormTool.vue index dce544f66a11..5dcb91afff92 100644 --- a/client/src/components/Workflow/Editor/Forms/FormTool.vue +++ b/client/src/components/Workflow/Editor/Forms/FormTool.vue @@ -30,6 +30,7 @@ Tool Parameters diff --git a/client/src/components/Workflow/Editor/Forms/FormDefault.vue b/client/src/components/Workflow/Editor/Forms/FormDefault.vue index e846b86b1568..3fcf1a669c44 100644 --- a/client/src/components/Workflow/Editor/Forms/FormDefault.vue +++ b/client/src/components/Workflow/Editor/Forms/FormDefault.vue @@ -39,7 +39,10 @@ :area="true" help="Add an annotation or notes to this step. Annotations are available when a workflow is viewed." @input="onAnnotation" /> - + - +
Tool Parameters + @onSetData="onSetData" + @onUpdateStep="updateStep" /> + @onSetData="onSetData" + @onUpdateStep="updateStep" /> { - this.onUpdateStep({ + this.stepStore.updateStep({ ...this.steps[step.id], config_form: response.config_form, content_id: response.content_id, @@ -596,8 +601,7 @@ export default { this.stepActions.setData(step, updatedStep); }, onRemove(nodeId) { - this.stepStore.removeStep(nodeId); - this.showInPanel = "attributes"; + this.stepActions.removeStep(this.steps[nodeId], this.showAttributes); }, onEditSubworkflow(contentId) { const editUrl = `/workflows/edit?workflow_id=${contentId}`; @@ -682,7 +686,7 @@ export default { onLayout() { return import(/* webpackChunkName: "workflowLayout" */ "./modules/layout.ts").then((layout) => { layout.autoLayout(this.id, this.steps).then((newSteps) => { - newSteps.map((step) => this.onUpdateStep(step)); + newSteps.map((step) => this.stepStore.updateStep(step)); }); }); }, From 20d2644c5d7a597c51f6fd3c9b53a43f3a98287c Mon Sep 17 00:00:00 2001 From: Laila Los <44241786+ElectronicBlueberry@users.noreply.github.com> Date: Tue, 12 Mar 2024 15:13:19 +0100 Subject: [PATCH 395/669] fix reactivity in post job actions form --- client/src/components/Workflow/Editor/Forms/FormTool.vue | 1 + 1 file changed, 1 insertion(+) diff --git a/client/src/components/Workflow/Editor/Forms/FormTool.vue b/client/src/components/Workflow/Editor/Forms/FormTool.vue index 5d17c1e6c763..279027223000 100644 --- a/client/src/components/Workflow/Editor/Forms/FormTool.vue +++ b/client/src/components/Workflow/Editor/Forms/FormTool.vue @@ -42,6 +42,7 @@ Additional Options Date: Tue, 12 Mar 2024 15:18:01 +0100 Subject: [PATCH 396/669] replace setData with updateStep --- client/src/components/Workflow/Editor/Index.vue | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/client/src/components/Workflow/Editor/Index.vue b/client/src/components/Workflow/Editor/Index.vue index 4a97618461be..7478e0885378 100644 --- 
a/client/src/components/Workflow/Editor/Index.vue +++ b/client/src/components/Workflow/Editor/Index.vue @@ -596,9 +596,8 @@ export default { this.hasChanges = true; }, onChangePostJobActions(nodeId, postJobActions) { - const step = this.steps[nodeId]; - const updatedStep = { ...step, post_job_actions: postJobActions }; - this.stepActions.setData(step, updatedStep); + const partialStep = { post_job_actions: postJobActions }; + this.stepActions.updateStep(nodeId, partialStep); }, onRemove(nodeId) { this.stepActions.removeStep(this.steps[nodeId], this.showAttributes); @@ -741,8 +740,7 @@ export default { this.lastQueue .enqueue(() => getModule(newData, stepId, this.stateStore.setLoadingState)) .then((data) => { - const step = { - ...this.steps[stepId], + const partialStep = { content_id: data.content_id, inputs: data.inputs, outputs: data.outputs, @@ -751,7 +749,7 @@ export default { tool_version: data.tool_version, errors: data.errors, }; - this.stepActions.setData(this.steps[stepId], step); + this.stepActions.updateStep(stepId, partialStep); }); }, onOutputLabel(oldValue, newValue) { From 3c5ee1e5a0f2c37e0200c78ad64d22e2a50b4979 Mon Sep 17 00:00:00 2001 From: Laila Los <44241786+ElectronicBlueberry@users.noreply.github.com> Date: Tue, 12 Mar 2024 16:41:43 +0100 Subject: [PATCH 397/669] reduce emitted events --- .../Workflow/Editor/Forms/FormSection.vue | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/client/src/components/Workflow/Editor/Forms/FormSection.vue b/client/src/components/Workflow/Editor/Forms/FormSection.vue index d7b96dfc9591..8067c7f361af 100644 --- a/client/src/components/Workflow/Editor/Forms/FormSection.vue +++ b/client/src/components/Workflow/Editor/Forms/FormSection.vue @@ -92,8 +92,11 @@ export default { return Boolean(this.formData[this.deleteActionKey]); }, }, - watch: { - formData() { + created() { + this.setFormData(); + }, + methods: { + postPostJobActions() { // The formData shape is kind of unfortunate, but 
it is what we have now. // This should be a properly nested object whose values should be retrieved and set via a store const postJobActions = {}; @@ -119,11 +122,6 @@ export default { }); this.$emit("onChange", postJobActions); }, - }, - created() { - this.setFormData(); - }, - methods: { setFormData() { const pjas = {}; Object.values(this.postJobActions).forEach((pja) => { @@ -168,6 +166,7 @@ export default { this.setEmailAction(this.formData); if (changed) { this.formData = Object.assign({}, this.formData); + this.postPostJobActions(); } }, onDatatype(pjaKey, outputName, newDatatype) { From ace5fa76ba97b4a02cab8caa6a88d0116c179dfe Mon Sep 17 00:00:00 2001 From: Laila Los <44241786+ElectronicBlueberry@users.noreply.github.com> Date: Tue, 12 Mar 2024 16:45:58 +0100 Subject: [PATCH 398/669] fix form reactivity for update action --- .../Workflow/Editor/Actions/stepActions.ts | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/client/src/components/Workflow/Editor/Actions/stepActions.ts b/client/src/components/Workflow/Editor/Actions/stepActions.ts index 094aa876247a..828b7e2af4d7 100644 --- a/client/src/components/Workflow/Editor/Actions/stepActions.ts +++ b/client/src/components/Workflow/Editor/Actions/stepActions.ts @@ -180,11 +180,13 @@ export class RemoveStepAction extends UndoRedoAction { run() { this.stepStore.removeStep(this.step.id); this.showAttributesCallback(); + this.stateStore.hasChanges = true; } undo() { this.stepStore.addStep(structuredClone(this.step)); this.stateStore.activeNodeId = this.step.id; + this.stateStore.hasChanges = true; } } @@ -269,7 +271,15 @@ export function useStepActions( }); const action = new UpdateStepAction(stepStore, stateStore, id, fromPartial, toPartial); - undoRedoStore.applyAction(action); + + if (!action.isEmpty()) { + action.onUndoRedo = () => { + stateStore.activeNodeId = id; + stateStore.hasChanges = true; + refresh(); + }; + undoRedoStore.applyAction(action); + } } return { From 
679c6b62586b69083a08e56e9f18cf1048c0fc95 Mon Sep 17 00:00:00 2001 From: Laila Los <44241786+ElectronicBlueberry@users.noreply.github.com> Date: Tue, 12 Mar 2024 16:59:19 +0100 Subject: [PATCH 399/669] add copy step undo action --- .../Workflow/Editor/Actions/stepActions.ts | 30 +++++++++++++++++++ .../src/components/Workflow/Editor/Index.vue | 6 ++-- 2 files changed, 32 insertions(+), 4 deletions(-) diff --git a/client/src/components/Workflow/Editor/Actions/stepActions.ts b/client/src/components/Workflow/Editor/Actions/stepActions.ts index 828b7e2af4d7..afe3fb2d857c 100644 --- a/client/src/components/Workflow/Editor/Actions/stepActions.ts +++ b/client/src/components/Workflow/Editor/Actions/stepActions.ts @@ -190,6 +190,30 @@ export class RemoveStepAction extends UndoRedoAction { } } +export class CopyStepAction extends UndoRedoAction { + stepStore; + stateStore; + step; + onUndoRedo?: () => void; + + constructor(stepStore: WorkflowStepStore, stateStore: WorkflowStateStore, step: Step) { + super(); + this.stepStore = stepStore; + this.stateStore = stateStore; + this.step = structuredClone(step); + } + + run() { + this.step = this.stepStore.addStep(this.step); + this.stateStore.activeNodeId = this.step.id; + this.stateStore.hasChanges = true; + } + + undo() { + this.stepStore.removeStep(this.step.id); + } +} + export function useStepActions( stepStore: WorkflowStepStore, undoRedoStore: UndoRedoStore, @@ -282,6 +306,11 @@ export function useStepActions( } } + function copyStep(step: Step) { + const action = new CopyStepAction(stepStore, stateStore, step); + undoRedoStore.applyAction(action); + } + return { setPosition, setAnnotation, @@ -289,5 +318,6 @@ export function useStepActions( setData, removeStep, updateStep, + copyStep, }; } diff --git a/client/src/components/Workflow/Editor/Index.vue b/client/src/components/Workflow/Editor/Index.vue index 7478e0885378..012f598853a1 100644 --- a/client/src/components/Workflow/Editor/Index.vue +++ 
b/client/src/components/Workflow/Editor/Index.vue @@ -608,15 +608,13 @@ export default { }, async onClone(stepId) { const sourceStep = this.steps[parseInt(stepId)]; - const stepCopy = JSON.parse(JSON.stringify(sourceStep)); - const { id } = this.stepStore.addStep({ - ...stepCopy, + this.stepActions.copyStep({ + ...sourceStep, id: null, uuid: null, label: null, position: defaultPosition(this.graphOffset, this.transform), }); - this.stateStore.activeNodeId = id; }, onInsertTool(tool_id, tool_name) { this._insertStep(tool_id, tool_name, "tool"); From 5b81b687210dff7647b572df7a318f90af8a85b9 Mon Sep 17 00:00:00 2001 From: Laila Los <44241786+ElectronicBlueberry@users.noreply.github.com> Date: Tue, 12 Mar 2024 17:20:03 +0100 Subject: [PATCH 400/669] remove unused events --- client/src/components/Workflow/Editor/Index.vue | 6 ------ client/src/components/Workflow/Editor/Node.vue | 1 - 2 files changed, 7 deletions(-) diff --git a/client/src/components/Workflow/Editor/Index.vue b/client/src/components/Workflow/Editor/Index.vue index 012f598853a1..3d348dbe66c3 100644 --- a/client/src/components/Workflow/Editor/Index.vue +++ b/client/src/components/Workflow/Editor/Index.vue @@ -55,13 +55,10 @@ @scrollTo="scrollToId = null" @transform="(value) => (transform = value)" @graph-offset="(value) => (graphOffset = value)" - @onUpdate="onUpdate" @onClone="onClone" @onCreate="onInsertTool" @onChange="onChange" - @onConnect="onConnect" @onRemove="onRemove" - @onUpdateStep="onUpdateStep" @onUpdateStepPosition="onUpdateStepPosition">
@@ -520,9 +517,6 @@ export default { onUpdateStepPosition(stepId, position) { this.stepActions.setPosition(this.steps[stepId], position); }, - onConnect(connection) { - this.connectionStore.addConnection(connection); - }, onAttemptRefactor(actions) { if (this.hasChanges) { const r = window.confirm( diff --git a/client/src/components/Workflow/Editor/Node.vue b/client/src/components/Workflow/Editor/Node.vue index e152292f2db3..3b29ed433f27 100644 --- a/client/src/components/Workflow/Editor/Node.vue +++ b/client/src/components/Workflow/Editor/Node.vue @@ -172,7 +172,6 @@ const emit = defineEmits([ "onActivate", "onChange", "onCreate", - "onUpdate", "onClone", "onUpdateStepPosition", "pan-by", From 05325cc408f9816bf39d3ded50292c16ae6fa991 Mon Sep 17 00:00:00 2001 From: Laila Los <44241786+ElectronicBlueberry@users.noreply.github.com> Date: Mon, 18 Mar 2024 11:35:59 +0100 Subject: [PATCH 401/669] add actions for output visible and active --- .../components/Workflow/Editor/NodeOutput.vue | 50 +++++++++++++------ 1 file changed, 35 insertions(+), 15 deletions(-) diff --git a/client/src/components/Workflow/Editor/NodeOutput.vue b/client/src/components/Workflow/Editor/NodeOutput.vue index 3d21e05105ed..af3c571edba6 100644 --- a/client/src/components/Workflow/Editor/NodeOutput.vue +++ b/client/src/components/Workflow/Editor/NodeOutput.vue @@ -22,8 +22,9 @@ import { type PostJobActions, type Step, } from "@/stores/workflowStepStore"; -import { assertDefined, ensureDefined } from "@/utils/assertions"; +import { assertDefined } from "@/utils/assertions"; +import { UpdateStepAction } from "./Actions/stepActions"; import { useRelativePosition } from "./composables/relativePosition"; import { useTerminal } from "./composables/useTerminal"; import { type CollectionTypeDescriptor, NULL_COLLECTION_TYPE_DESCRIPTION } from "./modules/collectionTypeDescription"; @@ -53,7 +54,7 @@ const props = defineProps<{ }>(); const emit = defineEmits(["pan-by", "stopDragging", "onDragConnector"]); 
-const { stateStore, stepStore } = useWorkflowStores(); +const { stateStore, stepStore, undoRedoStore } = useWorkflowStores(); const { rootOffset, output, stepId, datatypesMapper } = toRefs(props); const terminalComponent: Ref | null> = ref(null); @@ -155,7 +156,15 @@ function onToggleActive() { } else { stepWorkflowOutputs.push({ output_name: output.value.name, label: output.value.name }); } - stepStore.updateStep({ ...step, workflow_outputs: stepWorkflowOutputs }); + + const action = new UpdateStepAction( + stepStore, + stateStore, + step.id, + { workflow_outputs: step.workflow_outputs }, + { workflow_outputs: stepWorkflowOutputs } + ); + undoRedoStore.applyAction(action); } function onToggleVisible() { @@ -164,25 +173,36 @@ function onToggleVisible() { } const actionKey = `HideDatasetAction${props.output.name}`; - const step = { ...ensureDefined(stepStore.getStep(stepId.value)) }; + const step = stepStore.getStep(stepId.value); + assertDefined(step); + + const oldPostJobActions = structuredClone(step.post_job_actions) ?? {}; + let newPostJobActions; + if (isVisible.value) { - step.post_job_actions = { - ...step.post_job_actions, - [actionKey]: { - action_type: "HideDatasetAction", - output_name: props.output.name, - action_arguments: {}, - }, + newPostJobActions = structuredClone(step.post_job_actions) ?? 
{}; + newPostJobActions[actionKey] = { + action_type: "HideDatasetAction", + output_name: props.output.name, + action_arguments: {}, }; } else { if (step.post_job_actions) { - const { [actionKey]: _unused, ...newPostJobActions } = step.post_job_actions; - step.post_job_actions = newPostJobActions; + const { [actionKey]: _unused, ...remainingPostJobActions } = step.post_job_actions; + newPostJobActions = structuredClone(remainingPostJobActions); } else { - step.post_job_actions = {}; + newPostJobActions = {}; } } - stepStore.updateStep(step); + + const action = new UpdateStepAction( + stepStore, + stateStore, + step.id, + { post_job_actions: oldPostJobActions }, + { post_job_actions: newPostJobActions } + ); + undoRedoStore.applyAction(action); } function onPanBy(panBy: XYPosition) { From 09e945db5d48dfcff3a766fade3ef16f433716c5 Mon Sep 17 00:00:00 2001 From: Laila Los <44241786+ElectronicBlueberry@users.noreply.github.com> Date: Mon, 18 Mar 2024 13:11:16 +0100 Subject: [PATCH 402/669] refactor terminals to have access to all stores --- .../Workflow/Editor/ConnectionMenu.vue | 5 +- .../src/components/Workflow/Editor/Lint.vue | 15 +- .../components/Workflow/Editor/NodeInput.vue | 6 +- .../Workflow/Editor/NodeOutput.test.ts | 23 ++- .../components/Workflow/Editor/NodeOutput.vue | 2 +- .../Editor/composables/useTerminal.ts | 14 +- .../Workflow/Editor/modules/linting.ts | 14 +- .../Workflow/Editor/modules/terminals.test.ts | 83 ++++----- .../Workflow/Editor/modules/terminals.ts | 164 +++++++----------- 9 files changed, 136 insertions(+), 190 deletions(-) diff --git a/client/src/components/Workflow/Editor/ConnectionMenu.vue b/client/src/components/Workflow/Editor/ConnectionMenu.vue index a8bf0b00e832..e97cc24a4ad3 100644 --- a/client/src/components/Workflow/Editor/ConnectionMenu.vue +++ b/client/src/components/Workflow/Editor/ConnectionMenu.vue @@ -62,7 +62,8 @@ watch(focused, (focused) => { } }); -const { connectionStore, stepStore } = useWorkflowStores(); +const stores 
= useWorkflowStores(); +const { stepStore } = stores; interface InputObject { stepId: number; @@ -97,7 +98,7 @@ function inputObjectToTerminal(inputObject: InputObject): InputTerminals { const step = stepStore.getStep(inputObject.stepId); assertDefined(step); const inputSource = step.inputs.find((input) => input.name == inputObject.inputName)!; - return terminalFactory(inputObject.stepId, inputSource, props.terminal.datatypesMapper, connectionStore, stepStore); + return terminalFactory(inputObject.stepId, inputSource, props.terminal.datatypesMapper, stores); } const validInputs: ComputedRef = computed(() => { diff --git a/client/src/components/Workflow/Editor/Lint.vue b/client/src/components/Workflow/Editor/Lint.vue index 5ab31d273263..86b80a1c392c 100644 --- a/client/src/components/Workflow/Editor/Lint.vue +++ b/client/src/components/Workflow/Editor/Lint.vue @@ -135,9 +135,10 @@ export default { }, }, setup() { - const { connectionStore, stepStore } = useWorkflowStores(); + const stores = useWorkflowStores(); + const { connectionStore, stepStore } = stores; const { hasActiveOutputs } = storeToRefs(stepStore); - return { connectionStore, stepStore, hasActiveOutputs }; + return { stores, connectionStore, stepStore, hasActiveOutputs }; }, computed: { showRefactor() { @@ -171,7 +172,7 @@ export default { return getUntypedParameters(this.untypedParameters); }, warningDisconnectedInputs() { - return getDisconnectedInputs(this.steps, this.datatypesMapper, this.connectionStore, this.stepStore); + return getDisconnectedInputs(this.steps, this.datatypesMapper, this.stores); }, warningMissingMetadata() { return getMissingMetadata(this.steps); @@ -227,13 +228,7 @@ export default { this.$emit("onUnhighlight", item.stepId); }, onRefactor() { - const actions = fixAllIssues( - this.steps, - this.untypedParameters, - this.datatypesMapper, - this.connectionStore, - this.stepStore - ); + const actions = fixAllIssues(this.steps, this.untypedParameters, this.datatypesMapper, 
this.stores); this.$emit("onRefactor", actions); }, }, diff --git a/client/src/components/Workflow/Editor/NodeInput.vue b/client/src/components/Workflow/Editor/NodeInput.vue index 81e5dbcd8c1e..b4ab0ac1962b 100644 --- a/client/src/components/Workflow/Editor/NodeInput.vue +++ b/client/src/components/Workflow/Editor/NodeInput.vue @@ -90,7 +90,8 @@ const position = useRelativePosition( computed(() => props.parentNode) ); -const { connectionStore, stateStore, stepStore } = useWorkflowStores(); +const stores = useWorkflowStores(); +const { connectionStore, stateStore } = stores; const hasTerminals = ref(false); watchEffect(() => { hasTerminals.value = connectionStore.getOutputTerminalsForInputTerminal(id.value).length > 0; @@ -178,8 +179,7 @@ function onDrop(event: DragEvent) { stepOut.stepId, stepOut.output, props.datatypesMapper, - connectionStore, - stepStore + stores ) as OutputCollectionTerminal; showTooltip.value = false; diff --git a/client/src/components/Workflow/Editor/NodeOutput.test.ts b/client/src/components/Workflow/Editor/NodeOutput.test.ts index 521822429c3b..9ee9a8d3361a 100644 --- a/client/src/components/Workflow/Editor/NodeOutput.test.ts +++ b/client/src/components/Workflow/Editor/NodeOutput.test.ts @@ -4,6 +4,7 @@ import { getLocalVue } from "tests/jest/helpers"; import { nextTick, ref } from "vue"; import { testDatatypesMapper } from "@/components/Datatypes/test_fixtures"; +import { UndoRedoStore, useUndoRedoStore } from "@/stores/undoRedoStore"; import { useConnectionStore } from "@/stores/workflowConnectionStore"; import { type Step, type Steps, useWorkflowStepStore } from "@/stores/workflowStepStore"; @@ -53,12 +54,14 @@ describe("NodeOutput", () => { let pinia: ReturnType; let stepStore: ReturnType; let connectionStore: ReturnType; + let undoRedoStore: UndoRedoStore; beforeEach(() => { pinia = createPinia(); setActivePinia(pinia); stepStore = useWorkflowStepStore("mock-workflow"); connectionStore = useConnectionStore("mock-workflow"); + 
undoRedoStore = useUndoRedoStore("mock-workflow"); Object.values(advancedSteps).map((step) => stepStore.addStep(step)); }); @@ -77,20 +80,16 @@ describe("NodeOutput", () => { it("displays multiple icon if not mapped over", async () => { const simpleDataStep = stepForLabel("simple data", stepStore.steps); const listInputStep = stepForLabel("list input", stepStore.steps); - const inputTerminal = terminalFactory( - simpleDataStep.id, - simpleDataStep.inputs[0]!, - testDatatypesMapper, + const inputTerminal = terminalFactory(simpleDataStep.id, simpleDataStep.inputs[0]!, testDatatypesMapper, { connectionStore, - stepStore - ); - const outputTerminal = terminalFactory( - listInputStep.id, - listInputStep.outputs[0]!, - testDatatypesMapper, + stepStore, + undoRedoStore, + } as any); + const outputTerminal = terminalFactory(listInputStep.id, listInputStep.outputs[0]!, testDatatypesMapper, { connectionStore, - stepStore - ); + stepStore, + undoRedoStore, + } as any); const propsData = propsForStep(simpleDataStep); const wrapper = shallowMount(NodeOutput as any, { propsData: propsData, diff --git a/client/src/components/Workflow/Editor/NodeOutput.vue b/client/src/components/Workflow/Editor/NodeOutput.vue index af3c571edba6..cac8dba4a5ac 100644 --- a/client/src/components/Workflow/Editor/NodeOutput.vue +++ b/client/src/components/Workflow/Editor/NodeOutput.vue @@ -28,7 +28,7 @@ import { UpdateStepAction } from "./Actions/stepActions"; import { useRelativePosition } from "./composables/relativePosition"; import { useTerminal } from "./composables/useTerminal"; import { type CollectionTypeDescriptor, NULL_COLLECTION_TYPE_DESCRIPTION } from "./modules/collectionTypeDescription"; -import { OutputTerminals } from "./modules/terminals"; +import type { OutputTerminals } from "./modules/terminals"; import DraggableWrapper from "./DraggablePan.vue"; import StatelessTags from "@/components/TagsMultiselect/StatelessTags.vue"; diff --git 
a/client/src/components/Workflow/Editor/composables/useTerminal.ts b/client/src/components/Workflow/Editor/composables/useTerminal.ts index 482c9dfcbe0e..fbe92d11dda3 100644 --- a/client/src/components/Workflow/Editor/composables/useTerminal.ts +++ b/client/src/components/Workflow/Editor/composables/useTerminal.ts @@ -11,21 +11,15 @@ export function useTerminal( datatypesMapper: Ref ) { const terminal: Ref | null> = ref(null); - const { connectionStore, stepStore } = useWorkflowStores(); - const step = computed(() => stepStore.getStep(stepId.value)); - const isMappedOver = computed(() => stepStore.stepMapOver[stepId.value]?.isCollection ?? false); + const stores = useWorkflowStores(); + const step = computed(() => stores.stepStore.getStep(stepId.value)); + const isMappedOver = computed(() => stores.stepStore.stepMapOver[stepId.value]?.isCollection ?? false); watch( [step, terminalSource, datatypesMapper], () => { // rebuild terminal if any of the tracked dependencies change - const newTerminal = terminalFactory( - stepId.value, - terminalSource.value, - datatypesMapper.value, - connectionStore, - stepStore - ); + const newTerminal = terminalFactory(stepId.value, terminalSource.value, datatypesMapper.value, stores); newTerminal.getInvalidConnectedTerminals(); terminal.value = newTerminal; }, diff --git a/client/src/components/Workflow/Editor/modules/linting.ts b/client/src/components/Workflow/Editor/modules/linting.ts index 86f700e70436..b451229305c2 100644 --- a/client/src/components/Workflow/Editor/modules/linting.ts +++ b/client/src/components/Workflow/Editor/modules/linting.ts @@ -1,7 +1,7 @@ import type { DatatypesMapperModel } from "@/components/Datatypes/model"; import type { UntypedParameters } from "@/components/Workflow/Editor/modules/parameters"; -import type { useConnectionStore } from "@/stores/workflowConnectionStore"; -import type { Step, Steps, useWorkflowStepStore } from "@/stores/workflowStepStore"; +import type { useWorkflowStores } from 
"@/composables/workflowStores"; +import type { Step, Steps } from "@/stores/workflowStepStore"; import { assertDefined } from "@/utils/assertions"; import { terminalFactory } from "./terminals"; @@ -18,13 +18,12 @@ interface LintState { export function getDisconnectedInputs( steps: Steps = {}, datatypesMapper: DatatypesMapperModel, - connectionStore: ReturnType, - stepStore: ReturnType + stores: ReturnType ) { const inputs: LintState[] = []; Object.values(steps).forEach((step) => { step.inputs.map((inputSource) => { - const inputTerminal = terminalFactory(step.id, inputSource, datatypesMapper, connectionStore, stepStore); + const inputTerminal = terminalFactory(step.id, inputSource, datatypesMapper, stores); if (!inputTerminal.optional && inputTerminal.connections.length === 0) { const inputLabel = inputSource.label || inputSource.name; inputs.push({ @@ -124,8 +123,7 @@ export function fixAllIssues( steps: Steps, parameters: UntypedParameters, datatypesMapper: DatatypesMapperModel, - connectionStore: ReturnType, - stepStore: ReturnType + stores: ReturnType ) { const actions = []; const untypedParameters = getUntypedParameters(parameters); @@ -134,7 +132,7 @@ export function fixAllIssues( actions.push(fixUntypedParameter(untypedParameter)); } } - const disconnectedInputs = getDisconnectedInputs(steps, datatypesMapper, connectionStore, stepStore); + const disconnectedInputs = getDisconnectedInputs(steps, datatypesMapper, stores); for (const disconnectedInput of disconnectedInputs) { if (disconnectedInput.autofix) { actions.push(fixDisconnectedInput(disconnectedInput)); diff --git a/client/src/components/Workflow/Editor/modules/terminals.test.ts b/client/src/components/Workflow/Editor/modules/terminals.test.ts index 1c49c1513aa6..844c9114bb14 100644 --- a/client/src/components/Workflow/Editor/modules/terminals.test.ts +++ b/client/src/components/Workflow/Editor/modules/terminals.test.ts @@ -1,7 +1,11 @@ import { createPinia, setActivePinia } from "pinia"; import { 
testDatatypesMapper } from "@/components/Datatypes/test_fixtures"; +import { useUndoRedoStore } from "@/stores/undoRedoStore"; import { useConnectionStore } from "@/stores/workflowConnectionStore"; +import { useWorkflowCommentStore } from "@/stores/workflowEditorCommentStore"; +import { useWorkflowStateStore } from "@/stores/workflowEditorStateStore"; +import { useWorkflowEditorToolbarStore } from "@/stores/workflowEditorToolbarStore"; import { DataOutput, Step, Steps, type TerminalSource, useWorkflowStepStore } from "@/stores/workflowStepStore"; import { advancedSteps, simpleSteps } from "../test_fixtures"; @@ -22,33 +26,38 @@ import { terminalFactory, } from "./terminals"; +function useStores(id = "mock-workflow") { + const connectionStore = useConnectionStore(id); + const stateStore = useWorkflowStateStore(id); + const stepStore = useWorkflowStepStore(id); + const commentStore = useWorkflowCommentStore(id); + const toolbarStore = useWorkflowEditorToolbarStore(id); + const undoRedoStore = useUndoRedoStore(id); + + return { + connectionStore, + stateStore, + stepStore, + commentStore, + toolbarStore, + undoRedoStore, + }; +} + function setupAdvanced() { const terminals: { [index: string]: { [index: string]: ReturnType } } = {}; - const connectionStore = useConnectionStore("mock-workflow"); - const stepStore = useWorkflowStepStore("mock-workflow"); + const stores = useStores(); Object.values(advancedSteps).map((step) => { const stepLabel = step.label; if (stepLabel) { terminals[stepLabel] = {}; step.inputs?.map((input) => { - terminals[stepLabel]![input.name] = terminalFactory( - step.id, - input, - testDatatypesMapper, - connectionStore, - stepStore - ); + terminals[stepLabel]![input.name] = terminalFactory(step.id, input, testDatatypesMapper, stores); }); step.outputs?.map((output) => { - terminals[stepLabel]![output.name] = terminalFactory( - step.id, - output, - testDatatypesMapper, - connectionStore, - stepStore - ); + terminals[stepLabel]![output.name] = 
terminalFactory(step.id, output, testDatatypesMapper, stores); }); } }); @@ -57,17 +66,16 @@ function setupAdvanced() { function rebuildTerminal>(terminal: T): T { let terminalSource: TerminalSource; - const step = terminal.stepStore.getStep(terminal.stepId); + const step = terminal.stores.stepStore.getStep(terminal.stepId); - const connectionStore = useConnectionStore("mock-workflow"); - const stepStore = useWorkflowStepStore("mock-workflow"); + const stores = useStores(); if (terminal.terminalType === "input") { terminalSource = step!.inputs.find((input) => input.name == terminal.name)!; } else { terminalSource = step!.outputs.find((output) => output.name == terminal.name)!; } - return terminalFactory(terminal.stepId, terminalSource, testDatatypesMapper, connectionStore, stepStore) as T; + return terminalFactory(terminal.stepId, terminalSource, testDatatypesMapper, stores) as T; } describe("terminalFactory", () => { @@ -103,10 +111,9 @@ describe("terminalFactory", () => { expect(terminals["filter_failed"]?.["output"]).toBeInstanceOf(OutputCollectionTerminal); }); it("throws error on invalid terminalSource", () => { - const connectionStore = useConnectionStore("mock-workflow"); - const stepStore = useWorkflowStepStore("mock-workflow"); + const stores = useStores(); - const invalidFactory = () => terminalFactory(1, {} as any, testDatatypesMapper, connectionStore, stepStore); + const invalidFactory = () => terminalFactory(1, {} as any, testDatatypesMapper, stores); expect(invalidFactory).toThrow(); }); }); @@ -560,41 +567,27 @@ describe("canAccept", () => { }); describe("Input terminal", () => { - let stepStore: ReturnType; - let connectionStore: ReturnType; + let stores: ReturnType; let terminals: { [index: number]: { [index: string]: ReturnType } }; beforeEach(() => { setActivePinia(createPinia()); - stepStore = useWorkflowStepStore("mock-workflow"); - connectionStore = useConnectionStore("mock-workflow"); + stores = useStores(); terminals = {}; 
Object.values(simpleSteps).map((step) => { - stepStore.addStep(step); + stores.stepStore.addStep(step); terminals[step.id] = {}; const stepTerminals = terminals[step.id]!; step.inputs?.map((input) => { - stepTerminals[input.name] = terminalFactory( - step.id, - input, - testDatatypesMapper, - connectionStore, - stepStore - ); + stepTerminals[input.name] = terminalFactory(step.id, input, testDatatypesMapper, stores); }); step.outputs?.map((output) => { - stepTerminals[output.name] = terminalFactory( - step.id, - output, - testDatatypesMapper, - connectionStore, - stepStore - ); + stepTerminals[output.name] = terminalFactory(step.id, output, testDatatypesMapper, stores); }); }); }); it("has step", () => { - expect(stepStore.getStep(1)).toEqual(simpleSteps["1"]); + expect(stores.stepStore.getStep(1)).toEqual(simpleSteps["1"]); }); it("infers correct state", () => { const firstInputTerminal = terminals[1]!["input"] as InputTerminal; @@ -627,11 +620,11 @@ describe("Input terminal", () => { firstInputTerminal.disconnect(connection); expect(firstInputTerminal.canAccept(dataInputOutputTerminal).canAccept).toBe(true); expect(dataInputOutputTerminal.validInputTerminals().length).toBe(1); - connectionStore.addConnection(connection); + stores.connectionStore.addConnection(connection); expect(firstInputTerminal.canAccept(dataInputOutputTerminal).canAccept).toBe(false); }); it("will maintain invalid connections", () => { - const connection = connectionStore.connections[0]!; + const connection = stores.connectionStore.connections[0]!; connection.output.name = "I don't exist"; const firstInputTerminal = terminals[1]?.["input"] as InputTerminal; const invalidTerminals = firstInputTerminal.getConnectedTerminals(); diff --git a/client/src/components/Workflow/Editor/modules/terminals.ts b/client/src/components/Workflow/Editor/modules/terminals.ts index 0b4745ea7c81..2d85b31dfd52 100644 --- a/client/src/components/Workflow/Editor/modules/terminals.ts +++ 
b/client/src/components/Workflow/Editor/modules/terminals.ts @@ -1,12 +1,8 @@ import EventEmitter from "events"; import type { DatatypesMapperModel } from "@/components/Datatypes/model"; -import { - type Connection, - type ConnectionId, - getConnectionId, - type useConnectionStore, -} from "@/stores/workflowConnectionStore"; +import type { useWorkflowStores } from "@/composables/workflowStores"; +import { type Connection, type ConnectionId, getConnectionId } from "@/stores/workflowConnectionStore"; import type { CollectionOutput, DataCollectionStepInput, @@ -15,7 +11,6 @@ import type { ParameterOutput, ParameterStepInput, TerminalSource, - useWorkflowStepStore, } from "@/stores/workflowStepStore"; import { assertDefined } from "@/utils/assertions"; @@ -39,8 +34,7 @@ interface BaseTerminalArgs { name: string; stepId: number; datatypesMapper: DatatypesMapperModel; - connectionStore: ReturnType; - stepStore: ReturnType; + stores: ReturnType; } interface InputTerminalInputs { @@ -55,8 +49,7 @@ interface InputTerminalArgs extends BaseTerminalArgs { } class Terminal extends EventEmitter { - connectionStore: ReturnType; - stepStore: ReturnType; + stores; name: string; multiple: boolean; stepId: number; @@ -66,8 +59,7 @@ class Terminal extends EventEmitter { constructor(attr: BaseTerminalArgs) { super(); - this.connectionStore = attr.connectionStore; - this.stepStore = attr.stepStore; + this.stores = attr.stores; this.stepId = attr.stepId; this.name = attr.name; this.multiple = false; @@ -79,19 +71,25 @@ class Terminal extends EventEmitter { return `node-${this.stepId}-${this.terminalType}-${this.name}`; } public get connections(): Connection[] { - return this.connectionStore.getConnectionsForTerminal(this.id); + return this.stores.connectionStore.getConnectionsForTerminal(this.id); } public get mapOver(): CollectionTypeDescriptor { - return this.stepStore.stepMapOver[this.stepId] || NULL_COLLECTION_TYPE_DESCRIPTION; + return this.stores.stepStore.stepMapOver[this.stepId] 
|| NULL_COLLECTION_TYPE_DESCRIPTION; } connect(other: Terminal) { + this.makeConnection(other); + } + makeConnection(other: Terminal) { const connection: Connection = { input: { stepId: this.stepId, name: this.name, connectorType: "input" }, output: { stepId: other.stepId, name: other.name, connectorType: "output" }, }; - this.connectionStore.addConnection(connection); + this.stores.connectionStore.addConnection(connection); } disconnect(other: BaseOutputTerminal | Connection) { + this.dropConnection(other); + } + dropConnection(other: BaseOutputTerminal | Connection) { let connection: Connection; if (other instanceof Terminal) { connection = { @@ -101,7 +99,7 @@ class Terminal extends EventEmitter { } else { connection = other; } - this.connectionStore.removeConnection(getConnectionId(connection)); + this.stores.connectionStore.removeConnection(getConnectionId(connection)); this.resetMappingIfNeeded(connection); } setMapOver(val: CollectionTypeDescriptor) { @@ -120,18 +118,18 @@ class Terminal extends EventEmitter { const effectiveMapOver = this._effectiveMapOver(outputVal); if (!this.localMapOver.equal(effectiveMapOver)) { - this.stepStore.changeStepInputMapOver(this.stepId, this.name, effectiveMapOver); + this.stores.stepStore.changeStepInputMapOver(this.stepId, this.name, effectiveMapOver); this.localMapOver = effectiveMapOver; } if ( !this.mapOver.equal(effectiveMapOver) && (effectiveMapOver.isCollection || - !Object.values(this.stepStore.stepInputMapOver[this.stepId] ?? []).find( + !Object.values(this.stores.stepStore.stepInputMapOver[this.stepId] ?? 
[]).find( (mapOver) => mapOver.isCollection )) ) { - this.stepStore.changeStepMapOver(this.stepId, effectiveMapOver); + this.stores.stepStore.changeStepMapOver(this.stepId, effectiveMapOver); } } _effectiveMapOver(otherCollectionType: CollectionTypeDescriptor) { @@ -141,19 +139,20 @@ class Terminal extends EventEmitter { return Boolean(this.mapOver.isCollection); } resetMapping(_connection?: Connection) { - this.stepStore.changeStepMapOver(this.stepId, NULL_COLLECTION_TYPE_DESCRIPTION); - this.stepStore.resetStepInputMapOver(this.stepId); + this.stores.stepStore.changeStepMapOver(this.stepId, NULL_COLLECTION_TYPE_DESCRIPTION); + this.stores.stepStore.resetStepInputMapOver(this.stepId); } hasConnectedMappedInputTerminals() { // check if step has connected and mapped input terminals ... should maybe be on step/node ? - const connections = this.connectionStore.getConnectionsForStep(this.stepId); + const connections = this.stores.connectionStore.getConnectionsForStep(this.stepId); return connections.some( (connection) => - connection.input.stepId === this.stepId && this.stepStore.stepMapOver[this.stepId]?.collectionType + connection.input.stepId === this.stepId && + this.stores.stepStore.stepMapOver[this.stepId]?.collectionType ); } _getOutputConnections() { - return this.connectionStore.getConnectionsForStep(this.stepId).filter((connection) => { + return this.stores.connectionStore.getConnectionsForStep(this.stepId).filter((connection) => { return connection.output.stepId === this.stepId; }); } @@ -162,7 +161,7 @@ class Terminal extends EventEmitter { return this._getOutputConnections().length > 0; } hasMappedOverInputTerminals() { - return Boolean(this.stepStore.stepMapOver[this.stepId]?.collectionType); + return Boolean(this.stores.stepStore.stepMapOver[this.stepId]?.collectionType); } resetMappingIfNeeded(connection?: Connection) { const mapOver = this.mapOver; @@ -189,8 +188,11 @@ class BaseInputTerminal extends Terminal { this.datatypes = attr.input.datatypes; 
this.multiple = attr.input.multiple; this.optional = attr.input.optional; - if (this.stepStore.stepInputMapOver[this.stepId] && this.stepStore.stepInputMapOver[this.stepId]?.[this.name]) { - this.localMapOver = this.stepStore.stepInputMapOver[this.stepId]![this.name]!; + if ( + this.stores.stepStore.stepInputMapOver[this.stepId] && + this.stores.stepStore.stepInputMapOver[this.stepId]?.[this.name] + ) { + this.localMapOver = this.stores.stepStore.stepInputMapOver[this.stepId]![this.name]!; } else { this.localMapOver = NULL_COLLECTION_TYPE_DESCRIPTION; } @@ -231,39 +233,29 @@ class BaseInputTerminal extends Terminal { _getOutputStepsMapOver() { const connections = this._getOutputConnections(); const connectedStepIds = Array.from(new Set(connections.map((connection) => connection.output.stepId))); - return connectedStepIds.map((stepId) => this.stepStore.stepMapOver[stepId] || NULL_COLLECTION_TYPE_DESCRIPTION); + return connectedStepIds.map( + (stepId) => this.stores.stepStore.stepMapOver[stepId] || NULL_COLLECTION_TYPE_DESCRIPTION + ); } resetMapping(connection?: Connection) { super.resetMapping(connection); - this.stepStore.changeStepInputMapOver(this.stepId, this.name, NULL_COLLECTION_TYPE_DESCRIPTION); + this.stores.stepStore.changeStepInputMapOver(this.stepId, this.name, NULL_COLLECTION_TYPE_DESCRIPTION); const outputStepIds = this._getOutputTerminals().map((outputTerminal) => outputTerminal.stepId); if (connection) { outputStepIds.push(connection.output.stepId); } Array.from(new Set(outputStepIds)).forEach((stepId) => { - const step = this.stepStore.getStep(stepId); + const step = this.stores.stepStore.getStep(stepId); if (step) { // step must have an output, since it is or was connected to this step const terminalSource = step.outputs[0]; if (terminalSource) { - const terminal = terminalFactory( - step.id, - terminalSource, - this.datatypesMapper, - this.connectionStore, - this.stepStore - ); + const terminal = terminalFactory(step.id, terminalSource, 
this.datatypesMapper, this.stores); // drop mapping restrictions terminal.resetMappingIfNeeded(); // re-establish map over through inputs step.inputs.forEach((input) => { - terminalFactory( - step.id, - input, - this.datatypesMapper, - this.connectionStore, - this.stepStore - ).getStepMapOver(); + terminalFactory(step.id, input, this.datatypesMapper, this.stores).getStepMapOver(); }); } } else { @@ -272,7 +264,7 @@ class BaseInputTerminal extends Terminal { }); } _getOutputTerminals() { - return this.connectionStore.getOutputTerminalsForInputTerminal(this.id); + return this.stores.connectionStore.getOutputTerminalsForInputTerminal(this.id); } _getFirstOutputTerminal() { const outputTerminals = this._getOutputTerminals(); @@ -307,7 +299,7 @@ class BaseInputTerminal extends Terminal { _collectionAttached() { const outputTerminals = this._getOutputTerminals(); return outputTerminals.some((outputTerminal) => { - const step = this.stepStore.getStep(outputTerminal.stepId); + const step = this.stores.stepStore.getStep(outputTerminal.stepId); if (!step) { console.error(`Invalid step. 
Could not find step with id ${outputTerminal.stepId} in store.`); @@ -319,7 +311,7 @@ class BaseInputTerminal extends Terminal { if ( output && (("collection" in output && output.collection) || - this.stepStore.stepMapOver[outputTerminal.stepId]?.isCollection || + this.stores.stepStore.stepMapOver[outputTerminal.stepId]?.isCollection || ("extensions" in output && output.extensions.indexOf("input") > 0)) ) { return true; @@ -361,7 +353,7 @@ class BaseInputTerminal extends Terminal { } getConnectedTerminals() { return this.connections.map((connection) => { - const outputStep = this.stepStore.getStep(connection.output.stepId); + const outputStep = this.stores.stepStore.getStep(connection.output.stepId); if (!outputStep) { return new InvalidOutputTerminal({ stepId: -1, @@ -370,8 +362,7 @@ class BaseInputTerminal extends Terminal { name: connection.output.name, valid: false, datatypesMapper: this.datatypesMapper, - connectionStore: this.connectionStore, - stepStore: this.stepStore, + stores: this.stores, }); } let terminalSource = outputStep.outputs.find((output) => output.name === connection.output.name); @@ -383,8 +374,7 @@ class BaseInputTerminal extends Terminal { name: connection.output.name, valid: false, datatypesMapper: this.datatypesMapper, - connectionStore: this.connectionStore, - stepStore: this.stepStore, + stores: this.stores, }); } const postJobActionKey = `ChangeDatatypeAction${connection.output.name}`; @@ -401,13 +391,7 @@ class BaseInputTerminal extends Terminal { }; } - return terminalFactory( - outputStep.id, - terminalSource, - this.datatypesMapper, - this.connectionStore, - this.stepStore - ); + return terminalFactory(outputStep.id, terminalSource, this.datatypesMapper, this.stores); }); } @@ -416,10 +400,10 @@ class BaseInputTerminal extends Terminal { const canAccept = this.attachable(terminal); const connectionId: ConnectionId = `${this.stepId}-${this.name}-${terminal.stepId}-${terminal.name}`; if (!canAccept.canAccept) { - 
this.connectionStore.markInvalidConnection(connectionId, canAccept.reason ?? "Unknown"); + this.stores.connectionStore.markInvalidConnection(connectionId, canAccept.reason ?? "Unknown"); return true; - } else if (this.connectionStore.invalidConnections[connectionId]) { - this.connectionStore.dropFromInvalidConnections(connectionId); + } else if (this.stores.connectionStore.invalidConnections[connectionId]) { + this.stores.connectionStore.dropFromInvalidConnections(connectionId); } return false; }); @@ -658,15 +642,15 @@ class BaseOutputTerminal extends Terminal { constructor(attr: BaseOutputTerminalArgs) { super(attr); this.datatypes = attr.datatypes; - this.optional = attr.optional || Boolean(this.stepStore.getStep(this.stepId)?.when); + this.optional = attr.optional || Boolean(this.stores.stepStore.getStep(this.stepId)?.when); this.terminalType = "output"; } getConnectedTerminals(): InputTerminalsAndInvalid[] { return this.connections.map((connection) => { - const inputStep = this.stepStore.getStep(connection.input.stepId); + const inputStep = this.stores.stepStore.getStep(connection.input.stepId); assertDefined(inputStep, `Invalid step. 
Could not find step with id ${connection.input.stepId} in store.`); - const extraStepInput = this.stepStore.getStepExtraInputs(inputStep.id); + const extraStepInput = this.stores.stepStore.getStepExtraInputs(inputStep.id); const terminalSource = [...extraStepInput, ...inputStep.inputs].find( (input) => input.name === connection.input.name ); @@ -682,17 +666,10 @@ class BaseOutputTerminal extends Terminal { optional: false, multiple: false, }, - connectionStore: this.connectionStore, - stepStore: this.stepStore, + stores: this.stores, }); } - return terminalFactory( - inputStep.id, - terminalSource, - this.datatypesMapper, - this.connectionStore, - this.stepStore - ); + return terminalFactory(inputStep.id, terminalSource, this.datatypesMapper, this.stores); }); } @@ -701,10 +678,10 @@ class BaseOutputTerminal extends Terminal { const canAccept = terminal.attachable(this); const connectionId: ConnectionId = `${terminal.stepId}-${terminal.name}-${this.stepId}-${this.name}`; if (!canAccept.canAccept) { - this.connectionStore.markInvalidConnection(connectionId, canAccept.reason ?? "Unknown"); + this.stores.connectionStore.markInvalidConnection(connectionId, canAccept.reason ?? 
"Unknown"); return true; - } else if (this.connectionStore.invalidConnections[connectionId]) { - this.connectionStore.dropFromInvalidConnections(connectionId); + } else if (this.stores.connectionStore.invalidConnections[connectionId]) { + this.stores.connectionStore.dropFromInvalidConnections(connectionId); } return false; }); @@ -719,15 +696,9 @@ class BaseOutputTerminal extends Terminal { } validInputTerminals() { const validInputTerminals: InputTerminals[] = []; - Object.values(this.stepStore.steps).map((step) => { + Object.values(this.stores.stepStore.steps).map((step) => { step.inputs?.forEach((input) => { - const inputTerminal = terminalFactory( - step.id, - input, - this.datatypesMapper, - this.connectionStore, - this.stepStore - ); + const inputTerminal = terminalFactory(step.id, input, this.datatypesMapper, this.stores); if (inputTerminal.canAccept(this).canAccept) { validInputTerminals.push(inputTerminal); } @@ -763,13 +734,13 @@ export class OutputCollectionTerminal extends BaseOutputTerminal { } getCollectionTypeFromInput() { - const connection = this.connectionStore.connections.find( + const connection = this.stores.connectionStore.connections.find( (connection) => connection.input.name === this.collectionTypeSource && connection.input.stepId === this.stepId ); if (connection) { - const outputStep = this.stepStore.getStep(connection.output.stepId); - const inputStep = this.stepStore.getStep(this.stepId); + const outputStep = this.stores.stepStore.getStep(connection.output.stepId); + const inputStep = this.stores.stepStore.getStep(this.stepId); assertDefined(inputStep, `Invalid step. 
Could not find step with id ${connection.input.stepId} in store.`); if (outputStep) { @@ -780,15 +751,13 @@ export class OutputCollectionTerminal extends BaseOutputTerminal { connection.output.stepId, stepOutput, this.datatypesMapper, - this.connectionStore, - this.stepStore + this.stores ); const inputTerminal = terminalFactory( connection.output.stepId, stepInput, this.datatypesMapper, - this.connectionStore, - this.stepStore + this.stores ); // otherCollectionType is the mapped over output collection as it would appear at the input terminal const otherCollectionType = inputTerminal._otherCollectionType(outputTerminal); @@ -936,8 +905,7 @@ export function terminalFactory( stepId: number, terminalSource: T, datatypesMapper: DatatypesMapperModel, - connectionStore: ReturnType, - stepStore: ReturnType + stores: ReturnType ): TerminalOf { if ("input_type" in terminalSource) { const terminalArgs = { @@ -945,8 +913,7 @@ export function terminalFactory( input_type: terminalSource.input_type, name: terminalSource.name, stepId: stepId, - connectionStore, - stepStore, + stores, }; if ("valid" in terminalSource) { return new InvalidInputTerminal({ @@ -994,8 +961,7 @@ export function terminalFactory( optional: terminalSource.optional, stepId: stepId, datatypesMapper: datatypesMapper, - connectionStore, - stepStore, + stores, }; if (isOutputParameterArg(terminalSource)) { return new OutputParameterTerminal({ From 5e2bca29f69aa3e91a25d9ea13826c03ddc9d0b1 Mon Sep 17 00:00:00 2001 From: Laila Los <44241786+ElectronicBlueberry@users.noreply.github.com> Date: Mon, 18 Mar 2024 13:21:02 +0100 Subject: [PATCH 403/669] add connection undo redo actions --- .../Workflow/Editor/modules/terminals.ts | 37 ++++++++++++------- 1 file changed, 23 insertions(+), 14 deletions(-) diff --git a/client/src/components/Workflow/Editor/modules/terminals.ts b/client/src/components/Workflow/Editor/modules/terminals.ts index 2d85b31dfd52..5a745b8ac0ba 100644 --- 
a/client/src/components/Workflow/Editor/modules/terminals.ts +++ b/client/src/components/Workflow/Editor/modules/terminals.ts @@ -76,20 +76,7 @@ class Terminal extends EventEmitter { public get mapOver(): CollectionTypeDescriptor { return this.stores.stepStore.stepMapOver[this.stepId] || NULL_COLLECTION_TYPE_DESCRIPTION; } - connect(other: Terminal) { - this.makeConnection(other); - } - makeConnection(other: Terminal) { - const connection: Connection = { - input: { stepId: this.stepId, name: this.name, connectorType: "input" }, - output: { stepId: other.stepId, name: other.name, connectorType: "output" }, - }; - this.stores.connectionStore.addConnection(connection); - } - disconnect(other: BaseOutputTerminal | Connection) { - this.dropConnection(other); - } - dropConnection(other: BaseOutputTerminal | Connection) { + buildConnection(other: Terminal | Connection) { let connection: Connection; if (other instanceof Terminal) { connection = { @@ -99,6 +86,28 @@ class Terminal extends EventEmitter { } else { connection = other; } + return connection; + } + connect(other: Terminal | Connection) { + this.stores.undoRedoStore + .action() + .onRun(() => this.makeConnection(other)) + .onUndo(() => this.dropConnection(other)) + .apply(); + } + makeConnection(other: Terminal | Connection) { + const connection = this.buildConnection(other); + this.stores.connectionStore.addConnection(connection); + } + disconnect(other: Terminal | Connection) { + this.stores.undoRedoStore + .action() + .onRun(() => this.dropConnection(other)) + .onUndo(() => this.makeConnection(other)) + .apply(); + } + dropConnection(other: Terminal | Connection) { + const connection = this.buildConnection(other); this.stores.connectionStore.removeConnection(getConnectionId(connection)); this.resetMappingIfNeeded(connection); } From cda8534b2f9d951ead1ac0d4502e9248435c7a2c Mon Sep 17 00:00:00 2001 From: Laila Los <44241786+ElectronicBlueberry@users.noreply.github.com> Date: Mon, 18 Mar 2024 13:35:28 +0100 
Subject: [PATCH 404/669] fix falsey number bug --- client/src/stores/workflowStepStore.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/src/stores/workflowStepStore.ts b/client/src/stores/workflowStepStore.ts index 15cf99b7afe4..8916ac3fdabe 100644 --- a/client/src/stores/workflowStepStore.ts +++ b/client/src/stores/workflowStepStore.ts @@ -208,7 +208,7 @@ export const useWorkflowStepStore = defineScopedStore("workflowStepStore", (work const connectionStore = useConnectionStore(workflowId); function addStep(newStep: NewStep): Step { - const stepId = newStep.id ? newStep.id : getStepIndex.value + 1; + const stepId = newStep.id ?? getStepIndex.value + 1; const step = Object.freeze({ ...newStep, id: stepId } as Step); set(steps.value, stepId.toString(), step); From 133894203fd31235168d455eb5b82af2e056296c Mon Sep 17 00:00:00 2001 From: Laila Los <44241786+ElectronicBlueberry@users.noreply.github.com> Date: Mon, 18 Mar 2024 13:44:45 +0100 Subject: [PATCH 405/669] refactor connection construction --- client/src/stores/workflowStepStore.ts | 30 +++++++++++++++----------- 1 file changed, 18 insertions(+), 12 deletions(-) diff --git a/client/src/stores/workflowStepStore.ts b/client/src/stores/workflowStepStore.ts index 8916ac3fdabe..ee49e138ab66 100644 --- a/client/src/stores/workflowStepStore.ts +++ b/client/src/stores/workflowStepStore.ts @@ -381,8 +381,24 @@ export const useWorkflowStepStore = defineScopedStore("workflowStepStore", (work }; }); +function makeConnection(inputId: number, inputName: string, outputId: number, outputName: string): Connection { + return { + input: { + stepId: inputId, + name: inputName, + connectorType: "input", + }, + output: { + stepId: outputId, + name: outputName, + connectorType: "output", + }, + }; +} + function stepToConnections(step: Step): Connection[] { const connections: Connection[] = []; + if (step.input_connections) { Object.entries(step?.input_connections).forEach(([inputName, outputArray]) => { if 
(outputArray === undefined) { @@ -392,18 +408,7 @@ function stepToConnections(step: Step): Connection[] { outputArray = [outputArray]; } outputArray.forEach((output) => { - const connection: Connection = { - input: { - stepId: step.id, - name: inputName, - connectorType: "input", - }, - output: { - stepId: output.id, - name: output.output_name, - connectorType: "output", - }, - }; + const connection = makeConnection(step.id, inputName, output.id, output.output_name); const connectionInput = step.inputs.find((input) => input.name == inputName); if (connectionInput && "input_subworkflow_step_id" in connectionInput) { connection.input.input_subworkflow_step_id = connectionInput.input_subworkflow_step_id; @@ -412,6 +417,7 @@ function stepToConnections(step: Step): Connection[] { }); }); } + return connections; } From 885f0638db8728abc1405ff941c864680421a969 Mon Sep 17 00:00:00 2001 From: Laila Los <44241786+ElectronicBlueberry@users.noreply.github.com> Date: Mon, 18 Mar 2024 13:52:24 +0100 Subject: [PATCH 406/669] fix only input connections recreated on remove undo --- .../Workflow/Editor/Actions/stepActions.ts | 14 +++++++++++--- client/src/components/Workflow/Editor/Index.vue | 2 +- client/src/stores/workflowConnectionStore.ts | 2 ++ client/src/stores/workflowStepStore.ts | 8 ++++++-- 4 files changed, 20 insertions(+), 6 deletions(-) diff --git a/client/src/components/Workflow/Editor/Actions/stepActions.ts b/client/src/components/Workflow/Editor/Actions/stepActions.ts index afe3fb2d857c..868d3a47868d 100644 --- a/client/src/components/Workflow/Editor/Actions/stepActions.ts +++ b/client/src/components/Workflow/Editor/Actions/stepActions.ts @@ -1,5 +1,6 @@ import { useRefreshFromStore } from "@/stores/refreshFromStore"; import { UndoRedoAction, UndoRedoStore } from "@/stores/undoRedoStore"; +import { Connection, WorkflowConnectionStore } from "@/stores/workflowConnectionStore"; import { WorkflowStateStore } from "@/stores/workflowEditorStateStore"; import type { Step, 
WorkflowStepStore } from "@/stores/workflowStepStore"; import { assertDefined } from "@/utils/assertions"; @@ -161,20 +162,25 @@ export class InsertStepAction extends UndoRedoAction { export class RemoveStepAction extends UndoRedoAction { stepStore; stateStore; + connectionStore; showAttributesCallback; step: Step; + connections: Connection[]; constructor( stepStore: WorkflowStepStore, stateStore: WorkflowStateStore, + connectionStore: WorkflowConnectionStore, showAttributesCallback: () => void, step: Step ) { super(); this.stepStore = stepStore; this.stateStore = stateStore; + this.connectionStore = connectionStore; this.showAttributesCallback = showAttributesCallback; this.step = structuredClone(step); + this.connections = structuredClone(this.connectionStore.getConnectionsForStep(this.step.id)); } run() { @@ -184,7 +190,8 @@ export class RemoveStepAction extends UndoRedoAction { } undo() { - this.stepStore.addStep(structuredClone(this.step)); + this.stepStore.addStep(structuredClone(this.step), false); + this.connections.forEach((connection) => this.connectionStore.addConnection(connection)); this.stateStore.activeNodeId = this.step.id; this.stateStore.hasChanges = true; } @@ -217,7 +224,8 @@ export class CopyStepAction extends UndoRedoAction { export function useStepActions( stepStore: WorkflowStepStore, undoRedoStore: UndoRedoStore, - stateStore: WorkflowStateStore + stateStore: WorkflowStateStore, + connectionStore: WorkflowConnectionStore ) { /** * If the pending action is a `LazyMutateStepAction` and matches the step id and field key, returns it. 
@@ -281,7 +289,7 @@ export function useStepActions( } function removeStep(step: Step, showAttributesCallback: () => void) { - const action = new RemoveStepAction(stepStore, stateStore, showAttributesCallback, step); + const action = new RemoveStepAction(stepStore, stateStore, connectionStore, showAttributesCallback, step); undoRedoStore.applyAction(action); } diff --git a/client/src/components/Workflow/Editor/Index.vue b/client/src/components/Workflow/Editor/Index.vue index 3d348dbe66c3..c6e5c63d5295 100644 --- a/client/src/components/Workflow/Editor/Index.vue +++ b/client/src/components/Workflow/Editor/Index.vue @@ -389,7 +389,7 @@ export default { emit("update:confirmation", false); }); - const stepActions = useStepActions(stepStore, undoRedoStore, stateStore); + const stepActions = useStepActions(stepStore, undoRedoStore, stateStore, connectionStore); return { id, diff --git a/client/src/stores/workflowConnectionStore.ts b/client/src/stores/workflowConnectionStore.ts index 6349abb5f14f..675a53cc253a 100644 --- a/client/src/stores/workflowConnectionStore.ts +++ b/client/src/stores/workflowConnectionStore.ts @@ -89,6 +89,8 @@ export function getConnectionId(item: Connection): ConnectionId { return `${item.input.stepId}-${item.input.name}-${item.output.stepId}-${item.output.name}`; } +export type WorkflowConnectionStore = ReturnType; + export const useConnectionStore = defineScopedStore("workflowConnectionStore", (workflowId) => { const connections = ref[]>([]); const invalidConnections = ref({}); diff --git a/client/src/stores/workflowStepStore.ts b/client/src/stores/workflowStepStore.ts index ee49e138ab66..99025538f604 100644 --- a/client/src/stores/workflowStepStore.ts +++ b/client/src/stores/workflowStepStore.ts @@ -207,12 +207,16 @@ export const useWorkflowStepStore = defineScopedStore("workflowStepStore", (work const connectionStore = useConnectionStore(workflowId); - function addStep(newStep: NewStep): Step { + function addStep(newStep: NewStep, 
createConnections = true): Step { const stepId = newStep.id ?? getStepIndex.value + 1; const step = Object.freeze({ ...newStep, id: stepId } as Step); set(steps.value, stepId.toString(), step); - stepToConnections(step).map((connection) => connectionStore.addConnection(connection)); + + if (createConnections) { + stepToConnections(step).forEach((connection) => connectionStore.addConnection(connection)); + } + stepExtraInputs.value[step.id] = findStepExtraInputs(step); return step; From 6cbae75f51be39949748c896d49364a066036491 Mon Sep 17 00:00:00 2001 From: Laila Los <44241786+ElectronicBlueberry@users.noreply.github.com> Date: Mon, 18 Mar 2024 14:09:54 +0100 Subject: [PATCH 407/669] name lazy actions as such --- .../Workflow/Editor/Actions/commentActions.ts | 10 +++++----- .../Workflow/Editor/Comments/FrameComment.vue | 6 +++--- .../Editor/Comments/WorkflowComment.vue | 20 +++++++++---------- 3 files changed, 18 insertions(+), 18 deletions(-) diff --git a/client/src/components/Workflow/Editor/Actions/commentActions.ts b/client/src/components/Workflow/Editor/Actions/commentActions.ts index 49c9b70b3f68..1d1819dd2d3d 100644 --- a/client/src/components/Workflow/Editor/Actions/commentActions.ts +++ b/client/src/components/Workflow/Editor/Actions/commentActions.ts @@ -61,7 +61,7 @@ export class ChangeColorAction extends UndoRedoAction { } } -class MutateCommentAction extends UndoRedoAction { +class LazyMutateCommentAction extends UndoRedoAction { private commentId: number; private startData: WorkflowComment[K]; private endData: WorkflowComment[K]; @@ -95,21 +95,21 @@ class MutateCommentAction extends UndoRedoActio } } -export class ChangeDataAction extends MutateCommentAction<"data"> { +export class LazyChangeDataAction extends LazyMutateCommentAction<"data"> { constructor(store: WorkflowCommentStore, comment: WorkflowComment, data: WorkflowComment["data"]) { const callback = store.changeData; super(comment, "data", data, callback); } } -export class 
ChangePositionAction extends MutateCommentAction<"position"> { +export class LazyChangePositionAction extends LazyMutateCommentAction<"position"> { constructor(store: WorkflowCommentStore, comment: WorkflowComment, position: [number, number]) { const callback = store.changePosition; super(comment, "position", position, callback); } } -export class ChangeSizeAction extends MutateCommentAction<"size"> { +export class LazyChangeSizeAction extends LazyMutateCommentAction<"size"> { constructor(store: WorkflowCommentStore, comment: WorkflowComment, size: [number, number]) { const callback = store.changeSize; super(comment, "size", size, callback); @@ -118,7 +118,7 @@ export class ChangeSizeAction extends MutateCommentAction<"size"> { type StepWithPosition = Step & { position: NonNullable }; -export class MoveMultipleAction extends UndoRedoAction { +export class LazyMoveMultipleAction extends UndoRedoAction { private commentStore; private stepStore; private comments; diff --git a/client/src/components/Workflow/Editor/Comments/FrameComment.vue b/client/src/components/Workflow/Editor/Comments/FrameComment.vue index 496eb28625a3..9fce95958129 100644 --- a/client/src/components/Workflow/Editor/Comments/FrameComment.vue +++ b/client/src/components/Workflow/Editor/Comments/FrameComment.vue @@ -13,7 +13,7 @@ import { useWorkflowStores } from "@/composables/workflowStores"; import type { FrameWorkflowComment, WorkflowComment, WorkflowCommentColor } from "@/stores/workflowEditorCommentStore"; import type { Step } from "@/stores/workflowStepStore"; -import { MoveMultipleAction } from "../Actions/commentActions"; +import { LazyMoveMultipleAction } from "../Actions/commentActions"; import { brighterColors, darkenedColors } from "./colors"; import { useResizable } from "./useResizable"; import { selectAllText } from "./utilities"; @@ -145,7 +145,7 @@ type StepWithPosition = Step & { position: NonNullable }; let stepsInBounds: StepWithPosition[] = []; let commentsInBounds: 
WorkflowComment[] = []; -let lazyAction: MoveMultipleAction | null = null; +let lazyAction: LazyMoveMultipleAction | null = null; function getAABB() { const aabb = new AxisAlignedBoundingBox(); @@ -164,7 +164,7 @@ function onDragStart() { commentsInBounds.push(props.comment); - lazyAction = new MoveMultipleAction(commentStore, stepStore, commentsInBounds, stepsInBounds, aabb); + lazyAction = new LazyMoveMultipleAction(commentStore, stepStore, commentsInBounds, stepsInBounds, aabb); undoRedoStore.applyLazyAction(lazyAction); } diff --git a/client/src/components/Workflow/Editor/Comments/WorkflowComment.vue b/client/src/components/Workflow/Editor/Comments/WorkflowComment.vue index 1643b5dfd2d2..1427b3f085de 100644 --- a/client/src/components/Workflow/Editor/Comments/WorkflowComment.vue +++ b/client/src/components/Workflow/Editor/Comments/WorkflowComment.vue @@ -7,10 +7,10 @@ import type { WorkflowComment, WorkflowCommentColor } from "@/stores/workflowEdi import { ChangeColorAction, - ChangeDataAction, - ChangePositionAction, - ChangeSizeAction, DeleteCommentAction, + LazyChangeDataAction, + LazyChangePositionAction, + LazyChangeSizeAction, } from "../Actions/commentActions"; import FrameComment from "./FrameComment.vue"; @@ -37,31 +37,31 @@ const cssVariables = computed(() => ({ })); const { commentStore, undoRedoStore } = useWorkflowStores(); -let lazyAction: ChangeDataAction | ChangePositionAction | ChangeSizeAction | null = null; +let lazyAction: LazyChangeDataAction | LazyChangePositionAction | LazyChangeSizeAction | null = null; function onUpdateData(data: any) { - if (lazyAction instanceof ChangeDataAction && undoRedoStore.isQueued(lazyAction)) { + if (lazyAction instanceof LazyChangeDataAction && undoRedoStore.isQueued(lazyAction)) { lazyAction.updateData(data); } else { - lazyAction = new ChangeDataAction(commentStore, props.comment, data); + lazyAction = new LazyChangeDataAction(commentStore, props.comment, data); undoRedoStore.applyLazyAction(lazyAction); } 
} function onResize(size: [number, number]) { - if (lazyAction instanceof ChangeSizeAction && undoRedoStore.isQueued(lazyAction)) { + if (lazyAction instanceof LazyChangeSizeAction && undoRedoStore.isQueued(lazyAction)) { lazyAction.updateData(size); } else { - lazyAction = new ChangeSizeAction(commentStore, props.comment, size); + lazyAction = new LazyChangeSizeAction(commentStore, props.comment, size); undoRedoStore.applyLazyAction(lazyAction); } } function onMove(position: [number, number]) { - if (lazyAction instanceof ChangePositionAction && undoRedoStore.isQueued(lazyAction)) { + if (lazyAction instanceof LazyChangePositionAction && undoRedoStore.isQueued(lazyAction)) { lazyAction.updateData(position); } else { - lazyAction = new ChangePositionAction(commentStore, props.comment, position); + lazyAction = new LazyChangePositionAction(commentStore, props.comment, position); undoRedoStore.applyLazyAction(lazyAction); } } From d74aababbf2723ebb52616a44c399facefc3b226 Mon Sep 17 00:00:00 2001 From: Laila Los <44241786+ElectronicBlueberry@users.noreply.github.com> Date: Mon, 18 Mar 2024 15:23:33 +0100 Subject: [PATCH 408/669] run prettier on readme --- client/src/stores/undoRedoStore/README.md | 25 ++++++++++------------- 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/client/src/stores/undoRedoStore/README.md b/client/src/stores/undoRedoStore/README.md index 04287025f481..6155273d0b38 100644 --- a/client/src/stores/undoRedoStore/README.md +++ b/client/src/stores/undoRedoStore/README.md @@ -16,15 +16,15 @@ An example for this may be a tab, or pop-up having it's own separate undo-redo s How undo-redo operations are handled is determined by Undo-Redo actions. 
There are two ways of creating actions: -* extending the `UndoRedoAction` class, and calling `undoRedoStore.applyAction` -* using the `undoRedoStore.action` factory +- extending the `UndoRedoAction` class, and calling `undoRedoStore.applyAction` +- using the `undoRedoStore.action` factory Actions always provide 4 callbacks, all of them optional: -* run / onRun: ran as soon as the action is applied to the store -* undo / onUndo: ran when an action is rolled back -* redo / onRedo: ran when an action is re-applied. If not defined, `run` will be ran instead -* destroy / onDestroy: ran when an action is discarded, either by the undo stack reaching it's max size, or if a new action is applied when this action is in the redo stack +- run / onRun: ran as soon as the action is applied to the store +- undo / onUndo: ran when an action is rolled back +- redo / onRedo: ran when an action is re-applied. If not defined, `run` will be ran instead +- destroy / onDestroy: ran when an action is discarded, either by the undo stack reaching it's max size, or if a new action is applied when this action is in the redo stack Example: extending the `UndoRedoAction` class: @@ -61,9 +61,10 @@ const commentStore = useWorkflowCommentStore("some-scope"); const newComment = structuredClone(commentStore[comment.id]!); -undoRedoStore.action() +undoRedoStore + .action() .onRun(() => commentStore.deleteComment(newComment.id)) - .onUndo(() => commentStore.addComments([ newComment.id ])) + .onUndo(() => commentStore.addComments([newComment.id])) .apply(); ``` @@ -98,13 +99,9 @@ class ChangeCommentPositionAction extends UndoRedoAction { private startPosition: Position; private endPosition: Position; - constructor( - store: WorkflowCommentStore, - comment: WorkflowComment, - position: Position - ) { + constructor(store: WorkflowCommentStore, comment: WorkflowComment, position: Position) { super(); - this.store + this.store; this.commentId = comment.id; this.startPosition = structuredClone(position); 
this.endPosition = structuredClone(position); From 68a4dcdd4ca4f594faa3fb28d1ad52fe997fbc20 Mon Sep 17 00:00:00 2001 From: Laila Los <44241786+ElectronicBlueberry@users.noreply.github.com> Date: Mon, 18 Mar 2024 15:59:20 +0100 Subject: [PATCH 409/669] fix unit test --- .../Workflow/Editor/Actions/commentActions.ts | 2 +- .../Editor/Comments/WorkflowComment.test.ts | 16 +++++++++++++++- 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/client/src/components/Workflow/Editor/Actions/commentActions.ts b/client/src/components/Workflow/Editor/Actions/commentActions.ts index 1d1819dd2d3d..40a908f03126 100644 --- a/client/src/components/Workflow/Editor/Actions/commentActions.ts +++ b/client/src/components/Workflow/Editor/Actions/commentActions.ts @@ -14,7 +14,7 @@ class CommentAction extends UndoRedoAction { constructor(store: WorkflowCommentStore, comment: BaseWorkflowComment) { super(); this.store = store; - this.comment = structuredClone(this.store.commentsRecord[comment.id]!); + this.comment = structuredClone(comment) as WorkflowComment; } } diff --git a/client/src/components/Workflow/Editor/Comments/WorkflowComment.test.ts b/client/src/components/Workflow/Editor/Comments/WorkflowComment.test.ts index 20d9cf9d01f0..e1056ddf982c 100644 --- a/client/src/components/Workflow/Editor/Comments/WorkflowComment.test.ts +++ b/client/src/components/Workflow/Editor/Comments/WorkflowComment.test.ts @@ -1,6 +1,11 @@ +import { createTestingPinia } from "@pinia/testing"; import { mount, shallowMount } from "@vue/test-utils"; +import { setActivePinia } from "pinia"; import { nextTick } from "vue"; +import { UndoRedoAction } from "@/stores/undoRedoStore"; +import type { TextWorkflowComment } from "@/stores/workflowEditorCommentStore"; + import MarkdownComment from "./MarkdownComment.vue"; import TextComment from "./TextComment.vue"; import WorkflowComment from "./WorkflowComment.vue"; @@ -11,6 +16,9 @@ const changePosition = jest.fn(); const changeColor = jest.fn(); const 
deleteComment = jest.fn(); +const pinia = createTestingPinia(); +setActivePinia(pinia); + jest.mock("@/composables/workflowStores", () => ({ useWorkflowStores: () => ({ commentStore: { @@ -21,6 +29,10 @@ jest.mock("@/composables/workflowStores", () => ({ deleteComment, isJustCreated: () => false, }, + undoRedoStore: { + applyAction: (action: UndoRedoAction) => action.run(), + applyLazyAction: jest.fn(), + }, }), })); @@ -106,9 +118,11 @@ describe("WorkflowComment", () => { }); it("forwards events to the comment store", () => { + const testComment = { ...comment, id: 123, data: { size: 1, text: "HelloWorld" } } as TextWorkflowComment; + const wrapper = mount(WorkflowComment as any, { propsData: { - comment: { ...comment, id: 123, data: { size: 1, text: "HelloWorld" } }, + comment: testComment, scale: 1, rootOffset: {}, }, From a44ded5807430e678e264f12a7fa8197231d606c Mon Sep 17 00:00:00 2001 From: Laila Los <44241786+ElectronicBlueberry@users.noreply.github.com> Date: Tue, 19 Mar 2024 11:47:43 +0100 Subject: [PATCH 410/669] add names to actions --- .../Workflow/Editor/Actions/commentActions.ts | 32 +++++++++++++++++++ .../Workflow/Editor/Actions/stepActions.ts | 20 ++++++++++++ .../Editor/Actions/workflowActions.ts | 4 +++ .../Workflow/Editor/modules/terminals.ts | 2 ++ client/src/stores/undoRedoStore/index.ts | 32 +++++++++++++++++++ .../stores/undoRedoStore/undoRedoAction.ts | 10 ++++++ 6 files changed, 100 insertions(+) diff --git a/client/src/components/Workflow/Editor/Actions/commentActions.ts b/client/src/components/Workflow/Editor/Actions/commentActions.ts index 40a908f03126..baaec39d4ea9 100644 --- a/client/src/components/Workflow/Editor/Actions/commentActions.ts +++ b/client/src/components/Workflow/Editor/Actions/commentActions.ts @@ -19,6 +19,10 @@ class CommentAction extends UndoRedoAction { } export class AddCommentAction extends CommentAction { + get name() { + return `add ${this.comment.type} comment`; + } + undo() { 
this.store.deleteComment(this.comment.id); } @@ -29,6 +33,10 @@ export class AddCommentAction extends CommentAction { } export class DeleteCommentAction extends CommentAction { + get name() { + return `delete ${this.comment.type} comment`; + } + run() { this.store.deleteComment(this.comment.id); } @@ -43,6 +51,7 @@ export class ChangeColorAction extends UndoRedoAction { private toColor: WorkflowCommentColor; private fromColor: WorkflowCommentColor; private store: WorkflowCommentStore; + protected type; constructor(store: WorkflowCommentStore, comment: WorkflowComment, color: WorkflowCommentColor) { super(); @@ -50,6 +59,11 @@ export class ChangeColorAction extends UndoRedoAction { this.commentId = comment.id; this.fromColor = comment.color; this.toColor = color; + this.type = comment.type; + } + + get name() { + return `change ${this.type} comment color to ${this.toColor}`; } run() { @@ -65,6 +79,7 @@ class LazyMutateCommentAction extends UndoRedoA private commentId: number; private startData: WorkflowComment[K]; private endData: WorkflowComment[K]; + protected type; protected applyDataCallback: (commentId: number, data: WorkflowComment[K]) => void; constructor( @@ -79,6 +94,11 @@ class LazyMutateCommentAction extends UndoRedoA this.endData = structuredClone(data); this.applyDataCallback = applyDataCallback; this.applyDataCallback(this.commentId, this.endData); + this.type = comment.type; + } + + get name() { + return `change ${this.type} comment`; } updateData(data: WorkflowComment[K]) { @@ -107,6 +127,10 @@ export class LazyChangePositionAction extends LazyMutateCommentAction<"position" const callback = store.changePosition; super(comment, "position", position, callback); } + + get name() { + return `change ${this.type} comment position`; + } } export class LazyChangeSizeAction extends LazyMutateCommentAction<"size"> { @@ -114,6 +138,10 @@ export class LazyChangeSizeAction extends LazyMutateCommentAction<"size"> { const callback = store.changeSize; super(comment, 
"size", size, callback); } + + get name() { + return `resize ${this.type} comment`; + } } type StepWithPosition = Step & { position: NonNullable }; @@ -130,6 +158,10 @@ export class LazyMoveMultipleAction extends UndoRedoAction { private positionFrom; private positionTo; + get name() { + return "move multiple nodes"; + } + constructor( commentStore: WorkflowCommentStore, stepStore: WorkflowStepStore, diff --git a/client/src/components/Workflow/Editor/Actions/stepActions.ts b/client/src/components/Workflow/Editor/Actions/stepActions.ts index 868d3a47868d..4554e5cfaaba 100644 --- a/client/src/components/Workflow/Editor/Actions/stepActions.ts +++ b/client/src/components/Workflow/Editor/Actions/stepActions.ts @@ -13,6 +13,10 @@ class LazyMutateStepAction extends UndoRedoAction { stepStore; onUndoRedo?: () => void; + get name() { + return "modify step"; + } + constructor(stepStore: WorkflowStepStore, stepId: number, key: K, fromValue: Step[K], toValue: Step[K]) { super(); this.stepStore = stepStore; @@ -48,6 +52,10 @@ export class UpdateStepAction extends UndoRedoAction { toPartial; onUndoRedo?: () => void; + get name() { + return "modify step"; + } + constructor( stepStore: WorkflowStepStore, stateStore: WorkflowStateStore, @@ -128,6 +136,10 @@ export class InsertStepAction extends UndoRedoAction { this.stepData = stepData; } + get name() { + return `insert ${this.stepData.name} step`; + } + stepDataToTuple() { return Object.values(this.stepData) as Parameters; } @@ -183,6 +195,10 @@ export class RemoveStepAction extends UndoRedoAction { this.connections = structuredClone(this.connectionStore.getConnectionsForStep(this.step.id)); } + get name() { + return `remove step ${this.step.label ?? this.step.name}`; + } + run() { this.stepStore.removeStep(this.step.id); this.showAttributesCallback(); @@ -210,6 +226,10 @@ export class CopyStepAction extends UndoRedoAction { this.step = structuredClone(step); } + get name() { + return `duplicate step ${this.step.label ?? 
this.step.name}`; + } + run() { this.step = this.stepStore.addStep(this.step); this.stateStore.activeNodeId = this.step.id; diff --git a/client/src/components/Workflow/Editor/Actions/workflowActions.ts b/client/src/components/Workflow/Editor/Actions/workflowActions.ts index c00988b68cb8..bf8c29a00751 100644 --- a/client/src/components/Workflow/Editor/Actions/workflowActions.ts +++ b/client/src/components/Workflow/Editor/Actions/workflowActions.ts @@ -30,6 +30,10 @@ export class LazySetValueAction extends UndoRedoAction { this.showAttributesCallback(); this.setValueHandler(this.toValue); } + + get name() { + return "modify workflow"; + } } export class SetValueActionHandler { diff --git a/client/src/components/Workflow/Editor/modules/terminals.ts b/client/src/components/Workflow/Editor/modules/terminals.ts index 5a745b8ac0ba..7ed030b537e2 100644 --- a/client/src/components/Workflow/Editor/modules/terminals.ts +++ b/client/src/components/Workflow/Editor/modules/terminals.ts @@ -93,6 +93,7 @@ class Terminal extends EventEmitter { .action() .onRun(() => this.makeConnection(other)) .onUndo(() => this.dropConnection(other)) + .setName("connect steps") .apply(); } makeConnection(other: Terminal | Connection) { @@ -104,6 +105,7 @@ class Terminal extends EventEmitter { .action() .onRun(() => this.dropConnection(other)) .onUndo(() => this.makeConnection(other)) + .setName("disconnect steps") .apply(); } dropConnection(other: Terminal | Connection) { diff --git a/client/src/stores/undoRedoStore/index.ts b/client/src/stores/undoRedoStore/index.ts index 9e8483b4f926..c80d6c6e5098 100644 --- a/client/src/stores/undoRedoStore/index.ts +++ b/client/src/stores/undoRedoStore/index.ts @@ -114,6 +114,29 @@ export const useUndoRedoStore = defineScopedStore("undoRedoStore", () => { const isQueued = computed(() => (action?: UndoRedoAction | null) => pendingLazyAction.value === action); + const nextUndoAction = computed(() => undoActionStack.value[undoActionStack.value.length - 1]); + 
const nextRedoAction = computed(() => redoActionStack.value[redoActionStack.value.length - 1]); + + const undoText = computed(() => { + if (!nextUndoAction.value) { + return "Nothing to undo"; + } else if (!nextUndoAction.value.name) { + return "Undo"; + } else { + return `Undo ${nextUndoAction.value.name}`; + } + }); + + const redoText = computed(() => { + if (!nextRedoAction.value) { + return "Nothing to redo"; + } else if (!nextRedoAction.value.name) { + return "Redo"; + } else { + return `Redo ${nextRedoAction.value.name}`; + } + }); + return { undoActionStack, redoActionStack, @@ -128,6 +151,10 @@ export const useUndoRedoStore = defineScopedStore("undoRedoStore", () => { setLazyActionTimeout, isQueued, pendingLazyAction, + nextUndoAction, + nextRedoAction, + undoText, + redoText, $reset, }; }); @@ -165,6 +192,11 @@ class FactoryAction extends UndoRedoAction { return this; } + setName(name: string) { + this.name = name; + return this; + } + apply() { this.applyCallback(this); } diff --git a/client/src/stores/undoRedoStore/undoRedoAction.ts b/client/src/stores/undoRedoStore/undoRedoAction.ts index 022748017a91..58d23dcd423b 100644 --- a/client/src/stores/undoRedoStore/undoRedoAction.ts +++ b/client/src/stores/undoRedoStore/undoRedoAction.ts @@ -1,4 +1,14 @@ export class UndoRedoAction { + private internalName?: string; + + get name(): string | undefined { + return this.internalName; + } + + set name(name: string) { + this.internalName = name; + } + run() { return; } From 6f6809964dd2dd2c0e1b6b8ac3de7654d3a3de2b Mon Sep 17 00:00:00 2001 From: Laila Los <44241786+ElectronicBlueberry@users.noreply.github.com> Date: Tue, 19 Mar 2024 12:28:48 +0100 Subject: [PATCH 411/669] add undo/redo buttons --- client/package.json | 1 + .../Workflow/Editor/Actions/stepActions.ts | 4 +-- .../src/components/Workflow/Editor/Index.vue | 28 +++++++++++++++++-- client/src/stores/undoRedoStore/index.ts | 5 ++++ client/yarn.lock | 20 +++++++++++++ 5 files changed, 53 insertions(+), 5 
deletions(-) diff --git a/client/package.json b/client/package.json index f63ed194a64f..15d5b7f818c9 100644 --- a/client/package.json +++ b/client/package.json @@ -43,6 +43,7 @@ "@sentry/browser": "^7.74.1", "@types/jest": "^29.5.6", "@vueuse/core": "^10.5.0", + "@vueuse/math": "^10.9.0", "assert": "^2.1.0", "axios": "^1.6.2", "babel-runtime": "^6.26.0", diff --git a/client/src/components/Workflow/Editor/Actions/stepActions.ts b/client/src/components/Workflow/Editor/Actions/stepActions.ts index 4554e5cfaaba..12adf48051d2 100644 --- a/client/src/components/Workflow/Editor/Actions/stepActions.ts +++ b/client/src/components/Workflow/Editor/Actions/stepActions.ts @@ -137,7 +137,7 @@ export class InsertStepAction extends UndoRedoAction { } get name() { - return `insert ${this.stepData.name} step`; + return `insert ${this.stepData.name}`; } stepDataToTuple() { @@ -196,7 +196,7 @@ export class RemoveStepAction extends UndoRedoAction { } get name() { - return `remove step ${this.step.label ?? this.step.name}`; + return `remove ${this.step.label ?? this.step.name}`; } run() { diff --git a/client/src/components/Workflow/Editor/Index.vue b/client/src/components/Workflow/Editor/Index.vue index c6e5c63d5295..09d623d6591e 100644 --- a/client/src/components/Workflow/Editor/Index.vue +++ b/client/src/components/Workflow/Editor/Index.vue @@ -44,6 +44,21 @@ Workflow Editor {{ name }} Create New Workflow + + + + + + + + +
- diff --git a/client/src/style/scss/base.scss b/client/src/style/scss/base.scss index 0b08c0aa0af1..6532c64b2c8d 100644 --- a/client/src/style/scss/base.scss +++ b/client/src/style/scss/base.scss @@ -42,6 +42,7 @@ $fa-font-path: "../../../node_modules/@fortawesome/fontawesome-free/webfonts/"; @import "peek-columns.scss"; @import "dataset.scss"; @import "list-item.scss"; +@import "panels.scss"; // ==== Select2 ==== /* fix for zero width select2 - remove when fixed there */ @@ -124,73 +125,6 @@ body { display: none; } -// ==== Unified panel styles ==== -.unified-panel { - display: flex; - flex-flow: column; - background: $panel-bg-color; - height: 100%; - overflow: auto; -} -.unified-panel-header { - @extend .unselectable; - @extend .px-3; - @extend .d-flex; - height: $panel_header_height; - font-size: 1rem; - font-weight: bold; - align-items: center; - color: $panel-header-text-color; - background: $panel-bg-header-color; - a { - color: $panel-header-text-color; - } - .unified-panel-header-inner { - @extend .w-100; - min-width: max-content; - align-items: center; - justify-content: space-between; - display: flex; - } - .panel-header-buttons { - order: 9999; - @extend .d-flex; - .panel-header-button { - text-align: center; - &:not(:last-child) { - @extend .mr-2; - } - &:hover { - color: $brand-info; - } - } - .panel-header-button-toolbox { - color: $brand-dark; - flex: 1; - @extend .p-1; - text-align: center; - font-size: $h4-font-size; - align-items: center; - &:hover { - color: $brand-info; - background-color: $brand-light; - text-decoration: none !important; - border-color: $brand-light; - } - } - } -} -.unified-panel-controls { - @extend .px-3; -} -.unified-panel-body { - @extend .p-0; - @extend .w-100; - @extend .h-100; - @extend .overflow-auto; - flex: 1; -} - // State colors $galaxy-state-border: ( "new": $state-default-border, diff --git a/client/src/style/scss/panels.scss b/client/src/style/scss/panels.scss new file mode 100644 index 
000000000000..c6ed1f34b911 --- /dev/null +++ b/client/src/style/scss/panels.scss @@ -0,0 +1,65 @@ +.unified-panel { + display: flex; + flex-flow: column; + background: $panel-bg-color; + height: 100%; + overflow: auto; +} +.unified-panel-header { + @extend .unselectable; + @extend .px-3; + @extend .d-flex; + height: $panel_header_height; + font-size: 1rem; + font-weight: bold; + align-items: center; + color: $panel-header-text-color; + background: $panel-bg-header-color; + a { + color: $panel-header-text-color; + } + .unified-panel-header-inner { + @extend .w-100; + min-width: max-content; + align-items: center; + justify-content: space-between; + display: flex; + } + .panel-header-buttons { + order: 9999; + @extend .d-flex; + .panel-header-button { + text-align: center; + &:not(:last-child) { + @extend .mr-2; + } + &:hover { + color: $brand-info; + } + } + .panel-header-button-toolbox { + color: $brand-dark; + flex: 1; + @extend .p-1; + text-align: center; + font-size: $h4-font-size; + align-items: center; + &:hover { + color: $brand-info; + background-color: $brand-light; + text-decoration: none !important; + border-color: $brand-light; + } + } + } +} +.unified-panel-controls { + @extend .px-3; +} +.unified-panel-body { + @extend .p-0; + @extend .w-100; + @extend .h-100; + @extend .overflow-auto; + flex: 1; +} diff --git a/client/src/style/scss/ui.scss b/client/src/style/scss/ui.scss index ec436450cb75..007812dbc959 100644 --- a/client/src/style/scss/ui.scss +++ b/client/src/style/scss/ui.scss @@ -40,6 +40,10 @@ overflow: auto !important; } +.overflow-y { + overflow-y: auto !important; +} + // utility class to set word wrap to normal .word-wrap-normal { word-wrap: normal; From c839c3ae90679b0e1a26a94517cf1d7bf0e1bd4b Mon Sep 17 00:00:00 2001 From: guerler Date: Mon, 8 Apr 2024 15:37:59 +0300 Subject: [PATCH 471/669] Add search field to visualizations activity --- .../components/Panels/VisualizationPanel.vue | 44 +++++++++++++------ 1 file changed, 30 
insertions(+), 14 deletions(-) diff --git a/client/src/components/Panels/VisualizationPanel.vue b/client/src/components/Panels/VisualizationPanel.vue index 68b988fb9aae..076f31dd57f4 100644 --- a/client/src/components/Panels/VisualizationPanel.vue +++ b/client/src/components/Panels/VisualizationPanel.vue @@ -1,9 +1,10 @@ @@ -41,7 +44,8 @@ async function getPlugins() {

Create Visualization

-
+ +
diff --git a/client/src/components/Panels/VisualizationPanel.vue b/client/src/components/Panels/VisualizationPanel.vue index 92c78a95b309..5d11937303fa 100644 --- a/client/src/components/Panels/VisualizationPanel.vue +++ b/client/src/components/Panels/VisualizationPanel.vue @@ -79,7 +79,7 @@ onMounted(() => {
diff --git a/client/src/components/Panels/VisualizationPanel.vue b/client/src/components/Panels/VisualizationPanel.vue index 5d11937303fa..9297032bfc3f 100644 --- a/client/src/components/Panels/VisualizationPanel.vue +++ b/client/src/components/Panels/VisualizationPanel.vue @@ -85,7 +85,7 @@ onMounted(() => {
{{ plugin.html }}
-
{{ plugin.description }}
+
{{ plugin.description }}
From 11bb4b414b7351d8d23a3319d38105d52dfb703a Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Wed, 10 Apr 2024 12:08:11 +0100 Subject: [PATCH 484/669] Fix KeyError in ``XForwardedHostMiddleware`` when `REMOTE_ADDR` is not defined. Reported by @vazovn . Also fix typos in environment variable names. --- lib/galaxy/web/framework/middleware/xforwardedhost.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/web/framework/middleware/xforwardedhost.py b/lib/galaxy/web/framework/middleware/xforwardedhost.py index 3e0e11958632..d47d741bc9c1 100644 --- a/lib/galaxy/web/framework/middleware/xforwardedhost.py +++ b/lib/galaxy/web/framework/middleware/xforwardedhost.py @@ -10,11 +10,11 @@ def __init__(self, app, global_conf=None): def __call__(self, environ, start_response): x_forwarded_host = environ.get("HTTP_X_FORWARDED_HOST", None) if x_forwarded_host: - environ["ORGINAL_HTTP_HOST"] = environ["HTTP_HOST"] + environ["ORIGINAL_HTTP_HOST"] = environ.get("HTTP_HOST") environ["HTTP_HOST"] = x_forwarded_host.split(", ", 1)[0] x_forwarded_for = environ.get("HTTP_X_FORWARDED_FOR", None) if x_forwarded_for: - environ["ORGINAL_REMOTE_ADDR"] = environ["REMOTE_ADDR"] + environ["ORIGINAL_REMOTE_ADDR"] = environ.get("REMOTE_ADDR") environ["REMOTE_ADDR"] = x_forwarded_for.split(",", 1)[0].strip() x_forwarded_proto = environ.get("HTTP_X_FORWARDED_PROTO", None) if x_forwarded_proto: From ac526184e8cb78f2b0df3cf1670955fec870a52d Mon Sep 17 00:00:00 2001 From: davelopez <46503462+davelopez@users.noreply.github.com> Date: Wed, 10 Apr 2024 15:06:11 +0200 Subject: [PATCH 485/669] Make sure `data_dir` is used for default result backend DB --- doc/source/admin/galaxy_options.rst | 7 ++++--- lib/galaxy/config/__init__.py | 13 +++++++++++++ lib/galaxy/config/sample/galaxy.yml.sample | 7 ++++--- lib/galaxy/config/schemas/config_schema.yml | 4 ++-- 4 files changed, 23 insertions(+), 8 deletions(-) diff --git a/doc/source/admin/galaxy_options.rst 
b/doc/source/admin/galaxy_options.rst index 5cd5d4d2649f..a8a7066d2554 100644 --- a/doc/source/admin/galaxy_options.rst +++ b/doc/source/admin/galaxy_options.rst @@ -5154,9 +5154,10 @@ only if you have setup a Celery worker for Galaxy and you have configured the `celery_conf` option below. Specifically, you need to set the `result_backend` option in the `celery_conf` option to - a valid Celery result backend URL. By default a SQLite database is - used for storing task results, please use a more robust backend - for production setups like Redis. For details, see + a valid Celery result backend URL. By default, Galaxy uses an + SQLite database at '/results.sqlite' for storing task + results. Please use a more robust backend for production setups + like Redis. For details, see https://docs.galaxyproject.org/en/master/admin/production.html#use-celery-for-asynchronous-tasks :Default: ``false`` :Type: bool diff --git a/lib/galaxy/config/__init__.py b/lib/galaxy/config/__init__.py index ce8e5e8dc2b0..626aa1d489ea 100644 --- a/lib/galaxy/config/__init__.py +++ b/lib/galaxy/config/__init__.py @@ -1082,6 +1082,9 @@ def _process_config(self, kwargs: Dict[str, Any]) -> None: self.amqp_internal_connection = ( f"sqlalchemy+sqlite:///{self._in_data_dir('control.sqlite')}?isolation_level=IMMEDIATE" ) + + self._process_celery_config() + self.pretty_datetime_format = expand_pretty_datetime_format(self.pretty_datetime_format) try: with open(self.user_preferences_extra_conf_path) as stream: @@ -1203,6 +1206,16 @@ def _load_theme(path: str, theme_dict: dict): else: _load_theme(self.themes_config_file, self.themes) + def _process_celery_config(self): + if self.celery_conf: + result_backend = self.celery_conf.get("result_backend") + if result_backend: + # If the result_backend is the default SQLite database, we need to + # ensure that the correct data directory is used. 
+ if "results.sqlite" in result_backend: + result_backend = f"db+sqlite:///{self._in_data_dir('results.sqlite')}?isolation_level=IMMEDIATE" + self.celery_conf["result_backend"] = result_backend + def _check_database_connection_strings(self): """ Verify connection URI strings in galaxy's configuration are parseable with urllib. diff --git a/lib/galaxy/config/sample/galaxy.yml.sample b/lib/galaxy/config/sample/galaxy.yml.sample index b379fa67e838..a7ed80d5a489 100644 --- a/lib/galaxy/config/sample/galaxy.yml.sample +++ b/lib/galaxy/config/sample/galaxy.yml.sample @@ -2761,9 +2761,10 @@ galaxy: # only if you have setup a Celery worker for Galaxy and you have # configured the `celery_conf` option below. Specifically, you need to # set the `result_backend` option in the `celery_conf` option to a - # valid Celery result backend URL. By default a SQLite database is - # used for storing task results, please use a more robust backend for - # production setups like Redis. For details, see + # valid Celery result backend URL. By default, Galaxy uses an SQLite + # database at '/results.sqlite' for storing task results. + # Please use a more robust backend for production setups like Redis. + # For details, see # https://docs.galaxyproject.org/en/master/admin/production.html#use-celery-for-asynchronous-tasks #enable_celery_tasks: false diff --git a/lib/galaxy/config/schemas/config_schema.yml b/lib/galaxy/config/schemas/config_schema.yml index 1f742f869339..8e9c0e44a13e 100644 --- a/lib/galaxy/config/schemas/config_schema.yml +++ b/lib/galaxy/config/schemas/config_schema.yml @@ -3764,8 +3764,8 @@ mapping: Activate this only if you have setup a Celery worker for Galaxy and you have configured the `celery_conf` option below. Specifically, you need to set the `result_backend` option in the `celery_conf` option to a valid Celery result - backend URL. By default a SQLite database is used for storing task results, please - use a more robust backend for production setups like Redis. 
+ backend URL. By default, Galaxy uses an SQLite database at '/results.sqlite' for storing task results. + Please use a more robust backend for production setups like Redis. For details, see https://docs.galaxyproject.org/en/master/admin/production.html#use-celery-for-asynchronous-tasks celery_conf: From f3b968adf667a6d000e0ad7966258830fc67926c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?David=20L=C3=B3pez?= <46503462+davelopez@users.noreply.github.com> Date: Wed, 10 Apr 2024 16:42:38 +0200 Subject: [PATCH 486/669] Fix typo Co-authored-by: John Davis --- lib/galaxy/config/schemas/config_schema.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/galaxy/config/schemas/config_schema.yml b/lib/galaxy/config/schemas/config_schema.yml index 8e9c0e44a13e..95f5aaa027de 100644 --- a/lib/galaxy/config/schemas/config_schema.yml +++ b/lib/galaxy/config/schemas/config_schema.yml @@ -3765,7 +3765,7 @@ mapping: configured the `celery_conf` option below. Specifically, you need to set the `result_backend` option in the `celery_conf` option to a valid Celery result backend URL. By default, Galaxy uses an SQLite database at '/results.sqlite' for storing task results. - Please use a more robust backend for production setups like Redis. + Please use a more robust backend for production setups (e.g. Redis). 
For details, see https://docs.galaxyproject.org/en/master/admin/production.html#use-celery-for-asynchronous-tasks celery_conf: From 4ec8cef337cb62be6e1e3529e3fdf507fc9ea982 Mon Sep 17 00:00:00 2001 From: davelopez <46503462+davelopez@users.noreply.github.com> Date: Wed, 10 Apr 2024 17:03:55 +0200 Subject: [PATCH 487/669] Reword sentence Co-authored-by: Nicola Soranzo --- doc/source/admin/galaxy_options.rst | 4 ++-- lib/galaxy/config/sample/galaxy.yml.sample | 2 +- lib/galaxy/config/schemas/config_schema.yml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/source/admin/galaxy_options.rst b/doc/source/admin/galaxy_options.rst index a8a7066d2554..d83bbb65edd3 100644 --- a/doc/source/admin/galaxy_options.rst +++ b/doc/source/admin/galaxy_options.rst @@ -5156,8 +5156,8 @@ to set the `result_backend` option in the `celery_conf` option to a valid Celery result backend URL. By default, Galaxy uses an SQLite database at '/results.sqlite' for storing task - results. Please use a more robust backend for production setups - like Redis. For details, see + results. Please use a more robust backend (e.g. Redis) for + production setups. For details, see https://docs.galaxyproject.org/en/master/admin/production.html#use-celery-for-asynchronous-tasks :Default: ``false`` :Type: bool diff --git a/lib/galaxy/config/sample/galaxy.yml.sample b/lib/galaxy/config/sample/galaxy.yml.sample index a7ed80d5a489..b27e6c887ae3 100644 --- a/lib/galaxy/config/sample/galaxy.yml.sample +++ b/lib/galaxy/config/sample/galaxy.yml.sample @@ -2763,7 +2763,7 @@ galaxy: # set the `result_backend` option in the `celery_conf` option to a # valid Celery result backend URL. By default, Galaxy uses an SQLite # database at '/results.sqlite' for storing task results. - # Please use a more robust backend for production setups like Redis. + # Please use a more robust backend (e.g. Redis) for production setups. 
# For details, see # https://docs.galaxyproject.org/en/master/admin/production.html#use-celery-for-asynchronous-tasks #enable_celery_tasks: false diff --git a/lib/galaxy/config/schemas/config_schema.yml b/lib/galaxy/config/schemas/config_schema.yml index 95f5aaa027de..4fffa48b62d3 100644 --- a/lib/galaxy/config/schemas/config_schema.yml +++ b/lib/galaxy/config/schemas/config_schema.yml @@ -3765,7 +3765,7 @@ mapping: configured the `celery_conf` option below. Specifically, you need to set the `result_backend` option in the `celery_conf` option to a valid Celery result backend URL. By default, Galaxy uses an SQLite database at '/results.sqlite' for storing task results. - Please use a more robust backend for production setups (e.g. Redis). + Please use a more robust backend (e.g. Redis) for production setups. For details, see https://docs.galaxyproject.org/en/master/admin/production.html#use-celery-for-asynchronous-tasks celery_conf: From f10ba4a37b53e7e35e4f8fb8e0ee4635eaa0ec33 Mon Sep 17 00:00:00 2001 From: davelopez <46503462+davelopez@users.noreply.github.com> Date: Wed, 10 Apr 2024 17:12:37 +0200 Subject: [PATCH 488/669] Do not set default value for result_backend --- lib/galaxy/config/__init__.py | 12 ++++-------- lib/galaxy/config/schemas/config_schema.yml | 4 ++-- 2 files changed, 6 insertions(+), 10 deletions(-) diff --git a/lib/galaxy/config/__init__.py b/lib/galaxy/config/__init__.py index 626aa1d489ea..6670611637e6 100644 --- a/lib/galaxy/config/__init__.py +++ b/lib/galaxy/config/__init__.py @@ -1207,14 +1207,10 @@ def _load_theme(path: str, theme_dict: dict): _load_theme(self.themes_config_file, self.themes) def _process_celery_config(self): - if self.celery_conf: - result_backend = self.celery_conf.get("result_backend") - if result_backend: - # If the result_backend is the default SQLite database, we need to - # ensure that the correct data directory is used. 
- if "results.sqlite" in result_backend: - result_backend = f"db+sqlite:///{self._in_data_dir('results.sqlite')}?isolation_level=IMMEDIATE" - self.celery_conf["result_backend"] = result_backend + if self.celery_conf and self.celery_conf.get("result_backend") is None: + # If the result_backend is not set, use a SQLite database in the data directory + result_backend = f"db+sqlite:///{self._in_data_dir('results.sqlite')}?isolation_level=IMMEDIATE" + self.celery_conf["result_backend"] = result_backend def _check_database_connection_strings(self): """ diff --git a/lib/galaxy/config/schemas/config_schema.yml b/lib/galaxy/config/schemas/config_schema.yml index 4fffa48b62d3..13051a2098ef 100644 --- a/lib/galaxy/config/schemas/config_schema.yml +++ b/lib/galaxy/config/schemas/config_schema.yml @@ -3772,7 +3772,6 @@ mapping: type: any required: false default: - result_backend: db+sqlite:///./database/results.sqlite?isolation_level=IMMEDIATE task_routes: 'galaxy.fetch_data': 'galaxy.external' 'galaxy.set_job_metadata': 'galaxy.external' @@ -3783,7 +3782,8 @@ mapping: of the task defined in the galaxy.celery.tasks module. The `broker_url` option, if unset, defaults to the value of `amqp_internal_connection`. - The `result_backend` option must be set if the `enable_celery_tasks` option is set. + The `result_backend` option, if unset, defaults to an SQLite database at '/results.sqlite' + for storing task results. The galaxy.fetch_data task can be disabled by setting its route to "disabled": `galaxy.fetch_data: disabled`. (Other tasks cannot be disabled on a per-task basis at this time.) 
From a739ed1cb80e2a9961053e6950503db9dd019c8b Mon Sep 17 00:00:00 2001 From: davelopez <46503462+davelopez@users.noreply.github.com> Date: Wed, 10 Apr 2024 17:56:52 +0200 Subject: [PATCH 489/669] Rebuild config samples --- doc/source/admin/galaxy_options.rst | 7 ++++--- lib/galaxy/config/sample/galaxy.yml.sample | 6 +++--- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/doc/source/admin/galaxy_options.rst b/doc/source/admin/galaxy_options.rst index d83bbb65edd3..2af6995cfd09 100644 --- a/doc/source/admin/galaxy_options.rst +++ b/doc/source/admin/galaxy_options.rst @@ -5173,14 +5173,15 @@ `foo` is the function name of the task defined in the galaxy.celery.tasks module. The `broker_url` option, if unset, defaults to the value of - `amqp_internal_connection`. The `result_backend` option must be - set if the `enable_celery_tasks` option is set. + `amqp_internal_connection`. The `result_backend` option, if unset, + defaults to an SQLite database at '/results.sqlite' for + storing task results. The galaxy.fetch_data task can be disabled by setting its route to "disabled": `galaxy.fetch_data: disabled`. (Other tasks cannot be disabled on a per-task basis at this time.) For details, see Celery documentation at https://docs.celeryq.dev/en/stable/userguide/configuration.html. -:Default: ``{'result_backend': 'db+sqlite:///./database/results.sqlite?isolation_level=IMMEDIATE', 'task_routes': {'galaxy.fetch_data': 'galaxy.external', 'galaxy.set_job_metadata': 'galaxy.external'}}`` +:Default: ``{'task_routes': {'galaxy.fetch_data': 'galaxy.external', 'galaxy.set_job_metadata': 'galaxy.external'}}`` :Type: any diff --git a/lib/galaxy/config/sample/galaxy.yml.sample b/lib/galaxy/config/sample/galaxy.yml.sample index b27e6c887ae3..1cc93b78e462 100644 --- a/lib/galaxy/config/sample/galaxy.yml.sample +++ b/lib/galaxy/config/sample/galaxy.yml.sample @@ -2773,15 +2773,15 @@ galaxy: # `foo` is the function name of the task defined in the # galaxy.celery.tasks module. 
# The `broker_url` option, if unset, defaults to the value of - # `amqp_internal_connection`. The `result_backend` option must be set - # if the `enable_celery_tasks` option is set. + # `amqp_internal_connection`. The `result_backend` option, if unset, + # defaults to an SQLite database at '/results.sqlite' for + # storing task results. # The galaxy.fetch_data task can be disabled by setting its route to # "disabled": `galaxy.fetch_data: disabled`. (Other tasks cannot be # disabled on a per-task basis at this time.) # For details, see Celery documentation at # https://docs.celeryq.dev/en/stable/userguide/configuration.html. #celery_conf: - # result_backend: db+sqlite:///./database/results.sqlite?isolation_level=IMMEDIATE # task_routes: # galaxy.fetch_data: galaxy.external # galaxy.set_job_metadata: galaxy.external From 514df553239f76d8ffa2ece3f537198b58aff7dd Mon Sep 17 00:00:00 2001 From: davelopez <46503462+davelopez@users.noreply.github.com> Date: Wed, 10 Apr 2024 18:23:52 +0200 Subject: [PATCH 490/669] Add broker and backend defaults as null This will keep the values in the sample config. --- doc/source/admin/galaxy_options.rst | 10 +++++----- lib/galaxy/config/sample/galaxy.yml.sample | 10 ++++++---- lib/galaxy/config/schemas/config_schema.yml | 6 ++++-- 3 files changed, 15 insertions(+), 11 deletions(-) diff --git a/doc/source/admin/galaxy_options.rst b/doc/source/admin/galaxy_options.rst index 2af6995cfd09..9ffc90e33ba4 100644 --- a/doc/source/admin/galaxy_options.rst +++ b/doc/source/admin/galaxy_options.rst @@ -5172,16 +5172,16 @@ To refer to a task by name, use the template `galaxy.foo` where `foo` is the function name of the task defined in the galaxy.celery.tasks module. - The `broker_url` option, if unset, defaults to the value of - `amqp_internal_connection`. The `result_backend` option, if unset, - defaults to an SQLite database at '/results.sqlite' for - storing task results. 
+ The `broker_url` option, if unset or null, defaults to the value + of `amqp_internal_connection`. The `result_backend` option, if + unset or null, defaults to an SQLite database at + '/results.sqlite' for storing task results. The galaxy.fetch_data task can be disabled by setting its route to "disabled": `galaxy.fetch_data: disabled`. (Other tasks cannot be disabled on a per-task basis at this time.) For details, see Celery documentation at https://docs.celeryq.dev/en/stable/userguide/configuration.html. -:Default: ``{'task_routes': {'galaxy.fetch_data': 'galaxy.external', 'galaxy.set_job_metadata': 'galaxy.external'}}`` +:Default: ``{'broker_url': None, 'result_backend': None, 'task_routes': {'galaxy.fetch_data': 'galaxy.external', 'galaxy.set_job_metadata': 'galaxy.external'}}`` :Type: any diff --git a/lib/galaxy/config/sample/galaxy.yml.sample b/lib/galaxy/config/sample/galaxy.yml.sample index 1cc93b78e462..d55859a04eba 100644 --- a/lib/galaxy/config/sample/galaxy.yml.sample +++ b/lib/galaxy/config/sample/galaxy.yml.sample @@ -2772,16 +2772,18 @@ galaxy: # To refer to a task by name, use the template `galaxy.foo` where # `foo` is the function name of the task defined in the # galaxy.celery.tasks module. - # The `broker_url` option, if unset, defaults to the value of - # `amqp_internal_connection`. The `result_backend` option, if unset, - # defaults to an SQLite database at '/results.sqlite' for - # storing task results. + # The `broker_url` option, if unset or null, defaults to the value of + # `amqp_internal_connection`. The `result_backend` option, if unset or + # null, defaults to an SQLite database at '/results.sqlite' + # for storing task results. # The galaxy.fetch_data task can be disabled by setting its route to # "disabled": `galaxy.fetch_data: disabled`. (Other tasks cannot be # disabled on a per-task basis at this time.) # For details, see Celery documentation at # https://docs.celeryq.dev/en/stable/userguide/configuration.html. 
#celery_conf: + # broker_url: null + # result_backend: null # task_routes: # galaxy.fetch_data: galaxy.external # galaxy.set_job_metadata: galaxy.external diff --git a/lib/galaxy/config/schemas/config_schema.yml b/lib/galaxy/config/schemas/config_schema.yml index 13051a2098ef..7e15c81cb566 100644 --- a/lib/galaxy/config/schemas/config_schema.yml +++ b/lib/galaxy/config/schemas/config_schema.yml @@ -3772,6 +3772,8 @@ mapping: type: any required: false default: + broker_url: null + result_backend: null task_routes: 'galaxy.fetch_data': 'galaxy.external' 'galaxy.set_job_metadata': 'galaxy.external' @@ -3781,8 +3783,8 @@ mapping: To refer to a task by name, use the template `galaxy.foo` where `foo` is the function name of the task defined in the galaxy.celery.tasks module. - The `broker_url` option, if unset, defaults to the value of `amqp_internal_connection`. - The `result_backend` option, if unset, defaults to an SQLite database at '/results.sqlite' + The `broker_url` option, if unset or null, defaults to the value of `amqp_internal_connection`. + The `result_backend` option, if unset or null, defaults to an SQLite database at '/results.sqlite' for storing task results. The galaxy.fetch_data task can be disabled by setting its route to "disabled": `galaxy.fetch_data: disabled`. 
From 74c243755bc4cfb6b97ff8b6fd31e1de51dd7e2b Mon Sep 17 00:00:00 2001 From: guerler Date: Thu, 11 Apr 2024 08:27:37 +0300 Subject: [PATCH 491/669] Reduce size of visualization logo placeholder icon --- client/src/components/Panels/VisualizationPanel.vue | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/client/src/components/Panels/VisualizationPanel.vue b/client/src/components/Panels/VisualizationPanel.vue index 9297032bfc3f..e20a115ddad5 100644 --- a/client/src/components/Panels/VisualizationPanel.vue +++ b/client/src/components/Panels/VisualizationPanel.vue @@ -108,8 +108,8 @@ onMounted(() => { width: 2rem; } .plugin-icon { - font-size: 1.5rem; - padding: 0.2rem; + font-size: 1.3rem; + padding: 0.3rem; } } From 3bb49cab8e0b8352e6210851e57875abd28dc6cb Mon Sep 17 00:00:00 2001 From: davelopez <46503462+davelopez@users.noreply.github.com> Date: Thu, 11 Apr 2024 09:21:27 +0200 Subject: [PATCH 492/669] Move result_backend comment to celery_conf Co-authored-by: Nicola Soranzo --- doc/source/admin/galaxy_options.rst | 6 +++--- lib/galaxy/config/sample/galaxy.yml.sample | 4 ++-- lib/galaxy/config/schemas/config_schema.yml | 3 +-- 3 files changed, 6 insertions(+), 7 deletions(-) diff --git a/doc/source/admin/galaxy_options.rst b/doc/source/admin/galaxy_options.rst index 9ffc90e33ba4..b5511036af2f 100644 --- a/doc/source/admin/galaxy_options.rst +++ b/doc/source/admin/galaxy_options.rst @@ -5156,8 +5156,7 @@ to set the `result_backend` option in the `celery_conf` option to a valid Celery result backend URL. By default, Galaxy uses an SQLite database at '/results.sqlite' for storing task - results. Please use a more robust backend (e.g. Redis) for - production setups. For details, see + results. 
For details, see https://docs.galaxyproject.org/en/master/admin/production.html#use-celery-for-asynchronous-tasks :Default: ``false`` :Type: bool @@ -5175,7 +5174,8 @@ The `broker_url` option, if unset or null, defaults to the value of `amqp_internal_connection`. The `result_backend` option, if unset or null, defaults to an SQLite database at - '/results.sqlite' for storing task results. + '/results.sqlite' for storing task results. Please use a + more robust backend (e.g. Redis) for production setups. The galaxy.fetch_data task can be disabled by setting its route to "disabled": `galaxy.fetch_data: disabled`. (Other tasks cannot be disabled on a per-task basis at this time.) diff --git a/lib/galaxy/config/sample/galaxy.yml.sample b/lib/galaxy/config/sample/galaxy.yml.sample index d55859a04eba..8032d879039b 100644 --- a/lib/galaxy/config/sample/galaxy.yml.sample +++ b/lib/galaxy/config/sample/galaxy.yml.sample @@ -2763,7 +2763,6 @@ galaxy: # set the `result_backend` option in the `celery_conf` option to a # valid Celery result backend URL. By default, Galaxy uses an SQLite # database at '/results.sqlite' for storing task results. - # Please use a more robust backend (e.g. Redis) for production setups. # For details, see # https://docs.galaxyproject.org/en/master/admin/production.html#use-celery-for-asynchronous-tasks #enable_celery_tasks: false @@ -2775,7 +2774,8 @@ galaxy: # The `broker_url` option, if unset or null, defaults to the value of # `amqp_internal_connection`. The `result_backend` option, if unset or # null, defaults to an SQLite database at '/results.sqlite' - # for storing task results. + # for storing task results. Please use a more robust backend (e.g. + # Redis) for production setups. # The galaxy.fetch_data task can be disabled by setting its route to # "disabled": `galaxy.fetch_data: disabled`. (Other tasks cannot be # disabled on a per-task basis at this time.) 
diff --git a/lib/galaxy/config/schemas/config_schema.yml b/lib/galaxy/config/schemas/config_schema.yml index 7e15c81cb566..99dbe9657746 100644 --- a/lib/galaxy/config/schemas/config_schema.yml +++ b/lib/galaxy/config/schemas/config_schema.yml @@ -3765,7 +3765,6 @@ mapping: configured the `celery_conf` option below. Specifically, you need to set the `result_backend` option in the `celery_conf` option to a valid Celery result backend URL. By default, Galaxy uses an SQLite database at '/results.sqlite' for storing task results. - Please use a more robust backend (e.g. Redis) for production setups. For details, see https://docs.galaxyproject.org/en/master/admin/production.html#use-celery-for-asynchronous-tasks celery_conf: @@ -3785,7 +3784,7 @@ mapping: The `broker_url` option, if unset or null, defaults to the value of `amqp_internal_connection`. The `result_backend` option, if unset or null, defaults to an SQLite database at '/results.sqlite' - for storing task results. + for storing task results. Please use a more robust backend (e.g. Redis) for production setups. The galaxy.fetch_data task can be disabled by setting its route to "disabled": `galaxy.fetch_data: disabled`. (Other tasks cannot be disabled on a per-task basis at this time.) From 9291173c77e0f81a9eb8ab3f3e856be9e6b52445 Mon Sep 17 00:00:00 2001 From: Dannon Baker Date: Wed, 10 Apr 2024 22:42:04 -0400 Subject: [PATCH 493/669] In HistoryDatasetDisplay markdown element, display images in iframe --- .../Markdown/Elements/HistoryDatasetDisplay.vue | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/client/src/components/Markdown/Elements/HistoryDatasetDisplay.vue b/client/src/components/Markdown/Elements/HistoryDatasetDisplay.vue index 547f6bcc9353..08aebe4aba57 100644 --- a/client/src/components/Markdown/Elements/HistoryDatasetDisplay.vue +++ b/client/src/components/Markdown/Elements/HistoryDatasetDisplay.vue @@ -61,7 +61,13 @@
{{ error }}
From c15b6e0fd3cdb7c53660bdbe58a31c8fa98f9a1a Mon Sep 17 00:00:00 2001 From: Dannon Baker Date: Wed, 10 Apr 2024 22:48:25 -0400 Subject: [PATCH 494/669] Allow abstract parent class lookup by classname for isSubType. --- client/src/components/Datatypes/model.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/client/src/components/Datatypes/model.ts b/client/src/components/Datatypes/model.ts index 45334ba8d255..28e55257daa7 100644 --- a/client/src/components/Datatypes/model.ts +++ b/client/src/components/Datatypes/model.ts @@ -17,7 +17,8 @@ export class DatatypesMapperModel { isSubType(child: string, parent: string): boolean { const mapping = this.datatypesMapping; const childClassName = mapping.ext_to_class_name[child]; - const parentClassName = mapping.ext_to_class_name[parent]; + const parentClassName = mapping.ext_to_class_name[parent] || parent; + if (!childClassName || !parentClassName) { return false; } From dd9af46effe1067168a7bbdbda7c0c2dec0be6f3 Mon Sep 17 00:00:00 2001 From: Dannon Baker Date: Thu, 11 Apr 2024 07:44:21 -0400 Subject: [PATCH 495/669] Document isSubType --- client/src/components/Datatypes/model.ts | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/client/src/components/Datatypes/model.ts b/client/src/components/Datatypes/model.ts index 28e55257daa7..797d0349e199 100644 --- a/client/src/components/Datatypes/model.ts +++ b/client/src/components/Datatypes/model.ts @@ -14,6 +14,13 @@ export class DatatypesMapperModel { this.datatypesMapping = typesAndMapping.datatypes_mapping; } + /** + * Checks if a given child datatype is a subtype of a parent datatype. + * @param child - The child datatype extension as registered in the datatypes registry. + * @param parent - The parent datatype, which can be an extension or explicit class name + * Can also be used with extensionless abstract datatypes (e.g. "galaxy.datatypes.images.Image") + * @returns A boolean indicating whether the child is a subtype of the parent. 
+ */ isSubType(child: string, parent: string): boolean { const mapping = this.datatypesMapping; const childClassName = mapping.ext_to_class_name[child]; From ea455b382399c4e6e1dd75766656bee8c2a2ee51 Mon Sep 17 00:00:00 2001 From: Dannon Baker Date: Thu, 11 Apr 2024 07:37:04 -0400 Subject: [PATCH 496/669] Reuse datasetimage display for appropriate datatypes Minor tweaking of pre --- .../Elements/HistoryDatasetDisplay.vue | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/client/src/components/Markdown/Elements/HistoryDatasetDisplay.vue b/client/src/components/Markdown/Elements/HistoryDatasetDisplay.vue index 08aebe4aba57..d44e22d74b4b 100644 --- a/client/src/components/Markdown/Elements/HistoryDatasetDisplay.vue +++ b/client/src/components/Markdown/Elements/HistoryDatasetDisplay.vue @@ -61,16 +61,13 @@
{{ error }}
+
-                                    {{ itemContent.item_data }}
-                                
+ {{ itemContent.item_data }} +
No content found.
Show More... @@ -105,10 +102,13 @@ import LoadingSpan from "components/LoadingSpan"; import { UrlDataProvider } from "components/providers/UrlDataProvider"; import { getAppRoot } from "onload/loadConfig"; +import HistoryDatasetAsImage from "./HistoryDatasetAsImage.vue"; + export default { components: { LoadingSpan, UrlDataProvider, + HistoryDatasetAsImage, }, props: { args: { From 4551d50ddaf99a9aa231bdddf0533e2f3e0cd0df Mon Sep 17 00:00:00 2001 From: davelopez <46503462+davelopez@users.noreply.github.com> Date: Thu, 11 Apr 2024 15:12:44 +0200 Subject: [PATCH 497/669] Fix notification display time Prioritize the publication date over the creation date to accurately reflect possible scheduled notifications --- client/src/components/Notifications/NotificationActions.vue | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/src/components/Notifications/NotificationActions.vue b/client/src/components/Notifications/NotificationActions.vue index ebcd9a469893..fe161b7ddbab 100644 --- a/client/src/components/Notifications/NotificationActions.vue +++ b/client/src/components/Notifications/NotificationActions.vue @@ -37,7 +37,7 @@ function getNotificationExpirationTitle(notification: UserNotification) {
- + From 7e2bd277b4ed21a3afd4d7d1af3f7f4d7b2885cc Mon Sep 17 00:00:00 2001 From: davelopez <46503462+davelopez@users.noreply.github.com> Date: Thu, 11 Apr 2024 15:31:57 +0200 Subject: [PATCH 498/669] Fix message input type for notifications Otherwise, is not possible to use line breaks in the message which are supported by the API with full markdown support. --- client/src/components/admin/Notifications/NotificationForm.vue | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/client/src/components/admin/Notifications/NotificationForm.vue b/client/src/components/admin/Notifications/NotificationForm.vue index 86e4c98efc26..027649bc067c 100644 --- a/client/src/components/admin/Notifications/NotificationForm.vue +++ b/client/src/components/admin/Notifications/NotificationForm.vue @@ -153,7 +153,8 @@ async function sendNewNotification() { :optional="false" help="The message can be written in markdown." placeholder="Enter message" - required /> + required + area /> Date: Tue, 9 Apr 2024 13:27:08 -0400 Subject: [PATCH 499/669] tool error integration test --- lib/galaxy/util/__init__.py | 19 +++-- lib/galaxy_test/base/populators.py | 30 +++++++ test/integration/test_error_report.py | 114 ++++++++++++++++++++++++++ 3 files changed, 157 insertions(+), 6 deletions(-) create mode 100644 test/integration/test_error_report.py diff --git a/lib/galaxy/util/__init__.py b/lib/galaxy/util/__init__.py index b8a35995f2f0..b688d633c9b8 100644 --- a/lib/galaxy/util/__init__.py +++ b/lib/galaxy/util/__init__.py @@ -644,12 +644,6 @@ def pretty_print_time_interval(time=False, precise=False, utc=False): return "a few years ago" -def pretty_print_json(json_data, is_json_string=False): - if is_json_string: - json_data = json.loads(json_data) - return json.dumps(json_data, sort_keys=True, indent=4) - - # characters that are valid valid_chars = set(string.ascii_letters + string.digits + " -=_.()/+*^,:?!") @@ -1621,6 +1615,19 @@ def send_mail(frm, to, subject, body, config, 
html=None, reply_to=None): :type reply_to: str :param reply_to: Reply-to address (Default None) """ + if config.smtp_server.startswith("mock_emails_to_path://"): + path = config.smtp_server[len("mock_emails_to_path://") :] + email_dict = { + "from": frm, + "to": to, + "subject": subject, + "body": body, + "html": html, + "reply_to": reply_to, + } + email_json = json.to_json_string(email_dict) + with open(path, "w") as f: + f.write(email_json) to = listify(to) if html: diff --git a/lib/galaxy_test/base/populators.py b/lib/galaxy_test/base/populators.py index 710cf9544d22..e4fd0026245f 100644 --- a/lib/galaxy_test/base/populators.py +++ b/lib/galaxy_test/base/populators.py @@ -1048,6 +1048,36 @@ def new_error_dataset(self, history_id: str) -> str: assert output_details["state"] == "error", output_details return output_details["id"] + def report_job_error_raw( + self, job_id: str, dataset_id: str, message: str = "", email: Optional[str] = None + ) -> Response: + url = f"jobs/{job_id}/error" + payload = dict( + dataset_id=dataset_id, + message=message, + ) + if email is not None: + payload["email"] = email + report_response = self._post(url, data=payload, json=True) + return report_response + + def report_job_error( + self, job_id: str, dataset_id: str, message: str = "", email: Optional[str] = None + ) -> Response: + report_response = self.report_job_error_raw(job_id, dataset_id, message=message, email=email) + api_asserts.assert_status_code_is_ok(report_response) + return report_response.json() + + def run_detect_errors(self, history_id: str, exit_code: int, stdout: str = "", stderr: str = "") -> dict: + inputs = { + "stdoutmsg": stdout, + "stderrmsg": stderr, + "exit_code": exit_code, + } + response = self.run_tool("detect_errors", inputs, history_id) + self.wait_for_history(history_id, assert_ok=False) + return response + def run_exit_code_from_file(self, history_id: str, hdca_id: str) -> dict: exit_code_inputs = { "input": {"batch": True, "values": [{"src": 
"hdca", "id": hdca_id}]}, diff --git a/test/integration/test_error_report.py b/test/integration/test_error_report.py new file mode 100644 index 000000000000..355afda23f88 --- /dev/null +++ b/test/integration/test_error_report.py @@ -0,0 +1,114 @@ +"""Integration tests for user error reporting.""" + +import json +import os +import string + +from galaxy_test.base.populators import DatasetPopulator +from galaxy_test.driver import integration_util + +JSON_ERROR_REPORTS = """ +- type: json + verbose: true + user_submission: true + directory: ${reports_directory} +""" + +MOCK_EMAIL_ERROR_REPORTS = """ +- type: email + verbose: true + user_submission: true +""" + + +class TestErrorReportIntegration(integration_util.IntegrationTestCase): + dataset_populator: DatasetPopulator + reports_directory: str + framework_tool_and_types = True + + def setUp(self): + super().setUp() + self.dataset_populator = DatasetPopulator(self.galaxy_interactor) + + @classmethod + def handle_galaxy_config_kwds(cls, config): + reports_directory = cls._test_driver.mkdtemp() + cls.reports_directory = reports_directory + template = string.Template(JSON_ERROR_REPORTS) + reports_yaml = template.safe_substitute({"reports_directory": reports_directory}) + reports_conf = os.path.join(reports_directory, "error_report.yml") + with open(reports_conf, "w") as f: + f.write(reports_yaml) + config["error_report_file"] = reports_conf + + def test_basic_tool_error(self): + with self.dataset_populator.test_history() as history_id: + response = self.dataset_populator.run_detect_errors(history_id, 6, "my stdout", "my stderr") + job_id = response["jobs"][0]["id"] + dataset_result = response["outputs"][0] + self.dataset_populator.report_job_error(job_id, dataset_result["id"]) + assert len(os.listdir(self.reports_directory)) == 2 + error_json = self.read_error_report(job_id) + error_dict = json.loads(error_json) + assert error_dict["exit_code"] == 6 + + def test_tool_error_custom_message_and_email(self): + with 
self.dataset_populator.test_history() as history_id: + response = self.dataset_populator.run_detect_errors(history_id, 6, "my stdout", "my stderr") + job_id = response["jobs"][0]["id"] + dataset_result = response["outputs"][0] + self.dataset_populator.report_job_error( + job_id, dataset_result["id"], "some new details", "notreal@galaxyproject.org" + ) + error_json = self.read_error_report(job_id) + error_dict = json.loads(error_json) + assert error_dict["exit_code"] == 6 + assert error_dict["message"] == "some new details" + assert error_dict["email"] == "notreal@galaxyproject.org" + + def read_error_report(self, job_id: str): + app = self._app + job_id_decoded = app.security.decode_id(job_id) + with open(os.path.join(self.reports_directory, str(job_id_decoded))) as f: + return f.read() + + +class TestErrorEmailReportIntegration(integration_util.IntegrationTestCase): + dataset_populator: DatasetPopulator + reports_directory: str + framework_tool_and_types = True + + def setUp(self): + super().setUp() + self.dataset_populator = DatasetPopulator(self.galaxy_interactor) + + @classmethod + def handle_galaxy_config_kwds(cls, config): + reports_directory = cls._test_driver.mkdtemp() + cls.reports_directory = reports_directory + template = string.Template(MOCK_EMAIL_ERROR_REPORTS) + reports_yaml = template.safe_substitute({"reports_directory": reports_directory}) + reports_conf = os.path.join(reports_directory, "error_report.yml") + with open(reports_conf, "w") as f: + f.write(reports_yaml) + config["error_report_file"] = reports_conf + config["smtp_server"] = f"mock_emails_to_path://{reports_directory}/email.json" + config["error_email_to"] = "jsonfiles@thefilesystem.org" + + def test_tool_error_custom_message_and_email(self): + with self.dataset_populator.test_history() as history_id: + response = self.dataset_populator.run_detect_errors(history_id, 6, "my stdout", "my stderr") + job_id = response["jobs"][0]["id"] + dataset_result = response["outputs"][0] + 
self.dataset_populator.report_job_error( + job_id, dataset_result["id"], "some new details", "notreal@galaxyproject.org" + ) + error_json = self.read_most_recent_error_report() + error_dict = json.loads(error_json) + assert error_dict["to"] == "jsonfiles@thefilesystem.org, notreal@galaxyproject.org" + assert error_dict["subject"] == "Galaxy tool error report from notreal@galaxyproject.org (detect_errors)" + assert "

Galaxy Tool Error Report

" in error_dict["html"] + + def read_most_recent_error_report(self): + with open(os.path.join(self.reports_directory, "email.json")) as f: + return f.read() From d0e81dba4b227431575da5dc46db7875fd7fb430 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Thu, 11 Apr 2024 10:07:50 -0400 Subject: [PATCH 500/669] Use typed API client for reporting job errors. --- client/src/api/jobs.ts | 2 ++ .../src/components/DatasetInformation/services.js | 15 +++++++-------- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/client/src/api/jobs.ts b/client/src/api/jobs.ts index 65399c0beb96..d18644d3acd3 100644 --- a/client/src/api/jobs.ts +++ b/client/src/api/jobs.ts @@ -8,3 +8,5 @@ export const jobLockUpdate = fetcher.path("/api/job_lock").method("put").create( export const fetchJobDestinationParams = fetcher.path("/api/jobs/{job_id}/destination_params").method("get").create(); export const jobsFetcher = fetcher.path("/api/jobs").method("get").create(); + +export const jobsReportError = fetcher.path("/api/jobs/{job_id}/error").method("post").create(); diff --git a/client/src/components/DatasetInformation/services.js b/client/src/components/DatasetInformation/services.js index 13c4c62bd3d6..f13b19c980af 100644 --- a/client/src/components/DatasetInformation/services.js +++ b/client/src/components/DatasetInformation/services.js @@ -2,19 +2,18 @@ import axios from "axios"; import { getAppRoot } from "onload/loadConfig"; import { rethrowSimple } from "utils/simple-error"; +import { jobsReportError } from "@/api/jobs"; + export async function sendErrorReport(dataset, message, email) { - const payload = { + const jobId = dataset.creating_job; + const request = { + job_id: jobId, dataset_id: dataset.id, message, email, }; - const url = `${getAppRoot()}api/jobs/${dataset.creating_job}/error`; - try { - const { data } = await axios.post(url, payload); - return data.messages; - } catch (e) { - rethrowSimple(e); - } + const { data } = await jobsReportError(request); + return 
data.messages; } export async function setAttributes(datasetId, settings, operation) { From c3f5f59008402ea159ffac9eadbb2523eae0c4ba Mon Sep 17 00:00:00 2001 From: Natalie Whitaker <129767521+natwhitaker@users.noreply.github.com> Date: Thu, 11 Apr 2024 13:00:33 -0400 Subject: [PATCH 501/669] Update _thanks.rst to include new socials and remove inactive ones Updating the _thanks.rst that is associated with the User Release Notes to reflect Galaxy's new social media presence. Galaxy is no longer active on Twitter and has joined Bluesky and LinkedIn. The new 'thanks' message has been updated to reflect this with the appropriate links to the new socials. --- doc/source/releases/_thanks.rst | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/doc/source/releases/_thanks.rst b/doc/source/releases/_thanks.rst index ac233ef7dd70..35cb0ccc90c5 100644 --- a/doc/source/releases/_thanks.rst +++ b/doc/source/releases/_thanks.rst @@ -1,7 +1,5 @@ -To stay up to date with Galaxy's progress, watch our `screencasts `__, -visit our community `Hub `__, and follow -`@galaxyproject@mstdn.science `__ on Mastodon or -`@galaxyproject `__ on Twitter. +To stay up to date with Galaxy's progress, watch our `screencasts `__; +visit our community `Hub `__; and follow us on `Bluesky `__, `Mastodon `__, and `LinkedIn `__. You can always chat with us on `Matrix `__. 
From e2d737ff2b33443bfa2cda56d18b715c4430ade2 Mon Sep 17 00:00:00 2001 From: mvdbeek Date: Thu, 11 Apr 2024 19:31:39 +0200 Subject: [PATCH 502/669] Don't save in test_integer_input Reproduces https://github.com/galaxyproject/galaxy/issues/17970 --- lib/galaxy/selenium/navigates_galaxy.py | 13 +++++++------ lib/galaxy_test/selenium/test_workflow_editor.py | 2 +- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/lib/galaxy/selenium/navigates_galaxy.py b/lib/galaxy/selenium/navigates_galaxy.py index f156f348703d..ac13d0fbe605 100644 --- a/lib/galaxy/selenium/navigates_galaxy.py +++ b/lib/galaxy/selenium/navigates_galaxy.py @@ -1510,7 +1510,7 @@ def workflow_run_ensure_expanded(self): workflow_run.expand_form_link.wait_for_and_click() workflow_run.expanded_form.wait_for_visible() - def workflow_create_new(self, annotation=None, clear_placeholder=False): + def workflow_create_new(self, annotation=None, clear_placeholder=False, save_workflow=True): self.workflow_index_open() self.sleep_for(self.wait_types.UX_RENDER) self.click_button_new_workflow() @@ -1522,11 +1522,12 @@ def workflow_create_new(self, annotation=None, clear_placeholder=False): name_component.wait_for_and_send_keys(name) annotation = annotation or self._get_random_name() self.components.workflow_editor.edit_annotation.wait_for_and_send_keys(annotation) - save_button = self.components.workflow_editor.save_button - save_button.wait_for_visible() - assert not save_button.has_class("disabled") - save_button.wait_for_and_click() - self.sleep_for(self.wait_types.UX_RENDER) + if save_workflow: + save_button = self.components.workflow_editor.save_button + save_button.wait_for_visible() + assert not save_button.has_class("disabled") + save_button.wait_for_and_click() + self.sleep_for(self.wait_types.UX_RENDER) return name def invocation_index_table_elements(self): diff --git a/lib/galaxy_test/selenium/test_workflow_editor.py b/lib/galaxy_test/selenium/test_workflow_editor.py index 
a98e5f3b756b..39471e887267 100644 --- a/lib/galaxy_test/selenium/test_workflow_editor.py +++ b/lib/galaxy_test/selenium/test_workflow_editor.py @@ -224,7 +224,7 @@ def test_data_column_input_editing(self): def test_integer_input(self): editor = self.components.workflow_editor - name = self.workflow_create_new() + name = self.workflow_create_new(save_workflow=False) self.workflow_editor_add_input(item_name="parameter_input") self.screenshot("workflow_editor_parameter_input_new") editor.label_input.wait_for_and_send_keys("input1") From 0ec987faa111b52305933608141b7929190cd228 Mon Sep 17 00:00:00 2001 From: mvdbeek Date: Thu, 11 Apr 2024 19:17:35 +0200 Subject: [PATCH 503/669] Set from_tool_form: true when saving new workflow Fixes https://github.com/galaxyproject/galaxy/issues/17963 and https://github.com/galaxyproject/galaxy/issues/17970 --- client/src/components/Workflow/services.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/src/components/Workflow/services.js b/client/src/components/Workflow/services.js index d354fda81826..ab0f1f4acaf2 100644 --- a/client/src/components/Workflow/services.js +++ b/client/src/components/Workflow/services.js @@ -32,7 +32,7 @@ export class Services { async createWorkflow(workflow) { const url = withPrefix("/api/workflows"); try { - const { data } = await axios.post(url, { workflow: toSimple(workflow.id, workflow) }); + const { data } = await axios.post(url, { workflow: toSimple(workflow.id, workflow), from_tool_form: true }); return data; } catch (e) { rethrowSimple(e); From f2ef679e1e724d64c99fa3db633e41d9a525004a Mon Sep 17 00:00:00 2001 From: Dannon Baker Date: Thu, 11 Apr 2024 14:46:20 -0400 Subject: [PATCH 504/669] Use standard buttons for history import/switch in the view interface --- client/src/components/History/HistoryView.vue | 2 -- 1 file changed, 2 deletions(-) diff --git a/client/src/components/History/HistoryView.vue b/client/src/components/History/HistoryView.vue index 
9fa3061b4e05..b8bad9242b5e 100644 --- a/client/src/components/History/HistoryView.vue +++ b/client/src/components/History/HistoryView.vue @@ -8,7 +8,6 @@ Import this history From 822f8121f828885347f9638a8240dda007832632 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Thu, 11 Apr 2024 11:00:59 -0400 Subject: [PATCH 505/669] Unit test for email error reporting. --- .../tools/error_reports/plugins/email.py | 4 +- lib/galaxy/util/__init__.py | 4 +- test/unit/app/tools/test_error_reporting.py | 145 ++++++++++++++++++ 3 files changed, 151 insertions(+), 2 deletions(-) create mode 100644 test/unit/app/tools/test_error_reporting.py diff --git a/lib/galaxy/tools/error_reports/plugins/email.py b/lib/galaxy/tools/error_reports/plugins/email.py index 42446c2ecb3e..2db91db9a2b5 100644 --- a/lib/galaxy/tools/error_reports/plugins/email.py +++ b/lib/galaxy/tools/error_reports/plugins/email.py @@ -35,7 +35,9 @@ def submit_report(self, dataset, job, tool, **kwargs): ) return ("Your error report has been sent", "success") except Exception as e: - return (f"An error occurred sending the report by email: {unicodify(e)}", "danger") + msg = f"An error occurred sending the report by email: {unicodify(e)}" + log.exception(msg) + return (msg, "danger") __all__ = ("EmailPlugin",) diff --git a/lib/galaxy/util/__init__.py b/lib/galaxy/util/__init__.py index b688d633c9b8..a49e5c1ddd1a 100644 --- a/lib/galaxy/util/__init__.py +++ b/lib/galaxy/util/__init__.py @@ -1615,7 +1615,8 @@ def send_mail(frm, to, subject, body, config, html=None, reply_to=None): :type reply_to: str :param reply_to: Reply-to address (Default None) """ - if config.smtp_server.startswith("mock_emails_to_path://"): + smtp_server = config.smtp_server + if smtp_server and isinstance(smtp_server, str) and smtp_server.startswith("mock_emails_to_path://"): path = config.smtp_server[len("mock_emails_to_path://") :] email_dict = { "from": frm, @@ -1628,6 +1629,7 @@ def send_mail(frm, to, subject, body, config, html=None, 
reply_to=None): email_json = json.to_json_string(email_dict) with open(path, "w") as f: f.write(email_json) + return to = listify(to) if html: diff --git a/test/unit/app/tools/test_error_reporting.py b/test/unit/app/tools/test_error_reporting.py new file mode 100644 index 000000000000..7db9cb4bf60e --- /dev/null +++ b/test/unit/app/tools/test_error_reporting.py @@ -0,0 +1,145 @@ +import json +import shutil +import tempfile +from pathlib import Path + +from galaxy import model +from galaxy.app_unittest_utils.tools_support import UsesApp +from galaxy.model.base import transaction +from galaxy.tools.errors import EmailErrorReporter +from galaxy.util.unittest import TestCase + +# The email the user created their account with. +TEST_USER_EMAIL = "mockgalaxyuser@galaxyproject.org" +# The email the user supplied when submitting the error +TEST_USER_SUPPLIED_EMAIL = "fake@example.org" +TEST_SERVER_EMAIL_FROM = "email_from@galaxyproject.org" +TEST_SERVER_ERROR_EMAIL_TO = "admin@email.to" # setup in mock config + + +class TestErrorReporter(TestCase, UsesApp): + + def setUp(self): + self.setup_app() + self.app.config.email_from = TEST_SERVER_EMAIL_FROM + self.tmp_path = Path(tempfile.mkdtemp()) + self.email_path = self.tmp_path / "email.json" + smtp_server = f"mock_emails_to_path://{self.email_path}" + self.app.config.smtp_server = smtp_server # type: ignore[attr-defined] + + def tearDown(self): + shutil.rmtree(self.tmp_path) + + def test_basic(self): + user, hda = self._setup_model_objects() + + email_path = self.email_path + assert not email_path.exists() + error_report = EmailErrorReporter(hda, self.app) + error_report.send_report(user, email=TEST_USER_SUPPLIED_EMAIL, message="My custom message") + assert email_path.exists() + text = email_path.read_text() + email_json = json.loads(text) + assert email_json["from"] == TEST_SERVER_EMAIL_FROM + assert email_json["to"] == f"{TEST_SERVER_ERROR_EMAIL_TO}, {TEST_USER_SUPPLIED_EMAIL}" + assert f"Galaxy tool error report from 
{TEST_USER_SUPPLIED_EMAIL}" == email_json["subject"] + assert "cat1" in email_json["body"] + assert "cat1" in email_json["html"] + assert TEST_USER_EMAIL == email_json["reply_to"] + + def test_hda_security(self, tmp_path): + user, hda = self._setup_model_objects() + error_report = EmailErrorReporter(hda, self.app) + security_agent = self.app.security_agent + private_role = security_agent.create_private_user_role(user) + access_action = security_agent.permitted_actions.DATASET_ACCESS.action + manage_action = security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS.action + permissions = {access_action: [private_role], manage_action: [private_role]} + security_agent.set_all_dataset_permissions(hda.dataset, permissions) + + other_user = model.User(email="otheruser@galaxyproject.org", password="mockpass2") + self._commit_objects([other_user]) + security_agent = self.app.security_agent + email_path = self.email_path + assert not email_path.exists() + error_report.send_report(other_user, email=TEST_USER_SUPPLIED_EMAIL, message="My custom message") + # Without permissions, the email still gets sent but the supplied email is ignored + # I'm not saying this is the right behavior but it is what the code does at the time of test + # writing -John + assert email_path.exists() + text = email_path.read_text() + email_json = json.loads(text) + assert "otheruser@galaxyproject.org" not in email_json["to"] + + def test_html_sanitization(self, tmp_path): + user, hda = self._setup_model_objects() + email_path = self.email_path + assert not email_path.exists() + error_report = EmailErrorReporter(hda, self.app) + error_report.send_report( + user, email=TEST_USER_SUPPLIED_EMAIL, message='My custom message' + ) + assert email_path.exists() + text = email_path.read_text() + email_json = json.loads(text) + html = email_json["html"] + assert "<a href="http://sneaky.com/">message</a>" in html + + def test_redact_user_details_in_bugreport(self, tmp_path): + user, hda = 
self._setup_model_objects() + + email_path = self.email_path + assert not email_path.exists() + error_report = EmailErrorReporter(hda, self.app) + error_report.send_report( + user, email=TEST_USER_SUPPLIED_EMAIL, message="My custom message", redact_user_details_in_bugreport=True + ) + assert email_path.exists() + text = email_path.read_text() + email_json = json.loads(text) + assert "The user redacted (user: 1) provided the following information:" in email_json["body"] + assert ( + """The user redacted (user: 1) provided the following information:""" + in email_json["html"] + ) + + def test_no_redact_user_details_in_bugreport(self, tmp_path): + user, hda = self._setup_model_objects() + + email_path = self.email_path + assert not email_path.exists() + error_report = EmailErrorReporter(hda, self.app) + error_report.send_report( + user, email=TEST_USER_SUPPLIED_EMAIL, message="My custom message", redact_user_details_in_bugreport=False + ) + assert email_path.exists() + text = email_path.read_text() + email_json = json.loads(text) + assert ( + f"The user '{TEST_USER_EMAIL}' (providing preferred contact email '{TEST_USER_SUPPLIED_EMAIL}') provided the following information:" + in email_json["body"] + ) + assert ( + f"""The user '{TEST_USER_EMAIL}' (providing preferred contact email '{TEST_USER_SUPPLIED_EMAIL}') provided the following information:""" + in email_json["html"] + ) + + def _setup_model_objects(self): + user = model.User(email=TEST_USER_EMAIL, password="mockpass") + job = model.Job() + job.tool_id = "cat1" + job.history = model.History() + job.user = user + job.history.user = user + hda = model.HistoryDatasetAssociation(history=job.history) + hda.dataset = model.Dataset() + hda.dataset.state = "ok" + job.add_output_dataset("out1", hda) + self._commit_objects([job, hda, user]) + return user, hda + + def _commit_objects(self, objects): + session = self.app.model.context + session.add_all(objects) + with transaction(session): + session.commit() From 
1fa496671ff3b75feef83b41292fb37e1db21f83 Mon Sep 17 00:00:00 2001 From: mvdbeek Date: Fri, 12 Apr 2024 12:45:42 +0200 Subject: [PATCH 506/669] Drop unused workflow controller methods --- .../webapps/galaxy/controllers/workflow.py | 153 ------------------ 1 file changed, 153 deletions(-) diff --git a/lib/galaxy/webapps/galaxy/controllers/workflow.py b/lib/galaxy/webapps/galaxy/controllers/workflow.py index 4f26c72ce804..0b073c65e0c6 100644 --- a/lib/galaxy/webapps/galaxy/controllers/workflow.py +++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py @@ -1,5 +1,4 @@ import logging -from html.parser import HTMLParser from markupsafe import escape from sqlalchemy import desc @@ -11,15 +10,9 @@ web, ) from galaxy.managers.sharable import SlugBuilder -from galaxy.managers.workflows import ( - MissingToolsException, - WorkflowUpdateOptions, -) -from galaxy.model.base import transaction from galaxy.model.item_attrs import UsesItemRatings from galaxy.tools.parameters.workflow_utils import workflow_building_modes from galaxy.util import FILENAME_VALID_CHARS -from galaxy.util.sanitize_html import sanitize_html from galaxy.web import url_for from galaxy.webapps.base.controller import ( BaseUIController, @@ -35,25 +28,6 @@ log = logging.getLogger(__name__) -# Simple HTML parser to get all content in a single tag. 
-class SingleTagContentsParser(HTMLParser): - def __init__(self, target_tag): - # Cannot use super() because HTMLParser is an old-style class in Python2 - HTMLParser.__init__(self) - self.target_tag = target_tag - self.cur_tag = None - self.tag_content = "" - - def handle_starttag(self, tag, attrs): - """Called for each start tag.""" - self.cur_tag = tag - - def handle_data(self, text): - """Called for each block of plain text.""" - if self.cur_tag == self.target_tag: - self.tag_content += text - - class WorkflowController(BaseUIController, SharableMixin, UsesStoredWorkflowMixin, UsesItemRatings): slug_builder = SlugBuilder() @@ -145,30 +119,6 @@ def imp(self, trans, id, **kwargs): ) ) - @web.expose - @web.require_login("use Galaxy workflows") - def rename_async(self, trans, id, new_name=None, **kwargs): - stored = self.get_stored_workflow(trans, id) - if new_name: - san_new_name = sanitize_html(new_name) - stored.name = san_new_name - stored.latest_workflow.name = san_new_name - with transaction(trans.sa_session): - trans.sa_session.commit() - return stored.name - - @web.expose - @web.require_login("use Galaxy workflows") - def annotate_async(self, trans, id, new_annotation=None, **kwargs): - stored = self.get_stored_workflow(trans, id) - if new_annotation: - # Sanitize annotation before adding it. 
- new_annotation = sanitize_html(new_annotation) - self.add_item_annotation(trans.sa_session, trans.get_user(), stored, new_annotation) - with transaction(trans.sa_session): - trans.sa_session.commit() - return new_annotation - @web.expose @web.require_login("use Galaxy workflows") def gen_image(self, trans, id, embed="false", version="", **kwargs): @@ -188,109 +138,6 @@ def gen_image(self, trans, id, embed="false", version="", **kwargs): error_message = str(e) return trans.show_error_message(error_message) - @web.legacy_expose_api - def create(self, trans, payload=None, **kwd): - if trans.request.method == "GET": - return { - "title": "Create Workflow", - "inputs": [ - {"name": "workflow_name", "label": "Name", "value": "Unnamed workflow"}, - { - "name": "workflow_annotation", - "label": "Annotation", - "help": "A description of the workflow; annotation is shown alongside shared or published workflows.", - }, - ], - } - else: - user = trans.get_user() - workflow_name = payload.get("workflow_name") - workflow_annotation = payload.get("workflow_annotation") - workflow_tags = payload.get("workflow_tags", []) - if not workflow_name: - return self.message_exception(trans, "Please provide a workflow name.") - # Create the new stored workflow - stored_workflow = model.StoredWorkflow() - stored_workflow.name = workflow_name - stored_workflow.user = user - self.slug_builder.create_item_slug(trans.sa_session, stored_workflow) - # And the first (empty) workflow revision - workflow = model.Workflow() - workflow.name = workflow_name - workflow.stored_workflow = stored_workflow - stored_workflow.latest_workflow = workflow - # Add annotation. 
- workflow_annotation = sanitize_html(workflow_annotation) - self.add_item_annotation(trans.sa_session, trans.get_user(), stored_workflow, workflow_annotation) - # Add tags - trans.tag_handler.set_tags_from_list( - trans.user, - stored_workflow, - workflow_tags, - ) - # Persist - session = trans.sa_session - session.add(stored_workflow) - with transaction(session): - session.commit() - return { - "id": trans.security.encode_id(stored_workflow.id), - "message": f"Workflow {workflow_name} has been created.", - } - - @web.json - def save_workflow_as( - self, trans, workflow_name, workflow_data, workflow_annotation="", from_tool_form=False, **kwargs - ): - """ - Creates a new workflow based on Save As command. It is a new workflow, but - is created with workflow_data already present. - """ - user = trans.get_user() - if workflow_name is not None: - workflow_contents_manager = self.app.workflow_contents_manager - stored_workflow = model.StoredWorkflow() - stored_workflow.name = workflow_name - stored_workflow.user = user - self.slug_builder.create_item_slug(trans.sa_session, stored_workflow) - workflow = model.Workflow() - workflow.name = workflow_name - workflow.stored_workflow = stored_workflow - stored_workflow.latest_workflow = workflow - # Add annotation. 
- workflow_annotation = sanitize_html(workflow_annotation) - self.add_item_annotation(trans.sa_session, trans.get_user(), stored_workflow, workflow_annotation) - - # Persist - session = trans.sa_session - session.add(stored_workflow) - with transaction(session): - session.commit() - workflow_update_options = WorkflowUpdateOptions( - update_stored_workflow_attributes=False, # taken care of above - from_tool_form=from_tool_form, - ) - try: - workflow, errors = workflow_contents_manager.update_workflow_from_raw_description( - trans, - stored_workflow, - workflow_data, - workflow_update_options, - ) - except MissingToolsException as e: - return dict( - name=e.workflow.name, - message=( - "This workflow includes missing or invalid tools. " - "It cannot be saved until the following steps are removed or the missing tools are enabled." - ), - errors=e.errors, - ) - return trans.security.encode_id(stored_workflow.id) - else: - # This is an error state, 'save as' must have a workflow_name - log.exception("Error in Save As workflow: no name.") - @web.expose @web.json @web.require_login("edit workflows") From 1ed2af1a6900a3b1c1950cb5f10a39c73d1530f5 Mon Sep 17 00:00:00 2001 From: mvdbeek Date: Fri, 12 Apr 2024 13:25:01 +0200 Subject: [PATCH 507/669] Fix get_content_as_text for compressed text datatypes Fixes: ``` UnicodeDecodeError: 'utf-8' codec can't decode byte 0x8b in position 1: invalid start byte File "starlette/applications.py", line 123, in __call__ await self.middleware_stack(scope, receive, send) File "starlette/middleware/errors.py", line 186, in __call__ raise exc File "starlette/middleware/errors.py", line 164, in __call__ await self.app(scope, receive, _send) File "starlette_context/middleware/raw_middleware.py", line 92, in __call__ await self.app(scope, receive, send_wrapper) File "starlette/middleware/base.py", line 189, in __call__ with collapse_excgroups(): File "contextlib.py", line 155, in __exit__ self.gen.throw(typ, value, traceback) File 
"starlette/_utils.py", line 93, in collapse_excgroups raise exc File "starlette/middleware/base.py", line 191, in __call__ response = await self.dispatch_func(request, call_next) File "galaxy/webapps/galaxy/fast_app.py", line 108, in add_x_frame_options response = await call_next(request) File "starlette/middleware/base.py", line 165, in call_next raise app_exc File "starlette/middleware/base.py", line 151, in coro await self.app(scope, receive_or_disconnect, send_no_error) File "starlette/middleware/exceptions.py", line 62, in __call__ await wrap_app_handling_exceptions(self.app, conn)(scope, receive, send) File "starlette/_exception_handler.py", line 64, in wrapped_app raise exc File "starlette/_exception_handler.py", line 53, in wrapped_app await app(scope, receive, sender) File "starlette/routing.py", line 758, in __call__ await self.middleware_stack(scope, receive, send) File "starlette/routing.py", line 778, in app await route.handle(scope, receive, send) File "starlette/routing.py", line 299, in handle await self.app(scope, receive, send) File "starlette/routing.py", line 79, in app await wrap_app_handling_exceptions(app, request)(scope, receive, send) File "starlette/_exception_handler.py", line 64, in wrapped_app raise exc File "starlette/_exception_handler.py", line 53, in wrapped_app await app(scope, receive, sender) File "starlette/routing.py", line 74, in app response = await func(request) File "fastapi/routing.py", line 278, in app raw_response = await run_endpoint_function( File "fastapi/routing.py", line 193, in run_endpoint_function return await run_in_threadpool(dependant.call, **values) File "starlette/concurrency.py", line 42, in run_in_threadpool return await anyio.to_thread.run_sync(func, *args) File "anyio/to_thread.py", line 56, in run_sync return await get_async_backend().run_sync_in_worker_thread( File "anyio/_backends/_asyncio.py", line 2144, in run_sync_in_worker_thread return await future File "anyio/_backends/_asyncio.py", line 851, in 
run result = context.run(func, *args) File "galaxy/webapps/galaxy/api/datasets.py", line 192, in get_content_as_text return self.service.get_content_as_text(trans, dataset_id) File "galaxy/webapps/galaxy/services/datasets.py", line 643, in get_content_as_text truncated, dataset_data = self.hda_manager.text_data(hda, preview=True) File "galaxy/managers/hdas.py", line 310, in text_data hda_data = open(hda.get_file_name()).read(MAX_PEEK_SIZE) File "", line 322, in decode ``` from https://sentry.galaxyproject.org/share/issue/9eb8e5b692b94700ac9b304b6d1c2418/ --- lib/galaxy/managers/hdas.py | 9 ++++++--- lib/galaxy_test/api/test_datasets.py | 10 ++++++++++ 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/lib/galaxy/managers/hdas.py b/lib/galaxy/managers/hdas.py index e600ab311cd2..3be812fcf0e8 100644 --- a/lib/galaxy/managers/hdas.py +++ b/lib/galaxy/managers/hdas.py @@ -68,6 +68,7 @@ MinimalManagerApp, StructuredApp, ) +from galaxy.util.compression_utils import get_fileobj log = logging.getLogger(__name__) @@ -303,11 +304,13 @@ def text_data(self, hda, preview=True): # For now, cannot get data from non-text datasets. if not isinstance(hda.datatype, datatypes.data.Text): return truncated, hda_data - if not os.path.exists(hda.get_file_name()): + file_path = hda.get_file_name() + if not os.path.exists(file_path): return truncated, hda_data - truncated = preview and os.stat(hda.get_file_name()).st_size > MAX_PEEK_SIZE - hda_data = open(hda.get_file_name()).read(MAX_PEEK_SIZE) + truncated = preview and os.stat(file_path).st_size > MAX_PEEK_SIZE + with get_fileobj(file_path) as fh: + hda_data = fh.read(MAX_PEEK_SIZE) return truncated, hda_data # .... 
annotatable diff --git a/lib/galaxy_test/api/test_datasets.py b/lib/galaxy_test/api/test_datasets.py index fdd139d78640..3c8c9daf3420 100644 --- a/lib/galaxy_test/api/test_datasets.py +++ b/lib/galaxy_test/api/test_datasets.py @@ -12,6 +12,7 @@ one_hda_model_store_dict, TEST_SOURCE_URI, ) +from galaxy.tool_util.verify.test_data import TestDataResolver from galaxy.util.unittest_utils import skip_if_github_down from galaxy_test.base.api_asserts import assert_has_keys from galaxy_test.base.decorators import ( @@ -356,6 +357,15 @@ def test_get_content_as_text(self, history_id): self._assert_has_key(get_content_as_text_response.json(), "item_data") assert get_content_as_text_response.json().get("item_data") == contents + def test_get_content_as_text_with_compressed_text_data(self, history_id): + test_data_resolver = TestDataResolver() + with open(test_data_resolver.get_filename("1.fasta.gz"), mode="rb") as fh: + hda1 = self.dataset_populator.new_dataset(history_id, content=fh, ftype="fasta.gz", wait=True) + get_content_as_text_response = self._get(f"datasets/{hda1['id']}/get_content_as_text") + self._assert_status_code_is(get_content_as_text_response, 200) + self._assert_has_key(get_content_as_text_response.json(), "item_data") + assert ">hg17" in get_content_as_text_response.json().get("item_data") + def test_anon_get_content_as_text(self, history_id): contents = "accessible data" hda1 = self.dataset_populator.new_dataset(history_id, content=contents, wait=True) From 9cd781e12fd41cc4a740330df6fe0f732171d604 Mon Sep 17 00:00:00 2001 From: davelopez <46503462+davelopez@users.noreply.github.com> Date: Fri, 12 Apr 2024 14:52:58 +0200 Subject: [PATCH 508/669] Filter workflow outputs by type in markdown directives --- .../src/components/Markdown/MarkdownToolBox.vue | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/client/src/components/Markdown/MarkdownToolBox.vue b/client/src/components/Markdown/MarkdownToolBox.vue index 
c8f7341f6fed..d382dad028b7 100644 --- a/client/src/components/Markdown/MarkdownToolBox.vue +++ b/client/src/components/Markdown/MarkdownToolBox.vue @@ -259,18 +259,25 @@ export default { }); return steps; }, - getOutputs() { + getOutputs(filterByType = undefined) { const outputLabels = []; this.steps && Object.values(this.steps).forEach((step) => { step.workflow_outputs.forEach((workflowOutput) => { if (workflowOutput.label) { - outputLabels.push(workflowOutput.label); + if (!filterByType || this.stepOutputMatchesType(step, workflowOutput, filterByType)) { + outputLabels.push(workflowOutput.label); + } } }); }); return outputLabels; }, + stepOutputMatchesType(step, workflowOutput, type) { + return Boolean( + step.outputs.find((output) => output.name === workflowOutput.output_name && output.type === type) + ); + }, getArgumentTitle(argumentName) { return ( argumentName[0].toUpperCase() + @@ -331,13 +338,13 @@ export default { onHistoryDatasetId(argumentName) { this.selectedArgumentName = argumentName; this.selectedType = "history_dataset_id"; - this.selectedLabels = this.getOutputs(); + this.selectedLabels = this.getOutputs("data"); this.selectedShow = true; }, onHistoryCollectionId(argumentName) { this.selectedArgumentName = argumentName; this.selectedType = "history_dataset_collection_id"; - this.selectedLabels = this.getOutputs(); + this.selectedLabels = this.getOutputs("collection"); this.selectedShow = true; }, onWorkflowId(argumentName) { From 5adc34f13b847b227e92d67d4245b7ed4159aaef Mon Sep 17 00:00:00 2001 From: John Davis Date: Tue, 9 Apr 2024 14:40:27 -0400 Subject: [PATCH 509/669] Drop UCI model (dead code) --- lib/galaxy/model/__init__.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index a93e2bfe00a6..6810daabbe11 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -7455,12 +7455,6 @@ def __init__(self, galaxy_session, history): self.history = history 
-class UCI: - def __init__(self): - self.id = None - self.user = None - - class StoredWorkflow(Base, HasTags, Dictifiable, RepresentById): """ StoredWorkflow represents the root node of a tree of objects that compose a workflow, including workflow revisions, steps, and subworkflows. From 9feec897a6a9752d6919e83449f1e9bc9e523499 Mon Sep 17 00:00:00 2001 From: John Davis Date: Wed, 10 Apr 2024 11:58:13 -0400 Subject: [PATCH 510/669] Correct optionality of relationship mapping (see note) Optionality of a relationship should be defined by the optionality of the corresponding foreign key, i.e.: ``` foo_id: Mapped[int] foo: Mapped["Foo"] = relationship("Foo"...) but foo_id: Mapped[Optional[int]] foo: Mapped[Optional["Foo"]] = relationship("Foo"...) ``` This only applies to the "one" side of a one-to-many relationship. The "many" side is never None: if there are no bars associated with foo, then foo.bars will be an empty list (if using the default data structure) on first access. --- lib/galaxy/model/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 6810daabbe11..8da03ae3bc6a 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -1269,7 +1269,7 @@ class PasswordResetToken(Base): token: Mapped[str] = mapped_column(String(32), primary_key=True, unique=True, index=True) expiration_time: Mapped[Optional[datetime]] user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) - user: Mapped["User"] = relationship("User") + user: Mapped[Optional["User"]] = relationship("User") def __init__(self, user, token=None): if token: From db7d7e3750cb757c13f7deb811992e362bb9a539 Mon Sep 17 00:00:00 2001 From: John Davis Date: Wed, 10 Apr 2024 15:06:30 -0400 Subject: [PATCH 511/669] Correct typing for custom collection class --- lib/galaxy/model/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 8da03ae3bc6a..5756352a37cd 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -771,7 +771,7 @@ class User(Base, Dictifiable, RepresentById): cascade="all, delete-orphan", collection_class=ordering_list("order_index"), ) - _preferences: Mapped[List["UserPreference"]] = relationship( + _preferences: Mapped[Dict[str, "UserPreference"]] = relationship( "UserPreference", collection_class=attribute_keyed_dict("name") ) values: Mapped[List["FormValues"]] = relationship( From a51c8b18fa064be782a8af33e8c317c418c1bf1c Mon Sep 17 00:00:00 2001 From: John Davis Date: Wed, 10 Apr 2024 13:07:53 -0400 Subject: [PATCH 512/669] Add typing for the "one" side in one-to-many relationships --- lib/galaxy/model/__init__.py | 540 ++++++++++++++++++++--------------- 1 file changed, 312 insertions(+), 228 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 5756352a37cd..ed5c8c68e505 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -1407,10 +1407,10 @@ class Job(Base, JobLike, UsesCreateAndUpdateTime, Dictifiable, Serializable): preferred_object_store_id: Mapped[Optional[str]] = mapped_column(String(255)) object_store_id_overrides: Mapped[Optional[bytes]] = mapped_column(JSONType) - user = relationship("User") - galaxy_session = relationship("GalaxySession") - history = relationship("History", back_populates="jobs") - library_folder = relationship("LibraryFolder") + user: Mapped[Optional["User"]] = relationship("User") + galaxy_session: Mapped[Optional["GalaxySession"]] = relationship("GalaxySession") + history: Mapped[Optional["History"]] = relationship("History", back_populates="jobs") + library_folder: Mapped[Optional["LibraryFolder"]] = relationship("LibraryFolder") parameters = relationship("JobParameter") input_datasets = relationship("JobToInputDatasetAssociation", back_populates="job") input_dataset_collections = 
relationship("JobToInputDatasetCollectionAssociation", back_populates="job") @@ -2153,7 +2153,7 @@ class Task(Base, JobLike, RepresentById): task_runner_name: Mapped[Optional[str]] = mapped_column(String(255)) task_runner_external_id: Mapped[Optional[str]] = mapped_column(String(255)) prepare_input_files_cmd: Mapped[Optional[str]] = mapped_column(TEXT) - job = relationship("Job", back_populates="tasks") + job: Mapped["Job"] = relationship("Job", back_populates="tasks") text_metrics = relationship("TaskMetricText") numeric_metrics = relationship("TaskMetricNumeric") @@ -2323,8 +2323,10 @@ class JobToInputDatasetAssociation(Base, RepresentById): dataset_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history_dataset_association.id"), index=True) dataset_version: Mapped[Optional[int]] name: Mapped[Optional[str]] = mapped_column(String(255)) - dataset = relationship("HistoryDatasetAssociation", lazy="joined", back_populates="dependent_jobs") - job = relationship("Job", back_populates="input_datasets") + dataset: Mapped[Optional["HistoryDatasetAssociation"]] = relationship( + "HistoryDatasetAssociation", lazy="joined", back_populates="dependent_jobs" + ) + job: Mapped[Optional["Job"]] = relationship("Job", back_populates="input_datasets") def __init__(self, name, dataset): self.name = name @@ -2340,8 +2342,10 @@ class JobToOutputDatasetAssociation(Base, RepresentById): job_id: Mapped[Optional[int]] = mapped_column(ForeignKey("job.id"), index=True) dataset_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history_dataset_association.id"), index=True) name: Mapped[Optional[str]] = mapped_column(String(255)) - dataset = relationship("HistoryDatasetAssociation", lazy="joined", back_populates="creating_job_associations") - job = relationship("Job", back_populates="output_datasets") + dataset: Mapped[Optional["HistoryDatasetAssociation"]] = relationship( + "HistoryDatasetAssociation", lazy="joined", back_populates="creating_job_associations" + ) + job: 
Mapped[Optional["Job"]] = relationship("Job", back_populates="output_datasets") def __init__(self, name, dataset): self.name = name @@ -2362,8 +2366,10 @@ class JobToInputDatasetCollectionAssociation(Base, RepresentById): ForeignKey("history_dataset_collection_association.id"), index=True ) name: Mapped[Optional[str]] = mapped_column(String(255)) - dataset_collection = relationship("HistoryDatasetCollectionAssociation", lazy="joined") - job = relationship("Job", back_populates="input_dataset_collections") + dataset_collection: Mapped[Optional["HistoryDatasetCollectionAssociation"]] = relationship( + "HistoryDatasetCollectionAssociation", lazy="joined" + ) + job: Mapped[Optional["Job"]] = relationship("Job", back_populates="input_dataset_collections") def __init__(self, name, dataset_collection): self.name = name @@ -2379,8 +2385,10 @@ class JobToInputDatasetCollectionElementAssociation(Base, RepresentById): ForeignKey("dataset_collection_element.id"), index=True ) name: Mapped[Optional[str]] = mapped_column(Unicode(255)) - dataset_collection_element = relationship("DatasetCollectionElement", lazy="joined") - job = relationship("Job", back_populates="input_dataset_collection_elements") + dataset_collection_element: Mapped[Optional["DatasetCollectionElement"]] = relationship( + "DatasetCollectionElement", lazy="joined" + ) + job: Mapped[Optional["Job"]] = relationship("Job", back_populates="input_dataset_collection_elements") def __init__(self, name, dataset_collection_element): self.name = name @@ -2398,8 +2406,10 @@ class JobToOutputDatasetCollectionAssociation(Base, RepresentById): ForeignKey("history_dataset_collection_association.id"), index=True ) name: Mapped[Optional[str]] = mapped_column(Unicode(255)) - dataset_collection_instance = relationship("HistoryDatasetCollectionAssociation", lazy="joined") - job = relationship("Job", back_populates="output_dataset_collection_instances") + dataset_collection_instance: 
Mapped[Optional["HistoryDatasetCollectionAssociation"]] = relationship( + "HistoryDatasetCollectionAssociation", lazy="joined" + ) + job: Mapped[Optional["Job"]] = relationship("Job", back_populates="output_dataset_collection_instances") def __init__(self, name, dataset_collection_instance): self.name = name @@ -2420,8 +2430,8 @@ class JobToImplicitOutputDatasetCollectionAssociation(Base, RepresentById): job_id: Mapped[Optional[int]] = mapped_column(ForeignKey("job.id"), index=True) dataset_collection_id: Mapped[Optional[int]] = mapped_column(ForeignKey("dataset_collection.id"), index=True) name: Mapped[Optional[str]] = mapped_column(Unicode(255)) - dataset_collection = relationship("DatasetCollection") - job = relationship("Job", back_populates="output_dataset_collections") + dataset_collection: Mapped[Optional["DatasetCollection"]] = relationship("DatasetCollection") + job: Mapped[Optional["Job"]] = relationship("Job", back_populates="output_dataset_collections") def __init__(self, name, dataset_collection): self.name = name @@ -2435,8 +2445,10 @@ class JobToInputLibraryDatasetAssociation(Base, RepresentById): job_id: Mapped[Optional[int]] = mapped_column(ForeignKey("job.id"), index=True) ldda_id: Mapped[Optional[int]] = mapped_column(ForeignKey("library_dataset_dataset_association.id"), index=True) name: Mapped[Optional[str]] = mapped_column(Unicode(255)) - job = relationship("Job", back_populates="input_library_datasets") - dataset = relationship("LibraryDatasetDatasetAssociation", lazy="joined", back_populates="dependent_jobs") + job: Mapped[Optional["Job"]] = relationship("Job", back_populates="input_library_datasets") + dataset: Mapped[Optional["LibraryDatasetDatasetAssociation"]] = relationship( + "LibraryDatasetDatasetAssociation", lazy="joined", back_populates="dependent_jobs" + ) def __init__(self, name, dataset): self.name = name @@ -2451,8 +2463,8 @@ class JobToOutputLibraryDatasetAssociation(Base, RepresentById): job_id: Mapped[Optional[int]] = 
mapped_column(ForeignKey("job.id"), index=True) ldda_id: Mapped[Optional[int]] = mapped_column(ForeignKey("library_dataset_dataset_association.id"), index=True) name: Mapped[Optional[str]] = mapped_column(Unicode(255)) - job = relationship("Job", back_populates="output_library_datasets") - dataset = relationship( + job: Mapped[Optional["Job"]] = relationship("Job", back_populates="output_library_datasets") + dataset: Mapped[Optional["LibraryDatasetDatasetAssociation"]] = relationship( "LibraryDatasetDatasetAssociation", lazy="joined", back_populates="creating_job_associations" ) @@ -2489,7 +2501,7 @@ class ImplicitlyCreatedDatasetCollectionInput(Base, RepresentById): ) name: Mapped[Optional[str]] = mapped_column(Unicode(255)) - input_dataset_collection = relationship( + input_dataset_collection: Mapped[Optional["HistoryDatasetCollectionAssociation"]] = relationship( "HistoryDatasetCollectionAssociation", primaryjoin=( lambda: HistoryDatasetCollectionAssociation.id # type: ignore[has-type] @@ -2543,7 +2555,7 @@ class ImplicitCollectionJobsJobAssociation(Base, RepresentById): job_id: Mapped[Optional[int]] = mapped_column(ForeignKey("job.id"), index=True) # Consider making this nullable... 
order_index: Mapped[int] implicit_collection_jobs = relationship("ImplicitCollectionJobs", back_populates="jobs") - job = relationship("Job", back_populates="implicit_collection_jobs_association") + job: Mapped[Optional["Job"]] = relationship("Job", back_populates="implicit_collection_jobs_association") class PostJobAction(Base, RepresentById): @@ -2554,7 +2566,7 @@ class PostJobAction(Base, RepresentById): action_type: Mapped[str] = mapped_column(String(255)) output_name: Mapped[Optional[str]] = mapped_column(String(255)) action_arguments: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) - workflow_step = relationship( + workflow_step: Mapped[Optional["WorkflowStep"]] = relationship( "WorkflowStep", back_populates="post_job_actions", primaryjoin=(lambda: WorkflowStep.id == PostJobAction.workflow_step_id), # type: ignore[has-type] @@ -2574,8 +2586,8 @@ class PostJobActionAssociation(Base, RepresentById): id: Mapped[int] = mapped_column(primary_key=True) job_id: Mapped[int] = mapped_column(ForeignKey("job.id"), index=True) post_job_action_id: Mapped[int] = mapped_column(ForeignKey("post_job_action.id"), index=True) - post_job_action = relationship("PostJobAction") - job = relationship("Job", back_populates="post_job_actions") + post_job_action: Mapped["PostJobAction"] = relationship("PostJobAction") + job: Mapped["Job"] = relationship("Job", back_populates="post_job_actions") def __init__(self, pja, job=None, job_id=None): if job is not None: @@ -2606,9 +2618,13 @@ class JobExternalOutputMetadata(Base, RepresentById): filename_kwds: Mapped[Optional[str]] = mapped_column(String(255)) filename_override_metadata: Mapped[Optional[str]] = mapped_column(String(255)) job_runner_external_pid: Mapped[Optional[str]] = mapped_column(String(255)) - history_dataset_association = relationship("HistoryDatasetAssociation", lazy="joined") - library_dataset_dataset_association = relationship("LibraryDatasetDatasetAssociation", lazy="joined") - job = relationship("Job", 
back_populates="external_output_metadata") + history_dataset_association: Mapped[Optional["HistoryDatasetAssociation"]] = relationship( + "HistoryDatasetAssociation", lazy="joined" + ) + library_dataset_dataset_association: Mapped[Optional["LibraryDatasetDatasetAssociation"]] = relationship( + "LibraryDatasetDatasetAssociation", lazy="joined" + ) + job: Mapped[Optional["Job"]] = relationship("Job", back_populates="external_output_metadata") def __init__(self, job=None, dataset=None): add_object_to_object_session(self, job) @@ -2654,9 +2670,9 @@ class JobExportHistoryArchive(Base, RepresentById): dataset_id: Mapped[Optional[int]] = mapped_column(ForeignKey("dataset.id"), index=True) compressed: Mapped[Optional[bool]] = mapped_column(index=True, default=False) history_attrs_filename: Mapped[Optional[str]] = mapped_column(TEXT) - job = relationship("Job") - dataset = relationship("Dataset") - history = relationship("History", back_populates="exports") + job: Mapped[Optional["Job"]] = relationship("Job") + dataset: Mapped[Optional["Dataset"]] = relationship("Dataset") + history: Mapped[Optional["History"]] = relationship("History", back_populates="exports") ATTRS_FILENAME_HISTORY = "history_attrs.txt" @@ -2740,8 +2756,8 @@ class JobImportHistoryArchive(Base, RepresentById): job_id: Mapped[Optional[int]] = mapped_column(ForeignKey("job.id"), index=True) history_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history.id"), index=True) archive_dir: Mapped[Optional[str]] = mapped_column(TEXT) - job = relationship("Job") - history = relationship("History") + job: Mapped[Optional["Job"]] = relationship("Job") + history: Mapped[Optional["History"]] = relationship("History") class StoreExportAssociation(Base, RepresentById): @@ -2766,7 +2782,7 @@ class JobContainerAssociation(Base, RepresentById): container_info: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) created_time: Mapped[Optional[datetime]] = mapped_column(default=now) modified_time: 
Mapped[Optional[datetime]] = mapped_column(default=now, onupdate=now) - job = relationship("Job", back_populates="container") + job: Mapped[Optional["Job"]] = relationship("Job", back_populates="container") def __init__(self, **kwd): if "job" in kwd: @@ -2796,7 +2812,7 @@ class InteractiveToolEntryPoint(Base, Dictifiable, RepresentById): created_time: Mapped[Optional[datetime]] = mapped_column(default=now) modified_time: Mapped[Optional[datetime]] = mapped_column(default=now, onupdate=now) label: Mapped[Optional[str]] = mapped_column(TEXT) - job = relationship("Job", back_populates="interactivetool_entry_points", uselist=False) + job: Mapped[Optional["Job"]] = relationship("Job", back_populates="interactivetool_entry_points", uselist=False) dict_collection_visible_keys = [ "id", @@ -2862,9 +2878,9 @@ class GenomeIndexToolData(Base, RepresentById): # TODO: params arg is lost modified_time: Mapped[Optional[datetime]] = mapped_column(default=now, onupdate=now) indexer: Mapped[Optional[str]] = mapped_column(String(64)) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) - job = relationship("Job") - dataset = relationship("Dataset") - user = relationship("User") + job: Mapped[Optional["Job"]] = relationship("Job") + dataset: Mapped[Optional["Dataset"]] = relationship("Dataset") + user: Mapped[Optional["User"]] = relationship("User") class Group(Base, Dictifiable, RepresentById): @@ -2895,8 +2911,8 @@ class UserGroupAssociation(Base, RepresentById): group_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_group.id"), index=True) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) - user = relationship("User", back_populates="groups") - group = relationship("Group", back_populates="users") + user: Mapped[Optional["User"]] = relationship("User", back_populates="groups") + group: Mapped[Optional["Group"]] = 
relationship("Group", back_populates="users") def __init__(self, user, group): add_object_to_object_session(self, user) @@ -2946,8 +2962,10 @@ class UserNotificationAssociation(Base, RepresentById): deleted: Mapped[Optional[bool]] = mapped_column(index=True, default=False) update_time: Mapped[Optional[datetime]] = mapped_column(default=now, onupdate=now) - user = relationship("User", back_populates="all_notifications") - notification = relationship("Notification", back_populates="user_notification_associations") + user: Mapped[Optional["User"]] = relationship("User", back_populates="all_notifications") + notification: Mapped[Optional["Notification"]] = relationship( + "Notification", back_populates="user_notification_associations" + ) def __init__(self, user, notification): self.user = user @@ -3098,7 +3116,7 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable users_shared_with = relationship("HistoryUserShareAssociation", back_populates="history") galaxy_sessions = relationship("GalaxySessionToHistoryAssociation", back_populates="history") workflow_invocations = relationship("WorkflowInvocation", back_populates="history", cascade_backrefs=False) - user = relationship("User", back_populates="histories") + user: Mapped[Optional["User"]] = relationship("User", back_populates="histories") jobs = relationship("Job", back_populates="history", cascade_backrefs=False) update_time = column_property( @@ -3593,7 +3611,7 @@ class HistoryUserShareAssociation(Base, UserShareAssociation): history_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user: Mapped[User] = relationship("User") - history = relationship("History", back_populates="users_shared_with") + history: Mapped[Optional["History"]] = relationship("History", back_populates="users_shared_with") class UserRoleAssociation(Base, RepresentById): @@ -3605,8 +3623,8 @@ class 
UserRoleAssociation(Base, RepresentById): create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) - user = relationship("User", back_populates="roles") - role = relationship("Role", back_populates="users") + user: Mapped[Optional["User"]] = relationship("User", back_populates="roles") + role: Mapped[Optional["Role"]] = relationship("Role", back_populates="users") def __init__(self, user, role): add_object_to_object_session(self, user) @@ -3622,8 +3640,8 @@ class GroupRoleAssociation(Base, RepresentById): role_id: Mapped[Optional[int]] = mapped_column(ForeignKey("role.id"), index=True) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) - group = relationship("Group", back_populates="roles") - role = relationship("Role", back_populates="groups") + group: Mapped[Optional["Group"]] = relationship("Group", back_populates="roles") + role: Mapped[Optional["Role"]] = relationship("Role", back_populates="groups") def __init__(self, group, role): self.group = group @@ -3674,7 +3692,7 @@ class UserQuotaSourceUsage(Base, Dictifiable, RepresentById): quota_source_label: Mapped[Optional[str]] = mapped_column(String(32), index=True) # user had an index on disk_usage - does that make any sense? 
-John disk_usage: Mapped[Decimal] = mapped_column(Numeric(15, 0), default=0) - user = relationship("User", back_populates="quota_source_usages") + user: Mapped[Optional["User"]] = relationship("User", back_populates="quota_source_usages") class UserQuotaAssociation(Base, Dictifiable, RepresentById): @@ -3685,8 +3703,8 @@ class UserQuotaAssociation(Base, Dictifiable, RepresentById): quota_id: Mapped[Optional[int]] = mapped_column(ForeignKey("quota.id"), index=True) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) - user = relationship("User", back_populates="quotas") - quota = relationship("Quota", back_populates="users") + user: Mapped[Optional["User"]] = relationship("User", back_populates="quotas") + quota: Mapped[Optional["Quota"]] = relationship("Quota", back_populates="users") dict_element_visible_keys = ["user"] @@ -3704,8 +3722,8 @@ class GroupQuotaAssociation(Base, Dictifiable, RepresentById): quota_id: Mapped[Optional[int]] = mapped_column(ForeignKey("quota.id"), index=True) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) - group = relationship("Group", back_populates="quotas") - quota = relationship("Quota", back_populates="groups") + group: Mapped[Optional["Group"]] = relationship("Group", back_populates="quotas") + quota: Mapped[Optional["Quota"]] = relationship("Quota", back_populates="groups") dict_element_visible_keys = ["group"] @@ -3786,7 +3804,7 @@ class DefaultQuotaAssociation(Base, Dictifiable, RepresentById): update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) type: Mapped[Optional[str]] = mapped_column(String(32)) quota_id: Mapped[Optional[int]] = mapped_column(ForeignKey("quota.id"), index=True) - quota = relationship("Quota", back_populates="default") + quota: 
Mapped[Optional["Quota"]] = relationship("Quota", back_populates="default") dict_element_visible_keys = ["type"] @@ -3810,8 +3828,8 @@ class DatasetPermissions(Base, RepresentById): action: Mapped[Optional[str]] = mapped_column(TEXT) dataset_id: Mapped[Optional[int]] = mapped_column(ForeignKey("dataset.id"), index=True) role_id: Mapped[Optional[int]] = mapped_column(ForeignKey("role.id"), index=True) - dataset = relationship("Dataset", back_populates="actions") - role = relationship("Role", back_populates="dataset_actions") + dataset: Mapped[Optional["Dataset"]] = relationship("Dataset", back_populates="actions") + role: Mapped[Optional["Role"]] = relationship("Role", back_populates="dataset_actions") def __init__(self, action, dataset, role=None, role_id=None): self.action = action @@ -3832,8 +3850,8 @@ class LibraryPermissions(Base, RepresentById): action: Mapped[Optional[str]] = mapped_column(TEXT) library_id: Mapped[Optional[int]] = mapped_column(ForeignKey("library.id"), index=True) role_id: Mapped[Optional[int]] = mapped_column(ForeignKey("role.id"), index=True) - library = relationship("Library", back_populates="actions") - role = relationship("Role") + library: Mapped[Optional["Library"]] = relationship("Library", back_populates="actions") + role: Mapped[Optional["Role"]] = relationship("Role") def __init__(self, action, library_item, role): self.action = action @@ -3854,8 +3872,8 @@ class LibraryFolderPermissions(Base, RepresentById): action: Mapped[Optional[str]] = mapped_column(TEXT) library_folder_id: Mapped[Optional[int]] = mapped_column(ForeignKey("library_folder.id"), index=True) role_id: Mapped[Optional[int]] = mapped_column(ForeignKey("role.id"), index=True) - folder = relationship("LibraryFolder", back_populates="actions") - role = relationship("Role") + folder: Mapped[Optional["LibraryFolder"]] = relationship("LibraryFolder", back_populates="actions") + role: Mapped[Optional["Role"]] = relationship("Role") def __init__(self, action, library_item, 
role): self.action = action @@ -3876,8 +3894,8 @@ class LibraryDatasetPermissions(Base, RepresentById): action: Mapped[Optional[str]] = mapped_column(TEXT) library_dataset_id: Mapped[Optional[int]] = mapped_column(ForeignKey("library_dataset.id"), index=True) role_id: Mapped[Optional[int]] = mapped_column(ForeignKey("role.id"), index=True) - library_dataset = relationship("LibraryDataset", back_populates="actions") - role = relationship("Role") + library_dataset: Mapped[Optional["LibraryDataset"]] = relationship("LibraryDataset", back_populates="actions") + role: Mapped[Optional["Role"]] = relationship("Role") def __init__(self, action, library_item, role): self.action = action @@ -3900,8 +3918,10 @@ class LibraryDatasetDatasetAssociationPermissions(Base, RepresentById): ForeignKey("library_dataset_dataset_association.id"), index=True ) role_id: Mapped[Optional[int]] = mapped_column(ForeignKey("role.id"), index=True) - library_dataset_dataset_association = relationship("LibraryDatasetDatasetAssociation", back_populates="actions") - role = relationship("Role") + library_dataset_dataset_association: Mapped[Optional["LibraryDatasetDatasetAssociation"]] = relationship( + "LibraryDatasetDatasetAssociation", back_populates="actions" + ) + role: Mapped[Optional["Role"]] = relationship("Role") def __init__(self, action, library_item, role): self.action = action @@ -3920,8 +3940,8 @@ class DefaultUserPermissions(Base, RepresentById): user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) action: Mapped[Optional[str]] = mapped_column(TEXT) role_id: Mapped[Optional[int]] = mapped_column(ForeignKey("role.id"), index=True) - user = relationship("User", back_populates="default_permissions") - role = relationship("Role") + user: Mapped[Optional["User"]] = relationship("User", back_populates="default_permissions") + role: Mapped[Optional["Role"]] = relationship("Role") def __init__(self, user, action, role): add_object_to_object_session(self, 
user) @@ -3937,8 +3957,8 @@ class DefaultHistoryPermissions(Base, RepresentById): history_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history.id"), index=True) action: Mapped[Optional[str]] = mapped_column(TEXT) role_id: Mapped[Optional[int]] = mapped_column(ForeignKey("role.id"), index=True) - history = relationship("History", back_populates="default_permissions") - role = relationship("Role") + history: Mapped[Optional["History"]] = relationship("History", back_populates="default_permissions") + role: Mapped[Optional["Role"]] = relationship("Role") def __init__(self, history, action, role): add_object_to_object_session(self, history) @@ -3974,7 +3994,7 @@ class Dataset(Base, StorableObject, Serializable): uuid: Mapped[Optional[Union[UUID, str]]] = mapped_column(UUIDType()) actions = relationship("DatasetPermissions", back_populates="dataset") - job = relationship(Job, primaryjoin=(lambda: Dataset.job_id == Job.id)) + job: Mapped[Optional["Job"]] = relationship(Job, primaryjoin=(lambda: Dataset.job_id == Job.id)) active_history_associations = relationship( "HistoryDatasetAssociation", primaryjoin=( @@ -4342,7 +4362,7 @@ class DatasetSource(Base, Dictifiable, Serializable): source_uri: Mapped[Optional[str]] = mapped_column(TEXT) extra_files_path: Mapped[Optional[str]] = mapped_column(TEXT) transform: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) - dataset = relationship("Dataset", back_populates="sources") + dataset: Mapped[Optional["Dataset"]] = relationship("Dataset", back_populates="sources") hashes = relationship("DatasetSourceHash", back_populates="source") dict_collection_visible_keys = ["id", "source_uri", "extra_files_path", "transform"] dict_element_visible_keys = [ @@ -4379,7 +4399,7 @@ class DatasetSourceHash(Base, Serializable): dataset_source_id: Mapped[Optional[int]] = mapped_column(ForeignKey("dataset_source.id"), index=True) hash_function: Mapped[Optional[str]] = mapped_column(TEXT) hash_value: Mapped[Optional[str]] = 
mapped_column(TEXT) - source = relationship("DatasetSource", back_populates="hashes") + source: Mapped[Optional["DatasetSource"]] = relationship("DatasetSource", back_populates="hashes") def _serialize(self, id_encoder, serialization_options): rval = dict_for( @@ -4405,7 +4425,7 @@ class DatasetHash(Base, Dictifiable, Serializable): hash_function: Mapped[Optional[str]] = mapped_column(TEXT) hash_value: Mapped[Optional[str]] = mapped_column(TEXT) extra_files_path: Mapped[Optional[str]] = mapped_column(TEXT) - dataset = relationship("Dataset", back_populates="hashes") + dataset: Mapped[Optional["Dataset"]] = relationship("Dataset", back_populates="hashes") dict_collection_visible_keys = ["id", "hash_function", "hash_value", "extra_files_path"] dict_element_visible_keys = ["id", "hash_function", "hash_value", "extra_files_path"] @@ -5486,8 +5506,10 @@ class HistoryDatasetAssociationDisplayAtAuthorization(Base, RepresentById): ) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) site: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) - history_dataset_association = relationship("HistoryDatasetAssociation") - user = relationship("User") + history_dataset_association: Mapped[Optional["HistoryDatasetAssociation"]] = relationship( + "HistoryDatasetAssociation" + ) + user: Mapped[Optional["User"]] = relationship("User") def __init__(self, hda=None, user=None, site=None): self.history_dataset_association = hda @@ -5507,13 +5529,13 @@ class HistoryDatasetAssociationSubset(Base, RepresentById): ) location: Mapped[Optional[str]] = mapped_column(Unicode(255), index=True) - hda = relationship( + hda: Mapped[Optional["HistoryDatasetAssociation"]] = relationship( "HistoryDatasetAssociation", primaryjoin=( lambda: HistoryDatasetAssociationSubset.history_dataset_association_id == HistoryDatasetAssociation.id ), ) - subset = relationship( + subset: Mapped[Optional["HistoryDatasetAssociation"]] = relationship( "HistoryDatasetAssociation", 
primaryjoin=( lambda: HistoryDatasetAssociationSubset.history_dataset_association_subset_id @@ -5626,7 +5648,9 @@ class LibraryFolder(Base, Dictifiable, HasName, Serializable): order_by=asc(name), back_populates="parent", ) - parent = relationship("LibraryFolder", back_populates="folders", remote_side=[id]) + parent: Mapped[Optional["LibraryFolder"]] = relationship( + "LibraryFolder", back_populates="folders", remote_side=[id] + ) active_folders = relationship( "LibraryFolder", @@ -5768,7 +5792,7 @@ class LibraryDataset(Base, Serializable): _info: Mapped[Optional[str]] = mapped_column("info", TrimmedString(255)) deleted: Mapped[Optional[bool]] = mapped_column(index=True, default=False) purged: Mapped[Optional[bool]] = mapped_column(index=True, default=False) - folder = relationship("LibraryFolder") + folder: Mapped[Optional["LibraryFolder"]] = relationship("LibraryFolder") library_dataset_dataset_association = relationship( "LibraryDatasetDatasetAssociation", foreign_keys=library_dataset_dataset_association_id, post_update=True ) @@ -6065,7 +6089,9 @@ class ExtendedMetadataIndex(Base, RepresentById): ) path: Mapped[Optional[str]] = mapped_column(String(255)) value: Mapped[Optional[str]] = mapped_column(TEXT) - extended_metadata = relationship("ExtendedMetadata", back_populates="children") + extended_metadata: Mapped[Optional["ExtendedMetadata"]] = relationship( + "ExtendedMetadata", back_populates="children" + ) def __init__(self, extended_metadata, path, value): self.extended_metadata = extended_metadata @@ -6083,7 +6109,7 @@ class LibraryInfoAssociation(Base, RepresentById): inheritable: Mapped[Optional[bool]] = mapped_column(index=True, default=False) deleted: Mapped[Optional[bool]] = mapped_column(index=True, default=False) - library = relationship( + library: Mapped[Optional["Library"]] = relationship( "Library", primaryjoin=( lambda: and_( @@ -6092,10 +6118,10 @@ class LibraryInfoAssociation(Base, RepresentById): ) ), ) - template = relationship( + template: 
Mapped[Optional["FormDefinition"]] = relationship( "FormDefinition", primaryjoin=lambda: LibraryInfoAssociation.form_definition_id == FormDefinition.id ) - info = relationship( + info: Mapped[Optional["FormValues"]] = relationship( "FormValues", primaryjoin=lambda: LibraryInfoAssociation.form_values_id == FormValues.id # type: ignore[has-type] ) @@ -6116,17 +6142,17 @@ class LibraryFolderInfoAssociation(Base, RepresentById): inheritable: Mapped[Optional[bool]] = mapped_column(index=True, default=False) deleted: Mapped[Optional[bool]] = mapped_column(index=True, default=False) - folder = relationship( + folder: Mapped[Optional["LibraryFolder"]] = relationship( "LibraryFolder", primaryjoin=( lambda: (LibraryFolderInfoAssociation.library_folder_id == LibraryFolder.id) & (not_(LibraryFolderInfoAssociation.deleted)) ), ) - template = relationship( + template: Mapped[Optional["FormDefinition"]] = relationship( "FormDefinition", primaryjoin=(lambda: LibraryFolderInfoAssociation.form_definition_id == FormDefinition.id) ) - info = relationship( + info: Mapped[Optional["FormValues"]] = relationship( "FormValues", primaryjoin=(lambda: LibraryFolderInfoAssociation.form_values_id == FormValues.id) # type: ignore[has-type] ) @@ -6148,7 +6174,7 @@ class LibraryDatasetDatasetInfoAssociation(Base, RepresentById): form_values_id: Mapped[Optional[int]] = mapped_column(ForeignKey("form_values.id"), index=True) deleted: Mapped[Optional[bool]] = mapped_column(index=True, default=False) - library_dataset_dataset_association = relationship( + library_dataset_dataset_association: Mapped[Optional["LibraryDatasetDatasetAssociation"]] = relationship( "LibraryDatasetDatasetAssociation", primaryjoin=( lambda: ( @@ -6158,11 +6184,11 @@ class LibraryDatasetDatasetInfoAssociation(Base, RepresentById): & (not_(LibraryDatasetDatasetInfoAssociation.deleted)) ), ) - template = relationship( + template: Mapped[Optional["FormDefinition"]] = relationship( "FormDefinition", primaryjoin=(lambda: 
LibraryDatasetDatasetInfoAssociation.form_definition_id == FormDefinition.id), ) - info = relationship( + info: Mapped[Optional["FormValues"]] = relationship( "FormValues", primaryjoin=(lambda: LibraryDatasetDatasetInfoAssociation.form_values_id == FormValues.id) # type: ignore[has-type] ) @@ -6193,22 +6219,22 @@ class ImplicitlyConvertedDatasetAssociation(Base, Serializable): metadata_safe: Mapped[Optional[bool]] = mapped_column(index=True, default=True) type: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) - parent_hda = relationship( + parent_hda: Mapped[Optional["HistoryDatasetAssociation"]] = relationship( "HistoryDatasetAssociation", primaryjoin=(lambda: ImplicitlyConvertedDatasetAssociation.hda_parent_id == HistoryDatasetAssociation.id), back_populates="implicitly_converted_datasets", ) - dataset_ldda = relationship( + dataset_ldda: Mapped[Optional["LibraryDatasetDatasetAssociation"]] = relationship( "LibraryDatasetDatasetAssociation", primaryjoin=(lambda: ImplicitlyConvertedDatasetAssociation.ldda_id == LibraryDatasetDatasetAssociation.id), back_populates="implicitly_converted_parent_datasets", ) - dataset = relationship( + dataset: Mapped[Optional["HistoryDatasetAssociation"]] = relationship( "HistoryDatasetAssociation", primaryjoin=(lambda: ImplicitlyConvertedDatasetAssociation.hda_id == HistoryDatasetAssociation.id), back_populates="implicitly_converted_parent_datasets", ) - parent_ldda = relationship( + parent_ldda: Mapped[Optional["LibraryDatasetDatasetAssociation"]] = relationship( "LibraryDatasetDatasetAssociation", primaryjoin=( lambda: ImplicitlyConvertedDatasetAssociation.ldda_parent_id == LibraryDatasetDatasetAssociation.table.c.id @@ -6769,7 +6795,7 @@ class HistoryDatasetCollectionAssociation( update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, index=True, nullable=True) collection = relationship("DatasetCollection") - history = relationship("History", back_populates="dataset_collections") + history: 
Mapped[Optional["History"]] = relationship("History", back_populates="dataset_collections") copied_from_history_dataset_collection_association = relationship( "HistoryDatasetCollectionAssociation", @@ -6790,7 +6816,7 @@ class HistoryDatasetCollectionAssociation( ), ) implicit_collection_jobs = relationship("ImplicitCollectionJobs", uselist=False) - job = relationship( + job: Mapped[Optional["Job"]] = relationship( "Job", back_populates="history_dataset_collection_associations", uselist=False, @@ -7387,9 +7413,9 @@ class Event(Base, RepresentById): session_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_session.id"), index=True) tool_id: Mapped[Optional[str]] = mapped_column(String(255)) - history = relationship("History") - user = relationship("User") - galaxy_session = relationship("GalaxySession") + history: Mapped[Optional["History"]] = relationship("History") + user: Mapped[Optional["User"]] = relationship("User") + galaxy_session: Mapped[Optional["GalaxySession"]] = relationship("GalaxySession") class GalaxySession(Base, RepresentById): @@ -7410,11 +7436,11 @@ class GalaxySession(Base, RepresentById): prev_session_id: Mapped[Optional[int]] disk_usage: Mapped[Optional[Decimal]] = mapped_column(Numeric(15, 0), index=True) last_action: Mapped[Optional[datetime]] - current_history = relationship("History") + current_history: Mapped[Optional["History"]] = relationship("History") histories = relationship( "GalaxySessionToHistoryAssociation", back_populates="galaxy_session", cascade_backrefs=False ) - user = relationship("User", back_populates="galaxy_sessions") + user: Mapped[Optional["User"]] = relationship("User", back_populates="galaxy_sessions") def __init__(self, is_valid=False, **kwd): super().__init__(**kwd) @@ -7445,8 +7471,8 @@ class GalaxySessionToHistoryAssociation(Base, RepresentById): create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) session_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_session.id"), 
index=True) history_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history.id"), index=True) - galaxy_session = relationship("GalaxySession", back_populates="histories") - history = relationship("History", back_populates="galaxy_sessions") + galaxy_session: Mapped[Optional["GalaxySession"]] = relationship("GalaxySession", back_populates="histories") + history: Mapped[Optional["History"]] = relationship("History", back_populates="galaxy_sessions") def __init__(self, galaxy_session, history): self.galaxy_session = galaxy_session @@ -7482,7 +7508,7 @@ class StoredWorkflow(Base, HasTags, Dictifiable, RepresentById): from_path: Mapped[Optional[str]] = mapped_column(TEXT) published: Mapped[Optional[bool]] = mapped_column(index=True, default=False) - user = relationship( + user: Mapped["User"] = relationship( "User", primaryjoin=(lambda: User.id == StoredWorkflow.user_id), back_populates="stored_workflows" ) workflows = relationship( @@ -7844,18 +7870,20 @@ class WorkflowStep(Base, RepresentById): temp_input_connections = None parent_comment_id: Mapped[Optional[int]] = mapped_column(ForeignKey("workflow_comment.id"), index=True) - parent_comment = relationship( + parent_comment: Mapped[Optional["WorkflowComment"]] = relationship( "WorkflowComment", primaryjoin=(lambda: WorkflowComment.id == WorkflowStep.parent_comment_id), back_populates="child_steps", ) - subworkflow = relationship( + subworkflow: Mapped[Optional["Workflow"]] = relationship( "Workflow", primaryjoin=(lambda: Workflow.id == WorkflowStep.subworkflow_id), back_populates="parent_workflow_steps", ) - dynamic_tool = relationship("DynamicTool", primaryjoin=(lambda: DynamicTool.id == WorkflowStep.dynamic_tool_id)) + dynamic_tool: Mapped[Optional["DynamicTool"]] = relationship( + "DynamicTool", primaryjoin=(lambda: DynamicTool.id == WorkflowStep.dynamic_tool_id) + ) tags: Mapped[List["WorkflowStepTagAssociation"]] = relationship( "WorkflowStepTagAssociation", order_by=lambda: WorkflowStepTagAssociation.id, 
back_populates="workflow_step" ) @@ -7870,7 +7898,7 @@ class WorkflowStep(Base, RepresentById): output_connections = relationship( "WorkflowStepConnection", primaryjoin=(lambda: WorkflowStepConnection.output_step_id == WorkflowStep.id) ) - workflow = relationship( + workflow: Mapped["Workflow"] = relationship( "Workflow", primaryjoin=(lambda: Workflow.id == WorkflowStep.workflow_id), back_populates="steps", @@ -8150,7 +8178,7 @@ class WorkflowStepInput(Base, RepresentById): default_value_set: Mapped[Optional[bool]] = mapped_column(default=False) runtime_value: Mapped[Optional[bool]] = mapped_column(default=False) - workflow_step = relationship( + workflow_step: Mapped[Optional["WorkflowStep"]] = relationship( "WorkflowStep", back_populates="inputs", cascade="all", @@ -8244,7 +8272,7 @@ class WorkflowOutput(Base, Serializable): output_name: Mapped[Optional[str]] = mapped_column(String(255)) label: Mapped[Optional[str]] = mapped_column(Unicode(255)) uuid: Mapped[Optional[Union[UUID, str]]] = mapped_column(UUIDType) - workflow_step = relationship( + workflow_step: Mapped["WorkflowStep"] = relationship( "WorkflowStep", back_populates="workflow_outputs", primaryjoin=(lambda: WorkflowStep.id == WorkflowOutput.workflow_step_id), @@ -8290,7 +8318,7 @@ class WorkflowComment(Base, RepresentById): data: Mapped[Optional[bytes]] = mapped_column(JSONType) parent_comment_id: Mapped[Optional[int]] = mapped_column(ForeignKey("workflow_comment.id"), index=True) - workflow = relationship( + workflow: Mapped["Workflow"] = relationship( "Workflow", primaryjoin=(lambda: Workflow.id == WorkflowComment.workflow_id), back_populates="comments", @@ -8302,7 +8330,7 @@ class WorkflowComment(Base, RepresentById): back_populates="parent_comment", ) - parent_comment = relationship( + parent_comment: Mapped[Optional["WorkflowComment"]] = relationship( "WorkflowComment", primaryjoin=(lambda: WorkflowComment.id == WorkflowComment.parent_comment_id), back_populates="child_comments", @@ -8355,7 
+8383,9 @@ class StoredWorkflowUserShareAssociation(Base, UserShareAssociation): stored_workflow_id: Mapped[Optional[int]] = mapped_column(ForeignKey("stored_workflow.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user: Mapped[User] = relationship("User") - stored_workflow = relationship("StoredWorkflow", back_populates="users_shared_with") + stored_workflow: Mapped[Optional["StoredWorkflow"]] = relationship( + "StoredWorkflow", back_populates="users_shared_with" + ) class StoredWorkflowMenuEntry(Base, RepresentById): @@ -8366,8 +8396,8 @@ class StoredWorkflowMenuEntry(Base, RepresentById): user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) order_index: Mapped[Optional[int]] - stored_workflow = relationship("StoredWorkflow") - user = relationship( + stored_workflow: Mapped[Optional["StoredWorkflow"]] = relationship("StoredWorkflow") + user: Mapped[Optional["User"]] = relationship( "User", back_populates="stored_workflow_menu_entries", primaryjoin=( @@ -8968,9 +8998,15 @@ class WorkflowInvocationMessage(Base, Dictifiable, Serializable): hda_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history_dataset_association.id")) hdca_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history_dataset_collection_association.id")) - workflow_invocation = relationship("WorkflowInvocation", back_populates="messages", lazy=True) - workflow_step = relationship("WorkflowStep", foreign_keys=workflow_step_id, lazy=True) - dependent_workflow_step = relationship("WorkflowStep", foreign_keys=dependent_workflow_step_id, lazy=True) + workflow_invocation: Mapped["WorkflowInvocation"] = relationship( + "WorkflowInvocation", back_populates="messages", lazy=True + ) + workflow_step: Mapped[Optional["WorkflowStep"]] = relationship( + "WorkflowStep", foreign_keys=workflow_step_id, lazy=True + ) + dependent_workflow_step: Mapped[Optional["WorkflowStep"]] = relationship( + "WorkflowStep", 
foreign_keys=dependent_workflow_step_id, lazy=True + ) @property def workflow_step_index(self): @@ -9043,7 +9079,7 @@ class WorkflowInvocationStep(Base, Dictifiable, Serializable): action: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) workflow_step = relationship("WorkflowStep") - job = relationship("Job", back_populates="workflow_invocation_step", uselist=False) + job: Mapped[Optional["Job"]] = relationship("Job", back_populates="workflow_invocation_step", uselist=False) implicit_collection_jobs = relationship("ImplicitCollectionJobs", uselist=False) output_dataset_collections = relationship( "WorkflowInvocationStepOutputDatasetCollectionAssociation", @@ -9055,7 +9091,7 @@ class WorkflowInvocationStep(Base, Dictifiable, Serializable): back_populates="workflow_invocation_step", cascade_backrefs=False, ) - workflow_invocation = relationship("WorkflowInvocation", back_populates="steps") + workflow_invocation: Mapped["WorkflowInvocation"] = relationship("WorkflowInvocation", back_populates="steps") output_value = relationship( "WorkflowInvocationOutputValue", foreign_keys="[WorkflowInvocationStep.workflow_invocation_id, WorkflowInvocationStep.workflow_step_id]", @@ -9247,7 +9283,9 @@ class WorkflowRequestInputParameter(Base, Dictifiable, Serializable): name: Mapped[Optional[str]] = mapped_column(Unicode(255)) value: Mapped[Optional[str]] = mapped_column(TEXT) type: Mapped[Optional[str]] = mapped_column(Unicode(255)) - workflow_invocation = relationship("WorkflowInvocation", back_populates="input_parameters") + workflow_invocation: Mapped[Optional["WorkflowInvocation"]] = relationship( + "WorkflowInvocation", back_populates="input_parameters" + ) dict_collection_visible_keys = ["id", "name", "value", "type"] @@ -9276,8 +9314,10 @@ class WorkflowRequestStepState(Base, Dictifiable, Serializable): ) workflow_step_id: Mapped[Optional[int]] = mapped_column(ForeignKey("workflow_step.id")) value: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) - 
workflow_step = relationship("WorkflowStep") - workflow_invocation = relationship("WorkflowInvocation", back_populates="step_states") + workflow_step: Mapped[Optional["WorkflowStep"]] = relationship("WorkflowStep") + workflow_invocation: Mapped[Optional["WorkflowInvocation"]] = relationship( + "WorkflowInvocation", back_populates="step_states" + ) dict_collection_visible_keys = ["id", "name", "value", "workflow_step_id"] @@ -9299,9 +9339,11 @@ class WorkflowRequestToInputDatasetAssociation(Base, Dictifiable, Serializable): workflow_step_id: Mapped[Optional[int]] = mapped_column(ForeignKey("workflow_step.id")) dataset_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history_dataset_association.id"), index=True) - workflow_step = relationship("WorkflowStep") - dataset = relationship("HistoryDatasetAssociation") - workflow_invocation = relationship("WorkflowInvocation", back_populates="input_datasets") + workflow_step: Mapped[Optional["WorkflowStep"]] = relationship("WorkflowStep") + dataset: Mapped[Optional["HistoryDatasetAssociation"]] = relationship("HistoryDatasetAssociation") + workflow_invocation: Mapped[Optional["WorkflowInvocation"]] = relationship( + "WorkflowInvocation", back_populates="input_datasets" + ) history_content_type = "dataset" dict_collection_visible_keys = ["id", "workflow_invocation_id", "workflow_step_id", "dataset_id", "name"] @@ -9328,9 +9370,13 @@ class WorkflowRequestToInputDatasetCollectionAssociation(Base, Dictifiable, Seri dataset_collection_id: Mapped[Optional[int]] = mapped_column( ForeignKey("history_dataset_collection_association.id"), index=True ) - workflow_step = relationship("WorkflowStep") - dataset_collection = relationship("HistoryDatasetCollectionAssociation") - workflow_invocation = relationship("WorkflowInvocation", back_populates="input_dataset_collections") + workflow_step: Mapped[Optional["WorkflowStep"]] = relationship("WorkflowStep") + dataset_collection: Mapped[Optional["HistoryDatasetCollectionAssociation"]] = 
relationship( + "HistoryDatasetCollectionAssociation" + ) + workflow_invocation: Mapped[Optional["WorkflowInvocation"]] = relationship( + "WorkflowInvocation", back_populates="input_dataset_collections" + ) history_content_type = "dataset_collection" dict_collection_visible_keys = ["id", "workflow_invocation_id", "workflow_step_id", "dataset_collection_id", "name"] @@ -9355,8 +9401,10 @@ class WorkflowRequestInputStepParameter(Base, Dictifiable, Serializable): workflow_step_id: Mapped[Optional[int]] = mapped_column(ForeignKey("workflow_step.id")) parameter_value: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) - workflow_step = relationship("WorkflowStep") - workflow_invocation = relationship("WorkflowInvocation", back_populates="input_step_parameters") + workflow_step: Mapped[Optional["WorkflowStep"]] = relationship("WorkflowStep") + workflow_invocation: Mapped[Optional["WorkflowInvocation"]] = relationship( + "WorkflowInvocation", back_populates="input_step_parameters" + ) dict_collection_visible_keys = ["id", "workflow_invocation_id", "workflow_step_id", "parameter_value"] @@ -9378,10 +9426,12 @@ class WorkflowInvocationOutputDatasetAssociation(Base, Dictifiable, Serializable dataset_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history_dataset_association.id"), index=True) workflow_output_id: Mapped[Optional[int]] = mapped_column(ForeignKey("workflow_output.id"), index=True) - workflow_invocation = relationship("WorkflowInvocation", back_populates="output_datasets") - workflow_step = relationship("WorkflowStep") - dataset = relationship("HistoryDatasetAssociation") - workflow_output = relationship("WorkflowOutput") + workflow_invocation: Mapped[Optional["WorkflowInvocation"]] = relationship( + "WorkflowInvocation", back_populates="output_datasets" + ) + workflow_step: Mapped[Optional["WorkflowStep"]] = relationship("WorkflowStep") + dataset: Mapped[Optional["HistoryDatasetAssociation"]] = relationship("HistoryDatasetAssociation") + 
workflow_output: Mapped[Optional["WorkflowOutput"]] = relationship("WorkflowOutput") history_content_type = "dataset" dict_collection_visible_keys = ["id", "workflow_invocation_id", "workflow_step_id", "dataset_id", "name"] @@ -9413,10 +9463,14 @@ class WorkflowInvocationOutputDatasetCollectionAssociation(Base, Dictifiable, Se ForeignKey("workflow_output.id", name="fk_wiodca_woi"), index=True ) - workflow_invocation = relationship("WorkflowInvocation", back_populates="output_dataset_collections") - workflow_step = relationship("WorkflowStep") - dataset_collection = relationship("HistoryDatasetCollectionAssociation") - workflow_output = relationship("WorkflowOutput") + workflow_invocation: Mapped[Optional["WorkflowInvocation"]] = relationship( + "WorkflowInvocation", back_populates="output_dataset_collections" + ) + workflow_step: Mapped[Optional["WorkflowStep"]] = relationship("WorkflowStep") + dataset_collection: Mapped[Optional["HistoryDatasetCollectionAssociation"]] = relationship( + "HistoryDatasetCollectionAssociation" + ) + workflow_output: Mapped[Optional["WorkflowOutput"]] = relationship("WorkflowOutput") history_content_type = "dataset_collection" dict_collection_visible_keys = ["id", "workflow_invocation_id", "workflow_step_id", "dataset_collection_id", "name"] @@ -9442,9 +9496,11 @@ class WorkflowInvocationOutputValue(Base, Dictifiable, Serializable): workflow_output_id: Mapped[Optional[int]] = mapped_column(ForeignKey("workflow_output.id"), index=True) value: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) - workflow_invocation = relationship("WorkflowInvocation", back_populates="output_values") + workflow_invocation: Mapped[Optional["WorkflowInvocation"]] = relationship( + "WorkflowInvocation", back_populates="output_values" + ) - workflow_invocation_step = relationship( + workflow_invocation_step: Mapped[Optional["WorkflowInvocationStep"]] = relationship( "WorkflowInvocationStep", foreign_keys="[WorkflowInvocationStep.workflow_invocation_id, 
WorkflowInvocationStep.workflow_step_id]", primaryjoin=( @@ -9457,8 +9513,8 @@ class WorkflowInvocationOutputValue(Base, Dictifiable, Serializable): viewonly=True, ) - workflow_step = relationship("WorkflowStep") - workflow_output = relationship("WorkflowOutput") + workflow_step: Mapped[Optional["WorkflowStep"]] = relationship("WorkflowStep") + workflow_output: Mapped[Optional["WorkflowOutput"]] = relationship("WorkflowOutput") dict_collection_visible_keys = ["id", "workflow_invocation_id", "workflow_step_id", "value"] @@ -9481,8 +9537,10 @@ class WorkflowInvocationStepOutputDatasetAssociation(Base, Dictifiable, Represen ) dataset_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history_dataset_association.id"), index=True) output_name: Mapped[Optional[str]] = mapped_column(String(255)) - workflow_invocation_step = relationship("WorkflowInvocationStep", back_populates="output_datasets") - dataset = relationship("HistoryDatasetAssociation") + workflow_invocation_step: Mapped[Optional["WorkflowInvocationStep"]] = relationship( + "WorkflowInvocationStep", back_populates="output_datasets" + ) + dataset: Mapped[Optional["HistoryDatasetAssociation"]] = relationship("HistoryDatasetAssociation") dict_collection_visible_keys = ["id", "workflow_invocation_step_id", "dataset_id", "output_name"] @@ -9504,8 +9562,12 @@ class WorkflowInvocationStepOutputDatasetCollectionAssociation(Base, Dictifiable ) output_name: Mapped[Optional[str]] = mapped_column(String(255)) - workflow_invocation_step = relationship("WorkflowInvocationStep", back_populates="output_dataset_collections") - dataset_collection = relationship("HistoryDatasetCollectionAssociation") + workflow_invocation_step: Mapped[Optional["WorkflowInvocationStep"]] = relationship( + "WorkflowInvocationStep", back_populates="output_dataset_collections" + ) + dataset_collection: Mapped[Optional["HistoryDatasetCollectionAssociation"]] = relationship( + "HistoryDatasetCollectionAssociation" + ) dict_collection_visible_keys = 
["id", "workflow_invocation_step_id", "dataset_collection_id", "output_name"] @@ -9524,7 +9586,7 @@ class MetadataFile(Base, StorableObject, Serializable): deleted: Mapped[Optional[bool]] = mapped_column(index=True, default=False) purged: Mapped[Optional[bool]] = mapped_column(index=True, default=False) - history_dataset = relationship("HistoryDatasetAssociation") + history_dataset: Mapped[Optional["HistoryDatasetAssociation"]] = relationship("HistoryDatasetAssociation") library_dataset = relationship("LibraryDatasetDatasetAssociation") def __init__(self, dataset=None, name=None, uuid=None): @@ -9606,7 +9668,7 @@ class FormDefinition(Base, Dictifiable, RepresentById): fields: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) type: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) layout: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) - form_definition_current = relationship( + form_definition_current: Mapped["FormDefinitionCurrent"] = relationship( "FormDefinitionCurrent", back_populates="forms", primaryjoin=(lambda: FormDefinitionCurrent.id == FormDefinition.form_definition_current_id), # type: ignore[has-type] @@ -9678,7 +9740,7 @@ class FormDefinitionCurrent(Base, RepresentById): cascade="all, delete-orphan", primaryjoin=(lambda: FormDefinitionCurrent.id == FormDefinition.form_definition_current_id), ) - latest_form = relationship( + latest_form: Mapped[Optional["FormDefinition"]] = relationship( "FormDefinition", post_update=True, primaryjoin=(lambda: FormDefinitionCurrent.latest_form_id == FormDefinition.id), @@ -9696,7 +9758,7 @@ class FormValues(Base, RepresentById): update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) form_definition_id: Mapped[Optional[int]] = mapped_column(ForeignKey("form_definition.id"), index=True) content: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) - form_definition = relationship( + form_definition: Mapped[Optional["FormDefinition"]] = relationship( 
"FormDefinition", primaryjoin=(lambda: FormValues.form_definition_id == FormDefinition.id) ) @@ -9725,7 +9787,9 @@ class UserAddress(Base, RepresentById): purged: Mapped[Optional[bool]] = mapped_column(index=True, default=False) # `desc` needs to be fully qualified because it is shadowed by `desc` Column defined above # TODO: db migration to rename column, then use `desc` - user = relationship("User", back_populates="addresses", order_by=sqlalchemy.desc("update_time")) + user: Mapped[Optional["User"]] = relationship( + "User", back_populates="addresses", order_by=sqlalchemy.desc("update_time") + ) def to_dict(self, trans): return { @@ -9926,7 +9990,7 @@ class UserAuthnzToken(Base, UserMixin, RepresentById): extra_data: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) lifetime: Mapped[Optional[int]] assoc_type: Mapped[Optional[str]] = mapped_column(VARCHAR(64)) - user = relationship("User", back_populates="social_auth") + user: Mapped[Optional["User"]] = relationship("User", back_populates="social_auth") # This static property is set at: galaxy.authnz.psa_authnz.PSAAuthnz sa_session = None @@ -10107,8 +10171,8 @@ class CloudAuthz(Base): last_activity: Mapped[Optional[datetime]] description: Mapped[Optional[str]] = mapped_column(TEXT) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) - user = relationship("User", back_populates="cloudauthz") - authn = relationship("UserAuthnzToken") + user: Mapped[Optional["User"]] = relationship("User", back_populates="cloudauthz") + authn: Mapped[Optional["UserAuthnzToken"]] = relationship("UserAuthnzToken") def __init__(self, user_id, provider, config, authn_id, description=None): self.user_id = user_id @@ -10146,14 +10210,14 @@ class Page(Base, HasTags, Dictifiable, RepresentById): importable: Mapped[Optional[bool]] = mapped_column(index=True, default=False) slug: Mapped[Optional[str]] = mapped_column(TEXT) published: Mapped[Optional[bool]] = mapped_column(index=True, default=False) - user = 
relationship("User") + user: Mapped["User"] = relationship("User") revisions = relationship( "PageRevision", cascade="all, delete-orphan", primaryjoin=(lambda: Page.id == PageRevision.page_id), # type: ignore[has-type] back_populates="page", ) - latest_revision = relationship( + latest_revision: Mapped[Optional["PageRevision"]] = relationship( "PageRevision", post_update=True, primaryjoin=(lambda: Page.latest_revision_id == PageRevision.id), # type: ignore[has-type] @@ -10222,7 +10286,7 @@ class PageRevision(Base, Dictifiable, RepresentById): title: Mapped[Optional[str]] = mapped_column(TEXT) content: Mapped[Optional[str]] = mapped_column(TEXT) content_format: Mapped[Optional[str]] = mapped_column(TrimmedString(32)) - page = relationship("Page", primaryjoin=(lambda: Page.id == PageRevision.page_id)) + page: Mapped["Page"] = relationship("Page", primaryjoin=(lambda: Page.id == PageRevision.page_id)) DEFAULT_CONTENT_FORMAT = "html" dict_element_visible_keys = ["id", "page_id", "title", "content", "content_format"] @@ -10243,7 +10307,7 @@ class PageUserShareAssociation(Base, UserShareAssociation): page_id: Mapped[Optional[int]] = mapped_column(ForeignKey("page.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user: Mapped[User] = relationship("User") - page = relationship("Page", back_populates="users_shared_with") + page: Mapped[Optional["Page"]] = relationship("Page", back_populates="users_shared_with") class Visualization(Base, HasTags, Dictifiable, RepresentById): @@ -10269,7 +10333,7 @@ class Visualization(Base, HasTags, Dictifiable, RepresentById): slug: Mapped[Optional[str]] = mapped_column(TEXT) published: Mapped[Optional[bool]] = mapped_column(default=False, index=True) - user = relationship("User") + user: Mapped["User"] = relationship("User") revisions = relationship( "VisualizationRevision", back_populates="visualization", @@ -10277,7 +10341,7 @@ class Visualization(Base, HasTags, Dictifiable, 
RepresentById): primaryjoin=(lambda: Visualization.id == VisualizationRevision.visualization_id), cascade_backrefs=False, ) - latest_revision = relationship( + latest_revision: Mapped[Optional["VisualizationRevision"]] = relationship( "VisualizationRevision", post_update=True, primaryjoin=(lambda: Visualization.latest_revision_id == VisualizationRevision.id), @@ -10373,7 +10437,7 @@ class VisualizationRevision(Base, RepresentById): title: Mapped[Optional[str]] = mapped_column(TEXT) dbkey: Mapped[Optional[str]] = mapped_column(TEXT) config: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) - visualization = relationship( + visualization: Mapped["Visualization"] = relationship( "Visualization", back_populates="revisions", primaryjoin=(lambda: Visualization.id == VisualizationRevision.visualization_id), @@ -10398,7 +10462,7 @@ class VisualizationUserShareAssociation(Base, UserShareAssociation): visualization_id: Mapped[Optional[int]] = mapped_column(ForeignKey("visualization.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user: Mapped[User] = relationship("User") - visualization = relationship("Visualization", back_populates="users_shared_with") + visualization: Mapped[Optional["Visualization"]] = relationship("Visualization", back_populates="users_shared_with") class Tag(Base, RepresentById): @@ -10410,7 +10474,7 @@ class Tag(Base, RepresentById): parent_id: Mapped[Optional[int]] = mapped_column(ForeignKey("tag.id")) name: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) children = relationship("Tag", back_populates="parent") - parent = relationship("Tag", back_populates="children", remote_side=[id]) + parent: Mapped[Optional["Tag"]] = relationship("Tag", back_populates="children", remote_side=[id]) def __str__(self): return "Tag(id=%s, type=%i, parent_id=%s, name=%s)" % (self.id, self.type or -1, self.parent_id, self.name) @@ -10446,9 +10510,9 @@ class HistoryTagAssociation(Base, 
ItemTagAssociation, RepresentById): user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) - history = relationship("History", back_populates="tags") - tag = relationship("Tag") - user = relationship("User") + history: Mapped[Optional["History"]] = relationship("History", back_populates="tags") + tag: Mapped[Optional["Tag"]] = relationship("Tag") + user: Mapped[Optional["User"]] = relationship("User") class HistoryDatasetAssociationTagAssociation(Base, ItemTagAssociation, RepresentById): @@ -10462,9 +10526,11 @@ class HistoryDatasetAssociationTagAssociation(Base, ItemTagAssociation, Represen user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) - history_dataset_association = relationship("HistoryDatasetAssociation", back_populates="tags") - tag = relationship("Tag") - user = relationship("User") + history_dataset_association: Mapped[Optional["HistoryDatasetAssociation"]] = relationship( + "HistoryDatasetAssociation", back_populates="tags" + ) + tag: Mapped[Optional["Tag"]] = relationship("Tag") + user: Mapped[Optional["User"]] = relationship("User") class LibraryDatasetDatasetAssociationTagAssociation(Base, ItemTagAssociation, RepresentById): @@ -10478,9 +10544,11 @@ class LibraryDatasetDatasetAssociationTagAssociation(Base, ItemTagAssociation, R user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) - library_dataset_dataset_association = relationship("LibraryDatasetDatasetAssociation", 
back_populates="tags") - tag = relationship("Tag") - user = relationship("User") + library_dataset_dataset_association: Mapped[Optional["LibraryDatasetDatasetAssociation"]] = relationship( + "LibraryDatasetDatasetAssociation", back_populates="tags" + ) + tag: Mapped[Optional["Tag"]] = relationship("Tag") + user: Mapped[Optional["User"]] = relationship("User") class PageTagAssociation(Base, ItemTagAssociation, RepresentById): @@ -10492,9 +10560,9 @@ class PageTagAssociation(Base, ItemTagAssociation, RepresentById): user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) - page = relationship("Page", back_populates="tags") - tag = relationship("Tag") - user = relationship("User") + page: Mapped[Optional["Page"]] = relationship("Page", back_populates="tags") + tag: Mapped[Optional["Tag"]] = relationship("Tag") + user: Mapped[Optional["User"]] = relationship("User") class WorkflowStepTagAssociation(Base, ItemTagAssociation, RepresentById): @@ -10506,9 +10574,9 @@ class WorkflowStepTagAssociation(Base, ItemTagAssociation, RepresentById): user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) - workflow_step = relationship("WorkflowStep", back_populates="tags") - tag = relationship("Tag") - user = relationship("User") + workflow_step: Mapped[Optional["WorkflowStep"]] = relationship("WorkflowStep", back_populates="tags") + tag: Mapped[Optional["Tag"]] = relationship("Tag") + user: Mapped[Optional["User"]] = relationship("User") class StoredWorkflowTagAssociation(Base, ItemTagAssociation, RepresentById): @@ -10520,9 +10588,9 @@ class StoredWorkflowTagAssociation(Base, ItemTagAssociation, 
RepresentById): user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) - stored_workflow = relationship("StoredWorkflow", back_populates="tags") - tag = relationship("Tag") - user = relationship("User") + stored_workflow: Mapped[Optional["StoredWorkflow"]] = relationship("StoredWorkflow", back_populates="tags") + tag: Mapped[Optional["Tag"]] = relationship("Tag") + user: Mapped[Optional["User"]] = relationship("User") class VisualizationTagAssociation(Base, ItemTagAssociation, RepresentById): @@ -10534,9 +10602,9 @@ class VisualizationTagAssociation(Base, ItemTagAssociation, RepresentById): user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) - visualization = relationship("Visualization", back_populates="tags") - tag = relationship("Tag") - user = relationship("User") + visualization: Mapped[Optional["Visualization"]] = relationship("Visualization", back_populates="tags") + tag: Mapped[Optional["Tag"]] = relationship("Tag") + user: Mapped[Optional["User"]] = relationship("User") class HistoryDatasetCollectionTagAssociation(Base, ItemTagAssociation, RepresentById): @@ -10550,9 +10618,11 @@ class HistoryDatasetCollectionTagAssociation(Base, ItemTagAssociation, Represent user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) - dataset_collection = relationship("HistoryDatasetCollectionAssociation", back_populates="tags") - tag = relationship("Tag") - user = relationship("User") + 
dataset_collection: Mapped[Optional["HistoryDatasetCollectionAssociation"]] = relationship( + "HistoryDatasetCollectionAssociation", back_populates="tags" + ) + tag: Mapped[Optional["Tag"]] = relationship("Tag") + user: Mapped[Optional["User"]] = relationship("User") class LibraryDatasetCollectionTagAssociation(Base, ItemTagAssociation, RepresentById): @@ -10566,9 +10636,11 @@ class LibraryDatasetCollectionTagAssociation(Base, ItemTagAssociation, Represent user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) - dataset_collection = relationship("LibraryDatasetCollectionAssociation", back_populates="tags") - tag = relationship("Tag") - user = relationship("User") + dataset_collection: Mapped[Optional["LibraryDatasetCollectionAssociation"]] = relationship( + "LibraryDatasetCollectionAssociation", back_populates="tags" + ) + tag: Mapped[Optional["Tag"]] = relationship("Tag") + user: Mapped[Optional["User"]] = relationship("User") class ToolTagAssociation(Base, ItemTagAssociation, RepresentById): @@ -10580,8 +10652,8 @@ class ToolTagAssociation(Base, ItemTagAssociation, RepresentById): user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) - tag = relationship("Tag") - user = relationship("User") + tag: Mapped[Optional["Tag"]] = relationship("Tag") + user: Mapped[Optional["User"]] = relationship("User") # Item annotation classes. 
@@ -10593,8 +10665,8 @@ class HistoryAnnotationAssociation(Base, RepresentById): history_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) annotation: Mapped[Optional[str]] = mapped_column(TEXT) - history = relationship("History", back_populates="annotations") - user = relationship("User") + history: Mapped[Optional["History"]] = relationship("History", back_populates="annotations") + user: Mapped[Optional["User"]] = relationship("User") class HistoryDatasetAssociationAnnotationAssociation(Base, RepresentById): @@ -10607,8 +10679,10 @@ class HistoryDatasetAssociationAnnotationAssociation(Base, RepresentById): ) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) annotation: Mapped[Optional[str]] = mapped_column(TEXT) - hda = relationship("HistoryDatasetAssociation", back_populates="annotations") - user = relationship("User") + hda: Mapped[Optional["HistoryDatasetAssociation"]] = relationship( + "HistoryDatasetAssociation", back_populates="annotations" + ) + user: Mapped[Optional["User"]] = relationship("User") class StoredWorkflowAnnotationAssociation(Base, RepresentById): @@ -10619,8 +10693,8 @@ class StoredWorkflowAnnotationAssociation(Base, RepresentById): stored_workflow_id: Mapped[Optional[int]] = mapped_column(ForeignKey("stored_workflow.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) annotation: Mapped[Optional[str]] = mapped_column(TEXT) - stored_workflow = relationship("StoredWorkflow", back_populates="annotations") - user = relationship("User") + stored_workflow: Mapped[Optional["StoredWorkflow"]] = relationship("StoredWorkflow", back_populates="annotations") + user: Mapped[Optional["User"]] = relationship("User") class WorkflowStepAnnotationAssociation(Base, RepresentById): @@ -10631,8 +10705,8 @@ class WorkflowStepAnnotationAssociation(Base, 
RepresentById): workflow_step_id: Mapped[Optional[int]] = mapped_column(ForeignKey("workflow_step.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) annotation: Mapped[Optional[str]] = mapped_column(TEXT) - workflow_step = relationship("WorkflowStep", back_populates="annotations") - user = relationship("User") + workflow_step: Mapped[Optional["WorkflowStep"]] = relationship("WorkflowStep", back_populates="annotations") + user: Mapped[Optional["User"]] = relationship("User") class PageAnnotationAssociation(Base, RepresentById): @@ -10643,8 +10717,8 @@ class PageAnnotationAssociation(Base, RepresentById): page_id: Mapped[Optional[int]] = mapped_column(ForeignKey("page.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) annotation: Mapped[Optional[str]] = mapped_column(TEXT) - page = relationship("Page", back_populates="annotations") - user = relationship("User") + page: Mapped[Optional["Page"]] = relationship("Page", back_populates="annotations") + user: Mapped[Optional["User"]] = relationship("User") class VisualizationAnnotationAssociation(Base, RepresentById): @@ -10655,8 +10729,8 @@ class VisualizationAnnotationAssociation(Base, RepresentById): visualization_id: Mapped[Optional[int]] = mapped_column(ForeignKey("visualization.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) annotation: Mapped[Optional[str]] = mapped_column(TEXT) - visualization = relationship("Visualization", back_populates="annotations") - user = relationship("User") + visualization: Mapped[Optional["Visualization"]] = relationship("Visualization", back_populates="annotations") + user: Mapped[Optional["User"]] = relationship("User") class HistoryDatasetCollectionAssociationAnnotationAssociation(Base, RepresentById): @@ -10668,8 +10742,10 @@ class HistoryDatasetCollectionAssociationAnnotationAssociation(Base, RepresentBy ) user_id: 
Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) annotation: Mapped[Optional[str]] = mapped_column(TEXT) - history_dataset_collection = relationship("HistoryDatasetCollectionAssociation", back_populates="annotations") - user = relationship("User") + history_dataset_collection: Mapped[Optional["HistoryDatasetCollectionAssociation"]] = relationship( + "HistoryDatasetCollectionAssociation", back_populates="annotations" + ) + user: Mapped[Optional["User"]] = relationship("User") class LibraryDatasetCollectionAnnotationAssociation(Base, RepresentById): @@ -10681,8 +10757,10 @@ class LibraryDatasetCollectionAnnotationAssociation(Base, RepresentById): ) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) annotation: Mapped[Optional[str]] = mapped_column(TEXT) - dataset_collection = relationship("LibraryDatasetCollectionAssociation", back_populates="annotations") - user = relationship("User") + dataset_collection: Mapped[Optional["LibraryDatasetCollectionAssociation"]] = relationship( + "LibraryDatasetCollectionAssociation", back_populates="annotations" + ) + user: Mapped[Optional["User"]] = relationship("User") class Vault(Base): @@ -10691,7 +10769,7 @@ class Vault(Base): key: Mapped[str] = mapped_column(Text, primary_key=True) parent_key: Mapped[Optional[str]] = mapped_column(Text, ForeignKey(key), index=True) children = relationship("Vault", back_populates="parent") - parent = relationship("Vault", back_populates="children", remote_side=[key]) + parent: Mapped[Optional["Vault"]] = relationship("Vault", back_populates="children", remote_side=[key]) value: Mapped[Optional[str]] = mapped_column(Text) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) @@ -10718,8 +10796,8 @@ class HistoryRatingAssociation(ItemRatingAssociation, RepresentById): history_id: Mapped[Optional[int]] = 
mapped_column(ForeignKey("history.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) rating: Mapped[Optional[int]] = mapped_column(index=True) - history = relationship("History", back_populates="ratings") - user = relationship("User") + history: Mapped[Optional["History"]] = relationship("History", back_populates="ratings") + user: Mapped[Optional["User"]] = relationship("User") def _set_item(self, history): add_object_to_object_session(self, history) @@ -10735,8 +10813,10 @@ class HistoryDatasetAssociationRatingAssociation(ItemRatingAssociation, Represen ) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) rating: Mapped[Optional[int]] = mapped_column(index=True) - history_dataset_association = relationship("HistoryDatasetAssociation", back_populates="ratings") - user = relationship("User") + history_dataset_association: Mapped[Optional["HistoryDatasetAssociation"]] = relationship( + "HistoryDatasetAssociation", back_populates="ratings" + ) + user: Mapped[Optional["User"]] = relationship("User") def _set_item(self, history_dataset_association): add_object_to_object_session(self, history_dataset_association) @@ -10750,8 +10830,8 @@ class StoredWorkflowRatingAssociation(ItemRatingAssociation, RepresentById): stored_workflow_id: Mapped[Optional[int]] = mapped_column(ForeignKey("stored_workflow.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) rating: Mapped[Optional[int]] = mapped_column(index=True) - stored_workflow = relationship("StoredWorkflow", back_populates="ratings") - user = relationship("User") + stored_workflow: Mapped[Optional["StoredWorkflow"]] = relationship("StoredWorkflow", back_populates="ratings") + user: Mapped[Optional["User"]] = relationship("User") def _set_item(self, stored_workflow): add_object_to_object_session(self, stored_workflow) @@ -10765,8 +10845,8 @@ class 
PageRatingAssociation(ItemRatingAssociation, RepresentById): page_id: Mapped[Optional[int]] = mapped_column(ForeignKey("page.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) rating: Mapped[Optional[int]] = mapped_column(index=True) - page = relationship("Page", back_populates="ratings") - user = relationship("User") + page: Mapped[Optional["Page"]] = relationship("Page", back_populates="ratings") + user: Mapped[Optional["User"]] = relationship("User") def _set_item(self, page): add_object_to_object_session(self, page) @@ -10780,8 +10860,8 @@ class VisualizationRatingAssociation(ItemRatingAssociation, RepresentById): visualization_id: Mapped[Optional[int]] = mapped_column(ForeignKey("visualization.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) rating: Mapped[Optional[int]] = mapped_column(index=True) - visualization = relationship("Visualization", back_populates="ratings") - user = relationship("User") + visualization: Mapped[Optional["Visualization"]] = relationship("Visualization", back_populates="ratings") + user: Mapped[Optional["User"]] = relationship("User") def _set_item(self, visualization): add_object_to_object_session(self, visualization) @@ -10797,8 +10877,10 @@ class HistoryDatasetCollectionRatingAssociation(ItemRatingAssociation, Represent ) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) rating: Mapped[Optional[int]] = mapped_column(index=True) - dataset_collection = relationship("HistoryDatasetCollectionAssociation", back_populates="ratings") - user = relationship("User") + dataset_collection: Mapped[Optional["HistoryDatasetCollectionAssociation"]] = relationship( + "HistoryDatasetCollectionAssociation", back_populates="ratings" + ) + user: Mapped[Optional["User"]] = relationship("User") def _set_item(self, dataset_collection): add_object_to_object_session(self, dataset_collection) @@ -10814,8 
+10896,10 @@ class LibraryDatasetCollectionRatingAssociation(ItemRatingAssociation, Represent ) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) rating: Mapped[Optional[int]] = mapped_column(index=True) - dataset_collection = relationship("LibraryDatasetCollectionAssociation", back_populates="ratings") - user = relationship("User") + dataset_collection: Mapped[Optional["LibraryDatasetCollectionAssociation"]] = relationship( + "LibraryDatasetCollectionAssociation", back_populates="ratings" + ) + user: Mapped[Optional["User"]] = relationship("User") def _set_item(self, dataset_collection): add_object_to_object_session(self, dataset_collection) @@ -10831,8 +10915,8 @@ class DataManagerHistoryAssociation(Base, RepresentById): update_time: Mapped[datetime] = mapped_column(index=True, default=now, onupdate=now, nullable=True) history_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) - history = relationship("History") - user = relationship("User", back_populates="data_manager_histories") + history: Mapped[Optional["History"]] = relationship("History") + user: Mapped[Optional["User"]] = relationship("User", back_populates="data_manager_histories") class DataManagerJobAssociation(Base, RepresentById): @@ -10844,7 +10928,7 @@ class DataManagerJobAssociation(Base, RepresentById): update_time: Mapped[datetime] = mapped_column(index=True, default=now, onupdate=now, nullable=True) job_id: Mapped[Optional[int]] = mapped_column(ForeignKey("job.id"), index=True) data_manager_id: Mapped[Optional[str]] = mapped_column(TEXT) - job = relationship("Job", back_populates="data_manager_association", uselist=False) + job: Mapped[Optional["Job"]] = relationship("Job", back_populates="data_manager_association", uselist=False) class UserPreference(Base, RepresentById): @@ -10872,7 +10956,7 @@ class UserAction(Base, RepresentById): 
action: Mapped[Optional[str]] = mapped_column(Unicode(255)) context: Mapped[Optional[str]] = mapped_column(Unicode(512)) params: Mapped[Optional[str]] = mapped_column(Unicode(1024)) - user = relationship("User") + user: Mapped[Optional["User"]] = relationship("User") class APIKeys(Base, RepresentById): @@ -10882,7 +10966,7 @@ class APIKeys(Base, RepresentById): create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) key: Mapped[Optional[str]] = mapped_column(TrimmedString(32), index=True, unique=True) - user = relationship("User", back_populates="api_keys") + user: Mapped[Optional["User"]] = relationship("User", back_populates="api_keys") deleted: Mapped[bool] = mapped_column(index=True, server_default=false()) From 828efec050db29a229f1f7fd36e59e2f7ccd6a60 Mon Sep 17 00:00:00 2001 From: John Davis Date: Wed, 10 Apr 2024 11:57:12 -0400 Subject: [PATCH 513/669] Add typing for the "many" side in one-to-many relationships --- lib/galaxy/model/__init__.py | 200 +++++++++++++++++++++-------------- 1 file changed, 119 insertions(+), 81 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index ed5c8c68e505..c34a1e538605 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -737,8 +737,8 @@ class User(Base, Dictifiable, RepresentById): addresses: Mapped[List["UserAddress"]] = relationship( "UserAddress", back_populates="user", order_by=lambda: desc(UserAddress.update_time), cascade_backrefs=False ) - cloudauthz = relationship("CloudAuthz", back_populates="user") - custos_auth = relationship("CustosAuthnzToken", back_populates="user") + cloudauthz: Mapped[List["CloudAuthz"]] = relationship("CloudAuthz", back_populates="user") + custos_auth: Mapped[List["CustosAuthnzToken"]] = relationship("CustosAuthnzToken", back_populates="user") default_permissions: Mapped[List["DefaultUserPermissions"]] = relationship( 
"DefaultUserPermissions", back_populates="user" ) @@ -759,7 +759,7 @@ class User(Base, Dictifiable, RepresentById): quota_source_usages: Mapped[List["UserQuotaSourceUsage"]] = relationship( "UserQuotaSourceUsage", back_populates="user" ) - social_auth = relationship("UserAuthnzToken", back_populates="user") + social_auth: Mapped[List["UserAuthnzToken"]] = relationship("UserAuthnzToken", back_populates="user") stored_workflow_menu_entries: Mapped[List["StoredWorkflowMenuEntry"]] = relationship( "StoredWorkflowMenuEntry", primaryjoin=( @@ -1419,16 +1419,22 @@ class Job(Base, JobLike, UsesCreateAndUpdateTime, Dictifiable, Serializable): ) output_dataset_collection_instances = relationship("JobToOutputDatasetCollectionAssociation", back_populates="job") output_dataset_collections = relationship("JobToImplicitOutputDatasetCollectionAssociation", back_populates="job") - post_job_actions = relationship("PostJobActionAssociation", back_populates="job", cascade_backrefs=False) + post_job_actions: Mapped[List["PostJobActionAssociation"]] = relationship( + "PostJobActionAssociation", back_populates="job", cascade_backrefs=False + ) input_library_datasets = relationship("JobToInputLibraryDatasetAssociation", back_populates="job") output_library_datasets = relationship("JobToOutputLibraryDatasetAssociation", back_populates="job") - external_output_metadata = relationship("JobExternalOutputMetadata", back_populates="job") - tasks = relationship("Task", back_populates="job") + external_output_metadata: Mapped[List["JobExternalOutputMetadata"]] = relationship( + "JobExternalOutputMetadata", back_populates="job" + ) + tasks: Mapped[List["Task"]] = relationship("Task", back_populates="job") output_datasets = relationship("JobToOutputDatasetAssociation", back_populates="job") - state_history = relationship("JobStateHistory") - text_metrics = relationship("JobMetricText") - numeric_metrics = relationship("JobMetricNumeric") - interactivetool_entry_points = 
relationship("InteractiveToolEntryPoint", back_populates="job", uselist=True) + state_history: Mapped[List["JobStateHistory"]] = relationship("JobStateHistory") + text_metrics: Mapped[List["JobMetricText"]] = relationship("JobMetricText") + numeric_metrics: Mapped[List["JobMetricNumeric"]] = relationship("JobMetricNumeric") + interactivetool_entry_points: Mapped[List["InteractiveToolEntryPoint"]] = relationship( + "InteractiveToolEntryPoint", back_populates="job", uselist=True + ) implicit_collection_jobs_association = relationship( "ImplicitCollectionJobsJobAssociation", back_populates="job", uselist=False, cascade_backrefs=False ) @@ -2154,8 +2160,8 @@ class Task(Base, JobLike, RepresentById): task_runner_external_id: Mapped[Optional[str]] = mapped_column(String(255)) prepare_input_files_cmd: Mapped[Optional[str]] = mapped_column(TEXT) job: Mapped["Job"] = relationship("Job", back_populates="tasks") - text_metrics = relationship("TaskMetricText") - numeric_metrics = relationship("TaskMetricNumeric") + text_metrics: Mapped[List["TaskMetricText"]] = relationship("TaskMetricText") + numeric_metrics: Mapped[List["TaskMetricNumeric"]] = relationship("TaskMetricNumeric") _numeric_metric = TaskMetricNumeric _text_metric = TaskMetricText @@ -2519,7 +2525,7 @@ class ImplicitCollectionJobs(Base, Serializable): id: Mapped[int] = mapped_column(primary_key=True) populated_state: Mapped[str] = mapped_column(TrimmedString(64), default="new") - jobs = relationship( + jobs: Mapped[List["ImplicitCollectionJobsJobAssociation"]] = relationship( "ImplicitCollectionJobsJobAssociation", back_populates="implicit_collection_jobs", cascade_backrefs=False ) @@ -2891,8 +2897,10 @@ class Group(Base, Dictifiable, RepresentById): update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) name: Mapped[Optional[str]] = mapped_column(String(255), index=True, unique=True) deleted: Mapped[Optional[bool]] = mapped_column(index=True, default=False) - quotas = 
relationship("GroupQuotaAssociation", back_populates="group") - roles = relationship("GroupRoleAssociation", back_populates="group", cascade_backrefs=False) + quotas: Mapped[List["GroupQuotaAssociation"]] = relationship("GroupQuotaAssociation", back_populates="group") + roles: Mapped[List["GroupRoleAssociation"]] = relationship( + "GroupRoleAssociation", back_populates="group", cascade_backrefs=False + ) users = relationship("UserGroupAssociation", back_populates="group") dict_collection_visible_keys = ["id", "name"] @@ -2943,7 +2951,9 @@ class Notification(Base, Dictifiable, RepresentById): # content should always be a dict content: Mapped[Optional[bytes]] = mapped_column(DoubleEncodedJsonType) - user_notification_associations = relationship("UserNotificationAssociation", back_populates="notification") + user_notification_associations: Mapped[List["UserNotificationAssociation"]] = relationship( + "UserNotificationAssociation", back_populates="notification" + ) def __init__(self, source: str, category: str, variant: str, content): self.source = source @@ -3043,16 +3053,16 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable archived: Mapped[Optional[bool]] = mapped_column(index=True, default=False, server_default=false()) archive_export_id: Mapped[Optional[int]] = mapped_column(ForeignKey("store_export_association.id"), default=None) - datasets = relationship( + datasets: Mapped[List["HistoryDatasetAssociation"]] = relationship( "HistoryDatasetAssociation", back_populates="history", cascade_backrefs=False, order_by=lambda: asc(HistoryDatasetAssociation.hid) # type: ignore[has-type] ) - exports = relationship( + exports: Mapped[List["JobExportHistoryArchive"]] = relationship( "JobExportHistoryArchive", back_populates="history", primaryjoin=lambda: JobExportHistoryArchive.history_id == History.id, order_by=lambda: desc(JobExportHistoryArchive.id), ) - active_datasets = relationship( + active_datasets: 
Mapped[List["HistoryDatasetAssociation"]] = relationship( "HistoryDatasetAssociation", primaryjoin=( lambda: and_( @@ -3063,8 +3073,10 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable order_by=lambda: asc(HistoryDatasetAssociation.hid), # type: ignore[has-type] viewonly=True, ) - dataset_collections = relationship("HistoryDatasetCollectionAssociation", back_populates="history") - active_dataset_collections = relationship( + dataset_collections: Mapped[List["HistoryDatasetCollectionAssociation"]] = relationship( + "HistoryDatasetCollectionAssociation", back_populates="history" + ) + active_dataset_collections: Mapped[List["HistoryDatasetCollectionAssociation"]] = relationship( "HistoryDatasetCollectionAssociation", primaryjoin=( lambda: ( @@ -3077,7 +3089,7 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable order_by=lambda: asc(HistoryDatasetCollectionAssociation.hid), # type: ignore[has-type] viewonly=True, ) - visible_datasets = relationship( + visible_datasets: Mapped[List["HistoryDatasetAssociation"]] = relationship( "HistoryDatasetAssociation", primaryjoin=( lambda: and_( @@ -3089,7 +3101,7 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable order_by=lambda: asc(HistoryDatasetAssociation.hid), # type: ignore[has-type] viewonly=True, ) - visible_dataset_collections = relationship( + visible_dataset_collections: Mapped[List["HistoryDatasetCollectionAssociation"]] = relationship( "HistoryDatasetCollectionAssociation", primaryjoin=( lambda: and_( @@ -3104,20 +3116,26 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable tags: Mapped[List["HistoryTagAssociation"]] = relationship( "HistoryTagAssociation", order_by=lambda: HistoryTagAssociation.id, back_populates="history" ) - annotations = relationship( + annotations: Mapped[List["HistoryAnnotationAssociation"]] = relationship( "HistoryAnnotationAssociation", order_by=lambda: 
HistoryAnnotationAssociation.id, back_populates="history" ) - ratings = relationship( + ratings: Mapped[List["HistoryRatingAssociation"]] = relationship( "HistoryRatingAssociation", order_by=lambda: HistoryRatingAssociation.id, # type: ignore[has-type] back_populates="history", ) - default_permissions = relationship("DefaultHistoryPermissions", back_populates="history") - users_shared_with = relationship("HistoryUserShareAssociation", back_populates="history") + default_permissions: Mapped[List["DefaultHistoryPermissions"]] = relationship( + "DefaultHistoryPermissions", back_populates="history" + ) + users_shared_with: Mapped[List["HistoryUserShareAssociation"]] = relationship( + "HistoryUserShareAssociation", back_populates="history" + ) galaxy_sessions = relationship("GalaxySessionToHistoryAssociation", back_populates="history") - workflow_invocations = relationship("WorkflowInvocation", back_populates="history", cascade_backrefs=False) + workflow_invocations: Mapped[List["WorkflowInvocation"]] = relationship( + "WorkflowInvocation", back_populates="history", cascade_backrefs=False + ) user: Mapped[Optional["User"]] = relationship("User", back_populates="histories") - jobs = relationship("Job", back_populates="history", cascade_backrefs=False) + jobs: Mapped[List["Job"]] = relationship("Job", back_populates="history", cascade_backrefs=False) update_time = column_property( select(func.max(HistoryAudit.update_time)).where(HistoryAudit.history_id == id).scalar_subquery(), @@ -3659,9 +3677,9 @@ class Role(Base, Dictifiable, RepresentById): description: Mapped[Optional[str]] = mapped_column(TEXT) type: Mapped[Optional[str]] = mapped_column(String(40), index=True) deleted: Mapped[Optional[bool]] = mapped_column(index=True, default=False) - dataset_actions = relationship("DatasetPermissions", back_populates="role") - groups = relationship("GroupRoleAssociation", back_populates="role") - users = relationship("UserRoleAssociation", back_populates="role") + 
dataset_actions: Mapped[List["DatasetPermissions"]] = relationship("DatasetPermissions", back_populates="role") + groups: Mapped[List["GroupRoleAssociation"]] = relationship("GroupRoleAssociation", back_populates="role") + users: Mapped[List["UserRoleAssociation"]] = relationship("UserRoleAssociation", back_populates="role") dict_collection_visible_keys = ["id", "name"] dict_element_visible_keys = ["id", "name", "description", "type"] @@ -3747,8 +3765,8 @@ class Quota(Base, Dictifiable, RepresentById): deleted: Mapped[Optional[bool]] = mapped_column(index=True, default=False) quota_source_label: Mapped[Optional[str]] = mapped_column(String(32), default=None) default = relationship("DefaultQuotaAssociation", back_populates="quota", cascade_backrefs=False) - groups = relationship("GroupQuotaAssociation", back_populates="quota") - users = relationship("UserQuotaAssociation", back_populates="quota") + groups: Mapped[List["GroupQuotaAssociation"]] = relationship("GroupQuotaAssociation", back_populates="quota") + users: Mapped[List["UserQuotaAssociation"]] = relationship("UserQuotaAssociation", back_populates="quota") dict_collection_visible_keys = ["id", "name", "quota_source_label"] dict_element_visible_keys = [ @@ -3993,9 +4011,9 @@ class Dataset(Base, StorableObject, Serializable): total_size: Mapped[Optional[Decimal]] = mapped_column(Numeric(15, 0)) uuid: Mapped[Optional[Union[UUID, str]]] = mapped_column(UUIDType()) - actions = relationship("DatasetPermissions", back_populates="dataset") + actions: Mapped[List["DatasetPermissions"]] = relationship("DatasetPermissions", back_populates="dataset") job: Mapped[Optional["Job"]] = relationship(Job, primaryjoin=(lambda: Dataset.job_id == Job.id)) - active_history_associations = relationship( + active_history_associations: Mapped[List["HistoryDatasetAssociation"]] = relationship( "HistoryDatasetAssociation", primaryjoin=( lambda: and_( @@ -4006,7 +4024,7 @@ class Dataset(Base, StorableObject, Serializable): ), 
viewonly=True, ) - purged_history_associations = relationship( + purged_history_associations: Mapped[List["HistoryDatasetAssociation"]] = relationship( "HistoryDatasetAssociation", primaryjoin=( lambda: and_( @@ -4016,7 +4034,7 @@ class Dataset(Base, StorableObject, Serializable): ), viewonly=True, ) - active_library_associations = relationship( + active_library_associations: Mapped[List["LibraryDatasetDatasetAssociation"]] = relationship( "LibraryDatasetDatasetAssociation", primaryjoin=( lambda: and_( @@ -4026,10 +4044,12 @@ class Dataset(Base, StorableObject, Serializable): ), viewonly=True, ) - hashes = relationship("DatasetHash", back_populates="dataset", cascade_backrefs=False) - sources = relationship("DatasetSource", back_populates="dataset") - history_associations = relationship("HistoryDatasetAssociation", back_populates="dataset", cascade_backrefs=False) - library_associations = relationship( + hashes: Mapped[List["DatasetHash"]] = relationship("DatasetHash", back_populates="dataset", cascade_backrefs=False) + sources: Mapped[List["DatasetSource"]] = relationship("DatasetSource", back_populates="dataset") + history_associations: Mapped[List["HistoryDatasetAssociation"]] = relationship( + "HistoryDatasetAssociation", back_populates="dataset", cascade_backrefs=False + ) + library_associations: Mapped[List["LibraryDatasetDatasetAssociation"]] = relationship( "LibraryDatasetDatasetAssociation", primaryjoin=(lambda: LibraryDatasetDatasetAssociation.table.c.dataset_id == Dataset.id), back_populates="dataset", @@ -4363,7 +4383,7 @@ class DatasetSource(Base, Dictifiable, Serializable): extra_files_path: Mapped[Optional[str]] = mapped_column(TEXT) transform: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) dataset: Mapped[Optional["Dataset"]] = relationship("Dataset", back_populates="sources") - hashes = relationship("DatasetSourceHash", back_populates="source") + hashes: Mapped[List["DatasetSourceHash"]] = relationship("DatasetSourceHash", 
back_populates="source") dict_collection_visible_keys = ["id", "source_uri", "extra_files_path", "transform"] dict_element_visible_keys = [ "id", @@ -5562,7 +5582,9 @@ class Library(Base, Dictifiable, HasName, Serializable): description: Mapped[Optional[str]] = mapped_column(TEXT) synopsis: Mapped[Optional[str]] = mapped_column(TEXT) root_folder = relationship("LibraryFolder", back_populates="library_root") - actions = relationship("LibraryPermissions", back_populates="library", cascade_backrefs=False) + actions: Mapped[List["LibraryPermissions"]] = relationship( + "LibraryPermissions", back_populates="library", cascade_backrefs=False + ) permitted_actions = get_permitted_actions(filter="LIBRARY") dict_collection_visible_keys = ["id", "name"] @@ -5642,7 +5664,7 @@ class LibraryFolder(Base, Dictifiable, HasName, Serializable): purged: Mapped[Optional[bool]] = mapped_column(index=True, default=False) genome_build: Mapped[Optional[str]] = mapped_column(TrimmedString(40)) - folders = relationship( + folders: Mapped[List["LibraryFolder"]] = relationship( "LibraryFolder", primaryjoin=(lambda: LibraryFolder.id == LibraryFolder.parent_id), order_by=asc(name), @@ -5652,7 +5674,7 @@ class LibraryFolder(Base, Dictifiable, HasName, Serializable): "LibraryFolder", back_populates="folders", remote_side=[id] ) - active_folders = relationship( + active_folders: Mapped[List["LibraryFolder"]] = relationship( "LibraryFolder", primaryjoin=("and_(LibraryFolder.parent_id == LibraryFolder.id, not_(LibraryFolder.deleted))"), order_by=asc(name), @@ -5663,7 +5685,7 @@ class LibraryFolder(Base, Dictifiable, HasName, Serializable): viewonly=True, ) - datasets = relationship( + datasets: Mapped[List["LibraryDataset"]] = relationship( "LibraryDataset", primaryjoin=( lambda: LibraryDataset.folder_id == LibraryFolder.id @@ -5673,7 +5695,7 @@ class LibraryFolder(Base, Dictifiable, HasName, Serializable): viewonly=True, ) - active_datasets = relationship( + active_datasets: 
Mapped[List["LibraryDataset"]] = relationship( "LibraryDataset", primaryjoin=( "and_(LibraryDataset.folder_id == LibraryFolder.id, not_(LibraryDataset.deleted), LibraryDataset.library_dataset_dataset_association_id.isnot(None))" @@ -5683,7 +5705,9 @@ class LibraryFolder(Base, Dictifiable, HasName, Serializable): ) library_root = relationship("Library", back_populates="root_folder") - actions = relationship("LibraryFolderPermissions", back_populates="folder", cascade_backrefs=False) + actions: Mapped[List["LibraryFolderPermissions"]] = relationship( + "LibraryFolderPermissions", back_populates="folder", cascade_backrefs=False + ) dict_element_visible_keys = [ "id", @@ -5796,7 +5820,7 @@ class LibraryDataset(Base, Serializable): library_dataset_dataset_association = relationship( "LibraryDatasetDatasetAssociation", foreign_keys=library_dataset_dataset_association_id, post_update=True ) - expired_datasets = relationship( + expired_datasets: Mapped[List["LibraryDatasetDatasetAssociation"]] = relationship( "LibraryDatasetDatasetAssociation", foreign_keys=[id, library_dataset_dataset_association_id], primaryjoin=( @@ -5806,7 +5830,9 @@ class LibraryDataset(Base, Serializable): viewonly=True, uselist=True, ) - actions = relationship("LibraryDatasetPermissions", back_populates="library_dataset", cascade_backrefs=False) + actions: Mapped[List["LibraryDatasetPermissions"]] = relationship( + "LibraryDatasetPermissions", back_populates="library_dataset", cascade_backrefs=False + ) # This class acts as a proxy to the currently selected LDDA upload_options = [ @@ -6074,7 +6100,9 @@ class ExtendedMetadata(Base, RepresentById): id: Mapped[int] = mapped_column(primary_key=True) data: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) - children = relationship("ExtendedMetadataIndex", back_populates="extended_metadata") + children: Mapped[List["ExtendedMetadataIndex"]] = relationship( + "ExtendedMetadataIndex", back_populates="extended_metadata" + ) def __init__(self, data): 
self.data = data @@ -6326,7 +6354,7 @@ class DatasetCollection(Base, Dictifiable, UsesAnnotations, Serializable): create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) - elements = relationship( + elements: Mapped[List["DatasetCollectionElement"]] = relationship( "DatasetCollectionElement", primaryjoin=(lambda: DatasetCollection.id == DatasetCollectionElement.dataset_collection_id), # type: ignore[has-type] back_populates="collection", @@ -6808,7 +6836,7 @@ class HistoryDatasetCollectionAssociation( "HistoryDatasetCollectionAssociation", back_populates="copied_from_history_dataset_collection_association", ) - implicit_input_collections = relationship( + implicit_input_collections: Mapped[List["ImplicitlyCreatedDatasetCollectionInput"]] = relationship( "ImplicitlyCreatedDatasetCollectionInput", primaryjoin=( lambda: HistoryDatasetCollectionAssociation.id @@ -6826,17 +6854,19 @@ class HistoryDatasetCollectionAssociation( order_by=lambda: HistoryDatasetCollectionTagAssociation.id, back_populates="dataset_collection", ) - annotations = relationship( + annotations: Mapped[List["HistoryDatasetCollectionAssociationAnnotationAssociation"]] = relationship( "HistoryDatasetCollectionAssociationAnnotationAssociation", order_by=lambda: HistoryDatasetCollectionAssociationAnnotationAssociation.id, back_populates="history_dataset_collection", ) - ratings = relationship( + ratings: Mapped[List["HistoryDatasetCollectionRatingAssociation"]] = relationship( "HistoryDatasetCollectionRatingAssociation", order_by=lambda: HistoryDatasetCollectionRatingAssociation.id, # type: ignore[has-type] back_populates="dataset_collection", ) - creating_job_associations = relationship("JobToOutputDatasetCollectionAssociation", viewonly=True) + creating_job_associations: Mapped[List["JobToOutputDatasetCollectionAssociation"]] = relationship( + "JobToOutputDatasetCollectionAssociation", 
viewonly=True + ) dict_dbkeysandextensions_visible_keys = ["dbkeys", "extensions"] editable_keys = ("name", "deleted", "visible") @@ -7184,12 +7214,12 @@ class LibraryDatasetCollectionAssociation(Base, DatasetCollectionInstance, Repre order_by=lambda: LibraryDatasetCollectionTagAssociation.id, back_populates="dataset_collection", ) - annotations = relationship( + annotations: Mapped[List["LibraryDatasetCollectionAnnotationAssociation"]] = relationship( "LibraryDatasetCollectionAnnotationAssociation", order_by=lambda: LibraryDatasetCollectionAnnotationAssociation.id, back_populates="dataset_collection", ) - ratings = relationship( + ratings: Mapped[List["LibraryDatasetCollectionRatingAssociation"]] = relationship( "LibraryDatasetCollectionRatingAssociation", order_by=lambda: LibraryDatasetCollectionRatingAssociation.id, # type: ignore[has-type] back_populates="dataset_collection", @@ -7437,7 +7467,7 @@ class GalaxySession(Base, RepresentById): disk_usage: Mapped[Optional[Decimal]] = mapped_column(Numeric(15, 0), index=True) last_action: Mapped[Optional[datetime]] current_history: Mapped[Optional["History"]] = relationship("History") - histories = relationship( + histories: Mapped[List["GalaxySessionToHistoryAssociation"]] = relationship( "GalaxySessionToHistoryAssociation", back_populates="galaxy_session", cascade_backrefs=False ) user: Mapped[Optional["User"]] = relationship("User", back_populates="galaxy_sessions") @@ -7511,7 +7541,7 @@ class StoredWorkflow(Base, HasTags, Dictifiable, RepresentById): user: Mapped["User"] = relationship( "User", primaryjoin=(lambda: User.id == StoredWorkflow.user_id), back_populates="stored_workflows" ) - workflows = relationship( + workflows: Mapped[List["Workflow"]] = relationship( "Workflow", back_populates="stored_workflow", cascade="all, delete-orphan", @@ -7541,17 +7571,19 @@ class StoredWorkflow(Base, HasTags, Dictifiable, RepresentById): viewonly=True, order_by=lambda: StoredWorkflowTagAssociation.id, ) - annotations = 
relationship( + annotations: Mapped[List["StoredWorkflowAnnotationAssociation"]] = relationship( "StoredWorkflowAnnotationAssociation", order_by=lambda: StoredWorkflowAnnotationAssociation.id, back_populates="stored_workflow", ) - ratings = relationship( + ratings: Mapped[List["StoredWorkflowRatingAssociation"]] = relationship( "StoredWorkflowRatingAssociation", order_by=lambda: StoredWorkflowRatingAssociation.id, # type: ignore[has-type] back_populates="stored_workflow", ) - users_shared_with = relationship("StoredWorkflowUserShareAssociation", back_populates="stored_workflow") + users_shared_with: Mapped[List["StoredWorkflowUserShareAssociation"]] = relationship( + "StoredWorkflowUserShareAssociation", back_populates="stored_workflow" + ) average_rating = None @@ -7690,7 +7722,7 @@ class Workflow(Base, Dictifiable, RepresentById): cascade="all, delete-orphan", lazy=False, ) - comments = relationship( + comments: Mapped[List["WorkflowComment"]] = relationship( "WorkflowComment", back_populates="workflow", primaryjoin=(lambda: Workflow.id == WorkflowComment.workflow_id), # type: ignore[has-type] @@ -7887,15 +7919,17 @@ class WorkflowStep(Base, RepresentById): tags: Mapped[List["WorkflowStepTagAssociation"]] = relationship( "WorkflowStepTagAssociation", order_by=lambda: WorkflowStepTagAssociation.id, back_populates="workflow_step" ) - annotations = relationship( + annotations: Mapped[List["WorkflowStepAnnotationAssociation"]] = relationship( "WorkflowStepAnnotationAssociation", order_by=lambda: WorkflowStepAnnotationAssociation.id, back_populates="workflow_step", ) post_job_actions = relationship("PostJobAction", back_populates="workflow_step", cascade_backrefs=False) inputs = relationship("WorkflowStepInput", back_populates="workflow_step") - workflow_outputs = relationship("WorkflowOutput", back_populates="workflow_step", cascade_backrefs=False) - output_connections = relationship( + workflow_outputs: Mapped[List["WorkflowOutput"]] = relationship( + 
"WorkflowOutput", back_populates="workflow_step", cascade_backrefs=False + ) + output_connections: Mapped[List["WorkflowStepConnection"]] = relationship( "WorkflowStepConnection", primaryjoin=(lambda: WorkflowStepConnection.output_step_id == WorkflowStep.id) ) workflow: Mapped["Workflow"] = relationship( @@ -8184,7 +8218,7 @@ class WorkflowStepInput(Base, RepresentById): cascade="all", primaryjoin=(lambda: WorkflowStepInput.workflow_step_id == WorkflowStep.id), ) - connections = relationship( + connections: Mapped[List["WorkflowStepConnection"]] = relationship( "WorkflowStepConnection", back_populates="input_step_input", primaryjoin=(lambda: WorkflowStepConnection.input_step_input_id == WorkflowStepInput.id), @@ -8337,7 +8371,7 @@ class WorkflowComment(Base, RepresentById): remote_side=[id], ) - child_comments = relationship( + child_comments: Mapped[List["WorkflowComment"]] = relationship( "WorkflowComment", primaryjoin=(lambda: WorkflowComment.parent_comment_id == WorkflowComment.id), back_populates="parent_comment", @@ -9734,7 +9768,7 @@ class FormDefinitionCurrent(Base, RepresentById): update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) latest_form_id: Mapped[Optional[int]] = mapped_column(ForeignKey("form_definition.id"), index=True) deleted: Mapped[Optional[bool]] = mapped_column(index=True, default=False) - forms = relationship( + forms: Mapped[List["FormDefinition"]] = relationship( "FormDefinition", back_populates="form_definition_current", cascade="all, delete-orphan", @@ -10211,7 +10245,7 @@ class Page(Base, HasTags, Dictifiable, RepresentById): slug: Mapped[Optional[str]] = mapped_column(TEXT) published: Mapped[Optional[bool]] = mapped_column(index=True, default=False) user: Mapped["User"] = relationship("User") - revisions = relationship( + revisions: Mapped[List["PageRevision"]] = relationship( "PageRevision", cascade="all, delete-orphan", primaryjoin=(lambda: Page.id == PageRevision.page_id), # type: 
ignore[has-type] @@ -10226,15 +10260,17 @@ class Page(Base, HasTags, Dictifiable, RepresentById): tags: Mapped[List["PageTagAssociation"]] = relationship( "PageTagAssociation", order_by=lambda: PageTagAssociation.id, back_populates="page" ) - annotations = relationship( + annotations: Mapped[List["PageAnnotationAssociation"]] = relationship( "PageAnnotationAssociation", order_by=lambda: PageAnnotationAssociation.id, back_populates="page" ) - ratings = relationship( + ratings: Mapped[List["PageRatingAssociation"]] = relationship( "PageRatingAssociation", order_by=lambda: PageRatingAssociation.id, # type: ignore[has-type] back_populates="page", ) - users_shared_with = relationship("PageUserShareAssociation", back_populates="page") + users_shared_with: Mapped[List["PageUserShareAssociation"]] = relationship( + "PageUserShareAssociation", back_populates="page" + ) average_rating = None @@ -10334,7 +10370,7 @@ class Visualization(Base, HasTags, Dictifiable, RepresentById): published: Mapped[Optional[bool]] = mapped_column(default=False, index=True) user: Mapped["User"] = relationship("User") - revisions = relationship( + revisions: Mapped[List["VisualizationRevision"]] = relationship( "VisualizationRevision", back_populates="visualization", cascade="all, delete-orphan", @@ -10350,17 +10386,19 @@ class Visualization(Base, HasTags, Dictifiable, RepresentById): tags: Mapped[List["VisualizationTagAssociation"]] = relationship( "VisualizationTagAssociation", order_by=lambda: VisualizationTagAssociation.id, back_populates="visualization" ) - annotations = relationship( + annotations: Mapped[List["VisualizationAnnotationAssociation"]] = relationship( "VisualizationAnnotationAssociation", order_by=lambda: VisualizationAnnotationAssociation.id, back_populates="visualization", ) - ratings = relationship( + ratings: Mapped[List["VisualizationRatingAssociation"]] = relationship( "VisualizationRatingAssociation", order_by=lambda: VisualizationRatingAssociation.id, # type: 
ignore[has-type] back_populates="visualization", ) - users_shared_with = relationship("VisualizationUserShareAssociation", back_populates="visualization") + users_shared_with: Mapped[List["VisualizationUserShareAssociation"]] = relationship( + "VisualizationUserShareAssociation", back_populates="visualization" + ) average_rating = None @@ -10473,7 +10511,7 @@ class Tag(Base, RepresentById): type: Mapped[Optional[int]] parent_id: Mapped[Optional[int]] = mapped_column(ForeignKey("tag.id")) name: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) - children = relationship("Tag", back_populates="parent") + children: Mapped[List["Tag"]] = relationship("Tag", back_populates="parent") parent: Mapped[Optional["Tag"]] = relationship("Tag", back_populates="children", remote_side=[id]) def __str__(self): @@ -10768,7 +10806,7 @@ class Vault(Base): key: Mapped[str] = mapped_column(Text, primary_key=True) parent_key: Mapped[Optional[str]] = mapped_column(Text, ForeignKey(key), index=True) - children = relationship("Vault", back_populates="parent") + children: Mapped[List["Vault"]] = relationship("Vault", back_populates="parent") parent: Mapped[Optional["Vault"]] = relationship("Vault", back_populates="children", remote_side=[key]) value: Mapped[Optional[str]] = mapped_column(Text) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) From dc8e8e7569e698d4d09ed698561ccd3a1f29e427 Mon Sep 17 00:00:00 2001 From: John Davis Date: Wed, 10 Apr 2024 16:22:07 -0400 Subject: [PATCH 514/669] Drop redundant model class from relationship call --- lib/galaxy/model/__init__.py | 694 ++++++++++++++--------------------- 1 file changed, 269 insertions(+), 425 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index c34a1e538605..63a495d5a414 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -735,33 +735,27 @@ class User(Base, Dictifiable, RepresentById): activation_token: Mapped[Optional[str]] = 
mapped_column(TrimmedString(64), index=True) addresses: Mapped[List["UserAddress"]] = relationship( - "UserAddress", back_populates="user", order_by=lambda: desc(UserAddress.update_time), cascade_backrefs=False + back_populates="user", order_by=lambda: desc(UserAddress.update_time), cascade_backrefs=False ) - cloudauthz: Mapped[List["CloudAuthz"]] = relationship("CloudAuthz", back_populates="user") - custos_auth: Mapped[List["CustosAuthnzToken"]] = relationship("CustosAuthnzToken", back_populates="user") - default_permissions: Mapped[List["DefaultUserPermissions"]] = relationship( - "DefaultUserPermissions", back_populates="user" - ) - groups: Mapped[List["UserGroupAssociation"]] = relationship("UserGroupAssociation", back_populates="user") + cloudauthz: Mapped[List["CloudAuthz"]] = relationship(back_populates="user") + custos_auth: Mapped[List["CustosAuthnzToken"]] = relationship(back_populates="user") + default_permissions: Mapped[List["DefaultUserPermissions"]] = relationship(back_populates="user") + groups: Mapped[List["UserGroupAssociation"]] = relationship(back_populates="user") histories: Mapped[List["History"]] = relationship( - "History", back_populates="user", order_by=lambda: desc(History.update_time), cascade_backrefs=False # type: ignore[has-type] + back_populates="user", order_by=lambda: desc(History.update_time), cascade_backrefs=False # type: ignore[has-type] ) active_histories: Mapped[List["History"]] = relationship( - "History", primaryjoin=(lambda: (History.user_id == User.id) & (not_(History.deleted)) & (not_(History.archived))), # type: ignore[has-type] viewonly=True, order_by=lambda: desc(History.update_time), # type: ignore[has-type] ) galaxy_sessions: Mapped[List["GalaxySession"]] = relationship( - "GalaxySession", back_populates="user", order_by=lambda: desc(GalaxySession.update_time), cascade_backrefs=False # type: ignore[has-type] - ) - quotas: Mapped[List["UserQuotaAssociation"]] = relationship("UserQuotaAssociation", 
back_populates="user") - quota_source_usages: Mapped[List["UserQuotaSourceUsage"]] = relationship( - "UserQuotaSourceUsage", back_populates="user" + back_populates="user", order_by=lambda: desc(GalaxySession.update_time), cascade_backrefs=False # type: ignore[has-type] ) - social_auth: Mapped[List["UserAuthnzToken"]] = relationship("UserAuthnzToken", back_populates="user") + quotas: Mapped[List["UserQuotaAssociation"]] = relationship(back_populates="user") + quota_source_usages: Mapped[List["UserQuotaSourceUsage"]] = relationship(back_populates="user") + social_auth: Mapped[List["UserAuthnzToken"]] = relationship(back_populates="user") stored_workflow_menu_entries: Mapped[List["StoredWorkflowMenuEntry"]] = relationship( - "StoredWorkflowMenuEntry", primaryjoin=( lambda: (StoredWorkflowMenuEntry.user_id == User.id) & (StoredWorkflowMenuEntry.stored_workflow_id == StoredWorkflow.id) # type: ignore[has-type] @@ -771,15 +765,12 @@ class User(Base, Dictifiable, RepresentById): cascade="all, delete-orphan", collection_class=ordering_list("order_index"), ) - _preferences: Mapped[Dict[str, "UserPreference"]] = relationship( - "UserPreference", collection_class=attribute_keyed_dict("name") - ) + _preferences: Mapped[Dict[str, "UserPreference"]] = relationship(collection_class=attribute_keyed_dict("name")) values: Mapped[List["FormValues"]] = relationship( - "FormValues", primaryjoin=(lambda: User.form_values_id == FormValues.id) # type: ignore[has-type] + primaryjoin=(lambda: User.form_values_id == FormValues.id) # type: ignore[has-type] ) # Add type hint (will this work w/SA?) 
api_keys: Mapped[List["APIKeys"]] = relationship( - "APIKeys", back_populates="user", order_by=lambda: desc(APIKeys.create_time), primaryjoin=( @@ -789,21 +780,17 @@ class User(Base, Dictifiable, RepresentById): ) ), ) - data_manager_histories: Mapped[List["DataManagerHistoryAssociation"]] = relationship( - "DataManagerHistoryAssociation", back_populates="user" - ) - roles: Mapped[List["UserRoleAssociation"]] = relationship("UserRoleAssociation", back_populates="user") + data_manager_histories: Mapped[List["DataManagerHistoryAssociation"]] = relationship(back_populates="user") + roles: Mapped[List["UserRoleAssociation"]] = relationship(back_populates="user") stored_workflows: Mapped[List["StoredWorkflow"]] = relationship( - "StoredWorkflow", back_populates="user", primaryjoin=(lambda: User.id == StoredWorkflow.user_id), # type: ignore[has-type] cascade_backrefs=False, ) all_notifications: Mapped[List["UserNotificationAssociation"]] = relationship( - "UserNotificationAssociation", back_populates="user", cascade_backrefs=False + back_populates="user", cascade_backrefs=False ) non_private_roles: Mapped[List["UserRoleAssociation"]] = relationship( - "UserRoleAssociation", viewonly=True, primaryjoin=( lambda: (User.id == UserRoleAssociation.user_id) # type: ignore[has-type] @@ -1269,7 +1256,7 @@ class PasswordResetToken(Base): token: Mapped[str] = mapped_column(String(32), primary_key=True, unique=True, index=True) expiration_time: Mapped[Optional[datetime]] user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) - user: Mapped[Optional["User"]] = relationship("User") + user: Mapped[Optional["User"]] = relationship() def __init__(self, user, token=None): if token: @@ -1407,10 +1394,10 @@ class Job(Base, JobLike, UsesCreateAndUpdateTime, Dictifiable, Serializable): preferred_object_store_id: Mapped[Optional[str]] = mapped_column(String(255)) object_store_id_overrides: Mapped[Optional[bytes]] = mapped_column(JSONType) - user: 
Mapped[Optional["User"]] = relationship("User") - galaxy_session: Mapped[Optional["GalaxySession"]] = relationship("GalaxySession") - history: Mapped[Optional["History"]] = relationship("History", back_populates="jobs") - library_folder: Mapped[Optional["LibraryFolder"]] = relationship("LibraryFolder") + user: Mapped[Optional["User"]] = relationship() + galaxy_session: Mapped[Optional["GalaxySession"]] = relationship() + history: Mapped[Optional["History"]] = relationship(back_populates="jobs") + library_folder: Mapped[Optional["LibraryFolder"]] = relationship() parameters = relationship("JobParameter") input_datasets = relationship("JobToInputDatasetAssociation", back_populates="job") input_dataset_collections = relationship("JobToInputDatasetCollectionAssociation", back_populates="job") @@ -1420,20 +1407,18 @@ class Job(Base, JobLike, UsesCreateAndUpdateTime, Dictifiable, Serializable): output_dataset_collection_instances = relationship("JobToOutputDatasetCollectionAssociation", back_populates="job") output_dataset_collections = relationship("JobToImplicitOutputDatasetCollectionAssociation", back_populates="job") post_job_actions: Mapped[List["PostJobActionAssociation"]] = relationship( - "PostJobActionAssociation", back_populates="job", cascade_backrefs=False + back_populates="job", cascade_backrefs=False ) input_library_datasets = relationship("JobToInputLibraryDatasetAssociation", back_populates="job") output_library_datasets = relationship("JobToOutputLibraryDatasetAssociation", back_populates="job") - external_output_metadata: Mapped[List["JobExternalOutputMetadata"]] = relationship( - "JobExternalOutputMetadata", back_populates="job" - ) - tasks: Mapped[List["Task"]] = relationship("Task", back_populates="job") + external_output_metadata: Mapped[List["JobExternalOutputMetadata"]] = relationship(back_populates="job") + tasks: Mapped[List["Task"]] = relationship(back_populates="job") output_datasets = relationship("JobToOutputDatasetAssociation", 
back_populates="job") - state_history: Mapped[List["JobStateHistory"]] = relationship("JobStateHistory") - text_metrics: Mapped[List["JobMetricText"]] = relationship("JobMetricText") - numeric_metrics: Mapped[List["JobMetricNumeric"]] = relationship("JobMetricNumeric") + state_history: Mapped[List["JobStateHistory"]] = relationship() + text_metrics: Mapped[List["JobMetricText"]] = relationship() + numeric_metrics: Mapped[List["JobMetricNumeric"]] = relationship() interactivetool_entry_points: Mapped[List["InteractiveToolEntryPoint"]] = relationship( - "InteractiveToolEntryPoint", back_populates="job", uselist=True + back_populates="job", uselist=True ) implicit_collection_jobs_association = relationship( "ImplicitCollectionJobsJobAssociation", back_populates="job", uselist=False, cascade_backrefs=False @@ -2159,9 +2144,9 @@ class Task(Base, JobLike, RepresentById): task_runner_name: Mapped[Optional[str]] = mapped_column(String(255)) task_runner_external_id: Mapped[Optional[str]] = mapped_column(String(255)) prepare_input_files_cmd: Mapped[Optional[str]] = mapped_column(TEXT) - job: Mapped["Job"] = relationship("Job", back_populates="tasks") - text_metrics: Mapped[List["TaskMetricText"]] = relationship("TaskMetricText") - numeric_metrics: Mapped[List["TaskMetricNumeric"]] = relationship("TaskMetricNumeric") + job: Mapped["Job"] = relationship(back_populates="tasks") + text_metrics: Mapped[List["TaskMetricText"]] = relationship() + numeric_metrics: Mapped[List["TaskMetricNumeric"]] = relationship() _numeric_metric = TaskMetricNumeric _text_metric = TaskMetricText @@ -2330,9 +2315,9 @@ class JobToInputDatasetAssociation(Base, RepresentById): dataset_version: Mapped[Optional[int]] name: Mapped[Optional[str]] = mapped_column(String(255)) dataset: Mapped[Optional["HistoryDatasetAssociation"]] = relationship( - "HistoryDatasetAssociation", lazy="joined", back_populates="dependent_jobs" + lazy="joined", back_populates="dependent_jobs" ) - job: Mapped[Optional["Job"]] = 
relationship("Job", back_populates="input_datasets") + job: Mapped[Optional["Job"]] = relationship(back_populates="input_datasets") def __init__(self, name, dataset): self.name = name @@ -2349,9 +2334,9 @@ class JobToOutputDatasetAssociation(Base, RepresentById): dataset_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history_dataset_association.id"), index=True) name: Mapped[Optional[str]] = mapped_column(String(255)) dataset: Mapped[Optional["HistoryDatasetAssociation"]] = relationship( - "HistoryDatasetAssociation", lazy="joined", back_populates="creating_job_associations" + lazy="joined", back_populates="creating_job_associations" ) - job: Mapped[Optional["Job"]] = relationship("Job", back_populates="output_datasets") + job: Mapped[Optional["Job"]] = relationship(back_populates="output_datasets") def __init__(self, name, dataset): self.name = name @@ -2372,10 +2357,8 @@ class JobToInputDatasetCollectionAssociation(Base, RepresentById): ForeignKey("history_dataset_collection_association.id"), index=True ) name: Mapped[Optional[str]] = mapped_column(String(255)) - dataset_collection: Mapped[Optional["HistoryDatasetCollectionAssociation"]] = relationship( - "HistoryDatasetCollectionAssociation", lazy="joined" - ) - job: Mapped[Optional["Job"]] = relationship("Job", back_populates="input_dataset_collections") + dataset_collection: Mapped[Optional["HistoryDatasetCollectionAssociation"]] = relationship(lazy="joined") + job: Mapped[Optional["Job"]] = relationship(back_populates="input_dataset_collections") def __init__(self, name, dataset_collection): self.name = name @@ -2391,10 +2374,8 @@ class JobToInputDatasetCollectionElementAssociation(Base, RepresentById): ForeignKey("dataset_collection_element.id"), index=True ) name: Mapped[Optional[str]] = mapped_column(Unicode(255)) - dataset_collection_element: Mapped[Optional["DatasetCollectionElement"]] = relationship( - "DatasetCollectionElement", lazy="joined" - ) - job: Mapped[Optional["Job"]] = 
relationship("Job", back_populates="input_dataset_collection_elements") + dataset_collection_element: Mapped[Optional["DatasetCollectionElement"]] = relationship(lazy="joined") + job: Mapped[Optional["Job"]] = relationship(back_populates="input_dataset_collection_elements") def __init__(self, name, dataset_collection_element): self.name = name @@ -2412,10 +2393,8 @@ class JobToOutputDatasetCollectionAssociation(Base, RepresentById): ForeignKey("history_dataset_collection_association.id"), index=True ) name: Mapped[Optional[str]] = mapped_column(Unicode(255)) - dataset_collection_instance: Mapped[Optional["HistoryDatasetCollectionAssociation"]] = relationship( - "HistoryDatasetCollectionAssociation", lazy="joined" - ) - job: Mapped[Optional["Job"]] = relationship("Job", back_populates="output_dataset_collection_instances") + dataset_collection_instance: Mapped[Optional["HistoryDatasetCollectionAssociation"]] = relationship(lazy="joined") + job: Mapped[Optional["Job"]] = relationship(back_populates="output_dataset_collection_instances") def __init__(self, name, dataset_collection_instance): self.name = name @@ -2436,8 +2415,8 @@ class JobToImplicitOutputDatasetCollectionAssociation(Base, RepresentById): job_id: Mapped[Optional[int]] = mapped_column(ForeignKey("job.id"), index=True) dataset_collection_id: Mapped[Optional[int]] = mapped_column(ForeignKey("dataset_collection.id"), index=True) name: Mapped[Optional[str]] = mapped_column(Unicode(255)) - dataset_collection: Mapped[Optional["DatasetCollection"]] = relationship("DatasetCollection") - job: Mapped[Optional["Job"]] = relationship("Job", back_populates="output_dataset_collections") + dataset_collection: Mapped[Optional["DatasetCollection"]] = relationship() + job: Mapped[Optional["Job"]] = relationship(back_populates="output_dataset_collections") def __init__(self, name, dataset_collection): self.name = name @@ -2451,9 +2430,9 @@ class JobToInputLibraryDatasetAssociation(Base, RepresentById): job_id: 
Mapped[Optional[int]] = mapped_column(ForeignKey("job.id"), index=True) ldda_id: Mapped[Optional[int]] = mapped_column(ForeignKey("library_dataset_dataset_association.id"), index=True) name: Mapped[Optional[str]] = mapped_column(Unicode(255)) - job: Mapped[Optional["Job"]] = relationship("Job", back_populates="input_library_datasets") + job: Mapped[Optional["Job"]] = relationship(back_populates="input_library_datasets") dataset: Mapped[Optional["LibraryDatasetDatasetAssociation"]] = relationship( - "LibraryDatasetDatasetAssociation", lazy="joined", back_populates="dependent_jobs" + lazy="joined", back_populates="dependent_jobs" ) def __init__(self, name, dataset): @@ -2469,9 +2448,9 @@ class JobToOutputLibraryDatasetAssociation(Base, RepresentById): job_id: Mapped[Optional[int]] = mapped_column(ForeignKey("job.id"), index=True) ldda_id: Mapped[Optional[int]] = mapped_column(ForeignKey("library_dataset_dataset_association.id"), index=True) name: Mapped[Optional[str]] = mapped_column(Unicode(255)) - job: Mapped[Optional["Job"]] = relationship("Job", back_populates="output_library_datasets") + job: Mapped[Optional["Job"]] = relationship(back_populates="output_library_datasets") dataset: Mapped[Optional["LibraryDatasetDatasetAssociation"]] = relationship( - "LibraryDatasetDatasetAssociation", lazy="joined", back_populates="creating_job_associations" + lazy="joined", back_populates="creating_job_associations" ) def __init__(self, name, dataset): @@ -2508,7 +2487,6 @@ class ImplicitlyCreatedDatasetCollectionInput(Base, RepresentById): name: Mapped[Optional[str]] = mapped_column(Unicode(255)) input_dataset_collection: Mapped[Optional["HistoryDatasetCollectionAssociation"]] = relationship( - "HistoryDatasetCollectionAssociation", primaryjoin=( lambda: HistoryDatasetCollectionAssociation.id # type: ignore[has-type] == ImplicitlyCreatedDatasetCollectionInput.input_dataset_collection_id @@ -2526,7 +2504,7 @@ class ImplicitCollectionJobs(Base, Serializable): id: Mapped[int] = 
mapped_column(primary_key=True) populated_state: Mapped[str] = mapped_column(TrimmedString(64), default="new") jobs: Mapped[List["ImplicitCollectionJobsJobAssociation"]] = relationship( - "ImplicitCollectionJobsJobAssociation", back_populates="implicit_collection_jobs", cascade_backrefs=False + back_populates="implicit_collection_jobs", cascade_backrefs=False ) class populated_states(str, Enum): @@ -2561,7 +2539,7 @@ class ImplicitCollectionJobsJobAssociation(Base, RepresentById): job_id: Mapped[Optional[int]] = mapped_column(ForeignKey("job.id"), index=True) # Consider making this nullable... order_index: Mapped[int] implicit_collection_jobs = relationship("ImplicitCollectionJobs", back_populates="jobs") - job: Mapped[Optional["Job"]] = relationship("Job", back_populates="implicit_collection_jobs_association") + job: Mapped[Optional["Job"]] = relationship(back_populates="implicit_collection_jobs_association") class PostJobAction(Base, RepresentById): @@ -2573,7 +2551,6 @@ class PostJobAction(Base, RepresentById): output_name: Mapped[Optional[str]] = mapped_column(String(255)) action_arguments: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) workflow_step: Mapped[Optional["WorkflowStep"]] = relationship( - "WorkflowStep", back_populates="post_job_actions", primaryjoin=(lambda: WorkflowStep.id == PostJobAction.workflow_step_id), # type: ignore[has-type] ) @@ -2592,8 +2569,8 @@ class PostJobActionAssociation(Base, RepresentById): id: Mapped[int] = mapped_column(primary_key=True) job_id: Mapped[int] = mapped_column(ForeignKey("job.id"), index=True) post_job_action_id: Mapped[int] = mapped_column(ForeignKey("post_job_action.id"), index=True) - post_job_action: Mapped["PostJobAction"] = relationship("PostJobAction") - job: Mapped["Job"] = relationship("Job", back_populates="post_job_actions") + post_job_action: Mapped["PostJobAction"] = relationship() + job: Mapped["Job"] = relationship(back_populates="post_job_actions") def __init__(self, pja, job=None, 
job_id=None): if job is not None: @@ -2624,13 +2601,11 @@ class JobExternalOutputMetadata(Base, RepresentById): filename_kwds: Mapped[Optional[str]] = mapped_column(String(255)) filename_override_metadata: Mapped[Optional[str]] = mapped_column(String(255)) job_runner_external_pid: Mapped[Optional[str]] = mapped_column(String(255)) - history_dataset_association: Mapped[Optional["HistoryDatasetAssociation"]] = relationship( - "HistoryDatasetAssociation", lazy="joined" - ) + history_dataset_association: Mapped[Optional["HistoryDatasetAssociation"]] = relationship(lazy="joined") library_dataset_dataset_association: Mapped[Optional["LibraryDatasetDatasetAssociation"]] = relationship( - "LibraryDatasetDatasetAssociation", lazy="joined" + lazy="joined" ) - job: Mapped[Optional["Job"]] = relationship("Job", back_populates="external_output_metadata") + job: Mapped[Optional["Job"]] = relationship(back_populates="external_output_metadata") def __init__(self, job=None, dataset=None): add_object_to_object_session(self, job) @@ -2676,9 +2651,9 @@ class JobExportHistoryArchive(Base, RepresentById): dataset_id: Mapped[Optional[int]] = mapped_column(ForeignKey("dataset.id"), index=True) compressed: Mapped[Optional[bool]] = mapped_column(index=True, default=False) history_attrs_filename: Mapped[Optional[str]] = mapped_column(TEXT) - job: Mapped[Optional["Job"]] = relationship("Job") - dataset: Mapped[Optional["Dataset"]] = relationship("Dataset") - history: Mapped[Optional["History"]] = relationship("History", back_populates="exports") + job: Mapped[Optional["Job"]] = relationship() + dataset: Mapped[Optional["Dataset"]] = relationship() + history: Mapped[Optional["History"]] = relationship(back_populates="exports") ATTRS_FILENAME_HISTORY = "history_attrs.txt" @@ -2762,8 +2737,8 @@ class JobImportHistoryArchive(Base, RepresentById): job_id: Mapped[Optional[int]] = mapped_column(ForeignKey("job.id"), index=True) history_id: Mapped[Optional[int]] = 
mapped_column(ForeignKey("history.id"), index=True) archive_dir: Mapped[Optional[str]] = mapped_column(TEXT) - job: Mapped[Optional["Job"]] = relationship("Job") - history: Mapped[Optional["History"]] = relationship("History") + job: Mapped[Optional["Job"]] = relationship() + history: Mapped[Optional["History"]] = relationship() class StoreExportAssociation(Base, RepresentById): @@ -2788,7 +2763,7 @@ class JobContainerAssociation(Base, RepresentById): container_info: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) created_time: Mapped[Optional[datetime]] = mapped_column(default=now) modified_time: Mapped[Optional[datetime]] = mapped_column(default=now, onupdate=now) - job: Mapped[Optional["Job"]] = relationship("Job", back_populates="container") + job: Mapped[Optional["Job"]] = relationship(back_populates="container") def __init__(self, **kwd): if "job" in kwd: @@ -2818,7 +2793,7 @@ class InteractiveToolEntryPoint(Base, Dictifiable, RepresentById): created_time: Mapped[Optional[datetime]] = mapped_column(default=now) modified_time: Mapped[Optional[datetime]] = mapped_column(default=now, onupdate=now) label: Mapped[Optional[str]] = mapped_column(TEXT) - job: Mapped[Optional["Job"]] = relationship("Job", back_populates="interactivetool_entry_points", uselist=False) + job: Mapped[Optional["Job"]] = relationship(back_populates="interactivetool_entry_points", uselist=False) dict_collection_visible_keys = [ "id", @@ -2884,9 +2859,9 @@ class GenomeIndexToolData(Base, RepresentById): # TODO: params arg is lost modified_time: Mapped[Optional[datetime]] = mapped_column(default=now, onupdate=now) indexer: Mapped[Optional[str]] = mapped_column(String(64)) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) - job: Mapped[Optional["Job"]] = relationship("Job") - dataset: Mapped[Optional["Dataset"]] = relationship("Dataset") - user: Mapped[Optional["User"]] = relationship("User") + job: Mapped[Optional["Job"]] = relationship() + 
dataset: Mapped[Optional["Dataset"]] = relationship() + user: Mapped[Optional["User"]] = relationship() class Group(Base, Dictifiable, RepresentById): @@ -2897,10 +2872,8 @@ class Group(Base, Dictifiable, RepresentById): update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) name: Mapped[Optional[str]] = mapped_column(String(255), index=True, unique=True) deleted: Mapped[Optional[bool]] = mapped_column(index=True, default=False) - quotas: Mapped[List["GroupQuotaAssociation"]] = relationship("GroupQuotaAssociation", back_populates="group") - roles: Mapped[List["GroupRoleAssociation"]] = relationship( - "GroupRoleAssociation", back_populates="group", cascade_backrefs=False - ) + quotas: Mapped[List["GroupQuotaAssociation"]] = relationship(back_populates="group") + roles: Mapped[List["GroupRoleAssociation"]] = relationship(back_populates="group", cascade_backrefs=False) users = relationship("UserGroupAssociation", back_populates="group") dict_collection_visible_keys = ["id", "name"] @@ -2919,8 +2892,8 @@ class UserGroupAssociation(Base, RepresentById): group_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_group.id"), index=True) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) - user: Mapped[Optional["User"]] = relationship("User", back_populates="groups") - group: Mapped[Optional["Group"]] = relationship("Group", back_populates="users") + user: Mapped[Optional["User"]] = relationship(back_populates="groups") + group: Mapped[Optional["Group"]] = relationship(back_populates="users") def __init__(self, user, group): add_object_to_object_session(self, user) @@ -2952,7 +2925,7 @@ class Notification(Base, Dictifiable, RepresentById): content: Mapped[Optional[bytes]] = mapped_column(DoubleEncodedJsonType) user_notification_associations: Mapped[List["UserNotificationAssociation"]] = relationship( - 
"UserNotificationAssociation", back_populates="notification" + back_populates="notification" ) def __init__(self, source: str, category: str, variant: str, content): @@ -2972,10 +2945,8 @@ class UserNotificationAssociation(Base, RepresentById): deleted: Mapped[Optional[bool]] = mapped_column(index=True, default=False) update_time: Mapped[Optional[datetime]] = mapped_column(default=now, onupdate=now) - user: Mapped[Optional["User"]] = relationship("User", back_populates="all_notifications") - notification: Mapped[Optional["Notification"]] = relationship( - "Notification", back_populates="user_notification_associations" - ) + user: Mapped[Optional["User"]] = relationship(back_populates="all_notifications") + notification: Mapped[Optional["Notification"]] = relationship(back_populates="user_notification_associations") def __init__(self, user, notification): self.user = user @@ -3054,16 +3025,14 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable archive_export_id: Mapped[Optional[int]] = mapped_column(ForeignKey("store_export_association.id"), default=None) datasets: Mapped[List["HistoryDatasetAssociation"]] = relationship( - "HistoryDatasetAssociation", back_populates="history", cascade_backrefs=False, order_by=lambda: asc(HistoryDatasetAssociation.hid) # type: ignore[has-type] + back_populates="history", cascade_backrefs=False, order_by=lambda: asc(HistoryDatasetAssociation.hid) # type: ignore[has-type] ) exports: Mapped[List["JobExportHistoryArchive"]] = relationship( - "JobExportHistoryArchive", back_populates="history", primaryjoin=lambda: JobExportHistoryArchive.history_id == History.id, order_by=lambda: desc(JobExportHistoryArchive.id), ) active_datasets: Mapped[List["HistoryDatasetAssociation"]] = relationship( - "HistoryDatasetAssociation", primaryjoin=( lambda: and_( HistoryDatasetAssociation.history_id == History.id, # type: ignore[arg-type] @@ -3073,11 +3042,8 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, 
HasName, Serializable order_by=lambda: asc(HistoryDatasetAssociation.hid), # type: ignore[has-type] viewonly=True, ) - dataset_collections: Mapped[List["HistoryDatasetCollectionAssociation"]] = relationship( - "HistoryDatasetCollectionAssociation", back_populates="history" - ) + dataset_collections: Mapped[List["HistoryDatasetCollectionAssociation"]] = relationship(back_populates="history") active_dataset_collections: Mapped[List["HistoryDatasetCollectionAssociation"]] = relationship( - "HistoryDatasetCollectionAssociation", primaryjoin=( lambda: ( and_( @@ -3090,7 +3056,6 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable viewonly=True, ) visible_datasets: Mapped[List["HistoryDatasetAssociation"]] = relationship( - "HistoryDatasetAssociation", primaryjoin=( lambda: and_( HistoryDatasetAssociation.history_id == History.id, # type: ignore[arg-type] @@ -3102,7 +3067,6 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable viewonly=True, ) visible_dataset_collections: Mapped[List["HistoryDatasetCollectionAssociation"]] = relationship( - "HistoryDatasetCollectionAssociation", primaryjoin=( lambda: and_( HistoryDatasetCollectionAssociation.history_id == History.id, # type: ignore[has-type] @@ -3114,28 +3078,23 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable viewonly=True, ) tags: Mapped[List["HistoryTagAssociation"]] = relationship( - "HistoryTagAssociation", order_by=lambda: HistoryTagAssociation.id, back_populates="history" + order_by=lambda: HistoryTagAssociation.id, back_populates="history" ) annotations: Mapped[List["HistoryAnnotationAssociation"]] = relationship( - "HistoryAnnotationAssociation", order_by=lambda: HistoryAnnotationAssociation.id, back_populates="history" + order_by=lambda: HistoryAnnotationAssociation.id, back_populates="history" ) ratings: Mapped[List["HistoryRatingAssociation"]] = relationship( - "HistoryRatingAssociation", order_by=lambda: 
HistoryRatingAssociation.id, # type: ignore[has-type] back_populates="history", ) - default_permissions: Mapped[List["DefaultHistoryPermissions"]] = relationship( - "DefaultHistoryPermissions", back_populates="history" - ) - users_shared_with: Mapped[List["HistoryUserShareAssociation"]] = relationship( - "HistoryUserShareAssociation", back_populates="history" - ) + default_permissions: Mapped[List["DefaultHistoryPermissions"]] = relationship(back_populates="history") + users_shared_with: Mapped[List["HistoryUserShareAssociation"]] = relationship(back_populates="history") galaxy_sessions = relationship("GalaxySessionToHistoryAssociation", back_populates="history") workflow_invocations: Mapped[List["WorkflowInvocation"]] = relationship( - "WorkflowInvocation", back_populates="history", cascade_backrefs=False + back_populates="history", cascade_backrefs=False ) - user: Mapped[Optional["User"]] = relationship("User", back_populates="histories") - jobs: Mapped[List["Job"]] = relationship("Job", back_populates="history", cascade_backrefs=False) + user: Mapped[Optional["User"]] = relationship(back_populates="histories") + jobs: Mapped[List["Job"]] = relationship(back_populates="history", cascade_backrefs=False) update_time = column_property( select(func.max(HistoryAudit.update_time)).where(HistoryAudit.history_id == id).scalar_subquery(), @@ -3628,8 +3587,8 @@ class HistoryUserShareAssociation(Base, UserShareAssociation): id: Mapped[int] = mapped_column(primary_key=True) history_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) - user: Mapped[User] = relationship("User") - history: Mapped[Optional["History"]] = relationship("History", back_populates="users_shared_with") + user: Mapped[User] = relationship() + history: Mapped[Optional["History"]] = relationship(back_populates="users_shared_with") class UserRoleAssociation(Base, RepresentById): @@ -3641,8 
+3600,8 @@ class UserRoleAssociation(Base, RepresentById): create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) - user: Mapped[Optional["User"]] = relationship("User", back_populates="roles") - role: Mapped[Optional["Role"]] = relationship("Role", back_populates="users") + user: Mapped[Optional["User"]] = relationship(back_populates="roles") + role: Mapped[Optional["Role"]] = relationship(back_populates="users") def __init__(self, user, role): add_object_to_object_session(self, user) @@ -3658,8 +3617,8 @@ class GroupRoleAssociation(Base, RepresentById): role_id: Mapped[Optional[int]] = mapped_column(ForeignKey("role.id"), index=True) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) - group: Mapped[Optional["Group"]] = relationship("Group", back_populates="roles") - role: Mapped[Optional["Role"]] = relationship("Role", back_populates="groups") + group: Mapped[Optional["Group"]] = relationship(back_populates="roles") + role: Mapped[Optional["Role"]] = relationship(back_populates="groups") def __init__(self, group, role): self.group = group @@ -3677,9 +3636,9 @@ class Role(Base, Dictifiable, RepresentById): description: Mapped[Optional[str]] = mapped_column(TEXT) type: Mapped[Optional[str]] = mapped_column(String(40), index=True) deleted: Mapped[Optional[bool]] = mapped_column(index=True, default=False) - dataset_actions: Mapped[List["DatasetPermissions"]] = relationship("DatasetPermissions", back_populates="role") - groups: Mapped[List["GroupRoleAssociation"]] = relationship("GroupRoleAssociation", back_populates="role") - users: Mapped[List["UserRoleAssociation"]] = relationship("UserRoleAssociation", back_populates="role") + dataset_actions: Mapped[List["DatasetPermissions"]] = relationship(back_populates="role") + groups: 
Mapped[List["GroupRoleAssociation"]] = relationship(back_populates="role") + users: Mapped[List["UserRoleAssociation"]] = relationship(back_populates="role") dict_collection_visible_keys = ["id", "name"] dict_element_visible_keys = ["id", "name", "description", "type"] @@ -3710,7 +3669,7 @@ class UserQuotaSourceUsage(Base, Dictifiable, RepresentById): quota_source_label: Mapped[Optional[str]] = mapped_column(String(32), index=True) # user had an index on disk_usage - does that make any sense? -John disk_usage: Mapped[Decimal] = mapped_column(Numeric(15, 0), default=0) - user: Mapped[Optional["User"]] = relationship("User", back_populates="quota_source_usages") + user: Mapped[Optional["User"]] = relationship(back_populates="quota_source_usages") class UserQuotaAssociation(Base, Dictifiable, RepresentById): @@ -3721,8 +3680,8 @@ class UserQuotaAssociation(Base, Dictifiable, RepresentById): quota_id: Mapped[Optional[int]] = mapped_column(ForeignKey("quota.id"), index=True) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) - user: Mapped[Optional["User"]] = relationship("User", back_populates="quotas") - quota: Mapped[Optional["Quota"]] = relationship("Quota", back_populates="users") + user: Mapped[Optional["User"]] = relationship(back_populates="quotas") + quota: Mapped[Optional["Quota"]] = relationship(back_populates="users") dict_element_visible_keys = ["user"] @@ -3740,8 +3699,8 @@ class GroupQuotaAssociation(Base, Dictifiable, RepresentById): quota_id: Mapped[Optional[int]] = mapped_column(ForeignKey("quota.id"), index=True) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) - group: Mapped[Optional["Group"]] = relationship("Group", back_populates="quotas") - quota: Mapped[Optional["Quota"]] = relationship("Quota", back_populates="groups") + 
group: Mapped[Optional["Group"]] = relationship(back_populates="quotas") + quota: Mapped[Optional["Quota"]] = relationship(back_populates="groups") dict_element_visible_keys = ["group"] @@ -3765,8 +3724,8 @@ class Quota(Base, Dictifiable, RepresentById): deleted: Mapped[Optional[bool]] = mapped_column(index=True, default=False) quota_source_label: Mapped[Optional[str]] = mapped_column(String(32), default=None) default = relationship("DefaultQuotaAssociation", back_populates="quota", cascade_backrefs=False) - groups: Mapped[List["GroupQuotaAssociation"]] = relationship("GroupQuotaAssociation", back_populates="quota") - users: Mapped[List["UserQuotaAssociation"]] = relationship("UserQuotaAssociation", back_populates="quota") + groups: Mapped[List["GroupQuotaAssociation"]] = relationship(back_populates="quota") + users: Mapped[List["UserQuotaAssociation"]] = relationship(back_populates="quota") dict_collection_visible_keys = ["id", "name", "quota_source_label"] dict_element_visible_keys = [ @@ -3822,7 +3781,7 @@ class DefaultQuotaAssociation(Base, Dictifiable, RepresentById): update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) type: Mapped[Optional[str]] = mapped_column(String(32)) quota_id: Mapped[Optional[int]] = mapped_column(ForeignKey("quota.id"), index=True) - quota: Mapped[Optional["Quota"]] = relationship("Quota", back_populates="default") + quota: Mapped[Optional["Quota"]] = relationship(back_populates="default") dict_element_visible_keys = ["type"] @@ -3846,8 +3805,8 @@ class DatasetPermissions(Base, RepresentById): action: Mapped[Optional[str]] = mapped_column(TEXT) dataset_id: Mapped[Optional[int]] = mapped_column(ForeignKey("dataset.id"), index=True) role_id: Mapped[Optional[int]] = mapped_column(ForeignKey("role.id"), index=True) - dataset: Mapped[Optional["Dataset"]] = relationship("Dataset", back_populates="actions") - role: Mapped[Optional["Role"]] = relationship("Role", back_populates="dataset_actions") + dataset: 
Mapped[Optional["Dataset"]] = relationship(back_populates="actions") + role: Mapped[Optional["Role"]] = relationship(back_populates="dataset_actions") def __init__(self, action, dataset, role=None, role_id=None): self.action = action @@ -3868,8 +3827,8 @@ class LibraryPermissions(Base, RepresentById): action: Mapped[Optional[str]] = mapped_column(TEXT) library_id: Mapped[Optional[int]] = mapped_column(ForeignKey("library.id"), index=True) role_id: Mapped[Optional[int]] = mapped_column(ForeignKey("role.id"), index=True) - library: Mapped[Optional["Library"]] = relationship("Library", back_populates="actions") - role: Mapped[Optional["Role"]] = relationship("Role") + library: Mapped[Optional["Library"]] = relationship(back_populates="actions") + role: Mapped[Optional["Role"]] = relationship() def __init__(self, action, library_item, role): self.action = action @@ -3890,8 +3849,8 @@ class LibraryFolderPermissions(Base, RepresentById): action: Mapped[Optional[str]] = mapped_column(TEXT) library_folder_id: Mapped[Optional[int]] = mapped_column(ForeignKey("library_folder.id"), index=True) role_id: Mapped[Optional[int]] = mapped_column(ForeignKey("role.id"), index=True) - folder: Mapped[Optional["LibraryFolder"]] = relationship("LibraryFolder", back_populates="actions") - role: Mapped[Optional["Role"]] = relationship("Role") + folder: Mapped[Optional["LibraryFolder"]] = relationship(back_populates="actions") + role: Mapped[Optional["Role"]] = relationship() def __init__(self, action, library_item, role): self.action = action @@ -3912,8 +3871,8 @@ class LibraryDatasetPermissions(Base, RepresentById): action: Mapped[Optional[str]] = mapped_column(TEXT) library_dataset_id: Mapped[Optional[int]] = mapped_column(ForeignKey("library_dataset.id"), index=True) role_id: Mapped[Optional[int]] = mapped_column(ForeignKey("role.id"), index=True) - library_dataset: Mapped[Optional["LibraryDataset"]] = relationship("LibraryDataset", back_populates="actions") - role: 
Mapped[Optional["Role"]] = relationship("Role") + library_dataset: Mapped[Optional["LibraryDataset"]] = relationship(back_populates="actions") + role: Mapped[Optional["Role"]] = relationship() def __init__(self, action, library_item, role): self.action = action @@ -3937,9 +3896,9 @@ class LibraryDatasetDatasetAssociationPermissions(Base, RepresentById): ) role_id: Mapped[Optional[int]] = mapped_column(ForeignKey("role.id"), index=True) library_dataset_dataset_association: Mapped[Optional["LibraryDatasetDatasetAssociation"]] = relationship( - "LibraryDatasetDatasetAssociation", back_populates="actions" + back_populates="actions" ) - role: Mapped[Optional["Role"]] = relationship("Role") + role: Mapped[Optional["Role"]] = relationship() def __init__(self, action, library_item, role): self.action = action @@ -3958,8 +3917,8 @@ class DefaultUserPermissions(Base, RepresentById): user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) action: Mapped[Optional[str]] = mapped_column(TEXT) role_id: Mapped[Optional[int]] = mapped_column(ForeignKey("role.id"), index=True) - user: Mapped[Optional["User"]] = relationship("User", back_populates="default_permissions") - role: Mapped[Optional["Role"]] = relationship("Role") + user: Mapped[Optional["User"]] = relationship(back_populates="default_permissions") + role: Mapped[Optional["Role"]] = relationship() def __init__(self, user, action, role): add_object_to_object_session(self, user) @@ -3975,8 +3934,8 @@ class DefaultHistoryPermissions(Base, RepresentById): history_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history.id"), index=True) action: Mapped[Optional[str]] = mapped_column(TEXT) role_id: Mapped[Optional[int]] = mapped_column(ForeignKey("role.id"), index=True) - history: Mapped[Optional["History"]] = relationship("History", back_populates="default_permissions") - role: Mapped[Optional["Role"]] = relationship("Role") + history: Mapped[Optional["History"]] = 
relationship(back_populates="default_permissions") + role: Mapped[Optional["Role"]] = relationship() def __init__(self, history, action, role): add_object_to_object_session(self, history) @@ -4011,10 +3970,9 @@ class Dataset(Base, StorableObject, Serializable): total_size: Mapped[Optional[Decimal]] = mapped_column(Numeric(15, 0)) uuid: Mapped[Optional[Union[UUID, str]]] = mapped_column(UUIDType()) - actions: Mapped[List["DatasetPermissions"]] = relationship("DatasetPermissions", back_populates="dataset") - job: Mapped[Optional["Job"]] = relationship(Job, primaryjoin=(lambda: Dataset.job_id == Job.id)) + actions: Mapped[List["DatasetPermissions"]] = relationship(back_populates="dataset") + job: Mapped[Optional["Job"]] = relationship(primaryjoin=(lambda: Dataset.job_id == Job.id)) active_history_associations: Mapped[List["HistoryDatasetAssociation"]] = relationship( - "HistoryDatasetAssociation", primaryjoin=( lambda: and_( Dataset.id == HistoryDatasetAssociation.dataset_id, # type: ignore[attr-defined] @@ -4025,7 +3983,6 @@ class Dataset(Base, StorableObject, Serializable): viewonly=True, ) purged_history_associations: Mapped[List["HistoryDatasetAssociation"]] = relationship( - "HistoryDatasetAssociation", primaryjoin=( lambda: and_( Dataset.id == HistoryDatasetAssociation.dataset_id, # type: ignore[attr-defined] @@ -4035,7 +3992,6 @@ class Dataset(Base, StorableObject, Serializable): viewonly=True, ) active_library_associations: Mapped[List["LibraryDatasetDatasetAssociation"]] = relationship( - "LibraryDatasetDatasetAssociation", primaryjoin=( lambda: and_( Dataset.id == LibraryDatasetDatasetAssociation.dataset_id, # type: ignore[attr-defined] @@ -4044,13 +4000,12 @@ class Dataset(Base, StorableObject, Serializable): ), viewonly=True, ) - hashes: Mapped[List["DatasetHash"]] = relationship("DatasetHash", back_populates="dataset", cascade_backrefs=False) - sources: Mapped[List["DatasetSource"]] = relationship("DatasetSource", back_populates="dataset") + hashes: 
Mapped[List["DatasetHash"]] = relationship(back_populates="dataset", cascade_backrefs=False) + sources: Mapped[List["DatasetSource"]] = relationship(back_populates="dataset") history_associations: Mapped[List["HistoryDatasetAssociation"]] = relationship( - "HistoryDatasetAssociation", back_populates="dataset", cascade_backrefs=False + back_populates="dataset", cascade_backrefs=False ) library_associations: Mapped[List["LibraryDatasetDatasetAssociation"]] = relationship( - "LibraryDatasetDatasetAssociation", primaryjoin=(lambda: LibraryDatasetDatasetAssociation.table.c.dataset_id == Dataset.id), back_populates="dataset", cascade_backrefs=False, @@ -4382,8 +4337,8 @@ class DatasetSource(Base, Dictifiable, Serializable): source_uri: Mapped[Optional[str]] = mapped_column(TEXT) extra_files_path: Mapped[Optional[str]] = mapped_column(TEXT) transform: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) - dataset: Mapped[Optional["Dataset"]] = relationship("Dataset", back_populates="sources") - hashes: Mapped[List["DatasetSourceHash"]] = relationship("DatasetSourceHash", back_populates="source") + dataset: Mapped[Optional["Dataset"]] = relationship(back_populates="sources") + hashes: Mapped[List["DatasetSourceHash"]] = relationship(back_populates="source") dict_collection_visible_keys = ["id", "source_uri", "extra_files_path", "transform"] dict_element_visible_keys = [ "id", @@ -4419,7 +4374,7 @@ class DatasetSourceHash(Base, Serializable): dataset_source_id: Mapped[Optional[int]] = mapped_column(ForeignKey("dataset_source.id"), index=True) hash_function: Mapped[Optional[str]] = mapped_column(TEXT) hash_value: Mapped[Optional[str]] = mapped_column(TEXT) - source: Mapped[Optional["DatasetSource"]] = relationship("DatasetSource", back_populates="hashes") + source: Mapped[Optional["DatasetSource"]] = relationship(back_populates="hashes") def _serialize(self, id_encoder, serialization_options): rval = dict_for( @@ -4445,7 +4400,7 @@ class DatasetHash(Base, Dictifiable, 
Serializable): hash_function: Mapped[Optional[str]] = mapped_column(TEXT) hash_value: Mapped[Optional[str]] = mapped_column(TEXT) extra_files_path: Mapped[Optional[str]] = mapped_column(TEXT) - dataset: Mapped[Optional["Dataset"]] = relationship("Dataset", back_populates="hashes") + dataset: Mapped[Optional["Dataset"]] = relationship(back_populates="hashes") dict_collection_visible_keys = ["id", "hash_function", "hash_value", "extra_files_path"] dict_element_visible_keys = ["id", "hash_function", "hash_value", "extra_files_path"] @@ -5526,10 +5481,8 @@ class HistoryDatasetAssociationDisplayAtAuthorization(Base, RepresentById): ) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) site: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) - history_dataset_association: Mapped[Optional["HistoryDatasetAssociation"]] = relationship( - "HistoryDatasetAssociation" - ) - user: Mapped[Optional["User"]] = relationship("User") + history_dataset_association: Mapped[Optional["HistoryDatasetAssociation"]] = relationship() + user: Mapped[Optional["User"]] = relationship() def __init__(self, hda=None, user=None, site=None): self.history_dataset_association = hda @@ -5550,13 +5503,11 @@ class HistoryDatasetAssociationSubset(Base, RepresentById): location: Mapped[Optional[str]] = mapped_column(Unicode(255), index=True) hda: Mapped[Optional["HistoryDatasetAssociation"]] = relationship( - "HistoryDatasetAssociation", primaryjoin=( lambda: HistoryDatasetAssociationSubset.history_dataset_association_id == HistoryDatasetAssociation.id ), ) subset: Mapped[Optional["HistoryDatasetAssociation"]] = relationship( - "HistoryDatasetAssociation", primaryjoin=( lambda: HistoryDatasetAssociationSubset.history_dataset_association_subset_id == HistoryDatasetAssociation.id @@ -5582,9 +5533,7 @@ class Library(Base, Dictifiable, HasName, Serializable): description: Mapped[Optional[str]] = mapped_column(TEXT) synopsis: Mapped[Optional[str]] = 
mapped_column(TEXT) root_folder = relationship("LibraryFolder", back_populates="library_root") - actions: Mapped[List["LibraryPermissions"]] = relationship( - "LibraryPermissions", back_populates="library", cascade_backrefs=False - ) + actions: Mapped[List["LibraryPermissions"]] = relationship(back_populates="library", cascade_backrefs=False) permitted_actions = get_permitted_actions(filter="LIBRARY") dict_collection_visible_keys = ["id", "name"] @@ -5665,17 +5614,13 @@ class LibraryFolder(Base, Dictifiable, HasName, Serializable): genome_build: Mapped[Optional[str]] = mapped_column(TrimmedString(40)) folders: Mapped[List["LibraryFolder"]] = relationship( - "LibraryFolder", primaryjoin=(lambda: LibraryFolder.id == LibraryFolder.parent_id), order_by=asc(name), back_populates="parent", ) - parent: Mapped[Optional["LibraryFolder"]] = relationship( - "LibraryFolder", back_populates="folders", remote_side=[id] - ) + parent: Mapped[Optional["LibraryFolder"]] = relationship(back_populates="folders", remote_side=[id]) active_folders: Mapped[List["LibraryFolder"]] = relationship( - "LibraryFolder", primaryjoin=("and_(LibraryFolder.parent_id == LibraryFolder.id, not_(LibraryFolder.deleted))"), order_by=asc(name), # """sqlalchemy.exc.ArgumentError: Error creating eager relationship 'active_folders' @@ -5686,7 +5631,6 @@ class LibraryFolder(Base, Dictifiable, HasName, Serializable): ) datasets: Mapped[List["LibraryDataset"]] = relationship( - "LibraryDataset", primaryjoin=( lambda: LibraryDataset.folder_id == LibraryFolder.id and LibraryDataset.library_dataset_dataset_association_id.isnot(None) @@ -5696,7 +5640,6 @@ class LibraryFolder(Base, Dictifiable, HasName, Serializable): ) active_datasets: Mapped[List["LibraryDataset"]] = relationship( - "LibraryDataset", primaryjoin=( "and_(LibraryDataset.folder_id == LibraryFolder.id, not_(LibraryDataset.deleted), LibraryDataset.library_dataset_dataset_association_id.isnot(None))" ), @@ -5705,9 +5648,7 @@ class LibraryFolder(Base, 
Dictifiable, HasName, Serializable): ) library_root = relationship("Library", back_populates="root_folder") - actions: Mapped[List["LibraryFolderPermissions"]] = relationship( - "LibraryFolderPermissions", back_populates="folder", cascade_backrefs=False - ) + actions: Mapped[List["LibraryFolderPermissions"]] = relationship(back_populates="folder", cascade_backrefs=False) dict_element_visible_keys = [ "id", @@ -5816,12 +5757,11 @@ class LibraryDataset(Base, Serializable): _info: Mapped[Optional[str]] = mapped_column("info", TrimmedString(255)) deleted: Mapped[Optional[bool]] = mapped_column(index=True, default=False) purged: Mapped[Optional[bool]] = mapped_column(index=True, default=False) - folder: Mapped[Optional["LibraryFolder"]] = relationship("LibraryFolder") + folder: Mapped[Optional["LibraryFolder"]] = relationship() library_dataset_dataset_association = relationship( "LibraryDatasetDatasetAssociation", foreign_keys=library_dataset_dataset_association_id, post_update=True ) expired_datasets: Mapped[List["LibraryDatasetDatasetAssociation"]] = relationship( - "LibraryDatasetDatasetAssociation", foreign_keys=[id, library_dataset_dataset_association_id], primaryjoin=( "and_(LibraryDataset.id == LibraryDatasetDatasetAssociation.library_dataset_id, \ @@ -5831,7 +5771,7 @@ class LibraryDataset(Base, Serializable): uselist=True, ) actions: Mapped[List["LibraryDatasetPermissions"]] = relationship( - "LibraryDatasetPermissions", back_populates="library_dataset", cascade_backrefs=False + back_populates="library_dataset", cascade_backrefs=False ) # This class acts as a proxy to the currently selected LDDA @@ -6100,9 +6040,7 @@ class ExtendedMetadata(Base, RepresentById): id: Mapped[int] = mapped_column(primary_key=True) data: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) - children: Mapped[List["ExtendedMetadataIndex"]] = relationship( - "ExtendedMetadataIndex", back_populates="extended_metadata" - ) + children: Mapped[List["ExtendedMetadataIndex"]] = 
relationship(back_populates="extended_metadata") def __init__(self, data): self.data = data @@ -6117,9 +6055,7 @@ class ExtendedMetadataIndex(Base, RepresentById): ) path: Mapped[Optional[str]] = mapped_column(String(255)) value: Mapped[Optional[str]] = mapped_column(TEXT) - extended_metadata: Mapped[Optional["ExtendedMetadata"]] = relationship( - "ExtendedMetadata", back_populates="children" - ) + extended_metadata: Mapped[Optional["ExtendedMetadata"]] = relationship(back_populates="children") def __init__(self, extended_metadata, path, value): self.extended_metadata = extended_metadata @@ -6138,7 +6074,6 @@ class LibraryInfoAssociation(Base, RepresentById): deleted: Mapped[Optional[bool]] = mapped_column(index=True, default=False) library: Mapped[Optional["Library"]] = relationship( - "Library", primaryjoin=( lambda: and_( LibraryInfoAssociation.library_id == Library.id, @@ -6147,10 +6082,10 @@ class LibraryInfoAssociation(Base, RepresentById): ), ) template: Mapped[Optional["FormDefinition"]] = relationship( - "FormDefinition", primaryjoin=lambda: LibraryInfoAssociation.form_definition_id == FormDefinition.id + primaryjoin=lambda: LibraryInfoAssociation.form_definition_id == FormDefinition.id ) info: Mapped[Optional["FormValues"]] = relationship( - "FormValues", primaryjoin=lambda: LibraryInfoAssociation.form_values_id == FormValues.id # type: ignore[has-type] + primaryjoin=lambda: LibraryInfoAssociation.form_values_id == FormValues.id # type: ignore[has-type] ) def __init__(self, library, form_definition, info, inheritable=False): @@ -6171,17 +6106,16 @@ class LibraryFolderInfoAssociation(Base, RepresentById): deleted: Mapped[Optional[bool]] = mapped_column(index=True, default=False) folder: Mapped[Optional["LibraryFolder"]] = relationship( - "LibraryFolder", primaryjoin=( lambda: (LibraryFolderInfoAssociation.library_folder_id == LibraryFolder.id) & (not_(LibraryFolderInfoAssociation.deleted)) ), ) template: Mapped[Optional["FormDefinition"]] = relationship( - 
"FormDefinition", primaryjoin=(lambda: LibraryFolderInfoAssociation.form_definition_id == FormDefinition.id) + primaryjoin=(lambda: LibraryFolderInfoAssociation.form_definition_id == FormDefinition.id) ) info: Mapped[Optional["FormValues"]] = relationship( - "FormValues", primaryjoin=(lambda: LibraryFolderInfoAssociation.form_values_id == FormValues.id) # type: ignore[has-type] + primaryjoin=(lambda: LibraryFolderInfoAssociation.form_values_id == FormValues.id) # type: ignore[has-type] ) def __init__(self, folder, form_definition, info, inheritable=False): @@ -6203,7 +6137,6 @@ class LibraryDatasetDatasetInfoAssociation(Base, RepresentById): deleted: Mapped[Optional[bool]] = mapped_column(index=True, default=False) library_dataset_dataset_association: Mapped[Optional["LibraryDatasetDatasetAssociation"]] = relationship( - "LibraryDatasetDatasetAssociation", primaryjoin=( lambda: ( LibraryDatasetDatasetInfoAssociation.library_dataset_dataset_association_id @@ -6213,11 +6146,10 @@ class LibraryDatasetDatasetInfoAssociation(Base, RepresentById): ), ) template: Mapped[Optional["FormDefinition"]] = relationship( - "FormDefinition", primaryjoin=(lambda: LibraryDatasetDatasetInfoAssociation.form_definition_id == FormDefinition.id), ) info: Mapped[Optional["FormValues"]] = relationship( - "FormValues", primaryjoin=(lambda: LibraryDatasetDatasetInfoAssociation.form_values_id == FormValues.id) # type: ignore[has-type] + primaryjoin=(lambda: LibraryDatasetDatasetInfoAssociation.form_values_id == FormValues.id) # type: ignore[has-type] ) def __init__(self, library_dataset_dataset_association, form_definition, info): @@ -6248,22 +6180,18 @@ class ImplicitlyConvertedDatasetAssociation(Base, Serializable): type: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) parent_hda: Mapped[Optional["HistoryDatasetAssociation"]] = relationship( - "HistoryDatasetAssociation", primaryjoin=(lambda: ImplicitlyConvertedDatasetAssociation.hda_parent_id == HistoryDatasetAssociation.id), 
back_populates="implicitly_converted_datasets", ) dataset_ldda: Mapped[Optional["LibraryDatasetDatasetAssociation"]] = relationship( - "LibraryDatasetDatasetAssociation", primaryjoin=(lambda: ImplicitlyConvertedDatasetAssociation.ldda_id == LibraryDatasetDatasetAssociation.id), back_populates="implicitly_converted_parent_datasets", ) dataset: Mapped[Optional["HistoryDatasetAssociation"]] = relationship( - "HistoryDatasetAssociation", primaryjoin=(lambda: ImplicitlyConvertedDatasetAssociation.hda_id == HistoryDatasetAssociation.id), back_populates="implicitly_converted_parent_datasets", ) parent_ldda: Mapped[Optional["LibraryDatasetDatasetAssociation"]] = relationship( - "LibraryDatasetDatasetAssociation", primaryjoin=( lambda: ImplicitlyConvertedDatasetAssociation.ldda_parent_id == LibraryDatasetDatasetAssociation.table.c.id ), @@ -6355,7 +6283,6 @@ class DatasetCollection(Base, Dictifiable, UsesAnnotations, Serializable): update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) elements: Mapped[List["DatasetCollectionElement"]] = relationship( - "DatasetCollectionElement", primaryjoin=(lambda: DatasetCollection.id == DatasetCollectionElement.dataset_collection_id), # type: ignore[has-type] back_populates="collection", order_by=lambda: DatasetCollectionElement.element_index, # type: ignore[has-type] @@ -6823,7 +6750,7 @@ class HistoryDatasetCollectionAssociation( update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, index=True, nullable=True) collection = relationship("DatasetCollection") - history: Mapped[Optional["History"]] = relationship("History", back_populates="dataset_collections") + history: Mapped[Optional["History"]] = relationship(back_populates="dataset_collections") copied_from_history_dataset_collection_association = relationship( "HistoryDatasetCollectionAssociation", @@ -6837,7 +6764,6 @@ class HistoryDatasetCollectionAssociation( back_populates="copied_from_history_dataset_collection_association", 
) implicit_input_collections: Mapped[List["ImplicitlyCreatedDatasetCollectionInput"]] = relationship( - "ImplicitlyCreatedDatasetCollectionInput", primaryjoin=( lambda: HistoryDatasetCollectionAssociation.id == ImplicitlyCreatedDatasetCollectionInput.dataset_collection_id @@ -6845,28 +6771,22 @@ class HistoryDatasetCollectionAssociation( ) implicit_collection_jobs = relationship("ImplicitCollectionJobs", uselist=False) job: Mapped[Optional["Job"]] = relationship( - "Job", back_populates="history_dataset_collection_associations", uselist=False, ) tags: Mapped[List["HistoryDatasetCollectionTagAssociation"]] = relationship( - "HistoryDatasetCollectionTagAssociation", order_by=lambda: HistoryDatasetCollectionTagAssociation.id, back_populates="dataset_collection", ) annotations: Mapped[List["HistoryDatasetCollectionAssociationAnnotationAssociation"]] = relationship( - "HistoryDatasetCollectionAssociationAnnotationAssociation", order_by=lambda: HistoryDatasetCollectionAssociationAnnotationAssociation.id, back_populates="history_dataset_collection", ) ratings: Mapped[List["HistoryDatasetCollectionRatingAssociation"]] = relationship( - "HistoryDatasetCollectionRatingAssociation", order_by=lambda: HistoryDatasetCollectionRatingAssociation.id, # type: ignore[has-type] back_populates="dataset_collection", ) - creating_job_associations: Mapped[List["JobToOutputDatasetCollectionAssociation"]] = relationship( - "JobToOutputDatasetCollectionAssociation", viewonly=True - ) + creating_job_associations: Mapped[List["JobToOutputDatasetCollectionAssociation"]] = relationship(viewonly=True) dict_dbkeysandextensions_visible_keys = ["dbkeys", "extensions"] editable_keys = ("name", "deleted", "visible") @@ -7210,17 +7130,14 @@ class LibraryDatasetCollectionAssociation(Base, DatasetCollectionInstance, Repre folder = relationship("LibraryFolder") tags: Mapped[List["LibraryDatasetCollectionTagAssociation"]] = relationship( - "LibraryDatasetCollectionTagAssociation", order_by=lambda: 
LibraryDatasetCollectionTagAssociation.id, back_populates="dataset_collection", ) annotations: Mapped[List["LibraryDatasetCollectionAnnotationAssociation"]] = relationship( - "LibraryDatasetCollectionAnnotationAssociation", order_by=lambda: LibraryDatasetCollectionAnnotationAssociation.id, back_populates="dataset_collection", ) ratings: Mapped[List["LibraryDatasetCollectionRatingAssociation"]] = relationship( - "LibraryDatasetCollectionRatingAssociation", order_by=lambda: LibraryDatasetCollectionRatingAssociation.id, # type: ignore[has-type] back_populates="dataset_collection", ) @@ -7443,9 +7360,9 @@ class Event(Base, RepresentById): session_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_session.id"), index=True) tool_id: Mapped[Optional[str]] = mapped_column(String(255)) - history: Mapped[Optional["History"]] = relationship("History") - user: Mapped[Optional["User"]] = relationship("User") - galaxy_session: Mapped[Optional["GalaxySession"]] = relationship("GalaxySession") + history: Mapped[Optional["History"]] = relationship() + user: Mapped[Optional["User"]] = relationship() + galaxy_session: Mapped[Optional["GalaxySession"]] = relationship() class GalaxySession(Base, RepresentById): @@ -7466,11 +7383,11 @@ class GalaxySession(Base, RepresentById): prev_session_id: Mapped[Optional[int]] disk_usage: Mapped[Optional[Decimal]] = mapped_column(Numeric(15, 0), index=True) last_action: Mapped[Optional[datetime]] - current_history: Mapped[Optional["History"]] = relationship("History") + current_history: Mapped[Optional["History"]] = relationship() histories: Mapped[List["GalaxySessionToHistoryAssociation"]] = relationship( - "GalaxySessionToHistoryAssociation", back_populates="galaxy_session", cascade_backrefs=False + back_populates="galaxy_session", cascade_backrefs=False ) - user: Mapped[Optional["User"]] = relationship("User", back_populates="galaxy_sessions") + user: Mapped[Optional["User"]] = relationship(back_populates="galaxy_sessions") def 
__init__(self, is_valid=False, **kwd): super().__init__(**kwd) @@ -7501,8 +7418,8 @@ class GalaxySessionToHistoryAssociation(Base, RepresentById): create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) session_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_session.id"), index=True) history_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history.id"), index=True) - galaxy_session: Mapped[Optional["GalaxySession"]] = relationship("GalaxySession", back_populates="histories") - history: Mapped[Optional["History"]] = relationship("History", back_populates="galaxy_sessions") + galaxy_session: Mapped[Optional["GalaxySession"]] = relationship(back_populates="histories") + history: Mapped[Optional["History"]] = relationship(back_populates="galaxy_sessions") def __init__(self, galaxy_session, history): self.galaxy_session = galaxy_session @@ -7539,10 +7456,9 @@ class StoredWorkflow(Base, HasTags, Dictifiable, RepresentById): published: Mapped[Optional[bool]] = mapped_column(index=True, default=False) user: Mapped["User"] = relationship( - "User", primaryjoin=(lambda: User.id == StoredWorkflow.user_id), back_populates="stored_workflows" + primaryjoin=(lambda: User.id == StoredWorkflow.user_id), back_populates="stored_workflows" ) workflows: Mapped[List["Workflow"]] = relationship( - "Workflow", back_populates="stored_workflow", cascade="all, delete-orphan", primaryjoin=(lambda: StoredWorkflow.id == Workflow.stored_workflow_id), # type: ignore[has-type] @@ -7556,12 +7472,10 @@ class StoredWorkflow(Base, HasTags, Dictifiable, RepresentById): lazy=False, ) tags: Mapped[List["StoredWorkflowTagAssociation"]] = relationship( - "StoredWorkflowTagAssociation", order_by=lambda: StoredWorkflowTagAssociation.id, back_populates="stored_workflow", ) owner_tags: Mapped[List["StoredWorkflowTagAssociation"]] = relationship( - "StoredWorkflowTagAssociation", primaryjoin=( lambda: and_( StoredWorkflow.id == StoredWorkflowTagAssociation.stored_workflow_id, 
@@ -7572,17 +7486,15 @@ class StoredWorkflow(Base, HasTags, Dictifiable, RepresentById): order_by=lambda: StoredWorkflowTagAssociation.id, ) annotations: Mapped[List["StoredWorkflowAnnotationAssociation"]] = relationship( - "StoredWorkflowAnnotationAssociation", order_by=lambda: StoredWorkflowAnnotationAssociation.id, back_populates="stored_workflow", ) ratings: Mapped[List["StoredWorkflowRatingAssociation"]] = relationship( - "StoredWorkflowRatingAssociation", order_by=lambda: StoredWorkflowRatingAssociation.id, # type: ignore[has-type] back_populates="stored_workflow", ) users_shared_with: Mapped[List["StoredWorkflowUserShareAssociation"]] = relationship( - "StoredWorkflowUserShareAssociation", back_populates="stored_workflow" + back_populates="stored_workflow" ) average_rating = None @@ -7723,7 +7635,6 @@ class Workflow(Base, Dictifiable, RepresentById): lazy=False, ) comments: Mapped[List["WorkflowComment"]] = relationship( - "WorkflowComment", back_populates="workflow", primaryjoin=(lambda: Workflow.id == WorkflowComment.workflow_id), # type: ignore[has-type] cascade="all, delete-orphan", @@ -7903,37 +7814,33 @@ class WorkflowStep(Base, RepresentById): parent_comment_id: Mapped[Optional[int]] = mapped_column(ForeignKey("workflow_comment.id"), index=True) parent_comment: Mapped[Optional["WorkflowComment"]] = relationship( - "WorkflowComment", primaryjoin=(lambda: WorkflowComment.id == WorkflowStep.parent_comment_id), back_populates="child_steps", ) subworkflow: Mapped[Optional["Workflow"]] = relationship( - "Workflow", primaryjoin=(lambda: Workflow.id == WorkflowStep.subworkflow_id), back_populates="parent_workflow_steps", ) dynamic_tool: Mapped[Optional["DynamicTool"]] = relationship( - "DynamicTool", primaryjoin=(lambda: DynamicTool.id == WorkflowStep.dynamic_tool_id) + primaryjoin=(lambda: DynamicTool.id == WorkflowStep.dynamic_tool_id) ) tags: Mapped[List["WorkflowStepTagAssociation"]] = relationship( - "WorkflowStepTagAssociation", order_by=lambda: 
WorkflowStepTagAssociation.id, back_populates="workflow_step" + order_by=lambda: WorkflowStepTagAssociation.id, back_populates="workflow_step" ) annotations: Mapped[List["WorkflowStepAnnotationAssociation"]] = relationship( - "WorkflowStepAnnotationAssociation", order_by=lambda: WorkflowStepAnnotationAssociation.id, back_populates="workflow_step", ) post_job_actions = relationship("PostJobAction", back_populates="workflow_step", cascade_backrefs=False) inputs = relationship("WorkflowStepInput", back_populates="workflow_step") workflow_outputs: Mapped[List["WorkflowOutput"]] = relationship( - "WorkflowOutput", back_populates="workflow_step", cascade_backrefs=False + back_populates="workflow_step", cascade_backrefs=False ) output_connections: Mapped[List["WorkflowStepConnection"]] = relationship( - "WorkflowStepConnection", primaryjoin=(lambda: WorkflowStepConnection.output_step_id == WorkflowStep.id) + primaryjoin=(lambda: WorkflowStepConnection.output_step_id == WorkflowStep.id) ) workflow: Mapped["Workflow"] = relationship( - "Workflow", primaryjoin=(lambda: Workflow.id == WorkflowStep.workflow_id), back_populates="steps", cascade_backrefs=False, @@ -8213,13 +8120,11 @@ class WorkflowStepInput(Base, RepresentById): runtime_value: Mapped[Optional[bool]] = mapped_column(default=False) workflow_step: Mapped[Optional["WorkflowStep"]] = relationship( - "WorkflowStep", back_populates="inputs", cascade="all", primaryjoin=(lambda: WorkflowStepInput.workflow_step_id == WorkflowStep.id), ) connections: Mapped[List["WorkflowStepConnection"]] = relationship( - "WorkflowStepConnection", back_populates="input_step_input", primaryjoin=(lambda: WorkflowStepConnection.input_step_input_id == WorkflowStepInput.id), cascade_backrefs=False, @@ -8307,7 +8212,6 @@ class WorkflowOutput(Base, Serializable): label: Mapped[Optional[str]] = mapped_column(Unicode(255)) uuid: Mapped[Optional[Union[UUID, str]]] = mapped_column(UUIDType) workflow_step: Mapped["WorkflowStep"] = relationship( - 
"WorkflowStep", back_populates="workflow_outputs", primaryjoin=(lambda: WorkflowStep.id == WorkflowOutput.workflow_step_id), ) @@ -8353,7 +8257,6 @@ class WorkflowComment(Base, RepresentById): parent_comment_id: Mapped[Optional[int]] = mapped_column(ForeignKey("workflow_comment.id"), index=True) workflow: Mapped["Workflow"] = relationship( - "Workflow", primaryjoin=(lambda: Workflow.id == WorkflowComment.workflow_id), back_populates="comments", ) @@ -8365,14 +8268,12 @@ class WorkflowComment(Base, RepresentById): ) parent_comment: Mapped[Optional["WorkflowComment"]] = relationship( - "WorkflowComment", primaryjoin=(lambda: WorkflowComment.id == WorkflowComment.parent_comment_id), back_populates="child_comments", remote_side=[id], ) child_comments: Mapped[List["WorkflowComment"]] = relationship( - "WorkflowComment", primaryjoin=(lambda: WorkflowComment.parent_comment_id == WorkflowComment.id), back_populates="parent_comment", ) @@ -8416,10 +8317,8 @@ class StoredWorkflowUserShareAssociation(Base, UserShareAssociation): id: Mapped[int] = mapped_column(primary_key=True) stored_workflow_id: Mapped[Optional[int]] = mapped_column(ForeignKey("stored_workflow.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) - user: Mapped[User] = relationship("User") - stored_workflow: Mapped[Optional["StoredWorkflow"]] = relationship( - "StoredWorkflow", back_populates="users_shared_with" - ) + user: Mapped[User] = relationship() + stored_workflow: Mapped[Optional["StoredWorkflow"]] = relationship(back_populates="users_shared_with") class StoredWorkflowMenuEntry(Base, RepresentById): @@ -8430,9 +8329,8 @@ class StoredWorkflowMenuEntry(Base, RepresentById): user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) order_index: Mapped[Optional[int]] - stored_workflow: Mapped[Optional["StoredWorkflow"]] = relationship("StoredWorkflow") + stored_workflow: Mapped[Optional["StoredWorkflow"]] = 
relationship() user: Mapped[Optional["User"]] = relationship( - "User", back_populates="stored_workflow_menu_entries", primaryjoin=( lambda: (StoredWorkflowMenuEntry.user_id == User.id) @@ -9032,14 +8930,10 @@ class WorkflowInvocationMessage(Base, Dictifiable, Serializable): hda_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history_dataset_association.id")) hdca_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history_dataset_collection_association.id")) - workflow_invocation: Mapped["WorkflowInvocation"] = relationship( - "WorkflowInvocation", back_populates="messages", lazy=True - ) - workflow_step: Mapped[Optional["WorkflowStep"]] = relationship( - "WorkflowStep", foreign_keys=workflow_step_id, lazy=True - ) + workflow_invocation: Mapped["WorkflowInvocation"] = relationship(back_populates="messages", lazy=True) + workflow_step: Mapped[Optional["WorkflowStep"]] = relationship(foreign_keys=workflow_step_id, lazy=True) dependent_workflow_step: Mapped[Optional["WorkflowStep"]] = relationship( - "WorkflowStep", foreign_keys=dependent_workflow_step_id, lazy=True + foreign_keys=dependent_workflow_step_id, lazy=True ) @property @@ -9113,7 +9007,7 @@ class WorkflowInvocationStep(Base, Dictifiable, Serializable): action: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) workflow_step = relationship("WorkflowStep") - job: Mapped[Optional["Job"]] = relationship("Job", back_populates="workflow_invocation_step", uselist=False) + job: Mapped[Optional["Job"]] = relationship(back_populates="workflow_invocation_step", uselist=False) implicit_collection_jobs = relationship("ImplicitCollectionJobs", uselist=False) output_dataset_collections = relationship( "WorkflowInvocationStepOutputDatasetCollectionAssociation", @@ -9125,7 +9019,7 @@ class WorkflowInvocationStep(Base, Dictifiable, Serializable): back_populates="workflow_invocation_step", cascade_backrefs=False, ) - workflow_invocation: Mapped["WorkflowInvocation"] = relationship("WorkflowInvocation", 
back_populates="steps") + workflow_invocation: Mapped["WorkflowInvocation"] = relationship(back_populates="steps") output_value = relationship( "WorkflowInvocationOutputValue", foreign_keys="[WorkflowInvocationStep.workflow_invocation_id, WorkflowInvocationStep.workflow_step_id]", @@ -9317,9 +9211,7 @@ class WorkflowRequestInputParameter(Base, Dictifiable, Serializable): name: Mapped[Optional[str]] = mapped_column(Unicode(255)) value: Mapped[Optional[str]] = mapped_column(TEXT) type: Mapped[Optional[str]] = mapped_column(Unicode(255)) - workflow_invocation: Mapped[Optional["WorkflowInvocation"]] = relationship( - "WorkflowInvocation", back_populates="input_parameters" - ) + workflow_invocation: Mapped[Optional["WorkflowInvocation"]] = relationship(back_populates="input_parameters") dict_collection_visible_keys = ["id", "name", "value", "type"] @@ -9348,10 +9240,8 @@ class WorkflowRequestStepState(Base, Dictifiable, Serializable): ) workflow_step_id: Mapped[Optional[int]] = mapped_column(ForeignKey("workflow_step.id")) value: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) - workflow_step: Mapped[Optional["WorkflowStep"]] = relationship("WorkflowStep") - workflow_invocation: Mapped[Optional["WorkflowInvocation"]] = relationship( - "WorkflowInvocation", back_populates="step_states" - ) + workflow_step: Mapped[Optional["WorkflowStep"]] = relationship() + workflow_invocation: Mapped[Optional["WorkflowInvocation"]] = relationship(back_populates="step_states") dict_collection_visible_keys = ["id", "name", "value", "workflow_step_id"] @@ -9373,11 +9263,9 @@ class WorkflowRequestToInputDatasetAssociation(Base, Dictifiable, Serializable): workflow_step_id: Mapped[Optional[int]] = mapped_column(ForeignKey("workflow_step.id")) dataset_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history_dataset_association.id"), index=True) - workflow_step: Mapped[Optional["WorkflowStep"]] = relationship("WorkflowStep") - dataset: 
Mapped[Optional["HistoryDatasetAssociation"]] = relationship("HistoryDatasetAssociation") - workflow_invocation: Mapped[Optional["WorkflowInvocation"]] = relationship( - "WorkflowInvocation", back_populates="input_datasets" - ) + workflow_step: Mapped[Optional["WorkflowStep"]] = relationship() + dataset: Mapped[Optional["HistoryDatasetAssociation"]] = relationship() + workflow_invocation: Mapped[Optional["WorkflowInvocation"]] = relationship(back_populates="input_datasets") history_content_type = "dataset" dict_collection_visible_keys = ["id", "workflow_invocation_id", "workflow_step_id", "dataset_id", "name"] @@ -9404,12 +9292,10 @@ class WorkflowRequestToInputDatasetCollectionAssociation(Base, Dictifiable, Seri dataset_collection_id: Mapped[Optional[int]] = mapped_column( ForeignKey("history_dataset_collection_association.id"), index=True ) - workflow_step: Mapped[Optional["WorkflowStep"]] = relationship("WorkflowStep") - dataset_collection: Mapped[Optional["HistoryDatasetCollectionAssociation"]] = relationship( - "HistoryDatasetCollectionAssociation" - ) + workflow_step: Mapped[Optional["WorkflowStep"]] = relationship() + dataset_collection: Mapped[Optional["HistoryDatasetCollectionAssociation"]] = relationship() workflow_invocation: Mapped[Optional["WorkflowInvocation"]] = relationship( - "WorkflowInvocation", back_populates="input_dataset_collections" + back_populates="input_dataset_collections" ) history_content_type = "dataset_collection" @@ -9435,10 +9321,8 @@ class WorkflowRequestInputStepParameter(Base, Dictifiable, Serializable): workflow_step_id: Mapped[Optional[int]] = mapped_column(ForeignKey("workflow_step.id")) parameter_value: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) - workflow_step: Mapped[Optional["WorkflowStep"]] = relationship("WorkflowStep") - workflow_invocation: Mapped[Optional["WorkflowInvocation"]] = relationship( - "WorkflowInvocation", back_populates="input_step_parameters" - ) + workflow_step: 
Mapped[Optional["WorkflowStep"]] = relationship() + workflow_invocation: Mapped[Optional["WorkflowInvocation"]] = relationship(back_populates="input_step_parameters") dict_collection_visible_keys = ["id", "workflow_invocation_id", "workflow_step_id", "parameter_value"] @@ -9460,12 +9344,10 @@ class WorkflowInvocationOutputDatasetAssociation(Base, Dictifiable, Serializable dataset_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history_dataset_association.id"), index=True) workflow_output_id: Mapped[Optional[int]] = mapped_column(ForeignKey("workflow_output.id"), index=True) - workflow_invocation: Mapped[Optional["WorkflowInvocation"]] = relationship( - "WorkflowInvocation", back_populates="output_datasets" - ) - workflow_step: Mapped[Optional["WorkflowStep"]] = relationship("WorkflowStep") - dataset: Mapped[Optional["HistoryDatasetAssociation"]] = relationship("HistoryDatasetAssociation") - workflow_output: Mapped[Optional["WorkflowOutput"]] = relationship("WorkflowOutput") + workflow_invocation: Mapped[Optional["WorkflowInvocation"]] = relationship(back_populates="output_datasets") + workflow_step: Mapped[Optional["WorkflowStep"]] = relationship() + dataset: Mapped[Optional["HistoryDatasetAssociation"]] = relationship() + workflow_output: Mapped[Optional["WorkflowOutput"]] = relationship() history_content_type = "dataset" dict_collection_visible_keys = ["id", "workflow_invocation_id", "workflow_step_id", "dataset_id", "name"] @@ -9498,13 +9380,11 @@ class WorkflowInvocationOutputDatasetCollectionAssociation(Base, Dictifiable, Se ) workflow_invocation: Mapped[Optional["WorkflowInvocation"]] = relationship( - "WorkflowInvocation", back_populates="output_dataset_collections" - ) - workflow_step: Mapped[Optional["WorkflowStep"]] = relationship("WorkflowStep") - dataset_collection: Mapped[Optional["HistoryDatasetCollectionAssociation"]] = relationship( - "HistoryDatasetCollectionAssociation" + back_populates="output_dataset_collections" ) - workflow_output: 
Mapped[Optional["WorkflowOutput"]] = relationship("WorkflowOutput") + workflow_step: Mapped[Optional["WorkflowStep"]] = relationship() + dataset_collection: Mapped[Optional["HistoryDatasetCollectionAssociation"]] = relationship() + workflow_output: Mapped[Optional["WorkflowOutput"]] = relationship() history_content_type = "dataset_collection" dict_collection_visible_keys = ["id", "workflow_invocation_id", "workflow_step_id", "dataset_collection_id", "name"] @@ -9530,12 +9410,9 @@ class WorkflowInvocationOutputValue(Base, Dictifiable, Serializable): workflow_output_id: Mapped[Optional[int]] = mapped_column(ForeignKey("workflow_output.id"), index=True) value: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) - workflow_invocation: Mapped[Optional["WorkflowInvocation"]] = relationship( - "WorkflowInvocation", back_populates="output_values" - ) + workflow_invocation: Mapped[Optional["WorkflowInvocation"]] = relationship(back_populates="output_values") workflow_invocation_step: Mapped[Optional["WorkflowInvocationStep"]] = relationship( - "WorkflowInvocationStep", foreign_keys="[WorkflowInvocationStep.workflow_invocation_id, WorkflowInvocationStep.workflow_step_id]", primaryjoin=( lambda: and_( @@ -9547,8 +9424,8 @@ class WorkflowInvocationOutputValue(Base, Dictifiable, Serializable): viewonly=True, ) - workflow_step: Mapped[Optional["WorkflowStep"]] = relationship("WorkflowStep") - workflow_output: Mapped[Optional["WorkflowOutput"]] = relationship("WorkflowOutput") + workflow_step: Mapped[Optional["WorkflowStep"]] = relationship() + workflow_output: Mapped[Optional["WorkflowOutput"]] = relationship() dict_collection_visible_keys = ["id", "workflow_invocation_id", "workflow_step_id", "value"] @@ -9572,9 +9449,9 @@ class WorkflowInvocationStepOutputDatasetAssociation(Base, Dictifiable, Represen dataset_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history_dataset_association.id"), index=True) output_name: Mapped[Optional[str]] = mapped_column(String(255)) 
workflow_invocation_step: Mapped[Optional["WorkflowInvocationStep"]] = relationship( - "WorkflowInvocationStep", back_populates="output_datasets" + back_populates="output_datasets" ) - dataset: Mapped[Optional["HistoryDatasetAssociation"]] = relationship("HistoryDatasetAssociation") + dataset: Mapped[Optional["HistoryDatasetAssociation"]] = relationship() dict_collection_visible_keys = ["id", "workflow_invocation_step_id", "dataset_id", "output_name"] @@ -9597,11 +9474,9 @@ class WorkflowInvocationStepOutputDatasetCollectionAssociation(Base, Dictifiable output_name: Mapped[Optional[str]] = mapped_column(String(255)) workflow_invocation_step: Mapped[Optional["WorkflowInvocationStep"]] = relationship( - "WorkflowInvocationStep", back_populates="output_dataset_collections" - ) - dataset_collection: Mapped[Optional["HistoryDatasetCollectionAssociation"]] = relationship( - "HistoryDatasetCollectionAssociation" + back_populates="output_dataset_collections" ) + dataset_collection: Mapped[Optional["HistoryDatasetCollectionAssociation"]] = relationship() dict_collection_visible_keys = ["id", "workflow_invocation_step_id", "dataset_collection_id", "output_name"] @@ -9620,7 +9495,7 @@ class MetadataFile(Base, StorableObject, Serializable): deleted: Mapped[Optional[bool]] = mapped_column(index=True, default=False) purged: Mapped[Optional[bool]] = mapped_column(index=True, default=False) - history_dataset: Mapped[Optional["HistoryDatasetAssociation"]] = relationship("HistoryDatasetAssociation") + history_dataset: Mapped[Optional["HistoryDatasetAssociation"]] = relationship() library_dataset = relationship("LibraryDatasetDatasetAssociation") def __init__(self, dataset=None, name=None, uuid=None): @@ -9703,7 +9578,6 @@ class FormDefinition(Base, Dictifiable, RepresentById): type: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) layout: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) form_definition_current: Mapped["FormDefinitionCurrent"] = 
relationship( - "FormDefinitionCurrent", back_populates="forms", primaryjoin=(lambda: FormDefinitionCurrent.id == FormDefinition.form_definition_current_id), # type: ignore[has-type] ) @@ -9769,13 +9643,11 @@ class FormDefinitionCurrent(Base, RepresentById): latest_form_id: Mapped[Optional[int]] = mapped_column(ForeignKey("form_definition.id"), index=True) deleted: Mapped[Optional[bool]] = mapped_column(index=True, default=False) forms: Mapped[List["FormDefinition"]] = relationship( - "FormDefinition", back_populates="form_definition_current", cascade="all, delete-orphan", primaryjoin=(lambda: FormDefinitionCurrent.id == FormDefinition.form_definition_current_id), ) latest_form: Mapped[Optional["FormDefinition"]] = relationship( - "FormDefinition", post_update=True, primaryjoin=(lambda: FormDefinitionCurrent.latest_form_id == FormDefinition.id), ) @@ -9793,7 +9665,7 @@ class FormValues(Base, RepresentById): form_definition_id: Mapped[Optional[int]] = mapped_column(ForeignKey("form_definition.id"), index=True) content: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) form_definition: Mapped[Optional["FormDefinition"]] = relationship( - "FormDefinition", primaryjoin=(lambda: FormValues.form_definition_id == FormDefinition.id) + primaryjoin=(lambda: FormValues.form_definition_id == FormDefinition.id) ) def __init__(self, form_def=None, content=None): @@ -9821,9 +9693,7 @@ class UserAddress(Base, RepresentById): purged: Mapped[Optional[bool]] = mapped_column(index=True, default=False) # `desc` needs to be fully qualified because it is shadowed by `desc` Column defined above # TODO: db migration to rename column, then use `desc` - user: Mapped[Optional["User"]] = relationship( - "User", back_populates="addresses", order_by=sqlalchemy.desc("update_time") - ) + user: Mapped[Optional["User"]] = relationship(back_populates="addresses", order_by=sqlalchemy.desc("update_time")) def to_dict(self, trans): return { @@ -10024,7 +9894,7 @@ class UserAuthnzToken(Base, 
UserMixin, RepresentById): extra_data: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) lifetime: Mapped[Optional[int]] assoc_type: Mapped[Optional[str]] = mapped_column(VARCHAR(64)) - user: Mapped[Optional["User"]] = relationship("User", back_populates="social_auth") + user: Mapped[Optional["User"]] = relationship(back_populates="social_auth") # This static property is set at: galaxy.authnz.psa_authnz.PSAAuthnz sa_session = None @@ -10205,8 +10075,8 @@ class CloudAuthz(Base): last_activity: Mapped[Optional[datetime]] description: Mapped[Optional[str]] = mapped_column(TEXT) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) - user: Mapped[Optional["User"]] = relationship("User", back_populates="cloudauthz") - authn: Mapped[Optional["UserAuthnzToken"]] = relationship("UserAuthnzToken") + user: Mapped[Optional["User"]] = relationship(back_populates="cloudauthz") + authn: Mapped[Optional["UserAuthnzToken"]] = relationship() def __init__(self, user_id, provider, config, authn_id, description=None): self.user_id = user_id @@ -10244,33 +10114,28 @@ class Page(Base, HasTags, Dictifiable, RepresentById): importable: Mapped[Optional[bool]] = mapped_column(index=True, default=False) slug: Mapped[Optional[str]] = mapped_column(TEXT) published: Mapped[Optional[bool]] = mapped_column(index=True, default=False) - user: Mapped["User"] = relationship("User") + user: Mapped["User"] = relationship() revisions: Mapped[List["PageRevision"]] = relationship( - "PageRevision", cascade="all, delete-orphan", primaryjoin=(lambda: Page.id == PageRevision.page_id), # type: ignore[has-type] back_populates="page", ) latest_revision: Mapped[Optional["PageRevision"]] = relationship( - "PageRevision", post_update=True, primaryjoin=(lambda: Page.latest_revision_id == PageRevision.id), # type: ignore[has-type] lazy=False, ) tags: Mapped[List["PageTagAssociation"]] = relationship( - "PageTagAssociation", order_by=lambda: PageTagAssociation.id, back_populates="page" + 
order_by=lambda: PageTagAssociation.id, back_populates="page" ) annotations: Mapped[List["PageAnnotationAssociation"]] = relationship( - "PageAnnotationAssociation", order_by=lambda: PageAnnotationAssociation.id, back_populates="page" + order_by=lambda: PageAnnotationAssociation.id, back_populates="page" ) ratings: Mapped[List["PageRatingAssociation"]] = relationship( - "PageRatingAssociation", order_by=lambda: PageRatingAssociation.id, # type: ignore[has-type] back_populates="page", ) - users_shared_with: Mapped[List["PageUserShareAssociation"]] = relationship( - "PageUserShareAssociation", back_populates="page" - ) + users_shared_with: Mapped[List["PageUserShareAssociation"]] = relationship(back_populates="page") average_rating = None @@ -10322,7 +10187,7 @@ class PageRevision(Base, Dictifiable, RepresentById): title: Mapped[Optional[str]] = mapped_column(TEXT) content: Mapped[Optional[str]] = mapped_column(TEXT) content_format: Mapped[Optional[str]] = mapped_column(TrimmedString(32)) - page: Mapped["Page"] = relationship("Page", primaryjoin=(lambda: Page.id == PageRevision.page_id)) + page: Mapped["Page"] = relationship(primaryjoin=(lambda: Page.id == PageRevision.page_id)) DEFAULT_CONTENT_FORMAT = "html" dict_element_visible_keys = ["id", "page_id", "title", "content", "content_format"] @@ -10342,8 +10207,8 @@ class PageUserShareAssociation(Base, UserShareAssociation): id: Mapped[int] = mapped_column(primary_key=True) page_id: Mapped[Optional[int]] = mapped_column(ForeignKey("page.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) - user: Mapped[User] = relationship("User") - page: Mapped[Optional["Page"]] = relationship("Page", back_populates="users_shared_with") + user: Mapped[User] = relationship() + page: Mapped[Optional["Page"]] = relationship(back_populates="users_shared_with") class Visualization(Base, HasTags, Dictifiable, RepresentById): @@ -10369,36 +10234,30 @@ class Visualization(Base, HasTags, 
Dictifiable, RepresentById): slug: Mapped[Optional[str]] = mapped_column(TEXT) published: Mapped[Optional[bool]] = mapped_column(default=False, index=True) - user: Mapped["User"] = relationship("User") + user: Mapped["User"] = relationship() revisions: Mapped[List["VisualizationRevision"]] = relationship( - "VisualizationRevision", back_populates="visualization", cascade="all, delete-orphan", primaryjoin=(lambda: Visualization.id == VisualizationRevision.visualization_id), cascade_backrefs=False, ) latest_revision: Mapped[Optional["VisualizationRevision"]] = relationship( - "VisualizationRevision", post_update=True, primaryjoin=(lambda: Visualization.latest_revision_id == VisualizationRevision.id), lazy=False, ) tags: Mapped[List["VisualizationTagAssociation"]] = relationship( - "VisualizationTagAssociation", order_by=lambda: VisualizationTagAssociation.id, back_populates="visualization" + order_by=lambda: VisualizationTagAssociation.id, back_populates="visualization" ) annotations: Mapped[List["VisualizationAnnotationAssociation"]] = relationship( - "VisualizationAnnotationAssociation", order_by=lambda: VisualizationAnnotationAssociation.id, back_populates="visualization", ) ratings: Mapped[List["VisualizationRatingAssociation"]] = relationship( - "VisualizationRatingAssociation", order_by=lambda: VisualizationRatingAssociation.id, # type: ignore[has-type] back_populates="visualization", ) - users_shared_with: Mapped[List["VisualizationUserShareAssociation"]] = relationship( - "VisualizationUserShareAssociation", back_populates="visualization" - ) + users_shared_with: Mapped[List["VisualizationUserShareAssociation"]] = relationship(back_populates="visualization") average_rating = None @@ -10476,7 +10335,6 @@ class VisualizationRevision(Base, RepresentById): dbkey: Mapped[Optional[str]] = mapped_column(TEXT) config: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) visualization: Mapped["Visualization"] = relationship( - "Visualization", 
back_populates="revisions", primaryjoin=(lambda: Visualization.id == VisualizationRevision.visualization_id), ) @@ -10499,8 +10357,8 @@ class VisualizationUserShareAssociation(Base, UserShareAssociation): id: Mapped[int] = mapped_column(primary_key=True) visualization_id: Mapped[Optional[int]] = mapped_column(ForeignKey("visualization.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) - user: Mapped[User] = relationship("User") - visualization: Mapped[Optional["Visualization"]] = relationship("Visualization", back_populates="users_shared_with") + user: Mapped[User] = relationship() + visualization: Mapped[Optional["Visualization"]] = relationship(back_populates="users_shared_with") class Tag(Base, RepresentById): @@ -10511,8 +10369,8 @@ class Tag(Base, RepresentById): type: Mapped[Optional[int]] parent_id: Mapped[Optional[int]] = mapped_column(ForeignKey("tag.id")) name: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) - children: Mapped[List["Tag"]] = relationship("Tag", back_populates="parent") - parent: Mapped[Optional["Tag"]] = relationship("Tag", back_populates="children", remote_side=[id]) + children: Mapped[List["Tag"]] = relationship(back_populates="parent") + parent: Mapped[Optional["Tag"]] = relationship(back_populates="children", remote_side=[id]) def __str__(self): return "Tag(id=%s, type=%i, parent_id=%s, name=%s)" % (self.id, self.type or -1, self.parent_id, self.name) @@ -10548,9 +10406,9 @@ class HistoryTagAssociation(Base, ItemTagAssociation, RepresentById): user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) - history: Mapped[Optional["History"]] = relationship("History", back_populates="tags") - tag: Mapped[Optional["Tag"]] = relationship("Tag") - user: Mapped[Optional["User"]] = 
relationship("User") + history: Mapped[Optional["History"]] = relationship(back_populates="tags") + tag: Mapped[Optional["Tag"]] = relationship() + user: Mapped[Optional["User"]] = relationship() class HistoryDatasetAssociationTagAssociation(Base, ItemTagAssociation, RepresentById): @@ -10564,11 +10422,9 @@ class HistoryDatasetAssociationTagAssociation(Base, ItemTagAssociation, Represen user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) - history_dataset_association: Mapped[Optional["HistoryDatasetAssociation"]] = relationship( - "HistoryDatasetAssociation", back_populates="tags" - ) - tag: Mapped[Optional["Tag"]] = relationship("Tag") - user: Mapped[Optional["User"]] = relationship("User") + history_dataset_association: Mapped[Optional["HistoryDatasetAssociation"]] = relationship(back_populates="tags") + tag: Mapped[Optional["Tag"]] = relationship() + user: Mapped[Optional["User"]] = relationship() class LibraryDatasetDatasetAssociationTagAssociation(Base, ItemTagAssociation, RepresentById): @@ -10583,10 +10439,10 @@ class LibraryDatasetDatasetAssociationTagAssociation(Base, ItemTagAssociation, R user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) library_dataset_dataset_association: Mapped[Optional["LibraryDatasetDatasetAssociation"]] = relationship( - "LibraryDatasetDatasetAssociation", back_populates="tags" + back_populates="tags" ) - tag: Mapped[Optional["Tag"]] = relationship("Tag") - user: Mapped[Optional["User"]] = relationship("User") + tag: Mapped[Optional["Tag"]] = relationship() + user: Mapped[Optional["User"]] = relationship() class PageTagAssociation(Base, ItemTagAssociation, RepresentById): @@ -10598,9 +10454,9 @@ class PageTagAssociation(Base, 
ItemTagAssociation, RepresentById): user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) - page: Mapped[Optional["Page"]] = relationship("Page", back_populates="tags") - tag: Mapped[Optional["Tag"]] = relationship("Tag") - user: Mapped[Optional["User"]] = relationship("User") + page: Mapped[Optional["Page"]] = relationship(back_populates="tags") + tag: Mapped[Optional["Tag"]] = relationship() + user: Mapped[Optional["User"]] = relationship() class WorkflowStepTagAssociation(Base, ItemTagAssociation, RepresentById): @@ -10612,9 +10468,9 @@ class WorkflowStepTagAssociation(Base, ItemTagAssociation, RepresentById): user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) - workflow_step: Mapped[Optional["WorkflowStep"]] = relationship("WorkflowStep", back_populates="tags") - tag: Mapped[Optional["Tag"]] = relationship("Tag") - user: Mapped[Optional["User"]] = relationship("User") + workflow_step: Mapped[Optional["WorkflowStep"]] = relationship(back_populates="tags") + tag: Mapped[Optional["Tag"]] = relationship() + user: Mapped[Optional["User"]] = relationship() class StoredWorkflowTagAssociation(Base, ItemTagAssociation, RepresentById): @@ -10626,9 +10482,9 @@ class StoredWorkflowTagAssociation(Base, ItemTagAssociation, RepresentById): user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) - stored_workflow: Mapped[Optional["StoredWorkflow"]] = relationship("StoredWorkflow", back_populates="tags") - 
tag: Mapped[Optional["Tag"]] = relationship("Tag") - user: Mapped[Optional["User"]] = relationship("User") + stored_workflow: Mapped[Optional["StoredWorkflow"]] = relationship(back_populates="tags") + tag: Mapped[Optional["Tag"]] = relationship() + user: Mapped[Optional["User"]] = relationship() class VisualizationTagAssociation(Base, ItemTagAssociation, RepresentById): @@ -10640,9 +10496,9 @@ class VisualizationTagAssociation(Base, ItemTagAssociation, RepresentById): user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) - visualization: Mapped[Optional["Visualization"]] = relationship("Visualization", back_populates="tags") - tag: Mapped[Optional["Tag"]] = relationship("Tag") - user: Mapped[Optional["User"]] = relationship("User") + visualization: Mapped[Optional["Visualization"]] = relationship(back_populates="tags") + tag: Mapped[Optional["Tag"]] = relationship() + user: Mapped[Optional["User"]] = relationship() class HistoryDatasetCollectionTagAssociation(Base, ItemTagAssociation, RepresentById): @@ -10656,11 +10512,9 @@ class HistoryDatasetCollectionTagAssociation(Base, ItemTagAssociation, Represent user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) - dataset_collection: Mapped[Optional["HistoryDatasetCollectionAssociation"]] = relationship( - "HistoryDatasetCollectionAssociation", back_populates="tags" - ) - tag: Mapped[Optional["Tag"]] = relationship("Tag") - user: Mapped[Optional["User"]] = relationship("User") + dataset_collection: Mapped[Optional["HistoryDatasetCollectionAssociation"]] = relationship(back_populates="tags") + tag: Mapped[Optional["Tag"]] = relationship() + 
user: Mapped[Optional["User"]] = relationship() class LibraryDatasetCollectionTagAssociation(Base, ItemTagAssociation, RepresentById): @@ -10674,11 +10528,9 @@ class LibraryDatasetCollectionTagAssociation(Base, ItemTagAssociation, Represent user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) - dataset_collection: Mapped[Optional["LibraryDatasetCollectionAssociation"]] = relationship( - "LibraryDatasetCollectionAssociation", back_populates="tags" - ) - tag: Mapped[Optional["Tag"]] = relationship("Tag") - user: Mapped[Optional["User"]] = relationship("User") + dataset_collection: Mapped[Optional["LibraryDatasetCollectionAssociation"]] = relationship(back_populates="tags") + tag: Mapped[Optional["Tag"]] = relationship() + user: Mapped[Optional["User"]] = relationship() class ToolTagAssociation(Base, ItemTagAssociation, RepresentById): @@ -10690,8 +10542,8 @@ class ToolTagAssociation(Base, ItemTagAssociation, RepresentById): user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) - tag: Mapped[Optional["Tag"]] = relationship("Tag") - user: Mapped[Optional["User"]] = relationship("User") + tag: Mapped[Optional["Tag"]] = relationship() + user: Mapped[Optional["User"]] = relationship() # Item annotation classes. 
@@ -10703,8 +10555,8 @@ class HistoryAnnotationAssociation(Base, RepresentById): history_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) annotation: Mapped[Optional[str]] = mapped_column(TEXT) - history: Mapped[Optional["History"]] = relationship("History", back_populates="annotations") - user: Mapped[Optional["User"]] = relationship("User") + history: Mapped[Optional["History"]] = relationship(back_populates="annotations") + user: Mapped[Optional["User"]] = relationship() class HistoryDatasetAssociationAnnotationAssociation(Base, RepresentById): @@ -10717,10 +10569,8 @@ class HistoryDatasetAssociationAnnotationAssociation(Base, RepresentById): ) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) annotation: Mapped[Optional[str]] = mapped_column(TEXT) - hda: Mapped[Optional["HistoryDatasetAssociation"]] = relationship( - "HistoryDatasetAssociation", back_populates="annotations" - ) - user: Mapped[Optional["User"]] = relationship("User") + hda: Mapped[Optional["HistoryDatasetAssociation"]] = relationship(back_populates="annotations") + user: Mapped[Optional["User"]] = relationship() class StoredWorkflowAnnotationAssociation(Base, RepresentById): @@ -10731,8 +10581,8 @@ class StoredWorkflowAnnotationAssociation(Base, RepresentById): stored_workflow_id: Mapped[Optional[int]] = mapped_column(ForeignKey("stored_workflow.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) annotation: Mapped[Optional[str]] = mapped_column(TEXT) - stored_workflow: Mapped[Optional["StoredWorkflow"]] = relationship("StoredWorkflow", back_populates="annotations") - user: Mapped[Optional["User"]] = relationship("User") + stored_workflow: Mapped[Optional["StoredWorkflow"]] = relationship(back_populates="annotations") + user: Mapped[Optional["User"]] = relationship() class 
WorkflowStepAnnotationAssociation(Base, RepresentById): @@ -10743,8 +10593,8 @@ class WorkflowStepAnnotationAssociation(Base, RepresentById): workflow_step_id: Mapped[Optional[int]] = mapped_column(ForeignKey("workflow_step.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) annotation: Mapped[Optional[str]] = mapped_column(TEXT) - workflow_step: Mapped[Optional["WorkflowStep"]] = relationship("WorkflowStep", back_populates="annotations") - user: Mapped[Optional["User"]] = relationship("User") + workflow_step: Mapped[Optional["WorkflowStep"]] = relationship(back_populates="annotations") + user: Mapped[Optional["User"]] = relationship() class PageAnnotationAssociation(Base, RepresentById): @@ -10755,8 +10605,8 @@ class PageAnnotationAssociation(Base, RepresentById): page_id: Mapped[Optional[int]] = mapped_column(ForeignKey("page.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) annotation: Mapped[Optional[str]] = mapped_column(TEXT) - page: Mapped[Optional["Page"]] = relationship("Page", back_populates="annotations") - user: Mapped[Optional["User"]] = relationship("User") + page: Mapped[Optional["Page"]] = relationship(back_populates="annotations") + user: Mapped[Optional["User"]] = relationship() class VisualizationAnnotationAssociation(Base, RepresentById): @@ -10767,8 +10617,8 @@ class VisualizationAnnotationAssociation(Base, RepresentById): visualization_id: Mapped[Optional[int]] = mapped_column(ForeignKey("visualization.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) annotation: Mapped[Optional[str]] = mapped_column(TEXT) - visualization: Mapped[Optional["Visualization"]] = relationship("Visualization", back_populates="annotations") - user: Mapped[Optional["User"]] = relationship("User") + visualization: Mapped[Optional["Visualization"]] = relationship(back_populates="annotations") + user: 
Mapped[Optional["User"]] = relationship() class HistoryDatasetCollectionAssociationAnnotationAssociation(Base, RepresentById): @@ -10781,9 +10631,9 @@ class HistoryDatasetCollectionAssociationAnnotationAssociation(Base, RepresentBy user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) annotation: Mapped[Optional[str]] = mapped_column(TEXT) history_dataset_collection: Mapped[Optional["HistoryDatasetCollectionAssociation"]] = relationship( - "HistoryDatasetCollectionAssociation", back_populates="annotations" + back_populates="annotations" ) - user: Mapped[Optional["User"]] = relationship("User") + user: Mapped[Optional["User"]] = relationship() class LibraryDatasetCollectionAnnotationAssociation(Base, RepresentById): @@ -10796,9 +10646,9 @@ class LibraryDatasetCollectionAnnotationAssociation(Base, RepresentById): user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) annotation: Mapped[Optional[str]] = mapped_column(TEXT) dataset_collection: Mapped[Optional["LibraryDatasetCollectionAssociation"]] = relationship( - "LibraryDatasetCollectionAssociation", back_populates="annotations" + back_populates="annotations" ) - user: Mapped[Optional["User"]] = relationship("User") + user: Mapped[Optional["User"]] = relationship() class Vault(Base): @@ -10806,8 +10656,8 @@ class Vault(Base): key: Mapped[str] = mapped_column(Text, primary_key=True) parent_key: Mapped[Optional[str]] = mapped_column(Text, ForeignKey(key), index=True) - children: Mapped[List["Vault"]] = relationship("Vault", back_populates="parent") - parent: Mapped[Optional["Vault"]] = relationship("Vault", back_populates="children", remote_side=[key]) + children: Mapped[List["Vault"]] = relationship(back_populates="parent") + parent: Mapped[Optional["Vault"]] = relationship(back_populates="children", remote_side=[key]) value: Mapped[Optional[str]] = mapped_column(Text) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) 
update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) @@ -10834,8 +10684,8 @@ class HistoryRatingAssociation(ItemRatingAssociation, RepresentById): history_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) rating: Mapped[Optional[int]] = mapped_column(index=True) - history: Mapped[Optional["History"]] = relationship("History", back_populates="ratings") - user: Mapped[Optional["User"]] = relationship("User") + history: Mapped[Optional["History"]] = relationship(back_populates="ratings") + user: Mapped[Optional["User"]] = relationship() def _set_item(self, history): add_object_to_object_session(self, history) @@ -10851,10 +10701,8 @@ class HistoryDatasetAssociationRatingAssociation(ItemRatingAssociation, Represen ) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) rating: Mapped[Optional[int]] = mapped_column(index=True) - history_dataset_association: Mapped[Optional["HistoryDatasetAssociation"]] = relationship( - "HistoryDatasetAssociation", back_populates="ratings" - ) - user: Mapped[Optional["User"]] = relationship("User") + history_dataset_association: Mapped[Optional["HistoryDatasetAssociation"]] = relationship(back_populates="ratings") + user: Mapped[Optional["User"]] = relationship() def _set_item(self, history_dataset_association): add_object_to_object_session(self, history_dataset_association) @@ -10868,8 +10716,8 @@ class StoredWorkflowRatingAssociation(ItemRatingAssociation, RepresentById): stored_workflow_id: Mapped[Optional[int]] = mapped_column(ForeignKey("stored_workflow.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) rating: Mapped[Optional[int]] = mapped_column(index=True) - stored_workflow: Mapped[Optional["StoredWorkflow"]] = relationship("StoredWorkflow", back_populates="ratings") - user: 
Mapped[Optional["User"]] = relationship("User") + stored_workflow: Mapped[Optional["StoredWorkflow"]] = relationship(back_populates="ratings") + user: Mapped[Optional["User"]] = relationship() def _set_item(self, stored_workflow): add_object_to_object_session(self, stored_workflow) @@ -10883,8 +10731,8 @@ class PageRatingAssociation(ItemRatingAssociation, RepresentById): page_id: Mapped[Optional[int]] = mapped_column(ForeignKey("page.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) rating: Mapped[Optional[int]] = mapped_column(index=True) - page: Mapped[Optional["Page"]] = relationship("Page", back_populates="ratings") - user: Mapped[Optional["User"]] = relationship("User") + page: Mapped[Optional["Page"]] = relationship(back_populates="ratings") + user: Mapped[Optional["User"]] = relationship() def _set_item(self, page): add_object_to_object_session(self, page) @@ -10898,8 +10746,8 @@ class VisualizationRatingAssociation(ItemRatingAssociation, RepresentById): visualization_id: Mapped[Optional[int]] = mapped_column(ForeignKey("visualization.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) rating: Mapped[Optional[int]] = mapped_column(index=True) - visualization: Mapped[Optional["Visualization"]] = relationship("Visualization", back_populates="ratings") - user: Mapped[Optional["User"]] = relationship("User") + visualization: Mapped[Optional["Visualization"]] = relationship(back_populates="ratings") + user: Mapped[Optional["User"]] = relationship() def _set_item(self, visualization): add_object_to_object_session(self, visualization) @@ -10915,10 +10763,8 @@ class HistoryDatasetCollectionRatingAssociation(ItemRatingAssociation, Represent ) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) rating: Mapped[Optional[int]] = mapped_column(index=True) - dataset_collection: 
Mapped[Optional["HistoryDatasetCollectionAssociation"]] = relationship( - "HistoryDatasetCollectionAssociation", back_populates="ratings" - ) - user: Mapped[Optional["User"]] = relationship("User") + dataset_collection: Mapped[Optional["HistoryDatasetCollectionAssociation"]] = relationship(back_populates="ratings") + user: Mapped[Optional["User"]] = relationship() def _set_item(self, dataset_collection): add_object_to_object_session(self, dataset_collection) @@ -10934,10 +10780,8 @@ class LibraryDatasetCollectionRatingAssociation(ItemRatingAssociation, Represent ) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) rating: Mapped[Optional[int]] = mapped_column(index=True) - dataset_collection: Mapped[Optional["LibraryDatasetCollectionAssociation"]] = relationship( - "LibraryDatasetCollectionAssociation", back_populates="ratings" - ) - user: Mapped[Optional["User"]] = relationship("User") + dataset_collection: Mapped[Optional["LibraryDatasetCollectionAssociation"]] = relationship(back_populates="ratings") + user: Mapped[Optional["User"]] = relationship() def _set_item(self, dataset_collection): add_object_to_object_session(self, dataset_collection) @@ -10953,8 +10797,8 @@ class DataManagerHistoryAssociation(Base, RepresentById): update_time: Mapped[datetime] = mapped_column(index=True, default=now, onupdate=now, nullable=True) history_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) - history: Mapped[Optional["History"]] = relationship("History") - user: Mapped[Optional["User"]] = relationship("User", back_populates="data_manager_histories") + history: Mapped[Optional["History"]] = relationship() + user: Mapped[Optional["User"]] = relationship(back_populates="data_manager_histories") class DataManagerJobAssociation(Base, RepresentById): @@ -10966,7 +10810,7 @@ class DataManagerJobAssociation(Base, 
RepresentById): update_time: Mapped[datetime] = mapped_column(index=True, default=now, onupdate=now, nullable=True) job_id: Mapped[Optional[int]] = mapped_column(ForeignKey("job.id"), index=True) data_manager_id: Mapped[Optional[str]] = mapped_column(TEXT) - job: Mapped[Optional["Job"]] = relationship("Job", back_populates="data_manager_association", uselist=False) + job: Mapped[Optional["Job"]] = relationship(back_populates="data_manager_association", uselist=False) class UserPreference(Base, RepresentById): @@ -10994,7 +10838,7 @@ class UserAction(Base, RepresentById): action: Mapped[Optional[str]] = mapped_column(Unicode(255)) context: Mapped[Optional[str]] = mapped_column(Unicode(512)) params: Mapped[Optional[str]] = mapped_column(Unicode(1024)) - user: Mapped[Optional["User"]] = relationship("User") + user: Mapped[Optional["User"]] = relationship() class APIKeys(Base, RepresentById): @@ -11004,7 +10848,7 @@ class APIKeys(Base, RepresentById): create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) key: Mapped[Optional[str]] = mapped_column(TrimmedString(32), index=True, unique=True) - user: Mapped[Optional["User"]] = relationship("User", back_populates="api_keys") + user: Mapped[Optional["User"]] = relationship(back_populates="api_keys") deleted: Mapped[bool] = mapped_column(index=True, server_default=false()) From e5f422d12eea9ec515863013900b70e754b80ea2 Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 11 Apr 2024 10:52:16 -0400 Subject: [PATCH 515/669] Correct types in UserGroupAssoc (see note) foo and bar in a foo-bar-association model cannot be null. However, in our db schema, they usually are. To fix this, we type them as mandatory, adding nullable=True to the column definition, which preserves the db schema. 
--- lib/galaxy/model/__init__.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 63a495d5a414..814838c8141e 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -2874,7 +2874,7 @@ class Group(Base, Dictifiable, RepresentById): deleted: Mapped[Optional[bool]] = mapped_column(index=True, default=False) quotas: Mapped[List["GroupQuotaAssociation"]] = relationship(back_populates="group") roles: Mapped[List["GroupRoleAssociation"]] = relationship(back_populates="group", cascade_backrefs=False) - users = relationship("UserGroupAssociation", back_populates="group") + users: Mapped[List["UserGroupAssociation"]] = relationship("UserGroupAssociation", back_populates="group") dict_collection_visible_keys = ["id", "name"] dict_element_visible_keys = ["id", "name"] @@ -2888,12 +2888,12 @@ class UserGroupAssociation(Base, RepresentById): __tablename__ = "user_group_association" id: Mapped[int] = mapped_column(primary_key=True) - user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) - group_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_group.id"), index=True) + user_id: Mapped[int] = mapped_column(ForeignKey("galaxy_user.id"), index=True, nullable=True) + group_id: Mapped[int] = mapped_column(ForeignKey("galaxy_group.id"), index=True, nullable=True) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) - user: Mapped[Optional["User"]] = relationship(back_populates="groups") - group: Mapped[Optional["Group"]] = relationship(back_populates="users") + user: Mapped["User"] = relationship(back_populates="groups") + group: Mapped["Group"] = relationship(back_populates="users") def __init__(self, user, group): add_object_to_object_session(self, user) From e4bad49ea2d7d87e875a366fd0a60e4ad310e2d3 Mon Sep 17 
00:00:00 2001 From: John Davis Date: Thu, 11 Apr 2024 10:58:51 -0400 Subject: [PATCH 516/669] Correct types in JobToInputDatasetAssociation (optional/nullable) --- lib/galaxy/model/__init__.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 814838c8141e..3837bad9928e 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -2310,14 +2310,12 @@ class JobToInputDatasetAssociation(Base, RepresentById): __tablename__ = "job_to_input_dataset" id: Mapped[int] = mapped_column(primary_key=True) - job_id: Mapped[Optional[int]] = mapped_column(ForeignKey("job.id"), index=True) - dataset_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history_dataset_association.id"), index=True) + job_id: Mapped[int] = mapped_column(ForeignKey("job.id"), index=True, nullable=True) + dataset_id: Mapped[int] = mapped_column(ForeignKey("history_dataset_association.id"), index=True, nullable=True) dataset_version: Mapped[Optional[int]] name: Mapped[Optional[str]] = mapped_column(String(255)) - dataset: Mapped[Optional["HistoryDatasetAssociation"]] = relationship( - lazy="joined", back_populates="dependent_jobs" - ) - job: Mapped[Optional["Job"]] = relationship(back_populates="input_datasets") + dataset: Mapped["HistoryDatasetAssociation"] = relationship(lazy="joined", back_populates="dependent_jobs") + job: Mapped["Job"] = relationship(back_populates="input_datasets") def __init__(self, name, dataset): self.name = name From 53654e8316512b961de2e46dbb2f9e2c8c35e8a7 Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 11 Apr 2024 22:11:37 -0400 Subject: [PATCH 517/669] Correct types in JobToOutputDatasetAssociation (optional/nullable) --- lib/galaxy/model/__init__.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 3837bad9928e..a54e7630a558 100644 --- a/lib/galaxy/model/__init__.py +++ 
b/lib/galaxy/model/__init__.py @@ -2328,13 +2328,13 @@ class JobToOutputDatasetAssociation(Base, RepresentById): __tablename__ = "job_to_output_dataset" id: Mapped[int] = mapped_column(primary_key=True) - job_id: Mapped[Optional[int]] = mapped_column(ForeignKey("job.id"), index=True) - dataset_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history_dataset_association.id"), index=True) + job_id: Mapped[int] = mapped_column(ForeignKey("job.id"), index=True, nullable=True) + dataset_id: Mapped[int] = mapped_column(ForeignKey("history_dataset_association.id"), index=True, nullable=True) name: Mapped[Optional[str]] = mapped_column(String(255)) - dataset: Mapped[Optional["HistoryDatasetAssociation"]] = relationship( + dataset: Mapped["HistoryDatasetAssociation"] = relationship( lazy="joined", back_populates="creating_job_associations" ) - job: Mapped[Optional["Job"]] = relationship(back_populates="output_datasets") + job: Mapped["Job"] = relationship(back_populates="output_datasets") def __init__(self, name, dataset): self.name = name From 35149a05fdc146e7474d69d5ede2615d7c5b044f Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 11 Apr 2024 22:13:35 -0400 Subject: [PATCH 518/669] Correct types in JobToInputtDatasetCollectionAssociation (optional/nullable) --- lib/galaxy/model/__init__.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index a54e7630a558..07d36763dcfc 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -2350,13 +2350,13 @@ class JobToInputDatasetCollectionAssociation(Base, RepresentById): __tablename__ = "job_to_input_dataset_collection" id: Mapped[int] = mapped_column(primary_key=True) - job_id: Mapped[Optional[int]] = mapped_column(ForeignKey("job.id"), index=True) - dataset_collection_id: Mapped[Optional[int]] = mapped_column( - ForeignKey("history_dataset_collection_association.id"), index=True + job_id: Mapped[int] = 
mapped_column(ForeignKey("job.id"), index=True, nullable=True) + dataset_collection_id: Mapped[int] = mapped_column( + ForeignKey("history_dataset_collection_association.id"), index=True, nullable=True ) name: Mapped[Optional[str]] = mapped_column(String(255)) - dataset_collection: Mapped[Optional["HistoryDatasetCollectionAssociation"]] = relationship(lazy="joined") - job: Mapped[Optional["Job"]] = relationship(back_populates="input_dataset_collections") + dataset_collection: Mapped["HistoryDatasetCollectionAssociation"] = relationship(lazy="joined") + job: Mapped["Job"] = relationship(back_populates="input_dataset_collections") def __init__(self, name, dataset_collection): self.name = name From 65de14a505f533184ec5cf8157bb58bafd3cc03d Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 11 Apr 2024 22:15:21 -0400 Subject: [PATCH 519/669] Correct types in JobToOutputDatasetCollectionAssociation (optional/nullable) --- lib/galaxy/model/__init__.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 07d36763dcfc..97b391328fd4 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -2386,13 +2386,13 @@ class JobToOutputDatasetCollectionAssociation(Base, RepresentById): __tablename__ = "job_to_output_dataset_collection" id: Mapped[int] = mapped_column(primary_key=True) - job_id: Mapped[Optional[int]] = mapped_column(ForeignKey("job.id"), index=True) - dataset_collection_id: Mapped[Optional[int]] = mapped_column( - ForeignKey("history_dataset_collection_association.id"), index=True + job_id: Mapped[int] = mapped_column(ForeignKey("job.id"), index=True, nullable=True) + dataset_collection_id: Mapped[int] = mapped_column( + ForeignKey("history_dataset_collection_association.id"), index=True, nullable=True ) name: Mapped[Optional[str]] = mapped_column(Unicode(255)) - dataset_collection_instance: Mapped[Optional["HistoryDatasetCollectionAssociation"]] = 
relationship(lazy="joined") - job: Mapped[Optional["Job"]] = relationship(back_populates="output_dataset_collection_instances") + dataset_collection_instance: Mapped["HistoryDatasetCollectionAssociation"] = relationship(lazy="joined") + job: Mapped["Job"] = relationship(back_populates="output_dataset_collection_instances") def __init__(self, name, dataset_collection_instance): self.name = name From cc76654cb7f9ee4e2da10d3ee2ec27c9cc9e6a03 Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 11 Apr 2024 22:16:41 -0400 Subject: [PATCH 520/669] Correct types in JobToInputtDatasetCollectionElementAssociation (optional/nullable) --- lib/galaxy/model/__init__.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 97b391328fd4..97ae5df15e43 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -2367,13 +2367,13 @@ class JobToInputDatasetCollectionElementAssociation(Base, RepresentById): __tablename__ = "job_to_input_dataset_collection_element" id: Mapped[int] = mapped_column(primary_key=True) - job_id: Mapped[Optional[int]] = mapped_column(ForeignKey("job.id"), index=True) - dataset_collection_element_id: Mapped[Optional[int]] = mapped_column( - ForeignKey("dataset_collection_element.id"), index=True + job_id: Mapped[int] = mapped_column(ForeignKey("job.id"), index=True, nullable=True) + dataset_collection_element_id: Mapped[int] = mapped_column( + ForeignKey("dataset_collection_element.id"), index=True, nullable=True ) name: Mapped[Optional[str]] = mapped_column(Unicode(255)) - dataset_collection_element: Mapped[Optional["DatasetCollectionElement"]] = relationship(lazy="joined") - job: Mapped[Optional["Job"]] = relationship(back_populates="input_dataset_collection_elements") + dataset_collection_element: Mapped["DatasetCollectionElement"] = relationship(lazy="joined") + job: Mapped["Job"] = relationship(back_populates="input_dataset_collection_elements") 
def __init__(self, name, dataset_collection_element): self.name = name From 6bd8b819618aa6a041830d71766a1776afefdf62 Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 11 Apr 2024 22:18:37 -0400 Subject: [PATCH 521/669] Correct types in JobToImplicitOutputDatasetCollectionAssociation (optional/nullable) --- lib/galaxy/model/__init__.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 97ae5df15e43..eeb77fecc601 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -2410,11 +2410,11 @@ class JobToImplicitOutputDatasetCollectionAssociation(Base, RepresentById): __tablename__ = "job_to_implicit_output_dataset_collection" id: Mapped[int] = mapped_column(primary_key=True) - job_id: Mapped[Optional[int]] = mapped_column(ForeignKey("job.id"), index=True) - dataset_collection_id: Mapped[Optional[int]] = mapped_column(ForeignKey("dataset_collection.id"), index=True) + job_id: Mapped[int] = mapped_column(ForeignKey("job.id"), index=True, nullable=True) + dataset_collection_id: Mapped[int] = mapped_column(ForeignKey("dataset_collection.id"), index=True, nullable=True) name: Mapped[Optional[str]] = mapped_column(Unicode(255)) - dataset_collection: Mapped[Optional["DatasetCollection"]] = relationship() - job: Mapped[Optional["Job"]] = relationship(back_populates="output_dataset_collections") + dataset_collection: Mapped["DatasetCollection"] = relationship() + job: Mapped["Job"] = relationship(back_populates="output_dataset_collections") def __init__(self, name, dataset_collection): self.name = name From b4073e1aca333ce8cb514aa8da64d9fba04df0cc Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 11 Apr 2024 22:20:08 -0400 Subject: [PATCH 522/669] Correct types in JobToInputLibraryDatasetAssociation (optional/nullable) --- lib/galaxy/model/__init__.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/lib/galaxy/model/__init__.py 
b/lib/galaxy/model/__init__.py index eeb77fecc601..6a2eedac5ae8 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -2425,13 +2425,13 @@ class JobToInputLibraryDatasetAssociation(Base, RepresentById): __tablename__ = "job_to_input_library_dataset" id: Mapped[int] = mapped_column(primary_key=True) - job_id: Mapped[Optional[int]] = mapped_column(ForeignKey("job.id"), index=True) - ldda_id: Mapped[Optional[int]] = mapped_column(ForeignKey("library_dataset_dataset_association.id"), index=True) - name: Mapped[Optional[str]] = mapped_column(Unicode(255)) - job: Mapped[Optional["Job"]] = relationship(back_populates="input_library_datasets") - dataset: Mapped[Optional["LibraryDatasetDatasetAssociation"]] = relationship( - lazy="joined", back_populates="dependent_jobs" + job_id: Mapped[int] = mapped_column(ForeignKey("job.id"), index=True, nullable=True) + ldda_id: Mapped[int] = mapped_column( + ForeignKey("library_dataset_dataset_association.id"), index=True, nullable=True ) + name: Mapped[Optional[str]] = mapped_column(Unicode(255)) + job: Mapped["Job"] = relationship(back_populates="input_library_datasets") + dataset: Mapped["LibraryDatasetDatasetAssociation"] = relationship(lazy="joined", back_populates="dependent_jobs") def __init__(self, name, dataset): self.name = name From 0a1d3d3df4784a55bb295d06fab128ad7c6c8d38 Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 11 Apr 2024 22:21:20 -0400 Subject: [PATCH 523/669] Correct types in JobToOutputLibraryDatasetAssociation (optional/nullable) --- lib/galaxy/model/__init__.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 6a2eedac5ae8..482deb9d52c3 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -2443,11 +2443,13 @@ class JobToOutputLibraryDatasetAssociation(Base, RepresentById): __tablename__ = "job_to_output_library_dataset" id: Mapped[int] = 
mapped_column(primary_key=True) - job_id: Mapped[Optional[int]] = mapped_column(ForeignKey("job.id"), index=True) - ldda_id: Mapped[Optional[int]] = mapped_column(ForeignKey("library_dataset_dataset_association.id"), index=True) + job_id: Mapped[int] = mapped_column(ForeignKey("job.id"), index=True, nullable=True) + ldda_id: Mapped[int] = mapped_column( + ForeignKey("library_dataset_dataset_association.id"), index=True, nullable=True + ) name: Mapped[Optional[str]] = mapped_column(Unicode(255)) - job: Mapped[Optional["Job"]] = relationship(back_populates="output_library_datasets") - dataset: Mapped[Optional["LibraryDatasetDatasetAssociation"]] = relationship( + job: Mapped["Job"] = relationship(back_populates="output_library_datasets") + dataset: Mapped["LibraryDatasetDatasetAssociation"] = relationship( lazy="joined", back_populates="creating_job_associations" ) From de054fbb26f089fe98ef4372570945dd8808a188 Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 11 Apr 2024 22:26:31 -0400 Subject: [PATCH 524/669] Correct types in ImplicitCollectionJobsJobAssociation (optional/nullable) --- lib/galaxy/model/__init__.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 482deb9d52c3..4974a05adac9 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -2533,13 +2533,15 @@ class ImplicitCollectionJobsJobAssociation(Base, RepresentById): __tablename__ = "implicit_collection_jobs_job_association" id: Mapped[int] = mapped_column(primary_key=True) - implicit_collection_jobs_id: Mapped[Optional[int]] = mapped_column( - ForeignKey("implicit_collection_jobs.id"), index=True + implicit_collection_jobs_id: Mapped[int] = mapped_column( + ForeignKey("implicit_collection_jobs.id"), index=True, nullable=True ) - job_id: Mapped[Optional[int]] = mapped_column(ForeignKey("job.id"), index=True) # Consider making this nullable... 
+ job_id: Mapped[int] = mapped_column( + ForeignKey("job.id"), index=True, nullable=True + ) # Consider making this nullable... order_index: Mapped[int] implicit_collection_jobs = relationship("ImplicitCollectionJobs", back_populates="jobs") - job: Mapped[Optional["Job"]] = relationship(back_populates="implicit_collection_jobs_association") + job: Mapped["Job"] = relationship(back_populates="implicit_collection_jobs_association") class PostJobAction(Base, RepresentById): From a45197966b6ceffd6192dd0fa2c3d9c1dc101ff1 Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 11 Apr 2024 22:32:31 -0400 Subject: [PATCH 525/669] Correct types in JobContainerAssociation (optional/nullable) --- lib/galaxy/model/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 4974a05adac9..d8015db1ef4a 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -2759,13 +2759,13 @@ class JobContainerAssociation(Base, RepresentById): __tablename__ = "job_container_association" id: Mapped[int] = mapped_column(primary_key=True) - job_id: Mapped[Optional[int]] = mapped_column(ForeignKey("job.id"), index=True) + job_id: Mapped[int] = mapped_column(ForeignKey("job.id"), index=True, nullable=True) container_type: Mapped[Optional[str]] = mapped_column(TEXT) container_name: Mapped[Optional[str]] = mapped_column(TEXT) container_info: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) created_time: Mapped[Optional[datetime]] = mapped_column(default=now) modified_time: Mapped[Optional[datetime]] = mapped_column(default=now, onupdate=now) - job: Mapped[Optional["Job"]] = relationship(back_populates="container") + job: Mapped["Job"] = relationship(back_populates="container") def __init__(self, **kwd): if "job" in kwd: From 54d24763b771147fee7b0d56212e904dc93fc0f5 Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 11 Apr 2024 22:33:31 -0400 Subject: [PATCH 526/669] Correct types in 
UserNotificationAssociation (optional/nullable) --- lib/galaxy/model/__init__.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index d8015db1ef4a..226a675559e8 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -2941,14 +2941,14 @@ class UserNotificationAssociation(Base, RepresentById): __tablename__ = "user_notification_association" id: Mapped[int] = mapped_column(primary_key=True) - user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) - notification_id: Mapped[Optional[int]] = mapped_column(ForeignKey("notification.id"), index=True) + user_id: Mapped[int] = mapped_column(ForeignKey("galaxy_user.id"), index=True, nullable=True) + notification_id: Mapped[int] = mapped_column(ForeignKey("notification.id"), index=True, nullable=True) seen_time: Mapped[Optional[datetime]] deleted: Mapped[Optional[bool]] = mapped_column(index=True, default=False) update_time: Mapped[Optional[datetime]] = mapped_column(default=now, onupdate=now) - user: Mapped[Optional["User"]] = relationship(back_populates="all_notifications") - notification: Mapped[Optional["Notification"]] = relationship(back_populates="user_notification_associations") + user: Mapped["User"] = relationship(back_populates="all_notifications") + notification: Mapped["Notification"] = relationship(back_populates="user_notification_associations") def __init__(self, user, notification): self.user = user From a8d8dea8a010e69b7987e117e035746311d12c30 Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 11 Apr 2024 22:41:01 -0400 Subject: [PATCH 527/669] Correct types in HistoryUserShareAssociation (optional/nullable) --- lib/galaxy/model/__init__.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 226a675559e8..98e376788518 100644 --- a/lib/galaxy/model/__init__.py +++ 
b/lib/galaxy/model/__init__.py @@ -3587,10 +3587,10 @@ class HistoryUserShareAssociation(Base, UserShareAssociation): __tablename__ = "history_user_share_association" id: Mapped[int] = mapped_column(primary_key=True) - history_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history.id"), index=True) - user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) - user: Mapped[User] = relationship() - history: Mapped[Optional["History"]] = relationship(back_populates="users_shared_with") + history_id: Mapped[int] = mapped_column(ForeignKey("history.id"), index=True, nullable=True) + user_id: Mapped[int] = mapped_column(ForeignKey("galaxy_user.id"), index=True, nullable=True) + user: Mapped["User"] = relationship() + history: Mapped["History"] = relationship(back_populates="users_shared_with") class UserRoleAssociation(Base, RepresentById): From 48c4d1a9575c67768f4001fbbc84a735e5665da9 Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 11 Apr 2024 22:42:12 -0400 Subject: [PATCH 528/669] Correct types in UserRoleAssociation (optional/nullable) --- lib/galaxy/model/__init__.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 98e376788518..86ed2c20e887 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -3597,13 +3597,13 @@ class UserRoleAssociation(Base, RepresentById): __tablename__ = "user_role_association" id: Mapped[int] = mapped_column(primary_key=True) - user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) - role_id: Mapped[Optional[int]] = mapped_column(ForeignKey("role.id"), index=True) + user_id: Mapped[int] = mapped_column(ForeignKey("galaxy_user.id"), index=True, nullable=True) + role_id: Mapped[int] = mapped_column(ForeignKey("role.id"), index=True, nullable=True) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) update_time: Mapped[datetime] = 
mapped_column(default=now, onupdate=now, nullable=True) - user: Mapped[Optional["User"]] = relationship(back_populates="roles") - role: Mapped[Optional["Role"]] = relationship(back_populates="users") + user: Mapped["User"] = relationship(back_populates="roles") + role: Mapped["Role"] = relationship(back_populates="users") def __init__(self, user, role): add_object_to_object_session(self, user) From e32c857afb48577df0fff8002d919e56c8a23d7b Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 11 Apr 2024 22:43:15 -0400 Subject: [PATCH 529/669] Correct types in GroupRoleAssociation (optional/nullable) --- lib/galaxy/model/__init__.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 86ed2c20e887..dde288135691 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -3615,12 +3615,12 @@ class GroupRoleAssociation(Base, RepresentById): __tablename__ = "group_role_association" id: Mapped[int] = mapped_column(primary_key=True) - group_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_group.id"), index=True) - role_id: Mapped[Optional[int]] = mapped_column(ForeignKey("role.id"), index=True) + group_id: Mapped[int] = mapped_column(ForeignKey("galaxy_group.id"), index=True, nullable=True) + role_id: Mapped[int] = mapped_column(ForeignKey("role.id"), index=True, nullable=True) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) - group: Mapped[Optional["Group"]] = relationship(back_populates="roles") - role: Mapped[Optional["Role"]] = relationship(back_populates="groups") + group: Mapped["Group"] = relationship(back_populates="roles") + role: Mapped["Role"] = relationship(back_populates="groups") def __init__(self, group, role): self.group = group From cdc7c99980e9c7e65f92f6b261bb7366bbd24928 Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 
11 Apr 2024 22:44:14 -0400 Subject: [PATCH 530/669] Correct types in UserQuotaAssociation (optional/nullable) --- lib/galaxy/model/__init__.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index dde288135691..3d274c704607 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -3678,12 +3678,12 @@ class UserQuotaAssociation(Base, Dictifiable, RepresentById): __tablename__ = "user_quota_association" id: Mapped[int] = mapped_column(primary_key=True) - user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) - quota_id: Mapped[Optional[int]] = mapped_column(ForeignKey("quota.id"), index=True) + user_id: Mapped[int] = mapped_column(ForeignKey("galaxy_user.id"), index=True, nullable=True) + quota_id: Mapped[int] = mapped_column(ForeignKey("quota.id"), index=True, nullable=True) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) - user: Mapped[Optional["User"]] = relationship(back_populates="quotas") - quota: Mapped[Optional["Quota"]] = relationship(back_populates="users") + user: Mapped["User"] = relationship(back_populates="quotas") + quota: Mapped["Quota"] = relationship(back_populates="users") dict_element_visible_keys = ["user"] From 985c2f6eb70b7fd9c1806b057868044c7cf61625 Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 11 Apr 2024 22:45:10 -0400 Subject: [PATCH 531/669] Correct types in GroupQuotaAssociation (optional/nullable) --- lib/galaxy/model/__init__.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 3d274c704607..0ec5e1dd9bd2 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -3697,12 +3697,12 @@ class GroupQuotaAssociation(Base, Dictifiable, RepresentById): __tablename__ = 
"group_quota_association" id: Mapped[int] = mapped_column(primary_key=True) - group_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_group.id"), index=True) - quota_id: Mapped[Optional[int]] = mapped_column(ForeignKey("quota.id"), index=True) + group_id: Mapped[int] = mapped_column(ForeignKey("galaxy_group.id"), index=True, nullable=True) + quota_id: Mapped[int] = mapped_column(ForeignKey("quota.id"), index=True, nullable=True) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) - group: Mapped[Optional["Group"]] = relationship(back_populates="quotas") - quota: Mapped[Optional["Quota"]] = relationship(back_populates="groups") + group: Mapped["Group"] = relationship(back_populates="quotas") + quota: Mapped["Quota"] = relationship(back_populates="groups") dict_element_visible_keys = ["group"] From 3a8ae7260d0730904f9a234424ae536610ba6eb8 Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 11 Apr 2024 22:46:46 -0400 Subject: [PATCH 532/669] Correct types in DefaultQuotaAssociation (optional/nullable) --- lib/galaxy/model/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 0ec5e1dd9bd2..a1ffac8dfbc2 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -3782,8 +3782,8 @@ class DefaultQuotaAssociation(Base, Dictifiable, RepresentById): create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) type: Mapped[Optional[str]] = mapped_column(String(32)) - quota_id: Mapped[Optional[int]] = mapped_column(ForeignKey("quota.id"), index=True) - quota: Mapped[Optional["Quota"]] = relationship(back_populates="default") + quota_id: Mapped[int] = mapped_column(ForeignKey("quota.id"), index=True, nullable=True) + quota: Mapped["Quota"] = 
relationship(back_populates="default") dict_element_visible_keys = ["type"] From d29d5c2567358431655a38453f274b5694c0dd17 Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 11 Apr 2024 22:49:13 -0400 Subject: [PATCH 533/669] Correct types in LibraryDatasetDatasetAssociationPermissions (optional/nullable) --- lib/galaxy/model/__init__.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index a1ffac8dfbc2..d1a679d53eca 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -3893,11 +3893,11 @@ class LibraryDatasetDatasetAssociationPermissions(Base, RepresentById): create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) action: Mapped[Optional[str]] = mapped_column(TEXT) - library_dataset_dataset_association_id: Mapped[Optional[int]] = mapped_column( - ForeignKey("library_dataset_dataset_association.id"), index=True + library_dataset_dataset_association_id: Mapped[int] = mapped_column( + ForeignKey("library_dataset_dataset_association.id"), index=True, nullable=True ) role_id: Mapped[Optional[int]] = mapped_column(ForeignKey("role.id"), index=True) - library_dataset_dataset_association: Mapped[Optional["LibraryDatasetDatasetAssociation"]] = relationship( + library_dataset_dataset_association: Mapped["LibraryDatasetDatasetAssociation"] = relationship( back_populates="actions" ) role: Mapped[Optional["Role"]] = relationship() From 21b24dd4e9e32520a072a03264673ac463ca9be8 Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 11 Apr 2024 22:53:44 -0400 Subject: [PATCH 534/669] Correct types in StoredWorkflowUserShareAssociation (optional/nullable) --- lib/galaxy/model/__init__.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index d1a679d53eca..07b8cbf8f34e 100644 --- 
a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -8317,10 +8317,10 @@ class StoredWorkflowUserShareAssociation(Base, UserShareAssociation): __tablename__ = "stored_workflow_user_share_connection" id: Mapped[int] = mapped_column(primary_key=True) - stored_workflow_id: Mapped[Optional[int]] = mapped_column(ForeignKey("stored_workflow.id"), index=True) - user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) + stored_workflow_id: Mapped[int] = mapped_column(ForeignKey("stored_workflow.id"), index=True, nullable=True) + user_id: Mapped[int] = mapped_column(ForeignKey("galaxy_user.id"), index=True, nullable=True) user: Mapped[User] = relationship() - stored_workflow: Mapped[Optional["StoredWorkflow"]] = relationship(back_populates="users_shared_with") + stored_workflow: Mapped["StoredWorkflow"] = relationship(back_populates="users_shared_with") class StoredWorkflowMenuEntry(Base, RepresentById): From fb32618722056d5f9562cdfbb100c68244d3589c Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 11 Apr 2024 22:56:37 -0400 Subject: [PATCH 535/669] Correct types in PageUserShareAssociation (optional/nullable) --- lib/galaxy/model/__init__.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 07b8cbf8f34e..013654a04904 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -10207,10 +10207,10 @@ class PageUserShareAssociation(Base, UserShareAssociation): __tablename__ = "page_user_share_association" id: Mapped[int] = mapped_column(primary_key=True) - page_id: Mapped[Optional[int]] = mapped_column(ForeignKey("page.id"), index=True) - user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) + page_id: Mapped[int] = mapped_column(ForeignKey("page.id"), index=True, nullable=True) + user_id: Mapped[int] = mapped_column(ForeignKey("galaxy_user.id"), index=True, nullable=True) user: 
Mapped[User] = relationship() - page: Mapped[Optional["Page"]] = relationship(back_populates="users_shared_with") + page: Mapped["Page"] = relationship(back_populates="users_shared_with") class Visualization(Base, HasTags, Dictifiable, RepresentById): From 6c2026849a6f20f91215dcc70f1fc238fa19f8c5 Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 11 Apr 2024 10:26:09 -0400 Subject: [PATCH 536/669] Add+fix types in CustosAuthnzToken.user --- lib/galaxy/model/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 013654a04904..9d23043551a6 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -10053,7 +10053,7 @@ class CustosAuthnzToken(Base, RepresentById): ) id: Mapped[int] = mapped_column(primary_key=True) - user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id")) + user_id: Mapped[int] = mapped_column(ForeignKey("galaxy_user.id"), nullable=True) external_user_id: Mapped[Optional[str]] = mapped_column(String(255)) provider: Mapped[Optional[str]] = mapped_column(String(255)) access_token: Mapped[Optional[str]] = mapped_column(Text) @@ -10061,7 +10061,7 @@ class CustosAuthnzToken(Base, RepresentById): refresh_token: Mapped[Optional[str]] = mapped_column(Text) expiration_time: Mapped[datetime] = mapped_column(nullable=True) refresh_expiration_time: Mapped[datetime] = mapped_column(nullable=True) - user = relationship("User", back_populates="custos_auth") + user: Mapped["User"] = relationship("User", back_populates="custos_auth") class CloudAuthz(Base): From ee127201202c713c1af0fb1ea701e69f4710b749 Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 11 Apr 2024 23:09:53 -0400 Subject: [PATCH 537/669] Correct types in VisualizationUserShareAssociation (optional/nullable) --- lib/galaxy/model/__init__.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/galaxy/model/__init__.py 
b/lib/galaxy/model/__init__.py index 9d23043551a6..3af109f09ef2 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -10357,10 +10357,10 @@ class VisualizationUserShareAssociation(Base, UserShareAssociation): __tablename__ = "visualization_user_share_association" id: Mapped[int] = mapped_column(primary_key=True) - visualization_id: Mapped[Optional[int]] = mapped_column(ForeignKey("visualization.id"), index=True) - user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) + visualization_id: Mapped[int] = mapped_column(ForeignKey("visualization.id"), index=True, nullable=True) + user_id: Mapped[int] = mapped_column(ForeignKey("galaxy_user.id"), index=True, nullable=True) user: Mapped[User] = relationship() - visualization: Mapped[Optional["Visualization"]] = relationship(back_populates="users_shared_with") + visualization: Mapped["Visualization"] = relationship(back_populates="users_shared_with") class Tag(Base, RepresentById): From b75154634b4cb27483dbbea626d23c7b31b2ea0f Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 11 Apr 2024 23:17:09 -0400 Subject: [PATCH 538/669] Correct types in item-tag-assoc models (optional/nullable) --- lib/galaxy/model/__init__.py | 86 ++++++++++++++++++------------------ 1 file changed, 43 insertions(+), 43 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 3af109f09ef2..dceaedc057d6 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -10403,13 +10403,13 @@ class HistoryTagAssociation(Base, ItemTagAssociation, RepresentById): __tablename__ = "history_tag_association" id: Mapped[int] = mapped_column(primary_key=True) - history_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history.id"), index=True) - tag_id: Mapped[Optional[int]] = mapped_column(ForeignKey("tag.id"), index=True) + history_id: Mapped[int] = mapped_column(ForeignKey("history.id"), index=True, nullable=True) + tag_id: Mapped[int] = 
mapped_column(ForeignKey("tag.id"), index=True, nullable=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) - history: Mapped[Optional["History"]] = relationship(back_populates="tags") - tag: Mapped[Optional["Tag"]] = relationship() + history: Mapped["History"] = relationship(back_populates="tags") + tag: Mapped["Tag"] = relationship() user: Mapped[Optional["User"]] = relationship() @@ -10417,15 +10417,15 @@ class HistoryDatasetAssociationTagAssociation(Base, ItemTagAssociation, Represen __tablename__ = "history_dataset_association_tag_association" id: Mapped[int] = mapped_column(primary_key=True) - history_dataset_association_id: Mapped[Optional[int]] = mapped_column( - ForeignKey("history_dataset_association.id"), index=True + history_dataset_association_id: Mapped[int] = mapped_column( + ForeignKey("history_dataset_association.id"), index=True, nullable=True ) - tag_id: Mapped[Optional[int]] = mapped_column(ForeignKey("tag.id"), index=True) + tag_id: Mapped[int] = mapped_column(ForeignKey("tag.id"), index=True, nullable=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) - history_dataset_association: Mapped[Optional["HistoryDatasetAssociation"]] = relationship(back_populates="tags") - tag: Mapped[Optional["Tag"]] = relationship() + history_dataset_association: Mapped["HistoryDatasetAssociation"] = relationship(back_populates="tags") + tag: Mapped["Tag"] = relationship() user: Mapped[Optional["User"]] = relationship() @@ -10433,17 +10433,17 @@ class LibraryDatasetDatasetAssociationTagAssociation(Base, ItemTagAssociation, R __tablename__ = 
"library_dataset_dataset_association_tag_association" id: Mapped[int] = mapped_column(primary_key=True) - library_dataset_dataset_association_id: Mapped[Optional[int]] = mapped_column( - ForeignKey("library_dataset_dataset_association.id"), index=True + library_dataset_dataset_association_id: Mapped[int] = mapped_column( + ForeignKey("library_dataset_dataset_association.id"), index=True, nullable=True ) - tag_id: Mapped[Optional[int]] = mapped_column(ForeignKey("tag.id"), index=True) + tag_id: Mapped[int] = mapped_column(ForeignKey("tag.id"), index=True, nullable=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) - library_dataset_dataset_association: Mapped[Optional["LibraryDatasetDatasetAssociation"]] = relationship( + library_dataset_dataset_association: Mapped["LibraryDatasetDatasetAssociation"] = relationship( back_populates="tags" ) - tag: Mapped[Optional["Tag"]] = relationship() + tag: Mapped["Tag"] = relationship() user: Mapped[Optional["User"]] = relationship() @@ -10451,13 +10451,13 @@ class PageTagAssociation(Base, ItemTagAssociation, RepresentById): __tablename__ = "page_tag_association" id: Mapped[int] = mapped_column(primary_key=True) - page_id: Mapped[Optional[int]] = mapped_column(ForeignKey("page.id"), index=True) - tag_id: Mapped[Optional[int]] = mapped_column(ForeignKey("tag.id"), index=True) + page_id: Mapped[int] = mapped_column(ForeignKey("page.id"), index=True, nullable=True) + tag_id: Mapped[int] = mapped_column(ForeignKey("tag.id"), index=True, nullable=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) - page: Mapped[Optional["Page"]] = 
relationship(back_populates="tags") - tag: Mapped[Optional["Tag"]] = relationship() + page: Mapped["Page"] = relationship(back_populates="tags") + tag: Mapped["Tag"] = relationship() user: Mapped[Optional["User"]] = relationship() @@ -10465,13 +10465,13 @@ class WorkflowStepTagAssociation(Base, ItemTagAssociation, RepresentById): __tablename__ = "workflow_step_tag_association" id: Mapped[int] = mapped_column(primary_key=True) - workflow_step_id: Mapped[Optional[int]] = mapped_column(ForeignKey("workflow_step.id"), index=True) - tag_id: Mapped[Optional[int]] = mapped_column(ForeignKey("tag.id"), index=True) + workflow_step_id: Mapped[int] = mapped_column(ForeignKey("workflow_step.id"), index=True, nullable=True) + tag_id: Mapped[int] = mapped_column(ForeignKey("tag.id"), index=True, nullable=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) - workflow_step: Mapped[Optional["WorkflowStep"]] = relationship(back_populates="tags") - tag: Mapped[Optional["Tag"]] = relationship() + workflow_step: Mapped["WorkflowStep"] = relationship(back_populates="tags") + tag: Mapped["Tag"] = relationship() user: Mapped[Optional["User"]] = relationship() @@ -10479,13 +10479,13 @@ class StoredWorkflowTagAssociation(Base, ItemTagAssociation, RepresentById): __tablename__ = "stored_workflow_tag_association" id: Mapped[int] = mapped_column(primary_key=True) - stored_workflow_id: Mapped[Optional[int]] = mapped_column(ForeignKey("stored_workflow.id"), index=True) - tag_id: Mapped[Optional[int]] = mapped_column(ForeignKey("tag.id"), index=True) + stored_workflow_id: Mapped[int] = mapped_column(ForeignKey("stored_workflow.id"), index=True, nullable=True) + tag_id: Mapped[int] = mapped_column(ForeignKey("tag.id"), index=True, nullable=True) user_id: Mapped[Optional[int]] = 
mapped_column(ForeignKey("galaxy_user.id"), index=True) user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) - stored_workflow: Mapped[Optional["StoredWorkflow"]] = relationship(back_populates="tags") - tag: Mapped[Optional["Tag"]] = relationship() + stored_workflow: Mapped["StoredWorkflow"] = relationship(back_populates="tags") + tag: Mapped["Tag"] = relationship() user: Mapped[Optional["User"]] = relationship() @@ -10493,13 +10493,13 @@ class VisualizationTagAssociation(Base, ItemTagAssociation, RepresentById): __tablename__ = "visualization_tag_association" id: Mapped[int] = mapped_column(primary_key=True) - visualization_id: Mapped[Optional[int]] = mapped_column(ForeignKey("visualization.id"), index=True) - tag_id: Mapped[Optional[int]] = mapped_column(ForeignKey("tag.id"), index=True) + visualization_id: Mapped[int] = mapped_column(ForeignKey("visualization.id"), index=True, nullable=True) + tag_id: Mapped[int] = mapped_column(ForeignKey("tag.id"), index=True, nullable=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) - visualization: Mapped[Optional["Visualization"]] = relationship(back_populates="tags") - tag: Mapped[Optional["Tag"]] = relationship() + visualization: Mapped["Visualization"] = relationship(back_populates="tags") + tag: Mapped["Tag"] = relationship() user: Mapped[Optional["User"]] = relationship() @@ -10507,15 +10507,15 @@ class HistoryDatasetCollectionTagAssociation(Base, ItemTagAssociation, Represent __tablename__ = "history_dataset_collection_tag_association" id: Mapped[int] = mapped_column(primary_key=True) - history_dataset_collection_id: Mapped[Optional[int]] = mapped_column( - ForeignKey("history_dataset_collection_association.id"), 
index=True + history_dataset_collection_id: Mapped[int] = mapped_column( + ForeignKey("history_dataset_collection_association.id"), index=True, nullable=True ) - tag_id: Mapped[Optional[int]] = mapped_column(ForeignKey("tag.id"), index=True) + tag_id: Mapped[int] = mapped_column(ForeignKey("tag.id"), index=True, nullable=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) - dataset_collection: Mapped[Optional["HistoryDatasetCollectionAssociation"]] = relationship(back_populates="tags") - tag: Mapped[Optional["Tag"]] = relationship() + dataset_collection: Mapped["HistoryDatasetCollectionAssociation"] = relationship(back_populates="tags") + tag: Mapped["Tag"] = relationship() user: Mapped[Optional["User"]] = relationship() @@ -10523,15 +10523,15 @@ class LibraryDatasetCollectionTagAssociation(Base, ItemTagAssociation, Represent __tablename__ = "library_dataset_collection_tag_association" id: Mapped[int] = mapped_column(primary_key=True) - library_dataset_collection_id: Mapped[Optional[int]] = mapped_column( - ForeignKey("library_dataset_collection_association.id"), index=True + library_dataset_collection_id: Mapped[int] = mapped_column( + ForeignKey("library_dataset_collection_association.id"), index=True, nullable=True ) - tag_id: Mapped[Optional[int]] = mapped_column(ForeignKey("tag.id"), index=True) + tag_id: Mapped[int] = mapped_column(ForeignKey("tag.id"), index=True, nullable=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) - dataset_collection: Mapped[Optional["LibraryDatasetCollectionAssociation"]] = relationship(back_populates="tags") - tag: 
Mapped[Optional["Tag"]] = relationship() + dataset_collection: Mapped["LibraryDatasetCollectionAssociation"] = relationship(back_populates="tags") + tag: Mapped["Tag"] = relationship() user: Mapped[Optional["User"]] = relationship() @@ -10539,12 +10539,12 @@ class ToolTagAssociation(Base, ItemTagAssociation, RepresentById): __tablename__ = "tool_tag_association" id: Mapped[int] = mapped_column(primary_key=True) - tool_id: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) - tag_id: Mapped[Optional[int]] = mapped_column(ForeignKey("tag.id"), index=True) + tool_id: Mapped[str] = mapped_column(TrimmedString(255), index=True, nullable=True) + tag_id: Mapped[int] = mapped_column(ForeignKey("tag.id"), index=True, nullable=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) - tag: Mapped[Optional["Tag"]] = relationship() + tag: Mapped["Tag"] = relationship() user: Mapped[Optional["User"]] = relationship() From f9f9a68eea767e614b0fbca356cfee106bf72b38 Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 12 Apr 2024 00:08:17 -0400 Subject: [PATCH 539/669] Correct types in item-rating-assoc models (optional/nullable) --- lib/galaxy/model/__init__.py | 48 ++++++++++++++++++------------------ 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index dceaedc057d6..29d0078a6acf 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -10683,10 +10683,10 @@ class HistoryRatingAssociation(ItemRatingAssociation, RepresentById): __tablename__ = "history_rating_association" id: Mapped[int] = mapped_column(primary_key=True) - history_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history.id"), index=True) + history_id: Mapped[int] = 
mapped_column(ForeignKey("history.id"), index=True, nullable=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) - rating: Mapped[Optional[int]] = mapped_column(index=True) - history: Mapped[Optional["History"]] = relationship(back_populates="ratings") + rating: Mapped[int] = mapped_column(index=True, nullable=True) + history: Mapped["History"] = relationship(back_populates="ratings") user: Mapped[Optional["User"]] = relationship() def _set_item(self, history): @@ -10698,12 +10698,12 @@ class HistoryDatasetAssociationRatingAssociation(ItemRatingAssociation, Represen __tablename__ = "history_dataset_association_rating_association" id: Mapped[int] = mapped_column(primary_key=True) - history_dataset_association_id: Mapped[Optional[int]] = mapped_column( - ForeignKey("history_dataset_association.id"), index=True + history_dataset_association_id: Mapped[int] = mapped_column( + ForeignKey("history_dataset_association.id"), index=True, nullable=True ) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) - rating: Mapped[Optional[int]] = mapped_column(index=True) - history_dataset_association: Mapped[Optional["HistoryDatasetAssociation"]] = relationship(back_populates="ratings") + rating: Mapped[int] = mapped_column(index=True, nullable=True) + history_dataset_association: Mapped["HistoryDatasetAssociation"] = relationship(back_populates="ratings") user: Mapped[Optional["User"]] = relationship() def _set_item(self, history_dataset_association): @@ -10715,10 +10715,10 @@ class StoredWorkflowRatingAssociation(ItemRatingAssociation, RepresentById): __tablename__ = "stored_workflow_rating_association" id: Mapped[int] = mapped_column(primary_key=True) - stored_workflow_id: Mapped[Optional[int]] = mapped_column(ForeignKey("stored_workflow.id"), index=True) + stored_workflow_id: Mapped[int] = mapped_column(ForeignKey("stored_workflow.id"), index=True, nullable=True) user_id: Mapped[Optional[int]] = 
mapped_column(ForeignKey("galaxy_user.id"), index=True) - rating: Mapped[Optional[int]] = mapped_column(index=True) - stored_workflow: Mapped[Optional["StoredWorkflow"]] = relationship(back_populates="ratings") + rating: Mapped[int] = mapped_column(index=True, nullable=True) + stored_workflow: Mapped["StoredWorkflow"] = relationship(back_populates="ratings") user: Mapped[Optional["User"]] = relationship() def _set_item(self, stored_workflow): @@ -10730,10 +10730,10 @@ class PageRatingAssociation(ItemRatingAssociation, RepresentById): __tablename__ = "page_rating_association" id: Mapped[int] = mapped_column(primary_key=True) - page_id: Mapped[Optional[int]] = mapped_column(ForeignKey("page.id"), index=True) + page_id: Mapped[int] = mapped_column(ForeignKey("page.id"), index=True, nullable=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) - rating: Mapped[Optional[int]] = mapped_column(index=True) - page: Mapped[Optional["Page"]] = relationship(back_populates="ratings") + rating: Mapped[int] = mapped_column(index=True, nullable=True) + page: Mapped["Page"] = relationship(back_populates="ratings") user: Mapped[Optional["User"]] = relationship() def _set_item(self, page): @@ -10745,10 +10745,10 @@ class VisualizationRatingAssociation(ItemRatingAssociation, RepresentById): __tablename__ = "visualization_rating_association" id: Mapped[int] = mapped_column(primary_key=True) - visualization_id: Mapped[Optional[int]] = mapped_column(ForeignKey("visualization.id"), index=True) + visualization_id: Mapped[int] = mapped_column(ForeignKey("visualization.id"), index=True, nullable=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) - rating: Mapped[Optional[int]] = mapped_column(index=True) - visualization: Mapped[Optional["Visualization"]] = relationship(back_populates="ratings") + rating: Mapped[int] = mapped_column(index=True, nullable=True) + visualization: Mapped["Visualization"] = 
relationship(back_populates="ratings") user: Mapped[Optional["User"]] = relationship() def _set_item(self, visualization): @@ -10760,12 +10760,12 @@ class HistoryDatasetCollectionRatingAssociation(ItemRatingAssociation, Represent __tablename__ = "history_dataset_collection_rating_association" id: Mapped[int] = mapped_column(primary_key=True) - history_dataset_collection_id: Mapped[Optional[int]] = mapped_column( - ForeignKey("history_dataset_collection_association.id"), index=True + history_dataset_collection_id: Mapped[int] = mapped_column( + ForeignKey("history_dataset_collection_association.id"), index=True, nullable=True ) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) - rating: Mapped[Optional[int]] = mapped_column(index=True) - dataset_collection: Mapped[Optional["HistoryDatasetCollectionAssociation"]] = relationship(back_populates="ratings") + rating: Mapped[int] = mapped_column(index=True, nullable=True) + dataset_collection: Mapped["HistoryDatasetCollectionAssociation"] = relationship(back_populates="ratings") user: Mapped[Optional["User"]] = relationship() def _set_item(self, dataset_collection): @@ -10777,12 +10777,12 @@ class LibraryDatasetCollectionRatingAssociation(ItemRatingAssociation, Represent __tablename__ = "library_dataset_collection_rating_association" id: Mapped[int] = mapped_column(primary_key=True) - library_dataset_collection_id: Mapped[Optional[int]] = mapped_column( - ForeignKey("library_dataset_collection_association.id"), index=True + library_dataset_collection_id: Mapped[int] = mapped_column( + ForeignKey("library_dataset_collection_association.id"), index=True, nullable=True ) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) - rating: Mapped[Optional[int]] = mapped_column(index=True) - dataset_collection: Mapped[Optional["LibraryDatasetCollectionAssociation"]] = relationship(back_populates="ratings") + rating: Mapped[int] = mapped_column(index=True, 
nullable=True) + dataset_collection: Mapped["LibraryDatasetCollectionAssociation"] = relationship(back_populates="ratings") user: Mapped[Optional["User"]] = relationship() def _set_item(self, dataset_collection): From 8f02426ec78640144df82a86919e65cdb6087981 Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 12 Apr 2024 00:17:53 -0400 Subject: [PATCH 540/669] Correct types in cleanup-event-assoc models (optional/nullable) --- lib/galaxy/model/__init__.py | 40 +++++++++++++++++++----------------- 1 file changed, 21 insertions(+), 19 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 29d0078a6acf..b46861a836a7 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -10894,8 +10894,8 @@ class CleanupEventDatasetAssociation(Base): id: Mapped[int] = mapped_column(primary_key=True) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) - cleanup_event_id: Mapped[Optional[int]] = mapped_column(ForeignKey("cleanup_event.id"), index=True) - dataset_id: Mapped[Optional[int]] = mapped_column(ForeignKey("dataset.id"), index=True) + cleanup_event_id: Mapped[int] = mapped_column(ForeignKey("cleanup_event.id"), index=True, nullable=True) + dataset_id: Mapped[int] = mapped_column(ForeignKey("dataset.id"), index=True, nullable=True) class CleanupEventMetadataFileAssociation(Base): @@ -10903,8 +10903,8 @@ class CleanupEventMetadataFileAssociation(Base): id: Mapped[int] = mapped_column(primary_key=True) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) - cleanup_event_id: Mapped[Optional[int]] = mapped_column(ForeignKey("cleanup_event.id"), index=True) - metadata_file_id: Mapped[Optional[int]] = mapped_column(ForeignKey("metadata_file.id"), index=True) + cleanup_event_id: Mapped[int] = mapped_column(ForeignKey("cleanup_event.id"), index=True, nullable=True) + metadata_file_id: Mapped[int] = mapped_column(ForeignKey("metadata_file.id"), index=True, nullable=True) class 
CleanupEventHistoryAssociation(Base): @@ -10912,8 +10912,8 @@ class CleanupEventHistoryAssociation(Base): id: Mapped[int] = mapped_column(primary_key=True) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) - cleanup_event_id: Mapped[Optional[int]] = mapped_column(ForeignKey("cleanup_event.id"), index=True) - history_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history.id"), index=True) + cleanup_event_id: Mapped[int] = mapped_column(ForeignKey("cleanup_event.id"), index=True, nullable=True) + history_id: Mapped[int] = mapped_column(ForeignKey("history.id"), index=True, nullable=True) class CleanupEventHistoryDatasetAssociationAssociation(Base): @@ -10921,8 +10921,8 @@ class CleanupEventHistoryDatasetAssociationAssociation(Base): id: Mapped[int] = mapped_column(primary_key=True) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) - cleanup_event_id: Mapped[Optional[int]] = mapped_column(ForeignKey("cleanup_event.id"), index=True) - hda_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history_dataset_association.id"), index=True) + cleanup_event_id: Mapped[int] = mapped_column(ForeignKey("cleanup_event.id"), index=True, nullable=True) + hda_id: Mapped[int] = mapped_column(ForeignKey("history_dataset_association.id"), index=True, nullable=True) class CleanupEventLibraryAssociation(Base): @@ -10930,8 +10930,8 @@ class CleanupEventLibraryAssociation(Base): id: Mapped[int] = mapped_column(primary_key=True) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) - cleanup_event_id: Mapped[Optional[int]] = mapped_column(ForeignKey("cleanup_event.id"), index=True) - library_id: Mapped[Optional[int]] = mapped_column(ForeignKey("library.id"), index=True) + cleanup_event_id: Mapped[int] = mapped_column(ForeignKey("cleanup_event.id"), index=True, nullable=True) + library_id: Mapped[int] = mapped_column(ForeignKey("library.id"), index=True, nullable=True) class 
CleanupEventLibraryFolderAssociation(Base): @@ -10939,8 +10939,8 @@ class CleanupEventLibraryFolderAssociation(Base): id: Mapped[int] = mapped_column(primary_key=True) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) - cleanup_event_id: Mapped[Optional[int]] = mapped_column(ForeignKey("cleanup_event.id"), index=True) - library_folder_id: Mapped[Optional[int]] = mapped_column(ForeignKey("library_folder.id"), index=True) + cleanup_event_id: Mapped[int] = mapped_column(ForeignKey("cleanup_event.id"), index=True, nullable=True) + library_folder_id: Mapped[int] = mapped_column(ForeignKey("library_folder.id"), index=True, nullable=True) class CleanupEventLibraryDatasetAssociation(Base): @@ -10948,8 +10948,8 @@ class CleanupEventLibraryDatasetAssociation(Base): id: Mapped[int] = mapped_column(primary_key=True) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) - cleanup_event_id: Mapped[Optional[int]] = mapped_column(ForeignKey("cleanup_event.id"), index=True) - library_dataset_id: Mapped[Optional[int]] = mapped_column(ForeignKey("library_dataset.id"), index=True) + cleanup_event_id: Mapped[int] = mapped_column(ForeignKey("cleanup_event.id"), index=True, nullable=True) + library_dataset_id: Mapped[int] = mapped_column(ForeignKey("library_dataset.id"), index=True, nullable=True) class CleanupEventLibraryDatasetDatasetAssociationAssociation(Base): @@ -10957,8 +10957,10 @@ class CleanupEventLibraryDatasetDatasetAssociationAssociation(Base): id: Mapped[int] = mapped_column(primary_key=True) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) - cleanup_event_id: Mapped[Optional[int]] = mapped_column(ForeignKey("cleanup_event.id"), index=True) - ldda_id: Mapped[Optional[int]] = mapped_column(ForeignKey("library_dataset_dataset_association.id"), index=True) + cleanup_event_id: Mapped[int] = mapped_column(ForeignKey("cleanup_event.id"), index=True, nullable=True) + ldda_id: Mapped[int] = mapped_column( + 
ForeignKey("library_dataset_dataset_association.id"), index=True, nullable=True + ) class CleanupEventImplicitlyConvertedDatasetAssociationAssociation(Base): @@ -10966,9 +10968,9 @@ class CleanupEventImplicitlyConvertedDatasetAssociationAssociation(Base): id: Mapped[int] = mapped_column(primary_key=True) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) - cleanup_event_id: Mapped[Optional[int]] = mapped_column(ForeignKey("cleanup_event.id"), index=True) - icda_id: Mapped[Optional[int]] = mapped_column( - ForeignKey("implicitly_converted_dataset_association.id"), index=True + cleanup_event_id: Mapped[int] = mapped_column(ForeignKey("cleanup_event.id"), index=True, nullable=True) + icda_id: Mapped[int] = mapped_column( + ForeignKey("implicitly_converted_dataset_association.id"), index=True, nullable=True ) From ddf0da17d74001e98113388604e02f63f5f5c515 Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 12 Apr 2024 00:03:17 -0400 Subject: [PATCH 541/669] Correct types in item-annotation-assoc models (optional/nullable) --- lib/galaxy/model/__init__.py | 58 +++++++++++++++++------------------- 1 file changed, 28 insertions(+), 30 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index b46861a836a7..5092f6378c7f 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -10554,11 +10554,11 @@ class HistoryAnnotationAssociation(Base, RepresentById): __table_args__ = (Index("ix_history_anno_assoc_annotation", "annotation", mysql_length=200),) id: Mapped[int] = mapped_column(primary_key=True) - history_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history.id"), index=True) + history_id: Mapped[int] = mapped_column(ForeignKey("history.id"), index=True, nullable=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) - annotation: Mapped[Optional[str]] = mapped_column(TEXT) - history: Mapped[Optional["History"]] = 
relationship(back_populates="annotations") - user: Mapped[Optional["User"]] = relationship() + annotation: Mapped[str] = mapped_column(TEXT, nullable=True) + history: Mapped["History"] = relationship(back_populates="annotations") + user: Mapped["User"] = relationship() class HistoryDatasetAssociationAnnotationAssociation(Base, RepresentById): @@ -10566,12 +10566,12 @@ class HistoryDatasetAssociationAnnotationAssociation(Base, RepresentById): __table_args__ = (Index("ix_history_dataset_anno_assoc_annotation", "annotation", mysql_length=200),) id: Mapped[int] = mapped_column(primary_key=True) - history_dataset_association_id: Mapped[Optional[int]] = mapped_column( - ForeignKey("history_dataset_association.id"), index=True + history_dataset_association_id: Mapped[int] = mapped_column( + ForeignKey("history_dataset_association.id"), index=True, nullable=True ) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) - annotation: Mapped[Optional[str]] = mapped_column(TEXT) - hda: Mapped[Optional["HistoryDatasetAssociation"]] = relationship(back_populates="annotations") + annotation: Mapped[str] = mapped_column(TEXT, nullable=True) + hda: Mapped["HistoryDatasetAssociation"] = relationship(back_populates="annotations") user: Mapped[Optional["User"]] = relationship() @@ -10580,10 +10580,10 @@ class StoredWorkflowAnnotationAssociation(Base, RepresentById): __table_args__ = (Index("ix_stored_workflow_ann_assoc_annotation", "annotation", mysql_length=200),) id: Mapped[int] = mapped_column(primary_key=True) - stored_workflow_id: Mapped[Optional[int]] = mapped_column(ForeignKey("stored_workflow.id"), index=True) + stored_workflow_id: Mapped[int] = mapped_column(ForeignKey("stored_workflow.id"), index=True, nullable=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) - annotation: Mapped[Optional[str]] = mapped_column(TEXT) - stored_workflow: Mapped[Optional["StoredWorkflow"]] = 
relationship(back_populates="annotations") + annotation: Mapped[str] = mapped_column(TEXT, nullable=True) + stored_workflow: Mapped["StoredWorkflow"] = relationship(back_populates="annotations") user: Mapped[Optional["User"]] = relationship() @@ -10592,10 +10592,10 @@ class WorkflowStepAnnotationAssociation(Base, RepresentById): __table_args__ = (Index("ix_workflow_step_ann_assoc_annotation", "annotation", mysql_length=200),) id: Mapped[int] = mapped_column(primary_key=True) - workflow_step_id: Mapped[Optional[int]] = mapped_column(ForeignKey("workflow_step.id"), index=True) + workflow_step_id: Mapped[int] = mapped_column(ForeignKey("workflow_step.id"), index=True, nullable=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) - annotation: Mapped[Optional[str]] = mapped_column(TEXT) - workflow_step: Mapped[Optional["WorkflowStep"]] = relationship(back_populates="annotations") + annotation: Mapped[str] = mapped_column(TEXT, nullable=True) + workflow_step: Mapped["WorkflowStep"] = relationship(back_populates="annotations") user: Mapped[Optional["User"]] = relationship() @@ -10604,10 +10604,10 @@ class PageAnnotationAssociation(Base, RepresentById): __table_args__ = (Index("ix_page_annotation_association_annotation", "annotation", mysql_length=200),) id: Mapped[int] = mapped_column(primary_key=True) - page_id: Mapped[Optional[int]] = mapped_column(ForeignKey("page.id"), index=True) + page_id: Mapped[int] = mapped_column(ForeignKey("page.id"), index=True, nullable=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) - annotation: Mapped[Optional[str]] = mapped_column(TEXT) - page: Mapped[Optional["Page"]] = relationship(back_populates="annotations") + annotation: Mapped[str] = mapped_column(TEXT, nullable=True) + page: Mapped["Page"] = relationship(back_populates="annotations") user: Mapped[Optional["User"]] = relationship() @@ -10616,10 +10616,10 @@ class 
VisualizationAnnotationAssociation(Base, RepresentById): __table_args__ = (Index("ix_visualization_annotation_association_annotation", "annotation", mysql_length=200),) id: Mapped[int] = mapped_column(primary_key=True) - visualization_id: Mapped[Optional[int]] = mapped_column(ForeignKey("visualization.id"), index=True) + visualization_id: Mapped[int] = mapped_column(ForeignKey("visualization.id"), index=True, nullable=True) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) - annotation: Mapped[Optional[str]] = mapped_column(TEXT) - visualization: Mapped[Optional["Visualization"]] = relationship(back_populates="annotations") + annotation: Mapped[str] = mapped_column(TEXT, nullable=True) + visualization: Mapped["Visualization"] = relationship(back_populates="annotations") user: Mapped[Optional["User"]] = relationship() @@ -10627,12 +10627,12 @@ class HistoryDatasetCollectionAssociationAnnotationAssociation(Base, RepresentBy __tablename__ = "history_dataset_collection_annotation_association" id: Mapped[int] = mapped_column(primary_key=True) - history_dataset_collection_id: Mapped[Optional[int]] = mapped_column( - ForeignKey("history_dataset_collection_association.id"), index=True + history_dataset_collection_id: Mapped[int] = mapped_column( + ForeignKey("history_dataset_collection_association.id"), index=True, nullable=True ) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) - annotation: Mapped[Optional[str]] = mapped_column(TEXT) - history_dataset_collection: Mapped[Optional["HistoryDatasetCollectionAssociation"]] = relationship( + annotation: Mapped[str] = mapped_column(TEXT, nullable=True) + history_dataset_collection: Mapped["HistoryDatasetCollectionAssociation"] = relationship( back_populates="annotations" ) user: Mapped[Optional["User"]] = relationship() @@ -10642,14 +10642,12 @@ class LibraryDatasetCollectionAnnotationAssociation(Base, RepresentById): __tablename__ = 
"library_dataset_collection_annotation_association" id: Mapped[int] = mapped_column(primary_key=True) - library_dataset_collection_id: Mapped[Optional[int]] = mapped_column( - ForeignKey("library_dataset_collection_association.id"), index=True + library_dataset_collection_id: Mapped[int] = mapped_column( + ForeignKey("library_dataset_collection_association.id"), index=True, nullable=True ) user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) - annotation: Mapped[Optional[str]] = mapped_column(TEXT) - dataset_collection: Mapped[Optional["LibraryDatasetCollectionAssociation"]] = relationship( - back_populates="annotations" - ) + annotation: Mapped[str] = mapped_column(TEXT, nullable=True) + dataset_collection: Mapped["LibraryDatasetCollectionAssociation"] = relationship(back_populates="annotations") user: Mapped[Optional["User"]] = relationship() From 585d911e08ebefd19ca4b550200c213adab11200 Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 12 Apr 2024 10:47:22 -0400 Subject: [PATCH 542/669] Fix typing bug, add type in MetadataFile.dataset --- lib/galaxy/model/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 5092f6378c7f..6a72e7700207 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -9498,7 +9498,7 @@ class MetadataFile(Base, StorableObject, Serializable): purged: Mapped[Optional[bool]] = mapped_column(index=True, default=False) history_dataset: Mapped[Optional["HistoryDatasetAssociation"]] = relationship() - library_dataset = relationship("LibraryDatasetDatasetAssociation") + library_dataset: Mapped[Optional["LibraryDatasetDatasetAssociation"]] = relationship() def __init__(self, dataset=None, name=None, uuid=None): self.uuid = get_uuid(uuid) @@ -9509,7 +9509,7 @@ def __init__(self, dataset=None, name=None, uuid=None): self.name = name @property - def dataset(self) -> Optional[Dataset]: + def 
dataset(self) -> Optional["DatasetInstance"]: da = self.history_dataset or self.library_dataset return da and da.dataset From e7873d8aae3169edd68f3a86fac5ca60ecb4ebd9 Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 12 Apr 2024 11:11:47 -0400 Subject: [PATCH 543/669] Fix type in JobToInputDatasetAssociation.name --- lib/galaxy/model/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 6a72e7700207..53d101b00fc8 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -2313,7 +2313,7 @@ class JobToInputDatasetAssociation(Base, RepresentById): job_id: Mapped[int] = mapped_column(ForeignKey("job.id"), index=True, nullable=True) dataset_id: Mapped[int] = mapped_column(ForeignKey("history_dataset_association.id"), index=True, nullable=True) dataset_version: Mapped[Optional[int]] - name: Mapped[Optional[str]] = mapped_column(String(255)) + name: Mapped[str] = mapped_column(String(255), nullable=True) dataset: Mapped["HistoryDatasetAssociation"] = relationship(lazy="joined", back_populates="dependent_jobs") job: Mapped["Job"] = relationship(back_populates="input_datasets") From 41d3054864ec0bb598cdcff7cf9618c810674e0f Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 12 Apr 2024 11:40:56 -0400 Subject: [PATCH 544/669] Fix type in JobToOutputDatasetCollectionAssociation.name + add type hint to Job.output_dataset_collection_instances --- lib/galaxy/model/__init__.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 53d101b00fc8..0d102adbf393 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -1404,7 +1404,9 @@ class Job(Base, JobLike, UsesCreateAndUpdateTime, Dictifiable, Serializable): input_dataset_collection_elements = relationship( "JobToInputDatasetCollectionElementAssociation", back_populates="job" ) - 
output_dataset_collection_instances = relationship("JobToOutputDatasetCollectionAssociation", back_populates="job") + output_dataset_collection_instances: Mapped[List["JobToOutputDatasetCollectionAssociation"]] = relationship( + "JobToOutputDatasetCollectionAssociation", back_populates="job" + ) output_dataset_collections = relationship("JobToImplicitOutputDatasetCollectionAssociation", back_populates="job") post_job_actions: Mapped[List["PostJobActionAssociation"]] = relationship( back_populates="job", cascade_backrefs=False @@ -2390,7 +2392,7 @@ class JobToOutputDatasetCollectionAssociation(Base, RepresentById): dataset_collection_id: Mapped[int] = mapped_column( ForeignKey("history_dataset_collection_association.id"), index=True, nullable=True ) - name: Mapped[Optional[str]] = mapped_column(Unicode(255)) + name: Mapped[str] = mapped_column(Unicode(255), nullable=True) dataset_collection_instance: Mapped["HistoryDatasetCollectionAssociation"] = relationship(lazy="joined") job: Mapped["Job"] = relationship(back_populates="output_dataset_collection_instances") From 78b2da5a15befb85030b5783308bb731bc04243c Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 12 Apr 2024 11:48:03 -0400 Subject: [PATCH 545/669] Fix type in JobToImplicitOutputDatasetCollectionAssociation.name Add type to Job.output_dataset_collections --- lib/galaxy/model/__init__.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 0d102adbf393..06b266b2acc2 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -1407,7 +1407,9 @@ class Job(Base, JobLike, UsesCreateAndUpdateTime, Dictifiable, Serializable): output_dataset_collection_instances: Mapped[List["JobToOutputDatasetCollectionAssociation"]] = relationship( "JobToOutputDatasetCollectionAssociation", back_populates="job" ) - output_dataset_collections = relationship("JobToImplicitOutputDatasetCollectionAssociation", 
back_populates="job") + output_dataset_collections: Mapped[List["JobToImplicitOutputDatasetCollectionAssociation"]] = relationship( + back_populates="job" + ) post_job_actions: Mapped[List["PostJobActionAssociation"]] = relationship( back_populates="job", cascade_backrefs=False ) @@ -2414,7 +2416,7 @@ class JobToImplicitOutputDatasetCollectionAssociation(Base, RepresentById): id: Mapped[int] = mapped_column(primary_key=True) job_id: Mapped[int] = mapped_column(ForeignKey("job.id"), index=True, nullable=True) dataset_collection_id: Mapped[int] = mapped_column(ForeignKey("dataset_collection.id"), index=True, nullable=True) - name: Mapped[Optional[str]] = mapped_column(Unicode(255)) + name: Mapped[str] = mapped_column(Unicode(255), nullable=True) dataset_collection: Mapped["DatasetCollection"] = relationship() job: Mapped["Job"] = relationship(back_populates="output_dataset_collections") From d76ac54f296c89ec1c3d5cb9d3bc40980135a409 Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 12 Apr 2024 11:56:32 -0400 Subject: [PATCH 546/669] Fix type in JobToInputLibraryDatasetAssociation.name Add type to Job.input_library_datasets --- lib/galaxy/model/__init__.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 06b266b2acc2..aa33167bd5e9 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -1413,7 +1413,9 @@ class Job(Base, JobLike, UsesCreateAndUpdateTime, Dictifiable, Serializable): post_job_actions: Mapped[List["PostJobActionAssociation"]] = relationship( back_populates="job", cascade_backrefs=False ) - input_library_datasets = relationship("JobToInputLibraryDatasetAssociation", back_populates="job") + input_library_datasets: Mapped[List["JobToInputLibraryDatasetAssociation"]] = relationship( + "JobToInputLibraryDatasetAssociation", back_populates="job" + ) output_library_datasets = relationship("JobToOutputLibraryDatasetAssociation", back_populates="job") 
external_output_metadata: Mapped[List["JobExternalOutputMetadata"]] = relationship(back_populates="job") tasks: Mapped[List["Task"]] = relationship(back_populates="job") @@ -2433,7 +2435,7 @@ class JobToInputLibraryDatasetAssociation(Base, RepresentById): ldda_id: Mapped[int] = mapped_column( ForeignKey("library_dataset_dataset_association.id"), index=True, nullable=True ) - name: Mapped[Optional[str]] = mapped_column(Unicode(255)) + name: Mapped[str] = mapped_column(Unicode(255), nullable=True) job: Mapped["Job"] = relationship(back_populates="input_library_datasets") dataset: Mapped["LibraryDatasetDatasetAssociation"] = relationship(lazy="joined", back_populates="dependent_jobs") From d9878d103d7bd9df78e3d2555b0dd0b893729472 Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 12 Apr 2024 12:09:25 -0400 Subject: [PATCH 547/669] Fix type in JobToOutputLibraryDatasetAssociation.name Add type to Job.output_library_datasets --- lib/galaxy/model/__init__.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index aa33167bd5e9..9659d98a7464 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -1416,7 +1416,9 @@ class Job(Base, JobLike, UsesCreateAndUpdateTime, Dictifiable, Serializable): input_library_datasets: Mapped[List["JobToInputLibraryDatasetAssociation"]] = relationship( "JobToInputLibraryDatasetAssociation", back_populates="job" ) - output_library_datasets = relationship("JobToOutputLibraryDatasetAssociation", back_populates="job") + output_library_datasets: Mapped[List["JobToOutputLibraryDatasetAssociation"]] = relationship( + "JobToOutputLibraryDatasetAssociation", back_populates="job" + ) external_output_metadata: Mapped[List["JobExternalOutputMetadata"]] = relationship(back_populates="job") tasks: Mapped[List["Task"]] = relationship(back_populates="job") output_datasets = relationship("JobToOutputDatasetAssociation", back_populates="job") @@ -2453,7 
+2455,7 @@ class JobToOutputLibraryDatasetAssociation(Base, RepresentById): ldda_id: Mapped[int] = mapped_column( ForeignKey("library_dataset_dataset_association.id"), index=True, nullable=True ) - name: Mapped[Optional[str]] = mapped_column(Unicode(255)) + name: Mapped[str] = mapped_column(Unicode(255), nullable=True) job: Mapped["Job"] = relationship(back_populates="output_library_datasets") dataset: Mapped["LibraryDatasetDatasetAssociation"] = relationship( lazy="joined", back_populates="creating_job_associations" From 9e408685123dfd543ba9a8ad5fe43fb2175f5f85 Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 12 Apr 2024 12:15:09 -0400 Subject: [PATCH 548/669] Fix type in JobToOutputDatasetAssociation.name --- lib/galaxy/model/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 9659d98a7464..9059cf1fcac4 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -2338,7 +2338,7 @@ class JobToOutputDatasetAssociation(Base, RepresentById): id: Mapped[int] = mapped_column(primary_key=True) job_id: Mapped[int] = mapped_column(ForeignKey("job.id"), index=True, nullable=True) dataset_id: Mapped[int] = mapped_column(ForeignKey("history_dataset_association.id"), index=True, nullable=True) - name: Mapped[Optional[str]] = mapped_column(String(255)) + name: Mapped[str] = mapped_column(String(255), nullable=True) dataset: Mapped["HistoryDatasetAssociation"] = relationship( lazy="joined", back_populates="creating_job_associations" ) From da55f4aca56e293b3801a366e9fd6e11db5d7520 Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 12 Apr 2024 00:22:50 -0400 Subject: [PATCH 549/669] Misc. 
typing edits --- lib/galaxy/model/__init__.py | 40 +++++++++++++++++++----------------- 1 file changed, 21 insertions(+), 19 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 9059cf1fcac4..0c3ef12e7603 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -1400,12 +1400,14 @@ class Job(Base, JobLike, UsesCreateAndUpdateTime, Dictifiable, Serializable): library_folder: Mapped[Optional["LibraryFolder"]] = relationship() parameters = relationship("JobParameter") input_datasets = relationship("JobToInputDatasetAssociation", back_populates="job") - input_dataset_collections = relationship("JobToInputDatasetCollectionAssociation", back_populates="job") - input_dataset_collection_elements = relationship( - "JobToInputDatasetCollectionElementAssociation", back_populates="job" + input_dataset_collections: Mapped[List["JobToInputDatasetCollectionAssociation"]] = relationship( + back_populates="job" + ) + input_dataset_collection_elements: Mapped[List["JobToInputDatasetCollectionElementAssociation"]] = relationship( + back_populates="job" ) output_dataset_collection_instances: Mapped[List["JobToOutputDatasetCollectionAssociation"]] = relationship( - "JobToOutputDatasetCollectionAssociation", back_populates="job" + back_populates="job" ) output_dataset_collections: Mapped[List["JobToImplicitOutputDatasetCollectionAssociation"]] = relationship( back_populates="job" @@ -1413,12 +1415,8 @@ class Job(Base, JobLike, UsesCreateAndUpdateTime, Dictifiable, Serializable): post_job_actions: Mapped[List["PostJobActionAssociation"]] = relationship( back_populates="job", cascade_backrefs=False ) - input_library_datasets: Mapped[List["JobToInputLibraryDatasetAssociation"]] = relationship( - "JobToInputLibraryDatasetAssociation", back_populates="job" - ) - output_library_datasets: Mapped[List["JobToOutputLibraryDatasetAssociation"]] = relationship( - "JobToOutputLibraryDatasetAssociation", back_populates="job" - ) + 
input_library_datasets: Mapped[List["JobToInputLibraryDatasetAssociation"]] = relationship(back_populates="job") + output_library_datasets: Mapped[List["JobToOutputLibraryDatasetAssociation"]] = relationship(back_populates="job") external_output_metadata: Mapped[List["JobExternalOutputMetadata"]] = relationship(back_populates="job") tasks: Mapped[List["Task"]] = relationship(back_populates="job") output_datasets = relationship("JobToOutputDatasetAssociation", back_populates="job") @@ -1428,16 +1426,18 @@ class Job(Base, JobLike, UsesCreateAndUpdateTime, Dictifiable, Serializable): interactivetool_entry_points: Mapped[List["InteractiveToolEntryPoint"]] = relationship( back_populates="job", uselist=True ) - implicit_collection_jobs_association = relationship( - "ImplicitCollectionJobsJobAssociation", back_populates="job", uselist=False, cascade_backrefs=False + implicit_collection_jobs_association: Mapped[List["ImplicitCollectionJobsJobAssociation"]] = relationship( + back_populates="job", uselist=False, cascade_backrefs=False ) - container = relationship("JobContainerAssociation", back_populates="job", uselist=False) - data_manager_association = relationship( - "DataManagerJobAssociation", back_populates="job", uselist=False, cascade_backrefs=False + container: Mapped[Optional["JobContainerAssociation"]] = relationship(back_populates="job", uselist=False) + data_manager_association: Mapped[Optional["DataManagerJobAssociation"]] = relationship( + back_populates="job", uselist=False, cascade_backrefs=False ) - history_dataset_collection_associations = relationship("HistoryDatasetCollectionAssociation", back_populates="job") - workflow_invocation_step = relationship( - "WorkflowInvocationStep", back_populates="job", uselist=False, cascade_backrefs=False + history_dataset_collection_associations: Mapped[List["HistoryDatasetCollectionAssociation"]] = relationship( + back_populates="job" + ) + workflow_invocation_step: Mapped[Optional["WorkflowInvocationStep"]] = 
relationship( + back_populates="job", uselist=False, cascade_backrefs=False ) any_output_dataset_collection_instances_deleted = None @@ -3733,7 +3733,9 @@ class Quota(Base, Dictifiable, RepresentById): operation: Mapped[Optional[str]] = mapped_column(String(8)) deleted: Mapped[Optional[bool]] = mapped_column(index=True, default=False) quota_source_label: Mapped[Optional[str]] = mapped_column(String(32), default=None) - default = relationship("DefaultQuotaAssociation", back_populates="quota", cascade_backrefs=False) + default: Mapped[List["DefaultQuotaAssociation"]] = relationship( + "DefaultQuotaAssociation", back_populates="quota", cascade_backrefs=False + ) groups: Mapped[List["GroupQuotaAssociation"]] = relationship(back_populates="quota") users: Mapped[List["UserQuotaAssociation"]] = relationship(back_populates="quota") From 83044490ee5913d556ba374136d882a11af181c2 Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 12 Apr 2024 13:19:28 -0400 Subject: [PATCH 550/669] Cast as list to fix mypy error (types are correct) --- lib/galaxy/managers/jobs.py | 6 +++++- lib/galaxy/model/__init__.py | 2 +- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/managers/jobs.py b/lib/galaxy/managers/jobs.py index f34d8a2dda2e..bf26cad31a0a 100644 --- a/lib/galaxy/managers/jobs.py +++ b/lib/galaxy/managers/jobs.py @@ -5,7 +5,10 @@ date, datetime, ) -from typing import List +from typing import ( + cast, + List, +) import sqlalchemy from boltons.iterutils import remap @@ -1065,6 +1068,7 @@ def summarize_job_outputs(job: model.Job, tool, params): ("hdca", "dataset_collection_id", job.output_dataset_collection_instances), ) for src, attribute, output_associations in possible_outputs: + output_associations = cast(List, output_associations) # during iteration, mypy sees it as object for output_association in output_associations: output_name = output_association.name if output_name not in output_labels and tool: diff --git a/lib/galaxy/model/__init__.py 
b/lib/galaxy/model/__init__.py index 0c3ef12e7603..0bbd146d6256 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -1419,7 +1419,7 @@ class Job(Base, JobLike, UsesCreateAndUpdateTime, Dictifiable, Serializable): output_library_datasets: Mapped[List["JobToOutputLibraryDatasetAssociation"]] = relationship(back_populates="job") external_output_metadata: Mapped[List["JobExternalOutputMetadata"]] = relationship(back_populates="job") tasks: Mapped[List["Task"]] = relationship(back_populates="job") - output_datasets = relationship("JobToOutputDatasetAssociation", back_populates="job") + output_datasets: Mapped[List["JobToOutputDatasetAssociation"]] = relationship(back_populates="job") state_history: Mapped[List["JobStateHistory"]] = relationship() text_metrics: Mapped[List["JobMetricText"]] = relationship() numeric_metrics: Mapped[List["JobMetricNumeric"]] = relationship() From ce1cc2c05e8575e7533244271c34e5f4b6257573 Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 12 Apr 2024 13:39:23 -0400 Subject: [PATCH 551/669] Fix type in JobToInputDatasetCollectionAssociation.name --- lib/galaxy/model/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 0bbd146d6256..0447fec47b01 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -2362,7 +2362,7 @@ class JobToInputDatasetCollectionAssociation(Base, RepresentById): dataset_collection_id: Mapped[int] = mapped_column( ForeignKey("history_dataset_collection_association.id"), index=True, nullable=True ) - name: Mapped[Optional[str]] = mapped_column(String(255)) + name: Mapped[str] = mapped_column(String(255), nullable=True) dataset_collection: Mapped["HistoryDatasetCollectionAssociation"] = relationship(lazy="joined") job: Mapped["Job"] = relationship(back_populates="input_dataset_collections") From 61fc66745f7cb6f493821de3f2c54ea6522daf8b Mon Sep 17 00:00:00 2001 From: John Davis 
Date: Fri, 12 Apr 2024 13:59:51 -0400 Subject: [PATCH 552/669] Fix type in JobToInputDatasetCollectionElementAssociation.name --- lib/galaxy/model/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 0447fec47b01..e33b2fb9cf87 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -2379,7 +2379,7 @@ class JobToInputDatasetCollectionElementAssociation(Base, RepresentById): dataset_collection_element_id: Mapped[int] = mapped_column( ForeignKey("dataset_collection_element.id"), index=True, nullable=True ) - name: Mapped[Optional[str]] = mapped_column(Unicode(255)) + name: Mapped[str] = mapped_column(Unicode(255), nullable=True) dataset_collection_element: Mapped["DatasetCollectionElement"] = relationship(lazy="joined") job: Mapped["Job"] = relationship(back_populates="input_dataset_collection_elements") From e96b00c45e17c455fb6d4d2dc761b69eca928872 Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 12 Apr 2024 14:07:57 -0400 Subject: [PATCH 553/669] Fix mypy errors, add type Can't reuse variable for different types: mypy complains. 
--- lib/galaxy/model/__init__.py | 4 +- lib/galaxy/model/store/__init__.py | 58 +++++++++++++++------------ lib/galaxy/webapps/galaxy/api/jobs.py | 2 +- 3 files changed, 36 insertions(+), 28 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index e33b2fb9cf87..d373f27dcfdf 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -1399,7 +1399,9 @@ class Job(Base, JobLike, UsesCreateAndUpdateTime, Dictifiable, Serializable): history: Mapped[Optional["History"]] = relationship(back_populates="jobs") library_folder: Mapped[Optional["LibraryFolder"]] = relationship() parameters = relationship("JobParameter") - input_datasets = relationship("JobToInputDatasetAssociation", back_populates="job") + input_datasets: Mapped[List["JobToInputDatasetAssociation"]] = relationship( + "JobToInputDatasetAssociation", back_populates="job" + ) input_dataset_collections: Mapped[List["JobToInputDatasetCollectionAssociation"]] = relationship( back_populates="job" ) diff --git a/lib/galaxy/model/store/__init__.py b/lib/galaxy/model/store/__init__.py index f96b48c46f11..012a7d0cc53d 100644 --- a/lib/galaxy/model/store/__init__.py +++ b/lib/galaxy/model/store/__init__.py @@ -2076,67 +2076,73 @@ def export_jobs( if include_job_data: self.add_dataset(assoc.dataset) - for assoc in job.output_datasets: + for assoc2 in job.output_datasets: # Optional data inputs will not have a dataset. - if assoc.dataset: - name = assoc.name + if assoc2.dataset: + name = assoc2.name if name not in output_dataset_mapping: output_dataset_mapping[name] = [] - output_dataset_mapping[name].append(self.exported_key(assoc.dataset)) + output_dataset_mapping[name].append(self.exported_key(assoc2.dataset)) if include_job_data: - self.add_dataset(assoc.dataset) + self.add_dataset(assoc2.dataset) - for assoc in job.input_dataset_collections: + for assoc3 in job.input_dataset_collections: # Optional data inputs will not have a dataset. 
- if assoc.dataset_collection: - name = assoc.name + if assoc3.dataset_collection: + name = assoc3.name if name not in input_dataset_collection_mapping: input_dataset_collection_mapping[name] = [] - input_dataset_collection_mapping[name].append(self.exported_key(assoc.dataset_collection)) + input_dataset_collection_mapping[name].append(self.exported_key(assoc3.dataset_collection)) if include_job_data: - self.export_collection(assoc.dataset_collection) + self.export_collection(assoc3.dataset_collection) - for assoc in job.input_dataset_collection_elements: - if assoc.dataset_collection_element: - name = assoc.name + for assoc4 in job.input_dataset_collection_elements: + if assoc4.dataset_collection_element: + name = assoc4.name if name not in input_dataset_collection_element_mapping: input_dataset_collection_element_mapping[name] = [] input_dataset_collection_element_mapping[name].append( - self.exported_key(assoc.dataset_collection_element) + self.exported_key(assoc4.dataset_collection_element) ) if include_job_data: - if assoc.dataset_collection_element.is_collection: - self.export_collection(assoc.dataset_collection_element.element_object) + if assoc4.dataset_collection_element.is_collection: + assert isinstance( + assoc4.dataset_collection_element.element_object, model.DatasetCollection + ) + self.export_collection(assoc4.dataset_collection_element.element_object) else: - self.add_dataset(assoc.dataset_collection_element.element_object) + assert isinstance( + assoc4.dataset_collection_element.element_object, model.DatasetInstance + ) + self.add_dataset(assoc4.dataset_collection_element.element_object) - for assoc in job.output_dataset_collection_instances: + for assoc5 in job.output_dataset_collection_instances: # Optional data outputs will not have a dataset. 
# These are implicit outputs, we don't need to export them - if assoc.dataset_collection_instance: - name = assoc.name + if assoc5.dataset_collection_instance: + name = assoc5.name if name not in output_dataset_collection_mapping: output_dataset_collection_mapping[name] = [] output_dataset_collection_mapping[name].append( - self.exported_key(assoc.dataset_collection_instance) + self.exported_key(assoc5.dataset_collection_instance) ) - for assoc in job.output_dataset_collections: - if assoc.dataset_collection: - name = assoc.name + for assoc6 in job.output_dataset_collections: + if assoc6.dataset_collection: + name = assoc6.name if name not in implicit_output_dataset_collection_mapping: implicit_output_dataset_collection_mapping[name] = [] implicit_output_dataset_collection_mapping[name].append( - self.exported_key(assoc.dataset_collection) + self.exported_key(assoc6.dataset_collection) ) if include_job_data: - self.export_collection(assoc.dataset_collection) + self.export_collection(assoc6.dataset_collection) job_attrs["input_dataset_mapping"] = input_dataset_mapping job_attrs["input_dataset_collection_mapping"] = input_dataset_collection_mapping diff --git a/lib/galaxy/webapps/galaxy/api/jobs.py b/lib/galaxy/webapps/galaxy/api/jobs.py index 7d0d2757b333..e90a27368851 100644 --- a/lib/galaxy/webapps/galaxy/api/jobs.py +++ b/lib/galaxy/webapps/galaxy/api/jobs.py @@ -264,7 +264,7 @@ def common_problems( for job_input_assoc in job.input_datasets: input_dataset_instance = job_input_assoc.dataset if input_dataset_instance is None: - continue + continue # type:ignore[unreachable] # TODO if job_input_assoc.dataset is indeed never None, remove the above check if input_dataset_instance.get_total_size() == 0: has_empty_inputs = True input_instance_id = input_dataset_instance.id From 136ae7544c1295d5a4af39664880c14b2537ba07 Mon Sep 17 00:00:00 2001 From: Dannon Baker Date: Fri, 12 Apr 2024 14:53:43 -0400 Subject: [PATCH 554/669] In history view, add info message for when 
history import is complete --- client/src/components/History/HistoryView.vue | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/client/src/components/History/HistoryView.vue b/client/src/components/History/HistoryView.vue index b8bad9242b5e..103909b0a9f7 100644 --- a/client/src/components/History/HistoryView.vue +++ b/client/src/components/History/HistoryView.vue @@ -25,6 +25,8 @@
+ History imported and set to your active history. + - +
@@ -67,6 +69,7 @@ export default { data() { return { selectedCollections: [], + copySuccess: false, }; }, computed: { @@ -127,6 +130,9 @@ export default { onViewCollection(collection) { this.selectedCollections = [...this.selectedCollections, collection]; }, + copyOkay() { + this.copySuccess = true; + }, }, }; From 14ec0172a4aacb1274ada4caf95a7c8a9a3bc9c9 Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 12 Apr 2024 14:54:17 -0400 Subject: [PATCH 555/669] Fix type of Job.state, remove type:ignore --- lib/galaxy/jobs/__init__.py | 2 +- lib/galaxy/model/__init__.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/jobs/__init__.py b/lib/galaxy/jobs/__init__.py index 69952eb0c88a..5ba851359c92 100644 --- a/lib/galaxy/jobs/__init__.py +++ b/lib/galaxy/jobs/__init__.py @@ -1554,7 +1554,7 @@ def change_state(self, state, info=False, flush=True, job=None): def get_state(self) -> str: job = self.get_job() self.sa_session.refresh(job) - return job.state # type:ignore[return-value] + return job.state def set_runner(self, runner_url, external_id): log.warning("set_runner() is deprecated, use set_job_destination()") diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index d373f27dcfdf..7e6bbc9a40dc 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -1367,7 +1367,7 @@ class Job(Base, JobLike, UsesCreateAndUpdateTime, Dictifiable, Serializable): tool_version: Mapped[Optional[str]] = mapped_column(TEXT, default="1.0.0") galaxy_version: Mapped[Optional[str]] = mapped_column(String(64), default=None) dynamic_tool_id: Mapped[Optional[int]] = mapped_column(ForeignKey("dynamic_tool.id"), index=True) - state: Mapped[Optional[str]] = mapped_column(String(64), index=True) + state: Mapped[str] = mapped_column(String(64), index=True, nullable=True) info: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) copied_from_job_id: Mapped[Optional[int]] command_line: Mapped[Optional[str]] = 
mapped_column(TEXT) From 82a0ef40b74b307c289361ca6422252c83095956 Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 12 Apr 2024 14:50:15 -0400 Subject: [PATCH 556/669] Start removing type:ingore comments from SA2.0 PR --- lib/galaxy/managers/notification.py | 8 +++----- lib/galaxy/managers/sharable.py | 2 +- lib/galaxy/workflow/modules.py | 4 ++-- 3 files changed, 6 insertions(+), 8 deletions(-) diff --git a/lib/galaxy/managers/notification.py b/lib/galaxy/managers/notification.py index cbb1265f26ce..f480325d8818 100644 --- a/lib/galaxy/managers/notification.py +++ b/lib/galaxy/managers/notification.py @@ -275,8 +275,8 @@ def update_broadcasted_notification(self, notification_id: int, request: Notific def get_user_notification_preferences(self, user: User) -> UserNotificationPreferences: """Gets the user's current notification preferences or the default ones if no preferences are set.""" current_notification_preferences = ( - user.preferences[NOTIFICATION_PREFERENCES_SECTION_NAME] # type:ignore[index] - if NOTIFICATION_PREFERENCES_SECTION_NAME in user.preferences # type:ignore[operator] + user.preferences[NOTIFICATION_PREFERENCES_SECTION_NAME] + if NOTIFICATION_PREFERENCES_SECTION_NAME in user.preferences else None ) try: @@ -291,9 +291,7 @@ def update_user_notification_preferences( """Updates the user's notification preferences with the requested changes.""" notification_preferences = self.get_user_notification_preferences(user) notification_preferences.update(request.preferences) - user.preferences[NOTIFICATION_PREFERENCES_SECTION_NAME] = ( - notification_preferences.model_dump_json() - ) # type:ignore[index] + user.preferences[NOTIFICATION_PREFERENCES_SECTION_NAME] = notification_preferences.model_dump_json() with transaction(self.sa_session): self.sa_session.commit() return notification_preferences diff --git a/lib/galaxy/managers/sharable.py b/lib/galaxy/managers/sharable.py index 35e1afec570d..1f607f5d2589 100644 --- a/lib/galaxy/managers/sharable.py +++ 
b/lib/galaxy/managers/sharable.py @@ -179,7 +179,7 @@ def share_with(self, item, user: User, flush: bool = True): """ # precondition: user has been validated # get or create - existing = self.get_share_assocs(item, user=user) # type:ignore[dict-item] + existing = self.get_share_assocs(item, user=user) if existing: return existing.pop(0) return self._create_user_share_assoc(item, user, flush=flush) diff --git a/lib/galaxy/workflow/modules.py b/lib/galaxy/workflow/modules.py index d64bfdfca942..68d9d9e9e5e3 100644 --- a/lib/galaxy/workflow/modules.py +++ b/lib/galaxy/workflow/modules.py @@ -2516,13 +2516,13 @@ def inject(self, step: WorkflowStep, step_args=None, steps=None, **kwargs): If step_args is provided from a web form this is applied to generate 'state' else it is just obtained from the database. """ - step.upgrade_messages = {} # type: ignore[assignment] + step.upgrade_messages = {} # Make connection information available on each step by input name. step.setup_input_connections_by_name() # Populate module. 
- module = step.module = module_factory.from_workflow_step(self.trans, step, **kwargs) # type: ignore[assignment] + module = step.module = module_factory.from_workflow_step(self.trans, step, **kwargs) # Any connected input needs to have value DummyDataset (these # are not persisted so we need to do it every time) From bf60ea9fc78c08e1c0d3291bd31dcfd91aa09775 Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 9 Feb 2024 16:07:03 -0500 Subject: [PATCH 557/669] Fix bug: call unique() on result, not select stmt --- lib/galaxy/tools/actions/upload_common.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/lib/galaxy/tools/actions/upload_common.py b/lib/galaxy/tools/actions/upload_common.py index a345abde954a..a5460c08efa0 100644 --- a/lib/galaxy/tools/actions/upload_common.py +++ b/lib/galaxy/tools/actions/upload_common.py @@ -441,7 +441,6 @@ def active_folders(trans, folder): select(LibraryFolder) .filter_by(parent=folder, deleted=False) .options(joinedload(LibraryFolder.actions)) - .unique() .order_by(LibraryFolder.name) ) - return trans.sa_session.scalars(stmt).all() + return trans.sa_session.scalars(stmt).unique().all() From 92dfa466f54535eea184e793b58cc30eefcde010 Mon Sep 17 00:00:00 2001 From: mvdbeek Date: Sat, 13 Apr 2024 13:52:22 +0200 Subject: [PATCH 558/669] Fix None passed to LengthValidator It is valid to pass None as a default value to the validator. It should just return ValueError, which it will do with this change via the inner validator. Fixes https://github.com/galaxyproject/galaxy/issues/17961 Ideally we'd add type annotations, but with the current structure and instantiation order it seems impossible to do this in a meaningful way. If we refactored the validators to be functions, and validators functions were registered per parameter type we could have narrow types on the validator signature. 
--- lib/galaxy/tools/parameters/validation.py | 4 +++- test/unit/app/tools/test_parameter_validation.py | 8 ++++++++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/lib/galaxy/tools/parameters/validation.py b/lib/galaxy/tools/parameters/validation.py index 143b1c187029..cf02d86c56b5 100644 --- a/lib/galaxy/tools/parameters/validation.py +++ b/lib/galaxy/tools/parameters/validation.py @@ -186,7 +186,9 @@ def __init__(self, message, length_min, length_max, negate): super().__init__(message, range_min=length_min, range_max=length_max, negate=negate) def validate(self, value, trans=None): - super().validate(len(value), trans) + if value is None: + raise ValueError("No value provided") + super().validate(len(value) if value else 0, trans) class DatasetOkValidator(Validator): diff --git a/test/unit/app/tools/test_parameter_validation.py b/test/unit/app/tools/test_parameter_validation.py index 2e0bf0ffe857..bddb0e325309 100644 --- a/test/unit/app/tools/test_parameter_validation.py +++ b/test/unit/app/tools/test_parameter_validation.py @@ -186,6 +186,14 @@ def test_LengthValidator(self): p.validate("bar") p.validate("f") p.validate("foobarbaz") + p = self._parameter_for( + xml=""" + + +""" + ) + with self.assertRaisesRegex(ValueError, "No value provided"): + p.validate(None) def test_InRangeValidator(self): p = self._parameter_for( From 8354154204c4f2a63eb980b59091c6064465e4c2 Mon Sep 17 00:00:00 2001 From: John Davis Date: Sat, 13 Apr 2024 13:26:20 -0400 Subject: [PATCH 559/669] Improve variable naming Following suggestions from code review --- lib/galaxy/model/store/__init__.py | 66 +++++++++++++++--------------- 1 file changed, 33 insertions(+), 33 deletions(-) diff --git a/lib/galaxy/model/store/__init__.py b/lib/galaxy/model/store/__init__.py index 012a7d0cc53d..967295d1202f 100644 --- a/lib/galaxy/model/store/__init__.py +++ b/lib/galaxy/model/store/__init__.py @@ -2065,84 +2065,84 @@ def export_jobs( output_dataset_collection_mapping: Dict[str, 
List[Union[str, int]]] = {} implicit_output_dataset_collection_mapping: Dict[str, List[Union[str, int]]] = {} - for assoc in job.input_datasets: + for id_assoc in job.input_datasets: # Optional data inputs will not have a dataset. - if assoc.dataset: - name = assoc.name + if id_assoc.dataset: + name = id_assoc.name if name not in input_dataset_mapping: input_dataset_mapping[name] = [] - input_dataset_mapping[name].append(self.exported_key(assoc.dataset)) + input_dataset_mapping[name].append(self.exported_key(id_assoc.dataset)) if include_job_data: - self.add_dataset(assoc.dataset) + self.add_dataset(id_assoc.dataset) - for assoc2 in job.output_datasets: + for od_assoc in job.output_datasets: # Optional data inputs will not have a dataset. - if assoc2.dataset: - name = assoc2.name + if od_assoc.dataset: + name = od_assoc.name if name not in output_dataset_mapping: output_dataset_mapping[name] = [] - output_dataset_mapping[name].append(self.exported_key(assoc2.dataset)) + output_dataset_mapping[name].append(self.exported_key(od_assoc.dataset)) if include_job_data: - self.add_dataset(assoc2.dataset) + self.add_dataset(od_assoc.dataset) - for assoc3 in job.input_dataset_collections: + for idc_assoc in job.input_dataset_collections: # Optional data inputs will not have a dataset. 
- if assoc3.dataset_collection: - name = assoc3.name + if idc_assoc.dataset_collection: + name = idc_assoc.name if name not in input_dataset_collection_mapping: input_dataset_collection_mapping[name] = [] - input_dataset_collection_mapping[name].append(self.exported_key(assoc3.dataset_collection)) + input_dataset_collection_mapping[name].append(self.exported_key(idc_assoc.dataset_collection)) if include_job_data: - self.export_collection(assoc3.dataset_collection) + self.export_collection(idc_assoc.dataset_collection) - for assoc4 in job.input_dataset_collection_elements: - if assoc4.dataset_collection_element: - name = assoc4.name + for idce_assoc in job.input_dataset_collection_elements: + if idce_assoc.dataset_collection_element: + name = idce_assoc.name if name not in input_dataset_collection_element_mapping: input_dataset_collection_element_mapping[name] = [] input_dataset_collection_element_mapping[name].append( - self.exported_key(assoc4.dataset_collection_element) + self.exported_key(idce_assoc.dataset_collection_element) ) if include_job_data: - if assoc4.dataset_collection_element.is_collection: + if idce_assoc.dataset_collection_element.is_collection: assert isinstance( - assoc4.dataset_collection_element.element_object, model.DatasetCollection + idce_assoc.dataset_collection_element.element_object, model.DatasetCollection ) - self.export_collection(assoc4.dataset_collection_element.element_object) + self.export_collection(idce_assoc.dataset_collection_element.element_object) else: assert isinstance( - assoc4.dataset_collection_element.element_object, model.DatasetInstance + idce_assoc.dataset_collection_element.element_object, model.DatasetInstance ) - self.add_dataset(assoc4.dataset_collection_element.element_object) + self.add_dataset(idce_assoc.dataset_collection_element.element_object) - for assoc5 in job.output_dataset_collection_instances: + for odci_assoc in job.output_dataset_collection_instances: # Optional data outputs will not have a dataset. 
# These are implicit outputs, we don't need to export them - if assoc5.dataset_collection_instance: - name = assoc5.name + if odci_assoc.dataset_collection_instance: + name = odci_assoc.name if name not in output_dataset_collection_mapping: output_dataset_collection_mapping[name] = [] output_dataset_collection_mapping[name].append( - self.exported_key(assoc5.dataset_collection_instance) + self.exported_key(odci_assoc.dataset_collection_instance) ) - for assoc6 in job.output_dataset_collections: - if assoc6.dataset_collection: - name = assoc6.name + for odc_assoc in job.output_dataset_collections: + if odc_assoc.dataset_collection: + name = odc_assoc.name if name not in implicit_output_dataset_collection_mapping: implicit_output_dataset_collection_mapping[name] = [] implicit_output_dataset_collection_mapping[name].append( - self.exported_key(assoc6.dataset_collection) + self.exported_key(odc_assoc.dataset_collection) ) if include_job_data: - self.export_collection(assoc6.dataset_collection) + self.export_collection(odc_assoc.dataset_collection) job_attrs["input_dataset_mapping"] = input_dataset_mapping job_attrs["input_dataset_collection_mapping"] = input_dataset_collection_mapping From 320012140cae85d4c735fc1ec84b2dbca47a5c21 Mon Sep 17 00:00:00 2001 From: Alireza Heidari Date: Thu, 21 Mar 2024 15:18:40 +0100 Subject: [PATCH 560/669] =?UTF-8?q?=F0=9F=9B=A0=EF=B8=8F:=20refactor=20`Da?= =?UTF-8?q?tasetAttributes`=20to=20use=20`composition=20API`=20and=20`type?= =?UTF-8?q?Script`?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../DatasetInformation/DatasetAttributes.vue | 227 ++++++++++-------- 1 file changed, 125 insertions(+), 102 deletions(-) diff --git a/client/src/components/DatasetInformation/DatasetAttributes.vue b/client/src/components/DatasetInformation/DatasetAttributes.vue index e197e126b6a9..dfa3eb9a0fb8 100644 --- a/client/src/components/DatasetInformation/DatasetAttributes.vue +++ 
b/client/src/components/DatasetInformation/DatasetAttributes.vue @@ -1,157 +1,180 @@ + + - - From 22bae91fa28ed920cf7dc21836d8efb9ca0a53d5 Mon Sep 17 00:00:00 2001 From: Alireza Heidari Date: Thu, 21 Mar 2024 15:20:49 +0100 Subject: [PATCH 561/669] =?UTF-8?q?=F0=9F=9B=A0=EF=B8=8F:=20refactor=20`Da?= =?UTF-8?q?tasetAttributes`=20test=20to=20`typeScript`?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ...utes.test.js => DatasetAttributes.test.ts} | 21 ++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) rename client/src/components/DatasetInformation/{DatasetAttributes.test.js => DatasetAttributes.test.ts} (87%) diff --git a/client/src/components/DatasetInformation/DatasetAttributes.test.js b/client/src/components/DatasetInformation/DatasetAttributes.test.ts similarity index 87% rename from client/src/components/DatasetInformation/DatasetAttributes.test.js rename to client/src/components/DatasetInformation/DatasetAttributes.test.ts index 7862c91da571..36a921094a39 100644 --- a/client/src/components/DatasetInformation/DatasetAttributes.test.js +++ b/client/src/components/DatasetInformation/DatasetAttributes.test.ts @@ -1,16 +1,22 @@ +import { createTestingPinia } from "@pinia/testing"; import { mount } from "@vue/test-utils"; import axios from "axios"; import MockAdapter from "axios-mock-adapter"; import flushPromises from "flush-promises"; +import { setActivePinia } from "pinia"; import { getLocalVue } from "tests/jest/helpers"; -import MockProvider from "../providers/MockProvider"; -import DatasetAttributes from "./DatasetAttributes"; +import MockProvider from "@/components/providers/MockProvider"; + +import DatasetAttributes from "./DatasetAttributes.vue"; const localVue = getLocalVue(); async function buildWrapper(conversion_disable = false) { - const wrapper = mount(DatasetAttributes, { + const pinia = createTestingPinia(); + setActivePinia(pinia); + + const wrapper = mount(DatasetAttributes as 
object, { propsData: { datasetId: "dataset_id", }, @@ -28,8 +34,11 @@ async function buildWrapper(conversion_disable = false) { FontAwesomeIcon: false, FormElement: false, }, + pinia, }); + await flushPromises(); + return wrapper; } @@ -37,7 +46,9 @@ describe("DatasetAttributes", () => { it("check rendering", async () => { const axiosMock = new MockAdapter(axios); axiosMock.onPut(`/dataset/set_edit`).reply(200, { message: "success", status: "success" }); + const wrapper = await buildWrapper(); + expect(wrapper.findAll("button").length).toBe(6); expect(wrapper.findAll("#attribute_text").length).toBe(1); expect(wrapper.findAll("#conversion_text").length).toBe(1); @@ -45,14 +56,18 @@ describe("DatasetAttributes", () => { expect(wrapper.findAll("#permission_text").length).toBe(1); expect(wrapper.findAll(".tab-pane").length).toBe(3); expect(wrapper.findAll(".ui-portlet-section").length).toBe(2); + const $button = wrapper.find("#dataset-attributes-default-save"); + await $button.trigger("click"); await flushPromises(); + expect(wrapper.find("[role=alert]").text()).toBe("success"); }); it("check rendering without conversion option", async () => { const wrapper = await buildWrapper(true); + expect(wrapper.findAll("button").length).toBe(5); expect(wrapper.findAll("#attribute_text").length).toBe(1); expect(wrapper.findAll("#conversion_text").length).toBe(0); From 1a32cb8c4d6896818d368c704cdc8c1da6438767 Mon Sep 17 00:00:00 2001 From: Alireza Heidari Date: Thu, 21 Mar 2024 15:21:04 +0100 Subject: [PATCH 562/669] =?UTF-8?q?=F0=9F=9B=A0=EF=B8=8F:=20refactor=20`Da?= =?UTF-8?q?tasetDetails`=20to=20use=20`composition=20API`=20and=20`typeScr?= =?UTF-8?q?ipt`?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../DatasetInformation/DatasetDetails.vue | 103 ++++++++---------- 1 file changed, 47 insertions(+), 56 deletions(-) diff --git a/client/src/components/DatasetInformation/DatasetDetails.vue 
b/client/src/components/DatasetInformation/DatasetDetails.vue index f9f2606669ed..c332a92be8ad 100644 --- a/client/src/components/DatasetInformation/DatasetDetails.vue +++ b/client/src/components/DatasetInformation/DatasetDetails.vue @@ -1,9 +1,41 @@ + + - - From 721f43fc7ed2ba433d74c978a5f66b80f07e8615 Mon Sep 17 00:00:00 2001 From: Alireza Heidari Date: Wed, 3 Apr 2024 16:08:22 +0200 Subject: [PATCH 579/669] =?UTF-8?q?=F0=9F=94=A5:=20drop=20unused=20`Datase?= =?UTF-8?q?tAttributesProvider?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/components/providers/DatasetProvider.js | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/client/src/components/providers/DatasetProvider.js b/client/src/components/providers/DatasetProvider.js index b2d393743042..a22a4632617d 100644 --- a/client/src/components/providers/DatasetProvider.js +++ b/client/src/components/providers/DatasetProvider.js @@ -1,21 +1,6 @@ -import axios from "axios"; -import { getAppRoot } from "onload/loadConfig"; - import { fetchDatasetDetails } from "@/api/datasets"; import { SingleQueryProvider } from "@/components/providers/SingleQueryProvider"; -import { rethrowSimple } from "@/utils/simple-error"; import { stateIsTerminal } from "./utils"; -async function getDatasetAttributes({ id }) { - const url = `${getAppRoot()}dataset/get_edit?dataset_id=${id}`; - try { - const { data } = await axios.get(url); - return data; - } catch (e) { - rethrowSimple(e); - } -} - -export const DatasetAttributesProvider = SingleQueryProvider(getDatasetAttributes); export default SingleQueryProvider(fetchDatasetDetails, stateIsTerminal); From 279c401e3653608663d2c99b8d6ee6bec130d0d8 Mon Sep 17 00:00:00 2001 From: Alireza Heidari Date: Wed, 3 Apr 2024 16:38:04 +0200 Subject: [PATCH 580/669] =?UTF-8?q?=F0=9F=9B=A0=EF=B8=8F:=20refactor=20`Da?= =?UTF-8?q?tasetInformation`=20to=20drop=20`DatasetProvider`?= MIME-Version: 1.0 Content-Type: text/plain; 
charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../DatasetInformation/DatasetDetails.vue | 10 +- .../DatasetInformation/DatasetInformation.vue | 252 +++++++++--------- 2 files changed, 130 insertions(+), 132 deletions(-) diff --git a/client/src/components/DatasetInformation/DatasetDetails.vue b/client/src/components/DatasetInformation/DatasetDetails.vue index 19e3c8ccd081..b115325e8596 100644 --- a/client/src/components/DatasetInformation/DatasetDetails.vue +++ b/client/src/components/DatasetInformation/DatasetDetails.vue @@ -103,9 +103,9 @@ onUnmounted(() => { {{ datasetLoadingError }} -
-
- +
+
+ @@ -135,8 +135,8 @@ onUnmounted(() => {
-
- +
+ diff --git a/client/src/components/DatasetInformation/DatasetInformation.vue b/client/src/components/DatasetInformation/DatasetInformation.vue index ab5f0fc636ab..7eed66fd12a7 100644 --- a/client/src/components/DatasetInformation/DatasetInformation.vue +++ b/client/src/components/DatasetInformation/DatasetInformation.vue @@ -1,5 +1,5 @@ From db0d78910f888a97f9cdbb12e52d49eba9fd3009 Mon Sep 17 00:00:00 2001 From: Alireza Heidari Date: Wed, 3 Apr 2024 17:01:46 +0200 Subject: [PATCH 581/669] =?UTF-8?q?=E2=9C=A8:=20add=20`fetchJobCommonProbl?= =?UTF-8?q?ems`=20and=20`JobInputSummary`=20type=20to=20`api/jobs`?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- client/src/api/jobs.ts | 3 +++ 1 file changed, 3 insertions(+) diff --git a/client/src/api/jobs.ts b/client/src/api/jobs.ts index 25ba2434364a..526b8c02b4c7 100644 --- a/client/src/api/jobs.ts +++ b/client/src/api/jobs.ts @@ -12,4 +12,7 @@ export const jobsFetcher = fetcher.path("/api/jobs").method("get").create(); export type JobDetails = components["schemas"]["ShowFullJobResponse"] | components["schemas"]["EncodedJobDetails"]; export const fetchJobDetails = fetcher.path("/api/jobs/{job_id}").method("get").create(); +export type JobInputSummary = components["schemas"]["JobInputSummary"]; +export const fetchJobCommonProblems = fetcher.path("/api/jobs/{job_id}/common_problems").method("get").create(); + export const jobsReportError = fetcher.path("/api/jobs/{job_id}/error").method("post").create(); From df99d810f573d6c1824e4c91933341b265f34d84 Mon Sep 17 00:00:00 2001 From: Alireza Heidari Date: Wed, 3 Apr 2024 17:06:21 +0200 Subject: [PATCH 582/669] =?UTF-8?q?=F0=9F=9B=A0=EF=B8=8F:=20refactor=20`Da?= =?UTF-8?q?tasetInformation`=20to=20drop=20`DatasetProvider`,=20`JobDetail?= =?UTF-8?q?sProvider`=20and=20`JobProblemProvider?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../DatasetInformation/DatasetError.vue | 237 
++++++++++-------- 1 file changed, 137 insertions(+), 100 deletions(-) diff --git a/client/src/components/DatasetInformation/DatasetError.vue b/client/src/components/DatasetInformation/DatasetError.vue index 6489ec64fe48..838c936ad97f 100644 --- a/client/src/components/DatasetInformation/DatasetError.vue +++ b/client/src/components/DatasetInformation/DatasetError.vue @@ -2,19 +2,19 @@ import { library } from "@fortawesome/fontawesome-svg-core"; import { faBug } from "@fortawesome/free-solid-svg-icons"; import { FontAwesomeIcon } from "@fortawesome/vue-fontawesome"; +import { AxiosError } from "axios"; import { BAlert, BButton } from "bootstrap-vue"; import { storeToRefs } from "pinia"; -import { computed, ref } from "vue"; +import { computed, onMounted, ref } from "vue"; import { DatasetDetails } from "@/api"; -import { DatasetProvider } from "@/components/providers"; -import { JobDetailsProvider, JobProblemProvider } from "@/components/providers/JobProvider"; +import { fetchDatasetDetails } from "@/api/datasets"; +import { fetchJobCommonProblems, fetchJobDetails, JobDetails, JobInputSummary } from "@/api/jobs"; +import { sendErrorReport } from "@/components/DatasetInformation/services"; import { useMarkdown } from "@/composables/markdown"; import { useUserStore } from "@/stores/userStore"; import localize from "@/utils/localization"; -import { sendErrorReport } from "./services"; - import DatasetErrorDetails from "@/components/DatasetInformation/DatasetErrorDetails.vue"; import FormElement from "@/components/Form/FormElement.vue"; @@ -24,7 +24,7 @@ interface Props { datasetId: string; } -defineProps(); +const props = defineProps(); const userStore = useUserStore(); const { currentUser } = storeToRefs(userStore); @@ -32,8 +32,13 @@ const { currentUser } = storeToRefs(userStore); const { renderMarkdown } = useMarkdown({ openLinksInNewPage: true }); const message = ref(""); +const jobLoading = ref(true); const errorMessage = ref(""); +const datasetLoading = 
ref(false); +const jobDetails = ref(); +const jobProblems = ref(); const resultMessages = ref([]); +const dataset = ref(null); const showForm = computed(() => { const noResult = !resultMessages.value.length; @@ -42,11 +47,51 @@ const showForm = computed(() => { return noResult || hasError; }); -function onError(err: string) { - errorMessage.value = err; +async function getDatasetDetails() { + datasetLoading.value = true; + + try { + const data = await fetchDatasetDetails({ id: props.datasetId }); + + dataset.value = data; + } catch (e) { + const error = e as AxiosError<{ err_msg?: string }>; + + errorMessage.value = error.response?.data?.err_msg || "Unable to fetch available dataset details."; + } finally { + datasetLoading.value = false; + } +} + +async function getJobDetails() { + jobLoading.value = true; + + try { + const { data } = await fetchJobDetails({ job_id: dataset.value?.creating_job as string, full: true }); + + jobDetails.value = data; + } catch (e) { + const error = e as AxiosError<{ err_msg?: string }>; + + errorMessage.value = error.response?.data?.err_msg || "Unable to fetch available dataset details."; + } finally { + jobLoading.value = false; + } +} + +async function getJobProblems() { + try { + const { data } = await fetchJobCommonProblems({ job_id: dataset.value?.creating_job as string }); + + jobProblems.value = data; + } catch (e) { + const error = e as AxiosError<{ err_msg?: string }>; + + errorMessage.value = error.response?.data?.err_msg || "Unable to fetch available dataset details."; + } } -function submit(dataset: DatasetDetails, userEmailJob: string) { +function submit(dataset: DatasetDetails, userEmailJob?: string | null) { const email = userEmailJob; sendErrorReport(dataset.creating_job, { dataset_id: dataset.id, message: message.value, email }).then( @@ -58,6 +103,15 @@ function submit(dataset: DatasetDetails, userEmailJob: string) { } ); } + +onMounted(async () => { + await getDatasetDetails(); + + if (dataset.value?.creating_job 
!== null) { + await getJobDetails(); + await getJobProblems(); + } +}); From 9945f356e322a6ad20dfbb559208036abc75e3f3 Mon Sep 17 00:00:00 2001 From: Alireza Heidari Date: Wed, 3 Apr 2024 17:07:08 +0200 Subject: [PATCH 583/669] =?UTF-8?q?=F0=9F=8E=A8:=20improve=20`DatasetDetai?= =?UTF-8?q?ls`?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- client/src/components/DatasetInformation/DatasetDetails.vue | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/client/src/components/DatasetInformation/DatasetDetails.vue b/client/src/components/DatasetInformation/DatasetDetails.vue index b115325e8596..bbc0fa6929fc 100644 --- a/client/src/components/DatasetInformation/DatasetDetails.vue +++ b/client/src/components/DatasetInformation/DatasetDetails.vue @@ -60,17 +60,15 @@ async function loadJobDetails() { jobLoading.value = true; try { - const data = await fetchJobDetails({ job_id: dataset.value?.creating_job as string, full: true }); + const { data } = await fetchJobDetails({ job_id: dataset.value?.creating_job as string, full: true }); if (stateIsTerminal(data)) { - jobLoading.value = false; - clearTimeout(jobTimeOut.value); } else { jobTimeOut.value = setTimeout(loadJobDetails, 3000); } - jobDetails.value = data as unknown as JobDetails; + jobDetails.value = data; } catch (e) { const error = e as AxiosError<{ err_msg?: string }>; From eb77b743b741a4a41fdb625eb0b9f6d06752a7ec Mon Sep 17 00:00:00 2001 From: Alireza Heidari Date: Wed, 3 Apr 2024 17:07:56 +0200 Subject: [PATCH 584/669] =?UTF-8?q?=F0=9F=94=A5:=20remove=20unused=20`JobP?= =?UTF-8?q?roblemsProvider`=20`JobProvider`?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- client/src/components/providers/JobProvider.js | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/client/src/components/providers/JobProvider.js b/client/src/components/providers/JobProvider.js index 09f3983c412e..a17b32ae24fa 100644 
--- a/client/src/components/providers/JobProvider.js +++ b/client/src/components/providers/JobProvider.js @@ -15,18 +15,7 @@ async function jobDetails({ jobId }) { } } -async function jobProblems({ jobId }) { - const url = `${getAppRoot()}api/jobs/${jobId}/common_problems`; - try { - const { data } = await axios.get(url); - return data; - } catch (e) { - rethrowSimple(e); - } -} - export const JobDetailsProvider = SingleQueryProvider(jobDetails, stateIsTerminal); -export const JobProblemProvider = SingleQueryProvider(jobProblems, stateIsTerminal); export function jobsProvider(ctx, callback, extraParams = {}) { const { root, ...requestParams } = ctx; From fbc45afa0f91c4fa21f5bd970a3d7bf86a324ae8 Mon Sep 17 00:00:00 2001 From: Alireza Heidari Date: Thu, 4 Apr 2024 15:27:56 +0200 Subject: [PATCH 585/669] =?UTF-8?q?=F0=9F=90=9B:=20fix=20`DatasetAttribute?= =?UTF-8?q?s`=20test=20to=20work=20with=20new=20changes?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../DatasetAttributes.test.ts | 47 +++++++++---------- 1 file changed, 22 insertions(+), 25 deletions(-) diff --git a/client/src/components/DatasetInformation/DatasetAttributes.test.ts b/client/src/components/DatasetInformation/DatasetAttributes.test.ts index 36a921094a39..55526b250fa5 100644 --- a/client/src/components/DatasetInformation/DatasetAttributes.test.ts +++ b/client/src/components/DatasetInformation/DatasetAttributes.test.ts @@ -1,39 +1,36 @@ import { createTestingPinia } from "@pinia/testing"; +import { getLocalVue } from "@tests/jest/helpers"; import { mount } from "@vue/test-utils"; import axios from "axios"; import MockAdapter from "axios-mock-adapter"; import flushPromises from "flush-promises"; import { setActivePinia } from "pinia"; -import { getLocalVue } from "tests/jest/helpers"; - -import MockProvider from "@/components/providers/MockProvider"; import DatasetAttributes from "./DatasetAttributes.vue"; +const DATASET_ID = "dataset_id"; + const 
localVue = getLocalVue(); -async function buildWrapper(conversion_disable = false) { +async function montDatasetAttributes(conversion_disable = false) { const pinia = createTestingPinia(); setActivePinia(pinia); + const axiosMock = new MockAdapter(axios); + axiosMock.onPut(`/dataset/set_edit`).reply(200, { message: "success", status: "success" }); + axiosMock.onGet(`/dataset/get_edit?dataset_id=${DATASET_ID}`).reply(200, { + attribute_inputs: [{ name: "attribute_text", type: "text" }], + conversion_inputs: [{ name: "conversion_text", type: "text" }], + conversion_disable: conversion_disable, + datatype_inputs: [{ name: "datatype_text", type: "text" }], + permission_inputs: [{ name: "permission_text", type: "text" }], + }); + const wrapper = mount(DatasetAttributes as object, { propsData: { - datasetId: "dataset_id", + datasetId: DATASET_ID, }, localVue, - stubs: { - DatasetAttributesProvider: MockProvider({ - result: { - attribute_inputs: [{ name: "attribute_text", type: "text" }], - conversion_inputs: [{ name: "conversion_text", type: "text" }], - conversion_disable: conversion_disable, - datatype_inputs: [{ name: "datatype_text", type: "text" }], - permission_inputs: [{ name: "permission_text", type: "text" }], - }, - }), - FontAwesomeIcon: false, - FormElement: false, - }, pinia, }); @@ -44,10 +41,7 @@ async function buildWrapper(conversion_disable = false) { describe("DatasetAttributes", () => { it("check rendering", async () => { - const axiosMock = new MockAdapter(axios); - axiosMock.onPut(`/dataset/set_edit`).reply(200, { message: "success", status: "success" }); - - const wrapper = await buildWrapper(); + const wrapper = await montDatasetAttributes(); expect(wrapper.findAll("button").length).toBe(6); expect(wrapper.findAll("#attribute_text").length).toBe(1); @@ -57,16 +51,19 @@ describe("DatasetAttributes", () => { expect(wrapper.findAll(".tab-pane").length).toBe(3); expect(wrapper.findAll(".ui-portlet-section").length).toBe(2); - const $button = 
wrapper.find("#dataset-attributes-default-save"); + const saveButton = wrapper.find("#dataset-attributes-default-save"); + + await saveButton.trigger("click"); - await $button.trigger("click"); await flushPromises(); expect(wrapper.find("[role=alert]").text()).toBe("success"); }); it("check rendering without conversion option", async () => { - const wrapper = await buildWrapper(true); + const wrapper = await montDatasetAttributes(true); + + await flushPromises(); expect(wrapper.findAll("button").length).toBe(5); expect(wrapper.findAll("#attribute_text").length).toBe(1); From 1bb08900e6088b15faf71cafb687f5a86942262c Mon Sep 17 00:00:00 2001 From: Alireza Heidari Date: Thu, 4 Apr 2024 15:29:34 +0200 Subject: [PATCH 586/669] =?UTF-8?q?=F0=9F=90=9B:=20fix=20`DatasetError`=20?= =?UTF-8?q?test=20to=20mock=20fetchers?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../DatasetInformation/DatasetError.test.ts | 127 ++++++++++-------- .../DatasetInformation/DatasetError.vue | 23 ++-- 2 files changed, 82 insertions(+), 68 deletions(-) diff --git a/client/src/components/DatasetInformation/DatasetError.test.ts b/client/src/components/DatasetInformation/DatasetError.test.ts index cbf40327c86d..af238c067af1 100644 --- a/client/src/components/DatasetInformation/DatasetError.test.ts +++ b/client/src/components/DatasetInformation/DatasetError.test.ts @@ -1,45 +1,58 @@ import { mount } from "@vue/test-utils"; +import flushPromises from "flush-promises"; import { createPinia } from "pinia"; import { getLocalVue } from "tests/jest/helpers"; -import MockProvider from "@/components/providers/MockProvider"; +import { mockFetcher } from "@/api/schema/__mocks__"; import { useUserStore } from "@/stores/userStore"; import DatasetError from "./DatasetError.vue"; -jest.mock("@/components/providers", () => { - return {}; -}); - const localVue = getLocalVue(); -async function buildWrapper(has_duplicate_inputs = true, has_empty_inputs = true, 
user_email = "") { +const DATASET_ID = "dataset_id"; + +async function montDatasetError(has_duplicate_inputs = true, has_empty_inputs = true, user_email = "") { const pinia = createPinia(); + mockFetcher + .path("/api/datasets/{dataset_id}") + .method("get") + .mock({ + data: { + id: DATASET_ID, + creating_job: "creating_job", + }, + }); + + mockFetcher + .path("/api/jobs/{job_id}") + .method("get") + .mock({ + data: { + tool_id: "tool_id", + tool_stderr: "tool_stderr", + job_stderr: "job_stderr", + job_messages: [{ desc: "message_1" }, { desc: "message_2" }], + user_email: user_email, + }, + }); + + mockFetcher + .path("/api/jobs/{job_id}/common_problems") + .method("get") + .mock({ + data: { + has_duplicate_inputs: has_duplicate_inputs, + has_empty_inputs: has_empty_inputs, + }, + }); + const wrapper = await mount(DatasetError as object, { propsData: { - datasetId: "dataset_id", + datasetId: DATASET_ID, }, localVue, - stubs: { - JobDetailsProvider: MockProvider({ - result: { - tool_id: "tool_id", - tool_stderr: "tool_stderr", - job_stderr: "job_stderr", - job_messages: [{ desc: "message_1" }, { desc: "message_2" }], - user_email: user_email, - }, - }), - JobProblemProvider: MockProvider({ - result: { has_duplicate_inputs: has_duplicate_inputs, has_empty_inputs: has_empty_inputs }, - }), - DatasetProvider: MockProvider({ - result: { id: "dataset_id", creating_job: "creating_job" }, - }), - FontAwesomeIcon: false, - FormElement: false, - }, pinia, }); @@ -52,14 +65,14 @@ async function buildWrapper(has_duplicate_inputs = true, has_empty_inputs = true total_disk_usage: 0, }; + await flushPromises(); + return wrapper; } describe("DatasetError", () => { it("check props with common problems", async () => { - const wrapper = await buildWrapper(); - - console.log(wrapper.html()); + const wrapper = await montDatasetError(); expect(wrapper.find("#dataset-error-tool-id").text()).toBe("tool_id"); expect(wrapper.find("#dataset-error-tool-stderr").text()).toBe("tool_stderr"); 
@@ -73,42 +86,40 @@ describe("DatasetError", () => { expect(wrapper.find("#dataset-error-has-duplicate-inputs")).toBeDefined(); }); - // it("check props without common problems", async () => { - // const wrapper = await buildWrapper(false, false, "user_email"); + it("check props without common problems", async () => { + const wrapper = await montDatasetError(false, false, "user_email"); - // expect(wrapper.find("#dataset-error-tool-id").text()).toBe("tool_id"); - // expect(wrapper.find("#dataset-error-tool-stderr").text()).toBe("tool_stderr"); - // expect(wrapper.find("#dataset-error-job-stderr").text()).toBe("job_stderr"); - - // expect(wrapper.findAll("#dataset-error-has-empty-inputs").length).toBe(0); - // expect(wrapper.findAll("#dataset-error-has-duplicate-inputs").length).toBe(0); - // expect(wrapper.findAll("#dataset-error-email").length).toBe(0); - // }); - - // it("hides form fields and button on success", async () => { - // const wrapper = await buildWrapper(); + expect(wrapper.find("#dataset-error-tool-id").text()).toBe("tool_id"); + expect(wrapper.find("#dataset-error-tool-stderr").text()).toBe("tool_stderr"); + expect(wrapper.find("#dataset-error-job-stderr").text()).toBe("job_stderr"); - // const fieldsAndButton = "#fieldsAndButton"; - // expect(wrapper.find(fieldsAndButton).exists()).toBe(true); + expect(wrapper.findAll("#dataset-error-has-empty-inputs").length).toBe(0); + expect(wrapper.findAll("#dataset-error-has-duplicate-inputs").length).toBe(0); + expect(wrapper.findAll("#dataset-error-email").length).toBe(0); + }); - // await wrapper.setData({ resultMessages: [["message", "success"]] }); + it("hides form fields and button on success", async () => { + const wrapper = await montDatasetError(); - // expect(wrapper.find(fieldsAndButton).exists()).toBe(false); - // }); + mockFetcher + .path("/api/jobs/{job_id}/error") + .method("post") + .mock({ + data: { + messages: ["message", "success"], + }, + }); - // it("does not hide form fields and button on 
error", async () => { - // const wrapper = await buildWrapper(); + const FormAndSubmitButton = "#dataset-error-form"; + expect(wrapper.find(FormAndSubmitButton).exists()).toBe(true); - // const fieldsAndButton = "#fieldsAndButton"; - // expect(wrapper.find(fieldsAndButton).exists()).toBe(true); + const submitButton = "#dataset-error-submit"; + expect(wrapper.find(submitButton).exists()).toBe(true); - // const messages = [ - // ["message", "success"], - // ["message", "danger"], - // ]; // at least one has "danger" + await wrapper.find(submitButton).trigger("click"); - // await wrapper.setData({ resultMessages: messages }); + await flushPromises(); - // expect(wrapper.find(fieldsAndButton).exists()).toBe(true); - // }); + expect(wrapper.find(FormAndSubmitButton).exists()).toBe(false); + }); }); diff --git a/client/src/components/DatasetInformation/DatasetError.vue b/client/src/components/DatasetInformation/DatasetError.vue index 838c936ad97f..a11defa3eb69 100644 --- a/client/src/components/DatasetInformation/DatasetError.vue +++ b/client/src/components/DatasetInformation/DatasetError.vue @@ -91,17 +91,20 @@ async function getJobProblems() { } } -function submit(dataset: DatasetDetails, userEmailJob?: string | null) { +async function submit(dataset: DatasetDetails, userEmailJob?: string | null) { const email = userEmailJob; - sendErrorReport(dataset.creating_job, { dataset_id: dataset.id, message: message.value, email }).then( - (resMsg) => { - resultMessages.value = resMsg; - }, - (errMsg) => { - errorMessage.value = errMsg; - } - ); + try { + const res = await sendErrorReport(dataset.creating_job, { + dataset_id: dataset.id, + message: message.value, + email, + }); + + resultMessages.value = res as string[][]; + } catch (error: any) { + resultMessages.value = error; + } } onMounted(async () => { @@ -174,7 +177,7 @@ onMounted(async () => { -
+
{{ localize("Your email address") }} {{ currentUser.email }} {{ localize("You must be logged in to receive emails") }} From 761754389a9cbbf2c227253f474bcf737d524d28 Mon Sep 17 00:00:00 2001 From: Alireza Heidari Date: Thu, 4 Apr 2024 15:30:21 +0200 Subject: [PATCH 587/669] =?UTF-8?q?=F0=9F=90=9B:=20fix=20`DatasetInformati?= =?UTF-8?q?on`=20test=20to=20work=20with=20new=20changes?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../DatasetInformation.test.ts | 51 +++++++++++-------- 1 file changed, 31 insertions(+), 20 deletions(-) diff --git a/client/src/components/DatasetInformation/DatasetInformation.test.ts b/client/src/components/DatasetInformation/DatasetInformation.test.ts index 630d2fee5170..1ef0379d7ae8 100644 --- a/client/src/components/DatasetInformation/DatasetInformation.test.ts +++ b/client/src/components/DatasetInformation/DatasetInformation.test.ts @@ -5,19 +5,37 @@ import MockAdapter from "axios-mock-adapter"; import { format, parseISO } from "date-fns"; import flushPromises from "flush-promises"; -import datasetResponse from "./testData/datasetResponse.json"; - import DatasetInformation from "./DatasetInformation.vue"; const HDA_ID = "FOO_HDA_ID"; -const mockDatasetProvider = { - render() { - return this.$scopedSlots.default({ - loading: false, - result: datasetResponse, - }); - }, +interface DatasetResponse { + id: string; + hid: number; + uuid: string; + name: string; + file_ext: string; + file_name: string; + file_size: number; + dataset_id: string; + history_id: string; + create_time: string; + metadata_dbkey: string; + [key: string]: any; +} + +const datasetResponse: DatasetResponse = { + id: "FOO_HDA_ID", + hid: 32, + uuid: "5e89abe4-e8f7-468a-9ef1-d4e322183fa5", + name: "Add column on data 31", + file_size: 93, + file_ext: "txt", + dataset_id: "201592c8e20dac24", + history_id: "6fc9fbb81c497f69", + create_time: "2020-09-28T15:54:04.803756", + metadata_dbkey: "?", + file_name: 
"/home/oleg/galaxy/database/objects/5/e/8/dataset_5e89abe4-e8f7-468a-9ef1-d4e322183fa5.dat", }; const localVue = getLocalVue(); @@ -27,24 +45,17 @@ describe("DatasetInformation/DatasetInformation", () => { let axiosMock: MockAdapter; let datasetInfoTable: Wrapper; - beforeEach(() => { - axiosMock = new MockAdapter(axios); - axiosMock.onGet(new RegExp(`api/configuration/decode/*`)).reply(200, { decoded_id: 123 }); - }); - afterEach(() => { axiosMock.restore(); }); beforeEach(async () => { - const propsData = { - hda_id: HDA_ID, - }; + axiosMock = new MockAdapter(axios); + axiosMock.onGet(new RegExp(`api/configuration/decode/*`)).reply(200, { decoded_id: 123 }); wrapper = mount(DatasetInformation as object, { - propsData, - stubs: { - DatasetProvider: mockDatasetProvider, + propsData: { + dataset: datasetResponse, }, localVue, }); From 91269d0403d7d62df2cc76ca05de7f18df8f963f Mon Sep 17 00:00:00 2001 From: Alireza Heidari Date: Thu, 4 Apr 2024 15:32:09 +0200 Subject: [PATCH 588/669] =?UTF-8?q?=F0=9F=94=A5:=20remove=20unused=20`Data?= =?UTF-8?q?setInformation/testData/datasetResponse.json`=20file?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../testData/datasetResponse.json | 13 ------------- 1 file changed, 13 deletions(-) delete mode 100644 client/src/components/DatasetInformation/testData/datasetResponse.json diff --git a/client/src/components/DatasetInformation/testData/datasetResponse.json b/client/src/components/DatasetInformation/testData/datasetResponse.json deleted file mode 100644 index da106122f80c..000000000000 --- a/client/src/components/DatasetInformation/testData/datasetResponse.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "id": "FOO_HDA_ID", - "hid": 32, - "create_time": "2020-09-28T15:54:04.803756", - "name": "Add column on data 31", - "file_size": 93, - "file_ext": "txt", - "metadata_dbkey": "?", - "dataset_id": "201592c8e20dac24", - "history_id": "6fc9fbb81c497f69", - "uuid": 
"5e89abe4-e8f7-468a-9ef1-d4e322183fa5", - "file_name": "/home/oleg/galaxy/database/objects/5/e/8/dataset_5e89abe4-e8f7-468a-9ef1-d4e322183fa5.dat" -} From d6c52ba101526d567789cd4e4b60af8df36b42f9 Mon Sep 17 00:00:00 2001 From: Alireza Heidari Date: Tue, 9 Apr 2024 11:56:03 +0200 Subject: [PATCH 589/669] Update client/src/components/DatasetInformation/DatasetError.test.ts MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: David López <46503462+davelopez@users.noreply.github.com> --- client/src/components/DatasetInformation/DatasetError.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/src/components/DatasetInformation/DatasetError.test.ts b/client/src/components/DatasetInformation/DatasetError.test.ts index af238c067af1..c3213e8dc6fc 100644 --- a/client/src/components/DatasetInformation/DatasetError.test.ts +++ b/client/src/components/DatasetInformation/DatasetError.test.ts @@ -48,7 +48,7 @@ async function montDatasetError(has_duplicate_inputs = true, has_empty_inputs = }, }); - const wrapper = await mount(DatasetError as object, { + const wrapper = mount(DatasetError as object, { propsData: { datasetId: DATASET_ID, }, From cb2e8e4a155d6853d9fd1e30e2ac5e42a9e8c0b4 Mon Sep 17 00:00:00 2001 From: Alireza Heidari Date: Tue, 9 Apr 2024 12:01:15 +0200 Subject: [PATCH 590/669] =?UTF-8?q?=F0=9F=8E=A8:=20fix=20typo=20in=20`moun?= =?UTF-8?q?tDatasetAttributes`=20function=20name=20in=20`DatasetAttributes?= =?UTF-8?q?`?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../components/DatasetInformation/DatasetAttributes.test.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/client/src/components/DatasetInformation/DatasetAttributes.test.ts b/client/src/components/DatasetInformation/DatasetAttributes.test.ts index 55526b250fa5..5ccd71b7e22c 100644 --- a/client/src/components/DatasetInformation/DatasetAttributes.test.ts +++ 
b/client/src/components/DatasetInformation/DatasetAttributes.test.ts @@ -12,7 +12,7 @@ const DATASET_ID = "dataset_id"; const localVue = getLocalVue(); -async function montDatasetAttributes(conversion_disable = false) { +async function mountDatasetAttributes(conversion_disable = false) { const pinia = createTestingPinia(); setActivePinia(pinia); @@ -41,7 +41,7 @@ async function montDatasetAttributes(conversion_disable = false) { describe("DatasetAttributes", () => { it("check rendering", async () => { - const wrapper = await montDatasetAttributes(); + const wrapper = await mountDatasetAttributes(); expect(wrapper.findAll("button").length).toBe(6); expect(wrapper.findAll("#attribute_text").length).toBe(1); @@ -61,7 +61,7 @@ describe("DatasetAttributes", () => { }); it("check rendering without conversion option", async () => { - const wrapper = await montDatasetAttributes(true); + const wrapper = await mountDatasetAttributes(true); await flushPromises(); From ff33001391ec6522e162a53aebb2356a1f08f5f2 Mon Sep 17 00:00:00 2001 From: Alireza Heidari Date: Tue, 9 Apr 2024 12:02:00 +0200 Subject: [PATCH 591/669] =?UTF-8?q?=F0=9F=8E=A8:=20use=20try/catch=20in=20?= =?UTF-8?q?`DatasetAttributes`?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../DatasetInformation/DatasetAttributes.vue | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/client/src/components/DatasetInformation/DatasetAttributes.vue b/client/src/components/DatasetInformation/DatasetAttributes.vue index 6d026d8026b1..1bad64943b6e 100644 --- a/client/src/components/DatasetInformation/DatasetAttributes.vue +++ b/client/src/components/DatasetInformation/DatasetAttributes.vue @@ -51,13 +51,19 @@ function onError(message: string) { messageVariant.value = "danger"; } -function submit(key: string, operation: string) { - setAttributes(props.datasetId, formData.value[key], operation).then((response) => { +async function submit(key: string, 
operation: string) { + try { + const response = await setAttributes(props.datasetId, formData.value[key], operation); + messageText.value = response.message; messageVariant.value = response.status; historyStore.loadCurrentHistory(); - }, onError); + } catch (e) { + const error = e as AxiosError<{ err_msg?: string }>; + + onError(error.response?.data?.err_msg || "Unable to save dataset attributes."); + } } async function loadDatasetAttributes() { From a8afec611fab3920bdc62ddd746def3938752e37 Mon Sep 17 00:00:00 2001 From: Alireza Heidari Date: Tue, 9 Apr 2024 12:15:38 +0200 Subject: [PATCH 592/669] =?UTF-8?q?=F0=9F=9B=A0=EF=B8=8F:=20update=20`Data?= =?UTF-8?q?setDetails`=20type=20usage=20to=20`HDADetailed`?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- client/src/components/DatasetInformation/DatasetDetails.vue | 4 ++-- client/src/components/DatasetInformation/DatasetError.vue | 6 +++--- .../components/DatasetInformation/DatasetInformation.vue | 4 ++-- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/client/src/components/DatasetInformation/DatasetDetails.vue b/client/src/components/DatasetInformation/DatasetDetails.vue index bbc0fa6929fc..667e33d2c802 100644 --- a/client/src/components/DatasetInformation/DatasetDetails.vue +++ b/client/src/components/DatasetInformation/DatasetDetails.vue @@ -4,7 +4,7 @@ import { BAlert } from "bootstrap-vue"; import { storeToRefs } from "pinia"; import { onMounted, onUnmounted, ref } from "vue"; -import { DatasetDetails } from "@/api"; +import { type HDADetailed } from "@/api"; import { fetchDatasetDetails } from "@/api/datasets"; import { fetchJobDetails, JobDetails } from "@/api/jobs"; import { useConfig } from "@/composables/config"; @@ -36,7 +36,7 @@ const loading = ref(false); const jobLoading = ref(true); const jobTimeOut = ref(null); const jobDetails = ref(); -const dataset = ref(null); +const dataset = ref(null); const jobLoadingError = ref(null); const 
datasetLoadingError = ref(null); diff --git a/client/src/components/DatasetInformation/DatasetError.vue b/client/src/components/DatasetInformation/DatasetError.vue index a11defa3eb69..9cb2d9ef0097 100644 --- a/client/src/components/DatasetInformation/DatasetError.vue +++ b/client/src/components/DatasetInformation/DatasetError.vue @@ -7,7 +7,7 @@ import { BAlert, BButton } from "bootstrap-vue"; import { storeToRefs } from "pinia"; import { computed, onMounted, ref } from "vue"; -import { DatasetDetails } from "@/api"; +import { type HDADetailed } from "@/api"; import { fetchDatasetDetails } from "@/api/datasets"; import { fetchJobCommonProblems, fetchJobDetails, JobDetails, JobInputSummary } from "@/api/jobs"; import { sendErrorReport } from "@/components/DatasetInformation/services"; @@ -38,7 +38,7 @@ const datasetLoading = ref(false); const jobDetails = ref(); const jobProblems = ref(); const resultMessages = ref([]); -const dataset = ref(null); +const dataset = ref(null); const showForm = computed(() => { const noResult = !resultMessages.value.length; @@ -91,7 +91,7 @@ async function getJobProblems() { } } -async function submit(dataset: DatasetDetails, userEmailJob?: string | null) { +async function submit(dataset: HDADetailed, userEmailJob?: string | null) { const email = userEmailJob; try { diff --git a/client/src/components/DatasetInformation/DatasetInformation.vue b/client/src/components/DatasetInformation/DatasetInformation.vue index 7eed66fd12a7..b311f6eda8ee 100644 --- a/client/src/components/DatasetInformation/DatasetInformation.vue +++ b/client/src/components/DatasetInformation/DatasetInformation.vue @@ -1,5 +1,5 @@ + + diff --git a/client/src/components/PageEditor/ObjectPermissionsModal.vue b/client/src/components/PageEditor/ObjectPermissionsModal.vue new file mode 100644 index 000000000000..cb250f1884e6 --- /dev/null +++ b/client/src/components/PageEditor/ObjectPermissionsModal.vue @@ -0,0 +1,15 @@ + + + diff --git 
a/client/src/components/PageEditor/PageEditorMarkdown.vue b/client/src/components/PageEditor/PageEditorMarkdown.vue index 84c061f2cd20..6e1cb5bcc76a 100644 --- a/client/src/components/PageEditor/PageEditorMarkdown.vue +++ b/client/src/components/PageEditor/PageEditorMarkdown.vue @@ -6,6 +6,20 @@ mode="page" @onUpdate="onUpdate">