diff --git a/client/src/components/History/CurrentHistory/HistoryCounter.vue b/client/src/components/History/CurrentHistory/HistoryCounter.vue
index 5e125b14a07f..4b0f64e6dfaf 100644
--- a/client/src/components/History/CurrentHistory/HistoryCounter.vue
+++ b/client/src/components/History/CurrentHistory/HistoryCounter.vue
@@ -1,67 +1,112 @@
-
-
-
- {{ history.size | niceFileSize }}
-
-
-
-
- {{ history.contents_active.active }}
-
-
-
- {{ history.contents_active.deleted }}
-
-
-
- {{ history.contents_active.hidden }}
-
-
-
-
-
-
+
+
+
+
+
+ {{ history.size | niceFileSize }}
+
+
+
+ {{ history.size | niceFileSize }}
+
+
+
+
+
+
+ {{ history.contents_active.active }}
+
+
+
+ {{ history.contents_active.deleted }}
+
+
+
+ {{ history.contents_active.hidden }}
+
+
+
+
+
+
+
+
+
+
+
diff --git a/client/src/components/History/CurrentHistory/HistorySelectPreferredObjectStore.test.js b/client/src/components/History/CurrentHistory/HistorySelectPreferredObjectStore.test.js
new file mode 100644
index 000000000000..6dc25f447c17
--- /dev/null
+++ b/client/src/components/History/CurrentHistory/HistorySelectPreferredObjectStore.test.js
@@ -0,0 +1,64 @@
+import { mount } from "@vue/test-utils";
+import { getLocalVue } from "jest/helpers";
+import HistorySelectPreferredObjectStore from "./HistorySelectPreferredObjectStore";
+import axios from "axios";
+import MockAdapter from "axios-mock-adapter";
+import flushPromises from "flush-promises";
+
+const localVue = getLocalVue(true);
+
+const TEST_ROOT = "/";
+const TEST_HISTORY_ID = "myTestHistoryId";
+
+const TEST_HISTORY = {
+ id: TEST_HISTORY_ID,
+ preferred_object_store_id: null,
+};
+
+function mountComponent() {
+ const wrapper = mount(HistorySelectPreferredObjectStore, {
+ propsData: { userPreferredObjectStoreId: null, history: TEST_HISTORY, root: TEST_ROOT },
+ localVue,
+ });
+ return wrapper;
+}
+
+import { ROOT_COMPONENT } from "utils/navigation";
+
+const OBJECT_STORES = [
+ { object_store_id: "object_store_1", badges: [], quota: { enabled: false } },
+ { object_store_id: "object_store_2", badges: [], quota: { enabled: false } },
+];
+
+describe("HistorySelectPreferredObjectStore.vue", () => {
+ let axiosMock;
+
+ beforeEach(async () => {
+ axiosMock = new MockAdapter(axios);
+ axiosMock.onGet("/api/object_store?selectable=true").reply(200, OBJECT_STORES);
+ });
+
+ afterEach(async () => {
+ axiosMock.restore();
+ });
+
+ it("updates object store to default on selection null", async () => {
+ const wrapper = mountComponent();
+ await flushPromises();
+ const els = wrapper.findAll(ROOT_COMPONENT.preferences.object_store_selection.option_buttons.selector);
+ expect(els.length).toBe(3);
+ const galaxyDefaultOption = wrapper.find(
+ ROOT_COMPONENT.preferences.object_store_selection.option_button({ object_store_id: "__null__" }).selector
+ );
+ expect(galaxyDefaultOption.exists()).toBeTruthy();
+ axiosMock
+ .onPut(`/api/histories/${TEST_HISTORY_ID}`, expect.objectContaining({ preferred_object_store_id: null }))
+ .reply(202);
+ await galaxyDefaultOption.trigger("click");
+ await flushPromises();
+ const errorEl = wrapper.find(".object-store-selection-error");
+ expect(errorEl.exists()).toBeFalsy();
+ const emitted = wrapper.emitted();
+ expect(emitted["updated"][0][0]).toEqual(null);
+ });
+});
diff --git a/client/src/components/History/CurrentHistory/HistorySelectPreferredObjectStore.vue b/client/src/components/History/CurrentHistory/HistorySelectPreferredObjectStore.vue
new file mode 100644
index 000000000000..622329363faf
--- /dev/null
+++ b/client/src/components/History/CurrentHistory/HistorySelectPreferredObjectStore.vue
@@ -0,0 +1,79 @@
+
+
+
+
+
diff --git a/client/src/components/History/CurrentHistory/HistoryTargetPreferredObjectStorePopover.vue b/client/src/components/History/CurrentHistory/HistoryTargetPreferredObjectStorePopover.vue
new file mode 100644
index 000000000000..1bdaddc0f71d
--- /dev/null
+++ b/client/src/components/History/CurrentHistory/HistoryTargetPreferredObjectStorePopover.vue
@@ -0,0 +1,48 @@
+
+
+ Preferred Target Object Store
+
+ This target object store has been set at the history level.
+
+
+ This target object store has been inherited from your user preferences (set in User -> Preferences ->
+ Preferred Object Store). If that option is updated, this history will target that new default.
+
+
+
+ Change this preferred object store target by clicking on the storage button in the history panel.
+
+
+
+
+
diff --git a/client/src/components/ObjectStore/DescribeObjectStore.test.js b/client/src/components/ObjectStore/DescribeObjectStore.test.js
index 0bda7a01e887..63b6378f9b2e 100644
--- a/client/src/components/ObjectStore/DescribeObjectStore.test.js
+++ b/client/src/components/ObjectStore/DescribeObjectStore.test.js
@@ -11,18 +11,21 @@ const localVue = getLocalVue();
const TEST_STORAGE_API_RESPONSE_WITHOUT_ID = {
object_store_id: null,
private: false,
+ badges: [],
};
const TEST_RENDERED_MARKDOWN_AS_HTML = "<p>My cool markdown</p>\n";
const TEST_STORAGE_API_RESPONSE_WITH_ID = {
object_store_id: "foobar",
private: false,
+ badges: [],
};
const TEST_STORAGE_API_RESPONSE_WITH_NAME = {
object_store_id: "foobar",
name: "my cool storage",
description: "My cool **markdown**",
private: true,
+ badges: [],
};
// works fine without mocking but I guess it is more JS unit-y with the mock?
diff --git a/client/src/components/ObjectStore/DescribeObjectStore.vue b/client/src/components/ObjectStore/DescribeObjectStore.vue
index a352bf0a291f..af924dbf67ec 100644
--- a/client/src/components/ObjectStore/DescribeObjectStore.vue
+++ b/client/src/components/ObjectStore/DescribeObjectStore.vue
@@ -14,6 +14,7 @@
the default configured Galaxy object store .
+
diff --git a/client/src/components/ObjectStore/ObjectStoreBadge.test.js b/client/src/components/ObjectStore/ObjectStoreBadge.test.js
new file mode 100644
index 000000000000..a02d930d575d
--- /dev/null
+++ b/client/src/components/ObjectStore/ObjectStoreBadge.test.js
@@ -0,0 +1,38 @@
+import { mount } from "@vue/test-utils";
+import { getLocalVue } from "jest/helpers";
+import ObjectStoreBadge from "./ObjectStoreBadge";
+import { ROOT_COMPONENT } from "utils/navigation";
+
+const localVue = getLocalVue(true);
+
+const TEST_MESSAGE = "a test message provided by backend";
+
+describe("ObjectStoreBadge", () => {
+ let wrapper;
+
+ function mountBadge(badge) {
+ wrapper = mount(ObjectStoreBadge, {
+ propsData: { badge },
+ localVue,
+ stubs: { "b-popover": true },
+ });
+ }
+
+ it("should render a valid badge for more_secure type", async () => {
+ mountBadge({ type: "more_secure", message: TEST_MESSAGE });
+ const selector = ROOT_COMPONENT.object_store_details.badge_of_type({ type: "more_secure" }).selector;
+ const iconEl = wrapper.find(selector);
+ expect(iconEl.exists()).toBeTruthy();
+ expect(wrapper.vm.message).toContain(TEST_MESSAGE);
+ expect(wrapper.vm.stockMessage).toContain("more secure by the Galaxy adminstrator");
+ });
+
+ it("should render a valid badge for less_secure type", async () => {
+ mountBadge({ type: "less_secure", message: TEST_MESSAGE });
+ const selector = ROOT_COMPONENT.object_store_details.badge_of_type({ type: "less_secure" }).selector;
+ const iconEl = wrapper.find(selector);
+ expect(iconEl.exists()).toBeTruthy();
+ expect(wrapper.vm.message).toContain(TEST_MESSAGE);
+ expect(wrapper.vm.stockMessage).toContain("less secure by the Galaxy adminstrator");
+ });
+});
diff --git a/client/src/components/ObjectStore/ObjectStoreBadge.vue b/client/src/components/ObjectStore/ObjectStoreBadge.vue
new file mode 100644
index 000000000000..3a32ca8b529c
--- /dev/null
+++ b/client/src/components/ObjectStore/ObjectStoreBadge.vue
@@ -0,0 +1,191 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {{ stockMessage }}
+
+
+
+
+
+
+
+
diff --git a/client/src/components/ObjectStore/ObjectStoreBadges.vue b/client/src/components/ObjectStore/ObjectStoreBadges.vue
new file mode 100644
index 000000000000..1d6d8e21a149
--- /dev/null
+++ b/client/src/components/ObjectStore/ObjectStoreBadges.vue
@@ -0,0 +1,35 @@
+
+
+
+
+
+
+
+
diff --git a/client/src/components/ObjectStore/SelectObjectStore.vue b/client/src/components/ObjectStore/SelectObjectStore.vue
new file mode 100644
index 000000000000..d384c0507d7e
--- /dev/null
+++ b/client/src/components/ObjectStore/SelectObjectStore.vue
@@ -0,0 +1,153 @@
+
+
+
+
+
+ {{ error }}
+
+
+
+
+ {{ defaultOptionTitle | localize }}
+ {{ object_store.name }}
+
+
+
+
+
+
+
+
+ {{ whyIsSelectionPreferredText }}
+
+
+
+
+ {{ defaultOptionTitle }}
+ {{ defaultOptionDescription }}
+
+
+ {{ object_store.name }}
+
+
+
+
+
+
+
diff --git a/client/src/components/ObjectStore/ShowSelectedObjectStore.test.js b/client/src/components/ObjectStore/ShowSelectedObjectStore.test.js
new file mode 100644
index 000000000000..8450ad447976
--- /dev/null
+++ b/client/src/components/ObjectStore/ShowSelectedObjectStore.test.js
@@ -0,0 +1,45 @@
+import { mount } from "@vue/test-utils";
+import { getLocalVue } from "jest/helpers";
+import ShowSelectedObjectStore from "./ShowSelectedObjectStore";
+import axios from "axios";
+import MockAdapter from "axios-mock-adapter";
+import flushPromises from "flush-promises";
+
+const localVue = getLocalVue(true);
+const TEST_OBJECT_ID = "os123";
+const OBJECT_STORE_DATA = {
+ object_store_id: TEST_OBJECT_ID,
+ badges: [],
+};
+
+describe("ShowSelectedObjectStore", () => {
+ let wrapper;
+ let axiosMock;
+
+ beforeEach(async () => {
+ axiosMock = new MockAdapter(axios);
+ });
+
+ afterEach(async () => {
+ axiosMock.restore();
+ });
+
+ it("should show a loading message and then a DescribeObjectStore component", async () => {
+ axiosMock.onGet(`/api/object_store/${TEST_OBJECT_ID}`).reply(200, OBJECT_STORE_DATA);
+ wrapper = mount(ShowSelectedObjectStore, {
+ propsData: { preferredObjectStoreId: TEST_OBJECT_ID, forWhat: "Data goes into..." },
+ localVue,
+ stubs: {
+ LoadingSpan: true,
+ DescribeObjectStore: true,
+ },
+ });
+ let loadingEl = wrapper.find("loadingspan-stub");
+ expect(loadingEl.exists()).toBeTruthy();
+ expect(loadingEl.attributes("message")).toBeLocalizationOf("Loading object store details");
+ await flushPromises();
+ loadingEl = wrapper.find("loadingspan-stub");
+ expect(loadingEl.exists()).toBeFalsy();
+ expect(wrapper.find("describeobjectstore-stub").exists()).toBeTruthy();
+ });
+});
diff --git a/client/src/components/ObjectStore/ShowSelectedObjectStore.vue b/client/src/components/ObjectStore/ShowSelectedObjectStore.vue
new file mode 100644
index 000000000000..a0a2e9cf938f
--- /dev/null
+++ b/client/src/components/ObjectStore/ShowSelectedObjectStore.vue
@@ -0,0 +1,37 @@
+
+
+
+
+
+
+
+
diff --git a/client/src/components/ObjectStore/adminConfigMixin.js b/client/src/components/ObjectStore/adminConfigMixin.js
new file mode 100644
index 000000000000..70f265a941ab
--- /dev/null
+++ b/client/src/components/ObjectStore/adminConfigMixin.js
@@ -0,0 +1,15 @@
+import MarkdownIt from "markdown-it";
+
+export default {
+ methods: {
+ adminMarkup(markup) {
+ let markupHtml;
+ if (markup) {
+ markupHtml = MarkdownIt({ html: true }).render(markup);
+ } else {
+ markupHtml = null;
+ }
+ return markupHtml;
+ },
+ },
+};
diff --git a/client/src/components/ObjectStore/showTargetPopoverMixin.js b/client/src/components/ObjectStore/showTargetPopoverMixin.js
new file mode 100644
index 000000000000..4fc88ce24863
--- /dev/null
+++ b/client/src/components/ObjectStore/showTargetPopoverMixin.js
@@ -0,0 +1,18 @@
+import ShowSelectedObjectStore from "./ShowSelectedObjectStore";
+
+export default {
+ components: {
+ ShowSelectedObjectStore,
+ },
+ props: {
+ titleSuffix: {
+ type: String,
+ default: null,
+ },
+ },
+ computed: {
+ title() {
+ return this.l(`Preferred Target Object Store ${this.titleSuffix || ""}`);
+ },
+ },
+};
diff --git a/client/src/components/Tool/ToolCard.vue b/client/src/components/Tool/ToolCard.vue
index b8c6230b428c..94e23c455448 100644
--- a/client/src/components/Tool/ToolCard.vue
+++ b/client/src/components/Tool/ToolCard.vue
@@ -75,6 +75,33 @@
@click="onRemoveFavorite">
+
+
+
+
+
+
+
+
@@ -117,6 +144,9 @@ import { addFavorite, removeFavorite } from "components/Tool/services";
import { FontAwesomeIcon } from "@fortawesome/vue-fontawesome";
import { library } from "@fortawesome/fontawesome-svg-core";
import { faWrench } from "@fortawesome/free-solid-svg-icons";
+import ToolSelectPreferredObjectStore from "./ToolSelectPreferredObjectStore";
+import ToolTargetPreferredObjectStorePopover from "./ToolTargetPreferredObjectStorePopover";
+import { getAppRoot } from "onload/loadConfig";
library.add(faWrench);
@@ -127,6 +157,8 @@ export default {
ToolFooter,
ToolHelp,
ToolSourceMenuItem,
+ ToolSelectPreferredObjectStore,
+ ToolTargetPreferredObjectStorePopover,
},
props: {
id: {
@@ -165,11 +197,22 @@ export default {
type: Boolean,
default: false,
},
+ allowObjectStoreSelection: {
+ type: Boolean,
+ default: false,
+ },
+ preferredObjectStoreId: {
+ type: String,
+ default: null,
+ },
},
data() {
return {
+ root: getAppRoot(),
webhookDetails: [],
errorText: null,
+ showPreferredObjectStoreModal: false,
+ toolPreferredObjectStoreId: this.preferredObjectStoreId,
};
},
computed: {
@@ -246,6 +289,9 @@ export default {
}
);
},
+ onShow() {
+ this.showPreferredObjectStoreModal = true;
+ },
onRemoveFavorite() {
removeFavorite(this.user.id, this.id).then(
(data) => {
@@ -286,6 +332,11 @@ export default {
favorites[objectType] = newFavorites[objectType];
this.$emit("onUpdateFavorites", this.user, JSON.stringify(favorites));
},
+ onUpdatePreferredObjectStoreId(toolPreferredObjectStoreId) {
+ this.showPreferredObjectStoreModal = false;
+ this.toolPreferredObjectStoreId = toolPreferredObjectStoreId;
+ this.$emit("updatePreferredObjectStoreId", toolPreferredObjectStoreId);
+ },
},
};
diff --git a/client/src/components/Tool/ToolForm.vue b/client/src/components/Tool/ToolForm.vue
index b62a8b105712..1ee17466e280 100644
--- a/client/src/components/Tool/ToolForm.vue
+++ b/client/src/components/Tool/ToolForm.vue
@@ -39,8 +39,11 @@
:message-text="messageText"
:message-variant="messageVariant"
:disabled="disabled || showExecuting"
+ :allow-object-store-selection="config.object_store_allows_id_selection"
+ :preferred-object-store-id="preferredObjectStoreId"
itemscope="itemscope"
itemtype="https://schema.org/CreativeWork"
+ @updatePreferredObjectStoreId="onUpdatePreferredObjectStoreId"
@onChangeVersion="onChangeVersion"
@onUpdateFavorites="onUpdateFavorites">
@@ -164,6 +167,7 @@ export default {
validationInternal: null,
validationScrollTo: null,
currentVersion: this.version,
+ preferredObjectStoreId: null,
};
},
computed: {
@@ -269,6 +273,9 @@ export default {
this.showLoading = false;
});
},
+ onUpdatePreferredObjectStoreId(preferredObjectStoreId) {
+ this.preferredObjectStoreId = preferredObjectStoreId;
+ },
onExecute(config, historyId) {
if (this.validationInternal) {
this.validationScrollTo = this.validationInternal.slice();
@@ -292,6 +299,9 @@ export default {
if (this.useCachedJobs) {
jobDef.inputs["use_cached_job"] = true;
}
+ if (this.preferredObjectStoreId) {
+ jobDef.preferred_object_store_id = this.preferredObjectStoreId;
+ }
console.debug("toolForm::onExecute()", jobDef);
submitJob(jobDef).then(
(jobResponse) => {
diff --git a/client/src/components/Tool/ToolSelectPreferredObjectStore.vue b/client/src/components/Tool/ToolSelectPreferredObjectStore.vue
new file mode 100644
index 000000000000..8b055f9810aa
--- /dev/null
+++ b/client/src/components/Tool/ToolSelectPreferredObjectStore.vue
@@ -0,0 +1,48 @@
+
+
+
+
diff --git a/client/src/components/Tool/ToolTargetPreferredObjectStorePopover.vue b/client/src/components/Tool/ToolTargetPreferredObjectStorePopover.vue
new file mode 100644
index 000000000000..a284d85d46d0
--- /dev/null
+++ b/client/src/components/Tool/ToolTargetPreferredObjectStorePopover.vue
@@ -0,0 +1,35 @@
+
+
+ {{ title }}
+
+ This target object store has been set at the tool level. By default, history or user preferences would be
+ used, and if those are not set, Galaxy will pick an administrator-configured default.
+
+
+
+
+ No selection has been made for this tool execution. Defaults from history, user, or Galaxy will be used.
+
+
+ Change this preferred object store target by clicking on the storage button in the tool header.
+
+
+
+
+
diff --git a/client/src/components/User/DiskUsage/Quota/ProvidedQuotaSourceUsageBar.vue b/client/src/components/User/DiskUsage/Quota/ProvidedQuotaSourceUsageBar.vue
new file mode 100644
index 000000000000..a07f2aeeccea
--- /dev/null
+++ b/client/src/components/User/DiskUsage/Quota/ProvidedQuotaSourceUsageBar.vue
@@ -0,0 +1,42 @@
+
+
+
+
+
+
+
+
diff --git a/client/src/components/User/DiskUsage/Quota/QuotaUsageBar.vue b/client/src/components/User/DiskUsage/Quota/QuotaUsageBar.vue
index 2218657a8049..dcd02b9a33c2 100644
--- a/client/src/components/User/DiskUsage/Quota/QuotaUsageBar.vue
+++ b/client/src/components/User/DiskUsage/Quota/QuotaUsageBar.vue
@@ -1,20 +1,20 @@
-
+
{{ quotaUsage.sourceLabel }}
{{ storageSourceText }}
-
+
{{ quotaUsage.niceTotalDiskUsage }}
of {{ quotaUsage.niceQuota }} used
-
+
{{ quotaUsage.quotaPercent }}{{ percentOfDiskQuotaUsedText }}
@@ -37,6 +37,10 @@ export default {
type: Boolean,
default: false,
},
+ compact: {
+ type: Boolean,
+ default: false,
+ },
},
data() {
return {
diff --git a/client/src/components/User/UserPreferences.vue b/client/src/components/User/UserPreferences.vue
index cbb8f2e20ea0..4a1bde8ac130 100644
--- a/client/src/components/User/UserPreferences.vue
+++ b/client/src/components/User/UserPreferences.vue
@@ -32,6 +32,16 @@
+
+
+
+
+
+
{
+ let axiosMock;
+
+ beforeEach(async () => {
+ axiosMock = new MockAdapter(axios);
+ axiosMock.onGet("/api/object_store?selectable=true").reply(200, OBJECT_STORES);
+ });
+
+ afterEach(async () => {
+ axiosMock.restore();
+ });
+
+ it("contains a localized link", async () => {
+ const wrapper = mountComponent();
+ expect(wrapper.vm.$refs["modal"].isHidden).toBeTruthy();
+ const el = await wrapper.find(ROOT_COMPONENT.preferences.object_store.selector);
+ expect(el.text()).toBeLocalizationOf("Preferred Object Store");
+ await el.trigger("click");
+ expect(wrapper.vm.$refs["modal"].isHidden).toBeFalsy();
+ });
+
+ it("updates object store to default on selection null", async () => {
+ const wrapper = mountComponent();
+ const el = await wrapper.find(ROOT_COMPONENT.preferences.object_store.selector);
+ await el.trigger("click");
+ const els = wrapper.findAll(ROOT_COMPONENT.preferences.object_store_selection.option_buttons.selector);
+ expect(els.length).toBe(3);
+ const galaxyDefaultOption = wrapper.find(
+ ROOT_COMPONENT.preferences.object_store_selection.option_button({ object_store_id: "__null__" }).selector
+ );
+ expect(galaxyDefaultOption.exists()).toBeTruthy();
+ axiosMock.onPut("/api/users/current", expect.objectContaining({ preferred_object_store_id: null })).reply(202);
+ await galaxyDefaultOption.trigger("click");
+ await flushPromises();
+ const errorEl = wrapper.find(".object-store-selection-error");
+ expect(errorEl.exists()).toBeFalsy();
+ });
+
+ it("updates object store to object_store_2 on selection", async () => {
+ const wrapper = mountComponent();
+ const el = await wrapper.find(ROOT_COMPONENT.preferences.object_store.selector);
+ await el.trigger("click");
+ const objectStore2Option = wrapper.find(
+ ROOT_COMPONENT.preferences.object_store_selection.option_button({ object_store_id: "object_store_2" })
+ .selector
+ );
+ expect(objectStore2Option.exists()).toBeTruthy();
+ axiosMock
+ .onPut("/api/users/current", expect.objectContaining({ preferred_object_store_id: "object_store_2" }))
+ .reply(202);
+ await objectStore2Option.trigger("click");
+ await flushPromises();
+ const errorEl = wrapper.find(".object-store-selection-error");
+ expect(errorEl.exists()).toBeFalsy();
+ });
+
+ it("displays an error if the user update fails", async () => {
+ const wrapper = mountComponent();
+ const el = await wrapper.find(ROOT_COMPONENT.preferences.object_store.selector);
+ await el.trigger("click");
+ const galaxyDefaultOption = wrapper.find(
+ ROOT_COMPONENT.preferences.object_store_selection.option_button({ object_store_id: "__null__" }).selector
+ );
+ expect(galaxyDefaultOption.exists()).toBeTruthy();
+ axiosMock
+ .onPut("/api/users/current", expect.objectContaining({ preferred_object_store_id: null }))
+ .reply(400, { err_msg: "problem with selection.." });
+ await galaxyDefaultOption.trigger("click");
+ await flushPromises();
+ const errorEl = await wrapper.find(".object-store-selection-error");
+ expect(errorEl.exists()).toBeTruthy();
+ expect(wrapper.vm.error).toBe("problem with selection..");
+ });
+});
diff --git a/client/src/components/User/UserPreferredObjectStore.vue b/client/src/components/User/UserPreferredObjectStore.vue
new file mode 100644
index 000000000000..30b2a82f0ad0
--- /dev/null
+++ b/client/src/components/User/UserPreferredObjectStore.vue
@@ -0,0 +1,100 @@
+
+
+
+
+
Preferred Object Store
+
+ Select a preferred default object store in which the outputs of new jobs will be created.
+
+
+
+
+
+
+
+
+
+
+
diff --git a/client/src/components/Workflow/Run/WorkflowRunFormSimple.vue b/client/src/components/Workflow/Run/WorkflowRunFormSimple.vue
index adc53c4d7c99..c4cb4662a057 100644
--- a/client/src/components/Workflow/Run/WorkflowRunFormSimple.vue
+++ b/client/src/components/Workflow/Run/WorkflowRunFormSimple.vue
@@ -1,43 +1,57 @@
-
-
-
-
Workflow: {{ model.name }}
-
-
-
-
-
-
- Send results to a new history
- Attempt to re-use jobs with identical parameters?
-
-
+
+
+
+
+ Workflow: {{ model.name }}
+
+
+
+
+
+
+ Send results to a new history
+ Attempt to re-use jobs with identical parameters?
+ Send outputs and intermediates to different object stores?
+
+
+
+
+
+
+
+
Expand to full workflow form.
-
-
- Expand to full workflow form.
-
-
+
+
diff --git a/client/src/components/Workflow/Run/WorkflowStorageConfiguration.test.js b/client/src/components/Workflow/Run/WorkflowStorageConfiguration.test.js
new file mode 100644
index 000000000000..3cfdedbc171c
--- /dev/null
+++ b/client/src/components/Workflow/Run/WorkflowStorageConfiguration.test.js
@@ -0,0 +1,65 @@
+import WorkflowStorageConfiguration from "./WorkflowStorageConfiguration";
+import { mount } from "@vue/test-utils";
+import { getLocalVue, findViaNavigation } from "jest/helpers";
+import { ROOT_COMPONENT } from "utils/navigation";
+
+const localVue = getLocalVue(true);
+
+describe("WorkflowStorageConfiguration.vue", () => {
+ let wrapper;
+
+ async function doMount(split) {
+ const propsData = {
+ root: "/root/",
+ splitObjectStore: split,
+ invocationPreferredObjectStoreId: null,
+ invocationPreferredIntermediateObjectStoreId: null,
+ };
+ wrapper = mount(WorkflowStorageConfiguration, {
+ propsData,
+ localVue,
+ });
+ }
+
+ describe("rendering buttons", () => {
+ it("should show two buttons on splitObjectStore", async () => {
+ doMount(true);
+ const primaryEl = findViaNavigation(wrapper, ROOT_COMPONENT.workflow_run.primary_storage_indciator);
+ expect(primaryEl.exists()).toBeTruthy();
+ const intermediateEl = findViaNavigation(
+ wrapper,
+ ROOT_COMPONENT.workflow_run.intermediate_storage_indciator
+ );
+ expect(intermediateEl.exists()).toBeTruthy();
+ });
+
+ it("should show one button on not splitObjectStore", async () => {
+ doMount(false);
+ const primaryEl = findViaNavigation(wrapper, ROOT_COMPONENT.workflow_run.primary_storage_indciator);
+ expect(primaryEl.exists()).toBeTruthy();
+ const intermediateEl = findViaNavigation(
+ wrapper,
+ ROOT_COMPONENT.workflow_run.intermediate_storage_indciator
+ );
+ expect(intermediateEl.exists()).toBeFalsy();
+ });
+ });
+
+ describe("event handling", () => {
+ it("should fire update events when primary selection is updated", async () => {
+ doMount(true);
+ await wrapper.vm.onUpdate("storage123");
+ const emitted = wrapper.emitted();
+ expect(emitted["updated"][0][0]).toEqual("storage123");
+ expect(emitted["updated"][0][1]).toEqual(false);
+ });
+
+ it("should fire an update event when intermediate selection is updated", async () => {
+ doMount(true);
+ await wrapper.vm.onUpdateIntermediate("storage123");
+ const emitted = wrapper.emitted();
+ expect(emitted["updated"][0][0]).toEqual("storage123");
+ expect(emitted["updated"][0][1]).toEqual(true);
+ });
+ });
+});
diff --git a/client/src/components/Workflow/Run/WorkflowStorageConfiguration.vue b/client/src/components/Workflow/Run/WorkflowStorageConfiguration.vue
new file mode 100644
index 000000000000..55f9c51a6edf
--- /dev/null
+++ b/client/src/components/Workflow/Run/WorkflowStorageConfiguration.vue
@@ -0,0 +1,122 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/client/src/components/Workflow/Run/WorkflowTargetPreferredObjectStorePopover.vue b/client/src/components/Workflow/Run/WorkflowTargetPreferredObjectStorePopover.vue
new file mode 100644
index 000000000000..218a78a5ac40
--- /dev/null
+++ b/client/src/components/Workflow/Run/WorkflowTargetPreferredObjectStorePopover.vue
@@ -0,0 +1,36 @@
+
+
+ {{ title }}
+ This target object store has been set at the invocation level.
+
+
+
+ No selection has been made for this workflow invocation. Defaults from history, user, or Galaxy will be
+ used.
+
+
+ Change this preferred object store target by clicking on the storage button in the workflow run header.
+
+
+
+
+
diff --git a/client/src/components/providers/ObjectStoreProvider.js b/client/src/components/providers/ObjectStoreProvider.js
new file mode 100644
index 000000000000..0cfc2dafb381
--- /dev/null
+++ b/client/src/components/providers/ObjectStoreProvider.js
@@ -0,0 +1,16 @@
+import axios from "axios";
+import { getAppRoot } from "onload/loadConfig";
+import { SingleQueryProvider } from "components/providers/SingleQueryProvider";
+import { rethrowSimple } from "utils/simple-error";
+
+async function objectStoreDetails({ id }) {
+ const url = `${getAppRoot()}api/object_store/${id}`;
+ try {
+ const { data } = await axios.get(url);
+ return data;
+ } catch (e) {
+ rethrowSimple(e);
+ }
+}
+
+export const ObjectStoreDetailsProvider = SingleQueryProvider(objectStoreDetails);
diff --git a/client/src/utils/navigation/navigation.yml b/client/src/utils/navigation/navigation.yml
index d9b110a8b862..3d98ffbd6125 100644
--- a/client/src/utils/navigation/navigation.yml
+++ b/client/src/utils/navigation/navigation.yml
@@ -78,10 +78,16 @@ preferences:
manage_api_key: '#edit-preferences-api-key'
current_email: "#user-preferences-current-email"
get_new_key: '#submit'
+ object_store: '#select-preferred-object-store'
delete_account: '#delete-account'
delete_account_input: '#name-input'
delete_account_ok_btn: '.modal-footer .btn-primary'
+ object_store_selection:
+ selectors:
+ option_buttons: '.preferred-object-store-select-button'
+ option_button: '.preferred-object-store-select-button[data-object-store-id="${object_store_id}"]'
+
toolbox_filters:
selectors:
input:
@@ -115,6 +121,10 @@ dataset_details:
transform_action: '[data-transform-action="${action}"]'
deferred_source_uri: '.deferred-dataset-source-uri'
+object_store_details:
+ selectors:
+ badge_of_type: '.object-store-badge-wrapper [data-badge-type="${type}"]'
+
history_panel:
menu:
labels:
@@ -566,6 +576,8 @@ workflow_run:
input_select_field:
type: xpath
selector: '//div[@data-label="${label}"]//span[@class="select2-chosen"]'
+ primary_storage_indciator: '.workflow-storage-indicator-primary'
+ intermediate_storage_indciator: '.workflow-storage-indicator-intermediate'
workflow_editor:
diff --git a/client/tests/jest/helpers.js b/client/tests/jest/helpers.js
index 273ec3a81550..8b8c7f494b63 100644
--- a/client/tests/jest/helpers.js
+++ b/client/tests/jest/helpers.js
@@ -14,6 +14,10 @@ import _l from "utils/localization";
const defaultComparator = (a, b) => a == b;
+export function findViaNavigation(wrapper, component) {
+ return wrapper.find(component.selector);
+}
+
function testLocalize(text) {
if(text) {
return `test_localized<${text}>`;
diff --git a/lib/galaxy/jobs/__init__.py b/lib/galaxy/jobs/__init__.py
index e242eefc4a94..4503bf059cb2 100644
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -1583,8 +1583,15 @@ def _set_object_store_ids(self, job):
# jobs may have this set. Skip this following code if that is the case.
return
- object_store_populator = ObjectStorePopulator(self.app, job.user)
+ object_store = self.app.object_store
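+ # Keep the legacy single-store behavior unless the configured object store
+ # actually allows users to select a backend per job.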
+ if not object_store.object_store_allows_id_selection:
+ self._set_object_store_ids_basic(job)
+ else:
+ self._set_object_store_ids_full(job)
+
+ def _set_object_store_ids_basic(self, job):
object_store_id = self.get_destination_configuration("object_store_id", None)
+ object_store_populator = ObjectStorePopulator(self.app, job.user)
require_sharable = job.requires_sharable_storage(self.app.security_agent)
if object_store_id:
@@ -1603,6 +1610,68 @@ def _set_object_store_ids(self, job):
job.object_store_id = object_store_populator.object_store_id
self._setup_working_directory(job=job)
+ def _set_object_store_ids_full(self, job):
+ user = job.user
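+ # Resolve the effective object store id with the following precedence: job
+ # destination configuration, then the job-level preference, then workflow
+ # invocation preferences, then the history preference, then the user preference.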
+ object_store_id = self.get_destination_configuration("object_store_id", None)
+ split_object_stores = None
+ if object_store_id is None:
+ object_store_id = job.preferred_object_store_id
+ if object_store_id is None and job.workflow_invocation_step:
+ workflow_invocation_step = job.workflow_invocation_step
+ workflow_invocation = job.workflow_invocation_step.workflow_invocation
+ invocation_object_stores = workflow_invocation.preferred_object_stores
+ if invocation_object_stores.is_split_configuration:
+ # Redo for subworkflows...
+ outputs_object_store_populator = ObjectStorePopulator(self.app, user)
+ outputs_object_store_populator.object_store_id = (
+ invocation_object_stores.preferred_outputs_object_store_id
+ )
+ intermediate_object_store_populator = ObjectStorePopulator(self.app, user)
+ intermediate_object_store_populator.object_store_id = (
+ invocation_object_stores.preferred_intermediate_object_store_id
+ )
+
+ # default for the job... probably isn't used for anything except the job working
+ # directory?
+ object_store_id = invocation_object_stores.preferred_outputs_object_store_id
+ object_store_populator = intermediate_object_store_populator
+ output_names = [o.output_name for o in workflow_invocation_step.workflow_step.unique_workflow_outputs]
+
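+ # Route outputs by name: datasets that are workflow outputs of this step go
+ # to the preferred outputs object store, everything else to the intermediate one.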
+ def split_object_stores(output_name):
+ if output_name in output_names:
+ return outputs_object_store_populator
+ else:
+ return intermediate_object_store_populator
+
+ if object_store_id is None:
+ history = job.history
+ if history is not None:
+ object_store_id = history.preferred_object_store_id
+ if object_store_id is None:
+ if user is not None:
+ object_store_id = user.preferred_object_store_id
+
+ require_sharable = job.requires_sharable_storage(self.app.security_agent)
+ if not split_object_stores:
+ object_store_populator = ObjectStorePopulator(self.app, user)
+
+ if object_store_id:
+ object_store_populator.object_store_id = object_store_id
+
+ for dataset_assoc in job.output_datasets + job.output_library_datasets:
+ dataset = dataset_assoc.dataset
+ object_store_populator.set_object_store_id(dataset, require_sharable=require_sharable)
+
+ job.object_store_id = object_store_populator.object_store_id
+ self._setup_working_directory(job=job)
+ else:
+ for dataset_assoc in job.output_datasets + job.output_library_datasets:
+ dataset = dataset_assoc.dataset
+ dataset_object_store_populator = split_object_stores(dataset_assoc.name)
+ dataset_object_store_populator.set_object_store_id(dataset, require_sharable=require_sharable)
+ job.object_store_id = object_store_populator.object_store_id
+ self._setup_working_directory(job=job)
+
def _finish_dataset(self, output_name, dataset, job, context, final_job_state, remote_metadata_directory):
implicit_collection_jobs = job.implicit_collection_jobs_association
purged = dataset.dataset.purged
diff --git a/lib/galaxy/managers/configuration.py b/lib/galaxy/managers/configuration.py
index 9b3c8da9efe1..a20f5f226814 100644
--- a/lib/galaxy/managers/configuration.py
+++ b/lib/galaxy/managers/configuration.py
@@ -126,6 +126,7 @@ def _use_config(item, key: str, **context):
def _config_is_truthy(item, key, **context):
return True if item.get(key) else False
+ object_store = self.app.object_store
self.serializers: Dict[str, base.Serializer] = {
# TODO: this is available from user data, remove
"is_admin_user": lambda *a, **c: False,
@@ -209,8 +210,10 @@ def _config_is_truthy(item, key, **context):
"enable_tool_source_display": _use_config,
"enable_celery_tasks": _use_config,
"quota_source_labels": lambda item, key, **context: list(
- self.app.object_store.get_quota_source_map().get_quota_source_labels()
+ object_store.get_quota_source_map().get_quota_source_labels()
),
+ "object_store_allows_id_selection": lambda item, key, **context: object_store.object_store_allows_id_selection(),
+ "object_store_ids_allowing_selection": lambda item, key, **context: object_store.object_store_ids_allowing_selection(),
"user_library_import_dir_available": lambda item, key, **context: bool(item.get("user_library_import_dir")),
"welcome_directory": _use_config,
}
diff --git a/lib/galaxy/managers/histories.py b/lib/galaxy/managers/histories.py
index e3e873e766c9..53093c95adf8 100644
--- a/lib/galaxy/managers/histories.py
+++ b/lib/galaxy/managers/histories.py
@@ -30,6 +30,7 @@
sharable,
)
from galaxy.managers.base import (
+ ModelDeserializingError,
Serializer,
SortableManager,
)
@@ -37,6 +38,7 @@
HDABasicInfo,
ShareHistoryExtra,
)
+from galaxy.security.validate_user_input import validate_preferred_object_store_id
from galaxy.structured_app import MinimalManagerApp
log = logging.getLogger(__name__)
@@ -425,6 +427,7 @@ def __init__(
"annotation",
"tags",
"update_time",
+ "preferred_object_store_id",
],
)
self.add_view(
@@ -445,6 +448,7 @@ def __init__(
"state_details",
"state_ids",
"hid_counter",
+ "preferred_object_store_id",
# 'community_rating',
# 'user_rating',
],
@@ -468,6 +472,7 @@ def __init__(
# 'contents_states',
"contents_active",
"hid_counter",
+ "preferred_object_store_id",
],
include_keys_from="summary",
)
@@ -627,9 +632,17 @@ def add_deserializers(self):
{
"name": self.deserialize_basestring,
"genome_build": self.deserialize_genome_build,
+ "preferred_object_store_id": self.deserialize_preferred_object_store_id,
}
)
+ def deserialize_preferred_object_store_id(self, item, key, val, **context):
+ preferred_object_store_id = val
+ validation_error = validate_preferred_object_store_id(self.app.object_store, preferred_object_store_id)
+ if validation_error:
+ raise ModelDeserializingError(validation_error)
+ return self.default_deserializer(item, key, preferred_object_store_id, **context)
+
class HistoryFilters(sharable.SharableModelFilters, deletable.PurgableFiltersMixin):
model_class = model.History
diff --git a/lib/galaxy/managers/users.py b/lib/galaxy/managers/users.py
index 81191083584b..2736171dd4cf 100644
--- a/lib/galaxy/managers/users.py
+++ b/lib/galaxy/managers/users.py
@@ -39,6 +39,7 @@
VALID_EMAIL_RE,
validate_email,
validate_password,
+ validate_preferred_object_store_id,
validate_publicname,
)
from galaxy.structured_app import (
@@ -682,6 +683,7 @@ def __init__(self, app: MinimalManagerApp):
"tags_used",
# all annotations
# 'annotations'
+ "preferred_object_store_id",
],
include_keys_from="summary",
)
@@ -735,11 +737,18 @@ class UserDeserializer(base.ModelDeserializer):
def add_deserializers(self):
super().add_deserializers()
- self.deserializers.update(
- {
- "username": self.deserialize_username,
- }
- )
+ user_deserializers: Dict[str, base.Deserializer] = {
+ "username": self.deserialize_username,
+ "preferred_object_store_id": self.deserialize_preferred_object_store_id,
+ }
+ self.deserializers.update(user_deserializers)
+
+ def deserialize_preferred_object_store_id(self, item: Any, key: Any, val: Any, **context):
+ preferred_object_store_id = val
+ validation_error = validate_preferred_object_store_id(self.app.object_store, preferred_object_store_id)
+ if validation_error:
+ raise base.ModelDeserializingError(validation_error)
+ return self.default_deserializer(item, key, preferred_object_store_id, **context)
def deserialize_username(self, item, key, username, trans=None, **context):
# TODO: validate_publicname requires trans and should(?) raise exceptions
diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py
index a39c42aa6a06..f66a3078033a 100644
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -640,6 +640,7 @@ class User(Base, Dictifiable, RepresentById):
last_password_change = Column(DateTime, default=now)
external = Column(Boolean, default=False)
form_values_id = Column(Integer, ForeignKey("form_values.id"), index=True)
+ preferred_object_store_id = Column(String(255), nullable=True)
deleted = Column(Boolean, index=True, default=False)
purged = Column(Boolean, index=True, default=False)
disk_usage = Column(Numeric(15, 0), index=True)
@@ -715,6 +716,7 @@ class User(Base, Dictifiable, RepresentById):
"deleted",
"active",
"last_password_change",
+ "preferred_object_store_id",
]
def __init__(self, email=None, password=None, username=None):
@@ -1210,6 +1212,7 @@ class Job(Base, JobLike, UsesCreateAndUpdateTime, Dictifiable, Serializable):
imported = Column(Boolean, default=False, index=True)
params = Column(TrimmedString(255), index=True)
handler = Column(TrimmedString(255), index=True)
+ preferred_object_store_id = Column(String(255), nullable=True)
user = relationship("User")
galaxy_session = relationship("GalaxySession")
@@ -2689,6 +2692,7 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable
importable = Column(Boolean, default=False)
slug = Column(TEXT)
published = Column(Boolean, index=True, default=False)
+ preferred_object_store_id = Column(String(255), nullable=True)
datasets = relationship(
"HistoryDatasetAssociation", back_populates="history", cascade_backrefs=False, order_by=lambda: asc(HistoryDatasetAssociation.hid) # type: ignore[has-type]
@@ -2787,6 +2791,7 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable
"importable",
"slug",
"empty",
+ "preferred_object_store_id",
]
default_name = "Unnamed history"
@@ -7689,6 +7694,21 @@ class StoredWorkflowMenuEntry(Base, RepresentById):
)
+class WorkflowInvocationObjectStores(NamedTuple):
+ preferred_object_store_id: Optional[str]
+ preferred_outputs_object_store_id: Optional[str]
+ preferred_intermediate_object_store_id: Optional[str]
+
+ @property
+ def is_split_configuration(self):
+ preferred_outputs_object_store_id = self.preferred_outputs_object_store_id
+ preferred_intermediate_object_store_id = self.preferred_intermediate_object_store_id
+ has_typed_preferences = (
+ preferred_outputs_object_store_id is not None or preferred_intermediate_object_store_id is not None
+ )
+ return has_typed_preferences and preferred_outputs_object_store_id != preferred_intermediate_object_store_id
+
+
class WorkflowInvocation(Base, UsesCreateAndUpdateTime, Dictifiable, Serializable):
__tablename__ = "workflow_invocation"
@@ -8110,6 +8130,27 @@ def resource_parameters(self):
return _resource_parameters
+ @property
+ def preferred_object_stores(self) -> WorkflowInvocationObjectStores:
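+ # Preferences are stored as META_PARAMETERS-typed request input parameters on
+ # the invocation; read them back into a typed tuple here.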
+ meta_type = WorkflowRequestInputParameter.types.META_PARAMETERS
+ preferred_object_store_id = None
+ preferred_outputs_object_store_id = None
+ preferred_intermediate_object_store_id = None
+
+ for input_parameter in self.input_parameters:
+ if input_parameter.type != meta_type:
+ continue
+ if input_parameter.name == "preferred_object_store_id":
+ preferred_object_store_id = input_parameter.value
+ elif input_parameter.name == "preferred_outputs_object_store_id":
+ preferred_outputs_object_store_id = input_parameter.value
+ elif input_parameter.name == "preferred_intermediate_object_store_id":
+ preferred_intermediate_object_store_id = input_parameter.value
+
+ return WorkflowInvocationObjectStores(
+ preferred_object_store_id, preferred_outputs_object_store_id, preferred_intermediate_object_store_id
+ )
+
def has_input_for_step(self, step_id):
for content in self.input_datasets:
if content.workflow_step_id == step_id:
diff --git a/lib/galaxy/model/migrations/alembic/versions_gxy/9540a051226e_preferred_object_store_ids.py b/lib/galaxy/model/migrations/alembic/versions_gxy/9540a051226e_preferred_object_store_ids.py
new file mode 100644
index 000000000000..a0cc97d8898e
--- /dev/null
+++ b/lib/galaxy/model/migrations/alembic/versions_gxy/9540a051226e_preferred_object_store_ids.py
@@ -0,0 +1,33 @@
+"""preferred_object_store_ids
+
+Revision ID: 9540a051226e
+Revises: d0583094c8cd
+Create Date: 2022-06-10 10:38:25.212102
+
+"""
+from alembic import op
+from sqlalchemy import (
+ Column,
+ String,
+)
+
+from galaxy.model.migrations.util import drop_column
+
+# revision identifiers, used by Alembic.
+revision = "9540a051226e"
+down_revision = "d0583094c8cd"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ preferred_object_store_type = String(255)
+ op.add_column("galaxy_user", Column("preferred_object_store_id", preferred_object_store_type, default=None))
+ op.add_column("history", Column("preferred_object_store_id", preferred_object_store_type, default=None))
+ op.add_column("job", Column("preferred_object_store_id", preferred_object_store_type, default=None))
+
+
+def downgrade():
+ drop_column("galaxy_user", "preferred_object_store_id")
+ drop_column("history", "preferred_object_store_id")
+ drop_column("job", "preferred_object_store_id")
diff --git a/lib/galaxy/model/unittest_utils/data_app.py b/lib/galaxy/model/unittest_utils/data_app.py
index 60f62f1077cc..8c4430ab8b26 100644
--- a/lib/galaxy/model/unittest_utils/data_app.py
+++ b/lib/galaxy/model/unittest_utils/data_app.py
@@ -68,6 +68,7 @@ def __init__(self, root=None, **kwd):
self.new_file_path = os.path.join(self.data_dir, "tmp")
self.file_path = os.path.join(self.data_dir, "files")
self.server_name = "main"
+ self.enable_quotas = False
def __del__(self):
if self._remove_root:
diff --git a/lib/galaxy/objectstore/__init__.py b/lib/galaxy/objectstore/__init__.py
index 3b7ba837d927..bc51062fe8cb 100644
--- a/lib/galaxy/objectstore/__init__.py
+++ b/lib/galaxy/objectstore/__init__.py
@@ -18,6 +18,7 @@
List,
NamedTuple,
Optional,
+ Set,
Type,
)
@@ -51,6 +52,22 @@
log = logging.getLogger(__name__)
+BADGE_SPECIFICATION = [
+ {"type": "faster", "conflicts": ["slower"]},
+ {"type": "slower", "conflicts": ["faster"]},
+ {"type": "short_term", "conflicts": []},
+ {"type": "cloud", "conflicts": []},
+ {"type": "backed_up", "conflicts": ["not_backed_up"]},
+ {"type": "not_backed_up", "conflicts": ["backed_up"]},
+ {"type": "more_secure", "conflicts": ["less_secure"]},
+ {"type": "less_secure", "conflicts": ["more_secure"]},
+ {"type": "more_stable", "conflicts": ["less_stable"]},
+ {"type": "less_stable", "conflicts": ["more_stable"]},
+]
+KNOWN_BADGE_TYPES = [s["type"] for s in BADGE_SPECIFICATION]
+BADGE_SPECIFCATION_BY_TYPE = {s["type"]: s for s in BADGE_SPECIFICATION}
+
+
class ObjectStore(metaclass=abc.ABCMeta):
"""ObjectStore interface.
@@ -251,6 +268,10 @@ def get_concrete_store_description_markdown(self, obj):
yet been set, this may return ``None``.
"""
+ @abc.abstractmethod
+ def get_concrete_store_badges(self, obj):
+ """Return a list of dictified badges summarizing the object store configuration."""
+
@abc.abstractmethod
def is_private(self, obj):
"""Return True iff supplied object is stored in private ConcreteObjectStore."""
@@ -264,6 +285,18 @@ def object_store_ids(self, private=None):
"""
return []
+ def object_store_allows_id_selection(self) -> bool:
+ """Return True if this object store respects object_store_id and allow selection of this."""
+ """Return True if this object store respects object_store_id and allows selection of it."""
+
+ def object_store_ids_allowing_selection(self) -> List[str]:
+ """Return a non-empty list of allowed selectable object store IDs during creation."""
+ return []
+
+ def get_concrete_store_by_object_store_id(self, object_store_id: str) -> Optional["ConcreteObjectStore"]:
+ """If this is a distributed object store, get ConcreteObjectStore by object_store_id."""
+ return None
+
@abc.abstractmethod
def get_store_usage_percent(self):
"""Return the percentage indicating how full the store is."""
@@ -304,6 +337,7 @@ def __init__(self, config, config_dict=None, **kwargs):
self.running = True
self.config = config
self.check_old_style = config.object_store_check_old_style
+ self.galaxy_enable_quotas = config.enable_quotas
extra_dirs = {}
extra_dirs["job_work"] = config.jobs_directory
extra_dirs["temp"] = config.new_file_path
@@ -397,6 +431,9 @@ def get_concrete_store_name(self, obj):
def get_concrete_store_description_markdown(self, obj):
return self._invoke("get_concrete_store_description_markdown", obj)
+ def get_concrete_store_badges(self, obj) -> List[Dict[str, Any]]:
+ return self._invoke("get_concrete_store_badges", obj)
+
def get_store_usage_percent(self):
return self._invoke("get_store_usage_percent")
@@ -413,6 +450,15 @@ def parse_private_from_config_xml(clazz, config_xml):
private = asbool(config_xml.attrib.get("private", DEFAULT_PRIVATE))
return private
+ @classmethod
+ def parse_badges_from_config_xml(clazz, badges_xml):
+ badges = []
+ for e in badges_xml:
+ type = e.tag
+ message = e.text
+ badges.append({"type": type, "message": message})
+ return badges
+
def get_quota_source_map(self):
# I'd rather keep this abstract... but register_singleton wants it to be instantiable...
raise NotImplementedError()
@@ -452,6 +498,31 @@ def __init__(self, config, config_dict=None, **kwargs):
quota_config = config_dict.get("quota", {})
self.quota_source = quota_config.get("source", DEFAULT_QUOTA_SOURCE)
self.quota_enabled = quota_config.get("enabled", DEFAULT_QUOTA_ENABLED)
+ raw_badges = config_dict.get("badges", [])
+ badges = []
+ badge_types: Set[str] = set()
+ badge_conflicts: Dict[str, str] = {}
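+ # Validate admin-supplied badges: reject unknown types and reject mutually
+ # exclusive types (e.g. faster/slower) declared on the same object store.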
+ for badge in raw_badges:
+ # when recovering serialized badges, skip ones that are set by Galaxy
+ badge_source = badge.get("source")
+ if badge_source and badge_source != "admin":
+ continue
+ assert "type" in badge
+ badge_type = badge["type"]
+ if badge_type not in KNOWN_BADGE_TYPES:
+ raise Exception(f"badge_type {badge_type} unimplemented/unknown {badge}")
+ message = badge.get("message", None)
+ badges.append({"type": badge_type, "message": message})
+ badge_types.add(badge_type)
+ if badge_type in badge_conflicts:
+ conflicting_badge_type = badge_conflicts[badge_type]
+ raise Exception(
+ f"Badge type [{badge_type}] conflicts with badge type [{conflicting_badge_type}], which is also set on this object store."
+ )
+ conflicts = BADGE_SPECIFCATION_BY_TYPE[badge_type]["conflicts"]
+ for conflict in conflicts:
+ badge_conflicts[conflict] = badge_type
+ self.badges = badges
def to_dict(self):
rval = super().to_dict()
@@ -463,8 +534,39 @@ def to_dict(self):
"source": self.quota_source,
"enabled": self.quota_enabled,
}
+ rval["badges"] = self._get_concrete_store_badges(None)
return rval
+ def _get_concrete_store_badges(self, obj) -> List[Dict[str, Any]]:
+ badge_dicts: List[Dict[str, Any]] = []
+ for badge in self.badges:
+ badge_dict = badge.copy()
+ badge_dict["source"] = "admin"
+ badge_dicts.append(badge_dict)
+
+ quota_badge_dict: Dict[str, Any]
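+ # Galaxy-sourced badges: advertise whether quotas apply to this store and,
+ # below, whether it is private/restricted.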
+ if self.galaxy_enable_quotas and self.quota_enabled:
+ quota_badge_dict = {
+ "type": "quota",
+ "message": None,
+ "source": "galaxy",
+ }
+ else:
+ quota_badge_dict = {
+ "type": "no_quota",
+ "message": None,
+ "source": "galaxy",
+ }
+ badge_dicts.append(quota_badge_dict)
+ if self.private:
+ restricted_badge_dict = {
+ "type": "restricted",
+ "message": None,
+ "source": "galaxy",
+ }
+ badge_dicts.append(restricted_badge_dict)
+ return badge_dicts
+
def _get_concrete_store_name(self, obj):
return self.name
@@ -495,7 +597,7 @@ class DiskObjectStore(ConcreteObjectStore):
>>> import tempfile
>>> file_path=tempfile.mkdtemp()
>>> obj = Bunch(id=1)
- >>> s = DiskObjectStore(Bunch(umask=0o077, jobs_directory=file_path, new_file_path=file_path, object_store_check_old_style=False), dict(files_dir=file_path))
+ >>> s = DiskObjectStore(Bunch(umask=0o077, jobs_directory=file_path, new_file_path=file_path, object_store_check_old_style=False, enable_quotas=True), dict(files_dir=file_path))
>>> o = s.create(obj)
>>> s.exists(obj)
True
@@ -544,6 +646,8 @@ def parse_xml(clazz, config_xml):
config_dict["files_dir"] = e.get("path")
elif e.tag == "description":
config_dict["description"] = e.text
+ elif e.tag == "badges":
+ config_dict["badges"] = BaseObjectStore.parse_badges_from_config_xml(e)
else:
extra_dirs.append({"type": e.get("type"), "path": e.get("path")})
@@ -863,6 +967,9 @@ def _get_concrete_store_name(self, obj):
def _get_concrete_store_description_markdown(self, obj):
return self._call_method("_get_concrete_store_description_markdown", obj, None, False)
+ def _get_concrete_store_badges(self, obj):
+ return self._call_method("_get_concrete_store_badges", obj, [], False)
+
def _is_private(self, obj):
return self._call_method("_is_private", obj, ObjectNotFound, True)
@@ -929,11 +1036,14 @@ def __init__(self, config, config_dict, fsmon=False):
self.search_for_missing = config_dict.get("search_for_missing", True)
random.seed()
+ user_selection_allowed = []
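+ # Track the ids of backends the admin has marked with allow_selection so
+ # users can explicitly target them.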
for backend_def in config_dict["backends"]:
backened_id = backend_def["id"]
maxpctfull = backend_def.get("max_percent_full", 0)
weight = backend_def["weight"]
-
+ allow_selection = backend_def.get("allow_selection")
+ if allow_selection:
+ user_selection_allowed.append(backened_id)
backend = build_object_store_from_config(config, config_dict=backend_def, fsmon=fsmon)
self.backends[backened_id] = backend
@@ -946,7 +1056,8 @@ def __init__(self, config, config_dict, fsmon=False):
self.weighted_backend_ids.append(backened_id)
self.original_weighted_backend_ids = self.weighted_backend_ids
-
+ self.user_selection_allowed = user_selection_allowed
+ self.allow_user_selection = bool(user_selection_allowed)
self.sleeper = None
if fsmon and (self.global_max_percent_full or [_ for _ in self.max_percent_full.values() if _ != 0.0]):
self.sleeper = Sleeper()
@@ -975,6 +1086,7 @@ def parse_xml(clazz, config_xml, legacy=False):
store_maxpctfull = float(b.get("maxpctfull", 0))
store_type = b.get("type", "disk")
store_by = b.get("store_by", None)
+ allow_selection = asbool(b.get("allow_selection"))
objectstore_class, _ = type_to_object_store_class(store_type)
backend_config_dict = objectstore_class.parse_xml(b)
@@ -982,6 +1094,7 @@ def parse_xml(clazz, config_xml, legacy=False):
backend_config_dict["weight"] = store_weight
backend_config_dict["max_percent_full"] = store_maxpctfull
backend_config_dict["type"] = store_type
+ backend_config_dict["allow_selection"] = allow_selection
if store_by is not None:
backend_config_dict["store_by"] = store_by
backends.append(backend_config_dict)
@@ -1121,6 +1234,18 @@ def object_store_ids(self, private=None):
object_store_ids.append(backend_id)
return object_store_ids
+ def object_store_allows_id_selection(self) -> bool:
+ """Return True if this object store respects object_store_id and allows selection of it."""
+ return self.allow_user_selection
+
+ def object_store_ids_allowing_selection(self) -> List[str]:
+ """Return a non-empty list of allowed selectable object store IDs during creation."""
+ return self.user_selection_allowed
+
+ def get_concrete_store_by_object_store_id(self, object_store_id: str) -> Optional["ConcreteObjectStore"]:
+ """If this is a distributed object store, get ConcreteObjectStore by object_store_id."""
+ return self.backends[object_store_id]
+
class HierarchicalObjectStore(NestedObjectStore):
"""
@@ -1350,6 +1475,7 @@ def config_to_dict(config):
"""Dict-ify the portion of a config object consumed by the ObjectStore class and its subclasses."""
return {
"object_store_check_old_style": config.object_store_check_old_style,
+ "enable_quotas": config.enable_quotas,
"file_path": config.file_path,
"umask": config.umask,
"jobs_directory": config.jobs_directory,
diff --git a/lib/galaxy/objectstore/unittest_utils/__init__.py b/lib/galaxy/objectstore/unittest_utils/__init__.py
index 738a309264fa..2cc0239aef22 100644
--- a/lib/galaxy/objectstore/unittest_utils/__init__.py
+++ b/lib/galaxy/objectstore/unittest_utils/__init__.py
@@ -74,6 +74,7 @@ def __init__(self, temp_directory, config_file, store_by="id"):
self.new_file_path = temp_directory
self.umask = 0000
self.gid = 1000
+ self.enable_quotas = True
__all__ = [
diff --git a/lib/galaxy/security/validate_user_input.py b/lib/galaxy/security/validate_user_input.py
index 3a5b0666a72f..de6489c19f48 100644
--- a/lib/galaxy/security/validate_user_input.py
+++ b/lib/galaxy/security/validate_user_input.py
@@ -7,9 +7,12 @@
import logging
import re
import socket
+from typing import Optional
from sqlalchemy import func
+from galaxy.objectstore import ObjectStore
+
log = logging.getLogger(__name__)
# Email validity parameters
@@ -148,3 +151,12 @@ def validate_password(trans, password, confirm):
if password != confirm:
return "Passwords do not match."
return validate_password_str(password)
+
+
+def validate_preferred_object_store_id(object_store: ObjectStore, preferred_object_store_id: Optional[str]) -> str:
+ if not object_store.object_store_allows_id_selection() and preferred_object_store_id is not None:
+ return "The current configuration doesn't allow selecting preferred object stores."
+ if object_store.object_store_allows_id_selection() and preferred_object_store_id:
+ if preferred_object_store_id not in object_store.object_store_ids_allowing_selection():
+ return "Supplied object store id is not an allowed object store selection"
+ return ""
diff --git a/lib/galaxy/tools/__init__.py b/lib/galaxy/tools/__init__.py
index cf7336c6b942..162f885d7692 100644
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -1787,7 +1787,15 @@ def expand_incoming(self, trans, incoming, request_context, input_format="legacy
log.info(validation_timer)
return all_params, all_errors, rerun_remap_job_id, collection_info
- def handle_input(self, trans, incoming, history=None, use_cached_job=False, input_format="legacy"):
+ def handle_input(
+ self,
+ trans,
+ incoming,
+ history=None,
+ use_cached_job=False,
+ preferred_object_store_id: Optional[str] = None,
+ input_format="legacy",
+ ):
"""
Process incoming parameters for this tool from the dict `incoming`,
update the tool state (or create if none existed), and either return
@@ -1834,6 +1842,7 @@ def handle_input(self, trans, incoming, history=None, use_cached_job=False, inpu
mapping_params,
history=request_context.history,
rerun_remap_job_id=rerun_remap_job_id,
+ preferred_object_store_id=preferred_object_store_id,
collection_info=collection_info,
completed_jobs=completed_jobs,
)
@@ -1865,6 +1874,7 @@ def handle_single_execution(
completed_job=None,
collection_info=None,
job_callback=None,
+ preferred_object_store_id=None,
flush_job=True,
):
"""
@@ -1882,6 +1892,7 @@ def handle_single_execution(
completed_job=completed_job,
collection_info=collection_info,
job_callback=job_callback,
+ preferred_object_store_id=preferred_object_store_id,
flush_job=flush_job,
)
job = rval[0]
diff --git a/lib/galaxy/tools/actions/__init__.py b/lib/galaxy/tools/actions/__init__.py
index aa9f97e500aa..324525a1b779 100644
--- a/lib/galaxy/tools/actions/__init__.py
+++ b/lib/galaxy/tools/actions/__init__.py
@@ -358,6 +358,7 @@ def execute(
completed_job=None,
collection_info=None,
job_callback=None,
+ preferred_object_store_id=None,
flush_job=True,
):
"""
@@ -645,6 +646,7 @@ def handle_output(name, output, hidden=None):
job_setup_timer = ExecutionTimer()
# Create the job object
job, galaxy_session = self._new_job_for_session(trans, tool, history)
+ job.preferred_object_store_id = preferred_object_store_id
self._record_inputs(trans, tool, job, incoming, inp_data, inp_dataset_collections)
self._record_outputs(job, out_data, output_collections)
# execute immediate post job actions and associate post job actions that are to be executed after the job is complete
diff --git a/lib/galaxy/tools/execute.py b/lib/galaxy/tools/execute.py
index b64f91ec873a..6506417c3f42 100644
--- a/lib/galaxy/tools/execute.py
+++ b/lib/galaxy/tools/execute.py
@@ -50,6 +50,7 @@ def execute(
mapping_params,
history: model.History,
rerun_remap_job_id=None,
+ preferred_object_store_id=None,
collection_info=None,
workflow_invocation_uuid=None,
invocation_step=None,
@@ -110,6 +111,7 @@ def execute_single_job(execution_slice, completed_job):
completed_job,
collection_info,
job_callback=job_callback,
+ preferred_object_store_id=preferred_object_store_id,
flush_job=False,
)
if job:
diff --git a/lib/galaxy/webapps/galaxy/api/object_store.py b/lib/galaxy/webapps/galaxy/api/object_store.py
new file mode 100644
index 000000000000..f53e267339a2
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/object_store.py
@@ -0,0 +1,80 @@
+"""
+API operations on Galaxy's object store.
+"""
+import logging
+from typing import (
+ Any,
+ Dict,
+ List,
+)
+
+from fastapi import (
+ Path,
+ Query,
+)
+
+from galaxy.exceptions import (
+ ObjectNotFound,
+ RequestParameterInvalidException,
+)
+from galaxy.managers.context import ProvidesUserContext
+from galaxy.objectstore import BaseObjectStore
+from . import (
+ depends,
+ DependsOnTrans,
+ Router,
+)
+
+log = logging.getLogger(__name__)
+
+router = Router(tags=["object store"])
+
+ConcreteObjectStoreIdPathParam: str = Path(
+ ..., title="Concrete Object Store ID", description="The concrete object store ID."
+)
+
+SelectableQueryParam: bool = Query(
+ default=False, title="Selectable", description="Restrict index query to user selectable object stores."
+)
+
+
+@router.cbv
+class FastAPIObjectStore:
+ object_store: BaseObjectStore = depends(BaseObjectStore)
+
+ @router.get(
+ "/api/object_store",
+ summary="",
+ response_description="",
+ )
+ def index(
+ self,
+ trans: ProvidesUserContext = DependsOnTrans,
+ selectable: bool = SelectableQueryParam,
+ ) -> List[Dict[str, Any]]:
+ if not selectable:
+ raise RequestParameterInvalidException(
+ "The object store index query currently needs to be called with selectable=true"
+ )
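+ # For now only user-selectable object stores are exposed, hence the required selectable=true flag above.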
+ selectable_ids = self.object_store.object_store_ids_allowing_selection()
+ return [self._dict_for(selectable_id) for selectable_id in selectable_ids]
+
+ @router.get(
+ "/api/object_store/{object_store_id}",
+ summary="Return boolean to indicate if Galaxy's default object store allows selection.",
+ response_description="A list with details about the remote files available to the user.",
+ )
+ def show_info(
+ self,
+ trans: ProvidesUserContext = DependsOnTrans,
+ object_store_id: str = ConcreteObjectStoreIdPathParam,
+ ) -> Dict[str, Any]:
+ return self._dict_for(object_store_id)
+
+ def _dict_for(self, object_store_id: str) -> Dict[str, Any]:
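+ # Look up the concrete backend for the given ID and serialize it; unknown IDs raise ObjectNotFound (404).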
+ concrete_object_store = self.object_store.get_concrete_store_by_object_store_id(object_store_id)
+ if concrete_object_store is None:
+ raise ObjectNotFound()
+ as_dict = concrete_object_store.to_dict()
+ as_dict["object_store_id"] = object_store_id
+ return as_dict
diff --git a/lib/galaxy/webapps/galaxy/services/datasets.py b/lib/galaxy/webapps/galaxy/services/datasets.py
index 738029fae9a2..490d1566a53a 100644
--- a/lib/galaxy/webapps/galaxy/services/datasets.py
+++ b/lib/galaxy/webapps/galaxy/services/datasets.py
@@ -113,6 +113,9 @@ class DatasetStorageDetails(Model):
description="Is this dataset sharable.",
)
quota: dict = Field(description="Information about quota sources around dataset storage.")
+ badges: List[Dict[str, Any]] = Field(
+ description="Fast summary parsed for digging into about target object store dataset is stored on."
+ )
class DatasetInheritanceChainEntry(Model):
@@ -353,6 +356,7 @@ def show_storage(
object_store_id = dataset.object_store_id
name = object_store.get_concrete_store_name(dataset)
description = object_store.get_concrete_store_description_markdown(dataset)
+ badges = object_store.get_concrete_store_badges(dataset)
# not really working (existing problem)
try:
percent_used = object_store.get_store_usage_percent()
@@ -382,6 +386,7 @@ def show_storage(
hashes=hashes,
sources=sources,
quota=quota,
+ badges=badges,
)
def show_inheritance_chain(
diff --git a/lib/galaxy/webapps/galaxy/services/tools.py b/lib/galaxy/webapps/galaxy/services/tools.py
index 2457de45aeed..da112c1c329a 100644
--- a/lib/galaxy/webapps/galaxy/services/tools.py
+++ b/lib/galaxy/webapps/galaxy/services/tools.py
@@ -161,11 +161,16 @@ def _create(self, trans: ProvidesHistoryContext, payload, **kwd):
use_cached_job = payload.get("use_cached_job", False) or util.string_as_bool(
inputs.get("use_cached_job", "false")
)
-
+ preferred_object_store_id = payload.get("preferred_object_store_id", None)
input_format = str(payload.get("input_format", "legacy"))
vars = tool.handle_input(
- trans, incoming, history=target_history, use_cached_job=use_cached_job, input_format=input_format
+ trans,
+ incoming,
+ history=target_history,
+ use_cached_job=use_cached_job,
+ input_format=input_format,
+ preferred_object_store_id=preferred_object_store_id,
)
new_pja_flush = False
diff --git a/lib/galaxy/workflow/run_request.py b/lib/galaxy/workflow/run_request.py
index 6cb858ac9489..065c659848e5 100644
--- a/lib/galaxy/workflow/run_request.py
+++ b/lib/galaxy/workflow/run_request.py
@@ -53,6 +53,9 @@ def __init__(
allow_tool_state_corrections=False,
use_cached_job=False,
resource_params=None,
+ preferred_object_store_id=None,
+ preferred_outputs_object_store_id=None,
+ preferred_intermediate_object_store_id=None,
):
self.target_history = target_history
self.replacement_dict = replacement_dict
@@ -62,6 +65,9 @@ def __init__(
self.resource_params = resource_params or {}
self.allow_tool_state_corrections = allow_tool_state_corrections
self.use_cached_job = use_cached_job
+ self.preferred_object_store_id = preferred_object_store_id
+ self.preferred_outputs_object_store_id = preferred_outputs_object_store_id
+ self.preferred_intermediate_object_store_id = preferred_intermediate_object_store_id
def _normalize_inputs(steps, inputs, inputs_by):
@@ -413,6 +419,16 @@ def build_workflow_run_configs(trans, workflow, payload):
f"Invalid value for parameter '{name}' found."
)
history.add_pending_items()
+ preferred_object_store_id = payload.get("preferred_object_store_id")
+ preferred_outputs_object_store_id = payload.get("preferred_outputs_object_store_id")
+ preferred_intermediate_object_store_id = payload.get("preferred_intermediate_object_store_id")
+ split_object_store_config = bool(
+ preferred_outputs_object_store_id is not None or preferred_intermediate_object_store_id is not None
+ )
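+ # A single preferred ID and the split outputs/intermediate IDs are mutually exclusive.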
+ if split_object_store_config and preferred_object_store_id:
+ raise exceptions.RequestParameterInvalidException(
+ "May specified either 'preferred_object_store_id' or one/both of 'preferred_outputs_object_store_id' and 'preferred_intermediate_object_store_id' but not both"
+ )
run_configs.append(
WorkflowRunConfig(
target_history=history,
@@ -422,6 +438,9 @@ def build_workflow_run_configs(trans, workflow, payload):
allow_tool_state_corrections=allow_tool_state_corrections,
use_cached_job=use_cached_job,
resource_params=resource_params,
+ preferred_object_store_id=preferred_object_store_id,
+ preferred_outputs_object_store_id=preferred_outputs_object_store_id,
+ preferred_intermediate_object_store_id=preferred_intermediate_object_store_id,
)
)
@@ -498,6 +517,15 @@ def add_parameter(name, value, type):
"copy_inputs_to_history", "true" if run_config.copy_inputs_to_history else "false", param_types.META_PARAMETERS
)
add_parameter("use_cached_job", "true" if run_config.use_cached_job else "false", param_types.META_PARAMETERS)
+ for param in [
+ "preferred_object_store_id",
+ "preferred_outputs_object_store_id",
+ "preferred_intermediate_object_store_id",
+ ]:
+ value = getattr(run_config, param)
+ if value:
+ add_parameter(param, value, param_types.META_PARAMETERS)
+
return workflow_invocation
@@ -510,6 +538,10 @@ def workflow_request_to_run_config(work_request_context, workflow_invocation):
resource_params = {}
copy_inputs_to_history = None
use_cached_job = False
+ # Preferred object store IDs - either a single ID for everything or split outputs/intermediate IDs.
+ preferred_object_store_id = None
+ preferred_outputs_object_store_id = None
+ preferred_intermediate_object_store_id = None
for parameter in workflow_invocation.input_parameters:
parameter_type = parameter.type
@@ -520,6 +552,12 @@ def workflow_request_to_run_config(work_request_context, workflow_invocation):
copy_inputs_to_history = parameter.value == "true"
if parameter.name == "use_cached_job":
use_cached_job = parameter.value == "true"
+ if parameter.name == "preferred_object_store_id":
+ preferred_object_store_id = parameter.value
+ if parameter.name == "preferred_outputs_object_store_id":
+ preferred_outputs_object_store_id = parameter.value
+ if parameter.name == "preferred_intermediate_object_store_id":
+ preferred_intermediate_object_store_id = parameter.value
elif parameter_type == param_types.RESOURCE_PARAMETERS:
resource_params[parameter.name] = parameter.value
elif parameter_type == param_types.STEP_PARAMETERS:
@@ -546,5 +584,8 @@ def workflow_request_to_run_config(work_request_context, workflow_invocation):
copy_inputs_to_history=copy_inputs_to_history,
use_cached_job=use_cached_job,
resource_params=resource_params,
+ preferred_object_store_id=preferred_object_store_id,
+ preferred_outputs_object_store_id=preferred_outputs_object_store_id,
+ preferred_intermediate_object_store_id=preferred_intermediate_object_store_id,
)
return workflow_run_config
diff --git a/lib/galaxy_test/base/populators.py b/lib/galaxy_test/base/populators.py
index 47479057e70d..9215072c68b1 100644
--- a/lib/galaxy_test/base/populators.py
+++ b/lib/galaxy_test/base/populators.py
@@ -993,6 +993,15 @@ def get_usage_for(self, label: Optional[str]) -> Dict[str, Any]:
usage_response.raise_for_status()
return usage_response.json()
+ def update_user(self, properties: Dict[str, Any]) -> Dict[str, Any]:
+ update_response = self.update_user_raw(properties)
+ update_response.raise_for_status()
+ return update_response.json()
+
+ def update_user_raw(self, properties: Dict[str, Any]) -> Response:
+ update_response = self.galaxy_interactor.put("users/current", properties, json=True)
+ return update_response
+
def create_role(self, user_ids: list, description: Optional[str] = None) -> dict:
payload = {
"name": self.get_random_name(prefix="testpop"),
@@ -1144,9 +1153,12 @@ def import_history_and_wait_for_name(self, import_data, history_name):
def history_names(self) -> Dict[str, Dict]:
return {h["name"]: h for h in self.get_histories()}
- def rename_history(self, history_id, new_name):
+ def rename_history(self, history_id: str, new_name: str):
+ self.update_history(history_id, {"name": new_name})
+
+ def update_history(self, history_id: str, payload: Dict[str, Any]) -> Response:
update_url = f"histories/{history_id}"
- put_response = self._put(update_url, {"name": new_name}, json=True)
+ put_response = self._put(update_url, payload, json=True)
return put_response
def get_histories(self):
@@ -1660,6 +1672,7 @@ def run_workflow(
expected_response=200,
assert_ok=True,
client_convert=None,
+ extra_invocation_kwds=None,
round_trip_format_conversion=False,
invocations=1,
raw_yaml=False,
@@ -1705,6 +1718,8 @@ def run_workflow(
workflow_request["parameters_normalized"] = True
if replacement_parameters:
workflow_request["replacement_params"] = json.dumps(replacement_parameters)
+ if extra_invocation_kwds is not None:
+ workflow_request.update(extra_invocation_kwds)
if has_uploads:
self.dataset_populator.wait_for_history(history_id, assert_ok=True)
assert invocations > 0
@@ -1874,6 +1889,9 @@ class RunJobsSummary(NamedTuple):
invocation: dict
workflow_request: dict
+ def jobs_for_tool(self, tool_id):
+ return [j for j in self.jobs if j["tool_id"] == tool_id]
+
class WorkflowPopulator(GalaxyInteractorHttpMixin, BaseWorkflowPopulator, ImporterGalaxyInterface):
def __init__(self, galaxy_interactor):
diff --git a/test/integration/objectstore/test_selection_with_user_preferred_object_store.py b/test/integration/objectstore/test_selection_with_user_preferred_object_store.py
new file mode 100644
index 000000000000..e9bb661f0f3d
--- /dev/null
+++ b/test/integration/objectstore/test_selection_with_user_preferred_object_store.py
@@ -0,0 +1,232 @@
+"""Test selecting an object store with user's preferred object store."""
+
+import os
+import string
+from typing import (
+ Any,
+ Dict,
+)
+
+from galaxy.model import Dataset
+from galaxy_test.base.populators import WorkflowPopulator
+from ._base import BaseObjectStoreIntegrationTestCase
+
+SCRIPT_DIRECTORY = os.path.abspath(os.path.dirname(__file__))
+
+DISTRIBUTED_OBJECT_STORE_CONFIG_TEMPLATE = string.Template(
+ """
+<?xml version="1.0"?>
+<object_store type="distributed" id="primary" order="0">
+    <backends>
+        <backend id="default" allow_selection="true" type="disk" weight="1" name="Default Store">
+            <description>This is my description of the default store with *markdown*.</description>
+            <files_dir path="${temp_directory}/files_default"/>
+            <extra_dir type="temp" path="${temp_directory}/tmp_default"/>
+            <extra_dir type="job_work" path="${temp_directory}/job_working_directory_default"/>
+        </backend>
+        <backend id="static" allow_selection="true" type="disk" weight="0" name="Static Storage">
+            <files_dir path="${temp_directory}/files_static"/>
+            <extra_dir type="temp" path="${temp_directory}/tmp_static"/>
+            <extra_dir type="job_work" path="${temp_directory}/job_working_directory_static"/>
+        </backend>
+        <backend id="dynamic_ebs" allow_selection="true" type="disk" weight="0" name="Dynamic EBS">
+            <files_dir path="${temp_directory}/files_dynamic_ebs"/>
+            <extra_dir type="temp" path="${temp_directory}/tmp_dynamic_ebs"/>
+            <extra_dir type="job_work" path="${temp_directory}/job_working_directory_dynamic_ebs"/>
+        </backend>
+        <backend id="dynamic_s3" type="disk" weight="0">
+            <files_dir path="${temp_directory}/files_dynamic_s3"/>
+            <extra_dir type="temp" path="${temp_directory}/tmp_dynamic_s3"/>
+            <extra_dir type="job_work" path="${temp_directory}/job_working_directory_dynamic_s3"/>
+        </backend>
+    </backends>
+</object_store>
+"""
+)
+
+
+TEST_WORKFLOW = """
+class: GalaxyWorkflow
+inputs:
+ input1: data
+outputs:
+ wf_output_1:
+ outputSource: second_cat/out_file1
+steps:
+ first_cat:
+ tool_id: cat
+ in:
+ input1: input1
+ second_cat:
+ tool_id: cat
+ in:
+ input1: first_cat/out_file1
+"""
+
+TEST_WORKFLOW_TEST_DATA = """
+input1:
+ value: 1.fasta
+ type: File
+ name: fasta1
+"""
+
+TEST_WORKFLOW_MAPPED_COLLECTION_OUTPUT = """
+class: GalaxyWorkflow
+inputs:
+ input1:
+ type: data_collection_input
+ collection_type: list
+outputs:
+ wf_output_1:
+ outputSource: second_cat/out_file1
+steps:
+ first_cat:
+ tool_id: cat
+ in:
+ input1: input1
+ second_cat:
+ tool_id: cat
+ in:
+ input1: first_cat/out_file1
+"""
+
+
+def assert_storage_name_is(storage_dict: Dict[str, Any], name: str):
+ storage_name = storage_dict["name"]
+ assert name == storage_name, f"Found incorrect storage name {storage_name}, expected {name} in {storage_dict}"
+
+
+class ObjectStoreSelectionWithPreferredObjectStoresIntegrationTestCase(BaseObjectStoreIntegrationTestCase):
+ # populated by config_object_store
+ files_default_path: str
+ files_static_path: str
+ files_dynamic_path: str
+ files_dynamic_ebs_path: str
+ files_dynamic_s3_path: str
+
+ @classmethod
+ def handle_galaxy_config_kwds(cls, config):
+ super().handle_galaxy_config_kwds(config)
+ cls._configure_object_store(DISTRIBUTED_OBJECT_STORE_CONFIG_TEMPLATE, config)
+ config["object_store_store_by"] = "uuid"
+ config["outputs_to_working_directory"] = True
+
+ def setUp(self):
+ super().setUp()
+ self.workflow_populator = WorkflowPopulator(self.galaxy_interactor)
+
+ def test_setting_unselectable_object_store_id_not_allowed(self):
+ response = self.dataset_populator.update_user_raw({"preferred_object_store_id": "dynamic_s3"})
+ assert response.status_code == 400
+
+ def test_index_query(self):
+ selectable_object_stores_response = self._get("object_store?selectable=true")
+ selectable_object_stores_response.raise_for_status()
+ selectable_object_stores = selectable_object_stores_response.json()
+ selectable_object_store_ids = [s["object_store_id"] for s in selectable_object_stores]
+ assert "default" in selectable_object_store_ids
+ assert "static" in selectable_object_store_ids
+ assert "dynamic_s3" not in selectable_object_store_ids
+
+ def test_objectstore_selection(self):
+
+ with self.dataset_populator.test_history() as history_id:
+
+ def _create_hda_get_storage_info():
+ hda1 = self.dataset_populator.new_dataset(history_id, content="1 2 3")
+ self.dataset_populator.wait_for_history(history_id)
+ return self._storage_info(hda1), hda1
+
+ def _run_tool(tool_id, inputs, preferred_object_store_id=None):
+ response = self.dataset_populator.run_tool(
+ tool_id,
+ inputs,
+ history_id,
+ preferred_object_store_id=preferred_object_store_id,
+ )
+ self.dataset_populator.wait_for_history(history_id)
+ return response
+
+ user_properties = self.dataset_populator.update_user({"preferred_object_store_id": "static"})
+ assert user_properties["preferred_object_store_id"] == "static"
+
+ storage_info, hda1 = _create_hda_get_storage_info()
+ assert_storage_name_is(storage_info, "Static Storage")
+
+ user_properties = self.dataset_populator.update_user({"preferred_object_store_id": None})
+
+ storage_info, _ = _create_hda_get_storage_info()
+ assert_storage_name_is(storage_info, "Default Store")
+
+ self.dataset_populator.update_history(history_id, {"preferred_object_store_id": "static"})
+ storage_info, _ = _create_hda_get_storage_info()
+ assert_storage_name_is(storage_info, "Static Storage")
+
+ hda1_input = {"src": "hda", "id": hda1["id"]}
+ response = _run_tool("multi_data_param", {"f1": hda1_input, "f2": hda1_input})
+ storage_info = self._storage_info_for_job_output(response)
+ assert_storage_name_is(storage_info, "Static Storage")
+
+ hda1_input = {"src": "hda", "id": hda1["id"]}
+ response = _run_tool(
+ "multi_data_param", {"f1": hda1_input, "f2": hda1_input}, preferred_object_store_id="default"
+ )
+ storage_info = self._storage_info_for_job_output(response)
+ assert_storage_name_is(storage_info, "Default Store")
+
+ # reset preferred object store...
+ self.dataset_populator.update_user({"preferred_object_store_id": None})
+
+ def test_workflow_objectstore_selection(self):
+
+ with self.dataset_populator.test_history() as history_id:
+ output_dict, intermediate_dict = self._run_workflow_get_output_storage_info_dicts(history_id)
+ assert_storage_name_is(output_dict, "Default Store")
+ assert_storage_name_is(intermediate_dict, "Default Store")
+
+ output_dict, intermediate_dict = self._run_workflow_get_output_storage_info_dicts(
+ history_id, {"preferred_object_store_id": "static"}
+ )
+ assert_storage_name_is(output_dict, "Static Storage")
+ assert_storage_name_is(intermediate_dict, "Static Storage")
+
+ output_dict, intermediate_dict = self._run_workflow_get_output_storage_info_dicts(
+ history_id, {"preferred_outputs_object_store_id": "static", "preferred_intermediate_object_store_id": "dynamic_ebs"}
+ )
+ assert_storage_name_is(output_dict, "Static Storage")
+ assert_storage_name_is(intermediate_dict, "Dynamic EBS")
+
+
+ def _run_workflow_get_output_storage_info_dicts(self, history_id, extra_invocation_kwds=None):
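+ # Run the two-step cat workflow and return storage info for the final output and the intermediate dataset.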
+ wf_run = self.workflow_populator.run_workflow(
+ TEST_WORKFLOW,
+ test_data=TEST_WORKFLOW_TEST_DATA,
+ history_id=history_id,
+ extra_invocation_kwds=extra_invocation_kwds,
+ )
+ jobs = wf_run.jobs_for_tool("cat")
+ assert len(jobs) == 2
+ output_cat = self.dataset_populator.get_job_details(jobs[0]["id"], full=True).json()
+ intermediate_cat = self.dataset_populator.get_job_details(jobs[1]["id"], full=True).json()
+ output_info = self._storage_info_for_job_output(output_cat)
+ intermediate_info = self._storage_info_for_job_output(intermediate_cat)
+ return output_info, intermediate_info
+
+ def _storage_info_for_job_output(self, job_dict):
+ outputs = job_dict["outputs"] # could be a list or dictionary depending on source
+ try:
+ output = outputs[0]
+ except KeyError:
+ output = list(outputs.values())[0]
+ storage_info = self._storage_info(output)
+ return storage_info
+
+ def _storage_info(self, hda):
+ return self.dataset_populator.dataset_storage_info(hda["id"])
+
+ @property
+ def _latest_dataset(self):
+ latest_dataset = self._app.model.session.query(Dataset).order_by(Dataset.table.c.id.desc()).first()
+ return latest_dataset
diff --git a/test/unit/objectstore/test_objectstore.py b/test/unit/objectstore/test_objectstore.py
index 0bdfd4d5bd56..dbf2ee2966bb 100644
--- a/test/unit/objectstore/test_objectstore.py
+++ b/test/unit/objectstore/test_objectstore.py
@@ -384,6 +384,106 @@ def test_mixed_private():
assert as_dict["private"] is True
+BADGES_TEST_1_CONFIG_XML = """
+<object_store type="disk" store_by="uuid">
+    <files_dir path="${temp_directory}/files1"/>
+    <extra_dir type="temp" path="${temp_directory}/tmp1"/>
+    <extra_dir type="job_work" path="${temp_directory}/job_working_directory1"/>
+    <badges>
+        <short_term />
+        <faster>Fast interconnects.</faster>
+        <less_stable />
+        <more_secure />
+        <backed_up>Storage is backed up to tape nightly.</backed_up>
+    </badges>
+</object_store>
+"""
+
+
+BADGES_TEST_1_CONFIG_YAML = """
+type: disk
+files_dir: "${temp_directory}/files1"
+store_by: uuid
+extra_dirs:
+ - type: temp
+ path: "${temp_directory}/tmp1"
+ - type: job_work
+ path: "${temp_directory}/job_working_directory1"
+badges:
+ - type: short_term
+ - type: faster
+ message: Fast interconnects.
+ - type: less_stable
+ - type: more_secure
+ - type: backed_up
+ message: Storage is backed up to tape nightly.
+"""
+
+
+def test_badges_parsing():
+ for config_str in [BADGES_TEST_1_CONFIG_XML, BADGES_TEST_1_CONFIG_YAML]:
+ with TestConfig(config_str) as (directory, object_store):
+ badges = object_store.to_dict()["badges"]
+ assert len(badges) == 6
+ badge_1 = badges[0]
+ assert badge_1["type"] == "short_term"
+ assert badge_1["message"] is None
+
+ badge_2 = badges[1]
+ assert badge_2["type"] == "faster"
+ assert badge_2["message"] == "Fast interconnects."
+
+ badge_3 = badges[2]
+ assert badge_3["type"] == "less_stable"
+ assert badge_3["message"] is None
+
+ badge_4 = badges[3]
+ assert badge_4["type"] == "more_secure"
+ assert badge_4["message"] is None
+
+
+BADGES_TEST_CONFLICTS_1_CONFIG_YAML = """
+type: disk
+files_dir: "${temp_directory}/files1"
+badges:
+ - type: slower
+ - type: faster
+"""
+
+
+BADGES_TEST_CONFLICTS_2_CONFIG_YAML = """
+type: disk
+files_dir: "${temp_directory}/files1"
+badges:
+ - type: more_secure
+ - type: less_secure
+"""
+
+
+def test_badges_parsing_conflicts():
+ for config_str in [BADGES_TEST_CONFLICTS_1_CONFIG_YAML]:
+ exception_raised = False
+ try:
+ with TestConfig(config_str) as (directory, object_store):
+ pass
+ except Exception as e:
+ assert "faster" in str(e)
+ assert "slower" in str(e)
+ exception_raised = True
+ assert exception_raised
+
+ for config_str in [BADGES_TEST_CONFLICTS_2_CONFIG_YAML]:
+ exception_raised = False
+ try:
+ with TestConfig(config_str) as (directory, object_store):
+ pass
+ except Exception as e:
+ assert "more_secure" in str(e)
+ assert "less_secure" in str(e)
+ exception_raised = True
+ assert exception_raised
+
+
DISTRIBUTED_TEST_CONFIG = """