diff --git a/client/src/components/Dataset/DatasetStorage/DatasetStorage.test.js b/client/src/components/Dataset/DatasetStorage/DatasetStorage.test.js
index 6b34492d590a..229c7e170c0c 100644
--- a/client/src/components/Dataset/DatasetStorage/DatasetStorage.test.js
+++ b/client/src/components/Dataset/DatasetStorage/DatasetStorage.test.js
@@ -10,14 +10,17 @@ const localVue = getLocalVue();
const TEST_STORAGE_API_RESPONSE_WITHOUT_ID = {
object_store_id: null,
+ private: false,
};
const TEST_STORAGE_API_RESPONSE_WITH_ID = {
object_store_id: "foobar",
+ private: false,
};
const TEST_STORAGE_API_RESPONSE_WITH_NAME = {
object_store_id: "foobar",
name: "my cool storage",
description: "My cool **markdown**",
+ private: true,
};
const TEST_DATASET_ID = "1";
const TEST_STORAGE_URL = `/api/datasets/${TEST_DATASET_ID}/storage`;
@@ -46,9 +49,6 @@ describe("Dataset Storage", () => {
wrapper = shallowMount(DatasetStorage, {
propsData: { datasetId: TEST_DATASET_ID },
localVue,
- stubs: {
- "loading-span": true,
- },
});
}
@@ -102,6 +102,7 @@ describe("Dataset Storage", () => {
expect(byIdSpan.length).toBe(1);
const byNameSpan = wrapper.findAll(".display-os-by-name");
expect(byNameSpan.length).toBe(0);
+ expect(wrapper.find("object-store-restriction-span-stub").props("isPrivate")).toBeFalsy();
});
it("test dataset storage with object store name", async () => {
@@ -116,6 +117,7 @@ describe("Dataset Storage", () => {
expect(byIdSpan.length).toBe(0);
const byNameSpan = wrapper.findAll(".display-os-by-name");
expect(byNameSpan.length).toBe(1);
+ expect(wrapper.find("object-store-restriction-span-stub").props("isPrivate")).toBeTruthy();
});
afterEach(() => {
diff --git a/client/src/components/Dataset/DatasetStorage/DatasetStorage.vue b/client/src/components/Dataset/DatasetStorage/DatasetStorage.vue
index 7232d97e508e..401a8ed42f27 100644
--- a/client/src/components/Dataset/DatasetStorage/DatasetStorage.vue
+++ b/client/src/components/Dataset/DatasetStorage/DatasetStorage.vue
@@ -7,12 +7,16 @@
This dataset is stored in
-                a Galaxy object store named <b>{{ storageInfo.name }}</b>
+                a <ObjectStoreRestrictionSpan :is-private="isPrivate" /> Galaxy object store named
+                <b>{{ storageInfo.name }}</b>
-                a Galaxy object store with id <b>{{ storageInfo.object_store_id }}</b>
+                a <ObjectStoreRestrictionSpan :is-private="isPrivate" /> Galaxy object store with id
+                <b>{{ storageInfo.object_store_id }}</b>
-            <span v-else> the default configured Galaxy object store </span>.
+            <span v-else>
+                the default configured Galaxy object store </span>.
@@ -25,10 +29,12 @@ import { getAppRoot } from "onload/loadConfig";
import LoadingSpan from "components/LoadingSpan";
import MarkdownIt from "markdown-it";
import { errorMessageAsString } from "utils/simple-error";
+import ObjectStoreRestrictionSpan from "./ObjectStoreRestrictionSpan";
export default {
components: {
LoadingSpan,
+ ObjectStoreRestrictionSpan,
},
props: {
datasetId: {
@@ -60,6 +66,11 @@ export default {
this.errorMessage = errorMessageAsString(errorMessage);
});
},
+ computed: {
+ isPrivate() {
+ return this.storageInfo.private;
+ },
+ },
methods: {
handleResponse(response) {
const storageInfo = response.data;
diff --git a/client/src/components/Dataset/DatasetStorage/ObjectStoreRestrictionSpan.test.js b/client/src/components/Dataset/DatasetStorage/ObjectStoreRestrictionSpan.test.js
new file mode 100644
index 000000000000..0f199a2a8f51
--- /dev/null
+++ b/client/src/components/Dataset/DatasetStorage/ObjectStoreRestrictionSpan.test.js
@@ -0,0 +1,27 @@
+import { shallowMount } from "@vue/test-utils";
+import { getLocalVue } from "jest/helpers";
+import ObjectStoreRestrictionSpan from "./ObjectStoreRestrictionSpan";
+
+const localVue = getLocalVue();
+
+describe("ObjectStoreRestrictionSpan", () => {
+ let wrapper;
+
+ it("should render info about private storage if isPrivate", () => {
+ wrapper = shallowMount(ObjectStoreRestrictionSpan, {
+ propsData: { isPrivate: true },
+ localVue,
+ });
+ expect(wrapper.find(".stored-how").text()).toBe("private");
+ expect(wrapper.find(".stored-how").attributes("title")).toBeTruthy();
+ });
+
+ it("should render info about unrestricted storage if not isPrivate", () => {
+ wrapper = shallowMount(ObjectStoreRestrictionSpan, {
+ propsData: { isPrivate: false },
+ localVue,
+ });
+ expect(wrapper.find(".stored-how").text()).toBe("unrestricted");
+ expect(wrapper.find(".stored-how").attributes("title")).toBeTruthy();
+ });
+});
diff --git a/client/src/components/Dataset/DatasetStorage/ObjectStoreRestrictionSpan.vue b/client/src/components/Dataset/DatasetStorage/ObjectStoreRestrictionSpan.vue
new file mode 100644
index 000000000000..29d313a72142
--- /dev/null
+++ b/client/src/components/Dataset/DatasetStorage/ObjectStoreRestrictionSpan.vue
@@ -0,0 +1,27 @@
+<template>
+    <span v-b-tooltip.hover class="stored-how" :title="title">
+        {{ text }}
+    </span>
+</template>
+
+<script>
+export default {
+    props: {
+        isPrivate: {
+            type: Boolean,
+            default: false,
+        },
+    },
+    computed: {
+        text() {
+            return this.isPrivate ? "private" : "unrestricted";
+        },
+        title() {
+            if (this.isPrivate) {
+                return "This dataset is stored on storage restricted to a single user - it cannot be shared, published, or added to a data library.";
+            }
+            return "This dataset is stored on unrestricted storage - with sufficient Galaxy permissions, it can be shared, published, or added to a data library.";
+        },
+    },
+};
+</script>
diff --git a/client/src/mvc/library/library-model.js b/client/src/mvc/library/library-model.js
index d9cccef37128..da48ab9154f0 100644
--- a/client/src/mvc/library/library-model.js
+++ b/client/src/mvc/library/library-model.js
@@ -140,7 +140,7 @@ var HistoryContents = Backbone.Collection.extend({
this.id = options.id;
},
url: function () {
- return `${this.urlRoot + this.id}/contents`;
+ return `${this.urlRoot + this.id}/contents?sharable=true`;
},
model: HistoryItem,
});
diff --git a/lib/galaxy/job_execution/output_collect.py b/lib/galaxy/job_execution/output_collect.py
index 199f4ccc465c..d9b21fc73cec 100644
--- a/lib/galaxy/job_execution/output_collect.py
+++ b/lib/galaxy/job_execution/output_collect.py
@@ -274,7 +274,7 @@ def add_library_dataset_to_folder(self, library_folder, ld):
# Permissions must be the same on the LibraryDatasetDatasetAssociation and the associated LibraryDataset
trans.app.security_agent.copy_library_permissions(trans, ld, ldda)
# Copy the current user's DefaultUserPermissions to the new LibraryDatasetDatasetAssociation.dataset
- trans.app.security_agent.set_all_dataset_permissions(ldda.dataset, trans.app.security_agent.user_get_default_permissions(trans.user))
+ trans.app.security_agent.set_all_dataset_permissions(ldda.dataset, trans.app.security_agent.user_get_default_permissions(trans.user), flush=False, new=True)
library_folder.add_library_dataset(ld, genome_build=ldda.dbkey)
trans.sa_session.add(library_folder)
trans.sa_session.flush()
diff --git a/lib/galaxy/jobs/__init__.py b/lib/galaxy/jobs/__init__.py
index 2ef3f7a3f631..ef7c5021fd8c 100644
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -1483,6 +1483,8 @@ def _set_object_store_ids(self, job):
object_store_populator = ObjectStorePopulator(self.app, job.user)
object_store_id = self.get_destination_configuration("object_store_id", None)
+ require_sharable = job.requires_sharable_storage(self.app.security_agent)
+
if object_store_id:
object_store_populator.object_store_id = object_store_id
@@ -1494,7 +1496,7 @@ def _set_object_store_ids(self, job):
# afterward. State below needs to happen the same way.
for dataset_assoc in job.output_datasets + job.output_library_datasets:
dataset = dataset_assoc.dataset
- object_store_populator.set_object_store_id(dataset)
+ object_store_populator.set_object_store_id(dataset, require_sharable=require_sharable)
job.object_store_id = object_store_populator.object_store_id
self._setup_working_directory(job=job)
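Taken together, the two hunks above make the job wrapper compute whether any output must be sharable and pass that requirement into dataset placement. A minimal sketch of that contract, using a stand-in class rather than Galaxy's actual `ObjectStorePopulator`:

```python
class SketchPopulator:
    """Stand-in for ObjectStorePopulator backed by a private store."""

    def __init__(self, store_is_private):
        self.store_is_private = store_is_private

    def set_object_store_id(self, dataset, require_sharable=False):
        # Mirrors the check added to ObjectStorePopulator later in this diff:
        # placing an output that must be sharable into a private store errors.
        if self.store_is_private and require_sharable:
            raise Exception("Attempted to create shared output datasets in objectstore with sharing disabled")


populator = SketchPopulator(store_is_private=True)
populator.set_object_store_id("output1", require_sharable=False)  # private outputs are fine

failed = False
try:
    populator.set_object_store_id("output2", require_sharable=True)
except Exception as e:
    failed = "sharing disabled" in str(e)
assert failed
```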
diff --git a/lib/galaxy/jobs/runners/__init__.py b/lib/galaxy/jobs/runners/__init__.py
index 80656afb6fef..140f318aa304 100644
--- a/lib/galaxy/jobs/runners/__init__.py
+++ b/lib/galaxy/jobs/runners/__init__.py
@@ -150,7 +150,12 @@ def put(self, job_wrapper):
"""Add a job to the queue (by job identifier), indicate that the job is ready to run.
"""
put_timer = ExecutionTimer()
- job_wrapper.enqueue()
+ try:
+ job_wrapper.enqueue()
+ except Exception as e:
+ job_wrapper.fail(str(e), exception=e)
+ log.debug(f"Job [{job_wrapper.job_id}] failed to queue {put_timer}")
+ return
self.mark_as_queued(job_wrapper)
log.debug(f"Job [{job_wrapper.job_id}] queued {put_timer}")
diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py
index c102fe6ab23f..2c05d92fea5e 100644
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -88,6 +88,7 @@
JOB_METRIC_SCALE = 7
# Tags that get automatically propagated from inputs to outputs when running jobs.
AUTO_PROPAGATED_TAGS = ["name"]
+CANNOT_SHARE_PRIVATE_DATASET_MESSAGE = "Attempting to share a non-sharable dataset."
if TYPE_CHECKING:
@@ -1129,6 +1130,19 @@ def remap_objects(p, k, obj):
job_attrs['params'] = params_dict
return job_attrs
+ def requires_sharable_storage(self, security_agent):
+ # An easy optimization would be to calculate this in galaxy.tools.actions when the
+ # job is created and all the output permissions are already known. Having to reload
+ # these permissions in the job code shouldn't strictly be needed.
+
+ requires_sharing = False
+ for dataset_assoc in self.output_datasets + self.output_library_datasets:
+ if not security_agent.dataset_is_private_to_a_user(dataset_assoc.dataset.dataset):
+ requires_sharing = True
+ break
+
+ return requires_sharing
+
def to_dict(self, view='collection', system_details=False):
rval = super().to_dict(view=view)
rval['tool_id'] = self.tool_id
@@ -2183,7 +2197,12 @@ def __filter_contents(self, content_class, **kwds):
visible = galaxy.util.string_as_bool_or_none(kwds.get('visible', None))
if visible is not None:
query = query.filter(content_class.visible == visible)
+ if 'object_store_ids' in kwds:
+ if content_class == HistoryDatasetAssociation:
+ query = query.join(content_class.dataset).filter(Dataset.table.c.object_store_id.in_(kwds.get("object_store_ids")))
+ # else ignoring object_store_ids on HDCAs...
if 'ids' in kwds:
+ assert 'object_store_ids' not in kwds
ids = kwds['ids']
max_in_filter_length = kwds.get('max_in_filter_length', MAX_IN_FILTER_LENGTH)
if len(ids) < max_in_filter_length:
@@ -2453,11 +2472,24 @@ def is_new(self):
def in_ready_state(self):
return self.state in self.ready_states
+ @property
+ def sharable(self):
+ """Return True if placed into an objectstore not labeled as ``private``."""
+ if self.external_filename:
+ return True
+ else:
+ object_store = self._assert_object_store_set()
+ return not object_store.is_private(self)
+
+ def ensure_sharable(self):
+ if not self.sharable:
+ raise Exception(CANNOT_SHARE_PRIVATE_DATASET_MESSAGE)
+
def get_file_name(self):
if not self.external_filename:
- assert self.object_store is not None, "Object Store has not been initialized for dataset %s" % self.id
- if self.object_store.exists(self):
- return self.object_store.get_filename(self)
+ object_store = self._assert_object_store_set()
+ if object_store.exists(self):
+ return object_store.get_filename(self)
else:
return ''
else:
@@ -2472,6 +2504,10 @@ def set_file_name(self, filename):
self.external_filename = filename
file_name = property(get_file_name, set_file_name)
+ def _assert_object_store_set(self):
+ assert self.object_store is not None, "Object Store has not been initialized for dataset %s" % self.id
+ return self.object_store
+
def get_extra_files_path(self):
# Unlike get_file_name - external_extra_files_path is not backed by an
# actual database column so if SA instantiates this object - the
@@ -3330,6 +3366,9 @@ def to_library_dataset_dataset_association(self, trans, target_folder, replace_d
"""
Copy this HDA to a library optionally replacing an existing LDDA.
"""
+        self.dataset.ensure_sharable()
+
if replace_dataset:
# The replace_dataset param ( when not None ) refers to a LibraryDataset that
# is being replaced with a new version.
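The new `sharable` property and `ensure_sharable` guard form the core invariant of this change: externally-backed files are always treated as sharable, everything else defers to the object store's `is_private`. A self-contained sketch of that contract, using stand-in classes rather than Galaxy's models:

```python
CANNOT_SHARE_PRIVATE_DATASET_MESSAGE = "Attempting to share a non-sharable dataset."


class SketchObjectStore:
    def __init__(self, private):
        self._private = private

    def is_private(self, obj):
        return self._private


class SketchDataset:
    def __init__(self, object_store, external_filename=None):
        self.object_store = object_store
        self.external_filename = external_filename

    @property
    def sharable(self):
        # Externally-backed files are always sharable; otherwise defer to the store.
        if self.external_filename:
            return True
        assert self.object_store is not None, "object store not initialized"
        return not self.object_store.is_private(self)

    def ensure_sharable(self):
        if not self.sharable:
            raise Exception(CANNOT_SHARE_PRIVATE_DATASET_MESSAGE)


private_ds = SketchDataset(SketchObjectStore(private=True))
assert not private_ds.sharable

external_ds = SketchDataset(SketchObjectStore(private=True), external_filename="/data/x.dat")
assert external_ds.sharable  # external files bypass the privacy check
```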
diff --git a/lib/galaxy/model/security.py b/lib/galaxy/model/security.py
index ebfbddf07417..6221990c02bc 100644
--- a/lib/galaxy/model/security.py
+++ b/lib/galaxy/model/security.py
@@ -814,16 +814,23 @@ def set_all_dataset_permissions(self, dataset, permissions=None, new=False, flus
# Make sure that DATASET_MANAGE_PERMISSIONS is associated with at least 1 role
has_dataset_manage_permissions = False
permissions = permissions or {}
- for action, roles in permissions.items():
- if isinstance(action, Action):
- if action == self.permitted_actions.DATASET_MANAGE_PERMISSIONS and roles:
- has_dataset_manage_permissions = True
- break
- elif action == self.permitted_actions.DATASET_MANAGE_PERMISSIONS.action and roles:
- has_dataset_manage_permissions = True
- break
+ for _ in _walk_action_roles(permissions, self.permitted_actions.DATASET_MANAGE_PERMISSIONS):
+ has_dataset_manage_permissions = True
+ break
if not has_dataset_manage_permissions:
return "At least 1 role must be associated with manage permissions on this dataset."
+
+ # If this is new, the objectstore likely hasn't been set yet - defer check until
+ # the job handler assigns it.
+ if not new and not dataset.sharable:
+ # ensure dataset not shared.
+ dataset_access_roles = []
+ for _, roles in _walk_action_roles(permissions, self.permitted_actions.DATASET_ACCESS):
+ dataset_access_roles.extend(roles)
+
+ if len(dataset_access_roles) != 1 or dataset_access_roles[0].type != self.model.Role.types.PRIVATE:
+ return galaxy.model.CANNOT_SHARE_PRIVATE_DATASET_MESSAGE
+
flush_needed = False
# Delete all of the current permissions on the dataset
if not new:
@@ -852,6 +859,12 @@ def set_dataset_permission(self, dataset, permission=None):
Permission looks like: { Action.action : [ Role, Role ] }
"""
permission = permission or {}
+
+ # if modifying access - ensure it is sharable.
+ for _ in _walk_action_roles(permission, self.permitted_actions.DATASET_ACCESS):
+ dataset.ensure_sharable()
+ break
+
flush_needed = False
for action, roles in permission.items():
if isinstance(action, Action):
@@ -891,6 +904,7 @@ def copy_dataset_permissions(self, src, dst):
self.set_all_dataset_permissions(dst, self.get_permissions(src))
def privately_share_dataset(self, dataset, users=None):
+ dataset.ensure_sharable()
intersect = None
users = users or []
for user in users:
@@ -1064,6 +1078,19 @@ def dataset_is_private_to_user(self, trans, dataset):
else:
return False
+ def dataset_is_private_to_a_user(self, dataset):
+ """
+ If the Dataset object has exactly one access role and that is
+        a user's private role then we consider the dataset private.
+ """
+ access_roles = dataset.get_access_roles(self)
+
+ if len(access_roles) != 1:
+ return False
+ else:
+ access_role = access_roles[0]
+ return access_role.type == self.model.Role.types.PRIVATE
+
def datasets_are_public(self, trans, datasets):
'''
Given a transaction object and a list of Datasets, return
@@ -1092,6 +1119,8 @@ def datasets_are_public(self, trans, datasets):
def make_dataset_public(self, dataset):
# A dataset is considered public if there are no "access" actions associated with it. Any
# other actions ( 'manage permissions', 'edit metadata' ) are irrelevant.
+ dataset.ensure_sharable()
+
flush_needed = False
for dp in dataset.actions:
if dp.action == self.permitted_actions.DATASET_ACCESS.action:
@@ -1481,3 +1510,12 @@ def set_dataset_permissions(self, hda, user, site):
hdadaa = self.model.HistoryDatasetAssociationDisplayAtAuthorization(hda=hda, user=user, site=site)
self.sa_session.add(hdadaa)
self.sa_session.flush()
+
+
+def _walk_action_roles(permissions, query_action):
+ for action, roles in permissions.items():
+ if isinstance(action, Action):
+ if action == query_action and roles:
+ yield action, roles
+ elif action == query_action.action and roles:
+ yield action, roles
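`_walk_action_roles` exists because Galaxy permission dicts may be keyed either by `Action` objects or by their raw action-name strings; the helper yields matches for both spellings. An illustrative standalone version (the stub `Action` class and the `"access"` action name are assumptions for the demo):

```python
class Action:
    def __init__(self, action):
        self.action = action

    def __eq__(self, other):
        return isinstance(other, Action) and self.action == other.action

    def __hash__(self):
        return hash(self.action)


def _walk_action_roles(permissions, query_action):
    # Yield (action, roles) pairs matching query_action, whether the dict is
    # keyed by Action instances or by action-name strings.
    for action, roles in permissions.items():
        if isinstance(action, Action):
            if action == query_action and roles:
                yield action, roles
        elif action == query_action.action and roles:
            yield action, roles


DATASET_ACCESS = Action("access")  # assumed action name, for illustration

perms_by_object = {Action("access"): ["private_role"], Action("manage"): ["r2"]}
perms_by_name = {"access": ["private_role"]}
assert list(_walk_action_roles(perms_by_object, DATASET_ACCESS)) == [(Action("access"), ["private_role"])]
assert list(_walk_action_roles(perms_by_name, DATASET_ACCESS)) == [("access", ["private_role"])]
assert list(_walk_action_roles({"access": []}, DATASET_ACCESS)) == []  # empty role lists are skipped
```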
diff --git a/lib/galaxy/objectstore/__init__.py b/lib/galaxy/objectstore/__init__.py
index 3a16e3329a5a..73455d275b16 100644
--- a/lib/galaxy/objectstore/__init__.py
+++ b/lib/galaxy/objectstore/__init__.py
@@ -17,6 +17,7 @@
from galaxy.exceptions import ObjectInvalid, ObjectNotFound
from galaxy.util import (
+ asbool,
directory_hash_id,
force_symlink,
parse_xml,
@@ -30,6 +31,7 @@
from galaxy.util.sleeper import Sleeper
NO_SESSION_ERROR_MESSAGE = "Attempted to 'create' object store entity in configuration with no database session present."
+DEFAULT_PRIVATE = False
log = logging.getLogger(__name__)
@@ -90,6 +92,9 @@ def create(self, obj, base_dir=None, dir_only=False, extra_dir=None, extra_dir_a
This method will create a proper directory structure for
the file if the directory does not already exist.
+
+ The method returns the concrete objectstore the supplied object is stored
+ in.
"""
raise NotImplementedError()
@@ -198,6 +203,19 @@ def get_concrete_store_description_markdown(self, obj):
yet been set, this may return ``None``.
"""
+ @abc.abstractmethod
+ def is_private(self, obj):
+ """Return True iff supplied object is stored in private ConcreteObjectStore."""
+
+ def object_store_ids(self, private=None):
+ """Return IDs of all concrete object stores - either private ones or non-private ones.
+
+ This should just return an empty list for non-DistributedObjectStore object stores,
+ i.e. concrete objectstores and the HierarchicalObjectStore since these do not
+ use the object_store_id column for objects (Galaxy Datasets).
+ """
+ return []
+
@abc.abstractmethod
def get_store_usage_percent(self):
"""Return the percentage indicating how full the store is."""
@@ -269,10 +287,11 @@ def to_dict(self):
extra_dirs = []
for extra_dir_type, extra_dir_path in self.extra_dirs.items():
extra_dirs.append({"type": extra_dir_type, "path": extra_dir_path})
+ store_type = self.store_type
return {
'config': config_to_dict(self.config),
'extra_dirs': extra_dirs,
- 'type': self.store_type,
+ 'type': store_type,
}
def _get_object_id(self, obj):
@@ -329,6 +348,16 @@ def get_store_usage_percent(self):
def get_store_by(self, obj, **kwargs):
return self._invoke('get_store_by', obj, **kwargs)
+ def is_private(self, obj):
+ return self._invoke('is_private', obj)
+
+ @classmethod
+ def parse_private_from_config_xml(clazz, config_xml):
+ private = DEFAULT_PRIVATE
+ if config_xml is not None:
+ private = asbool(config_xml.attrib.get('private', DEFAULT_PRIVATE))
+ return private
+
class ConcreteObjectStore(BaseObjectStore):
"""Subclass of ObjectStore for stores that don't delegate (non-nested).
@@ -356,9 +385,12 @@ def __init__(self, config, config_dict=None, **kwargs):
self.store_by = config_dict.get("store_by", None) or getattr(config, "object_store_store_by", "id")
self.name = config_dict.get("name", None)
self.description = config_dict.get("description", None)
+ # Annotate this as true to prevent sharing of data.
+ self.private = config_dict.get("private", DEFAULT_PRIVATE)
def to_dict(self):
rval = super().to_dict()
+ rval['private'] = self.private
rval["store_by"] = self.store_by
rval["name"] = self.name
rval["description"] = self.description
@@ -373,6 +405,9 @@ def _get_concrete_store_description_markdown(self, obj):
def _get_store_by(self, obj):
return self.store_by
+ def _is_private(self, obj):
+ return self.private
+
class DiskObjectStore(ConcreteObjectStore):
"""
@@ -385,7 +420,7 @@ class DiskObjectStore(ConcreteObjectStore):
>>> file_path=tempfile.mkdtemp()
>>> obj = Bunch(id=1)
>>> s = DiskObjectStore(Bunch(umask=0o077, jobs_directory=file_path, new_file_path=file_path, object_store_check_old_style=False), dict(files_dir=file_path))
- >>> s.create(obj)
+ >>> o = s.create(obj)
>>> s.exists(obj)
True
>>> assert s.get_filename(obj) == file_path + '/000/dataset_1.dat'
@@ -431,6 +466,7 @@ def parse_xml(clazz, config_xml):
extra_dirs.append({"type": e.get('type'), "path": e.get('path')})
config_dict["extra_dirs"] = extra_dirs
+ config_dict["private"] = BaseObjectStore.parse_private_from_config_xml(config_xml)
return config_dict
def to_dict(self):
@@ -542,6 +578,7 @@ def _create(self, obj, **kwargs):
if not dir_only:
open(path, 'w').close() # Should be rb?
umask_fix_perms(path, self.config.umask, 0o666)
+ return self
def _empty(self, obj, **kwargs):
"""Override `ObjectStore`'s stub by checking file size on disk."""
@@ -672,7 +709,8 @@ def file_ready(self, obj, **kwargs):
def _create(self, obj, **kwargs):
"""Create a backing file in a random backend."""
- random.choice(list(self.backends.values())).create(obj, **kwargs)
+ objectstore = random.choice(list(self.backends.values()))
+ return objectstore.create(obj, **kwargs)
def _empty(self, obj, **kwargs):
"""For the first backend that has this `obj`, determine if it is empty."""
@@ -711,6 +749,9 @@ def _get_concrete_store_name(self, obj):
def _get_concrete_store_description_markdown(self, obj):
return self._call_method('_get_concrete_store_description_markdown', obj, None, False)
+ def _is_private(self, obj):
+ return self._call_method('_is_private', obj, ObjectNotFound, True)
+
def _get_store_by(self, obj):
return self._call_method('_get_store_by', obj, None, False)
@@ -879,20 +920,24 @@ def __filesystem_monitor(self):
def _create(self, obj, **kwargs):
"""The only method in which obj.object_store_id may be None."""
- if obj.object_store_id is None or not self._exists(obj, **kwargs):
- if obj.object_store_id is None or obj.object_store_id not in self.backends:
+ object_store_id = obj.object_store_id
+ if object_store_id is None or not self._exists(obj, **kwargs):
+ if object_store_id is None or object_store_id not in self.backends:
try:
- obj.object_store_id = random.choice(self.weighted_backend_ids)
+ object_store_id = random.choice(self.weighted_backend_ids)
+ obj.object_store_id = object_store_id
except IndexError:
raise ObjectInvalid('objectstore.create, could not generate '
- 'obj.object_store_id: %s, kwargs: %s'
+ 'object_store_id: %s, kwargs: %s'
% (str(obj), str(kwargs)))
log.debug("Selected backend '%s' for creation of %s %s"
- % (obj.object_store_id, obj.__class__.__name__, obj.id))
+ % (object_store_id, obj.__class__.__name__, obj.id))
else:
log.debug("Using preferred backend '%s' for creation of %s %s"
- % (obj.object_store_id, obj.__class__.__name__, obj.id))
- self.backends[obj.object_store_id].create(obj, **kwargs)
+ % (object_store_id, obj.__class__.__name__, obj.id))
+ return self.backends[object_store_id].create(obj, **kwargs)
+ else:
+ return self.backends[object_store_id]
def _call_method(self, method, obj, default, default_is_exception, **kwargs):
object_store_id = self.__get_store_id_for(obj, **kwargs)
@@ -922,6 +967,14 @@ def __get_store_id_for(self, obj, **kwargs):
return id
return None
+ def object_store_ids(self, private=None):
+ object_store_ids = []
+ for backend_id, backend in self.backends.items():
+ object_store_ids.extend(backend.object_store_ids(private=private))
+ if backend.private is private or private is None:
+ object_store_ids.append(backend_id)
+ return object_store_ids
+
class HierarchicalObjectStore(NestedObjectStore):
@@ -938,22 +991,31 @@ def __init__(self, config, config_dict, fsmon=False):
super().__init__(config, config_dict)
backends = {}
+ is_private = config_dict.get("private", DEFAULT_PRIVATE)
for order, backend_def in enumerate(config_dict["backends"]):
+ backend_is_private = backend_def.get("private")
+ if backend_is_private is not None:
+                assert is_private == backend_is_private, "The private attribute must be defined on the HierarchicalObjectStore, not on the contained concrete objectstores."
backends[order] = build_object_store_from_config(config, config_dict=backend_def, fsmon=fsmon)
self.backends = backends
+ self.private = is_private
@classmethod
def parse_xml(clazz, config_xml):
backends_list = []
+ is_private = BaseObjectStore.parse_private_from_config_xml(config_xml)
for b in sorted(config_xml.find('backends'), key=lambda b: int(b.get('order'))):
store_type = b.get("type")
objectstore_class, _ = type_to_object_store_class(store_type)
backend_config_dict = objectstore_class.parse_xml(b)
+ backend_config_dict["private"] = is_private
backend_config_dict["type"] = store_type
backends_list.append(backend_config_dict)
- return {"backends": backends_list}
+ config_dict = {"backends": backends_list}
+ config_dict["private"] = is_private
+ return config_dict
def to_dict(self):
as_dict = super().to_dict()
@@ -962,6 +1024,7 @@ def to_dict(self):
backend_as_dict = backend.to_dict()
backends.append(backend_as_dict)
as_dict["backends"] = backends
+ as_dict["private"] = self.private
return as_dict
def _exists(self, obj, **kwargs):
@@ -973,7 +1036,13 @@ def _exists(self, obj, **kwargs):
def _create(self, obj, **kwargs):
"""Call the primary object store."""
- self.backends[0].create(obj, **kwargs)
+ return self.backends[0].create(obj, **kwargs)
+
+ def _is_private(self, obj):
+        # Unlike the DistributedObjectStore - the HierarchicalObjectStore does not use
+ # object_store_id - so all the contained object stores need to define is_private
+ # the same way.
+ return self.private
def type_to_object_store_class(store, fsmon=False):
@@ -1127,13 +1196,16 @@ def __init__(self, app, user):
self.object_store_id = None
self.user = user
- def set_object_store_id(self, data):
+ def set_object_store_id(self, data, require_sharable=False):
# Create an empty file immediately. The first dataset will be
# created in the "default" store, all others will be created in
# the same store as the first.
data.dataset.object_store_id = self.object_store_id
try:
- self.object_store.create(data.dataset)
+ ensure_non_private = require_sharable
+ concrete_store = self.object_store.create(data.dataset, ensure_non_private=ensure_non_private)
+ if concrete_store.private and require_sharable:
+ raise Exception("Attempted to create shared output datasets in objectstore with sharing disabled")
except ObjectInvalid:
raise Exception('Unable to create output dataset: object store is full')
self.object_store_id = data.dataset.object_store_id # these will be the same thing after the first output
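The new `object_store_ids(private=...)` walk is the piece that lets the history contents API translate a `sharable` filter into concrete backend ids. A runnable sketch of the recursion with stand-in store classes (names are illustrative, not Galaxy's):

```python
class SketchConcreteStore:
    def __init__(self, private=False):
        self.private = private

    def object_store_ids(self, private=None):
        return []  # concrete stores have no nested ids of their own


class SketchDistributedStore:
    def __init__(self, backends):
        self.backends = backends  # mapping: object_store_id -> backend

    def object_store_ids(self, private=None):
        ids = []
        for backend_id, backend in self.backends.items():
            ids.extend(backend.object_store_ids(private=private))  # recurse into nested stores
            if private is None or backend.private is private:
                ids.append(backend_id)
        return ids


store = SketchDistributedStore({
    "files1": SketchConcreteStore(private=False),
    "files2": SketchConcreteStore(private=True),
})
assert sorted(store.object_store_ids()) == ["files1", "files2"]
assert store.object_store_ids(private=True) == ["files2"]
assert store.object_store_ids(private=False) == ["files1"]
```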
diff --git a/lib/galaxy/objectstore/azure_blob.py b/lib/galaxy/objectstore/azure_blob.py
index e77dedaa6794..ce2457c4a65c 100644
--- a/lib/galaxy/objectstore/azure_blob.py
+++ b/lib/galaxy/objectstore/azure_blob.py
@@ -74,6 +74,7 @@ def parse_config_xml(config_xml):
'path': staging_path,
},
'extra_dirs': extra_dirs,
+ 'private': ConcreteObjectStore.parse_private_from_config_xml(config_xml),
}
except Exception:
# Toss it back up after logging, we can't continue loading at this point.
diff --git a/lib/galaxy/objectstore/cloud.py b/lib/galaxy/objectstore/cloud.py
index 3cd34fbbef79..7b7ca7ac67f0 100644
--- a/lib/galaxy/objectstore/cloud.py
+++ b/lib/galaxy/objectstore/cloud.py
@@ -571,6 +571,7 @@ def _create(self, obj, **kwargs):
rel_path = os.path.join(rel_path, alt_name if alt_name else "dataset_%s.dat" % self._get_object_id(obj))
open(os.path.join(self.staging_path, rel_path), 'w').close()
self._push_to_os(rel_path, from_string='')
+ return self
def _empty(self, obj, **kwargs):
if self._exists(obj, **kwargs):
diff --git a/lib/galaxy/objectstore/irods.py b/lib/galaxy/objectstore/irods.py
index c6505af9c51a..d77eae12b3bc 100644
--- a/lib/galaxy/objectstore/irods.py
+++ b/lib/galaxy/objectstore/irods.py
@@ -101,6 +101,7 @@ def parse_config_xml(config_xml):
'path': staging_path,
},
'extra_dirs': extra_dirs,
+ 'private': DiskObjectStore.parse_private_from_config_xml(config_xml),
}
except Exception:
# Toss it back up after logging, we can't continue loading at this point.
@@ -511,6 +512,7 @@ def _create(self, obj, **kwargs):
open(os.path.join(self.staging_path, rel_path), 'w').close()
self._push_to_irods(rel_path, from_string='')
log.debug("irods_pt _create: %s", ipt_timer)
+ return self
def _empty(self, obj, **kwargs):
if self._exists(obj, **kwargs):
diff --git a/lib/galaxy/objectstore/pithos.py b/lib/galaxy/objectstore/pithos.py
index 9cab7fffc020..a079739751c4 100644
--- a/lib/galaxy/objectstore/pithos.py
+++ b/lib/galaxy/objectstore/pithos.py
@@ -67,6 +67,7 @@ def parse_config_xml(config_xml):
raise Exception(msg)
r['extra_dirs'] = [
{k: e.get(k) for k in attrs} for e in extra_dirs]
+ r['private'] = ConcreteObjectStore.parse_private_from_config_xml(config_xml)
if 'job_work' not in (d['type'] for d in r['extra_dirs']):
msg = f'No value for {tag}:type="job_work" in XML tree'
log.error(msg)
@@ -287,6 +288,7 @@ def _create(self, obj, **kwargs):
new_file = os.path.join(self.staging_path, rel_path)
open(new_file, 'w').close()
self.pithos.upload_from_string(rel_path, '')
+ return self
def _empty(self, obj, **kwargs):
"""
diff --git a/lib/galaxy/objectstore/s3.py b/lib/galaxy/objectstore/s3.py
index 479cb31ba2dd..4c9852706f88 100644
--- a/lib/galaxy/objectstore/s3.py
+++ b/lib/galaxy/objectstore/s3.py
@@ -96,6 +96,7 @@ def parse_config_xml(config_xml):
'path': staging_path,
},
'extra_dirs': extra_dirs,
+ 'private': ConcreteObjectStore.parse_private_from_config_xml(config_xml),
}
except Exception:
# Toss it back up after logging, we can't continue loading at this point.
@@ -575,6 +576,7 @@ def _create(self, obj, **kwargs):
rel_path = os.path.join(rel_path, alt_name if alt_name else "dataset_%s.dat" % self._get_object_id(obj))
open(os.path.join(self.staging_path, rel_path), 'w').close()
self._push_to_os(rel_path, from_string='')
+ return self
def _empty(self, obj, **kwargs):
if self._exists(obj, **kwargs):
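All of the XML-configured backends above thread the same flag through `parse_private_from_config_xml`, which reads an optional `private="true"` attribute off the root element and defaults to false. A standalone approximation (the truthy-string set mimics `galaxy.util.asbool` but is an assumption here):

```python
from xml.etree import ElementTree

TRUTHY = {"true", "yes", "on", "1"}  # approximates galaxy.util.asbool


def parse_private_from_config_xml(config_xml, default=False):
    if config_xml is None:
        return default
    return config_xml.attrib.get("private", str(default)).lower() in TRUTHY


private_store = ElementTree.fromstring('<object_store type="disk" private="true"/>')
public_store = ElementTree.fromstring('<object_store type="disk"/>')
assert parse_private_from_config_xml(private_store) is True
assert parse_private_from_config_xml(public_store) is False
```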
diff --git a/lib/galaxy/tools/actions/upload_common.py b/lib/galaxy/tools/actions/upload_common.py
index 425f35811676..db9591f19199 100644
--- a/lib/galaxy/tools/actions/upload_common.py
+++ b/lib/galaxy/tools/actions/upload_common.py
@@ -187,7 +187,7 @@ def __new_history_upload(trans, uploaded_dataset, history=None, state=None):
trans.sa_session.flush()
history.add_dataset(hda, genome_build=uploaded_dataset.dbkey, quota=False)
permissions = trans.app.security_agent.history_get_default_permissions(history)
- trans.app.security_agent.set_all_dataset_permissions(hda.dataset, permissions)
+ trans.app.security_agent.set_all_dataset_permissions(hda.dataset, permissions, new=True, flush=False)
trans.sa_session.flush()
return hda
@@ -250,7 +250,7 @@ def __new_library_upload(trans, cntrller, uploaded_dataset, library_bunch, tag_h
trans.app.security_agent.copy_dataset_permissions(library_bunch.replace_dataset.library_dataset_dataset_association.dataset, ldda.dataset)
else:
# Copy the current user's DefaultUserPermissions to the new LibraryDatasetDatasetAssociation.dataset
- trans.app.security_agent.set_all_dataset_permissions(ldda.dataset, trans.app.security_agent.user_get_default_permissions(trans.user))
+ trans.app.security_agent.set_all_dataset_permissions(ldda.dataset, trans.app.security_agent.user_get_default_permissions(trans.user), new=True)
folder.add_library_dataset(ld, genome_build=uploaded_dataset.dbkey)
trans.sa_session.add(folder)
trans.sa_session.flush()
diff --git a/lib/galaxy/webapps/galaxy/api/datasets.py b/lib/galaxy/webapps/galaxy/api/datasets.py
index 8c553f4e4a6e..43ce439200b1 100644
--- a/lib/galaxy/webapps/galaxy/api/datasets.py
+++ b/lib/galaxy/webapps/galaxy/api/datasets.py
@@ -187,6 +187,7 @@ def show_storage(self, trans, dataset_id, hda_ldda='hda', **kwd):
return {
'object_store_id': object_store_id,
+            'sharable': dataset.sharable,
+            'private': not dataset.sharable,
'name': name,
'description': description,
'percent_used': percent_used,
diff --git a/lib/galaxy/webapps/galaxy/api/history_contents.py b/lib/galaxy/webapps/galaxy/api/history_contents.py
index dd3845e7f7ad..dc828834d0c9 100644
--- a/lib/galaxy/webapps/galaxy/api/history_contents.py
+++ b/lib/galaxy/webapps/galaxy/api/history_contents.py
@@ -100,6 +100,13 @@ def index(self, trans, history_id, ids=None, v=None, **kwd):
else:
contents_kwds['deleted'] = kwd.get('deleted', None)
contents_kwds['visible'] = kwd.get('visible', None)
+ sharable = kwd.get('sharable', None)
+ if sharable is not None and sharable.lower() not in ["none", "null"]:
+ sharable = util.string_as_bool(sharable)
+ object_store_ids = self.app.object_store.object_store_ids(private=not sharable)
+ if object_store_ids:
+ contents_kwds['object_store_ids'] = object_store_ids
+
# details param allows a mixed set of summary and detailed hdas
# Ever more convoluted due to backwards compat..., details
# should be considered deprecated in favor of more specific
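The `sharable` query parameter is deliberately three-valued: absent, `none`, or `null` mean no filtering, while true/false select non-private or private backends respectively. A hedged sketch of that translation (stub store class, simplified boolean parsing):

```python
def parse_sharable_filter(kwd, object_store):
    # Translate the three-valued ?sharable= parameter into an object-store-id
    # filter: sharable datasets live in non-private stores and vice versa.
    contents_kwds = {}
    sharable = kwd.get("sharable", None)
    if sharable is not None and sharable.lower() not in ["none", "null"]:
        wants_sharable = sharable.lower() in ["true", "yes", "1"]  # simplified string_as_bool
        object_store_ids = object_store.object_store_ids(private=not wants_sharable)
        if object_store_ids:
            contents_kwds["object_store_ids"] = object_store_ids
    return contents_kwds


class SketchStore:  # stub standing in for app.object_store
    def object_store_ids(self, private=None):
        return ["files2"] if private else ["files1"]


assert parse_sharable_filter({"sharable": "true"}, SketchStore()) == {"object_store_ids": ["files1"]}
assert parse_sharable_filter({"sharable": "false"}, SketchStore()) == {"object_store_ids": ["files2"]}
assert parse_sharable_filter({}, SketchStore()) == {}
```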
diff --git a/lib/galaxy/webapps/galaxy/controllers/dataset.py b/lib/galaxy/webapps/galaxy/controllers/dataset.py
index 39d1c39da653..92917818f213 100644
--- a/lib/galaxy/webapps/galaxy/controllers/dataset.py
+++ b/lib/galaxy/webapps/galaxy/controllers/dataset.py
@@ -277,7 +277,15 @@ def get_edit(self, trans, dataset_id=None, **kwd):
permission_disable = True
permission_inputs = list()
if trans.user:
- if data.dataset.actions:
+ if not data.dataset.sharable:
+                permission_message = 'This dataset is stored on storage private to you and cannot be shared.'
+ permission_inputs.append({
+ 'name': 'not_sharable',
+ 'type': 'hidden',
+ 'label': permission_message,
+ 'readonly': True
+ })
+ elif data.dataset.actions:
in_roles = {}
for action, roles in trans.app.security_agent.get_permissions(data.dataset).items():
in_roles[action.action] = [trans.security.encode_id(role.id) for role in roles]
diff --git a/lib/galaxy_test/api/test_dataset_collections.py b/lib/galaxy_test/api/test_dataset_collections.py
index 39577f41d581..b6d71d908c31 100644
--- a/lib/galaxy_test/api/test_dataset_collections.py
+++ b/lib/galaxy_test/api/test_dataset_collections.py
@@ -160,7 +160,7 @@ def test_list_list_list_download(self):
assert len(namelist) == 3, "Expected 3 elements in [%s]" % namelist
def test_hda_security(self):
- element_identifiers = self.dataset_collection_populator.pair_identifiers(self.history_id)
+ element_identifiers = self.dataset_collection_populator.pair_identifiers(self.history_id, wait=True)
self.dataset_populator.make_private(self.history_id, element_identifiers[0]["id"])
with self._different_user():
history_id = self.dataset_populator.new_history()
diff --git a/lib/galaxy_test/api/test_libraries.py b/lib/galaxy_test/api/test_libraries.py
index 9955eb824191..6bd6f73e85a3 100644
--- a/lib/galaxy_test/api/test_libraries.py
+++ b/lib/galaxy_test/api/test_libraries.py
@@ -82,7 +82,7 @@ def test_create_dataset_denied(self):
self._assert_status_code_is(folder_response, 200)
folder_id = folder_response.json()[0]['id']
history_id = self.dataset_populator.new_history()
- hda_id = self.dataset_populator.new_dataset(history_id, content="1 2 3")['id']
+ hda_id = self.dataset_populator.new_dataset(history_id, content="1 2 3", wait=True)['id']
with self._different_user():
payload = {'from_hda_id': hda_id}
create_response = self._post("folders/%s/contents" % folder_id, payload)
@@ -257,7 +257,7 @@ def test_create_dataset_in_folder(self):
self._assert_status_code_is(folder_response, 200)
folder_id = folder_response.json()[0]['id']
history_id = self.dataset_populator.new_history()
- hda_id = self.dataset_populator.new_dataset(history_id, content="1 2 3")['id']
+ hda_id = self.dataset_populator.new_dataset(history_id, content="1 2 3", wait=True)['id']
payload = {'from_hda_id': hda_id}
create_response = self._post("folders/%s/contents" % folder_id, payload)
self._assert_status_code_is(create_response, 200)
@@ -273,7 +273,7 @@ def test_create_dataset_in_subfolder(self):
print(subfolder_response.json())
subfolder_id = subfolder_response.json()['id']
history_id = self.dataset_populator.new_history()
- hda_id = self.dataset_populator.new_dataset(history_id, content="1 2 3 sub")['id']
+ hda_id = self.dataset_populator.new_dataset(history_id, content="1 2 3 sub", wait=True)['id']
payload = {'from_hda_id': hda_id}
create_response = self._post("folders/%s/contents" % subfolder_id, payload)
self._assert_status_code_is(create_response, 200)
@@ -434,4 +434,5 @@ def _create_dataset_in_folder_in_library(self, library_name, wait=False):
hda_id = self.dataset_populator.new_dataset(history_id, content="1 2 3", wait=wait)['id']
payload = {'from_hda_id': hda_id, 'create_type': 'file', 'folder_id': folder_id}
ld = self._post("libraries/%s/contents" % folder_id, payload)
+ ld.raise_for_status()
return ld
diff --git a/lib/galaxy_test/api/test_tools.py b/lib/galaxy_test/api/test_tools.py
index faa693d9f5ea..7252407a272c 100644
--- a/lib/galaxy_test/api/test_tools.py
+++ b/lib/galaxy_test/api/test_tools.py
@@ -761,7 +761,7 @@ def assert_inputs(inputs, can_be_used=True):
else:
self._assert_dataset_permission_denied_response(response)
- new_dataset = self.dataset_populator.new_dataset(history_id, content='Cat1Test')
+ new_dataset = self.dataset_populator.new_dataset(history_id, content='Cat1Test', wait=True)
inputs = dict(
input1=dataset_to_param(new_dataset),
)
diff --git a/lib/galaxy_test/base/populators.py b/lib/galaxy_test/base/populators.py
index 53de0a7f6c86..f0c857543a9d 100644
--- a/lib/galaxy_test/base/populators.py
+++ b/lib/galaxy_test/base/populators.py
@@ -626,9 +626,23 @@ def make_private(self, history_id: str, dataset_id: str) -> dict:
"access": json.dumps([role_id]),
"manage": json.dumps([role_id]),
}
+ response = self.update_permissions_raw(history_id, dataset_id, payload)
+ response.raise_for_status()
+ return response.json()
+
+ def make_public_raw(self, history_id, dataset_id):
+ role_id = self.user_private_role_id()
+ payload = {
+ "access": json.dumps([]),
+ "manage": json.dumps([role_id]),
+ }
+ response = self.update_permissions_raw(history_id, dataset_id, payload)
+ return response
+
+ def update_permissions_raw(self, history_id, dataset_id, payload):
url = f"histories/{history_id}/contents/{dataset_id}/permissions"
update_response = self._put(url, payload, admin=True)
- assert update_response.status_code == 200, update_response.content
+ update_response.raise_for_status()
return update_response.json()
def validate_dataset(self, history_id, dataset_id):
@@ -1526,8 +1540,8 @@ def __create_payload_collection(self, history_id, identifiers_func, collection_t
)
return payload
- def pair_identifiers(self, history_id, contents=None):
- hda1, hda2 = self.__datasets(history_id, count=2, contents=contents)
+ def pair_identifiers(self, history_id, contents=None, wait=False):
+ hda1, hda2 = self.__datasets(history_id, count=2, contents=contents, wait=wait)
element_identifiers = [
dict(name="forward", src="hda", id=hda1["id"]),
@@ -1561,10 +1575,12 @@ def __create(self, payload):
else:
return self.dataset_populator.fetch(payload)
- def __datasets(self, history_id, count, contents=None):
+ def __datasets(self, history_id, count, contents=None, wait=False):
datasets = []
for i in range(count):
- new_kwds = {}
+ new_kwds = {
+ 'wait': wait,
+ }
if contents:
new_kwds["content"] = contents[i]
datasets.append(self.dataset_populator.new_dataset(history_id, **new_kwds))
diff --git a/test/integration/objectstore/test_private_handling.py b/test/integration/objectstore/test_private_handling.py
new file mode 100644
index 000000000000..efc0eb84be2d
--- /dev/null
+++ b/test/integration/objectstore/test_private_handling.py
@@ -0,0 +1,50 @@
+"""Integration tests for mixing store_by."""
+
+import string
+
+from galaxy_test.base import api_asserts
+from ._base import BaseObjectStoreIntegrationTestCase
+
+PRIVATE_OBJECT_STORE_CONFIG_TEMPLATE = string.Template("""
+<object_store type="disk" private="true">
+    <files_dir path="${temp_directory}/files1"/>
+    <extra_dir type="temp" path="${temp_directory}/tmp1"/>
+    <extra_dir type="job_work" path="${temp_directory}/job_working_directory1"/>
+</object_store>
+""")
+
+TEST_INPUT_FILES_CONTENT = "1 2 3"
+
+
+class PrivatePreventsSharingObjectStoreIntegrationTestCase(BaseObjectStoreIntegrationTestCase):
+
+ @classmethod
+ def handle_galaxy_config_kwds(cls, config):
+ config["new_user_dataset_access_role_default_private"] = True
+ cls._configure_object_store(PRIVATE_OBJECT_STORE_CONFIG_TEMPLATE, config)
+
+    def test_private_dataset_cannot_be_made_public(self):
+        """Verify a dataset placed in a private object store cannot be made public."""
+ with self.dataset_populator.test_history() as history_id:
+ hda = self.dataset_populator.new_dataset(history_id, content=TEST_INPUT_FILES_CONTENT, wait=True)
+ content = self.dataset_populator.get_history_dataset_content(history_id, hda["id"])
+ assert content.startswith(TEST_INPUT_FILES_CONTENT)
+ response = self.dataset_populator.make_public_raw(history_id, hda["id"])
+ assert response.status_code != 200
+ api_asserts.assert_error_message_contains(response, "Attempting to share a non-sharable dataset.")
+
+
+class PrivateCannotWritePublicDataObjectStoreIntegrationTestCase(BaseObjectStoreIntegrationTestCase):
+
+ @classmethod
+ def handle_galaxy_config_kwds(cls, config):
+ config["new_user_dataset_access_role_default_private"] = False
+ cls._configure_object_store(PRIVATE_OBJECT_STORE_CONFIG_TEMPLATE, config)
+
+    def test_cannot_write_public_datasets(self):
+ with self.dataset_populator.test_history() as history_id:
+ response = self.dataset_populator.new_dataset_request(history_id, content=TEST_INPUT_FILES_CONTENT, wait=True, assert_ok=False)
+ job = response.json()["jobs"][0]
+ final_state = self.dataset_populator.wait_for_job(job["id"])
+ assert final_state == "error"
diff --git a/test/unit/data/test_galaxy_mapping.py b/test/unit/data/test_galaxy_mapping.py
index 94064615a2d5..b11a7153d118 100644
--- a/test/unit/data/test_galaxy_mapping.py
+++ b/test/unit/data/test_galaxy_mapping.py
@@ -23,6 +23,7 @@
not os.environ.get('GALAXY_TEST_UNIT_MAPPING_URI_POSTGRES_BASE'),
reason="GALAXY_TEST_UNIT_MAPPING_URI_POSTGRES_BASE not set"
)
+PRIVATE_OBJECT_STORE_ID = "my_private_data"
class BaseModelTestCase(unittest.TestCase):
@@ -204,11 +205,13 @@ def assert_display_name_converts_to_unicode(item, name):
assert history.get_display_name() == 'Hello₩◎ґʟⅾ'
def test_hda_to_library_dataset_dataset_association(self):
+ model = self.model
u = self.model.User(email="mary@example.com", password="password")
- hda = self.model.HistoryDatasetAssociation(name='hda_name')
+ h1 = model.History(name="History 1", user=u)
+ hda = model.HistoryDatasetAssociation(name='hda_name', create_dataset=True, history=h1, sa_session=model.session)
self.persist(hda)
trans = collections.namedtuple('trans', 'user')
- target_folder = self.model.LibraryFolder(name='library_folder')
+ target_folder = model.LibraryFolder(name='library_folder')
ldda = hda.to_library_dataset_dataset_association(
trans=trans(user=u),
target_folder=target_folder,
@@ -233,6 +236,21 @@ def test_hda_to_library_dataset_dataset_association(self):
assert new_ldda.library_dataset.expired_datasets[0] == ldda
assert target_folder.item_count == 1
+ def test_hda_to_library_dataset_dataset_association_fails_if_private(self):
+ model = self.model
+ u = model.User(email="mary2@example.com", password="password")
+ h1 = model.History(name="History 1", user=u)
+ hda = model.HistoryDatasetAssociation(name='hda_name', create_dataset=True, history=h1, sa_session=model.session)
+ hda.dataset.object_store_id = PRIVATE_OBJECT_STORE_ID
+ self.persist(hda)
+ trans = collections.namedtuple('trans', 'user')
+ target_folder = model.LibraryFolder(name='library_folder')
+ with pytest.raises(Exception):
+ hda.to_library_dataset_dataset_association(
+ trans=trans(user=u),
+ target_folder=target_folder,
+ )
+
def test_tags(self):
model = self.model
@@ -449,8 +467,8 @@ def test_history_contents(self):
self.persist(u, h1, expunge=False)
d1 = self.new_hda(h1, name="1")
- d2 = self.new_hda(h1, name="2", visible=False)
- d3 = self.new_hda(h1, name="3", deleted=True)
+ d2 = self.new_hda(h1, name="2", visible=False, object_store_id="foobar")
+ d3 = self.new_hda(h1, name="3", deleted=True, object_store_id="three_store")
d4 = self.new_hda(h1, name="4", visible=False, deleted=True)
self.session().flush()
@@ -464,8 +482,11 @@ def contents_iter_names(**kwds):
self.assertEqual(contents_iter_names(), ["1", "2", "3", "4"])
assert contents_iter_names(deleted=False) == ["1", "2"]
assert contents_iter_names(visible=True) == ["1", "3"]
+ assert contents_iter_names(visible=True, object_store_ids=["three_store"]) == ["3"]
assert contents_iter_names(visible=False) == ["2", "4"]
assert contents_iter_names(deleted=True, visible=False) == ["4"]
+ assert contents_iter_names(deleted=False, object_store_ids=["foobar"]) == ["2"]
+ assert contents_iter_names(deleted=False, object_store_ids=["foobar2"]) == []
assert contents_iter_names(ids=[d1.id, d2.id, d3.id, d4.id]) == ["1", "2", "3", "4"]
assert contents_iter_names(ids=[d1.id, d2.id, d3.id, d4.id], max_in_filter_length=1) == ["1", "2", "3", "4"]
@@ -754,6 +775,69 @@ def test_can_manage_private_dataset(self):
assert security_agent.can_manage_dataset(u_from.all_roles(), d1.dataset)
assert not security_agent.can_manage_dataset(u_other.all_roles(), d1.dataset)
+ def test_cannot_make_private_objectstore_dataset_public(self):
+ security_agent = GalaxyRBACAgent(self.model)
+ u_from, u_to, _ = self._three_users("cannot_make_private_public")
+
+ h = self.model.History(name="History for Prevent Sharing", user=u_from)
+ d1 = self.model.HistoryDatasetAssociation(extension="txt", history=h, create_dataset=True, sa_session=self.model.session)
+ self.persist(h, d1)
+
+ d1.dataset.object_store_id = PRIVATE_OBJECT_STORE_ID
+ self._make_private(security_agent, u_from, d1)
+
+ with pytest.raises(Exception) as exec_info:
+ self._make_owned(security_agent, u_from, d1)
+ assert galaxy.model.CANNOT_SHARE_PRIVATE_DATASET_MESSAGE in str(exec_info.value)
+
+ def test_cannot_make_private_objectstore_dataset_shared(self):
+ security_agent = GalaxyRBACAgent(self.model)
+ u_from, u_to, _ = self._three_users("cannot_make_private_shared")
+
+ h = self.model.History(name="History for Prevent Sharing", user=u_from)
+ d1 = self.model.HistoryDatasetAssociation(extension="txt", history=h, create_dataset=True, sa_session=self.model.session)
+ self.persist(h, d1)
+
+ d1.dataset.object_store_id = PRIVATE_OBJECT_STORE_ID
+ self._make_private(security_agent, u_from, d1)
+
+ with pytest.raises(Exception) as exec_info:
+ security_agent.privately_share_dataset(d1.dataset, [u_to])
+ assert galaxy.model.CANNOT_SHARE_PRIVATE_DATASET_MESSAGE in str(exec_info.value)
+
+ def test_cannot_set_dataset_permisson_on_private(self):
+ security_agent = GalaxyRBACAgent(self.model)
+ u_from, u_to, _ = self._three_users("cannot_set_permissions_on_private")
+
+ h = self.model.History(name="History for Prevent Sharing", user=u_from)
+ d1 = self.model.HistoryDatasetAssociation(extension="txt", history=h, create_dataset=True, sa_session=self.model.session)
+ self.persist(h, d1)
+
+ d1.dataset.object_store_id = PRIVATE_OBJECT_STORE_ID
+ self._make_private(security_agent, u_from, d1)
+
+ role = security_agent.get_private_user_role(u_to, auto_create=True)
+ access_action = security_agent.permitted_actions.DATASET_ACCESS.action
+
+ with pytest.raises(Exception) as exec_info:
+ security_agent.set_dataset_permission(d1.dataset, {access_action: [role]})
+ assert galaxy.model.CANNOT_SHARE_PRIVATE_DATASET_MESSAGE in str(exec_info.value)
+
+ def test_cannot_make_private_dataset_public(self):
+ security_agent = GalaxyRBACAgent(self.model)
+ u_from, u_to, u_other = self._three_users("cannot_make_private_dataset_public")
+
+ h = self.model.History(name="History for Annotation", user=u_from)
+ d1 = self.model.HistoryDatasetAssociation(extension="txt", history=h, create_dataset=True, sa_session=self.model.session)
+ self.persist(h, d1)
+
+ d1.dataset.object_store_id = PRIVATE_OBJECT_STORE_ID
+ self._make_private(security_agent, u_from, d1)
+
+ with pytest.raises(Exception) as exec_info:
+ security_agent.make_dataset_public(d1.dataset)
+ assert galaxy.model.CANNOT_SHARE_PRIVATE_DATASET_MESSAGE in str(exec_info.value)
+
def _three_users(self, suffix):
email_from = f"user_{suffix}e1@example.com"
email_to = f"user_{suffix}e2@example.com"
@@ -770,16 +854,26 @@ def _make_private(self, security_agent, user, hda):
access_action = security_agent.permitted_actions.DATASET_ACCESS.action
manage_action = security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS.action
permissions = {access_action: [role], manage_action: [role]}
- security_agent.set_all_dataset_permissions(hda.dataset, permissions)
+ self._set_permissions(security_agent, hda.dataset, permissions)
def _make_owned(self, security_agent, user, hda):
role = security_agent.get_private_user_role(user, auto_create=True)
manage_action = security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS.action
permissions = {manage_action: [role]}
- security_agent.set_all_dataset_permissions(hda.dataset, permissions)
+ self._set_permissions(security_agent, hda.dataset, permissions)
+
+ def _set_permissions(self, security_agent, dataset, permissions):
+ # TODO: refactor set_all_dataset_permissions to actually throw an exception :|
+ error = security_agent.set_all_dataset_permissions(dataset, permissions)
+ if error:
+ raise Exception(error)
def new_hda(self, history, **kwds):
- return history.add_dataset(self.model.HistoryDatasetAssociation(create_dataset=True, sa_session=self.model.session, **kwds))
+ object_store_id = kwds.pop('object_store_id', None)
+ hda = self.model.HistoryDatasetAssociation(create_dataset=True, sa_session=self.model.session, **kwds)
+ if object_store_id is not None:
+ hda.dataset.object_store_id = object_store_id
+ return history.add_dataset(hda)
@skip_if_not_postgres_base
@@ -811,6 +905,12 @@ def get_filename(self, *args, **kwds):
def get_store_by(self, *args, **kwds):
return 'id'
+    def is_private(self, obj):
+        return obj.object_store_id == PRIVATE_OBJECT_STORE_ID
+
def get_suite():
suite = unittest.TestSuite()
diff --git a/test/unit/objectstore/test_objectstore.py b/test/unit/objectstore/test_objectstore.py
index 6809cf71f452..434790b52d4f 100644
--- a/test/unit/objectstore/test_objectstore.py
+++ b/test/unit/objectstore/test_objectstore.py
@@ -282,8 +282,26 @@ def test_concrete_name_without_objectstore_id():
assert files1_name is None
+MIXED_STORE_BY_DISTRIBUTED_TEST_CONFIG = """
+<object_store type="distributed">
+    <backends>
+        <backend id="files1" type="disk" weight="1" store_by="id">
+            <files_dir path="${temp_directory}/files1"/>
+            <extra_dir type="temp" path="${temp_directory}/tmp1"/>
+            <extra_dir type="job_work" path="${temp_directory}/job_working_directory1"/>
+        </backend>
+        <backend id="files2" type="disk" weight="1" store_by="uuid" private="true">
+            <files_dir path="${temp_directory}/files2"/>
+            <extra_dir type="temp" path="${temp_directory}/tmp2"/>
+            <extra_dir type="job_work" path="${temp_directory}/job_working_directory2"/>
+        </backend>
+    </backends>
+</object_store>
+"""
+
+
MIXED_STORE_BY_HIERARCHICAL_TEST_CONFIG = """
-<object_store type="hierarchical">
+<object_store type="hierarchical" private="true">
@@ -301,12 +319,45 @@ def test_concrete_name_without_objectstore_id():
def test_mixed_store_by():
+ with TestConfig(MIXED_STORE_BY_DISTRIBUTED_TEST_CONFIG) as (directory, object_store):
+ as_dict = object_store.to_dict()
+ assert as_dict["backends"][0]["store_by"] == "id"
+ assert as_dict["backends"][1]["store_by"] == "uuid"
+
with TestConfig(MIXED_STORE_BY_HIERARCHICAL_TEST_CONFIG) as (directory, object_store):
as_dict = object_store.to_dict()
assert as_dict["backends"][0]["store_by"] == "id"
assert as_dict["backends"][1]["store_by"] == "uuid"
+def test_mixed_private():
+ # Distributed object store can combine private and non-private concrete objectstores
+ with TestConfig(MIXED_STORE_BY_DISTRIBUTED_TEST_CONFIG) as (directory, object_store):
+ ids = object_store.object_store_ids()
+ assert len(ids) == 2
+
+ ids = object_store.object_store_ids(private=True)
+ assert len(ids) == 1
+ assert ids[0] == "files2"
+
+ ids = object_store.object_store_ids(private=False)
+ assert len(ids) == 1
+ assert ids[0] == "files1"
+
+ as_dict = object_store.to_dict()
+ assert not as_dict["backends"][0]["private"]
+ assert as_dict["backends"][1]["private"]
+
+ with TestConfig(MIXED_STORE_BY_HIERARCHICAL_TEST_CONFIG) as (directory, object_store):
+ as_dict = object_store.to_dict()
+ assert as_dict["backends"][0]["private"]
+ assert as_dict["backends"][1]["private"]
+
+ assert object_store.private
+ assert as_dict["private"] is True
+
+
DISTRIBUTED_TEST_CONFIG = """
@@ -465,7 +516,7 @@ def test_config_parse_pithos():
assert len(extra_dirs) == 2
-S3_TEST_CONFIG = """<object_store type="s3">
+S3_TEST_CONFIG = """<object_store type="s3" private="true">
@@ -477,6 +528,7 @@ def test_config_parse_pithos():
S3_TEST_CONFIG_YAML = """
type: s3
+private: true
auth:
access_key: access_moo
secret_key: secret_cow
@@ -500,6 +552,7 @@ def test_config_parse_pithos():
def test_config_parse_s3():
for config_str in [S3_TEST_CONFIG, S3_TEST_CONFIG_YAML]:
with TestConfig(config_str, clazz=UnitializeS3ObjectStore) as (directory, object_store):
+ assert object_store.private
assert object_store.access_key == "access_moo"
assert object_store.secret_key == "secret_cow"