Merge branch 'release_24.1' into dev
mvdbeek committed Aug 15, 2024
2 parents 2dcd285 + ea35ee5 commit 64bd95d
Showing 10 changed files with 92 additions and 25 deletions.
11 changes: 7 additions & 4 deletions client/src/components/Upload/DefaultBox.vue
@@ -58,7 +58,7 @@ const props = defineProps({
},
lazyLoad: {
type: Number,
default: 50,
default: 150,
},
listDbKeys: {
type: Array,
@@ -84,6 +84,7 @@ const uploadCompleted = ref(0);
const uploadFile = ref(null);
const uploadItems = ref({});
const uploadSize = ref(0);
const queue = ref(createUploadQueue());
const counterNonRunning = computed(() => counterAnnounce.value + counterSuccess.value + counterError.value);
const enableBuild = computed(
@@ -99,16 +100,13 @@ const listExtensions = computed(() => props.effectiveExtensions.filter((ext) =>
const showHelper = computed(() => Object.keys(uploadItems.value).length === 0);
const uploadValues = computed(() => Object.values(uploadItems.value));
const queue = computed(() => createUploadQueue());
function createUploadQueue() {
return new UploadQueue({
announce: eventAnnounce,
chunkSize: props.chunkUploadSize,
complete: eventComplete,
error: eventError,
get: (index) => uploadItems.value[index],
historyId: historyId.value,
multiple: props.multiple,
progress: eventProgress,
success: eventSuccess,
@@ -268,6 +266,11 @@ function eventStart() {
uploadValues.value.forEach((model) => {
if (model.status === "init") {
model.status = "queued";
if (!model.targetHistoryId) {
// Associate with current history once upload starts
// This will not change if the current history is changed during upload
model.targetHistoryId = historyId.value;
}
uploadSize.value += model.fileSize;
}
});
15 changes: 10 additions & 5 deletions client/src/components/Upload/UploadModal.vue
@@ -1,17 +1,17 @@
<script setup>
import { setIframeEvents } from "components/Upload/utils";
import { useConfig } from "composables/config";
import { useUserHistories } from "composables/userHistories";
import { storeToRefs } from "pinia";
import { ref, watch } from "vue";
import { setIframeEvents } from "@/components/Upload/utils";
import { useConfig } from "@/composables/config";
import { useUserHistories } from "@/composables/userHistories";
import { useUserStore } from "@/stores/userStore";
import { wait } from "@/utils/utils";
import UploadContainer from "./UploadContainer.vue";
const { currentUser } = storeToRefs(useUserStore());
const { currentHistoryId } = useUserHistories(currentUser);
const { currentHistoryId, currentHistory } = useUserHistories(currentUser);
const { config, isConfigLoaded } = useConfig();
@@ -81,7 +81,12 @@ defineExpose({
no-enforce-focus
hide-footer>
<template v-slot:modal-header>
<h2 class="title h-sm" tabindex="0">{{ options.title }}</h2>
<h2 class="title h-sm" tabindex="0">
{{ options.title }}
<span v-if="currentHistory">
to <b>{{ currentHistory.name }}</b>
</span>
</h2>
</template>
<UploadContainer
v-if="currentHistoryId"
6 changes: 5 additions & 1 deletion client/src/composables/userHistories.js
@@ -15,6 +15,10 @@ export function useUserHistories(user) {
);

const currentHistoryId = computed(() => historyStore.currentHistoryId);
const currentHistory = computed(() => historyStore.currentHistory);

return { currentHistoryId };
return {
currentHistoryId,
currentHistory,
};
}
24 changes: 18 additions & 6 deletions client/src/utils/upload-queue.js
@@ -96,7 +96,11 @@ export class UploadQueue {
// Remove item from queue
this.remove(index);
// Collect upload request data
const data = uploadPayload([this.opts.get(index)], this.opts.historyId);
const item = this.opts.get(index);
if (!item.targetHistoryId) {
throw new Error(`Missing target history for upload item [${index}] ${item.fileName}`);
}
const data = uploadPayload([item], item.targetHistoryId);
// Initiate upload request
this._processSubmit(index, data);
} catch (e) {
@@ -107,19 +111,27 @@ }
}
}

// Submit remote files as single batch request
// Submit remote files as single batch request per target history
_processUrls() {
const list = [];
const batchByHistory = {};
for (const index of this.queue.keys()) {
const model = this.opts.get(index);
if (model.status === "queued" && model.fileMode === "url") {
list.push({ index, ...model });
if (!model.targetHistoryId) {
throw new Error(`Missing target history for upload item [${index}] ${model.fileName}`);
}
if (!batchByHistory[model.targetHistoryId]) {
batchByHistory[model.targetHistoryId] = [];
}
batchByHistory[model.targetHistoryId].push({ index, ...model });
this.remove(index);
}
}
if (list.length > 0) {

for (const historyId in batchByHistory) {
const list = batchByHistory[historyId];
try {
const data = uploadPayload(list, this.opts.historyId);
const data = uploadPayload(list, historyId);
sendPayload(data, {
success: (message) => {
list.forEach((model) => {
1 change: 1 addition & 0 deletions client/src/utils/upload-queue.test.js
@@ -177,6 +177,7 @@ describe("UploadQueue", () => {
spaceToTab: true,
status: "queued",
toPosixLines: false,
targetHistoryId: "historyId",
};
},
get: (index) => fileEntries[index],
13 changes: 9 additions & 4 deletions lib/galaxy/celery/tasks.py
@@ -185,14 +185,19 @@ def set_metadata(
dataset_id: int,
model_class: str = "HistoryDatasetAssociation",
overwrite: bool = True,
ensure_can_set_metadata: bool = True,
task_user_id: Optional[int] = None,
):
"""
ensure_can_set_metadata can be bypassed for new outputs.
"""
manager = _get_dataset_manager(hda_manager, ldda_manager, model_class)
dataset_instance = manager.by_id(dataset_id)
can_set_metadata = manager.ensure_can_set_metadata(dataset_instance, raiseException=False)
if not can_set_metadata:
log.info(f"Setting metadata is not allowed for {model_class} {dataset_instance.id}")
return
if ensure_can_set_metadata:
can_set_metadata = manager.ensure_can_set_metadata(dataset_instance, raiseException=False)
if not can_set_metadata:
log.info(f"Setting metadata is not allowed for {model_class} {dataset_instance.id}")
return
try:
if overwrite:
hda_manager.overwrite_metadata(dataset_instance)
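The new ensure_can_set_metadata flag lets a caller that has just created a dataset skip the "is metadata editable?" check. A minimal sketch of how such a caller might build and defer the task (the helper name queue_metadata_for_new_output is hypothetical; the set_metadata.si(...) keyword arguments mirror the call added in lib/galaxy/tools/__init__.py below):

# Hypothetical helper; only the set_metadata.si(...) keywords come from this commit.
from galaxy.celery.tasks import set_metadata

def queue_metadata_for_new_output(dataset_id: int, user_id: int):
    # The output was just created by the framework, so the usual
    # editability check can safely be skipped.
    return set_metadata.si(
        dataset_id=dataset_id,
        task_user_id=user_id,
        ensure_can_set_metadata=False,
    )

# The caller decides when to enqueue it, e.g. once the final job state is known:
# queue_metadata_for_new_output(dataset_id=42, user_id=7).delay()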
7 changes: 6 additions & 1 deletion lib/galaxy/jobs/__init__.py
@@ -2020,8 +2020,9 @@ def fail(message=job.info, exception=None):
# Certain tools require tasks to be completed after job execution
# ( this used to be performed in the "exec_after_process" hook, but hooks are deprecated ).
param_dict = self.get_param_dict(job)
task_wrapper = None
try:
self.tool.exec_after_process(
task_wrapper = self.tool.exec_after_process(
self.app, inp_data, out_data, param_dict, job=job, final_job_state=final_job_state
)
except Exception as e:
@@ -2063,6 +2064,10 @@ def fail(message=job.info, exception=None):
self.sa_session.commit()
if job.state == job.states.ERROR:
self._report_error()
elif task_wrapper:
# The only task is setting metadata (if necessary) on the expression tool output.
# The dataset state is SETTING_METADATA, which delays dependent jobs until the task completes.
task_wrapper.delay()
cleanup_job = self.cleanup_job
delete_files = cleanup_job == "always" or (job.state == job.states.OK and cleanup_job == "onsuccess")
self.cleanup(delete_files=delete_files)
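exec_after_process now returns a Celery signature instead of dispatching work itself, so the job wrapper can decide after the fact whether to enqueue it. A framework-free sketch of that build-now, send-later pattern (the stub task, broker, and flag are illustrative, not Galaxy code):

from celery import Celery

app = Celery("sketch", broker="memory://")

@app.task
def set_metadata_stub(dataset_id):
    # Stand-in for the real metadata task.
    print(f"regenerating metadata for dataset {dataset_id}")

def exec_after_process_stub(dataset_id):
    # Build the signature immediately, while the output is in hand...
    return set_metadata_stub.si(dataset_id)

task_wrapper = exec_after_process_stub(42)
job_errored = False
if job_errored:
    pass  # report the error instead of enqueuing anything
elif task_wrapper:
    task_wrapper.delay()  # ...but only enqueue it once the job finished cleanly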
22 changes: 21 additions & 1 deletion lib/galaxy/tools/__init__.py
@@ -2929,7 +2929,27 @@ def exec_after_process(self, app, inp_data, out_data, param_dict, job=None, **kw
break
if copy_object is None:
raise exceptions.MessageException("Failed to find dataset output.")
out_data[key].copy_from(copy_object, include_metadata=True)
output = out_data[key]
# If a change_datatype PJA is associated with the expression tool output, the new output already has
# the desired datatype, so we use it. If the extension is "data" there's no change_datatype PJA and
# we want to use the existing extension.
new_ext = output.extension if output.extension != "data" else copy_object.extension
require_metadata_regeneration = copy_object.extension != new_ext
output.copy_from(copy_object, include_metadata=not require_metadata_regeneration)
output.extension = new_ext
if require_metadata_regeneration:
if app.config.enable_celery_tasks:
from galaxy.celery.tasks import set_metadata

output._state = model.Dataset.states.SETTING_METADATA
return set_metadata.si(
dataset_id=output.id, task_user_id=output.history.user_id, ensure_can_set_metadata=False
)
else:
# TODO: move exec_after_process into the metadata script so this doesn't run on the head node?
output.init_meta()
output.set_meta()
output.set_metadata_success_state()

def parse_environment_variables(self, tool_source):
"""Setup environment variable for inputs file."""
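The heart of the change above is deciding which extension the expression tool output should end up with and whether its metadata can simply be copied. A distilled, illustrative version of that decision (the standalone function is not part of Galaxy; output and copy_object are duck-typed dataset-like objects following the attribute names in the diff):

def resolve_output_extension(output, copy_object):
    # "data" is the unset sentinel: no change_datatype PJA ran, so keep the
    # source dataset's extension.
    new_ext = output.extension if output.extension != "data" else copy_object.extension
    # Metadata is only reusable when the datatype did not change.
    needs_regeneration = copy_object.extension != new_ext
    output.copy_from(copy_object, include_metadata=not needs_regeneration)
    output.extension = new_ext
    return needs_regeneration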
2 changes: 1 addition & 1 deletion lib/galaxy/webapps/galaxy/controllers/admin.py
@@ -73,7 +73,7 @@ def sort(self, trans, query, ascending, column_name=None):
.group_by(model.GalaxySession.table.c.user_id)
.subquery()
)
query = query.outerjoin((last_login_subquery, model.User.table.c.id == last_login_subquery.c.user_id))
query = query.outerjoin(last_login_subquery, model.User.table.c.id == last_login_subquery.c.user_id)

if not ascending:
query = query.order_by((last_login_subquery.c.last_login).desc().nullslast())
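The admin grid fix drops SQLAlchemy's legacy tuple calling convention for joins, which SQLAlchemy 2.0 removes; the join target and ON clause are now passed as separate arguments. A minimal standalone sketch of the modern form (the tables are simplified stand-ins for Galaxy's models):

import sqlalchemy as sa

metadata = sa.MetaData()
user = sa.Table("galaxy_user", metadata, sa.Column("id", sa.Integer, primary_key=True))
galaxy_session = sa.Table(
    "galaxy_session",
    metadata,
    sa.Column("id", sa.Integer, primary_key=True),
    sa.Column("user_id", sa.Integer, sa.ForeignKey("galaxy_user.id")),
    sa.Column("update_time", sa.DateTime),
)

last_login_subquery = (
    sa.select(galaxy_session.c.user_id, sa.func.max(galaxy_session.c.update_time).label("last_login"))
    .group_by(galaxy_session.c.user_id)
    .subquery()
)

# Legacy (removed) form: query.outerjoin((last_login_subquery, onclause))
# 2.0-style form: pass the target and the ON clause as separate arguments.
stmt = (
    sa.select(user)
    .outerjoin(last_login_subquery, user.c.id == last_login_subquery.c.user_id)
    .order_by(last_login_subquery.c.last_login.desc().nullslast())
)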
16 changes: 14 additions & 2 deletions lib/galaxy_test/api/test_workflows.py
@@ -2056,14 +2056,25 @@ def test_run_workflow_pick_value_bam_pja(self):
pick_from:
- value:
__class__: RuntimeValue
consume_index:
tool_id: metadata_bam
in:
input_bam: pick_value/data_param
tool_state:
ref_names:
- chr10_random
- chr11
- chrM
- chrX
- chr16
outputs:
pick_out:
outputSource: pick_value/data_param
""",
test_data="""
some_file:
value: 1.bam
file_type: bam
value: 3.bam
file_type: unsorted.bam
type: File
""",
history_id=history_id,
Expand All @@ -2076,6 +2087,7 @@ def test_run_workflow_pick_value_bam_pja(self):
)
assert dataset_details["metadata_reference_names"]
assert dataset_details["metadata_bam_index"]
assert dataset_details["file_ext"] == "bam"

def test_run_workflow_simple_conditional_step(self):
with self.dataset_populator.test_history() as history_id:
