diff --git a/README.md b/README.md
index 213a215..32687bb 100644
--- a/README.md
+++ b/README.md
@@ -24,6 +24,9 @@ You can go to http://aind-data-transfer-service to submit a `.csv` or `.xlsx` fi
What each column means in the job submission template:
+- **processor_full_name**: Name of the person submitting the upload job
+- **project_name**: Name of the project the data belongs to. A full list of valid project names can be downloaded from [Project Names](http://aind-metadata-service/project_names)
+- **process_capsule_id**: Optional ID of a Code Ocean capsule or pipeline to run after the data is uploaded
- **platform**: For a list of platforms click [here](https://github.com/AllenNeuralDynamics/aind-data-schema/blob/main/src/aind_data_schema/models/platforms.py).
- **acq_datetime**: The time that the data was acquired
- **subject_id**: The unique id of the subject
@@ -70,8 +73,12 @@ platform = Platform.BEHAVIOR
behavior_config = ModalityConfigs(modality=Modality.BEHAVIOR, source=(source_dir / "Behavior"))
behavior_videos_config = ModalityConfigs(modality=Modality.BEHAVIOR_VIDEOS, source=(source_dir / "Behavior videos"))
metadata_dir = source_dir / "Config" # This is an optional folder of pre-compiled metadata json files
+processor_full_name = "Anna Apple"
+project_name = "Ephys Platform"
upload_job_configs = BasicUploadJobConfigs(
+    processor_full_name = processor_full_name,
+    project_name = project_name,
s3_bucket = s3_bucket,
platform = platform,
subject_id = subject_id,
diff --git a/src/aind_data_transfer_service/configs/job_configs.py b/src/aind_data_transfer_service/configs/job_configs.py
index 8c2a91a..72c4ce9 100644
--- a/src/aind_data_transfer_service/configs/job_configs.py
+++ b/src/aind_data_transfer_service/configs/job_configs.py
@@ -131,6 +131,19 @@ class BasicUploadJobConfigs(BaseSettings):
aws_param_store_name: Optional[str] = Field(None)
+ processor_full_name: str = Field(
+ ...,
+ description="Name of person uploading data",
+ title="Processor Full Name",
+ )
+ project_name: str = Field(
+ ..., description="Name of project", title="Project Name"
+ )
+ process_capsule_id: Optional[str] = Field(
+ None,
+        description="Optional Code Ocean capsule or pipeline id to run when data is uploaded",
+ title="Process Capsule ID",
+ )
s3_bucket: Optional[str] = Field(
None,
description="Bucket where data will be uploaded",
diff --git a/src/aind_data_transfer_service/configs/job_upload_template.py b/src/aind_data_transfer_service/configs/job_upload_template.py
index f49c946..bd8762f 100644
--- a/src/aind_data_transfer_service/configs/job_upload_template.py
+++ b/src/aind_data_transfer_service/configs/job_upload_template.py
@@ -1,6 +1,7 @@
"""Module to configure and create xlsx job upload template"""
import datetime
from io import BytesIO
+from typing import Any, Dict, List
from aind_data_schema.models.modalities import Modality
from aind_data_schema.models.platforms import Platform
@@ -18,6 +19,9 @@ class JobUploadTemplate:
NUM_TEMPLATE_ROWS = 20
XLSX_DATETIME_FORMAT = "YYYY-MM-DDTHH:mm:ss"
HEADERS = [
+ "processor_full_name",
+ "project_name",
+ "process_capsule_id",
"platform",
"acq_datetime",
"subject_id",
@@ -29,6 +33,9 @@ class JobUploadTemplate:
]
SAMPLE_JOBS = [
[
+ "Anna Apple",
+ "Behavior Platform",
+ "1f999652-00a0-4c4b-99b5-64c2985ad070",
Platform.BEHAVIOR.abbreviation,
datetime.datetime(2023, 10, 4, 4, 0, 0),
"123456",
@@ -39,6 +46,9 @@ class JobUploadTemplate:
"/allen/aind/stage/fake/dir",
],
[
+ "John Smith",
+ "Ophys Platform - SLAP2",
+ None,
Platform.SMARTSPIM.abbreviation,
datetime.datetime(2023, 3, 4, 16, 30, 0),
"654321",
@@ -47,6 +57,9 @@ class JobUploadTemplate:
"/allen/aind/stage/fake/dir",
],
[
+ "Anna Apple",
+ "Ephys Platform",
+ None,
Platform.ECEPHYS.abbreviation,
datetime.datetime(2023, 1, 30, 19, 1, 0),
"654321",
@@ -57,42 +70,51 @@ class JobUploadTemplate:
"/allen/aind/stage/fake/dir",
],
]
- VALIDATORS = [
- {
- "name": "platform",
- "type": "list",
- "options": [p().abbreviation for p in Platform._ALL],
- "column_indexes": [HEADERS.index("platform")],
- },
- {
- "name": "modality",
- "type": "list",
- "options": [m().abbreviation for m in Modality._ALL],
- "column_indexes": [
- HEADERS.index("modality0"),
- HEADERS.index("modality1"),
- ],
- },
- {
- "name": "datetime",
- "type": "date",
- "column_indexes": [HEADERS.index("acq_datetime")],
- },
- ]
- @staticmethod
- def create_job_template():
+ @property
+ def validators(self) -> List[Dict[str, Any]]:
+ """
+ Returns
+ -------
+ List[Dict[str, Any]]
+ A list of validators for fields that require validation.
+
+ """
+ return [
+ {
+ "name": "platform",
+ "type": "list",
+ "options": [p().abbreviation for p in Platform._ALL],
+ "column_indexes": [self.HEADERS.index("platform")],
+ },
+ {
+ "name": "modality",
+ "type": "list",
+ "options": [m().abbreviation for m in Modality._ALL],
+ "column_indexes": [
+ self.HEADERS.index("modality0"),
+ self.HEADERS.index("modality1"),
+ ],
+ },
+ {
+ "name": "datetime",
+ "type": "date",
+ "column_indexes": [self.HEADERS.index("acq_datetime")],
+ },
+ ]
+
+ @property
+ def excel_sheet_filestream(self) -> BytesIO:
"""Create job template as xlsx filestream"""
- # job template
xl_io = BytesIO()
workbook = Workbook()
workbook.iso_dates = True
worksheet = workbook.active
- worksheet.append(JobUploadTemplate.HEADERS)
- for job in JobUploadTemplate.SAMPLE_JOBS:
+ worksheet.append(self.HEADERS)
+ for job in self.SAMPLE_JOBS:
worksheet.append(job)
# data validators
- for validator in JobUploadTemplate.VALIDATORS:
+ for validator in self.validators:
dv_type = validator["type"]
dv_name = validator["name"]
dv_params = {
@@ -108,19 +130,17 @@ def create_job_template():
dv_params["prompt"] = f"Select a {dv_name} from the dropdown"
elif dv_type == "date":
dv_params["prompt"] = "Provide a {} using {}".format(
- dv_name, JobUploadTemplate.XLSX_DATETIME_FORMAT
+ dv_name, self.XLSX_DATETIME_FORMAT
)
dv = DataValidation(**dv_params)
for i in validator["column_indexes"]:
col = get_column_letter(i + 1)
- col_range = (
- f"{col}2:{col}{JobUploadTemplate.NUM_TEMPLATE_ROWS}"
- )
+ col_range = f"{col}2:{col}{self.NUM_TEMPLATE_ROWS}"
dv.add(col_range)
if dv_type != "date":
continue
for (cell,) in worksheet[col_range]:
- cell.number_format = JobUploadTemplate.XLSX_DATETIME_FORMAT
+ cell.number_format = self.XLSX_DATETIME_FORMAT
worksheet.add_data_validation(dv)
# formatting
bold = Font(bold=True)
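
Since the template is now exposed as a property rather than a static method, saving a local copy is just a matter of dumping the `BytesIO` contents. A small usage sketch, assuming the class's `FILE_NAME` attribute as the output filename:

```python
from aind_data_transfer_service.configs.job_upload_template import (
    JobUploadTemplate,
)

job_template = JobUploadTemplate()
xl_io = job_template.excel_sheet_filestream  # BytesIO holding the rendered workbook

# Write the xlsx bytes to disk, e.g. job_upload_template.xlsx
with open(job_template.FILE_NAME, "wb") as f:
    f.write(xl_io.getvalue())
```
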
diff --git a/src/aind_data_transfer_service/server.py b/src/aind_data_transfer_service/server.py
index 26c871d..de9a2a3 100644
--- a/src/aind_data_transfer_service/server.py
+++ b/src/aind_data_transfer_service/server.py
@@ -329,11 +329,28 @@ async def jobs(request: Request):
)
-def download_job_template(request: Request):
+async def download_job_template(_: Request):
"""Get job template as xlsx filestream for download"""
+
+ # TODO: Cache list of project names
try:
- xl_io = JobUploadTemplate.create_job_template()
+ job_template = JobUploadTemplate()
+ xl_io = job_template.excel_sheet_filestream
+ return StreamingResponse(
+ io.BytesIO(xl_io.getvalue()),
+ media_type=(
+ "application/"
+ "vnd.openxmlformats-officedocument.spreadsheetml.sheet"
+ ),
+ headers={
+ "Content-Disposition": (
+ f"attachment; filename={job_template.FILE_NAME}"
+ )
+ },
+ status_code=200,
+ )
except Exception as e:
+ logging.error(e)
return JSONResponse(
content={
"message": "Error creating job template",
@@ -341,18 +358,6 @@ def download_job_template(request: Request):
},
status_code=500,
)
- return StreamingResponse(
- io.BytesIO(xl_io.getvalue()),
- media_type=(
- "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
- ),
- headers={
- "Content-Disposition": (
- f"attachment; filename={JobUploadTemplate.FILE_NAME}"
- )
- },
- status_code=200,
- )
routes = [
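
Client side, fetching the template is unchanged. A hedged sketch of downloading it from the service, where the host is the placeholder name used in the README and the route path matches the tests in this diff:

```python
import requests

# Placeholder host from the README; adjust to the deployed service URL.
url = "http://aind-data-transfer-service/api/job_upload_template"

response = requests.get(url)
response.raise_for_status()

with open("job_upload_template.xlsx", "wb") as f:
    f.write(response.content)
```
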
diff --git a/src/aind_data_transfer_service/templates/index.html b/src/aind_data_transfer_service/templates/index.html
index d20a1e4..5fcef8f 100644
--- a/src/aind_data_transfer_service/templates/index.html
+++ b/src/aind_data_transfer_service/templates/index.html
@@ -47,6 +47,7 @@
Submit Jobs |
Job Status |
Job Submit Template |
+ Project Names |
Help
@@ -144,7 +145,7 @@ Submit Jobs
let jobsLength = jobs.length;
var table = document.createElement('table'), tr, td, row;
addTableRow(
- [ "s3_bucket", "platform", "subject_id", "acq_datetime", "metadata_dir", "modality", "modality.source" ],
+ [ "processor_full_name", "project_name", "process_capsule_id", "s3_bucket", "platform", "subject_id", "acq_datetime", "metadata_dir", "modality", "modality.source" ],
table, tr, td, true
);
for (row = 0; row < jobsLength; row++) {
@@ -152,11 +153,14 @@ Submit Jobs
let modalities = job.modalities;
let modalitiesLength = modalities.length;
addTableRow(
- [ { value: job.s3_bucket, rowspan: modalitiesLength },
- { value: job.platform.abbreviation, rowspan: modalitiesLength },
- { value: job.subject_id, rowspan: modalitiesLength },
- { value: job.acq_datetime, rowspan: modalitiesLength },
- { value: job.metadata_dir ?? "", rowspan: modalitiesLength },
+ [ { value: job.processor_full_name, rowspan: modalitiesLength },
+ { value: job.project_name, rowspan: modalitiesLength },
+ { value: job.process_capsule_id ?? "", rowspan: modalitiesLength },
+ { value: job.s3_bucket, rowspan: modalitiesLength },
+ { value: job.platform.abbreviation, rowspan: modalitiesLength },
+ { value: job.subject_id, rowspan: modalitiesLength },
+ { value: job.acq_datetime, rowspan: modalitiesLength },
+ { value: job.metadata_dir ?? "", rowspan: modalitiesLength },
modalities ? modalities[0].modality.abbreviation : "",
modalities ? modalities[0].source : ""
], table, tr, td, false
diff --git a/src/aind_data_transfer_service/templates/job_status.html b/src/aind_data_transfer_service/templates/job_status.html
index 73bb820..f27726c 100644
--- a/src/aind_data_transfer_service/templates/job_status.html
+++ b/src/aind_data_transfer_service/templates/job_status.html
@@ -26,7 +26,9 @@
Jobs Submitted: {{num_of_jobs}}
diff --git a/tests/resources/job_upload_template.xlsx b/tests/resources/job_upload_template.xlsx
index 5c0a875..9203d33 100644
Binary files a/tests/resources/job_upload_template.xlsx and b/tests/resources/job_upload_template.xlsx differ
diff --git a/tests/resources/sample.csv b/tests/resources/sample.csv
index 370494a..2305c47 100644
--- a/tests/resources/sample.csv
+++ b/tests/resources/sample.csv
@@ -1,4 +1,4 @@
-modality0, modality0.source, modality1, modality1.source, s3-bucket, subject-id, platform, acq-datetime
-ECEPHYS, dir/data_set_1, ,, some_bucket, 123454, ecephys, 2020-10-10 14:10:10
-BEHAVIOR_VIDEOS, dir/data_set_2, MRI, dir/data_set_3, open, 123456, BEHAVIOR, 10/13/2020 1:10:10 PM
-BEHAVIOR_VIDEOS, dir/data_set_2, BEHAVIOR_VIDEOS, dir/data_set_3, scratch, 123456, BEHAVIOR, 10/13/2020 1:10:10 PM
+processor_full_name, project_name, process_capsule_id, modality0, modality0.source, modality1, modality1.source, s3-bucket, subject-id, platform, acq-datetime
+Anna Apple, Ephys Platform, , ECEPHYS, dir/data_set_1, ,, some_bucket, 123454, ecephys, 2020-10-10 14:10:10
+John Smith, Behavior Platform, 1f999652-00a0-4c4b-99b5-64c2985ad070, BEHAVIOR_VIDEOS, dir/data_set_2, MRI, dir/data_set_3, open, 123456, BEHAVIOR, 10/13/2020 1:10:10 PM
+Anna Apple, Behavior Platform, , BEHAVIOR_VIDEOS, dir/data_set_2, BEHAVIOR_VIDEOS, dir/data_set_3, scratch, 123456, BEHAVIOR, 10/13/2020 1:10:10 PM
diff --git a/tests/resources/sample.xlsx b/tests/resources/sample.xlsx
index 52a0fa1..94578d7 100644
Binary files a/tests/resources/sample.xlsx and b/tests/resources/sample.xlsx differ
diff --git a/tests/resources/sample_alt_modality_case.csv b/tests/resources/sample_alt_modality_case.csv
index 1ce7623..c58f227 100644
--- a/tests/resources/sample_alt_modality_case.csv
+++ b/tests/resources/sample_alt_modality_case.csv
@@ -1,4 +1,4 @@
-modality0, modality0.source, modality1, modality1.source, s3-bucket, subject-id, platform, acq-datetime
-ecephys, dir/data_set_1, ,, some_bucket, 123454, ecephys, 2020-10-10 14:10:10
-behavior-videos, dir/data_set_2, MRI, dir/data_set_3, open, 123456, BEHAVIOR, 10/13/2020 1:10:10 PM
-behavior-videos, dir/data_set_2, BEHAVIOR_VIDEOS, dir/data_set_3, scratch, 123456, BEHAVIOR, 10/13/2020 1:10:10 PM
+processor_full_name, project_name, process_capsule_id, modality0, modality0.source, modality1, modality1.source, s3-bucket, subject-id, platform, acq-datetime
+Anna Apple, Ephys Platform, , ecephys, dir/data_set_1, ,, some_bucket, 123454, ecephys, 2020-10-10 14:10:10
+John Smith, Behavior Platform, 1f999652-00a0-4c4b-99b5-64c2985ad070, behavior-videos, dir/data_set_2, MRI, dir/data_set_3, open, 123456, BEHAVIOR, 10/13/2020 1:10:10 PM
+Anna Apple, Behavior Platform, , behavior-videos, dir/data_set_2, BEHAVIOR_VIDEOS, dir/data_set_3, scratch, 123456, BEHAVIOR, 10/13/2020 1:10:10 PM
diff --git a/tests/resources/sample_empty_rows.csv b/tests/resources/sample_empty_rows.csv
index 0dadca7..9013962 100644
--- a/tests/resources/sample_empty_rows.csv
+++ b/tests/resources/sample_empty_rows.csv
@@ -1,7 +1,7 @@
-modality0, modality0.source, modality1, modality1.source, s3-bucket, subject-id, platform, acq-datetime
-ECEPHYS, dir/data_set_1, ,, some_bucket, 123454, ecephys, 2020-10-10 14:10:10
-BEHAVIOR_VIDEOS, dir/data_set_2, MRI, dir/data_set_3, open, 123456, BEHAVIOR, 10/13/2020 1:10:10 PM
-BEHAVIOR_VIDEOS, dir/data_set_2, BEHAVIOR_VIDEOS, dir/data_set_3, scratch, 123456, BEHAVIOR, 10/13/2020 1:10:10 PM
+processor_full_name, project_name, process_capsule_id, modality0, modality0.source, modality1, modality1.source, s3-bucket, subject-id, platform, acq-datetime
+Anna Apple, Ephys Platform, , ECEPHYS, dir/data_set_1, ,, some_bucket, 123454, ecephys, 2020-10-10 14:10:10
+John Smith, Behavior Platform, 1f999652-00a0-4c4b-99b5-64c2985ad070, BEHAVIOR_VIDEOS, dir/data_set_2, MRI, dir/data_set_3, open, 123456, BEHAVIOR, 10/13/2020 1:10:10 PM
+Anna Apple, Behavior Platform, , BEHAVIOR_VIDEOS, dir/data_set_2, BEHAVIOR_VIDEOS, dir/data_set_3, scratch, 123456, BEHAVIOR, 10/13/2020 1:10:10 PM
,,,,,,
,,,,,,
,,,,,,
diff --git a/tests/resources/sample_empty_rows.xlsx b/tests/resources/sample_empty_rows.xlsx
index 8c3c02e..62817e5 100644
Binary files a/tests/resources/sample_empty_rows.xlsx and b/tests/resources/sample_empty_rows.xlsx differ
diff --git a/tests/resources/sample_invalid_ext.txt b/tests/resources/sample_invalid_ext.txt
index beecb88..2305c47 100644
--- a/tests/resources/sample_invalid_ext.txt
+++ b/tests/resources/sample_invalid_ext.txt
@@ -1,4 +1,4 @@
-modality0, modality0.source, modality1, modality1.source, s3-bucket, subject-id, platform, acq-datetime
-ECEPHYS, dir/data_set_1, ,, some_bucket, 123454, ecephys, 2020-10-10 14:10:10
-BEHAVIOR_VIDEOS, dir/data_set_2, MRI, dir/data_set_3, some_bucket2, 123456, BEHAVIOR, 10/13/2020 1:10:10 PM
-BEHAVIOR_VIDEOS, dir/data_set_2, BEHAVIOR_VIDEOS, dir/data_set_3, some_bucket2, 123456, BEHAVIOR, 10/13/2020 1:10:10 PM
+processor_full_name, project_name, process_capsule_id, modality0, modality0.source, modality1, modality1.source, s3-bucket, subject-id, platform, acq-datetime
+Anna Apple, Ephys Platform, , ECEPHYS, dir/data_set_1, ,, some_bucket, 123454, ecephys, 2020-10-10 14:10:10
+John Smith, Behavior Platform, 1f999652-00a0-4c4b-99b5-64c2985ad070, BEHAVIOR_VIDEOS, dir/data_set_2, MRI, dir/data_set_3, open, 123456, BEHAVIOR, 10/13/2020 1:10:10 PM
+Anna Apple, Behavior Platform, , BEHAVIOR_VIDEOS, dir/data_set_2, BEHAVIOR_VIDEOS, dir/data_set_3, scratch, 123456, BEHAVIOR, 10/13/2020 1:10:10 PM
diff --git a/tests/resources/sample_malformed.csv b/tests/resources/sample_malformed.csv
index 8e24662..690c713 100644
--- a/tests/resources/sample_malformed.csv
+++ b/tests/resources/sample_malformed.csv
@@ -1,4 +1,4 @@
-modality0, modality0.source, modality1, modality1.source, s3-bucket, subject-id, platform, acq-datetime
-ECEPHYS, dir/data_set_1, , , some_bucket, 123454, ecephys, 2020-10-10 14:10:10
-WRONG_MODALITY_HERE, dir/data_set_2, MRI, dir/data_set_3, some_bucket2, 123456, Other, 10/13/2020 1:10:10 PM
-SPIM, dir/data_set_2, SPIM, dir/data_set_3, some_bucket2, 123456, smartSPIM, 10/13/2020 1:10:10 PM
+processor_full_name, project_name, process_capsule_id, modality0, modality0.source, modality1, modality1.source, s3-bucket, subject-id, platform, acq-datetime
+Anna Apple, Ephys Platform, , ECEPHYS, dir/data_set_1, ,, some_bucket, 123454, ecephys, 2020-10-10 14:10:10
+John Smith, Behavior Platform, 1f999652-00a0-4c4b-99b5-64c2985ad070, WRONG_MODALITY_HERE, dir/data_set_2, MRI, dir/data_set_3, open, 123456, BEHAVIOR, 10/13/2020 1:10:10 PM
+Anna Apple, Behavior Platform, , BEHAVIOR_VIDEOS, dir/data_set_2, BEHAVIOR_VIDEOS, dir/data_set_3, scratch, 123456, BEHAVIOR, 10/13/2020 1:10:10 PM
diff --git a/tests/resources/sample_malformed.xlsx b/tests/resources/sample_malformed.xlsx
index 153dc75..66afb63 100644
Binary files a/tests/resources/sample_malformed.xlsx and b/tests/resources/sample_malformed.xlsx differ
diff --git a/tests/test_configs.py b/tests/test_configs.py
index f11cb56..0bbc4a2 100644
--- a/tests/test_configs.py
+++ b/tests/test_configs.py
@@ -27,6 +27,9 @@ class TestJobConfigs(unittest.TestCase):
expected_job_configs = [
BasicUploadJobConfigs(
aws_param_store_name="/some/param/store",
+ processor_full_name="Anna Apple",
+ project_name="Ephys Platform",
+ process_capsule_id=None,
s3_bucket="private",
platform=Platform.ECEPHYS,
modalities=[
@@ -50,6 +53,9 @@ class TestJobConfigs(unittest.TestCase):
),
BasicUploadJobConfigs(
aws_param_store_name="/some/param/store",
+ processor_full_name="John Smith",
+ project_name="Behavior Platform",
+ process_capsule_id="1f999652-00a0-4c4b-99b5-64c2985ad070",
s3_bucket="open",
platform=Platform.BEHAVIOR,
modalities=[
@@ -80,6 +86,9 @@ class TestJobConfigs(unittest.TestCase):
),
BasicUploadJobConfigs(
aws_param_store_name="/some/param/store",
+ processor_full_name="Anna Apple",
+ project_name="Behavior Platform",
+ process_capsule_id=None,
s3_bucket="scratch",
platform=Platform.BEHAVIOR,
modalities=[
@@ -149,6 +158,8 @@ def test_parse_csv_file(self):
# not formatted correctly
with self.assertRaises(Exception) as e1:
BasicUploadJobConfigs(
+ processor_full_name="Anna Apple",
+ project_name="Behavior Platform",
s3_bucket="",
platform=Platform.BEHAVIOR,
modalities=[Modality.BEHAVIOR_VIDEOS],
@@ -214,6 +225,8 @@ def test_malformed_platform(self):
with self.assertRaises(AttributeError) as e:
BasicUploadJobConfigs(
aws_param_store_name="/some/param/store",
+ processor_full_name="Anna Apple",
+ project_name="Behavior Platform",
s3_bucket="some_bucket2",
platform="MISSING",
modalities=[
@@ -308,6 +321,9 @@ def test_from_job_and_server_configs(self):
" python -m aind_data_transfer.jobs.basic_job"
" --json-args ' "
'{"aws_param_store_name":"/some/param/store",'
+ '"processor_full_name":"Anna Apple",'
+ '"project_name":"Ephys Platform",'
+ '"process_capsule_id":null,'
'"s3_bucket":"private",'
'"platform":{"name":"Electrophysiology platform",'
'"abbreviation":"ecephys"},'
diff --git a/tests/test_hpc_models.py b/tests/test_hpc_models.py
index 1860d54..7f7094d 100644
--- a/tests/test_hpc_models.py
+++ b/tests/test_hpc_models.py
@@ -133,6 +133,8 @@ class TestHpcJobSubmitSettings(unittest.TestCase):
example_config = BasicUploadJobConfigs(
aws_param_store_name="/some/param/store",
+ processor_full_name="John Smith",
+ project_name="Behavior Platform",
s3_bucket="some_bucket",
platform=Platform.ECEPHYS,
modalities=[
diff --git a/tests/test_job_upload_template.py b/tests/test_job_upload_template.py
index 11c52b3..bd6bb72 100644
--- a/tests/test_job_upload_template.py
+++ b/tests/test_job_upload_template.py
@@ -18,7 +18,8 @@
class TestJobUploadTemplate(unittest.TestCase):
"""Tests job upload template class"""
- def read_xl_helper(self, source, return_validators=False):
+ @staticmethod
+ def _read_xl_helper(source, return_validators=False):
"""Helper function to read xlsx contents and validators"""
lines = []
workbook = load_workbook(source, read_only=(not return_validators))
@@ -42,15 +43,26 @@ def read_xl_helper(self, source, return_validators=False):
workbook.close()
return result
+ @classmethod
+ def setUpClass(cls):
+ """Set up test class"""
+ expected_lines = cls._read_xl_helper(SAMPLE_JOB_TEMPLATE)
+ job_template = JobUploadTemplate()
+ (template_lines, template_validators) = cls._read_xl_helper(
+ job_template.excel_sheet_filestream, True
+ )
+ cls.job_template = job_template
+ cls.expected_lines = expected_lines
+ cls.template_lines = template_lines
+ cls.template_validators = template_validators
+ return cls
+
def test_create_job_template(self):
"""Tests that xlsx job template is created with
correct contents and validators"""
- expected_lines = self.read_xl_helper(SAMPLE_JOB_TEMPLATE)
- (template_lines, template_validators) = self.read_xl_helper(
- JobUploadTemplate.create_job_template(), True
- )
- self.assertEqual(expected_lines, template_lines)
- for validator in template_validators:
+ expected_lines = self.expected_lines
+ self.assertEqual(expected_lines, self.template_lines)
+ for validator in self.template_validators:
validator["column_indexes"] = []
for r in validator["ranges"]:
rb = (col, *_) = range_boundaries(r)
@@ -60,7 +72,7 @@ def test_create_job_template(self):
validator["column_indexes"].append(col - 1)
del validator["ranges"]
self.assertCountEqual(
- JobUploadTemplate.VALIDATORS, template_validators
+ self.job_template.validators, self.template_validators
)
diff --git a/tests/test_server.py b/tests/test_server.py
index 6d75725..1b0f30d 100644
--- a/tests/test_server.py
+++ b/tests/test_server.py
@@ -8,10 +8,14 @@
from pathlib import Path, PurePosixPath
from unittest.mock import MagicMock, patch
+from fastapi.responses import StreamingResponse
from fastapi.testclient import TestClient
from pydantic import SecretStr
from requests import Response
+from aind_data_transfer_service.configs.job_upload_template import (
+ JobUploadTemplate,
+)
from aind_data_transfer_service.server import app
from tests.test_configs import TestJobConfigs
@@ -294,7 +298,10 @@ def test_submit_hpc_jobs(
{
"hpc_settings": '{"qos":"production", "name": "job1"}',
"upload_job_settings": (
- '{"s3_bucket": "private", '
+ '{"processor_full_name":"Anna Apple", '
+ '"project_name":"Ephys Platform", '
+ '"process_capsule_id":null, '
+ '"s3_bucket": "private", '
'"platform": {"name": "Behavior platform", '
'"abbreviation": "behavior"}, '
'"modalities": ['
@@ -360,7 +367,10 @@ def test_submit_hpc_jobs_open_data(
{
"hpc_settings": '{"qos":"production", "name": "job1"}',
"upload_job_settings": (
- '{"s3_bucket": "open", '
+ '{"processor_full_name":"Anna Apple", '
+ '"project_name":"Ephys Platform", '
+ '"process_capsule_id":null, '
+ '"s3_bucket": "open", '
'"platform": {"name": "Behavior platform", '
'"abbreviation": "behavior"}, '
'"modalities": ['
@@ -534,44 +544,50 @@ def test_jobs_failure(self, mock_get: MagicMock):
self.assertEqual(response.status_code, 200)
self.assertIn("Submit Jobs", response.text)
- @patch(
- "aind_data_transfer_service.configs.job_upload_template"
- ".JobUploadTemplate.create_job_template"
- )
- def test_download_job_template(self, mock_create_template: MagicMock):
+ def test_download_job_template(self):
"""Tests that job template downloads as xlsx file."""
- mock_create_template.return_value = BytesIO(b"mock_template_stream")
+
with TestClient(app) as client:
response = client.get("/api/job_upload_template")
- expected_file_name_header = (
- "attachment; filename=job_upload_template.xlsx"
+
+ expected_job_template = JobUploadTemplate()
+ expected_file_stream = expected_job_template.excel_sheet_filestream
+ expected_streaming_response = StreamingResponse(
+ BytesIO(expected_file_stream.getvalue()),
+ media_type=(
+ "application/"
+ "vnd.openxmlformats-officedocument.spreadsheetml.sheet"
+ ),
+ headers={
+ "Content-Disposition": (
+ f"attachment; filename={expected_job_template.FILE_NAME}"
+ )
+ },
+ status_code=200,
)
- self.assertEqual(1, mock_create_template.call_count)
- self.assertEqual(200, response.status_code)
+
self.assertEqual(
- expected_file_name_header, response.headers["Content-Disposition"]
+ expected_streaming_response.headers.items(),
+ list(response.headers.items()),
)
+ self.assertEqual(200, response.status_code)
- @patch(
- "aind_data_transfer_service.configs.job_upload_template"
- ".JobUploadTemplate.create_job_template"
- )
+ @patch("aind_data_transfer_service.server.JobUploadTemplate")
+ @patch("logging.error")
def test_download_invalid_job_template(
- self, mock_create_template: MagicMock
+ self, mock_log_error: MagicMock, mock_job_template: MagicMock
):
"""Tests that download invalid job template returns errors."""
- mock_create_template.side_effect = Exception(
- "mock invalid job template"
- )
+ mock_job_template.side_effect = Exception("mock invalid job template")
with TestClient(app) as client:
response = client.get("/api/job_upload_template")
expected_response = {
"message": "Error creating job template",
"data": {"error": "Exception('mock invalid job template',)"},
}
- self.assertEqual(1, mock_create_template.call_count)
self.assertEqual(500, response.status_code)
self.assertEqual(expected_response, response.json())
+ mock_log_error.assert_called_once()
if __name__ == "__main__":