feat: updates aind-data-transfer-models
jtyoung84 committed Oct 23, 2024
1 parent b603fe0 commit 547b7a4
Showing 5 changed files with 26 additions and 7 deletions.
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -21,7 +21,7 @@ dependencies = [
'pydantic>=2.7,<2.9',
'pydantic-settings>=2.0',
'aind-data-schema>=1.0.0',
-'aind-data-transfer-models==0.10.0'
+'aind-data-transfer-models==0.13.1'
]

[project.optional-dependencies]
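The pin on aind-data-transfer-models moves from 0.10.0 to 0.13.1, presumably to pick up the CodeOceanPipelineMonitorConfigs handling used below. A minimal sanity check of the installed pin, assuming the package is installed in the current environment (importlib.metadata is standard library):

# Minimal sketch: confirm the environment matches the new pyproject.toml pin.
# Assumes aind-data-transfer-models is installed; importlib.metadata is stdlib.
from importlib.metadata import version

assert version("aind-data-transfer-models") == "0.13.1"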
14 changes: 14 additions & 0 deletions src/aind_data_transfer_service/configs/csv_handler.py
@@ -1,10 +1,17 @@
"""Module to handle processing legacy csv files"""

import json

from aind_data_transfer_models.core import (
BasicUploadJobConfigs,
CodeOceanPipelineMonitorConfigs,
ModalityConfigs,
)

DEFAULT_CODEOCEAN_CONFIGS = json.loads(
CodeOceanPipelineMonitorConfigs().model_dump_json()
)


def map_csv_row_to_job(row: dict) -> BasicUploadJobConfigs:
"""
@@ -41,6 +48,13 @@ def map_csv_row_to_job(row: dict) -> BasicUploadJobConfigs:
modality_configs[modality_key] = {sub_key: clean_val}
elif clean_val is not None:
modality_configs[modality_key].update({sub_key: clean_val})
elif clean_key == "job_type":
if clean_val is not None:
codeocean_configs = json.loads(
CodeOceanPipelineMonitorConfigs().model_dump_json()
)
codeocean_configs["job_type"] = clean_val
basic_job_configs["codeocean_configs"] = codeocean_configs
else:
basic_job_configs[clean_key] = clean_val
modalities = []
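A rough usage sketch of the new job_type branch, assuming the package's src layout (aind_data_transfer_service.configs.csv_handler) and a row shaped like the updated sample CSV below; the row values mirror that sample and are illustrative only:

# Sketch only: pass one CSV-style row (keys taken from the sample CSV headers,
# which the handler normalizes via clean_key/clean_val) to map_csv_row_to_job.
from aind_data_transfer_service.configs.csv_handler import map_csv_row_to_job

row = {
    "project_name": "Behavior Platform",
    "process_capsule_id": "1f999652-00a0-4c4b-99b5-64c2985ad070",
    "modality0": "BEHAVIOR_VIDEOS",
    "modality0.source": "dir/data_set_2",
    "modality1": "MRI",
    "modality1.source": "dir/data_set_3",
    "s3-bucket": "open",
    "subject-id": "123456",
    "platform": "BEHAVIOR",
    "acq-datetime": "10/13/2020 1:10:10 PM",
    "job_type": "custom",
}
job = map_csv_row_to_job(row)
# The new elif branch copies job_type into codeocean_configs;
# per the updated unit test this should print "custom".
print(job.codeocean_configs.job_type)

Note that the branch builds a fresh CodeOceanPipelineMonitorConfigs dict per row instead of mutating the module-level DEFAULT_CODEOCEAN_CONFIGS.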
4 changes: 3 additions & 1 deletion src/aind_data_transfer_service/server.py
@@ -172,7 +172,9 @@ async def submit_jobs(request: Request):
content = await request.json()
try:
model = SubmitJobRequest.model_validate_json(json.dumps(content))
-full_content = json.loads(model.model_dump_json(warnings=False))
+full_content = json.loads(
+    model.model_dump_json(warnings=False, exclude_none=True)
+)
# TODO: Replace with httpx async client
response = requests.post(
url=os.getenv("AIND_AIRFLOW_SERVICE_URL"),
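The functional change here is exclude_none=True, which drops unset optional fields from the JSON payload forwarded to the Airflow service. A standalone illustration of that pydantic v2 behavior; Job is a made-up stand-in model, not a class from this service:

# Standalone sketch of pydantic v2's exclude_none behaviour; "Job" is a
# hypothetical stand-in model, not SubmitJobRequest from this repo.
from typing import Optional

from pydantic import BaseModel


class Job(BaseModel):
    project_name: str
    process_capsule_id: Optional[str] = None


job = Job(project_name="Ephys Platform")
print(job.model_dump_json())                   # {"project_name":"Ephys Platform","process_capsule_id":null}
print(job.model_dump_json(exclude_none=True))  # {"project_name":"Ephys Platform"}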
8 changes: 4 additions & 4 deletions tests/resources/new_sample.csv
@@ -1,4 +1,4 @@
-project_name, process_capsule_id, modality0, modality0.source, modality1, modality1.source, s3-bucket, subject-id, platform, acq-datetime
-Ephys Platform, , ECEPHYS, dir/data_set_1, ,, some_bucket, 123454, ecephys, 2020-10-10 14:10:10
-Behavior Platform, 1f999652-00a0-4c4b-99b5-64c2985ad070, BEHAVIOR_VIDEOS, dir/data_set_2, MRI, dir/data_set_3, open, 123456, BEHAVIOR, 10/13/2020 1:10:10 PM
-Behavior Platform, , BEHAVIOR_VIDEOS, dir/data_set_2, , , scratch, 123456, BEHAVIOR, 10/13/2020 1:10:10 PM
+project_name, process_capsule_id, modality0, modality0.source, modality1, modality1.source, s3-bucket, subject-id, platform, acq-datetime, job_type
+Ephys Platform, , ECEPHYS, dir/data_set_1, ,, some_bucket, 123454, ecephys, 2020-10-10 14:10:10,
+Behavior Platform, 1f999652-00a0-4c4b-99b5-64c2985ad070, BEHAVIOR_VIDEOS, dir/data_set_2, MRI, dir/data_set_3, open, 123456, BEHAVIOR, 10/13/2020 1:10:10 PM, custom
+Behavior Platform, , BEHAVIOR_VIDEOS, dir/data_set_2, , , scratch, 123456, BEHAVIOR, 10/13/2020 1:10:10 PM,
5 changes: 4 additions & 1 deletion tests/test_csv_handler.py
@@ -9,6 +9,7 @@
from aind_data_schema_models.platforms import Platform
from aind_data_transfer_models.core import (
BasicUploadJobConfigs,
CodeOceanPipelineMonitorConfigs,
ModalityConfigs,
)

@@ -77,6 +78,9 @@ def test_map_csv_row_to_job(self):
metadata_dir=None,
metadata_dir_force=False,
force_cloud_sync=False,
codeocean_configs=CodeOceanPipelineMonitorConfigs(
job_type="custom"
),
),
BasicUploadJobConfigs(
project_name="Behavior Platform",
@@ -99,7 +103,6 @@
force_cloud_sync=False,
),
]

self.assertEqual(expected_jobs, jobs)

