diff --git a/src/aind_data_transfer_service/models.py b/src/aind_data_transfer_service/models.py
index 36d4a2d..ebf9d04 100644
--- a/src/aind_data_transfer_service/models.py
+++ b/src/aind_data_transfer_service/models.py
@@ -2,9 +2,10 @@
import ast
from datetime import datetime, timedelta, timezone
-from typing import List, Optional
+from typing import List, Optional, Union
from pydantic import AwareDatetime, BaseModel, Field, field_validator
+from starlette.datastructures import QueryParams
class AirflowDagRun(BaseModel):
@@ -58,12 +59,93 @@ def validate_min_execution_date(cls, execution_date_gte: str):
return execution_date_gte
@classmethod
- def from_query_params(cls, query_params: dict):
+ def from_query_params(cls, query_params: QueryParams):
"""Maps the query parameters to the model"""
params = dict(query_params)
if "state" in params:
params["state"] = ast.literal_eval(params["state"])
- return cls(**params)
+ return cls.model_validate(params)
+
+
+class AirflowDagRunRequestParameters(BaseModel):
+ """Model for parameters when requesting info from dag_run endpoint"""
+
+ dag_run_id: str = Field(..., min_length=1)
+
+ @classmethod
+ def from_query_params(cls, query_params: QueryParams):
+ """Maps the query parameters to the model"""
+ params = dict(query_params)
+ return cls.model_validate(params)
+
+
+class AirflowTaskInstancesRequestParameters(BaseModel):
+ """Model for parameters when requesting info from task_instances
+ endpoint"""
+
+ dag_run_id: str = Field(..., min_length=1)
+
+ @classmethod
+ def from_query_params(cls, query_params: QueryParams):
+ """Maps the query parameters to the model"""
+ params = dict(query_params)
+ return cls.model_validate(params)
+
+
+class AirflowTaskInstance(BaseModel):
+ """Data model for task_instance entry when requesting info from airflow"""
+
+ dag_id: Optional[str]
+ dag_run_id: Optional[str]
+ duration: Optional[Union[int, float]]
+ end_date: Optional[AwareDatetime]
+ execution_date: Optional[AwareDatetime]
+ executor_config: Optional[str]
+ hostname: Optional[str]
+ map_index: Optional[int]
+ max_tries: Optional[int]
+ note: Optional[str]
+ operator: Optional[str]
+ pid: Optional[int]
+ pool: Optional[str]
+ pool_slots: Optional[int]
+ priority_weight: Optional[int]
+ queue: Optional[str]
+ queued_when: Optional[AwareDatetime]
+ rendered_fields: Optional[dict]
+ sla_miss: Optional[dict]
+ start_date: Optional[AwareDatetime]
+ state: Optional[str]
+ task_id: Optional[str]
+ trigger: Optional[dict]
+ triggerer_job: Optional[dict]
+ try_number: Optional[int]
+ unixname: Optional[str]
+
+
+class AirflowTaskInstancesResponse(BaseModel):
+ """Data model for response when requesting info from task_instances
+ endpoint"""
+
+ task_instances: List[AirflowTaskInstance]
+ total_entries: int
+
+
+class AirflowTaskInstanceLogsRequestParameters(BaseModel):
+ """Model for parameters when requesting info from task_instance_logs
+ endpoint"""
+
+ # excluded fields are used to build the url
+ dag_run_id: str = Field(..., min_length=1, exclude=True)
+ task_id: str = Field(..., min_length=1, exclude=True)
+ try_number: int = Field(..., ge=0, exclude=True)
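+    # non-excluded fields (full_content) are passed to airflow as query params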
+ full_content: bool = True
+
+ @classmethod
+ def from_query_params(cls, query_params: QueryParams):
+ """Maps the query parameters to the model"""
+ params = dict(query_params)
+ return cls.model_validate(params)
class JobStatus(BaseModel):
@@ -95,3 +177,38 @@ def from_airflow_dag_run(cls, airflow_dag_run: AirflowDagRun):
def jinja_dict(self):
"""Map model to a dictionary that jinja can render"""
return self.model_dump(exclude_none=True)
+
+
+class JobTasks(BaseModel):
+ """Model for what is rendered to the user for each task."""
+
+ job_id: Optional[str] = Field(None)
+ task_id: Optional[str] = Field(None)
+ try_number: Optional[int] = Field(None)
+ task_state: Optional[str] = Field(None)
+ priority_weight: Optional[int] = Field(None)
+ map_index: Optional[int] = Field(None)
+ submit_time: Optional[datetime] = Field(None)
+ start_time: Optional[datetime] = Field(None)
+ end_time: Optional[datetime] = Field(None)
+ duration: Optional[Union[int, float]] = Field(None)
+ comment: Optional[str] = Field(None)
+
+ @classmethod
+ def from_airflow_task_instance(
+ cls, airflow_task_instance: AirflowTaskInstance
+ ):
+ """Maps the fields from the HpcJobStatusResponse to this model"""
+ return cls(
+ job_id=airflow_task_instance.dag_run_id,
+ task_id=airflow_task_instance.task_id,
+ try_number=airflow_task_instance.try_number,
+ task_state=airflow_task_instance.state,
+ priority_weight=airflow_task_instance.priority_weight,
+ map_index=airflow_task_instance.map_index,
+ submit_time=airflow_task_instance.execution_date,
+ start_time=airflow_task_instance.start_date,
+ end_time=airflow_task_instance.end_date,
+ duration=airflow_task_instance.duration,
+ comment=airflow_task_instance.note,
+ )
diff --git a/src/aind_data_transfer_service/server.py b/src/aind_data_transfer_service/server.py
index 19398b2..3a9c890 100644
--- a/src/aind_data_transfer_service/server.py
+++ b/src/aind_data_transfer_service/server.py
@@ -31,9 +31,14 @@
from aind_data_transfer_service.hpc.models import HpcJobSubmitSettings
from aind_data_transfer_service.models import (
AirflowDagRun,
+ AirflowDagRunRequestParameters,
AirflowDagRunsRequestParameters,
AirflowDagRunsResponse,
+ AirflowTaskInstanceLogsRequestParameters,
+ AirflowTaskInstancesRequestParameters,
+ AirflowTaskInstancesResponse,
JobStatus,
+ JobTasks,
)
template_directory = os.path.abspath(
@@ -394,15 +399,18 @@ async def get_job_status_list(request: Request):
url = os.getenv("AIND_AIRFLOW_SERVICE_JOBS_URL", "").strip("/")
get_one_job = request.query_params.get("dag_run_id") is not None
if get_one_job:
- dag_run_id = request.query_params["dag_run_id"]
+ params = AirflowDagRunRequestParameters.from_query_params(
+ request.query_params
+ )
+ url = f"{url}/{params.dag_run_id}"
else:
params = AirflowDagRunsRequestParameters.from_query_params(
request.query_params
)
- params_dict = json.loads(params.model_dump_json())
+ params_dict = json.loads(params.model_dump_json())
# Send request to Airflow to ListDagRuns or GetDagRun
response_jobs = requests.get(
- url=f"{url}/{dag_run_id}" if get_one_job else url,
+ url=url,
auth=(
os.getenv("AIND_AIRFLOW_SERVICE_USER"),
os.getenv("AIND_AIRFLOW_SERVICE_PASSWORD"),
@@ -427,9 +435,7 @@ async def get_job_status_list(request: Request):
]
message = "Retrieved job status list from airflow"
data = {
- "params": (
- {"dag_run_id": dag_run_id} if get_one_job else params_dict
- ),
+ "params": params_dict,
"total_entries": dag_runs.total_entries,
"job_status_list": [
json.loads(j.model_dump_json()) for j in job_status_list
@@ -438,9 +444,7 @@ async def get_job_status_list(request: Request):
else:
message = "Error retrieving job status list from airflow"
data = {
- "params": (
- {"dag_run_id": dag_run_id} if get_one_job else params_dict
- ),
+ "params": params_dict,
"errors": [response_jobs.json()],
}
except ValidationError as e:
@@ -462,6 +466,115 @@ async def get_job_status_list(request: Request):
)
+async def get_tasks_list(request: Request):
+ """Get list of tasks instances given a job id."""
+ try:
+ url = os.getenv("AIND_AIRFLOW_SERVICE_JOBS_URL", "").strip("/")
+ params = AirflowTaskInstancesRequestParameters.from_query_params(
+ request.query_params
+ )
+ params_dict = json.loads(params.model_dump_json())
+ response_tasks = requests.get(
+ url=f"{url}/{params.dag_run_id}/taskInstances",
+ auth=(
+ os.getenv("AIND_AIRFLOW_SERVICE_USER"),
+ os.getenv("AIND_AIRFLOW_SERVICE_PASSWORD"),
+ ),
+ )
+ status_code = response_tasks.status_code
+ if response_tasks.status_code == 200:
+ task_instances = AirflowTaskInstancesResponse.model_validate_json(
+ json.dumps(response_tasks.json())
+ )
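+            # sort by descending priority_weight (roughly the order tasks run
+            # in), breaking ties with map_index for mapped tasks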
+ job_tasks_list = sorted(
+ [
+ JobTasks.from_airflow_task_instance(t)
+ for t in task_instances.task_instances
+ ],
+ key=lambda t: (-t.priority_weight, t.map_index),
+ )
+ message = "Retrieved job tasks list from airflow"
+ data = {
+ "params": params_dict,
+ "total_entries": task_instances.total_entries,
+ "job_tasks_list": [
+ json.loads(t.model_dump_json()) for t in job_tasks_list
+ ],
+ }
+ else:
+ message = "Error retrieving job tasks list from airflow"
+ data = {
+ "params": params_dict,
+ "errors": [response_tasks.json()],
+ }
+ except ValidationError as e:
+ logging.error(e)
+ status_code = 406
+ message = "Error validating request parameters"
+ data = {"errors": json.loads(e.json())}
+ except Exception as e:
+ logging.error(e)
+ status_code = 500
+ message = "Unable to retrieve job tasks list from airflow"
+ data = {"errors": [f"{e.__class__.__name__}{e.args}"]}
+ return JSONResponse(
+ status_code=status_code,
+ content={
+ "message": message,
+ "data": data,
+ },
+ )
+
+
+async def get_task_logs(request: Request):
+ """Get task logs given a job id, task id, and task try number."""
+ try:
+ url = os.getenv("AIND_AIRFLOW_SERVICE_JOBS_URL", "").strip("/")
+ params = AirflowTaskInstanceLogsRequestParameters.from_query_params(
+ request.query_params
+ )
+ params_dict = json.loads(params.model_dump_json())
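+        # params_dict contains only the non-excluded fields (full_content) to
+        # forward to airflow; params_full keeps all fields for the response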
+ params_full = dict(params)
+ response_logs = requests.get(
+ url=(
+ f"{url}/{params.dag_run_id}/taskInstances/{params.task_id}"
+ f"/logs/{params.try_number}"
+ ),
+ auth=(
+ os.getenv("AIND_AIRFLOW_SERVICE_USER"),
+ os.getenv("AIND_AIRFLOW_SERVICE_PASSWORD"),
+ ),
+ params=params_dict,
+ )
+ status_code = response_logs.status_code
+ if response_logs.status_code == 200:
+ message = "Retrieved task logs from airflow"
+ data = {"params": params_full, "logs": response_logs.text}
+ else:
+ message = "Error retrieving task logs from airflow"
+ data = {
+ "params": params_full,
+ "errors": [response_logs.json()],
+ }
+ except ValidationError as e:
+ logging.error(e)
+ status_code = 406
+ message = "Error validating request parameters"
+ data = {"errors": json.loads(e.json())}
+ except Exception as e:
+ logging.error(e)
+ status_code = 500
+ message = "Unable to retrieve task logs from airflow"
+ data = {"errors": [f"{e.__class__.__name__}{e.args}"]}
+ return JSONResponse(
+ status_code=status_code,
+ content={
+ "message": message,
+ "data": data,
+ },
+ )
+
+
async def index(request: Request):
"""GET|POST /: form handler"""
return templates.TemplateResponse(
@@ -500,6 +613,45 @@ async def job_status_table(request: Request):
)
+async def job_tasks_table(request: Request):
+ """Get Job Tasks table given a job id"""
+ response_tasks = await get_tasks_list(request)
+ response_tasks_json = json.loads(response_tasks.body)
+ data = response_tasks_json.get("data")
+ return templates.TemplateResponse(
+ name="job_tasks_table.html",
+ context=(
+ {
+ "request": request,
+ "status_code": response_tasks.status_code,
+ "message": response_tasks_json.get("message"),
+ "errors": data.get("errors", []),
+ "total_entries": data.get("total_entries", 0),
+ "job_tasks_list": data.get("job_tasks_list", []),
+ }
+ ),
+ )
+
+
+async def task_logs(request: Request):
+ """Get task logs given a job id, task id, and task try number."""
+ response_tasks = await get_task_logs(request)
+ response_tasks_json = json.loads(response_tasks.body)
+ data = response_tasks_json.get("data")
+ return templates.TemplateResponse(
+ name="task_logs.html",
+ context=(
+ {
+ "request": request,
+ "status_code": response_tasks.status_code,
+ "message": response_tasks_json.get("message"),
+ "errors": data.get("errors", []),
+ "logs": data.get("logs"),
+ }
+ ),
+ )
+
+
async def jobs(request: Request):
"""Get Job Status page with pagination"""
default_limit = AirflowDagRunsRequestParameters.model_fields[
@@ -571,8 +723,12 @@ async def download_job_template(_: Request):
endpoint=get_job_status_list,
methods=["GET"],
),
+ Route("/api/v1/get_tasks_list", endpoint=get_tasks_list, methods=["GET"]),
+ Route("/api/v1/get_task_logs", endpoint=get_task_logs, methods=["GET"]),
Route("/jobs", endpoint=jobs, methods=["GET"]),
Route("/job_status_table", endpoint=job_status_table, methods=["GET"]),
+ Route("/job_tasks_table", endpoint=job_tasks_table, methods=["GET"]),
+ Route("/task_logs", endpoint=task_logs, methods=["GET"]),
Route(
"/api/job_upload_template",
endpoint=download_job_template,
diff --git a/src/aind_data_transfer_service/templates/job_status_table.html b/src/aind_data_transfer_service/templates/job_status_table.html
index b283c7f..b32f905 100644
--- a/src/aind_data_transfer_service/templates/job_status_table.html
+++ b/src/aind_data_transfer_service/templates/job_status_table.html
@@ -4,8 +4,14 @@
@@ -82,6 +120,40 @@
{{ message }}
el.innerText = utcTime.format('YYYY-MM-DD h:mm:ss a'); // Write the local time back to the element
};
});
+ var tasksModal = document.getElementById('tasks-modal');
+ tasksModal.addEventListener('show.bs.modal', function (event) {
+ var sourceData = event.relatedTarget?.dataset;
+ updateJobTasksModal(sourceData?.jobId, sourceData?.jobName, sourceData?.jobState);
+ })
+ tasksModal.addEventListener('hidden.bs.modal', function (event) {
+ updateJobTasksModal(null, null, null);
+ })
+ }
+ function updateJobTasksModal(jobId, jobName, jobState) {
+ // Update the modal header with the job id and name
+ document.getElementById('modal-title-job-id').textContent = jobId;
+ document.getElementById('modal-title-job-name').textContent = jobName;
+ var modalTitleJobState = document.getElementById('modal-title-job-state');
+ modalTitleJobState.textContent = jobState;
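+    // color the state badge to match the job state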
+ if (jobState) {
+ modalTitleJobState.classList.add(
+ jobState === 'success' ? 'bg-success'
+ : jobState === 'failed' ? 'bg-danger'
+ : jobState === 'running' ? 'bg-info'
+ : 'bg-secondary'
+ );
+ } else {
+ modalTitleJobState.classList.value = 'badge';
+ }
+ // Update the iframe src with the job id
+ var tasksIframe = document.getElementById('tasks-iframe');
+ if (jobId) {
+ var url = new URL("{{ url_for('job_tasks_table') }}");
+ url.searchParams.append('dag_run_id', jobId);
+ tasksIframe.src = url;
+ } else {
+ tasksIframe.src = "";
+ }
}
diff --git a/src/aind_data_transfer_service/templates/job_tasks_table.html b/src/aind_data_transfer_service/templates/job_tasks_table.html
new file mode 100644
index 0000000..ec08f8f
--- /dev/null
+++ b/src/aind_data_transfer_service/templates/job_tasks_table.html
@@ -0,0 +1,136 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+      <tr>
+        <th>Task ID</th>
+        <th>Try Number</th>
+        <th>Status</th>
+        <th>Submit Time</th>
+        <th>Start Time</th>
+        <th>End Time</th>
+        <th>Duration</th>
+        <th>Logs</th>
+      </tr>
+      {% for job_task in job_tasks_list %}
+      <tr>
+        <td>{{job_task.task_id}}</td>
+        <td>{{job_task.try_number}}</td>
+        <td>{{job_task.task_state}}</td>
+        <td>{{job_task.submit_time}}</td>
+        <td>{{job_task.start_time}}</td>
+        <td>{{job_task.end_time}}</td>
+        <td>{{job_task.duration}}</td>
+        <td>
+          {% if job_task.try_number > 0 %}
+
+          {% endif %}
+        </td>
+      </tr>
+      {% endfor %}
+
+
+
+
+ {% if status_code != 200 %}
+
+
+        {{ message }}
+
+ {% for error in errors %}
+ {% if error is string %}{{ error }}
+ {% elif error is mapping %}
+ {% for key, value in error.items() %}
+ {{ key }}: {{ value }}
+ {% endfor %}
+ {% endif %}
+ {% endfor %}
+
+
+ {% endif %}
+
+
+
+
\ No newline at end of file
diff --git a/src/aind_data_transfer_service/templates/task_logs.html b/src/aind_data_transfer_service/templates/task_logs.html
new file mode 100644
index 0000000..4fd5563
--- /dev/null
+++ b/src/aind_data_transfer_service/templates/task_logs.html
@@ -0,0 +1,31 @@
+
+
+
+
+
+
+
+
+ {% if status_code == 200 %}
+    <pre>{{ logs }}</pre>
+ {% else %}
+
+
+
+        {{ message }}
+
+ {% for error in errors %}
+ {% if error is string %}{{ error }}
+ {% elif error is mapping %}
+ {% for key, value in error.items() %}
+ {{ key }}: {{ value }}
+ {% endfor %}
+ {% endif %}
+ {% endfor %}
+
+
+ {% endif %}
+
+
\ No newline at end of file
diff --git a/tests/resources/airflow_task_instances_response.json b/tests/resources/airflow_task_instances_response.json
new file mode 100644
index 0000000..4cc57af
--- /dev/null
+++ b/tests/resources/airflow_task_instances_response.json
@@ -0,0 +1,501 @@
+{
+ "task_instances": [
+ {
+ "dag_id": "transform_and_upload",
+ "dag_run_id": "manual__2024-08-21T16:16:54.302335+00:00",
+ "duration": 1.042997,
+ "end_date": "2024-08-21T16:17:27.278459+00:00",
+ "execution_date": "2024-08-21T16:16:54.302335+00:00",
+ "executor_config": "{}",
+ "hostname": "transform-and-upload-create-default-settings-msnfo8r2",
+ "map_index": -1,
+ "max_tries": 0,
+ "note": null,
+ "operator": "_PythonDecoratedOperator",
+ "pid": 20,
+ "pool": "default_pool",
+ "pool_slots": 1,
+ "priority_weight": 12,
+ "queue": "default",
+ "queued_when": "2024-08-21T16:17:11.816045+00:00",
+ "rendered_fields": {
+ "op_args": [],
+ "op_kwargs": {},
+ "templates_dict": null
+ },
+ "sla_miss": null,
+ "start_date": "2024-08-21T16:17:26.235462+00:00",
+ "state": "success",
+ "task_id": "create_default_settings",
+ "trigger": null,
+ "triggerer_job": null,
+ "try_number": 1,
+ "unixname": "airflow"
+ },
+ {
+ "dag_id": "transform_and_upload",
+ "dag_run_id": "manual__2024-08-21T16:16:54.302335+00:00",
+ "duration": 0.899175,
+ "end_date": "2024-08-21T16:17:11.720301+00:00",
+ "execution_date": "2024-08-21T16:16:54.302335+00:00",
+ "executor_config": "{}",
+ "hostname": "transform-and-upload-send-job-start-email-s1wxty2a",
+ "map_index": -1,
+ "max_tries": 0,
+ "note": null,
+ "operator": "_PythonDecoratedOperator",
+ "pid": 20,
+ "pool": "default_pool",
+ "pool_slots": 1,
+ "priority_weight": 13,
+ "queue": "default",
+ "queued_when": "2024-08-21T16:16:55.335522+00:00",
+ "rendered_fields": {
+ "op_args": [],
+ "op_kwargs": {},
+ "templates_dict": null
+ },
+ "sla_miss": null,
+ "start_date": "2024-08-21T16:17:10.821126+00:00",
+ "state": "success",
+ "task_id": "send_job_start_email",
+ "trigger": null,
+ "triggerer_job": null,
+ "try_number": 1,
+ "unixname": "airflow"
+ },
+ {
+ "dag_id": "transform_and_upload",
+ "dag_run_id": "manual__2024-08-21T16:16:54.302335+00:00",
+ "duration": 0.805628,
+ "end_date": "2024-08-21T16:18:00.491290+00:00",
+ "execution_date": "2024-08-21T16:16:54.302335+00:00",
+ "executor_config": "{}",
+ "hostname": "transform-and-upload-create-default-slurm-environment-1xph7dgd",
+ "map_index": -1,
+ "max_tries": 0,
+ "note": null,
+ "operator": "_PythonDecoratedOperator",
+ "pid": 20,
+ "pool": "default_pool",
+ "pool_slots": 1,
+ "priority_weight": 10,
+ "queue": "default",
+ "queued_when": "2024-08-21T16:17:44.511503+00:00",
+ "rendered_fields": {
+ "op_args": [],
+ "op_kwargs": {
+ "default_transfer_settings": "default_transfer_settings"
+ },
+ "templates_dict": null
+ },
+ "sla_miss": null,
+ "start_date": "2024-08-21T16:17:59.685662+00:00",
+ "state": "success",
+ "task_id": "create_default_slurm_environment",
+ "trigger": null,
+ "triggerer_job": null,
+ "try_number": 1,
+ "unixname": "airflow"
+ },
+ {
+ "dag_id": "transform_and_upload",
+ "dag_run_id": "manual__2024-08-21T16:16:54.302335+00:00",
+ "duration": 1.062627,
+ "end_date": "2024-08-21T16:17:44.463969+00:00",
+ "execution_date": "2024-08-21T16:16:54.302335+00:00",
+ "executor_config": "{}",
+ "hostname": "transform-and-upload-check-s3-folder-exist-jy7ufnu9",
+ "map_index": -1,
+ "max_tries": 0,
+ "note": null,
+ "operator": "_PythonDecoratedOperator",
+ "pid": 20,
+ "pool": "default_pool",
+ "pool_slots": 1,
+ "priority_weight": 11,
+ "queue": "default",
+ "queued_when": "2024-08-21T16:17:28.252817+00:00",
+ "rendered_fields": {
+ "op_args": [],
+ "op_kwargs": {
+ "default_transfer_settings": "default_transfer_settings"
+ },
+ "templates_dict": null
+ },
+ "sla_miss": null,
+ "start_date": "2024-08-21T16:17:43.401342+00:00",
+ "state": "success",
+ "task_id": "check_s3_folder_exist",
+ "trigger": null,
+ "triggerer_job": null,
+ "try_number": 1,
+ "unixname": "airflow"
+ },
+ {
+ "dag_id": "transform_and_upload",
+ "dag_run_id": "manual__2024-08-21T16:16:54.302335+00:00",
+ "duration": 61.686775,
+ "end_date": "2024-08-21T16:20:04.200273+00:00",
+ "execution_date": "2024-08-21T16:16:54.302335+00:00",
+ "executor_config": "{}",
+ "hostname": "transform-and-upload-create-folder-hxbnqe9h",
+ "map_index": -1,
+ "max_tries": 0,
+ "note": null,
+ "operator": "_PythonDecoratedOperator",
+ "pid": 20,
+ "pool": "default_pool",
+ "pool_slots": 1,
+ "priority_weight": 8,
+ "queue": "default",
+ "queued_when": "2024-08-21T16:18:47.513924+00:00",
+ "rendered_fields": {
+ "op_args": [],
+ "op_kwargs": {
+ "default_transfer_settings": "default_transfer_settings",
+ "slurm_job_props": "slurm_job_props"
+ },
+ "templates_dict": null
+ },
+ "sla_miss": null,
+ "start_date": "2024-08-21T16:19:02.513498+00:00",
+ "state": "success",
+ "task_id": "create_folder",
+ "trigger": null,
+ "triggerer_job": null,
+ "try_number": 1,
+ "unixname": "airflow"
+ },
+ {
+ "dag_id": "transform_and_upload",
+ "dag_run_id": "manual__2024-08-21T16:16:54.302335+00:00",
+ "duration": 31.563301,
+ "end_date": "2024-08-21T16:18:47.027590+00:00",
+ "execution_date": "2024-08-21T16:16:54.302335+00:00",
+ "executor_config": "{}",
+ "hostname": "transform-and-upload-check-source-folders-exist-xfz0r3qi",
+ "map_index": -1,
+ "max_tries": 0,
+ "note": null,
+ "operator": "_PythonDecoratedOperator",
+ "pid": 20,
+ "pool": "default_pool",
+ "pool_slots": 1,
+ "priority_weight": 9,
+ "queue": "default",
+ "queued_when": "2024-08-21T16:18:00.998977+00:00",
+ "rendered_fields": {
+ "op_args": [],
+ "op_kwargs": {
+ "slurm_job_props": "slurm_job_props"
+ },
+ "templates_dict": null
+ },
+ "sla_miss": null,
+ "start_date": "2024-08-21T16:18:15.464289+00:00",
+ "state": "success",
+ "task_id": "check_source_folders_exist",
+ "trigger": null,
+ "triggerer_job": null,
+ "try_number": 1,
+ "unixname": "airflow"
+ },
+ {
+ "dag_id": "transform_and_upload",
+ "dag_run_id": "manual__2024-08-21T16:16:54.302335+00:00",
+ "duration": 0.941469,
+ "end_date": "2024-08-21T16:31:46.502387+00:00",
+ "execution_date": "2024-08-21T16:16:54.302335+00:00",
+ "executor_config": "{}",
+ "hostname": "transform-and-upload-send-job-end-email-aby2oyt5",
+ "map_index": -1,
+ "max_tries": 0,
+ "note": null,
+ "operator": "_PythonDecoratedOperator",
+ "pid": 20,
+ "pool": "default_pool",
+ "pool_slots": 1,
+ "priority_weight": 1,
+ "queue": "default",
+ "queued_when": "2024-08-21T16:31:30.258599+00:00",
+ "rendered_fields": {
+ "op_args": [],
+ "op_kwargs": {},
+ "templates_dict": null
+ },
+ "sla_miss": null,
+ "start_date": "2024-08-21T16:31:45.560918+00:00",
+ "state": "success",
+ "task_id": "send_job_end_email",
+ "trigger": null,
+ "triggerer_job": null,
+ "try_number": 1,
+ "unixname": "airflow"
+ },
+ {
+ "dag_id": "transform_and_upload",
+ "dag_run_id": "manual__2024-08-21T16:16:54.302335+00:00",
+ "duration": 0.755632,
+ "end_date": "2024-08-21T16:17:13.886610+00:00",
+ "execution_date": "2024-08-21T16:16:54.302335+00:00",
+ "executor_config": "{}",
+ "hostname": "transform-and-upload-make-modality-list-3epc13xm",
+ "map_index": -1,
+ "max_tries": 0,
+ "note": null,
+ "operator": "_PythonDecoratedOperator",
+ "pid": 20,
+ "pool": "default_pool",
+ "pool_slots": 1,
+ "priority_weight": 7,
+ "queue": "default",
+ "queued_when": "2024-08-21T16:16:55.335522+00:00",
+ "rendered_fields": {
+ "op_args": [],
+ "op_kwargs": {},
+ "templates_dict": null
+ },
+ "sla_miss": null,
+ "start_date": "2024-08-21T16:17:13.130978+00:00",
+ "state": "success",
+ "task_id": "make_modality_list",
+ "trigger": null,
+ "triggerer_job": null,
+ "try_number": 1,
+ "unixname": "airflow"
+ },
+ {
+ "dag_id": "transform_and_upload",
+ "dag_run_id": "manual__2024-08-21T16:16:54.302335+00:00",
+ "duration": 121.900177,
+ "end_date": "2024-08-21T16:26:54.539535+00:00",
+ "execution_date": "2024-08-21T16:16:54.302335+00:00",
+ "executor_config": "{}",
+ "hostname": "transform-and-upload-gather-final-metadata-5cn2eudo",
+ "map_index": -1,
+ "max_tries": 0,
+ "note": null,
+ "operator": "_PythonDecoratedOperator",
+ "pid": 20,
+ "pool": "default_pool",
+ "pool_slots": 1,
+ "priority_weight": 5,
+ "queue": "default",
+ "queued_when": "2024-08-21T16:24:38.460800+00:00",
+ "rendered_fields": {
+ "op_args": [],
+ "op_kwargs": "op_kwargs",
+ "templates_dict": null
+ },
+ "sla_miss": null,
+ "start_date": "2024-08-21T16:24:52.639358+00:00",
+ "state": "success",
+ "task_id": "gather_final_metadata",
+ "trigger": null,
+ "triggerer_job": null,
+ "try_number": 1,
+ "unixname": "airflow"
+ },
+ {
+ "dag_id": "transform_and_upload",
+ "dag_run_id": "manual__2024-08-21T16:16:54.302335+00:00",
+ "duration": 121.718611,
+ "end_date": "2024-08-21T16:22:20.807546+00:00",
+ "execution_date": "2024-08-21T16:16:54.302335+00:00",
+ "executor_config": "{}",
+ "hostname": "transform-and-upload-gather-preliminary-metadata-2jggqh8n",
+ "map_index": -1,
+ "max_tries": 0,
+ "note": null,
+ "operator": "_PythonDecoratedOperator",
+ "pid": 20,
+ "pool": "default_pool",
+ "pool_slots": 1,
+ "priority_weight": 7,
+ "queue": "default",
+ "queued_when": "2024-08-21T16:20:04.384178+00:00",
+ "rendered_fields": {
+ "op_args": [],
+ "op_kwargs": {
+ "slurm_job_props": "slurm_job_props",
+ "staging_folder": "/allen/aind/stage/svc_aind_airflow/dev/ISI_721679_job_1724257080_fc7ee"
+ },
+ "templates_dict": null
+ },
+ "sla_miss": null,
+ "start_date": "2024-08-21T16:20:19.088935+00:00",
+ "state": "success",
+ "task_id": "gather_preliminary_metadata",
+ "trigger": null,
+ "triggerer_job": null,
+ "try_number": 1,
+ "unixname": "airflow"
+ },
+ {
+ "dag_id": "transform_and_upload",
+ "dag_run_id": "manual__2024-08-21T16:16:54.302335+00:00",
+ "duration": 121.897442,
+ "end_date": "2024-08-21T16:24:38.400648+00:00",
+ "execution_date": "2024-08-21T16:16:54.302335+00:00",
+ "executor_config": "{}",
+ "hostname": "transform-and-upload-compress-data-giexmirv",
+ "map_index": 0,
+ "max_tries": 0,
+ "note": null,
+ "operator": "_PythonDecoratedOperator",
+ "pid": 20,
+ "pool": "default_pool",
+ "pool_slots": 1,
+ "priority_weight": 6,
+ "queue": "default",
+ "queued_when": "2024-08-21T16:22:21.400173+00:00",
+ "rendered_fields": {
+ "op_args": [],
+ "op_kwargs": {
+ "default_transfer_settings": "default_transfer_settings",
+ "modality_conf": {
+ "compress_raw_data": true,
+ "extra_configs": null,
+ "modality": {
+ "abbreviation": "ISI",
+ "name": "Intrinsic signal imaging"
+ },
+ "output_folder_name": "ISI",
+ "platform": "ISI",
+ "s3_bucket": "private",
+ "s3_prefix": "ISI_721679_2024-08-21_00-00-00",
+ "slurm_settings": null,
+ "source": "/allen/aind/scratch/svc_aind_upload/test_data_sets/tmp_data"
+ },
+ "prelim_metadata": "Success",
+ "slurm_job_props": "slurm_job_props",
+ "staging_folder": "/allen/aind/stage/svc_aind_airflow/dev/ISI_721679_job_1724257080_fc7ee"
+ },
+ "templates_dict": null
+ },
+ "sla_miss": null,
+ "start_date": "2024-08-21T16:22:36.503206+00:00",
+ "state": "success",
+ "task_id": "compress_data",
+ "trigger": null,
+ "triggerer_job": null,
+ "try_number": 1,
+ "unixname": "airflow"
+ },
+ {
+ "dag_id": "transform_and_upload",
+ "dag_run_id": "manual__2024-08-21T16:16:54.302335+00:00",
+ "duration": 10.542992,
+ "end_date": "2024-08-21T16:29:39.612352+00:00",
+ "execution_date": "2024-08-21T16:16:54.302335+00:00",
+ "executor_config": "{}",
+ "hostname": "transform-and-upload-send-codeocean-request-vxltb79h",
+ "map_index": -1,
+ "max_tries": 0,
+ "note": null,
+ "operator": "_PythonVirtualenvDecoratedOperator",
+ "pid": 20,
+ "pool": "default_pool",
+ "pool_slots": 1,
+ "priority_weight": 2,
+ "queue": "default",
+ "queued_when": "2024-08-21T16:29:12.187996+00:00",
+ "rendered_fields": {
+ "index_urls": null,
+ "op_args": [
+ {
+ "context_as_str": "{\"acq_datetime\": \"2024-08-21T00:00:00\", \"email_notification_types\": [\"fail\"], \"force_cloud_sync\": false, \"input_data_mount\": null, \"metadata_dir\": null, \"metadata_dir_force\": false, \"modalities\": [{\"compress_raw_data\": true, \"extra_configs\": null, \"modality\": {\"abbreviation\": \"ISI\", \"name\": \"Intrinsic signal imaging\"}, \"output_folder_name\": \"ISI\", \"slurm_settings\": null, \"source\": \"/allen/aind/scratch/svc_aind_upload/test_data_sets/tmp_data\"}], \"platform\": {\"abbreviation\": \"ISI\", \"name\": \"Intrinsic signal imaging platform\"}, \"process_capsule_id\": null, \"project_name\": \"MSMA Platform\", \"s3_bucket\": \"private\", \"s3_prefix\": \"ISI_721679_2024-08-21_00-00-00\", \"subject_id\": \"721679\", \"user_email\": null}",
+ "default_transfer_settings": "default_transfer_settings"
+ }
+ ],
+ "op_kwargs": {},
+ "requirements": [
+ "aind-codeocean-api==0.4.2"
+ ],
+ "templates_dict": null,
+ "venv_cache_path": null
+ },
+ "sla_miss": null,
+ "start_date": "2024-08-21T16:29:29.069360+00:00",
+ "state": "success",
+ "task_id": "send_codeocean_request",
+ "trigger": null,
+ "triggerer_job": null,
+ "try_number": 1,
+ "unixname": "airflow"
+ },
+ {
+ "dag_id": "transform_and_upload",
+ "dag_run_id": "manual__2024-08-21T16:16:54.302335+00:00",
+ "duration": 121.805605,
+ "end_date": "2024-08-21T16:31:29.653235+00:00",
+ "execution_date": "2024-08-21T16:16:54.302335+00:00",
+ "executor_config": "{}",
+ "hostname": "transform-and-upload-remove-folder-prhy7w2q",
+ "map_index": -1,
+ "max_tries": 0,
+ "note": null,
+ "operator": "_PythonDecoratedOperator",
+ "pid": 20,
+ "pool": "default_pool",
+ "pool_slots": 1,
+ "priority_weight": 2,
+ "queue": "default",
+ "queued_when": "2024-08-21T16:29:12.187996+00:00",
+ "rendered_fields": {
+ "op_args": [],
+ "op_kwargs": {
+ "slurm_job_props": "slurm_job_props",
+ "staging_folder": "/allen/aind/stage/svc_aind_airflow/dev/ISI_721679_job_1724257080_fc7ee"
+ },
+ "templates_dict": null
+ },
+ "sla_miss": null,
+ "start_date": "2024-08-21T16:29:27.847630+00:00",
+ "state": "success",
+ "task_id": "remove_folder",
+ "trigger": null,
+ "triggerer_job": null,
+ "try_number": 1,
+ "unixname": "airflow"
+ },
+ {
+ "dag_id": "transform_and_upload",
+ "dag_run_id": "manual__2024-08-21T16:16:54.302335+00:00",
+ "duration": 121.853576,
+ "end_date": "2024-08-21T16:29:11.984181+00:00",
+ "execution_date": "2024-08-21T16:16:54.302335+00:00",
+ "executor_config": "{}",
+ "hostname": "transform-and-upload-upload-data-to-s3-eg7g6zr2",
+ "map_index": -1,
+ "max_tries": 0,
+ "note": null,
+ "operator": "_PythonDecoratedOperator",
+ "pid": 19,
+ "pool": "default_pool",
+ "pool_slots": 1,
+ "priority_weight": 4,
+ "queue": "default",
+ "queued_when": "2024-08-21T16:26:55.339993+00:00",
+ "rendered_fields": {
+ "op_args": [],
+ "op_kwargs": {
+ "default_transfer_settings": "default_transfer_settings",
+ "slurm_job_props": "slurm_job_props",
+ "staging_folder": "/allen/aind/stage/svc_aind_airflow/dev/ISI_721679_job_1724257080_fc7ee"
+ },
+ "templates_dict": null
+ },
+ "sla_miss": null,
+ "start_date": "2024-08-21T16:27:10.130605+00:00",
+ "state": "success",
+ "task_id": "upload_data_to_s3",
+ "trigger": null,
+ "triggerer_job": null,
+ "try_number": 1,
+ "unixname": "airflow"
+ }
+ ],
+ "total_entries": 14
+}
\ No newline at end of file
diff --git a/tests/test_server.py b/tests/test_server.py
index eace5f5..9f3ff45 100644
--- a/tests/test_server.py
+++ b/tests/test_server.py
@@ -49,6 +49,9 @@
GET_DAG_RUN_RESPONSE = (
TEST_DIRECTORY / "resources" / "airflow_dag_run_response.json"
)
+LIST_TASK_INSTANCES_RESPONSE = (
+ TEST_DIRECTORY / "resources" / "airflow_task_instances_response.json"
+)
class TestServer(unittest.TestCase):
@@ -87,6 +90,9 @@ class TestServer(unittest.TestCase):
with open(GET_DAG_RUN_RESPONSE) as f:
get_dag_run_response = json.load(f)
+ with open(LIST_TASK_INSTANCES_RESPONSE) as f:
+ list_task_instances_response = json.load(f)
+
expected_job_configs = deepcopy(TestJobConfigs.expected_job_configs)
for config in expected_job_configs:
config.aws_param_store_name = None
@@ -756,6 +762,374 @@ def test_get_job_status_list_error(
)
mock_log_error.assert_called_once()
+ @patch.dict(os.environ, EXAMPLE_ENV_VAR1, clear=True)
+ @patch("requests.get")
+ def test_get_tasks_list_query_params(
+ self,
+ mock_get,
+ ):
+ """Tests get_tasks_list gets tasks from airflow using query_params."""
+ mock_task_instances_response = Response()
+ mock_task_instances_response.status_code = 200
+ mock_task_instances_response._content = json.dumps(
+ self.list_task_instances_response
+ ).encode("utf-8")
+ mock_get.return_value = mock_task_instances_response
+ expected_message = "Retrieved job tasks list from airflow"
+ expected_params = {
+ "dag_run_id": "mock_dag_run_id",
+ }
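+        # tasks should be sorted by descending priority_weight, then map_index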
+ expected_task_list = [
+ {
+ "job_id": "manual__2024-08-21T16:16:54.302335+00:00",
+ "task_id": "send_job_start_email",
+ "try_number": 1,
+ "task_state": "success",
+ "priority_weight": 13,
+ "map_index": -1,
+ "submit_time": "2024-08-21T16:16:54.302335Z",
+ "start_time": "2024-08-21T16:17:10.821126Z",
+ "end_time": "2024-08-21T16:17:11.720301Z",
+ "duration": 0.899175,
+ "comment": None,
+ },
+ {
+ "job_id": "manual__2024-08-21T16:16:54.302335+00:00",
+ "task_id": "create_default_settings",
+ "try_number": 1,
+ "task_state": "success",
+ "priority_weight": 12,
+ "map_index": -1,
+ "submit_time": "2024-08-21T16:16:54.302335Z",
+ "start_time": "2024-08-21T16:17:26.235462Z",
+ "end_time": "2024-08-21T16:17:27.278459Z",
+ "duration": 1.042997,
+ "comment": None,
+ },
+ {
+ "job_id": "manual__2024-08-21T16:16:54.302335+00:00",
+ "task_id": "check_s3_folder_exist",
+ "try_number": 1,
+ "task_state": "success",
+ "priority_weight": 11,
+ "map_index": -1,
+ "submit_time": "2024-08-21T16:16:54.302335Z",
+ "start_time": "2024-08-21T16:17:43.401342Z",
+ "end_time": "2024-08-21T16:17:44.463969Z",
+ "duration": 1.062627,
+ "comment": None,
+ },
+ {
+ "job_id": "manual__2024-08-21T16:16:54.302335+00:00",
+ "task_id": "create_default_slurm_environment",
+ "try_number": 1,
+ "task_state": "success",
+ "priority_weight": 10,
+ "map_index": -1,
+ "submit_time": "2024-08-21T16:16:54.302335Z",
+ "start_time": "2024-08-21T16:17:59.685662Z",
+ "end_time": "2024-08-21T16:18:00.491290Z",
+ "duration": 0.805628,
+ "comment": None,
+ },
+ {
+ "job_id": "manual__2024-08-21T16:16:54.302335+00:00",
+ "task_id": "check_source_folders_exist",
+ "try_number": 1,
+ "task_state": "success",
+ "priority_weight": 9,
+ "map_index": -1,
+ "submit_time": "2024-08-21T16:16:54.302335Z",
+ "start_time": "2024-08-21T16:18:15.464289Z",
+ "end_time": "2024-08-21T16:18:47.027590Z",
+ "duration": 31.563301,
+ "comment": None,
+ },
+ {
+ "job_id": "manual__2024-08-21T16:16:54.302335+00:00",
+ "task_id": "create_folder",
+ "try_number": 1,
+ "task_state": "success",
+ "priority_weight": 8,
+ "map_index": -1,
+ "submit_time": "2024-08-21T16:16:54.302335Z",
+ "start_time": "2024-08-21T16:19:02.513498Z",
+ "end_time": "2024-08-21T16:20:04.200273Z",
+ "duration": 61.686775,
+ "comment": None,
+ },
+ {
+ "job_id": "manual__2024-08-21T16:16:54.302335+00:00",
+ "task_id": "make_modality_list",
+ "try_number": 1,
+ "task_state": "success",
+ "priority_weight": 7,
+ "map_index": -1,
+ "submit_time": "2024-08-21T16:16:54.302335Z",
+ "start_time": "2024-08-21T16:17:13.130978Z",
+ "end_time": "2024-08-21T16:17:13.886610Z",
+ "duration": 0.755632,
+ "comment": None,
+ },
+ {
+ "job_id": "manual__2024-08-21T16:16:54.302335+00:00",
+ "task_id": "gather_preliminary_metadata",
+ "try_number": 1,
+ "task_state": "success",
+ "priority_weight": 7,
+ "map_index": -1,
+ "submit_time": "2024-08-21T16:16:54.302335Z",
+ "start_time": "2024-08-21T16:20:19.088935Z",
+ "end_time": "2024-08-21T16:22:20.807546Z",
+ "duration": 121.718611,
+ "comment": None,
+ },
+ {
+ "job_id": "manual__2024-08-21T16:16:54.302335+00:00",
+ "task_id": "compress_data",
+ "try_number": 1,
+ "task_state": "success",
+ "priority_weight": 6,
+ "map_index": 0,
+ "submit_time": "2024-08-21T16:16:54.302335Z",
+ "start_time": "2024-08-21T16:22:36.503206Z",
+ "end_time": "2024-08-21T16:24:38.400648Z",
+ "duration": 121.897442,
+ "comment": None,
+ },
+ {
+ "job_id": "manual__2024-08-21T16:16:54.302335+00:00",
+ "task_id": "gather_final_metadata",
+ "try_number": 1,
+ "task_state": "success",
+ "priority_weight": 5,
+ "map_index": -1,
+ "submit_time": "2024-08-21T16:16:54.302335Z",
+ "start_time": "2024-08-21T16:24:52.639358Z",
+ "end_time": "2024-08-21T16:26:54.539535Z",
+ "duration": 121.900177,
+ "comment": None,
+ },
+ {
+ "job_id": "manual__2024-08-21T16:16:54.302335+00:00",
+ "task_id": "upload_data_to_s3",
+ "try_number": 1,
+ "task_state": "success",
+ "priority_weight": 4,
+ "map_index": -1,
+ "submit_time": "2024-08-21T16:16:54.302335Z",
+ "start_time": "2024-08-21T16:27:10.130605Z",
+ "end_time": "2024-08-21T16:29:11.984181Z",
+ "duration": 121.853576,
+ "comment": None,
+ },
+ {
+ "job_id": "manual__2024-08-21T16:16:54.302335+00:00",
+ "task_id": "send_codeocean_request",
+ "try_number": 1,
+ "task_state": "success",
+ "priority_weight": 2,
+ "map_index": -1,
+ "submit_time": "2024-08-21T16:16:54.302335Z",
+ "start_time": "2024-08-21T16:29:29.069360Z",
+ "end_time": "2024-08-21T16:29:39.612352Z",
+ "duration": 10.542992,
+ "comment": None,
+ },
+ {
+ "job_id": "manual__2024-08-21T16:16:54.302335+00:00",
+ "task_id": "remove_folder",
+ "try_number": 1,
+ "task_state": "success",
+ "priority_weight": 2,
+ "map_index": -1,
+ "submit_time": "2024-08-21T16:16:54.302335Z",
+ "start_time": "2024-08-21T16:29:27.847630Z",
+ "end_time": "2024-08-21T16:31:29.653235Z",
+ "duration": 121.805605,
+ "comment": None,
+ },
+ {
+ "job_id": "manual__2024-08-21T16:16:54.302335+00:00",
+ "task_id": "send_job_end_email",
+ "try_number": 1,
+ "task_state": "success",
+ "priority_weight": 1,
+ "map_index": -1,
+ "submit_time": "2024-08-21T16:16:54.302335Z",
+ "start_time": "2024-08-21T16:31:45.560918Z",
+ "end_time": "2024-08-21T16:31:46.502387Z",
+ "duration": 0.941469,
+ "comment": None,
+ },
+ ]
+ with TestClient(app) as client:
+ response = client.get(
+ "/api/v1/get_tasks_list",
+ params={
+ "dag_run_id": "mock_dag_run_id",
+ },
+ )
+ response_content = response.json()
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(
+ response_content,
+ {
+ "message": expected_message,
+ "data": {
+ "params": expected_params,
+ "total_entries": self.list_task_instances_response[
+ "total_entries"
+ ],
+ "job_tasks_list": expected_task_list,
+ },
+ },
+ )
+
+ @patch.dict(os.environ, EXAMPLE_ENV_VAR1, clear=True)
+ @patch("requests.get")
+ @patch("logging.error")
+ def test_get_tasks_list_validation_error(
+ self,
+ mock_log_error: MagicMock,
+ mock_get,
+ ):
+ """Tests get_tasks_list when query_params are invalid."""
+ invalid_params = {
+ "job_id": "mock_dag_run_id",
+ }
+ with TestClient(app) as client:
+ response = client.get(
+ "/api/v1/get_tasks_list", params=invalid_params
+ )
+ response_content = response.json()
+ self.assertEqual(response.status_code, 406)
+ self.assertEqual(
+ response_content["message"],
+ "Error validating request parameters",
+ )
+ mock_log_error.assert_called()
+ mock_get.assert_not_called()
+
+ @patch.dict(os.environ, EXAMPLE_ENV_VAR1, clear=True)
+ @patch("logging.error")
+ @patch("requests.get")
+ def test_get_tasks_list_error(
+ self,
+ mock_get: MagicMock,
+ mock_log_error: MagicMock,
+ ):
+ """Tests get_tasks_list when there is an error sending request."""
+ mock_get.side_effect = Exception("mock error")
+ with TestClient(app) as client:
+ response = client.get(
+ "/api/v1/get_tasks_list",
+ params={
+ "dag_run_id": "mock_dag_run_id",
+ },
+ )
+ response_content = response.json()
+ self.assertEqual(response.status_code, 500)
+ self.assertEqual(
+ response_content["message"],
+ "Unable to retrieve job tasks list from airflow",
+ )
+ mock_log_error.assert_called_once()
+
+ @patch.dict(os.environ, EXAMPLE_ENV_VAR1, clear=True)
+ @patch("requests.get")
+ def test_get_task_logs_query_params(
+ self,
+ mock_get,
+ ):
+ """Tests get_task_logs gets logs from airflow using query_params."""
+ mock_logs_response = Response()
+ mock_logs_response.status_code = 200
+ mock_logs_response._content = b"mock logs"
+ mock_get.return_value = mock_logs_response
+ expected_message = "Retrieved task logs from airflow"
+ expected_default_params = {
+ "dag_run_id": "mock_dag_run_id",
+ "task_id": "mock_task_id",
+ "try_number": 1,
+ "full_content": True,
+ }
+ with TestClient(app) as client:
+ response = client.get(
+ "/api/v1/get_task_logs",
+ params={
+ "dag_run_id": "mock_dag_run_id",
+ "task_id": "mock_task_id",
+ "try_number": 1,
+ },
+ )
+ response_content = response.json()
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(
+ response_content,
+ {
+ "message": expected_message,
+ "data": {
+ "params": expected_default_params,
+ "logs": "mock logs",
+ },
+ },
+ )
+
+ @patch.dict(os.environ, EXAMPLE_ENV_VAR1, clear=True)
+ @patch("requests.get")
+ @patch("logging.error")
+ def test_get_task_logs_validation_error(
+ self,
+ mock_log_error: MagicMock,
+ mock_get,
+ ):
+ """Tests get_task_logs when query_params are invalid."""
+ invalid_params = {
+ "dag_run_id": "mock_dag_run_id",
+ "task_id": "mock_task_id",
+ "try_number": "invalid",
+ }
+ with TestClient(app) as client:
+ response = client.get(
+ "/api/v1/get_task_logs", params=invalid_params
+ )
+ response_content = response.json()
+ self.assertEqual(response.status_code, 406)
+ self.assertEqual(
+ response_content["message"],
+ "Error validating request parameters",
+ )
+ mock_log_error.assert_called()
+ mock_get.assert_not_called()
+
+ @patch.dict(os.environ, EXAMPLE_ENV_VAR1, clear=True)
+ @patch("logging.error")
+ @patch("requests.get")
+ def test_get_task_logs_error(
+ self,
+ mock_get: MagicMock,
+ mock_log_error: MagicMock,
+ ):
+ """Tests get_task_logs when there is an error sending request."""
+ mock_get.side_effect = Exception("mock error")
+ with TestClient(app) as client:
+ response = client.get(
+ "/api/v1/get_task_logs",
+ params={
+ "dag_run_id": "mock_dag_run_id",
+ "task_id": "mock_task_id",
+ "try_number": 1,
+ },
+ )
+ response_content = response.json()
+ self.assertEqual(response.status_code, 500)
+ self.assertEqual(
+ response_content["message"],
+ "Unable to retrieve task logs from airflow",
+ )
+ mock_log_error.assert_called_once()
+
@patch.dict(os.environ, EXAMPLE_ENV_VAR1, clear=True)
def test_index(self):
"""Tests that form renders at startup as expected."""
@@ -805,6 +1179,87 @@ def test_jobs_table_failure(self, mock_get: MagicMock):
)
self.assertIn("test airflow error", response.text)
+ @patch("requests.get")
+ def test_tasks_table_success(self, mock_get: MagicMock):
+ """Tests that job tasks table renders as expected."""
+ mock_response = Response()
+ mock_response.status_code = 200
+ mock_response._content = json.dumps(
+ self.list_task_instances_response
+ ).encode("utf-8")
+ mock_get.return_value = mock_response
+ with TestClient(app) as client:
+ response = client.get(
+ "/job_tasks_table", params={"dag_run_id": "dag_run_id"}
+ )
+ self.assertEqual(response.status_code, 200)
+ self.assertIn("Task ID", response.text)
+ self.assertIn("Try Number", response.text)
+
+ @patch.dict(os.environ, EXAMPLE_ENV_VAR1, clear=True)
+ @patch("requests.get")
+ def test_tasks_table_failure(self, mock_get: MagicMock):
+ """Tests that job status table renders error message from airflow."""
+ mock_response = Response()
+ mock_response.status_code = 500
+ mock_response._content = json.dumps(
+ {"message": "test airflow error"}
+ ).encode("utf-8")
+ mock_get.return_value = mock_response
+ with TestClient(app) as client:
+ response = client.get(
+ "/job_tasks_table", params={"dag_run_id": "dag_run_id"}
+ )
+ self.assertEqual(response.status_code, 200)
+ self.assertIn("Task ID", response.text)
+ self.assertIn("Try Number", response.text)
+ self.assertIn(
+ "Error retrieving job tasks list from airflow", response.text
+ )
+ self.assertIn("test airflow error", response.text)
+
+ @patch("requests.get")
+ def test_logs_success(self, mock_get: MagicMock):
+ """Tests that task logs page renders as expected."""
+ mock_response = Response()
+ mock_response.status_code = 200
+ mock_response._content = b"mock log content"
+ mock_get.return_value = mock_response
+ with TestClient(app) as client:
+ response = client.get(
+ "/task_logs",
+ params={
+ "dag_run_id": "dag_run_id",
+ "task_id": "task_id",
+ "try_number": 1,
+ },
+ )
+ self.assertEqual(response.status_code, 200)
+ self.assertIn("mock log content", response.text)
+
+ @patch.dict(os.environ, EXAMPLE_ENV_VAR1, clear=True)
+ @patch("requests.get")
+ def test_logs_failure(self, mock_get: MagicMock):
+ """Tests that task logs page renders error message from airflow."""
+ mock_response = Response()
+ mock_response.status_code = 500
+ mock_response._content = json.dumps(
+ {"message": "test airflow error"}
+ ).encode("utf-8")
+ mock_get.return_value = mock_response
+ with TestClient(app) as client:
+ response = client.get(
+ "/task_logs",
+ params={
+ "dag_run_id": "dag_run_id",
+ "task_id": "task_id",
+ "try_number": 1,
+ },
+ )
+ self.assertEqual(response.status_code, 200)
+ self.assertIn("Error retrieving task logs from airflow", response.text)
+ self.assertIn("test airflow error", response.text)
+
def test_download_job_template(self):
"""Tests that job template downloads as xlsx file."""