feat(airflow): retry callback, support ExternalTaskSensor subclasses #8514

Merged
@@ -98,7 +98,7 @@ def _get_dependencies(
    # It is possible to tie an external sensor to a DAG if external_task_id is omitted, but currently we can't tie
    # a jobflow to another jobflow.
    external_task_upstreams = []
-    if task.task_type == "ExternalTaskSensor":
+    if isinstance(task, ExternalTaskSensor):
        task = cast(ExternalTaskSensor, task)
        if hasattr(task, "external_task_id") and task.external_task_id is not None:
            external_task_upstreams = [
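Why the isinstance change matters: task_type is just the operator's class name, so the old string comparison silently skipped any subclass of ExternalTaskSensor and never captured its cross-DAG upstream. A minimal sketch of the difference (the subclass and task names are hypothetical):

from airflow.sensors.external_task import ExternalTaskSensor

class MyExternalTaskSensor(ExternalTaskSensor):
    """Hypothetical user-defined subclass."""

sensor = MyExternalTaskSensor(
    task_id="wait_for_upstream",
    external_dag_id="upstream_dag",
    external_task_id="final_task",
)

print(sensor.task_type)                        # "MyExternalTaskSensor" -- the old string check misses it
print(isinstance(sensor, ExternalTaskSensor))  # True -- the new check matches subclasses too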
@@ -155,6 +155,8 @@ def generate_dataflow(
"_concurrency",
# "_default_view",
"catchup",
"description",
"doc_md",
"fileloc",
"is_paused_upon_creation",
"start_date",
@@ -431,6 +433,9 @@ def run_datajob(
job_property_bag["operator"] = str(ti.operator)
job_property_bag["priority_weight"] = str(ti.priority_weight)
job_property_bag["log_url"] = ti.log_url
job_property_bag["orchestrator"] = "airflow"
job_property_bag["dag_id"] = str(dag.dag_id)
job_property_bag["task_id"] = str(ti.task_id)
dpi.properties.update(job_property_bag)
dpi.url = ti.log_url

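The three new entries make each emitted DataProcessInstance self-describing: consumers no longer have to parse the log URL to recover the source DAG and task. Roughly, the property bag now looks like this (values are illustrative):

job_property_bag = {
    "operator": "PythonOperator",
    "priority_weight": "1",
    "log_url": "https://airflow.example.com/log?dag_id=orders_etl&task_id=load_orders",
    "orchestrator": "airflow",  # new
    "dag_id": "orders_etl",     # new
    "task_id": "load_orders",   # new
}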
@@ -23,6 +23,7 @@

TASK_ON_FAILURE_CALLBACK = "on_failure_callback"
TASK_ON_SUCCESS_CALLBACK = "on_success_callback"
+TASK_ON_RETRY_CALLBACK = "on_retry_callback"


def get_task_inlets_advanced(task: BaseOperator, context: Any) -> Iterable[Any]:
@@ -259,6 +260,26 @@ def custom_on_success_callback(context):
    return custom_on_success_callback


+def _wrap_on_retry_callback(on_retry_callback):
+    def custom_on_retry_callback(context):
+        config = get_lineage_config()
+        if config.enabled:
+            context["_datahub_config"] = config
+            try:
+                datahub_task_status_callback(context, status=InstanceRunResult.UP_FOR_RETRY)
+            except Exception as e:
+                if not config.graceful_exceptions:
+                    raise e
+                else:
+                    print(f"Exception: {traceback.format_exc()}")
+
+        # Call original policy
+        if on_retry_callback:
+            on_retry_callback(context)
+
+    return custom_on_retry_callback


def task_policy(task: Union[BaseOperator, MappedOperator]) -> None:
    task.log.debug(f"Setting task policy for Dag: {task.dag_id} Task: {task.task_id}")
    # task.add_inlets(["auto"])
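The new wrapper mirrors _wrap_on_failure_callback and _wrap_on_success_callback: it reports an UP_FOR_RETRY run to DataHub, then falls through to the user's original on_retry_callback, so existing callbacks keep firing. A sketch of a task whose retries would now be reported (the operator and callback here are illustrative):

from datetime import timedelta
from airflow.operators.python import PythonOperator

def alert_on_retry(context):
    # hypothetical user callback; still invoked after DataHub is notified
    print(f"retrying {context['task_instance'].task_id}")

flaky = PythonOperator(
    task_id="flaky_task",
    python_callable=lambda: 1 / 0,  # always fails, so the task goes UP_FOR_RETRY
    retries=3,
    retry_delay=timedelta(minutes=1),
    on_retry_callback=alert_on_retry,
)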
@@ -274,7 +295,10 @@ def task_policy(task: Union[BaseOperator, MappedOperator]) -> None:
        on_success_callback_prop: property = getattr(
            MappedOperator, TASK_ON_SUCCESS_CALLBACK
        )
+        on_retry_callback_prop: property = getattr(
+            MappedOperator, TASK_ON_RETRY_CALLBACK
+        )
-        if not on_failure_callback_prop.fset or not on_success_callback_prop.fset:
+        if not on_failure_callback_prop.fset or not on_success_callback_prop.fset or not on_retry_callback_prop.fset:
        task.log.debug(
            "Using MappedOperator's partial_kwargs instead of callback properties"
        )
@@ -284,10 +308,14 @@ def task_policy(task: Union[BaseOperator, MappedOperator]) -> None:
        task.partial_kwargs[TASK_ON_SUCCESS_CALLBACK] = _wrap_on_success_callback(
            task.on_success_callback
        )
+        task.partial_kwargs[TASK_ON_RETRY_CALLBACK] = _wrap_on_retry_callback(
+            task.on_retry_callback
+        )
        return

    task.on_failure_callback = _wrap_on_failure_callback(task.on_failure_callback)  # type: ignore
    task.on_success_callback = _wrap_on_success_callback(task.on_success_callback)  # type: ignore
+    task.on_retry_callback = _wrap_on_retry_callback(task.on_retry_callback)  # type: ignore
    # task.pre_execute = _wrap_pre_execution(task.pre_execute)


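task_policy is Airflow's cluster-policy hook, so once this module is installed as a policy every parsed task has all three callbacks wrapped. MappedOperator needs the partial_kwargs fallback because on some Airflow versions its callback attributes are read-only properties (no fset), so the wrappers are written into the kwargs the mapped task expands with instead. A rough sketch of the net effect on a plain operator, assuming task_policy from this module is importable (the user callback is hypothetical):

from airflow.operators.empty import EmptyOperator

def my_retry_handler(context):
    ...  # hypothetical user callback

task = EmptyOperator(task_id="t", on_retry_callback=my_retry_handler)
task_policy(task)
# task.on_retry_callback is now custom_on_retry_callback: it first emits an
# UP_FOR_RETRY run to DataHub, then calls my_retry_handler(context).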