diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 105aed43d1..b5a4485385 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -3,6 +3,11 @@ Changelog in development -------------- +* implemented zstandard compression for parameters and results. #5995 + contributed by @guzzijones12 + +* removed embedded liveaction in action execution database table #5995 + contributed by @guzzijones12 Python 3.6 is no longer supported; Stackstorm requires at least Python 3.8. diff --git a/conf/st2.conf.sample b/conf/st2.conf.sample index 5eed1ffd4c..15da5bb3ba 100644 --- a/conf/st2.conf.sample +++ b/conf/st2.conf.sample @@ -140,6 +140,9 @@ connection_timeout = 3000 db_name = st2 # host of db server host = 127.0.0.1 +# compression for parameter and result storage in liveaction and execution models +# Valid values: zstandard, none +parameter_result_compression = zstandard # password for db login password = None # port of db server diff --git a/conf/st2.dev.conf b/conf/st2.dev.conf index cf2b5b6596..c2276ec332 100644 --- a/conf/st2.dev.conf +++ b/conf/st2.dev.conf @@ -1,6 +1,7 @@ # Config used by local development environment (tools/launch.dev.sh) [database] host = 127.0.0.1 +parameter_result_compression = zstandard [api] # Host and port to bind the API server. diff --git a/contrib/runners/action_chain_runner/action_chain_runner/action_chain_runner.py b/contrib/runners/action_chain_runner/action_chain_runner/action_chain_runner.py index 57c015adcf..b923a38c20 100644 --- a/contrib/runners/action_chain_runner/action_chain_runner/action_chain_runner.py +++ b/contrib/runners/action_chain_runner/action_chain_runner/action_chain_runner.py @@ -333,7 +333,7 @@ def cancel(self): and child_exec.status in action_constants.LIVEACTION_CANCELABLE_STATES ): action_service.request_cancellation( - LiveAction.get(id=child_exec.liveaction["id"]), + LiveAction.get(id=child_exec.liveaction_id), self.context.get("user", None), ) @@ -353,7 +353,7 @@ def pause(self): and child_exec.status == action_constants.LIVEACTION_STATUS_RUNNING ): action_service.request_pause( - LiveAction.get(id=child_exec.liveaction["id"]), + LiveAction.get(id=child_exec.liveaction_id), self.context.get("user", None), ) @@ -966,7 +966,7 @@ def _format_action_exec_result( execution_db = None if liveaction_db: - execution_db = ActionExecution.get(liveaction__id=str(liveaction_db.id)) + execution_db = ActionExecution.get(liveaction_id=str(liveaction_db.id)) result["id"] = action_node.name result["name"] = action_node.name diff --git a/contrib/runners/action_chain_runner/tests/unit/test_actionchain_cancel.py b/contrib/runners/action_chain_runner/tests/unit/test_actionchain_cancel.py index bf6ffdd47a..e04d5c01b1 100644 --- a/contrib/runners/action_chain_runner/tests/unit/test_actionchain_cancel.py +++ b/contrib/runners/action_chain_runner/tests/unit/test_actionchain_cancel.py @@ -171,7 +171,7 @@ def test_chain_cancel_cascade_to_subworkflow(self): # Wait until the subworkflow is running. task1_exec = ActionExecution.get_by_id(execution.children[0]) - task1_live = LiveAction.get_by_id(task1_exec.liveaction["id"]) + task1_live = LiveAction.get_by_id(task1_exec.liveaction_id) task1_live = self._wait_on_status( task1_live, action_constants.LIVEACTION_STATUS_RUNNING ) @@ -189,7 +189,7 @@ def test_chain_cancel_cascade_to_subworkflow(self): # Wait until the subworkflow is canceling. 
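Note on the new parameter_result_compression option above: it controls whether liveaction/execution parameters and results are stored zstandard-compressed or left as-is ("none"). A minimal sketch of the intended round trip, assuming the zstandard package; the helper names are illustrative only, not the actual st2common field implementation:

    import json

    import zstandard

    def compress_value(value, compression="zstandard"):
        # Serialize a parameters/result dict and optionally compress it.
        raw = json.dumps(value).encode("utf-8")
        if compression == "none":
            return raw
        return zstandard.ZstdCompressor().compress(raw)

    def decompress_value(blob, compression="zstandard"):
        # Reverse of compress_value: decompress (if needed) and deserialize.
        if compression != "none":
            blob = zstandard.ZstdDecompressor().decompress(blob)
        return json.loads(blob.decode("utf-8"))

    # Round trip check.
    result = {"stdout": "ok", "return_code": 0}
    assert decompress_value(compress_value(result)) == result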
task1_exec = ActionExecution.get_by_id(execution.children[0]) - task1_live = LiveAction.get_by_id(task1_exec.liveaction["id"]) + task1_live = LiveAction.get_by_id(task1_exec.liveaction_id) task1_live = self._wait_on_status( task1_live, action_constants.LIVEACTION_STATUS_CANCELING ) @@ -206,7 +206,7 @@ def test_chain_cancel_cascade_to_subworkflow(self): # Wait until the subworkflow is canceled. task1_exec = ActionExecution.get_by_id(execution.children[0]) - task1_live = LiveAction.get_by_id(task1_exec.liveaction["id"]) + task1_live = LiveAction.get_by_id(task1_exec.liveaction_id) task1_live = self._wait_on_status( task1_live, action_constants.LIVEACTION_STATUS_CANCELED ) @@ -248,7 +248,7 @@ def test_chain_cancel_cascade_to_parent_workflow(self): # Wait until the subworkflow is running. task1_exec = ActionExecution.get_by_id(execution.children[0]) - task1_live = LiveAction.get_by_id(task1_exec.liveaction["id"]) + task1_live = LiveAction.get_by_id(task1_exec.liveaction_id) task1_live = self._wait_on_status( task1_live, action_constants.LIVEACTION_STATUS_RUNNING ) @@ -260,7 +260,7 @@ def test_chain_cancel_cascade_to_parent_workflow(self): # Wait until the subworkflow is canceling. task1_exec = ActionExecution.get_by_id(execution.children[0]) - task1_live = LiveAction.get_by_id(task1_exec.liveaction["id"]) + task1_live = LiveAction.get_by_id(task1_exec.liveaction_id) task1_live = self._wait_on_status( task1_live, action_constants.LIVEACTION_STATUS_CANCELING ) @@ -271,7 +271,7 @@ def test_chain_cancel_cascade_to_parent_workflow(self): # Wait until the subworkflow is canceled. task1_exec = ActionExecution.get_by_id(execution.children[0]) - task1_live = LiveAction.get_by_id(task1_exec.liveaction["id"]) + task1_live = LiveAction.get_by_id(task1_exec.liveaction_id) task1_live = self._wait_on_status( task1_live, action_constants.LIVEACTION_STATUS_CANCELED ) diff --git a/contrib/runners/action_chain_runner/tests/unit/test_actionchain_notifications.py b/contrib/runners/action_chain_runner/tests/unit/test_actionchain_notifications.py index f2f2a680c8..df1c1567d9 100644 --- a/contrib/runners/action_chain_runner/tests/unit/test_actionchain_notifications.py +++ b/contrib/runners/action_chain_runner/tests/unit/test_actionchain_notifications.py @@ -151,7 +151,7 @@ def test_skip_notify_for_task_with_notify(self): # Assert task1 notify is skipped task1_exec = ActionExecution.get_by_id(execution.children[0]) - task1_live = LiveAction.get_by_id(task1_exec.liveaction["id"]) + task1_live = LiveAction.get_by_id(task1_exec.liveaction_id) task1_live = self._wait_on_status( task1_live, action_constants.LIVEACTION_STATUS_SUCCEEDED ) @@ -162,7 +162,7 @@ def test_skip_notify_for_task_with_notify(self): # Assert task2 notify is not skipped task2_exec = ActionExecution.get_by_id(execution.children[1]) - task2_live = LiveAction.get_by_id(task2_exec.liveaction["id"]) + task2_live = LiveAction.get_by_id(task2_exec.liveaction_id) notify = notify_api_models.NotificationsHelper.from_model( notify_model=task2_live.notify ) @@ -186,7 +186,7 @@ def test_skip_notify_default_for_task_with_notify(self): # Assert task1 notify is set. task1_exec = ActionExecution.get_by_id(execution.children[0]) - task1_live = LiveAction.get_by_id(task1_exec.liveaction["id"]) + task1_live = LiveAction.get_by_id(task1_exec.liveaction_id) task1_live = self._wait_on_status( task1_live, action_constants.LIVEACTION_STATUS_SUCCEEDED ) @@ -200,7 +200,7 @@ def test_skip_notify_default_for_task_with_notify(self): # Assert task2 notify is not skipped by default. 
task2_exec = ActionExecution.get_by_id(execution.children[1]) - task2_live = LiveAction.get_by_id(task2_exec.liveaction["id"]) + task2_live = LiveAction.get_by_id(task2_exec.liveaction_id) self.assertIsNone(task2_live.notify) MockLiveActionPublisherNonBlocking.wait_all() diff --git a/contrib/runners/action_chain_runner/tests/unit/test_actionchain_pause_resume.py b/contrib/runners/action_chain_runner/tests/unit/test_actionchain_pause_resume.py index e0dfecc4d6..6187522d42 100644 --- a/contrib/runners/action_chain_runner/tests/unit/test_actionchain_pause_resume.py +++ b/contrib/runners/action_chain_runner/tests/unit/test_actionchain_pause_resume.py @@ -431,7 +431,7 @@ def test_chain_pause_resume_cascade_to_subworkflow(self): # Wait until the subworkflow is running. task1_exec = ActionExecution.get_by_id(execution.children[0]) - task1_live = LiveAction.get_by_id(task1_exec.liveaction["id"]) + task1_live = LiveAction.get_by_id(task1_exec.liveaction_id) task1_live = self._wait_for_status( task1_live, action_constants.LIVEACTION_STATUS_RUNNING ) @@ -452,7 +452,7 @@ def test_chain_pause_resume_cascade_to_subworkflow(self): # Wait until the subworkflow is pausing. task1_exec = ActionExecution.get_by_id(execution.children[0]) - task1_live = LiveAction.get_by_id(task1_exec.liveaction["id"]) + task1_live = LiveAction.get_by_id(task1_exec.liveaction_id) task1_live = self._wait_for_status( task1_live, action_constants.LIVEACTION_STATUS_PAUSING ) @@ -477,7 +477,7 @@ def test_chain_pause_resume_cascade_to_subworkflow(self): # Wait until the subworkflow is paused. task1_exec = ActionExecution.get_by_id(execution.children[0]) - task1_live = LiveAction.get_by_id(task1_exec.liveaction["id"]) + task1_live = LiveAction.get_by_id(task1_exec.liveaction_id) task1_live = self._wait_for_status( task1_live, action_constants.LIVEACTION_STATUS_PAUSED ) @@ -548,7 +548,7 @@ def test_chain_pause_resume_cascade_to_parent_workflow(self): # Wait until the subworkflow is running. task1_exec = ActionExecution.get_by_id(execution.children[0]) - task1_live = LiveAction.get_by_id(task1_exec.liveaction["id"]) + task1_live = LiveAction.get_by_id(task1_exec.liveaction_id) task1_live = self._wait_for_status( task1_live, action_constants.LIVEACTION_STATUS_RUNNING ) @@ -559,7 +559,7 @@ def test_chain_pause_resume_cascade_to_parent_workflow(self): # Wait until the subworkflow is pausing. task1_exec = ActionExecution.get_by_id(execution.children[0]) - task1_live = LiveAction.get_by_id(task1_exec.liveaction["id"]) + task1_live = LiveAction.get_by_id(task1_exec.liveaction_id) task1_live = self._wait_for_status( task1_live, action_constants.LIVEACTION_STATUS_PAUSING ) @@ -574,7 +574,7 @@ def test_chain_pause_resume_cascade_to_parent_workflow(self): # Wait until the subworkflow is paused. task1_exec = ActionExecution.get_by_id(execution.children[0]) - task1_live = LiveAction.get_by_id(task1_exec.liveaction["id"]) + task1_live = LiveAction.get_by_id(task1_exec.liveaction_id) task1_live = self._wait_for_status( task1_live, action_constants.LIVEACTION_STATUS_PAUSED ) @@ -611,7 +611,7 @@ def test_chain_pause_resume_cascade_to_parent_workflow(self): # Wait until the subworkflow is paused. 
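All of these runner and test updates follow the same two-way pattern: the execution model no longer embeds the liveaction document, only its id, so reads go through the new scalar field and reverse lookups filter on it. A rough sketch, assuming the usual st2common persistence accessors:

    from st2common.persistence.execution import ActionExecution
    from st2common.persistence.liveaction import LiveAction

    def get_liveaction_for_execution(execution_db):
        # Before this change: LiveAction.get_by_id(execution_db.liveaction["id"])
        return LiveAction.get_by_id(execution_db.liveaction_id)

    def get_execution_for_liveaction(liveaction_db):
        # Before this change: ActionExecution.get(liveaction__id=str(liveaction_db.id))
        return ActionExecution.get(liveaction_id=str(liveaction_db.id))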
task1_exec = ActionExecution.get_by_id(execution.children[0]) - task1_live = LiveAction.get_by_id(task1_exec.liveaction["id"]) + task1_live = LiveAction.get_by_id(task1_exec.liveaction_id) task1_live = self._wait_for_status( task1_live, action_constants.LIVEACTION_STATUS_SUCCEEDED ) diff --git a/contrib/runners/inquirer_runner/inquirer_runner/inquirer_runner.py b/contrib/runners/inquirer_runner/inquirer_runner/inquirer_runner.py index af0f0c6f34..f080c7ab30 100644 --- a/contrib/runners/inquirer_runner/inquirer_runner/inquirer_runner.py +++ b/contrib/runners/inquirer_runner/inquirer_runner/inquirer_runner.py @@ -74,7 +74,7 @@ def pre_run(self): def run(self, action_parameters): liveaction_db = action_utils.get_liveaction_by_id(self.liveaction_id) - exc = ex_db_access.ActionExecution.get(liveaction__id=str(liveaction_db.id)) + exc = ex_db_access.ActionExecution.get(liveaction_id=str(liveaction_db.id)) # Assemble and dispatch trigger trigger_ref = sys_db_models.ResourceReference.to_string_reference( diff --git a/contrib/runners/orquesta_runner/orquesta_runner/orquesta_runner.py b/contrib/runners/orquesta_runner/orquesta_runner/orquesta_runner.py index 586a9d0cc9..717ec979c4 100644 --- a/contrib/runners/orquesta_runner/orquesta_runner/orquesta_runner.py +++ b/contrib/runners/orquesta_runner/orquesta_runner/orquesta_runner.py @@ -136,12 +136,22 @@ def start_workflow(self, action_parameters): wf_def, self.execution, st2_ctx, notify_cfg=notify_cfg ) except wf_exc.WorkflowInspectionError as e: + _, ex, tb = sys.exc_info() status = ac_const.LIVEACTION_STATUS_FAILED - result = {"errors": e.args[1], "output": None} + result = { + "errors": e.args[1], + "output": None, + "traceback": "".join(traceback.format_tb(tb, 20)), + } return (status, result, self.context) except Exception as e: + _, ex, tb = sys.exc_info() status = ac_const.LIVEACTION_STATUS_FAILED - result = {"errors": [{"message": six.text_type(e)}], "output": None} + result = { + "errors": [{"message": six.text_type(e)}], + "output": None, + "traceback": "".join(traceback.format_tb(tb, 20)), + } return (status, result, self.context) return self._handle_workflow_return_value(wf_ex_db) @@ -178,7 +188,7 @@ def pause(self): child_ex = ex_db_access.ActionExecution.get(id=child_ex_id) if self.task_pauseable(child_ex): ac_svc.request_pause( - lv_db_access.LiveAction.get(id=child_ex.liveaction["id"]), + lv_db_access.LiveAction.get(id=child_ex.liveaction_id), self.context.get("user", None), ) @@ -209,7 +219,7 @@ def resume(self): child_ex = ex_db_access.ActionExecution.get(id=child_ex_id) if self.task_resumeable(child_ex): ac_svc.request_resume( - lv_db_access.LiveAction.get(id=child_ex.liveaction["id"]), + lv_db_access.LiveAction.get(id=child_ex.liveaction_id), self.context.get("user", None), ) @@ -270,7 +280,7 @@ def cancel(self): child_ex = ex_db_access.ActionExecution.get(id=child_ex_id) if self.task_cancelable(child_ex): ac_svc.request_cancellation( - lv_db_access.LiveAction.get(id=child_ex.liveaction["id"]), + lv_db_access.LiveAction.get(id=child_ex.liveaction_id), self.context.get("user", None), ) diff --git a/contrib/runners/orquesta_runner/tests/unit/test_basic.py b/contrib/runners/orquesta_runner/tests/unit/test_basic.py index 9b84c94b3a..e0d22a6446 100644 --- a/contrib/runners/orquesta_runner/tests/unit/test_basic.py +++ b/contrib/runners/orquesta_runner/tests/unit/test_basic.py @@ -185,7 +185,7 @@ def test_run_workflow(self): tk1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk1_ex_db.id) )[0] - tk1_lv_ac_db = 
lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction["id"]) + tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction_id) self.assertEqual(tk1_lv_ac_db.context.get("user"), username) self.assertEqual(tk1_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) self.assertTrue(wf_svc.is_action_execution_under_workflow_context(tk1_ac_ex_db)) @@ -205,7 +205,7 @@ def test_run_workflow(self): tk2_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk2_ex_db.id) )[0] - tk2_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk2_ac_ex_db.liveaction["id"]) + tk2_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk2_ac_ex_db.liveaction_id) self.assertEqual(tk2_lv_ac_db.context.get("user"), username) self.assertEqual(tk2_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) self.assertTrue(wf_svc.is_action_execution_under_workflow_context(tk2_ac_ex_db)) @@ -225,7 +225,7 @@ def test_run_workflow(self): tk3_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk3_ex_db.id) )[0] - tk3_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk3_ac_ex_db.liveaction["id"]) + tk3_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk3_ac_ex_db.liveaction_id) self.assertEqual(tk3_lv_ac_db.context.get("user"), username) self.assertEqual(tk3_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) self.assertTrue(wf_svc.is_action_execution_under_workflow_context(tk3_ac_ex_db)) @@ -275,7 +275,7 @@ def test_run_workflow_with_unicode_input(self): tk1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk1_ex_db.id) )[0] - tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction["id"]) + tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction_id) self.assertEqual(tk1_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) wf_svc.handle_action_execution_completion(tk1_ac_ex_db) tk1_ex_db = wf_db_access.TaskExecution.get_by_id(tk1_ex_db.id) @@ -287,7 +287,7 @@ def test_run_workflow_with_unicode_input(self): tk2_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk2_ex_db.id) )[0] - tk2_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk2_ac_ex_db.liveaction["id"]) + tk2_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk2_ac_ex_db.liveaction_id) self.assertEqual(tk2_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) wf_svc.handle_action_execution_completion(tk2_ac_ex_db) tk2_ex_db = wf_db_access.TaskExecution.get_by_id(tk2_ex_db.id) @@ -299,7 +299,7 @@ def test_run_workflow_with_unicode_input(self): tk3_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk3_ex_db.id) )[0] - tk3_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk3_ac_ex_db.liveaction["id"]) + tk3_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk3_ac_ex_db.liveaction_id) self.assertEqual(tk3_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) wf_svc.handle_action_execution_completion(tk3_ac_ex_db) tk3_ex_db = wf_db_access.TaskExecution.get_by_id(tk3_ex_db.id) @@ -348,7 +348,7 @@ def test_run_workflow_action_config_context(self): tk1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk1_ex_db.id) )[0] - tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction["id"]) + tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction_id) self.assertEqual(tk1_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) self.assertTrue(wf_svc.is_action_execution_under_workflow_context(tk1_ac_ex_db)) @@ -401,7 +401,7 @@ def test_run_workflow_with_action_less_tasks(self): tk2_ac_ex_db = ex_db_access.ActionExecution.query( 
task_execution=str(tk2_ex_db.id) )[0] - tk2_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk2_ac_ex_db.liveaction["id"]) + tk2_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk2_ac_ex_db.liveaction_id) self.assertEqual(tk2_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) # Manually handle action execution completion. @@ -413,7 +413,7 @@ def test_run_workflow_with_action_less_tasks(self): tk3_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk3_ex_db.id) )[0] - tk3_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk3_ac_ex_db.liveaction["id"]) + tk3_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk3_ac_ex_db.liveaction_id) self.assertEqual(tk3_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) # Manually handle action execution completion. @@ -434,7 +434,7 @@ def test_run_workflow_with_action_less_tasks(self): tk5_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk5_ex_db.id) )[0] - tk5_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk5_ac_ex_db.liveaction["id"]) + tk5_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk5_ac_ex_db.liveaction_id) self.assertEqual(tk5_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) # Manually handle action execution completion. diff --git a/contrib/runners/orquesta_runner/tests/unit/test_cancel.py b/contrib/runners/orquesta_runner/tests/unit/test_cancel.py index cdffd6949d..1c6df3cf11 100644 --- a/contrib/runners/orquesta_runner/tests/unit/test_cancel.py +++ b/contrib/runners/orquesta_runner/tests/unit/test_cancel.py @@ -139,9 +139,7 @@ def test_cancel_workflow_cascade_down_to_subworkflow(self): ) self.assertEqual(len(tk_ac_ex_dbs), 1) - tk_lv_ac_db = lv_db_access.LiveAction.get_by_id( - tk_ac_ex_dbs[0].liveaction["id"] - ) + tk_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk_ac_ex_dbs[0].liveaction_id) self.assertEqual(tk_lv_ac_db.status, ac_const.LIVEACTION_STATUS_RUNNING) # Cancel the main workflow. @@ -182,9 +180,7 @@ def test_cancel_subworkflow_cascade_up_to_workflow(self): ) self.assertEqual(len(tk_ac_ex_dbs), 1) - tk_lv_ac_db = lv_db_access.LiveAction.get_by_id( - tk_ac_ex_dbs[0].liveaction["id"] - ) + tk_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk_ac_ex_dbs[0].liveaction_id) self.assertEqual(tk_lv_ac_db.status, ac_const.LIVEACTION_STATUS_RUNNING) # Cancel the subworkflow. @@ -230,9 +226,7 @@ def test_cancel_subworkflow_cascade_up_to_workflow_with_other_subworkflows(self) ) self.assertEqual(len(tk1_ac_ex_dbs), 1) - tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id( - tk1_ac_ex_dbs[0].liveaction["id"] - ) + tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_dbs[0].liveaction_id) self.assertEqual(tk1_lv_ac_db.status, ac_const.LIVEACTION_STATUS_RUNNING) tk2_ac_ex_dbs = ex_db_access.ActionExecution.query( @@ -240,9 +234,7 @@ def test_cancel_subworkflow_cascade_up_to_workflow_with_other_subworkflows(self) ) self.assertEqual(len(tk2_ac_ex_dbs), 1) - tk2_lv_ac_db = lv_db_access.LiveAction.get_by_id( - tk2_ac_ex_dbs[0].liveaction["id"] - ) + tk2_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk2_ac_ex_dbs[0].liveaction_id) self.assertEqual(tk2_lv_ac_db.status, ac_const.LIVEACTION_STATUS_RUNNING) # Cancel the subworkflow which should cascade up to the root. 
diff --git a/contrib/runners/orquesta_runner/tests/unit/test_data_flow.py b/contrib/runners/orquesta_runner/tests/unit/test_data_flow.py index eadadbf469..910ff2cf18 100644 --- a/contrib/runners/orquesta_runner/tests/unit/test_data_flow.py +++ b/contrib/runners/orquesta_runner/tests/unit/test_data_flow.py @@ -143,7 +143,7 @@ def assert_data_flow(self, data): tk1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk1_ex_db.id) )[0] - tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction["id"]) + tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction_id) self.assertEqual(tk1_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) # Manually handle action execution completion. @@ -161,7 +161,7 @@ def assert_data_flow(self, data): tk2_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk2_ex_db.id) )[0] - tk2_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk2_ac_ex_db.liveaction["id"]) + tk2_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk2_ac_ex_db.liveaction_id) self.assertEqual(tk2_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) # Manually handle action execution completion. @@ -179,7 +179,7 @@ def assert_data_flow(self, data): tk3_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk3_ex_db.id) )[0] - tk3_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk3_ac_ex_db.liveaction["id"]) + tk3_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk3_ac_ex_db.liveaction_id) self.assertEqual(tk3_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) # Manually handle action execution completion. @@ -197,7 +197,7 @@ def assert_data_flow(self, data): tk4_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk4_ex_db.id) )[0] - tk4_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk4_ac_ex_db.liveaction["id"]) + tk4_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk4_ac_ex_db.liveaction_id) self.assertEqual(tk4_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) # Manually handle action execution completion. diff --git a/contrib/runners/orquesta_runner/tests/unit/test_error_handling.py b/contrib/runners/orquesta_runner/tests/unit/test_error_handling.py index 9aafac018f..f6fcf8977b 100644 --- a/contrib/runners/orquesta_runner/tests/unit/test_error_handling.py +++ b/contrib/runners/orquesta_runner/tests/unit/test_error_handling.py @@ -364,7 +364,18 @@ def test_fail_start_task_input_value_type(self): workflow_execution=str(wf_ex_db.id) )[0] self.assertEqual(tk_ex_db.status, wf_statuses.FAILED) - self.assertDictEqual(tk_ex_db.result, {"errors": expected_errors}) + self.assertEqual( + tk_ex_db.result["errors"][0]["type"], expected_errors[0]["type"] + ) + self.assertEqual( + tk_ex_db.result["errors"][0]["message"], expected_errors[0]["message"] + ) + self.assertEqual( + tk_ex_db.result["errors"][0]["task_id"], expected_errors[0]["task_id"] + ) + self.assertEqual( + tk_ex_db.result["errors"][0]["route"], expected_errors[0]["route"] + ) lv_ac_db = lv_db_access.LiveAction.get_by_id(str(lv_ac_db.id)) self.assertEqual(lv_ac_db.status, ac_const.LIVEACTION_STATUS_FAILED) @@ -405,7 +416,7 @@ def test_fail_next_task_action(self): tk_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk_ex_db.id) )[0] - tk_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk_ac_ex_db.liveaction["id"]) + tk_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk_ac_ex_db.liveaction_id) self.assertEqual(tk_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) # Manually handle action execution completion for task1 which has an error in publish. 
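The orquesta runner now records a formatted traceback alongside the error list when starting a workflow fails, which is why the assertions above compare individual error fields instead of the full result dict. The capture itself follows the standard library pattern; a sketch, assuming the runner module imports sys and traceback:

    import sys
    import traceback

    def failure_result(errors):
        # Capture the traceback of the exception currently being handled and
        # attach it to the result next to the structured errors.
        _, _, tb = sys.exc_info()
        return {
            "errors": errors,
            "output": None,
            "traceback": "".join(traceback.format_tb(tb, 20)),
        }

    try:
        raise RuntimeError("workflow inspection failed")
    except RuntimeError as e:
        result = failure_result([{"message": str(e)}])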
@@ -461,7 +472,7 @@ def test_fail_next_task_input_expr_eval(self): tk_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk_ex_db.id) )[0] - tk_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk_ac_ex_db.liveaction["id"]) + tk_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk_ac_ex_db.liveaction_id) self.assertEqual(tk_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) # Manually handle action execution completion for task1 which has an error in publish. @@ -513,7 +524,7 @@ def test_fail_next_task_input_value_type(self): tk1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk1_ex_db.id) )[0] - tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction["id"]) + tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction_id) self.assertEqual(tk1_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) self.assertEqual(wf_ex_db.status, wf_statuses.RUNNING) @@ -523,13 +534,37 @@ def test_fail_next_task_input_value_type(self): # Assert workflow execution and task2 execution failed. wf_ex_db = wf_db_access.WorkflowExecution.get_by_id(str(wf_ex_db.id)) self.assertEqual(wf_ex_db.status, wf_statuses.FAILED) - self.assertListEqual( - self.sort_workflow_errors(wf_ex_db.errors), expected_errors + self.assertEqual( + self.sort_workflow_errors(wf_ex_db.errors)[0]["type"], + expected_errors[0]["type"], + ) + self.assertEqual( + self.sort_workflow_errors(wf_ex_db.errors)[0]["message"], + expected_errors[0]["message"], + ) + self.assertEqual( + self.sort_workflow_errors(wf_ex_db.errors)[0]["task_id"], + expected_errors[0]["task_id"], + ) + self.assertEqual( + self.sort_workflow_errors(wf_ex_db.errors)[0]["route"], + expected_errors[0]["route"], ) tk2_ex_db = wf_db_access.TaskExecution.query(task_id="task2")[0] self.assertEqual(tk2_ex_db.status, wf_statuses.FAILED) - self.assertDictEqual(tk2_ex_db.result, {"errors": expected_errors}) + self.assertEqual( + tk2_ex_db.result["errors"][0]["type"], expected_errors[0]["type"] + ) + self.assertEqual( + tk2_ex_db.result["errors"][0]["message"], expected_errors[0]["message"] + ) + self.assertEqual( + tk2_ex_db.result["errors"][0]["task_id"], expected_errors[0]["task_id"] + ) + self.assertEqual( + tk2_ex_db.result["errors"][0]["route"], expected_errors[0]["route"] + ) lv_ac_db = lv_db_access.LiveAction.get_by_id(str(lv_ac_db.id)) self.assertEqual(lv_ac_db.status, ac_const.LIVEACTION_STATUS_FAILED) @@ -573,7 +608,7 @@ def test_fail_task_execution(self): tk1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk1_ex_db.id) )[0] - tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction["id"]) + tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction_id) self.assertEqual(tk1_lv_ac_db.status, ac_const.LIVEACTION_STATUS_FAILED) wf_svc.handle_action_execution_completion(tk1_ac_ex_db) @@ -624,7 +659,7 @@ def test_fail_task_transition(self): tk_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk_ex_db.id) )[0] - tk_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk_ac_ex_db.liveaction["id"]) + tk_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk_ac_ex_db.liveaction_id) self.assertEqual(tk_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) # Manually handle action execution completion for task1 which has an error in publish. 
@@ -680,7 +715,7 @@ def test_fail_task_publish(self): tk_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk_ex_db.id) )[0] - tk_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk_ac_ex_db.liveaction["id"]) + tk_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk_ac_ex_db.liveaction_id) self.assertEqual(tk_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) # Manually handle action execution completion for task1 which has an error in publish. @@ -733,7 +768,7 @@ def test_fail_output_rendering(self): tk_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk_ex_db.id) )[0] - tk_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk_ac_ex_db.liveaction["id"]) + tk_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk_ac_ex_db.liveaction_id) self.assertEqual(tk_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) # Manually handle action execution completion for task1 which has an error in publish. @@ -789,7 +824,7 @@ def test_output_on_error(self): tk1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk1_ex_db.id) )[0] - tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction["id"]) + tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction_id) self.assertEqual(tk1_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) wf_svc.handle_action_execution_completion(tk1_ac_ex_db) wf_ex_db = wf_db_access.WorkflowExecution.get_by_id(wf_ex_db.id) @@ -801,7 +836,7 @@ def test_output_on_error(self): tk2_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk2_ex_db.id) )[0] - tk2_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk2_ac_ex_db.liveaction["id"]) + tk2_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk2_ac_ex_db.liveaction_id) self.assertEqual(tk2_lv_ac_db.status, ac_const.LIVEACTION_STATUS_FAILED) wf_svc.handle_action_execution_completion(tk2_ac_ex_db) @@ -832,7 +867,7 @@ def test_fail_manually(self): tk1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk1_ex_db.id) )[0] - tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction["id"]) + tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction_id) self.assertEqual(tk1_lv_ac_db.status, ac_const.LIVEACTION_STATUS_FAILED) wf_svc.handle_action_execution_completion(tk1_ac_ex_db) wf_ex_db = wf_db_access.WorkflowExecution.get_by_id(wf_ex_db.id) @@ -844,7 +879,7 @@ def test_fail_manually(self): tk2_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk2_ex_db.id) )[0] - tk2_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk2_ac_ex_db.liveaction["id"]) + tk2_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk2_ac_ex_db.liveaction_id) self.assertEqual(tk2_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) wf_svc.handle_action_execution_completion(tk2_ac_ex_db) wf_ex_db = wf_db_access.WorkflowExecution.get_by_id(wf_ex_db.id) @@ -890,7 +925,7 @@ def test_fail_manually_with_recovery_failure(self): tk1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk1_ex_db.id) )[0] - tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction["id"]) + tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction_id) self.assertEqual(tk1_lv_ac_db.status, ac_const.LIVEACTION_STATUS_FAILED) wf_svc.handle_action_execution_completion(tk1_ac_ex_db) wf_ex_db = wf_db_access.WorkflowExecution.get_by_id(wf_ex_db.id) @@ -903,7 +938,7 @@ def test_fail_manually_with_recovery_failure(self): tk2_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk2_ex_db.id) )[0] - tk2_lv_ac_db = 
lv_db_access.LiveAction.get_by_id(tk2_ac_ex_db.liveaction["id"]) + tk2_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk2_ac_ex_db.liveaction_id) self.assertEqual(tk2_lv_ac_db.status, ac_const.LIVEACTION_STATUS_FAILED) wf_svc.handle_action_execution_completion(tk2_ac_ex_db) wf_ex_db = wf_db_access.WorkflowExecution.get_by_id(wf_ex_db.id) @@ -979,7 +1014,7 @@ def test_include_result_to_error_log(self): tk1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk1_ex_db.id) )[0] - tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction["id"]) + tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction_id) self.assertEqual(tk1_lv_ac_db.context.get("user"), username) self.assertEqual(tk1_lv_ac_db.status, ac_const.LIVEACTION_STATUS_FAILED) diff --git a/contrib/runners/orquesta_runner/tests/unit/test_functions_common.py b/contrib/runners/orquesta_runner/tests/unit/test_functions_common.py index 4019f9a890..6efd2c0f8b 100644 --- a/contrib/runners/orquesta_runner/tests/unit/test_functions_common.py +++ b/contrib/runners/orquesta_runner/tests/unit/test_functions_common.py @@ -115,7 +115,7 @@ def _execute_workflow(self, wf_name, expected_output): tk1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk1_ex_db.id) )[0] - tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction["id"]) + tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction_id) self.assertEqual(tk1_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) self.assertTrue(wf_svc.is_action_execution_under_workflow_context(tk1_ac_ex_db)) diff --git a/contrib/runners/orquesta_runner/tests/unit/test_functions_task.py b/contrib/runners/orquesta_runner/tests/unit/test_functions_task.py index b325839c9d..721f5c5de7 100644 --- a/contrib/runners/orquesta_runner/tests/unit/test_functions_task.py +++ b/contrib/runners/orquesta_runner/tests/unit/test_functions_task.py @@ -129,9 +129,7 @@ def _execute_workflow( tk_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk_ex_db.id) )[0] - tk_lv_ac_db = lv_db_access.LiveAction.get_by_id( - tk_ac_ex_db.liveaction["id"] - ) + tk_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk_ac_ex_db.liveaction_id) self.assertEqual(tk_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) self.assertTrue( diff --git a/contrib/runners/orquesta_runner/tests/unit/test_inquiries.py b/contrib/runners/orquesta_runner/tests/unit/test_inquiries.py index f60f9415e8..9e8ad560c3 100644 --- a/contrib/runners/orquesta_runner/tests/unit/test_inquiries.py +++ b/contrib/runners/orquesta_runner/tests/unit/test_inquiries.py @@ -113,7 +113,7 @@ def test_inquiry(self): t1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(t1_ex_db.id) )[0] - t1_lv_ac_db = lv_db_access.LiveAction.get_by_id(t1_ac_ex_db.liveaction["id"]) + t1_lv_ac_db = lv_db_access.LiveAction.get_by_id(t1_ac_ex_db.liveaction_id) self.assertEqual( t1_lv_ac_db.status, action_constants.LIVEACTION_STATUS_SUCCEEDED ) @@ -134,7 +134,7 @@ def test_inquiry(self): t2_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(t2_ex_db.id) )[0] - t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction["id"]) + t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction_id) self.assertEqual(t2_lv_ac_db.status, action_constants.LIVEACTION_STATUS_PENDING) workflows.get_engine().process(t2_ac_ex_db) t2_ex_db = wf_db_access.TaskExecution.get_by_id(t2_ex_db.id) @@ -170,7 +170,7 @@ def test_inquiry(self): t3_ac_ex_db = 
ex_db_access.ActionExecution.query( task_execution=str(t3_ex_db.id) )[0] - t3_lv_ac_db = lv_db_access.LiveAction.get_by_id(t3_ac_ex_db.liveaction["id"]) + t3_lv_ac_db = lv_db_access.LiveAction.get_by_id(t3_ac_ex_db.liveaction_id) self.assertEqual( t3_lv_ac_db.status, action_constants.LIVEACTION_STATUS_SUCCEEDED ) @@ -203,7 +203,7 @@ def test_consecutive_inquiries(self): t1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(t1_ex_db.id) )[0] - t1_lv_ac_db = lv_db_access.LiveAction.get_by_id(t1_ac_ex_db.liveaction["id"]) + t1_lv_ac_db = lv_db_access.LiveAction.get_by_id(t1_ac_ex_db.liveaction_id) self.assertEqual( t1_lv_ac_db.status, action_constants.LIVEACTION_STATUS_SUCCEEDED ) @@ -224,7 +224,7 @@ def test_consecutive_inquiries(self): t2_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(t2_ex_db.id) )[0] - t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction["id"]) + t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction_id) self.assertEqual(t2_lv_ac_db.status, action_constants.LIVEACTION_STATUS_PENDING) workflows.get_engine().process(t2_ac_ex_db) t2_ex_db = wf_db_access.TaskExecution.get_by_id(t2_ex_db.id) @@ -263,7 +263,7 @@ def test_consecutive_inquiries(self): t3_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(t3_ex_db.id) )[0] - t3_lv_ac_db = lv_db_access.LiveAction.get_by_id(t3_ac_ex_db.liveaction["id"]) + t3_lv_ac_db = lv_db_access.LiveAction.get_by_id(t3_ac_ex_db.liveaction_id) self.assertEqual(t3_lv_ac_db.status, action_constants.LIVEACTION_STATUS_PENDING) workflows.get_engine().process(t3_ac_ex_db) t3_ex_db = wf_db_access.TaskExecution.get_by_id(t3_ex_db.id) @@ -299,7 +299,7 @@ def test_consecutive_inquiries(self): t4_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(t4_ex_db.id) )[0] - t4_lv_ac_db = lv_db_access.LiveAction.get_by_id(t4_ac_ex_db.liveaction["id"]) + t4_lv_ac_db = lv_db_access.LiveAction.get_by_id(t4_ac_ex_db.liveaction_id) self.assertEqual( t4_lv_ac_db.status, action_constants.LIVEACTION_STATUS_SUCCEEDED ) @@ -332,7 +332,7 @@ def test_parallel_inquiries(self): t1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(t1_ex_db.id) )[0] - t1_lv_ac_db = lv_db_access.LiveAction.get_by_id(t1_ac_ex_db.liveaction["id"]) + t1_lv_ac_db = lv_db_access.LiveAction.get_by_id(t1_ac_ex_db.liveaction_id) self.assertEqual( t1_lv_ac_db.status, action_constants.LIVEACTION_STATUS_SUCCEEDED ) @@ -350,7 +350,7 @@ def test_parallel_inquiries(self): t2_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(t2_ex_db.id) )[0] - t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction["id"]) + t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction_id) self.assertEqual(t2_lv_ac_db.status, action_constants.LIVEACTION_STATUS_PENDING) workflows.get_engine().process(t2_ac_ex_db) t2_ex_db = wf_db_access.TaskExecution.get_by_id(t2_ex_db.id) @@ -366,7 +366,7 @@ def test_parallel_inquiries(self): t3_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(t3_ex_db.id) )[0] - t3_lv_ac_db = lv_db_access.LiveAction.get_by_id(t3_ac_ex_db.liveaction["id"]) + t3_lv_ac_db = lv_db_access.LiveAction.get_by_id(t3_ac_ex_db.liveaction_id) self.assertEqual(t3_lv_ac_db.status, action_constants.LIVEACTION_STATUS_PENDING) workflows.get_engine().process(t3_ac_ex_db) t3_ex_db = wf_db_access.TaskExecution.get_by_id(t3_ex_db.id) @@ -423,7 +423,7 @@ def test_parallel_inquiries(self): t4_ac_ex_db = ex_db_access.ActionExecution.query( 
task_execution=str(t4_ex_db.id) )[0] - t4_lv_ac_db = lv_db_access.LiveAction.get_by_id(t4_ac_ex_db.liveaction["id"]) + t4_lv_ac_db = lv_db_access.LiveAction.get_by_id(t4_ac_ex_db.liveaction_id) self.assertEqual( t4_lv_ac_db.status, action_constants.LIVEACTION_STATUS_SUCCEEDED ) @@ -456,7 +456,7 @@ def test_nested_inquiry(self): t1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(t1_ex_db.id) )[0] - t1_lv_ac_db = lv_db_access.LiveAction.get_by_id(t1_ac_ex_db.liveaction["id"]) + t1_lv_ac_db = lv_db_access.LiveAction.get_by_id(t1_ac_ex_db.liveaction_id) self.assertEqual( t1_lv_ac_db.status, action_constants.LIVEACTION_STATUS_SUCCEEDED ) @@ -477,7 +477,7 @@ def test_nested_inquiry(self): t2_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(t2_ex_db.id) )[0] - t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction["id"]) + t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction_id) self.assertEqual(t2_lv_ac_db.status, action_constants.LIVEACTION_STATUS_RUNNING) workflows.get_engine().process(t2_ac_ex_db) t2_ex_db = wf_db_access.TaskExecution.get_by_id(t2_ex_db.id) @@ -493,9 +493,7 @@ def test_nested_inquiry(self): t2_t1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(t2_t1_ex_db.id) )[0] - t2_t1_lv_ac_db = lv_db_access.LiveAction.get_by_id( - t2_t1_ac_ex_db.liveaction["id"] - ) + t2_t1_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_t1_ac_ex_db.liveaction_id) self.assertEqual( t2_t1_lv_ac_db.status, action_constants.LIVEACTION_STATUS_SUCCEEDED ) @@ -514,9 +512,7 @@ def test_nested_inquiry(self): t2_t2_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(t2_t2_ex_db.id) )[0] - t2_t2_lv_ac_db = lv_db_access.LiveAction.get_by_id( - t2_t2_ac_ex_db.liveaction["id"] - ) + t2_t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_t2_ac_ex_db.liveaction_id) self.assertEqual( t2_t2_lv_ac_db.status, action_constants.LIVEACTION_STATUS_PENDING ) @@ -530,7 +526,7 @@ def test_nested_inquiry(self): t2_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(t2_ex_db.id) )[0] - t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction["id"]) + t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction_id) self.assertEqual(t2_lv_ac_db.status, action_constants.LIVEACTION_STATUS_PAUSED) workflows.get_engine().process(t2_ac_ex_db) t2_ex_db = wf_db_access.TaskExecution.get_by_id(t2_ex_db.id) @@ -568,9 +564,7 @@ def test_nested_inquiry(self): t2_t3_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(t2_t3_ex_db.id) )[0] - t2_t3_lv_ac_db = lv_db_access.LiveAction.get_by_id( - t2_t3_ac_ex_db.liveaction["id"] - ) + t2_t3_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_t3_ac_ex_db.liveaction_id) self.assertEqual( t2_t3_lv_ac_db.status, action_constants.LIVEACTION_STATUS_SUCCEEDED ) @@ -582,7 +576,7 @@ def test_nested_inquiry(self): t2_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(t2_ex_db.id) )[0] - t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction["id"]) + t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction_id) self.assertEqual( t2_lv_ac_db.status, action_constants.LIVEACTION_STATUS_SUCCEEDED ) @@ -598,7 +592,7 @@ def test_nested_inquiry(self): t3_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(t3_ex_db.id) )[0] - t3_lv_ac_db = lv_db_access.LiveAction.get_by_id(t3_ac_ex_db.liveaction["id"]) + t3_lv_ac_db = lv_db_access.LiveAction.get_by_id(t3_ac_ex_db.liveaction_id) self.assertEqual( t3_lv_ac_db.status, 
action_constants.LIVEACTION_STATUS_SUCCEEDED ) diff --git a/contrib/runners/orquesta_runner/tests/unit/test_notify.py b/contrib/runners/orquesta_runner/tests/unit/test_notify.py index ff7114a318..04c786fb78 100644 --- a/contrib/runners/orquesta_runner/tests/unit/test_notify.py +++ b/contrib/runners/orquesta_runner/tests/unit/test_notify.py @@ -235,7 +235,11 @@ def test_notify_task_list_nonexistent_task(self): } self.assertEqual(lv_ac_db.status, action_constants.LIVEACTION_STATUS_FAILED) - self.assertDictEqual(lv_ac_db.result, expected_result) + self.assertEqual( + lv_ac_db.result["errors"][0]["message"], + expected_result["errors"][0]["message"], + ) + self.assertIsNone(lv_ac_db.result["output"], expected_result["output"]) def test_notify_task_list_item_value(self): wf_meta = base.get_wf_fixture_meta_data(TEST_PACK_PATH, "sequential.yaml") @@ -275,7 +279,7 @@ def test_cascade_notify_to_tasks(self): tk1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk1_ex_db.id) )[0] - tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction["id"]) + tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction_id) self.assertIsNone(tk1_lv_ac_db.notify) self.assertEqual( tk1_ac_ex_db.status, action_constants.LIVEACTION_STATUS_SUCCEEDED @@ -296,7 +300,7 @@ def test_cascade_notify_to_tasks(self): tk2_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk2_ex_db.id) )[0] - tk2_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk2_ac_ex_db.liveaction["id"]) + tk2_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk2_ac_ex_db.liveaction_id) notify = notify_api_models.NotificationsHelper.from_model( notify_model=tk2_lv_ac_db.notify ) @@ -320,7 +324,7 @@ def test_cascade_notify_to_tasks(self): tk3_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk3_ex_db.id) )[0] - tk3_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk3_ac_ex_db.liveaction["id"]) + tk3_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk3_ac_ex_db.liveaction_id) self.assertIsNone(tk3_lv_ac_db.notify) self.assertEqual( tk3_ac_ex_db.status, action_constants.LIVEACTION_STATUS_SUCCEEDED @@ -367,7 +371,7 @@ def test_notify_task_list_for_task_with_notify(self): tk1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk1_ex_db.id) )[0] - tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction["id"]) + tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction_id) self.assertIsNone(tk1_lv_ac_db.notify) # Assert task2 notify is set. query_filters = {"workflow_execution": str(wf_ex_db.id), "task_id": "task2"} @@ -375,7 +379,7 @@ def test_notify_task_list_for_task_with_notify(self): tk2_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk2_ex_db.id) )[0] - tk2_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk2_ac_ex_db.liveaction["id"]) + tk2_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk2_ac_ex_db.liveaction_id) notify = notify_api_models.NotificationsHelper.from_model( notify_model=tk2_lv_ac_db.notify ) @@ -402,7 +406,7 @@ def test_no_notify_for_task_with_notify(self): tk1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk1_ex_db.id) )[0] - tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction["id"]) + tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction_id) self.assertIsNone(tk1_lv_ac_db.notify) # Assert task2 notify is not set. 
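Because failed results can now carry extra keys (such as the formatted traceback) in addition to errors and output, the notify test above checks only the fields it actually cares about rather than the whole dict. The same approach works anywhere an exact assertDictEqual used to be used; a small sketch with a hypothetical helper:

    def assert_failure_result(test_case, result, expected_errors):
        # Compare only the stable parts of a failed result; extra keys such as
        # a formatted traceback are deliberately ignored.
        test_case.assertEqual(
            result["errors"][0]["message"], expected_errors[0]["message"]
        )
        test_case.assertIsNone(result["output"])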
@@ -411,5 +415,5 @@ def test_no_notify_for_task_with_notify(self): tk2_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk2_ex_db.id) )[0] - tk2_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk2_ac_ex_db.liveaction["id"]) + tk2_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk2_ac_ex_db.liveaction_id) self.assertIsNone(tk2_lv_ac_db.notify) diff --git a/contrib/runners/orquesta_runner/tests/unit/test_pause_and_resume.py b/contrib/runners/orquesta_runner/tests/unit/test_pause_and_resume.py index 7473d9db8e..bef405cb1c 100644 --- a/contrib/runners/orquesta_runner/tests/unit/test_pause_and_resume.py +++ b/contrib/runners/orquesta_runner/tests/unit/test_pause_and_resume.py @@ -153,9 +153,7 @@ def test_pause_subworkflow_not_cascade_up_to_workflow(self): ) self.assertEqual(len(tk_ac_ex_dbs), 1) - tk_lv_ac_db = lv_db_access.LiveAction.get_by_id( - tk_ac_ex_dbs[0].liveaction["id"] - ) + tk_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk_ac_ex_dbs[0].liveaction_id) self.assertEqual(tk_lv_ac_db.status, ac_const.LIVEACTION_STATUS_RUNNING) # Pause the subworkflow. @@ -196,7 +194,7 @@ def test_pause_workflow_cascade_down_to_subworkflow(self): self.assertEqual(len(tk_ac_ex_dbs), 1) tk_ac_ex_db = tk_ac_ex_dbs[0] - tk_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk_ac_ex_db.liveaction["id"]) + tk_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk_ac_ex_db.liveaction_id) self.assertEqual(tk_lv_ac_db.status, ac_const.LIVEACTION_STATUS_RUNNING) # Identify the records for the subworkflow. @@ -263,7 +261,7 @@ def test_pause_subworkflow_while_another_subworkflow_running(self): t1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk_ex_dbs[0].id) )[0] - t1_lv_ac_db = lv_db_access.LiveAction.get_by_id(t1_ac_ex_db.liveaction["id"]) + t1_lv_ac_db = lv_db_access.LiveAction.get_by_id(t1_ac_ex_db.liveaction_id) t1_wf_ex_db = wf_db_access.WorkflowExecution.query( action_execution=str(t1_ac_ex_db.id) )[0] @@ -273,7 +271,7 @@ def test_pause_subworkflow_while_another_subworkflow_running(self): t2_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk_ex_dbs[1].id) )[0] - t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction["id"]) + t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction_id) t2_wf_ex_db = wf_db_access.WorkflowExecution.query( action_execution=str(t2_ac_ex_db.id) )[0] @@ -291,7 +289,7 @@ def test_pause_subworkflow_while_another_subworkflow_running(self): self.assertEqual(lv_ac_db.status, ac_const.LIVEACTION_STATUS_RUNNING) # Assert the other subworkflow is still running. - t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction["id"]) + t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction_id) self.assertEqual(t2_lv_ac_db.status, ac_const.LIVEACTION_STATUS_RUNNING) # Manually notify action execution completion for the task in the subworkflow. @@ -316,7 +314,7 @@ def test_pause_subworkflow_while_another_subworkflow_running(self): self.assertEqual(lv_ac_db.status, ac_const.LIVEACTION_STATUS_RUNNING) # Assert the other subworkflow is still running. - t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction["id"]) + t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction_id) self.assertEqual(t2_lv_ac_db.status, ac_const.LIVEACTION_STATUS_RUNNING) # Manually notify action execution completion for the tasks in the other subworkflow. 
@@ -375,7 +373,7 @@ def test_pause_subworkflow_while_another_subworkflow_completed(self): t1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk_ex_dbs[0].id) )[0] - t1_lv_ac_db = lv_db_access.LiveAction.get_by_id(t1_ac_ex_db.liveaction["id"]) + t1_lv_ac_db = lv_db_access.LiveAction.get_by_id(t1_ac_ex_db.liveaction_id) t1_wf_ex_db = wf_db_access.WorkflowExecution.query( action_execution=str(t1_ac_ex_db.id) )[0] @@ -385,7 +383,7 @@ def test_pause_subworkflow_while_another_subworkflow_completed(self): t2_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk_ex_dbs[1].id) )[0] - t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction["id"]) + t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction_id) t2_wf_ex_db = wf_db_access.WorkflowExecution.query( action_execution=str(t2_ac_ex_db.id) )[0] @@ -403,7 +401,7 @@ def test_pause_subworkflow_while_another_subworkflow_completed(self): self.assertEqual(lv_ac_db.status, ac_const.LIVEACTION_STATUS_RUNNING) # Assert the other subworkflow is still running. - t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction["id"]) + t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction_id) self.assertEqual(t2_lv_ac_db.status, ac_const.LIVEACTION_STATUS_RUNNING) # Manually notify action execution completion for the tasks in the other subworkflow. @@ -441,7 +439,7 @@ def test_pause_subworkflow_while_another_subworkflow_completed(self): self.assertEqual(lv_ac_db.status, ac_const.LIVEACTION_STATUS_RUNNING) # Assert the target subworkflow is still pausing. - t1_lv_ac_db = lv_db_access.LiveAction.get_by_id(t1_ac_ex_db.liveaction["id"]) + t1_lv_ac_db = lv_db_access.LiveAction.get_by_id(t1_ac_ex_db.liveaction_id) self.assertEqual(t1_lv_ac_db.status, ac_const.LIVEACTION_STATUS_PAUSING) # Manually notify action execution completion for the task in the subworkflow. @@ -491,9 +489,7 @@ def test_resume(self): tk_ac_ex_dbs = ex_db_access.ActionExecution.query( task_execution=str(tk_ex_dbs[0].id) ) - tk_lv_ac_db = lv_db_access.LiveAction.get_by_id( - tk_ac_ex_dbs[0].liveaction["id"] - ) + tk_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk_ac_ex_dbs[0].liveaction_id) self.assertEqual(tk_ac_ex_dbs[0].status, ac_const.LIVEACTION_STATUS_SUCCEEDED) self.assertEqual(tk_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) wf_svc.handle_action_execution_completion(tk_ac_ex_dbs[0]) @@ -550,7 +546,7 @@ def test_resume_cascade_to_subworkflow(self): self.assertEqual(len(tk_ac_ex_dbs), 1) tk_ac_ex_db = tk_ac_ex_dbs[0] - tk_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk_ac_ex_db.liveaction["id"]) + tk_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk_ac_ex_db.liveaction_id) self.assertEqual(tk_lv_ac_db.status, ac_const.LIVEACTION_STATUS_RUNNING) # Identify the records for the subworkflow. 
@@ -626,7 +622,7 @@ def test_resume_from_each_subworkflow_when_parent_is_paused(self): t1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk_ex_dbs[0].id) )[0] - t1_lv_ac_db = lv_db_access.LiveAction.get_by_id(t1_ac_ex_db.liveaction["id"]) + t1_lv_ac_db = lv_db_access.LiveAction.get_by_id(t1_ac_ex_db.liveaction_id) t1_wf_ex_db = wf_db_access.WorkflowExecution.query( action_execution=str(t1_ac_ex_db.id) )[0] @@ -636,7 +632,7 @@ def test_resume_from_each_subworkflow_when_parent_is_paused(self): t2_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk_ex_dbs[1].id) )[0] - t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction["id"]) + t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction_id) t2_wf_ex_db = wf_db_access.WorkflowExecution.query( action_execution=str(t2_ac_ex_db.id) )[0] @@ -654,7 +650,7 @@ def test_resume_from_each_subworkflow_when_parent_is_paused(self): self.assertEqual(lv_ac_db.status, ac_const.LIVEACTION_STATUS_RUNNING) # Assert the other subworkflow is still running. - t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction["id"]) + t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction_id) self.assertEqual(t2_lv_ac_db.status, ac_const.LIVEACTION_STATUS_RUNNING) # Manually notify action execution completion for the task in the subworkflow. @@ -679,7 +675,7 @@ def test_resume_from_each_subworkflow_when_parent_is_paused(self): self.assertEqual(lv_ac_db.status, ac_const.LIVEACTION_STATUS_RUNNING) # Assert the other subworkflow is still running. - t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction["id"]) + t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction_id) self.assertEqual(t2_lv_ac_db.status, ac_const.LIVEACTION_STATUS_RUNNING) # Pause the other subworkflow. @@ -773,7 +769,7 @@ def test_resume_from_subworkflow_when_parent_is_paused(self): t1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk_ex_dbs[0].id) )[0] - t1_lv_ac_db = lv_db_access.LiveAction.get_by_id(t1_ac_ex_db.liveaction["id"]) + t1_lv_ac_db = lv_db_access.LiveAction.get_by_id(t1_ac_ex_db.liveaction_id) t1_wf_ex_db = wf_db_access.WorkflowExecution.query( action_execution=str(t1_ac_ex_db.id) )[0] @@ -783,7 +779,7 @@ def test_resume_from_subworkflow_when_parent_is_paused(self): t2_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk_ex_dbs[1].id) )[0] - t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction["id"]) + t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction_id) t2_wf_ex_db = wf_db_access.WorkflowExecution.query( action_execution=str(t2_ac_ex_db.id) )[0] @@ -801,7 +797,7 @@ def test_resume_from_subworkflow_when_parent_is_paused(self): self.assertEqual(lv_ac_db.status, ac_const.LIVEACTION_STATUS_RUNNING) # Assert the other subworkflow is still running. - t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction["id"]) + t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction_id) self.assertEqual(t2_lv_ac_db.status, ac_const.LIVEACTION_STATUS_RUNNING) # Manually notify action execution completion for the task in the subworkflow. @@ -826,7 +822,7 @@ def test_resume_from_subworkflow_when_parent_is_paused(self): self.assertEqual(lv_ac_db.status, ac_const.LIVEACTION_STATUS_RUNNING) # Assert the other subworkflow is still running. 
- t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction["id"]) + t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction_id) self.assertEqual(t2_lv_ac_db.status, ac_const.LIVEACTION_STATUS_RUNNING) # Manually notify action execution completion for the tasks in the other subworkflow. @@ -907,7 +903,7 @@ def test_resume_from_subworkflow_when_parent_is_paused(self): t3_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(t3_ex_db.id) )[0] - t3_lv_ac_db = lv_db_access.LiveAction.get_by_id(t3_ac_ex_db.liveaction["id"]) + t3_lv_ac_db = lv_db_access.LiveAction.get_by_id(t3_ac_ex_db.liveaction_id) self.assertEqual(t3_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) wf_svc.handle_action_execution_completion(t3_ac_ex_db) @@ -937,7 +933,7 @@ def test_resume_from_subworkflow_when_parent_is_running(self): t1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk_ex_dbs[0].id) )[0] - t1_lv_ac_db = lv_db_access.LiveAction.get_by_id(t1_ac_ex_db.liveaction["id"]) + t1_lv_ac_db = lv_db_access.LiveAction.get_by_id(t1_ac_ex_db.liveaction_id) t1_wf_ex_db = wf_db_access.WorkflowExecution.query( action_execution=str(t1_ac_ex_db.id) )[0] @@ -947,7 +943,7 @@ def test_resume_from_subworkflow_when_parent_is_running(self): t2_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk_ex_dbs[1].id) )[0] - t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction["id"]) + t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction_id) t2_wf_ex_db = wf_db_access.WorkflowExecution.query( action_execution=str(t2_ac_ex_db.id) )[0] @@ -965,7 +961,7 @@ def test_resume_from_subworkflow_when_parent_is_running(self): self.assertEqual(lv_ac_db.status, ac_const.LIVEACTION_STATUS_RUNNING) # Assert the other subworkflow is still running. - t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction["id"]) + t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction_id) self.assertEqual(t2_lv_ac_db.status, ac_const.LIVEACTION_STATUS_RUNNING) # Manually notify action execution completion for the task in the subworkflow. @@ -990,7 +986,7 @@ def test_resume_from_subworkflow_when_parent_is_running(self): self.assertEqual(lv_ac_db.status, ac_const.LIVEACTION_STATUS_RUNNING) # Assert the other subworkflow is still running. - t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction["id"]) + t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction_id) self.assertEqual(t2_lv_ac_db.status, ac_const.LIVEACTION_STATUS_RUNNING) # Resume the subworkflow and assert it is running. @@ -1005,7 +1001,7 @@ def test_resume_from_subworkflow_when_parent_is_running(self): self.assertEqual(lv_ac_db.status, ac_const.LIVEACTION_STATUS_RUNNING) # Assert the other subworkflow is still running. - t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction["id"]) + t2_lv_ac_db = lv_db_access.LiveAction.get_by_id(t2_ac_ex_db.liveaction_id) self.assertEqual(t2_lv_ac_db.status, ac_const.LIVEACTION_STATUS_RUNNING) # Manually notify action execution completion for the tasks in the subworkflow. 
@@ -1071,7 +1067,7 @@ def test_resume_from_subworkflow_when_parent_is_running(self): t3_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(t3_ex_db.id) )[0] - t3_lv_ac_db = lv_db_access.LiveAction.get_by_id(t3_ac_ex_db.liveaction["id"]) + t3_lv_ac_db = lv_db_access.LiveAction.get_by_id(t3_ac_ex_db.liveaction_id) self.assertEqual(t3_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) wf_svc.handle_action_execution_completion(t3_ac_ex_db) diff --git a/contrib/runners/orquesta_runner/tests/unit/test_rerun.py b/contrib/runners/orquesta_runner/tests/unit/test_rerun.py index 420b909e27..22c40aa8c1 100644 --- a/contrib/runners/orquesta_runner/tests/unit/test_rerun.py +++ b/contrib/runners/orquesta_runner/tests/unit/test_rerun.py @@ -127,7 +127,7 @@ def test_rerun_workflow(self): tk1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk1_ex_db.id) )[0] - tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction["id"]) + tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction_id) self.assertEqual(tk1_lv_ac_db.status, action_constants.LIVEACTION_STATUS_FAILED) workflow_service.handle_action_execution_completion(tk1_ac_ex_db) tk1_ex_db = wf_db_access.TaskExecution.get_by_id(tk1_ex_db.id) @@ -166,7 +166,7 @@ def test_rerun_workflow(self): tk1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk1_ex_db.id) )[0] - tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction["id"]) + tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction_id) self.assertEqual( tk1_lv_ac_db.status, action_constants.LIVEACTION_STATUS_SUCCEEDED ) @@ -196,7 +196,7 @@ def test_rerun_with_missing_workflow_execution_id(self): tk1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk1_ex_db.id) )[0] - tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction["id"]) + tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction_id) self.assertEqual(tk1_lv_ac_db.status, action_constants.LIVEACTION_STATUS_FAILED) workflow_service.handle_action_execution_completion(tk1_ac_ex_db) tk1_ex_db = wf_db_access.TaskExecution.get_by_id(tk1_ex_db.id) @@ -264,7 +264,7 @@ def test_rerun_with_invalid_workflow_execution(self): tk1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk1_ex_db.id) )[0] - tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction["id"]) + tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction_id) self.assertEqual(tk1_lv_ac_db.status, action_constants.LIVEACTION_STATUS_FAILED) workflow_service.handle_action_execution_completion(tk1_ac_ex_db) tk1_ex_db = wf_db_access.TaskExecution.get_by_id(tk1_ex_db.id) @@ -322,7 +322,7 @@ def test_rerun_workflow_still_running(self): tk1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk1_ex_db.id) )[0] - tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction["id"]) + tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction_id) self.assertEqual( tk1_lv_ac_db.status, action_constants.LIVEACTION_STATUS_RUNNING ) @@ -381,7 +381,7 @@ def test_rerun_with_unexpected_error(self): tk1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk1_ex_db.id) )[0] - tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction["id"]) + tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction_id) self.assertEqual(tk1_lv_ac_db.status, action_constants.LIVEACTION_STATUS_FAILED) 
workflow_service.handle_action_execution_completion(tk1_ac_ex_db) tk1_ex_db = wf_db_access.TaskExecution.get_by_id(tk1_ex_db.id) @@ -436,7 +436,7 @@ def test_rerun_workflow_already_succeeded(self): tk1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk1_ex_db.id) )[0] - tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction["id"]) + tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction_id) self.assertEqual( tk1_lv_ac_db.status, action_constants.LIVEACTION_STATUS_SUCCEEDED ) @@ -450,7 +450,7 @@ def test_rerun_workflow_already_succeeded(self): tk2_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk2_ex_db.id) )[0] - tk2_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk2_ac_ex_db.liveaction["id"]) + tk2_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk2_ac_ex_db.liveaction_id) self.assertEqual( tk2_lv_ac_db.status, action_constants.LIVEACTION_STATUS_SUCCEEDED ) @@ -464,7 +464,7 @@ def test_rerun_workflow_already_succeeded(self): tk3_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk3_ex_db.id) )[0] - tk3_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk3_ac_ex_db.liveaction["id"]) + tk3_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk3_ac_ex_db.liveaction_id) self.assertEqual( tk3_lv_ac_db.status, action_constants.LIVEACTION_STATUS_SUCCEEDED ) @@ -505,7 +505,7 @@ def test_rerun_workflow_already_succeeded(self): tk1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk1_ex_db.id) )[0] - tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction["id"]) + tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction_id) self.assertEqual( tk1_lv_ac_db.status, action_constants.LIVEACTION_STATUS_SUCCEEDED ) @@ -522,7 +522,7 @@ def test_rerun_workflow_already_succeeded(self): tk2_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk2_ex_db.id) )[0] - tk2_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk2_ac_ex_db.liveaction["id"]) + tk2_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk2_ac_ex_db.liveaction_id) self.assertEqual( tk2_lv_ac_db.status, action_constants.LIVEACTION_STATUS_SUCCEEDED ) @@ -539,7 +539,7 @@ def test_rerun_workflow_already_succeeded(self): tk3_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk3_ex_db.id) )[0] - tk3_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk3_ac_ex_db.liveaction["id"]) + tk3_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk3_ac_ex_db.liveaction_id) self.assertEqual( tk3_lv_ac_db.status, action_constants.LIVEACTION_STATUS_SUCCEEDED ) diff --git a/contrib/runners/orquesta_runner/tests/unit/test_with_items.py b/contrib/runners/orquesta_runner/tests/unit/test_with_items.py index 8e8b67bd94..de9b0bea07 100644 --- a/contrib/runners/orquesta_runner/tests/unit/test_with_items.py +++ b/contrib/runners/orquesta_runner/tests/unit/test_with_items.py @@ -368,7 +368,7 @@ def test_with_items_cancellation(self): # Manually succeed the action executions and process completion. for ac_ex in t1_ac_ex_dbs: self.set_execution_status( - ac_ex.liveaction["id"], action_constants.LIVEACTION_STATUS_SUCCEEDED + ac_ex.liveaction_id, action_constants.LIVEACTION_STATUS_SUCCEEDED ) t1_ac_ex_dbs = ex_db_access.ActionExecution.query( @@ -440,7 +440,7 @@ def test_with_items_concurrency_cancellation(self): # Manually succeed the action executions and process completion. 
for ac_ex in t1_ac_ex_dbs: self.set_execution_status( - ac_ex.liveaction["id"], action_constants.LIVEACTION_STATUS_SUCCEEDED + ac_ex.liveaction_id, action_constants.LIVEACTION_STATUS_SUCCEEDED ) t1_ac_ex_dbs = ex_db_access.ActionExecution.query( @@ -509,7 +509,7 @@ def test_with_items_pause_and_resume(self): # Manually succeed the action executions and process completion. for ac_ex in t1_ac_ex_dbs: self.set_execution_status( - ac_ex.liveaction["id"], action_constants.LIVEACTION_STATUS_SUCCEEDED + ac_ex.liveaction_id, action_constants.LIVEACTION_STATUS_SUCCEEDED ) t1_ac_ex_dbs = ex_db_access.ActionExecution.query( @@ -599,7 +599,7 @@ def test_with_items_concurrency_pause_and_resume(self): # Manually succeed the action executions and process completion. for ac_ex in t1_ac_ex_dbs: self.set_execution_status( - ac_ex.liveaction["id"], action_constants.LIVEACTION_STATUS_SUCCEEDED + ac_ex.liveaction_id, action_constants.LIVEACTION_STATUS_SUCCEEDED ) t1_ac_ex_dbs = ex_db_access.ActionExecution.query( diff --git a/st2actions/st2actions/container/base.py b/st2actions/st2actions/container/base.py index 67f3bf6fc8..77317fa9e6 100644 --- a/st2actions/st2actions/container/base.py +++ b/st2actions/st2actions/container/base.py @@ -141,11 +141,14 @@ def _do_run(self, runner): ): queries.setup_query(runner.liveaction.id, runner.runner_type, context) except: - LOG.exception("Failed to run action.") _, ex, tb = sys.exc_info() # mark execution as failed. status = action_constants.LIVEACTION_STATUS_FAILED # include the error message and traceback to try and provide some hints. + LOG.exception( + "Failed to run action. traceback: %s" + % "".join(traceback.format_tb(tb, 20)) + ) result = { "error": str(ex), "traceback": "".join(traceback.format_tb(tb, 20)), @@ -460,7 +463,7 @@ def _get_runner(self, runner_type_db, action_db, liveaction_db): runner.action_name = action_db.name runner.liveaction = liveaction_db runner.liveaction_id = str(liveaction_db.id) - runner.execution = ActionExecution.get(liveaction__id=runner.liveaction_id) + runner.execution = ActionExecution.get(liveaction_id=str(runner.liveaction_id)) runner.execution_id = str(runner.execution.id) runner.entry_point = resolved_entry_point runner.context = context diff --git a/st2actions/st2actions/notifier/notifier.py b/st2actions/st2actions/notifier/notifier.py index ea1a537733..2b8fe49059 100644 --- a/st2actions/st2actions/notifier/notifier.py +++ b/st2actions/st2actions/notifier/notifier.py @@ -83,7 +83,7 @@ def process(self, execution_db): LOG.debug('Processing action execution "%s".', execution_id, extra=extra) # Get the corresponding liveaction record. 
- liveaction_db = LiveAction.get_by_id(execution_db.liveaction["id"]) + liveaction_db = LiveAction.get_by_id(execution_db.liveaction_id) if execution_db.status in LIVEACTION_COMPLETED_STATES: # If the action execution is executed under an orquesta workflow, policies for the diff --git a/st2actions/st2actions/policies/concurrency_by_attr.py b/st2actions/st2actions/policies/concurrency_by_attr.py index 11e0cdf4da..0308c1dc88 100644 --- a/st2actions/st2actions/policies/concurrency_by_attr.py +++ b/st2actions/st2actions/policies/concurrency_by_attr.py @@ -15,10 +15,9 @@ from __future__ import absolute_import -import six - from st2common.constants import action as action_constants from st2common import log as logging +from st2common.fields import JSONDictEscapedFieldCompatibilityField from st2common.persistence import action as action_access from st2common.services import action as action_service from st2common.policies.concurrency import BaseConcurrencyApplicator @@ -41,31 +40,43 @@ def __init__( ) self.attributes = attributes or [] - def _get_filters(self, target): - filters = { - ("parameters__%s" % k): v - for k, v in six.iteritems(target.parameters) - if k in self.attributes - } - - filters["action"] = target.action - filters["status"] = None - - return filters - def _apply_before(self, target): - # Get the count of scheduled and running instances of the action. - filters = self._get_filters(target) - # Get the count of scheduled instances of the action. - filters["status"] = action_constants.LIVEACTION_STATUS_SCHEDULED - scheduled = action_access.LiveAction.count(**filters) + scheduled_filters = { + "status": action_constants.LIVEACTION_STATUS_SCHEDULED, + "action": target.action, + } + scheduled = [i for i in action_access.LiveAction.query(**scheduled_filters)] - # Get the count of running instances of the action. - filters["status"] = action_constants.LIVEACTION_STATUS_RUNNING - running = action_access.LiveAction.count(**filters) + running_filters = { + "status": action_constants.LIVEACTION_STATUS_RUNNING, + "action": target.action, + } + running = [i for i in action_access.LiveAction.query(**running_filters)] + running.extend(scheduled) + count = 0 + target_parameters = JSONDictEscapedFieldCompatibilityField().parse_field_value( + target.parameters + ) + target_key_value_policy_attributes = { + k: v for k, v in target_parameters.items() if k in self.attributes + } - count = scheduled + running + for i in running: + running_event_parameters = ( + JSONDictEscapedFieldCompatibilityField().parse_field_value(i.parameters) + ) + # list of event parameter values that are also in policy + running_event_policy_item_key_value_attributes = { + k: v + for k, v in running_event_parameters.items() + if k in self.attributes + } + if ( + running_event_policy_item_key_value_attributes + == target_key_value_policy_attributes + ): + count += 1 # Mark the execution as scheduled if threshold is not reached or delayed otherwise. if count < self.threshold: diff --git a/st2actions/st2actions/scheduler/entrypoint.py b/st2actions/st2actions/scheduler/entrypoint.py index 14d816ded3..47e3295a5d 100644 --- a/st2actions/st2actions/scheduler/entrypoint.py +++ b/st2actions/st2actions/scheduler/entrypoint.py @@ -97,7 +97,7 @@ def _create_execution_queue_item_db_from_liveaction(self, liveaction, delay=None """ Create ActionExecutionSchedulingQueueItemDB from live action. 
""" - execution = ActionExecution.get(liveaction__id=str(liveaction.id)) + execution = ActionExecution.get(liveaction_id=str(liveaction.id)) execution_queue_item_db = ActionExecutionSchedulingQueueItemDB() execution_queue_item_db.action_execution_id = str(execution.id) diff --git a/st2actions/st2actions/scheduler/handler.py b/st2actions/st2actions/scheduler/handler.py index b6eb994f72..abe10d91f3 100644 --- a/st2actions/st2actions/scheduler/handler.py +++ b/st2actions/st2actions/scheduler/handler.py @@ -136,7 +136,7 @@ def _fix_missing_action_execution_id(self): for entry in ActionExecutionSchedulingQueue.query( action_execution_id__in=["", None] ): - execution_db = ActionExecution.get(liveaction__id=entry.liveaction_id) + execution_db = ActionExecution.get(liveaction_id=entry.liveaction_id) if not execution_db: continue diff --git a/st2actions/st2actions/worker.py b/st2actions/st2actions/worker.py index 30af0d56a7..b1d3fc790e 100644 --- a/st2actions/st2actions/worker.py +++ b/st2actions/st2actions/worker.py @@ -235,7 +235,7 @@ def _run_action(self, liveaction_db): return result def _cancel_action(self, liveaction_db): - action_execution_db = ActionExecution.get(liveaction__id=str(liveaction_db.id)) + action_execution_db = ActionExecution.get(liveaction_id=str(liveaction_db.id)) extra = { "action_execution_db": action_execution_db, "liveaction_db": liveaction_db, @@ -265,7 +265,7 @@ def _cancel_action(self, liveaction_db): return result def _pause_action(self, liveaction_db): - action_execution_db = ActionExecution.get(liveaction__id=str(liveaction_db.id)) + action_execution_db = ActionExecution.get(liveaction_id=str(liveaction_db.id)) extra = { "action_execution_db": action_execution_db, "liveaction_db": liveaction_db, @@ -294,7 +294,7 @@ def _pause_action(self, liveaction_db): return result def _resume_action(self, liveaction_db): - action_execution_db = ActionExecution.get(liveaction__id=str(liveaction_db.id)) + action_execution_db = ActionExecution.get(liveaction_id=str(liveaction_db.id)) extra = { "action_execution_db": action_execution_db, "liveaction_db": liveaction_db, diff --git a/st2actions/st2actions/workflows/workflows.py b/st2actions/st2actions/workflows/workflows.py index 700244d058..6672069e6f 100644 --- a/st2actions/st2actions/workflows/workflows.py +++ b/st2actions/st2actions/workflows/workflows.py @@ -100,6 +100,7 @@ def process(self, message): # error handling routine will fail as well because it will try to update # the database and fail the workflow execution gracefully. In this case, # the garbage collector will find and cancel these workflow executions. + LOG.error(e, exc_info=True) self.fail_workflow_execution(message, e) finally: with self._semaphore: @@ -132,7 +133,7 @@ def shutdown(self): if cfg.CONF.coordination.service_registry and not member_ids: ac_ex_dbs = self._get_running_workflows() for ac_ex_db in ac_ex_dbs: - lv_ac = action_utils.get_liveaction_by_id(ac_ex_db.liveaction["id"]) + lv_ac = action_utils.get_liveaction_by_id(ac_ex_db.liveaction_id) ac_svc.request_pause(lv_ac, WORKFLOW_ENGINE_START_STOP_SEQ) def _get_running_workflows(self): @@ -251,7 +252,7 @@ def handle_action_execution(self, ac_ex_db): return # Apply post run policies. - lv_ac_db = lv_db_access.LiveAction.get_by_id(ac_ex_db.liveaction["id"]) + lv_ac_db = lv_db_access.LiveAction.get_by_id(ac_ex_db.liveaction_id) pc_svc.apply_post_run_policies(lv_ac_db) # Process completion of the action execution. 
diff --git a/st2actions/tests/unit/policies/test_concurrency.py b/st2actions/tests/unit/policies/test_concurrency.py index 7612bd5396..7d92edbdbb 100644 --- a/st2actions/tests/unit/policies/test_concurrency.py +++ b/st2actions/tests/unit/policies/test_concurrency.py @@ -215,7 +215,7 @@ def test_over_threshold_delay_executions(self): self.assertEqual(expected_num_exec, runner.MockActionRunner.run.call_count) # Check the status changes. - execution = ActionExecution.get(liveaction__id=str(liveaction.id)) + execution = ActionExecution.get(liveaction_id=str(liveaction.id)) expected_status_changes = [ "requested", "delayed", diff --git a/st2actions/tests/unit/policies/test_retry_policy.py b/st2actions/tests/unit/policies/test_retry_policy.py index 8e16a7029c..609a607b95 100644 --- a/st2actions/tests/unit/policies/test_retry_policy.py +++ b/st2actions/tests/unit/policies/test_retry_policy.py @@ -128,7 +128,7 @@ def test_retry_on_timeout_first_retry_is_successful(self): self.assertEqual(action_execution_dbs[1].status, LIVEACTION_STATUS_REQUESTED) # Verify retried execution contains policy related context - original_liveaction_id = action_execution_dbs[0].liveaction["id"] + original_liveaction_id = action_execution_dbs[0].liveaction_id context = action_execution_dbs[1].context self.assertIn("policies", context) @@ -183,7 +183,7 @@ def test_retry_on_timeout_policy_is_retried_twice(self): self.assertEqual(action_execution_dbs[1].status, LIVEACTION_STATUS_REQUESTED) # Verify retried execution contains policy related context - original_liveaction_id = action_execution_dbs[0].liveaction["id"] + original_liveaction_id = action_execution_dbs[0].liveaction_id context = action_execution_dbs[1].context self.assertIn("policies", context) @@ -216,7 +216,7 @@ def test_retry_on_timeout_policy_is_retried_twice(self): self.assertEqual(action_execution_dbs[2].status, LIVEACTION_STATUS_REQUESTED) # Verify retried execution contains policy related context - original_liveaction_id = action_execution_dbs[1].liveaction["id"] + original_liveaction_id = action_execution_dbs[1].liveaction_id context = action_execution_dbs[2].context self.assertIn("policies", context) diff --git a/st2actions/tests/unit/test_executions.py b/st2actions/tests/unit/test_executions.py index dcb50c70f5..52c8e4f20f 100644 --- a/st2actions/tests/unit/test_executions.py +++ b/st2actions/tests/unit/test_executions.py @@ -96,7 +96,7 @@ def test_basic_execution(self): ) execution = self._get_action_execution( - liveaction__id=str(liveaction.id), raise_exception=True + liveaction_id=str(liveaction.id), raise_exception=True ) self.assertDictEqual(execution.trigger, {}) @@ -118,8 +118,7 @@ def test_basic_execution(self): self.assertEqual(execution.result, liveaction.result) self.assertEqual(execution.status, liveaction.status) self.assertEqual(execution.context, liveaction.context) - self.assertEqual(execution.liveaction["callback"], liveaction.callback) - self.assertEqual(execution.liveaction["action"], liveaction.action) + self.assertEqual(execution.liveaction_id, str(liveaction.id)) def test_basic_execution_history_create_failed(self): MOCK_FAIL_EXECUTION_CREATE = True # noqa @@ -133,7 +132,7 @@ def test_chained_executions(self): ) execution = self._get_action_execution( - liveaction__id=str(liveaction.id), raise_exception=True + liveaction_id=str(liveaction.id), raise_exception=True ) action = action_utils.get_action_by_ref("executions.chain") @@ -151,8 +150,7 @@ def test_chained_executions(self): self.assertEqual(execution.result, liveaction.result) 
self.assertEqual(execution.status, liveaction.status) self.assertEqual(execution.context, liveaction.context) - self.assertEqual(execution.liveaction["callback"], liveaction.callback) - self.assertEqual(execution.liveaction["action"], liveaction.action) + self.assertEqual(execution.liveaction_id, str(liveaction.id)) self.assertGreater(len(execution.children), 0) for child in execution.children: @@ -199,7 +197,7 @@ def test_triggered_execution(self): ) execution = self._get_action_execution( - liveaction__id=str(liveaction.id), raise_exception=True + liveaction_id=str(liveaction.id), raise_exception=True ) self.assertDictEqual(execution.trigger, vars(TriggerAPI.from_model(trigger))) @@ -226,8 +224,7 @@ def test_triggered_execution(self): self.assertEqual(execution.result, liveaction.result) self.assertEqual(execution.status, liveaction.status) self.assertEqual(execution.context, liveaction.context) - self.assertEqual(execution.liveaction["callback"], liveaction.callback) - self.assertEqual(execution.liveaction["action"], liveaction.action) + self.assertEqual(execution.liveaction_id, str(liveaction.id)) def _get_action_execution(self, **kwargs): return ActionExecution.get(**kwargs) diff --git a/st2actions/tests/unit/test_notifier.py b/st2actions/tests/unit/test_notifier.py index f599cea08a..45b06f730e 100644 --- a/st2actions/tests/unit/test_notifier.py +++ b/st2actions/tests/unit/test_notifier.py @@ -26,7 +26,6 @@ from st2common.constants.action import LIVEACTION_COMPLETED_STATES from st2common.constants.action import LIVEACTION_STATUSES from st2common.constants.triggers import INTERNAL_TRIGGER_TYPES -from st2common.models.api.action import LiveActionAPI from st2common.models.db.action import ActionDB from st2common.models.db.execution import ActionExecutionDB from st2common.models.db.liveaction import LiveActionDB @@ -133,7 +132,7 @@ def test_notify_triggers(self): LiveAction.add_or_update(liveaction_db) execution = MOCK_EXECUTION - execution.liveaction = vars(LiveActionAPI.from_model(liveaction_db)) + execution.liveaction_id = str(liveaction_db.id) execution.status = liveaction_db.status dispatcher = NotifierTestCase.MockDispatcher(self) @@ -183,7 +182,7 @@ def test_notify_triggers_end_timestamp_none(self): LiveAction.add_or_update(liveaction_db) execution = MOCK_EXECUTION - execution.liveaction = vars(LiveActionAPI.from_model(liveaction_db)) + execution.liveaction_id = str(liveaction_db.id) execution.status = liveaction_db.status dispatcher = NotifierTestCase.MockDispatcher(self) @@ -236,7 +235,7 @@ def test_notify_triggers_jinja_patterns(self, dispatch): LiveAction.add_or_update(liveaction_db) execution = MOCK_EXECUTION - execution.liveaction = vars(LiveActionAPI.from_model(liveaction_db)) + execution.liveaction_id = str(liveaction_db.id) execution.status = liveaction_db.status notifier = Notifier(connection=None, queues=[]) @@ -268,7 +267,7 @@ def test_post_generic_trigger_emit_when_default_value_is_used(self, dispatch): liveaction_db = LiveActionDB(action="core.local") liveaction_db.status = status execution = MOCK_EXECUTION - execution.liveaction = vars(LiveActionAPI.from_model(liveaction_db)) + execution.liveaction_id = str(liveaction_db.id) execution.status = liveaction_db.status notifier = Notifier(connection=None, queues=[]) @@ -305,7 +304,7 @@ def test_post_generic_trigger_with_emit_condition(self, dispatch): liveaction_db = LiveActionDB(action="core.local") liveaction_db.status = status execution = MOCK_EXECUTION - execution.liveaction = 
vars(LiveActionAPI.from_model(liveaction_db)) + execution.liveaction_id = str(liveaction_db.id) execution.status = liveaction_db.status notifier = Notifier(connection=None, queues=[]) @@ -352,7 +351,7 @@ def test_process_post_generic_notify_trigger_on_completed_state_default( liveaction_db = LiveActionDB(id=bson.ObjectId(), action="core.local") liveaction_db.status = status execution = MOCK_EXECUTION - execution.liveaction = vars(LiveActionAPI.from_model(liveaction_db)) + execution.liveaction_id = str(liveaction_db.id) execution.status = liveaction_db.status mock_LiveAction.get_by_id.return_value = liveaction_db @@ -402,7 +401,7 @@ def test_process_post_generic_notify_trigger_on_custom_emit_when_states( liveaction_db = LiveActionDB(id=bson.ObjectId(), action="core.local") liveaction_db.status = status execution = MOCK_EXECUTION - execution.liveaction = vars(LiveActionAPI.from_model(liveaction_db)) + execution.liveaction_id = str(liveaction_db.id) execution.status = liveaction_db.status mock_LiveAction.get_by_id.return_value = liveaction_db diff --git a/st2api/st2api/controllers/v1/actionexecutions.py b/st2api/st2api/controllers/v1/actionexecutions.py index 70d709192e..40f52dbcc5 100644 --- a/st2api/st2api/controllers/v1/actionexecutions.py +++ b/st2api/st2api/controllers/v1/actionexecutions.py @@ -39,6 +39,7 @@ from st2common.exceptions import apivalidation as validation_exc from st2common.exceptions import param as param_exc from st2common.exceptions import trace as trace_exc +from st2common.fields import JSONDictEscapedFieldCompatibilityField from st2common.models.api.action import LiveActionAPI from st2common.models.api.action import LiveActionCreateAPI from st2common.models.api.base import cast_argument_value @@ -135,7 +136,6 @@ def _handle_schedule_execution( rbac_utils.assert_user_is_admin_if_user_query_param_is_provided( user_db=requester_user, user=user ) - try: return self._schedule_execution( liveaction=liveaction_api, @@ -205,7 +205,6 @@ def _schedule_execution( runnertype_db = action_utils.get_runnertype_by_name( action_db.runner_type["name"] ) - try: liveaction_db.parameters = param_utils.render_live_params( runnertype_db.runner_parameters, @@ -241,7 +240,6 @@ def _schedule_execution( liveaction_db, actionexecution_db = action_service.create_request( liveaction=liveaction_db, action_db=action_db, runnertype_db=runnertype_db ) - _, actionexecution_db = action_service.publish_request( liveaction_db, actionexecution_db ) @@ -416,36 +414,19 @@ def get( :rtype: ``str`` """ - # NOTE: Here we intentionally use as_pymongo() to avoid mongoengine layer even for old style - # data + # NOTE: we need to use to_python() to uncompress the data try: result = ( - self.access.impl.model.objects.filter(id=id) - .only("result") - .as_pymongo()[0] + self.access.impl.model.objects.filter(id=id).only("result")[0].result ) except IndexError: raise NotFoundException("Execution with id %s not found" % (id)) - if isinstance(result["result"], dict): - # For backward compatibility we also support old non JSON field storage format - if pretty_format: - response_body = orjson.dumps( - result["result"], option=orjson.OPT_INDENT_2 - ) - else: - response_body = orjson.dumps(result["result"]) + # For backward compatibility we also support old non JSON field storage format + if pretty_format: + response_body = orjson.dumps(result, option=orjson.OPT_INDENT_2) else: - # For new JSON storage format we just use raw value since it's already JSON serialized - # string - response_body = result["result"] - - if 
pretty_format: - # Pretty format is not a default behavior since it adds quite some overhead (e.g. - # 10-30ms for non pretty format for 4 MB json vs ~120 ms for pretty formatted) - response_body = orjson.dumps( - orjson.loads(result["result"]), option=orjson.OPT_INDENT_2 - ) + response_body = orjson.dumps(result) response = Response() response.headers["Content-Type"] = "text/json" @@ -634,8 +615,14 @@ def post(self, spec_api, id, requester_user, no_merge=False, show_secrets=False) # Merge in any parameters provided by the user new_parameters = {} + original_parameters = getattr(existing_execution, "parameters", b"{}") + original_params_decoded = ( + JSONDictEscapedFieldCompatibilityField().parse_field_value( + original_parameters + ) + ) if not no_merge: - new_parameters.update(getattr(existing_execution, "parameters", {})) + new_parameters.update(original_params_decoded) new_parameters.update(spec_api.parameters) # Create object for the new execution @@ -842,7 +829,7 @@ def put(self, id, liveaction_api, requester_user, show_secrets=False): if not execution_api: abort(http_client.NOT_FOUND, "Execution with id %s not found." % id) - liveaction_id = execution_api.liveaction["id"] + liveaction_id = execution_api.liveaction_id if not liveaction_id: abort( http_client.INTERNAL_SERVER_ERROR, @@ -867,7 +854,7 @@ def update_status(liveaction_api, liveaction_db): liveaction_db, status, result, set_result_size=True ) actionexecution_db = ActionExecution.get( - liveaction__id=str(liveaction_db.id) + liveaction_id=str(liveaction_db.id) ) return (liveaction_db, actionexecution_db) @@ -971,7 +958,7 @@ def delete(self, id, requester_user, show_secrets=False): if not execution_api: abort(http_client.NOT_FOUND, "Execution with id %s not found." % id) - liveaction_id = execution_api.liveaction["id"] + liveaction_id = execution_api.liveaction_id if not liveaction_id: abort( http_client.INTERNAL_SERVER_ERROR, diff --git a/st2api/st2api/controllers/v1/aliasexecution.py b/st2api/st2api/controllers/v1/aliasexecution.py index 4e0f780896..f25704e67b 100644 --- a/st2api/st2api/controllers/v1/aliasexecution.py +++ b/st2api/st2api/controllers/v1/aliasexecution.py @@ -182,7 +182,6 @@ def _post(self, payload, requester_user, show_secrets=False, match_multiple=Fals show_secrets=show_secrets, requester_user=requester_user, ) - result = { "execution": execution, "actionalias": ActionAliasAPI.from_model(action_alias_db), diff --git a/st2api/st2api/controllers/v1/execution_views.py b/st2api/st2api/controllers/v1/execution_views.py index f4240b94ab..a051192515 100644 --- a/st2api/st2api/controllers/v1/execution_views.py +++ b/st2api/st2api/controllers/v1/execution_views.py @@ -31,7 +31,8 @@ SUPPORTED_FILTERS = { "action": "action.ref", "status": "status", - "liveaction": "liveaction.id", + "liveaction_id": "liveaction_id", + "liveaction": "liveaction_id", "parent": "parent", "rule": "rule.name", "runner": "runner.name", @@ -54,7 +55,14 @@ # List of filters that are too broad to distinct by them and are very likely to represent 1 to 1 # relation between filter and particular history record. 
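Aside on the /result endpoint change above: with results stored as compressed JSON blobs, the controller now reads the field through the document (so the field's to_python/uncompress path runs) and re-serializes it with orjson, with pretty formatting as an opt-in. A small sketch of the two response-body shapes, assuming an already-decoded result dict:

    import orjson

    result = {"return_code": 0, "stdout": "hello"}

    compact_body = orjson.dumps(result)                             # default response body
    pretty_body = orjson.dumps(result, option=orjson.OPT_INDENT_2)  # when pretty_format is requested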
-IGNORE_FILTERS = ["parent", "timestamp", "liveaction", "trigger_instance"] +# tldr: these filters represent MANY distinct possibilities +IGNORE_FILTERS = [ + "parent", + "timestamp", + "liveaction", + "liveaction_id", + "trigger_instance", +] class FiltersController(object): diff --git a/st2api/tests/unit/controllers/v1/test_alias_execution.py b/st2api/tests/unit/controllers/v1/test_alias_execution.py index e30b754196..65a5f134d5 100644 --- a/st2api/tests/unit/controllers/v1/test_alias_execution.py +++ b/st2api/tests/unit/controllers/v1/test_alias_execution.py @@ -325,7 +325,6 @@ def test_match_and_execute_list_action_param_str_cast_to_list(self): result = resp.json["results"][0] live_action = result["execution"]["liveaction"] action_alias = result["actionalias"] - self.assertEqual(resp.status_int, 201) self.assertTrue(isinstance(live_action["parameters"]["array_param"], list)) self.assertEqual(live_action["parameters"]["array_param"][0], "one") diff --git a/st2api/tests/unit/controllers/v1/test_executions.py b/st2api/tests/unit/controllers/v1/test_executions.py index ea5e45b6fe..64a419b546 100644 --- a/st2api/tests/unit/controllers/v1/test_executions.py +++ b/st2api/tests/unit/controllers/v1/test_executions.py @@ -34,6 +34,7 @@ from st2common.models.db.auth import UserDB from st2common.models.db.execution import ActionExecutionDB from st2common.models.db.execution import ActionExecutionOutputDB +from st2common.models.db.liveaction import LiveActionDB from st2common.models.db.keyvalue import KeyValuePairDB from st2common.persistence.execution import ActionExecution from st2common.persistence.execution import ActionExecutionOutput @@ -2008,9 +2009,19 @@ def test_get_output_running_execution(self): status=status, action={"ref": "core.local"}, runner={"name": "local-shell-cmd"}, - liveaction={"ref": "foo"}, + liveaction_id="54c6b6d60640fd4f5354e74a", ) action_execution_db = ActionExecution.add_or_update(action_execution_db) + liveaction_db = LiveActionDB( + id="54c6b6d60640fd4f5354e74a", + start_timestamp=timestamp, + end_timestamp=timestamp, + status=status, + action="core.local", + runner_info={"name": "local-shell-cmd"}, + ) + + LiveAction.add_or_update(liveaction_db) output_params = dict( execution_id=str(action_execution_db.id), @@ -2089,9 +2100,19 @@ def test_get_output_finished_execution(self): status=status, action={"ref": "core.local"}, runner={"name": "local-shell-cmd"}, - liveaction={"ref": "foo"}, + liveaction_id="54c6b6d60640fd4f5354e74a", ) action_execution_db = ActionExecution.add_or_update(action_execution_db) + liveaction_db = LiveActionDB( + id="54c6b6d60640fd4f5354e74a", + start_timestamp=timestamp, + end_timestamp=timestamp, + status=status, + action="core.local", + runner_info={"name": "local-shell-cmd"}, + ) + + LiveAction.add_or_update(liveaction_db) for i in range(1, 6): stdout_db = ActionExecutionOutputDB( diff --git a/st2api/tests/unit/controllers/v1/test_executions_descendants.py b/st2api/tests/unit/controllers/v1/test_executions_descendants.py index 55b1c12f53..88c1574d29 100644 --- a/st2api/tests/unit/controllers/v1/test_executions_descendants.py +++ b/st2api/tests/unit/controllers/v1/test_executions_descendants.py @@ -31,7 +31,8 @@ "child1_level3.yaml", "child2_level3.yaml", "child3_level3.yaml", - ] + ], + "liveactions": ["liveaction_fake.yaml"], } @@ -40,7 +41,9 @@ class ActionExecutionControllerTestCaseDescendantsTest(FunctionalTest): def setUpClass(cls): super(ActionExecutionControllerTestCaseDescendantsTest, cls).setUpClass() cls.MODELS = 
FixturesLoader().save_fixtures_to_db( - fixtures_pack=DESCENDANTS_PACK, fixtures_dict=DESCENDANTS_FIXTURES + fixtures_pack=DESCENDANTS_PACK, + fixtures_dict=DESCENDANTS_FIXTURES, + use_object_ids=True, ) def test_get_all_descendants(self): diff --git a/st2api/tests/unit/controllers/v1/test_executions_filters.py b/st2api/tests/unit/controllers/v1/test_executions_filters.py index af451ca519..c916252c9e 100644 --- a/st2api/tests/unit/controllers/v1/test_executions_filters.py +++ b/st2api/tests/unit/controllers/v1/test_executions_filters.py @@ -33,7 +33,9 @@ from st2api.controllers.v1.actionexecutions import ActionExecutionsController from st2api.controllers.v1.execution_views import FILTERS_WITH_VALID_NULL_VALUES from st2common.persistence.execution import ActionExecution +from st2common.persistence.action import LiveAction from st2common.models.api.execution import ActionExecutionAPI +from st2common.models.api.execution import LiveActionAPI class TestActionExecutionFilters(FunctionalTest): @@ -60,8 +62,10 @@ def setUpClass(cls): "rule": copy.deepcopy(fixture.ARTIFACTS["rule"]), "action": copy.deepcopy(fixture.ARTIFACTS["actions"]["chain"]), "runner": copy.deepcopy(fixture.ARTIFACTS["runners"]["action-chain"]), - "liveaction": copy.deepcopy( - fixture.ARTIFACTS["liveactions"]["workflow"] + "liveaction_id": fixture.ARTIFACTS["liveactions"]["workflow"]["id"], + "status": fixture.ARTIFACTS["liveactions"]["workflow"]["status"], + "result": copy.deepcopy( + fixture.ARTIFACTS["liveactions"]["workflow"]["result"] ), "context": copy.deepcopy(fixture.ARTIFACTS["context"]), "children": [], @@ -69,7 +73,11 @@ def setUpClass(cls): { "action": copy.deepcopy(fixture.ARTIFACTS["actions"]["local"]), "runner": copy.deepcopy(fixture.ARTIFACTS["runners"]["run-local"]), - "liveaction": copy.deepcopy(fixture.ARTIFACTS["liveactions"]["task1"]), + "liveaction_id": fixture.ARTIFACTS["liveactions"]["task1"]["id"], + "status": fixture.ARTIFACTS["liveactions"]["task1"]["status"], + "result": copy.deepcopy( + fixture.ARTIFACTS["liveactions"]["task1"]["result"] + ), }, ] @@ -89,14 +97,24 @@ def assign_parent(child): data["id"] = obj_id data["start_timestamp"] = isotime.format(timestamp, offset=False) data["end_timestamp"] = isotime.format(timestamp, offset=False) - data["status"] = data["liveaction"]["status"] - data["result"] = data["liveaction"]["result"] if fake_type["action"]["name"] == "local" and random.choice([True, False]): assign_parent(data) wb_obj = ActionExecutionAPI(**data) db_obj = ActionExecutionAPI.to_model(wb_obj) cls.refs[obj_id] = ActionExecution.add_or_update(db_obj) cls.start_timestamps.append(timestamp) + # also add the liveaction to the database so it can be retrieved by + # the actionexecution api + liveaction_data = { + "id": data["liveaction_id"], + "action": fake_type["action"]["name"], + "status": data["status"], + } + wb_live_obj = LiveActionAPI(**liveaction_data) + live_db_obj = LiveActionAPI.to_model(wb_live_obj) + # hard code id of liveaction + live_db_obj.id = data["liveaction_id"] + LiveAction.add_or_update(live_db_obj) cls.start_timestamps = sorted(cls.start_timestamps) @@ -135,7 +153,7 @@ def test_get_one(self): self.assertEqual(record["id"], obj_id) self.assertDictEqual(record["action"], fake_record.action) self.assertDictEqual(record["runner"], fake_record.runner) - self.assertDictEqual(record["liveaction"], fake_record.liveaction) + self.assertEqual(record["liveaction_id"], fake_record.liveaction_id) def test_get_one_failed(self): response = self.app.get( diff --git 
a/st2client/in-requirements.txt b/st2client/in-requirements.txt index 88bb5e5b5a..03b65b5fa4 100644 --- a/st2client/in-requirements.txt +++ b/st2client/in-requirements.txt @@ -1,5 +1,6 @@ # Remember to list implicit packages here, otherwise version won't be fixated! importlib-metadata +zipp<3.16.0 # importlib-metadata requires typing-extensions typing-extensions argcomplete diff --git a/st2common/BUILD b/st2common/BUILD index 832a17e483..48a0726dfd 100644 --- a/st2common/BUILD +++ b/st2common/BUILD @@ -21,6 +21,7 @@ st2_component_python_distribution( "bin/st2-pack-setup-virtualenv", "bin/migrations/v3.5/st2-migrate-db-dict-field-values", "bin/migrations/v3.8/st2-drop-st2exporter-marker-collections", + "bin/migrations/v3.9/st2-migrate-liveaction-executiondb", "bin/st2-run-pack-tests:shell", "bin/st2ctl:shell", "bin/st2-self-check:shell", diff --git a/st2common/benchmarks/micro/test_mongo_field_types.py b/st2common/benchmarks/micro/test_mongo_field_types.py index 54e5ead509..e9b3077d3b 100644 --- a/st2common/benchmarks/micro/test_mongo_field_types.py +++ b/st2common/benchmarks/micro/test_mongo_field_types.py @@ -46,6 +46,7 @@ import pytest import mongoengine as me +import orjson from st2common.service_setup import db_setup from st2common.models.db import stormbase @@ -62,7 +63,50 @@ LiveActionDB._meta["allow_inheritance"] = True # pylint: disable=no-member -# 1. Current approach aka using EscapedDynamicField +class OldJSONDictField(JSONDictField): + def parse_field_value(self, value) -> dict: + """ + Parse provided binary field value and return parsed value (dictionary). + + For example: + + - (n, o, ...) - no compression, data is serialized using orjson + - (z, o, ...) - zstandard compression, data is serialized using orjson + """ + if not value: + return self.default + + if isinstance(value, dict): + # Already deserializaed + return value + + data = orjson.loads(value) + return data + + def _serialize_field_value(self, value: dict) -> bytes: + """ + Serialize and encode the provided field value. + """ + # Orquesta workflows support toSet() YAQL operator which returns a set which used to get + # serialized to list by mongoengine DictField. + # + # For backward compatibility reasons, we need to support serializing set to a list as + # well. + # + # Based on micro benchmarks, using default function adds very little overhead (1%) so it + # should be safe to use default for every operation. + # + # If this turns out to be not true or it adds more overhead in other scenarios, we should + # revisit this decision and only use "default" argument where needed (aka Workflow models). + def default(obj): + if isinstance(obj, set): + return list(obj) + raise TypeError + + return orjson.dumps(value, default=default) + + +# 1. old approach aka using EscapedDynamicField class LiveActionDB_EscapedDynamicField(LiveActionDB): result = stormbase.EscapedDynamicField(default={}) @@ -71,46 +115,31 @@ class LiveActionDB_EscapedDynamicField(LiveActionDB): field3 = stormbase.EscapedDynamicField(default={}) -# 2. Current approach aka using EscapedDictField +# 2. old approach aka using EscapedDictField class LiveActionDB_EscapedDictField(LiveActionDB): result = stormbase.EscapedDictField(default={}) - field1 = stormbase.EscapedDynamicField(default={}, use_header=False) - field2 = stormbase.EscapedDynamicField(default={}, use_header=False) - field3 = stormbase.EscapedDynamicField(default={}, use_header=False) - - -# 3. 
Approach which uses new JSONDictField where value is stored as serialized JSON string / blob -class LiveActionDB_JSONField(LiveActionDB): - result = JSONDictField(default={}, use_header=False) - - field1 = JSONDictField(default={}, use_header=False) - field2 = JSONDictField(default={}, use_header=False) - field3 = JSONDictField(default={}, use_header=False) + field1 = stormbase.EscapedDynamicField(default={}) + field2 = stormbase.EscapedDynamicField(default={}) + field3 = stormbase.EscapedDynamicField(default={}) -class LiveActionDB_JSONFieldWithHeader(LiveActionDB): - result = JSONDictField(default={}, use_header=True, compression_algorithm="none") +# 3. Old Approach which uses no compression where value is stored as serialized JSON string / blob +class LiveActionDB_OLDJSONField(LiveActionDB): + result = OldJSONDictField(default={}, use_header=False) - field1 = JSONDictField(default={}, use_header=True, compression_algorithm="none") - field2 = JSONDictField(default={}, use_header=True, compression_algorithm="none") - field3 = JSONDictField(default={}, use_header=True, compression_algorithm="none") + field1 = OldJSONDictField(default={}) + field2 = OldJSONDictField(default={}) + field3 = OldJSONDictField(default={}) -class LiveActionDB_JSONFieldWithHeaderAndZstandard(LiveActionDB): - result = JSONDictField( - default={}, use_header=True, compression_algorithm="zstandard" - ) +# 4. Current Approach which uses new JSONDictField where value is stored as zstandard compressed serialized JSON string / blob +class LiveActionDB_JSONField(LiveActionDB): + result = JSONDictField(default={}, use_header=False) - field1 = JSONDictField( - default={}, use_header=True, compression_algorithm="zstandard" - ) - field2 = JSONDictField( - default={}, use_header=True, compression_algorithm="zstandard" - ) - field3 = JSONDictField( - default={}, use_header=True, compression_algorithm="zstandard" - ) + field1 = JSONDictField(default={}) + field2 = JSONDictField(default={}) + field3 = JSONDictField(default={}) class LiveActionDB_StringField(LiveActionDB): @@ -128,10 +157,8 @@ def get_model_class_for_approach(approach: str) -> Type[LiveActionDB]: model_cls = LiveActionDB_EscapedDictField elif approach == "json_dict_field": model_cls = LiveActionDB_JSONField - elif approach == "json_dict_field_with_header": - model_cls = LiveActionDB_JSONFieldWithHeader - elif approach == "json_dict_field_with_header_and_zstd": - model_cls = LiveActionDB_JSONFieldWithHeaderAndZstandard + elif approach == "old_json_dict_field": + model_cls = LiveActionDB_OLDJSONField else: raise ValueError("Invalid approach: %s" % (approach)) @@ -142,18 +169,12 @@ def get_model_class_for_approach(approach: str) -> Type[LiveActionDB]: @pytest.mark.parametrize( "approach", [ - "escaped_dynamic_field", - "escaped_dict_field", + "old_json_dict_field", "json_dict_field", - "json_dict_field_with_header", - "json_dict_field_with_header_and_zstd", ], ids=[ - "escaped_dynamic_field", - "escaped_dict_field", + "old_json_dict_field", "json_dict_field", - "json_dict_field_w_header", - "json_dict_field_w_header_and_zstd", ], ) @pytest.mark.benchmark(group="live_action_save") @@ -187,18 +208,12 @@ def run_benchmark(): @pytest.mark.parametrize( "approach", [ - "escaped_dynamic_field", - "escaped_dict_field", + "old_json_dict_field", "json_dict_field", - "json_dict_field_with_header", - "json_dict_field_with_header_and_zstd", ], ids=[ - "escaped_dynamic_field", - "escaped_dict_field", + "old_json_dict_field", "json_dict_field", - "json_dict_field_w_header", - 
"json_dict_field_w_header_and_zstd", ], ) @pytest.mark.benchmark(group="live_action_save_multiple_fields") @@ -240,18 +255,12 @@ def run_benchmark(): @pytest.mark.parametrize( "approach", [ - "escaped_dynamic_field", - "escaped_dict_field", + "old_json_dict_field", "json_dict_field", - "json_dict_field_with_header", - "json_dict_field_with_header_and_zstd", ], ids=[ - "escaped_dynamic_field", - "escaped_dict_field", + "old_json_dict_field", "json_dict_field", - "json_dict_field_w_header", - "json_dict_field_w_header_and_zstd", ], ) @pytest.mark.benchmark(group="live_action_read") diff --git a/st2common/bin/migrations/v3.9/BUILD b/st2common/bin/migrations/v3.9/BUILD new file mode 100644 index 0000000000..255bf31004 --- /dev/null +++ b/st2common/bin/migrations/v3.9/BUILD @@ -0,0 +1,3 @@ +python_sources( + sources=["*.py", "st2*"], +) diff --git a/st2common/bin/migrations/v3.9/__init__.py b/st2common/bin/migrations/v3.9/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/st2common/bin/migrations/v3.9/st2-migrate-liveaction-executiondb b/st2common/bin/migrations/v3.9/st2-migrate-liveaction-executiondb new file mode 100755 index 0000000000..76e69bc845 --- /dev/null +++ b/st2common/bin/migrations/v3.9/st2-migrate-liveaction-executiondb @@ -0,0 +1,212 @@ +#!/usr/bin/env python3 +# Copyright 2021 The StackStorm Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Migration which migrates data for existing objects in the database which utilize +liveaction to a string + +Migration step is idempotent and can be retried on failures / partial runs. + +Right now the script utilizes no concurrency and performs migration one object by one. That's done +for simplicity reasons and also to avoid massive CPU usage spikes when running this script with +large concurrency on large objects. + +Keep in mind that only "completed" objects are processed - this means Executions in "final" states +(succeeded, failed, timeout, etc.). + +We determine if an object should be migrating using mongodb $type query (for execution objects we +could also determine that based on the presence of result_size field). +""" + +import sys +import datetime +import time +import traceback + +from oslo_config import cfg + +from st2common import config +from st2common.service_setup import db_setup +from st2common.service_setup import db_teardown +from st2common.util import isotime +from st2common.models.db.execution import ActionExecutionDB +from st2common.constants.action import ( + LIVEACTION_COMPLETED_STATES, + LIVEACTION_STATUS_PAUSED, + LIVEACTION_STATUS_PENDING, +) + +# NOTE: To avoid unnecessary mongoengine object churn when retrieving only object ids (aka to avoid +# instantiating model class with a single field), we use raw pymongo value which is a dict with a +# single value + + +def migrate_executions(start_dt: datetime.datetime, end_dt: datetime.datetime) -> None: + """ + Perform migrations for execution related objects (ActionExecutionDB, LiveActionDB). 
+ """ + print("Migrating execution objects") + + # NOTE: We first only retrieve the IDs because there could be a lot of objects in the database + # and this could result in massive ram use. Technically, mongoengine loads querysets lazily, + # but this is not always the case so it's better to first retrieve all the IDs and then retrieve + # objects one by one. + # Keep in mind we need to use ModelClass.objects and not PersistanceClass.query() so .only() + # works correctly - with PersistanceClass.query().only() all the fields will still be retrieved. + # 1. Migrate ActionExecutionDB objects + res_count = ActionExecutionDB.objects( + __raw__={ + "status": { + "$in": LIVEACTION_COMPLETED_STATES + + [LIVEACTION_STATUS_PAUSED, LIVEACTION_STATUS_PENDING], + }, + }, + start_timestamp__gte=start_dt, + start_timestamp__lte=end_dt, + ).as_pymongo() + for item in res_count: + try: + ActionExecutionDB.objects(__raw__={"_id": item["_id"]}).update( + __raw__={"$set": {"liveaction_id": item["liveaction"]["id"]}} + ) + except KeyError: + pass + + ActionExecutionDB.objects( + __raw__={ + "status": { + "$in": LIVEACTION_COMPLETED_STATES + + [LIVEACTION_STATUS_PAUSED, LIVEACTION_STATUS_PENDING], + }, + }, + start_timestamp__gte=start_dt, + start_timestamp__lte=end_dt, + ).update(__raw__={"$unset": {"liveaction": 1}}) + + objects_count = res_count.count() + + print("migrated %s ActionExecutionDB objects" % (objects_count)) + print("") + + +def _register_cli_opts(): + cfg.CONF.register_cli_opt( + cfg.BoolOpt( + "yes", + short="y", + required=False, + default=False, + ) + ) + + # We default to past 30 days. Keep in mind that using longer period may take a long time in + # case there are many objects in the database. + now_dt = datetime.datetime.utcnow() + start_dt = now_dt - datetime.timedelta(days=30) + + cfg.CONF.register_cli_opt( + cfg.StrOpt( + "start-dt", + required=False, + help=( + "Start cut off ISO UTC iso date time string for objects which will be migrated. " + "Defaults to now - 30 days." + "Example value: 2020-03-13T19:01:27Z" + ), + default=start_dt.strftime("%Y-%m-%dT%H:%M:%SZ"), + ) + ) + cfg.CONF.register_cli_opt( + cfg.StrOpt( + "end-dt", + required=False, + help=( + "End cut off UTC ISO date time string for objects which will be migrated." + "Defaults to now." + "Example value: 2020-03-13T19:01:27Z" + ), + default=now_dt.strftime("%Y-%m-%dT%H:%M:%SZ"), + ) + ) + + +def migrate_objects( + start_dt: datetime.datetime, end_dt: datetime.datetime, display_prompt: bool = True +) -> None: + start_dt_str = start_dt.strftime("%Y-%m-%d %H:%M:%S") + end_dt_str = end_dt.strftime("%Y-%m-%d %H:%M:%S") + + print("StackStorm v3.9 database field data migration script\n") + + if display_prompt: + input( + "Will migrate objects with creation date between %s UTC and %s UTC.\n\n" + "You are strongly recommended to create database backup before proceeding.\n\n" + "Depending on the number of the objects in the database, " + "migration may take multiple hours or more. You are recommended to start the " + "script in a screen session, tmux or similar. 
\n\n" + "To proceed with the migration, press enter and to cancel it, press CTRL+C.\n" + % (start_dt_str, end_dt_str) + ) + print("") + + print( + "Migrating affected database objects between %s and %s" + % (start_dt_str, end_dt_str) + ) + print("") + + start_ts = int(time.time()) + migrate_executions(start_dt=start_dt, end_dt=end_dt) + end_ts = int(time.time()) + + duration = end_ts - start_ts + + print( + "SUCCESS: All database objects migrated successfully (duration: %s seconds)." + % (duration) + ) + + +def main(): + _register_cli_opts() + + config.parse_args() + db_setup() + + start_dt = isotime.parse(cfg.CONF.start_dt) + + if cfg.CONF.end_dt == "now": + end_dt = datetime.datetime.utcnow() + end_dt = end_dt.replace(tzinfo=datetime.timezone.utc) + else: + end_dt = isotime.parse(cfg.CONF.end_dt) + + try: + migrate_objects( + start_dt=start_dt, end_dt=end_dt, display_prompt=not cfg.CONF.yes + ) + exit_code = 0 + except Exception as e: + print("ABORTED: Objects migration aborted on first failure: %s" % (str(e))) + traceback.print_exc() + exit_code = 1 + + db_teardown() + sys.exit(exit_code) + + +if __name__ == "__main__": + main() diff --git a/st2common/bin/st2-track-result b/st2common/bin/st2-track-result index 773421bf82..7f158a2453 100755 --- a/st2common/bin/st2-track-result +++ b/st2common/bin/st2-track-result @@ -68,7 +68,7 @@ def add_result_tracker(exec_id): LOG.info("Retrieving runner type and liveaction records...") runnertype_db = action_db.get_runnertype_by_name(exec_db.action.get("runner_type")) - liveaction_db = action_db.get_liveaction_by_id(exec_db.liveaction["id"]) + liveaction_db = action_db.get_liveaction_by_id(exec_db.liveaction_id) # Skip if liveaction is completed. if liveaction_db.status in action_constants.LIVEACTION_COMPLETED_STATES: @@ -100,7 +100,7 @@ def del_result_tracker(exec_id): LOG.info('Found action execution record for "%s".', exec_id) LOG.info("Retrieving runner type and liveaction records...") - liveaction_db = action_db.get_liveaction_by_id(exec_db.liveaction["id"]) + liveaction_db = action_db.get_liveaction_by_id(exec_db.liveaction_id) LOG.info("Removing result tracker entry...") removed = queries.remove_query(liveaction_db.id) diff --git a/st2common/setup.py b/st2common/setup.py index e67c846b90..5e2764286d 100644 --- a/st2common/setup.py +++ b/st2common/setup.py @@ -69,6 +69,7 @@ "bin/st2-pack-download", "bin/st2-pack-setup-virtualenv", "bin/migrations/v3.5/st2-migrate-db-dict-field-values", + "bin/migrations/v3.9/st2-migrate-liveaction-executiondb", ], entry_points={ "st2common.metrics.driver": [ diff --git a/st2common/st2common/config.py b/st2common/st2common/config.py index 15a4d5b32a..538a7e3ad6 100644 --- a/st2common/st2common/config.py +++ b/st2common/st2common/config.py @@ -21,6 +21,7 @@ from oslo_config import cfg from oslo_config.sources._environment import EnvironmentConfigurationSource +from st2common.constants.compression import ZSTANDARD_COMPRESS, VALID_COMPRESS from st2common.constants.system import VERSION_STRING from st2common.constants.system import DEFAULT_CONFIG_FILE_PATH from st2common.constants.runners import PYTHON_RUNNER_DEFAULT_LOG_LEVEL @@ -322,6 +323,14 @@ def register_opts(ignore_errors=False): "By default, it use SCRAM-SHA-1 with MongoDB 3.0 and later, " "MONGODB-CR (MongoDB Challenge Response protocol) for older servers.", ), + cfg.StrOpt( + "parameter_result_compression", + default=ZSTANDARD_COMPRESS, + required=True, + choices=VALID_COMPRESS, + help="compression for parameter and result storage in liveaction and " + 
"execution models", + ), cfg.StrOpt( "compressors", default="", diff --git a/st2common/st2common/constants/compression.py b/st2common/st2common/constants/compression.py new file mode 100644 index 0000000000..edb3581cf9 --- /dev/null +++ b/st2common/st2common/constants/compression.py @@ -0,0 +1,89 @@ +# Copyright 2020 The StackStorm Authors. +# Copyright 2019 Extreme Networks, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Mongoengine is licensed under MIT. +""" + + +import enum +from oslo_config import cfg +import zstandard + +ZSTANDARD_COMPRESS = "zstandard" +NO_COMPRESSION = "none" + +VALID_COMPRESS = [ZSTANDARD_COMPRESS, NO_COMPRESSION] + + +class JSONDictFieldCompressionAlgorithmEnum(enum.Enum): + """ + Enum which represents compression algorithm (if any) used for a specific JSONDictField value. + """ + + ZSTANDARD = b"z" + + +VALID_JSON_DICT_COMPRESSION_ALGORITHMS = [ + JSONDictFieldCompressionAlgorithmEnum.ZSTANDARD.value, +] + + +def zstandard_compress(data): + data = ( + JSONDictFieldCompressionAlgorithmEnum.ZSTANDARD.value + + zstandard.ZstdCompressor().compress(data) + ) + return data + + +def zstandard_uncompress(data): + data = zstandard.ZstdDecompressor().decompress(data) + return data + + +MAP_COMPRESS = { + ZSTANDARD_COMPRESS: zstandard_compress, +} + + +MAP_UNCOMPRESS = { + JSONDictFieldCompressionAlgorithmEnum.ZSTANDARD.value: zstandard_uncompress, +} + + +def uncompress(value: bytes): + data = value + try: + uncompression_header = value[0:1] + uncompression_method = MAP_UNCOMPRESS.get(uncompression_header, False) + if uncompression_method: # skip if no compress + data = uncompression_method(value[1:]) + # will need to add additional exceptions if additonal compression methods + # are added in the future; please do not catch the general exception here. 
+ except zstandard.ZstdError: + # skip if already a byte string and not zstandard compressed + pass + return data + + +def compress(value: bytes): + data = value + parameter_result_compression = cfg.CONF.database.parameter_result_compression + compression_method = MAP_COMPRESS.get(parameter_result_compression, False) + # none is not mapped at all so has no compression method + if compression_method: + data = compression_method(value) + return data diff --git a/st2common/st2common/fields.py b/st2common/st2common/fields.py index 0e94f11f85..f6a821aa73 100644 --- a/st2common/st2common/fields.py +++ b/st2common/st2common/fields.py @@ -27,7 +27,6 @@ import datetime import calendar -import enum import weakref import orjson @@ -38,6 +37,10 @@ from mongoengine.base.datastructures import mark_key_as_changed_wrapper from mongoengine.common import _import_class +from st2common.constants.compression import ( + compress as compress_function, + uncompress as uncompress_function, +) from st2common.util import date as date_utils from st2common.util import mongoescape @@ -49,34 +52,6 @@ JSON_DICT_FIELD_DELIMITER = b":" -class JSONDictFieldCompressionAlgorithmEnum(enum.Enum): - """ - Enum which represents compression algorithm (if any) used for a specific JSONDictField value. - """ - - NONE = b"n" - ZSTANDARD = b"z" - - -class JSONDictFieldSerializationFormatEnum(enum.Enum): - """ - Enum which represents serialization format used for a specific JSONDictField value. - """ - - ORJSON = b"o" - - -VALID_JSON_DICT_COMPRESSION_ALGORITHMS = [ - JSONDictFieldCompressionAlgorithmEnum.NONE.value, - JSONDictFieldCompressionAlgorithmEnum.ZSTANDARD.value, -] - - -VALID_JSON_DICT_SERIALIZATION_FORMATS = [ - JSONDictFieldSerializationFormatEnum.ORJSON.value, -] - - class ComplexDateTimeField(LongField): """ Date time field which handles microseconds exactly and internally stores @@ -331,13 +306,14 @@ def _mark_as_changed(self, key=None): class JSONDictField(BinaryField): """ - Custom field types which stores dictionary as JSON serialized strings. + Custom field types which stores dictionary as zstandard compressed JSON serialized strings. - This is done because storing large objects as JSON serialized strings is much more fficient + This is done because storing large objects as compressed JSON serialized + strings is much more efficient on the serialize and unserialize paths compared to used EscapedDictField which needs to escape all the special values ($, .). - Only downside is that to MongoDB those values are plain raw strings which means you can't query + Only downside is that to MongoDB those values are compressed plain raw strings which means you can't query on actual dictionary field values. That's not an issue for us, because in places where we use it, those values are already treated as plain binary blobs to the database layer and we never directly query on those field values. @@ -358,25 +334,11 @@ class JSONDictField(BinaryField): IMPLEMENTATION DETAILS: - If header is used, values are stored in the following format: - ::. - - For example: - n:o:... - No compression, (or)json serialization - z:o:... - Zstandard compression, (or)json serialization - If header is not used, value is stored as a serialized JSON string of the input dictionary. """ def __init__(self, *args, **kwargs): - # True if we should use field header which is more future proof approach and also allows - # us to support optional per-field compression, etc. 
- # This option is only exposed so we can benchmark different approaches and how much overhead - # using a header adds. - self.use_header = kwargs.pop("use_header", False) - self.compression_algorithm = kwargs.pop("compression_algorithm", "none") - super(JSONDictField, self).__init__(*args, **kwargs) def to_mongo(self, value): @@ -403,11 +365,6 @@ def validate(self, value): def parse_field_value(self, value: Optional[Union[bytes, dict]]) -> dict: """ Parse provided binary field value and return parsed value (dictionary). - - For example: - - - (n, o, ...) - no compression, data is serialized using orjson - - (z, o, ...) - zstandard compression, data is serialized using orjson """ if not value: return self.default @@ -416,45 +373,10 @@ def parse_field_value(self, value: Optional[Union[bytes, dict]]) -> dict: # Already deserializaed return value - if not self.use_header: - return orjson.loads(value) - - split = value.split(JSON_DICT_FIELD_DELIMITER, 2) - - if len(split) != 3: - raise ValueError( - "Expected 3 values when splitting field value, got %s" % (len(split)) - ) - - compression_algorithm = split[0] - serialization_format = split[1] - data = split[2] - - if compression_algorithm not in VALID_JSON_DICT_COMPRESSION_ALGORITHMS: - raise ValueError( - "Invalid or unsupported value for compression algorithm header " - "value: %s" % (compression_algorithm) - ) - - if serialization_format not in VALID_JSON_DICT_SERIALIZATION_FORMATS: - raise ValueError( - "Invalid or unsupported value for serialization format header " - "value: %s" % (serialization_format) - ) - - if ( - compression_algorithm - == JSONDictFieldCompressionAlgorithmEnum.ZSTANDARD.value - ): - # NOTE: At this point zstandard is only test dependency - import zstandard - - data = zstandard.ZstdDecompressor().decompress(data) - - data = orjson.loads(data) + data = orjson.loads(uncompress_function(value)) return data - def _serialize_field_value(self, value: dict) -> bytes: + def _serialize_field_value(self, value: dict, compress=True) -> bytes: """ Serialize and encode the provided field value. 
""" @@ -474,21 +396,10 @@ def default(obj): return list(obj) raise TypeError - if not self.use_header: - return orjson.dumps(value, default=default) - data = orjson.dumps(value, default=default) - - if self.compression_algorithm == "zstandard": - # NOTE: At this point zstandard is only test dependency - import zstandard - - compression_header = JSONDictFieldCompressionAlgorithmEnum.ZSTANDARD - data = zstandard.ZstdCompressor().compress(data) - else: - compression_header = JSONDictFieldCompressionAlgorithmEnum.NONE - - return compression_header.value + b":" + b"o:" + data + if compress: + data = compress_function(data) + return data def __get__(self, instance, owner): """ @@ -522,11 +433,6 @@ class JSONDictEscapedFieldCompatibilityField(JSONDictField): def to_mongo(self, value): if isinstance(value, bytes): # Already serialized - if value[0] == b"{" and self.use_header: - # Serialized, but doesn't contain header prefix, add it (assume migration from - # format without a header) - return "n:o:" + value - return value if not isinstance(value, dict): diff --git a/st2common/st2common/garbage_collection/executions.py b/st2common/st2common/garbage_collection/executions.py index ae0f3296f4..bc0e85f1f0 100644 --- a/st2common/st2common/garbage_collection/executions.py +++ b/st2common/st2common/garbage_collection/executions.py @@ -223,5 +223,5 @@ def purge_orphaned_workflow_executions(logger): # as a result of the original failure, the garbage collection routine here cancels # the workflow execution so it cannot be rerun from failed task(s). for ac_ex_db in workflow_service.identify_orphaned_workflows(): - lv_ac_db = LiveAction.get(id=ac_ex_db.liveaction["id"]) + lv_ac_db = LiveAction.get(id=ac_ex_db.liveaction_id) action_service.request_cancellation(lv_ac_db, None) diff --git a/st2common/st2common/garbage_collection/inquiries.py b/st2common/st2common/garbage_collection/inquiries.py index 2381472a73..a447a0f5eb 100644 --- a/st2common/st2common/garbage_collection/inquiries.py +++ b/st2common/st2common/garbage_collection/inquiries.py @@ -78,7 +78,7 @@ def purge_inquiries(logger): liveaction_db = action_utils.update_liveaction_status( status=action_constants.LIVEACTION_STATUS_TIMED_OUT, result=inquiry.result, - liveaction_id=inquiry.liveaction.get("id"), + liveaction_id=inquiry.liveaction_id, ) executions.update_execution(liveaction_db) diff --git a/st2common/st2common/models/api/action.py b/st2common/st2common/models/api/action.py index dc18ba02cd..f82220950b 100644 --- a/st2common/st2common/models/api/action.py +++ b/st2common/st2common/models/api/action.py @@ -34,6 +34,7 @@ from st2common.models.db.runner import RunnerTypeDB from st2common.constants.action import LIVEACTION_STATUSES from st2common.models.system.common import ResourceReference +from st2common.fields import JSONDictEscapedFieldCompatibilityField __all__ = [ @@ -440,9 +441,23 @@ class LiveActionAPI(BaseAPI): }, "additionalProperties": False, } - skip_unescape_field_names = [ - "result", - ] + skip_unescape_field_names = ["result", "parameters"] + + @classmethod + def convert_raw(cls, doc, raw_values): + """ + override this class to + convert any raw byte values into dict + + :param doc: dict + :param raw_values: dict[field]:bytestring + """ + + for field_name, field_value in raw_values.items(): + doc[ + field_name + ] = JSONDictEscapedFieldCompatibilityField().parse_field_value(field_value) + return doc @classmethod def from_model(cls, model, mask_secrets=False): @@ -451,7 +466,6 @@ def from_model(cls, model, mask_secrets=False): 
doc["start_timestamp"] = isotime.format(model.start_timestamp, offset=False) if model.end_timestamp: doc["end_timestamp"] = isotime.format(model.end_timestamp, offset=False) - if getattr(model, "notify", None): doc["notify"] = NotificationsHelper.from_model(model.notify) diff --git a/st2common/st2common/models/api/base.py b/st2common/st2common/models/api/base.py index 6cdb16feef..3be5b2d69a 100644 --- a/st2common/st2common/models/api/base.py +++ b/st2common/st2common/models/api/base.py @@ -22,6 +22,7 @@ from st2common.util import mongoescape as util_mongodb from st2common import log as logging +from st2common.models.db.stormbase import EscapedDynamicField, EscapedDictField __all__ = ["BaseAPI", "APIUIDMixin"] @@ -86,6 +87,12 @@ def validate(self): @classmethod def _from_model(cls, model, mask_secrets=False): + unescape_fields = [ + k + for k, v in model._fields.items() + if type(v) in [EscapedDynamicField, EscapedDictField] + ] + unescape_fields = set(unescape_fields) - set(cls.skip_unescape_field_names) doc = model.to_mongo() if "_id" in doc: @@ -94,32 +101,36 @@ def _from_model(cls, model, mask_secrets=False): # Special case for models which utilize JSONDictField - there is no need to escape those # fields since it contains a JSON string and not a dictionary which doesn't need to be # mongo escaped. Skipping this step here substantially speeds things up for that field. - - # Right now we do this here manually for all those fields types but eventually we should - # refactor the code to just call unescape chars on escaped fields - more generic and - # faster. raw_values = {} - for field_name in cls.skip_unescape_field_names: if isinstance(doc.get(field_name, None), bytes): raw_values[field_name] = doc.pop(field_name) - - # TODO (Tomaz): In general we really shouldn't need to call unescape chars on the whole doc, - # but just on the EscapedDict and EscapedDynamicField fields - doing it on the whole doc - # level is slow and not necessary! - doc = util_mongodb.unescape_chars(doc) - - # Now add the JSON string field value which shouldn't be escaped back. - # We don't JSON parse the field value here because that happens inside the model specific - # "from_model()" method where we also parse and convert all the other field values. 
- for field_name, field_value in raw_values.items(): - doc[field_name] = field_value + for key in unescape_fields: + if key in doc.keys(): + doc[key] = util_mongodb.unescape_chars(doc[key]) + # convert raw fields and add back ; no need to unescape + doc = cls.convert_raw(doc, raw_values) if mask_secrets and cfg.CONF.log.mask_secrets: doc = model.mask_secrets(value=doc) return doc + @classmethod + def convert_raw(cls, doc, raw_values): + """ + override this class to + convert any raw byte values into dict + you can also use this to fix any other fields that need 'fixing' + + :param doc: dict + :param raw_values: dict[field]:bytestring + """ + + for field_name, field_value in raw_values.items(): + doc[field_name] = field_value + return doc + @classmethod def from_model(cls, model, mask_secrets=False): """ diff --git a/st2common/st2common/models/api/execution.py b/st2common/st2common/models/api/execution.py index 76aa9fbdf8..2654d5cc52 100644 --- a/st2common/st2common/models/api/execution.py +++ b/st2common/st2common/models/api/execution.py @@ -24,11 +24,13 @@ from st2common.models.api.base import BaseAPI from st2common.models.db.execution import ActionExecutionDB from st2common.models.db.execution import ActionExecutionOutputDB +from st2common.persistence.liveaction import LiveAction from st2common.models.api.trigger import TriggerTypeAPI, TriggerAPI, TriggerInstanceAPI from st2common.models.api.rule import RuleAPI from st2common.models.api.action import RunnerTypeAPI, ActionAPI, LiveActionAPI from st2common import log as logging from st2common.util.deep_copy import fast_deepcopy_dict +from st2common.fields import JSONDictEscapedFieldCompatibilityField __all__ = ["ActionExecutionAPI", "ActionExecutionOutputAPI"] @@ -60,6 +62,7 @@ class ActionExecutionAPI(BaseAPI): "rule": RuleAPI.schema, "action": REQUIRED_ATTR_SCHEMAS["action"], "runner": REQUIRED_ATTR_SCHEMAS["runner"], + "liveaction_id": {"type": "string", "required": True}, "liveaction": REQUIRED_ATTR_SCHEMAS["liveaction"], "status": { "description": "The current status of the action execution.", @@ -145,19 +148,29 @@ class ActionExecutionAPI(BaseAPI): }, "additionalProperties": False, } - skip_unescape_field_names = [ - "result", - ] + skip_unescape_field_names = ["result", "parameters"] @classmethod def from_model(cls, model, mask_secrets=False): - doc = cls._from_model(model, mask_secrets=mask_secrets) - doc["result"] = ActionExecutionDB.result.parse_field_value(doc["result"]) + doc = cls._from_model(model, mask_secrets=mask_secrets) start_timestamp = model.start_timestamp start_timestamp_iso = isotime.format(start_timestamp, offset=False) doc["start_timestamp"] = start_timestamp_iso + # check to see if liveaction_id has been excluded in output filtering + if doc.get("liveaction_id", False): + live_action_model = LiveAction.get_by_id(doc["liveaction_id"]) + if live_action_model is not None: + doc["liveaction"] = LiveActionAPI.from_model( + live_action_model, mask_secrets=mask_secrets + ) + else: + doc["liveaction"] = { + "action": doc["action"]["name"], + "id": doc["liveaction_id"], + "status": doc["status"], + } end_timestamp = model.end_timestamp if end_timestamp: @@ -171,6 +184,26 @@ def from_model(cls, model, mask_secrets=False): attrs = {attr: value for attr, value in six.iteritems(doc) if value} return cls(**attrs) + @classmethod + def convert_raw(cls, doc, raw_values): + """ + override this class to + convert any raw byte values into dict + + Now add the JSON string field value which shouldn't be escaped back. 
+ We don't JSON parse the field value here because that happens inside the model specific + "from_model()" method where we also parse and convert all the other field values. + + :param doc: dict + :param raw_values: dict[field]:bytestring + """ + + for field_name, field_value in raw_values.items(): + doc[ + field_name + ] = JSONDictEscapedFieldCompatibilityField().parse_field_value(field_value) + return doc + @classmethod def to_model(cls, instance): values = {} diff --git a/st2common/st2common/models/api/inquiry.py b/st2common/st2common/models/api/inquiry.py index d04c8efbf1..f5a3be92b8 100644 --- a/st2common/st2common/models/api/inquiry.py +++ b/st2common/st2common/models/api/inquiry.py @@ -21,7 +21,7 @@ from st2common.constants.action import LIVEACTION_STATUSES from st2common.models.api.base import BaseAPI -from st2common.models.api.action import RunnerTypeAPI, ActionAPI, LiveActionAPI +from st2common.models.api.action import RunnerTypeAPI, ActionAPI from st2common.models.db.execution import ActionExecutionDB from st2common import log as logging @@ -31,7 +31,6 @@ REQUIRED_ATTR_SCHEMAS = { "action": copy.deepcopy(ActionAPI.schema), "runner": copy.deepcopy(RunnerTypeAPI.schema), - "liveaction": copy.deepcopy(LiveActionAPI.schema), } for k, v in six.iteritems(REQUIRED_ATTR_SCHEMAS): @@ -76,7 +75,7 @@ class InquiryAPI(BaseAPI): }, "required": True, }, - "liveaction": REQUIRED_ATTR_SCHEMAS["liveaction"], + "liveaction_id": {"type": "string", "required": True}, "runner": REQUIRED_ATTR_SCHEMAS["runner"], "status": { "description": "The current status of the action execution.", @@ -112,7 +111,7 @@ def from_model(cls, model, mask_secrets=False): "id": doc["id"], "runner": doc.get("runner", None), "status": doc.get("status", None), - "liveaction": doc.get("liveaction", None), + "liveaction_id": doc.get("liveaction_id", None), "parent": doc.get("parent", None), "result": doc.get("result", None), } diff --git a/st2common/st2common/models/db/execution.py b/st2common/st2common/models/db/execution.py index 0de35a5c31..6b591d310e 100644 --- a/st2common/st2common/models/db/execution.py +++ b/st2common/st2common/models/db/execution.py @@ -39,16 +39,7 @@ class ActionExecutionDB(stormbase.StormFoundationDB): RESOURCE_TYPE = ResourceType.EXECUTION UID_FIELDS = ["id"] - - trigger = stormbase.EscapedDictField() - trigger_type = stormbase.EscapedDictField() - trigger_instance = stormbase.EscapedDictField() - rule = stormbase.EscapedDictField() - action = stormbase.EscapedDictField(required=True) - runner = stormbase.EscapedDictField(required=True) - # Only the diff between the liveaction type and what is replicated - # in the ActionExecutionDB object. - liveaction = stormbase.EscapedDictField(required=True) + # SAME as liveaction workflow_execution = me.StringField() task_execution = me.StringField() status = me.StringField( @@ -61,29 +52,40 @@ class ActionExecutionDB(stormbase.StormFoundationDB): end_timestamp = ComplexDateTimeField( help_text="The timestamp when the liveaction has finished." ) - parameters = stormbase.EscapedDynamicField( + action = stormbase.EscapedDictField(required=True) + parameters = JSONDictEscapedFieldCompatibilityField( default={}, help_text="The key-value pairs passed as to the action runner & action.", ) result = JSONDictEscapedFieldCompatibilityField( default={}, help_text="Action defined result." ) - result_size = me.IntField(default=0, help_text="Serialized result size in bytes") context = me.DictField( default={}, help_text="Contextual information on the action execution." 
) + delay = me.IntField(min_value=0) + + # diff from liveaction + runner = stormbase.EscapedDictField(required=True) + trigger = stormbase.EscapedDictField() + trigger_type = stormbase.EscapedDictField() + trigger_instance = stormbase.EscapedDictField() + rule = stormbase.EscapedDictField() + result_size = me.IntField(default=0, help_text="Serialized result size in bytes") parent = me.StringField() children = me.ListField(field=me.StringField()) log = me.ListField(field=me.DictField()) - delay = me.IntField(min_value=0) # Do not use URLField for web_url. If host doesn't have FQDN set, URLField validation blows. web_url = me.StringField(required=False) + # liveaction id + liveaction_id = me.StringField(required=True) + meta = { "indexes": [ {"fields": ["rule.ref"]}, {"fields": ["action.ref"]}, - {"fields": ["liveaction.id"]}, + {"fields": ["liveaction_id"]}, {"fields": ["start_timestamp"]}, {"fields": ["end_timestamp"]}, {"fields": ["status"]}, @@ -115,10 +117,8 @@ def mask_secrets(self, value): :return: result: action execution object with masked secret paramters in input and output schema. :rtype: result: ``dict`` """ - result = copy.deepcopy(value) - liveaction = result["liveaction"] parameters = {} # pylint: disable=no-member parameters.update(value.get("action", {}).get("parameters", {})) @@ -128,31 +128,6 @@ def mask_secrets(self, value): result["parameters"] = mask_secret_parameters( parameters=result.get("parameters", {}), secret_parameters=secret_parameters ) - - if "parameters" in liveaction: - liveaction["parameters"] = mask_secret_parameters( - parameters=liveaction["parameters"], secret_parameters=secret_parameters - ) - - if liveaction.get("action", "") == "st2.inquiry.respond": - # Special case to mask parameters for `st2.inquiry.respond` action - # In this case, this execution is just a plain python action, not - # an inquiry, so we don't natively have a handle on the response - # schema. - # - # To prevent leakage, we can just mask all response fields. - # - # Note: The 'string' type in secret_parameters doesn't matter, - # it's just a placeholder to tell mask_secret_parameters() - # that this parameter is indeed a secret parameter and to - # mask it. - result["parameters"]["response"] = mask_secret_parameters( - parameters=liveaction["parameters"]["response"], - secret_parameters={ - p: "string" for p in liveaction["parameters"]["response"] - }, - ) - output_value = ActionExecutionDB.result.parse_field_value(result["result"]) masked_output_value = output_schema.mask_secret_output(result, output_value) result["result"] = masked_output_value diff --git a/st2common/st2common/models/db/liveaction.py b/st2common/st2common/models/db/liveaction.py index aef52462a6..4f4ebcaa37 100644 --- a/st2common/st2common/models/db/liveaction.py +++ b/st2common/st2common/models/db/liveaction.py @@ -38,6 +38,7 @@ class LiveActionDB(stormbase.StormFoundationDB): + # same as action execution workflow_execution = me.StringField() task_execution = me.StringField() # TODO: Can status be an enum at the Mongo layer? @@ -54,11 +55,7 @@ class LiveActionDB(stormbase.StormFoundationDB): action = me.StringField( required=True, help_text="Reference to the action that has to be executed." 
) - action_is_workflow = me.BooleanField( - default=False, - help_text="A flag indicating whether the referenced action is a workflow.", - ) - parameters = stormbase.EscapedDynamicField( + parameters = JSONDictEscapedFieldCompatibilityField( default={}, help_text="The key-value pairs passed as to the action runner & execution.", ) @@ -68,20 +65,25 @@ class LiveActionDB(stormbase.StormFoundationDB): context = me.DictField( default={}, help_text="Contextual information on the action execution." ) + delay = me.IntField( + min_value=0, + help_text="How long (in milliseconds) to delay the execution before scheduling.", + ) + + # diff from action execution + action_is_workflow = me.BooleanField( + default=False, + help_text="A flag indicating whether the referenced action is a workflow.", + ) callback = me.DictField( default={}, help_text="Callback information for the on completion of action execution.", ) + notify = me.EmbeddedDocumentField(NotificationSchema) runner_info = me.DictField( default={}, help_text="Information about the runner which executed this live action (hostname, pid).", ) - notify = me.EmbeddedDocumentField(NotificationSchema) - delay = me.IntField( - min_value=0, - help_text="How long (in milliseconds) to delay the execution before scheduling.", - ) - meta = { "indexes": [ {"fields": ["-start_timestamp", "action"]}, @@ -114,6 +116,23 @@ def mask_secrets(self, value): result["parameters"] = mask_secret_parameters( parameters=execution_parameters, secret_parameters=secret_parameters ) + if result.get("action", "") == "st2.inquiry.respond": + # In this case, this execution is just a plain python action, not + # an inquiry, so we don't natively have a handle on the response + # schema. + # + # To prevent leakage, we can just mask all response fields. + # + # Note: The 'string' type in secret_parameters doesn't matter, + # it's just a placeholder to tell mask_secret_parameters() + # that this parameter is indeed a secret parameter and to + # mask it. + result["parameters"]["response"] = mask_secret_parameters( + parameters=result["parameters"]["response"], + secret_parameters={ + p: "string" for p in result["parameters"]["response"] + }, + ) return result def get_masked_parameters(self): diff --git a/st2common/st2common/openapi.yaml b/st2common/st2common/openapi.yaml index 93d4bcdec6..7bcda8562b 100644 --- a/st2common/st2common/openapi.yaml +++ b/st2common/st2common/openapi.yaml @@ -4936,6 +4936,8 @@ definitions: $ref: '#/definitions/RunnerType' liveaction: $ref: '#/definitions/LiveAction' + liveaction_id: + type: string task_execution: type: string workflow_execution: diff --git a/st2common/st2common/openapi.yaml.j2 b/st2common/st2common/openapi.yaml.j2 index 6b10362640..c008d5db33 100644 --- a/st2common/st2common/openapi.yaml.j2 +++ b/st2common/st2common/openapi.yaml.j2 @@ -4932,6 +4932,8 @@ definitions: $ref: '#/definitions/RunnerType' liveaction: $ref: '#/definitions/LiveAction' + liveaction_id: + type: string task_execution: type: string workflow_execution: diff --git a/st2common/st2common/services/action.py b/st2common/st2common/services/action.py index e32fb39061..5750db26df 100644 --- a/st2common/st2common/services/action.py +++ b/st2common/st2common/services/action.py @@ -61,6 +61,7 @@ def create_request( ): """ Create an action execution. + :param liveaction: LiveActionDB :param action_db: Action model to operate one. If not provided, one is retrieved from the database using values from "liveaction". 
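With the embedded liveaction document removed from ActionExecutionDB, the lookups in the st2common/st2common/services/action.py hunks that follow (request_cancellation, request_pause, request_resume, get_parent_liveaction) switch from the embedded-document query (liveaction__id) to the new indexed string field (liveaction_id), and the full LiveActionDB is fetched separately when it is needed. A minimal sketch of the two access patterns, using only persistence accessors that already appear in this patch; the helper names are illustrative and not part of the change:

from st2common.persistence.execution import ActionExecution
from st2common.persistence.liveaction import LiveAction


def execution_for_liveaction(liveaction_db):
    # Old query against the embedded document:
    #   ActionExecution.get(liveaction__id=str(liveaction_db.id))
    # New query against the dedicated, indexed string field:
    return ActionExecution.get(liveaction_id=str(liveaction_db.id))


def liveaction_for_execution(execution_db):
    # The execution record now stores only the id, so the LiveActionDB
    # is loaded explicitly by callers that need the full document.
    return LiveAction.get_by_id(execution_db.liveaction_id)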
@@ -167,7 +168,6 @@ def create_request( runnertype_db=runnertype_db, publish=False, ) - if trace_db: trace_service.add_or_update_given_trace_db( trace_db=trace_db, @@ -316,7 +316,7 @@ def request_cancellation(liveaction, requester): liveaction, status, result=result, context=liveaction.context ) - execution = ActionExecution.get(liveaction__id=str(liveaction.id)) + execution = ActionExecution.get(liveaction_id=str(liveaction.id)) return (liveaction, execution) @@ -347,7 +347,7 @@ def request_pause(liveaction, requester): liveaction.status == action_constants.LIVEACTION_STATUS_PAUSING or liveaction.status == action_constants.LIVEACTION_STATUS_PAUSED ): - execution = ActionExecution.get(liveaction__id=str(liveaction.id)) + execution = ActionExecution.get(liveaction_id=str(liveaction.id)) return (liveaction, execution) if liveaction.status != action_constants.LIVEACTION_STATUS_RUNNING: @@ -363,7 +363,7 @@ def request_pause(liveaction, requester): context=liveaction.context, ) - execution = ActionExecution.get(liveaction__id=str(liveaction.id)) + execution = ActionExecution.get(liveaction_id=str(liveaction.id)) return (liveaction, execution) @@ -396,7 +396,7 @@ def request_resume(liveaction, requester): ] if liveaction.status in running_states: - execution = ActionExecution.get(liveaction__id=str(liveaction.id)) + execution = ActionExecution.get(liveaction_id=str(liveaction.id)) return (liveaction, execution) if liveaction.status != action_constants.LIVEACTION_STATUS_PAUSED: @@ -412,7 +412,7 @@ def request_resume(liveaction, requester): context=liveaction.context, ) - execution = ActionExecution.get(liveaction__id=str(liveaction.id)) + execution = ActionExecution.get(liveaction_id=str(liveaction.id)) return (liveaction, execution) @@ -433,7 +433,7 @@ def get_parent_liveaction(liveaction_db): return None parent_execution_db = ActionExecution.get(id=parent["execution_id"]) - parent_liveaction_db = LiveAction.get(id=parent_execution_db.liveaction["id"]) + parent_liveaction_db = LiveAction.get(id=parent_execution_db.liveaction_id) return parent_liveaction_db @@ -541,7 +541,7 @@ def store_execution_output_data_ex( def is_children_active(liveaction_id): - execution_db = ActionExecution.get(liveaction__id=str(liveaction_id)) + execution_db = ActionExecution.get(liveaction_id=str(liveaction_id)) if execution_db.runner["name"] not in action_constants.WORKFLOW_RUNNER_TYPES: return False diff --git a/st2common/st2common/services/executions.py b/st2common/st2common/services/executions.py index 80706e8f79..926c8ac808 100644 --- a/st2common/st2common/services/executions.py +++ b/st2common/st2common/services/executions.py @@ -82,12 +82,10 @@ def _decompose_liveaction(liveaction_db): """ Splits the liveaction into an ActionExecution compatible dict. """ - decomposed = {"liveaction": {}} + decomposed = {"liveaction_id": str(liveaction_db.id)} liveaction_api = vars(LiveActionAPI.from_model(liveaction_db)) for k in liveaction_api.keys(): - if k in LIVEACTION_ATTRIBUTES: - decomposed["liveaction"][k] = liveaction_api[k] - else: + if k not in LIVEACTION_ATTRIBUTES: decomposed[k] = getattr(liveaction_db, k) return decomposed @@ -155,6 +153,7 @@ def create_execution_object( # NOTE: User input data is already validate as part of the API request, # other data is set by us. 
Skipping validation here makes operation 10%-30% faster + execution.liveaction_id = str(liveaction.id) execution = ActionExecution.add_or_update( execution, publish=publish, validate=False ) @@ -194,7 +193,7 @@ def update_execution(liveaction_db, publish=True, set_result_size=False): :param set_result_size: True to calculate size of the serialized result field value and set it on the "result_size" database field. """ - execution = ActionExecution.get(liveaction__id=str(liveaction_db.id)) + execution = ActionExecution.get(liveaction_id=str(liveaction_db.id)) with coordination.get_coordinator().get_lock(str(liveaction_db.id).encode()): # Skip execution object update when action is already in completed state. @@ -225,7 +224,7 @@ def update_execution(liveaction_db, publish=True, set_result_size=False): with Timer(key="action.executions.calculate_result_size"): result_size = len( ActionExecutionDB.result._serialize_field_value( - liveaction_db.result + value=liveaction_db.result, compress=False ) ) kw["set__result_size"] = result_size diff --git a/st2common/st2common/services/inquiry.py b/st2common/st2common/services/inquiry.py index 1301a62c4b..e181faa5a0 100644 --- a/st2common/st2common/services/inquiry.py +++ b/st2common/st2common/services/inquiry.py @@ -121,12 +121,16 @@ def validate_response(inquiry, response): def respond(inquiry, response, requester=None): + """ + :param inquiry: InquiryAPI + :param response: dict + """ # Set requester to system user is not provided. if not requester: requester = cfg.CONF.system_user.user # Retrieve the liveaction from the database. - liveaction_db = lv_db_access.LiveAction.get_by_id(inquiry.liveaction.get("id")) + liveaction_db = lv_db_access.LiveAction.get_by_id(inquiry.liveaction_id) # Resume the parent workflow first. If the action execution for the inquiry is updated first, # it triggers handling of the action execution completion which will interact with the paused diff --git a/st2common/st2common/services/policies.py b/st2common/st2common/services/policies.py index 46e24ce290..c4aa1ac87e 100644 --- a/st2common/st2common/services/policies.py +++ b/st2common/st2common/services/policies.py @@ -15,6 +15,9 @@ from __future__ import absolute_import +import sys +import traceback + from st2common.constants import action as ac_const from st2common import log as logging from st2common.persistence import policy as pc_db_access @@ -58,9 +61,17 @@ def apply_pre_run_policies(lv_ac_db): LOG.info(message % (policy_db.ref, policy_db.policy_type, str(lv_ac_db.id))) lv_ac_db = driver.apply_before(lv_ac_db) except: - message = 'An exception occurred while applying policy "%s" (%s) for liveaction "%s".' + _, ex, tb = sys.exc_info() + traceback_var = "".join(traceback.format_tb(tb, 20)) + message = 'An exception occurred while applying policy "%s" (%s) for liveaction "%s". traceback "%s"' LOG.exception( - message % (policy_db.ref, policy_db.policy_type, str(lv_ac_db.id)) + message + % ( + policy_db.ref, + policy_db.policy_type, + str(lv_ac_db.id), + traceback_var, + ) ) if lv_ac_db.status == ac_const.LIVEACTION_STATUS_DELAYED: diff --git a/st2common/st2common/services/trace.py b/st2common/st2common/services/trace.py index 2d51161838..37d435f36f 100644 --- a/st2common/st2common/services/trace.py +++ b/st2common/st2common/services/trace.py @@ -197,7 +197,7 @@ def get_trace_db_by_live_action(liveaction): ) return (created, trace_db) # 3. 
Check if the action_execution associated with liveaction leads to a trace_db - execution = ActionExecution.get(liveaction__id=str(liveaction.id)) + execution = ActionExecution.get(liveaction_id=str(liveaction.id)) if execution: trace_db = get_trace_db_by_action_execution(action_execution=execution) # 4. No trace_db found, therefore create one. This typically happens diff --git a/st2common/st2common/services/workflows.py b/st2common/st2common/services/workflows.py index b84671f8b1..c99fb896b5 100644 --- a/st2common/st2common/services/workflows.py +++ b/st2common/st2common/services/workflows.py @@ -19,6 +19,8 @@ import datetime import retrying import six +import sys +import traceback from orquesta import conducting from orquesta import events @@ -248,6 +250,7 @@ def request(wf_def, ac_ex_db, st2_ctx, notify_cfg=None): ) # Instantiate the workflow conductor. + LOG.info("action_params: " + str(action_params)) conductor_params = {"inputs": action_params, "context": st2_ctx} conductor = conducting.WorkflowConductor(wf_spec, **conductor_params) @@ -469,7 +472,7 @@ def request_cancellation(ac_ex_db): and root_ac_ex_db.status not in ac_const.LIVEACTION_CANCEL_STATES ): LOG.info("[%s] Cascading cancelation request to parent workflow.", wf_ac_ex_id) - root_lv_ac_db = lv_db_access.LiveAction.get(id=root_ac_ex_db.liveaction["id"]) + root_lv_ac_db = lv_db_access.LiveAction.get(id=root_ac_ex_db.liveaction_id) ac_svc.request_cancellation(root_lv_ac_db, None) LOG.debug("[%s] %s", wf_ac_ex_id, conductor.serialize()) @@ -666,7 +669,7 @@ def request_task_execution(wf_ex_db, st2_ctx, task_ex_req): except Exception as e: msg = 'Failed action execution(s) for task "%s", route "%s".' msg = msg % (task_id, str(task_route)) - LOG.exception(msg) + LOG.exception(msg, exc_info=True) msg = "%s %s: %s" % (msg, type(e).__name__, six.text_type(e)) update_progress(wf_ex_db, msg, severity="error", log=False) msg = "%s: %s" % (type(e).__name__, six.text_type(e)) @@ -676,7 +679,13 @@ def request_task_execution(wf_ex_db, st2_ctx, task_ex_req): "task_id": task_id, "route": task_route, } - update_task_execution(str(task_ex_db.id), statuses.FAILED, {"errors": [error]}) + exc_type, exc_value, exc_traceback = sys.exc_info() + traceback_in_var = traceback.format_tb(exc_traceback) + update_task_execution( + str(task_ex_db.id), + statuses.FAILED, + {"errors": [error], "traceback": traceback_in_var}, + ) raise e return task_ex_db @@ -906,7 +915,7 @@ def handle_action_execution_resume(ac_ex_db): if parent_ac_ex_db.status == ac_const.LIVEACTION_STATUS_PAUSED: action_utils.update_liveaction_status( - liveaction_id=parent_ac_ex_db.liveaction["id"], + liveaction_id=parent_ac_ex_db.liveaction_id, status=ac_const.LIVEACTION_STATUS_RUNNING, publish=False, ) @@ -1184,12 +1193,15 @@ def request_next_tasks(wf_ex_db, task_ex_id=None): # Request the task execution. request_task_execution(wf_ex_db, st2_ctx, task) except Exception as e: + + exc_type, exc_value, exc_traceback = sys.exc_info() msg = 'Failed task execution for task "%s", route "%s".' 
msg = msg % (task["id"], str(task["route"])) update_progress( wf_ex_db, "%s %s" % (msg, str(e)), severity="error", log=False ) - LOG.exception(msg) + LOG.exception(msg, exc_info=True) + fail_workflow_execution(str(wf_ex_db.id), e, task=task) return @@ -1203,7 +1215,9 @@ def request_next_tasks(wf_ex_db, task_ex_id=None): next_tasks = conductor.get_next_tasks() if not next_tasks: - update_progress(wf_ex_db, "No tasks identified to execute next.") + update_progress( + wf_ex_db, "end of while No tasks identified to execute next." + ) update_progress(wf_ex_db, "\n", log=False) @@ -1435,7 +1449,7 @@ def update_execution_records( # Update the corresponding liveaction and action execution for the workflow. wf_ac_ex_db = ex_db_access.ActionExecution.get_by_id(wf_ex_db.action_execution) - wf_lv_ac_db = action_utils.get_liveaction_by_id(wf_ac_ex_db.liveaction["id"]) + wf_lv_ac_db = action_utils.get_liveaction_by_id(wf_ac_ex_db.liveaction_id) # Gather result for liveaction and action execution. result = {"output": wf_ex_db.output or None} diff --git a/st2common/st2common/util/param.py b/st2common/st2common/util/param.py index 67fb83e9ac..104a3c5479 100644 --- a/st2common/st2common/util/param.py +++ b/st2common/st2common/util/param.py @@ -310,6 +310,7 @@ def render_live_params( additional_contexts=None, ): """ + :param params: BaseDict Renders list of parameters. Ensures that there's no cyclic or missing dependencies. Returns a dict of plain rendered parameters. """ diff --git a/st2common/tests/unit/migrations/test_v35_migrate_db_dict_field_values.py b/st2common/tests/unit/migrations/test_v35_migrate_db_dict_field_values.py index 84347fcbab..b8fafed457 100644 --- a/st2common/tests/unit/migrations/test_v35_migrate_db_dict_field_values.py +++ b/st2common/tests/unit/migrations/test_v35_migrate_db_dict_field_values.py @@ -11,16 +11,23 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
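For reference, the storage layout produced by the helpers added in st2common/st2common/constants/compression.py earlier in this patch is a one-byte b"z" marker followed by a zstandard frame of the orjson-serialized dictionary; values with no recognized marker are returned unchanged by uncompress(), which keeps previously stored plain JSON values readable. A small sketch of that round trip, assuming only the zstandard and orjson packages already used elsewhere in the patch:

import orjson
import zstandard

from st2common.constants.compression import uncompress

document = {"result": {"stdout": "hello", "exit_code": 0}}

# compress() emits this same layout when the new
# database.parameter_result_compression option is set to "zstandard".
stored = b"z" + zstandard.ZstdCompressor().compress(orjson.dumps(document))

# Compressed value: the marker byte is stripped and the payload decompressed.
assert orjson.loads(uncompress(stored)) == document

# Legacy plain orjson value (no marker byte) passes through untouched.
assert uncompress(orjson.dumps(document)) == orjson.dumps(document)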
+from st2common.util.monkey_patch import monkey_patch + +monkey_patch() import os import sys import datetime +import mongoengine as me from st2common.constants import action as action_constants +from st2common.fields import ComplexDateTimeField +from st2common.fields import JSONDictEscapedFieldCompatibilityField from st2common.models.db import stormbase from st2common.models.db.execution import ActionExecutionDB from st2common.models.db.liveaction import LiveActionDB +from st2common.models.db.notification import NotificationSchema from st2common.models.db.workflow import WorkflowExecutionDB from st2common.models.db.workflow import TaskExecutionDB from st2common.models.db.trigger import TriggerInstanceDB @@ -31,6 +38,7 @@ from st2common.persistence.trigger import TriggerInstance from st2common.constants.triggers import TRIGGER_INSTANCE_PROCESSED from st2common.constants.triggers import TRIGGER_INSTANCE_PENDING +from st2common.util import date as date_utils from st2tests import DbTestCase @@ -41,7 +49,6 @@ import st2_migrate_db_dict_field_values as migration_module - MOCK_RESULT_1 = { "foo": "bar1", "bar": 1, @@ -79,10 +86,178 @@ def test_migrate_executions(self): LiveActionDB._meta["allow_inheritance"] = True class ActionExecutionDB_OldFieldType(ActionExecutionDB): + liveaction_id = me.StringField() # not required; didn't exist result = stormbase.EscapedDynamicField(default={}) + liveaction = stormbase.EscapedDictField(required=True) + parameters = stormbase.EscapedDynamicField(default={}) + + workflow_execution = me.StringField() + task_execution = me.StringField() + status = me.StringField( + required=True, help_text="The current status of the liveaction." + ) + start_timestamp = ComplexDateTimeField( + default=date_utils.get_datetime_utc_now, + help_text="The timestamp when the liveaction was created.", + ) + end_timestamp = ComplexDateTimeField( + help_text="The timestamp when the liveaction has finished." + ) + action = stormbase.EscapedDictField(required=True) + context = me.DictField( + default={}, help_text="Contextual information on the action execution." + ) + delay = me.IntField(min_value=0) + + # diff from liveaction + runner = stormbase.EscapedDictField(required=True) + trigger = stormbase.EscapedDictField() + trigger_type = stormbase.EscapedDictField() + trigger_instance = stormbase.EscapedDictField() + rule = stormbase.EscapedDictField() + result_size = me.IntField( + default=0, help_text="Serialized result size in bytes" + ) + parent = me.StringField() + children = me.ListField(field=me.StringField()) + log = me.ListField(field=me.DictField()) + # Do not use URLField for web_url. If host doesn't have FQDN set, URLField validation blows. + web_url = me.StringField(required=False) class LiveActionDB_OldFieldType(LiveActionDB): result = stormbase.EscapedDynamicField(default={}) + workflow_execution = me.StringField() + task_execution = me.StringField() + # TODO: Can status be an enum at the Mongo layer? + status = me.StringField( + required=True, help_text="The current status of the liveaction." + ) + start_timestamp = ComplexDateTimeField( + default=date_utils.get_datetime_utc_now, + help_text="The timestamp when the liveaction was created.", + ) + end_timestamp = ComplexDateTimeField( + help_text="The timestamp when the liveaction has finished." 
+ ) + action = me.StringField( + required=True, + help_text="Reference to the action that has to be executed.", + ) + parameters = JSONDictEscapedFieldCompatibilityField( + default={}, + help_text="The key-value pairs passed as to the action runner & execution.", + ) + context = me.DictField( + default={}, help_text="Contextual information on the action execution." + ) + delay = me.IntField( + min_value=0, + help_text="How long (in milliseconds) to delay the execution before scheduling.", + ) + + # diff from action execution + action_is_workflow = me.BooleanField( + default=False, + help_text="A flag indicating whether the referenced action is a workflow.", + ) + callback = me.DictField( + default={}, + help_text="Callback information for the on completion of action execution.", + ) + notify = me.EmbeddedDocumentField(NotificationSchema) + runner_info = me.DictField( + default={}, + help_text="Information about the runner which executed this live action (hostname, pid).", + ) + + class LiveActionDB_NewFieldType(LiveActionDB): + result = JSONDictEscapedFieldCompatibilityField( + default={}, help_text="Action defined result." + ) + workflow_execution = me.StringField() + task_execution = me.StringField() + # TODO: Can status be an enum at the Mongo layer? + status = me.StringField( + required=True, help_text="The current status of the liveaction." + ) + start_timestamp = ComplexDateTimeField( + default=date_utils.get_datetime_utc_now, + help_text="The timestamp when the liveaction was created.", + ) + end_timestamp = ComplexDateTimeField( + help_text="The timestamp when the liveaction has finished." + ) + action = me.StringField( + required=True, + help_text="Reference to the action that has to be executed.", + ) + parameters = JSONDictEscapedFieldCompatibilityField( + default={}, + help_text="The key-value pairs passed as to the action runner & execution.", + ) + context = me.DictField( + default={}, help_text="Contextual information on the action execution." + ) + delay = me.IntField( + min_value=0, + help_text="How long (in milliseconds) to delay the execution before scheduling.", + ) + + # diff from action execution + action_is_workflow = me.BooleanField( + default=False, + help_text="A flag indicating whether the referenced action is a workflow.", + ) + callback = me.DictField( + default={}, + help_text="Callback information for the on completion of action execution.", + ) + notify = me.EmbeddedDocumentField(NotificationSchema) + runner_info = me.DictField( + default={}, + help_text="Information about the runner which executed this live action (hostname, pid).", + ) + + class ActionExecutionDB_NewFieldType(ActionExecutionDB): + liveaction_id = me.StringField() # not required; didn't exist + liveaction = stormbase.EscapedDictField(required=True) + parameters = stormbase.EscapedDynamicField(default={}) + result = JSONDictEscapedFieldCompatibilityField( + default={}, help_text="Action defined result." + ) + + workflow_execution = me.StringField() + task_execution = me.StringField() + status = me.StringField( + required=True, help_text="The current status of the liveaction." + ) + start_timestamp = ComplexDateTimeField( + default=date_utils.get_datetime_utc_now, + help_text="The timestamp when the liveaction was created.", + ) + end_timestamp = ComplexDateTimeField( + help_text="The timestamp when the liveaction has finished." + ) + action = stormbase.EscapedDictField(required=True) + context = me.DictField( + default={}, help_text="Contextual information on the action execution." 
+ ) + delay = me.IntField(min_value=0) + + # diff from liveaction + runner = stormbase.EscapedDictField(required=True) + trigger = stormbase.EscapedDictField() + trigger_type = stormbase.EscapedDictField() + trigger_instance = stormbase.EscapedDictField() + rule = stormbase.EscapedDictField() + result_size = me.IntField( + default=0, help_text="Serialized result size in bytes" + ) + parent = me.StringField() + children = me.ListField(field=me.StringField()) + log = me.ListField(field=me.DictField()) + # Do not use URLField for web_url. If host doesn't have FQDN set, URLField validation blows. + web_url = me.StringField(required=False) execution_dbs = ActionExecution.query( __raw__={ @@ -224,15 +399,23 @@ class LiveActionDB_OldFieldType(LiveActionDB): "$type": "object", }, } - ).update(set___cls="ActionExecutionDB") - + ).update(set___cls="ActionExecutionDB.ActionExecutionDB_NewFieldType") + execution_dbs = ActionExecution.query( + __raw__={ + "result": { + "$not": { + "$type": "binData", + }, + } + } + ) LiveAction.query( __raw__={ "result": { "$type": "object", }, } - ).update(set___cls="LiveActionDB") + ).update(set___cls="LiveActionDB.LiveActionDB_NewFieldType") # 2. Run migration start_dt = datetime.datetime.utcnow().replace( diff --git a/st2common/tests/unit/services/test_trace.py b/st2common/tests/unit/services/test_trace.py index b7f8a2ddaf..f4d3ce48c5 100644 --- a/st2common/tests/unit/services/test_trace.py +++ b/st2common/tests/unit/services/test_trace.py @@ -253,9 +253,7 @@ def test_get_trace_db_by_live_action_parent_fail(self): def test_get_trace_db_by_live_action_from_execution(self): traceable_liveaction = copy.copy(self.traceable_liveaction) # fixtures id value in liveaction is not persisted in DB. - traceable_liveaction.id = bson.ObjectId( - self.traceable_execution.liveaction["id"] - ) + traceable_liveaction.id = bson.ObjectId(self.traceable_execution.liveaction_id) created, trace_db = trace_service.get_trace_db_by_live_action( traceable_liveaction ) diff --git a/st2common/tests/unit/services/test_workflow_identify_orphans.py b/st2common/tests/unit/services/test_workflow_identify_orphans.py index ba1df395a8..c3f39f06cf 100644 --- a/st2common/tests/unit/services/test_workflow_identify_orphans.py +++ b/st2common/tests/unit/services/test_workflow_identify_orphans.py @@ -170,7 +170,7 @@ def mock_workflow_records(self, completed=False, expired=True, log=True): workflow_execution=str(wf_ex_db.id), action={"runner_type": runner, "ref": action_ref}, runner={"name": runner}, - liveaction={"id": str(lv_ac_db.id)}, + liveaction_id=str(lv_ac_db.id), context={"user": user, "workflow_execution": str(wf_ex_db.id)}, status=status, start_timestamp=start_timestamp, @@ -264,7 +264,7 @@ def mock_task_records( task_execution=str(tk_ex_db.id), action={"runner_type": runner, "ref": action_ref}, runner={"name": runner}, - liveaction={"id": str(lv_ac_db.id)}, + liveaction_id=str(lv_ac_db.id), context=context, status=status, start_timestamp=tk_ex_db.start_timestamp, diff --git a/st2common/tests/unit/services/test_workflow_service_retries.py b/st2common/tests/unit/services/test_workflow_service_retries.py index 4de49fcf3f..fef68fbdcb 100644 --- a/st2common/tests/unit/services/test_workflow_service_retries.py +++ b/st2common/tests/unit/services/test_workflow_service_retries.py @@ -140,7 +140,7 @@ def test_recover_from_coordinator_connection_error(self, mock_get_lock): tk1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk1_ex_db.id) )[0] - tk1_lv_ac_db = 
lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction["id"]) + tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction_id) self.assertEqual(tk1_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) mock_get_lock.side_effect = [ coordination.ToozConnectionError("foobar"), @@ -174,7 +174,7 @@ def test_retries_exhausted_from_coordinator_connection_error(self, mock_get_lock tk1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk1_ex_db.id) )[0] - tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction["id"]) + tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction_id) self.assertEqual(tk1_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) mock_get_lock.side_effect = [ @@ -215,7 +215,7 @@ def test_recover_from_database_connection_error(self): tk1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk1_ex_db.id) )[0] - tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction["id"]) + tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction_id) self.assertEqual(tk1_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) wf_svc.handle_action_execution_completion(tk1_ac_ex_db) @@ -242,7 +242,7 @@ def test_retries_exhausted_from_database_connection_error(self): tk1_ac_ex_db = ex_db_access.ActionExecution.query( task_execution=str(tk1_ex_db.id) )[0] - tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction["id"]) + tk1_lv_ac_db = lv_db_access.LiveAction.get_by_id(tk1_ac_ex_db.liveaction_id) self.assertEqual(tk1_lv_ac_db.status, ac_const.LIVEACTION_STATUS_SUCCEEDED) # The connection error should raise if retries are exhaused. diff --git a/st2common/tests/unit/test_db_execution.py b/st2common/tests/unit/test_db_execution.py index ca322c0f1b..5b0f73701e 100644 --- a/st2common/tests/unit/test_db_execution.py +++ b/st2common/tests/unit/test_db_execution.py @@ -60,6 +60,7 @@ }, }, }, + "id": "liveaction_inquiry", "action": "core.ask", } @@ -70,6 +71,7 @@ } }, "action": "st2.inquiry.respond", + "id": "liveaction_respond", } OUTPUT_SCHEMA_RESULT = { @@ -82,6 +84,7 @@ } OUTPUT_SCHEMA_LIVEACTION = { + "id": "output_schema", "action": "core.ask", "parameters": {}, } @@ -91,14 +94,14 @@ "action": {"uid": "action:core:ask", "output_schema": {}}, "status": "succeeded", "runner": {"name": "inquirer"}, - "liveaction": INQUIRY_LIVEACTION, + "liveaction_id": INQUIRY_LIVEACTION["id"], "result": INQUIRY_RESULT, }, "execution_2": { "action": {"uid": "action:st2:inquiry.respond", "output_schema": {}}, "status": "succeeded", "runner": {"name": "python-script"}, - "liveaction": RESPOND_LIVEACTION, + "liveaction_id": RESPOND_LIVEACTION["id"], "result": {"exit_code": 0, "result": None, "stderr": "", "stdout": ""}, }, "execution_3": { @@ -119,7 +122,7 @@ }, "status": "succeeded", "runner": {"name": "inquirer", "output_key": "result"}, - "liveaction": OUTPUT_SCHEMA_LIVEACTION, + "liveaction_id": OUTPUT_SCHEMA_LIVEACTION["id"], "result": OUTPUT_SCHEMA_RESULT, }, } @@ -137,7 +140,7 @@ def setUp(self): created.action = execution["action"] created.status = execution["status"] created.runner = execution["runner"] - created.liveaction = execution["liveaction"] + created.liveaction_id = execution["liveaction_id"] created.result = execution["result"] saved = ActionExecutionModelTest._save_execution(created) @@ -188,19 +191,6 @@ def test_execution_inquiry_secrets(self): "supersecretvalue", ) - def test_execution_inquiry_response_action(self): - """Test that the response parameters for any 
`st2.inquiry.respond` executions are masked - - We aren't bothering to get the inquiry schema in the `st2.inquiry.respond` action, - so we mask all response values. This test ensures this happens. - """ - - masked = self.executions["execution_2"].mask_secrets( - self.executions["execution_2"].to_serializable_dict() - ) - for value in masked["parameters"]["response"].values(): - self.assertEqual(value, MASKED_ATTRIBUTE_VALUE) - def test_output_schema_secret_param_masking(self): """Test that the output marked as secret in the output schema is masked in the output result diff --git a/st2common/tests/unit/test_db_fields.py b/st2common/tests/unit/test_db_fields.py index c6deff0f6c..7804fc5f72 100644 --- a/st2common/tests/unit/test_db_fields.py +++ b/st2common/tests/unit/test_db_fields.py @@ -21,8 +21,8 @@ import mock import unittest +from oslo_config import cfg import orjson -import zstandard # pytest: make sure monkey_patching happens before importing mongoengine from st2common.util.monkey_patch import monkey_patch @@ -37,8 +37,6 @@ from st2common.models.db import MongoDBAccess from st2common.fields import JSONDictField from st2common.fields import JSONDictEscapedFieldCompatibilityField -from st2common.fields import JSONDictFieldCompressionAlgorithmEnum -from st2common.fields import JSONDictFieldSerializationFormatEnum from st2tests import DbTestCase @@ -79,6 +77,14 @@ class ModelWithJSONDictFieldDB(stormbase.StormFoundationDB): class JSONDictFieldTestCase(unittest.TestCase): + def setUp(self): + # NOTE: It's important we re-establish a connection on each setUp + cfg.CONF.reset() + + def tearDown(self): + # NOTE: It's important we disconnect here otherwise tests will fail + cfg.CONF.reset() + def test_set_to_mongo(self): field = JSONDictField(use_header=False) result = field.to_mongo({"test": {1, 2}}) @@ -89,12 +95,27 @@ def test_header_set_to_mongo(self): result = field.to_mongo({"test": {1, 2}}) self.assertTrue(isinstance(result, bytes)) - def test_to_mongo(self): + def test_to_mongo_to_python_none(self): + cfg.CONF.set_override( + name="parameter_result_compression", group="database", override="none" + ) + field = JSONDictField(use_header=False) + result = field.to_mongo(MOCK_DATA_DICT) + + self.assertTrue(isinstance(result, bytes)) + result = field.to_python(result) + self.assertEqual(result, MOCK_DATA_DICT) + + def test_to_mongo_zstandard(self): + cfg.CONF.set_override( + name="parameter_result_compression", group="database", override="zstandard" + ) field = JSONDictField(use_header=False) result = field.to_mongo(MOCK_DATA_DICT) self.assertTrue(isinstance(result, bytes)) - self.assertEqual(result, orjson.dumps(MOCK_DATA_DICT)) + result = field.to_python(result) + self.assertEqual(result, MOCK_DATA_DICT) def test_to_python(self): field = JSONDictField(use_header=False) @@ -147,75 +168,13 @@ def test_parse_field_value(self): self.assertEqual(result, {"c": "d"}) -class JSONDictFieldTestCaseWithHeader(unittest.TestCase): - def test_to_mongo_no_compression(self): - field = JSONDictField(use_header=True) - - result = field.to_mongo(MOCK_DATA_DICT) - self.assertTrue(isinstance(result, bytes)) - - split = result.split(b":", 2) - self.assertEqual(split[0], JSONDictFieldCompressionAlgorithmEnum.NONE.value) - self.assertEqual(split[1], JSONDictFieldSerializationFormatEnum.ORJSON.value) - self.assertEqual(orjson.loads(split[2]), MOCK_DATA_DICT) - - parsed_value = field.parse_field_value(result) - self.assertEqual(parsed_value, MOCK_DATA_DICT) - - def test_to_mongo_zstandard_compression(self): - field 
= JSONDictField(use_header=True, compression_algorithm="zstandard") - - result = field.to_mongo(MOCK_DATA_DICT) - self.assertTrue(isinstance(result, bytes)) - - split = result.split(b":", 2) - self.assertEqual( - split[0], JSONDictFieldCompressionAlgorithmEnum.ZSTANDARD.value - ) - self.assertEqual(split[1], JSONDictFieldSerializationFormatEnum.ORJSON.value) - self.assertEqual( - orjson.loads(zstandard.ZstdDecompressor().decompress(split[2])), - MOCK_DATA_DICT, - ) - - parsed_value = field.parse_field_value(result) - self.assertEqual(parsed_value, MOCK_DATA_DICT) - - def test_to_python_no_compression(self): - field = JSONDictField(use_header=True) - - serialized_data = field.to_mongo(MOCK_DATA_DICT) - - self.assertTrue(isinstance(serialized_data, bytes)) - split = serialized_data.split(b":", 2) - self.assertEqual(split[0], JSONDictFieldCompressionAlgorithmEnum.NONE.value) - self.assertEqual(split[1], JSONDictFieldSerializationFormatEnum.ORJSON.value) - - desserialized_data = field.to_python(serialized_data) - self.assertEqual(desserialized_data, MOCK_DATA_DICT) - - def test_to_python_zstandard_compression(self): - field = JSONDictField(use_header=True, compression_algorithm="zstandard") - - serialized_data = field.to_mongo(MOCK_DATA_DICT) - self.assertTrue(isinstance(serialized_data, bytes)) - - split = serialized_data.split(b":", 2) - self.assertEqual( - split[0], JSONDictFieldCompressionAlgorithmEnum.ZSTANDARD.value - ) - self.assertEqual(split[1], JSONDictFieldSerializationFormatEnum.ORJSON.value) - - desserialized_data = field.to_python(serialized_data) - self.assertEqual(desserialized_data, MOCK_DATA_DICT) - - class JSONDictEscapedFieldCompatibilityFieldTestCase(DbTestCase): def test_to_mongo(self): field = JSONDictEscapedFieldCompatibilityField(use_header=False) result_to_mongo_1 = field.to_mongo(MOCK_DATA_DICT) - self.assertEqual(result_to_mongo_1, orjson.dumps(MOCK_DATA_DICT)) + self.assertTrue(isinstance(result_to_mongo_1, bytes)) + self.assertEqual(result_to_mongo_1[0:1], b"z") # Already serialized result_to_mongo_2 = field.to_mongo(MOCK_DATA_DICT) @@ -275,7 +234,12 @@ def test_existing_db_value_is_using_escaped_dict_field_compatibility(self): self.assertEqual(len(pymongo_result), 1) self.assertEqual(pymongo_result[0]["_id"], inserted_model_db.id) self.assertTrue(isinstance(pymongo_result[0]["result"], bytes)) - self.assertEqual(orjson.loads(pymongo_result[0]["result"]), expected_data) + + result = pymongo_result[0]["result"] + + field = JSONDictField(use_header=False) + result = field.to_python(result) + self.assertEqual(result, expected_data) self.assertEqual(pymongo_result[0]["counter"], 1) def test_field_state_changes_are_correctly_detected_add_or_update_method(self): diff --git a/st2common/tests/unit/test_db_liveaction.py b/st2common/tests/unit/test_db_liveaction.py index 605aa759f6..9cc308ad38 100644 --- a/st2common/tests/unit/test_db_liveaction.py +++ b/st2common/tests/unit/test_db_liveaction.py @@ -16,6 +16,7 @@ from __future__ import absolute_import import mock +from st2common.constants.secrets import MASKED_ATTRIBUTE_VALUE from st2common.models.db.liveaction import LiveActionDB from st2common.models.db.notification import NotificationSchema, NotificationSubSchema from st2common.persistence.liveaction import LiveAction @@ -132,6 +133,32 @@ def test_liveaction_create_with_notify_both_on_success_and_on_error(self): self.assertEqual(on_failure.message, retrieved.notify.on_failure.message) self.assertEqual(retrieved.notify.on_complete, None) + def 
test_liveaction_inquiry_response_action(self): + RESPOND_LIVEACTION = { + "parameters": { + "response": { + "secondfactor": "omgsupersecret", + } + }, + "action": "st2.inquiry.respond", + "id": "54c6b6d60640fd4f5354e74c", + } + + created = LiveActionDB() + created.action = RESPOND_LIVEACTION["action"] + created.status = "succeeded" + created.parameters = RESPOND_LIVEACTION["parameters"] + created.id = RESPOND_LIVEACTION["id"] + saved = LiveActionModelTest._save_liveaction(created) + + retrieved = LiveAction.get_by_id(saved.id) + self.assertEqual( + saved.action, retrieved.action, "Same triggertype was not returned." + ) + masked = retrieved.mask_secrets(retrieved.to_serializable_dict()) + for value in masked["parameters"]["response"].values(): + self.assertEqual(value, MASKED_ATTRIBUTE_VALUE) + @staticmethod def _save_liveaction(liveaction): return LiveAction.add_or_update(liveaction) diff --git a/st2common/tests/unit/test_executions.py b/st2common/tests/unit/test_executions.py index 0be1ca7c9d..24dc0be9c0 100644 --- a/st2common/tests/unit/test_executions.py +++ b/st2common/tests/unit/test_executions.py @@ -23,7 +23,9 @@ from st2common.util import isotime from st2common.util import date as date_utils from st2common.persistence.execution import ActionExecution +from st2common.persistence.liveaction import LiveAction from st2common.models.api.execution import ActionExecutionAPI +from st2common.models.api.action import LiveActionAPI from st2common.exceptions.db import StackStormDBObjectNotFoundError from six.moves import range @@ -33,12 +35,18 @@ def setUp(self): super(TestActionExecutionHistoryModel, self).setUp() # Fake execution record for action liveactions triggered by workflow runner. + self.fake_history_liveactions = [ + fixture.ARTIFACTS["liveactions"]["task1"], + fixture.ARTIFACTS["liveactions"]["task2"], + ] self.fake_history_subtasks = [ { "id": str(bson.ObjectId()), "action": copy.deepcopy(fixture.ARTIFACTS["actions"]["local"]), "runner": copy.deepcopy(fixture.ARTIFACTS["runners"]["run-local"]), - "liveaction": copy.deepcopy(fixture.ARTIFACTS["liveactions"]["task1"]), + "liveaction_id": copy.deepcopy( + fixture.ARTIFACTS["liveactions"]["task1"]["id"] + ), "status": fixture.ARTIFACTS["liveactions"]["task1"]["status"], "start_timestamp": fixture.ARTIFACTS["liveactions"]["task1"][ "start_timestamp" @@ -51,7 +59,9 @@ def setUp(self): "id": str(bson.ObjectId()), "action": copy.deepcopy(fixture.ARTIFACTS["actions"]["local"]), "runner": copy.deepcopy(fixture.ARTIFACTS["runners"]["run-local"]), - "liveaction": copy.deepcopy(fixture.ARTIFACTS["liveactions"]["task2"]), + "liveaction_id": copy.deepcopy( + fixture.ARTIFACTS["liveactions"]["task2"]["id"] + ), "status": fixture.ARTIFACTS["liveactions"]["task2"]["status"], "start_timestamp": fixture.ARTIFACTS["liveactions"]["task2"][ "start_timestamp" @@ -71,7 +81,9 @@ def setUp(self): "rule": copy.deepcopy(fixture.ARTIFACTS["rule"]), "action": copy.deepcopy(fixture.ARTIFACTS["actions"]["chain"]), "runner": copy.deepcopy(fixture.ARTIFACTS["runners"]["action-chain"]), - "liveaction": copy.deepcopy(fixture.ARTIFACTS["liveactions"]["workflow"]), + "liveaction_id": copy.deepcopy( + fixture.ARTIFACTS["liveactions"]["workflow"]["id"] + ), "children": [task["id"] for task in self.fake_history_subtasks], "status": fixture.ARTIFACTS["liveactions"]["workflow"]["status"], "start_timestamp": fixture.ARTIFACTS["liveactions"]["workflow"][ @@ -81,12 +93,18 @@ def setUp(self): "end_timestamp" ], } - + self.fake_history_workflow_liveaction = 
fixture.ARTIFACTS["liveactions"][ + "workflow" + ] # Assign parent to the execution records for the subtasks. for task in self.fake_history_subtasks: task["parent"] = self.fake_history_workflow["id"] def test_model_complete(self): + # create LiveactionApiObject + live_action_obj = LiveActionAPI( + **copy.deepcopy(self.fake_history_workflow_liveaction) + ) # Create API object. obj = ActionExecutionAPI(**copy.deepcopy(self.fake_history_workflow)) @@ -100,10 +118,15 @@ def test_model_complete(self): self.assertDictEqual(obj.rule, self.fake_history_workflow["rule"]) self.assertDictEqual(obj.action, self.fake_history_workflow["action"]) self.assertDictEqual(obj.runner, self.fake_history_workflow["runner"]) - self.assertEqual(obj.liveaction, self.fake_history_workflow["liveaction"]) + self.assertEqual(obj.liveaction_id, self.fake_history_workflow["liveaction_id"]) self.assertIsNone(getattr(obj, "parent", None)) self.assertListEqual(obj.children, self.fake_history_workflow["children"]) + # convert liveaction API to model + live_action_model = LiveActionAPI.to_model(live_action_obj) + live_action_model.id = live_action_obj.id + LiveAction.add_or_update(live_action_model) + # Convert API object to DB model. model = ActionExecutionAPI.to_model(obj) self.assertEqual(str(model.id), obj.id) @@ -117,10 +140,9 @@ def test_model_complete(self): self.assertDictEqual(model.rule, self.fake_history_workflow["rule"]) self.assertDictEqual(model.action, self.fake_history_workflow["action"]) self.assertDictEqual(model.runner, self.fake_history_workflow["runner"]) - doc = copy.deepcopy(self.fake_history_workflow["liveaction"]) - doc["start_timestamp"] = doc["start_timestamp"] - doc["end_timestamp"] = doc["end_timestamp"] - self.assertDictEqual(model.liveaction, doc) + self.assertEqual( + model.liveaction_id, self.fake_history_workflow["liveaction_id"] + ) self.assertIsNone(getattr(model, "parent", None)) self.assertListEqual(model.children, self.fake_history_workflow["children"]) @@ -137,7 +159,7 @@ def test_model_complete(self): self.assertDictEqual(obj.rule, self.fake_history_workflow["rule"]) self.assertDictEqual(obj.action, self.fake_history_workflow["action"]) self.assertDictEqual(obj.runner, self.fake_history_workflow["runner"]) - self.assertDictEqual(obj.liveaction, self.fake_history_workflow["liveaction"]) + self.assertEqual(obj.liveaction_id, self.fake_history_workflow["liveaction_id"]) self.assertIsNone(getattr(obj, "parent", None)) self.assertListEqual(obj.children, self.fake_history_workflow["children"]) @@ -157,10 +179,9 @@ def test_crud_complete(self): self.assertDictEqual(model.rule, self.fake_history_workflow["rule"]) self.assertDictEqual(model.action, self.fake_history_workflow["action"]) self.assertDictEqual(model.runner, self.fake_history_workflow["runner"]) - doc = copy.deepcopy(self.fake_history_workflow["liveaction"]) - doc["start_timestamp"] = doc["start_timestamp"] - doc["end_timestamp"] = doc["end_timestamp"] - self.assertDictEqual(model.liveaction, doc) + self.assertEqual( + model.liveaction_id, self.fake_history_workflow["liveaction_id"] + ) self.assertIsNone(getattr(model, "parent", None)) self.assertListEqual(model.children, self.fake_history_workflow["children"]) @@ -178,6 +199,10 @@ def test_crud_complete(self): ) def test_model_partial(self): + # create LiveactionApiObject + live_action_obj = LiveActionAPI( + **copy.deepcopy(self.fake_history_liveactions[0]) + ) # Create API object. 
obj = ActionExecutionAPI(**copy.deepcopy(self.fake_history_subtasks[0])) self.assertIsNone(getattr(obj, "trigger", None)) @@ -186,14 +211,19 @@ def test_model_partial(self): self.assertIsNone(getattr(obj, "rule", None)) self.assertDictEqual(obj.action, self.fake_history_subtasks[0]["action"]) self.assertDictEqual(obj.runner, self.fake_history_subtasks[0]["runner"]) - self.assertDictEqual( - obj.liveaction, self.fake_history_subtasks[0]["liveaction"] + self.assertEqual( + obj.liveaction_id, self.fake_history_subtasks[0]["liveaction_id"] ) self.assertEqual(obj.parent, self.fake_history_subtasks[0]["parent"]) self.assertIsNone(getattr(obj, "children", None)) + # convert liveaction API to model + live_action_model = LiveActionAPI.to_model(live_action_obj) + live_action_model.id = live_action_obj.id # Convert API object to DB model. model = ActionExecutionAPI.to_model(obj) + LiveAction.add_or_update(live_action_model) + self.assertEqual( + str(live_action_model.id), self.fake_history_subtasks[0]["liveaction_id"] + ) self.assertEqual(str(model.id), obj.id) self.assertDictEqual(model.trigger, {}) self.assertDictEqual(model.trigger_type, {}) @@ -201,11 +231,9 @@ self.assertDictEqual(model.rule, {}) self.assertDictEqual(model.action, self.fake_history_subtasks[0]["action"]) self.assertDictEqual(model.runner, self.fake_history_subtasks[0]["runner"]) - doc = copy.deepcopy(self.fake_history_subtasks[0]["liveaction"]) - doc["start_timestamp"] = doc["start_timestamp"] - doc["end_timestamp"] = doc["end_timestamp"] - - self.assertDictEqual(model.liveaction, doc) + self.assertEqual( + model.liveaction_id, self.fake_history_subtasks[0]["liveaction_id"] + ) self.assertEqual(model.parent, self.fake_history_subtasks[0]["parent"]) self.assertListEqual(model.children, []) @@ -218,8 +246,8 @@ def test_model_partial(self): self.assertIsNone(getattr(obj, "rule", None)) self.assertDictEqual(obj.action, self.fake_history_subtasks[0]["action"]) self.assertDictEqual(obj.runner, self.fake_history_subtasks[0]["runner"]) - self.assertDictEqual( - obj.liveaction, self.fake_history_subtasks[0]["liveaction"] + self.assertEqual( + obj.liveaction_id, self.fake_history_subtasks[0]["liveaction_id"] ) self.assertEqual(obj.parent, self.fake_history_subtasks[0]["parent"]) self.assertIsNone(getattr(obj, "children", None)) @@ -236,10 +264,9 @@ def test_crud_partial(self): self.assertDictEqual(model.rule, {}) self.assertDictEqual(model.action, self.fake_history_subtasks[0]["action"]) self.assertDictEqual(model.runner, self.fake_history_subtasks[0]["runner"]) - doc = copy.deepcopy(self.fake_history_subtasks[0]["liveaction"]) - doc["start_timestamp"] = doc["start_timestamp"] - doc["end_timestamp"] = doc["end_timestamp"] - self.assertDictEqual(model.liveaction, doc) + self.assertEqual( + model.liveaction_id, self.fake_history_subtasks[0]["liveaction_id"] + ) self.assertEqual(model.parent, self.fake_history_subtasks[0]["parent"]) self.assertListEqual(model.children, []) diff --git a/st2common/tests/unit/test_executions_util.py b/st2common/tests/unit/test_executions_util.py index 0776ef57cc..b7e6e15ada 100644 --- a/st2common/tests/unit/test_executions_util.py +++ b/st2common/tests/unit/test_executions_util.py @@ -79,7 +79,7 @@ def test_execution_creation_manual_action_run(self): executions_util.create_execution_object(liveaction) post_creation_timestamp = date_utils.get_datetime_utc_now() execution = self._get_action_execution( - liveaction__id=str(liveaction.id), raise_exception=True + liveaction_id=str(liveaction.id), 
raise_exception=True ) self.assertDictEqual(execution.trigger, {}) self.assertDictEqual(execution.trigger_type, {}) @@ -90,7 +90,7 @@ def test_execution_creation_manual_action_run(self): runner = RunnerType.get_by_name(action.runner_type["name"]) self.assertDictEqual(execution.runner, vars(RunnerTypeAPI.from_model(runner))) liveaction = LiveAction.get_by_id(str(liveaction.id)) - self.assertEqual(execution.liveaction["id"], str(liveaction.id)) + self.assertEqual(execution.liveaction_id, str(liveaction.id)) self.assertEqual(len(execution.log), 1) self.assertEqual(execution.log[0]["status"], liveaction.status) self.assertGreater(execution.log[0]["timestamp"], pre_creation_timestamp) @@ -120,7 +120,7 @@ def test_execution_creation_action_triggered_by_rule(self): ) executions_util.create_execution_object(liveaction) execution = self._get_action_execution( - liveaction__id=str(liveaction.id), raise_exception=True + liveaction_id=str(liveaction.id), raise_exception=True ) self.assertDictEqual(execution.trigger, vars(TriggerAPI.from_model(trigger))) self.assertDictEqual( @@ -136,13 +136,13 @@ def test_execution_creation_action_triggered_by_rule(self): runner = RunnerType.get_by_name(action.runner_type["name"]) self.assertDictEqual(execution.runner, vars(RunnerTypeAPI.from_model(runner))) liveaction = LiveAction.get_by_id(str(liveaction.id)) - self.assertEqual(execution.liveaction["id"], str(liveaction.id)) + self.assertEqual(execution.liveaction_id, str(liveaction.id)) def test_execution_creation_with_web_url(self): liveaction = self.MODELS["liveactions"]["liveaction1.yaml"] executions_util.create_execution_object(liveaction) execution = self._get_action_execution( - liveaction__id=str(liveaction.id), raise_exception=True + liveaction_id=str(liveaction.id), raise_exception=True ) self.assertIsNotNone(execution.web_url) execution_id = str(execution.id) @@ -164,7 +164,7 @@ def test_execution_update(self): executions_util.update_execution(liveaction) post_update_timestamp = date_utils.get_datetime_utc_now() execution = self._get_action_execution( - liveaction__id=str(liveaction.id), raise_exception=True + liveaction_id=str(liveaction.id), raise_exception=True ) self.assertEqual(len(execution.log), 2) self.assertEqual(execution.log[1]["status"], liveaction.status) @@ -178,7 +178,7 @@ def test_skip_execution_update(self): liveaction.status = "running" executions_util.update_execution(liveaction) execution = self._get_action_execution( - liveaction__id=str(liveaction.id), raise_exception=True + liveaction_id=str(liveaction.id), raise_exception=True ) self.assertEqual(len(execution.log), 1) # Check status is not updated if it's already in completed state. 
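The JSONDictField tests earlier in this patch exercise the storage layout used when parameter_result_compression is set to zstandard: the value written to MongoDB is a byte string of the form <compression-marker>:<serialization-marker>:<payload>, and readers split it with split(b":", 2) so that any ":" bytes inside the opaque payload are left intact. As a rough, self-contained sketch of that round trip (not the st2common implementation; the one-byte markers and helper names below are placeholders, the real values come from JSONDictFieldCompressionAlgorithmEnum and JSONDictFieldSerializationFormatEnum)::

    import orjson
    import zstandard

    # Hypothetical markers; st2 takes the real values from its header enums.
    ZSTD_MARKER = b"z"
    ORJSON_MARKER = b"j"

    def dump_with_header(data):
        # Layout: <compression>:<serialization>:<payload>
        payload = zstandard.ZstdCompressor().compress(orjson.dumps(data))
        return b":".join([ZSTD_MARKER, ORJSON_MARKER, payload])

    def load_with_header(value):
        algorithm, _fmt, payload = value.split(b":", 2)
        if algorithm == ZSTD_MARKER:
            payload = zstandard.ZstdDecompressor().decompress(payload)
        return orjson.loads(payload)

    assert load_with_header(dump_with_header({"cmd": "echo bar"})) == {"cmd": "echo bar"}

The maxsplit of 2 is the important detail: the compressed payload is binary and may itself contain ":" bytes, which is why the assertions in these tests only split on the first two separators.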
diff --git a/st2common/tests/unit/test_purge_executions.py b/st2common/tests/unit/test_purge_executions.py index bc2c43504f..40d9fe62c3 100644 --- a/st2common/tests/unit/test_purge_executions.py +++ b/st2common/tests/unit/test_purge_executions.py @@ -194,7 +194,7 @@ def test_liveaction_gets_deleted(self): exec_model["end_timestamp"] = end_ts exec_model["status"] = action_constants.LIVEACTION_STATUS_SUCCEEDED exec_model["id"] = bson.ObjectId() - exec_model["liveaction"]["id"] = str(liveaction.id) + exec_model["liveaction_id"] = str(liveaction.id) ActionExecution.add_or_update(exec_model) liveactions = LiveAction.get_all() diff --git a/st2reactor/tests/integration/test_garbage_collector.py b/st2reactor/tests/integration/test_garbage_collector.py index 4ce3d90afc..51c0e2dc1f 100644 --- a/st2reactor/tests/integration/test_garbage_collector.py +++ b/st2reactor/tests/integration/test_garbage_collector.py @@ -89,7 +89,7 @@ def test_garbage_collection(self): status=status, action={"ref": "core.local"}, runner={"name": "local-shell-cmd"}, - liveaction={"ref": "foo"}, + liveaction_id="ref", ) ActionExecution.add_or_update(action_execution_db) @@ -125,7 +125,7 @@ def test_garbage_collection(self): status=status, action={"ref": "core.local"}, runner={"name": "local-shell-cmd"}, - liveaction={"ref": "foo"}, + liveaction_id="ref", ) ActionExecution.add_or_update(action_execution_db) @@ -160,7 +160,7 @@ def test_garbage_collection(self): status=status, action={"ref": "core.local"}, runner={"name": "local-shell-cmd"}, - liveaction={"ref": "foo"}, + liveaction_id="ref", ) ActionExecution.add_or_update(action_execution_db) diff --git a/st2stream/tests/unit/controllers/v1/test_stream.py b/st2stream/tests/unit/controllers/v1/test_stream.py index dbfb6277c1..2d661992f9 100644 --- a/st2stream/tests/unit/controllers/v1/test_stream.py +++ b/st2stream/tests/unit/controllers/v1/test_stream.py @@ -19,8 +19,8 @@ from st2common.models.api.action import ActionAPI from st2common.models.api.action import RunnerTypeAPI +from st2common.models.api.action import LiveActionAPI from st2common.models.api.execution import ActionExecutionAPI -from st2common.models.api.execution import LiveActionAPI from st2common.models.api.execution import ActionExecutionOutputAPI from st2common.models.db.liveaction import LiveActionDB from st2common.models.db.execution import ActionExecutionDB diff --git a/st2stream/tests/unit/controllers/v1/test_stream_execution_output.py b/st2stream/tests/unit/controllers/v1/test_stream_execution_output.py index 9a135d1789..ab9088056b 100644 --- a/st2stream/tests/unit/controllers/v1/test_stream_execution_output.py +++ b/st2stream/tests/unit/controllers/v1/test_stream_execution_output.py @@ -21,9 +21,11 @@ from st2common.constants import action as action_constants from st2common.models.db.execution import ActionExecutionDB +from st2common.models.db.liveaction import LiveActionDB from st2common.models.db.execution import ActionExecutionOutputDB from st2common.persistence.execution import ActionExecution from st2common.persistence.execution import ActionExecutionOutput +from st2common.persistence.liveaction import LiveAction from st2common.util import date as date_utils from st2common.stream.listener import get_listener @@ -53,15 +55,21 @@ def test_get_output_running_execution(self): # Test the execution output API endpoint for execution which is running (blocking) status = action_constants.LIVEACTION_STATUS_RUNNING timestamp = date_utils.get_datetime_utc_now() + liveaction_id = "54c6b6d60640fd4f5354e74a" 
action_execution_db = ActionExecutionDB( start_timestamp=timestamp, end_timestamp=timestamp, status=status, action={"ref": "core.local"}, runner={"name": "local-shell-cmd"}, - liveaction={"ref": "foo"}, + liveaction_id=liveaction_id, ) action_execution_db = ActionExecution.add_or_update(action_execution_db) + liveaction_db = LiveActionDB( + action="core.local", runner_info={"name": "local-shell-cmd"}, status=status + ) + liveaction_db.id = liveaction_id + LiveAction.add_or_update(liveaction_db) output_params = dict( execution_id=str(action_execution_db.id), @@ -135,15 +143,23 @@ def test_get_output_finished_execution(self): # Insert mock execution and output objects status = action_constants.LIVEACTION_STATUS_SUCCEEDED timestamp = date_utils.get_datetime_utc_now() + liveaction_id = "54c6b6d60640fd4f5354e74a" action_execution_db = ActionExecutionDB( start_timestamp=timestamp, end_timestamp=timestamp, status=status, action={"ref": "core.local"}, runner={"name": "local-shell-cmd"}, - liveaction={"ref": "foo"}, + liveaction_id=liveaction_id, ) action_execution_db = ActionExecution.add_or_update(action_execution_db) + liveaction_db = LiveActionDB( + action="core.local", + runner_info={"name": "local-shell-cmd"}, + status=status, + ) + liveaction_db.id = liveaction_id + LiveAction.add_or_update(liveaction_db) for i in range(1, 6): stdout_db = ActionExecutionOutputDB( diff --git a/st2tests/integration/orquesta/test_wiring_error_handling.py b/st2tests/integration/orquesta/test_wiring_error_handling.py index 130a68c7c5..edaba668a1 100644 --- a/st2tests/integration/orquesta/test_wiring_error_handling.py +++ b/st2tests/integration/orquesta/test_wiring_error_handling.py @@ -22,7 +22,16 @@ class ErrorHandlingTest(base.TestWorkflowExecution): + def error_inspect(self, ex, expected_errors): + errors = [] + for i in ex.result.get("errors"): + i.pop("traceback", None) + errors.append(i) + for index, i in enumerate(errors): + self.assertDictEqual(i, expected_errors[index]) + def test_inspection_error(self): + self.maxDiff = None expected_errors = [ { "type": "content", @@ -66,7 +75,8 @@ def test_inspection_error(self): ex = self._execute_workflow("examples.orquesta-fail-inspection") ex = self._wait_for_completion(ex) self.assertEqual(ex.status, ac_const.LIVEACTION_STATUS_FAILED) - self.assertDictEqual(ex.result, {"errors": expected_errors, "output": None}) + self.error_inspect(ex, expected_errors) + self.assertIsNone(ex.result["output"]) def test_input_error(self): expected_errors = [ @@ -83,7 +93,7 @@ def test_input_error(self): ex = self._execute_workflow("examples.orquesta-fail-input-rendering") ex = self._wait_for_completion(ex) self.assertEqual(ex.status, ac_const.LIVEACTION_STATUS_FAILED) - self.assertDictEqual(ex.result, {"errors": expected_errors, "output": None}) + self.error_inspect(ex, expected_errors) def test_vars_error(self): expected_errors = [ @@ -99,8 +109,8 @@ def test_vars_error(self): ex = self._execute_workflow("examples.orquesta-fail-vars-rendering") ex = self._wait_for_completion(ex) + self.error_inspect(ex, expected_errors) self.assertEqual(ex.status, ac_const.LIVEACTION_STATUS_FAILED) - self.assertDictEqual(ex.result, {"errors": expected_errors, "output": None}) def test_start_task_error(self): self.maxDiff = None @@ -127,8 +137,8 @@ def test_start_task_error(self): ex = self._execute_workflow("examples.orquesta-fail-start-task") ex = self._wait_for_completion(ex) + self.error_inspect(ex, expected_errors) self.assertEqual(ex.status, ac_const.LIVEACTION_STATUS_FAILED) - 
self.assertDictEqual(ex.result, {"errors": expected_errors, "output": None}) def test_task_transition_error(self): expected_errors = [ @@ -149,9 +159,8 @@ def test_task_transition_error(self): ex = self._execute_workflow("examples.orquesta-fail-task-transition") ex = self._wait_for_completion(ex) self.assertEqual(ex.status, ac_const.LIVEACTION_STATUS_FAILED) - self.assertDictEqual( - ex.result, {"errors": expected_errors, "output": expected_output} - ) + self.assertDictEqual(ex.result["output"], expected_output) + self.error_inspect(ex, expected_errors) def test_task_publish_error(self): expected_errors = [ @@ -171,10 +180,10 @@ def test_task_publish_error(self): ex = self._execute_workflow("examples.orquesta-fail-task-publish") ex = self._wait_for_completion(ex) + self.assertEqual(ex.status, ac_const.LIVEACTION_STATUS_FAILED) - self.assertDictEqual( - ex.result, {"errors": expected_errors, "output": expected_output} - ) + self.assertDictEqual(ex.result["output"], expected_output) + self.error_inspect(ex, expected_errors) def test_output_error(self): expected_errors = [ @@ -191,9 +200,10 @@ def test_output_error(self): ex = self._execute_workflow("examples.orquesta-fail-output-rendering") ex = self._wait_for_completion(ex) self.assertEqual(ex.status, ac_const.LIVEACTION_STATUS_FAILED) - self.assertDictEqual(ex.result, {"errors": expected_errors, "output": None}) + self.error_inspect(ex, expected_errors) def test_task_content_errors(self): + self.maxDiff = None expected_errors = [ { "type": "content", @@ -226,8 +236,9 @@ def test_task_content_errors(self): ex = self._execute_workflow("examples.orquesta-fail-inspection-task-contents") ex = self._wait_for_completion(ex) + self.error_inspect(ex, expected_errors) + self.assertIsNone(ex.result["output"]) self.assertEqual(ex.status, ac_const.LIVEACTION_STATUS_FAILED) - self.assertDictEqual(ex.result, {"errors": expected_errors, "output": None}) def test_remediate_then_fail(self): expected_errors = [ @@ -262,10 +273,9 @@ def test_remediate_then_fail(self): self._wait_for_task(ex, "task1", ac_const.LIVEACTION_STATUS_FAILED) self._wait_for_task(ex, "log", ac_const.LIVEACTION_STATUS_SUCCEEDED) - # Assert workflow status and result. self.assertEqual(ex.status, ac_const.LIVEACTION_STATUS_FAILED) - self.assertDictEqual(ex.result, {"errors": expected_errors, "output": None}) + self.error_inspect(ex, expected_errors) def test_fail_manually(self): expected_errors = [ @@ -332,7 +342,6 @@ def test_fail_continue(self): # Assert task status. self._wait_for_task(ex, "task1", ac_const.LIVEACTION_STATUS_FAILED) - # Assert workflow status and result. 
self.assertEqual(ex.status, ac_const.LIVEACTION_STATUS_FAILED) self.assertDictEqual( diff --git a/st2tests/st2tests/api.py b/st2tests/st2tests/api.py index 1500a0b321..0f80aa2af5 100644 --- a/st2tests/st2tests/api.py +++ b/st2tests/st2tests/api.py @@ -87,7 +87,6 @@ def do_request(self, req, **kwargs): if req.environ["REQUEST_METHOD"] != "OPTIONS": # Making sure endpoint handles OPTIONS method properly self.options(req.environ["PATH_INFO"]) - res = super(TestApp, self).do_request(req, **kwargs) if res.headers.get("Warning", None): @@ -395,7 +394,7 @@ def _get_actionexecution_id(resp): @staticmethod def _get_liveaction_id(resp): - return resp.json["liveaction"]["id"] + return resp.json["liveaction_id"] def _do_get_one(self, actionexecution_id, *args, **kwargs): return self.app.get("/v1/executions/%s" % actionexecution_id, *args, **kwargs) diff --git a/st2tests/st2tests/fixtures/descendants/executions/child1_level1.yaml b/st2tests/st2tests/fixtures/descendants/executions/child1_level1.yaml index 30467cfb18..ebd2708f75 100644 --- a/st2tests/st2tests/fixtures/descendants/executions/child1_level1.yaml +++ b/st2tests/st2tests/fixtures/descendants/executions/child1_level1.yaml @@ -7,8 +7,7 @@ children: - 54e6583d0640fd16887d685b end_timestamp: '2014-09-01T00:00:57.000001Z' id: 54e657f20640fd16887d6857 -liveaction: - action: pointlessaction +liveaction_id: 54c6b6d60640fd4f5354e74a parent: 54e657d60640fd16887d6855 runner: name: pointlessrunner diff --git a/st2tests/st2tests/fixtures/descendants/executions/child1_level2.yaml b/st2tests/st2tests/fixtures/descendants/executions/child1_level2.yaml index 2ee548ccb2..6f330f5547 100644 --- a/st2tests/st2tests/fixtures/descendants/executions/child1_level2.yaml +++ b/st2tests/st2tests/fixtures/descendants/executions/child1_level2.yaml @@ -5,8 +5,7 @@ action: children: [] end_timestamp: '2014-09-01T00:00:56.000002Z' id: 54e657fa0640fd16887d6858 -liveaction: - action: pointlessaction +liveaction_id: 54c6b6d60640fd4f5354e74a parent: 54e657f20640fd16887d6857 runner: name: pointlessrunner diff --git a/st2tests/st2tests/fixtures/descendants/executions/child1_level3.yaml b/st2tests/st2tests/fixtures/descendants/executions/child1_level3.yaml index 276c5aa0b7..59626f9260 100644 --- a/st2tests/st2tests/fixtures/descendants/executions/child1_level3.yaml +++ b/st2tests/st2tests/fixtures/descendants/executions/child1_level3.yaml @@ -5,8 +5,7 @@ action: children: [] end_timestamp: '2014-09-01T00:00:55.100000Z' id: 54e6581b0640fd16887d6859 -liveaction: - action: pointlessaction +liveaction_id: 54c6b6d60640fd4f5354e74a parent: 54e6583d0640fd16887d685b runner: name: pointlessrunner diff --git a/st2tests/st2tests/fixtures/descendants/executions/child2_level1.yaml b/st2tests/st2tests/fixtures/descendants/executions/child2_level1.yaml index 35050c15cd..d3b9188507 100644 --- a/st2tests/st2tests/fixtures/descendants/executions/child2_level1.yaml +++ b/st2tests/st2tests/fixtures/descendants/executions/child2_level1.yaml @@ -6,8 +6,7 @@ children: - 54e658570640fd16887d685d end_timestamp: '2014-09-01T00:00:55.000000Z' id: 54e658290640fd16887d685a -liveaction: - action: pointlessaction +liveaction_id: 54c6b6d60640fd4f5354e74a parent: 54e657d60640fd16887d6855 runner: name: pointlessrunner diff --git a/st2tests/st2tests/fixtures/descendants/executions/child2_level2.yaml b/st2tests/st2tests/fixtures/descendants/executions/child2_level2.yaml index 7d57ceb171..d5fe4246c7 100644 --- a/st2tests/st2tests/fixtures/descendants/executions/child2_level2.yaml +++ 
b/st2tests/st2tests/fixtures/descendants/executions/child2_level2.yaml @@ -6,8 +6,7 @@ children: - 54e6581b0640fd16887d6859 end_timestamp: '2014-09-01T00:00:55.000000Z' id: 54e6583d0640fd16887d685b -liveaction: - action: pointlessaction +liveaction_id: 54c6b6d60640fd4f5354e74a parent: 54e657f20640fd16887d6857 runner: name: pointlessrunner diff --git a/st2tests/st2tests/fixtures/descendants/executions/child2_level3.yaml b/st2tests/st2tests/fixtures/descendants/executions/child2_level3.yaml index a10bcd016b..9dd19be9cb 100644 --- a/st2tests/st2tests/fixtures/descendants/executions/child2_level3.yaml +++ b/st2tests/st2tests/fixtures/descendants/executions/child2_level3.yaml @@ -5,8 +5,7 @@ action: children: [] end_timestamp: '2014-09-01T00:00:59.000010Z' id: 54e6584a0640fd16887d685c -liveaction: - action: pointlessaction +liveaction_id: 54c6b6d60640fd4f5354e74a parent: 54e658570640fd16887d685d runner: name: pointlessrunner diff --git a/st2tests/st2tests/fixtures/descendants/executions/child3_level2.yaml b/st2tests/st2tests/fixtures/descendants/executions/child3_level2.yaml index d803654d6b..cc40bcc4ba 100644 --- a/st2tests/st2tests/fixtures/descendants/executions/child3_level2.yaml +++ b/st2tests/st2tests/fixtures/descendants/executions/child3_level2.yaml @@ -7,8 +7,7 @@ children: - 54e6585f0640fd16887d685e end_timestamp: '2014-09-01T00:00:55.000000Z' id: 54e658570640fd16887d685d -liveaction: - action: pointlessaction +liveaction_id: 54c6b6d60640fd4f5354e74a parent: 54e658290640fd16887d685a runner: name: pointlessrunner diff --git a/st2tests/st2tests/fixtures/descendants/executions/child3_level3.yaml b/st2tests/st2tests/fixtures/descendants/executions/child3_level3.yaml index ad1aae1bad..4f28157a71 100644 --- a/st2tests/st2tests/fixtures/descendants/executions/child3_level3.yaml +++ b/st2tests/st2tests/fixtures/descendants/executions/child3_level3.yaml @@ -5,8 +5,7 @@ action: children: [] end_timestamp: '2014-09-01T00:00:55.000000Z' id: 54e6585f0640fd16887d685e -liveaction: - action: pointlessaction +liveaction_id: 54c6b6d60640fd4f5354e74a parent: 54e658570640fd16887d685d runner: name: pointlessrunner diff --git a/st2tests/st2tests/fixtures/descendants/executions/root_execution.yaml b/st2tests/st2tests/fixtures/descendants/executions/root_execution.yaml index d993f9c140..1f0a0bfb48 100644 --- a/st2tests/st2tests/fixtures/descendants/executions/root_execution.yaml +++ b/st2tests/st2tests/fixtures/descendants/executions/root_execution.yaml @@ -7,8 +7,7 @@ children: - 54e658290640fd16887d685a end_timestamp: '2014-09-01T00:00:59.000000Z' id: 54e657d60640fd16887d6855 -liveaction: - action: pointlessaction +liveaction_id: 54c6b6d60640fd4f5354e74a runner: name: pointlessrunner runner_module: no.module diff --git a/st2tests/st2tests/fixtures/descendants/liveactions/liveaction_fake.yaml b/st2tests/st2tests/fixtures/descendants/liveactions/liveaction_fake.yaml new file mode 100644 index 0000000000..b933dedf63 --- /dev/null +++ b/st2tests/st2tests/fixtures/descendants/liveactions/liveaction_fake.yaml @@ -0,0 +1,5 @@ +--- +action: local +name: "fake" +id: 54c6b6d60640fd4f5354e74a +status: succeeded diff --git a/st2tests/st2tests/fixtures/generic/executions/execution1.yaml b/st2tests/st2tests/fixtures/generic/executions/execution1.yaml index 8d519fad7a..4aef0aac05 100644 --- a/st2tests/st2tests/fixtures/generic/executions/execution1.yaml +++ b/st2tests/st2tests/fixtures/generic/executions/execution1.yaml @@ -13,17 +13,7 @@ action: runner_type: run-local end_timestamp: '2014-09-01T00:00:05.000000Z' id: 
54c6bb640640fd5211edef0d -liveaction: - action: core.someworkflow - callback: {} - context: - user: system - end_timestamp: '2014-09-01T00:00:05.000000Z' - id: 54c6b6d60640fd4f5354e74a - parameters: {} - result: {} - start_timestamp: '2014-09-01T00:00:01.000000Z' - status: scheduled +liveaction_id: 54c6b6d60640fd4f5354e74a parameters: {} result: {} runner: diff --git a/st2tests/st2tests/fixtures/generic/liveactions/parentliveaction.yaml b/st2tests/st2tests/fixtures/generic/liveactions/parentliveaction.yaml index ed56d4c449..087be792b1 100644 --- a/st2tests/st2tests/fixtures/generic/liveactions/parentliveaction.yaml +++ b/st2tests/st2tests/fixtures/generic/liveactions/parentliveaction.yaml @@ -1,10 +1,10 @@ --- action: core.someworkflow +id: 54c6b6d60640fd4f5354e74a callback: {} context: user: system end_timestamp: '2014-09-01T00:00:05.000000Z' -id: 54c6b6d60640fd4f5354e74a parameters: {} result: {} start_timestamp: '2014-09-01T00:00:01.000000Z' diff --git a/st2tests/st2tests/fixtures/packs/executions/liveactions.yaml b/st2tests/st2tests/fixtures/packs/executions/liveactions.yaml index 6f87578650..5e41bc3c4b 100644 --- a/st2tests/st2tests/fixtures/packs/executions/liveactions.yaml +++ b/st2tests/st2tests/fixtures/packs/executions/liveactions.yaml @@ -1,5 +1,6 @@ --- task1: + id: 54c6b6d60640fd4f5354e74a action: executions.local callback: {} end_timestamp: '2014-09-01T00:00:05.000000Z' @@ -18,6 +19,7 @@ task1: start_timestamp: '2014-09-01T00:00:02.000000Z' status: succeeded task2: + id: 54c6b6d60640fd4f5354e74a action: executions.local callback: {} end_timestamp: '2014-09-01T00:00:05.000000Z' @@ -36,6 +38,7 @@ task2: start_timestamp: '2014-09-01T00:00:03.000000Z' status: succeeded workflow: + id: 54c6b6d60640fd4f5354e74a action: executions.chain callback: {} end_timestamp: '2014-09-01T00:00:05.000000Z' diff --git a/st2tests/st2tests/fixtures/rule_enforcements/executions/execution1.yaml b/st2tests/st2tests/fixtures/rule_enforcements/executions/execution1.yaml index b76bd99d57..291eab7bcb 100644 --- a/st2tests/st2tests/fixtures/rule_enforcements/executions/execution1.yaml +++ b/st2tests/st2tests/fixtures/rule_enforcements/executions/execution1.yaml @@ -13,17 +13,7 @@ action: runner_type: run-local end_timestamp: '2014-09-01T00:00:05.000000Z' id: 565e15ce32ed350857dfa626 -liveaction: - action: core.someworkflow - callback: {} - context: - user: system - end_timestamp: '2014-09-01T00:00:05.000000Z' - id: 54c6b6d60640fd4f5354e74a - parameters: {} - result: {} - start_timestamp: '2014-09-01T00:00:01.000000Z' - status: scheduled +liveaction_id: 54c6b6d60640fd4f5354e74a parameters: cmd: echo bar result: {} diff --git a/st2tests/st2tests/fixtures/traces/executions/execution_with_parent.yaml b/st2tests/st2tests/fixtures/traces/executions/execution_with_parent.yaml index d627ef3777..279e799b8b 100644 --- a/st2tests/st2tests/fixtures/traces/executions/execution_with_parent.yaml +++ b/st2tests/st2tests/fixtures/traces/executions/execution_with_parent.yaml @@ -17,17 +17,7 @@ action: runner_type: action-chain end_timestamp: '2014-09-01T00:00:05.000000Z' id: 54c6bb640640fd5211edef3d -liveaction: - action: traces.someworkflow - callback: {} - context: - user: system - end_timestamp: '2014-09-01T00:00:05.000000Z' - id: 54c6b6d60640fd4f5354e75a - parameters: {} - result: {} - start_timestamp: '2014-09-01T00:00:01.000000Z' - status: scheduled +liveaction_id: 54c6b6d60640fd4f5354e75a parameters: {} result: {} runner: diff --git a/st2tests/st2tests/fixtures/traces/executions/rule_fired_execution.yaml 
b/st2tests/st2tests/fixtures/traces/executions/rule_fired_execution.yaml index 9e5f3af967..d09ee9faaa 100644 --- a/st2tests/st2tests/fixtures/traces/executions/rule_fired_execution.yaml +++ b/st2tests/st2tests/fixtures/traces/executions/rule_fired_execution.yaml @@ -17,17 +17,7 @@ action: runner_type: action-chain end_timestamp: '2014-09-01T00:00:05.000000Z' id: 54c6bb640640fd5211edef0d -liveaction: - action: traces.someworkflow - callback: {} - context: - user: system - end_timestamp: '2014-09-01T00:00:05.000000Z' - id: 54c6b6d60640fd4f5354e74a - parameters: {} - result: {} - start_timestamp: '2014-09-01T00:00:01.000000Z' - status: scheduled +liveaction_id: 54c6b6d60640fd4f5354e74a parameters: {} result: {} runner: diff --git a/st2tests/st2tests/fixtures/traces/executions/traceable_execution.yaml b/st2tests/st2tests/fixtures/traces/executions/traceable_execution.yaml index 55d7d404fa..70a0667e7b 100644 --- a/st2tests/st2tests/fixtures/traces/executions/traceable_execution.yaml +++ b/st2tests/st2tests/fixtures/traces/executions/traceable_execution.yaml @@ -17,17 +17,7 @@ action: runner_type: action-chain end_timestamp: '2014-09-01T00:00:05.000000Z' id: 54c6bb640640fd5211edef0d -liveaction: - action: traces.someworkflow - callback: {} - context: - user: system - end_timestamp: '2014-09-01T00:00:05.000000Z' - id: 54c6b6d60640fd4f5354e74a - parameters: {} - result: {} - start_timestamp: '2014-09-01T00:00:01.000000Z' - status: scheduled +liveaction_id: 54c6b6d60640fd4f5354e74a parameters: {} result: {} runner: