diff --git a/python/apps/taiga/src/taiga/stories/stories/repositories.py b/python/apps/taiga/src/taiga/stories/stories/repositories.py index 4bc0bd597..ee4fa5f06 100644 --- a/python/apps/taiga/src/taiga/stories/stories/repositories.py +++ b/python/apps/taiga/src/taiga/stories/stories/repositories.py @@ -243,3 +243,11 @@ def list_stories_to_reorder(filters: StoryFilters = {}) -> list[Story]: @sync_to_async def list_story_assignees(story: Story) -> list[User]: return list(story.assignees.all().order_by("-story_assignments__created_at")) + + +async def bulk_update_workflow_to_stories( + statuses_ids: list[UUID], old_workflow_id: UUID, new_workflow_id: UUID +) -> None: + await Story.objects.filter(status_id__in=statuses_ids, workflow_id=old_workflow_id).aupdate( + workflow_id=new_workflow_id + ) diff --git a/python/apps/taiga/src/taiga/workflows/api/__init__.py b/python/apps/taiga/src/taiga/workflows/api/__init__.py index f76ba7f46..b48f81daf 100644 --- a/python/apps/taiga/src/taiga/workflows/api/__init__.py +++ b/python/apps/taiga/src/taiga/workflows/api/__init__.py @@ -20,6 +20,7 @@ from taiga.workflows.api.validators import ( CreateWorkflowStatusValidator, CreateWorkflowValidator, + DeleteWorkflowQuery, DeleteWorkflowStatusQuery, ReorderWorkflowStatusesValidator, UpdateWorkflowStatusValidator, @@ -32,6 +33,7 @@ CREATE_WORKFLOW = IsProjectAdmin() LIST_WORKFLOWS = HasPerm("view_story") GET_WORKFLOW = HasPerm("view_story") +DELETE_WORKFLOW = IsProjectAdmin() UPDATE_WORKFLOW = IsProjectAdmin() CREATE_WORKFLOW_STATUS = IsProjectAdmin() UPDATE_WORKFLOW_STATUS = IsProjectAdmin() @@ -119,7 +121,7 @@ async def get_workflow( return await workflows_services.get_workflow_detail(project_id=project_id, workflow_slug=workflow_slug) -########################################################## +######################################################### # update workflow ########################################################## @@ -147,6 +149,44 @@ async def update_workflow( return await workflows_services.update_workflow(project_id=project_id, workflow=workflow, values=values) +################################################ +# delete workflow +################################################ + + +@routes.workflows.delete( + "/projects/{project_id}/workflows/{workflow_slug}", + name="project.workflow.delete", + summary="Delete a workflow", + responses=ERROR_403 | ERROR_404 | ERROR_422, + status_code=status.HTTP_204_NO_CONTENT, +) +async def delete_workflow( + project_id: B64UUID, + workflow_slug: str, + request: AuthRequest, + query_params: DeleteWorkflowQuery = Depends(), +) -> None: + """ + Deletes a workflow in the given project, providing the option to move all the statuses and their stories to another + workflow. + + Query params: + + * **move_to:** the workflow's slug to which move the statuses from the workflow being deleted + - if not received, the workflow, statuses and its contained stories will be deleted + - if received, the workflow will be deleted but its statuses and stories won't (they will be appended to the + last status of the specified workflow). 
+ """ + workflow = await get_workflow_or_404(project_id=project_id, workflow_slug=workflow_slug) + await check_permissions(permissions=DELETE_WORKFLOW, user=request.user, obj=workflow) + + await workflows_services.delete_workflow( + workflow=workflow, + target_workflow_slug=query_params.move_to, + ) + + ################################################ # misc ################################################ @@ -247,7 +287,7 @@ async def reorder_workflow_statuses( await check_permissions(permissions=REORDER_WORKFLOW_STATUSES, user=request.user, obj=workflow) return await workflows_services.reorder_workflow_statuses( - workflow=workflow, + target_workflow=workflow, statuses=form.statuses, reorder=form.get_reorder_dict(), ) @@ -277,7 +317,6 @@ async def delete_workflow_status( to any other existing workflow status in the same workflow. Query params: - * **move_to:** the workflow status's slug to which move the stories from the status being deleted - if not received, the workflow status and its contained stories will be deleted - if received, the workflow status will be deleted but its contained stories won't (they will be first moved to diff --git a/python/apps/taiga/src/taiga/workflows/api/validators.py b/python/apps/taiga/src/taiga/workflows/api/validators.py index 347daa2e1..e0e49b5fb 100644 --- a/python/apps/taiga/src/taiga/workflows/api/validators.py +++ b/python/apps/taiga/src/taiga/workflows/api/validators.py @@ -31,6 +31,16 @@ class CreateWorkflowValidator(BaseModel): name: WorkflowName +class DeleteWorkflowQuery(BaseModel): + move_to: WorkflowName | None + + @validator("move_to") + def check_move_to_slug(cls, v: WorkflowName | None) -> WorkflowName | None: + if v is None: + return None + return v + + class CreateWorkflowStatusValidator(BaseModel): name: WorkflowStatusName color: conint(gt=0, lt=9) # type: ignore diff --git a/python/apps/taiga/src/taiga/workflows/events/__init__.py b/python/apps/taiga/src/taiga/workflows/events/__init__.py index 1ec5c2e86..d704b8355 100644 --- a/python/apps/taiga/src/taiga/workflows/events/__init__.py +++ b/python/apps/taiga/src/taiga/workflows/events/__init__.py @@ -10,6 +10,7 @@ from taiga.workflows.events.content import ( CreateWorkflowContent, CreateWorkflowStatusContent, + DeleteWorkflowContent, DeleteWorkflowStatusContent, ReorderWorkflowStatusesContent, UpdateWorkflowContent, @@ -20,6 +21,7 @@ CREATE_WORKFLOW = "workflows.create" UPDATE_WORKFLOW = "workflows.update" +DELETE_WORKFLOW = "workflows.delete" CREATE_WORKFLOW_STATUS = "workflowstatuses.create" UPDATE_WORKFLOW_STATUS = "workflowstatuses.update" REORDER_WORKFLOW_STATUS = "workflowstatuses.reorder" @@ -44,6 +46,19 @@ async def emit_event_when_workflow_is_updated(project: Project, workflow: Workfl ) +async def emit_event_when_workflow_is_deleted( + project: Project, workflow: WorkflowSerializer, target_workflow: WorkflowSerializer | None +) -> None: + await events_manager.publish_on_project_channel( + project=project, + type=DELETE_WORKFLOW, + content=DeleteWorkflowContent( + workflow=workflow, + target_workflow=target_workflow, + ), + ) + + async def emit_event_when_workflow_status_is_created(project: Project, workflow_status: WorkflowStatus) -> None: await events_manager.publish_on_project_channel( project=project, diff --git a/python/apps/taiga/src/taiga/workflows/events/content.py b/python/apps/taiga/src/taiga/workflows/events/content.py index f9c3d4b90..5c4b42d67 100644 --- a/python/apps/taiga/src/taiga/workflows/events/content.py +++ 
b/python/apps/taiga/src/taiga/workflows/events/content.py @@ -17,6 +17,11 @@ class UpdateWorkflowContent(BaseModel): workflow: WorkflowSerializer +class DeleteWorkflowContent(BaseModel): + workflow: WorkflowSerializer + target_workflow: WorkflowSerializer | None + + class CreateWorkflowStatusContent(BaseModel): workflow_status: WorkflowStatusSerializer diff --git a/python/apps/taiga/src/taiga/workflows/repositories.py b/python/apps/taiga/src/taiga/workflows/repositories.py index 0176ce69c..46bd9c35c 100644 --- a/python/apps/taiga/src/taiga/workflows/repositories.py +++ b/python/apps/taiga/src/taiga/workflows/repositories.py @@ -29,6 +29,7 @@ class WorkflowFilters(TypedDict, total=False): + id: UUID slug: str project_id: UUID @@ -162,6 +163,17 @@ def update_workflow(workflow: Workflow, values: dict[str, Any] = {}) -> Workflow return workflow +########################################################## +# Workflow - delete workflow +########################################################## + + +async def delete_workflow(filters: WorkflowFilters = {}) -> int: + qs = _apply_filters_to_workflow_queryset(qs=DEFAULT_QUERYSET_WORKFLOW, filters=filters) + count, _ = await qs.adelete() + return count + + ########################################################## # WorkflowStatus - filters and querysets ########################################################## diff --git a/python/apps/taiga/src/taiga/workflows/serializers/__init__.py b/python/apps/taiga/src/taiga/workflows/serializers/__init__.py index 1e3b13ad1..cfa1a0064 100644 --- a/python/apps/taiga/src/taiga/workflows/serializers/__init__.py +++ b/python/apps/taiga/src/taiga/workflows/serializers/__init__.py @@ -42,7 +42,7 @@ class Config: class ReorderWorkflowStatusesSerializer(BaseModel): workflow: WorkflowNestedSerializer statuses: list[UUIDB64] - reorder: ReorderSerializer + reorder: ReorderSerializer | None class Config: orm_mode = True diff --git a/python/apps/taiga/src/taiga/workflows/services/__init__.py b/python/apps/taiga/src/taiga/workflows/services/__init__.py index cbeac0bf6..7d96447c2 100644 --- a/python/apps/taiga/src/taiga/workflows/services/__init__.py +++ b/python/apps/taiga/src/taiga/workflows/services/__init__.py @@ -136,6 +136,79 @@ async def update_workflow(project_id: UUID, workflow: Workflow, values: dict[str return updated_workflow_detail +########################################################## +# delete workflow +########################################################## + + +async def delete_workflow(workflow: Workflow, target_workflow_slug: str | None = None) -> bool: + """ + This method deletes a workflow, providing the option to first move its statuses and their stories to another + workflow of the same project. + + :param workflow: the workflow to delete + :param target_workflow_slug: the slug of the workflow to which to move the statuses from the workflow being deleted + - if not received, the workflow, its statuses and their contained stories will be deleted + - if received, the workflow will be deleted but its statuses and stories won't (they will be appended to the + last status of the specified workflow).
+ :return: bool + """ + # recover the workflow's detail before being deleted + workflow_detail = await get_workflow_detail(project_id=workflow.project_id, workflow_slug=workflow.slug) + target_workflow = None + if target_workflow_slug: + target_workflow = await get_workflow(project_id=workflow.project.id, workflow_slug=target_workflow_slug) + if not target_workflow: + raise ex.NonExistingMoveToWorkflow(f"The workflow '{target_workflow_slug}' doesn't exist") + if target_workflow.id == workflow.id: + raise ex.SameMoveToStatus("The to-be-deleted workflow and the target-workflow cannot be the same") + + statuses_to_move = await workflows_repositories.list_workflow_statuses( + filters={"workflow_id": workflow.id}, + order_by=["order"], + ) + + if statuses_to_move: + target_workflow_statuses = await workflows_repositories.list_workflow_statuses( + filters={"workflow_id": target_workflow.id}, order_by=["-order"], offset=0, limit=1 + ) + # no statuses in the target_workflow (no valid anchor). The order of the statuses will be preserved + if not target_workflow_statuses: + await reorder_workflow_statuses( + target_workflow=target_workflow, + statuses=[status.id for status in statuses_to_move], + reorder=None, + source_workflow=workflow, + ) + # existing statuses in the target_workflow. The anchor status will be the last one + else: + await reorder_workflow_statuses( + target_workflow=target_workflow, + statuses=[status.id for status in statuses_to_move], + reorder={"place": "after", "status": target_workflow_statuses[0].id}, + source_workflow=workflow, + ) + + deleted = await workflows_repositories.delete_workflow(filters={"id": workflow.id}) + + if deleted > 0: + target_workflow_detail = None + # events will render the final statuses in the target_workflow AFTER any reorder process + if target_workflow: + target_workflow_detail = await get_workflow_detail( + project_id=target_workflow.project_id, workflow_slug=target_workflow.slug + ) + + await workflows_events.emit_event_when_workflow_is_deleted( + project=workflow.project, + workflow=workflow_detail, + target_workflow=target_workflow_detail, + ) + return True + + return False + + ########################################################## # create workflow status ########################################################## @@ -236,56 +309,92 @@ async def _calculate_offset( async def reorder_workflow_statuses( - workflow: Workflow, + target_workflow: Workflow, statuses: list[UUID], - reorder: dict[str, Any], + reorder: dict[str, Any] | None, + source_workflow: Workflow | None = None, ) -> ReorderWorkflowStatusesSerializer: - if reorder["status"] in statuses: - raise ex.InvalidWorkflowStatusError(f"Status {reorder['status']} should not be part of the statuses to reorder") - - # check anchor workflow status exists - reorder_status = await workflows_repositories.get_workflow_status( - filters={"workflow_id": workflow.id, "id": reorder["status"]} - ) - if not reorder_status: - raise ex.InvalidWorkflowStatusError(f"Status {reorder['status']} doesn't exist in this workflow") - reorder_place = reorder["place"] + """ + Reorder the statuses from a workflow to another (can be the same), before or after an existing status + (anchor) when a reorder criteria is provided, or preserving its original order when not provided. 
+ :param target_workflow: the destination workflow for the statuses being reordered + :param statuses: the statuses id's to reorder (move) in the "target_workflow" + :param reorder: reorder["status"] anchor workflow status's id, reorder["place"]: position strategy ["before","after] + None will mean there's no anchor status preserving their original order + :param source_workflow: Workflow containing the statuses to reorder. + None will mean the "source_workflow" and the "target_workflow" are the same + :return: + """ + if not source_workflow: + source_workflow = target_workflow - # check all statuses "to reorder" exist statuses_to_reorder = await workflows_repositories.list_workflow_statuses_to_reorder( - filters={"workflow_id": workflow.id, "ids": statuses} + filters={"workflow_id": source_workflow.id, "ids": statuses} ) if len(statuses_to_reorder) < len(statuses): raise ex.InvalidWorkflowStatusError("One or more statuses don't exist in this workflow") - # calculate offset - offset, pre_order = await _calculate_offset( - workflow=workflow, - total_statuses_to_reorder=len(statuses_to_reorder), - reorder_status=reorder_status, - reorder_place=reorder_place, - ) - - # update workflow statuses - statuses_to_update_tmp = {s.id: s for s in statuses_to_reorder} statuses_to_update = [] - for i, id in enumerate(statuses): - status = statuses_to_update_tmp[id] - status.order = pre_order + (offset * (i + 1)) - statuses_to_update.append(status) + + if not reorder: + if source_workflow == target_workflow: + raise ex.NonExistingMoveToStatus("Reorder criteria required") + else: + statuses_to_update_tmp = {s.id: s for s in statuses_to_reorder} + for i, id in enumerate(statuses): + status = statuses_to_update_tmp[id] + status.workflow = target_workflow + statuses_to_update.append(status) + # position statuses according to this anchor status + elif reorder: + # check anchor workflow status exists + reorder_status = await workflows_repositories.get_workflow_status( + filters={"workflow_id": target_workflow.id, "id": reorder["status"]} + ) + if not reorder_status: + # re-ordering in the same workflow must have a valid anchor status + raise ex.InvalidWorkflowStatusError(f"Status {reorder['status']} doesn't exist in this workflow") + + if reorder["status"] in statuses: + raise ex.InvalidWorkflowStatusError( + f"Status {reorder['status']} should not be part of the statuses to reorder" + ) + reorder_place = reorder["place"] + # calculate offset + offset, pre_order = await _calculate_offset( + workflow=target_workflow, + total_statuses_to_reorder=len(statuses_to_reorder), + reorder_status=reorder_status, + reorder_place=reorder_place, + ) + # update workflow statuses + statuses_to_update_tmp = {s.id: s for s in statuses_to_reorder} + for i, id in enumerate(statuses): + status = statuses_to_update_tmp[id] + status.order = pre_order + (offset * (i + 1)) + status.workflow = target_workflow + statuses_to_update.append(status) # save stories await workflows_repositories.bulk_update_workflow_statuses( - objs_to_update=statuses_to_update, fields_to_update=["order"] + objs_to_update=statuses_to_update, fields_to_update=["order", "workflow"] ) + if source_workflow != target_workflow and statuses_to_reorder: + # update the workflow to the moved stories + await stories_repositories.bulk_update_workflow_to_stories( + statuses_ids=statuses, + old_workflow_id=source_workflow.id, + new_workflow_id=target_workflow.id, + ) + reorder_status_serializer = serializers_services.serialize_reorder_workflow_statuses( - workflow=workflow, 
statuses=statuses, reorder=reorder + workflow=target_workflow, statuses=statuses, reorder=reorder ) # event await workflows_events.emit_event_when_workflow_statuses_are_reordered( - project=workflow.project, reorder=reorder_status_serializer + project=target_workflow.project, reorder=reorder_status_serializer ) return reorder_status_serializer diff --git a/python/apps/taiga/src/taiga/workflows/services/exceptions.py b/python/apps/taiga/src/taiga/workflows/services/exceptions.py index af3aaad3f..54360f087 100644 --- a/python/apps/taiga/src/taiga/workflows/services/exceptions.py +++ b/python/apps/taiga/src/taiga/workflows/services/exceptions.py @@ -13,6 +13,14 @@ class TaigaValidationError(TaigaServiceException): ... +class NonExistingMoveToWorkflow(TaigaServiceException): + ... + + +class SameMoveToWorkflow(TaigaServiceException): + ... + + class InvalidWorkflowStatusError(TaigaServiceException): ... diff --git a/python/apps/taiga/tests/integration/taiga/stories/stories/test_repositories.py b/python/apps/taiga/tests/integration/taiga/stories/stories/test_repositories.py index 3684ad71b..1c348d06c 100644 --- a/python/apps/taiga/tests/integration/taiga/stories/stories/test_repositories.py +++ b/python/apps/taiga/tests/integration/taiga/stories/stories/test_repositories.py @@ -245,16 +245,23 @@ async def test_list_stories_to_reorder() -> None: assert stories[2].ref == story2.ref -async def test_list_stories_to_reorder_bad_names() -> None: - project = await f.create_project() - workflow = await sync_to_async(project.workflows.first)() - status = await sync_to_async(workflow.statuses.first)() - story1 = await f.create_story(project=project, workflow=workflow, status=status) - story2 = await f.create_story(project=project, workflow=workflow, status=status) - non_existing_reference = 9999999 +########################################################## +# misc - bulk_update_workflow_to_stories +########################################################## - refs = [story1.ref, non_existing_reference, story2.ref] - stories = await repositories.list_stories_to_reorder(filters={"status_id": status.id, "refs": refs}) - assert len(stories) == 2 - assert stories[0].ref == story1.ref - assert stories[1].ref == story2.ref + +async def test_bulk_update_workflow_to_stories() -> None: + project = await f.create_project() + old_workflow = await sync_to_async(project.workflows.first)() + new_workflow = await f.create_workflow(project=project) + status = await sync_to_async(old_workflow.statuses.first)() + story1 = await f.create_story(project=project, workflow=old_workflow, status=status) + story2 = await f.create_story(project=project, workflow=old_workflow, status=status) + + await repositories.bulk_update_workflow_to_stories( + statuses_ids=[status.id], old_workflow_id=old_workflow.id, new_workflow_id=new_workflow.id + ) + stories = await repositories.list_stories(filters={"workflow_id": new_workflow.id}, select_related=["workflow"]) + assert story1 in stories and story2 in stories + assert stories[0].workflow == new_workflow + assert stories[1].workflow == new_workflow diff --git a/python/apps/taiga/tests/integration/taiga/workflows/test_api.py b/python/apps/taiga/tests/integration/taiga/workflows/test_api.py index 7519166cf..a3553a2ad 100644 --- a/python/apps/taiga/tests/integration/taiga/workflows/test_api.py +++ b/python/apps/taiga/tests/integration/taiga/workflows/test_api.py @@ -298,6 +298,61 @@ async def test_update_status_422_unprocessable_wf_status_b64id(client): assert response.status_code ==
status.HTTP_422_UNPROCESSABLE_ENTITY, response.text +################################################################################ +# Workflow DELETE /projects//workflows/ +################################################################################ + + +async def test_delete_workflow_204_ok(client): + project = await f.create_project() + deleted_workflow = await f.create_workflow(project=project) + f.build_workflow_status(workflow=deleted_workflow, order=1) + f.build_workflow_status(workflow=deleted_workflow, order=2) + target_workflow = await f.create_workflow(project=project) + f.build_workflow_status(workflow=target_workflow, order=1) + f.build_workflow_status(workflow=target_workflow, order=2) + + client.login(project.created_by) + response = client.delete( + f"/projects/{project.b64id}/workflows/{deleted_workflow.slug}/?moveTo={target_workflow.slug}" + ) + assert response.status_code == status.HTTP_204_NO_CONTENT, response.text + + +async def test_delete_workflow_403_not_project_admin(client): + project = await f.create_project() + workflow = await f.create_workflow(project=project) + another_user = await f.create_user() + + client.login(another_user) + response = client.delete(f"/projects/{project.b64id}/workflows/{workflow.slug}") + assert response.status_code == status.HTTP_403_FORBIDDEN, response.text + + +async def test_delete_workflow_404_not_found_project_b64id(client): + project = await f.create_project() + workflow = await f.create_workflow(project=project) + client.login(project.created_by) + response = client.delete(f"/projects/{NOT_EXISTING_B64ID}/workflows/{workflow.slug}") + assert response.status_code == status.HTTP_404_NOT_FOUND, response.text + + +async def test_delete_workflow_422_empty_move_to_slug(client): + project = await f.create_project() + client.login(project.created_by) + empty_string = "" + response = client.delete(f"/projects/{project.b64id}/workflows/slug/?moveTo={empty_string}") + assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, response.text + + +async def test_delete_workflow_422_long_move_to_slug(client): + project = await f.create_project() + client.login(project.created_by) + long_string = "slug_slug_slug_slug_slug_slug_slug_slug_slug_slug" + response = client.delete(f"/projects/{project.b64id}/workflows/slug/?moveTo={long_string}") + assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, response.text + + ########################################################## # Workflow Status POST /projects//workflows//statuses/reorder ########################################################## diff --git a/python/apps/taiga/tests/integration/taiga/workflows/test_repositories.py b/python/apps/taiga/tests/integration/taiga/workflows/test_repositories.py index be4a1473f..c6f13a032 100644 --- a/python/apps/taiga/tests/integration/taiga/workflows/test_repositories.py +++ b/python/apps/taiga/tests/integration/taiga/workflows/test_repositories.py @@ -87,6 +87,27 @@ async def test_update_workflow(): assert updated_workflow.name == "Updated name" +########################################################## +# delete workflow +########################################################## + + +async def test_delete_workflow_without_workflow_statuses_ok() -> None: + project = await f.create_project() + workflow = await f.create_workflow(project=project, statuses=[]) + + delete_ret = await repositories.delete_workflow(filters={"id": workflow.id}) + assert delete_ret == 1 + + +async def test_delete_workflow_with_workflow_statuses_ok() -> None: +
project = await f.create_project() + workflow = await f.create_workflow(project=project) + + delete_ret = await repositories.delete_workflow(filters={"id": workflow.id}) + assert delete_ret == 4 + + ########################################################## # create_workflow_status ########################################################## diff --git a/python/apps/taiga/tests/unit/taiga/workflows/test_services.py b/python/apps/taiga/tests/unit/taiga/workflows/test_services.py index bac480d79..1a70e02a1 100644 --- a/python/apps/taiga/tests/unit/taiga/workflows/test_services.py +++ b/python/apps/taiga/tests/unit/taiga/workflows/test_services.py @@ -5,11 +5,12 @@ # # Copyright (c) 2023-present Kaleidos INC from decimal import Decimal -from unittest.mock import patch +from unittest.mock import call, patch import pytest from taiga.base.repositories.neighbors import Neighbor from taiga.workflows import services +from taiga.workflows.serializers import ReorderWorkflowStatusesSerializer, WorkflowNestedSerializer, WorkflowSerializer from taiga.workflows.services import exceptions as ex from tests.utils import factories as f @@ -229,6 +230,243 @@ async def test_update_workflow_status_none_name(): fake_workflows_events.emit_event_when_workflow_status_is_updated.assert_not_awaited() +####################################################### +# delete workflow +####################################################### + + +async def test_delete_workflow_no_target_workflow_ok(): + with ( + patch("taiga.workflows.services.workflows_repositories", autospec=True) as fake_workflows_repo, + patch("taiga.workflows.services.workflows_events", autospec=True) as fake_workflows_events, + ): + workflow = f.build_workflow() + status1 = f.build_workflow_status(workflow=workflow, order=1) + status2 = f.build_workflow_status(workflow=workflow, order=2) + status3 = f.build_workflow_status(workflow=workflow, order=3) + fake_workflows_repo.get_workflow.return_value = workflow + fake_workflows_repo.list_workflow_statuses.return_value = [status1, status2, status3] + fake_workflows_repo.delete_workflow.return_value = True + + ret = await services.delete_workflow(workflow=workflow) + + fake_workflows_repo.delete_workflow.assert_awaited_once_with(filters={"id": workflow.id}) + fake_workflows_events.emit_event_when_workflow_is_deleted.assert_awaited_once() + assert ret is True + + +async def test_delete_workflow_with_target_workflow_with_anchor_status_ok(): + with ( + patch("taiga.workflows.services.workflows_repositories", autospec=True) as fake_workflows_repo, + patch("taiga.workflows.services.get_workflow_detail", autospec=True) as fake_get_workflow_detail, + patch("taiga.workflows.services.get_workflow", autospec=True) as fake_get_workflow, + patch("taiga.workflows.services.reorder_workflow_statuses", autospec=True) as fake_reorder_workflow_statuses, + patch("taiga.workflows.services.workflows_events", autospec=True) as fake_workflows_events, + ): + deleted_workflow = f.build_workflow(slug="deleted_workflow") + deleted_workflow_status1 = f.build_workflow_status(workflow=deleted_workflow, order=1) + deleted_workflow_status2 = f.build_workflow_status(workflow=deleted_workflow, order=2) + deleted_workflow_statuses = [deleted_workflow_status1, deleted_workflow_status2] + deleted_workflow_detail = WorkflowSerializer( + id=deleted_workflow.id, + name=deleted_workflow.name, + slug=deleted_workflow.slug, + order=deleted_workflow.order, + statuses=deleted_workflow_statuses, + ) + target_workflow = 
f.build_workflow(slug="target_workflow") + target_workflow_status1 = f.build_workflow_status(workflow=target_workflow, order=1) + target_workflow_status2 = f.build_workflow_status(workflow=target_workflow, order=2) + target_workflow_statuses = [target_workflow_status2, target_workflow_status1] + target_workflow_detail = WorkflowSerializer( + id=target_workflow.id, + name=target_workflow.name, + slug=target_workflow.slug, + order=target_workflow.order, + statuses=target_workflow_statuses, + ) + + fake_get_workflow.return_value = target_workflow + fake_get_workflow_detail.side_effect = [deleted_workflow_detail, target_workflow_detail] + # the serializer response doesn't maters + fake_reorder_workflow_statuses.return_value = ReorderWorkflowStatusesSerializer( + workflow=WorkflowNestedSerializer( + id=target_workflow.id, name=deleted_workflow.name, slug=deleted_workflow.slug + ), + statuses=[], + reorder=None, + ) + fake_workflows_repo.list_workflow_statuses.side_effect = [deleted_workflow_statuses, target_workflow_statuses] + fake_workflows_repo.delete_workflow.return_value = True + # service call + ret = await services.delete_workflow(workflow=deleted_workflow, target_workflow_slug=target_workflow.slug) + # asserts + fake_workflows_repo.list_workflow_statuses.assert_has_awaits( + [ + call(filters={"workflow_id": deleted_workflow.id}, order_by=["order"]), + call(filters={"workflow_id": target_workflow.id}, order_by=["-order"], offset=0, limit=1), + ] + ) + fake_workflows_repo.delete_workflow.assert_awaited_once_with(filters={"id": deleted_workflow.id}) + fake_reorder_workflow_statuses.assert_awaited_once_with( + target_workflow=target_workflow, + statuses=[status.id for status in deleted_workflow_statuses], + reorder={"place": "after", "status": target_workflow_statuses[0].id}, + source_workflow=deleted_workflow, + ) + fake_get_workflow_detail.assert_has_awaits( + [ + call(project_id=deleted_workflow.project.id, workflow_slug=deleted_workflow.slug), + call(project_id=target_workflow.project.id, workflow_slug=target_workflow.slug), + ] + ) + fake_workflows_events.emit_event_when_workflow_is_deleted.assert_awaited_once_with( + project=deleted_workflow.project, + workflow=deleted_workflow_detail, + target_workflow=target_workflow_detail, + ) + assert ret is True + + +async def test_delete_workflow_with_target_workflow_with_no_anchor_status_ok(): + with ( + patch("taiga.workflows.services.workflows_repositories", autospec=True) as fake_workflows_repo, + patch("taiga.workflows.services.get_workflow_detail", autospec=True) as fake_get_workflow_detail, + patch("taiga.workflows.services.get_workflow", autospec=True) as fake_get_workflow, + patch("taiga.workflows.services.reorder_workflow_statuses", autospec=True) as fake_reorder_workflow_statuses, + patch("taiga.workflows.services.workflows_events", autospec=True) as fake_workflows_events, + ): + deleted_workflow = f.build_workflow(slug="deleted_workflow") + deleted_workflow_status1 = f.build_workflow_status(workflow=deleted_workflow, order=1) + deleted_workflow_statuses = [deleted_workflow_status1] + deleted_workflow_detail = WorkflowSerializer( + id=deleted_workflow.id, + name=deleted_workflow.name, + slug=deleted_workflow.slug, + order=deleted_workflow.order, + statuses=deleted_workflow_statuses, + ) + target_workflow = f.build_workflow(slug="target_workflow") + target_workflow_statuses = [] + target_workflow_detail = WorkflowSerializer( + id=target_workflow.id, + name=target_workflow.name, + slug=target_workflow.slug, + 
order=target_workflow.order, + statuses=target_workflow_statuses, + ) + + fake_get_workflow.return_value = target_workflow + fake_get_workflow_detail.side_effect = [deleted_workflow_detail, target_workflow_detail] + # the serializer response doesn't maters + fake_reorder_workflow_statuses.return_value = ReorderWorkflowStatusesSerializer( + workflow=WorkflowNestedSerializer( + id=target_workflow.id, name=deleted_workflow.name, slug=deleted_workflow.slug + ), + statuses=[], + reorder=None, + ) + fake_workflows_repo.list_workflow_statuses.side_effect = [deleted_workflow_statuses, target_workflow_statuses] + fake_workflows_repo.delete_workflow.return_value = True + # service call + ret = await services.delete_workflow(workflow=deleted_workflow, target_workflow_slug=target_workflow.slug) + # asserts + fake_workflows_repo.list_workflow_statuses.assert_has_awaits( + [ + call(filters={"workflow_id": deleted_workflow.id}, order_by=["order"]), + call(filters={"workflow_id": target_workflow.id}, order_by=["-order"], offset=0, limit=1), + ] + ) + fake_workflows_repo.delete_workflow.assert_awaited_once_with(filters={"id": deleted_workflow.id}) + fake_reorder_workflow_statuses.assert_awaited_once_with( + target_workflow=target_workflow, + statuses=[status.id for status in deleted_workflow_statuses], + reorder=None, + source_workflow=deleted_workflow, + ) + fake_get_workflow_detail.assert_has_awaits( + [ + call(project_id=deleted_workflow.project.id, workflow_slug=deleted_workflow.slug), + call(project_id=target_workflow.project.id, workflow_slug=target_workflow.slug), + ] + ) + fake_workflows_events.emit_event_when_workflow_is_deleted.assert_awaited_once_with( + project=deleted_workflow.project, + workflow=deleted_workflow_detail, + target_workflow=target_workflow_detail, + ) + assert ret is True + + +async def test_delete_workflow_not_existing_target_workflow_exception(): + with ( + patch("taiga.workflows.services.workflows_repositories", autospec=True) as fake_workflows_repo, + patch("taiga.workflows.services.get_workflow_detail", autospec=True) as fake_get_workflow_detail, + patch("taiga.workflows.services.get_workflow", autospec=True) as fake_get_workflow, + patch("taiga.workflows.services.reorder_workflow_statuses", autospec=True) as fake_reorder_workflow_statuses, + patch("taiga.workflows.services.workflows_events", autospec=True) as fake_workflows_events, + pytest.raises(ex.NonExistingMoveToWorkflow), + ): + deleted_workflow = f.build_workflow(slug="deleted_workflow") + deleted_workflow_detail = WorkflowSerializer( + id=deleted_workflow.id, + name=deleted_workflow.name, + slug=deleted_workflow.slug, + order=deleted_workflow.order, + statuses=[], + ) + target_workflow = f.build_workflow(slug="target_workflow") + fake_get_workflow_detail.return_value = deleted_workflow_detail + fake_get_workflow.return_value = None + + # service call + ret = await services.delete_workflow(workflow=deleted_workflow, target_workflow_slug=target_workflow.slug) + + # asserts + fake_get_workflow_detail.assert_awaited_once_with( + project_id=deleted_workflow.project.id, workflow_slug=deleted_workflow.slug + ) + fake_reorder_workflow_statuses.assert_not_awaited() + fake_workflows_repo.delete_workflow.assert_not_awaited() + fake_workflows_events.emit_event_when_workflow_is_deleted.assert_not_awaited() + + assert ret is False + + +async def test_delete_workflow_same_target_workflow_exception(): + with ( + patch("taiga.workflows.services.workflows_repositories", autospec=True) as fake_workflows_repo, + 
patch("taiga.workflows.services.get_workflow_detail", autospec=True) as fake_get_workflow_detail, + patch("taiga.workflows.services.get_workflow", autospec=True) as fake_get_workflow, + patch("taiga.workflows.services.reorder_workflow_statuses", autospec=True) as fake_reorder_workflow_statuses, + patch("taiga.workflows.services.workflows_events", autospec=True) as fake_workflows_events, + pytest.raises(ex.SameMoveToStatus), + ): + deleted_workflow = f.build_workflow(slug="deleted_workflow") + deleted_workflow_detail = WorkflowSerializer( + id=deleted_workflow.id, + name=deleted_workflow.name, + slug=deleted_workflow.slug, + order=deleted_workflow.order, + statuses=[], + ) + fake_get_workflow_detail.return_value = deleted_workflow_detail + fake_get_workflow.return_value = deleted_workflow + + # service call + ret = await services.delete_workflow(workflow=deleted_workflow, target_workflow_slug=deleted_workflow.slug) + + # asserts + fake_get_workflow_detail.assert_awaited_once_with( + project_id=deleted_workflow.project.id, workflow_slug=deleted_workflow.slug + ) + fake_reorder_workflow_statuses.assert_not_awaited() + fake_workflows_repo.delete_workflow.assert_not_awaited() + fake_workflows_events.emit_event_when_workflow_is_deleted.assert_not_awaited() + + assert ret is False + + ####################################################### # _calculate_offset ####################################################### @@ -278,9 +516,10 @@ async def test_calculate_offset() -> None: ####################################################### -async def test_reorder_workflow_statuses_ok(): +async def test_reorder_workflow_statuses_same_workflow_ok(): with ( patch("taiga.workflows.services.workflows_repositories", autospec=True) as fake_workflows_repo, + patch("taiga.workflows.services.stories_repositories", autospec=True) as fake_stories_repo, patch("taiga.workflows.services.workflows_events", autospec=True) as fake_workflows_events, ): workflow = f.build_workflow() @@ -291,23 +530,104 @@ async def test_reorder_workflow_statuses_ok(): fake_workflows_repo.list_workflow_statuses_to_reorder.return_value = [status3, status2] await services.reorder_workflow_statuses( - workflow=f.build_workflow(), + target_workflow=f.build_workflow(), statuses=[status3.id, status2.id], reorder={"place": "after", "status": status1.id}, ) + fake_stories_repo.bulk_update_workflow_to_stories.assert_not_awaited() fake_workflows_repo.bulk_update_workflow_statuses.assert_awaited_once_with( - objs_to_update=[status3, status2], fields_to_update=["order"] + objs_to_update=[status3, status2], fields_to_update=["order", "workflow"] ) fake_workflows_events.emit_event_when_workflow_statuses_are_reordered.assert_awaited_once() +async def test_reorder_workflow_statuses_between_workflows_with_anchor_ok(): + with ( + patch("taiga.workflows.services.workflows_repositories", autospec=True) as fake_workflows_repo, + patch("taiga.workflows.services.stories_repositories", autospec=True) as fake_stories_repo, + patch("taiga.workflows.services.workflows_events", autospec=True) as fake_workflows_events, + ): + workflow1 = f.build_workflow() + workflow2 = f.build_workflow() + status1 = f.build_workflow_status(workflow=workflow1, order=1) + status2 = f.build_workflow_status(workflow=workflow1, order=2) + status3 = f.build_workflow_status(workflow=workflow1, order=3) + fake_workflows_repo.get_workflow_status.return_value = status1 + fake_workflows_repo.list_workflow_statuses_to_reorder.return_value = [status3, status2] + 
fake_stories_repo.bulk_update_workflow_to_stories.return_value = None + + await services.reorder_workflow_statuses( + target_workflow=workflow1, + statuses=[status3.id, status2.id], + reorder={"place": "after", "status": status1.id}, + source_workflow=workflow2, + ) + + fake_workflows_repo.bulk_update_workflow_statuses.assert_awaited_once_with( + objs_to_update=[status3, status2], fields_to_update=["order", "workflow"] + ) + fake_stories_repo.bulk_update_workflow_to_stories.assert_awaited_once_with( + statuses_ids=[status3.id, status2.id], old_workflow_id=workflow2.id, new_workflow_id=workflow1.id + ) + fake_workflows_events.emit_event_when_workflow_statuses_are_reordered.assert_awaited_once() + + +async def test_reorder_workflow_statuses_between_workflows_no_anchor_ok(): + with ( + patch("taiga.workflows.services.workflows_repositories", autospec=True) as fake_workflows_repo, + patch("taiga.workflows.services.stories_repositories", autospec=True) as fake_stories_repo, + patch("taiga.workflows.services.workflows_events", autospec=True) as fake_workflows_events, + ): + workflow1 = f.build_workflow() + workflow2 = f.build_workflow(statuses=[]) + status1 = f.build_workflow_status(workflow=workflow1, order=1) + status2 = f.build_workflow_status(workflow=workflow1, order=2) + fake_workflows_repo.get_workflow_status.return_value = status1 + fake_workflows_repo.list_workflow_statuses_to_reorder.return_value = [status1, status2] + fake_stories_repo.bulk_update_workflow_to_stories.return_value = None + + await services.reorder_workflow_statuses( + target_workflow=workflow2, statuses=[status1.id, status2.id], reorder=None, source_workflow=workflow1 + ) + + fake_workflows_repo.bulk_update_workflow_statuses.assert_awaited_once_with( + objs_to_update=[status1, status2], fields_to_update=["order", "workflow"] + ) + fake_stories_repo.bulk_update_workflow_to_stories.assert_awaited_once_with( + statuses_ids=[status1.id, status2.id], old_workflow_id=workflow1.id, new_workflow_id=workflow2.id + ) + fake_workflows_events.emit_event_when_workflow_statuses_are_reordered.assert_awaited_once() + + +async def test_reorder_workflow_statuses_between_workflows_no_anchor_same_workflow_exception(): + with ( + patch("taiga.workflows.services.workflows_repositories", autospec=True) as fake_workflows_repo, + pytest.raises(ex.NonExistingMoveToStatus), + ): + workflow = f.build_workflow() + status1 = f.build_workflow_status(workflow=workflow, order=1) + status2 = f.build_workflow_status(workflow=workflow, order=2) + fake_workflows_repo.list_workflow_statuses_to_reorder.return_value = [status1, status2] + + await services.reorder_workflow_statuses( + target_workflow=workflow, statuses=[status1.id, status2.id], reorder=None, source_workflow=workflow + ) + + async def test_reorder_workflow_status_repeated(): - with (pytest.raises(ex.InvalidWorkflowStatusError),): + with ( + pytest.raises(ex.InvalidWorkflowStatusError), + patch("taiga.workflows.services.workflows_repositories", autospec=True) as fake_workflows_repo, + ): + workflow = f.build_workflow() + status = f.build_workflow_status(workflow=workflow, order=1) + fake_workflows_repo.list_workflow_statuses_to_reorder.return_value = [status] + await services.reorder_workflow_statuses( - workflow=f.build_workflow(), - statuses=["new"], - reorder={"place": "after", "status": "new"}, + target_workflow=workflow, + statuses=[status.id], + reorder={"place": "after", "status": status.id}, ) @@ -319,7 +639,7 @@ async def test_reorder_anchor_workflow_status_does_not_exist(): 
fake_workflows_repo.get_workflow_status.return_value = None await services.reorder_workflow_statuses( - workflow=f.build_workflow(), + target_workflow=f.build_workflow(), statuses=["in-progress"], reorder={"place": "after", "status": "mooo"}, ) @@ -333,7 +653,7 @@ async def test_reorder_any_workflow_status_does_not_exist(): fake_workflows_repo.get_workflow_status.return_value = None await services.reorder_workflow_statuses( - workflow=f.build_workflow(), + target_workflow=f.build_workflow(), statuses=["in-progress", "mooo"], reorder={"place": "after", "status": "new"}, ) diff --git a/python/docs/events.md b/python/docs/events.md index 4911d3d17..2967a16b1 100644 --- a/python/docs/events.md +++ b/python/docs/events.md @@ -681,6 +681,19 @@ Content for: } ``` +#### `workflows.delete` + +It happens when a workflow has been deleted. + +Content for: +- project channel: + ``` + { + "workflow": {... "workflow object" ...}, + "targetWorkflow": null | {... "workflow object" ...} + } + ``` + #### `workflowstatuses.create` @@ -740,7 +753,7 @@ Content for: ``` { "workflowStatus": {... "workflow status object" ...}, - "moveToSlug": null | "closed" + "targetStatus": null | "closed" } ```
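
As a quick reference for exercising the new endpoint by hand, here is a minimal sketch of the two deletion modes it supports. Only the route shape, the `moveTo` query param and the 204 response come from this patch; the base URL, auth scheme, project id and workflow slugs are placeholders/assumptions, not part of the diff.

```python
# Minimal sketch (assumptions: base URL, auth token and identifiers are placeholders).
import httpx

BASE_URL = "http://localhost:8000"   # assumed local deployment
TOKEN = "<access-token>"             # assumed auth token
PROJECT_B64ID = "<project-b64id>"    # base64-encoded project id

with httpx.Client(base_url=BASE_URL, headers={"Authorization": f"Bearer {TOKEN}"}) as api:
    # Delete "old-workflow", moving its statuses (and their stories) after the
    # last status of the "main" workflow.
    res = api.delete(
        f"/projects/{PROJECT_B64ID}/workflows/old-workflow",
        params={"moveTo": "main"},
    )
    assert res.status_code == 204

    # Delete "scratch" together with all of its statuses and stories (no moveTo given).
    res = api.delete(f"/projects/{PROJECT_B64ID}/workflows/scratch")
    assert res.status_code == 204
```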