async def bulk_update_workflow_to_stories(
    statuses_ids: list[UUID], old_workflow_id: UUID, new_workflow_id: UUID
) -> None:
    """
    Reassign to `new_workflow_id` every story currently in `old_workflow_id`
    whose status is one of `statuses_ids`.

    :param statuses_ids: ids of the statuses whose stories must be moved
    :param old_workflow_id: id of the workflow the stories belong to now
    :param new_workflow_id: id of the workflow the stories will belong to
    """
    stories_to_move = Story.objects.filter(status_id__in=statuses_ids, workflow_id=old_workflow_id)
    await stories_to_move.aupdate(workflow_id=new_workflow_id)
async def list_all_stories(project_id: UUID, workflow_slug: str) -> list[StorySummarySerializer]:
    """Return serialized summaries of every story in the given project workflow (no pagination)."""
    return await get_serialized_stories(project_id=project_id, workflow_slug=workflow_slug)
workflow_slug=workflow_slug) -########################################################## +######################################################### # update workflow ########################################################## @@ -147,6 +149,44 @@ async def update_workflow( return await workflows_services.update_workflow(project_id=project_id, workflow=workflow, values=values) +################################################ +# delete workflow +################################################ + + +@routes.workflows.delete( + "/projects/{project_id}/workflows/{workflow_slug}", + name="project.workflow.delete", + summary="Delete a workflow", + responses=ERROR_403 | ERROR_404 | ERROR_422, + status_code=status.HTTP_204_NO_CONTENT, +) +async def delete_workflow( + project_id: B64UUID, + workflow_slug: str, + request: AuthRequest, + query_params: DeleteWorkflowQuery = Depends(), +) -> None: + """ + Deletes a workflow in the given project, providing the option to move all the statuses and their stories to another + workflow. + + Query params: + + * **move_to:** the workflow's slug to which move the statuses from the workflow being deleted + - if not received, the workflow, statuses and its contained stories will be deleted + - if received, the workflow will be deleted but its statuses and stories won't (they will be appended to the + last status of the specified workflow). 
+ """ + workflow = await get_workflow_or_404(project_id=project_id, workflow_slug=workflow_slug) + await check_permissions(permissions=DELETE_WORKFLOW, user=request.user, obj=workflow) + + await workflows_services.delete_workflow( + workflow=workflow, + target_workflow_slug=query_params.move_to, + ) + + ################################################ # misc ################################################ @@ -247,7 +287,7 @@ async def reorder_workflow_statuses( await check_permissions(permissions=REORDER_WORKFLOW_STATUSES, user=request.user, obj=workflow) return await workflows_services.reorder_workflow_statuses( - workflow=workflow, + target_workflow=workflow, statuses=form.statuses, reorder=form.get_reorder_dict(), ) @@ -277,7 +317,6 @@ async def delete_workflow_status( to any other existing workflow status in the same workflow. Query params: - * **move_to:** the workflow status's slug to which move the stories from the status being deleted - if not received, the workflow status and its contained stories will be deleted - if received, the workflow status will be deleted but its contained stories won't (they will be first moved to diff --git a/python/apps/taiga/src/taiga/workflows/api/validators.py b/python/apps/taiga/src/taiga/workflows/api/validators.py index 347daa2e1..fe02d6d01 100644 --- a/python/apps/taiga/src/taiga/workflows/api/validators.py +++ b/python/apps/taiga/src/taiga/workflows/api/validators.py @@ -27,10 +27,26 @@ class WorkflowName(ConstrainedStr): max_length = 40 +class WorkflowSlug(ConstrainedStr): + strip_whitespace = True + min_length = 1 + max_length = 40 + + class CreateWorkflowValidator(BaseModel): name: WorkflowName +class DeleteWorkflowQuery(BaseModel): + move_to: WorkflowSlug | None + + @validator("move_to") + def check_move_to_slug(cls, v: WorkflowSlug | None) -> WorkflowSlug | None: + if v is None: + return None + return v + + class CreateWorkflowStatusValidator(BaseModel): name: WorkflowStatusName color: conint(gt=0, lt=9) # type: 
async def emit_event_when_workflow_is_deleted(
    project: Project, workflow: DeleteWorkflowSerializer, target_workflow: WorkflowSerializer | None
) -> None:
    """
    Publish a DELETE_WORKFLOW ("workflows.delete") event on the project's channel.

    :param project: project whose channel receives the event
    :param workflow: serialized detail of the deleted workflow (its statuses and
        story summaries are embedded in the serializer)
    :param target_workflow: serialized workflow that received the deleted
        workflow's statuses/stories, or None when nothing was moved
    """
    await events_manager.publish_on_project_channel(
        project=project,
        type=DELETE_WORKFLOW,
        content=DeleteWorkflowContent(
            workflow=workflow,
            target_workflow=target_workflow,
        ),
    )
async def delete_workflow(filters: WorkflowFilters = {}) -> int:
    """
    Delete the workflows matching the given filters.

    :param filters: criteria applied to the workflow queryset (e.g. {"id": ...})
    :return: total number of rows deleted; Django's delete() total also counts
        objects removed in cascade (e.g. the workflow's statuses)
    """
    qs = _apply_filters_to_workflow_queryset(qs=DEFAULT_QUERYSET_WORKFLOW, filters=filters)
    count, _ = await qs.adelete()
    return count
class DeleteWorkflowSerializer(BaseModel):
    """Detail of a deleted workflow, including the statuses and stories it contained."""

    id: UUIDB64
    name: str
    slug: str
    order: int
    # statuses the workflow had at deletion time
    statuses: list[WorkflowStatusNestedSerializer]
    # summaries of the stories those statuses contained
    stories: list[StorySummarySerializer]

    class Config:
        orm_mode = True
async def get_delete_workflow_detail(project_id: UUID, workflow_slug: str) -> DeleteWorkflowSerializer:
    """
    Build the full detail (workflow + its statuses + its story summaries) of a
    workflow that is about to be deleted, so it can be attached to the delete event.

    :param project_id: id of the project the workflow belongs to
    :param workflow_slug: slug of the workflow
    :return: DeleteWorkflowSerializer with the workflow's statuses and stories embedded
    """
    # NOTE(review): the cast assumes the workflow exists; get_workflow may return
    # None for a bad slug and this would fail later on `workflow.id` — confirm all
    # callers validate existence first (the API layer uses get_workflow_or_404)
    workflow = cast(
        Workflow,
        await workflows_repositories.get_workflow(
            filters={
                "project_id": project_id,
                "slug": workflow_slug,
            },
            select_related=["project"],
        ),
    )
    workflow_statuses = await workflows_repositories.list_workflow_statuses(filters={"workflow_id": workflow.id})
    workflow_stories = await stories_services.list_all_stories(
        project_id=project_id,
        workflow_slug=workflow_slug,
    )
    return serializers_services.serialize_delete_workflow_detail(
        workflow=workflow,
        workflow_statuses=workflow_statuses,
        workflow_stories=workflow_stories,
    )
+ :return: bool + """ + # recover the workflow's detail before being deleted + workflow_detail = await get_delete_workflow_detail(project_id=workflow.project_id, workflow_slug=workflow.slug) + target_workflow = None + if target_workflow_slug: + target_workflow = await get_workflow(project_id=workflow.project_id, workflow_slug=target_workflow_slug) + if not target_workflow: + raise ex.NonExistingMoveToWorkflow(f"The workflow '{target_workflow_slug}' doesn't exist") + if target_workflow.id == workflow.id: + raise ex.SameMoveToWorkflow("The to-be-deleted workflow and the target-workflow cannot be the same") + + statuses_to_move = await workflows_repositories.list_workflow_statuses( + filters={"workflow_id": workflow.id}, + order_by=["order"], + ) + + if statuses_to_move: + target_workflow_statuses = await workflows_repositories.list_workflow_statuses( + filters={"workflow_id": target_workflow.id}, order_by=["-order"], offset=0, limit=1 + ) + # no statuses in the target_workflow (no valid anchor). The order of the statuses will be preserved + if not target_workflow_statuses: + await reorder_workflow_statuses( + target_workflow=target_workflow, + statuses=[status.id for status in statuses_to_move], + reorder=None, + source_workflow=workflow, + ) + # existing statuses in the target_workflow. 
async def reorder_workflow_statuses(
    target_workflow: Workflow,
    statuses: list[UUID],
    reorder: dict[str, Any] | None,
    source_workflow: Workflow | None = None,
) -> ReorderWorkflowStatusesSerializer:
    """
    Reorder the statuses from a workflow to another (can be the same), before or after an existing status
    (anchor) when a reorder criteria is provided, or preserving its original order when not provided.

    :param target_workflow: the destination workflow for the statuses being reordered
    :param statuses: the statuses id's to reorder (move) in the "target_workflow"
    :param reorder: reorder["status"] anchor workflow status's id, reorder["place"]: position strategy
        ["before", "after"]; None means there's no anchor status and the statuses preserve their original order
    :param source_workflow: workflow containing the statuses to reorder;
        None means the "source_workflow" and the "target_workflow" are the same
    :return: serialized reorder result (target workflow, moved status ids and reorder criteria)
    """
    if not source_workflow:
        source_workflow = target_workflow

    # all the statuses being moved must exist in the source workflow
    statuses_to_reorder = await workflows_repositories.list_workflow_statuses_to_reorder(
        filters={"workflow_id": source_workflow.id, "ids": statuses}
    )
    if len(statuses_to_reorder) < len(statuses):
        raise ex.InvalidWorkflowStatusError("One or more statuses don't exist in this workflow")

    statuses_to_update = []

    if not reorder:
        if source_workflow == target_workflow:
            # reordering inside one workflow is meaningless without an anchor
            raise ex.NonExistingMoveToStatus("Reorder criteria required")
        else:
            # cross-workflow move with no anchor: keep the original order, only repoint the workflow
            statuses_to_update_tmp = {s.id: s for s in statuses_to_reorder}
            for i, id in enumerate(statuses):
                status = statuses_to_update_tmp[id]
                status.workflow = target_workflow
                statuses_to_update.append(status)
    # position statuses according to this anchor status
    elif reorder:
        # check anchor workflow status exists (in the TARGET workflow)
        reorder_status = await workflows_repositories.get_workflow_status(
            filters={"workflow_id": target_workflow.id, "id": reorder["status"]}
        )
        if not reorder_status:
            # re-ordering in the same workflow must have a valid anchor status
            raise ex.InvalidWorkflowStatusError(f"Status {reorder['status']} doesn't exist in this workflow")

        # the anchor cannot itself be one of the statuses being moved
        if reorder["status"] in statuses:
            raise ex.InvalidWorkflowStatusError(
                f"Status {reorder['status']} should not be part of the statuses to reorder"
            )
        reorder_place = reorder["place"]
        # calculate the order gap to slot the moved statuses before/after the anchor
        offset, pre_order = await _calculate_offset(
            workflow=target_workflow,
            total_statuses_to_reorder=len(statuses_to_reorder),
            reorder_status=reorder_status,
            reorder_place=reorder_place,
        )
        # assign new orders (preserving the order of `statuses`) and repoint the workflow
        statuses_to_update_tmp = {s.id: s for s in statuses_to_reorder}
        for i, id in enumerate(statuses):
            status = statuses_to_update_tmp[id]
            status.order = pre_order + (offset * (i + 1))
            status.workflow = target_workflow
            statuses_to_update.append(status)

    # persist the reordered / moved statuses (not stories — those are handled below)
    await workflows_repositories.bulk_update_workflow_statuses(
        objs_to_update=statuses_to_update, fields_to_update=["order", "workflow"]
    )

    if source_workflow != target_workflow and statuses_to_reorder:
        # the moved statuses carried their stories along: update the stories' workflow too
        await stories_repositories.bulk_update_workflow_to_stories(
            statuses_ids=statuses,
            old_workflow_id=source_workflow.id,
            new_workflow_id=target_workflow.id,
        )

    reorder_status_serializer = serializers_services.serialize_reorder_workflow_statuses(
        workflow=target_workflow, statuses=statuses, reorder=reorder
    )

    # event
    await workflows_events.emit_event_when_workflow_statuses_are_reordered(
        project=target_workflow.project, reorder=reorder_status_serializer
    )

    return reorder_status_serializer
class NonExistingMoveToWorkflow(TaigaServiceException):
    """Raised when the "move_to" target workflow slug does not exist in the project."""

    ...


class SameMoveToWorkflow(TaigaServiceException):
    """Raised when the "move_to" target workflow is the same workflow being deleted."""

    ...
async def test_bulk_update_workflow_to_stories() -> None:
    """
    Moving the stories of a status from one workflow to another updates the
    stories' workflow.

    Fixes over the previous version: `new_workflow` was fetched with the exact
    same query as `old_workflow` (so the "move" was a no-op and the test could
    never fail), and the final listing filtered "workflow_id" with a model
    instance instead of its id — and by the OLD workflow, which would be empty
    after a real move.
    """
    project = await f.create_project()
    old_workflow = await sync_to_async(project.workflows.first)()
    # create a genuinely different destination workflow
    new_workflow = await f.create_workflow(project=project)
    status = await sync_to_async(old_workflow.statuses.first)()
    story1 = await f.create_story(project=project, workflow=old_workflow, status=status)
    story2 = await f.create_story(project=project, workflow=old_workflow, status=status)

    await repositories.bulk_update_workflow_to_stories(
        statuses_ids=[status.id], old_workflow_id=old_workflow.id, new_workflow_id=new_workflow.id
    )

    # the stories must now live in the new workflow
    stories = await repositories.list_stories(filters={"workflow_id": new_workflow.id}, select_related=["workflow"])
    assert story1 in stories and story2 in stories
    assert stories[0].workflow == new_workflow
    assert stories[1].workflow == new_workflow
async def test_delete_workflow_403_not_project_admin(client):
    # only project admins may delete a workflow — an unrelated user gets 403
    project = await f.create_project()
    workflow = await f.create_workflow(project=project)
    another_user = await f.create_user()

    client.login(another_user)
    response = client.delete(f"/projects/{project.b64id}/workflows/{workflow.slug}")
    assert response.status_code == status.HTTP_403_FORBIDDEN, response.text


async def test_delete_workflow_404_not_found_project_b64id(client):
    # deleting against a non-existing project id is a 404
    project = await f.create_project()
    workflow = await f.create_workflow(project=project)
    client.login(project.created_by)
    response = client.delete(f"/projects/{NOT_EXISTING_B64ID}/workflows/{workflow.slug}")
    assert response.status_code == status.HTTP_404_NOT_FOUND, response.text


async def test_delete_workflow_422_empty_move_to_slug(client):
    # moveTo="" violates the WorkflowSlug min_length=1 constraint -> validation error
    project = await f.create_project()
    client.login(project.created_by)
    empty_string = ""
    response = client.delete(f"/projects/{project.b64id}/workflows/slug/?moveTo={empty_string}")
    assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, response.text
async def test_delete_workflow_without_workflow_statuses_ok() -> None:
    # a workflow with no statuses deletes exactly one row (the workflow itself)
    project = await f.create_project()
    workflow = await f.create_workflow(project=project, statuses=[])

    delete_ret = await repositories.delete_workflow(filters={"id": workflow.id})
    assert delete_ret == 1


async def test_delete_workflow_with_workflow_statuses_ok() -> None:
    # the returned count includes cascade-deleted statuses
    # NOTE(review): 4 = the workflow plus its factory-created statuses — confirm
    # the factory's default status count matches this expectation
    project = await f.create_project()
    workflow = await f.create_workflow(project=project)

    delete_ret = await repositories.delete_workflow(filters={"id": workflow.id})
    assert delete_ret == 4
async def test_delete_workflow_no_target_workflow_ok():
    # deleting without a target workflow: no reorder/move happens, the workflow is
    # deleted and the delete event is emitted
    with (
        patch("taiga.workflows.services.workflows_repositories", autospec=True) as fake_workflows_repo,
        patch("taiga.workflows.services.workflows_events", autospec=True) as fake_workflows_events,
    ):
        workflow = f.build_workflow()
        status1 = f.build_workflow_status(workflow=workflow, order=1)
        status2 = f.build_workflow_status(workflow=workflow, order=2)
        status3 = f.build_workflow_status(workflow=workflow, order=3)
        fake_workflows_repo.get_workflow.return_value = workflow
        fake_workflows_repo.list_workflow_statuses.return_value = [status1, status2, status3]
        # NOTE(review): the repository contract returns an int count; True only works
        # here because the service compares `deleted > 0` and bool coerces — prefer 1
        fake_workflows_repo.delete_workflow.return_value = True
        # NOTE(review): get_delete_workflow_detail's call to
        # stories_services.list_all_stories is not patched — confirm it does not
        # touch the DB in this unit test

        ret = await services.delete_workflow(workflow=workflow)

        fake_workflows_repo.delete_workflow.assert_awaited_once_with(filters={"id": workflow.id})
        fake_workflows_events.emit_event_when_workflow_is_deleted.assert_awaited_once()
        assert ret is True
+        # the serializer response doesn't matter
fake_workflows_repo.list_workflow_statuses.assert_has_awaits( + [ + call(filters={"workflow_id": deleted_workflow.id}, order_by=["order"]), + call(filters={"workflow_id": target_workflow.id}, order_by=["-order"], offset=0, limit=1), + ] + ) + fake_workflows_repo.delete_workflow.assert_awaited_once_with(filters={"id": deleted_workflow.id}) + fake_reorder_workflow_statuses.assert_awaited_once_with( + target_workflow=target_workflow, + statuses=[status.id for status in deleted_workflow_statuses], + reorder={"place": "after", "status": target_workflow_statuses[0].id}, + source_workflow=deleted_workflow, + ) + fake_get_workflow_detail.assert_has_awaits( + [ + call(project_id=deleted_workflow.project.id, workflow_slug=deleted_workflow.slug), + call(project_id=target_workflow.project.id, workflow_slug=target_workflow.slug), + ] + ) + fake_workflows_events.emit_event_when_workflow_is_deleted.assert_awaited_once_with( + project=deleted_workflow.project, + workflow=deleted_workflow_detail, + target_workflow=target_workflow_detail, + ) + assert ret is True + + +async def test_delete_workflow_with_target_workflow_with_no_anchor_status_ok(): + with ( + patch("taiga.workflows.services.workflows_repositories", autospec=True) as fake_workflows_repo, + patch("taiga.workflows.services.get_workflow_detail", autospec=True) as fake_get_workflow_detail, + patch("taiga.workflows.services.get_workflow", autospec=True) as fake_get_workflow, + patch("taiga.workflows.services.reorder_workflow_statuses", autospec=True) as fake_reorder_workflow_statuses, + patch("taiga.workflows.services.workflows_events", autospec=True) as fake_workflows_events, + ): + deleted_workflow = f.build_workflow(slug="deleted_workflow") + deleted_workflow_status1 = f.build_workflow_status(workflow=deleted_workflow, order=1) + deleted_workflow_statuses = [deleted_workflow_status1] + deleted_workflow_detail = WorkflowSerializer( + id=deleted_workflow.id, + name=deleted_workflow.name, + slug=deleted_workflow.slug, + 
+        # the serializer response doesn't matter
project=deleted_workflow.project, + workflow=deleted_workflow_detail, + target_workflow=target_workflow_detail, + ) + assert ret is True + + +async def test_delete_workflow_not_existing_target_workflow_exception(): + with ( + patch("taiga.workflows.services.workflows_repositories", autospec=True) as fake_workflows_repo, + patch("taiga.workflows.services.get_workflow_detail", autospec=True) as fake_get_workflow_detail, + patch("taiga.workflows.services.get_workflow", autospec=True) as fake_get_workflow, + patch("taiga.workflows.services.reorder_workflow_statuses", autospec=True) as fake_reorder_workflow_statuses, + patch("taiga.workflows.services.workflows_events", autospec=True) as fake_workflows_events, + pytest.raises(ex.NonExistingMoveToWorkflow), + ): + deleted_workflow = f.build_workflow(slug="deleted_workflow") + deleted_workflow_detail = WorkflowSerializer( + id=deleted_workflow.id, + name=deleted_workflow.name, + slug=deleted_workflow.slug, + order=deleted_workflow.order, + statuses=[], + ) + target_workflow = f.build_workflow(slug="target_workflow") + fake_get_workflow_detail.return_value = deleted_workflow_detail + fake_get_workflow.return_value = None + + # service call + ret = await services.delete_workflow(workflow=deleted_workflow, target_workflow_slug=target_workflow.slug) + + # asserts + fake_get_workflow_detail.assert_awaited_once_with( + project_id=deleted_workflow.project.id, workflow_slug=deleted_workflow.slug + ) + fake_reorder_workflow_statuses.assert_not_awaited() + fake_workflows_repo.delete_workflow.assert_not_awaited() + fake_workflows_events.emit_event_when_workflow_is_deleted.assert_not_awaited() + + assert ret is False + + +async def test_delete_workflow_same_target_workflow_exception(): + with ( + patch("taiga.workflows.services.workflows_repositories", autospec=True) as fake_workflows_repo, + patch("taiga.workflows.services.get_workflow_detail", autospec=True) as fake_get_workflow_detail, + 
patch("taiga.workflows.services.get_workflow", autospec=True) as fake_get_workflow, + patch("taiga.workflows.services.reorder_workflow_statuses", autospec=True) as fake_reorder_workflow_statuses, + patch("taiga.workflows.services.workflows_events", autospec=True) as fake_workflows_events, + pytest.raises(ex.SameMoveToWorkflow), + ): + deleted_workflow = f.build_workflow(slug="deleted_workflow") + deleted_workflow_detail = WorkflowSerializer( + id=deleted_workflow.id, + name=deleted_workflow.name, + slug=deleted_workflow.slug, + order=deleted_workflow.order, + statuses=[], + ) + fake_get_workflow_detail.return_value = deleted_workflow_detail + fake_get_workflow.return_value = deleted_workflow + + # service call + ret = await services.delete_workflow(workflow=deleted_workflow, target_workflow_slug=deleted_workflow.slug) + + # asserts + fake_get_workflow_detail.assert_awaited_once_with( + project_id=deleted_workflow.project.id, workflow_slug=deleted_workflow.slug + ) + fake_reorder_workflow_statuses.assert_not_awaited() + fake_workflows_repo.delete_workflow.assert_not_awaited() + fake_workflows_events.emit_event_when_workflow_is_deleted.assert_not_awaited() + + assert ret is False + + ####################################################### # _calculate_offset ####################################################### @@ -278,9 +516,10 @@ async def test_calculate_offset() -> None: ####################################################### -async def test_reorder_workflow_statuses_ok(): +async def test_reorder_workflow_statuses_same_workflow_ok(): with ( patch("taiga.workflows.services.workflows_repositories", autospec=True) as fake_workflows_repo, + patch("taiga.workflows.services.stories_repositories", autospec=True) as fake_stories_repo, patch("taiga.workflows.services.workflows_events", autospec=True) as fake_workflows_events, ): workflow = f.build_workflow() @@ -291,23 +530,104 @@ async def test_reorder_workflow_statuses_ok(): 
fake_workflows_repo.list_workflow_statuses_to_reorder.return_value = [status3, status2] await services.reorder_workflow_statuses( - workflow=f.build_workflow(), + target_workflow=f.build_workflow(), statuses=[status3.id, status2.id], reorder={"place": "after", "status": status1.id}, ) + fake_stories_repo.bulk_update_workflow_to_stories.assert_not_awaited() fake_workflows_repo.bulk_update_workflow_statuses.assert_awaited_once_with( - objs_to_update=[status3, status2], fields_to_update=["order"] + objs_to_update=[status3, status2], fields_to_update=["order", "workflow"] ) fake_workflows_events.emit_event_when_workflow_statuses_are_reordered.assert_awaited_once() +async def test_reorder_workflow_statuses_between_workflows_with_anchor_ok(): + with ( + patch("taiga.workflows.services.workflows_repositories", autospec=True) as fake_workflows_repo, + patch("taiga.workflows.services.stories_repositories", autospec=True) as fake_stories_repo, + patch("taiga.workflows.services.workflows_events", autospec=True) as fake_workflows_events, + ): + workflow1 = f.build_workflow() + workflow2 = f.build_workflow() + status1 = f.build_workflow_status(workflow=workflow1, order=1) + status2 = f.build_workflow_status(workflow=workflow1, order=2) + status3 = f.build_workflow_status(workflow=workflow1, order=3) + fake_workflows_repo.get_workflow_status.return_value = status1 + fake_workflows_repo.list_workflow_statuses_to_reorder.return_value = [status3, status2] + fake_stories_repo.bulk_update_workflow_to_stories.return_value = None + + await services.reorder_workflow_statuses( + target_workflow=workflow1, + statuses=[status3.id, status2.id], + reorder={"place": "after", "status": status1.id}, + source_workflow=workflow2, + ) + + fake_workflows_repo.bulk_update_workflow_statuses.assert_awaited_once_with( + objs_to_update=[status3, status2], fields_to_update=["order", "workflow"] + ) + fake_stories_repo.bulk_update_workflow_to_stories.assert_awaited_once_with( + statuses_ids=[status3.id, 
status2.id], old_workflow_id=workflow2.id, new_workflow_id=workflow1.id + ) + fake_workflows_events.emit_event_when_workflow_statuses_are_reordered.assert_awaited_once() + + +async def test_reorder_workflow_statuses_between_workflows_no_anchor_ok(): + with ( + patch("taiga.workflows.services.workflows_repositories", autospec=True) as fake_workflows_repo, + patch("taiga.workflows.services.stories_repositories", autospec=True) as fake_stories_repo, + patch("taiga.workflows.services.workflows_events", autospec=True) as fake_workflows_events, + ): + workflow1 = f.build_workflow() + workflow2 = f.build_workflow(statuses=[]) + status1 = f.build_workflow_status(workflow=workflow1, order=1) + status2 = f.build_workflow_status(workflow=workflow1, order=2) + fake_workflows_repo.get_workflow_status.return_value = status1 + fake_workflows_repo.list_workflow_statuses_to_reorder.return_value = [status1, status2] + fake_stories_repo.bulk_update_workflow_to_stories.return_value = None + + await services.reorder_workflow_statuses( + target_workflow=workflow2, statuses=[status1.id, status2.id], reorder=None, source_workflow=workflow1 + ) + + fake_workflows_repo.bulk_update_workflow_statuses.assert_awaited_once_with( + objs_to_update=[status1, status2], fields_to_update=["order", "workflow"] + ) + fake_stories_repo.bulk_update_workflow_to_stories.assert_awaited_once_with( + statuses_ids=[status1.id, status2.id], old_workflow_id=workflow1.id, new_workflow_id=workflow2.id + ) + fake_workflows_events.emit_event_when_workflow_statuses_are_reordered.assert_awaited_once() + + +async def test_reorder_workflow_statuses_between_workflows_no_anchor_same_workflow_exception(): + with ( + patch("taiga.workflows.services.workflows_repositories", autospec=True) as fake_workflows_repo, + pytest.raises(ex.NonExistingMoveToStatus), + ): + workflow = f.build_workflow() + status1 = f.build_workflow_status(workflow=workflow, order=1) + status2 = f.build_workflow_status(workflow=workflow, order=2) + 
fake_workflows_repo.list_workflow_statuses_to_reorder.return_value = [status1, status2] + + await services.reorder_workflow_statuses( + target_workflow=workflow, statuses=[status1.id, status2.id], reorder=None, source_workflow=workflow + ) + + async def test_reorder_workflow_status_repeated(): - with (pytest.raises(ex.InvalidWorkflowStatusError),): + with ( + pytest.raises(ex.InvalidWorkflowStatusError), + patch("taiga.workflows.services.workflows_repositories", autospec=True) as fake_workflows_repo, + ): + workflow = f.build_workflow() + status = f.build_workflow_status(workflow=workflow, order=1) + fake_workflows_repo.list_workflow_statuses_to_reorder.return_value = [status] + await services.reorder_workflow_statuses( - workflow=f.build_workflow(), - statuses=["new"], - reorder={"place": "after", "status": "new"}, + target_workflow=workflow, + statuses=[status.id], + reorder={"place": "after", "status": status.id}, ) @@ -319,7 +639,7 @@ async def test_reorder_anchor_workflow_status_does_not_exist(): fake_workflows_repo.get_workflow_status.return_value = None await services.reorder_workflow_statuses( - workflow=f.build_workflow(), + target_workflow=f.build_workflow(), statuses=["in-progress"], reorder={"place": "after", "status": "mooo"}, ) @@ -333,7 +653,7 @@ async def test_reorder_any_workflow_status_does_not_exist(): fake_workflows_repo.get_workflow_status.return_value = None await services.reorder_workflow_statuses( - workflow=f.build_workflow(), + target_workflow=f.build_workflow(), statuses=["in-progress", "mooo"], reorder={"place": "after", "status": "new"}, ) diff --git a/python/docs/events.md b/python/docs/events.md index 46c4372f7..90008ede6 100644 --- a/python/docs/events.md +++ b/python/docs/events.md @@ -681,6 +681,19 @@ Content for: } ``` +#### `workflows.delete` + +It happens when a workflow has been deleted. + +Content for: +- project channel: + ``` + { + "workflow": {... "workflow object" ...}, + "targetWorkflow": null | {... 
"workflow object" ...} + } + ``` + #### `workflowstatuses.create` diff --git a/python/docs/postman/taiga.postman_collection.json b/python/docs/postman/taiga.postman_collection.json index f9d180a24..68a5ce667 100644 --- a/python/docs/postman/taiga.postman_collection.json +++ b/python/docs/postman/taiga.postman_collection.json @@ -1,9 +1,9 @@ { "info": { - "_postman_id": "b330253f-040c-48f5-b6e8-a7d24d0b18f3", + "_postman_id": "6395d1e4-7f57-478c-b868-47621357a450", "name": "taiga-next", "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json", - "_exporter_id": "15018493" + "_exporter_id": "3299216" }, "item": [ { @@ -2801,6 +2801,67 @@ } }, "response": [] + }, + { + "name": "delete workflow", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "// Post-request execution tasks", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": {} + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{auth_token}}", + "type": "string" + } + ] + }, + "method": "DELETE", + "header": [], + "body": { + "mode": "raw", + "raw": "", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "{{protocol}}://{{domain}}{{port}}{{api_url}}/projects/{{pj-id}}/workflows/{{wf-slug}}?moveTo={{wf-slug}}", + "protocol": "{{protocol}}", + "host": [ + "{{domain}}{{port}}{{api_url}}" + ], + "path": [ + "projects", + "{{pj-id}}", + "workflows", + "{{wf-slug}}" + ], + "query": [ + { + "key": "moveTo", + "value": "{{wf-slug}}" + } + ] + } + }, + "response": [] } ] }, diff --git a/python/docs/postman/taiga.postman_collection_e2e.json b/python/docs/postman/taiga.postman_collection_e2e.json index fb97370c4..d3dc2534d 100644 --- a/python/docs/postman/taiga.postman_collection_e2e.json +++ b/python/docs/postman/taiga.postman_collection_e2e.json @@ -1,9 +1,9 @@ { "info": { - "_postman_id": "882b348a-4bc9-4ce2-a3fe-18e77c30fe4f", + "_postman_id": 
"6eb9988f-3776-442a-b609-f4bafcce235f", "name": "taiga-next e2e", "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json", - "_exporter_id": "15018493" + "_exporter_id": "3299216" }, "item": [ { @@ -4235,6 +4235,66 @@ } }, "response": [] + }, + { + "name": "204.projects.{pj}.workflows.{wf}", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "// Post-request execution tasks", + "", + "// Tests", + "pm.test(\"HTTP status code is correct\", function () {", + " pm.response.to.have.status(204);", + "", + "});" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": {} + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{auth_token}}", + "type": "string" + } + ] + }, + "method": "DELETE", + "header": [], + "body": { + "mode": "raw", + "raw": "", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "{{protocol}}://{{domain}}{{port}}{{api_url}}/projects/{{pj-id}}/workflows/{{wf-slug}}", + "protocol": "{{protocol}}", + "host": [ + "{{domain}}{{port}}{{api_url}}" + ], + "path": [ + "projects", + "{{pj-id}}", + "workflows", + "{{wf-slug}}" + ] + } + }, + "response": [] } ] },