From 9a5d1967cbbb2199f23d867e726ae6215b80401e Mon Sep 17 00:00:00 2001 From: Valentino Giardino <77643678+valentinogiardino@users.noreply.github.com> Date: Tue, 3 Dec 2024 19:42:51 -0300 Subject: [PATCH 1/7] fix(content-import-job) fix key fields (#30843) This pull request focuses on updating the content import functionality to use field IDs instead of field names. Key changes include: ### Validation and Schema Updates: * [`dotCMS/src/main/java/com/dotcms/jobs/business/processor/impl/ImportContentletsProcessor.java`](diffhunk://#diff-f5dc237cd813ef4bac45f0d238379d561192b07c9c1dd7e03619cde2ee3566adL232-R232): Updated the field validation logic to use `field.id()` instead of `field.variable()`. * [`dotCMS/src/main/java/com/dotcms/rest/api/v1/content/dotimport/ContentImportParamsSchema.java`](diffhunk://#diff-ac814b1da2510effe84f7fbd1a316ce69ca5146951885d5d4a325fbfef2ab8e4L31-R31): Modified the example JSON to use a field ID instead of a field name. ### Test Case Adjustments: * `dotcms-integration/src/test/java/com/dotcms/rest/api/v1/content/dotimport/ContentImportResourceIntegrationTest.java`: * Added `fieldId` to store the ID of the first field in the content type. [[1]](diffhunk://#diff-038e819a6c662a05544ffc5a4c261de236b8dad56f380b9c4623d35c7a89e99bR60) [[2]](diffhunk://#diff-038e819a6c662a05544ffc5a4c261de236b8dad56f380b9c4623d35c7a89e99bR77-R78) * Updated test cases to use `fieldId` instead of the field name "title". 
[[1]](diffhunk://#diff-038e819a6c662a05544ffc5a4c261de236b8dad56f380b9c4623d35c7a89e99bL104-R111) [[2]](diffhunk://#diff-038e819a6c662a05544ffc5a4c261de236b8dad56f380b9c4623d35c7a89e99bL122-R129) [[3]](diffhunk://#diff-038e819a6c662a05544ffc5a4c261de236b8dad56f380b9c4623d35c7a89e99bL139-R146) [[4]](diffhunk://#diff-038e819a6c662a05544ffc5a4c261de236b8dad56f380b9c4623d35c7a89e99bL157-R164) ### Postman Collection Update: * `dotcms-postman/src/main/resources/postman/ContentImportResource.postman_collection.json`: * Added a script to set the field ID in the collection variables after creating the content type. * Updated the `fields` variable to be empty initially, to be populated by the script. --- .../impl/ImportContentletsProcessor.java | 12 ++++----- .../dotimport/ContentImportParamsSchema.java | 2 +- .../ContentImportResourceIntegrationTest.java | 19 +++++++------ ...tentImportResource.postman_collection.json | 27 +++++++++++++++++-- 4 files changed, 43 insertions(+), 17 deletions(-) diff --git a/dotCMS/src/main/java/com/dotcms/jobs/business/processor/impl/ImportContentletsProcessor.java b/dotCMS/src/main/java/com/dotcms/jobs/business/processor/impl/ImportContentletsProcessor.java index bf42bf0fc249..12d1a8b70fde 100644 --- a/dotCMS/src/main/java/com/dotcms/jobs/business/processor/impl/ImportContentletsProcessor.java +++ b/dotCMS/src/main/java/com/dotcms/jobs/business/processor/impl/ImportContentletsProcessor.java @@ -223,15 +223,15 @@ && getWorkflowActionId(parameters).isEmpty()) { * {@link JobValidationException} is thrown.

* * @param parameters The job parameters containing the fields to validate - * @param contentTypeFound The content type to validate the fields against + * @param contentType The content type to validate the fields against * @throws JobValidationException if any field specified in the parameters is not found in the content type */ - private void validateFields(final Map parameters, final ContentType contentTypeFound) { - var fields = contentTypeFound.fields(); - for (String field : getFields(parameters)) { - if (fields.stream().noneMatch(f -> Objects.equals(f.variable(), field))) { + private void validateFields(final Map parameters, final ContentType contentType) { + var contentTypeFields = contentType.fields(); + for (String providedField : getFields(parameters)) { + if (contentTypeFields.stream().noneMatch(field -> Objects.equals(field.id(), providedField))) { final var errorMessage = String.format( - "Field [%s] not found in Content Type [%s].", field, contentTypeFound.variable() + "Field [%s] not found in Content Type [%s].", providedField, contentType.variable() ); Logger.error(this, errorMessage); throw new JobValidationException(errorMessage); diff --git a/dotCMS/src/main/java/com/dotcms/rest/api/v1/content/dotimport/ContentImportParamsSchema.java b/dotCMS/src/main/java/com/dotcms/rest/api/v1/content/dotimport/ContentImportParamsSchema.java index 9e59db2ac3ae..4daddf0a3ac0 100644 --- a/dotCMS/src/main/java/com/dotcms/rest/api/v1/content/dotimport/ContentImportParamsSchema.java +++ b/dotCMS/src/main/java/com/dotcms/rest/api/v1/content/dotimport/ContentImportParamsSchema.java @@ -28,7 +28,7 @@ public class ContentImportParamsSchema { " \"contentType\": \"activity\",\n" + " \"language\": \"en-US\",\n" + " \"workflowActionId\": \"1234\",\n" + - " \"fields\": [\"title\"]\n" + + " \"fields\": [\"e1f99107-fd0e-49d4-a099-1cc10aa284d8\"]\n" + "}" ) private String form; diff --git 
a/dotcms-integration/src/test/java/com/dotcms/rest/api/v1/content/dotimport/ContentImportResourceIntegrationTest.java b/dotcms-integration/src/test/java/com/dotcms/rest/api/v1/content/dotimport/ContentImportResourceIntegrationTest.java index bd50dd777cc7..398d918fa8cb 100644 --- a/dotcms-integration/src/test/java/com/dotcms/rest/api/v1/content/dotimport/ContentImportResourceIntegrationTest.java +++ b/dotcms-integration/src/test/java/com/dotcms/rest/api/v1/content/dotimport/ContentImportResourceIntegrationTest.java @@ -57,6 +57,7 @@ public class ContentImportResourceIntegrationTest extends Junit5WeldBaseTest { private static File csvFile; private static ContentType contentType; + private static String fieldId; @Inject ContentImportHelper contentImportHelper; @@ -73,6 +74,8 @@ static void setUp() throws Exception { defaultLanguage = APILocator.getLanguageAPI().getDefaultLanguage(); contentType = TestDataUtils.getRichTextLikeContentType(); + fieldId = contentType.fields().get(0).id(); + assert fieldId != null; csvFile = createTestCsvFile(); } @@ -101,11 +104,11 @@ static void cleanup() { */ @Test public void test_import_content_with_valid_params() throws IOException, DotDataException { - ContentImportForm form = createContentImportForm(contentType.name(), String.valueOf(defaultLanguage.getId()), WORKFLOW_PUBLISH_ACTION_ID, List.of("title")); + ContentImportForm form = createContentImportForm(contentType.name(), String.valueOf(defaultLanguage.getId()), WORKFLOW_PUBLISH_ACTION_ID, List.of(fieldId)); ContentImportParams params = createContentImportParams(csvFile, form); Response importContentResponse = importResource.importContent(request, response, params); - validateSuccessfulResponse(importContentResponse, contentType.name(), String.valueOf(defaultLanguage.getId()), List.of("title"), WORKFLOW_PUBLISH_ACTION_ID, CMD_PUBLISH); + validateSuccessfulResponse(importContentResponse, contentType.name(), String.valueOf(defaultLanguage.getId()), List.of(fieldId), 
WORKFLOW_PUBLISH_ACTION_ID, CMD_PUBLISH); } @@ -119,11 +122,11 @@ public void test_import_content_with_valid_params() throws IOException, DotDataE */ @Test public void test_import_content_validate_with_valid_params() throws IOException, DotDataException { - ContentImportForm form = createContentImportForm(contentType.name(), String.valueOf(defaultLanguage.getId()), WORKFLOW_PUBLISH_ACTION_ID, List.of("title")); + ContentImportForm form = createContentImportForm(contentType.name(), String.valueOf(defaultLanguage.getId()), WORKFLOW_PUBLISH_ACTION_ID, List.of(fieldId)); ContentImportParams params = createContentImportParams(csvFile, form); Response importContentResponse = importResource.validateContentImport(request, response, params); - validateSuccessfulResponse(importContentResponse, contentType.name(), String.valueOf(defaultLanguage.getId()), List.of("title"), WORKFLOW_PUBLISH_ACTION_ID, CMD_PREVIEW); + validateSuccessfulResponse(importContentResponse, contentType.name(), String.valueOf(defaultLanguage.getId()), List.of(fieldId), WORKFLOW_PUBLISH_ACTION_ID, CMD_PREVIEW); } /** @@ -136,11 +139,11 @@ public void test_import_content_validate_with_valid_params() throws IOException, */ @Test public void test_import_content_with_language_iso_code() throws IOException, DotDataException { - ContentImportForm form = createContentImportForm(contentType.name(), defaultLanguage.getIsoCode(), WORKFLOW_PUBLISH_ACTION_ID, List.of("title")); + ContentImportForm form = createContentImportForm(contentType.name(), defaultLanguage.getIsoCode(), WORKFLOW_PUBLISH_ACTION_ID, List.of(fieldId)); ContentImportParams params = createContentImportParams(csvFile, form); Response importContentResponse = importResource.importContent(request, response, params); - validateSuccessfulResponse(importContentResponse, contentType.name(), defaultLanguage.getIsoCode(), List.of("title"), WORKFLOW_PUBLISH_ACTION_ID, CMD_PUBLISH); + validateSuccessfulResponse(importContentResponse, contentType.name(), 
defaultLanguage.getIsoCode(), List.of(fieldId), WORKFLOW_PUBLISH_ACTION_ID, CMD_PUBLISH); } @@ -154,11 +157,11 @@ public void test_import_content_with_language_iso_code() throws IOException, Dot */ @Test public void test_import_content__validate_with_language_iso_code() throws IOException, DotDataException { - ContentImportForm form = createContentImportForm(contentType.name(), defaultLanguage.getIsoCode(), WORKFLOW_PUBLISH_ACTION_ID, List.of("title")); + ContentImportForm form = createContentImportForm(contentType.name(), defaultLanguage.getIsoCode(), WORKFLOW_PUBLISH_ACTION_ID, List.of(fieldId)); ContentImportParams params = createContentImportParams(csvFile, form); Response importContentResponse = importResource.validateContentImport(request, response, params); - validateSuccessfulResponse(importContentResponse, contentType.name(), defaultLanguage.getIsoCode(), List.of("title"), WORKFLOW_PUBLISH_ACTION_ID, CMD_PREVIEW); + validateSuccessfulResponse(importContentResponse, contentType.name(), defaultLanguage.getIsoCode(), List.of(fieldId), WORKFLOW_PUBLISH_ACTION_ID, CMD_PREVIEW); } /** diff --git a/dotcms-postman/src/main/resources/postman/ContentImportResource.postman_collection.json b/dotcms-postman/src/main/resources/postman/ContentImportResource.postman_collection.json index 25cebbf18a26..45d5294c190c 100644 --- a/dotcms-postman/src/main/resources/postman/ContentImportResource.postman_collection.json +++ b/dotcms-postman/src/main/resources/postman/ContentImportResource.postman_collection.json @@ -11,7 +11,30 @@ "name": "pre-execution-scripts", "item": [ { - "name": "Create ContentType Copy", + "name": "Create ContentType", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "var jsonData = pm.response.json();", + "", + "pm.test(\"Status code should be ok 200\", function () {", + " pm.response.to.have.status(200);", + "});", + "", + "pm.test(\"fields check\", function () {", + " pm.expect(jsonData.entity[0].fields.length).to.eql(8);", + " 
pm.expect(jsonData.entity[0].fields[3].variable).to.eql('title');", + " pm.collectionVariables.set(\"fields\", JSON.stringify([jsonData.entity[0].fields[3].id]))", + "});", + "" + ], + "type": "text/javascript", + "packages": {} + } + } + ], "request": { "method": "POST", "header": [], @@ -1913,7 +1936,7 @@ }, { "key": "fields", - "value": "[\"title\"]", + "value": "", "type": "string" } ] From 3b866d606a708fe67c8733d5befec51ff346bb6c Mon Sep 17 00:00:00 2001 From: Rafael Velazco Date: Wed, 4 Dec 2024 10:07:53 -0400 Subject: [PATCH 2/7] feat(FTM): New UVE Toolbar - Implement "API" button (#30830) This pull request includes several changes to the `DotUveToolbarComponent` and related files to enhance the functionality and testing of the API URL link. The most important changes include updating the toolbar component to use an anchor tag for the API URL link, adding a computed property for the API URL, and enhancing the tests to verify the correct behavior of the API URL link. Changes to `DotUveToolbarComponent`: * Updated the toolbar component to use an anchor (``) tag instead of a button for the API URL link, and added attributes such as `title`, `target`, and `href` to improve accessibility and functionality. (`[core-web/libs/portlets/edit-ema/portlet/src/lib/edit-ema-editor/components/dot-uve-toolbar/dot-uve-toolbar.component.htmlL19-R26](diffhunk://#diff-9937556e73b051b878ba22ad1ce971a70019a617d7979b3e0bcc814801ad350bL19-R26)`) Enhancements to testing: * Added a constant `$apiURL` and updated the `DotUveToolbarComponent` tests to include this constant and verify the presence and correctness of the API URL link. 
(`[[1]](diffhunk://#diff-3eaa147616a5d1ff374a5fa27b0f38f0159a9039ef7e8d672dec43631f48a9e1R38-R39)`, `[[2]](diffhunk://#diff-3eaa147616a5d1ff374a5fa27b0f38f0159a9039ef7e8d672dec43631f48a9e1R123)`, `[[3]](diffhunk://#diff-3eaa147616a5d1ff374a5fa27b0f38f0159a9039ef7e8d672dec43631f48a9e1L177-L180)`, `[[4]](diffhunk://#diff-3eaa147616a5d1ff374a5fa27b0f38f0159a9039ef7e8d672dec43631f48a9e1R195-R205)`) Computed property for API URL: * Added a computed property `$apiURL` in the `DotUveToolbarComponent` to dynamically generate the API URL based on the page parameters. (`[[1]](diffhunk://#diff-217a9e619d6590c4f652e85353b9637ba5e464ddeb0424be35aef39bb8dceb30R34)`, `[[2]](diffhunk://#diff-e6d3fb6319626fa85a4fc6894b57935843713366be593de6dd1dc5ed68bf6afcR119-R127)`) New test for `withUVEToolbar`: * Added a new test file `withUVEToolbar.spec.ts` to verify the computed property `$apiURL` and ensure it returns the correct API URL based on given page parameters. (`[core-web/libs/portlets/edit-ema/portlet/src/lib/store/features/editor/toolbar/withUVEToolbar.spec.tsR1-R83](diffhunk://#diff-7a5de702ac1dc81304f4c31816f5c0363aa56141f8afa583a87856e7a0d8482dR1-R83)`) ### Videos #### Traditional https://github.com/user-attachments/assets/3042d0d0-c964-4d1c-b454-736e71542f21 #### Headless https://github.com/user-attachments/assets/25fc13b6-5101-47e0-871a-feea0b65156a --- .../dot-uve-toolbar.component.html | 10 ++- .../dot-uve-toolbar.component.spec.ts | 18 +++- .../dot-uve-toolbar.component.ts | 1 + .../editor/toolbar/withUVEToolbar.spec.ts | 83 +++++++++++++++++++ .../features/editor/toolbar/withUVEToolbar.ts | 9 ++ 5 files changed, 116 insertions(+), 5 deletions(-) create mode 100644 core-web/libs/portlets/edit-ema/portlet/src/lib/store/features/editor/toolbar/withUVEToolbar.spec.ts diff --git a/core-web/libs/portlets/edit-ema/portlet/src/lib/edit-ema-editor/components/dot-uve-toolbar/dot-uve-toolbar.component.html 
b/core-web/libs/portlets/edit-ema/portlet/src/lib/edit-ema-editor/components/dot-uve-toolbar/dot-uve-toolbar.component.html index 835d9a334eab..34b3e7ca45e3 100644 --- a/core-web/libs/portlets/edit-ema/portlet/src/lib/edit-ema-editor/components/dot-uve-toolbar/dot-uve-toolbar.component.html +++ b/core-web/libs/portlets/edit-ema/portlet/src/lib/edit-ema-editor/components/dot-uve-toolbar/dot-uve-toolbar.component.html @@ -16,10 +16,14 @@ [cdkCopyToClipboard]="$toolbar().editor.copyUrl" data-testId="uve-toolbar-copy-url" /> - + class="p-button-text" + target="_blank" + data-testId="uve-toolbar-api-link" + [href]="$apiURL()"> } @else if ($toolbar().preview) {
PREVIEW MODE CONTENT
diff --git a/core-web/libs/portlets/edit-ema/portlet/src/lib/edit-ema-editor/components/dot-uve-toolbar/dot-uve-toolbar.component.spec.ts b/core-web/libs/portlets/edit-ema/portlet/src/lib/edit-ema-editor/components/dot-uve-toolbar/dot-uve-toolbar.component.spec.ts index a87708cecd5f..70b5c8518936 100644 --- a/core-web/libs/portlets/edit-ema/portlet/src/lib/edit-ema-editor/components/dot-uve-toolbar/dot-uve-toolbar.component.spec.ts +++ b/core-web/libs/portlets/edit-ema/portlet/src/lib/edit-ema-editor/components/dot-uve-toolbar/dot-uve-toolbar.component.spec.ts @@ -37,6 +37,8 @@ import { import { DotEmaBookmarksComponent } from '../dot-ema-bookmarks/dot-ema-bookmarks.component'; import { DotEmaRunningExperimentComponent } from '../dot-ema-running-experiment/dot-ema-running-experiment.component'; +const $apiURL = '/api/v1/page/json/123-xyz-567-xxl?host_id=123-xyz-567-xxl&language_id=1'; + describe('DotUveToolbarComponent', () => { let spectator: Spectator; let messageService: MessageService; @@ -125,6 +127,7 @@ describe('DotUveToolbarComponent', () => { setSocialMedia: jest.fn(), pageParams: signal(params), pageAPIResponse: signal(MOCK_RESPONSE_VTL), + $apiURL: signal($apiURL), reloadCurrentPage: jest.fn(), loadPageAsset: jest.fn() }; @@ -187,8 +190,8 @@ describe('DotUveToolbarComponent', () => { }); }); - it('should have api link button', () => { - expect(spectator.query(byTestId('uve-toolbar-api-link'))).toBeTruthy(); + it('should have not experiments button if experiment is not running', () => { + expect(spectator.query(byTestId('uve-toolbar-running-experiment'))).toBeFalsy(); }); it('should have language selector', () => { @@ -202,6 +205,17 @@ describe('DotUveToolbarComponent', () => { it('should have workflows button', () => { expect(spectator.query(byTestId('uve-toolbar-workflow-actions'))).toBeTruthy(); }); + + describe('API URL', () => { + it('should have api link button', () => { + expect(spectator.query(byTestId('uve-toolbar-api-link'))).toBeTruthy(); + }); 
+ + it('should have api link button with correct href', () => { + const btn = spectator.query(byTestId('uve-toolbar-api-link')); + expect(btn.getAttribute('href')).toBe($apiURL); + }); + }); }); describe('State changes', () => { diff --git a/core-web/libs/portlets/edit-ema/portlet/src/lib/edit-ema-editor/components/dot-uve-toolbar/dot-uve-toolbar.component.ts b/core-web/libs/portlets/edit-ema/portlet/src/lib/edit-ema-editor/components/dot-uve-toolbar/dot-uve-toolbar.component.ts index 5e3869e11e1e..0e8495d4a3f9 100644 --- a/core-web/libs/portlets/edit-ema/portlet/src/lib/edit-ema-editor/components/dot-uve-toolbar/dot-uve-toolbar.component.ts +++ b/core-web/libs/portlets/edit-ema/portlet/src/lib/edit-ema-editor/components/dot-uve-toolbar/dot-uve-toolbar.component.ts @@ -33,6 +33,7 @@ export class DotUveToolbarComponent { readonly #dotMessageService = inject(DotMessageService); readonly $toolbar = this.#store.$uveToolbar; + readonly $apiURL = this.#store.$apiURL; togglePreviewMode(preview: boolean) { this.#store.togglePreviewMode(preview); diff --git a/core-web/libs/portlets/edit-ema/portlet/src/lib/store/features/editor/toolbar/withUVEToolbar.spec.ts b/core-web/libs/portlets/edit-ema/portlet/src/lib/store/features/editor/toolbar/withUVEToolbar.spec.ts new file mode 100644 index 000000000000..6a3b044ff152 --- /dev/null +++ b/core-web/libs/portlets/edit-ema/portlet/src/lib/store/features/editor/toolbar/withUVEToolbar.spec.ts @@ -0,0 +1,83 @@ +import { describe } from '@jest/globals'; +import { createServiceFactory, mockProvider, SpectatorService } from '@ngneat/spectator/jest'; +import { signalStore, withState } from '@ngrx/signals'; +import { of } from 'rxjs'; + +import { ActivatedRoute, Router } from '@angular/router'; + +import { withUVEToolbar } from './withUVEToolbar'; + +import { DotPageApiService } from '../../../../services/dot-page-api.service'; +import { UVE_STATUS } from '../../../../shared/enums'; +import { MOCK_RESPONSE_HEADLESS } from 
'../../../../shared/mocks'; +import { UVEState } from '../../../models'; + +const pageParams = { + url: 'test-url', + language_id: '1', + 'com.dotmarketing.persona.id': 'dot:persona', + variantName: 'DEFAULT', + clientHost: 'http://localhost:3000' +}; + +const initialState: UVEState = { + isEnterprise: true, + languages: [], + pageAPIResponse: MOCK_RESPONSE_HEADLESS, + currentUser: null, + experiment: null, + errorCode: null, + pageParams, + status: UVE_STATUS.LOADED, + isTraditionalPage: false, + canEditPage: true, + pageIsLocked: true, + isClientReady: false +}; + +export const uveStoreMock = signalStore(withState(initialState), withUVEToolbar()); + +describe('withEditor', () => { + let spectator: SpectatorService>; + let store: InstanceType; + + const createService = createServiceFactory({ + service: uveStoreMock, + providers: [ + mockProvider(Router), + mockProvider(ActivatedRoute), + mockProvider(Router), + mockProvider(ActivatedRoute), + { + provide: DotPageApiService, + useValue: { + get() { + return of(MOCK_RESPONSE_HEADLESS); + }, + getClientPage() { + return of(MOCK_RESPONSE_HEADLESS); + }, + save: jest.fn() + } + } + ] + }); + + beforeEach(() => { + spectator = createService(); + store = spectator.service; + }); + + describe('Computed', () => { + it('should return the right API URL', () => { + const params = { ...pageParams }; + // Delete the url from the params to test the function + delete params.url; + + const queryParams = new URLSearchParams(params).toString(); + const expectURL = `/api/v1/page/json/test-url?${queryParams}`; + + expect(store.$apiURL()).toBe(expectURL); + }); + }); +}); diff --git a/core-web/libs/portlets/edit-ema/portlet/src/lib/store/features/editor/toolbar/withUVEToolbar.ts b/core-web/libs/portlets/edit-ema/portlet/src/lib/store/features/editor/toolbar/withUVEToolbar.ts index ce67a7ae9ffb..047bb34bdbec 100644 --- a/core-web/libs/portlets/edit-ema/portlet/src/lib/store/features/editor/toolbar/withUVEToolbar.ts +++ 
b/core-web/libs/portlets/edit-ema/portlet/src/lib/store/features/editor/toolbar/withUVEToolbar.ts @@ -116,6 +116,15 @@ export function withUVEToolbar() { unlockButton: shouldShowUnlock ? unlockButton : null, showInfoDisplay: shouldShowInfoDisplay }; + }), + $apiURL: computed(() => { + const pageParams = store.pageParams(); + const url = sanitizeURL(pageParams?.url); + const params = createPageApiUrlWithQueryParams(url, pageParams); + const pageType = store.isTraditionalPage() ? 'render' : 'json'; + const pageAPI = `/api/v1/page/${pageType}/${params}`; + + return pageAPI; }) })), withMethods((store) => ({ From 6bff5a05e951b54d78b3825900f3a5aeff228da4 Mon Sep 17 00:00:00 2001 From: spbolton Date: Wed, 4 Dec 2024 17:10:19 +0000 Subject: [PATCH 3/7] test(jmeter): Add jmeter module (#30789) (#30842) ### Proposed Changes * Add initial jmeter testing module that is not enabled in the CI builds * See test-jmeter/README.md * Must be enabled locally with -Djmeter.test.skip=false otherwise will be skiped as a module in maven commands. * Must test against a https server currently and is currently setup to work with the new local docker-desktop k8s helm chart * Should mainly test to ensure it does get skipped in the CI build and is a WIP for further development and discussion. ### Additional Info Related to #30789 (Provide basic mechanism to generate traffic and an). 
### Screenshots Original | Updated :-------------------------:|:-------------------------: ** original screenshot ** | ** updated screenshot ** --- justfile | 3 + parent/pom.xml | 6 + pom.xml | 16 + test-jmeter/README.md | 108 ++ test-jmeter/pom.xml | 132 ++ test-jmeter/src/test/jmeter/sessions.jmx | 1635 ++++++++++++++++++++++ 6 files changed, 1900 insertions(+) create mode 100644 test-jmeter/README.md create mode 100644 test-jmeter/pom.xml create mode 100644 test-jmeter/src/test/jmeter/sessions.jmx diff --git a/justfile b/justfile index 881932955386..7232644735b7 100644 --- a/justfile +++ b/justfile @@ -198,6 +198,9 @@ run-java-cli-native *ARGS: tools/dotcms-cli/cli/target/dotcms-cli-1.0.0-SNAPSHOT-runner {{ARGS}} +run-jmeter-tests: + ./mvnw verify -Djmeter.test.skip=false -pl :dotcms-test-jmeter + ########################################################### # Useful Maven Helper Commands ########################################################### diff --git a/parent/pom.xml b/parent/pom.xml index 9c4b314ab2c3..9017a07f8744 100644 --- a/parent/pom.xml +++ b/parent/pom.xml @@ -208,6 +208,7 @@ ${environment.properties.folder}/environment.properties true 0.14.2 + true @@ -223,6 +224,11 @@ + + com.lazerycode.jmeter + jmeter-maven-plugin + 3.8.0 + org.codehaus.mojo flatten-maven-plugin diff --git a/pom.xml b/pom.xml index 8d9912d177f2..cb8458cca201 100644 --- a/pom.xml +++ b/pom.xml @@ -102,4 +102,20 @@ + + + jmeter + + + jmeter.test.skip + false + + + + test-jmeter + + + + + diff --git a/test-jmeter/README.md b/test-jmeter/README.md new file mode 100644 index 000000000000..c78fbe779cf9 --- /dev/null +++ b/test-jmeter/README.md @@ -0,0 +1,108 @@ +# dotCMS JMeter Performance Tests + +This module contains JMeter performance tests for dotCMS. The tests are configured to run against a dotCMS instance and measure various performance metrics. + +This is a work in progress. It currently requires a https connection to the dotCMS instance to maintain session cookies. 
It will also not run in the CI environment and is only for local testing requiring the -Djmeter.test.skip=false flag to be set to enable + + +## Test Configuration + +The JMeter tests are configured in `src/test/jmeter/sessions.jmx`. The default configuration includes: + +- Host: dotcms.local +- Port: 443 +- Ramp-up period: 0 seconds +- Startup delay: 5 seconds +- Test duration: 5 seconds + +## Running the Tests + +### Basic Execution + +```bash +./mvnw install -Djmeter.test.skip=false -pl :dotcms-test-jmeter +``` + +you can also run the above with the justfile alias `run-jmeter-tests`: + + +```bash +just run-jmeter-tests +``` + +### Opening test script in JMeter GUI + +```bash +cd test-jmeter +../mvnw jmeter:configure jmeter:gui -DguiTestFile=src/test/jmeter/sessions.jmx +```` + +### Overriding Test Parameters + +You can override the default settings using command-line properties: + +```bash +# Override host and port +./mvnw install -Djmeter.test.skip=false -pl :dotcms-test-jmeter \ + -Djmeter.host=my-dotcms-instance.com \ + -Djmeter.port=444 \ # default is 443 + -Djmeter.thread.number=10 # The number of concurrent users to simulate + +# Override test timing parameters +./mvnw install -Djmeter.test.skip=false -pl :dotcms-test-jmeter \ + -Djmeter.rampup=10 \ + -Djmeter.startup.delay=2 \ + -Djmeter.test.duration=30 +``` + +## Test Reports + +HTML reports are generated in the `target/jmeter/reports` directory. The plugin is configured to: +- Delete existing results before new test runs + +A csv is also generated in the `target/jmeter/results` directory. e.g. `20241203-sessions.csv` this contains +- Capture additional variables: JVM_HOSTNAME, SESSION_ID, X_DOT_SERVER +- The SESSION_ID and X_DOT_SERVER can be used in the csv to validate session propagation when there are multiple replicas and can be used to show behaviour when replicas are scaled up and down. 
+## Configuration Files + +- Main JMeter test file: `src/test/jmeter/sessions.jmx` +- Maven configuration: `pom.xml` + +## Properties Configuration + +Default properties in pom.xml: +```xml + + dotcms.local + 443 + 0 + 5 + 60 + 2 + +``` + +## Profile Information + +The tests run under the `jmeter-standalone` profile, which is active by default. This profile includes: +- Clean configuration for test reports +- JMeter test execution +- Results validation +- Report generation + +## Troubleshooting + +We have not yet validated the memory requirements. Eventually we should probably be explicit about the JVM memory settings. These can be added into the configuration block +in the pom.xml e.g.: +```xml + + + -XX:MaxMetaspaceSize=256m + -Xmx1024m + -Xms1024m + + +``` +## High load testing +Currently this runs as a standalone service, for high load testing we would need to run this in a distributed mode with multiple jmeter instances and jmeter should not be running on the same server as DotCMS. This is not yet supported. +As such performance issues in adding too many threads may be due to local server limitations of resources and not the dotCMS instance itself. 
diff --git a/test-jmeter/pom.xml b/test-jmeter/pom.xml new file mode 100644 index 000000000000..707f360e114f --- /dev/null +++ b/test-jmeter/pom.xml @@ -0,0 +1,132 @@ + + 4.0.0 + + com.dotcms + dotcms-parent + 1.0.0-SNAPSHOT + + + dotcms-test-jmeter + pom + + test-jmeter + + + + 21 + 21 + 21 + UTF-8 + sessions.jmx + dotcms.local + 443 + 0 + 5 + 60 + 2 + + + + + junit + junit + 3.8.1 + test + + + + + + jmeter-standalone + + true + + + + + + maven-clean-plugin + + + clean-reports + test + + clean + + + true + false + + + target + + jmeter/reports/**/* + + + + + + + + + + + + com.lazerycode.jmeter + jmeter-maven-plugin + + + + configuration + + configure + + + + + jmeter-tests + + jmeter + + + + + jmeter-check-results + + results + + + + + + ${project.basedir}/src/test/jmeter + true + + + csv + true + JVM_HOSTNAME,SESSION_ID,X_DOT_SERVER + DELETE + ${jmeter.host} + ${jmeter.port} + ${jmeter.rampup} + ${jmeter.startup.delay} + ${jmeter.test.duration} + ${jmeter.thread.number} + + + ${jmeterScript} + + true + + + + + + + + + + diff --git a/test-jmeter/src/test/jmeter/sessions.jmx b/test-jmeter/src/test/jmeter/sessions.jmx new file mode 100644 index 000000000000..9d8454e3204f --- /dev/null +++ b/test-jmeter/src/test/jmeter/sessions.jmx @@ -0,0 +1,1635 @@ + + + + + + + + xDotServer + NONE + = + + + jsessionid + NONE + = + + + + false + false + + + + + + host + ${__P(host,dotcms.local)} + = + + + port + ${__P(port,443)} + = + + + scheme + https + = + + + thread.number + ${__P(thread.number,5)} + = + + + startup.delay + ${__P(startup.delay,5)} + = + + + rampup + ${__P(rampup,0)} + = + + + test.duration + ${__P(test.duration,60)} + = + + + + + + + + + ${host} + ${port} + https + + + + + true + false + + + + ${thread.number} + ${test.duration} + false + true + startnextloop + + -1 + false + + ${rampup} + ${startup.delay} + + + + -1 + + + + false + + + + 443 + UTF-8 + /api/v1/appconfiguration + GET + true + false + true + false + false + false + false + 6 + false + 0 + + + + + 
+ Sec-Fetch-Mode + cors + + + Referer + ${scheme}://${host}/dotAdmin/ + + + Sec-Fetch-Site + same-origin + + + Accept-Language + en-US,en;q=0.9 + + + Accept + application/json, text/plain, */* + + + sec-ch-ua + "Google Chrome";v="131", "Chromium";v="131", "Not_A Brand";v="24" + + + sec-ch-ua-mobile + ?0 + + + sec-ch-ua-platform + "macOS" + + + Accept-Encoding + gzip, deflate, br, zstd + + + User-Agent + Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 + + + Sec-Fetch-Dest + empty + + + + + + + ${__groovy(prev.getResponseCode() == '200',)} + false + true + + + + 5 + 0 + 0 + + + + + 100 + + + + false + + saveConfig + + + true + true + true + + true + true + true + true + false + true + true + false + false + false + true + false + false + false + true + 0 + true + true + true + true + true + true + + + + + + + + + + true + + + + false + {"userId":"admin@dotcms.com","password":"admin","rememberMe":false,"language":"en","country":"US","backEndLogin":true} + = + + + + 443 + UTF-8 + /api/v1/authentication + POST + true + false + true + false + false + false + false + 6 + false + 0 + + + + + + Sec-Fetch-Mode + cors + + + Referer + ${scheme}://${host}/dotAdmin/ + + + Sec-Fetch-Site + same-origin + + + Accept-Language + en-US,en;q=0.9 + + + Origin + ${scheme}://${host} + + + Accept + */* + + + sec-ch-ua + "Google Chrome";v="131", "Chromium";v="131", "Not_A Brand";v="24" + + + sec-ch-ua-mobile + ?0 + + + sec-ch-ua-platform + "macOS" + + + Content-Type + application/json + + + Accept-Encoding + gzip, deflate, br, zstd + + + User-Agent + Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 + + + Sec-Fetch-Dest + empty + + + + + + true + jsessionid + JSESSIONID=(.*?); + $1$. 
+ NOT_FOUND + false + + + + true + xdotserver + x-dot-server:\s(.*?)\s + $1$ + NOT_FOUND + false + + + + + false + + + + 443 + UTF-8 + /api/v1/menu + GET + true + false + true + false + false + false + false + 6 + false + 0 + + + + + + Sec-Fetch-Mode + cors + + + Referer + ${scheme}://${host}/dotAdmin/ + + + Sec-Fetch-Site + same-origin + + + Accept-Language + en-US,en;q=0.9 + + + Accept + application/json, text/plain, */* + + + sec-ch-ua + "Google Chrome";v="131", "Chromium";v="131", "Not_A Brand";v="24" + + + sec-ch-ua-mobile + ?0 + + + sec-ch-ua-platform + "macOS" + + + Accept-Encoding + gzip, deflate, br, zstd + + + User-Agent + Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 + + + Sec-Fetch-Dest + empty + + + + + + + false + + + + 443 + UTF-8 + /api/v1/users/current/ + GET + true + false + true + false + false + false + false + 6 + false + 0 + + + + + + Sec-Fetch-Mode + cors + + + Referer + ${scheme}://${host}/dotAdmin/ + + + Sec-Fetch-Site + same-origin + + + Accept-Language + en-US,en;q=0.9 + + + Accept + */* + + + sec-ch-ua + "Google Chrome";v="131", "Chromium";v="131", "Not_A Brand";v="24" + + + sec-ch-ua-mobile + ?0 + + + sec-ch-ua-platform + "macOS" + + + Content-Type + application/json + + + Accept-Encoding + gzip, deflate, br, zstd + + + User-Agent + Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 + + + Sec-Fetch-Dest + empty + + + + + + + false + + + + false + userid + dotcms.org.1 + = + true + + + false + permission + WRITE + = + true + + + true + permissiontype + HTMLPAGES,STRUCTURES,TEMPLATES,CONTENTLETS + = + true + + + + 443 + UTF-8 + /api/v1/permissions/_bypermissiontype + GET + true + false + true + false + false + false + false + 6 + false + 0 + + + + + + Sec-Fetch-Mode + cors + + + Referer + ${scheme}://${host}/dotAdmin/ + + + Sec-Fetch-Site + same-origin + + + Accept-Language + en-US,en;q=0.9 + + + 
Accept + */* + + + sec-ch-ua + "Google Chrome";v="131", "Chromium";v="131", "Not_A Brand";v="24" + + + sec-ch-ua-mobile + ?0 + + + sec-ch-ua-platform + "macOS" + + + Content-Type + application/json + + + Accept-Encoding + gzip, deflate, br, zstd + + + User-Agent + Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 + + + Sec-Fetch-Dest + empty + + + + + + + false + + + + false + keys + CONTENT_EDITOR2_ENABLED + = + true + + + + 443 + UTF-8 + /api/v1/configuration/config + GET + true + false + true + false + false + false + false + 6 + false + 0 + + + + + + Sec-Fetch-Mode + cors + + + Referer + ${scheme}://${host}/dotAdmin/ + + + Sec-Fetch-Site + same-origin + + + Accept-Language + en-US,en;q=0.9 + + + Accept + application/json, text/plain, */* + + + sec-ch-ua + "Google Chrome";v="131", "Chromium";v="131", "Not_A Brand";v="24" + + + sec-ch-ua-mobile + ?0 + + + sec-ch-ua-platform + "macOS" + + + Accept-Encoding + gzip, deflate, br, zstd + + + User-Agent + Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 + + + Sec-Fetch-Dest + empty + + + + + + + false + + + + 443 + UTF-8 + /api/v1/site/currentSite + GET + true + false + true + false + false + false + false + 6 + false + 0 + + + + + + Sec-Fetch-Mode + cors + + + Referer + ${scheme}://${host}/dotAdmin/ + + + Sec-Fetch-Site + same-origin + + + Accept-Language + en-US,en;q=0.9 + + + Accept + */* + + + sec-ch-ua + "Google Chrome";v="131", "Chromium";v="131", "Not_A Brand";v="24" + + + sec-ch-ua-mobile + ?0 + + + sec-ch-ua-platform + "macOS" + + + Content-Type + application/json + + + Accept-Encoding + gzip, deflate, br, zstd + + + User-Agent + Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 + + + Sec-Fetch-Dest + empty + + + + + + + false + + + + false + filter + * + = + true + + + false + per_page + 15 + = + true + + + 
false + archive + false + = + true + + + + 443 + UTF-8 + /api/v1/site + GET + true + false + true + false + false + false + false + 6 + false + 0 + + + + + + Sec-Fetch-Mode + cors + + + Referer + ${scheme}://${host}/dotAdmin/ + + + Sec-Fetch-Site + same-origin + + + Accept-Language + en-US,en;q=0.9 + + + Accept + */* + + + sec-ch-ua + "Google Chrome";v="131", "Chromium";v="131", "Not_A Brand";v="24" + + + sec-ch-ua-mobile + ?0 + + + sec-ch-ua-platform + "macOS" + + + Content-Type + application/json + + + Accept-Encoding + gzip, deflate, br, zstd + + + User-Agent + Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 + + + Sec-Fetch-Dest + empty + + + + + + + false + + + + 443 + UTF-8 + /api/v1/notification/getNotifications/offset/0/limit/24 + GET + true + false + true + false + false + false + false + 6 + false + 0 + + + + + + Sec-Fetch-Mode + cors + + + Referer + ${scheme}://${host}/dotAdmin/ + + + Sec-Fetch-Site + same-origin + + + Accept-Language + en-US,en;q=0.9 + + + Accept + */* + + + sec-ch-ua + "Google Chrome";v="131", "Chromium";v="131", "Not_A Brand";v="24" + + + sec-ch-ua-mobile + ?0 + + + sec-ch-ua-platform + "macOS" + + + Content-Type + application/json + + + Accept-Encoding + gzip, deflate, br, zstd + + + User-Agent + Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 + + + Sec-Fetch-Dest + empty + + + + + + + false + + + + 443 + UTF-8 + /api/v1/announcements + GET + true + false + true + false + false + false + false + 6 + false + 0 + + + + + + Sec-Fetch-Mode + cors + + + Referer + ${scheme}://${host}/dotAdmin/ + + + Sec-Fetch-Site + same-origin + + + Accept-Language + en-US,en;q=0.9 + + + Accept + application/json, text/plain, */* + + + sec-ch-ua + "Google Chrome";v="131", "Chromium";v="131", "Not_A Brand";v="24" + + + sec-ch-ua-mobile + ?0 + + + sec-ch-ua-platform + "macOS" + + + Accept-Encoding + gzip, deflate, br, 
zstd + + + User-Agent + Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 + + + Sec-Fetch-Dest + empty + + + + + + + false + + + + false + countLangVars + true + = + true + + + + 443 + UTF-8 + /api/v2/languages + GET + true + false + true + false + false + false + false + 6 + false + 0 + + + + + + Sec-Fetch-Mode + cors + + + Referer + ${scheme}://${host}/dotAdmin/ + + + Sec-Fetch-Site + same-origin + + + Accept-Language + en-US,en;q=0.9 + + + Accept + application/json, text/plain, */* + + + sec-ch-ua + "Google Chrome";v="131", "Chromium";v="131", "Not_A Brand";v="24" + + + sec-ch-ua-mobile + ?0 + + + sec-ch-ua-platform + "macOS" + + + Accept-Encoding + gzip, deflate, br, zstd + + + User-Agent + Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 + + + Sec-Fetch-Dest + empty + + + + + + + false + + + + 443 + UTF-8 + /api/v1/users/current/ + GET + true + false + true + false + false + false + false + 6 + false + 0 + + + + + + Sec-Fetch-Mode + cors + + + Referer + ${scheme}://${host}/dotAdmin/ + + + Sec-Fetch-Site + same-origin + + + Accept-Language + en-US,en;q=0.9 + + + Accept + */* + + + sec-ch-ua + "Google Chrome";v="131", "Chromium";v="131", "Not_A Brand";v="24" + + + sec-ch-ua-mobile + ?0 + + + sec-ch-ua-platform + "macOS" + + + Content-Type + application/json + + + Accept-Encoding + gzip, deflate, br, zstd + + + User-Agent + Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 + + + Sec-Fetch-Dest + empty + + + + + + + false + + + + 443 + UTF-8 + /api/environment/loadenvironments/roleId/e7d4e34e-5127-45fc-8123-d48b62d510e3 + GET + true + false + true + false + false + false + false + 6 + false + 0 + + + + + + Sec-Fetch-Mode + cors + + + Referer + ${scheme}://${host}/dotAdmin/ + + + Sec-Fetch-Site + same-origin + + + Accept-Language + en-US,en;q=0.9 + + + Accept + 
*/* + + + sec-ch-ua + "Google Chrome";v="131", "Chromium";v="131", "Not_A Brand";v="24" + + + sec-ch-ua-mobile + ?0 + + + sec-ch-ua-platform + "macOS" + + + Content-Type + application/json + + + Accept-Encoding + gzip, deflate, br, zstd + + + User-Agent + Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 + + + Sec-Fetch-Dest + empty + + + + + + + true + + + + false + {"query":"+conhost:48190c8c-42c4-46af-8d1a-0cd5db894797 +working:true +(urlmap:* OR basetype:5) +deleted:false ","sort":"modDate DESC","limit":40,"offset":"0"} + = + + + + 443 + UTF-8 + /api/content/_search + POST + true + false + true + false + false + false + false + 6 + false + 0 + + + + + + Sec-Fetch-Mode + cors + + + Referer + ${scheme}://${host}/dotAdmin/ + + + Sec-Fetch-Site + same-origin + + + Accept-Language + en-US,en;q=0.9 + + + Origin + ${scheme}://${host} + + + Accept + */* + + + sec-ch-ua + "Google Chrome";v="131", "Chromium";v="131", "Not_A Brand";v="24" + + + sec-ch-ua-mobile + ?0 + + + sec-ch-ua-platform + "macOS" + + + Content-Type + application/json + + + Accept-Encoding + gzip, deflate, br, zstd + + + User-Agent + Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 + + + Sec-Fetch-Dest + empty + + + + + + + false + + + + 443 + UTF-8 + /api/v1/jvm + GET + true + false + true + false + false + false + false + 6 + false + 0 + + + + + + Sec-Fetch-Mode + cors + + + Referer + ${scheme}://${host}/c/portal/layout?p_l_id=1a87b81c-e7ec-4e5b-9218-b55790353f09&p_p_id=maintenance&p_p_action=0&&dm_rlout=1&r=1732799772984&in_frame=true&frame=detailFrame&container=true&angularCurrentPortlet=maintenance + + + Sec-Fetch-Site + same-origin + + + Accept-Language + en-US,en;q=0.9 + + + Accept + */* + + + sec-ch-ua + "Google Chrome";v="131", "Chromium";v="131", "Not_A Brand";v="24" + + + sec-ch-ua-mobile + ?0 + + + sec-ch-ua-platform + "macOS" + + + Accept-Encoding + gzip, 
deflate, br, zstd + + + User-Agent + Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 + + + Sec-Fetch-Dest + empty + + + + + + JVM_HOSTNAME + environment.HOSTNAME + + all + + + + + false + + saveConfig + + + true + true + true + + true + true + true + true + false + true + true + false + false + false + true + false + false + false + true + 0 + true + true + true + true + true + true + + + summary-report.csv + + + + false + + saveConfig + + + true + true + true + + true + true + true + true + false + true + true + false + false + false + true + false + false + false + true + 0 + true + true + true + true + true + true + + + + + + + false + + saveConfig + + + true + true + true + + true + true + true + true + false + true + true + false + false + false + true + false + false + false + true + 0 + true + true + true + true + true + true + + + resultsb.csv + + + + + groovy + + + true + +// Initialize variables +def sessionId = vars.get("SESSION_ID") + +if (sessionId==null) sessionId = "" + +// Get response headers to extract JSESSIONID from Set-Cookie +def responseHeaders = prev.getResponseHeaders() +def xDotServer="" +def jsessionidResponse=vars.get("SESSION_ID") + +if (responseHeaders != null && !responseHeaders.trim().isEmpty()) { + responseHeaders.eachLine { line -> + if (line.toLowerCase().startsWith("x-dot-server:")) { + xDotServer = line.split(": ", 2)[1]?.trim() + } else if (line.toLowerCase().startsWith("set-cookie:") && line.contains("JSESSIONID=")) { + jsessionidResponse = line.split("JSESSIONID=")[1].split(";")[0]?.trim() + vars.put("SESSION_ID",jsessionidResponse) + } + } + vars.put("X_DOT_SERVER",xDotServer) +} + +log.info("Request: " + prev.getSampleLabel()+": X_DOT_SERVER="+xDotServer+" JSESSIONID="+jsessionidResponse) + + + + true + true + true + + + + + false + + saveConfig + + + true + true + true + + true + true + true + false + false + true + false + false + false + false + true + 
false + false + true + true + 0 + true + true + true + true + + + + + + + 8888 + + (?i).*\.(bmp|css|js|gif|ico|jpe?g|png|swf|eot|otf|ttf|mp4|woff|woff2) + www\.download\.windowsupdate\.com.* + toolbarqueries\.google\..* + clients.*\.google.* + api\.bing\.com.* + (?i).*\.(bmp|css|js|gif|ico|jpe?g|png|swf|eot|otf|ttf|mp4|woff|woff2)[\?;].* + us\.update\.toolbar\.yahoo\.com.* + safebrowsing.*\.google\.com.* + g\.msn.* + .*msg\.yahoo\.com.* + tiles.*\.mozilla\.com.* + sqm\.microsoft\.com.* + geo\.yahoo\.com.* + .*yimg\.com.* + www\.google-analytics\.com.* + http?://self-repair\.mozilla\.org.* + windowsupdate\.microsoft\.com.* + .*detectportal\.firefox\.com.* + .*toolbar\.yahoo\.com.* + .*\.google\.com.*/safebrowsing/.* + toolbar\.google\.com.* + pgq\.yahoo\.com.* + toolbar\.avg\.com/.* + toolbar\.msn\.com.* + + + demo\.dotcms\.com.* + + + true + 4 + false + + false + true + true + false + true + + + false + + 1 + + true + UTF-8 + + + + false + + saveConfig + + + true + true + true + + true + true + true + true + false + true + true + true + false + true + true + false + true + false + true + 0 + true + true + true + true + true + true + + + record.xml + + + + + + From c09a5f718055382318df0a27aabeb065f7b989e6 Mon Sep 17 00:00:00 2001 From: Jonathan Date: Wed, 4 Dec 2024 12:12:53 -0600 Subject: [PATCH 4/7] Issue 30518 analytics tool refactor (#30812) Adding the changes to support the new velocity report format, just by parameters --------- Co-authored-by: Jose Castro --- .../content/ContentAnalyticsQuery.java | 266 +++++++++++++++++ .../analytics/viewtool/AnalyticsTool.java | 103 +++++++ .../content/ContentAnalyticsResource.java | 142 +++++++-- .../analytics/viewtool/AnalyticsToolTest.java | 69 +++++ .../Content_Analytics.postman_collection.json | 281 +++++++++++++----- 5 files changed, 773 insertions(+), 88 deletions(-) create mode 100644 dotCMS/src/main/java/com/dotcms/analytics/content/ContentAnalyticsQuery.java diff --git 
a/dotCMS/src/main/java/com/dotcms/analytics/content/ContentAnalyticsQuery.java b/dotCMS/src/main/java/com/dotcms/analytics/content/ContentAnalyticsQuery.java new file mode 100644 index 000000000000..065ed1f3dabd --- /dev/null +++ b/dotCMS/src/main/java/com/dotcms/analytics/content/ContentAnalyticsQuery.java @@ -0,0 +1,266 @@ +package com.dotcms.analytics.content; + +import com.dotmarketing.util.UtilMethods; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static com.liferay.util.StringPool.COLON; +import static com.liferay.util.StringPool.COMMA; + +/** + * This class represents the parameters of a Content Analytics Query abstracting the complexity + * of the underlying JSON format. The simplified REST Endpoint and the Content Analytics ViewTool + * use this class so that parameters can be entered in a more user-friendly way. 
+ * + * @author Jose Castro + * @since Nov 28th, 2024 + */ +@JsonDeserialize(builder = ContentAnalyticsQuery.Builder.class) +public class ContentAnalyticsQuery implements Serializable { + + public static final String MEASURES_ATTR = "measures"; + public static final String DIMENSIONS_ATTR = "dimensions"; + public static final String TIME_DIMENSIONS_ATTR = "timeDimensions"; + public static final String FILTERS_ATTR = "filters"; + public static final String ORDER_ATTR = "order"; + public static final String LIMIT_ATTR = "limit"; + public static final String OFFSET_ATTR = "offset"; + public static final String GRANULARITY_ATTR = "granularity"; + public static final String DATE_RANGE_ATTR = "dateRange"; + public static final String MEMBER_ATTR = "member"; + public static final String OPERATOR_ATTR = "operator"; + public static final String VALUES_ATTR = "values"; + + @JsonProperty() + private final Set measures; + @JsonProperty() + private final Set dimensions; + @JsonProperty() + private final List> timeDimensions; + @JsonProperty() + private final List> filters; + @JsonProperty() + private final List order; + @JsonProperty() + private final int limit; + @JsonProperty() + private final int offset; + + private static final String SEPARATOR = COLON; + + private ContentAnalyticsQuery(final Builder builder) { + this.measures = builder.measures; + this.dimensions = builder.dimensions; + this.timeDimensions = builder.timeDimensions; + this.filters = builder.filters; + this.order = builder.order; + this.limit = builder.limit; + this.offset = builder.offset; + } + + public Set measures() { + return this.measures; + } + + public Set dimensions() { + return this.dimensions; + } + + public List> timeDimensions() { + return this.timeDimensions; + } + + public List> filters() { + return this.filters; + } + + public List order() { + return this.order; + } + + public int limit() { + return this.limit; + } + + public int offset() { + return this.offset; + } + + public static 
ContentAnalyticsQuery.Builder builder() { + return new Builder(); + } + + @Override + public String toString() { + return "ContentAnalyticsQuery{" + + "measures='" + measures + '\'' + + ", dimensions='" + dimensions + '\'' + + ", timeDimensions='" + timeDimensions + '\'' + + ", filters='" + filters + '\'' + + ", order='" + order + '\'' + + ", limit='" + limit + '\'' + + ", offset='" + offset + '\'' + + '}'; + } + + /** + * This builder creates the appropriate data structures that match the JSON format of the final + * CubeJS query. + */ + public static class Builder { + + private Set measures; + private Set dimensions; + private final List> timeDimensions = new ArrayList<>(); + private final List> filters = new ArrayList<>(); + private final List order = new ArrayList<>(); + private int limit = 1000; + private int offset = 0; + + /** + * The measures parameter contains a set of measures and each measure is an aggregation over + * a certain column in your ClickHouse database table. + * + * @param measures A string with the measures separated by a space. + * + * @return The builder instance. + */ + public Builder measures(final String measures) { + this.measures = Set.of(measures.split("\\s+")); + return this; + } + + /** + * The dimensions property contains a set of dimensions. You can think about a dimension as + * an attribute related to a measure, e.g. the measure user_count can have dimensions like + * country, age, occupation, etc. + * + * @param dimensions A string with the dimensions separated by a space. + * + * @return The builder instance. + */ + public Builder dimensions(final String dimensions) { + this.dimensions = Set.of(dimensions.split("\\s+")); + return this; + } + + /** + * Time dimensions provide a convenient way to specify a time dimension with a filter. It is + * an array of objects in timeDimension format. If no date range is provided, the default + * value will be "Last week". 
+ * + * @param timeDimensions A string with the time dimensions separated by a colon. + * + * @return The builder instance. + */ + public Builder timeDimensions(final String timeDimensions) { + if (UtilMethods.isNotSet(timeDimensions)) { + return this; + } + final String[] timeParams = timeDimensions.split(SEPARATOR); + final Map timeDimensionsData = new HashMap<>(); + timeDimensionsData.put(DIMENSIONS_ATTR, timeParams[0]); + if (timeParams.length > 2) { + timeDimensionsData.put(GRANULARITY_ATTR, timeParams[1]); + timeDimensionsData.put(DATE_RANGE_ATTR, timeParams[2]); + } else if (timeParams.length > 1) { + timeDimensionsData.put(DATE_RANGE_ATTR, timeParams[1]); + } else { + timeDimensionsData.put(DATE_RANGE_ATTR, "Last week"); + } + this.timeDimensions.add(timeDimensionsData); + return this; + } + + /** + * Filters are applied differently to dimensions and measures. When you filter on a + * dimension, you are restricting the raw data before any calculations are made. When you + * filter on a measure, you are restricting the results after the measure has been + * calculated. They are composed of: member, operator, and values. + * + * @param filters A string with the filters separated by a colon. + * + * @return The builder instance. + */ + public Builder filters(final String filters) { + if (UtilMethods.isNotSet(filters)) { + return this; + } + final String[] filterArr = filters.split(SEPARATOR); + for (final String filter : filterArr) { + final String[] filterParams = filter.split("\\s+"); + final Map filterDataMap = new HashMap<>(); + filterDataMap.put(MEMBER_ATTR, filterParams[0]); + filterDataMap.put(OPERATOR_ATTR, filterParams[1]); + final String[] filterValues = filterParams[2].split(COMMA); + filterDataMap.put(VALUES_ATTR, filterValues); + this.filters.add(filterDataMap); + } + return this; + } + + /** + * This is an object where the keys are measures or dimensions to order by and their + * corresponding values are either asc or desc. 
The order of the fields to order on is based + * on the order of the keys in the object. If not provided, default ordering is applied. If + * an empty object ([]) is provided, no ordering is applied. + * + * @param order A string with the order separated by a colon. + * + * @return The builder instance. + */ + public Builder order(final String order) { + if (UtilMethods.isNotSet(order)) { + return this; + } + final Set orderCriteria = Set.of(order.split(SEPARATOR)); + for (final String orderCriterion : orderCriteria) { + final String[] orderParams = orderCriterion.split("\\s+"); + this.order.add(orderParams); + } + return this; + } + + /** + * A row limit for your query. + * + * @param limit The number of rows to limit the query. The default value is 1000. + * + * @return The builder instance. + */ + public Builder limit(final int limit) { + this.limit = limit; + return this; + } + + /** + * The number of initial rows to be skipped for your query. The default value is 0. + * + * @param offset The number of rows to skip. + * + * @return The builder instance. + */ + public Builder offset(final int offset) { + this.offset = offset; + return this; + } + + /** + * This method builds the ContentAnalyticsQuery object. + * + * @return The ContentAnalyticsQuery object. 
+ */ + public ContentAnalyticsQuery build() { + return new ContentAnalyticsQuery(this); + } + + } + +} diff --git a/dotCMS/src/main/java/com/dotcms/analytics/viewtool/AnalyticsTool.java b/dotCMS/src/main/java/com/dotcms/analytics/viewtool/AnalyticsTool.java index dcad56d98c98..8cc0aa6fa455 100644 --- a/dotCMS/src/main/java/com/dotcms/analytics/viewtool/AnalyticsTool.java +++ b/dotCMS/src/main/java/com/dotcms/analytics/viewtool/AnalyticsTool.java @@ -1,26 +1,35 @@ package com.dotcms.analytics.viewtool; import com.dotcms.analytics.content.ContentAnalyticsAPI; +import com.dotcms.analytics.content.ContentAnalyticsQuery; import com.dotcms.analytics.content.ReportResponse; import com.dotcms.analytics.query.AnalyticsQuery; import com.dotcms.analytics.query.AnalyticsQueryParser; import com.dotcms.cdi.CDIUtils; import com.dotcms.cube.CubeJSQuery; import com.dotcms.rest.api.v1.DotObjectMapperProvider; +import com.dotcms.util.JsonUtil; import com.dotmarketing.business.web.UserWebAPI; import com.dotmarketing.business.web.WebAPILocator; import com.dotmarketing.exception.DotRuntimeException; import com.dotmarketing.util.Logger; +import com.dotmarketing.util.UtilMethods; import com.liferay.portal.model.User; import org.apache.velocity.tools.view.context.ViewContext; import org.apache.velocity.tools.view.tools.ViewTool; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpSession; +import java.util.HashMap; import java.util.Map; import java.util.Objects; import java.util.Optional; +import static com.dotcms.analytics.content.ContentAnalyticsQuery.DIMENSIONS_ATTR; +import static com.dotcms.analytics.content.ContentAnalyticsQuery.MEASURES_ATTR; +import static com.dotcms.analytics.content.ContentAnalyticsQuery.TIME_DIMENSIONS_ATTR; +import static com.dotcms.util.DotPreconditions.checkArgument; + /** * This class is a ViewTool that can be used to access the analytics data. 
* @author jsanca @@ -99,6 +108,100 @@ public ReportResponse runReportFromJson(final String query) { return contentAnalyticsAPI.runReport(this.analyticsQueryParser.parseJsonToQuery(query), user); } + /** + * Runs an analytics report based on a set of parameters + * example: + * + * $analytics.runReport('Events.count Events.uniqueCount', + * 'Events.referer Events.experiment', 'Events.day day', + * 'Events.variant = [B] : Events.experiments = [B]', 'Events.day ASC', 100, 0) + * + * @param measures String + * example: 'Events.count Events.uniqueCount' + * @param dimensions String + * example: 'Events.referer Events.experiment' + * @param timeDimensions String + * example: 'Events.day day' + * @param filters String + * example: 'Events.variant = [B] : Events.experiments = [B]' + * @param order String + * example: 'Events.day ASC' + * @param limit int + * example: 100 + * @param offset int + * example: 0 + * @return ReportResponse + */ + public ReportResponse runReport(final String measures, final String dimensions, + final String timeDimensions, final String filters, final String order, + final int limit, final int offset) { + + checkArgument(!(UtilMethods.isNotSet(measures) + && UtilMethods.isNotSet(dimensions) + && UtilMethods.isNotSet(timeDimensions)), + IllegalArgumentException.class, + "Query should contain either measures, dimensions or timeDimensions with granularities in order to be valid"); + + final ContentAnalyticsQuery.Builder builder = new ContentAnalyticsQuery.Builder(); + + if (Objects.nonNull(dimensions)) { + builder.dimensions(dimensions); + } + + if (Objects.nonNull(measures)) { + builder.measures(measures); + } + + if (Objects.nonNull(filters)) { + builder.filters(filters); + } + + if (Objects.nonNull(order)) { + builder.order(order); + } + + if (Objects.nonNull(timeDimensions)) { + builder.timeDimensions(timeDimensions); + } + + if (limit > 0) { + builder.limit(limit); + } + if (offset >= 0) { + builder.offset(offset); + } + + final 
ContentAnalyticsQuery contentAnalyticsQuery = builder.build(); + + Logger.debug(this, () -> "Running report from query: " + contentAnalyticsQuery.toString()); + + final Map queryMap = new HashMap<>(); + if (UtilMethods.isSet(contentAnalyticsQuery.measures())) { + queryMap.put("measures", contentAnalyticsQuery.measures()); + } + if (UtilMethods.isSet(contentAnalyticsQuery.dimensions())) { + queryMap.put("dimensions", contentAnalyticsQuery.dimensions()); + } + if (UtilMethods.isSet(contentAnalyticsQuery.timeDimensions())) { + queryMap.put("timeDimensions", contentAnalyticsQuery.timeDimensions()); + } + if (UtilMethods.isSet(contentAnalyticsQuery.filters())) { + queryMap.put("filters", contentAnalyticsQuery.filters()); + } + if (UtilMethods.isSet(contentAnalyticsQuery.order())) { + queryMap.put("order", contentAnalyticsQuery.order()); + } + queryMap.put("limit", contentAnalyticsQuery.limit()); + queryMap.put("offset", contentAnalyticsQuery.offset()); + + final String cubeJsQuery = JsonUtil.getJsonStringFromObject(queryMap); + + final ReportResponse reportResponse = this.contentAnalyticsAPI.runRawReport(cubeJsQuery, + user); + + return reportResponse; + } + /** * Runs an analytics report based on Map query. 
* example: diff --git a/dotCMS/src/main/java/com/dotcms/rest/api/v1/analytics/content/ContentAnalyticsResource.java b/dotCMS/src/main/java/com/dotcms/rest/api/v1/analytics/content/ContentAnalyticsResource.java index 16cea9eaafc9..fbdaeedbd22d 100644 --- a/dotCMS/src/main/java/com/dotcms/rest/api/v1/analytics/content/ContentAnalyticsResource.java +++ b/dotCMS/src/main/java/com/dotcms/rest/api/v1/analytics/content/ContentAnalyticsResource.java @@ -1,10 +1,12 @@ package com.dotcms.rest.api.v1.analytics.content; import com.dotcms.analytics.content.ContentAnalyticsAPI; +import com.dotcms.analytics.content.ContentAnalyticsQuery; import com.dotcms.analytics.content.ReportResponse; import com.dotcms.analytics.model.ResultSetItem; import com.dotcms.analytics.track.collectors.Collector; import com.dotcms.analytics.track.collectors.EventSource; +import com.dotcms.analytics.track.collectors.EventType; import com.dotcms.analytics.track.collectors.WebEventsCollectorServiceFactory; import com.dotcms.analytics.track.matchers.FilesRequestMatcher; import com.dotcms.analytics.track.matchers.PagesAndUrlMapsRequestMatcher; @@ -17,13 +19,15 @@ import com.dotcms.rest.ResponseEntityStringView; import com.dotcms.rest.WebResource; import com.dotcms.rest.annotation.NoCache; -import com.dotcms.util.DotPreconditions; +import com.dotcms.util.JsonUtil; import com.dotmarketing.beans.Host; import com.dotmarketing.business.web.WebAPILocator; import com.dotmarketing.exception.DotSecurityException; import com.dotmarketing.util.Config; import com.dotmarketing.util.Logger; import com.dotmarketing.util.UUIDUtil; +import com.dotmarketing.util.UtilMethods; +import com.fasterxml.jackson.core.JsonProcessingException; import com.google.common.annotations.VisibleForTesting; import com.liferay.portal.model.User; import io.swagger.v3.oas.annotations.Operation; @@ -31,25 +35,35 @@ import io.swagger.v3.oas.annotations.media.ExampleObject; import io.swagger.v3.oas.annotations.responses.ApiResponse; import 
io.swagger.v3.oas.annotations.tags.Tag; -import java.io.Serializable; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; -import java.util.function.Supplier; -import java.util.stream.Collectors; +import io.vavr.Lazy; +import org.glassfish.jersey.server.JSONP; + import javax.inject.Inject; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.ws.rs.Consumes; import javax.ws.rs.POST; import javax.ws.rs.Path; +import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; +import java.io.Serializable; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; +import java.util.function.Supplier; +import java.util.stream.Collectors; -import io.vavr.Lazy; -import org.glassfish.jersey.server.JSONP; -import com.dotcms.analytics.track.collectors.EventType; +import static com.dotcms.analytics.content.ContentAnalyticsQuery.DIMENSIONS_ATTR; +import static com.dotcms.analytics.content.ContentAnalyticsQuery.FILTERS_ATTR; +import static com.dotcms.analytics.content.ContentAnalyticsQuery.LIMIT_ATTR; +import static com.dotcms.analytics.content.ContentAnalyticsQuery.MEASURES_ATTR; +import static com.dotcms.analytics.content.ContentAnalyticsQuery.OFFSET_ATTR; +import static com.dotcms.analytics.content.ContentAnalyticsQuery.ORDER_ATTR; +import static com.dotcms.analytics.content.ContentAnalyticsQuery.TIME_DIMENSIONS_ATTR; +import static com.dotcms.util.DotPreconditions.checkArgument; +import static com.dotcms.util.DotPreconditions.checkNotNull; /** * Resource class that exposes endpoints to query content analytics data. 
@@ -63,7 +77,7 @@ @Path("/v1/analytics/content") @Tag(name = "Content Analytics", - description = "Endpoints that exposes information related to how dotCMS content is accessed and interacted with by users.") + description = "This REST Endpoint exposes information related to how dotCMS content is accessed and interacted with by users.") public class ContentAnalyticsResource { private static final UserCustomDefinedRequestMatcher USER_CUSTOM_DEFINED_REQUEST_MATCHER = new UserCustomDefinedRequestMatcher(); @@ -102,7 +116,9 @@ public ContentAnalyticsResource(final WebResource webResource, @Operation( operationId = "postContentAnalyticsQuery", summary = "Retrieve Content Analytics data", - description = "Returns information of specific dotCMS objects whose health and engagement data is tracked.", + description = "Returns information of specific dotCMS objects whose health and " + + "engagement data is tracked. This method takes a specific less verbose JSON " + + "format to query the data.", tags = {"Content Analytics"}, responses = { @ApiResponse(responseCode = "200", description = "Content Analytics data being queried", @@ -151,7 +167,7 @@ public ReportResponseEntityView query(@Context final HttpServletRequest request, .init(); final User user = initDataObject.getUser(); - DotPreconditions.checkNotNull(queryForm, IllegalArgumentException.class, "The 'query' JSON data cannot be null"); + checkNotNull(queryForm, IllegalArgumentException.class, "The 'query' JSON data cannot be null"); Logger.debug(this, () -> "Querying content analytics data with the form: " + queryForm); final ReportResponse reportResponse = this.contentAnalyticsAPI.runReport(queryForm.getQuery(), user); @@ -169,10 +185,12 @@ public ReportResponseEntityView query(@Context final HttpServletRequest request, @Operation( operationId = "postContentAnalyticsQuery", summary = "Retrieve Content Analytics data", - description = "Returns information of specific dotCMS objects whose health and engagement data is 
tracked.", + description = "Returns information of specific dotCMS objects whose health and " + + "engagement data is tracked, using a CubeJS JSON query.", tags = {"Content Analytics"}, responses = { - @ApiResponse(responseCode = "200", description = "Content Analytics data being queried", + @ApiResponse(responseCode = "200", description = "Content Analytics data " + + "being queried", content = @Content(mediaType = "application/json", examples = { @ExampleObject( @@ -187,7 +205,7 @@ public ReportResponseEntityView query(@Context final HttpServletRequest request, ) ), @ApiResponse(responseCode = "400", description = "Bad Request"), - @ApiResponse(responseCode = "403", description = "Forbidden"), + @ApiResponse(responseCode = "401", description = "Unauthorized"), @ApiResponse(responseCode = "500", description = "Internal Server Error") } ) @@ -208,13 +226,101 @@ public ReportResponseEntityView queryCubeJs(@Context final HttpServletRequest re .init(); final User user = initDataObject.getUser(); - DotPreconditions.checkNotNull(cubeJsQueryJson, IllegalArgumentException.class, "The 'query' JSON data cannot be null"); + checkNotNull(cubeJsQueryJson, IllegalArgumentException.class, "The 'query' JSON data cannot be null"); Logger.debug(this, ()->"Querying content analytics data with the cube query json: " + cubeJsQueryJson); final ReportResponse reportResponse = this.contentAnalyticsAPI.runRawReport(cubeJsQueryJson, user); return new ReportResponseEntityView(reportResponse.getResults().stream().map(ResultSetItem::getAll).collect(Collectors.toList())); } + + /** + * Returns information of specific dotCMS objects whose health and engagement data is tracked, + * using Path Parameters instead of a CubeJS JSON query. This helps abstract the complexity of + * the underlying JSON format for users that need an easier way to query for specific data. + * + * @param request The current instance of the {@link HttpServletRequest} object. 
+ * @param response The current instance of the {@link HttpServletResponse} object. + * @param params The query parameters provided in the URL path. + * + * @return The request information from the Content Analytics server. + */ + @Operation( + operationId = "postContentAnalyticsQuery", + summary = "Retrieve Content Analytics data", + description = "Returns information of specific dotCMS objects whose health and " + + "engagement data is tracked, using Path Parameters instead of a CubeJS JSON " + + "query. This helps abstract the complexity of the underlying JSON format for " + + "users that need an easier way to query for specific data.", + tags = {"Content Analytics"}, + responses = { + @ApiResponse(responseCode = "200", description = "Content Analytics data " + + "being queried", + content = @Content(mediaType = "application/json", + examples = { + @ExampleObject( + value = "http://localhost:8080/api/v1" + + "/analytics/content/query/measures" + + "/request.count request" + + ".totalSessions/dimensions/request" + + ".host request.whatAmI request" + + ".url/timeDimensions/request" + + ".createdAt:day:This " + + "month/filters/request.totalRequest " + + "gt 0:request.whatAmI contains PAGE," + + "FILE/order/request.count asc:request" + + ".createdAt asc/limit/5/offset/0" + ) + } + ) + ), + @ApiResponse(responseCode = "400", description = "Bad Request"), + @ApiResponse(responseCode = "401", description = "Unauthorized"), + @ApiResponse(responseCode = "500", description = "Internal Server Error") + } + ) + @POST + @Path("/query/{params:.*}") + @JSONP + @NoCache + @Consumes(MediaType.APPLICATION_JSON) + @Produces({MediaType.APPLICATION_JSON, "application/javascript"}) + public ReportResponseEntityView query(@Context final HttpServletRequest request, + @Context final HttpServletResponse response, + final @PathParam("params") String params) throws JsonProcessingException { + final InitDataObject initDataObject = new WebResource.InitBuilder(this.webResource) + 
.requestAndResponse(request, response) + .params(params) + .requiredBackendUser(true) + .rejectWhenNoUser(true) + .init(); + final User user = initDataObject.getUser(); + final Map paramsMap = initDataObject.getParamsMap(); + Logger.debug(this, () -> "Querying content analytics data with the following parameters: " + paramsMap); + checkArgument(!(UtilMethods.isNotSet(paramsMap.get(MEASURES_ATTR)) + && UtilMethods.isNotSet(paramsMap.get(DIMENSIONS_ATTR)) + && UtilMethods.isNotSet(paramsMap.get(TIME_DIMENSIONS_ATTR.toLowerCase()))), + IllegalArgumentException.class, "Query should contain either measures, dimensions or timeDimensions with granularities in order to be valid"); + final ContentAnalyticsQuery.Builder builder = new ContentAnalyticsQuery.Builder() + .measures(paramsMap.get(MEASURES_ATTR)) + .dimensions(paramsMap.get(DIMENSIONS_ATTR)) + .timeDimensions(paramsMap.get(TIME_DIMENSIONS_ATTR.toLowerCase())) + .filters(paramsMap.get(FILTERS_ATTR)) + .order(paramsMap.get(ORDER_ATTR)); + if (paramsMap.containsKey(LIMIT_ATTR)) { + builder.limit(Integer.parseInt(paramsMap.get(LIMIT_ATTR))); + } + if (paramsMap.containsKey(OFFSET_ATTR)) { + builder.offset(Integer.parseInt(paramsMap.get(OFFSET_ATTR))); + } + final ContentAnalyticsQuery contentAnalyticsQuery = builder.build(); + final String cubeJsQuery = JsonUtil.getJsonStringFromObject(contentAnalyticsQuery); + Logger.debug(this, ()-> "Generated query: " + cubeJsQuery); + final ReportResponse reportResponse = this.contentAnalyticsAPI.runRawReport(cubeJsQuery, user); + return new ReportResponseEntityView(reportResponse.getResults() + .stream().map(ResultSetItem::getAll).collect(Collectors.toList())); + } + /** * Fire an user custom event. 
* @@ -253,7 +359,7 @@ public ResponseEntityStringView fireUserCustomEvent(@Context final HttpServletRe @Context final HttpServletResponse response, final Map userEventPayload) throws DotSecurityException { - DotPreconditions.checkNotNull(userEventPayload, IllegalArgumentException.class, "The 'userEventPayload' JSON cannot be null"); + checkNotNull(userEventPayload, IllegalArgumentException.class, "The 'userEventPayload' JSON cannot be null"); if (userEventPayload.containsKey(Collector.EVENT_SOURCE)) { throw new IllegalArgumentException("The 'event_source' field is reserved and cannot be used"); } diff --git a/dotCMS/src/test/java/com/dotcms/analytics/viewtool/AnalyticsToolTest.java b/dotCMS/src/test/java/com/dotcms/analytics/viewtool/AnalyticsToolTest.java index ef534e71e505..8b3d8d2ec71b 100644 --- a/dotCMS/src/test/java/com/dotcms/analytics/viewtool/AnalyticsToolTest.java +++ b/dotCMS/src/test/java/com/dotcms/analytics/viewtool/AnalyticsToolTest.java @@ -121,6 +121,75 @@ public void test_run_report_from_json_good_json() { Assert.assertNotNull(reportResponse); } + /** + * Method to test: {@link AnalyticsTool#runReport(String, String, String, String, String, int, int)} + * Given Scenario: Sending good parameter + * ExpectedResult: Should a non null ReportResponse + */ + @Test() + public void test_run_report_good_case() { + + final HttpServletRequest request = Mockito.mock(HttpServletRequest.class); + final HttpSession session = Mockito.mock(HttpSession.class); + final ContentAnalyticsAPI contentAnalyticsAPI = Mockito.mock(ContentAnalyticsAPI.class); + final AnalyticsQueryParser analyticsQueryParser = new AnalyticsQueryParser(); + final UserWebAPI userWebAPI = Mockito.mock(UserWebAPI.class); + final ViewContext viewContext = Mockito.mock(ViewContext.class); + final AnalyticsTool analyticsTool = new AnalyticsTool(contentAnalyticsAPI, + analyticsQueryParser, userWebAPI); + final User user = new User(); + + Mockito.when(viewContext.getRequest()).thenReturn(request); + 
Mockito.when(request.getSession(false)).thenReturn(session); + Mockito.when(userWebAPI.getLoggedInUser(request)).thenReturn(user); + Mockito.when(contentAnalyticsAPI.runRawReport(Mockito.any(String.class), Mockito.eq(user))).thenReturn(new ReportResponse(List.of())); + + analyticsTool.init(viewContext); + final ReportResponse reportResponse = analyticsTool.runReport( + "Events.count Events.uniqueCount", + "Events.referer Events.experiment", + "Events.day day", + "Events.variant = ['B'] or Events.experiments = ['B']", + "Events.day ASC", + 100, 1); + + Assert.assertNotNull(reportResponse); + } + + /** + * Method to test: {@link AnalyticsTool#runReport(String, String, String, String, String, int, int)} + * Given Scenario: Sending bad parameters, missing measures and dimensions + * ExpectedResult: throws an IllegalArgumentException + */ + @Test(expected = IllegalArgumentException.class) + public void test_run_report_illegal_params_case() { + + final HttpServletRequest request = Mockito.mock(HttpServletRequest.class); + final HttpSession session = Mockito.mock(HttpSession.class); + final ContentAnalyticsAPI contentAnalyticsAPI = Mockito.mock(ContentAnalyticsAPI.class); + final AnalyticsQueryParser analyticsQueryParser = new AnalyticsQueryParser(); + final UserWebAPI userWebAPI = Mockito.mock(UserWebAPI.class); + final ViewContext viewContext = Mockito.mock(ViewContext.class); + final AnalyticsTool analyticsTool = new AnalyticsTool(contentAnalyticsAPI, + analyticsQueryParser, userWebAPI); + final User user = new User(); + + Mockito.when(viewContext.getRequest()).thenReturn(request); + Mockito.when(request.getSession(false)).thenReturn(session); + Mockito.when(userWebAPI.getLoggedInUser(request)).thenReturn(user); + Mockito.when(contentAnalyticsAPI.runRawReport(Mockito.any(String.class), Mockito.eq(user))).thenReturn(new ReportResponse(List.of())); + + analyticsTool.init(viewContext); + final ReportResponse reportResponse = analyticsTool.runReport( + null, + null, + 
null, + "Events.variant = ['B'] or Events.experiments = ['B']", + "Events.day ASC", + 100, 1); + + } + /** * Method to test: {@link AnalyticsTool#runReportFromMap(Map)} * Given Scenario: Sending a null map diff --git a/dotcms-postman/src/main/resources/postman/Content_Analytics.postman_collection.json b/dotcms-postman/src/main/resources/postman/Content_Analytics.postman_collection.json index c9fd53d7dd8f..0c56b2927967 100644 --- a/dotcms-postman/src/main/resources/postman/Content_Analytics.postman_collection.json +++ b/dotcms-postman/src/main/resources/postman/Content_Analytics.postman_collection.json @@ -1,95 +1,236 @@ { "info": { - "_postman_id": "b0022550-05d2-4e69-8dec-2c9c743f6119", + "_postman_id": "10c05583-df4b-45e1-8794-0a1721e168f6", "name": "Content Analytics", "description": "Performs simple data validation for the Content Analytics REST Endpoint. It's very important to notice that, for the time being, the CICD instance does not start up any of the additional third-party tools required to actually run the Content Analytics feature.\n\nThis means that these test do not deal with retrieveing or saving data at all. 
It verifies that important/required information is present.", "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json", - "_exporter_id": "781456" + "_exporter_id": "5403727" }, "item": [ { "name": "Data Query", "item": [ { - "name": "No User Authentication", - "event": [ + "name": "Using Path Parameters", + "item": [ { - "listen": "test", - "script": { - "exec": [ - "pm.test(\"HTTP Status code must be unauthorized\", function () {", - " pm.response.to.have.status(401);", - "});", - "" - ], - "type": "text/javascript", - "packages": {} - } - } - ], - "request": { - "auth": { - "type": "noauth" + "name": "No User Authentication", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "pm.test(\"HTTP Status code must be unauthorized\", function () {", + " pm.response.to.have.status(401);", + "});", + "" + ], + "type": "text/javascript", + "packages": {} + } + } + ], + "request": { + "auth": { + "type": "noauth" + }, + "method": "POST", + "header": [], + "body": { + "mode": "raw", + "raw": "", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "{{serverURL}}/api/v1/analytics/content/query/measures/request.count request.totalSessions/dimensions/request.host request.whatAmI request.url/timeDimensions/request.createdAt:day:This month/filters/request.totalRequest gt 0:request.whatAmI contains PAGE,FILE/order/request.count asc:request.createdAt asc/limit/5/offset/0", + "host": [ + "{{serverURL}}" + ], + "path": [ + "api", + "v1", + "analytics", + "content", + "query", + "measures", + "request.count request.totalSessions", + "dimensions", + "request.host request.whatAmI request.url", + "timeDimensions", + "request.createdAt:day:This month", + "filters", + "request.totalRequest gt 0:request.whatAmI contains PAGE,FILE", + "order", + "request.count asc:request.createdAt asc", + "limit", + "5", + "offset", + "0" + ] + } + }, + "response": [] }, - "method": "POST", - "header": [], - "url": { - "raw": 
"{{serverURL}}/api/v1/analytics/content/_query", - "host": [ - "{{serverURL}}" + { + "name": "Missing Required Parameters", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "pm.test(\"HTTP Status code must be Bad Request\", function () {", + " pm.response.to.have.status(400);", + "});", + "", + "pm.test(\"Check that minimum required parameters are NOT present\", function () {", + " pm.expect(pm.response.json().message).to.equal(\"Query should contain either measures, dimensions or timeDimensions with granularities in order to be valid\", \"This is NOT the expected error message\");", + "});", + "" + ], + "type": "text/javascript", + "packages": {} + } + } ], - "path": [ - "api", - "v1", - "analytics", - "content", - "_query" - ] + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{jwt}}", + "type": "string" + } + ] + }, + "method": "POST", + "header": [], + "body": { + "mode": "raw", + "raw": "", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "{{serverURL}}/api/v1/analytics/content/query/filters/request.totalRequest gt 0:request.whatAmI contains PAGE,FILE/order/request.count asc:request.createdAt asc/limit/5/offset/0", + "host": [ + "{{serverURL}}" + ], + "path": [ + "api", + "v1", + "analytics", + "content", + "query", + "filters", + "request.totalRequest gt 0:request.whatAmI contains PAGE,FILE", + "order", + "request.count asc:request.createdAt asc", + "limit", + "5", + "offset", + "0" + ] + }, + "description": "As the error message states, the CubeJS Query should contain either measures, dimensions or timeDimensions with granularities in order to be valid." 
+ }, + "response": [] } - }, - "response": [] + ], + "description": "This test group verifies that the Endpoint that receives plain String parameters for the CubeJS query works as expected.\n\nParameter has specfic formatting and separator characters that allow Users a more dynamic interaction with the Content Analitycs service:\n\n- Measures: Values are separated by blank spaces: `/measures/request.count request.totalSessions`\n \n- Dimensions: Values are separated by blank spaces: `/dimensions/request.host request.whatAmI request.url`\n \n- Time Dimensions: Values are separated by colons: `/timeDimensions/request.createdAt:day:This month` . The second parameter 'day' -- the \"granularity\" parameter -- is optional.\n \n- Filters: Values are separated by colons: `/filters/request.totalRequest gt 0:request.whatAmI contains PAGE,FILE` . In this case, you're filtering by the number of requests, and the type of object being queried: Pages and Files.\n \n- Order: Values are separated by colon: `/order/request.count asc:request.createdAt asc`\n \n- Limit: Value is provided as is: `/limit/50`\n \n- Offset: Value is provided as is: `/ffset/0`" }, { - "name": "No Query Form", - "event": [ + "name": "Using the JSON query", + "item": [ { - "listen": "test", - "script": { - "exec": [ - "" - ], - "type": "text/javascript", - "packages": {} - } - } - ], - "request": { - "method": "POST", - "header": [], - "body": { - "mode": "raw", - "raw": "{\n \"query\": {}\n}\n", - "options": { - "raw": { - "language": "json" + "name": "No User Authentication", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "pm.test(\"HTTP Status code must be unauthorized\", function () {", + " pm.response.to.have.status(401);", + "});", + "" + ], + "type": "text/javascript", + "packages": {} + } } - } + ], + "request": { + "auth": { + "type": "noauth" + }, + "method": "POST", + "header": [], + "url": { + "raw": "{{serverURL}}/api/v1/analytics/content/_query", + "host": [ + "{{serverURL}}" 
+ ], + "path": [ + "api", + "v1", + "analytics", + "content", + "_query" + ] + } + }, + "response": [] }, - "url": { - "raw": "{{serverURL}}/api/v1/analytics/content/_query", - "host": [ - "{{serverURL}}" + { + "name": "No Query Form", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "" + ], + "type": "text/javascript", + "packages": {} + } + } ], - "path": [ - "api", - "v1", - "analytics", - "content", - "_query" - ] + "request": { + "method": "POST", + "header": [], + "body": { + "mode": "raw", + "raw": "{\n \"query\": {}\n}\n", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "{{serverURL}}/api/v1/analytics/content/_query", + "host": [ + "{{serverURL}}" + ], + "path": [ + "api", + "v1", + "analytics", + "content", + "_query" + ] + } + }, + "response": [] } - }, - "response": [] + ] } ] }, From 4b18b488191e0a0677c95641e8275de5943f49f9 Mon Sep 17 00:00:00 2001 From: Jonathan Gamba Date: Wed, 4 Dec 2024 14:49:22 -0600 Subject: [PATCH 5/7] #30367 Refactor job system and enhance SSE monitoring. (#30816) Removed obsolete job events, streamlined job state management by introducing more precise states such as `FAILED_PERMANENTLY` and `ABANDONED_PERMANENTLY`. Replaced job completion terminology and refined method signatures and naming conventions to reinforce consistency. Enhanced Server-Sent Events (SSE) monitoring with a dedicated utility class for improved performance and error handling. 
--- .../jobs/business/api/JobQueueManagerAPI.java | 39 +- .../business/api/JobQueueManagerAPIImpl.java | 105 +++-- .../business/api/events/JobCanceledEvent.java | 38 -- .../api/events/JobRemovedFromQueueEvent.java | 38 -- .../api/events/RealTimeJobMonitor.java | 184 ++++---- .../dotcms/jobs/business/job/AbstractJob.java | 38 +- .../dotcms/jobs/business/job/JobState.java | 14 +- .../impl/ImportContentletsProcessor.java | 24 +- .../AbstractJobStateQueryParameters.java | 76 ++++ .../dotcms/jobs/business/queue/JobQueue.java | 30 +- .../jobs/business/queue/PostgresJobQueue.java | 401 ++++++++++++------ .../rest/api/v1/job/JobQueueHelper.java | 104 +++-- .../rest/api/v1/job/JobQueueResource.java | 215 +++++----- .../rest/api/v1/job/SSEConnectionManager.java | 270 ------------ .../rest/api/v1/job/SSEMonitorUtil.java | 234 ++++++++++ .../java/com/dotmarketing/util/FileUtil.java | 23 +- .../JobQueueManagerAPIIntegrationTest.java | 169 ++++++-- .../business/api/JobQueueManagerAPITest.java | 48 ++- .../PostgresJobQueueIntegrationTest.java | 93 +++- ...ueResourceAPITests.postman_collection.json | 104 ++++- 20 files changed, 1380 insertions(+), 867 deletions(-) delete mode 100644 dotCMS/src/main/java/com/dotcms/jobs/business/api/events/JobCanceledEvent.java delete mode 100644 dotCMS/src/main/java/com/dotcms/jobs/business/api/events/JobRemovedFromQueueEvent.java create mode 100644 dotCMS/src/main/java/com/dotcms/jobs/business/queue/AbstractJobStateQueryParameters.java delete mode 100644 dotCMS/src/main/java/com/dotcms/rest/api/v1/job/SSEConnectionManager.java create mode 100644 dotCMS/src/main/java/com/dotcms/rest/api/v1/job/SSEMonitorUtil.java diff --git a/dotCMS/src/main/java/com/dotcms/jobs/business/api/JobQueueManagerAPI.java b/dotCMS/src/main/java/com/dotcms/jobs/business/api/JobQueueManagerAPI.java index 8ada82da14c0..0fcb72c595f3 100644 --- a/dotCMS/src/main/java/com/dotcms/jobs/business/api/JobQueueManagerAPI.java +++ 
b/dotCMS/src/main/java/com/dotcms/jobs/business/api/JobQueueManagerAPI.java @@ -1,5 +1,6 @@ package com.dotcms.jobs.business.api; +import com.dotcms.jobs.business.api.events.JobWatcher; import com.dotcms.jobs.business.error.CircuitBreaker; import com.dotcms.jobs.business.error.JobProcessorNotFoundException; import com.dotcms.jobs.business.error.RetryStrategy; @@ -128,6 +129,16 @@ JobPaginatedResult getActiveJobs(String queueName, int page, int pageSize) */ JobPaginatedResult getCompletedJobs(int page, int pageSize) throws DotDataException; + /** + * Retrieves a list of successful jobs + * + * @param page The page number + * @param pageSize The number of jobs per page + * @return A result object containing the list of successful jobs and pagination information. + * @throws DotDataException if there's an error fetching the jobs + */ + JobPaginatedResult getSuccessfulJobs(int page, int pageSize) throws DotDataException; + /** * Retrieves a list of canceled jobs * @@ -148,6 +159,16 @@ JobPaginatedResult getActiveJobs(String queueName, int page, int pageSize) */ JobPaginatedResult getFailedJobs(int page, int pageSize) throws DotDataException; + /** + * Retrieves a list of abandoned jobs + * + * @param page The page number + * @param pageSize The number of jobs per page + * @return A result object containing the list of abandoned jobs and pagination information. + * @throws DotDataException if there's an error fetching the jobs + */ + JobPaginatedResult getAbandonedJobs(int page, int pageSize) throws DotDataException; + /** * Cancels a job. * @@ -161,8 +182,24 @@ JobPaginatedResult getActiveJobs(String queueName, int page, int pageSize) * * @param jobId The ID of the job to watch * @param watcher The consumer to be notified of job updates + * @return A JobWatcher instance representing the registered watcher + */ + JobWatcher watchJob(String jobId, Consumer watcher); + + /** + * Removes a watcher for a specific job. 
+ * + * @param jobId The ID of the job to unwatch + * @param watcher The watcher to remove + */ + void removeJobWatcher(String jobId, JobWatcher watcher); + + /** + * Removes all watchers for a specific job. + * + * @param jobId The ID of the job */ - void watchJob(String jobId, Consumer watcher); + void removeAllJobWatchers(String jobId); /** * Sets a retry strategy for a specific queue. diff --git a/dotCMS/src/main/java/com/dotcms/jobs/business/api/JobQueueManagerAPIImpl.java b/dotCMS/src/main/java/com/dotcms/jobs/business/api/JobQueueManagerAPIImpl.java index b63796ce3c7f..e789c70bd919 100644 --- a/dotCMS/src/main/java/com/dotcms/jobs/business/api/JobQueueManagerAPIImpl.java +++ b/dotCMS/src/main/java/com/dotcms/jobs/business/api/JobQueueManagerAPIImpl.java @@ -3,14 +3,13 @@ import com.dotcms.business.CloseDBIfOpened; import com.dotcms.business.WrapInTransaction; import com.dotcms.jobs.business.api.events.JobCancelRequestEvent; -import com.dotcms.jobs.business.api.events.JobCanceledEvent; import com.dotcms.jobs.business.api.events.JobCancellingEvent; import com.dotcms.jobs.business.api.events.JobCompletedEvent; import com.dotcms.jobs.business.api.events.JobCreatedEvent; import com.dotcms.jobs.business.api.events.JobFailedEvent; import com.dotcms.jobs.business.api.events.JobProgressUpdatedEvent; -import com.dotcms.jobs.business.api.events.JobRemovedFromQueueEvent; import com.dotcms.jobs.business.api.events.JobStartedEvent; +import com.dotcms.jobs.business.api.events.JobWatcher; import com.dotcms.jobs.business.api.events.RealTimeJobMonitor; import com.dotcms.jobs.business.detector.AbandonedJobDetector; import com.dotcms.jobs.business.error.CircuitBreaker; @@ -321,7 +320,7 @@ public Job getJob(final String jobId) throws DotDataException { @CloseDBIfOpened @Override - public JobPaginatedResult getActiveJobs(String queueName, int page, int pageSize) + public JobPaginatedResult getActiveJobs(final String queueName, final int page, final int pageSize) throws 
DotDataException { try { return jobQueue.getActiveJobs(queueName, page, pageSize); @@ -342,7 +341,8 @@ public JobPaginatedResult getJobs(final int page, final int pageSize) throws Dot @CloseDBIfOpened @Override - public JobPaginatedResult getActiveJobs(int page, int pageSize) throws DotDataException { + public JobPaginatedResult getActiveJobs(final int page, final int pageSize) + throws DotDataException { try { return jobQueue.getActiveJobs(page, pageSize); } catch (JobQueueDataException e) { @@ -352,7 +352,8 @@ public JobPaginatedResult getActiveJobs(int page, int pageSize) throws DotDataEx @CloseDBIfOpened @Override - public JobPaginatedResult getCompletedJobs(int page, int pageSize) throws DotDataException { + public JobPaginatedResult getCompletedJobs(final int page, final int pageSize) + throws DotDataException { try { return jobQueue.getCompletedJobs(page, pageSize); } catch (JobQueueDataException e) { @@ -362,7 +363,19 @@ public JobPaginatedResult getCompletedJobs(int page, int pageSize) throws DotDat @CloseDBIfOpened @Override - public JobPaginatedResult getCanceledJobs(int page, int pageSize) throws DotDataException { + public JobPaginatedResult getSuccessfulJobs(final int page, final int pageSize) + throws DotDataException { + try { + return jobQueue.getSuccessfulJobs(page, pageSize); + } catch (JobQueueDataException e) { + throw new DotDataException("Error fetching successful jobs", e); + } + } + + @CloseDBIfOpened + @Override + public JobPaginatedResult getCanceledJobs(final int page, final int pageSize) + throws DotDataException { try { return jobQueue.getCanceledJobs(page, pageSize); } catch (JobQueueDataException e) { @@ -372,7 +385,8 @@ public JobPaginatedResult getCanceledJobs(int page, int pageSize) throws DotData @CloseDBIfOpened @Override - public JobPaginatedResult getFailedJobs(int page, int pageSize) throws DotDataException { + public JobPaginatedResult getFailedJobs(final int page, final int pageSize) + throws DotDataException { try { return 
jobQueue.getFailedJobs(page, pageSize); } catch (JobQueueDataException e) { @@ -380,12 +394,22 @@ public JobPaginatedResult getFailedJobs(int page, int pageSize) throws DotDataEx } } + @CloseDBIfOpened + @Override + public JobPaginatedResult getAbandonedJobs(final int page, final int pageSize) throws DotDataException { + try { + return jobQueue.getAbandonedJobs(page, pageSize); + } catch (JobQueueDataException e) { + throw new DotDataException("Error fetching abandoned jobs", e); + } + } + @Override public void cancelJob(final String jobId) throws DotDataException { final Job job = getJob(jobId); - if (job.state() == JobState.PENDING || job.state() == JobState.RUNNING) { + if (isInCancelableState(job)) { handleJobCancelRequest(job); } else { Logger.warn(this, "Job " + job.id() + " is not in a cancellable state. " @@ -408,9 +432,7 @@ void onCancelRequestJob(final JobCancelRequestEvent event) { try { final var job = getJob(event.getJob().id()); - if (job.state() == JobState.PENDING - || job.state() == JobState.RUNNING - || job.state() == JobState.CANCEL_REQUESTED) { + if (isInCancelableState(job) || job.state() == JobState.CANCEL_REQUESTED) { final Optional instance = getInstance(job.id()); if (instance.isPresent()) { @@ -439,8 +461,18 @@ void onCancelRequestJob(final JobCancelRequestEvent event) { } @Override - public void watchJob(final String jobId, final Consumer watcher) { - realTimeJobMonitor.registerWatcher(jobId, watcher); + public JobWatcher watchJob(final String jobId, final Consumer watcher) { + return realTimeJobMonitor.registerWatcher(jobId, watcher); + } + + @Override + public void removeJobWatcher(final String jobId, final JobWatcher watcher) { + realTimeJobMonitor.removeWatcher(jobId, watcher); + } + + @Override + public void removeAllJobWatchers(final String jobId) { + realTimeJobMonitor.removeAllWatchers(jobId); } @Override @@ -689,7 +721,7 @@ private boolean isReadyForRetry(Job job) throws DotDataException { /** * Handles a failed job that cannot 
be retried. This method logs a warning about the - * non-retryable job and removes it from the active queue. + * non-retryable job, removes it from the active queue, and marks it as failed permanently. * * @param job The failed job that cannot be retried. */ @@ -697,13 +729,16 @@ private void handleNonRetryableFailedJob(final Job job) throws DotDataException Logger.warn(this, "Job " + job.id() + " has failed and cannot be retried."); - try { - jobQueue.removeJobFromQueue(job.id()); - // Send the job removed from queue events - JobUtil.sendEvents(job, JobRemovedFromQueueEvent::new); - } catch (JobQueueDataException e) { - throw new DotDataException("Error removing failed job", e); + Job finishedJob = job.markAsFailedPermanently(); + if (job.state() == JobState.ABANDONED) { + finishedJob = job.markAsAbandonedPermanently(); } + + // Giving the job a final state + updateJobStatus(finishedJob); + + // Send the job completion events + JobUtil.sendEvents(finishedJob, JobCompletedEvent::new); } /** @@ -839,17 +874,17 @@ private void handleJobCompletion(final Job job, final JobProcessor processor) final float progress = getJobProgress(job); try { + + Job completedJob = job.markAsSuccessful(jobResult).withProgress(progress); + if (jobQueue.hasJobBeenInState(job.id(), JobState.CANCEL_REQUESTED, JobState.CANCELLING)) { - Job canceledJob = job.markAsCanceled(jobResult).withProgress(progress); - updateJobStatus(canceledJob); - // Send the job canceled events - JobUtil.sendEvents(canceledJob, JobCanceledEvent::new); - } else { - final Job completedJob = job.markAsCompleted(jobResult).withProgress(progress); - updateJobStatus(completedJob); - // Send the job completed events - JobUtil.sendEvents(completedJob, JobCompletedEvent::new); + completedJob = job.markAsCanceled(jobResult).withProgress(progress); } + + updateJobStatus(completedJob); + // Send the job completed events + JobUtil.sendEvents(completedJob, JobCompletedEvent::new); + } catch (JobQueueDataException e) { final var 
errorMessage = "Error updating job status"; Logger.error(this, errorMessage, e); @@ -1104,6 +1139,18 @@ private int incrementAndResetEmptyQueueCount( return emptyQueueCount; } + /** + * Verifies if a job state is in a cancellable state. + * + * @param job The job to check. + * @return {@code true} if the job is in a cancellable state, {@code false} otherwise. + */ + private boolean isInCancelableState(final Job job) { + return job.state() == JobState.PENDING || job.state() == JobState.RUNNING + || job.state() == JobState.FAILED || job.state() == JobState.ABANDONED + || job.state() == JobState.ABANDONED_PERMANENTLY; + } + /** * A wrapper class that makes ScheduledExecutorService auto-closeable. This class is designed to * be used with try-with-resources to ensure that the ScheduledExecutorService is properly shut diff --git a/dotCMS/src/main/java/com/dotcms/jobs/business/api/events/JobCanceledEvent.java b/dotCMS/src/main/java/com/dotcms/jobs/business/api/events/JobCanceledEvent.java deleted file mode 100644 index 55046c416556..000000000000 --- a/dotCMS/src/main/java/com/dotcms/jobs/business/api/events/JobCanceledEvent.java +++ /dev/null @@ -1,38 +0,0 @@ -package com.dotcms.jobs.business.api.events; - -import com.dotcms.jobs.business.job.Job; -import java.time.LocalDateTime; - -/** - * Event fired when a job is canceled. - */ -public class JobCanceledEvent implements JobEvent { - - private final Job job; - private final LocalDateTime canceledAt; - - /** - * Constructs a new JobCanceledEvent. - * - * @param job The canceled job. - * @param canceledAt The timestamp when the job was canceled. - */ - public JobCanceledEvent(Job job, LocalDateTime canceledAt) { - this.job = job; - this.canceledAt = canceledAt; - } - - /** - * @return The canceled job. - */ - public Job getJob() { - return job; - } - - /** - * @return The timestamp when the job was canceled. 
- */ - public LocalDateTime getCanceledAt() { - return canceledAt; - } -} diff --git a/dotCMS/src/main/java/com/dotcms/jobs/business/api/events/JobRemovedFromQueueEvent.java b/dotCMS/src/main/java/com/dotcms/jobs/business/api/events/JobRemovedFromQueueEvent.java deleted file mode 100644 index b93504e600ec..000000000000 --- a/dotCMS/src/main/java/com/dotcms/jobs/business/api/events/JobRemovedFromQueueEvent.java +++ /dev/null @@ -1,38 +0,0 @@ -package com.dotcms.jobs.business.api.events; - -import com.dotcms.jobs.business.job.Job; -import java.time.LocalDateTime; - -/** - * Event fired when a job is removed from the queue because failed and is not retryable. - */ -public class JobRemovedFromQueueEvent implements JobEvent { - - private final Job job; - private final LocalDateTime removedAt; - - /** - * Constructs a new JobRemovedFromQueueEvent. - * - * @param job The non-retryable job. - * @param canceledAt The timestamp when the job was removed from the queue. - */ - public JobRemovedFromQueueEvent(Job job, LocalDateTime canceledAt) { - this.job = job; - this.removedAt = canceledAt; - } - - /** - * @return The non-retryable job. - */ - public Job getJob() { - return job; - } - - /** - * @return The timestamp when the job removed from the queue. 
- */ - public LocalDateTime getRemovedAt() { - return removedAt; - } -} diff --git a/dotCMS/src/main/java/com/dotcms/jobs/business/api/events/RealTimeJobMonitor.java b/dotCMS/src/main/java/com/dotcms/jobs/business/api/events/RealTimeJobMonitor.java index ef33514f30d0..065b6e361060 100644 --- a/dotCMS/src/main/java/com/dotcms/jobs/business/api/events/RealTimeJobMonitor.java +++ b/dotCMS/src/main/java/com/dotcms/jobs/business/api/events/RealTimeJobMonitor.java @@ -86,21 +86,11 @@ protected void init() { (EventSubscriber) this::onJobFailed ); - APILocator.getLocalSystemEventsAPI().subscribe( - JobRemovedFromQueueEvent.class, - (EventSubscriber) this::onJobRemovedFromQueueEvent - ); - APILocator.getLocalSystemEventsAPI().subscribe( JobCompletedEvent.class, (EventSubscriber) this::onJobCompleted ); - APILocator.getLocalSystemEventsAPI().subscribe( - JobCanceledEvent.class, - (EventSubscriber) this::onJobCanceled - ); - APILocator.getLocalSystemEventsAPI().subscribe( JobCancellingEvent.class, (EventSubscriber) this::onJobCancelling @@ -157,31 +147,36 @@ protected void init() { * @param jobId The ID of the job to watch * @param watcher The consumer to be notified of job updates * @param filter Optional predicate to filter job updates (null means receive all updates) + * @return A JobWatcher instance representing the registered watcher * @throws NullPointerException if jobId or watcher is null * @see Predicates for common filter predicates * @see CopyOnWriteArrayList for more details about the thread-safety guarantees */ - public void registerWatcher(String jobId, Consumer watcher, Predicate filter) { + public JobWatcher registerWatcher(String jobId, Consumer watcher, Predicate filter) { Objects.requireNonNull(jobId, "jobId cannot be null"); Objects.requireNonNull(watcher, "watcher cannot be null"); + final var jobWatcher = JobWatcher.builder() + .watcher(watcher) + .filter(filter != null ? 
filter : job -> true) + .build(); + jobWatchers.compute(jobId, (key, existingWatchers) -> { List watchers = Objects.requireNonNullElseGet( existingWatchers, CopyOnWriteArrayList::new ); - watchers.add(JobWatcher.builder() - .watcher(watcher) - .filter(filter != null ? filter : job -> true) - .build()); + watchers.add(jobWatcher); Logger.debug(this, String.format( "Added watcher for job %s. Total watchers: %d", jobId, watchers.size())); return watchers; }); + + return jobWatcher; } /** @@ -214,11 +209,12 @@ public void registerWatcher(String jobId, Consumer watcher, Predicate * * @param jobId The ID of the job to watch * @param watcher The consumer to be notified of job updates + * @return A JobWatcher instance representing the registered watcher * @throws NullPointerException if jobId or watcher is null * @see CopyOnWriteArrayList for more details about the thread-safety guarantees */ - public void registerWatcher(String jobId, Consumer watcher) { - registerWatcher(jobId, watcher, null); + public JobWatcher registerWatcher(String jobId, Consumer watcher) { + return registerWatcher(jobId, watcher, null); } /** @@ -232,44 +228,17 @@ public Set getWatchedJobIds() { } /** - * Updates watchers for a list of jobs. - * Each job's watchers are notified according to their filter predicates. + * Removes all the watchers associated with the specified job ID. * - * @param updatedJobs List of jobs that have been updated - * @throws IllegalArgumentException if updatedJobs is null + * @param jobId The ID of the job whose watchers are to be removed. */ - public void updateWatchers(List updatedJobs) { - for (Job job : updatedJobs) { - updateWatchers(job); - } - } + public void removeAllWatchers(final String jobId) { - /** - * Updates watchers for a single job. Removes watchers if the job has reached a final state. - * - * @param job The job that has been updated. 
- */ - private void updateWatchers(Job job) { - - List watchers = jobWatchers.get(job.id()); - if (watchers != null) { - watchers.forEach(jobWatcher -> { - try { - if (jobWatcher.filter().test(job)) { - jobWatcher.watcher().accept(job); - } - } catch (Exception e) { - Logger.error(this, "Error notifying job watcher for job " + job.id(), e); - - // Direct remove is thread-safe with CopyOnWriteArrayList - watchers.remove(jobWatcher); - - // If this was the last watcher, clean up the map entry - if (watchers.isEmpty()) { - jobWatchers.remove(job.id()); - } - } - }); + List removed = jobWatchers.remove(jobId); + if (removed != null) { + Logger.info(this, + String.format("Removed all watchers for job %s. Watchers removed: %d", + jobId, removed.size())); } } @@ -278,13 +247,16 @@ private void updateWatchers(Job job) { * * @param jobId The ID of the job whose watcher is to be removed. */ - private void removeWatcher(String jobId) { + public void removeWatcher(final String jobId, final JobWatcher watcher) { - List removed = jobWatchers.remove(jobId); - if (removed != null) { - Logger.debug(this, - String.format("Removed all watchers for job %s. Watchers removed: %d", - jobId, removed.size())); + if (jobId == null || watcher == null) { + return; + } + + // Get the list of watchers for the job + List watchers = jobWatchers.get(jobId); + if (watchers != null) { + removeWatcherFromList(jobId, watcher, watchers); } } @@ -324,16 +296,6 @@ public void onJobCancelling(JobCancellingEvent event) { updateWatchers(event.getJob()); } - /** - * Handles the job-canceled event. - * - * @param event The JobCanceledEvent. - */ - public void onJobCanceled(JobCanceledEvent event) { - updateWatchers(event.getJob()); - removeWatcher(event.getJob().id()); - } - /** * Handles the job completed event. 
* @@ -341,16 +303,7 @@ public void onJobCanceled(JobCanceledEvent event) { */ public void onJobCompleted(JobCompletedEvent event) { updateWatchers(event.getJob()); - removeWatcher(event.getJob().id()); - } - - /** - * Handles the job removed from queue event when failed and is not retryable. - * - * @param event The JobRemovedFromQueueEvent. - */ - public void onJobRemovedFromQueueEvent(JobRemovedFromQueueEvent event) { - removeWatcher(event.getJob().id()); + removeAllWatchers(event.getJob().id()); } /** @@ -371,6 +324,49 @@ public void onJobProgressUpdated(JobProgressUpdatedEvent event) { updateWatchers(event.getJob()); } + /** + * Updates watchers for a single job. Removes watchers if the job has reached a final state. + * + * @param job The job that has been updated. + */ + private void updateWatchers(Job job) { + + List watchers = jobWatchers.get(job.id()); + if (watchers != null) { + watchers.forEach(jobWatcher -> { + try { + if (jobWatcher.filter().test(job)) { + jobWatcher.watcher().accept(job); + } + } catch (Exception e) { + Logger.error(this, "Error notifying job watcher for job " + job.id(), e); + + // Direct remove is thread-safe with CopyOnWriteArrayList + removeWatcherFromList(job.id(), jobWatcher, watchers); + } + }); + } + } + + /** + * Removes the watcher from the list of watchers for the specified job ID. + * + * @param jobId The ID of the job whose watcher is to be removed. + * @param watcher The watcher to remove. + * @param watchers The list of watchers for the job. + */ + private void removeWatcherFromList(String jobId, JobWatcher watcher, + List watchers) { + + // Remove the watcher from the list + watchers.remove(watcher); + + // If this was the last watcher, clean up the map entry + if (watchers.isEmpty()) { + jobWatchers.remove(jobId); + } + } + /** * Common predicates for filtering job updates. 
These predicates can be used individually or * combined using {@link Predicate#and(Predicate)} and {@link Predicate#or(Predicate)} to create * @@ -425,19 +421,43 @@ public boolean test(Job job) { * @return A predicate for matching failed jobs */ public static Predicate hasFailed() { - return job -> job.state() == JobState.FAILED + return job -> (job.state() == JobState.FAILED + || job.state() == JobState.FAILED_PERMANENTLY) && job.result().isPresent() && job.result().get().errorDetail().isPresent(); } /** - * Creates a predicate that matches completed jobs. The predicate matches any job in the - * COMPLETED state. + * Creates a predicate that matches any completed job. * * @return A predicate for matching completed jobs */ public static Predicate isCompleted() { - return job -> job.state() == JobState.COMPLETED; + return job -> (job.state() == JobState.SUCCESS + || job.state() == JobState.CANCELED + || job.state() == JobState.ABANDONED_PERMANENTLY + || job.state() == JobState.FAILED_PERMANENTLY); + } + + /** + * Creates a predicate that matches successful jobs. The predicate matches any job in the + * SUCCESS state. + * + * @return A predicate for matching successful jobs + */ + public static Predicate isSuccessful() { + return job -> job.state() == JobState.SUCCESS; + } + + /** + * Creates a predicate that matches abandoned jobs. The predicate matches any job in the + * ABANDONED state. 
 + * + * @return A predicate for matching abandoned jobs + */ + public static Predicate isAbandoned() { + return job -> (job.state() == JobState.ABANDONED + || job.state() == JobState.ABANDONED_PERMANENTLY); } /** diff --git a/dotCMS/src/main/java/com/dotcms/jobs/business/job/AbstractJob.java b/dotCMS/src/main/java/com/dotcms/jobs/business/job/AbstractJob.java index faf8d5745bec..38969a963961 100644 --- a/dotCMS/src/main/java/com/dotcms/jobs/business/job/AbstractJob.java +++ b/dotCMS/src/main/java/com/dotcms/jobs/business/job/AbstractJob.java @@ -130,16 +130,16 @@ default Job withState(final JobState newState) { } /** - * Creates a new Job marked as completed. + * Creates a new Job marked as successful. * - * @param result The result details of the completed job. + * @param result The result details of the successful job. * - * @return A new Job instance marked as completed. + * @return A new Job instance marked as successful. */ - default Job markAsCompleted(final JobResult result) { + default Job markAsSuccessful(final JobResult result) { return Job.builder().from(this) - .state(JobState.COMPLETED) + .state(JobState.SUCCESS) .completedAt(Optional.of(LocalDateTime.now())) .updatedAt(LocalDateTime.now()) .result(result != null ? Optional.of(result) : Optional.empty()) @@ -163,4 +163,32 @@ default Job markAsCanceled(final JobResult result) { .build(); } + /** + * Creates a new Job marked as failed permanently. + * + * @return A new Job instance marked as failed permanently. + */ + default Job markAsFailedPermanently() { + + return Job.builder().from(this) + .state(JobState.FAILED_PERMANENTLY) + .completedAt(Optional.of(LocalDateTime.now())) + .updatedAt(LocalDateTime.now()) + .build(); + } + + /** + * Creates a new Job marked as abandoned permanently. + * + * @return A new Job instance marked as abandoned permanently. 
+ */ + default Job markAsAbandonedPermanently() { + + return Job.builder().from(this) + .state(JobState.ABANDONED_PERMANENTLY) + .completedAt(Optional.of(LocalDateTime.now())) + .updatedAt(LocalDateTime.now()) + .build(); + } + } diff --git a/dotCMS/src/main/java/com/dotcms/jobs/business/job/JobState.java b/dotCMS/src/main/java/com/dotcms/jobs/business/job/JobState.java index f3e7ebdc0880..abed293bf094 100644 --- a/dotCMS/src/main/java/com/dotcms/jobs/business/job/JobState.java +++ b/dotCMS/src/main/java/com/dotcms/jobs/business/job/JobState.java @@ -18,18 +18,30 @@ public enum JobState { /** * The job has finished executing successfully. */ - COMPLETED, + SUCCESS, /** * The job encountered an error and could not complete successfully. */ FAILED, + /** + * The job encountered an error and could not complete successfully. The error is permanent and the + * job will not be retried. + */ + FAILED_PERMANENTLY, + /** * The job was abandoned before it could complete. */ ABANDONED, + /** + * The job was abandoned before it could complete. The error is permanent and the job will not + * be retried. + */ + ABANDONED_PERMANENTLY, + /** * The job is waiting to be canceled. 
*/ diff --git a/dotCMS/src/main/java/com/dotcms/jobs/business/processor/impl/ImportContentletsProcessor.java b/dotCMS/src/main/java/com/dotcms/jobs/business/processor/impl/ImportContentletsProcessor.java index 12d1a8b70fde..cdfa600a319f 100644 --- a/dotCMS/src/main/java/com/dotcms/jobs/business/processor/impl/ImportContentletsProcessor.java +++ b/dotCMS/src/main/java/com/dotcms/jobs/business/processor/impl/ImportContentletsProcessor.java @@ -25,21 +25,24 @@ import com.dotmarketing.portlets.languagesmanager.model.Language; import com.dotmarketing.portlets.workflows.model.WorkflowAction; import com.dotmarketing.util.AdminLogger; +import com.dotmarketing.util.FileUtil; import com.dotmarketing.util.ImportUtil; import com.dotmarketing.util.Logger; import com.google.common.hash.Hashing; import com.liferay.portal.model.User; import com.liferay.portal.util.Constants; -import java.io.BufferedReader; import java.io.File; -import java.io.FileInputStream; -import java.io.FileReader; import java.io.IOException; -import java.io.InputStreamReader; import java.io.Reader; -import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; -import java.util.*; +import java.nio.file.Files; +import java.util.Calendar; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.LongConsumer; @@ -292,9 +295,8 @@ private void handleImport(final boolean preview, final Job job, final File fileT ); } - try (Reader reader = new BufferedReader( - new InputStreamReader(new FileInputStream(fileToImport), - Charset.defaultCharset()))) { + try (Reader reader = Files.newBufferedReader( + fileToImport.toPath(), StandardCharsets.UTF_8)) { CsvReader csvReader = createCsvReader(reader); @@ -490,8 +492,8 @@ public static long jobIdToLong(final String jobId) { private Long totalLines(final Job job, final File dotTempFile) 
{ long totalCount; - try (BufferedReader reader = new BufferedReader(new FileReader(dotTempFile))) { - totalCount = reader.lines().count(); + try { + totalCount = FileUtil.countFileLines(dotTempFile); if (totalCount == 0) { Logger.info(this.getClass(), "No lines in CSV import file: " + dotTempFile.getName()); diff --git a/dotCMS/src/main/java/com/dotcms/jobs/business/queue/AbstractJobStateQueryParameters.java b/dotCMS/src/main/java/com/dotcms/jobs/business/queue/AbstractJobStateQueryParameters.java new file mode 100644 index 000000000000..b9d7636b9576 --- /dev/null +++ b/dotCMS/src/main/java/com/dotcms/jobs/business/queue/AbstractJobStateQueryParameters.java @@ -0,0 +1,76 @@ +package com.dotcms.jobs.business.queue; + +import com.dotcms.jobs.business.job.JobState; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.time.LocalDateTime; +import java.util.Optional; +import org.immutables.value.Value; + +/** + * Interface representing the parameters for querying job states. + */ +@Value.Style(typeImmutable = "*", typeAbstract = "Abstract*") +@Value.Immutable +@JsonSerialize(as = JobStateQueryParameters.class) +@JsonDeserialize(as = JobStateQueryParameters.class) +public interface AbstractJobStateQueryParameters { + + /** + * Gets the name of the queue. + * + * @return an Optional containing the queue name, or an empty Optional if not specified. + */ + Optional queueName(); + + /** + * Gets the start date for the query. + * + * @return an Optional containing the start date, or an empty Optional if not specified. + */ + Optional startDate(); + + /** + * Gets the end date for the query. + * + * @return an Optional containing the end date, or an empty Optional if not specified. + */ + Optional endDate(); + + /** + * Gets the page number for pagination. + * + * @return the page number. + */ + int page(); + + /** + * Gets the page size for pagination. + * + * @return the page size. 
+ */ + int pageSize(); + + /** + * Gets the column name to filter dates. + * + * @return an Optional containing the filter date column name, or an empty Optional if not + * specified. + */ + Optional filterDateColumn(); + + /** + * Gets the column name to order the results by. + * + * @return the order by column name. + */ + String orderByColumn(); + + /** + * Gets the states to filter the jobs by. + * + * @return an array of JobState values. + */ + JobState[] states(); + +} diff --git a/dotCMS/src/main/java/com/dotcms/jobs/business/queue/JobQueue.java b/dotCMS/src/main/java/com/dotcms/jobs/business/queue/JobQueue.java index 9b2a043c00a9..1b4846c52705 100644 --- a/dotCMS/src/main/java/com/dotcms/jobs/business/queue/JobQueue.java +++ b/dotCMS/src/main/java/com/dotcms/jobs/business/queue/JobQueue.java @@ -112,6 +112,16 @@ JobPaginatedResult getCompletedJobs(String queueName, LocalDateTime startDate, */ JobPaginatedResult getCompletedJobs(int page, int pageSize) throws JobQueueDataException; + /** + * Retrieves a list of successful jobs. + * + * @param page The page number (for pagination). + * @param pageSize The number of items per page. + * @return A result object containing the list of successful jobs and pagination information. + * @throws JobQueueDataException if there's a data storage error while fetching the jobs + */ + JobPaginatedResult getSuccessfulJobs(int page, int pageSize) throws JobQueueDataException; + /** * Retrieves a list of canceled jobs. * @@ -132,6 +142,16 @@ JobPaginatedResult getCompletedJobs(String queueName, LocalDateTime startDate, */ JobPaginatedResult getFailedJobs(int page, int pageSize) throws JobQueueDataException; + /** + * Retrieves a list of abandoned jobs. + * + * @param page The page number (for pagination). + * @param pageSize The number of items per page. + * @return A result object containing the list of abandoned jobs and pagination information. 
+ * @throws JobQueueDataException if there's a data storage error while fetching the jobs + */ + JobPaginatedResult getAbandonedJobs(int page, int pageSize) throws JobQueueDataException; + /** * Updates the status of a job. * @@ -190,16 +210,6 @@ Optional detectAndMarkAbandoned(Duration threshold, JobState... inStates) */ void updateJobProgress(String jobId, float progress) throws JobQueueDataException; - /** - * Removes a job from the queue. This method should be used for jobs that have permanently - * failed and cannot be retried. Implementing classes should ensure that the job is completely - * removed from the queue and any associated resources are cleaned up. - * - * @param jobId The ID of the job to remove. - * @throws JobQueueDataException if there's a data storage error while removing the job - */ - void removeJobFromQueue(String jobId) throws JobQueueDataException; - /** * Checks if a job has ever been in a specific state. * diff --git a/dotCMS/src/main/java/com/dotcms/jobs/business/queue/PostgresJobQueue.java b/dotCMS/src/main/java/com/dotcms/jobs/business/queue/PostgresJobQueue.java index 72ca197b4e03..a2139cd99c39 100644 --- a/dotCMS/src/main/java/com/dotcms/jobs/business/queue/PostgresJobQueue.java +++ b/dotCMS/src/main/java/com/dotcms/jobs/business/queue/PostgresJobQueue.java @@ -101,23 +101,25 @@ public class PostgresJobQueue implements JobQueue { "WHERE id IN (SELECT id FROM abandoned_jobs) " + "RETURNING *"; - private static final String GET_ACTIVE_JOBS_QUERY_FOR_QUEUE = + private static final String GET_JOBS_QUERY_BY_QUEUE_AND_STATE = "WITH total AS (SELECT COUNT(*) AS total_count " + - " FROM job WHERE queue_name = ? AND state IN (?, ?) " + + " FROM job WHERE queue_name = ? AND state IN $??$ " + "), " + "paginated_data AS (SELECT * " + - " FROM job WHERE queue_name = ? AND state IN (?, ?) " + - " ORDER BY created_at LIMIT ? OFFSET ? " + + " FROM job WHERE queue_name = ? AND state IN $??$ " + + " ORDER BY $ORDER_BY$ LIMIT ? OFFSET ? 
" + ") " + "SELECT p.*, t.total_count FROM total t LEFT JOIN paginated_data p ON true"; - private static final String GET_COMPLETED_JOBS_QUERY_FOR_QUEUE = + private static final String GET_JOBS_QUERY_BY_QUEUE_AND_STATE_IN_DATE_RANGE = "WITH total AS (SELECT COUNT(*) AS total_count " + - " FROM job WHERE queue_name = ? AND state = ? AND completed_at BETWEEN ? AND ? " + + " FROM job WHERE queue_name = ? AND state IN $??$ AND $DATE_COLUMN$ BETWEEN ? AND ? " + + "), " + "paginated_data AS (SELECT * FROM job " + - " WHERE queue_name = ? AND state = ? AND completed_at BETWEEN ? AND ? " + - " ORDER BY completed_at DESC LIMIT ? OFFSET ? " + + " WHERE queue_name = ? AND state IN $??$ AND $DATE_COLUMN$ BETWEEN ? AND ? " + + + " ORDER BY $ORDER_BY$ DESC LIMIT ? OFFSET ? " + ") " + "SELECT p.*, t.total_count FROM total t LEFT JOIN paginated_data p ON true"; @@ -177,9 +179,13 @@ public class PostgresJobQueue implements JobQueue { + "EXISTS (SELECT 1 FROM job_history WHERE job_id = ? AND state IN $??$)"; private static final String COLUMN_TOTAL_COUNT = "total_count"; + private static final String COLUMN_COMPLETED_AT = "completed_at"; + private static final String COLUMN_UPDATED_AT = "updated_at"; + private static final String COLUMN_CREATED_AT = "created_at"; private static final String REPLACE_TOKEN_PARAMETERS = "$??$"; private static final String REPLACE_TOKEN_ORDER_BY = "$ORDER_BY$"; + private static final String REPLACE_TOKEN_DATE_COLUMN = "$DATE_COLUMN$"; /** * Jackson mapper configuration and lazy initialized instance. 
@@ -306,26 +312,15 @@ public JobState getJobState(final String jobId) public JobPaginatedResult getActiveJobs(final String queueName, final int page, final int pageSize) throws JobQueueDataException { - try { - - DotConnect dc = new DotConnect(); - dc.setSQL(GET_ACTIVE_JOBS_QUERY_FOR_QUEUE); - dc.addParam(queueName); - dc.addParam(JobState.PENDING.name()); - dc.addParam(JobState.RUNNING.name()); - dc.addParam(queueName); // Repeated for paginated_data CTE - dc.addParam(JobState.PENDING.name()); - dc.addParam(JobState.RUNNING.name()); - dc.addParam(pageSize); - dc.addParam((page - 1) * pageSize); - - return jobPaginatedResult(page, pageSize, dc); - } catch (DotDataException e) { - Logger.error(this, - "Database error while fetching active jobs by queue", e); - throw new JobQueueDataException( - "Database error while fetching active jobs by queue", e); - } + return getJobsByState(JobStateQueryParameters.builder() + .queueName(queueName) + .page(page) + .pageSize(pageSize) + .orderByColumn(COLUMN_CREATED_AT) + .states(JobState.PENDING, JobState.RUNNING, + JobState.FAILED, JobState.ABANDONED, JobState.CANCEL_REQUESTED, + JobState.CANCELLING).build() + ); } @CloseDBIfOpened @@ -335,27 +330,17 @@ public JobPaginatedResult getCompletedJobs(final String queueName, final LocalDateTime endDate, final int page, final int pageSize) throws JobQueueDataException { - try { - DotConnect dc = new DotConnect(); - dc.setSQL(GET_COMPLETED_JOBS_QUERY_FOR_QUEUE); - dc.addParam(queueName); - dc.addParam(JobState.COMPLETED.name()); - dc.addParam(Timestamp.valueOf(startDate)); - dc.addParam(Timestamp.valueOf(endDate)); - dc.addParam(queueName); // Repeated for paginated_data CTE - dc.addParam(JobState.COMPLETED.name()); - dc.addParam(Timestamp.valueOf(startDate)); - dc.addParam(Timestamp.valueOf(endDate)); - dc.addParam(pageSize); - dc.addParam((page - 1) * pageSize); - - return jobPaginatedResult(page, pageSize, dc); - } catch (DotDataException e) { - Logger.error(this, - "Database error 
while fetching completed jobs by queue", e); - throw new JobQueueDataException( - "Database error while fetching completed jobs by queue", e); - } + return getJobsByState(JobStateQueryParameters.builder() + .queueName(queueName) + .startDate(startDate) + .endDate(endDate) + .filterDateColumn(COLUMN_COMPLETED_AT) + .page(page) + .pageSize(pageSize) + .orderByColumn(COLUMN_COMPLETED_AT) + .states(JobState.SUCCESS, JobState.CANCELED, + JobState.ABANDONED_PERMANENTLY, JobState.FAILED_PERMANENTLY).build() + ); } @CloseDBIfOpened @@ -381,26 +366,14 @@ public JobPaginatedResult getJobs(final int page, final int pageSize) public JobPaginatedResult getActiveJobs(final int page, final int pageSize) throws JobQueueDataException { - try { - - var query = GET_JOBS_QUERY_BY_STATE - .replace(REPLACE_TOKEN_PARAMETERS, "(?, ?)") - .replace(REPLACE_TOKEN_ORDER_BY, "created_at"); - - DotConnect dc = new DotConnect(); - dc.setSQL(query); - dc.addParam(JobState.PENDING.name()); - dc.addParam(JobState.RUNNING.name()); - dc.addParam(JobState.PENDING.name()); // Repeated for paginated_data CTE - dc.addParam(JobState.RUNNING.name()); - dc.addParam(pageSize); - dc.addParam((page - 1) * pageSize); - - return jobPaginatedResult(page, pageSize, dc); - } catch (DotDataException e) { - Logger.error(this, "Database error while fetching active jobs", e); - throw new JobQueueDataException("Database error while fetching active jobs", e); - } + return getJobsByState(JobStateQueryParameters.builder() + .page(page) + .pageSize(pageSize) + .orderByColumn(COLUMN_CREATED_AT) + .states(JobState.PENDING, JobState.RUNNING, + JobState.FAILED, JobState.ABANDONED, JobState.CANCEL_REQUESTED, + JobState.CANCELLING).build() + ); } @CloseDBIfOpened @@ -408,74 +381,61 @@ public JobPaginatedResult getActiveJobs(final int page, final int pageSize) public JobPaginatedResult getCompletedJobs(final int page, final int pageSize) throws JobQueueDataException { - try { - - var query = GET_JOBS_QUERY_BY_STATE - 
.replace(REPLACE_TOKEN_PARAMETERS, "(?)") - .replace(REPLACE_TOKEN_ORDER_BY, "completed_at"); - - DotConnect dc = new DotConnect(); - dc.setSQL(query); - dc.addParam(JobState.COMPLETED.name()); - dc.addParam(JobState.COMPLETED.name()); // Repeated for paginated_data CTE - dc.addParam(pageSize); - dc.addParam((page - 1) * pageSize); + return getJobsByState(JobStateQueryParameters.builder() + .page(page) + .pageSize(pageSize) + .orderByColumn(COLUMN_COMPLETED_AT) + .states(JobState.SUCCESS, JobState.CANCELED, + JobState.ABANDONED_PERMANENTLY, JobState.FAILED_PERMANENTLY).build() + ); + } - return jobPaginatedResult(page, pageSize, dc); - } catch (DotDataException e) { - Logger.error(this, "Database error while fetching completed jobs", e); - throw new JobQueueDataException("Database error while fetching completed jobs", e); - } + @CloseDBIfOpened + @Override + public JobPaginatedResult getSuccessfulJobs(final int page, final int pageSize) + throws JobQueueDataException { + return getJobsByState(JobStateQueryParameters.builder() + .page(page) + .pageSize(pageSize) + .orderByColumn(COLUMN_COMPLETED_AT) + .states(JobState.SUCCESS).build() + ); } @CloseDBIfOpened @Override public JobPaginatedResult getCanceledJobs(final int page, final int pageSize) throws JobQueueDataException { - - try { - - var query = GET_JOBS_QUERY_BY_STATE - .replace(REPLACE_TOKEN_PARAMETERS, "(?)") - .replace(REPLACE_TOKEN_ORDER_BY, "completed_at"); - - DotConnect dc = new DotConnect(); - dc.setSQL(query); - dc.addParam(JobState.CANCELED.name()); - dc.addParam(JobState.CANCELED.name()); // Repeated for paginated_data CTE - dc.addParam(pageSize); - dc.addParam((page - 1) * pageSize); - - return jobPaginatedResult(page, pageSize, dc); - } catch (DotDataException e) { - Logger.error(this, "Database error while fetching cancelled jobs", e); - throw new JobQueueDataException("Database error while fetching cancelled jobs", e); - } + return getJobsByState(JobStateQueryParameters.builder() + .page(page) + 
.pageSize(pageSize) + .orderByColumn(COLUMN_UPDATED_AT) + .states(JobState.CANCEL_REQUESTED, JobState.CANCELLING, JobState.CANCELED).build() + ); } @CloseDBIfOpened @Override public JobPaginatedResult getFailedJobs(final int page, final int pageSize) throws JobQueueDataException { + return getJobsByState(JobStateQueryParameters.builder() + .page(page) + .pageSize(pageSize) + .orderByColumn(COLUMN_UPDATED_AT) + .states(JobState.FAILED, JobState.FAILED_PERMANENTLY).build() + ); + } - try { - - var query = GET_JOBS_QUERY_BY_STATE - .replace(REPLACE_TOKEN_PARAMETERS, "(?)") - .replace(REPLACE_TOKEN_ORDER_BY, "updated_at"); - - DotConnect dc = new DotConnect(); - dc.setSQL(query); - dc.addParam(JobState.FAILED.name()); - dc.addParam(JobState.FAILED.name()); // Repeated for paginated_data CTE - dc.addParam(pageSize); - dc.addParam((page - 1) * pageSize); - - return jobPaginatedResult(page, pageSize, dc); - } catch (DotDataException e) { - Logger.error(this, "Database error while fetching failed jobs", e); - throw new JobQueueDataException("Database error while fetching failed jobs", e); - } + @CloseDBIfOpened + @Override + public JobPaginatedResult getAbandonedJobs(final int page, final int pageSize) + throws JobQueueDataException { + return getJobsByState(JobStateQueryParameters.builder() + .page(page) + .pageSize(pageSize) + .orderByColumn(COLUMN_UPDATED_AT) + .states(JobState.ABANDONED, JobState.ABANDONED_PERMANENTLY).build() + ); } @CloseDBIfOpened @@ -524,11 +484,11 @@ public void updateJobStatus(final Job job) throws JobQueueDataException { }).orElse(null)); historyDc.loadResult(); - // Remove from job_queue if completed, failed, abandoned or canceled - if (job.state() == JobState.COMPLETED - || job.state() == JobState.FAILED - || job.state() == JobState.ABANDONED - || job.state() == JobState.CANCELED) { + // Remove from job_queue if the job is considered done + if (job.state() != JobState.PENDING + && job.state() != JobState.RUNNING + && job.state() != 
JobState.CANCEL_REQUESTED + && job.state() != JobState.CANCELLING) { removeJobFromQueue(job.id()); } @@ -694,9 +654,15 @@ public void updateJobProgress(final String jobId, final float progress) } } + /** + * Removes a job from the queue. This method should be used for jobs that have permanently + * failed and cannot be retried. + * + * @param jobId The ID of the job to remove. + * @throws JobQueueDataException if there's a data storage error while removing the job + */ @CloseDBIfOpened - @Override - public void removeJobFromQueue(final String jobId) throws JobQueueDataException { + private void removeJobFromQueue(final String jobId) throws JobQueueDataException { try { DotConnect dc = new DotConnect(); @@ -743,6 +709,181 @@ public boolean hasJobBeenInState(final String jobId, final JobState... states) } } + /** + * Retrieves a paginated result of jobs filtered by state, queue name, and date range. + * + * @param parameters An instance of JobStateQueryParameters containing filter and pagination + * information. + * @return A JobPaginatedResult containing the jobs that match the specified filters and + * pagination criteria. + * @throws JobQueueDataException if there is a data storage error while fetching the jobs. + */ + @CloseDBIfOpened + private JobPaginatedResult getJobsByState(final JobStateQueryParameters parameters) + throws JobQueueDataException { + + if (parameters.queueName().isPresent() && parameters.startDate().isPresent() + && parameters.endDate().isPresent()) { + return getJobsFilterByNameDateAndState(parameters); + } else if (parameters.queueName().isPresent()) { + return getJobsFilterByNameAndState(parameters); + } + + return getJobsFilterByState(parameters); + } + + /** + * Helper method to fetch jobs by state and return a paginated result. + * + * @param parameters An instance of JobStateQueryParameters containing filter and pagination. + * This includes page number, page size, job states, and order by column. 
+ * @return A JobPaginatedResult instance + * @throws JobQueueDataException if there's a data storage error while fetching the jobs + */ + @CloseDBIfOpened + private JobPaginatedResult getJobsFilterByState( + final JobStateQueryParameters parameters) throws JobQueueDataException { + + final var states = parameters.states(); + final var page = parameters.page(); + final var pageSize = parameters.pageSize(); + final var orderByColumn = parameters.orderByColumn(); + + try { + + String statesParam = String.join(", ", Collections.nCopies(states.length, "?")); + + var query = GET_JOBS_QUERY_BY_STATE + .replace(REPLACE_TOKEN_PARAMETERS, "(" + statesParam + ")") + .replace(REPLACE_TOKEN_ORDER_BY, orderByColumn); + + DotConnect dc = new DotConnect(); + dc.setSQL(query); + for (JobState state : states) { + dc.addParam(state.name()); + } + for (JobState state : states) {// Repeated for paginated_data CTE + dc.addParam(state.name()); + } + dc.addParam(pageSize); + dc.addParam((page - 1) * pageSize); + + return jobPaginatedResult(page, pageSize, dc); + } catch (DotDataException e) { + final var message = "Database error while fetching jobs by state"; + Logger.error(this, message, e); + throw new JobQueueDataException(message, e); + } + } + + /** + * Retrieves a paginated result of jobs filtered by state and queue name. + * + * @param parameters An instance of JobStateQueryParameters containing filter and pagination + * information. This includes queue name, job states, page number, page size + * and order by column. + * @return A JobPaginatedResult containing the jobs that match the specified filters and + * pagination criteria. + * @throws JobQueueDataException if there is a data storage error while fetching the jobs. 
+ */ + @CloseDBIfOpened + public JobPaginatedResult getJobsFilterByNameAndState( + final JobStateQueryParameters parameters) throws JobQueueDataException { + + final var queueName = parameters.queueName().orElseThrow(); + final var states = parameters.states(); + final var page = parameters.page(); + final var pageSize = parameters.pageSize(); + final var orderByColumn = parameters.orderByColumn(); + + String statesParam = String.join(", ", + Collections.nCopies(parameters.states().length, "?")); + + var query = GET_JOBS_QUERY_BY_QUEUE_AND_STATE + .replace(REPLACE_TOKEN_PARAMETERS, "(" + statesParam + ")") + .replace(REPLACE_TOKEN_ORDER_BY, orderByColumn); + + try { + + DotConnect dc = new DotConnect(); + dc.setSQL(query); + dc.addParam(queueName); + for (JobState state : states) { + dc.addParam(state.name()); + } + dc.addParam(queueName); // Repeated for paginated_data CTE + for (JobState state : states) { + dc.addParam(state.name()); + } + dc.addParam(pageSize); + dc.addParam((page - 1) * pageSize); + + return jobPaginatedResult(page, pageSize, dc); + } catch (DotDataException e) { + Logger.error(this, + "Database error while fetching active jobs by queue", e); + throw new JobQueueDataException( + "Database error while fetching active jobs by queue", e); + } + } + + /** + * Retrieves a paginated result of jobs filtered by state, queue name, and date range. + * + * @param parameters An instance of JobStateQueryParameters containing filter and pagination + * information. This includes queue name, start and end dates, job states, + * page number, page size, order by column and filter date column. + * @return A JobPaginatedResult containing the jobs that match the specified filters and + * pagination criteria. + * @throws JobQueueDataException if there is a data storage error while fetching the jobs. 
+ */ + @CloseDBIfOpened + private JobPaginatedResult getJobsFilterByNameDateAndState( + final JobStateQueryParameters parameters) throws JobQueueDataException { + + final var queueName = parameters.queueName().orElseThrow(); + final var startDate = parameters.startDate().orElseThrow(); + final var endDate = parameters.endDate().orElseThrow(); + final var states = parameters.states(); + final var page = parameters.page(); + final var pageSize = parameters.pageSize(); + final var orderByColumn = parameters.orderByColumn(); + final var filterDateColumn = parameters.filterDateColumn().orElseThrow(); + + String statesParam = String.join(", ", + Collections.nCopies(parameters.states().length, "?")); + + var query = GET_JOBS_QUERY_BY_QUEUE_AND_STATE_IN_DATE_RANGE + .replace(REPLACE_TOKEN_PARAMETERS, "(" + statesParam + ")") + .replace(REPLACE_TOKEN_ORDER_BY, orderByColumn) + .replace(REPLACE_TOKEN_DATE_COLUMN, filterDateColumn); + + try { + DotConnect dc = new DotConnect(); + dc.setSQL(query); + dc.addParam(queueName); + for (JobState state : states) { + dc.addParam(state.name()); + } + dc.addParam(Timestamp.valueOf(startDate)); + dc.addParam(Timestamp.valueOf(endDate)); + dc.addParam(queueName); // Repeated for paginated_data CTE + for (JobState state : states) { + dc.addParam(state.name()); + } + dc.addParam(Timestamp.valueOf(startDate)); + dc.addParam(Timestamp.valueOf(endDate)); + dc.addParam(pageSize); + dc.addParam((page - 1) * pageSize); + + return jobPaginatedResult(page, pageSize, dc); + } catch (DotDataException e) { + final var message = "Database error while fetching jobs by queue and state"; + Logger.error(this, message, e); + throw new JobQueueDataException(message, e); + } + } + /** * Helper method to create a JobPaginatedResult from a DotConnect query result. 
* diff --git a/dotCMS/src/main/java/com/dotcms/rest/api/v1/job/JobQueueHelper.java b/dotCMS/src/main/java/com/dotcms/rest/api/v1/job/JobQueueHelper.java index d57d5d3edfaf..c3e86f797c71 100644 --- a/dotCMS/src/main/java/com/dotcms/rest/api/v1/job/JobQueueHelper.java +++ b/dotCMS/src/main/java/com/dotcms/rest/api/v1/job/JobQueueHelper.java @@ -3,6 +3,7 @@ import static com.dotcms.jobs.business.util.JobUtil.roundedProgress; import com.dotcms.jobs.business.api.JobQueueManagerAPI; +import com.dotcms.jobs.business.api.events.JobWatcher; import com.dotcms.jobs.business.error.JobProcessorNotFoundException; import com.dotcms.jobs.business.job.Job; import com.dotcms.jobs.business.job.JobPaginatedResult; @@ -19,7 +20,6 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.google.common.annotations.VisibleForTesting; import com.liferay.portal.model.User; -import java.io.IOException; import java.io.InputStream; import java.time.format.DateTimeFormatter; import java.util.HashMap; @@ -31,10 +31,7 @@ import javax.enterprise.context.ApplicationScoped; import javax.inject.Inject; import javax.servlet.http.HttpServletRequest; -import javax.ws.rs.core.MediaType; import org.glassfish.jersey.media.multipart.FormDataContentDisposition; -import org.glassfish.jersey.media.sse.EventOutput; -import org.glassfish.jersey.media.sse.OutboundEvent; /** * Helper class for interacting with the job queue system. This class provides methods for creating, cancelling, and listing jobs. 
@@ -182,9 +179,28 @@ void cancelJob(String jobId) throws DotDataException { * @param jobId The ID of the job * @param watcher The watcher */ - void watchJob(String jobId, Consumer watcher) { + JobWatcher watchJob(String jobId, Consumer watcher) { // if it does then watch it - jobQueueManagerAPI.watchJob(jobId, watcher); + return jobQueueManagerAPI.watchJob(jobId, watcher); + } + + /** + * Removes a watcher from a job + * + * @param jobId The ID of the job + * @param watcher The watcher to remove + */ + void removeWatcher(final String jobId, final JobWatcher watcher) { + jobQueueManagerAPI.removeJobWatcher(jobId, watcher); + } + + /** + * Removes all watchers from a job + * + * @param jobId The ID of the job + */ + void removeAllWatchers(final String jobId) { + jobQueueManagerAPI.removeAllJobWatchers(jobId); } /** @@ -251,6 +267,22 @@ JobPaginatedResult getCompletedJobs(int page, int pageSize) { return JobPaginatedResult.builder().build(); } + /** + * Retrieves a list of successful jobs + * + * @param page The page number + * @param pageSize The number of jobs per page + * @return A result object containing the list of successful jobs and pagination information. + */ + JobPaginatedResult getSuccessfulJobs(int page, int pageSize) { + try { + return jobQueueManagerAPI.getSuccessfulJobs(page, pageSize); + } catch (DotDataException e) { + Logger.error(this.getClass(), "Error fetching successful jobs", e); + } + return JobPaginatedResult.builder().build(); + } + /** * Retrieves a list of canceled jobs * @@ -272,7 +304,7 @@ JobPaginatedResult getCanceledJobs(int page, int pageSize) { * * @param page The page number * @param pageSize The number of jobs per page - * @return A result object containing the list of completed jobs and pagination information. + * @return A result object containing the list of failed jobs and pagination information. 
*/ JobPaginatedResult getFailedJobs(int page, int pageSize) { try { @@ -283,6 +315,22 @@ JobPaginatedResult getFailedJobs(int page, int pageSize) { return JobPaginatedResult.builder().build(); } + /** + * Retrieves a list of abandoned jobs + * + * @param page The page number + * @param pageSize The number of jobs per page + * @return A result object containing the list of abandoned jobs and pagination information. + */ + JobPaginatedResult getAbandonedJobs(int page, int pageSize) { + try { + return jobQueueManagerAPI.getAbandonedJobs(page, pageSize); + } catch (DotDataException e) { + Logger.error(this.getClass(), "Error fetching abandoned jobs", e); + } + return JobPaginatedResult.builder().build(); + } + /** * Retrieves a list of active jobs for a specific queue. * @return JobPaginatedResult @@ -358,43 +406,6 @@ Job getJobForSSE(final String jobId) throws DotDataException { return job; } - /** - * Send an error event and close the connection - * - * @param errorName The name of the error event - * @param errorCode The error code - * @param eventOutput The event output - */ - void sendErrorAndClose(final String errorName, final String errorCode, - final EventOutput eventOutput) { - - try { - OutboundEvent event = new OutboundEvent.Builder() - .mediaType(MediaType.TEXT_HTML_TYPE) - .name(errorName) - .data(String.class, errorCode) - .build(); - eventOutput.write(event); - closeSSEConnection(eventOutput); - } catch (IOException e) { - Logger.error(this, "Error sending error event", e); - closeSSEConnection(eventOutput); - } - } - - /** - * Close the SSE connection - * - * @param eventOutput The event output - */ - void closeSSEConnection(final EventOutput eventOutput) { - try { - eventOutput.close(); - } catch (IOException e) { - Logger.error(this, "Error closing SSE connection", e); - } - } - /** * Check if a job is in a terminal state * @@ -402,8 +413,9 @@ void closeSSEConnection(final EventOutput eventOutput) { * @return true if the job is in a terminal state, 
false otherwise */ boolean isTerminalState(final JobState state) { - return state == JobState.COMPLETED || - state == JobState.FAILED || + return state == JobState.SUCCESS || + state == JobState.FAILED_PERMANENTLY || + state == JobState.ABANDONED_PERMANENTLY || state == JobState.CANCELED; } diff --git a/dotCMS/src/main/java/com/dotcms/rest/api/v1/job/JobQueueResource.java b/dotCMS/src/main/java/com/dotcms/rest/api/v1/job/JobQueueResource.java index 268ca26b38b4..884e64900325 100644 --- a/dotCMS/src/main/java/com/dotcms/rest/api/v1/job/JobQueueResource.java +++ b/dotCMS/src/main/java/com/dotcms/rest/api/v1/job/JobQueueResource.java @@ -5,17 +5,16 @@ import com.dotcms.jobs.business.job.JobPaginatedResult; import com.dotcms.rest.ResponseEntityView; import com.dotcms.rest.WebResource; +import com.dotcms.rest.WebResource.InitBuilder; import com.dotcms.rest.exception.mapper.ExceptionMapperUtil; import com.dotmarketing.exception.DotDataException; -import com.dotmarketing.util.Logger; import com.fasterxml.jackson.core.JsonProcessingException; import graphql.VisibleForTesting; -import java.io.IOException; import java.util.Map; import java.util.Set; -import java.util.function.Consumer; import javax.inject.Inject; import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; import javax.ws.rs.BeanParam; import javax.ws.rs.Consumes; import javax.ws.rs.DefaultValue; @@ -29,7 +28,6 @@ import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import org.glassfish.jersey.media.sse.EventOutput; -import org.glassfish.jersey.media.sse.OutboundEvent; import org.glassfish.jersey.media.sse.SseFeature; @Path("/v1/jobs") @@ -37,20 +35,19 @@ public class JobQueueResource { private final WebResource webResource; private final JobQueueHelper helper; - private final SSEConnectionManager sseConnectionManager; + private final SSEMonitorUtil sseMonitorUtil; @Inject - public JobQueueResource(final JobQueueHelper helper, - final SSEConnectionManager 
sseConnectionManager) { - this(new WebResource(), helper, sseConnectionManager); + public JobQueueResource(final JobQueueHelper helper, final SSEMonitorUtil sseMonitorUtil) { + this(new WebResource(), helper, sseMonitorUtil); } @VisibleForTesting public JobQueueResource(WebResource webResource, JobQueueHelper helper, - SSEConnectionManager sseConnectionManager) { + final SSEMonitorUtil sseMonitorUtil) { this.webResource = webResource; this.helper = helper; - this.sseConnectionManager = sseConnectionManager; + this.sseMonitorUtil = sseMonitorUtil; } @POST @@ -58,14 +55,14 @@ public JobQueueResource(WebResource webResource, JobQueueHelper helper, @Consumes(MediaType.MULTIPART_FORM_DATA) @Produces(MediaType.APPLICATION_JSON) public Response createJob( - @Context HttpServletRequest request, + @Context final HttpServletRequest request, @Context final HttpServletResponse response, @PathParam("queueName") String queueName, @BeanParam JobParams form) throws JsonProcessingException, DotDataException { - final var initDataObject = new WebResource.InitBuilder(webResource) + final var initDataObject = new InitBuilder(webResource) .requiredBackendUser(true) .requiredFrontendUser(false) - .requestAndResponse(request, null) + .requestAndResponse(request, response) .rejectWhenNoUser(true) .init(); @@ -83,14 +80,14 @@ public Response createJob( @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) public Response createJob( - @Context HttpServletRequest request, + @Context final HttpServletRequest request, @Context final HttpServletResponse response, @PathParam("queueName") String queueName, Map parameters) throws DotDataException { - final var initDataObject = new WebResource.InitBuilder(webResource) + final var initDataObject = new InitBuilder(webResource) .requiredBackendUser(true) .requiredFrontendUser(false) - .requestAndResponse(request, null) + .requestAndResponse(request, response) .rejectWhenNoUser(true) .init(); @@ -106,11 +103,12 @@ public Response 
createJob( @GET @Path("/queues") @Produces(MediaType.APPLICATION_JSON) - public ResponseEntityView> getQueues(@Context HttpServletRequest request) { - new WebResource.InitBuilder(webResource) + public ResponseEntityView> getQueues( + @Context final HttpServletRequest request, @Context final HttpServletResponse response) { + new InitBuilder(webResource) .requiredBackendUser(true) .requiredFrontendUser(false) - .requestAndResponse(request, null) + .requestAndResponse(request, response) .rejectWhenNoUser(true) .init(); return new ResponseEntityView<>(helper.getQueueNames()); @@ -119,14 +117,14 @@ public ResponseEntityView> getQueues(@Context HttpServletRequest req @GET @Path("/{jobId}/status") @Produces(MediaType.APPLICATION_JSON) - public ResponseEntityView getJobStatus(@Context HttpServletRequest request, - @PathParam("jobId") String jobId) - throws DotDataException { + public ResponseEntityView getJobStatus( + @Context final HttpServletRequest request, @Context final HttpServletResponse response, + @PathParam("jobId") String jobId) throws DotDataException { - new WebResource.InitBuilder(webResource) + new InitBuilder(webResource) .requiredBackendUser(true) .requiredFrontendUser(false) - .requestAndResponse(request, null) + .requestAndResponse(request, response) .rejectWhenNoUser(true) .init(); @@ -138,12 +136,13 @@ public ResponseEntityView getJobStatus(@Context HttpServletRequest request, @Path("/{jobId}/cancel") @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.WILDCARD) - public ResponseEntityView cancelJob(@Context HttpServletRequest request, + public ResponseEntityView cancelJob( + @Context final HttpServletRequest request, @Context final HttpServletResponse response, @PathParam("jobId") String jobId) throws DotDataException { - new WebResource.InitBuilder(webResource) + new InitBuilder(webResource) .requiredBackendUser(true) .requiredFrontendUser(false) - .requestAndResponse(request, null) + .requestAndResponse(request, response) 
.rejectWhenNoUser(true) .init(); helper.cancelJob(jobId); @@ -153,14 +152,15 @@ public ResponseEntityView cancelJob(@Context HttpServletRequest request, @GET @Path("/{queueName}/active") @Produces(MediaType.APPLICATION_JSON) - public ResponseEntityView activeJobs(@Context HttpServletRequest request, + public ResponseEntityView activeJobs( + @Context final HttpServletRequest request, @Context final HttpServletResponse response, @PathParam("queueName") String queueName, @QueryParam("page") @DefaultValue("1") int page, @QueryParam("pageSize") @DefaultValue("20") int pageSize) { - new WebResource.InitBuilder(webResource) + new InitBuilder(webResource) .requiredBackendUser(true) .requiredFrontendUser(false) - .requestAndResponse(request, null) + .requestAndResponse(request, response) .rejectWhenNoUser(true) .init(); final JobPaginatedResult result = helper.getActiveJobs(queueName, page, pageSize); @@ -168,133 +168,83 @@ public ResponseEntityView activeJobs(@Context HttpServletReq } @GET - @Path("/{jobId}/monitor") - @Produces(SseFeature.SERVER_SENT_EVENTS) - public EventOutput monitorJob(@Context HttpServletRequest request, - @PathParam("jobId") String jobId) { - - new WebResource.InitBuilder(webResource) + @Produces(MediaType.APPLICATION_JSON) + public ResponseEntityView listJobs( + @Context final HttpServletRequest request, @Context final HttpServletResponse response, + @QueryParam("page") @DefaultValue("1") int page, + @QueryParam("pageSize") @DefaultValue("20") int pageSize) { + new InitBuilder(webResource) .requiredBackendUser(true) .requiredFrontendUser(false) - .requestAndResponse(request, null) + .requestAndResponse(request, response) .rejectWhenNoUser(true) .init(); - - final EventOutput eventOutput = new EventOutput(); - - try { - Job job = helper.getJobForSSE(jobId); - - if (job == null) { - helper.sendErrorAndClose("job-not-found", "404", eventOutput); - return eventOutput; - } - - if (helper.isNotWatchable(job)) { - 
helper.sendErrorAndClose(String.format("job-not-watchable [%s]", - job.state()), "400", eventOutput); - return eventOutput; - } - - if (!sseConnectionManager.canAcceptNewConnection(jobId)) { - helper.sendErrorAndClose("too-many-connections", "429", eventOutput); - return eventOutput; - } - - // Callback for watching job updates and sending them to the client - Consumer jobWatcher = watched -> { - if (!eventOutput.isClosed()) { - try { - OutboundEvent event = new OutboundEvent.Builder() - .mediaType(MediaType.APPLICATION_JSON_TYPE) - .name("job-update") - .data(Map.class, helper.getJobStatusInfo(watched)) - .build(); - eventOutput.write(event); - - // If job is complete/failed/cancelled, close the connection - if (helper.isTerminalState(watched.state())) { - sseConnectionManager.closeJobConnections(jobId); - } - - } catch (IOException e) { - Logger.error(this, "Error writing SSE event", e); - sseConnectionManager.closeJobConnections(jobId); - } - } - }; - - // Register the connection and watcher - sseConnectionManager.addConnection(jobId, eventOutput); - - // Start watching the job - helper.watchJob(job.id(), jobWatcher); - - } catch (DotDataException e) { - Logger.error(this, "Error setting up job monitor", e); - helper.closeSSEConnection(eventOutput); - } - - return eventOutput; + final JobPaginatedResult result = helper.getJobs(page, pageSize); + return new ResponseEntityView<>(result); } @GET + @Path("/active") @Produces(MediaType.APPLICATION_JSON) - public ResponseEntityView listJobs(@Context HttpServletRequest request, + public ResponseEntityView activeJobs( + @Context final HttpServletRequest request, @Context final HttpServletResponse response, @QueryParam("page") @DefaultValue("1") int page, @QueryParam("pageSize") @DefaultValue("20") int pageSize) { - new WebResource.InitBuilder(webResource) + new InitBuilder(webResource) .requiredBackendUser(true) .requiredFrontendUser(false) - .requestAndResponse(request, null) + .requestAndResponse(request, response) 
.rejectWhenNoUser(true) .init(); - final JobPaginatedResult result = helper.getJobs(page, pageSize); + final JobPaginatedResult result = helper.getActiveJobs(page, pageSize); return new ResponseEntityView<>(result); } @GET - @Path("/active") + @Path("/completed") @Produces(MediaType.APPLICATION_JSON) - public ResponseEntityView activeJobs(@Context HttpServletRequest request, + public ResponseEntityView completedJobs( + @Context final HttpServletRequest request, @Context final HttpServletResponse response, @QueryParam("page") @DefaultValue("1") int page, @QueryParam("pageSize") @DefaultValue("20") int pageSize) { - new WebResource.InitBuilder(webResource) + new InitBuilder(webResource) .requiredBackendUser(true) .requiredFrontendUser(false) - .requestAndResponse(request, null) + .requestAndResponse(request, response) .rejectWhenNoUser(true) .init(); - final JobPaginatedResult result = helper.getActiveJobs(page, pageSize); + final JobPaginatedResult result = helper.getCompletedJobs(page, pageSize); return new ResponseEntityView<>(result); } @GET - @Path("/completed") + @Path("/successful") @Produces(MediaType.APPLICATION_JSON) - public ResponseEntityView completedJobs(@Context HttpServletRequest request, + public ResponseEntityView successfulJobs( + @Context final HttpServletRequest request, @Context final HttpServletResponse response, @QueryParam("page") @DefaultValue("1") int page, @QueryParam("pageSize") @DefaultValue("20") int pageSize) { - new WebResource.InitBuilder(webResource) + new InitBuilder(webResource) .requiredBackendUser(true) .requiredFrontendUser(false) - .requestAndResponse(request, null) + .requestAndResponse(request, response) .rejectWhenNoUser(true) .init(); - final JobPaginatedResult result = helper.getCompletedJobs(page, pageSize); + final JobPaginatedResult result = helper.getSuccessfulJobs(page, pageSize); return new ResponseEntityView<>(result); } @GET @Path("/canceled") @Produces(MediaType.APPLICATION_JSON) - public ResponseEntityView 
canceledJobs(@Context HttpServletRequest request, + public ResponseEntityView canceledJobs( + @Context final HttpServletRequest request, @Context final HttpServletResponse response, @QueryParam("page") @DefaultValue("1") int page, @QueryParam("pageSize") @DefaultValue("20") int pageSize) { - new WebResource.InitBuilder(webResource) + new InitBuilder(webResource) .requiredBackendUser(true) .requiredFrontendUser(false) - .requestAndResponse(request, null) + .requestAndResponse(request, response) .rejectWhenNoUser(true) .init(); final JobPaginatedResult result = helper.getCanceledJobs(page, pageSize); @@ -304,17 +254,54 @@ public ResponseEntityView canceledJobs(@Context HttpServletR @GET @Path("/failed") @Produces(MediaType.APPLICATION_JSON) - public ResponseEntityView failedJobs(@Context HttpServletRequest request, + public ResponseEntityView failedJobs( + @Context final HttpServletRequest request, @Context final HttpServletResponse response, @QueryParam("page") @DefaultValue("1") int page, @QueryParam("pageSize") @DefaultValue("20") int pageSize) { - new WebResource.InitBuilder(webResource) + new InitBuilder(webResource) .requiredBackendUser(true) .requiredFrontendUser(false) - .requestAndResponse(request, null) + .requestAndResponse(request, response) .rejectWhenNoUser(true) .init(); final JobPaginatedResult result = helper.getFailedJobs(page, pageSize); return new ResponseEntityView<>(result); } + @GET + @Path("/abandoned") + @Produces(MediaType.APPLICATION_JSON) + public ResponseEntityView abandonedJobs( + @Context final HttpServletRequest request, @Context final HttpServletResponse response, + @QueryParam("page") @DefaultValue("1") int page, + @QueryParam("pageSize") @DefaultValue("20") int pageSize) { + new InitBuilder(webResource) + .requiredBackendUser(true) + .requiredFrontendUser(false) + .requestAndResponse(request, response) + .rejectWhenNoUser(true) + .init(); + final JobPaginatedResult result = helper.getAbandonedJobs(page, pageSize); + return new 
ResponseEntityView<>(result); + } + + @GET + @Path("/{jobId}/monitor") + @Produces(SseFeature.SERVER_SENT_EVENTS) + @SuppressWarnings("java:S1854") // jobWatcher assignment is needed for cleanup in catch blocks + public EventOutput monitorJob( + @Context final HttpServletRequest request, @Context final HttpServletResponse response, + @PathParam("jobId") String jobId) { + + new InitBuilder(webResource) + .requiredBackendUser(true) + .requiredFrontendUser(false) + .requestAndResponse(request, response) + .rejectWhenNoUser(true) + .init(); + + // Set up job monitoring + return sseMonitorUtil.monitorJob(jobId); + } + } \ No newline at end of file diff --git a/dotCMS/src/main/java/com/dotcms/rest/api/v1/job/SSEConnectionManager.java b/dotCMS/src/main/java/com/dotcms/rest/api/v1/job/SSEConnectionManager.java deleted file mode 100644 index 104b824f6429..000000000000 --- a/dotCMS/src/main/java/com/dotcms/rest/api/v1/job/SSEConnectionManager.java +++ /dev/null @@ -1,270 +0,0 @@ -package com.dotcms.rest.api.v1.job; - -import com.dotmarketing.util.Config; -import com.dotmarketing.util.Logger; -import io.vavr.Lazy; -import java.io.IOException; -import java.time.LocalDateTime; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; -import javax.annotation.PreDestroy; -import javax.enterprise.context.ApplicationScoped; -import org.glassfish.jersey.media.sse.EventOutput; - -/** - * Manages Server-Sent Events (SSE) connections for job monitoring. This class provides - * functionality for tracking, limiting, and cleaning up SSE connections across multiple jobs. - * - *

Key features include: - *

    - *
  • Connection limits per job and system-wide
  • - *
  • Automatic connection timeout and cleanup
  • - *
  • Thread-safe connection management
  • - *
  • Proper resource cleanup on shutdown
  • - *
- * - *

Configuration properties: - *

    - *
  • {@code MAX_SSE_CONNECTIONS_PER_JOB} - Maximum number of concurrent connections per job (default: 5)
  • - *
  • {@code MAX_SSE_TOTAL_CONNECTIONS} - Maximum total concurrent connections across all jobs (default: 50)
  • - *
  • {@code SSE_CONNECTION_TIMEOUT_MINUTES} - Connection timeout in minutes (default: 30)
  • - *
- * - *

Usage example: - *

{@code
- * SSEConnectionManager manager = new SSEConnectionManager();
- *
- * // Check if new connection can be accepted
- * if (manager.canAcceptNewConnection(jobId)) {
- *     // Add new connection
- *     manager.addConnection(jobId, eventOutput);
- * }
- *
- * // Close connections when job completes
- * manager.closeJobConnections(jobId);
- * }
- */ -@ApplicationScoped -public class SSEConnectionManager { - - // Add status tracking - private volatile boolean isShutdown = false; - - private static final Lazy MAX_SSE_CONNECTIONS_PER_JOB = - Lazy.of(() -> Config.getIntProperty("MAX_SSE_CONNECTIONS_PER_JOB", 5)); - - private static final Lazy MAX_SSE_TOTAL_CONNECTIONS = - Lazy.of(() -> Config.getIntProperty("MAX_SSE_TOTAL_CONNECTIONS", 50)); - - private static final Lazy SSE_CONNECTION_TIMEOUT_MINUTES = - Lazy.of(() -> Config.getIntProperty("SSE_CONNECTION_TIMEOUT_MINUTES", 30)); - - private final ConcurrentMap> jobConnections = - new ConcurrentHashMap<>(); - private final ScheduledExecutorService timeoutExecutor = - Executors.newSingleThreadScheduledExecutor(); - - /** - * Shuts down the SSE connection manager and cleans up all resources. This method closes all - * active connections and shuts down the timeout executor. After shutdown, no new connections - * can be added. - */ - @PreDestroy - public void shutdown() { - - isShutdown = true; - - try { - closeAllConnections(); - } finally { - timeoutExecutor.shutdown(); - try { - if (!timeoutExecutor.awaitTermination(30, TimeUnit.SECONDS)) { - timeoutExecutor.shutdownNow(); - } - } catch (InterruptedException e) { - timeoutExecutor.shutdownNow(); - Thread.currentThread().interrupt(); - } - } - } - - /** - * Checks if a new SSE connection can be accepted for the given job. This method verifies both - * per-job and system-wide connection limits. - * - * @param jobId The ID of the job for which to check connection availability - * @return true if a new connection can be accepted, false otherwise - */ - public boolean canAcceptNewConnection(String jobId) { - if (getTotalConnections() >= MAX_SSE_TOTAL_CONNECTIONS.get()) { - return false; - } - - Set connections = jobConnections.get(jobId); - return connections == null || connections.size() < MAX_SSE_CONNECTIONS_PER_JOB.get(); - } - - /** - * Adds a new SSE connection for a job. 
The connection will be automatically closed after the - * configured timeout period. - * - * @param jobId The ID of the job to monitor - * @param eventOutput The EventOutput instance representing the SSE connection - * @throws IllegalStateException if the manager is shut down - */ - public void addConnection(String jobId, EventOutput eventOutput) { - - if (isShutdown) { - throw new IllegalStateException("SSEConnectionManager is shut down"); - } - - SSEConnection connection = new SSEConnection(jobId, eventOutput); - jobConnections.computeIfAbsent(jobId, k -> ConcurrentHashMap.newKeySet()).add(connection); - - // Schedule connection timeout - timeoutExecutor.schedule(() -> { - try { - removeConnection(jobId, connection); - } catch (Exception e) { - Logger.error(this, "Error removing expired connection", e); - } - }, SSE_CONNECTION_TIMEOUT_MINUTES.get(), TimeUnit.MINUTES); - } - - /** - * Removes a specific SSE connection for a job. If this was the last connection for the job, the - * job entry is removed from tracking. - * - * @param jobId The ID of the job - * @param connection The connection to remove - */ - public void removeConnection(String jobId, SSEConnection connection) { - Set connections = jobConnections.get(jobId); - if (connections != null) { - connections.remove(connection); - connection.close(); - - if (connections.isEmpty()) { - jobConnections.remove(jobId); - } - } - } - - /** - * Gets the total number of active SSE connections across all jobs. - * - * @return The total number of active connections - */ - private int getTotalConnections() { - return jobConnections.values().stream() - .mapToInt(Set::size) - .sum(); - } - - /** - * Closes all active SSE connections and clears connection tracking. - */ - private void closeAllConnections() { - jobConnections.values().forEach(connections -> - connections.forEach(SSEConnection::close) - ); - jobConnections.clear(); - } - - /** - * Closes all SSE connections for a specific job. 
- * - * @param jobId The ID of the job whose connections should be closed - */ - public void closeJobConnections(String jobId) { - Set connections = jobConnections.remove(jobId); - if (connections != null) { - connections.forEach(SSEConnection::close); - } - } - - /** - * Gets the number of active connections for a specific job. - * - * @param jobId The ID of the job - * @return The number of active connections for the job - */ - public int getConnectionCount(String jobId) { - Set connections = jobConnections.get(jobId); - return connections != null ? connections.size() : 0; - } - - /** - * Gets information about the current state of SSE connections. - * - * @return A map containing connection statistics: - * - totalConnections: Total number of active connections - * - activeJobs: Number of jobs with active connections - */ - public Map getConnectionInfo() { - return Map.of( - "totalConnections", getTotalConnections(), - "activeJobs", jobConnections.size() - ); - } - - /** - * Represents a single SSE connection for a job. Each connection tracks its creation time and - * handles its own cleanup. - */ - public static class SSEConnection { - - private final String jobId; - private final EventOutput eventOutput; - private final LocalDateTime createdAt; - - /** - * Creates a new SSE connection. - * - * @param jobId The ID of the job this connection is monitoring - * @param eventOutput The EventOutput instance representing the SSE connection - */ - public SSEConnection(String jobId, EventOutput eventOutput) { - this.jobId = jobId; - this.eventOutput = eventOutput; - this.createdAt = LocalDateTime.now(); - } - - /** - * Closes this SSE connection. - */ - public void close() { - try { - eventOutput.close(); - } catch (IOException e) { - Logger.error(SSEConnection.class, "Error closing SSE connection", e); - } - } - - /** - * Checks if this connection has exceeded its timeout period. 
- * - * @return true if the connection has expired, false otherwise - */ - public boolean isExpired() { - return LocalDateTime.now().isAfter( - createdAt.plusMinutes(SSE_CONNECTION_TIMEOUT_MINUTES.get())); - } - - /** - * Gets the ID of the job this connection is monitoring. - * - * @return The job ID - */ - public String getJobId() { - return jobId; - } - } - -} diff --git a/dotCMS/src/main/java/com/dotcms/rest/api/v1/job/SSEMonitorUtil.java b/dotCMS/src/main/java/com/dotcms/rest/api/v1/job/SSEMonitorUtil.java new file mode 100644 index 000000000000..a213355d0e3f --- /dev/null +++ b/dotCMS/src/main/java/com/dotcms/rest/api/v1/job/SSEMonitorUtil.java @@ -0,0 +1,234 @@ +package com.dotcms.rest.api.v1.job; + +import static javax.ws.rs.core.Response.Status.BAD_REQUEST; +import static javax.ws.rs.core.Response.Status.NOT_FOUND; + +import com.dotcms.jobs.business.api.events.JobWatcher; +import com.dotcms.jobs.business.job.Job; +import com.dotmarketing.exception.DotRuntimeException; +import com.dotmarketing.util.Logger; +import java.io.IOException; +import java.util.Map; +import java.util.function.Consumer; +import javax.enterprise.context.ApplicationScoped; +import javax.inject.Inject; +import javax.ws.rs.core.MediaType; +import org.glassfish.jersey.media.sse.EventOutput; +import org.glassfish.jersey.media.sse.OutboundEvent; +import org.glassfish.jersey.media.sse.OutboundEvent.Builder; + +/** + * Utility class for managing Server-Sent Events (SSE) job monitoring. This class handles the setup, + * maintenance, and cleanup of SSE connections for monitoring job progress and status updates. + * + *

+ * <p>Key responsibilities include:
+ * <ul>
+ *   <li>Setting up SSE connections for job monitoring</li>
+ *   <li>Managing job watchers and event streams</li>
+ *   <li>Handling error conditions and connection cleanup</li>
+ *   <li>Coordinating between job updates and SSE event publishing</li>
+ * </ul>
+ *
+ * <p>Usage example:
+ * <pre>{@code
+ * @Inject
+ * private SSEMonitorUtil sseMonitorUtil;
+ *
+ * // Set up job monitoring
+ * EventOutput eventOutput = sseMonitorUtil.monitorJob(jobId);
+ * }</pre>
+ * + * @see JobQueueHelper + */ +@ApplicationScoped +public class SSEMonitorUtil { + + private final JobQueueHelper helper; + + public SSEMonitorUtil() { + // Default constructor required for CDI + this.helper = null; + } + + @Inject + public SSEMonitorUtil(JobQueueHelper helper) { + this.helper = helper; + } + + /** + * Sets up job monitoring via SSE + * + * @param jobId The job ID to monitor + * @return EventOutput for streaming updates + */ + @SuppressWarnings("java:S1854") // jobWatcher assignment is needed for cleanup in catch blocks + public EventOutput monitorJob(final String jobId) { + + final var eventOutput = new EventOutput(); + final var resources = new MonitorResources(jobId, eventOutput, helper); + + try { + + Job job = helper.getJobForSSE(jobId); + if (job == null) { + sendErrorAndClose(SSEError.JOB_NOT_FOUND, resources); + return eventOutput; + } + + if (helper.isNotWatchable(job)) { + sendErrorAndClose(SSEError.JOB_NOT_WATCHABLE, resources); + return eventOutput; + } + + // Callback for watching job updates and sending them to the client + Consumer jobWatcherConsumer = watched -> { + if (!eventOutput.isClosed()) { + try { + OutboundEvent event = new Builder() + .mediaType(MediaType.APPLICATION_JSON_TYPE) + .name("job-update") + .data(Map.class, helper.getJobStatusInfo(watched)) + .build(); + eventOutput.write(event); + + // If job is in a completed state, close the connection as no further + // updates will be available + if (helper.isTerminalState(watched.state())) { + resources.close(); + } + + } catch (IOException e) { + final var errorMessage = "Error writing SSE event"; + Logger.error(this, errorMessage, e); + + // Make sure to close the connection + resources.close(); + + // Re-throw the IOException to be caught by the outer catch block in the + // RealTimeJobMonitor that will clean up the job watcher + throw new DotRuntimeException(errorMessage, e); + } + } + }; + + // Start watching the job + final var jobWatcher = 
helper.watchJob(job.id(), jobWatcherConsumer); + resources.jobWatcher(jobWatcher); + + return eventOutput; + } catch (Exception e) { + final var errorMessage = "Error setting up job monitor"; + Logger.error(this, errorMessage, e); + + // Make sure to close the connection and remove the job watcher + resources.close(); + + throw new DotRuntimeException(errorMessage, e); + } + } + + /** + * Send an error event and close the connection + * + * @param error The error to send + * @param resources The current monitoring resources + * @throws IOException If there is an error writing the event + */ + private void sendErrorAndClose(final SSEError error, MonitorResources resources) + throws IOException { + OutboundEvent event = new OutboundEvent.Builder() + .mediaType(MediaType.TEXT_HTML_TYPE) + .name(error.getName()) + .data(String.class, String.valueOf(error.getCode())) + .build(); + resources.eventOutput().write(event); + resources.close(); + } + + /** + * Enumeration representing various SSE (Server-Sent Events) error states with associated error + * names and HTTP status codes. It is used to identify specific error conditions related to job + * monitoring. + */ + private enum SSEError { + + JOB_NOT_FOUND("job-not-found", NOT_FOUND.getStatusCode()), + JOB_NOT_WATCHABLE("job-not-watchable", BAD_REQUEST.getStatusCode()); + + private final String name; + private final int code; + + SSEError(String name, int code) { + this.name = name; + this.code = code; + } + + public String getName() { + return name; + } + + public int getCode() { + return code; + } + } + + /** + * A resource management class that handles cleanup of SSE monitoring resources. + */ + private static class MonitorResources { + + private final EventOutput eventOutput; + private JobWatcher jobWatcher; + private final String jobId; + private final JobQueueHelper helper; + + /** + * Creates a new MonitorResources instance to manage SSE monitoring resources. 
+ * + * @param jobId The ID of the job being monitored + * @param eventOutput The SSE connection for job updates + * @param helper Helper for job queue operations + */ + MonitorResources(String jobId, EventOutput eventOutput, JobQueueHelper helper) { + this.jobId = jobId; + this.eventOutput = eventOutput; + this.helper = helper; + } + + /** + * Sets the job watcher for this monitoring session. + * + * @param watcher The job watcher to associate with this monitoring session + */ + void jobWatcher(JobWatcher watcher) { + this.jobWatcher = watcher; + } + + /** + * Gets the SSE connection for this monitoring session. + * + * @return The SSE connection + */ + EventOutput eventOutput() { + return eventOutput; + } + + /** + * Closes and cleans up all monitoring resources. This includes closing the SSE connection + * and removing the job watcher if one exists. + */ + void close() { + if (eventOutput != null) { + try { + eventOutput.close(); + } catch (IOException e) { + Logger.error(MonitorResources.class, "Error closing event output", e); + } + } + if (jobWatcher != null) { + helper.removeWatcher(jobId, jobWatcher); + } + } + } + +} diff --git a/dotCMS/src/main/java/com/dotmarketing/util/FileUtil.java b/dotCMS/src/main/java/com/dotmarketing/util/FileUtil.java index 7eb7f040cec9..1d0c30c3c396 100644 --- a/dotCMS/src/main/java/com/dotmarketing/util/FileUtil.java +++ b/dotCMS/src/main/java/com/dotmarketing/util/FileUtil.java @@ -9,20 +9,19 @@ import com.liferay.util.StringPool; import io.vavr.Lazy; import io.vavr.control.Try; -import java.nio.charset.Charset; -import org.apache.commons.lang3.RandomStringUtils; -import org.apache.commons.lang3.StringUtils; - import java.io.BufferedInputStream; +import java.io.BufferedReader; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileFilter; +import java.io.FileReader; import java.io.FileWriter; import java.io.FilenameFilter; import java.io.IOException; import java.io.InputStream; import 
java.io.OutputStream; import java.net.URL; +import java.nio.charset.Charset; import java.nio.file.FileVisitResult; import java.nio.file.Files; import java.nio.file.Path; @@ -36,6 +35,8 @@ import java.util.Optional; import java.util.Set; import java.util.stream.Stream; +import org.apache.commons.lang3.RandomStringUtils; +import org.apache.commons.lang3.StringUtils; import org.mozilla.universalchardet.UniversalDetector; /** @@ -521,7 +522,21 @@ public static Charset detectEncodeType(final File file) { } + /** + * Count the number of lines in the file + * + * @param file the file to count the lines + * @return the number of lines in the file + */ + public static Long countFileLines(final File file) throws IOException { + long totalCount; + try (BufferedReader reader = new BufferedReader(new FileReader(file))) { + totalCount = reader.lines().count(); + } + + return totalCount; + } } diff --git a/dotcms-integration/src/test/java/com/dotcms/jobs/business/api/JobQueueManagerAPIIntegrationTest.java b/dotcms-integration/src/test/java/com/dotcms/jobs/business/api/JobQueueManagerAPIIntegrationTest.java index 5ea4d2974f29..29c9b8372ef0 100644 --- a/dotcms-integration/src/test/java/com/dotcms/jobs/business/api/JobQueueManagerAPIIntegrationTest.java +++ b/dotcms-integration/src/test/java/com/dotcms/jobs/business/api/JobQueueManagerAPIIntegrationTest.java @@ -132,7 +132,7 @@ void test_CreateAndProcessJob() throws Exception { // Wait for the job to be processed CountDownLatch latch = new CountDownLatch(1); jobQueueManagerAPI.watchJob(jobId, job -> { - if (job.state() == JobState.COMPLETED) { + if (job.state() == JobState.SUCCESS) { latch.countDown(); } }); @@ -145,8 +145,8 @@ void test_CreateAndProcessJob() throws Exception { .pollInterval(100, TimeUnit.MILLISECONDS) .untilAsserted(() -> { Job job = jobQueueManagerAPI.getJob(jobId); - assertEquals(JobState.COMPLETED, job.state(), - "Job should be in COMPLETED state"); + assertEquals(JobState.SUCCESS, job.state(), + "Job should be 
in SUCCESS state"); }); } @@ -183,7 +183,7 @@ void test_JobRetry() throws Exception { CountDownLatch latch = new CountDownLatch(1); jobQueueManagerAPI.watchJob(jobId, job -> { - if (job.state() == JobState.COMPLETED) { + if (job.state() == JobState.SUCCESS) { latch.countDown(); } }); @@ -196,8 +196,8 @@ void test_JobRetry() throws Exception { .pollInterval(100, TimeUnit.MILLISECONDS) .untilAsserted(() -> { Job job = jobQueueManagerAPI.getJob(jobId); - assertEquals(JobState.COMPLETED, job.state(), - "Job should be in COMPLETED state"); + assertEquals(JobState.SUCCESS, job.state(), + "Job should be in SUCCESS state"); assertEquals(maxRetries + 1, processor.getAttempts(), "Job should have been attempted " + maxRetries + " times"); }); @@ -206,16 +206,17 @@ void test_JobRetry() throws Exception { /** * Method to test: Job failure handling in JobQueueManagerAPI * Given Scenario: A job is created that is designed to fail - * ExpectedResult: The job fails, is marked as FAILED, and contains the expected error details + * ExpectedResult: The job fails, is marked as FAILED_PERMANENTLY, and contains the expected + * error details */ @Test @Order(3) - void test_FailingJob() throws Exception { + void test_Failing_Permanently_Job() throws Exception { jobQueueManagerAPI.registerProcessor("failingQueue", FailingJobProcessor.class); - RetryStrategy contentImportRetryStrategy = new ExponentialBackoffRetryStrategy( + RetryStrategy noRetriesStrategy = new ExponentialBackoffRetryStrategy( 5000, 300000, 2.0, 0 ); - jobQueueManagerAPI.setRetryStrategy("failingQueue", contentImportRetryStrategy); + jobQueueManagerAPI.setRetryStrategy("failingQueue", noRetriesStrategy); if (!jobQueueManagerAPI.isStarted()) { jobQueueManagerAPI.start(); @@ -240,8 +241,8 @@ void test_FailingJob() throws Exception { .pollInterval(100, TimeUnit.MILLISECONDS) .untilAsserted(() -> { Job job = jobQueueManagerAPI.getJob(jobId); - assertEquals(JobState.FAILED, job.state(), - "Job should be in FAILED state"); + 
assertEquals(JobState.FAILED_PERMANENTLY, job.state(), + "Job should be in FAILED_PERMANENTLY state"); assertNotNull(job.result().get().errorDetail().get(), "Job should have an error detail"); assertEquals("Simulated failure", @@ -306,8 +307,6 @@ void test_CancelJob() throws Exception { }); } - - /** * Method to test: Progress tracking functionality in JobQueueManagerAPI * Given Scenario: A job is created that reports progress during its execution @@ -342,12 +341,12 @@ void test_JobWithProgressTracker() throws Exception { .pollInterval(100, TimeUnit.MILLISECONDS) .until(() -> { Job job = jobQueueManagerAPI.getJob(jobId); - return job.state() == JobState.COMPLETED; + return job.state() == JobState.SUCCESS; }); // Verify final job state Job completedJob = jobQueueManagerAPI.getJob(jobId); - assertEquals(JobState.COMPLETED, completedJob.state(), "Job should be in COMPLETED state"); + assertEquals(JobState.SUCCESS, completedJob.state(), "Job should be in SUCCESS state"); assertEquals(1.0f, completedJob.progress(), 0.01f, "Final progress should be 1.0"); // Verify progress updates @@ -377,7 +376,6 @@ void test_JobWithProgressTracker() throws Exception { */ @Test @Order(6) - @Ignore void test_CombinedScenarios() throws Exception { // Register processors for different scenarios jobQueueManagerAPI.registerProcessor("successQueue", TestJobProcessor.class); @@ -409,12 +407,12 @@ void test_CombinedScenarios() throws Exception { // Watch jobs jobQueueManagerAPI.watchJob(successJob1Id, job -> { - if (job.state() == JobState.COMPLETED) { + if (job.state() == JobState.SUCCESS) { successLatch.countDown(); } }); jobQueueManagerAPI.watchJob(successJob2Id, job -> { - if (job.state() == JobState.COMPLETED) { + if (job.state() == JobState.SUCCESS) { successLatch.countDown(); } }); @@ -441,11 +439,11 @@ void test_CombinedScenarios() throws Exception { assertTrue(allCompleted, "All jobs should complete within the timeout period"); // Verify final states - assertEquals(JobState.COMPLETED, 
jobQueueManagerAPI.getJob(successJob1Id).state(), - "First success job should be completed"); - assertEquals(JobState.COMPLETED, jobQueueManagerAPI.getJob(successJob2Id).state(), - "Second success job should be completed"); - assertEquals(JobState.FAILED, jobQueueManagerAPI.getJob(failJobId).state(), + assertEquals(JobState.SUCCESS, jobQueueManagerAPI.getJob(successJob1Id).state(), + "First success job should be successful"); + assertEquals(JobState.SUCCESS, jobQueueManagerAPI.getJob(successJob2Id).state(), + "Second success job should be successful"); + assertEquals(JobState.FAILED_PERMANENTLY, jobQueueManagerAPI.getJob(failJobId).state(), "Fail job should be in failed state"); assertEquals(JobState.CANCELED, jobQueueManagerAPI.getJob(cancelJobId).state(), "Cancel job should be canceled"); @@ -457,8 +455,8 @@ void test_CombinedScenarios() throws Exception { Job failedJob = jobQueueManagerAPI.getJob(failJobId); assertEquals(2, failedJob.retryCount(), "Job should have been retried " + 2 + " times"); - assertEquals(JobState.FAILED, failedJob.state(), - "Job should be in FAILED state"); + assertEquals(JobState.FAILED_PERMANENTLY, failedJob.state(), + "Job should be in FAILED_PERMANENTLY state"); assertTrue(failedJob.result().isPresent(), "Failed job should have a result"); assertTrue(failedJob.result().get().errorDetail().isPresent(), @@ -555,8 +553,8 @@ void test_AbandonedJobDetection() throws Exception { .pollInterval(100, TimeUnit.MILLISECONDS) .untilAsserted(() -> { Job job = jobQueueManagerAPI.getJob(jobId); - assertEquals(JobState.COMPLETED, job.state(), - "Job should be in COMPLETED state"); + assertEquals(JobState.SUCCESS, job.state(), + "Job should be in SUCCESS state"); }); // Verify job history contains the state transitions @@ -571,8 +569,119 @@ void test_AbandonedJobDetection() throws Exception { "Second state should be ABANDONED"); assertEquals(JobState.RUNNING.name(), history.get(2).get("state"), "Third state should be RUNNING"); - 
assertEquals(JobState.COMPLETED.name(), history.get(3).get("state"), - "Latest state should be COMPLETED"); + assertEquals(JobState.SUCCESS.name(), history.get(3).get("state"), + "Latest state should be SUCCESS"); + } + + /** + * Tests the abandoned job detection functionality. + * Given Scenario: A job exists in RUNNING state with an old timestamp + * ExpectedResult: The job is detected as abandoned, eventually marked as ABANDONED_PERMANENTLY + */ + @Test + @Order(8) + void test_Abandoned_Permanetly_Job() throws Exception { + + final String jobId = UUID.randomUUID().toString(); + final String queueName = "abandonedQueue"; + final Map parameters = Collections.singletonMap("test", "value"); + final String serverId = APILocator.getServerAPI().readServerId(); + final LocalDateTime oldTimestamp = LocalDateTime.now().minusMinutes(5); + + // Create a job directly in the database in RUNNING state to simulate an abandoned job + DotConnect dc = new DotConnect(); + + // Insert into job table + dc.setSQL("INSERT INTO job (id, queue_name, state, parameters, created_at, updated_at, started_at, execution_node) VALUES (?, ?, ?, ?::jsonb, ?, ?, ?, ?)") + .addParam(jobId) + .addParam(queueName) + .addParam(JobState.RUNNING.name()) + .addParam(new ObjectMapper().writeValueAsString(parameters)) + .addParam(Timestamp.valueOf(oldTimestamp)) + .addParam(Timestamp.valueOf(oldTimestamp)) + .addParam(Timestamp.valueOf(oldTimestamp)) + .addParam(serverId) + .loadResult(); + + // Insert into job_queue table + dc.setSQL("INSERT INTO job_queue (id, queue_name, state, created_at) VALUES (?, ?, ?, ?)") + .addParam(jobId) + .addParam(queueName) + .addParam(JobState.RUNNING.name()) + .addParam(Timestamp.valueOf(oldTimestamp)) + .loadResult(); + + // Insert initial state into job_history + dc.setSQL("INSERT INTO job_history (id, job_id, state, execution_node, created_at) VALUES (?, ?, ?, ?, ?)") + .addParam(UUID.randomUUID().toString()) + .addParam(jobId) + .addParam(JobState.RUNNING.name()) + 
.addParam(serverId) + .addParam(Timestamp.valueOf(oldTimestamp)) + .loadResult(); + + // Verify the job was created in RUNNING state + Job initialJob = jobQueueManagerAPI.getJob(jobId); + assertEquals(JobState.RUNNING, initialJob.state(), + "Job should be in RUNNING state initially"); + + // Start job queue manager if not started + if (!jobQueueManagerAPI.isStarted()) { + jobQueueManagerAPI.start(); + jobQueueManagerAPI.awaitStart(5, TimeUnit.SECONDS); + } + + // Register a processor for the abandoned job + jobQueueManagerAPI.registerProcessor(queueName, AbbandonedJobProcessor.class); + RetryStrategy noRetriesStrategy = new ExponentialBackoffRetryStrategy( + 5000, 300000, 2.0, 0 + ); + jobQueueManagerAPI.setRetryStrategy(queueName, noRetriesStrategy); + + // The job should be marked as abandoned + CountDownLatch latch = new CountDownLatch(1); + jobQueueManagerAPI.watchJob(jobId, job -> { + if (job.state() == JobState.ABANDONED) { + latch.countDown(); + } + }); + + boolean abandoned = latch.await(3, TimeUnit.MINUTES); + assertTrue(abandoned, "Job should be marked as abandoned within timeout period"); + + // Verify the abandoned job state and error details + Job abandonedJob = jobQueueManagerAPI.getJob(jobId); + assertEquals(JobState.ABANDONED, abandonedJob.state(), + "Job should be in ABANDONED state"); + assertTrue(abandonedJob.result().isPresent(), + "Abandoned job should have a result"); + assertTrue(abandonedJob.result().get().errorDetail().isPresent(), + "Abandoned job should have error details"); + assertTrue(abandonedJob.result().get().errorDetail().get().message() + .contains("abandoned due to no updates"), + "Error message should indicate abandonment"); + + // Verify the job was put back in queue for retry and completed successfully + Awaitility.await().atMost(15, TimeUnit.SECONDS) + .pollInterval(100, TimeUnit.MILLISECONDS) + .untilAsserted(() -> { + Job job = jobQueueManagerAPI.getJob(jobId); + assertEquals(JobState.ABANDONED_PERMANENTLY, job.state(), + 
"Job should be in ABANDONED_PERMANENTLY state"); + }); + + // Verify job history contains the state transitions + dc.setSQL("SELECT state FROM job_history WHERE job_id = ? ORDER BY created_at") + .addParam(jobId); + List> history = dc.loadObjectResults(); + + assertFalse(history.isEmpty(), "Job should have history records"); + assertEquals(JobState.RUNNING.name(), history.get(0).get("state"), + "First state should be RUNNING"); + assertEquals(JobState.ABANDONED.name(), history.get(1).get("state"), + "Second state should be ABANDONED"); + assertEquals(JobState.ABANDONED_PERMANENTLY.name(), history.get(2).get("state"), + "Latest state should be ABANDONED_PERMANENTLY"); } static class AbbandonedJobProcessor implements JobProcessor { diff --git a/dotcms-integration/src/test/java/com/dotcms/jobs/business/api/JobQueueManagerAPITest.java b/dotcms-integration/src/test/java/com/dotcms/jobs/business/api/JobQueueManagerAPITest.java index d9f7d36bea68..09d25aabb0ad 100644 --- a/dotcms-integration/src/test/java/com/dotcms/jobs/business/api/JobQueueManagerAPITest.java +++ b/dotcms-integration/src/test/java/com/dotcms/jobs/business/api/JobQueueManagerAPITest.java @@ -367,8 +367,8 @@ public void test_JobRetry_single_retry() throws Exception { retryCount.incrementAndGet(); return mockJob; }); - when(mockJob.markAsCompleted(any())).thenAnswer(inv -> { - jobState.set(JobState.COMPLETED); + when(mockJob.markAsSuccessful(any())).thenAnswer(inv -> { + jobState.set(JobState.SUCCESS); return mockJob; }); when(mockJob.markAsFailed(any())).thenAnswer(inv -> { @@ -401,7 +401,7 @@ public void test_JobRetry_single_retry() throws Exception { throw new RuntimeException("Simulated failure"); } Job job = invocation.getArgument(0); - job.markAsCompleted(any()); + job.markAsSuccessful(any()); return null; }).when(mockJobProcessor).process(any()); @@ -413,7 +413,7 @@ public void test_JobRetry_single_retry() throws Exception { .pollInterval(100, TimeUnit.MILLISECONDS) .untilAsserted(() -> { 
verify(mockJobProcessor, times(2)).process(any()); - assertEquals(JobState.COMPLETED, jobState.get()); + assertEquals(JobState.SUCCESS, jobState.get()); }); // Additional verifications @@ -462,8 +462,8 @@ public void test_JobRetry_retry_twice() throws Exception { lastRetry.set(LocalDateTime.now()); return mockJob; }); - when(mockJob.markAsCompleted(any())).thenAnswer(inv -> { - jobState.set(JobState.COMPLETED); + when(mockJob.markAsSuccessful(any())).thenAnswer(inv -> { + jobState.set(JobState.SUCCESS); return mockJob; }); when(mockJob.markAsFailed(any())).thenAnswer(inv -> { @@ -475,7 +475,7 @@ public void test_JobRetry_retry_twice() throws Exception { // Configure job queue to always return the mockJob until it's completed when(mockJobQueue.nextJob()).thenAnswer(inv -> - jobState.get() != JobState.COMPLETED ? mockJob : null + jobState.get() != JobState.SUCCESS ? mockJob : null ); // Configure retry strategy @@ -509,7 +509,7 @@ public void test_JobRetry_retry_twice() throws Exception { .pollInterval(100, TimeUnit.MILLISECONDS) .untilAsserted(() -> { verify(mockJobProcessor, times(3)).process(any()); - assertEquals(JobState.COMPLETED, jobState.get()); + assertEquals(JobState.SUCCESS, jobState.get()); assertEquals(2, retryCount.get()); }); @@ -520,7 +520,7 @@ public void test_JobRetry_retry_twice() throws Exception { inOrder.verify(mockJob).markAsRunning(); inOrder.verify(mockJob).markAsFailed(any()); inOrder.verify(mockJob).markAsRunning(); - inOrder.verify(mockJob).markAsCompleted(any()); + inOrder.verify(mockJob).markAsSuccessful(any()); // Verify retry behavior verify(mockRetryStrategy, atLeast(2)).shouldRetry(any(), any()); @@ -573,6 +573,10 @@ public void test_JobRetry_MaxRetryLimit() throws Exception { jobState.set(JobState.FAILED); return mockJob; }); + when(mockJob.markAsFailedPermanently()).thenAnswer(inv -> { + jobState.set(JobState.FAILED_PERMANENTLY); + return mockJob; + }); 
when(mockJob.withProgressTracker(any(DefaultProgressTracker.class))).thenReturn(mockJob); @@ -603,14 +607,13 @@ public void test_JobRetry_MaxRetryLimit() throws Exception { .untilAsserted(() -> { verify(mockJobProcessor, times(maxRetries + 1)). process(any()); // Initial attempt + retries - assertEquals(JobState.FAILED, jobState.get()); + assertEquals(JobState.FAILED_PERMANENTLY, jobState.get()); assertEquals(maxRetries, retryCount.get()); }); // Verify the job was not retried after reaching the max retry limit verify(mockRetryStrategy, times(maxRetries + 1)). shouldRetry(any(), any()); // Retries + final attempt - verify(mockJobQueue, times(1)).removeJobFromQueue(mockJob.id()); // Stop the job queue jobQueueManagerAPI.close(); @@ -643,8 +646,8 @@ public void test_Job_SucceedsFirstAttempt() throws Exception { jobState.set(JobState.RUNNING); return mockJob; }); - when(mockJob.markAsCompleted(any())).thenAnswer(inv -> { - jobState.set(JobState.COMPLETED); + when(mockJob.markAsSuccessful(any())).thenAnswer(inv -> { + jobState.set(JobState.SUCCESS); return mockJob; }); when(mockJob.withProgressTracker(any(DefaultProgressTracker.class))).thenReturn(mockJob); @@ -661,7 +664,7 @@ public void test_Job_SucceedsFirstAttempt() throws Exception { // Configure job processor to succeed doAnswer(inv -> { Job job = inv.getArgument(0); - job.markAsCompleted(any()); + job.markAsSuccessful(any()); return null; }).when(mockJobProcessor).process(any()); @@ -673,14 +676,14 @@ public void test_Job_SucceedsFirstAttempt() throws Exception { .pollInterval(100, TimeUnit.MILLISECONDS) .untilAsserted(() -> { verify(mockJobProcessor, times(1)).process(any()); - assertEquals(JobState.COMPLETED, jobState.get()); + assertEquals(JobState.SUCCESS, jobState.get()); }); // Verify the job was processed only once and completed successfully verify(mockRetryStrategy, never()).shouldRetry(any(), any()); verify(mockJobQueue, times(2)).updateJobStatus(any()); verify(mockJobQueue, times(2)).updateJobStatus( - 
argThat(job -> job.state() == JobState.COMPLETED)); + argThat(job -> job.state() == JobState.SUCCESS)); // Stop the job queue jobQueueManagerAPI.close(); @@ -748,7 +751,6 @@ public void test_Job_NotRetryable() throws Exception { // Verify the job was not retried verify(mockRetryStrategy, times(1)).shouldRetry(any(), any()); verify(mockJobQueue, times(1)).putJobBackInQueue(any()); - verify(mockJobQueue, times(1)).removeJobFromQueue(mockJob.id()); // Capture and verify the error details ArgumentCaptor jobResultCaptor = ArgumentCaptor.forClass(JobResult.class); @@ -869,13 +871,13 @@ public void test_CircuitBreaker_Closes() throws Exception { } Job processingJob = inv.getArgument(0); - processingJob.markAsCompleted(any()); + processingJob.markAsSuccessful(any()); return null; }).when(mockJobProcessor).process(any()); AtomicReference jobState = new AtomicReference<>(JobState.PENDING); - when(mockJob.markAsCompleted(any())).thenAnswer(inv -> { - jobState.set(JobState.COMPLETED); + when(mockJob.markAsSuccessful(any())).thenAnswer(inv -> { + jobState.set(JobState.SUCCESS); return mockJob; }); @@ -904,7 +906,7 @@ public void test_CircuitBreaker_Closes() throws Exception { .pollInterval(100, TimeUnit.MILLISECONDS) .untilAsserted(() -> { assertTrue(circuitBreaker.allowRequest()); - assertEquals(JobState.COMPLETED, jobState.get()); + assertEquals(JobState.SUCCESS, jobState.get()); }); verify(mockJobProcessor, atLeast(6)).process(any()); @@ -1079,8 +1081,8 @@ public void test_complex_cancelJob() throws Exception { stateUpdates.add(JobState.CANCELED); return mockJob; }); - when(mockJob.markAsCompleted(any())).thenAnswer(inv -> { - stateUpdates.add(JobState.COMPLETED); + when(mockJob.markAsSuccessful(any())).thenAnswer(inv -> { + stateUpdates.add(JobState.SUCCESS); return mockJob; }); when(mockJob.markAsFailed(any())).thenAnswer(inv -> { diff --git a/dotcms-integration/src/test/java/com/dotcms/jobs/business/queue/PostgresJobQueueIntegrationTest.java 
b/dotcms-integration/src/test/java/com/dotcms/jobs/business/queue/PostgresJobQueueIntegrationTest.java index f8fed037402e..a180cb5ea32d 100644 --- a/dotcms-integration/src/test/java/com/dotcms/jobs/business/queue/PostgresJobQueueIntegrationTest.java +++ b/dotcms-integration/src/test/java/com/dotcms/jobs/business/queue/PostgresJobQueueIntegrationTest.java @@ -122,7 +122,7 @@ void test_getActiveJobs() throws JobQueueException { * ExpectedResult: All completed jobs within the given time range are retrieved */ @Test - void testGetCompletedJobsForQueue() throws JobQueueException { + void test_getCompletedJobsForQueue() throws JobQueueException { String queueName = "testQueue"; LocalDateTime startDate = LocalDateTime.now().minusDays(1); @@ -132,14 +132,14 @@ void testGetCompletedJobsForQueue() throws JobQueueException { for (int i = 0; i < 3; i++) { String jobId = jobQueue.createJob(queueName, new HashMap<>()); Job job = jobQueue.getJob(jobId); - Job completedJob = job.markAsCompleted(null); + Job completedJob = job.markAsSuccessful(null); jobQueue.updateJobStatus(completedJob); } JobPaginatedResult result = jobQueue.getCompletedJobs(queueName, startDate, endDate, 1, 10); assertEquals(3, result.jobs().size()); assertEquals(3, result.total()); - result.jobs().forEach(job -> assertEquals(JobState.COMPLETED, job.state())); + result.jobs().forEach(job -> assertEquals(JobState.SUCCESS, job.state())); } /** @@ -148,7 +148,7 @@ void testGetCompletedJobsForQueue() throws JobQueueException { * ExpectedResult: All completed jobs are retrieved */ @Test - void testGetCompletedJobs() throws JobQueueException { + void test_getCompletedJobs() throws JobQueueException { String queueName = "testQueue"; @@ -156,14 +156,14 @@ void testGetCompletedJobs() throws JobQueueException { for (int i = 0; i < 3; i++) { String jobId = jobQueue.createJob(queueName, new HashMap<>()); Job job = jobQueue.getJob(jobId); - Job completedJob = job.markAsCompleted(null); + Job completedJob = 
job.markAsSuccessful(null); jobQueue.updateJobStatus(completedJob); } JobPaginatedResult result = jobQueue.getCompletedJobs(1, 10); assertEquals(3, result.jobs().size()); assertEquals(3, result.total()); - result.jobs().forEach(job -> assertEquals(JobState.COMPLETED, job.state())); + result.jobs().forEach(job -> assertEquals(JobState.SUCCESS, job.state())); } /** @@ -172,7 +172,7 @@ void testGetCompletedJobs() throws JobQueueException { * ExpectedResult: All canceled jobs are retrieved */ @Test - void testGetCanceledJobs() throws JobQueueException { + void test_getCanceledJobs() throws JobQueueException { String queueName = "testQueue"; @@ -214,6 +214,53 @@ void test_getFailedJobs() throws JobQueueException { result.jobs().forEach(job -> assertEquals(JobState.FAILED, job.state())); } + /** + * Method to test: getSuccessfulJobs in PostgresJobQueue Given Scenario: Multiple jobs are + * created and successfully completed ExpectedResult: All successful jobs are retrieved + * correctly + */ + @Test + void test_getSuccessfulJobs() throws JobQueueException { + + String queueName = "testQueue"; + + // Create and complete some jobs + for (int i = 0; i < 3; i++) { + String jobId = jobQueue.createJob(queueName, new HashMap<>()); + Job job = jobQueue.getJob(jobId); + Job completedJob = job.markAsSuccessful(null); + jobQueue.updateJobStatus(completedJob); + } + + JobPaginatedResult result = jobQueue.getSuccessfulJobs(1, 10); + assertEquals(3, result.jobs().size()); + assertEquals(3, result.total()); + result.jobs().forEach(job -> assertEquals(JobState.SUCCESS, job.state())); + } + + /** + * Method to test: getFailedJobs in PostgresJobQueue Given Scenario: Multiple jobs are created + * and set to failed state ExpectedResult: All failed jobs are retrieved correctly + */ + @Test + void test_getAbandonedJobs() throws JobQueueException { + + // Create and fail some jobs + for (int i = 0; i < 2; i++) { + String jobId = jobQueue.createJob("testQueue", new HashMap<>()); + Job job = 
jobQueue.getJob(jobId); + Job failedJob = Job.builder().from(job) + .state(JobState.ABANDONED) + .build(); + jobQueue.updateJobStatus(failedJob); + } + + JobPaginatedResult result = jobQueue.getAbandonedJobs(1, 10); + assertEquals(2, result.jobs().size()); + assertEquals(2, result.total()); + result.jobs().forEach(job -> assertEquals(JobState.ABANDONED, job.state())); + } + /** * Method to test: updateJobStatus in PostgresJobQueue * Given Scenario: A job's status is updated @@ -281,7 +328,7 @@ void test_nextJob() throws Exception { }); // Mark job as completed - Job completedJob = nextJob.markAsCompleted(null); + Job completedJob = nextJob.markAsSuccessful(null); jobQueue.updateJobStatus(completedJob); } } catch (Exception e) { @@ -307,8 +354,8 @@ void test_nextJob() throws Exception { // Verify all jobs are in COMPLETED state for (String jobId : createdJobIds) { Job job = jobQueue.getJob(jobId); - assertEquals(JobState.COMPLETED, job.state(), - "Job " + jobId + " is not in COMPLETED state"); + assertEquals(JobState.SUCCESS, job.state(), + "Job " + jobId + " is not in SUCCESS state"); } } @@ -495,7 +542,7 @@ void test_getJobs() throws JobQueueException { String completedJobId = jobQueue.createJob(queueName, new HashMap<>()); Job completedJob = jobQueue.getJob(completedJobId); - jobQueue.updateJobStatus(completedJob.markAsCompleted(null)); + jobQueue.updateJobStatus(completedJob.markAsSuccessful(null)); // Get all jobs JobPaginatedResult result = jobQueue.getJobs(1, 10); @@ -512,7 +559,7 @@ void test_getJobs() throws JobQueueException { } assertEquals(3, stateCounts.getOrDefault(JobState.PENDING, 0)); assertEquals(1, stateCounts.getOrDefault(JobState.RUNNING, 0)); - assertEquals(1, stateCounts.getOrDefault(JobState.COMPLETED, 0)); + assertEquals(1, stateCounts.getOrDefault(JobState.SUCCESS, 0)); } /** @@ -608,8 +655,18 @@ void test_removeJobFromQueue() throws JobQueueException { Job job = jobQueue.getJob(jobId); assertNotNull(job); - // Remove the job - 
jobQueue.removeJobFromQueue(jobId); + // Putting the job in a final state + Job updatedJob = Job.builder() + .from(job) + .state(JobState.FAILED_PERMANENTLY) + .progress(0.75f) + .startedAt(Optional.of(LocalDateTime.now().minusHours(1))) + .completedAt(Optional.of(LocalDateTime.now())) + .retryCount(2) + .build(); + + // Update the job + jobQueue.updateJobStatus(updatedJob); // Verify job is not returned by nextJob assertNull(jobQueue.nextJob()); @@ -648,7 +705,7 @@ void test_createUpdateAndRetrieveJob() throws JobQueueException { Job updatedJob = Job.builder() .from(initialJob) - .state(JobState.COMPLETED) + .state(JobState.SUCCESS) .progress(0.75f) .startedAt(Optional.of(LocalDateTime.now().minusHours(1))) .completedAt(Optional.of(LocalDateTime.now())) @@ -665,7 +722,7 @@ void test_createUpdateAndRetrieveJob() throws JobQueueException { // Verify all fields assertEquals(jobId, retrievedJob.id()); assertEquals(queueName, retrievedJob.queueName()); - assertEquals(JobState.COMPLETED, retrievedJob.state()); + assertEquals(JobState.SUCCESS, retrievedJob.state()); assertEquals(initialParameters, retrievedJob.parameters()); assertEquals(0.75f, retrievedJob.progress(), 0.001); assertTrue(retrievedJob.startedAt().isPresent()); @@ -775,8 +832,8 @@ void test_hasJobBeenInState() throws JobQueueException { assertFalse(jobQueue.hasJobBeenInState(jobId, JobState.CANCELED)); - jobQueue.updateJobStatus(job.withState(JobState.COMPLETED)); - assertTrue(jobQueue.hasJobBeenInState(jobId, JobState.COMPLETED)); + jobQueue.updateJobStatus(job.withState(JobState.SUCCESS)); + assertTrue(jobQueue.hasJobBeenInState(jobId, JobState.SUCCESS)); assertFalse(jobQueue.hasJobBeenInState(jobId, JobState.CANCELLING)); diff --git a/dotcms-postman/src/main/resources/postman/JobQueueResourceAPITests.postman_collection.json b/dotcms-postman/src/main/resources/postman/JobQueueResourceAPITests.postman_collection.json index 14ecba3220a9..f6215595d3c5 100644 --- 
a/dotcms-postman/src/main/resources/postman/JobQueueResourceAPITests.postman_collection.json +++ b/dotcms-postman/src/main/resources/postman/JobQueueResourceAPITests.postman_collection.json @@ -1,6 +1,6 @@ { "info": { - "_postman_id": "be9c354a-6c94-4b10-be0e-bc0c1b324c24", + "_postman_id": "8f0a1603-03b2-4f37-b2a3-d6c6a8f5c910", "name": "JobQueueResource API Tests", "description": "Postman collection for testing the JobQueueResource API endpoints.", "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json", @@ -965,8 +965,8 @@ "var response = pm.response.json();", "console.log(\"Current job state:\", response.entity.state);", " ", - "// Check if job status is \"COMPLETED\"", - "if (response.entity.state === \"COMPLETED\") {", + "// Check if job status is \"SUCCESS\"", + "if (response.entity.state === \"SUCCESS\") {", " // Clear environment variables once done", " pm.environment.unset(\"startTime\");", " pm.environment.unset(\"retryCount\");", @@ -1060,8 +1060,8 @@ "var response = pm.response.json();", "console.log(\"Current job state:\", response.entity.state);", " ", - "// Check if job status is \"FAILED\"", - "if (response.entity.state === \"FAILED\") {", + "// Check if job status is \"FAILED_PERMANENTLY\"", + "if (response.entity.state === \"FAILED_PERMANENTLY\") {", " // Clear environment variables once done", " pm.environment.unset(\"startTime\");", " pm.environment.unset(\"retryCount\");", @@ -1360,6 +1360,76 @@ }, { "name": "Get all completed Jobs", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "// Parse the response JSON", + "const response = pm.response.json();", + "", + "// Validate that the response status is 200 OK", + "pm.test(\"Response status is 200\", function () {", + " pm.response.to.have.status(200);", + "});", + "", + "// Validate that the response contains an \"entity.jobs\" array", + "pm.test(\"Response should contain jobs array\", function () {", + " 
pm.expect(response.entity).to.have.property(\"jobs\");", + " pm.expect(response.entity.jobs).to.be.an(\"array\");", + "});", + "", + "// Validate that the jobs array contains 3 jobs", + "pm.test(\"Jobs array should contain 3 jobs\", function () {", + " pm.expect(response.entity.jobs.length).to.eql(3);", + "});" + ], + "type": "text/javascript", + "packages": {} + } + }, + { + "listen": "prerequest", + "script": { + "exec": [ + "" + ], + "type": "text/javascript", + "packages": {} + } + } + ], + "request": { + "method": "GET", + "header": [], + "url": { + "raw": "{{baseUrl}}/api/v1/jobs/completed?page={{page}}&pageSize={{pageSize}}", + "host": [ + "{{baseUrl}}" + ], + "path": [ + "api", + "v1", + "jobs", + "completed" + ], + "query": [ + { + "key": "page", + "value": "{{page}}" + }, + { + "key": "pageSize", + "value": "{{pageSize}}" + } + ] + }, + "description": "Lists completed jobs with pagination." + }, + "response": [] + }, + { + "name": "Get all successful Jobs", "event": [ { "listen": "test", @@ -1411,7 +1481,7 @@ "method": "GET", "header": [], "url": { - "raw": "{{baseUrl}}/api/v1/jobs/completed?page={{page}}&pageSize={{pageSize}}", + "raw": "{{baseUrl}}/api/v1/jobs/successful?page={{page}}&pageSize={{pageSize}}", "host": [ "{{baseUrl}}" ], @@ -1419,7 +1489,7 @@ "api", "v1", "jobs", - "completed" + "successful" ], "query": [ { @@ -1432,7 +1502,7 @@ } ] }, - "description": "Lists completed jobs with pagination." + "description": "Lists successful jobs with pagination." 
}, "response": [] }, @@ -1507,7 +1577,7 @@ "response": [] }, { - "name": "List Jobs Expect Fail, Completed and Cancelled", + "name": "List Jobs Expect Fail, Successful and Cancelled", "event": [ { "listen": "test", @@ -1533,12 +1603,12 @@ "});", "", "// Check if there are jobs with \"FAILED\" and \"CANCELED\" status", - "const hasFailed = response.entity.jobs.some(job => job.state === \"FAILED\");", + "const hasFailed = response.entity.jobs.some(job => job.state === \"FAILED_PERMANENTLY\");", "const hasCanceled = response.entity.jobs.some(job => job.state === \"CANCELED\");", - "const hasCompleted = response.entity.jobs.some(job => job.state === \"COMPLETED\");", + "const hasSuccess = response.entity.jobs.some(job => job.state === \"SUCCESS\");", "", - "// Postman test to validate that there are jobs with \"FAILED\" statuses", - "pm.test(\"There are jobs in 'FAILED' state\", function () {", + "// Postman test to validate that there are jobs with \"FAILED_PERMANENTLY\" statuses", + "pm.test(\"There are jobs in 'FAILED_PERMANENTLY' state\", function () {", " pm.expect(hasFailed).to.be.true; ", "});", "", @@ -1547,9 +1617,9 @@ " pm.expect(hasCanceled).to.be.true;", "});", "", - "// Postman test to validate that there are jobs with \"COMPLETED\" statuses", - "pm.test(\"There are jobs in 'COMPLETED' state\", function () { ", - " pm.expect(hasCompleted).to.be.true;", + "// Postman test to validate that there are jobs with \"SUCCESS\" statuses", + "pm.test(\"There are jobs in 'SUCCESS' state\", function () { ", + " pm.expect(hasSuccess).to.be.true;", "});" ], "type": "text/javascript", @@ -1583,7 +1653,7 @@ } ] }, - "description": "List Jobs Expect Fail and Cancelled." + "description": "List Jobs Expect Fail, Successful and Cancelled." 
}, "response": [] } From a20b5e4c525d1b1d3e1a35b013080addacfd64c1 Mon Sep 17 00:00:00 2001 From: Geronimo Ortiz Date: Wed, 4 Dec 2024 18:03:16 -0300 Subject: [PATCH 6/7] Issue 26341 Rules do not fire on "Show Preview" (#30832) Removed a check that was preventing the rule from firing when the user was an admin --- .../portlets/rules/business/RulesEngine.java | 14 +------------- 1 file changed, 1 insertion(+), 13 deletions(-) diff --git a/dotCMS/src/main/java/com/dotmarketing/portlets/rules/business/RulesEngine.java b/dotCMS/src/main/java/com/dotmarketing/portlets/rules/business/RulesEngine.java index 2a1fb378dd25..956ac64d11c6 100644 --- a/dotCMS/src/main/java/com/dotmarketing/portlets/rules/business/RulesEngine.java +++ b/dotCMS/src/main/java/com/dotmarketing/portlets/rules/business/RulesEngine.java @@ -172,19 +172,7 @@ public static void fireRules(final HttpServletRequest request, final HttpServlet if (!UtilMethods.isSet(request)) { throw new DotRuntimeException("ERROR: HttpServletRequest is null"); } - - // do not run rules in admin mode - PageMode mode= PageMode.get(request); - if(mode.isAdmin) { - final boolean fireRulesFromParameter =Try.of(()->Boolean.valueOf - (request.getParameter("fireRules"))).getOrElse(false); - final boolean fireRulesFromAttribute =Try.of(()-> Boolean.valueOf((Boolean) - request.getAttribute("fireRules"))).getOrElse(false); - - if(!fireRulesFromParameter && !fireRulesFromAttribute) { - return; - } - } + final Set alreadyFiredRulesFor =request.getAttribute(DOT_RULES_FIRED_ALREADY)!=null?(Set)request.getAttribute(DOT_RULES_FIRED_ALREADY):new HashSet(); final String ruleRunKey = parent.getIdentifier() +"_"+ fireOn.name(); From cc77a5c0d6c5d9d0a40008af7600b915ede27d4c Mon Sep 17 00:00:00 2001 From: Arcadio Quintero Date: Wed, 4 Dec 2024 17:24:10 -0500 Subject: [PATCH 7/7] Add Reset Workflow Button in Edit Contentlet Sidebar Workflow Section (#30767) ### Proposed Changes * Allow resetting a contentlet if the workflow provides a reset sub-action for that.
This pull request includes several changes to the `core-web` library, focusing on enhancing the `dot-edit-content-form` and `dot-edit-content-sidebar-workflow` components. The most important changes include the addition of new render modes, modifications to form value handling, and updates to the workflow component structure and functionality. ### Enhancements to `dot-edit-content-form`: * Added new render modes to the `DotRenderMode` enum in `dot-workflows-actions.service.ts`. * Updated the `changeValue` output type to handle various data types, including `string[]` and `Date`. * Introduced a new computed property `$formFields` to filter out certain fields from the form. * Refactored the `initializeFormListener` method to handle form value changes and emit the processed value. * Simplified the `initializeForm` method to use the new `$formFields` computed property. ### Updates to `dot-edit-content-sidebar-workflow`: * Consolidated workflow-related variables into a single `workflow` object and updated the template to use this object. [[1]](diffhunk://#diff-1e29fca586a864571ecaaf1794485fbb1b5d6a39bac17bcdcb67e44fb8f8e451L1-R3) [[2]](diffhunk://#diff-1e29fca586a864571ecaaf1794485fbb1b5d6a39bac17bcdcb67e44fb8f8e451L16-R13) [[3]](diffhunk://#diff-1e29fca586a864571ecaaf1794485fbb1b5d6a39bac17bcdcb67e44fb8f8e451L26-R58) [[4]](diffhunk://#diff-1e29fca586a864571ecaaf1794485fbb1b5d6a39bac17bcdcb67e44fb8f8e451L61-R67) [[5]](diffhunk://#diff-1e29fca586a864571ecaaf1794485fbb1b5d6a39bac17bcdcb67e44fb8f8e451L84-R90) * Introduced new interfaces `WorkflowSelection` and `DotWorkflowState` to manage workflow selection and state. [[1]](diffhunk://#diff-3465f2dbecb29cf855c8f271b85d396eff6ac9c64b1c1440147cdca68392b1d0L10-R27) [[2]](diffhunk://#diff-3465f2dbecb29cf855c8f271b85d396eff6ac9c64b1c1440147cdca68392b1d0L36-R54) * Added an output event `onResetWorkflow` to handle workflow resets. 
* Updated the workflow component to use the new `workflowSelection` and `resetWorkflowAction` inputs. ### General improvements: * Removed the unused `SlicePipe` import from `dot-edit-content-sidebar.component.ts`. [[1]](diffhunk://#diff-7de680fdc62a1bf1fd1243d8fcf077fee3a7e08ed213d2821b2eb53b2d9248feL1) [[2]](diffhunk://#diff-7de680fdc62a1bf1fd1243d8fcf077fee3a7e08ed213d2821b2eb53b2d9248feL44-R43) * Updated the `dot-edit-content-sidebar.component.html` to use the new workflow structure and properties. [[1]](diffhunk://#diff-96315ade336a88f5f2f51d7a64dec588aa9fa44384539792c69e1a3346eaa5d0L3-L13) [[2]](diffhunk://#diff-96315ade336a88f5f2f51d7a64dec588aa9fa44384539792c69e1a3346eaa5d0L26-R20) These changes enhance the flexibility and maintainability of the form and workflow components, ensuring better data handling and user experience. ### Checklist - [x] Tests - [ ] Translations - [ ] Security Implications Contemplated (add notes if applicable) ### Additional Info ** any additional useful context or info ** ### Screenshots Contenttype with 2 Workflows (1 resettable and 1 not) https://github.com/user-attachments/assets/e1da7a1e-886c-4270-a98a-4d3ce062a47c Contenttype with 1 resettable Workflow https://github.com/user-attachments/assets/7d0be90b-ba6a-4dad-ba2c-75abb3758d0b --- .../dot-workflows-actions.service.ts | 6 + .../dot-edit-content-form.component.spec.ts | 9 +- .../dot-edit-content-form.component.ts | 75 +++--- ...it-content-sidebar-workflow.component.html | 38 +-- ...content-sidebar-workflow.component.spec.ts | 233 ++++++++++++++++-- ...edit-content-sidebar-workflow.component.ts | 66 ++--- .../dot-edit-content-sidebar.component.html | 26 +- ...dot-edit-content-sidebar.component.spec.ts | 5 +- .../dot-edit-content-sidebar.component.ts | 51 ++-- .../edit-content.layout.component.html | 1 + .../edit-content.layout.component.ts | 17 +- .../edit-content/store/edit-content.store.ts | 2 + .../store/features/content.feature.spec.ts | 141 ++++++++++- 
.../store/features/content.feature.ts | 106 ++++++-- .../store/features/form.feature.ts | 34 +++ .../store/features/workflow.feature.spec.ts | 149 +++++------ .../store/features/workflow.feature.ts | 233 ++++++++++-------- .../models/dot-edit-content-form.interface.ts | 12 + .../src/lib/models/dot-edit-content.model.ts | 15 ++ .../libs/edit-content/src/lib/utils/mocks.ts | 179 ++++++++++++++ .../src/lib/utils/workflows.utils.spec.ts | 107 +------- .../src/lib/utils/workflows.utils.ts | 118 ++------- .../WEB-INF/messages/Language.properties | 6 +- 23 files changed, 1070 insertions(+), 559 deletions(-) create mode 100644 core-web/libs/edit-content/src/lib/feature/edit-content/store/features/form.feature.ts diff --git a/core-web/libs/data-access/src/lib/dot-workflows-actions/dot-workflows-actions.service.ts b/core-web/libs/data-access/src/lib/dot-workflows-actions/dot-workflows-actions.service.ts index ef7652e34eff..debbf0b26b6e 100644 --- a/core-web/libs/data-access/src/lib/dot-workflows-actions/dot-workflows-actions.service.ts +++ b/core-web/libs/data-access/src/lib/dot-workflows-actions/dot-workflows-actions.service.ts @@ -13,7 +13,13 @@ import { } from '@dotcms/dotcms-models'; export enum DotRenderMode { + LOCKED = 'LOCKED', LISTING = 'LISTING', + ARCHIVED = 'ARCHIVED', + UNPUBLISHED = 'UNPUBLISHED', + PUBLISHED = 'PUBLISHED', + UNLOCKED = 'UNLOCKED', + NEW = 'NEW', EDITING = 'EDITING' } diff --git a/core-web/libs/edit-content/src/lib/components/dot-edit-content-form/dot-edit-content-form.component.spec.ts b/core-web/libs/edit-content/src/lib/components/dot-edit-content-form/dot-edit-content-form.component.spec.ts index 877f29577054..e1e2d4a702ca 100644 --- a/core-web/libs/edit-content/src/lib/components/dot-edit-content-form/dot-edit-content-form.component.spec.ts +++ b/core-web/libs/edit-content/src/lib/components/dot-edit-content-form/dot-edit-content-form.component.spec.ts @@ -40,7 +40,8 @@ import { MOCK_CONTENTLET_1_TAB as MOCK_CONTENTLET_1_OR_2_TABS, 
MOCK_CONTENTTYPE_1_TAB, MOCK_CONTENTTYPE_2_TABS, - MOCK_WORKFLOW_ACTIONS_NEW_ITEMNTTYPE_1_TAB + MOCK_WORKFLOW_ACTIONS_NEW_ITEMNTTYPE_1_TAB, + MOCK_WORKFLOW_STATUS } from '../../utils/edit-content.mock'; import { MockResizeObserver } from '../../utils/mocks'; @@ -52,6 +53,7 @@ describe('DotFormComponent', () => { let workflowActionsService: SpyObject; let workflowActionsFireService: SpyObject; let dotEditContentService: SpyObject; + let dotWorkflowService: SpyObject; let router: SpyObject; const createComponent = createComponentFactory({ @@ -71,6 +73,7 @@ describe('DotFormComponent', () => { mockProvider(DotWorkflowService), mockProvider(MessageService), mockProvider(DotContentletService), + { provide: ActivatedRoute, useValue: { @@ -95,6 +98,7 @@ describe('DotFormComponent', () => { workflowActionsService = spectator.inject(DotWorkflowsActionsService); dotEditContentService = spectator.inject(DotEditContentService); workflowActionsFireService = spectator.inject(DotWorkflowActionsFireService); + dotWorkflowService = spectator.inject(DotWorkflowService); router = spectator.inject(Router); }); @@ -112,6 +116,7 @@ describe('DotFormComponent', () => { workflowActionsService.getWorkFlowActions.mockReturnValue( of(MOCK_SINGLE_WORKFLOW_ACTIONS) ); + dotWorkflowService.getWorkflowStatus.mockReturnValue(of(MOCK_WORKFLOW_STATUS)); store.initializeExistingContent(MOCK_CONTENTLET_1_OR_2_TABS.inode); // called with the inode of the contentlet @@ -189,6 +194,7 @@ describe('DotFormComponent', () => { workflowActionsService.getWorkFlowActions.mockReturnValue( of(MOCK_SINGLE_WORKFLOW_ACTIONS) ); + dotWorkflowService.getWorkflowStatus.mockReturnValue(of(MOCK_WORKFLOW_STATUS)); store.initializeExistingContent(MOCK_CONTENTLET_1_OR_2_TABS.inode); // called with the inode of the contentlet spectator.detectChanges(); @@ -279,6 +285,7 @@ describe('DotFormComponent', () => { workflowActionsService.getWorkFlowActions.mockReturnValue( of(MOCK_SINGLE_WORKFLOW_ACTIONS) ); + 
dotWorkflowService.getWorkflowStatus.mockReturnValue(of(MOCK_WORKFLOW_STATUS)); store.initializeExistingContent(MOCK_CONTENTLET_1_OR_2_TABS.inode); spectator.detectChanges(); diff --git a/core-web/libs/edit-content/src/lib/components/dot-edit-content-form/dot-edit-content-form.component.ts b/core-web/libs/edit-content/src/lib/components/dot-edit-content-form/dot-edit-content-form.component.ts index 9b5d7d95aeab..bbb34de04816 100644 --- a/core-web/libs/edit-content/src/lib/components/dot-edit-content-form/dot-edit-content-form.component.ts +++ b/core-web/libs/edit-content/src/lib/components/dot-edit-content-form/dot-edit-content-form.component.ts @@ -36,6 +36,7 @@ import { FLATTENED_FIELD_TYPES } from '../../models/dot-edit-content-field.constant'; import { FIELD_TYPES } from '../../models/dot-edit-content-field.enum'; +import { FormValues } from '../../models/dot-edit-content-form.interface'; import { DotWorkflowActionParams } from '../../models/dot-edit-content.model'; import { getFinalCastedValue, isFilteredType } from '../../utils/functions.util'; import { DotEditContentFieldComponent } from '../dot-edit-content-field/dot-edit-content-field.component'; @@ -98,7 +99,7 @@ export class DotEditContentFormComponent implements OnInit { * * @memberof DotEditContentFormComponent */ - changeValue = output>(); + changeValue = output(); /** * Computed property that retrieves the filtered fields from the store. @@ -110,6 +111,10 @@ export class DotEditContentFormComponent implements OnInit { () => this.$store.contentType()?.fields?.filter(isFilteredType) ?? [] ); + $formFields = computed( + () => this.$store.contentType()?.fields?.filter((field) => !isFilteredType(field)) ?? [] + ); + /** * FormGroup instance that contains the form controls for the fields in the content type * @@ -158,72 +163,62 @@ export class DotEditContentFormComponent implements OnInit { } /** - * Initializes a listener for form value changes. 
- * When the form value changes, it calls the onFormChange method with the new value. - * The listener is automatically unsubscribed when the component is destroyed. + * Handles form value changes and emits the processed value. * - * @private + * @param {Record} value The raw form value * @memberof DotEditContentFormComponent */ - private initializeFormListener() { - this.form.valueChanges.pipe(takeUntilDestroyed(this.#destroyRef)).subscribe((value) => { - const processedValue = this.processFormValue(value); - this.changeValue.emit(processedValue); - }); + onFormChange(value: Record) { + const processedValue = this.processFormValue(value); + this.changeValue.emit(processedValue); } /** - * Emits the form value through the `formSubmit` event. + * Initializes a listener for form value changes. * - * @param {*} value + * @private * @memberof DotEditContentFormComponent */ - onFormChange(value) { - this.$filteredFields().forEach(({ variable, fieldType }) => { - if (FLATTENED_FIELD_TYPES.includes(fieldType as FIELD_TYPES)) { - value[variable] = value[variable]?.join(','); - } - - if (CALENDAR_FIELD_TYPES.includes(fieldType as FIELD_TYPES)) { - value[variable] = value[variable] - ?.toISOString() - .replace(/T|\.\d{3}Z/g, (match: string) => (match === 'T' ? ' ' : '')); // To remove the T and .000Z from the date) - } + private initializeFormListener() { + this.form.valueChanges.pipe(takeUntilDestroyed(this.#destroyRef)).subscribe((value) => { + this.onFormChange(value); }); - - this.changeValue.emit(value); } /** * Processes the form value, applying specific transformations for certain field types. 
* * @private - * @param {Record} value The raw form value + * @param {Record} value The raw form value * @returns {Record} The processed form value * @memberof DotEditContentFormComponent */ private processFormValue( value: Record - ): Record { + ): FormValues { return Object.fromEntries( - this.$filteredFields().map(({ variable, fieldType }) => { - let fieldValue = value[variable]; + Object.entries(value).map(([key, fieldValue]) => { + const field = this.$formFields().find((f) => f.variable === key); + + if (!field) { + return [key, fieldValue]; + } if ( Array.isArray(fieldValue) && - FLATTENED_FIELD_TYPES.includes(fieldType as FIELD_TYPES) + FLATTENED_FIELD_TYPES.includes(field.fieldType as FIELD_TYPES) ) { fieldValue = fieldValue.join(','); } else if ( fieldValue instanceof Date && - CALENDAR_FIELD_TYPES.includes(fieldType as FIELD_TYPES) + CALENDAR_FIELD_TYPES.includes(field.fieldType as FIELD_TYPES) ) { fieldValue = fieldValue .toISOString() .replace(/T|\.\d{3}Z/g, (match) => (match === 'T' ? ' ' : '')); } - return [variable, fieldValue?.toString() ?? '']; + return [key, fieldValue ?? 
'']; }) ); } @@ -236,13 +231,15 @@ export class DotEditContentFormComponent implements OnInit { * @memberof DotEditContentFormComponent */ private initializeForm() { - this.form = this.#fb.group({}); - this.$store.contentType().fields.forEach((field) => { - if (!isFilteredType(field)) { - const control = this.createFormControl(field); - this.form.addControl(field.variable, control); - } - }); + const controls = this.$formFields().reduce( + (acc, field) => ({ + ...acc, + [field.variable]: this.createFormControl(field) + }), + {} + ); + + this.form = this.#fb.group(controls); } /** diff --git a/core-web/libs/edit-content/src/lib/components/dot-edit-content-sidebar/components/dot-edit-content-sidebar-workflow/dot-edit-content-sidebar-workflow.component.html b/core-web/libs/edit-content/src/lib/components/dot-edit-content-sidebar/components/dot-edit-content-sidebar-workflow/dot-edit-content-sidebar-workflow.component.html index 6e22f664f42c..2bf55101b5e2 100644 --- a/core-web/libs/edit-content/src/lib/components/dot-edit-content-sidebar/components/dot-edit-content-sidebar-workflow/dot-edit-content-sidebar-workflow.component.html +++ b/core-web/libs/edit-content/src/lib/components/dot-edit-content-sidebar/components/dot-edit-content-sidebar-workflow/dot-edit-content-sidebar-workflow.component.html @@ -1,9 +1,6 @@ -@let scheme = $workflow().scheme; -@let task = $workflow().task; - +@let workflow = $workflow(); @let isLoading = $isLoading(); -@let noWorkflowSelectedYet = $noWorkflowSelectedYet(); -@let currentStep = $workflow().step; +@let workflowSelection = $workflowSelection();
{{ 'Workflow' | dm }}
@@ -13,7 +10,7 @@ data-testId="workflow-name"> @if (isLoading) { - } @else if (noWorkflowSelectedYet) { + } @else if (workflowSelection.isWorkflowSelected) { } @else { - {{ scheme?.name }} + {{ workflow.scheme?.name }} - - @if ($showWorkflowDialogIcon()) { + @if ($showWorkflowSelection()) { + } + + @if (workflow.resetAction) { + } } - @if (!noWorkflowSelectedYet) { -
{{ 'Step' | dm }}
+ @if (!workflowSelection.isWorkflowSelected) { +
+ {{ 'Step' | dm }} +
} @else { - {{ currentStep?.name }} + {{ workflow.step?.name }} }
- @if (task) { + @if (workflow.task) {
{{ 'Assignee' | dm }}
} @else { - {{ task.assignedTo }} + {{ workflow.task.assignedTo }} }
} @@ -81,7 +89,7 @@ - + - @if (!isNew) { + @if (!store.isNew()) { diff --git a/core-web/libs/edit-content/src/lib/components/dot-edit-content-sidebar/dot-edit-content-sidebar.component.spec.ts b/core-web/libs/edit-content/src/lib/components/dot-edit-content-sidebar/dot-edit-content-sidebar.component.spec.ts index 39a3dddf9874..0f207ea8a45a 100644 --- a/core-web/libs/edit-content/src/lib/components/dot-edit-content-sidebar/dot-edit-content-sidebar.component.spec.ts +++ b/core-web/libs/edit-content/src/lib/components/dot-edit-content-sidebar/dot-edit-content-sidebar.component.spec.ts @@ -30,11 +30,13 @@ import { DotEditContentSidebarComponent } from './dot-edit-content-sidebar.compo import { DotEditContentStore } from '../../feature/edit-content/store/edit-content.store'; import { DotEditContentService } from '../../services/dot-edit-content.service'; +import { MOCK_WORKFLOW_STATUS } from '../../utils/edit-content.mock'; import { MockResizeObserver } from '../../utils/mocks'; describe('DotEditContentSidebarComponent', () => { let spectator: Spectator; let dotEditContentService: SpyObject; + let dotWorkflowService: SpyObject; const createComponent = createComponentFactory({ component: DotEditContentSidebarComponent, @@ -73,8 +75,9 @@ describe('DotEditContentSidebarComponent', () => { spectator = createComponent({ detectChanges: false }); dotEditContentService = spectator.inject(DotEditContentService); - + dotWorkflowService = spectator.inject(DotWorkflowService); dotEditContentService.getReferencePages.mockReturnValue(of(1)); + dotWorkflowService.getWorkflowStatus.mockReturnValue(of(MOCK_WORKFLOW_STATUS)); spectator.detectChanges(); }); diff --git a/core-web/libs/edit-content/src/lib/components/dot-edit-content-sidebar/dot-edit-content-sidebar.component.ts b/core-web/libs/edit-content/src/lib/components/dot-edit-content-sidebar/dot-edit-content-sidebar.component.ts index a1be33580045..621153a08db9 100644 --- 
a/core-web/libs/edit-content/src/lib/components/dot-edit-content-sidebar/dot-edit-content-sidebar.component.ts +++ b/core-web/libs/edit-content/src/lib/components/dot-edit-content-sidebar/dot-edit-content-sidebar.component.ts @@ -1,7 +1,7 @@ -import { SlicePipe } from '@angular/common'; import { ChangeDetectionStrategy, Component, + computed, effect, inject, model, @@ -22,6 +22,7 @@ import { DotEditContentSidebarWorkflowComponent } from './components/dot-edit-co import { TabViewInsertDirective } from '../../directives/tab-view-insert/tab-view-insert.directive'; import { DotEditContentStore } from '../../feature/edit-content/store/edit-content.store'; +import { DotWorkflowState } from '../../models/dot-edit-content.model'; /** * The DotEditContentSidebarComponent is a component that displays the sidebar for the DotCMS content editing application. @@ -41,7 +42,7 @@ import { DotEditContentStore } from '../../feature/edit-content/store/edit-conte TabViewInsertDirective, DotEditContentSidebarSectionComponent, DotCopyButtonComponent, - SlicePipe, + DialogModule, DropdownModule, ButtonModule, @@ -51,25 +52,34 @@ import { DotEditContentStore } from '../../feature/edit-content/store/edit-conte export class DotEditContentSidebarComponent { readonly store: InstanceType = inject(DotEditContentStore); readonly $identifier = this.store.getCurrentContentIdentifier; + readonly $formValues = this.store.formValues; + readonly $contentType = this.store.contentType; + readonly $contentlet = this.store.contentlet; /** - * Model for the showDialog property. + * Computed property that returns the workflow state of the content. 
*/ - readonly $showDialog = model(false, { - alias: 'showDialog' - }); + readonly $workflow = computed(() => ({ + scheme: this.store.getScheme(), + step: this.store.getCurrentStep(), + task: this.store.lastTask(), + contentState: this.store.initialContentletState(), + resetAction: this.store.getResetWorkflowAction() + })); /** - * Effect that triggers the workflow status and new content status based on the contentlet and content type ID. + * Computed property that returns the workflow selection state. */ - #workflowEffect = effect(() => { - const inode = this.store.contentlet()?.inode; + readonly $workflowSelection = computed(() => ({ + schemeOptions: this.store.workflowSchemeOptions(), + isWorkflowSelected: this.store.showSelectWorkflowWarning() + })); - untracked(() => { - if (inode) { - this.store.getWorkflowStatus(inode); - } - }); + /** + * Model for the showDialog property. + */ + readonly $showDialog = model(false, { + alias: 'showDialog' }); /** @@ -84,4 +94,17 @@ export class DotEditContentSidebarComponent { } }); }); + + fireWorkflowAction(actionId: string): void { + this.store.fireWorkflowAction({ + actionId, + inode: this.$contentlet().inode, + data: { + contentlet: { + ...this.$formValues(), + contentType: this.$contentType().variable + } + } + }); + } } diff --git a/core-web/libs/edit-content/src/lib/feature/edit-content/edit-content.layout.component.html b/core-web/libs/edit-content/src/lib/feature/edit-content/edit-content.layout.component.html index ae6e7a4d8c78..3f989d4ee916 100644 --- a/core-web/libs/edit-content/src/lib/feature/edit-content/edit-content.layout.component.html +++ b/core-web/libs/edit-content/src/lib/feature/edit-content/edit-content.layout.component.html @@ -61,6 +61,7 @@ @if (showSidebar) { diff --git a/core-web/libs/edit-content/src/lib/feature/edit-content/edit-content.layout.component.ts b/core-web/libs/edit-content/src/lib/feature/edit-content/edit-content.layout.component.ts index b766a00ab673..6aef93920769 100644 --- 
a/core-web/libs/edit-content/src/lib/feature/edit-content/edit-content.layout.component.ts +++ b/core-web/libs/edit-content/src/lib/feature/edit-content/edit-content.layout.component.ts @@ -17,6 +17,7 @@ import { DotEditContentStore } from './store/edit-content.store'; import { DotEditContentFormComponent } from '../../components/dot-edit-content-form/dot-edit-content-form.component'; import { DotEditContentSidebarComponent } from '../../components/dot-edit-content-sidebar/dot-edit-content-sidebar.component'; +import { FormValues } from '../../models/dot-edit-content-form.interface'; import { DotEditContentService } from '../../services/dot-edit-content.service'; /** @@ -54,6 +55,14 @@ import { DotEditContentService } from '../../services/dot-edit-content.service'; changeDetection: ChangeDetectionStrategy.OnPush }) export class EditContentLayoutComponent { + /** + * The store instance. + * + * @type {InstanceType} + * @memberof EditContentLayoutComponent + */ + readonly $store: InstanceType = inject(DotEditContentStore); + /** * Whether the select workflow dialog should be shown. * @@ -72,10 +81,12 @@ export class EditContentLayoutComponent { } /** - * The store instance. + * Handles the form change event. 
* - * @type {InstanceType} + * @param {Record} value * @memberof EditContentLayoutComponent */ - readonly $store: InstanceType = inject(DotEditContentStore); + onFormChange(value: FormValues) { + this.$store.onFormChange(value); + } } diff --git a/core-web/libs/edit-content/src/lib/feature/edit-content/store/edit-content.store.ts b/core-web/libs/edit-content/src/lib/feature/edit-content/store/edit-content.store.ts index e8f0f9e0c2f9..8ac5843f7cbd 100644 --- a/core-web/libs/edit-content/src/lib/feature/edit-content/store/edit-content.store.ts +++ b/core-web/libs/edit-content/src/lib/feature/edit-content/store/edit-content.store.ts @@ -7,6 +7,7 @@ import { ComponentStatus } from '@dotcms/dotcms-models'; import { withLocales } from '@dotcms/edit-content/feature/edit-content/store/features/locales.feature'; import { withContent } from './features/content.feature'; +import { withForm } from './features/form.feature'; import { withInformation } from './features/information.feature'; import { withSidebar } from './features/sidebar.feature'; import { withWorkflow } from './features/workflow.feature'; @@ -33,6 +34,7 @@ export const DotEditContentStore = signalStore( withInformation(), withWorkflow(), withLocales(), + withForm(), withHooks({ onInit(store) { const activatedRoute = inject(ActivatedRoute); diff --git a/core-web/libs/edit-content/src/lib/feature/edit-content/store/features/content.feature.spec.ts b/core-web/libs/edit-content/src/lib/feature/edit-content/store/features/content.feature.spec.ts index ecfb8d568cf0..92f4750cb8ad 100644 --- a/core-web/libs/edit-content/src/lib/feature/edit-content/store/features/content.feature.spec.ts +++ b/core-web/libs/edit-content/src/lib/feature/edit-content/store/features/content.feature.spec.ts @@ -11,17 +11,24 @@ import { Router } from '@angular/router'; import { DotContentTypeService, DotHttpErrorManagerService, - DotWorkflowsActionsService + DotWorkflowsActionsService, + DotWorkflowService } from '@dotcms/data-access'; 
-import { ComponentStatus, DotCMSContentlet, DotCMSWorkflowAction } from '@dotcms/dotcms-models'; +import { + ComponentStatus, + DotCMSContentlet, + DotCMSWorkflowAction, + FeaturedFlags +} from '@dotcms/dotcms-models'; import { MOCK_SINGLE_WORKFLOW_ACTIONS } from '@dotcms/utils-testing'; import { withContent } from './content.feature'; import { workflowInitialState } from './workflow.feature'; import { DotEditContentService } from '../../../../services/dot-edit-content.service'; +import { MOCK_WORKFLOW_STATUS } from '../../../../utils/edit-content.mock'; import { CONTENT_TYPE_MOCK } from '../../../../utils/mocks'; -import { parseWorkflows } from '../../../../utils/workflows.utils'; +import { parseCurrentActions, parseWorkflows } from '../../../../utils/workflows.utils'; import { initialRootState } from '../edit-content.store'; describe('ContentFeature', () => { @@ -31,6 +38,7 @@ describe('ContentFeature', () => { let contentTypeService: SpyObject; let dotEditContentService: SpyObject; let workflowActionService: SpyObject; + let workflowService: SpyObject; let router: SpyObject; const createStore = createServiceFactory({ @@ -43,6 +51,7 @@ describe('ContentFeature', () => { DotEditContentService, DotHttpErrorManagerService, DotWorkflowsActionsService, + DotWorkflowService, Router ] }); @@ -53,6 +62,7 @@ describe('ContentFeature', () => { contentTypeService = spectator.inject(DotContentTypeService); dotEditContentService = spectator.inject(DotEditContentService); workflowActionService = spectator.inject(DotWorkflowsActionsService); + workflowService = spectator.inject(DotWorkflowService); router = spectator.inject(Router); }); @@ -71,6 +81,7 @@ describe('ContentFeature', () => { contentTypeService.getContentType.mockReturnValue(of(CONTENT_TYPE_MOCK)); workflowActionService.getByInode.mockReturnValue(of([])); workflowActionService.getWorkFlowActions.mockReturnValue(of([])); + workflowService.getWorkflowStatus.mockReturnValue(of(MOCK_WORKFLOW_STATUS)); 
store.initializeExistingContent('123'); tick(); @@ -119,13 +130,13 @@ describe('ContentFeature', () => { } ]; - // Mock all the requests that forkJoin is expecting dotEditContentService.getContentById.mockReturnValue(of(mockContentlet)); contentTypeService.getContentType.mockReturnValue(of(CONTENT_TYPE_MOCK)); workflowActionService.getByInode.mockReturnValue(of(expectedActions)); workflowActionService.getWorkFlowActions.mockReturnValue( of(MOCK_SINGLE_WORKFLOW_ACTIONS) ); + workflowService.getWorkflowStatus.mockReturnValue(of(MOCK_WORKFLOW_STATUS)); store.initializeExistingContent('123'); @@ -134,9 +145,95 @@ describe('ContentFeature', () => { // Verify all the expected values expect(store.contentlet()).toEqual(mockContentlet); expect(store.contentType()).toEqual(CONTENT_TYPE_MOCK); - expect(store.currentContentActions()).toEqual(expectedActions); + expect(store.currentContentActions()).toEqual(parseCurrentActions(expectedActions)); expect(store.schemes()).toEqual(parseWorkflows(MOCK_SINGLE_WORKFLOW_ACTIONS)); })); + + it('should return isLoaded as true when state is LOADED', fakeAsync(() => { + contentTypeService.getContentType.mockReturnValue(of(CONTENT_TYPE_MOCK)); + workflowActionService.getDefaultActions.mockReturnValue( + of(MOCK_SINGLE_WORKFLOW_ACTIONS) + ); + + store.initializeNewContent('testContentType'); + tick(); + + expect(store.isLoaded()).toBe(true); + })); + + it('should return hasError as true when error exists', fakeAsync(() => { + const mockError = new HttpErrorResponse({ status: 404 }); + workflowActionService.getDefaultActions.mockReturnValue( + of(MOCK_SINGLE_WORKFLOW_ACTIONS) + ); + contentTypeService.getContentType.mockReturnValue(throwError(() => mockError)); + + store.initializeNewContent('testContentType'); + tick(); + + expect(store.hasError()).toBe(true); + })); + + it('should return correct formData', fakeAsync(() => { + const mockContentlet = { + inode: '123', + contentType: 'testContentType' + } as DotCMSContentlet; + + 
dotEditContentService.getContentById.mockReturnValue(of(mockContentlet)); + contentTypeService.getContentType.mockReturnValue(of(CONTENT_TYPE_MOCK)); + workflowActionService.getByInode.mockReturnValue(of([])); + workflowActionService.getWorkFlowActions.mockReturnValue( + of(MOCK_SINGLE_WORKFLOW_ACTIONS) + ); + workflowService.getWorkflowStatus.mockReturnValue(of(MOCK_WORKFLOW_STATUS)); + + store.initializeExistingContent('123'); + tick(); + + expect(store.formData()).toEqual({ + contentlet: mockContentlet, + contentType: CONTENT_TYPE_MOCK + }); + })); + + it('should return isEnabledNewContentEditor based on content type metadata', fakeAsync(() => { + // Test when feature flag is false + const contentTypeWithoutEditor = { + ...CONTENT_TYPE_MOCK, + metadata: { + [FeaturedFlags.FEATURE_FLAG_CONTENT_EDITOR2_ENABLED]: false + } + }; + + contentTypeService.getContentType.mockReturnValue(of(contentTypeWithoutEditor)); + workflowActionService.getDefaultActions.mockReturnValue( + of(MOCK_SINGLE_WORKFLOW_ACTIONS) + ); + + store.initializeNewContent('testContentType'); + tick(); + + expect(store.isEnabledNewContentEditor()).toBe(false); + + // Test when feature flag is true + const contentTypeWithEditor = { + ...CONTENT_TYPE_MOCK, + metadata: { + [FeaturedFlags.FEATURE_FLAG_CONTENT_EDITOR2_ENABLED]: true + } + }; + + contentTypeService.getContentType.mockReturnValue(of(contentTypeWithEditor)); + workflowActionService.getDefaultActions.mockReturnValue( + of(MOCK_SINGLE_WORKFLOW_ACTIONS) + ); + + store.initializeNewContent('testContentType'); + tick(); + + expect(store.isEnabledNewContentEditor()).toBe(true); + })); }); describe('initializeNewContent', () => { @@ -145,6 +242,11 @@ describe('ContentFeature', () => { workflowActionService.getDefaultActions.mockReturnValue( of(MOCK_SINGLE_WORKFLOW_ACTIONS) ); + workflowService.getWorkflowStatus.mockReturnValue(of(MOCK_WORKFLOW_STATUS)); + workflowActionService.getWorkFlowActions.mockReturnValue( + of(MOCK_SINGLE_WORKFLOW_ACTIONS) 
+ ); + workflowActionService.getByInode.mockReturnValue(of([])); }); it('should initialize new content successfully', fakeAsync(() => { @@ -192,6 +294,7 @@ describe('ContentFeature', () => { workflowActionService.getWorkFlowActions.mockReturnValue( of(MOCK_SINGLE_WORKFLOW_ACTIONS) ); + workflowService.getWorkflowStatus.mockReturnValue(of(MOCK_WORKFLOW_STATUS)); }); it('should initialize existing content successfully', fakeAsync(() => { @@ -200,7 +303,7 @@ describe('ContentFeature', () => { expect(store.contentlet()).toEqual(mockContentlet); expect(store.contentType()).toEqual(CONTENT_TYPE_MOCK); - expect(store.currentContentActions()).toEqual(mockActions); + expect(store.currentContentActions()).toEqual(parseCurrentActions(mockActions)); expect(store.state()).toBe(ComponentStatus.LOADED); })); @@ -217,5 +320,31 @@ describe('ContentFeature', () => { expect(router.navigate).toHaveBeenCalledWith(['/c/content']); })); + + it('should set initialContentletState to reset when no scheme or step', fakeAsync(() => { + const mockContentlet = { + inode: '123', + contentType: 'testContentType' + } as DotCMSContentlet; + + const workflowStatusWithoutScheme = { + ...MOCK_WORKFLOW_STATUS, + scheme: null, + step: null + }; + + dotEditContentService.getContentById.mockReturnValue(of(mockContentlet)); + contentTypeService.getContentType.mockReturnValue(of(CONTENT_TYPE_MOCK)); + workflowActionService.getByInode.mockReturnValue(of([])); + workflowActionService.getWorkFlowActions.mockReturnValue( + of(MOCK_SINGLE_WORKFLOW_ACTIONS) + ); + workflowService.getWorkflowStatus.mockReturnValue(of(workflowStatusWithoutScheme)); + + store.initializeExistingContent('123'); + tick(); + + expect(store.initialContentletState()).toBe('reset'); + })); }); }); diff --git a/core-web/libs/edit-content/src/lib/feature/edit-content/store/features/content.feature.ts b/core-web/libs/edit-content/src/lib/feature/edit-content/store/features/content.feature.ts index c0f2dfe734e9..5ac754929ef7 100644 --- 
a/core-web/libs/edit-content/src/lib/feature/edit-content/store/features/content.feature.ts +++ b/core-web/libs/edit-content/src/lib/feature/edit-content/store/features/content.feature.ts @@ -20,7 +20,8 @@ import { DotContentTypeService, DotHttpErrorManagerService, DotRenderMode, - DotWorkflowsActionsService + DotWorkflowsActionsService, + DotWorkflowService } from '@dotcms/data-access'; import { ComponentStatus, @@ -36,7 +37,7 @@ import { WorkflowState } from './workflow.feature'; import { DotEditContentService } from '../../../../services/dot-edit-content.service'; import { transformFormDataFn } from '../../../../utils/functions.util'; -import { parseWorkflows } from '../../../../utils/workflows.utils'; +import { parseCurrentActions, parseWorkflows } from '../../../../utils/workflows.utils'; import { EditContentRootState } from '../edit-content.store'; export interface ContentState { @@ -45,19 +46,22 @@ export interface ContentState { /** Contentlet full data */ contentlet: DotCMSContentlet | null; /** Schemas available for the content type */ - schemes: { - [key: string]: { + schemes: Record< + string, + { scheme: DotCMSWorkflow; actions: DotCMSWorkflowAction[]; firstStep: WorkflowStep; - }; - }; + } + >; + initialContentletState: 'new' | 'existing' | 'reset'; } export const contentInitialState: ContentState = { contentType: null, contentlet: null, - schemes: {} + schemes: {}, + initialContentletState: 'new' }; export function withContent() { @@ -71,7 +75,7 @@ export function withContent() { * * @returns {boolean} True if content is new, false otherwise */ - isNew: computed(() => !store.contentlet()), + isNew: computed(() => store.initialContentletState() === 'new'), /** * Computed property that determines if the store's status is equal to ComponentStatus.LOADED. 
@@ -145,14 +149,25 @@ export function withContent() { dotEditContentService = inject(DotEditContentService), workflowActionService = inject(DotWorkflowsActionsService), dotHttpErrorManagerService = inject(DotHttpErrorManagerService), - router = inject(Router) + router = inject(Router), + dotWorkflowService = inject(DotWorkflowService) ) => ({ /** - * Method to initialize new content of a given type. - * New content + * Initializes the state for creating new content of a specified type. + * This method orchestrates the following operations: * - * @param {string} contentType - The type of content to initialize. - * @returns {Observable} An observable that completes when the initialization is done. + * 1. Sets the component state to loading + * 2. Makes parallel API calls to: + * - Fetch the complete content type definition + * - Retrieve all available workflow schemes and their default actions + * 3. Processes the workflow schemes: + * - Parses and organizes schemes by their IDs + * - Automatically selects default scheme if only one exists + * - Sets up initial available actions based on the default scheme + * + * @param {string} contentType - The identifier of the content type to initialize + * @returns {Observable} An observable that completes when all initialization data is loaded and processed + * @throws Will set error state and display error message if initialization fails */ initializeNewContent: rxMethod( pipe( @@ -165,17 +180,24 @@ export function withContent() { }).pipe( tapResponse({ next: ({ contentType, schemes }) => { + // Convert the schemes to an object with the schemeId as the key const parsedSchemes = parseWorkflows(schemes); const schemeIds = Object.keys(parsedSchemes); + // If we have only one scheme, we set it as the default one const defaultSchemeId = schemeIds.length === 1 ? 
schemeIds[0] : null; + // Parse the actions as an object with the schemeId as the key + const parsedCurrentActions = parseCurrentActions( + parsedSchemes[defaultSchemeId]?.actions || [] + ); patchState(store, { contentType, - schemes: parsedSchemes, currentSchemeId: defaultSchemeId, + currentContentActions: parsedCurrentActions, state: ComponentStatus.LOADED, + initialContentletState: 'new', error: null }); }, @@ -193,10 +215,22 @@ export function withContent() { ), /** - * Initializes the existing content by loading its details and updating the state. - * Content existing + * Initializes and loads all necessary data for an existing content by its inode. + * This method orchestrates multiple API calls to set up the complete content state: + * + * 1. Fetches the contentlet data using the inode + * 2. Based on the contentlet's content type: + * - Loads the full content type definition + * - Retrieves available workflow actions for the current inode + * - Fetches all possible workflow schemes for the content type + * - Gets the current workflow status including step and task information * - * @returns {Observable} An observable that emits the content ID. + * All this information is then consolidated and stored in the state to manage + * the content's workflow progression and available actions. 
+ * + * @param {string} inode - The unique identifier for the content to be loaded + * @returns {Observable} An observable that emits the content's inode when initialization is complete + * @throws Will redirect to /c/content and show error if initialization fails */ initializeExistingContent: rxMethod( pipe( @@ -210,13 +244,17 @@ export function withContent() { return forkJoin({ contentType: dotContentTypeService.getContentType(contentType), + // Allowed actions for this inode currentContentActions: workflowActionService.getByInode( inode, DotRenderMode.EDITING ), + // Allowed actions for this content type schemes: workflowActionService.getWorkFlowActions(contentType), - contentlet: of(contentlet) + contentlet: of(contentlet), + // Workflow status for this inode + workflowStatus: dotWorkflowService.getWorkflowStatus(inode) }); }), tapResponse({ @@ -224,15 +262,41 @@ export function withContent() { contentType, currentContentActions, schemes, - contentlet + contentlet, + workflowStatus }) => { + // Convert the schemes to an object with the schemeId as the key const parsedSchemes = parseWorkflows(schemes); + // Parse the actions as an object with the schemeId as the key + const parsedCurrentActions = + parseCurrentActions(currentContentActions); + + const { step, task, scheme } = workflowStatus; + // If there's only one workflow scheme, use that scheme's ID + // Otherwise use the ID from the workflow status if available + const schemeIds = Object.keys(parsedSchemes); + const currentSchemeId = + schemeIds.length === 1 + ? schemeIds[0] + : scheme?.id || null; + + // If there's no scheme or step, content is considered in 'reset' state + const initialContentletState = + !scheme || !step ? 
'reset' : 'existing'; + + // The current step is the first step of the selected scheme + const currentScheme = parsedSchemes[currentSchemeId]; + patchState(store, { contentType, + currentSchemeId, schemes: parsedSchemes, - currentContentActions, + currentContentActions: parsedCurrentActions, contentlet, - state: ComponentStatus.LOADED + state: ComponentStatus.LOADED, + currentStep: currentScheme?.firstStep, + lastTask: task, + initialContentletState }); }, error: (error: HttpErrorResponse) => { diff --git a/core-web/libs/edit-content/src/lib/feature/edit-content/store/features/form.feature.ts b/core-web/libs/edit-content/src/lib/feature/edit-content/store/features/form.feature.ts new file mode 100644 index 000000000000..d9b26d57d44c --- /dev/null +++ b/core-web/libs/edit-content/src/lib/feature/edit-content/store/features/form.feature.ts @@ -0,0 +1,34 @@ +import { patchState, signalStoreFeature, withMethods, withState } from '@ngrx/signals'; + +import { FormValues } from '../../../../models/dot-edit-content-form.interface'; + +export interface FormState { + formValues: FormValues; +} + +const initialState: FormState = { + formValues: {} +}; + +/** + * Feature that handles the form's state. + * + * @returns {SignalStoreFeature} The feature object. + */ +export function withForm() { + return signalStoreFeature( + withState(initialState), + + withMethods((store) => ({ + /** + * Handles the form change event and stores the form values. 
+ * + * @param {FormValues} formValues + * @memberof withForm + */ + onFormChange: (formValues: FormValues) => { + patchState(store, { formValues }); + } + })) + ); +} diff --git a/core-web/libs/edit-content/src/lib/feature/edit-content/store/features/workflow.feature.spec.ts b/core-web/libs/edit-content/src/lib/feature/edit-content/store/features/workflow.feature.spec.ts index e20470e58600..97ae7d4f1c2d 100644 --- a/core-web/libs/edit-content/src/lib/feature/edit-content/store/features/workflow.feature.spec.ts +++ b/core-web/libs/edit-content/src/lib/feature/edit-content/store/features/workflow.feature.spec.ts @@ -13,10 +13,9 @@ import { DotHttpErrorManagerService, DotMessageService, DotWorkflowActionsFireService, - DotWorkflowsActionsService, - DotWorkflowService + DotWorkflowsActionsService } from '@dotcms/data-access'; -import { ComponentStatus } from '@dotcms/dotcms-models'; +import { ComponentStatus, DotCMSContentlet } from '@dotcms/dotcms-models'; import { contentInitialState, ContentState } from './content.feature'; import { withWorkflow } from './workflow.feature'; @@ -24,11 +23,10 @@ import { withWorkflow } from './workflow.feature'; import { MOCK_CONTENTLET_1_TAB, MOCK_WORKFLOW_ACTIONS_NEW_ITEMNTTYPE_1_TAB, - MOCK_WORKFLOW_DATA, - MOCK_WORKFLOW_STATUS + MOCK_WORKFLOW_DATA } from '../../../../utils/edit-content.mock'; import { CONTENT_TYPE_MOCK } from '../../../../utils/mocks'; -import { parseWorkflows } from '../../../../utils/workflows.utils'; +import { parseCurrentActions, parseWorkflows } from '../../../../utils/workflows.utils'; import { initialRootState } from '../edit-content.store'; const mockInitialStateWithContent: ContentState = { @@ -41,7 +39,6 @@ const mockInitialStateWithContent: ContentState = { describe('WorkflowFeature', () => { let spectator: SpectatorService; let store: any; - let workflowService: SpyObject; let workflowActionService: SpyObject; let workflowActionsFireService: SpyObject; let router: SpyObject; @@ -54,7 +51,6 @@ 
describe('WorkflowFeature', () => { withWorkflow() ), mocks: [ - DotWorkflowService, DotWorkflowsActionsService, DotWorkflowActionsFireService, DotHttpErrorManagerService, @@ -67,7 +63,6 @@ describe('WorkflowFeature', () => { beforeEach(() => { spectator = createStore(); store = spectator.service; - workflowService = spectator.inject(DotWorkflowService); workflowActionService = spectator.inject(DotWorkflowsActionsService); workflowActionsFireService = spectator.inject(DotWorkflowActionsFireService); router = spectator.inject(Router); @@ -78,36 +73,6 @@ describe('WorkflowFeature', () => { }); describe('methods', () => { - describe('getWorkflowStatus', () => { - it('should get workflow status successfully', fakeAsync(() => { - workflowService.getWorkflowStatus.mockReturnValue(of(MOCK_WORKFLOW_STATUS)); - - store.getWorkflowStatus('123'); - tick(); - - expect(store.workflow()).toEqual({ - status: ComponentStatus.LOADED, - error: null - }); - expect(store.currentSchemeId()).toBe(MOCK_WORKFLOW_STATUS.scheme.id); - expect(store.currentStep()).toEqual(MOCK_WORKFLOW_STATUS.step); - expect(store.lastTask()).toEqual(MOCK_WORKFLOW_STATUS.task); - })); - - it('should handle error when getting workflow status', fakeAsync(() => { - const mockError = new HttpErrorResponse({ status: 404 }); - workflowService.getWorkflowStatus.mockReturnValue(throwError(() => mockError)); - - store.getWorkflowStatus('123'); - tick(); - - expect(store.workflow()).toEqual({ - status: ComponentStatus.ERROR, - error: 'Error getting workflow status' - }); - })); - }); - describe('fireWorkflowAction', () => { const mockOptions = { inode: '123', @@ -128,7 +93,7 @@ describe('WorkflowFeature', () => { expect(store.state()).toBe(ComponentStatus.LOADED); expect(store.contentlet()).toEqual(updatedContentlet); expect(store.currentContentActions()).toEqual( - MOCK_WORKFLOW_ACTIONS_NEW_ITEMNTTYPE_1_TAB + parseCurrentActions(MOCK_WORKFLOW_ACTIONS_NEW_ITEMNTTYPE_1_TAB) ); 
expect(router.navigate).toHaveBeenCalledWith( @@ -148,16 +113,6 @@ describe('WorkflowFeature', () => { expect(store.state()).toBe(ComponentStatus.LOADED); expect(store.error()).toBe('Error firing workflow action'); })); - - it('should redirect to content when contentlet has no inode', fakeAsync(() => { - const contentletWithoutInode = { ...MOCK_CONTENTLET_1_TAB, inode: undefined }; - workflowActionsFireService.fireTo.mockReturnValue(of(contentletWithoutInode)); - - store.fireWorkflowAction(mockOptions); - tick(); - - expect(router.navigate).toHaveBeenCalledWith(['/c/content']); - })); }); describe('setSelectedWorkflow', () => { @@ -167,36 +122,86 @@ describe('WorkflowFeature', () => { expect(store.currentSchemeId()).toBe(newSchemeId); }); }); + }); + + describe('computed properties', () => { + describe('getScheme', () => { + it('should return undefined when no scheme is selected', () => { + expect(store.getScheme()).toBeUndefined(); + }); + + it('should return the correct scheme when one is selected', () => { + const schemeId = MOCK_WORKFLOW_DATA[0].scheme.id; + store.setSelectedWorkflow(schemeId); + expect(store.getScheme()).toEqual(MOCK_WORKFLOW_DATA[0].scheme); + }); + }); + + describe('fireWorkflowAction', () => { + const mockOptions = { + inode: '123', + actionId: MOCK_WORKFLOW_ACTIONS_NEW_ITEMNTTYPE_1_TAB[0].id, + formData: {} + }; - describe('computed properties', () => { - beforeEach(fakeAsync(() => { - const mockStatus = { - ...MOCK_WORKFLOW_STATUS, - scheme: MOCK_WORKFLOW_DATA[0].scheme, - step: MOCK_WORKFLOW_STATUS.step - }; - workflowService.getWorkflowStatus.mockReturnValue(of(mockStatus)); + it('should handle reset action correctly', fakeAsync(() => { + workflowActionsFireService.fireTo.mockReturnValue(of({} as DotCMSContentlet)); + workflowActionService.getByInode.mockReturnValue( + of(MOCK_WORKFLOW_ACTIONS_NEW_ITEMNTTYPE_1_TAB) + ); - store.getWorkflowStatus('123'); + store.fireWorkflowAction(mockOptions); tick(); + + 
expect(store.getCurrentStep()).toBeNull(); + expect(messageService.add).toHaveBeenCalledWith( + expect.objectContaining({ + severity: 'success' + }) + ); })); - it('should return correct scheme', () => { - expect(store.currentSchemeId()).toBe(MOCK_WORKFLOW_DATA[0].scheme.id); - expect(store.getScheme()).toEqual(MOCK_WORKFLOW_DATA[0].scheme); - }); + it('should show processing message when action starts', fakeAsync(() => { + workflowActionsFireService.fireTo.mockReturnValue(of(MOCK_CONTENTLET_1_TAB)); - it('should return correct workflow scheme options', () => { - const expected = MOCK_WORKFLOW_DATA.map((workflow) => ({ - value: workflow.scheme.id, - label: workflow.scheme.name - })); - expect(store.workflowSchemeOptions()).toEqual(expected); - }); + store.fireWorkflowAction(mockOptions); + + expect(messageService.add).toHaveBeenCalledWith( + expect.objectContaining({ + severity: 'info', + icon: 'pi pi-spin pi-spinner' + }) + ); + tick(); + })); + }); + + describe('getCurrentStep', () => { + it('should return first step for new content with selected workflow', () => { + // Set up a new content scenario by selecting a workflow + const schemeId = MOCK_WORKFLOW_DATA[0].scheme.id; + store.setSelectedWorkflow(schemeId); - it('should return current step of workflow', () => { - expect(store.currentStep()).toEqual(MOCK_WORKFLOW_STATUS.step); + expect(store.getCurrentStep()).toEqual(MOCK_WORKFLOW_DATA[0].firstStep); }); + + it('should return current step for existing content', fakeAsync(() => { + // Mock a workflow action that would update the current step + const updatedContentlet = { ...MOCK_CONTENTLET_1_TAB, inode: '456' }; + workflowActionsFireService.fireTo.mockReturnValue(of(updatedContentlet)); + workflowActionService.getByInode.mockReturnValue( + of(MOCK_WORKFLOW_ACTIONS_NEW_ITEMNTTYPE_1_TAB) + ); + + store.fireWorkflowAction({ + inode: '123', + actionId: MOCK_WORKFLOW_ACTIONS_NEW_ITEMNTTYPE_1_TAB[0].id, + formData: {} + }); + tick(); + + 
expect(store.getCurrentStep()).toBeDefined(); + })); }); }); }); diff --git a/core-web/libs/edit-content/src/lib/feature/edit-content/store/features/workflow.feature.ts b/core-web/libs/edit-content/src/lib/feature/edit-content/store/features/workflow.feature.ts index 3835ed5a9aea..aa0d2cc2913e 100644 --- a/core-web/libs/edit-content/src/lib/feature/edit-content/store/features/workflow.feature.ts +++ b/core-web/libs/edit-content/src/lib/feature/edit-content/store/features/workflow.feature.ts @@ -24,8 +24,7 @@ import { DotMessageService, DotRenderMode, DotWorkflowActionsFireService, - DotWorkflowsActionsService, - DotWorkflowService + DotWorkflowsActionsService } from '@dotcms/data-access'; import { ComponentStatus, @@ -37,19 +36,17 @@ import { import { ContentState } from './content.feature'; -import { - getWorkflowActions, - shouldShowWorkflowActions, - shouldShowWorkflowWarning -} from '../../../../utils/workflows.utils'; +import { parseCurrentActions } from '../../../../utils/workflows.utils'; import { EditContentRootState } from '../edit-content.store'; +export type CurrentContentActionsWithScheme = Record; + export interface WorkflowState { /** Current workflow scheme id */ currentSchemeId: string | null; /** Actions available for the current content */ - currentContentActions: DotCMSWorkflowAction[]; + currentContentActions: CurrentContentActionsWithScheme; /** Current workflow step */ currentStep: WorkflowStep | null; @@ -66,7 +63,7 @@ export interface WorkflowState { export const workflowInitialState: WorkflowState = { currentSchemeId: null, - currentContentActions: [], + currentContentActions: {}, currentStep: null, lastTask: null, workflow: { @@ -108,17 +105,19 @@ export function withWorkflow() { * Computed property that determines if workflow action buttons should be shown. 
*/ showWorkflowActions: computed(() => { - const schemes = store.schemes(); - const contentlet = store.contentlet(); const currentSchemeId = store.currentSchemeId(); + const currentActions = store.currentContentActions()[currentSchemeId] || []; - return shouldShowWorkflowActions({ - schemes, - contentlet, - currentSchemeId - }); + return currentActions.length > 0; }), + /** + * Computed property that determines if the reset action should be shown. + * + * @returns {boolean} True if the reset action should be shown, false otherwise. + */ + resetActionState: computed(() => !store.currentStep()), + /** * Computed property that determines if the workflow selection warning should be shown. * Shows warning when content is new AND no workflow scheme has been selected yet. @@ -126,15 +125,26 @@ export function withWorkflow() { * @returns {boolean} True if warning should be shown, false otherwise */ showSelectWorkflowWarning: computed(() => { - const schemes = store.schemes(); - const contentlet = store.contentlet(); const currentSchemeId = store.currentSchemeId(); - return shouldShowWorkflowWarning({ - schemes, - contentlet, - currentSchemeId - }); + return !currentSchemeId; + }), + + /** + * Gets the first workflow action that has reset capability and is shown on EDITING. + * + * @returns {DotCMSWorkflowAction | undefined} First workflow action with reset capability shown on EDITING + */ + getResetWorkflowAction: computed(() => { + const currentActions = store.currentContentActions()[store.currentSchemeId()] || []; + + return ( + currentActions.find( + (action) => + action.hasResetActionlet && + action.showOn?.includes(DotRenderMode.EDITING) + ) || undefined + ); }), /** @@ -143,17 +153,10 @@ export function withWorkflow() { * @returns {DotCMSWorkflowAction[]} The actions for the current workflow scheme. 
*/ getActions: computed(() => { - const schemes = store.schemes(); - const contentlet = store.contentlet(); const currentSchemeId = store.currentSchemeId(); const currentContentActions = store.currentContentActions(); - return getWorkflowActions({ - schemes, - contentlet, - currentSchemeId, - currentContentActions - }); + return currentSchemeId ? currentContentActions[currentSchemeId] : []; }), /** @@ -192,7 +195,6 @@ export function withWorkflow() { withMethods( ( store, - dotWorkflowService = inject(DotWorkflowService), workflowActionService = inject(DotWorkflowsActionsService), workflowActionsFireService = inject(DotWorkflowActionsFireService), dotHttpErrorManagerService = inject(DotHttpErrorManagerService), @@ -201,61 +203,32 @@ export function withWorkflow() { router = inject(Router) ) => ({ /** - * Get workflow status for an existing contentlet - * we use the inode to get the workflow status - */ - getWorkflowStatus: rxMethod( - pipe( - tap(() => - patchState(store, { - workflow: { - ...store.workflow(), - status: ComponentStatus.LOADING, - error: null - } - }) - ), - switchMap((inode: string) => { - return dotWorkflowService.getWorkflowStatus(inode).pipe( - tapResponse({ - next: (response) => { - const { scheme, step, task } = response; - patchState(store, { - currentSchemeId: scheme?.id, - currentStep: step, - lastTask: task, - workflow: { - ...store.workflow(), - status: ComponentStatus.LOADED - } - }); - }, - - error: (error: HttpErrorResponse) => { - patchState(store, { - workflow: { - ...store.workflow(), - status: ComponentStatus.ERROR, - error: 'Error getting workflow status' - } - }); - dotHttpErrorManagerService.handle(error); - } - }) - ); - }) - ) - ), - - /** - * Sets the selected workflow scheme ID in the store. + * Sets the selected workflow scheme ID and updates related state in the store. + * For new content, it sets the current scheme ID, parses and sets the workflow actions, + * and sets the first step of the selected scheme. 
+ * For existing content, it only updates the current scheme ID and first step. * - * @param {string} schemeId - The ID of the workflow scheme to be selected. + * @param {string} currentSchemeId - The ID of the workflow scheme to be selected */ - setSelectedWorkflow: (schemeId: string) => { - patchState(store, { - currentSchemeId: schemeId - }); + setSelectedWorkflow: (currentSchemeId: string) => { + const schemes = store.schemes(); + const currentScheme = schemes[currentSchemeId]; + const actions = currentScheme.actions; + const isNew = !store.contentlet()?.inode; + + if (isNew) { + patchState(store, { + currentSchemeId, + currentContentActions: parseCurrentActions(actions), + currentStep: currentScheme.firstStep + }); + } else { + // Existing content + patchState(store, { + currentSchemeId, + currentStep: currentScheme.firstStep + }); + } }, /** @@ -272,40 +245,88 @@ export function withWorkflow() { DotFireActionOptions<{ [key: string]: string | object }> >( pipe( - tap(() => patchState(store, { state: ComponentStatus.SAVING })), + tap(() => { + patchState(store, { state: ComponentStatus.SAVING }); + messageService.clear(); + messageService.add({ + severity: 'info', + icon: 'pi pi-spin pi-spinner', + summary: dotMessageService.get( + 'edit.content.processing.workflow.message.title' + ), + detail: dotMessageService.get( + 'edit.content.processing.workflow.message' + ) + }); + }), switchMap((options) => { + const currentContentlet = store.contentlet(); + return workflowActionsFireService.fireTo(options).pipe( - tap((contentlet) => { - if (!contentlet.inode) { - router.navigate(['/c/content']); - } - }), - switchMap((contentlet) => { + switchMap((updatedContentlet) => { + // Use current contentlet if response is empty (reset action) + // otherwise use the updated contentlet from response + const contentlet = + Object.keys(updatedContentlet).length === 0 + ? 
currentContentlet + : updatedContentlet; + + const inode = contentlet.inode; + + // A reset action will return an empty object + const isReset = Object.keys(updatedContentlet).length === 0; + return forkJoin({ currentContentActions: workflowActionService.getByInode( - contentlet.inode, + inode, DotRenderMode.EDITING ), - contentlet: of(contentlet) + contentlet: of(contentlet), + isReset: of(isReset) }); }), tapResponse({ - next: ({ contentlet, currentContentActions }) => { - router.navigate(['/content', contentlet.inode], { - replaceUrl: true, - queryParamsHandling: 'preserve' - }); + next: ({ contentlet, currentContentActions, isReset }) => { + // Always navigate if the inode has changed + if (contentlet.inode !== currentContentlet?.inode) { + router.navigate(['/content', contentlet.inode], { + replaceUrl: true, + queryParamsHandling: 'preserve' + }); + } - patchState(store, { - contentlet, - currentContentActions, - state: ComponentStatus.LOADED, - error: null - }); + const parsedCurrentActions = + parseCurrentActions(currentContentActions); + + if (isReset) { + patchState(store, { + contentlet, + currentContentActions: parsedCurrentActions, + currentSchemeId: + Object.keys(store.schemes()).length > 1 + ? 
null + : store.currentSchemeId(), + initialContentletState: 'reset', + state: ComponentStatus.LOADED, + currentStep: null, + error: null + }); + } else { + patchState(store, { + contentlet, + currentContentActions: parsedCurrentActions, + currentSchemeId: store.currentSchemeId(), + state: ComponentStatus.LOADED, + error: null + }); + } + messageService.clear(); messageService.add({ severity: 'success', - summary: dotMessageService.get('success'), + summary: dotMessageService.get( + 'edit.content.success.workflow.title' + ), detail: dotMessageService.get( 'edit.content.success.workflow.message' ) diff --git a/core-web/libs/edit-content/src/lib/models/dot-edit-content-form.interface.ts b/core-web/libs/edit-content/src/lib/models/dot-edit-content-form.interface.ts index d7ef381c333a..d24fe67a986c 100644 --- a/core-web/libs/edit-content/src/lib/models/dot-edit-content-form.interface.ts +++ b/core-web/libs/edit-content/src/lib/models/dot-edit-content-form.interface.ts @@ -44,3 +44,15 @@ export interface DotFormData { contentlet: DotCMSContentlet | null; tabs: Tab[]; } + +/** + * Represents the form field value. + * @type FormFieldValue + */ +type FormFieldValue = string | string[] | Date; + +/** + * Represents the form values. + * @interface FormValues + */ +export type FormValues = Record; diff --git a/core-web/libs/edit-content/src/lib/models/dot-edit-content.model.ts b/core-web/libs/edit-content/src/lib/models/dot-edit-content.model.ts index af652190a5b9..60ac557f739a 100644 --- a/core-web/libs/edit-content/src/lib/models/dot-edit-content.model.ts +++ b/core-web/libs/edit-content/src/lib/models/dot-edit-content.model.ts @@ -1,3 +1,5 @@ +import { DotCMSWorkflowAction, DotCMSWorkflowStatus } from '@dotcms/dotcms-models'; + /** * Interface for workflow action parameters. * @@ -9,3 +11,16 @@ export interface DotWorkflowActionParams { inode: string; contentType: string; } + +/** + * Type for the internal contentlet state. 
+ * + * @export + * @type {DotContentletState} + */ +export type DotContentletState = 'new' | 'existing' | 'reset'; + +export interface DotWorkflowState extends DotCMSWorkflowStatus { + contentState: DotContentletState; + resetAction?: DotCMSWorkflowAction; +} diff --git a/core-web/libs/edit-content/src/lib/utils/mocks.ts b/core-web/libs/edit-content/src/lib/utils/mocks.ts index 94bb16290940..ff3f6dc553f4 100644 --- a/core-web/libs/edit-content/src/lib/utils/mocks.ts +++ b/core-web/libs/edit-content/src/lib/utils/mocks.ts @@ -25,6 +25,7 @@ import { CustomTreeNode, TreeNodeItem } from '../models/dot-edit-content-host-folder-field.interface'; +import { DotWorkflowState } from '../models/dot-edit-content.model'; /* FIELDS MOCK BY TYPE */ export const TEXT_FIELD_MOCK: DotCMSContentTypeField = { @@ -1678,3 +1679,181 @@ export const NEW_WORKFLOW_MOCK: DotCMSWorkflowStatus = { schemeId: 'd61a59e1-a49c-46f2-a929-db2b4bfa88b2' } }; + +/** + * Mock for input of the sidebar workflow component + */ +export const WORKFLOW_MOCKS: Record<'EXISTING' | 'NEW' | 'RESET', DotWorkflowState> = { + EXISTING: { + scheme: { + archived: false, + creationDate: new Date(1732809856947), + defaultScheme: false, + description: '', + entryActionId: null, + id: '2a4e1d2e-5342-4b46-be3d-80d3a2d9c0dd', + mandatory: false, + modDate: new Date(1732554197546), + name: 'Blogs', + system: false, + variableName: 'Blogs' + }, + step: { + creationDate: 1732859987790, + enableEscalation: false, + escalationAction: null, + escalationTime: 0, + id: '5865d447-5df7-4fa8-81c8-f8f183f3d1a2', + myOrder: 0, + name: 'Editing', + resolved: false, + schemeId: '2a4e1d2e-5342-4b46-be3d-80d3a2d9c0dd' + }, + task: { + assignedTo: 'Admin User', + belongsTo: null, + createdBy: 'e7d4e34e-5127-45fc-8123-d48b62d510e3', + creationDate: 1732809812333, + description: null, + dueDate: null, + id: '9cc41c12-f72d-431a-9b22-ef9f1067e6d9', + inode: '9cc41c12-f72d-431a-9b22-ef9f1067e6d9', + languageId: 1, + modDate: 1732809830428, + 
new: false, + status: 'f43c5d5a-fc51-4c67-a750-cc8f8e4a87f7', + title: '6b102831-e96e-459f-aa41-b5b451f8b8e1', + webasset: '6b102831-e96e-459f-aa41-b5b451f8b8e1' + }, + contentState: 'existing', + resetAction: { + actionInputs: [], + assignable: false, + commentable: false, + condition: '', + hasArchiveActionlet: false, + hasCommentActionlet: false, + hasDeleteActionlet: false, + hasDestroyActionlet: false, + hasMoveActionletActionlet: false, + hasMoveActionletHasPathActionlet: false, + hasOnlyBatchActionlet: false, + hasPublishActionlet: false, + hasPushPublishActionlet: false, + hasResetActionlet: true, + hasSaveActionlet: false, + hasUnarchiveActionlet: true, + hasUnpublishActionlet: false, + icon: 'workflowIcon', + id: '2d1dc771-8fda-4b43-9e81-71d43a8c73e4', + name: 'Reset Workflow', + nextAssign: '654b0931-1027-41f7-ad4d-173115ed8ec1', + nextStep: '5865d447-5df7-4fa8-81c8-f8f183f3d1a2', + nextStepCurrentStep: false, + order: 0, + owner: null, + roleHierarchyForAssign: false, + schemeId: '2a4e1d2e-5342-4b46-be3d-80d3a2d9c0dd', + showOn: [ + 'LOCKED', + 'PUBLISHED', + 'ARCHIVED', + 'UNPUBLISHED', + 'LISTING', + 'UNLOCKED', + 'EDITING', + 'NEW' + ] + } + }, + NEW: { + scheme: { + archived: false, + creationDate: new Date(1732809856947), + defaultScheme: false, + description: '', + entryActionId: null, + id: '2a4e1d2e-5342-4b46-be3d-80d3a2d9c0dd', + mandatory: false, + modDate: new Date(1732554197546), + name: 'Blogs', + system: false, + variableName: 'Blogs' + }, + step: { + creationDate: 1732859904768, + enableEscalation: false, + escalationAction: null, + escalationTime: 0, + id: '5865d447-5df7-4fa8-81c8-f8f183f3d1a2', + myOrder: 0, + name: 'Editing', + resolved: false, + schemeId: '2a4e1d2e-5342-4b46-be3d-80d3a2d9c0dd' + }, + task: null, + contentState: 'new', + resetAction: null + }, + RESET: { + scheme: { + archived: false, + creationDate: new Date(1732809856947), + defaultScheme: false, + description: '', + entryActionId: null, + id: 
'2a4e1d2e-5342-4b46-be3d-80d3a2d9c0dd', + mandatory: false, + modDate: new Date(1732554197546), + name: 'Blogs', + system: false, + variableName: 'Blogs' + }, + step: { + creationDate: 1732860056894, + enableEscalation: false, + escalationAction: null, + escalationTime: 0, + id: '5865d447-5df7-4fa8-81c8-f8f183f3d1a2', + myOrder: 0, + name: 'Editing', + resolved: false, + schemeId: '2a4e1d2e-5342-4b46-be3d-80d3a2d9c0dd' + }, + task: { + assignedTo: 'Admin User', + belongsTo: null, + createdBy: 'e7d4e34e-5127-45fc-8123-d48b62d510e3', + creationDate: 1732854838710, + description: null, + dueDate: null, + id: 'f485675d-9e34-485d-9ec8-39a6e03b0272', + inode: 'f485675d-9e34-485d-9ec8-39a6e03b0272', + languageId: 1, + modDate: 1732854838710, + new: false, + status: null, + title: '74968ffd-7692-47d5-bd3a-44eeb5fbe551', + webasset: '74968ffd-7692-47d5-bd3a-44eeb5fbe551' + }, + contentState: 'reset', + resetAction: null + } +}; + +/** + * Mock for input of the sidebar workflow component + */ +export const WORKFLOW_SELECTION_MOCK = { + WITH_OPTIONS: { + schemeOptions: [ + { label: 'System Workflow', value: '1' }, + { label: 'Marketing Workflow', value: '2' } + ], + isWorkflowSelected: false + }, + NO_WORKFLOW: { + schemeOptions: [], + isWorkflowSelected: true + } +}; diff --git a/core-web/libs/edit-content/src/lib/utils/workflows.utils.spec.ts b/core-web/libs/edit-content/src/lib/utils/workflows.utils.spec.ts index 322de4727203..0e37d49b547e 100644 --- a/core-web/libs/edit-content/src/lib/utils/workflows.utils.spec.ts +++ b/core-web/libs/edit-content/src/lib/utils/workflows.utils.spec.ts @@ -1,5 +1,5 @@ -import { MOCK_CONTENTLET_1_TAB, MOCK_WORKFLOW_DATA } from './edit-content.mock'; -import { getWorkflowActions, parseWorkflows, shouldShowWorkflowWarning } from './workflows.utils'; +import { MOCK_WORKFLOW_DATA } from './edit-content.mock'; +import { parseWorkflows } from './workflows.utils'; describe('Workflow Utils', () => { describe('parseWorkflows', () => { @@ -37,109 
+37,6 @@ describe('Workflow Utils', () => { }); }); - describe('shouldShowWorkflowWarning', () => { - const mockSchemes = parseWorkflows(MOCK_WORKFLOW_DATA); - - it('should return true when content is new, has multiple schemes and no scheme selected', () => { - const result = shouldShowWorkflowWarning({ - schemes: mockSchemes, - contentlet: null, - currentSchemeId: null - }); - expect(result).toBe(true); - }); - - it('should return false when content exists', () => { - const result = shouldShowWorkflowWarning({ - schemes: mockSchemes, - contentlet: MOCK_CONTENTLET_1_TAB, - currentSchemeId: null - }); - expect(result).toBe(false); - }); - - it('should return false when only one scheme exists', () => { - const singleScheme = { - 'd61a59e1-a49c-46f2-a929-db2b4bfa88b2': - mockSchemes['d61a59e1-a49c-46f2-a929-db2b4bfa88b2'] - }; - const result = shouldShowWorkflowWarning({ - schemes: singleScheme, - contentlet: null, - currentSchemeId: null - }); - expect(result).toBe(false); - }); - - it('should return false when scheme is selected', () => { - const result = shouldShowWorkflowWarning({ - schemes: mockSchemes, - contentlet: null, - currentSchemeId: 'd61a59e1-a49c-46f2-a929-db2b4bfa88b2' - }); - expect(result).toBe(false); - }); - }); - - describe('getWorkflowActions', () => { - const mockSchemes = parseWorkflows(MOCK_WORKFLOW_DATA); - - it('should return empty array when no scheme is selected', () => { - const result = getWorkflowActions({ - schemes: mockSchemes, - contentlet: null, - currentSchemeId: null, - currentContentActions: [] - }); - expect(result).toEqual([]); - }); - - it('should return empty array when selected scheme does not exist', () => { - const result = getWorkflowActions({ - schemes: mockSchemes, - contentlet: null, - currentSchemeId: 'non-existent-scheme', - currentContentActions: [] - }); - expect(result).toEqual([]); - }); - - it('should return current content actions for existing content', () => { - const currentActions = 
[MOCK_WORKFLOW_DATA[0].action]; - const result = getWorkflowActions({ - schemes: mockSchemes, - contentlet: MOCK_CONTENTLET_1_TAB, - currentSchemeId: 'd61a59e1-a49c-46f2-a929-db2b4bfa88b2', - currentContentActions: currentActions - }); - expect(result).toEqual(currentActions); - }); - - it('should return sorted scheme actions for new content with Save first', () => { - const result = getWorkflowActions({ - schemes: mockSchemes, - contentlet: null, - currentSchemeId: 'd61a59e1-a49c-46f2-a929-db2b4bfa88b2', - currentContentActions: [] - }); - - expect(result.length).toBeGreaterThan(0); - expect(result[0].name).toBe('Save'); - }); - - it('should return scheme actions when content exists but no current actions', () => { - const result = getWorkflowActions({ - schemes: mockSchemes, - contentlet: MOCK_CONTENTLET_1_TAB, - currentSchemeId: 'd61a59e1-a49c-46f2-a929-db2b4bfa88b2', - currentContentActions: [] - }); - - expect(result.length).toBeGreaterThan(0); - expect(result[0].name).toBe('Save'); - }); - }); - describe('parseWorkflows', () => { it('should return empty object when input is not an array', () => { expect(parseWorkflows(null)).toEqual({}); diff --git a/core-web/libs/edit-content/src/lib/utils/workflows.utils.ts b/core-web/libs/edit-content/src/lib/utils/workflows.utils.ts index 86168633c7dd..df9f7389329d 100644 --- a/core-web/libs/edit-content/src/lib/utils/workflows.utils.ts +++ b/core-web/libs/edit-content/src/lib/utils/workflows.utils.ts @@ -1,6 +1,6 @@ import { DotCMSWorkflow, DotCMSWorkflowAction, WorkflowStep } from '@dotcms/dotcms-models'; -import { ContentState } from '../feature/edit-content/store/features/content.feature'; +import { CurrentContentActionsWithScheme } from '../feature/edit-content/store/features/workflow.feature'; /** * Parses an array of workflow data and returns a new object with key-value pairs. 
@@ -37,111 +37,27 @@ export const parseWorkflows = ( }; /** - * Determines if workflow action buttons should be shown based on content and scheme state - * Shows workflow buttons when: - * - Content type has only one workflow scheme OR - * - Content is existing AND has a selected workflow scheme OR - * - Content is new and has selected a workflow scheme + * Parses current workflow actions into a map of scheme ID to actions * - * @param schemes - Available workflow schemes object - * @param contentlet - Current contentlet (if exists) - * @param currentSchemeId - Selected workflow scheme ID - * @returns boolean indicating if workflow actions should be shown + * @param actions Array of workflow actions + * @returns CurrentContentActionsWithScheme - Record of scheme IDs mapped to their corresponding actions */ -export const shouldShowWorkflowActions = ({ - schemes, - contentlet, - currentSchemeId -}: { - schemes: ContentState['schemes']; - contentlet: ContentState['contentlet']; - currentSchemeId: string | null; -}): boolean => { - const hasOneScheme = Object.keys(schemes).length === 1; - const isExisting = !!contentlet; - const hasSelectedScheme = !!currentSchemeId; - - if (hasOneScheme) { - return true; - } - - if (isExisting && hasSelectedScheme) { - return true; - } - - if (!isExisting && hasSelectedScheme) { - return true; +export const parseCurrentActions = ( + actions: DotCMSWorkflowAction[] +): CurrentContentActionsWithScheme => { + if (!Array.isArray(actions)) { + return {}; } - return false; -}; - -/** - * Determines if workflow selection warning should be shown - * Shows warning when: - * - Content is new (no contentlet exists) AND - * - Content type has multiple workflow schemes AND - * - No workflow scheme has been selected - * - * @param schemes - Available workflow schemes object - * @param contentlet - Current contentlet (if exists) - * @param currentSchemeId - Selected workflow scheme ID - * @returns boolean indicating if workflow selection warning 
should be shown - */ -export const shouldShowWorkflowWarning = ({ - schemes, - contentlet, - currentSchemeId -}: { - schemes: ContentState['schemes']; - contentlet: ContentState['contentlet']; - currentSchemeId: string | null; -}): boolean => { - const isNew = !contentlet; - const hasNoSchemeSelected = !currentSchemeId; - const hasMultipleSchemas = Object.keys(schemes).length > 1; - - return isNew && hasMultipleSchemas && hasNoSchemeSelected; -}; - -/** - * Gets the appropriate workflow actions based on content state - * Returns: - * - Empty array if no scheme is selected - * - Current content actions for existing content - * - Sorted scheme actions for new content (with 'Save' action first) - * - * @param schemes - Available workflow schemes object - * @param contentlet - Current contentlet (if exists) - * @param currentSchemeId - Selected workflow scheme ID - * @param currentContentActions - Current content specific actions - * @returns Array of workflow actions - */ -export const getWorkflowActions = ({ - schemes, - contentlet, - currentSchemeId, - currentContentActions -}: { - schemes: ContentState['schemes']; - contentlet: ContentState['contentlet']; - currentSchemeId: string | null; - currentContentActions: DotCMSWorkflowAction[]; -}): DotCMSWorkflowAction[] => { - const isNew = !contentlet; + return actions.reduce((acc, action) => { + const { schemeId } = action; - if (!currentSchemeId || !schemes[currentSchemeId]) { - return []; - } - - if (!isNew && currentContentActions.length) { - return currentContentActions; - } + if (!acc[schemeId]) { + acc[schemeId] = []; + } - return Object.values(schemes[currentSchemeId].actions).sort((a, b) => { - if (a.name === 'Save') return -1; - if (b.name === 'Save') return 1; + acc[schemeId].push(action); - return a.name.localeCompare(b.name); - }); + return acc; + }, {} as CurrentContentActionsWithScheme); }; diff --git a/dotCMS/src/main/webapp/WEB-INF/messages/Language.properties 
b/dotCMS/src/main/webapp/WEB-INF/messages/Language.properties index 3bcd36a3b631..3403d6d5413d 100644 --- a/dotCMS/src/main/webapp/WEB-INF/messages/Language.properties +++ b/dotCMS/src/main/webapp/WEB-INF/messages/Language.properties @@ -5842,7 +5842,10 @@ edit.content.wysiwyg-field.language-variable-tooltip=Start typing to see matchin edit.content.sidebar.information.references-with.pages.tooltip=Used in {0} pages edit.content.sidebar.information.references-with.pages.not.used=Not used on any page yet -edit.content.success.workflow.message=Your changes have being applied. +edit.content.success.workflow.title=Success +edit.content.success.workflow.message=Your changes have been applied. +edit.content.processing.workflow.message=Your changes are being applied. +edit.content.processing.workflow.message.title=Processing edit.content.layout.back.to.old.edit.content=Try out the new Edit Content experience, which makes it easier than ever to edit and manage content. You can easily edit.content.layout.back.to.old.edit.content.switch=switch back @@ -5851,6 +5854,7 @@ edit.content.layout.back.to.old.edit.content.subtitle=any time. edit.content.layout.select.workflow.warning=You haven't selected a Workflow yet. edit.content.layout.select.workflow.warning.switch=Select a Workflow edit.content.layout.select.workflow.warning.subtitle=to take action on this content. +edit.content.sidebar.general.title=General edit.content.sidebar.workflow.dialog.title=Select the workflow you want to work on. edit.content.sidebar.workflow.dialog.dropdown.placeholder=Select a Workflow edit.content.sidebar.workflow.select.workflow=Select Workflow