From 74f95eb4200c8bfb84e8aaca0dab11bb73110095 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dominik=20Han=C3=A1k?= Date: Tue, 27 Feb 2024 15:04:53 +0100 Subject: [PATCH] #532: Initial reorganization of Quarkus guides (#536) --- serverlessworkflow/antora.yml | 28 +- serverlessworkflow/modules/ROOT/nav.adoc | 87 ++--- .../downstream-post-create-project.adoc | 2 +- .../getting-started-requirement.adoc | 2 +- .../modules/ROOT/pages/cloud/index.adoc | 18 +- .../operator/building-custom-images.adoc | 6 +- .../operator/install-serverless-operator.adoc | 3 +- .../build-workflow-images-with-tekton.adoc | 1 - .../versioning-workflows-in-knative.adoc | 1 - ...erless-workflow-specification-support.adoc | 4 +- .../pages/core/custom-functions-support.adoc | 6 +- ...efining-an-input-schema-for-workflows.adoc | 2 +- .../core/handling-events-on-workflows.adoc | 6 +- .../ROOT/pages/core/timeouts-support.adoc | 2 +- .../core/understanding-jq-expressions.adoc | 2 +- ...understanding-workflow-error-handling.adoc | 2 +- .../pages/core/working-with-callbacks.adoc | 115 +----- .../pages/core/working-with-parallelism.adoc | 2 +- .../data-index/data-index-core-concepts.adoc | 12 +- .../pages/data-index/data-index-service.adoc | 147 +------- .../event-correlation-with-workflows.adoc | 2 +- ...hestration-of-asyncapi-based-services.adoc | 4 +- ...rkflow-service-with-kn-cli-and-vscode.adoc | 7 +- .../modules/ROOT/pages/index.adoc | 150 +------- .../pages/integrations/core-concepts.adoc | 3 + .../pages/job-services/core-concepts.adoc | 12 +- .../ROOT/pages/migration-guide/index.adoc | 5 + .../operator/to-1.43.0-migration-guide.adoc} | 3 +- .../ROOT/pages/persistence/core-concepts.adoc | 20 ++ ...onfiguring-openapi-services-endpoints.adoc | 327 ------------------ ...chestration-of-openapi-based-services.adoc | 6 +- .../working-with-openapi-callbacks.adoc | 101 +----- .../debugging-workflow-execution-runtime.adoc | 1 - ...development-tools-for-troubleshooting.adoc | 1 - 
.../kn-plugin-workflow-overview.adoc | 6 +- .../callbacks/callback-state-example.adoc | 117 +++++++ .../openapi-callback-events-example.adoc | 103 ++++++ .../_dataindex_deployment_operator.adoc | 0 .../data-index/common/_prerequisites.adoc | 0 .../common/_querying_dataindex.adoc | 0 .../data-index-as-quarkus-dev-service.adoc | 141 ++++++++ .../data-index-quarkus-extension.adoc | 10 +- .../data-index/data-index-usecase-multi.adoc | 8 +- .../data-index-usecase-singleton.adoc | 8 +- .../_common_proc_deploy_kubectl_oc.adoc | 2 +- .../_create_namespace_and_deploy_info.adoc | 0 ...eploy_workflow_application_requisites.adoc | 2 +- .../deployments}/common/_prerequisites.adoc | 2 +- .../common/_proc_deploy_sw_kn_cli.adoc | 0 .../common/_proc_deploy_sw_kubectl.adoc | 0 .../common/_proc_deploy_sw_oc.adoc | 0 .../common/_proc_deploy_sw_quarkus_cli.adoc | 0 .../common/_verify_if_swf_is_deployed.adoc | 0 .../deployments}/deploying-on-kubernetes.adoc | 18 +- .../deployments}/deploying-on-minikube.adoc | 16 +- .../deployments}/deploying-on-openshift.adoc | 12 +- ...-produce-events-with-knative-eventing.adoc | 10 +- .../consume-producing-events-with-kafka.adoc | 4 +- .../orchestration-based-saga-pattern.adoc | 2 +- ...build-workflow-image-with-quarkus-cli.adoc | 122 +------ .../create-your-first-workflow-service.adoc | 6 +- ...-serverless-workflow-quarkus-examples.adoc | 78 +++++ ...-serverless-workflow-quarkus-examples.adoc | 47 +++ .../advanced-developer-use-cases/index.adoc | 10 + .../camel-routes-integration.adoc | 6 +- .../custom-functions-knative.adoc | 12 +- .../expose-metrics-to-prometheus.adoc | 4 +- ...erverless-dashboard-with-runtime-data.adoc | 8 +- .../job-service}/quarkus-extensions.adoc | 12 +- .../integration-tests-with-postgresql.adoc | 10 +- .../persistence-with-postgresql.adoc | 10 +- .../postgresql-flyway-migration.adoc | 2 +- .../kubernetes-service-discovery.adoc | 4 +- ...enapi-services-endpoints-with-quarkus.adoc | 327 ++++++++++++++++++ 
.../orchestration-of-grpc-services.adoc | 2 +- ...ic-integration-tests-with-restassured.adoc | 12 +- ...ocking-http-cloudevents-with-wiremock.adoc | 12 +- ...ocking-openapi-services-with-wiremock.adoc | 16 +- .../timeouts}/timeout-showcase-example.adoc | 2 +- 79 files changed, 1106 insertions(+), 1147 deletions(-) delete mode 100644 serverlessworkflow/modules/ROOT/pages/cloud/quarkus/build-workflow-images-with-tekton.adoc delete mode 100644 serverlessworkflow/modules/ROOT/pages/cloud/quarkus/versioning-workflows-in-knative.adoc create mode 100644 serverlessworkflow/modules/ROOT/pages/integrations/core-concepts.adoc create mode 100644 serverlessworkflow/modules/ROOT/pages/migration-guide/index.adoc rename serverlessworkflow/modules/ROOT/pages/{cloud/operator/migration-guide.adoc => migration-guide/operator/to-1.43.0-migration-guide.adoc} (98%) create mode 100644 serverlessworkflow/modules/ROOT/pages/persistence/core-concepts.adoc delete mode 100644 serverlessworkflow/modules/ROOT/pages/testing-and-troubleshooting/debugging-workflow-execution-runtime.adoc delete mode 100644 serverlessworkflow/modules/ROOT/pages/testing-and-troubleshooting/development-tools-for-troubleshooting.adoc create mode 100644 serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/callbacks/callback-state-example.adoc create mode 100644 serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/callbacks/openapi-callback-events-example.adoc rename serverlessworkflow/modules/ROOT/pages/{ => use-cases/advanced-developer-use-cases}/data-index/common/_dataindex_deployment_operator.adoc (100%) rename serverlessworkflow/modules/ROOT/pages/{ => use-cases/advanced-developer-use-cases}/data-index/common/_prerequisites.adoc (100%) rename serverlessworkflow/modules/ROOT/pages/{ => use-cases/advanced-developer-use-cases}/data-index/common/_querying_dataindex.adoc (100%) create mode 100644 
serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/data-index/data-index-as-quarkus-dev-service.adoc rename serverlessworkflow/modules/ROOT/pages/{ => use-cases/advanced-developer-use-cases}/data-index/data-index-quarkus-extension.adoc (94%) rename serverlessworkflow/modules/ROOT/pages/{ => use-cases/advanced-developer-use-cases}/data-index/data-index-usecase-multi.adoc (94%) rename serverlessworkflow/modules/ROOT/pages/{ => use-cases/advanced-developer-use-cases}/data-index/data-index-usecase-singleton.adoc (94%) rename serverlessworkflow/modules/ROOT/pages/{cloud => use-cases/advanced-developer-use-cases/deployments}/common/_common_proc_deploy_kubectl_oc.adoc (96%) rename serverlessworkflow/modules/ROOT/pages/{cloud => use-cases/advanced-developer-use-cases/deployments}/common/_create_namespace_and_deploy_info.adoc (100%) rename serverlessworkflow/modules/ROOT/pages/{cloud => use-cases/advanced-developer-use-cases/deployments}/common/_deploy_workflow_application_requisites.adoc (64%) rename serverlessworkflow/modules/ROOT/pages/{cloud => use-cases/advanced-developer-use-cases/deployments}/common/_prerequisites.adoc (80%) rename serverlessworkflow/modules/ROOT/pages/{cloud => use-cases/advanced-developer-use-cases/deployments}/common/_proc_deploy_sw_kn_cli.adoc (100%) rename serverlessworkflow/modules/ROOT/pages/{cloud => use-cases/advanced-developer-use-cases/deployments}/common/_proc_deploy_sw_kubectl.adoc (100%) rename serverlessworkflow/modules/ROOT/pages/{cloud => use-cases/advanced-developer-use-cases/deployments}/common/_proc_deploy_sw_oc.adoc (100%) rename serverlessworkflow/modules/ROOT/pages/{cloud => use-cases/advanced-developer-use-cases/deployments}/common/_proc_deploy_sw_quarkus_cli.adoc (100%) rename serverlessworkflow/modules/ROOT/pages/{cloud => use-cases/advanced-developer-use-cases/deployments}/common/_verify_if_swf_is_deployed.adoc (100%) rename serverlessworkflow/modules/ROOT/pages/{cloud/quarkus => 
use-cases/advanced-developer-use-cases/deployments}/deploying-on-kubernetes.adoc (89%) rename serverlessworkflow/modules/ROOT/pages/{cloud/quarkus => use-cases/advanced-developer-use-cases/deployments}/deploying-on-minikube.adoc (94%) rename serverlessworkflow/modules/ROOT/pages/{cloud/quarkus => use-cases/advanced-developer-use-cases/deployments}/deploying-on-openshift.adoc (94%) rename serverlessworkflow/modules/ROOT/pages/{eventing => use-cases/advanced-developer-use-cases/event-orchestration}/consume-produce-events-with-knative-eventing.adoc (95%) rename serverlessworkflow/modules/ROOT/pages/{eventing => use-cases/advanced-developer-use-cases/event-orchestration}/consume-producing-events-with-kafka.adoc (97%) rename serverlessworkflow/modules/ROOT/pages/use-cases/{ => advanced-developer-use-cases/event-orchestration}/orchestration-based-saga-pattern.adoc (99%) rename serverlessworkflow/modules/ROOT/pages/{cloud/quarkus => use-cases/advanced-developer-use-cases/getting-started}/build-workflow-image-with-quarkus-cli.adoc (62%) rename serverlessworkflow/modules/ROOT/pages/{ => use-cases/advanced-developer-use-cases}/getting-started/create-your-first-workflow-service.adoc (97%) create mode 100644 serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/getting-started/test-serverless-workflow-quarkus-examples.adoc create mode 100644 serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/getting-started/working-with-serverless-workflow-quarkus-examples.adoc create mode 100644 serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/index.adoc rename serverlessworkflow/modules/ROOT/pages/{ => use-cases/advanced-developer-use-cases}/integrations/camel-routes-integration.adoc (93%) rename serverlessworkflow/modules/ROOT/pages/{ => use-cases/advanced-developer-use-cases}/integrations/custom-functions-knative.adoc (85%) rename serverlessworkflow/modules/ROOT/pages/{ => 
use-cases/advanced-developer-use-cases}/integrations/expose-metrics-to-prometheus.adoc (96%) rename serverlessworkflow/modules/ROOT/pages/{ => use-cases/advanced-developer-use-cases}/integrations/serverless-dashboard-with-runtime-data.adoc (95%) rename serverlessworkflow/modules/ROOT/pages/{job-services => use-cases/advanced-developer-use-cases/job-service}/quarkus-extensions.adoc (87%) rename serverlessworkflow/modules/ROOT/pages/{ => use-cases/advanced-developer-use-cases}/persistence/integration-tests-with-postgresql.adoc (93%) rename serverlessworkflow/modules/ROOT/pages/{ => use-cases/advanced-developer-use-cases}/persistence/persistence-with-postgresql.adoc (92%) rename serverlessworkflow/modules/ROOT/pages/{ => use-cases/advanced-developer-use-cases}/persistence/postgresql-flyway-migration.adoc (98%) rename serverlessworkflow/modules/ROOT/pages/{cloud/quarkus => use-cases/advanced-developer-use-cases/service-discovery}/kubernetes-service-discovery.adoc (98%) create mode 100644 serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/service-orchestration/configuring-openapi-services-endpoints-with-quarkus.adoc rename serverlessworkflow/modules/ROOT/pages/{ => use-cases/advanced-developer-use-cases}/service-orchestration/orchestration-of-grpc-services.adoc (99%) rename serverlessworkflow/modules/ROOT/pages/{testing-and-troubleshooting => use-cases/advanced-developer-use-cases/testing}/basic-integration-tests-with-restassured.adoc (90%) rename serverlessworkflow/modules/ROOT/pages/{testing-and-troubleshooting => use-cases/advanced-developer-use-cases/testing}/mocking-http-cloudevents-with-wiremock.adoc (95%) rename serverlessworkflow/modules/ROOT/pages/{testing-and-troubleshooting => use-cases/advanced-developer-use-cases/testing}/mocking-openapi-services-with-wiremock.adoc (92%) rename serverlessworkflow/modules/ROOT/pages/use-cases/{ => advanced-developer-use-cases/timeouts}/timeout-showcase-example.adoc (99%) diff --git 
a/serverlessworkflow/antora.yml b/serverlessworkflow/antora.yml index 1bf585a9c..5538819c3 100644 --- a/serverlessworkflow/antora.yml +++ b/serverlessworkflow/antora.yml @@ -43,17 +43,21 @@ asciidoc: # kogito_sw_ga: org.kie.kogito:kogito-quarkus-serverless-workflow kogito_sw_ga: kogito-quarkus-serverless-workflow # downstream: 2.7.6.Final-redhat-00006 - quarkus_version: 2.16.10.Final + quarkus_version: 3.2.9.Final # downstream: 2.7.6.Final-redhat-00006 (might be different to the quarkus_version) - quarkus_platform_version: 2.16.10.Final - java_min_version: 11+ - maven_min_version: 3.8.6 + quarkus_platform_version: 3.2.9.Final + java_min_version: 17+ + maven_min_version: 3.9.3 graalvm_min_version: 22.3.0 spec_version: 0.8 vscode_version: 1.67.0 - kn_cli_version: 0.31.0 + kn_cli_version: 0.32.0 + kn_cli_tag: 0.32 docker_min_version: 20.10.7 docker_compose_min_version: 1.27.2 + kubernetes_version: 1.26 + openshift_version_min: 4.10 + openshift_version_max: 4.15 # only used in downstream operator_version: main # after TP1, change to Serverless Logic @@ -83,7 +87,10 @@ asciidoc: quarkus_guides_kafka_url: https://quarkus.io/guides/kafka quarkus_guides_config_reference_url: https://quarkus.io/guides/config-reference quarkus_guides_infinispan_client_reference_url: https://quarkus.io/guides/infinispan-client-reference + quarkus_guides_profiles_url: https://quarkus.io/guides/config-reference#profiles quarkus_url: https://quarkus.io/ + dev_services_url: https://quarkus.io/guides/dev-services + test_containers_url: https://www.testcontainers.org/ smallrye_messaging_url: https://smallrye.io/smallrye-reactive-messaging/smallrye-reactive-messaging/3.3 quarkus_config_url: https://quarkus.io/guides/config quarkus_swagger_url: https://quarkus.io/guides/openapi-swaggerui @@ -117,7 +124,10 @@ asciidoc: ocp_knative_eventing_url: https://docs.openshift.com/container-platform/4.12/serverless/install/installing-knative-eventing.html ocp_kn_cli_url: 
https://docs.openshift.com/container-platform/4.12/serverless/install/installing-kn.html serverless_workflow_vscode_extension_name: KIE Serverless Workflow Editor - # references to documents within the documentation. - data_index_xref: xref:/data-index/data-index-core-concepts.adoc - job_service_xref: xref:/job-services/core-concepts.adoc - + # xreferences to documents within the documentation. + data_index_xref: xref:data-index/data-index-core-concepts.adoc + job_service_xref: xref:job-services/core-concepts.adoc + # string unification references + data_index_ref: Data Index + workflow_instance: workflow instance + workflow_instances: ${workflow_instance}s diff --git a/serverlessworkflow/modules/ROOT/nav.adoc b/serverlessworkflow/modules/ROOT/nav.adoc index 37c110c81..4fff5c654 100644 --- a/serverlessworkflow/modules/ROOT/nav.adoc +++ b/serverlessworkflow/modules/ROOT/nav.adoc @@ -17,10 +17,8 @@ * xref:release_notes.adoc[Release notes] * Getting Started +** xref:getting-started/getting-familiar-with-our-tooling.adoc[] ** xref:getting-started/create-your-first-workflow-service-with-kn-cli-and-vscode.adoc[] -** xref:getting-started/create-your-first-workflow-service.adoc[] -// We will refactor this section here: https://issues.redhat.com/browse/KOGITO-9451 -//** xref:getting-started/getting-familiar-with-our-tooling.adoc[Getting familiar with tooling] * Core Concepts ** xref:core/cncf-serverless-workflow-specification-support.adoc[] ** xref:core/handling-events-on-workflows.adoc[Events] @@ -28,12 +26,10 @@ ** xref:core/understanding-jq-expressions.adoc[] ** xref:core/understanding-workflow-error-handling.adoc[Error handling] ** xref:core/configuration-properties.adoc[Configuration] -// TODO: describe output schema https://issues.redhat.com/browse/KOGITO-9468 ** xref:core/defining-an-input-schema-for-workflows.adoc[Input Schema] ** xref:core/custom-functions-support.adoc[Custom Functions] ** xref:core/timeouts-support.adoc[Timeouts] **
xref:core/working-with-parallelism.adoc[Parallelism] -// We will refactor this section here: https://issues.redhat.com/browse/KOGITO-9451 * Tooling ** xref:tooling/serverless-workflow-editor/swf-editor-overview.adoc[Editor] ** xref:tooling/serverless-workflow-editor/swf-editor-vscode-extension.adoc[VS Code extension] @@ -47,21 +43,14 @@ ** xref:service-orchestration/orchestration-of-openapi-based-services.adoc[OpenAPI] *** xref:service-orchestration/configuring-openapi-services-endpoints.adoc[Advanced Configuration] *** xref:service-orchestration/working-with-openapi-callbacks.adoc[Callbacks] -** xref:service-orchestration/orchestration-of-grpc-services.adoc[gRPC] ** xref:service-orchestration/troubleshooting.adoc[Troubleshooting] * Event Orchestration ** xref:eventing/orchestration-of-asyncapi-based-services.adoc[AsyncAPI] ** xref:eventing/event-correlation-with-workflows.adoc[Event Correlation] -** Quarkus -*** xref:eventing/consume-producing-events-with-kafka.adoc[Apache Kafka] -*** xref:eventing/consume-produce-events-with-knative-eventing.adoc[Knative Eventing] * Security ** Client Authentication *** xref:security/authention-support-for-openapi-services.adoc[OpenAPI Authentication] *** xref:security/orchestrating-third-party-services-with-oauth2.adoc[OpenAPI OAuth2] -//* Running Workflows Locally -//** Running a Workflow Project TODO: https://issues.redhat.com/browse/KOGITO-9457 -// ** Running a Quarkus Workflow Project TODO: https://issues.redhat.com/browse/KOGITO-9458 * Executing, Testing and Troubleshooting ** Executing and Testing Workflows *** xref:testing-and-troubleshooting/quarkus-dev-ui-extension/quarkus-dev-ui-overview.adoc[Developer UI] @@ -70,16 +59,8 @@ **** xref:testing-and-troubleshooting/quarkus-dev-ui-extension/quarkus-dev-ui-monitoring-page.adoc[Monitoring] **** xref:testing-and-troubleshooting/quarkus-dev-ui-extension/quarkus-dev-ui-custom-dashboard-page.adoc[Dashboards] *** 
xref:testing-and-troubleshooting/kn-plugin-workflow-overview.adoc[Command Line] -// *** Swagger UI, GraphiQL, Console logging TODO: https://issues.redhat.com/browse/KOGITO-9462 -** Quarkus -*** xref:testing-and-troubleshooting/mocking-http-cloudevents-with-wiremock.adoc[Mocking HTTP CloudEvents] -*** xref:testing-and-troubleshooting/mocking-openapi-services-with-wiremock.adoc[Mocking OpenAPI Services] -*** xref:testing-and-troubleshooting/basic-integration-tests-with-restassured.adoc[Testing with REST Assured] * Persistence -** Quarkus -*** xref:persistence/persistence-with-postgresql.adoc[PostgreSQL Configuration] -*** xref:persistence/postgresql-flyway-migration.adoc[PostgreSQL Database Migration] -*** xref:persistence/integration-tests-with-postgresql.adoc[Integration Tests with PostgreSQL] +** xref:persistence/core-concepts.adoc[Core concepts] // * Java Workflow Library TODO: https://issues.redhat.com/browse/KOGITO-9454 * xref:cloud/index.adoc[Cloud] ** Operator @@ -93,29 +74,57 @@ *** xref:cloud/operator/building-custom-images.adoc[Building Custom Images] *** xref:cloud/operator/known-issues.adoc[Roadmap and Known Issues] *** xref:cloud/operator/customize-podspec.adoc[Custom Workflow PodSpec] -*** xref:cloud/operator/migration-guide.adoc[] -** Quarkus -*** xref:cloud/quarkus/build-workflow-image-with-quarkus-cli.adoc[Building Workflow Images] -*** xref:cloud/quarkus/deploying-on-minikube.adoc[Deploying on Minikube] -*** xref:cloud/quarkus/deploying-on-kubernetes.adoc[Deploying on Kubernetes] -*** xref:cloud/quarkus/deploying-on-openshift.adoc[Deploying on OpenShift] -*** xref:cloud/quarkus/kubernetes-service-discovery.adoc[Service Discovery] * Integrations -** xref:integrations/camel-routes-integration.adoc[Camel Routes] -** xref:integrations/custom-functions-knative.adoc[Knative Serving] -** xref:integrations/expose-metrics-to-prometheus.adoc[Exposing Prometheus Metrics] -** xref:integrations/serverless-dashboard-with-runtime-data.adoc[Displaying workflow 
Data in Dashboards] +** xref:integrations/core-concepts.adoc[] * Job Service ** xref:job-services/core-concepts.adoc[Core Concepts] -** xref:job-services/quarkus-extensions.adoc[Quarkus Extensions] * Data Index ** xref:data-index/data-index-core-concepts.adoc[Core concepts] ** xref:data-index/data-index-service.adoc[Standalone service] -** xref:data-index/data-index-quarkus-extension.adoc[Quarkus Extensions] +* xref:migration-guide/index.adoc[Migration Guide] ** Operator -*** xref:data-index/data-index-usecase-singleton.adoc[] -*** xref:data-index/data-index-usecase-multi.adoc[] -//** Quarkus Extensions TODO: https://issues.redhat.com/browse/KOGITO-9463 +*** xref:migration-guide/operator/to-1.43.0-migration-guide.adoc[Migrating {product_name} operator to 1.43.0] * Use Cases -** xref:use-cases/orchestration-based-saga-pattern.adoc[Saga Orchestration] -** xref:use-cases/timeout-showcase-example.adoc[Timeouts Showcase] +** xref:use-cases/advanced-developer-use-cases/index.adoc[Advanced Development Use Cases of {product_name} applications using Quarkus and Java] +*** Getting started +**** xref:use-cases/advanced-developer-use-cases/getting-started/create-your-first-workflow-service.adoc[] +**** xref:use-cases/advanced-developer-use-cases/getting-started/build-workflow-image-with-quarkus-cli.adoc[] +**** xref:use-cases/advanced-developer-use-cases/getting-started/working-with-serverless-workflow-quarkus-examples.adoc[] +**** xref:use-cases/advanced-developer-use-cases/getting-started/test-serverless-workflow-quarkus-examples.adoc[] +*** Deployment +**** xref:use-cases/advanced-developer-use-cases/deployments/deploying-on-minikube.adoc[Deploying on Minikube] +**** xref:use-cases/advanced-developer-use-cases/deployments/deploying-on-kubernetes.adoc[Deploying on Kubernetes] +**** xref:use-cases/advanced-developer-use-cases/deployments/deploying-on-openshift.adoc[Deploying on OpenShift] +*** Persistence +**** 
xref:use-cases/advanced-developer-use-cases/persistence/persistence-with-postgresql.adoc[] +**** xref:use-cases/advanced-developer-use-cases/persistence/postgresql-flyway-migration.adoc[] +**** xref:use-cases/advanced-developer-use-cases/persistence/integration-tests-with-postgresql.adoc[] +*** Job Service +**** xref:use-cases/advanced-developer-use-cases/job-service/quarkus-extensions.adoc[] +*** Data Index +**** xref:use-cases/advanced-developer-use-cases/data-index/data-index-as-quarkus-dev-service.adoc[] +**** xref:use-cases/advanced-developer-use-cases/data-index/data-index-usecase-singleton.adoc[] +**** xref:use-cases/advanced-developer-use-cases/data-index/data-index-usecase-multi.adoc[] +**** xref:use-cases/advanced-developer-use-cases/data-index/data-index-quarkus-extension.adoc[] +*** Service Orchestration +**** xref:use-cases/advanced-developer-use-cases/service-orchestration/configuring-openapi-services-endpoints-with-quarkus.adoc[] +**** xref:use-cases/advanced-developer-use-cases/service-orchestration/orchestration-of-grpc-services.adoc[] +*** Service Discovery +**** xref:use-cases/advanced-developer-use-cases/service-discovery/kubernetes-service-discovery.adoc[Service Discovery] +*** Event Orchestration +**** xref:use-cases/advanced-developer-use-cases/event-orchestration/consume-produce-events-with-knative-eventing.adoc[] +**** xref:use-cases/advanced-developer-use-cases/event-orchestration/consume-producing-events-with-kafka.adoc[] +**** xref:use-cases/advanced-developer-use-cases/event-orchestration/orchestration-based-saga-pattern.adoc[] +*** Timeouts +**** xref:use-cases/advanced-developer-use-cases/timeouts/timeout-showcase-example.adoc[] +*** Callbacks +**** xref:use-cases/advanced-developer-use-cases/callbacks/callback-state-example.adoc[] +*** Integrations of external services +**** xref:use-cases/advanced-developer-use-cases/integrations/camel-routes-integration.adoc[] +**** 
xref:use-cases/advanced-developer-use-cases/integrations/custom-functions-knative.adoc[] +**** xref:use-cases/advanced-developer-use-cases/integrations/expose-metrics-to-prometheus.adoc[] +**** xref:use-cases/advanced-developer-use-cases/integrations/serverless-dashboard-with-runtime-data.adoc[] +*** Testing +**** xref:use-cases/advanced-developer-use-cases/testing/basic-integration-tests-with-restassured.adoc[] +**** xref:use-cases/advanced-developer-use-cases/testing/mocking-http-cloudevents-with-wiremock.adoc[] +**** xref:use-cases/advanced-developer-use-cases/testing/mocking-openapi-services-with-wiremock.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/_common-content/downstream-post-create-project.adoc b/serverlessworkflow/modules/ROOT/pages/_common-content/downstream-post-create-project.adoc index 48aad1d6f..d0c42b8f5 100644 --- a/serverlessworkflow/modules/ROOT/pages/_common-content/downstream-post-create-project.adoc +++ b/serverlessworkflow/modules/ROOT/pages/_common-content/downstream-post-create-project.adoc @@ -36,7 +36,7 @@ io.quarkus.platform quarkus-kogito-bom - 2.7.6.Final + {quarkus_platform_version}</version> pom import diff --git a/serverlessworkflow/modules/ROOT/pages/_common-content/getting-started-requirement.adoc b/serverlessworkflow/modules/ROOT/pages/_common-content/getting-started-requirement.adoc index 37a3a97b6..36cad55e4 100644 --- a/serverlessworkflow/modules/ROOT/pages/_common-content/getting-started-requirement.adoc +++ b/serverlessworkflow/modules/ROOT/pages/_common-content/getting-started-requirement.adoc @@ -1,3 +1,3 @@ * A workflow project is created. + -For more information about creating a workflow project, see xref:getting-started/create-your-first-workflow-service.adoc[Creating your first Serverless Workflow service].
\ No newline at end of file +For more information about creating a workflow project, see xref:use-cases/advanced-developer-use-cases/getting-started/create-your-first-workflow-service.adoc[Creating your first Serverless Workflow Java service]. \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/index.adoc b/serverlessworkflow/modules/ROOT/pages/cloud/index.adoc index 1b7322c34..85ab28107 100644 --- a/serverlessworkflow/modules/ROOT/pages/cloud/index.adoc +++ b/serverlessworkflow/modules/ROOT/pages/cloud/index.adoc @@ -101,14 +101,6 @@ xref:cloud/operator/known-issues.adoc[] Learn about the known issues and feature Roadmap of the {operator_name} -- -[.card] --- -[.card-title] -xref:cloud/operator/migration-guide.adoc[] -[.card-description] -Migration guides --- - [.card-section] == Kubernetes with Quarkus @@ -117,7 +109,7 @@ For Java developers, you can use Quarkus and a few add-ons to help you build and [.card] -- [.card-title] -xref:cloud/quarkus/build-workflow-image-with-quarkus-cli.adoc[Building workflow images using Quarkus CLI] +xref:use-cases/advanced-developer-use-cases/getting-started/build-workflow-image-with-quarkus-cli.adoc[Building workflow images using Quarkus CLI] [.card-description] Learn how to build images for your workflow applications using Quarkus CLI -- @@ -125,7 +117,7 @@ Learn how to build images for your workflow applications using Quarkus CLI [.card] -- [.card-title] -xref:cloud/quarkus/kubernetes-service-discovery.adoc[Kubernetes service discovery in {product_name}] +xref:use-cases/advanced-developer-use-cases/service-discovery/kubernetes-service-discovery.adoc[Kubernetes service discovery in {product_name}] [.card-description] Learn what is and how the Kubernetes service discovery for workflow application configuration works -- @@ -133,7 +125,7 @@ Learn what is and how the Kubernetes service discovery for workflow application [.card] -- [.card-title] 
-xref:cloud/quarkus/deploying-on-openshift.adoc[Deploying your {product_name} application on OpenShift] +xref:use-cases/advanced-developer-use-cases/deployments/deploying-on-openshift.adoc[Deploying your {product_name} application on OpenShift] [.card-description] Learn how to deploy your workflow application on OpenShift cluster -- @@ -141,7 +133,7 @@ Learn how to deploy your workflow application on OpenShift cluster [.card] -- [.card-title] -xref:cloud/quarkus/deploying-on-minikube.adoc[Deploying your {product_name} application on Minikube] +xref:use-cases/advanced-developer-use-cases/deployments/deploying-on-minikube.adoc[Deploying your {product_name} application on Minikube] [.card-description] Learn how to deploy your workflow application on Minikube for local tests and development -- @@ -149,7 +141,7 @@ Learn how to deploy your workflow application on Minikube for local tests and de [.card] -- [.card-title] -xref:cloud/quarkus/deploying-on-kubernetes.adoc[Deploying your {product_name} application on Kubernetes] +xref:use-cases/advanced-developer-use-cases/deployments/deploying-on-kubernetes.adoc[Deploying your {product_name} application on Kubernetes] [.card-description] Learn how to deploy your workflow application on Kubernetes -- diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/operator/building-custom-images.adoc b/serverlessworkflow/modules/ROOT/pages/cloud/operator/building-custom-images.adoc index b03124069..1f44fbea0 100644 --- a/serverlessworkflow/modules/ROOT/pages/cloud/operator/building-custom-images.adoc +++ b/serverlessworkflow/modules/ROOT/pages/cloud/operator/building-custom-images.adoc @@ -40,10 +40,10 @@ The table below lists the additional packages installed in the development mode |tzdata-java | -|java-11-openjdk-devel -|OpenJDK 11 +|java-17-openjdk-devel +|OpenJDK 17 -|apache-maven-3.8.6-bin.tar.gz +|apache-maven-3.9.3-bin.tar.gz |Apache Maven |=== diff --git 
a/serverlessworkflow/modules/ROOT/pages/cloud/operator/install-serverless-operator.adoc b/serverlessworkflow/modules/ROOT/pages/cloud/operator/install-serverless-operator.adoc index 9edbee59e..0447ab4b5 100644 --- a/serverlessworkflow/modules/ROOT/pages/cloud/operator/install-serverless-operator.adoc +++ b/serverlessworkflow/modules/ROOT/pages/cloud/operator/install-serverless-operator.adoc @@ -11,7 +11,7 @@ :kubernetes_operator_uninstall_url: https://olm.operatorframework.io/docs/tasks/uninstall-operator/ :operatorhub_url: https://operatorhub.io/ -This guide describes how to install the {operator_name} in a Kubernetes or OpenShift cluster. The operator is in an xref:/cloud/operator/known-issues.adoc[early development stage] (community only) and has been tested on OpenShift 4.11+, Kubernetes 1.22+, and link:{minikube_url}[Minikube]. +This guide describes how to install the {operator_name} in a Kubernetes or OpenShift cluster. The operator is in an xref:/cloud/operator/known-issues.adoc[early development stage] (community only) and has been tested on OpenShift {openshift_version_min}+, Kubernetes {kubernetes_version}+, and link:{minikube_url}[Minikube]. .Prerequisites * A Kubernetes or OpenShift cluster with admin privileges. Alternatively, you can use Minikube or KIND. @@ -155,7 +155,6 @@ kubectl delete -f https://raw.githubusercontent.com/apache/incubator-kie-kogito- [TIP] ==== -<<<<<<< HEAD If you're running a snapshot version, use this URL instead `https://raw.githubusercontent.com/apache/incubator-kie-kogito-serverless-operator/main/operator.yaml`. The URL should be the same used when installing the operator. 
diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/quarkus/build-workflow-images-with-tekton.adoc b/serverlessworkflow/modules/ROOT/pages/cloud/quarkus/build-workflow-images-with-tekton.adoc deleted file mode 100644 index 65f5cd828..000000000 --- a/serverlessworkflow/modules/ROOT/pages/cloud/quarkus/build-workflow-images-with-tekton.adoc +++ /dev/null @@ -1 +0,0 @@ -//= Building Workflow Images with Tekton Pipelines \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/quarkus/versioning-workflows-in-knative.adoc b/serverlessworkflow/modules/ROOT/pages/cloud/quarkus/versioning-workflows-in-knative.adoc deleted file mode 100644 index 36e8a1f9a..000000000 --- a/serverlessworkflow/modules/ROOT/pages/cloud/quarkus/versioning-workflows-in-knative.adoc +++ /dev/null @@ -1 +0,0 @@ -//= Versioning workflows in Knative \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/core/cncf-serverless-workflow-specification-support.adoc b/serverlessworkflow/modules/ROOT/pages/core/cncf-serverless-workflow-specification-support.adoc index b55656067..dfbf5ee08 100644 --- a/serverlessworkflow/modules/ROOT/pages/core/cncf-serverless-workflow-specification-support.adoc +++ b/serverlessworkflow/modules/ROOT/pages/core/cncf-serverless-workflow-specification-support.adoc @@ -244,7 +244,7 @@ For more information about timeouts, see xref:core/timeouts-support.adoc[Timeout {product_name} supports workflow compensation as described in the link:{spec_doc_url}#Workflow-Compensation[Serverless Workflow specification definition]. -For more information about compensations, see xref:use-cases/orchestration-based-saga-pattern.adoc[Saga orchestration example in {product_name}]. +For more information about compensations, see xref:use-cases/advanced-developer-use-cases/event-orchestration/orchestration-based-saga-pattern.adoc[Saga orchestration example in {product_name}]. 
[[constants]] == Constants @@ -266,7 +266,7 @@ Secrets are associated with the link:{quarkus_config_guide_url}[Quarkus Configur == Additional resources -* xref:getting-started/create-your-first-workflow-service.adoc[Creating your first workflow service] +* xref:getting-started/create-your-first-workflow-service-with-kn-cli-and-vscode.adoc[] * xref:getting-started/getting-familiar-with-our-tooling.adoc[Getting familiar with {product_name} tooling] include::../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/core/custom-functions-support.adoc b/serverlessworkflow/modules/ROOT/pages/core/custom-functions-support.adoc index 6728100cf..bd53207cb 100644 --- a/serverlessworkflow/modules/ROOT/pages/core/custom-functions-support.adoc +++ b/serverlessworkflow/modules/ROOT/pages/core/custom-functions-support.adoc @@ -208,7 +208,7 @@ Kogito supports the link:{camel_url}[Camel Routes] functions within an Apache Ma [NOTE] ==== -This section briefly exemplifies how to define and use Camel Routes within your workflow application. For more information, see the xref:integrations/camel-routes-integration.adoc[Integrating with Camel Routes]. +This section briefly exemplifies how to define and use Camel Routes within your workflow application. For more information, see the xref:use-cases/advanced-developer-use-cases/integrations/camel-routes-integration.adoc[Integrating with Camel Routes]. ==== === Function definition @@ -307,7 +307,7 @@ include::../../pages/_common-content/camel-valid-responses.adoc[] [NOTE] ==== -This section briefly exemplifies how to define and use Knative custom functions within your workflow application. For more information, see xref:integrations/custom-functions-knative.adoc[Invoking Knative services from Serverless Workflow]. +This section briefly exemplifies how to define and use Knative custom functions within your workflow application. 
For more information, see xref:use-cases/advanced-developer-use-cases/integrations/custom-functions-knative.adoc[Invoking Knative services from Serverless Workflow]. ==== === Function definition @@ -336,7 +336,7 @@ You can declare a {product_name} custom function using the Knative service name, <1> `greet` is the function name <2> `custom` is the function type -<3> In `operation` you set the coordinates of the Knative service. See xref:cloud/quarkus/kubernetes-service-discovery.adoc[Kubernetes Service Discovery] for supported Scheme and Kubernetes GVK +<3> In `operation` you set the coordinates of the Knative service. See xref:use-cases/advanced-developer-use-cases/service-discovery/kubernetes-service-discovery.adoc[Kubernetes Service Discovery] for supported Scheme and Kubernetes GVK The above function will send a `POST` request to the http://custom-function-knative-service.default.10.109.169.193.sslip.io/plainJsonFunction URL. If you don't specify a path, {product_name} will use the root path (/). diff --git a/serverlessworkflow/modules/ROOT/pages/core/defining-an-input-schema-for-workflows.adoc b/serverlessworkflow/modules/ROOT/pages/core/defining-an-input-schema-for-workflows.adoc index b2bca86a0..1b3509ff5 100644 --- a/serverlessworkflow/modules/ROOT/pages/core/defining-an-input-schema-for-workflows.adoc +++ b/serverlessworkflow/modules/ROOT/pages/core/defining-an-input-schema-for-workflows.adoc @@ -38,7 +38,7 @@ Similar to Input schema, you must specify the URL to the JSON schema, using `out "schema" : "URL_to_json_schema", "failOnValidationErrors": false } - ] + } ] ---- The same rules described for `dataInputSchema` apply for `schema` and `failOnValidationErrors`. The difference is that the latter flag is applied after workflow execution. 
diff --git a/serverlessworkflow/modules/ROOT/pages/core/handling-events-on-workflows.adoc b/serverlessworkflow/modules/ROOT/pages/core/handling-events-on-workflows.adoc index 336058ee8..3a7e3b4bf 100644 --- a/serverlessworkflow/modules/ROOT/pages/core/handling-events-on-workflows.adoc +++ b/serverlessworkflow/modules/ROOT/pages/core/handling-events-on-workflows.adoc @@ -136,9 +136,9 @@ Similar to the callback state in a workflow, the workflow instance to be resumed == Additional resources -* xref:getting-started/create-your-first-workflow-service.adoc[Creating your first workflow service] -* xref:eventing/consume-producing-events-with-kafka.adoc[Consuming and producing events using Apache Kafka] -* xref:eventing/consume-produce-events-with-knative-eventing.adoc[Consuming and producing events on Knative Eventing] +* xref:use-cases/advanced-developer-use-cases/getting-started/create-your-first-workflow-service.adoc[Creating your first workflow service] +* xref:use-cases/advanced-developer-use-cases/event-orchestration/consume-producing-events-with-kafka.adoc[Consuming and producing events using Apache Kafka] +* xref:use-cases/advanced-developer-use-cases/event-orchestration/consume-produce-events-with-knative-eventing.adoc[Consuming and producing events on Knative Eventing] include::../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/core/timeouts-support.adoc b/serverlessworkflow/modules/ROOT/pages/core/timeouts-support.adoc index 9761574cb..f1b32fda2 100644 --- a/serverlessworkflow/modules/ROOT/pages/core/timeouts-support.adoc +++ b/serverlessworkflow/modules/ROOT/pages/core/timeouts-support.adoc @@ -219,7 +219,7 @@ In the {job_service_xref}[job service] guide you can see all the available add-o [#timeouts-showcase] == Timeouts showcase -To see the examples about how configure the timeouts, and execute them in different deployment scenarios, you can use the xref:use-cases/timeout-showcase-example.adoc[Timeouts showcase 
in {product_name}] +To see the examples about how to configure the timeouts, and execute them in different deployment scenarios, you can use the xref:use-cases/advanced-developer-use-cases/timeouts/timeout-showcase-example.adoc[Timeouts showcase in {product_name}] == Additional resources diff --git a/serverlessworkflow/modules/ROOT/pages/core/understanding-jq-expressions.adoc b/serverlessworkflow/modules/ROOT/pages/core/understanding-jq-expressions.adoc index f3d3d8390..82cb7fcaf 100644 --- a/serverlessworkflow/modules/ROOT/pages/core/understanding-jq-expressions.adoc +++ b/serverlessworkflow/modules/ROOT/pages/core/understanding-jq-expressions.adoc @@ -7,7 +7,7 @@ :jq_url: https://stedolan.github.io/jq/manual/ :jsonpath_url: https://github.com/json-path/JsonPath/ :json_data_types_url: https://www.w3schools.com/js/js_json_datatypes.asp -:jq_play:https://jiehong.gitlab.io/jq_offline +:jq_play: https://jiehong.gitlab.io/jq_offline Each workflow instance is associated with a data model. A data model consists of a JSON object regardless of whether the workflow file contains YAML or JSON. The initial content of the JSON object depends on how the workflow is started. If the workflow is created using the link:{cloud_events_url}[Cloud Event], then the workflow content is taken from the `data` property. However, if the workflow is started through an HTTP POST request, then the workflow content is taken from the request body.
diff --git a/serverlessworkflow/modules/ROOT/pages/core/understanding-workflow-error-handling.adoc b/serverlessworkflow/modules/ROOT/pages/core/understanding-workflow-error-handling.adoc index f413313c8..8ae0d7dd0 100644 --- a/serverlessworkflow/modules/ROOT/pages/core/understanding-workflow-error-handling.adoc +++ b/serverlessworkflow/modules/ROOT/pages/core/understanding-workflow-error-handling.adoc @@ -185,7 +185,7 @@ The `finish` state in the `serverless-workflow-error-quarkus` example applicatio == Additional resources -* xref:getting-started/create-your-first-workflow-service.adoc[Creating your first workflow service] +* xref:getting-started/create-your-first-workflow-service-with-kn-cli-and-vscode.adoc[] include::../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/core/working-with-callbacks.adoc b/serverlessworkflow/modules/ROOT/pages/core/working-with-callbacks.adoc index 57766b5cc..4f9f02e52 100644 --- a/serverlessworkflow/modules/ROOT/pages/core/working-with-callbacks.adoc +++ b/serverlessworkflow/modules/ROOT/pages/core/working-with-callbacks.adoc @@ -15,121 +15,12 @@ image::eventing/callbacks-explained.png[] The workflow correlation described in this document focuses on the former mechanism that is based on the fact that each workflow instance contains a unique identifier, which is generated automatically. -[[ref-example-callback]] -== Example of Callback state - -To understand the Callback state, see the link:{kogito_sw_examples_url}/serverless-workflow-callback-quarkus[`serverless-workflow-callback-quarkus`] example application in GitHub repository. - -The initial model of the `serverless-workflow-callback-quarkus` example application is empty. Once the workflow is started, it publishes a CloudEvent of the `resume` type and waits for a CloudEvent, which contains the type `wait`. - -A listener consumes the CloudEvent with the `resume` type and simulates the behavior of an external service.
Consequently, on the external service side, when the actions associated with the `resume` type CloudEvent are completed, the listener publishes a `wait` type CloudEvent. Once the `wait` type CloudEvent is received, the workflow moves to the next state and ends successfully. - -To use the Callback state in a workflow, first CloudEvent types such as `resume` and `wait` are declared that the workflow uses. Following is an example of CloudEvents declaration in a workflow definition: - -.Example of CloudEvents declaration in a workflow definition -[code,json] ----- -"events": [ - { - "name": "resumeEvent", - "source": "", - "type": "resume" - }, - { - "name": "waitEvent", - "source": "", - "type": "wait" - } - ] ----- - -After that, a Callback state is declared. The Callback state publishes a `resume` type CloudEvent and waits for a CloudEvent with `wait` type. The published CloudEvent contains a `move` data field, and the CloudEvent that is received is expected to contain a `result` data field. According to the link:{spec_doc_url}#event-data-filters[eventDataFilter], the `result` data field is added to the workflow model as a `move` field. - -Following is an example of declaring a Callback state that handles the `wait` type CloudEvent: - -.Example of a Callback State declaration handling the `wait` CloudEvent -[code,json] ----- -{ - "name": "waitForEvent", - "type": "callback", - "action": { - "name": "publishAction", - "eventRef": { - "triggerEventRef": "resumeEvent", - "data": "{move: \"This is the initial data in the model\"}" - } - }, - "eventRef": "waitEvent", - "eventDataFilter": { - "data": ".result", - "toStateData": ".move" - }, - "transition": "finish" - } ----- - -An link:{kogito_sw_examples_url}/serverless-workflow-callback-quarkus/src/main/java/org/kie/kogito/examples/PrintService.java[event listener] consumes the `resume` type CloudEvent and publishes a new `wait` type CloudEvent. 
Following is an example of a Java method that publishes the `wait` type CloudEvent: - -.Example of a Java method that publishes the `wait` CloudEvent -[code,java] ----- - - private String generateCloudEvent(String id, String input) { - Map eventBody = new HashMap<>(); - eventBody.put("result", input + " and has been modified by the event publisher"); - eventBody.put("dummyEventVariable", "This will be discarded by the process"); - try { - return objectMapper.writeValueAsString(CloudEventBuilder.v1() - .withId(UUID.randomUUID().toString()) - .withSource(URI.create("")) - .withType("wait") - .withTime(OffsetDateTime.now()) - .withExtension(CloudEventExtensionConstants.PROCESS_REFERENCE_ID, id) - .withData(objectMapper.writeValueAsBytes(eventBody)) - .build()); - } catch (JsonProcessingException e) { - throw new IllegalArgumentException(e); - } - - } ----- - -After that, the workflow application consumes the event published by the listener and sets the result field. The consumed CloudEvent contains an attribute named `kogitoprocrefid`, which holds the workflow instance ID of the workflow. - -The `kogitoprocrefid` attribute is crucial because when the correlation is not used, then this attribute is the only way for the Callback state to identify that the related CloudEvent needs to be used to resume the workflow. For more information about correlation, see xref:eventing/event-correlation-with-workflows.adoc[Event correlation in {product_name}]. - -Note that each workflow is identified by a unique instance ID, which is automatically included in any published CloudEvent, as `kogitoprocinstanceid` CloudEvent extension. - -The following example shows that the event listener takes the workflow instance ID of a workflow from a CloudEvent attribute named `kogitoprocinstanceid`, which is associated with the CloudEvent that is consumed. 
- -.Example of a Java method that consumes the `resume` CloudEvent -[source,java] ----- - @Incoming("in-resume") - @Outgoing("out-wait") - @Acknowledgment(Strategy.POST_PROCESSING) - public String onEvent(Message message) { - Optional ce = CloudEventUtils.decode(message.getPayload()); - JsonCloudEventData cloudEventData = (JsonCloudEventData) ce.get().getData(); - return generateCloudEvent(ce.get().getExtension(CloudEventExtensionConstants.PROCESS_INSTANCE_ID).toString(), cloudEventData.getNode().get("move").asText()); - } ----- - -Apache Kafka configuration in `serverless-workflow-callback-quarkus`:: -+ --- -The `serverless-workflow-callback-quarkus` example application requires an external broker to manage the associated CloudEvents. The default setup in the `serverless-workflow-callback-quarkus` example application uses link:{kafka_doc_url}[Apache Kafka]. However, you can also use xref:eventing/consume-produce-events-with-knative-eventing.adoc[Knative Eventing]. - -Apache Kafka uses topics to publish or consume messages. In the `serverless-workflow-callback-quarkus` example application, two topics are used, matching the name of the CloudEvent types that are defined in the workflow, such as `resume` and `wait`. The `resume` and `wait` CloudEvent types are configured in the link:{kogito_sw_examples_url}/serverless-workflow-callback-quarkus/src/main/resources/application.properties[`application.properties`] file. - -For more information about using Apache Kafka with events, see link:xref:consume-producing-events-with-kafka.adoc[Consuming and producing events using Apache Kafka]. 
--- - == Additional resources -* xref:getting-started/create-your-first-workflow-service.adoc[Creating your first workflow service] +* xref:getting-started/create-your-first-workflow-service-with-kn-cli-and-vscode.adoc[] * xref:eventing/event-correlation-with-workflows.adoc[Event correlation in {product_name}] +* xref:use-cases/advanced-developer-use-cases/callbacks/callback-state-example.adoc[] +* xref:use-cases/advanced-developer-use-cases/getting-started/create-your-first-workflow-service.adoc[] include::../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/core/working-with-parallelism.adoc b/serverlessworkflow/modules/ROOT/pages/core/working-with-parallelism.adoc index 7ff1fd3a6..c4b992da3 100644 --- a/serverlessworkflow/modules/ROOT/pages/core/working-with-parallelism.adoc +++ b/serverlessworkflow/modules/ROOT/pages/core/working-with-parallelism.adoc @@ -270,6 +270,6 @@ The parallel workflow data shows the concatenated string as result, but in this == Additional resources -* xref:getting-started/create-your-first-workflow-service.adoc[Creating your first workflow service] +* xref:getting-started/create-your-first-workflow-service-with-kn-cli-and-vscode.adoc[Creating your first workflow service] include::../../pages/_common-content/report-issue.adoc[] \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/data-index/data-index-core-concepts.adoc b/serverlessworkflow/modules/ROOT/pages/data-index/data-index-core-concepts.adoc index 9b99f3ea2..ff8b6f720 100644 --- a/serverlessworkflow/modules/ROOT/pages/data-index/data-index-core-concepts.adoc +++ b/serverlessworkflow/modules/ROOT/pages/data-index/data-index-core-concepts.adoc @@ -24,7 +24,7 @@ In {product_name} platform there is a dedicated supporting service that stores the data related to the {workflow_instances} and their associated jobs called *{data_index_ref}* service. 
This service also provides a GraphQL endpoint allowing users to query that data and perform operations, also known as mutations in GraphQL terms. -The data processed by the {data_index_ref} service is usually received via events. The events consumed can be generated by any workflow or the xref::job-services/core-concepts.adoc[Job service] itself. +The data processed by the {data_index_ref} service is usually received via events. The events consumed can be generated by any workflow or the xref:job-services/core-concepts.adoc[Job service] itself. This event communication can be configured in different ways as described in the <> section. The {data_index_ref} service uses Apache Kafka or Knative eventing to consume link:{cloud_events_url}[CloudEvents] messages from workflows. @@ -94,7 +94,7 @@ In this case, the indexation has some limitations: it is only able to index data image::data-index/data-index-addon.png[Image of data-index as a Quarkus Extension] -More details are available in the xref:data-index/data-index-quarkus-extension.adoc[{data_index_ref} Quarkus Extension] section. +More details are available in the xref:use-cases/advanced-developer-use-cases/data-index/data-index-quarkus-extension.adoc[{data_index_ref} Quarkus Extension] section. [#data-index-storage] == {data_index_ref} storage @@ -103,7 +103,7 @@ In order to store the indexed data, {data_index_ref} needs some specific tables It's necessary to activate the migrate-at-start option to migrate the {data_index_ref} schema automatically. -For more details about Flyway migrations, see xref:persistence/postgresql-flyway-migration.adoc[] section +For more details about Flyway migrations, see xref:use-cases/advanced-developer-use-cases/persistence/postgresql-flyway-migration.adoc[] section. 
[#data-index-graphql] == {data_index_ref} GraphQL endpoint @@ -629,8 +629,8 @@ In this case, the data indexation is done internally, and all interactions are t == Additional resources -* xref:eventing/consume-producing-events-with-kafka.adoc[] -* xref:eventing/consume-produce-events-with-knative-eventing.adoc[] -* xref:use-cases/timeout-showcase-example.adoc[] +* xref:use-cases/advanced-developer-use-cases/event-orchestration/consume-producing-events-with-kafka.adoc[] +* xref:use-cases/advanced-developer-use-cases/event-orchestration/consume-produce-events-with-knative-eventing.adoc[] +* xref:use-cases/advanced-developer-use-cases/timeouts/timeout-showcase-example.adoc[] include::../_common-content/report-issue.adoc[] \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/data-index/data-index-service.adoc b/serverlessworkflow/modules/ROOT/pages/data-index/data-index-service.adoc index dd63bf517..80175cf71 100644 --- a/serverlessworkflow/modules/ROOT/pages/data-index/data-index-service.adoc +++ b/serverlessworkflow/modules/ROOT/pages/data-index/data-index-service.adoc @@ -15,7 +15,7 @@ :workflow_instances: {workflow_instance}s [#data-index-service] -== {data_index_ref} service deployment. +== {data_index_ref} service deployment {data_index_ref} service can be deployed referencing directly a distributed {data_index_ref} image. There are different images provided that take into account what persistence layer is required in each case. In each distribution, there are some properties to configure things like the connection with the database or the communication with other services. @@ -96,7 +96,7 @@ In this case {data_index_ref} is ready to consume the events sent to the topics: [IMPORTANT] ==== -It is important to configure the workflows application to send the events to the topic `kogito-processinstances-events` using the connector `smallrye-kafka`. 
More information about how to configure workflows Kafka eventing is available in xref:eventing/consume-producing-events-with-kafka.adoc[] guide. +It is important to configure the workflows application to send the events to the topic `kogito-processinstances-events` using the connector `smallrye-kafka`. More information about how to configure workflows Kafka eventing is available in xref:use-cases/advanced-developer-use-cases/event-orchestration/consume-producing-events-with-kafka.adoc[] guide. To explore the specific configuration to add to the workflow to connect with {data_index_ref} using Kafka eventing see xref:data-index/data-index-core-concepts.adoc#_kafka_eventing[{data_index_ref} Kafka eventing] @@ -236,7 +236,7 @@ spec: <7> The URI where the {data_index_ref} service is expecting to consume those types of events. This deployment is using `KOGITO_DATA_INDEX_QUARKUS_PROFILE: http-events-support`. Workflow applications need to configure the connector to use `quarkus-http` and send the events to the Knative `K_SINK`. -You can find more information about Knative eventing and `K_SINK` environment variable in xref:eventing/consume-produce-events-with-knative-eventing.adoc[] +You can find more information about Knative eventing and `K_SINK` environment variable in xref:use-cases/advanced-developer-use-cases/event-orchestration/consume-produce-events-with-knative-eventing.adoc[] To explore the specific configuration to add to the workflow to connect with {data_index_ref} using Knative eventing see xref:data-index/data-index-core-concepts.adoc#_knative_eventing[{data_index_ref} Knative eventing] @@ -253,147 +253,16 @@ mp.messaging.outgoing.kogito-processinstances-events.method=POST If that configuration values weren't there before the container image creation, they need to be passed as environment variables. More details about customizing Quarkus generated images can be found in {quarkus_container_image_customizing_url}[Quarkus Container Images Customizing] guide. 
==== -A full example where the {data_index_ref} service standalone is deployed using Knative eventing can be found as part of xref:use-cases/timeout-showcase-example.adoc#execute-quarkus-project-standalone-services[Quarkus Workflow Project with standalone services] guide. +A full example where the {data_index_ref} service standalone is deployed using Knative eventing can be found as part of xref:use-cases/advanced-developer-use-cases/timeouts/timeout-showcase-example.adoc#execute-quarkus-project-standalone-services[Quarkus Workflow Project with standalone services] guide. --- -[#data-index-dev-service-details] -== {data_index_ref} as a Quarkus Development service -When you use the {product_name} Process Quarkus extension, a temporary {data_index_ref} service is automatically provisioned while the Quarkus application is running in development mode. When you use one of the following Quarkus extensions, the Dev Service is set up for immediate use: - -.{product_name} main Quarkus extension -[source,xml] ----- - - org.kie.kogito - kogito-quarkus - ----- - -.{product_name} Quarkus extension -[source,xml] ----- - - org.kie.kogito - kogito-quarkus-serverless-workflow - ----- - -When you start your Quarkus project in development mode, an in-memory instance of the {data_index_ref} service is automatically started in the background. This feature is enabled by link:{dev_services_url}[Quarkus Dev Services], and leverages link:{test_containers_url}[Testcontainers] to start an image of the {data_index_ref} service. - -The {product_name} Process Quarkus extension sets up your Quarkus application to automatically replicate any {product_name} messaging events related to {workflow_instances} or jobs into the provisioned {data_index_ref} instance. - -Once the service is up and running, you can query the GraphQL interface directly using `http://localhost:8180/graphql` or using the Quarkus Dev UI console `http://localhost:8080/q/dev`. 
- -The {data_index_ref} GraphQL endpoint can query for `ProcessInstances` and `Jobs`. For more information about operations and attributes to query, see xref:data-index/data-index-core-concepts.adoc#data-index-graphql[GraphQL endpoint provided by {data_index_ref}] section. - -You can share the same {data_index_ref} instance across multiple {product_name} services during development. Sharing {data_index_ref} instances is enabled by default, therefore, only one {data_index_ref} instance is started. This behavior can be adjusted to start multiple instances using the `quarkus.kogito.devservices.shared` property. - -The Quarkus Dev Service also allows further configuration options including: - -* To disable {data_index_ref} Dev Service, use the `quarkus.kogito.devservices.enabled=false` property. -* To change the port where the {data_index_ref} Dev Service runs, use the `quarkus.kogito.devservices.port=8180` property. -* To adjust the provisioned image, use `quarkus.kogito.devservices.imageName=quay.io/kiegroup/kogito-data-index-ephemeral` property. -* To disable sharing the {data_index_ref} instance across multiple Quarkus applications, use `quarkus.kogito.devservices.shared=false` property. - -For more information about Quarkus Dev Services, see link:{dev_services_url}[Dev Services guide]. - -[#data-index-service-configuration-properties] -== {data_index_ref} service configuration properties -The following table serves as a quick reference for commonly {data_index_ref} configuration properties supported. 
- -.Common configuration properties -[cols="40%,35%,10%,10%,5%", options="header"] -|=== -|Property|Description|Type|Default value|Override at runtime - -|`QUARKUS_DATASOURCE_JDBC_URL` -| The datasource URL -| string -| -| Yes - -|`QUARKUS_DATASOURCE_USERNAME` -| The datasource username -| string -| -| Yes - -|`QUARKUS_DATASOURCE_PASSWORD` -| The datasource password -| string -| -| Yes - -|`QUARKUS_DATASOURCE_DB_KIND` -a|The kind of database to connect: `postgresql`,.. -|string -| -|Yes - -|`QUARKUS_FLYWAY_MIGRATE_AT_START` -| `true` to execute Flyway automatically when the application starts, false otherwise. -| boolean -| false -| Yes - -|`KOGITO_DATA_INDEX_QUARKUS_PROFILE` -a| (Only when referencing an image distribution) - -Allows to change the event connection type. The possible values are: - -* `kafka-events-support` -* `http-events-support` - -| string -| `kafka-events-support` -| Yes - -|`kogito.persistence.query.timeout.millis` -|Defines timeout for a query execution. -|long -|`10000` -|Yes - -|`quarkus.kogito.devservices.enabled` -|Enables or disables the Dev Services for workflows. By default, the Dev Services are enabled, unless an existing configuration is present. -|boolean -|`true` -|No - -|`quarkus.kogito.devservices.port` -|Defines the optional fixed port that the Dev Services listen to. -|int -|`8180` -|No - -|`quarkus.kogito.devservices.image-name` -|Defines the {data_index_ref} image to use in Dev Service. -|string -|`quay.io/kiegroup/kogito-data-index-ephemeral:{page-component-version}` -|No - -|`quarkus.kogito.devservices.shared` -|Indicates if the {data_index_ref} instance, which is managed by Quarkus Dev Services, is shared. -|boolean -|`true` -|No - -|`quarkus.kogito.devservices.service-name` -|Defines the value of the label that is attached to the started container. This property is used when `shared` is set to `true`. 
-|string -|`kogito-data-index` -|No - - -|=== - == Additional resources * xref:data-index/data-index-core-concepts.adoc[] -* xref:cloud/quarkus/deploying-on-minikube.adoc[] -* xref:eventing/consume-producing-events-with-kafka.adoc[] -* xref:eventing/consume-produce-events-with-knative-eventing.adoc[] -* xref:use-cases/timeout-showcase-example.adoc[Timeout example in {product_name}] +* xref:use-cases/advanced-developer-use-cases/deployments/deploying-on-minikube.adoc[] +* xref:use-cases/advanced-developer-use-cases/event-orchestration/consume-producing-events-with-kafka.adoc[] +* xref:use-cases/advanced-developer-use-cases/event-orchestration/consume-produce-events-with-knative-eventing.adoc[] +* xref:use-cases/advanced-developer-use-cases/timeouts/timeout-showcase-example.adoc[Timeout example in {product_name}] include::../_common-content/report-issue.adoc[] \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/eventing/event-correlation-with-workflows.adoc b/serverlessworkflow/modules/ROOT/pages/eventing/event-correlation-with-workflows.adoc index 48ecf5f40..910e22891 100644 --- a/serverlessworkflow/modules/ROOT/pages/eventing/event-correlation-with-workflows.adoc +++ b/serverlessworkflow/modules/ROOT/pages/eventing/event-correlation-with-workflows.adoc @@ -194,7 +194,7 @@ All consumed events must contain the same correlation attributes since the consu } ---- -The engine stores the correlation information in the same persistence mechanism that is configured in the workflow application. If a persistence add-on is not configured, then the correlation information is stored in memory. This means that entire correlation information is lost when the workflow application restarts, therefore this process must be used for testing purposes. For more information about the persistence configuration, see xref:persistence/persistence-with-postgresql.adoc[Running a workflow service using PostgreSQL]. 
+The engine stores the correlation information in the same persistence mechanism that is configured in the workflow application. If a persistence add-on is not configured, then the correlation information is stored in memory. This means that entire correlation information is lost when the workflow application restarts, therefore this process must be used for testing purposes. For more information about the persistence configuration, see xref:use-cases/advanced-developer-use-cases/persistence/persistence-with-postgresql.adoc[Running a workflow service using PostgreSQL]. [NOTE] ==== diff --git a/serverlessworkflow/modules/ROOT/pages/eventing/orchestration-of-asyncapi-based-services.adoc b/serverlessworkflow/modules/ROOT/pages/eventing/orchestration-of-asyncapi-based-services.adoc index 6802a0bef..963210e0e 100644 --- a/serverlessworkflow/modules/ROOT/pages/eventing/orchestration-of-asyncapi-based-services.adoc +++ b/serverlessworkflow/modules/ROOT/pages/eventing/orchestration-of-asyncapi-based-services.adoc @@ -7,8 +7,8 @@ :async_api_spec_url: https://www.asyncapi.com/docs/reference/specification/v2.0.0 :async_quarkiverse_url: https://github.com/quarkiverse/quarkus-asyncapi // Referenced documentation pages. -:kafka_config: xref:eventing/consume-producing-events-with-kafka.adoc -:knative_config: xref:eventing/consume-produce-events-with-knative-eventing.adoc +:kafka_config: xref:use-cases/advanced-developer-use-cases/event-orchestration/consume-producing-events-with-kafka.adoc +:knative_config: xref:use-cases/advanced-developer-use-cases/event-orchestration/consume-produce-events-with-knative-eventing.adoc :event_states: xref:core/handling-events-on-workflows.adoc This document describes how to trigger and publish events using an link:{async_api_spec_url}[AsyncAPI] specification file. 
diff --git a/serverlessworkflow/modules/ROOT/pages/getting-started/create-your-first-workflow-service-with-kn-cli-and-vscode.adoc b/serverlessworkflow/modules/ROOT/pages/getting-started/create-your-first-workflow-service-with-kn-cli-and-vscode.adoc index c45f3bf2a..4758cc562 100644 --- a/serverlessworkflow/modules/ROOT/pages/getting-started/create-your-first-workflow-service-with-kn-cli-and-vscode.adoc +++ b/serverlessworkflow/modules/ROOT/pages/getting-started/create-your-first-workflow-service-with-kn-cli-and-vscode.adoc @@ -6,9 +6,9 @@ This guide showcases using the Knative Workflow CLI plugin and Visual Studio cod * xref:testing-and-troubleshooting/kn-plugin-workflow-overview.adoc[Knative Workflow CLI] {kn_cli_version} is installed. * Visual Studio Code with https://marketplace.visualstudio.com/items?itemName=redhat.vscode-extension-serverless-workflow-editor[Red Hat Serverless Workflow Editor] is installed to edit your workflows. -.Preparing your environment +.Preparing your environment for local development with locally deployed cluster instance * Install https://docs.docker.com/engine/install/[Docker] or https://podman.io/docs/installation[Podman]. -* Install https://minikube.sigs.k8s.io/docs/start/[minikube]. +* Install https://minikube.sigs.k8s.io/docs/start/[minikube] * Install https://kubernetes.io/docs/tasks/tools/[kubectl]. * Start minikube. Depending on your environment, set `--driver` flag to `podman` or `docker` [source,bash] @@ -17,7 +17,7 @@ minikube start --cpus 4 --memory 8096 --addons registry --addons metrics-server ---- * (optional) Install https://k9scli.io/[k9scli.io] * xref:cloud/operator/install-serverless-operator.adoc[] -* Install https://github.com/kiegroup/kie-tools/releases/tag/0.30.3[KN Workflow CLI] by downloading the correct distribution for your development environment and adding it to the PATH. +* Install https://github.com/kiegroup/kie-tools/releases/tag/{kn_cli_version}[SonataFlow plug-in for Knative CLI]. 
Follow xref:testing-and-troubleshooting/kn-plugin-workflow-overview.adoc[] guide. [[proc-creating-app-with-kn-cli]] == Creating a workflow project with Visual Studio Code and KN CLI @@ -112,6 +112,7 @@ curl -X 'POST' \ * xref:testing-and-troubleshooting/quarkus-dev-ui-extension/quarkus-dev-ui-overview.adoc[] * xref:getting-started/getting-familiar-with-our-tooling.adoc[] * xref:service-orchestration/orchestration-of-openapi-based-services.adoc[] +* xref:cloud/operator/developing-workflows.adoc[] include::../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/index.adoc b/serverlessworkflow/modules/ROOT/pages/index.adoc index ac5b922b1..723644b6a 100644 --- a/serverlessworkflow/modules/ROOT/pages/index.adoc +++ b/serverlessworkflow/modules/ROOT/pages/index.adoc @@ -14,9 +14,9 @@ You can learn how to create, manage, and deploy your workflow applications with [.card] -- [.card-title] -xref:getting-started/create-your-first-workflow-service.adoc[] +xref:getting-started/getting-familiar-with-our-tooling.adoc[] [.card-description] -Learn how to create your first Quarkus Workflow Project +Learn about the tools we provide you with, on your journey to create {product_name} applications -- [.card] @@ -24,18 +24,9 @@ Learn how to create your first Quarkus Workflow Project [.card-title] xref:getting-started/create-your-first-workflow-service-with-kn-cli-and-vscode.adoc[] [.card-description] -Learn how to create & run your first {Kogito Serverless Workflow} project. +An all-in-one starting guide. Learn how to create, run & deploy your first {product_name} project on your local environment. 
-- -// We will refactor this section here: https://issues.redhat.com/browse/KOGITO-9451 -//[.card] -//-- -//[.card-title] -//xref:getting-started/getting-familiar-with-our-tooling.adoc[Getting familiar with {product_name} tooling] -//[.card-description] -//Learn which tools you can use to author your workflow assets -//-- - [.card-section] == Core Concepts @@ -173,14 +164,6 @@ xref:service-orchestration/working-with-openapi-callbacks.adoc[] Learn how to use the OpenAPI Callback in your workflow application -- -[.card] --- -[.card-title] -xref:service-orchestration/orchestration-of-grpc-services.adoc[] -[.card-description] -Learn about orchestrating gRPC services --- - [.card-section] == Event Orchestration @@ -200,22 +183,6 @@ xref:eventing/event-correlation-with-workflows.adoc[] Learn how to configure event correlation in your workflow application -- -[.card] --- -[.card-title] -xref:eventing/consume-producing-events-with-kafka.adoc[] -[.card-description] -Learn how to configure your Quarkus Workflow Project to produce and consume events using Apache Kafka --- - -[.card] --- -[.card-title] -xref:eventing/consume-produce-events-with-knative-eventing.adoc[] -[.card-description] -Learn how to configure your Quarkus Workflow Project to produce and consume events on Knative Eventing --- - [.card-section] == Security @@ -254,55 +221,15 @@ xref:testing-and-troubleshooting/kn-plugin-workflow-overview.adoc[{product_name} Learn how to install the {product_name} plug-in for Knative CLI -- -[.card] --- -[.card-title] -xref:testing-and-troubleshooting/mocking-http-cloudevents-with-wiremock.adoc[] -[.card-description] -Testing Quarkus Workflow Project that uses HTTP CloudEvents and Knative Sink Binding --- - -[.card] --- -[.card-title] -xref:testing-and-troubleshooting/mocking-openapi-services-with-wiremock.adoc[] -[.card-description] -Learn how to mock external REST requests when testing your Quarkus Workflow Project --- - -[.card] --- -[.card-title] 
-xref:testing-and-troubleshooting/basic-integration-tests-with-restassured.adoc[] -[.card-description] -Learn how to add unit tests in your Quarkus Workflow Project using RestAssured --- - [.card-section] == Persistence [.card] -- [.card-title] -xref:persistence/persistence-with-postgresql.adoc[] -[.card-description] -Running Quarkus Workflow Applications using PostgresSQL --- - -[.card] --- -[.card-title] -xref:persistence/postgresql-flyway-migration.adoc[] -[.card-description] -Migrating your existing PostgreSQL Database with changes from the {product_name} upgrade using Flyway --- - -[.card] --- -[.card-title] -xref:persistence/integration-tests-with-postgresql.adoc[] +xref:persistence/core-concepts.adoc[] [.card-description] -Learn how to integrate tests on Quarkus Workflow Applications that use PostgreSQL as a persistence storage +Learn about the core concepts of persistence in {product_name} -- [.card-section] @@ -322,33 +249,9 @@ Learn about the options to deploy workflow applications in Kubernetes [.card] -- [.card-title] -xref:integrations/camel-routes-integration.adoc[] -[.card-description] -Learn how to use Camel Routes within your workflow application --- - -[.card] --- -[.card-title] -xref:integrations/custom-functions-knative.adoc[] +xref:integrations/core-concepts.adoc[] [.card-description] -Learn how to invoke Knative Services from {product_name} custom functions --- - -[.card] --- -[.card-title] -xref:integrations/expose-metrics-to-prometheus.adoc[] -[.card-description] -Exposing the workflow base metrics to Prometheus --- - -[.card] --- -[.card-title] -xref:integrations/serverless-dashboard-with-runtime-data.adoc[] -[.card-description] -Learn how to use dashboards to display the runtime data of your workflow application +Learn how to handle integrations of external services in a {product_name} application -- [.card-section] @@ -362,14 +265,6 @@ xref:job-services/core-concepts.adoc[] Details about Job Service to control timers in {PRODUCT_NAME} --
-[.card] --- -[.card-title] -xref:job-services/quarkus-extensions.adoc[] -[.card-description] -Details about how to configure you Quarkus Workflow Project to interact with the Job Service in {PRODUCT_NAME} --- - [.card-section] == Data Index service @@ -389,37 +284,18 @@ xref:data-index/data-index-service.adoc[] Go deeper in details about Data Index as standalone service deployment. -- -[.card] --- -[.card-title] -xref:data-index/data-index-quarkus-extension.adoc[] -[.card-description] -Explore Data Index as Quarkus extensions in {PRODUCT_NAME} --- +== Use Cases +Collection of guides showcasing core concepts of {product_name} or providing a solution to a specific problem in our domain. -[.card] --- -[.card-title] -xref:data-index/data-index-usecase-singleton.adoc[Operator Data Index Deployment] -[.card-description] -Learn about the options to deploy workflow applications and Data Index using The {PRODUCT_NAME} Operator --- +In the `Advanced Developer Use Cases` section, you can find guides that use Java and Quarkus to create {product_name} applications. These guides allow users to vastly customize their applications depending on their use case. Good understanding and knowledge of these technologies is expected.
[.card-section] -== Use Cases +== Advanced Developer Use Cases [.card] -- [.card-title] -xref:use-cases/orchestration-based-saga-pattern.adoc[] +xref:use-cases/advanced-developer-use-cases/index.adoc[{product_name} with Quarkus] [.card-description] -Learn how and when to use the SAGA pattern in your workflow projects +Learn how to develop & customize {product_name} applications with Quarkus -- - -[.card] --- -[.card-title] -xref:use-cases/timeout-showcase-example.adoc[] -[.card-description] -Learn how and when to use timeout in your workflow projects --- \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/integrations/core-concepts.adoc b/serverlessworkflow/modules/ROOT/pages/integrations/core-concepts.adoc new file mode 100644 index 000000000..01bd01178 --- /dev/null +++ b/serverlessworkflow/modules/ROOT/pages/integrations/core-concepts.adoc @@ -0,0 +1,3 @@ += Introduction + +This guide describes the possibilities of workflow services integrations. \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/job-services/core-concepts.adoc b/serverlessworkflow/modules/ROOT/pages/job-services/core-concepts.adoc index 491087b6d..1cd2fdf6a 100644 --- a/serverlessworkflow/modules/ROOT/pages/job-services/core-concepts.adoc +++ b/serverlessworkflow/modules/ROOT/pages/job-services/core-concepts.adoc @@ -31,7 +31,7 @@ For example, every time the workflow execution reaches a state with a configured image::job-services/Time-Based-States-And-Job-Service-Interaction.png[] -To set up this integration you can use different xref:job-services/quarkus-extensions.adoc#job-service-quarkus-extensions[communication alternatives], that must be configured by combining the Job Service and the Quarkus Workflow Project configurations.
+To set up this integration you can use different xref:use-cases/advanced-developer-use-cases/job-service/quarkus-extensions.adoc#job-service-quarkus-extensions[communication alternatives], that must be configured by combining the Job Service and the Quarkus Workflow Project configurations. [NOTE] ==== @@ -59,7 +59,7 @@ We recommend that you follow this procedure: 1. Identify the <> to use and see the required configuration parameters. 2. Identify if the <> is required for your needs and see the required configuration parameters. -3. Identify if the project containing your workflows is configured with the appropriate xref:job-services/quarkus-extensions.adoc#job-service-quarkus-extensions[Job Service Quarkus Extension]. +3. Identify if the project containing your workflows is configured with the appropriate xref:use-cases/advanced-developer-use-cases/job-service/quarkus-extensions.adoc#job-service-quarkus-extensions[Job Service Quarkus Extension]. Finally, to run the image, you must use the <>, and other configurations that you can set using <> or using <>. @@ -127,7 +127,7 @@ spec: [NOTE] ==== This is the recommended approach when you execute the Job Service in kubernetes. -The timeouts showcase example xref:use-cases/timeout-showcase-example.adoc#execute-quarkus-project-standalone-services[Quarkus Workflow Project with standalone services] contains an example of this configuration, https://github.com/apache/incubator-kie-kogito-examples/blob/main/serverless-workflow-examples/serverless-workflow-timeouts-showcase-extended/kubernetes/jobs-service-postgresql.yml#L65[see]. 
+The timeouts showcase example xref:use-cases/advanced-developer-use-cases/timeouts/timeout-showcase-example.adoc#execute-quarkus-project-standalone-services[Quarkus Workflow Project with standalone services] contains an example of this configuration, https://github.com/apache/incubator-kie-kogito-examples/blob/main/serverless-workflow-examples/serverless-workflow-timeouts-showcase-extended/kubernetes/jobs-service-postgresql.yml#L65[see]. ==== [#using-java-like-system-properties] @@ -213,7 +213,7 @@ PostgreSQL is the recommended database to use with the Job Service. Additionally, it provides an initialization procedure that integrates Flyway for the database initialization. Which automatically controls the database schema, in this way, the tables are created or updated by the service when required. In case you need to externally control the database schema, you can check and apply the DDL scripts for the Job Service in the same way as described in -xref:persistence/postgresql-flyway-migration.adoc#manually-executing-scripts[Manually executing scripts] guide. +xref:use-cases/advanced-developer-use-cases/persistence/postgresql-flyway-migration.adoc#manually-executing-scripts[Manually executing scripts] guide. To configure the PostgreSQL persistence you must provide these configurations: @@ -278,7 +278,7 @@ Using system properties with java like names:: |=== ==== -The timeouts showcase example xref:use-cases/timeout-showcase-example.adoc#execute-quarkus-project-standalone-services[Quarkus Workflow Project with standalone services], shows how to run a PostgreSQL based Job Service as a Kubernetes deployment. +The timeouts showcase example xref:use-cases/advanced-developer-use-cases/timeouts/timeout-showcase-example.adoc#execute-quarkus-project-standalone-services[Quarkus Workflow Project with standalone services], shows how to run a PostgreSQL based Job Service as a Kubernetes deployment. 
In your local environment you might have to change some of these values to point to your own PostgreSQL database. [#job-service-ephemeral] @@ -449,7 +449,7 @@ The following snippets shows an example on how you can configure these resources [NOTE] ==== -We recommend that you visit this example xref:use-cases/timeout-showcase-example.adoc#execute-quarkus-project-standalone-services[Quarkus Workflow Project with standalone services] to see a full setup of all these configurations. +We recommend that you visit this example xref:use-cases/advanced-developer-use-cases/timeouts/timeout-showcase-example.adoc#execute-quarkus-project-standalone-services[Quarkus Workflow Project with standalone services] to see a full setup of all these configurations. ==== [#knative-eventing-supporting-resources-trigger-create] diff --git a/serverlessworkflow/modules/ROOT/pages/migration-guide/index.adoc b/serverlessworkflow/modules/ROOT/pages/migration-guide/index.adoc new file mode 100644 index 000000000..05569fcf4 --- /dev/null +++ b/serverlessworkflow/modules/ROOT/pages/migration-guide/index.adoc @@ -0,0 +1,5 @@ += Migration guide for {product_name} components + +In this chapter you will find guides focusing on migration of our components across versions. +We provide migration guides only when needed, so if there is no guide available it means the migration +is seamless.
\ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/operator/migration-guide.adoc b/serverlessworkflow/modules/ROOT/pages/migration-guide/operator/to-1.43.0-migration-guide.adoc similarity index 98% rename from serverlessworkflow/modules/ROOT/pages/cloud/operator/migration-guide.adoc rename to serverlessworkflow/modules/ROOT/pages/migration-guide/operator/to-1.43.0-migration-guide.adoc index 17fc99604..43680122d 100644 --- a/serverlessworkflow/modules/ROOT/pages/cloud/operator/migration-guide.adoc +++ b/serverlessworkflow/modules/ROOT/pages/migration-guide/operator/to-1.43.0-migration-guide.adoc @@ -1,4 +1,5 @@ -# Migration Guide +# Migration Guide for Operator + ## Version 1.43.0 diff --git a/serverlessworkflow/modules/ROOT/pages/persistence/core-concepts.adoc b/serverlessworkflow/modules/ROOT/pages/persistence/core-concepts.adoc new file mode 100644 index 000000000..e76e183b5 --- /dev/null +++ b/serverlessworkflow/modules/ROOT/pages/persistence/core-concepts.adoc @@ -0,0 +1,20 @@ += Persistence in {product_name} + +:compat-mode!: +// Metadata: +:description: persistence in {product_name} applications +:keywords: sonataflow, workflow, serverless, timeout, timer, expiration, persistence +// links + +Persistence in {product_name} is available on demand as a service. +Using configuration properties, users are able to configure the persistence for their workflows as required. + +The persistence is provided by our Data Index service. +To learn more about the service, examine the links in additional resources. 
+ +== Additional resources + +* xref:data-index/data-index-core-concepts.adoc[] +* xref:data-index/data-index-service.adoc[] + +include::../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/service-orchestration/configuring-openapi-services-endpoints.adoc b/serverlessworkflow/modules/ROOT/pages/service-orchestration/configuring-openapi-services-endpoints.adoc index 0c4911526..ab8568f2f 100644 --- a/serverlessworkflow/modules/ROOT/pages/service-orchestration/configuring-openapi-services-endpoints.adoc +++ b/serverlessworkflow/modules/ROOT/pages/service-orchestration/configuring-openapi-services-endpoints.adoc @@ -148,334 +148,7 @@ quarkus.rest-client.remoteCatalog.url=http://localhost:8282/ In the previous example, {product_name} uses `remoteCatalog` as configuration key. -[[proc-configure-openapi-services-endpoints-different-environments]] -== Configuring the OpenAPI services endpoints in different environments -You can use different MicroProfile ConfigSources, such as environment variables and Kubernetes ConfigMaps, and MicroProfile Config profiles to configure the OpenAPI services in different environments. For more information about MicoProfile ConfigSources, see link:https://download.eclipse.org/microprofile/microprofile-config-2.0/microprofile-config-spec-2.0.html#configsource[ConfigSources]. - -[IMPORTANT] -==== -Some operating systems allow only alphabetic characters or an underscore (_), in environment variables. Other characters such as `.` and `/` are not allowed. You must use the link:https://download.eclipse.org/microprofile/microprofile-config-2.0/microprofile-config-spec-2.0.html#default_configsources.env.mapping[Environment Variables Mapping Rules], to set the value of a configuration property that contains a name with such characters. 
-==== - -The testing procedure described in this document is based on the `serverless-workflow-stock-profit` example application in link:{kogito_sw_examples_url}/serverless-workflow-stock-profit[GitHub repository]. The `serverless-workflow-stock-profit` example application is a workflow that computes the profit for a given stock based on an existing stock portfolio. - -The `serverless-workflow-stock-profit` example application sends request to the following services: - -* `stock-portfolio-service`: Calculates the stock portfolio profit for a given stock based on the current stock price. -* `stock-service`: Retrieves the current stock price. - -Developing an application using a service that returns different results every time can be difficult, therefore the `stock-service` uses the following implementations depending on the environment. - -* `real-stock-service` (default implementation): Returns the real stock price. This service returns a random price every time to simulate a real stock service. This implementation is used in normal or production environment. -* `fake-stock-service`: Returns the same price every time. This implementation is used in the development environment. 
- -The `stock-profit` service contains the following workflow definition: - -.Workflow definition in `stock-profit` service -[source,json] ----- -{ - "id": "stockprofit", - "specVersion": "0.8", - "version": "2.0.0-SNAPSHOT", - "name": "Stock profit Workflow", - "start": "GetStockPrice", - "functions": [ - { - "name": "getStockPriceFunction", - "operation": "openapi/stock-svc.yaml#getStockPrice" <1> - }, - { - "name": "getProfitFunction", - "operation": "openapi/stock-portfolio-svc.yaml#getStockProfit" <2> - } - ], - "states": [ - { - "name": "GetStockPrice", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "name": "getStockPrice", - "functionRef": { - "refName": "getStockPriceFunction", - "arguments": { - "symbol": ".symbol" - } - } - } - ], - "transition": "ComputeProfit" - }, - { - "name": "ComputeProfit", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "name": "getStockProfit", - "functionRef": { - "refName": "getProfitFunction", - "arguments": { - "symbol": ".symbol", - "currentPrice": ".currentPrice" - } - } - } - ], - "end": true - } - ] -} ----- - -<1> Defines the `stock-service` service operation -<2> Defines the `stock-portfolio-service` service operation - -{product_name} leverages Quarkus profiles to configure the workflow application depending on the target environment. - -To set properties for different profiles, each property needs to be prefixed with a percentage (%) followed by the profile name and a period (.) in the syntax as `%.config.name`. By default, Quarkus provides the following profiles that activate automatically in certain conditions: - -* `dev`: Activates in development mode, such as `quarkus:dev` -* `test`: Activates when tests are running -* `prod` (default profile): Activates when not running in development or test mode - -You can also create additional profiles and activate them using the `quarkus.profile` configuration property. 
For more information about Quarkus profiles, see link:{quarkus-profiles-url}[Profiles] in the Quarkus Configuration reference guide. - -[[proc-config-openapi-services-defining-urls]] -=== Defining URLs of the services in different environments - -You can define the URLs of the services in different environments by using profiles. - -.Procedure -. Create a file named `application.properties` in the `src/main/resources` directory of the workflow project, if the file does not exist. - -. In the `application.properties` file, add the OpenAPI configuration for the default environment: -+ --- -.Example properties in `application.properties` file -[source,properties] ----- -quarkus.rest-client.stock_svc_yaml.url=http://localhost:8383/ <1> -quarkus.rest-client.stock_portfolio_svc_yaml.url=http://localhost:8282/ ----- - -<1> URL of the `real-stock-service` service --- - -. In the `application.properties` file, add the OpenAPI configuration for the `dev` environment: -+ --- -.Example properties for development environment -[source,properties] ----- -%dev.quarkus.rest-client.stock_svc_yaml.url=http://localhost:8181/ <1> ----- - -<1> URL of the `fake-stock-service` service - -[NOTE] -==== -The `%dev.` prefix indicates the `dev` profile configuration, which is used when you run `mvn quarkus:dev` or `quarkus dev`. -==== --- - -[[proc-config-openapi-services-running-the-services]] -=== Running the services - -After defining the URLs of the services, you can run the services that the workflow sends request to. - -.Prerequisites -* URLs of the services in the different environments are defined. -+ -For more information, see <>. - -.Procedure -. In a separate command terminal window, run the `stock-portfolio-service` service: -+ --- -Run the `stock-portfolio-service` service -[source,shell] ----- -cd stock-portfolio-service -mvn quarkus:dev -Ddebug=false ----- - -You can access the `stock-portfolio-service` service at `http://localhost:8282/`. --- - -. 
In a separate command terminal window, run the `real-stock-service` service: -+ --- -Run `real-stock-service` service -[source,shell] ----- -cd real-stock-service -mvn quarkus:dev -Ddebug=false ----- - -You can access the `real-stock-service` service at `http://localhost:8383/`. --- - -. In a separate command terminal window, run the `fake-stock-service` service: -+ --- -.Run `fake-stock-service` service -[source,shell] ----- -cd fake-stock-service -mvn quarkus:dev -Ddebug=false ----- - -You can access the `fake-stock-service` service at `http://localhost:8181/`. --- - -[[proc-config-openapi-services-running-sw-application-in-development-mode]] -=== Running workflow application in development mode - -When you define `%dev.quarkus.rest-client.stock_svc_yaml.url=http://localhost:8181/`, the `fake-stock-service` service is used in the development mode and you get the same result every time you run the workflow. Using this example, you can run the workflow application in development mode. - -.Prerequisites -* Services that the workflow application sends requests to are started. -+ -For more information, see <>. - -.Procedure -. In a separate command terminal window, run the workflow application in development mode: -+ --- -.Run workflow application in development mode -[source,shell] ----- -cd stock-profit -mvn quarkus:dev -Ddebug=false ----- --- - -. In a separate command terminal window, send a request to the workflow application: -+ --- -.Example request -[source,shell] ----- -curl -X 'POST' \ - 'http://localhost:8080/stockprofit' \ - -H 'accept: */*' \ - -H 'Content-Type: application/json' \ - -d '{ "symbol": "KGTO" }' ----- - -.Example response -[source,json] ----- -{"id":"5ab5dcb8-5952-4730-b526-cace363774bb","workflowdata":{"symbol":"KGTO","currentPrice":75,"profit":"50%"}} ----- - -Note that, in the previous example `fake-stock-service` is used, therefore, the computed `profit` property is same no matter how many times you run the workflow. 
--- - -[[proc-config-openapi-services-running-sw-application-in-production-mode]] -=== Running workflow application in production mode - -When you define `quarkus.rest-client.stock_svc_yaml.url=http://localhost:8383/`, the `real-stock-service` service is used in the normal or production mode and you get different results every time you run the workflow. Using this example, you can run the workflow application in normal or production mode. - -.Prerequisites -* Services that the workflow application sends requests to are started. -+ -For more information, see <>. - -.Procedure -. In a separate command terminal window, package the workflow application to be run as fat JAR: -+ --- -.Package workflow application -[source,shell] ----- -cd stock-profit -mvn package ----- --- - -. In a separate command terminal window, run the workflow application in normal or production mode: -+ --- -.Run workflow application in normal or production mode -[source,shell] ----- -java -jar target/quarkus-app/quarkus-run.jar ----- --- - -. In a separate command terminal window, send a request to the workflow application: -+ --- -.Example request -[source,shell] ----- -curl -X 'POST' \ - 'http://localhost:8080/stockprofit' \ - -H 'accept: */*' \ - -H 'Content-Type: application/json' \ - -d '{ "symbol": "KGTO" }' ----- - -.Example response -[source,json] ----- -{"id":"a80c95d6-51fd-4ca9-b689-f779929c9937","workflowdata":{"symbol":"KGTO","currentPrice":59.36,"profit":"19%"}} ----- - -Note that, in the previous example, the `real-stock-service` is used, therefore, the computed `profit` property is different every time you run the workflow. --- - -[[proc-define-urls-using-environment-variables]] -=== Defining URLs of services in different environments using environment variables - -You can define the URLs of the services in different environments using profiles and environment variables. - -.Prerequisites -* Services that the workflow application sends requests to are started. 
-+ -For more information, see <>. - -.Procedure -. In a separate command terminal window, run the workflow application in development mode, overwriting the property defined in the `application.properties` file using an environment variable: -+ --- -.Run the workflow application in development mode -[source,shell] ----- -cd stock-profit -export _DEV_QUARKUS_REST_CLIENT_STOCK_SVC_YAML_URL=http://localhost:8383/ <1> -mvn quarkus:dev -Ddebug=false ----- - -<1> Overwrite the `%dev.quarkus.rest-client.stock_svc_yaml.url=http://localhost:8181/` defined in the `application.properties` file using an environment variable, which is pointing to `real-stock-service`. --- - -. In a separate command terminal window, send a request to the workflow application: -+ --- -.Example request -[source,shell] ----- -curl -X 'POST' \ - 'http://localhost:8080/stockprofit' \ - -H 'accept: */*' \ - -H 'Content-Type: application/json' \ - -d '{ "symbol": "KGTO" }' ----- - -.Example response -[source,json] ----- -{"id":"5ab5dcb8-5952-4730-b526-cace363774bb","workflowdata":{"symbol":"KGTO","currentPrice":56.35,"profit":"13%"}} ----- - -Note that, in the previous example, you overwrote the property defined in the `application.properties` file to point to `real-stock-service`, therefore, the computed `profit` property is different every time you run the workflow. 
--- == Additional resources diff --git a/serverlessworkflow/modules/ROOT/pages/service-orchestration/orchestration-of-openapi-based-services.adoc b/serverlessworkflow/modules/ROOT/pages/service-orchestration/orchestration-of-openapi-based-services.adoc index d89b7a860..8ca9a7da4 100644 --- a/serverlessworkflow/modules/ROOT/pages/service-orchestration/orchestration-of-openapi-based-services.adoc +++ b/serverlessworkflow/modules/ROOT/pages/service-orchestration/orchestration-of-openapi-based-services.adoc @@ -16,11 +16,11 @@ :mp_config_env_vars_url: https://github.com/eclipse/microprofile-config/blob/master/spec/src/main/asciidoc/configsources.asciidoc#environment-variables-mapping-rules // Referenced documentation pages. :getting-familiar-with-our-tooling: xref:getting-started/getting-familiar-with-our-tooling.adoc -:create-your-first-workflow-service: xref:getting-started/create-your-first-workflow-service.adoc -:build-workflow-image-with-quarkus-cli: xref:cloud/quarkus/build-workflow-image-with-quarkus-cli.adoc +:create-your-first-workflow-service: xref:getting-started/create-your-first-workflow-service-with-kn-cli-and-vscode.adoc +:build-workflow-image-with-quarkus-cli: xref:use-cases/advanced-developer-use-cases/getting-started/build-workflow-image-with-quarkus-cli.adoc :understanding-jq-expressions: xref:core/understanding-jq-expressions.adoc :configuring-openapi-services-endpoints: xref:service-orchestration/configuring-openapi-services-endpoints.adoc -:camel-k-integration: xref:integrations/camel-k-integration.adoc +:camel-k-integration: xref:use-cases/advanced-developer-use-cases/integrations/camel-routes-integration.adoc :authention-support-for-openapi-services: xref:security/authention-support-for-openapi-services.adoc This document describes how to call REST services using an link:{open_api_spec_url}[OpenAPI] specification file. 
diff --git a/serverlessworkflow/modules/ROOT/pages/service-orchestration/working-with-openapi-callbacks.adoc b/serverlessworkflow/modules/ROOT/pages/service-orchestration/working-with-openapi-callbacks.adoc index 9c997e9b7..0c5b1108e 100644 --- a/serverlessworkflow/modules/ROOT/pages/service-orchestration/working-with-openapi-callbacks.adoc +++ b/serverlessworkflow/modules/ROOT/pages/service-orchestration/working-with-openapi-callbacks.adoc @@ -19,109 +19,10 @@ image::eventing/callbacks-explained.png[] The workflow correlation described in this document focuses on the former mechanism that is based on the fact that each workflow instance contains a unique identifier, which is generated automatically. -[[ref-example-callback]] -== Example of the OpenAPI Callback - -To understand the OpenAPI Callback, see the link:{kogito_sw_examples_url}/serverless-workflow-callback-events-over-http-quarkus[`serverless-workflow-callback-events-over-http-quarkus`] example application in GitHub repository. - -This example contains a simple link:{kogito_sw_examples_url}/serverless-workflow-callback-events-over-http-quarkus/callback-workflow[workflow-service] that illustrates callback state using OpenAPI callbacks functionality. A callback is a state that invokes an action and wait for an event (event that will be eventually fired by the external service notified by the action). This example consists of a callback state that waits for an event to arrive at the wait channel. Its action calls an external service named link:{kogito_sw_examples_url}/serverless-workflow-callback-events-over-http-quarkus/callback-event-service[callback-event-service] that publishes the wait event over HTTP. After consuming the wait event, the workflow prints the message received in the wait event and ends the workflow. 
- -The `serverless-workflow-callback-events-over-http-quarkus` application is initiated with the following request to `http://localhost:8080/callback`: -[code,json] ----- -{ - "message": "Hello" -} ----- -Once the workflow is started, it makes an external service call with the callback URL and the workflow instance ID in the request body to link:{kogito_sw_examples_url}/serverless-workflow-callback-events-over-http-quarkus/callback-event-service[callback-event-service]. Then, as configured in the link:{kogito_sw_examples_url}/serverless-workflow-callback-events-over-http-quarkus/callback-workflow/src/main/resources/specs/callback.yaml[OpenAPI] file, the callback URL is invoked to send a CloudEvent to the workflow. - -Once the `wait` type CloudEvent is received by the callback-workflow-service, the workflow moves to the next state and ends successfully. The following figure shows the link:{kogito_sw_examples_url}/serverless-workflow-callback-events-over-http-quarkus[`serverless-workflow-callback-events-over-http-quarkus`] image: - -image::eventing/openapi-callback.png[] - -To use the OpenAPI callback in a workflow, the OpenAPI YAML file is configured with the callback as specified in the link:{kogito_sw_examples_url}/serverless-workflow-callback-events-over-http-quarkus/callback-workflow/src/main/resources/specs/callback.yaml[OpenAPI] file. - -To use the Callback state in a workflow, first CloudEvent type `wait` is declared that the workflow uses. Following is an example of CloudEvents declaration in a workflow definition: - -.Example of CloudEvent declaration in a workflow definition -[code,json] ----- - "events": [ - { - "name": "waitEvent", - "source": "", - "type": "wait" - } ----- - -After that, a Callback state is declared, which waits for a CloudEvent with the `wait` type. 
Following is an example of declaring a Callback state that handles the `wait` type CloudEvent: - -.Example of a Callback State declaration handling the `wait` CloudEvent -[code,json] ----- - { - "name": "waitForEvent", - "type": "callback", - "action": - { - "functionRef": { - "refName": "callBack", - "arguments": { - "uri": "http://localhost:8080/wait", - "processInstanceId": "$WORKFLOW.instanceId" - } - } - }, - "eventRef": "waitEvent", - "transition": "finish" - } ----- -[TIP] -==== -Please refer xref:service-orchestration/configuring-openapi-services-endpoints.adoc[configure openapi service endpoints] document to set the URL dynamically using an environment variable. -==== -An link:{kogito_sw_examples_url}/serverless-workflow-callback-events-over-http-quarkus/callback-event-service/src/main/java/org/kie/kogito/examples/CallbackResource.java[event listener] publishes a new `wait` type CloudEvent. Following is an example of a Java method that publishes the `wait` type CloudEvent: - -.Example of a Java method that makes a call to Callback URL and publishes the `wait` CloudEvent -[code,java] ----- - @POST - @Consumes(MediaType.APPLICATION_JSON) - public void wait(EventInput eventInput) throws JsonProcessingException { - logger.info("About to generate event for {}",eventInput); - CloudEventBuilder builder = CloudEventBuilder.v1() - .withId(UUID.randomUUID().toString()) - .withSource(URI.create("")) - .withType("wait") - .withTime(OffsetDateTime.now()) - .withExtension(CloudEventExtensionConstants.PROCESS_REFERENCE_ID, eventInput.getProcessInstanceId()) - .withData(objectMapper.writeValueAsBytes(Collections.singletonMap("message", "New Event"))); - - webClient.postAbs(eventInput.getUri()).sendJson(builder.build()).toCompletionStage(); - } ----- - -The callback-workflow-service consumes the CloudEvent, it contains an attribute named `kogitoprocrefid`, which holds the instance ID of the workflow. 
- -The `kogitoprocrefid` attribute is crucial because when the correlation is not used, then this attribute is the only way for the Callback state to identify that the related CloudEvent needs to be used to resume the workflow. For more information about correlation, see xref:eventing/event-correlation-with-workflows.adoc[Event correlation in {product_name}]. - -Note that each workflow is identified by a unique instance ID, which is automatically included in any published CloudEvent, as `kogitoprocinstanceid` CloudEvent extension. - -=== HTTP transport configuration -The `serverless-workflow-callback-events-over-http-quarkus` example application consumes the Cloudevents using Knative Eventing. For more information about incoming and outgoing CloudEvents oer HTTP, see xref:eventing/consume-produce-events-with-knative-eventing.adoc[Consuming and Producing CloudEvents over HTTP]. - -The HTTP path where the workflow application will listen for the CloudEvents in the `serverless-workflow-callback-events-over-http-quarkus` example application, is configured in the link:{kogito_sw_examples_url}/serverless-workflow-callback-events-over-http-quarkus/callback-workflow/src/main/resources/application.properties[`application.properties`] file as shown below: -[source,properties] ----- -mp.messaging.incoming.wait.connector=quarkus-http -mp.messaging.incoming.wait.path=/wait ----- - == Additional resources -* xref:getting-started/create-your-first-workflow-service.adoc[Creating your first workflow service] * xref:eventing/event-correlation-with-workflows.adoc[Event correlation in {product_name}] -* link:{open_api_swagger_spec_url}#callbacks[OpenAPI Callback Example] +* xref:use-cases/advanced-developer-use-cases/callbacks/openapi-callback-events-example.adoc[] include::../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/testing-and-troubleshooting/debugging-workflow-execution-runtime.adoc 
b/serverlessworkflow/modules/ROOT/pages/testing-and-troubleshooting/debugging-workflow-execution-runtime.adoc deleted file mode 100644 index 076a2eb19..000000000 --- a/serverlessworkflow/modules/ROOT/pages/testing-and-troubleshooting/debugging-workflow-execution-runtime.adoc +++ /dev/null @@ -1 +0,0 @@ -//= Debugging the workflow execution in runtime \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/testing-and-troubleshooting/development-tools-for-troubleshooting.adoc b/serverlessworkflow/modules/ROOT/pages/testing-and-troubleshooting/development-tools-for-troubleshooting.adoc deleted file mode 100644 index dd5c2ebef..000000000 --- a/serverlessworkflow/modules/ROOT/pages/testing-and-troubleshooting/development-tools-for-troubleshooting.adoc +++ /dev/null @@ -1 +0,0 @@ -//= Development tools for troubleshooting \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/testing-and-troubleshooting/kn-plugin-workflow-overview.adoc b/serverlessworkflow/modules/ROOT/pages/testing-and-troubleshooting/kn-plugin-workflow-overview.adoc index 809ee8870..cc5279cfd 100644 --- a/serverlessworkflow/modules/ROOT/pages/testing-and-troubleshooting/kn-plugin-workflow-overview.adoc +++ b/serverlessworkflow/modules/ROOT/pages/testing-and-troubleshooting/kn-plugin-workflow-overview.adoc @@ -95,7 +95,7 @@ The `create` command sets up {product_name} project containing a minimal "hello + For more information about installing the plug-in, see <>. 
ifeval::["{kogito_version_redhat}" != ""] -* You followed the steps in xref:getting-started/create-your-first-workflow-service.adoc#proc-configuring-maven-rhbq[Configuring your Maven project to Red Hat build of Quarkus and OpenShift Serverless Logic] +* You followed the steps in xref:use-cases/advanced-developer-use-cases/getting-started/create-your-first-workflow-service.adoc#proc-configuring-maven-rhbq[Configuring your Maven project to Red Hat build of Quarkus and OpenShift Serverless Logic] endif::[] .Procedure @@ -193,7 +193,7 @@ The `quarkus create` command sets up a {product_name} Quarkus project containing * {product_name} plug-in for Knative CLI is installed. For more information about installing the plug-in, see <>. ifeval::["{kogito_version_redhat}" != ""] -* You followed the steps in xref:getting-started/create-your-first-workflow-service.adoc#proc-configuring-maven-rhbq[Configuring your Maven project to Red Hat build of Quarkus and OpenShift Serverless Logic] +* You followed the steps in xref:use-cases/advanced-developer-use-cases/getting-started/create-your-first-workflow-service.adoc#proc-configuring-maven-rhbq[Configuring your Maven project to Red Hat build of Quarkus and OpenShift Serverless Logic] endif::[] .Procedure @@ -392,7 +392,7 @@ You can use the `kubectl` command line if you want to use a complex deployment s == Additional resources -* xref:getting-started/create-your-first-workflow-service.adoc[Creating your first Quarkus Workflow project] * xref:getting-started/create-your-first-workflow-service-with-kn-cli-and-vscode.adoc[Creating your first SonataFlow project] +* xref:use-cases/advanced-developer-use-cases/getting-started/create-your-first-workflow-service.adoc[Creating your first Quarkus Workflow project] include::../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/callbacks/callback-state-example.adoc 
b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/callbacks/callback-state-example.adoc new file mode 100644 index 000000000..cbc3a6680 --- /dev/null +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/callbacks/callback-state-example.adoc @@ -0,0 +1,117 @@ += Example of {product_name} application using callback state with Quarkus + +To understand the Callback state, see the link:{kogito_sw_examples_url}/serverless-workflow-callback-quarkus[`serverless-workflow-callback-quarkus`] example application in GitHub repository. + +The initial model of the `serverless-workflow-callback-quarkus` example application is empty. Once the workflow is started, it publishes a CloudEvent of the `resume` type and waits for a CloudEvent, which contains the type `wait`. + +A listener consumes the CloudEvent with the `resume` type and simulates the behavior of an external service. Consequently, on the external service side, when the actions associated with the `resume` type CloudEvent are completed, the listener publishes a `wait` type CloudEvent. Once the `wait` type CloudEvent is received, the workflow moves to the next state and ends successfully. + +To use the Callback state in a workflow, first CloudEvent types such as `resume` and `wait` are declared that the workflow uses. Following is an example of CloudEvents declaration in a workflow definition: + +.Example of CloudEvents declaration in a workflow definition +[code,json] +---- +"events": [ + { + "name": "resumeEvent", + "source": "", + "type": "resume" + }, + { + "name": "waitEvent", + "source": "", + "type": "wait" + } + ] +---- + +After that, a Callback state is declared. The Callback state publishes a `resume` type CloudEvent and waits for a CloudEvent with `wait` type. The published CloudEvent contains a `move` data field, and the CloudEvent that is received is expected to contain a `result` data field. 
According to the link:{spec_doc_url}#event-data-filters[eventDataFilter], the `result` data field is added to the workflow model as a `move` field. + +Following is an example of declaring a Callback state that handles the `wait` type CloudEvent: + +.Example of a Callback State declaration handling the `wait` CloudEvent +[code,json] +---- +{ + "name": "waitForEvent", + "type": "callback", + "action": { + "name": "publishAction", + "eventRef": { + "triggerEventRef": "resumeEvent", + "data": "{move: \"This is the initial data in the model\"}" + } + }, + "eventRef": "waitEvent", + "eventDataFilter": { + "data": ".result", + "toStateData": ".move" + }, + "transition": "finish" + } +---- + +An link:{kogito_sw_examples_url}/serverless-workflow-callback-quarkus/src/main/java/org/kie/kogito/examples/PrintService.java[event listener] consumes the `resume` type CloudEvent and publishes a new `wait` type CloudEvent. Following is an example of a Java method that publishes the `wait` type CloudEvent: + +.Example of a Java method that publishes the `wait` CloudEvent +[code,java] +---- + + private String generateCloudEvent(String id, String input) { + Map eventBody = new HashMap<>(); + eventBody.put("result", input + " and has been modified by the event publisher"); + eventBody.put("dummyEventVariable", "This will be discarded by the process"); + try { + return objectMapper.writeValueAsString(CloudEventBuilder.v1() + .withId(UUID.randomUUID().toString()) + .withSource(URI.create("")) + .withType("wait") + .withTime(OffsetDateTime.now()) + .withExtension(CloudEventExtensionConstants.PROCESS_REFERENCE_ID, id) + .withData(objectMapper.writeValueAsBytes(eventBody)) + .build()); + } catch (JsonProcessingException e) { + throw new IllegalArgumentException(e); + } + + } +---- + +After that, the workflow application consumes the event published by the listener and sets the result field. 
The consumed CloudEvent contains an attribute named `kogitoprocrefid`, which holds the workflow instance ID of the workflow. + +The `kogitoprocrefid` attribute is crucial because when the correlation is not used, then this attribute is the only way for the Callback state to identify that the related CloudEvent needs to be used to resume the workflow. For more information about correlation, see xref:eventing/event-correlation-with-workflows.adoc[Event correlation in {product_name}]. + +Note that each workflow is identified by a unique instance ID, which is automatically included in any published CloudEvent, as `kogitoprocinstanceid` CloudEvent extension. + +The following example shows that the event listener takes the workflow instance ID of a workflow from a CloudEvent attribute named `kogitoprocinstanceid`, which is associated with the CloudEvent that is consumed. + +.Example of a Java method that consumes the `resume` CloudEvent +[source,java] +---- + @Incoming("in-resume") + @Outgoing("out-wait") + @Acknowledgment(Strategy.POST_PROCESSING) + public String onEvent(Message message) { + Optional ce = CloudEventUtils.decode(message.getPayload()); + JsonCloudEventData cloudEventData = (JsonCloudEventData) ce.get().getData(); + return generateCloudEvent(ce.get().getExtension(CloudEventExtensionConstants.PROCESS_INSTANCE_ID).toString(), cloudEventData.getNode().get("move").asText()); + } +---- + +Apache Kafka configuration in `serverless-workflow-callback-quarkus`:: ++ +-- +The `serverless-workflow-callback-quarkus` example application requires an external broker to manage the associated CloudEvents. The default setup in the `serverless-workflow-callback-quarkus` example application uses link:{kafka_doc_url}[Apache Kafka]. However, you can also use xref:use-cases/advanced-developer-use-cases/event-orchestration/consume-produce-events-with-knative-eventing.adoc[Knative Eventing]. + +Apache Kafka uses topics to publish or consume messages. 
In the `serverless-workflow-callback-quarkus` example application, two topics are used, matching the name of the CloudEvent types that are defined in the workflow, such as `resume` and `wait`. The `resume` and `wait` CloudEvent types are configured in the link:{kogito_sw_examples_url}/serverless-workflow-callback-quarkus/src/main/resources/application.properties[`application.properties`] file. + +For more information about using Apache Kafka with events, see xref:use-cases/advanced-developer-use-cases/event-orchestration/consume-producing-events-with-kafka.adoc[Consuming and producing events using Apache Kafka]. +-- ++ + +== Additional resources + +* xref:eventing/event-correlation-with-workflows.adoc[Event correlation in {product_name}] +* xref:use-cases/advanced-developer-use-cases/callbacks/openapi-callback-events-example.adoc[] + +include::../../../../pages/_common-content/report-issue.adoc[] \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/callbacks/openapi-callback-events-example.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/callbacks/openapi-callback-events-example.adoc new file mode 100644 index 000000000..3a03ba2dc --- /dev/null +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/callbacks/openapi-callback-events-example.adoc @@ -0,0 +1,103 @@ += Example of {product_name} application using OpenAPI callback events with Quarkus + +To understand the OpenAPI Callback, see the link:{kogito_sw_examples_url}/serverless-workflow-callback-events-over-http-quarkus[`serverless-workflow-callback-events-over-http-quarkus`] example application in GitHub repository. + +This example contains a simple link:{kogito_sw_examples_url}/serverless-workflow-callback-events-over-http-quarkus/callback-workflow[workflow-service] that illustrates callback state using OpenAPI callbacks functionality. 
A callback is a state that invokes an action and waits for an event (event that will be eventually fired by the external service notified by the action). This example consists of a callback state that waits for an event to arrive at the wait channel. Its action calls an external service named link:{kogito_sw_examples_url}/serverless-workflow-callback-events-over-http-quarkus/callback-event-service[callback-event-service] that publishes the wait event over HTTP. After consuming the wait event, the workflow prints the message received in the wait event and ends the workflow. + +The `serverless-workflow-callback-events-over-http-quarkus` application is initiated with the following request to `http://localhost:8080/callback`: +[code,json] +---- +{ + "message": "Hello" +} +---- +Once the workflow is started, it makes an external service call with the callback URL and the workflow instance ID in the request body to link:{kogito_sw_examples_url}/serverless-workflow-callback-events-over-http-quarkus/callback-event-service[callback-event-service]. Then, as configured in the link:{kogito_sw_examples_url}/serverless-workflow-callback-events-over-http-quarkus/callback-workflow/src/main/resources/specs/callback.yaml[OpenAPI] file, the callback URL is invoked to send a CloudEvent to the workflow. + +Once the `wait` type CloudEvent is received by the callback-workflow-service, the workflow moves to the next state and ends successfully. The following figure shows the link:{kogito_sw_examples_url}/serverless-workflow-callback-events-over-http-quarkus[`serverless-workflow-callback-events-over-http-quarkus`] image: + +image::eventing/openapi-callback.png[] + +To use the OpenAPI callback in a workflow, the OpenAPI YAML file is configured with the callback as specified in the link:{kogito_sw_examples_url}/serverless-workflow-callback-events-over-http-quarkus/callback-workflow/src/main/resources/specs/callback.yaml[OpenAPI] file. 
+ +To use the Callback state in a workflow, first CloudEvent type `wait` is declared that the workflow uses. Following is an example of CloudEvents declaration in a workflow definition: + +.Example of CloudEvent declaration in a workflow definition +[code,json] +---- + "events": [ + { + "name": "waitEvent", + "source": "", + "type": "wait" + } +---- + +After that, a Callback state is declared, which waits for a CloudEvent with the `wait` type. Following is an example of declaring a Callback state that handles the `wait` type CloudEvent: + +.Example of a Callback State declaration handling the `wait` CloudEvent +[code,json] +---- + { + "name": "waitForEvent", + "type": "callback", + "action": + { + "functionRef": { + "refName": "callBack", + "arguments": { + "uri": "http://localhost:8080/wait", + "processInstanceId": "$WORKFLOW.instanceId" + } + } + }, + "eventRef": "waitEvent", + "transition": "finish" + } +---- +[TIP] +==== +Please refer to the xref:use-cases/advanced-developer-use-cases/service-orchestration/configuring-openapi-services-endpoints-with-quarkus.adoc[Configuring OpenAPI services endpoints with Quarkus] document to set the URL dynamically using an environment variable. +==== +An link:{kogito_sw_examples_url}/serverless-workflow-callback-events-over-http-quarkus/callback-event-service/src/main/java/org/kie/kogito/examples/CallbackResource.java[event listener] publishes a new `wait` type CloudEvent. 
Following is an example of a Java method that publishes the `wait` type CloudEvent: + +.Example of a Java method that makes a call to Callback URL and publishes the `wait` CloudEvent +[code,java] +---- + @POST + @Consumes(MediaType.APPLICATION_JSON) + public void wait(EventInput eventInput) throws JsonProcessingException { + logger.info("About to generate event for {}",eventInput); + CloudEventBuilder builder = CloudEventBuilder.v1() + .withId(UUID.randomUUID().toString()) + .withSource(URI.create("")) + .withType("wait") + .withTime(OffsetDateTime.now()) + .withExtension(CloudEventExtensionConstants.PROCESS_REFERENCE_ID, eventInput.getProcessInstanceId()) + .withData(objectMapper.writeValueAsBytes(Collections.singletonMap("message", "New Event"))); + + webClient.postAbs(eventInput.getUri()).sendJson(builder.build()).toCompletionStage(); + } +---- + +The callback-workflow-service consumes the CloudEvent, which contains an attribute named `kogitoprocrefid`, which holds the instance ID of the workflow. + +The `kogitoprocrefid` attribute is crucial because when the correlation is not used, then this attribute is the only way for the Callback state to identify that the related CloudEvent needs to be used to resume the workflow. For more information about correlation, see xref:eventing/event-correlation-with-workflows.adoc[Event correlation in {product_name}]. + +Note that each workflow is identified by a unique instance ID, which is automatically included in any published CloudEvent, as `kogitoprocinstanceid` CloudEvent extension. + +== HTTP transport configuration +The `serverless-workflow-callback-events-over-http-quarkus` example application consumes the CloudEvents using Knative Eventing. For more information about incoming and outgoing CloudEvents over HTTP, see xref:use-cases/advanced-developer-use-cases/event-orchestration/consume-produce-events-with-knative-eventing.adoc[Consuming and Producing CloudEvents over HTTP]. 
+ +The HTTP path where the workflow application will listen for the CloudEvents in the `serverless-workflow-callback-events-over-http-quarkus` example application is configured in the link:{kogito_sw_examples_url}/serverless-workflow-callback-events-over-http-quarkus/callback-workflow/src/main/resources/application.properties[`application.properties`] file as shown below: +[source,properties] +---- +mp.messaging.incoming.wait.connector=quarkus-http +mp.messaging.incoming.wait.path=/wait +---- + +== Additional resources + +* xref:eventing/event-correlation-with-workflows.adoc[Event correlation in {product_name}] +* xref:use-cases/advanced-developer-use-cases/callbacks/callback-state-example.adoc[] + +include::../../../../pages/_common-content/report-issue.adoc[] \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/data-index/common/_dataindex_deployment_operator.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/data-index/common/_dataindex_deployment_operator.adoc similarity index 100% rename from serverlessworkflow/modules/ROOT/pages/data-index/common/_dataindex_deployment_operator.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/data-index/common/_dataindex_deployment_operator.adoc diff --git a/serverlessworkflow/modules/ROOT/pages/data-index/common/_prerequisites.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/data-index/common/_prerequisites.adoc similarity index 100% rename from serverlessworkflow/modules/ROOT/pages/data-index/common/_prerequisites.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/data-index/common/_prerequisites.adoc diff --git a/serverlessworkflow/modules/ROOT/pages/data-index/common/_querying_dataindex.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/data-index/common/_querying_dataindex.adoc similarity index 100% 
rename from serverlessworkflow/modules/ROOT/pages/data-index/common/_querying_dataindex.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/data-index/common/_querying_dataindex.adoc diff --git a/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/data-index/data-index-as-quarkus-dev-service.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/data-index/data-index-as-quarkus-dev-service.adoc new file mode 100644 index 000000000..54e4a5a7b --- /dev/null +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/data-index/data-index-as-quarkus-dev-service.adoc @@ -0,0 +1,141 @@ += {data_index_ref} as a Quarkus Development service +When you use the {product_name} Process Quarkus extension, a temporary {data_index_ref} service is automatically provisioned while the Quarkus application is running in development mode. When you use one of the following Quarkus extensions, the Dev Service is set up for immediate use: + +.{product_name} main Quarkus extension +[source,xml] +---- + + org.kie.kogito + kogito-quarkus + +---- + +.{product_name} Quarkus extension +[source,xml] +---- + + org.kie.kogito + kogito-quarkus-serverless-workflow + +---- + +When you start your Quarkus project in development mode, an in-memory instance of the {data_index_ref} service is automatically started in the background. This feature is enabled by link:{dev_services_url}[Quarkus Dev Services], and leverages link:{test_containers_url}[Testcontainers] to start an image of the {data_index_ref} service. + +The {product_name} Process Quarkus extension sets up your Quarkus application to automatically replicate any {product_name} messaging events related to {workflow_instances} or jobs into the provisioned {data_index_ref} instance. 
+ +Once the service is up and running, you can query the GraphQL interface directly using `http://localhost:8180/graphql` or using the Quarkus Dev UI console `http://localhost:8080/q/dev`. + +The {data_index_ref} GraphQL endpoint can query for `ProcessInstances` and `Jobs`. For more information about operations and attributes to query, see xref:data-index/data-index-core-concepts.adoc#data-index-graphql[GraphQL endpoint provided by {data_index_ref}] section. + +You can share the same {data_index_ref} instance across multiple {product_name} services during development. Sharing {data_index_ref} instances is enabled by default, therefore, only one {data_index_ref} instance is started. This behavior can be adjusted to start multiple instances using the `quarkus.kogito.devservices.shared` property. + +The Quarkus Dev Service also allows further configuration options including: + +* To disable {data_index_ref} Dev Service, use the `quarkus.kogito.devservices.enabled=false` property. +* To change the port where the {data_index_ref} Dev Service runs, use the `quarkus.kogito.devservices.port=8180` property. +* To adjust the provisioned image, use `quarkus.kogito.devservices.imageName=quay.io/kiegroup/kogito-data-index-ephemeral` property. +* To disable sharing the {data_index_ref} instance across multiple Quarkus applications, use `quarkus.kogito.devservices.shared=false` property. + +For more information about Quarkus Dev Services, see link:{dev_services_url}[Dev Services guide]. + +[#data-index-service-configuration-properties] +== {data_index_ref} service configuration properties +The following table serves as a quick reference for commonly used {data_index_ref} configuration properties. 
+ +.Common configuration properties +[cols="40%,35%,10%,10%,5%", options="header"] +|=== +|Property|Description|Type|Default value|Override at runtime + +|`QUARKUS_DATASOURCE_JDBC_URL` +| The datasource URL +| string +| +| Yes + +|`QUARKUS_DATASOURCE_USERNAME` +| The datasource username +| string +| +| Yes + +|`QUARKUS_DATASOURCE_PASSWORD` +| The datasource password +| string +| +| Yes + +|`QUARKUS_DATASOURCE_DB_KIND` +a|The kind of database to connect: `postgresql`,.. +|string +| +|Yes + +|`QUARKUS_FLYWAY_MIGRATE_AT_START` +| `true` to execute Flyway automatically when the application starts, false otherwise. +| boolean +| false +| Yes + +|`KOGITO_DATA_INDEX_QUARKUS_PROFILE` +a| (Only when referencing an image distribution) + +Allows to change the event connection type. The possible values are: + +* `kafka-events-support` +* `http-events-support` + +| string +| `kafka-events-support` +| Yes + +|`kogito.persistence.query.timeout.millis` +|Defines timeout for a query execution. +|long +|`10000` +|Yes + +|`quarkus.kogito.devservices.enabled` +|Enables or disables the Dev Services for workflows. By default, the Dev Services are enabled, unless an existing configuration is present. +|boolean +|`true` +|No + +|`quarkus.kogito.devservices.port` +|Defines the optional fixed port that the Dev Services listen to. +|int +|`8180` +|No + +|`quarkus.kogito.devservices.image-name` +|Defines the {data_index_ref} image to use in Dev Service. +|string +|`quay.io/kiegroup/kogito-data-index-ephemeral:{page-component-version}` +|No + +|`quarkus.kogito.devservices.shared` +|Indicates if the {data_index_ref} instance, which is managed by Quarkus Dev Services, is shared. +|boolean +|`true` +|No + +|`quarkus.kogito.devservices.service-name` +|Defines the value of the label that is attached to the started container. This property is used when `shared` is set to `true`. 
+|string +|`kogito-data-index` +|No + + +|=== + +--- + +== Additional resources + +* xref:data-index/data-index-core-concepts.adoc[] +* xref:use-cases/advanced-developer-use-cases/deployments/deploying-on-minikube.adoc[] +* xref:use-cases/advanced-developer-use-cases/event-orchestration/consume-producing-events-with-kafka.adoc[] +* xref:use-cases/advanced-developer-use-cases/event-orchestration/consume-produce-events-with-knative-eventing.adoc[] +* xref:use-cases/advanced-developer-use-cases/timeouts/timeout-showcase-example.adoc[Timeout example in {product_name}] + +include::../../../_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/data-index/data-index-quarkus-extension.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/data-index/data-index-quarkus-extension.adoc similarity index 94% rename from serverlessworkflow/modules/ROOT/pages/data-index/data-index-quarkus-extension.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/data-index/data-index-quarkus-extension.adoc index 0304013b1..8d6d61488 100644 --- a/serverlessworkflow/modules/ROOT/pages/data-index/data-index-quarkus-extension.adoc +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/data-index/data-index-quarkus-extension.adoc @@ -4,9 +4,9 @@ :description: Data Index Service to allow to index and query audit data in {product_name} :keywords: workflow, serverless, data, dataindex, data-index, index, service, extension, quarkus // Referenced documentation pages -:persistence_with_postgresql_guide: xref:persistence/persistence-with-postgresql.adoc +:persistence_with_postgresql_guide: xref:use-cases/advanced-developer-use-cases/persistence/persistence-with-postgresql.adoc :data-index-core-concepts_guide: xref:data-index/data-index-core-concepts.adoc -:getting_started_create_first_workflow_guide: xref:getting-started/create-your-first-workflow-service.adoc 
+:getting_started_create_first_workflow_guide: xref:use-cases/advanced-developer-use-cases/getting-started/create-your-first-workflow-service.adoc // External pages :kogito_sw_timeouts_showcase_embedded_example_url: {kogito_sw_examples_url}/serverless-workflow-timeouts-showcase-embedded :kogito_sw_timeouts_showcase_embedded_example_application_properties_url: {kogito_sw_timeouts_showcase_embedded_example_url}/src/main/resources/application.properties @@ -196,9 +196,9 @@ For more information, see link:{kogito_sw_dataindex_persistence_example_url}[`s == Additional resources -* xref:getting-started/create-your-first-workflow-service.adoc[] -* xref:persistence/persistence-with-postgresql.adoc[] +* xref:use-cases/advanced-developer-use-cases/getting-started/create-your-first-workflow-service.adoc[] +* xref:use-cases/advanced-developer-use-cases/persistence/persistence-with-postgresql.adoc[] * xref:data-index/data-index-core-concepts.adoc[] -include::../../pages/_common-content/report-issue.adoc[] +include::../../../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/data-index/data-index-usecase-multi.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/data-index/data-index-usecase-multi.adoc similarity index 94% rename from serverlessworkflow/modules/ROOT/pages/data-index/data-index-usecase-multi.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/data-index/data-index-usecase-multi.adoc index c71d1191a..c090fb093 100644 --- a/serverlessworkflow/modules/ROOT/pages/data-index/data-index-usecase-multi.adoc +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/data-index/data-index-usecase-multi.adoc @@ -1,4 +1,4 @@ -= Deploying Data Index and multiple {product_name} application on Minikube += Deploying Data Index and multiple {product_name} applications on Minikube :compat-mode!: // Metadata: :description: Deploying Multiple 
{product_name} pushing to single Data Index on Minikube @@ -200,8 +200,8 @@ include::common/_querying_dataindex.adoc[] == Additional resources * xref:data-index/data-index-core-concepts.adoc[] -* xref:data-index/data-index-usecase-singleton.adoc[] -* xref:cloud/quarkus/deploying-on-minikube.adoc[] +* xref:use-cases/advanced-developer-use-cases/data-index/data-index-usecase-singleton.adoc[] +* xref:use-cases/advanced-developer-use-cases/deployments/deploying-on-minikube.adoc[] * xref:cloud/operator/install-serverless-operator.adoc[] -include::../../pages/_common-content/report-issue.adoc[] +include::../../../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/data-index/data-index-usecase-singleton.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/data-index/data-index-usecase-singleton.adoc similarity index 94% rename from serverlessworkflow/modules/ROOT/pages/data-index/data-index-usecase-singleton.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/data-index/data-index-usecase-singleton.adoc index e496a93f2..b9ada42c2 100644 --- a/serverlessworkflow/modules/ROOT/pages/data-index/data-index-usecase-singleton.adoc +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/data-index/data-index-usecase-singleton.adoc @@ -74,7 +74,7 @@ kubectl create namespace usecase1 . 
Deploy the {data_index_ref} Service and postgresql database: + -- -include::common/_dataindex_deployment_operator.adoc[] +include::common/_dataindex_deployment_operator.adoc[] Perform the deployments executing [source,shell] @@ -189,8 +189,8 @@ include::common/_querying_dataindex.adoc[] == Additional resources * xref:data-index/data-index-core-concepts.adoc[] -* xref:data-index/data-index-usecase-multi.adoc[] -* xref:cloud/quarkus/deploying-on-minikube.adoc[] +* xref:use-cases/advanced-developer-use-cases/data-index/data-index-usecase-multi.adoc[] +* xref:use-cases/advanced-developer-use-cases/deployments/deploying-on-minikube.adoc[] * xref:cloud/operator/install-serverless-operator.adoc[] -include::../../pages/_common-content/report-issue.adoc[] +include::../../../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/common/_common_proc_deploy_kubectl_oc.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/deployments/common/_common_proc_deploy_kubectl_oc.adoc similarity index 96% rename from serverlessworkflow/modules/ROOT/pages/cloud/common/_common_proc_deploy_kubectl_oc.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/deployments/common/_common_proc_deploy_kubectl_oc.adoc index 4527f5f9f..0dbd75e50 100644 --- a/serverlessworkflow/modules/ROOT/pages/cloud/common/_common_proc_deploy_kubectl_oc.adoc +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/deployments/common/_common_proc_deploy_kubectl_oc.adoc @@ -67,7 +67,7 @@ Once you have built your application, you can find the generated descriptors fil [IMPORTANT] ==== -The image used in this section is the one built in the following guide: xref:cloud/quarkus/build-workflow-image-with-quarkus-cli.adoc[Build Workflow Image with Quarkus CLI]. 
+The image used in this section is the one built in the following guide: xref:use-cases/advanced-developer-use-cases/getting-started/build-workflow-image-with-quarkus-cli.adoc[Build Workflow Image with Quarkus CLI]. ==== Following is an example of the generated files: diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/common/_create_namespace_and_deploy_info.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/deployments/common/_create_namespace_and_deploy_info.adoc similarity index 100% rename from serverlessworkflow/modules/ROOT/pages/cloud/common/_create_namespace_and_deploy_info.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/deployments/common/_create_namespace_and_deploy_info.adoc diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/common/_deploy_workflow_application_requisites.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/deployments/common/_deploy_workflow_application_requisites.adoc similarity index 64% rename from serverlessworkflow/modules/ROOT/pages/cloud/common/_deploy_workflow_application_requisites.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/deployments/common/_deploy_workflow_application_requisites.adoc index feda2539f..45a0ff2af 100644 --- a/serverlessworkflow/modules/ROOT/pages/cloud/common/_deploy_workflow_application_requisites.adoc +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/deployments/common/_deploy_workflow_application_requisites.adoc @@ -5,5 +5,5 @@ For more information, see {knative_procedure}. * Knative CLI is installed. * (Optional) Quarkus CLI is installed. + For more information, see link:{quarkus_cli_url}[Building Quarkus Apps with Quarkus command line interface (CLI)]. -* Your xref:cloud/quarkus/build-workflow-image-with-quarkus-cli.adoc[{product_name} application] is ready to use. 
+* Your xref:use-cases/advanced-developer-use-cases/getting-started/build-workflow-image-with-quarkus-cli.adoc[{product_name} application] is ready to use. diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/common/_prerequisites.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/deployments/common/_prerequisites.adoc similarity index 80% rename from serverlessworkflow/modules/ROOT/pages/cloud/common/_prerequisites.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/deployments/common/_prerequisites.adoc index abbb39ec3..8b4d98a90 100644 --- a/serverlessworkflow/modules/ROOT/pages/cloud/common/_prerequisites.adoc +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/deployments/common/_prerequisites.adoc @@ -1,7 +1,7 @@ .Prerequisites * Your {product_name} application is ready to use. + -For more information about building the application container, see xref:cloud/quarkus/build-workflow-image-with-quarkus-cli.adoc[Building workflow images using Quarkus CLI]. +For more information about building the application container, see xref:use-cases/advanced-developer-use-cases/getting-started/build-workflow-image-with-quarkus-cli.adoc[Building workflow images using Quarkus CLI]. 
* {environment_prereq} * `kubectl` {kubectl_prereq} diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/common/_proc_deploy_sw_kn_cli.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/deployments/common/_proc_deploy_sw_kn_cli.adoc similarity index 100% rename from serverlessworkflow/modules/ROOT/pages/cloud/common/_proc_deploy_sw_kn_cli.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/deployments/common/_proc_deploy_sw_kn_cli.adoc diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/common/_proc_deploy_sw_kubectl.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/deployments/common/_proc_deploy_sw_kubectl.adoc similarity index 100% rename from serverlessworkflow/modules/ROOT/pages/cloud/common/_proc_deploy_sw_kubectl.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/deployments/common/_proc_deploy_sw_kubectl.adoc diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/common/_proc_deploy_sw_oc.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/deployments/common/_proc_deploy_sw_oc.adoc similarity index 100% rename from serverlessworkflow/modules/ROOT/pages/cloud/common/_proc_deploy_sw_oc.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/deployments/common/_proc_deploy_sw_oc.adoc diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/common/_proc_deploy_sw_quarkus_cli.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/deployments/common/_proc_deploy_sw_quarkus_cli.adoc similarity index 100% rename from serverlessworkflow/modules/ROOT/pages/cloud/common/_proc_deploy_sw_quarkus_cli.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/deployments/common/_proc_deploy_sw_quarkus_cli.adoc diff --git 
a/serverlessworkflow/modules/ROOT/pages/cloud/common/_verify_if_swf_is_deployed.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/deployments/common/_verify_if_swf_is_deployed.adoc similarity index 100% rename from serverlessworkflow/modules/ROOT/pages/cloud/common/_verify_if_swf_is_deployed.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/deployments/common/_verify_if_swf_is_deployed.adoc diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/quarkus/deploying-on-kubernetes.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/deployments/deploying-on-kubernetes.adoc similarity index 89% rename from serverlessworkflow/modules/ROOT/pages/cloud/quarkus/deploying-on-kubernetes.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/deployments/deploying-on-kubernetes.adoc index 32b3bda4f..a208435a6 100644 --- a/serverlessworkflow/modules/ROOT/pages/cloud/quarkus/deploying-on-kubernetes.adoc +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/deployments/deploying-on-kubernetes.adoc @@ -1,4 +1,4 @@ -= Deploying your Serverless Workflow application on Kubernetes += Deploying your {product_name} application on Kubernetes :compat-mode!: // Metadata: :description: Deploying Serverless Application on Kubernetes @@ -31,7 +31,7 @@ This document describes how to deploy a {product_name} application using a Kubernetes cluster, along with a procedure to run the Knative platform. // shared pre req -include::../common/_prerequisites.adoc[subs=quotes+] +include::./common/_prerequisites.adoc[subs=quotes+] Before proceeding further, make sure that you have access to the Kubernetes cluster with Knative available. 
@@ -67,7 +67,7 @@ Once Knative is ready, you can initiate the process of deploying your {product_n // shared app req -include::../common/_deploy_workflow_application_requisites.adoc[] +include::./common/_deploy_workflow_application_requisites.adoc[] [IMPORTANT] ==== @@ -79,11 +79,10 @@ If the registry requires authentication you need to create a Pull Secret with th .Procedure . Create `serverless-workflow-greeting-quarkus` namespace using the following command: + -include::../common/_create_namespace_and_deploy_info.adoc[] +include::./common/_create_namespace_and_deploy_info.adoc[] In the following procedures, you can find different approaches to deploy your workflow application, such as: -* <> * <> * <> @@ -94,13 +93,10 @@ For this tutorial, we use the `default-domain` provided by Knative that configur the Magic DNS for naming resolution, for more details please check the Knative link:{knative_domain_dns_url}[documentation]. ==== -// deploy with kn-cli -include::../common/_proc_deploy_sw_kn_cli.adoc[] - // deploy with kubectl -include::../common/_proc_deploy_sw_kubectl.adoc[] +include::./common/_proc_deploy_sw_kubectl.adoc[] // deploy with quarkus-cli -include::../common/_proc_deploy_sw_quarkus_cli.adoc[] +include::./common/_proc_deploy_sw_quarkus_cli.adoc[] -include::../../../pages/_common-content/report-issue.adoc[] \ No newline at end of file +include::../../../../pages/_common-content/report-issue.adoc[] \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/quarkus/deploying-on-minikube.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/deployments/deploying-on-minikube.adoc similarity index 94% rename from serverlessworkflow/modules/ROOT/pages/cloud/quarkus/deploying-on-minikube.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/deployments/deploying-on-minikube.adoc index f55a7b405..ad3618ef3 100644 --- 
a/serverlessworkflow/modules/ROOT/pages/cloud/quarkus/deploying-on-minikube.adoc +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/deployments/deploying-on-minikube.adoc @@ -30,7 +30,7 @@ This document describes how to deploy your workflow application using a local Ku For more information about Minikube and related system requirements, see link:{minikube_url}/docs/start/[Getting started with Minikube] documentation. // shared pre req -include::../common/_prerequisites.adoc[] +include::./common/_prerequisites.adoc[] To deploy your workflow application on Minikube, you need to install Knative on Minikube. However, first you need to ensure that Minikube is installed correctly. @@ -128,7 +128,7 @@ To follow the manual process of installing Knative on Minikube, see link:{knativ Once you install Knative on Minikube, you can initiate the process of deploying your workflow application on Minikube. // shared app req -include::../common/_deploy_workflow_application_requisites.adoc[] +include::./common/_deploy_workflow_application_requisites.adoc[] .Procedure @@ -203,7 +203,7 @@ Status: . 
After starting the Minikube tunnel, create `serverless-workflow-greeting-quarkus` namespace using the following command: + -include::../common/_create_namespace_and_deploy_info.adoc[] +include::./common/_create_namespace_and_deploy_info.adoc[] In the following procedures, you can find different approaches to deploy your workflow application, such as: @@ -213,16 +213,16 @@ In the following procedures, you can find different approaches to deploy your wo // deploy with kn-cli -include::../common/_proc_deploy_sw_kn_cli.adoc[] +include::./common/_proc_deploy_sw_kn_cli.adoc[] // deploy with kubectl -include::../common/_proc_deploy_sw_kubectl.adoc[] +include::./common/_proc_deploy_sw_kubectl.adoc[] // deploy with quarkus-cli -include::../common/_proc_deploy_sw_quarkus_cli.adoc[] +include::./common/_proc_deploy_sw_quarkus_cli.adoc[] // verify deployed swf -include::../common/_verify_if_swf_is_deployed.adoc[] +include::./common/_verify_if_swf_is_deployed.adoc[] -include::../../../pages/_common-content/report-issue.adoc[] +include::../../../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/quarkus/deploying-on-openshift.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/deployments/deploying-on-openshift.adoc similarity index 94% rename from serverlessworkflow/modules/ROOT/pages/cloud/quarkus/deploying-on-openshift.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/deployments/deploying-on-openshift.adoc index 976e8717c..a6e8cc5ae 100644 --- a/serverlessworkflow/modules/ROOT/pages/cloud/quarkus/deploying-on-openshift.adoc +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/deployments/deploying-on-openshift.adoc @@ -27,7 +27,7 @@ This document describes how to deploy a {product_name} application using a OpenShift cluster, along with a procedure to run the OpenShift Serverless Operator, which is based on Knative. 
.Prerequisites -* Your xref:cloud/quarkus/build-workflow-image-with-quarkus-cli.adoc[{product_name} application] is ready to use. +* Your xref:use-cases/advanced-developer-use-cases/getting-started/build-workflow-image-with-quarkus-cli.adoc[{product_name} application] is ready to use. * link:{ocp_cli_url}[OpenShift CLI] is installed. * link:{ocp_kn_cli_url}[Knative CLI] is installed. * xref:testing-and-troubleshooting/kn-plugin-workflow-overview.adoc[Knative CLI workflow plugin] is installed. @@ -69,7 +69,7 @@ If you get error messages related to `Istio`, this link:{knative_istio_issue_url Once `Knative Serving` is ready, you can initiate the process of deploying your {product_name} application on OpenShift. // shared app req -include::../common/_deploy_workflow_application_requisites.adoc[] +include::./common/_deploy_workflow_application_requisites.adoc[] After checking the prerequisites, prepare the project that will be used to deploy your application: @@ -168,12 +168,12 @@ In the next steps you will notice the value **{k8s_registry}** being used. 
It is * <> // deploy with kn-cli -include::../common/_proc_deploy_sw_kn_cli.adoc[] +include::./common/_proc_deploy_sw_kn_cli.adoc[] // deploy with kubectl -include::../common/_proc_deploy_sw_oc.adoc[] +include::./common/_proc_deploy_sw_oc.adoc[] // deploy with quarkus-cli -include::../common/_proc_deploy_sw_quarkus_cli.adoc[] +include::./common/_proc_deploy_sw_quarkus_cli.adoc[] -include::../../../pages/_common-content/report-issue.adoc[] \ No newline at end of file +include::../../../../pages/_common-content/report-issue.adoc[] \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/eventing/consume-produce-events-with-knative-eventing.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/event-orchestration/consume-produce-events-with-knative-eventing.adoc similarity index 95% rename from serverlessworkflow/modules/ROOT/pages/eventing/consume-produce-events-with-knative-eventing.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/event-orchestration/consume-produce-events-with-knative-eventing.adoc index c48ba907f..a82fd5e41 100644 --- a/serverlessworkflow/modules/ROOT/pages/eventing/consume-produce-events-with-knative-eventing.adoc +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/event-orchestration/consume-produce-events-with-knative-eventing.adoc @@ -199,7 +199,7 @@ http://localhost:8080/ You can use this tool to test your {product_name} application locally and verify if the events are being consumed correctly by the workflow. -For more information about testing incoming and outgoing CloudEvents over HTTP, see xref:testing-and-troubleshooting/mocking-http-cloudevents-with-wiremock.adoc[Mocking HTTP CloudEvents sink using WireMock]. 
+For more information about testing incoming and outgoing CloudEvents over HTTP, see xref:use-cases/advanced-developer-use-cases/testing/mocking-http-cloudevents-with-wiremock.adoc[Mocking HTTP CloudEvents sink using WireMock]. [[proc-generating-kn-objects-build-time]] @@ -296,7 +296,7 @@ kn workflow deploy ---- ==== -For more information about building and deploying the workflow application, see xref:cloud/quarkus/build-workflow-image-with-quarkus-cli.adoc[Building workflow images using Quarkus CLI]. +For more information about building and deploying the workflow application, see xref:use-cases/advanced-developer-use-cases/getting-started/build-workflow-image-with-quarkus-cli.adoc[Building workflow images using Quarkus CLI]. -- [[ref-example-sw-event-definition-knative]] @@ -375,9 +375,9 @@ For each consumed event definition, the Knative Eventing add-on generates one Kn == Additional resources -* xref:testing-and-troubleshooting/mocking-http-cloudevents-with-wiremock.adoc[Mocking HTTP CloudEvents sink using WireMock] -* xref:eventing/consume-producing-events-with-kafka.adoc[Consuming and producing events using Apache Kafka] +* xref:use-cases/advanced-developer-use-cases/testing/mocking-http-cloudevents-with-wiremock.adoc[Mocking HTTP CloudEvents sink using WireMock] +* xref:use-cases/advanced-developer-use-cases/event-orchestration/consume-producing-events-with-kafka.adoc[Consuming and producing events using Apache Kafka] * xref:eventing/event-correlation-with-workflows.adoc[Event correlation in {product_name}] * xref:core/working-with-callbacks.adoc[Callback state in {product_name}] -include::../../pages/_common-content/report-issue.adoc[] +include::../../../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/eventing/consume-producing-events-with-kafka.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/event-orchestration/consume-producing-events-with-kafka.adoc similarity index 97% 
rename from serverlessworkflow/modules/ROOT/pages/eventing/consume-producing-events-with-kafka.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/event-orchestration/consume-producing-events-with-kafka.adoc index 4a6deff11..4c07aaeb3 100644 --- a/serverlessworkflow/modules/ROOT/pages/eventing/consume-producing-events-with-kafka.adoc +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/event-orchestration/consume-producing-events-with-kafka.adoc @@ -131,9 +131,9 @@ If all your channels use the same strategy and this strategy differs from the `B == Additional resources -* xref:eventing/consume-produce-events-with-knative-eventing.adoc[Consuming and producing events on Knative Eventing] +* xref:use-cases/advanced-developer-use-cases/event-orchestration/consume-produce-events-with-knative-eventing.adoc[Consuming and producing events on Knative Eventing] * xref:eventing/event-correlation-with-workflows.adoc[Event correlation in {product_name}] * xref:core/working-with-callbacks.adoc[Callback state in {product_name}] -include::../../pages/_common-content/report-issue.adoc[] +include::../../../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/use-cases/orchestration-based-saga-pattern.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/event-orchestration/orchestration-based-saga-pattern.adoc similarity index 99% rename from serverlessworkflow/modules/ROOT/pages/use-cases/orchestration-based-saga-pattern.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/event-orchestration/orchestration-based-saga-pattern.adoc index 1a51d6ab2..8998957d3 100644 --- a/serverlessworkflow/modules/ROOT/pages/use-cases/orchestration-based-saga-pattern.adoc +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/event-orchestration/orchestration-based-saga-pattern.adoc @@ -284,4 
+284,4 @@ When executing the application, you can also verify the log with information rel * xref:core/understanding-workflow-error-handling.adoc[Error handling in {product_name}] -include::../../pages/_common-content/report-issue.adoc[] \ No newline at end of file +include::../../../../pages/_common-content/report-issue.adoc[] \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/quarkus/build-workflow-image-with-quarkus-cli.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/getting-started/build-workflow-image-with-quarkus-cli.adoc similarity index 62% rename from serverlessworkflow/modules/ROOT/pages/cloud/quarkus/build-workflow-image-with-quarkus-cli.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/getting-started/build-workflow-image-with-quarkus-cli.adoc index 457bec56e..4e12aa771 100644 --- a/serverlessworkflow/modules/ROOT/pages/cloud/quarkus/build-workflow-image-with-quarkus-cli.adoc +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/getting-started/build-workflow-image-with-quarkus-cli.adoc @@ -12,7 +12,7 @@ This document describes how to build a Serverless Application Container image using the link:{quarkus_cli_url}[Quarkus CLI]. .Prerequisites -include::../../../pages/_common-content/getting-started-requirement.adoc[] +include::./../../../../pages/_common-content/getting-started-requirement.adoc[] * Latest version of Docker is installed. Alternatively, you can use link:{google_jib_url}[Jib] to build container images. However, Docker is required to build GraalVM native image using the Quarkus native builder image. * Optionally, GraalVM {graalvm_min_version} is installed. @@ -20,53 +20,6 @@ Quarkus provides a few extensions to build container images, such as `Jib`, `doc The examples in this document assume that you have the Quarkus tooling installed. 
For more information about the tooling, see xref:getting-started/getting-familiar-with-our-tooling.adoc[Getting familiar with {product_name} tooling]. -[[proc-using-example-application]] -== Using an example application - -To get started with building workflow images, you can use the link:{kogito_sw_examples_url}/serverless-workflow-greeting-quarkus[`serverless-workflow-greeting-quarkus`] example application. - -NOTE: You can skip the following procedure if you already have a workflow application. - -.Procedure -. Clone the link:{kogito_sw_examples_git_repo_url}[kogito-examples] repository and navigate to the link:{kogito_sw_examples_url}/serverless-workflow-greeting-quarkus[`serverless-workflow-greeting-quarkus`] example application. -+ -.Clone an example application -[source,shell,subs="attributes+"] ----- -git clone --branch main {kogito_sw_examples_git_repo_url} -cd kogito-examples/serverless-workflow-examples/serverless-workflow-greeting-quarkus ----- - -. To run the example application, follow the instructions in xref:getting-started/create-your-first-workflow-service.adoc[Creating your first workflow service]. -. Install the Quarkus command line interface (CLI). For more information, see link:{quarkus_cli_url}[Installing the Quarkus CLI]. -. Add the required Quarkus extension using Quarkus CLI: -+ --- -.Add the Jib extension - -[NOTE] -==== -The `kogito-examples` already have this extension added by default, and can be activated with the `container` Maven profile. 
-==== - -The steps to add the extension in your Serverless Workflow application are: -[source,shell] ----- -quarkus extension add 'container-image-jib' ----- - -After adding the Jib extension, you can verify the newly added dependency in the `pom.xml` file: - -.Verify the Jib extension -[source,xml] ----- - - io.quarkus - quarkus-container-image-jib - ----- --- - [[proc-building-serverless-workflow-application]] == Building the workflow application @@ -192,7 +145,7 @@ In case GraalVM is not installed, you can set the `-Dquarkus.native.container-bu [IMPORTANT] .Configure Docker to use the in-cluster (Remote) Docker daemon ==== -When build Container Images using a remote Docker Daemon, i.e. Minikube, you need to use the following system property instead of `-Dquarkus.native.container-build=true` +When you are building Container Images using a remote Docker Daemon, i.e. Minikube, you need to use the following system property instead of `-Dquarkus.native.container-build=true` .System property to use a remote Docker Daemon [source,shell] @@ -204,72 +157,13 @@ When build Container Images using a remote Docker Daemon, i.e. Minikube, you nee For more information about native builds, see link:{quarkus_native_builds_url}[Building a native executable]. -- -[[proc-testing-serverless-workflow-image]] -== Testing your workflow image - -After building your workflow image using Quarkus CLI, you can test the built image. - -.Prerequisites -* Latest version of Docker is installed. - -.Procedure -. Start JVM and native Container Images. -+ --- -[tabs] -==== -Example JVM image:: -+ -[source,shell] ----- -docker run -it -p 8080:8080 quay.io/kogito/serverless-workflow-greeting-quarkus:1.0 -... -INFO [io.quarkus] (main) serverless-workflow-greeting-quarkus 1.22.1.Final on JVM (powered by Quarkus 2.9.2.Final) started in 1.302s ----- -Example native image:: -+ -[source,shell] ----- -docker run -it -p 8080:8080 quay.io/kogito/serverless-workflow-greeting-quarkus:1.0-native -... 
-INFO [io.quarkus] (main) serverless-workflow-greeting-quarkus 1.22.1.Final native (powered by Quarkus 2.9.2.Final) started in 0.039s ----- -==== - -Note that the startup of native image is faster than the JVM image. --- - -. To invoke the workflow application, execute the following command once the container is started: -+ --- -[tabs] -==== -Example request:: -+ -[source,shell] ----- -curl -X POST -H 'Content-Type:application/json' -H 'Accept:application/json' -d '{"name": "John", "language": "English"}' http://localhost:8080/jsongreet ----- -Example response:: -+ -[source,json] ----- -{ - "id": "2acf710d-7e4a-481d-925c-dfd85a369987", - "workflowdata": { - "name": "John", - "language": "English", - "greeting": "Hello from JSON Workflow, " - } -} ----- -==== --- +Once you have the container image of you {product_name} application, you can proceed to deployments or start testing it locally. == Additional resources -* xref:cloud/quarkus/deploying-on-minikube.adoc[Deploying your {product_name} application on Minikube] -* xref:cloud/quarkus/deploying-on-kubernetes.adoc[Deploying your {product_name} application on Kubernetes] -* xref:cloud/quarkus/deploying-on-openshift.adoc[Deploying your {product_name} application on OpenShift] +* xref:use-cases/advanced-developer-use-cases/getting-started/test-serverless-workflow-quarkus-examples.adoc[] +* xref:use-cases/advanced-developer-use-cases/deployments/deploying-on-minikube.adoc[Deploying your {product_name} application on Minikube] +* xref:use-cases/advanced-developer-use-cases/deployments/deploying-on-kubernetes.adoc[Deploying your {product_name} application on Kubernetes] +* xref:use-cases/advanced-developer-use-cases/deployments/deploying-on-openshift.adoc[Deploying your {product_name} application on OpenShift] -include::../../../pages/_common-content/report-issue.adoc[] +include::../../../../pages/_common-content/report-issue.adoc[] diff --git 
a/serverlessworkflow/modules/ROOT/pages/getting-started/create-your-first-workflow-service.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/getting-started/create-your-first-workflow-service.adoc similarity index 97% rename from serverlessworkflow/modules/ROOT/pages/getting-started/create-your-first-workflow-service.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/getting-started/create-your-first-workflow-service.adoc index e12dcf3df..44a6581a2 100644 --- a/serverlessworkflow/modules/ROOT/pages/getting-started/create-your-first-workflow-service.adoc +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/getting-started/create-your-first-workflow-service.adoc @@ -260,7 +260,7 @@ For more information about Knative workflow CLI, see xref:testing-and-troublesho ==== The SwaggerUI[[swaggerui]] is available at `http://localhost:8080/q/swagger-ui/` when you run the application. + -Also, to deploy and run your workflow application, see xref:cloud/quarkus/deploying-on-minikube.adoc[Deploying workflow application on Minikube] +Also, to deploy and run your workflow application, see xref:use-cases/advanced-developer-use-cases/deployments/deploying-on-minikube.adoc[Deploying workflow application on Minikube] + .Example startup log [source,shell,subs="attributes"] @@ -357,11 +357,11 @@ Note that the `mantra` value is updated without restarting the application, beca == Testing your workflow application To test your workflow application, you can follow the instructions in the -xref:testing-and-troubleshooting/basic-integration-tests-with-restassured.adoc[Testing your workflow application using REST Assured]. +xref:use-cases/advanced-developer-use-cases/testing/basic-integration-tests-with-restassured.adoc[Testing your workflow application using REST Assured]. 
== Additional resources * xref:getting-started/getting-familiar-with-our-tooling.adoc[Getting familiar with {product_name} tooling] * xref:service-orchestration/orchestration-of-openapi-based-services.adoc[Orchestrating the OpenAPI services] -include::../../pages/_common-content/report-issue.adoc[] +include::../../../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/getting-started/test-serverless-workflow-quarkus-examples.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/getting-started/test-serverless-workflow-quarkus-examples.adoc new file mode 100644 index 000000000..ef92915ce --- /dev/null +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/getting-started/test-serverless-workflow-quarkus-examples.adoc @@ -0,0 +1,78 @@ += Testing images of {product_name} example applications using Quarkus CLI + +After building the image of your application using Quarkus CLI, you can use the built image +to run some tests with your application. + +.Prerequisites +* Latest version of Docker is installed. +* Built image of your application. Go to xref:./build-workflow-image-with-quarkus-cli.adoc[] to see how. + +.Procedure +. Start JVM and native Container Images. ++ +-- +[tabs] +==== +Example JVM image:: ++ +[source,shell] +---- +docker run -it -p 8080:8080 quay.io/kogito/serverless-workflow-greeting-quarkus:1.0 +... +INFO [io.quarkus] (main) serverless-workflow-greeting-quarkus 1.22.1.Final on JVM (powered by Quarkus ${quarkus_platform}) started in 1.302s +---- +Example native image:: ++ +[source,shell] +---- +docker run -it -p 8080:8080 quay.io/kogito/serverless-workflow-greeting-quarkus:1.0-native +... +INFO [io.quarkus] (main) serverless-workflow-greeting-quarkus 1.22.1.Final native (powered by Quarkus ${quarkus_platform}) started in 0.039s +---- +==== + +Note that the startup of native image is faster than the JVM image. 
+Docker deploys the image to your local environment and you can now execute commands and start testing +the workflow's behavior. +-- + +. To invoke the example greeting workflow application, execute the following command once the container is started: ++ +-- +[tabs] +==== +Example request:: ++ +[source,shell] +---- +curl -X POST -H 'Content-Type:application/json' -H 'Accept:application/json' -d '{"name": "John", "language": "English"}' http://localhost:8080/jsongreet +---- +Example response:: ++ +[source,json] +---- +{ + "id": "2acf710d-7e4a-481d-925c-dfd85a369987", + "workflowdata": { + "name": "John", + "language": "English", + "greeting": "Hello from JSON Workflow, " + } +} +---- +==== +-- + +This command will trigger the workflow's execution and return its output. + +More complex workflows require different kinds of testing. See the additional resources +on how to extend the test coverage of your {product_name} applications. + + +== Additional resources + +* xref:use-cases/advanced-developer-use-cases/testing/basic-integration-tests-with-restassured.adoc[] +* xref:use-cases/advanced-developer-use-cases/testing/mocking-http-cloudevents-with-wiremock.adoc[] +* xref:use-cases/advanced-developer-use-cases/testing/mocking-openapi-services-with-wiremock.adoc[] + +include::../../../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/getting-started/working-with-serverless-workflow-quarkus-examples.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/getting-started/working-with-serverless-workflow-quarkus-examples.adoc new file mode 100644 index 000000000..7319fb7a8 --- /dev/null +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/getting-started/working-with-serverless-workflow-quarkus-examples.adoc @@ -0,0 +1,47 @@ += Working with {product_name} example application using Quarkus CLI +:compat-mode!: +// Metadata: +:description: Build 
Serverless Application with Quarkus CLI +:keywords: kogito, workflow, quarkus, serverless, quarkus-cli +// links +:quarkus_container_images_url: https://quarkus.io/guides/container-image +:quarkus_native_builds_url: https://quarkus.io/guides/building-native-image +:google_jib_url: https://github.com/GoogleContainerTools/jib +:kogito_sw_examples_git_repo_url: https://github.com/apache/incubator-kie-kogito-examples.git + +This document describes how to build images of {product_name} example applications using the link:{quarkus_cli_url}[Quarkus CLI]. + +.Prerequisites +include::./../../../../pages/_common-content/getting-started-requirement.adoc[] +* Latest version of Docker is installed. Alternatively, you can use link:{google_jib_url}[Jib] to build container images. However, Docker is required to build GraalVM native image using the Quarkus native builder image. +* Optionally, GraalVM {graalvm_min_version} is installed. +* Install the Quarkus command line interface (CLI). For more information, see link:{quarkus_cli_url}[Installing the Quarkus CLI]. + +[[proc-using-example-application]] +== Using an example application + +To get started with building workflow images, you can use the link:{kogito_sw_examples_url}/serverless-workflow-greeting-quarkus[`serverless-workflow-greeting-quarkus`] example application. +However, the same procedure can be applied to any example located in link:{kogito_sw_examples_url}[{product_name} example repository] + +NOTE: You can skip the following procedure if you already have a workflow application. + +.Procedure +. Clone the link:{kogito_sw_examples_git_repo_url}[kogito-examples] repository and navigate to the link:{kogito_sw_examples_url}/serverless-workflow-greeting-quarkus[`serverless-workflow-greeting-quarkus`] example application. 
++ +.Clone an example application +[source,shell,subs="attributes+"] +---- +git clone --branch main {kogito_sw_examples_git_repo_url} +cd kogito-examples/serverless-workflow-examples/serverless-workflow-greeting-quarkus +---- + +. To run the example application, follow the instructions located in the README.md of the examples. Every example application provides a file with instructions on how to run and work with it. +. Play with the example and explore {product_name} capabilities. + + +== Additional resources + +* xref:./build-workflow-image-with-quarkus-cli.adoc[] +* xref:./test-serverless-workflow-quarkus-examples.adoc[] + +include::../../../../pages/_common-content/report-issue.adoc[] \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/index.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/index.adoc new file mode 100644 index 000000000..4a91160ab --- /dev/null +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/index.adoc @@ -0,0 +1,10 @@ += Development of advanced {product_name} Applications +:compat-mode!: +// Metadata: +:description: Workflow Applications in Kubernetes +:keywords: cloud, kubernetes, docker, image, podman, openshift, pipelines +// other + +{product_name} allows developers to implement workflow applications for advanced use cases using Quarkus and Java. + +In this section we will showcase how to implement various requirements for your workflow applications. 
\ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/integrations/camel-routes-integration.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/integrations/camel-routes-integration.adoc similarity index 93% rename from serverlessworkflow/modules/ROOT/pages/integrations/camel-routes-integration.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/integrations/camel-routes-integration.adoc index 009cc550e..53e1bfb35 100644 --- a/serverlessworkflow/modules/ROOT/pages/integrations/camel-routes-integration.adoc +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/integrations/camel-routes-integration.adoc @@ -10,7 +10,7 @@ You can enable Quarkus Camel in your project. .Prerequisites * A workflow application is created. + -For more information about creating a workflow, see xref:getting-started/create-your-first-workflow-service.adoc[Creating your first workflow service]. +For more information about creating a workflow, see xref:use-cases/advanced-developer-use-cases/getting-started/create-your-first-workflow-service.adoc[Creating your first workflow service]. .Procedure . To add the Quarkus Camel to your workflow application, add the `org.kie.kogito:kogito-addons-quarkus-camel` dependency to the `pom.xml` file of your project as follows: @@ -36,7 +36,7 @@ You can add YAML or XML Camel routes to your workflow project. . The route `from` endpoint must be a `direct` component. That's the endpoint producer expected by the workflow engine. . The route response must be in a valid format that the workflow context can understand: + -include::../../pages/_common-content/camel-valid-responses.adoc[] +include::../../../../pages/_common-content/camel-valid-responses.adoc[] The response will be merged into the workflow state context. If it is an array or a complex object, the response will be added to the special attribute `response`. 
@@ -141,4 +141,4 @@ There is an link:{kogito_sw_examples_url}/serverless-workflow-camel-routes[examp * xref:core/custom-functions-support.adoc[Custom functions for your {product_name} service] * xref:core/understanding-jq-expressions.adoc[jq expressions in {product_name}] -include::../../pages/_common-content/report-issue.adoc[] \ No newline at end of file +include::../../../../pages/_common-content/report-issue.adoc[] \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/integrations/custom-functions-knative.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/integrations/custom-functions-knative.adoc similarity index 85% rename from serverlessworkflow/modules/ROOT/pages/integrations/custom-functions-knative.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/integrations/custom-functions-knative.adoc index 94cf40126..793ca2c5a 100644 --- a/serverlessworkflow/modules/ROOT/pages/integrations/custom-functions-knative.adoc +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/integrations/custom-functions-knative.adoc @@ -13,7 +13,7 @@ For more details about the Knative custom function, see xref:core/custom-functio .Prerequisites -include::../cloud/common/_prerequisites.adoc[] +include::../deployments/common/_prerequisites.adoc[] * You have the link:{kogito_sw_examples_url}/serverless-workflow-custom-function-knative/custom-function-knative-service[custom-function-knative-service] project deployed on Knative. For more information on how to deploy a Quarkus project to Knative, see the https://quarkus.io/guides/deploying-to-kubernetes[Quarkus Kubernetes extension documentation]. @@ -33,7 +33,7 @@ include::../cloud/common/_prerequisites.adoc[] -- -. xref:cloud/quarkus/kubernetes-service-discovery.adoc#ref-enabling-kubernetes-service-discovery[Enable the Service Discovery feature]. +. 
xref:use-cases/advanced-developer-use-cases/service-discovery/kubernetes-service-discovery.adoc#ref-enabling-kubernetes-service-discovery[Enable the Service Discovery feature]. . Discover the name of the Knative service that your workflow will invoke. In a terminal window, run the following command: + @@ -99,7 +99,7 @@ Save the Knative service name (`custom-function-knative-service`) to use it in t -- -. Deploy your workflow service to Knative. For more information on how to deploy a {product_name} {product_name} project to Knative, see the xref:cloud/quarkus/deploying-on-kubernetes.adoc[Deploying on Kubernetes]. +. Deploy your workflow service to Knative. For more information on how to deploy a {product_name} project to Knative, see the xref:use-cases/advanced-developer-use-cases/deployments/deploying-on-kubernetes.adoc[Deploying on Kubernetes]. . Submit a request to the workflow service @@ -132,7 +132,7 @@ Knative functions support https://github.com/knative/func/blob/main/docs/functio == Additional resources * xref:core/custom-functions-support.adoc[Custom functions for your {product_name} service] -* xref:cloud/quarkus/deploying-on-minikube.adoc[Deploying your Serverless Workflow application on Minikube] -* xref:cloud/quarkus/deploying-on-kubernetes.adoc[Deploying your Serverless Workflow application on Kubernetes] +* xref:use-cases/advanced-developer-use-cases/deployments/deploying-on-minikube.adoc[Deploying your Serverless Workflow application on Minikube] +* xref:use-cases/advanced-developer-use-cases/deployments/deploying-on-kubernetes.adoc[Deploying your Serverless Workflow application on Kubernetes] -include::../_common-content/report-issue.adoc[] \ No newline at end of file +include::../../../_common-content/report-issue.adoc[] \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/integrations/expose-metrics-to-prometheus.adoc 
b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/integrations/expose-metrics-to-prometheus.adoc similarity index 96% rename from serverlessworkflow/modules/ROOT/pages/integrations/expose-metrics-to-prometheus.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/integrations/expose-metrics-to-prometheus.adoc index a4d5c35df..46e2385ec 100644 --- a/serverlessworkflow/modules/ROOT/pages/integrations/expose-metrics-to-prometheus.adoc +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/integrations/expose-metrics-to-prometheus.adoc @@ -24,7 +24,7 @@ You can enable the metrics in your workflow application. .Prerequisites * A workflow application is created. + -For more information about creating a workflow, see xref:getting-started/create-your-first-workflow-service.adoc[Creating your first workflow service]. +For more information about creating a workflow, see xref:use-cases/advanced-developer-use-cases/getting-started/create-your-first-workflow-service.adoc[Creating your first workflow service]. .Procedure . To add the metrics to your workflow application, add the `org.kie.kogito:kogito-addons-quarkus-monitoring-prometheus` dependency to the `pom.xml` file of your project: @@ -176,4 +176,4 @@ kogito_process_instance_completed_total{app_id="default-process-monitoring-liste Internally, {product_name} uses Quarkus Micrometer extension, which also exposes built-in metrics. You can disable the Micrometer metrics in {product_name}. For more information, see link:{quarkus_micrometer_url}[Quarkus - Micrometer Metrics]. 
==== -include::../../pages/_common-content/report-issue.adoc[] \ No newline at end of file +include::../../../../pages/_common-content/report-issue.adoc[] \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/integrations/serverless-dashboard-with-runtime-data.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/integrations/serverless-dashboard-with-runtime-data.adoc similarity index 95% rename from serverlessworkflow/modules/ROOT/pages/integrations/serverless-dashboard-with-runtime-data.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/integrations/serverless-dashboard-with-runtime-data.adoc index e0733a241..a583cbbd1 100644 --- a/serverlessworkflow/modules/ROOT/pages/integrations/serverless-dashboard-with-runtime-data.adoc +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/integrations/serverless-dashboard-with-runtime-data.adoc @@ -36,7 +36,7 @@ You can build dashboards to monitor the data of your workflows using metrics. .Prerequisites * A workflow application is created. + -For more information about creating a workflow, see xref:getting-started/create-your-first-workflow-service.adoc[Creating your first workflow service]. +For more information about creating a workflow, see xref:use-cases/advanced-developer-use-cases/getting-started/create-your-first-workflow-service.adoc[Creating your first workflow service]. .Procedure . To enable metrics for your workflows application add `org.kie.kogito:kogito-addons-quarkus-monitoring-prometheus` dependency in `pom.xml` file of your application: @@ -245,7 +245,7 @@ The Data Index service uses GraphQL, so that dashbuilder can connect with the se .Prerequisites * A workflow application is created. + -For more information about creating a workflow, see xref:getting-started/create-your-first-workflow-service.adoc[Creating your first workflow service]. 
+For more information about creating a workflow, see xref:use-cases/advanced-developer-use-cases/getting-started/create-your-first-workflow-service.adoc[Creating your first workflow service]. .Procedure . Go to the Data Index GraphQL interface (default to `http://localhost:8180/graphiql`) and test your query. @@ -380,7 +380,7 @@ For more examples of dashboards, see link:{dashbuilder_url}[Dashbuilder] website == Additional resources -xref:getting-started/create-your-first-workflow-service.adoc[Creating your first workflow service] +xref:use-cases/advanced-developer-use-cases/getting-started/create-your-first-workflow-service.adoc[Creating your first workflow service] -include::../../pages/_common-content/report-issue.adoc[] \ No newline at end of file +include::../../../../pages/_common-content/report-issue.adoc[] \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/job-services/quarkus-extensions.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/job-service/quarkus-extensions.adoc similarity index 87% rename from serverlessworkflow/modules/ROOT/pages/job-services/quarkus-extensions.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/job-service/quarkus-extensions.adoc index 2bfaac7e2..853a10792 100644 --- a/serverlessworkflow/modules/ROOT/pages/job-services/quarkus-extensions.adoc +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/job-service/quarkus-extensions.adoc @@ -29,7 +29,7 @@ If your workflows are not using timer-based actions, like timeouts, there is no To interact with the Job Service by sending cloud events over the knative eventing system you must follow these steps: -. Be sure that you have read the xref:eventing/consume-produce-events-with-knative-eventing.adoc[Consuming and producing events on Knative Eventing] guide, and that you have configured the project accordingly. +. 
Be sure that you have read the xref:use-cases/advanced-developer-use-cases/event-orchestration/consume-produce-events-with-knative-eventing.adoc[Consuming and producing events on Knative Eventing] guide, and that you have configured the project accordingly. . Add the `kogito-addons-quarkus-jobs-knative-eventing` extension to your Quarkus Workflow Project using any of the following alternatives: @@ -76,7 +76,7 @@ If this variable is not present, the default value `http://localhost:8280/v2/job ==== [start=2] -. Build your project and locate the automatically generated `kogito.yml` and `knative.yml` files in the `/target/kubernetes` directory of your project, xref:eventing/consume-produce-events-with-knative-eventing.adoc#proc-generating-kn-objects-build-time[see]. +. Build your project and locate the automatically generated `kogito.yml` and `knative.yml` files in the `/target/kubernetes` directory of your project, xref:use-cases/advanced-developer-use-cases/event-orchestration/consume-produce-events-with-knative-eventing.adoc#proc-generating-kn-objects-build-time[see]. [source,shell] ---- @@ -93,14 +93,14 @@ kubectl apply -f target/kogito.yml kubectl apply -f target/knative.yml ---- -You can see a full example of this interaction mode configuration in the xref:use-cases/timeout-showcase-example.adoc#execute-quarkus-project-standalone-services[Quarkus Workflow Project with standalone services] example project. +You can see a full example of this interaction mode configuration in the xref:use-cases/advanced-developer-use-cases/timeouts/timeout-showcase-example.adoc#execute-quarkus-project-standalone-services[Quarkus Workflow Project with standalone services] example project. [#kogito-addons-quarkus-jobs-messaging] == Kafka messaging interaction To interact with the Job Service by sending cloud events over the kafka messaging system you must follow these steps: -. 
Be sure that you have read the xref:eventing/consume-producing-events-with-kafka.adoc[Consuming and producing events with Kafka] guide, and you have configured the project accordingly. +. Be sure that you have read the xref:use-cases/advanced-developer-use-cases/event-orchestration/consume-producing-events-with-kafka.adoc[Consuming and producing events with Kafka] guide, and you have configured the project accordingly. . Add the `quarkus-smallrye-reactive-messaging-kafka` and `kogito-addons-quarkus-jobs-messaging` extensions to your Quarkus Workflow Project using any of the following alternatives. @@ -233,6 +233,6 @@ quarkus extension add kogito-addons-quarkus-jobs-management [start=3] . Build and deploy your workflow application using any of the available procedures. -You can see a full example of Job Service embedded usage in the xref:use-cases/timeout-showcase-example.adoc#execute-quarkus-project-embedded-services[Quarkus Workflow Project with embedded services] example project. +You can see a full example of Job Service embedded usage in the xref:use-cases/advanced-developer-use-cases/timeouts/timeout-showcase-example.adoc#execute-quarkus-project-embedded-services[Quarkus Workflow Project with embedded services] example project. 
-include::../../pages/_common-content/report-issue.adoc[] +include::../../../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/persistence/integration-tests-with-postgresql.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/persistence/integration-tests-with-postgresql.adoc similarity index 93% rename from serverlessworkflow/modules/ROOT/pages/persistence/integration-tests-with-postgresql.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/persistence/integration-tests-with-postgresql.adoc index e04766fa5..9bfb49899 100644 --- a/serverlessworkflow/modules/ROOT/pages/persistence/integration-tests-with-postgresql.adoc +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/persistence/integration-tests-with-postgresql.adoc @@ -4,9 +4,9 @@ :description: Serverless Workflow integration test with PostgreSQL :keywords: kogito, workflow, quarkus, serverless, quarkus-cli, test, integration, postgresql, persistence // Referenced documentation pages -:basic_integration_test_with_restassured_guide: xref:testing-and-troubleshooting/basic-integration-tests-with-restassured.adoc -:getting_started_create_first_workflow_guide: xref:getting-started/create-your-first-workflow-service.adoc -:persistence_with_postgresql_guide: xref:persistence/persistence-with-postgresql.adoc +:basic_integration_test_with_restassured_guide: xref:use-cases/advanced-developer-use-cases/testing/basic-integration-tests-with-restassured.adoc +:getting_started_create_first_workflow_guide: xref:use-cases/advanced-developer-use-cases/getting-started/create-your-first-workflow-service.adoc +:persistence_with_postgresql_guide: xref:use-cases/advanced-developer-use-cases/persistence/persistence-with-postgresql.adoc // External pages :quarkus_testing_guide_url: {quarkus_guides_base_url}/getting-started-testing :quarkus_testing_guide_integration_test_url: 
{quarkus_testing_guide_url}#quarkus-integration-test @@ -41,7 +41,7 @@ This document describes the process of launching and testing the artifact that i You can test your workflow application using PostgreSQL persistence. .Prerequisites -include::../../pages/_common-content/getting-started-requirement.adoc[] +include::../../../../pages/_common-content/getting-started-requirement.adoc[] * Workflow application persistence is enabled using PostgreSQL. + For more information, see {persistence_with_postgresql_guide}[Running workflow service using PostgreSQL]. @@ -214,4 +214,4 @@ mvn clean verify * {basic_integration_test_with_restassured_guide}[Testing your workflow application using REST Assured] * link:{quarkus_testing_guide_url}[Testing a Quarkus application] -include::../../pages/_common-content/report-issue.adoc[] +include::../../../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/persistence/persistence-with-postgresql.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/persistence/persistence-with-postgresql.adoc similarity index 92% rename from serverlessworkflow/modules/ROOT/pages/persistence/persistence-with-postgresql.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/persistence/persistence-with-postgresql.adoc index b8023ea56..efb8341c0 100644 --- a/serverlessworkflow/modules/ROOT/pages/persistence/persistence-with-postgresql.adoc +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/persistence/persistence-with-postgresql.adoc @@ -35,7 +35,7 @@ git clone git@github.com:apache/incubator-kie-kogito-examples.git ---- .Prerequisites -include::../../pages/_common-content/getting-started-requirement.adoc[] +include::../../../../pages/_common-content/getting-started-requirement.adoc[] * Docker is installed. * link:{postgresql_url}[PostgreSQL] is installed. 
For information about PostgreSQL installation and configuration, see link:{postgresql_doc_url}[PostgreSQL documentation]. @@ -106,7 +106,7 @@ quarkus.flyway.migrate-at-start=true quarkus.datasource.db-kind=postgresql ---- -You can find more details regarding the PostgreSQL database schema migration in the xref:persistence/postgresql-flyway-migration.adoc[Flyway migration guide]. +You can find more details regarding the PostgreSQL database schema migration in the xref:use-cases/advanced-developer-use-cases/persistence/postgresql-flyway-migration.adoc[Flyway migration guide]. -- . Optional: To handle the concurrent requests to shared workflow instances, enable the persistence-enabled optimistic locking for concurrency control using the version field in the database. @@ -166,7 +166,7 @@ The following table serves as a quick reference of commonly used persistence con [[postgresql-persistence-additional-resources]] == Additional resources -* xref:persistence/integration-tests-with-postgresql.adoc[{product_name} integration test using PostgreSQL] -* xref:persistence/postgresql-flyway-migration.adoc[Migrating Your PostgreSQL Database with a Kogito upgrade] +* xref:use-cases/advanced-developer-use-cases/persistence/integration-tests-with-postgresql.adoc[{product_name} integration test using PostgreSQL] +* xref:use-cases/advanced-developer-use-cases/persistence/postgresql-flyway-migration.adoc[Migrating Your PostgreSQL Database] -include::../../pages/_common-content/report-issue.adoc[] \ No newline at end of file +include::../../../../pages/_common-content/report-issue.adoc[] \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/persistence/postgresql-flyway-migration.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/persistence/postgresql-flyway-migration.adoc similarity index 98% rename from serverlessworkflow/modules/ROOT/pages/persistence/postgresql-flyway-migration.adoc rename to 
serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/persistence/postgresql-flyway-migration.adoc index 82b101842..495c3dc84 100644 --- a/serverlessworkflow/modules/ROOT/pages/persistence/postgresql-flyway-migration.adoc +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/persistence/postgresql-flyway-migration.adoc @@ -101,4 +101,4 @@ NOTE: Be careful when enabling this feature as it removes the safety net which e * link:{flyway_baseline_migration_url}[Baseline Migrations Documentation]. * link:{flyway_migrate_existing_url}[Existing Database Setup Documentation]. -include::../../pages/_common-content/report-issue.adoc[] +include::../../../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/quarkus/kubernetes-service-discovery.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/service-discovery/kubernetes-service-discovery.adoc similarity index 98% rename from serverlessworkflow/modules/ROOT/pages/cloud/quarkus/kubernetes-service-discovery.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/service-discovery/kubernetes-service-discovery.adoc index b044007a4..a6b302fc5 100644 --- a/serverlessworkflow/modules/ROOT/pages/cloud/quarkus/kubernetes-service-discovery.adoc +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/service-discovery/kubernetes-service-discovery.adoc @@ -323,7 +323,7 @@ This implementation retrieves information from the application's configuration, == Additional resources -* xref:service-orchestration/configuring-openapi-services-endpoints.adoc[Configuring the OpenAPI services endpoints] +* xref:use-cases/advanced-developer-use-cases/service-orchestration/configuring-openapi-services-endpoints-with-quarkus.adoc[Configuring the OpenAPI services endpoints with Quarkus] -include::../../../pages/_common-content/report-issue.adoc[] 
+include::../../../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/service-orchestration/configuring-openapi-services-endpoints-with-quarkus.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/service-orchestration/configuring-openapi-services-endpoints-with-quarkus.adoc new file mode 100644 index 000000000..c4414644a --- /dev/null +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/service-orchestration/configuring-openapi-services-endpoints-with-quarkus.adoc @@ -0,0 +1,327 @@ += Configuring the OpenAPI services endpoints in different environments + +You can use different MicroProfile ConfigSources, such as environment variables and Kubernetes ConfigMaps, and MicroProfile Config profiles to configure the OpenAPI services in different environments. For more information about MicroProfile ConfigSources, see link:https://download.eclipse.org/microprofile/microprofile-config-2.0/microprofile-config-spec-2.0.html#configsource[ConfigSources]. + +[IMPORTANT] +==== +Some operating systems allow only alphabetic characters or an underscore (_) in environment variables. Other characters such as `.` and `/` are not allowed. You must use the link:https://download.eclipse.org/microprofile/microprofile-config-2.0/microprofile-config-spec-2.0.html#default_configsources.env.mapping[Environment Variables Mapping Rules] to set the value of a configuration property that contains a name with such characters. +==== + +The testing procedure described in this document is based on the `serverless-workflow-stock-profit` example application in link:{kogito_sw_examples_url}/serverless-workflow-stock-profit[GitHub repository]. The `serverless-workflow-stock-profit` example application is a workflow that computes the profit for a given stock based on an existing stock portfolio. 
+ +The `serverless-workflow-stock-profit` example application sends request to the following services: + +* `stock-portfolio-service`: Calculates the stock portfolio profit for a given stock based on the current stock price. +* `stock-service`: Retrieves the current stock price. + +Developing an application using a service that returns different results every time can be difficult, therefore the `stock-service` uses the following implementations depending on the environment. + +* `real-stock-service` (default implementation): Returns the real stock price. This service returns a random price every time to simulate a real stock service. This implementation is used in normal or production environment. +* `fake-stock-service`: Returns the same price every time. This implementation is used in the development environment. + +The `stock-profit` service contains the following workflow definition: + +.Workflow definition in `stock-profit` service +[source,json] +---- +{ + "id": "stockprofit", + "specVersion": "0.8", + "version": "2.0.0-SNAPSHOT", + "name": "Stock profit Workflow", + "start": "GetStockPrice", + "functions": [ + { + "name": "getStockPriceFunction", + "operation": "openapi/stock-svc.yaml#getStockPrice" <1> + }, + { + "name": "getProfitFunction", + "operation": "openapi/stock-portfolio-svc.yaml#getStockProfit" <2> + } + ], + "states": [ + { + "name": "GetStockPrice", + "type": "operation", + "actionMode": "sequential", + "actions": [ + { + "name": "getStockPrice", + "functionRef": { + "refName": "getStockPriceFunction", + "arguments": { + "symbol": ".symbol" + } + } + } + ], + "transition": "ComputeProfit" + }, + { + "name": "ComputeProfit", + "type": "operation", + "actionMode": "sequential", + "actions": [ + { + "name": "getStockProfit", + "functionRef": { + "refName": "getProfitFunction", + "arguments": { + "symbol": ".symbol", + "currentPrice": ".currentPrice" + } + } + } + ], + "end": true + } + ] +} +---- + +<1> Defines the `stock-service` service 
operation +<2> Defines the `stock-portfolio-service` service operation + +{product_name} leverages Quarkus profiles to configure the workflow application depending on the target environment. + +To set properties for different profiles, each property needs to be prefixed with a percentage (%) followed by the profile name and a period (.) in the syntax as `%.config.name`. By default, Quarkus provides the following profiles that activate automatically in certain conditions: + +* `dev`: Activates in development mode, such as `quarkus:dev` +* `test`: Activates when tests are running +* `prod` (default profile): Activates when not running in development or test mode + +You can also create additional profiles and activate them using the `quarkus.profile` configuration property. For more information about Quarkus profiles, see link:{quarkus_guides_profiles_url}[Profiles] in the Quarkus Configuration reference guide. + +[[proc-config-openapi-services-defining-urls]] +== Defining URLs of the services in different environments + +You can define the URLs of the services in different environments by using profiles. + +.Procedure +. Create a file named `application.properties` in the `src/main/resources` directory of the workflow project, if the file does not exist. + +. In the `application.properties` file, add the OpenAPI configuration for the default environment: ++ +-- +.Example properties in `application.properties` file +[source,properties] +---- +quarkus.rest-client.stock_svc_yaml.url=http://localhost:8383/ <1> +quarkus.rest-client.stock_portfolio_svc_yaml.url=http://localhost:8282/ +---- + +<1> URL of the `real-stock-service` service +-- + +. 
In the `application.properties` file, add the OpenAPI configuration for the `dev` environment: ++ +-- +.Example properties for development environment +[source,properties] +---- +%dev.quarkus.rest-client.stock_svc_yaml.url=http://localhost:8181/ <1> +---- + +<1> URL of the `fake-stock-service` service + +[NOTE] +==== +The `%dev.` prefix indicates the `dev` profile configuration, which is used when you run `mvn quarkus:dev` or `quarkus dev`. +==== +-- + +[[proc-config-openapi-services-running-the-services]] +== Running the services + +After defining the URLs of the services, you can run the services that the workflow sends request to. + +.Prerequisites +* URLs of the services in the different environments are defined. ++ +For more information, see <>. + +.Procedure +. In a separate command terminal window, run the `stock-portfolio-service` service: ++ +-- +Run the `stock-portfolio-service` service +[source,shell] +---- +cd stock-portfolio-service +mvn quarkus:dev -Ddebug=false +---- + +You can access the `stock-portfolio-service` service at `http://localhost:8282/`. +-- + +. In a separate command terminal window, run the `real-stock-service` service: ++ +-- +Run `real-stock-service` service +[source,shell] +---- +cd real-stock-service +mvn quarkus:dev -Ddebug=false +---- + +You can access the `real-stock-service` service at `http://localhost:8383/`. +-- + +. In a separate command terminal window, run the `fake-stock-service` service: ++ +-- +.Run `fake-stock-service` service +[source,shell] +---- +cd fake-stock-service +mvn quarkus:dev -Ddebug=false +---- + +You can access the `fake-stock-service` service at `http://localhost:8181/`. 
+-- + +[[proc-config-openapi-services-running-sw-application-in-development-mode]] +== Running workflow application in development mode + +When you define `%dev.quarkus.rest-client.stock_svc_yaml.url=http://localhost:8181/`, the `fake-stock-service` service is used in the development mode and you get the same result every time you run the workflow. Using this example, you can run the workflow application in development mode. + +.Prerequisites +* Services that the workflow application sends requests to are started. ++ +For more information, see <>. + +.Procedure +. In a separate command terminal window, run the workflow application in development mode: ++ +-- +.Run workflow application in development mode +[source,shell] +---- +cd stock-profit +mvn quarkus:dev -Ddebug=false +---- +-- + +. In a separate command terminal window, send a request to the workflow application: ++ +-- +.Example request +[source,shell] +---- +curl -X 'POST' \ + 'http://localhost:8080/stockprofit' \ + -H 'accept: */*' \ + -H 'Content-Type: application/json' \ + -d '{ "symbol": "KGTO" }' +---- + +.Example response +[source,json] +---- +{"id":"5ab5dcb8-5952-4730-b526-cace363774bb","workflowdata":{"symbol":"KGTO","currentPrice":75,"profit":"50%"}} +---- + +Note that, in the previous example, `fake-stock-service` is used, therefore, the computed `profit` property is the same no matter how many times you run the workflow. +-- + +[[proc-config-openapi-services-running-sw-application-in-production-mode]] +=== Running workflow application in production mode + +When you define `quarkus.rest-client.stock_svc_yaml.url=http://localhost:8383/`, the `real-stock-service` service is used in the normal or production mode and you get different results every time you run the workflow. Using this example, you can run the workflow application in normal or production mode. + +.Prerequisites +* Services that the workflow application sends requests to are started. ++ +For more information, see <>. + +.Procedure +. 
In a separate command terminal window, package the workflow application to be run as fat JAR: ++ +-- +.Package workflow application +[source,shell] +---- +cd stock-profit +mvn package +---- +-- + +. In a separate command terminal window, run the workflow application in normal or production mode: ++ +-- +.Run workflow application in normal or production mode +[source,shell] +---- +java -jar target/quarkus-app/quarkus-run.jar +---- +-- + +. In a separate command terminal window, send a request to the workflow application: ++ +-- +.Example request +[source,shell] +---- +curl -X 'POST' \ + 'http://localhost:8080/stockprofit' \ + -H 'accept: */*' \ + -H 'Content-Type: application/json' \ + -d '{ "symbol": "KGTO" }' +---- + +.Example response +[source,json] +---- +{"id":"a80c95d6-51fd-4ca9-b689-f779929c9937","workflowdata":{"symbol":"KGTO","currentPrice":59.36,"profit":"19%"}} +---- + +Note that, in the previous example, the `real-stock-service` is used, therefore, the computed `profit` property is different every time you run the workflow. +-- + +[[proc-define-urls-using-environment-variables]] +=== Defining URLs of services in different environments using environment variables + +You can define the URLs of the services in different environments using profiles and environment variables. + +.Prerequisites +* Services that the workflow application sends requests to are started. ++ +For more information, see <>. + +.Procedure +. 
In a separate command terminal window, run the workflow application in development mode, overwriting the property defined in the `application.properties` file using an environment variable: ++ +-- +.Run the workflow application in development mode +[source,shell] +---- +cd stock-profit +export _DEV_QUARKUS_REST_CLIENT_STOCK_SVC_YAML_URL=http://localhost:8383/ <1> +mvn quarkus:dev -Ddebug=false +---- + +<1> Overwrite the `%dev.quarkus.rest-client.stock_svc_yaml.url=http://localhost:8181/` defined in the `application.properties` file using an environment variable, which is pointing to `real-stock-service`. +-- + +. In a separate command terminal window, send a request to the workflow application: ++ +-- +.Example request +[source,shell] +---- +curl -X 'POST' \ + 'http://localhost:8080/stockprofit' \ + -H 'accept: */*' \ + -H 'Content-Type: application/json' \ + -d '{ "symbol": "KGTO" }' +---- + +.Example response +[source,json] +---- +{"id":"5ab5dcb8-5952-4730-b526-cace363774bb","workflowdata":{"symbol":"KGTO","currentPrice":56.35,"profit":"13%"}} +---- + +Note that, in the previous example, you overwrote the property defined in the `application.properties` file to point to `real-stock-service`, therefore, the computed `profit` property is different every time you run the workflow. 
+-- \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/service-orchestration/orchestration-of-grpc-services.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/service-orchestration/orchestration-of-grpc-services.adoc similarity index 99% rename from serverlessworkflow/modules/ROOT/pages/service-orchestration/orchestration-of-grpc-services.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/service-orchestration/orchestration-of-grpc-services.adoc index aec731075..22355e13d 100644 --- a/serverlessworkflow/modules/ROOT/pages/service-orchestration/orchestration-of-grpc-services.adoc +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/service-orchestration/orchestration-of-grpc-services.adoc @@ -258,4 +258,4 @@ curl -X POST -H 'Content-Type:application/json' -H 'Accept:application/json' -d ---- -include::../../pages/_common-content/report-issue.adoc[] +include::../../../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/testing-and-troubleshooting/basic-integration-tests-with-restassured.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/testing/basic-integration-tests-with-restassured.adoc similarity index 90% rename from serverlessworkflow/modules/ROOT/pages/testing-and-troubleshooting/basic-integration-tests-with-restassured.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/testing/basic-integration-tests-with-restassured.adoc index b427f1631..39925ee6a 100644 --- a/serverlessworkflow/modules/ROOT/pages/testing-and-troubleshooting/basic-integration-tests-with-restassured.adoc +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/testing/basic-integration-tests-with-restassured.adoc @@ -32,7 +32,7 @@ The following procedure describes how to test a workflow application that expose ---- 
.Prerequisites -include::../../pages/_common-content/getting-started-requirement.adoc[] +include::../../../../pages/_common-content/getting-started-requirement.adoc[] [NOTE] ==== @@ -139,7 +139,7 @@ mvn clean verify [INFO] --- maven-jar-plugin:2.4:jar (default-jar) @ serverless-workflow-testing-with-rest-assured --- [INFO] Building jar: /home/user/dev/apache/kogito-examples/serverless-workflow-examples/serverless-workflow-testing-with-rest-assured/target/serverless-workflow-testing-with-rest-assured-1.0.jar [INFO] -[INFO] --- quarkus-maven-plugin:2.9.2.Final:build (default) @ serverless-workflow-testing-with-rest-assured --- +[INFO] --- quarkus-maven-plugin:${quarkus_version}.Final:build (default) @ serverless-workflow-testing-with-rest-assured --- [INFO] Performed addonsConfig discovery, found: AddonsConfig{usePersistence=false, useTracing=false, useMonitoring=false, usePrometheusMonitoring=false, useCloudEvents=true, useExplainability=false, useProcessSVG=false, useEventDrivenDecisions=false, useEventDrivenRules=false} [INFO] Generator discovery performed, found [processes] [INFO] [io.quarkus.deployment.QuarkusAugmentor] Quarkus augmentation completed in 2669ms @@ -156,10 +156,10 @@ If you see a similar output, this means your test is executed successfully. 
== Additional resources -* xref:testing-and-troubleshooting/mocking-openapi-services-with-wiremock.adoc[Mocking OpenAPI services using WireMock] -* xref:testing-and-troubleshooting/mocking-http-cloudevents-with-wiremock.adoc[Mocking HTTP CloudEvents sink using WireMock] -* xref:persistence/integration-tests-with-postgresql.adoc[{product_name} integration test using PostgreSQL] +* xref:use-cases/advanced-developer-use-cases/testing/mocking-openapi-services-with-wiremock.adoc[Mocking OpenAPI services using WireMock] +* xref:use-cases/advanced-developer-use-cases/testing/mocking-http-cloudevents-with-wiremock.adoc[Mocking HTTP CloudEvents sink using WireMock] +* xref:use-cases/advanced-developer-use-cases/persistence/integration-tests-with-postgresql.adoc[{product_name} integration test using PostgreSQL] * link:https://quarkus.io/guides/getting-started-testing[Testing a Quarkus application] * link:https://rest-assured.io/#docs[REST Assured Documentation] -include::../../pages/_common-content/report-issue.adoc[] \ No newline at end of file +include::../../../../pages/_common-content/report-issue.adoc[] \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/testing-and-troubleshooting/mocking-http-cloudevents-with-wiremock.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/testing/mocking-http-cloudevents-with-wiremock.adoc similarity index 95% rename from serverlessworkflow/modules/ROOT/pages/testing-and-troubleshooting/mocking-http-cloudevents-with-wiremock.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/testing/mocking-http-cloudevents-with-wiremock.adoc index 145a30414..d3d7383f2 100644 --- a/serverlessworkflow/modules/ROOT/pages/testing-and-troubleshooting/mocking-http-cloudevents-with-wiremock.adoc +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/testing/mocking-http-cloudevents-with-wiremock.adoc @@ -4,11 +4,11 @@ :description: 
Mocking HTTP CloudEvents sink with WireMock :keywords: kogito, workflow, quarkus, serverless, test, integration, wiremock, cloudevents // Referenced documentation pages -:basic_integration_test_with_restassured_guide: xref:testing-and-troubleshooting/basic-integration-tests-with-restassured.adoc -:mocking_openapi_services_with_wiremock_guide: xref:testing-and-troubleshooting/mocking-openapi-services-with-wiremock.adoc -:getting_started_create_first_workflow_guide: xref:getting-started/create-your-first-workflow-service.adoc -:consume-produce-events-with-knative-eventing_guide: xref:eventing/consume-produce-events-with-knative-eventing.adoc -:mocking_http_cloudevents_with_wiremock_test_class: xref:testing-and-troubleshooting/mocking-http-cloudevents-with-wiremock.adoc#ref-create_test_class +:basic_integration_test_with_restassured_guide: xref:use-cases/advanced-developer-use-cases/testing/basic-integration-tests-with-restassured.adoc +:mocking_openapi_services_with_wiremock_guide: xref:use-cases/advanced-developer-use-cases/testing/mocking-openapi-services-with-wiremock.adoc +:getting_started_create_first_workflow_guide: xref:use-cases/advanced-developer-use-cases/getting-started/create-your-first-workflow-service.adoc +:consume-produce-events-with-knative-eventing_guide: xref:use-cases/advanced-developer-use-cases/event-orchestration/consume-produce-events-with-knative-eventing.adoc +:mocking_http_cloudevents_with_wiremock_test_class: xref:use-cases/advanced-developer-use-cases/testing/mocking-http-cloudevents-with-wiremock.adoc#ref-create_test_class // External pages :knative_sink_binding_overview_url: https://knative.dev/docs/eventing/sinks/ :knative_sink_binding_impl_url: https://knative.dev/docs/eventing/custom-event-source/sinkbinding/ @@ -326,4 +326,4 @@ After declaring the verifications on received events, the test successfully ends * link:{quarkus_testing_guide_url}[Testing a Quarkus application] * link:{knative_eventing_components_url}[Knative Eventing 
components interaction: Source, Trigger, Broker, and Sink] -include::../../pages/_common-content/report-issue.adoc[] +include::../../../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/testing-and-troubleshooting/mocking-openapi-services-with-wiremock.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/testing/mocking-openapi-services-with-wiremock.adoc similarity index 92% rename from serverlessworkflow/modules/ROOT/pages/testing-and-troubleshooting/mocking-openapi-services-with-wiremock.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/testing/mocking-openapi-services-with-wiremock.adoc index 56e8fb1cd..e29ff3238 100644 --- a/serverlessworkflow/modules/ROOT/pages/testing-and-troubleshooting/mocking-openapi-services-with-wiremock.adoc +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/testing/mocking-openapi-services-with-wiremock.adoc @@ -8,7 +8,7 @@ :quarkus_test_resource_url: https://quarkus.io/guides/getting-started-testing#quarkus-test-resource // Referenced documentation pages. :getting-familiar-with-our-tooling: xref:getting-started/getting-familiar-with-our-tooling.adoc -:create-your-first-workflow-service: xref:getting-started/create-your-first-workflow-service.adoc +:create-your-first-workflow-service: xref:use-cases/advanced-developer-use-cases/getting-started/create-your-first-workflow-service.adoc :orchestration-of-openapi-based-services: xref:service-orchestration/orchestration-of-openapi-based-services.adoc This document describes how to mock OpenAPI services using WireMock. The testing procedure described in this document is based on the link:{kogito_sw_examples_url}/serverless-workflow-service-calls-quarkus[`serverless-workflow-service-calls-quarkus`] example application. 
@@ -30,7 +30,7 @@ The procedure in this section describes how you can add WireMock to your workflo .Prerequisites -include::../../pages/_common-content/getting-started-requirement.adoc[] +include::../../../../pages/_common-content/getting-started-requirement.adoc[] .Procedure . Add the following WireMock dependency to the `pom.xml` file of your project: @@ -119,7 +119,7 @@ In the process of adding a mocked OpenAPI service to your tests, you need to sta The procedure in this section describes how you can start the WireMock server as a `QuarkusTestResource`. For more information about `QuarkusTestResource`, see link:{quarkus_test_resource_url}[Starting services before the Quarkus application starts] document. .Prerequisites -include::../../pages/_common-content/getting-started-requirement.adoc[] +include::../../../../pages/_common-content/getting-started-requirement.adoc[] * WireMock dependency is added in the `pom.xml` file. @@ -220,7 +220,7 @@ You can also start the WireMock server to be used in a specific test. .Prerequisites -include::../../pages/_common-content/getting-started-requirement.adoc[] +include::../../../../pages/_common-content/getting-started-requirement.adoc[] * WireMock dependency is added in the `pom.xml` file. @@ -295,10 +295,10 @@ class CountryServiceWorkflowTest { ---- -- -You can test your workflow application using the instructions described in xref:testing-and-troubleshooting/basic-integration-tests-with-restassured.adoc[Testing your workflow application using REST Assured] document. +You can test your workflow application using the instructions described in xref:use-cases/advanced-developer-use-cases/testing/basic-integration-tests-with-restassured.adoc[Testing your workflow application using REST Assured] document. 
== Additional resources -* xref:getting-started/create-your-first-workflow-service.adoc[Creating your first workflow service] -* xref:testing-and-troubleshooting/basic-integration-tests-with-restassured.adoc[Testing your workflow application using REST Assured] +* xref:use-cases/advanced-developer-use-cases/getting-started/create-your-first-workflow-service.adoc[Creating your first workflow service] +* xref:use-cases/advanced-developer-use-cases/testing/basic-integration-tests-with-restassured.adoc[Testing your workflow application using REST Assured] -include::../../pages/_common-content/report-issue.adoc[] \ No newline at end of file +include::../../../../pages/_common-content/report-issue.adoc[] \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/use-cases/timeout-showcase-example.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/timeouts/timeout-showcase-example.adoc similarity index 99% rename from serverlessworkflow/modules/ROOT/pages/use-cases/timeout-showcase-example.adoc rename to serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/timeouts/timeout-showcase-example.adoc index b20e66741..9cf54752a 100644 --- a/serverlessworkflow/modules/ROOT/pages/use-cases/timeout-showcase-example.adoc +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/advanced-developer-use-cases/timeouts/timeout-showcase-example.adoc @@ -311,4 +311,4 @@ cd kogito-examples/serverless-workflow-examples/serverless-workflow-timeouts-sho * xref:core/timeouts-support.adoc[Timeouts support in {product_name}] -include::../../pages/_common-content/report-issue.adoc[] \ No newline at end of file +include::../../../../pages/_common-content/report-issue.adoc[] \ No newline at end of file