diff --git a/serverlessworkflow/antora.yml b/serverlessworkflow/antora.yml index eb283e70b..4824117e8 100644 --- a/serverlessworkflow/antora.yml +++ b/serverlessworkflow/antora.yml @@ -65,6 +65,7 @@ asciidoc: sonataflow_devmode_imagename: quay.io/kiegroup/kogito-swf-devmode kogito_examples_repository_url: https://github.com/apache/incubator-kie-kogito-examples kogito_sw_examples_url: https://github.com/apache/incubator-kie-kogito-examples/tree/main/serverless-workflow-examples + kogito_sw_operator_examples_url: https://github.com/apache/incubator-kie-kogito-examples/tree/main/serverless-operator-examples kogito_examples_url: https://github.com/apache/incubator-kie-kogito-examples.git kogito_apps_url: https://github.com/apache/incubator-kie-kogito-apps/tree/main quarkus_cli_url: https://quarkus.io/guides/cli-tooling diff --git a/serverlessworkflow/modules/ROOT/assets/images/data-index/data-index-addon.drawio b/serverlessworkflow/modules/ROOT/assets/images/data-index/data-index-addon.drawio index 062a02835..79d2b08cd 100644 --- a/serverlessworkflow/modules/ROOT/assets/images/data-index/data-index-addon.drawio +++ b/serverlessworkflow/modules/ROOT/assets/images/data-index/data-index-addon.drawio @@ -1,100 +1,164 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - - + + - + - - + + - - + + - + - - + + - + - - + + - + - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/serverlessworkflow/modules/ROOT/assets/images/data-index/data-index-addon.png b/serverlessworkflow/modules/ROOT/assets/images/data-index/data-index-addon.png index 727b2a6da..02f8fc144 100644 Binary files a/serverlessworkflow/modules/ROOT/assets/images/data-index/data-index-addon.png and b/serverlessworkflow/modules/ROOT/assets/images/data-index/data-index-addon.png differ diff --git a/serverlessworkflow/modules/ROOT/nav.adoc b/serverlessworkflow/modules/ROOT/nav.adoc index 
7429f9d6f..d020166e1 100644 --- a/serverlessworkflow/modules/ROOT/nav.adoc +++ b/serverlessworkflow/modules/ROOT/nav.adoc @@ -110,7 +110,10 @@ * Data Index ** xref:data-index/data-index-core-concepts.adoc[Core concepts] ** xref:data-index/data-index-service.adoc[Standalone service] -** xref:data-index/data-index-quarkus-extension.adoc[Quarkus Extension] +** xref:data-index/data-index-quarkus-extension.adoc[Quarkus Extensions] +** Operator +*** xref:data-index/data-index-usecase-singleton.adoc[] +*** xref:data-index/data-index-usecase-multi.adoc[] //** Quarkus Extensions TODO: https://issues.redhat.com/browse/KOGITO-9463 * Use Cases ** xref:use-cases/orchestration-based-saga-pattern.adoc[Saga Orchestration] diff --git a/serverlessworkflow/modules/ROOT/pages/data-index/common/_dataindex_deployment_operator.adoc b/serverlessworkflow/modules/ROOT/pages/data-index/common/_dataindex_deployment_operator.adoc new file mode 100644 index 000000000..7b3724e4c --- /dev/null +++ b/serverlessworkflow/modules/ROOT/pages/data-index/common/_dataindex_deployment_operator.adoc @@ -0,0 +1,220 @@ + +link:{flow_examples_operator_url}/tree/main/infra/dataindex[Here] you can find the infrastructure kustomization required to deploy {data_index_ref} service and a postgresql database explained in this use case. 
+ +That folder contains four files: + +* kustomization.yaml +* 01-postgres.yaml +* 02-dataindex.yaml +* application.properties + +.`kustomization.yaml` resources that deploy {data_index_ref} deployment with persistence to a PostgreSQL database +[source,yaml,subs="attributes+"] +---- +resources: +- 01-postgres.yaml <1> +- 02-dataindex.yaml <2> + +secretGenerator: + - name: postgres-secrets + literals: + - POSTGRES_USER=sonataflow + - POSTGRES_PASSWORD=sonataflow + - POSTGRES_DB=sonataflow + - PGDATA=/var/lib/postgresql/data/mydata + +configMapGenerator: + - name: dataindex-properties + files: + - application.properties +---- +<1> PostgreSQL database deployment +<2> {data_index_ref} deployment + +.`01-postgres.yaml` that deploys the PostgreSQL database +[source,yaml,subs="attributes+"] +---- +--- +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + labels: + app.kubernetes.io/name: postgres + name: postgres-pvc +spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: 1Gi +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + labels: + app.kubernetes.io/name: postgres + name: postgres +spec: + replicas: 1 + selector: + matchLabels: + app.kubernetes.io/name: postgres + template: + metadata: + labels: + app.kubernetes.io/name: postgres + spec: + containers: + - name: postgres + image: postgres:13.2-alpine + imagePullPolicy: 'IfNotPresent' + ports: + - containerPort: 5432 + volumeMounts: + - name: storage + mountPath: /var/lib/postgresql/data + envFrom: + - secretRef: + name: postgres-secrets + readinessProbe: + exec: + command: ["pg_isready"] + initialDelaySeconds: 15 + timeoutSeconds: 2 + livenessProbe: + exec: + command: ["pg_isready"] + initialDelaySeconds: 15 + timeoutSeconds: 2 + resources: + limits: + memory: "256Mi" + cpu: "500m" + volumes: + - name: storage + persistentVolumeClaim: + claimName: postgres-pvc +--- +apiVersion: v1 +kind: Service +metadata: + labels: + app.kubernetes.io/name: postgres + name: postgres +spec: + selector: + 
app.kubernetes.io/name: postgres + ports: + - port: 5432 +---- + +.`02-dataindex.yaml` that deploys {data_index_ref} with persistence to the previous defined postgresql database +[source,yaml,subs="attributes+"] +---- +apiVersion: apps/v1 +kind: Deployment +metadata: + labels: + app.kubernetes.io/name: data-index-service-postgresql + name: data-index-service-postgresql +spec: + replicas: 1 + selector: + matchLabels: + app.kubernetes.io/name: data-index-service-postgresql + template: + metadata: + labels: + app.kubernetes.io/name: data-index-service-postgresql + spec: + containers: + - name: data-index-service-postgresql + image: quay.io/kiegroup/kogito-data-index-postgresql:latest + imagePullPolicy: Always + resources: + limits: + memory: "256Mi" + cpu: "500m" + ports: + - containerPort: 8080 + name: http + protocol: TCP + env: + - name: KOGITO_DATA_INDEX_QUARKUS_PROFILE + value: http-events-support + - name: KUBERNETES_NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace + - name: QUARKUS_DATASOURCE_USERNAME + valueFrom: + secretKeyRef: + key: POSTGRES_USER + name: postgres-secrets + - name: QUARKUS_DATASOURCE_PASSWORD + valueFrom: + secretKeyRef: + key: POSTGRES_PASSWORD + name: postgres-secrets + volumeMounts: + - name: application-config + mountPath: "/home/kogito/config" + livenessProbe: + failureThreshold: 3 + httpGet: + path: /q/health/live + port: 8080 + scheme: HTTP + initialDelaySeconds: 0 + periodSeconds: 30 + successThreshold: 1 + timeoutSeconds: 10 + readinessProbe: + failureThreshold: 3 + httpGet: + path: /q/health/ready + port: 8080 + scheme: HTTP + initialDelaySeconds: 0 + periodSeconds: 30 + successThreshold: 1 + timeoutSeconds: 10 + volumes: + - name: application-config + configMap: + name: dataindex-properties + initContainers: + - name: init-postgres + image: registry.access.redhat.com/ubi9/ubi-minimal:latest + imagePullPolicy: IfNotPresent + command: ['sh', '-c', 'until (echo 1 > /dev/tcp/postgres.$(cat 
/var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local/5432) >/dev/null 2>&1; do echo "Waiting for postgres server"; sleep 3; done;'] +--- +apiVersion: v1 +kind: Service +metadata: + labels: + app.kubernetes.io/name: data-index-service-postgresql + name: data-index-service-postgresql +spec: + ports: + - name: http + port: 80 + targetPort: 8080 + selector: + app.kubernetes.io/name: data-index-service-postgresql + type: NodePort +---- +.`application.properties` referenced by `kustomization.yaml` +[source,properties] +---- +quarkus.http.port=8080 +quarkus.http.cors=true +quarkus.http.cors.origins=/.*/ + +quarkus.datasource.jdbc.url=jdbc:postgresql://postgres:5432/sonataflow?currentSchema=data-index-service +quarkus.hibernate-orm.database.generation=update +quarkus.flyway.migrate-at-start=true + +# Disable kafka client health check since the quarkus-http connector is being used instead. +quarkus.smallrye-health.check."io.quarkus.kafka.client.health.KafkaHealthCheck".enabled=false +---- diff --git a/serverlessworkflow/modules/ROOT/pages/data-index/common/_prerequisites.adoc b/serverlessworkflow/modules/ROOT/pages/data-index/common/_prerequisites.adoc new file mode 100644 index 000000000..80b3418e1 --- /dev/null +++ b/serverlessworkflow/modules/ROOT/pages/data-index/common/_prerequisites.adoc @@ -0,0 +1,28 @@ + +.Prerequisites +* Minikube installed with `registry` addon enabled +* `kubectl` {kubectl_prereq} +* SonataFlow operator installed if workflows are deployed. To install the operator you can see xref:cloud/operator/install-serverless-operator.adoc[]. + +[NOTE] +==== +We recommend that you start Minikube with the following parameters, note that the `registry` addon must be enabled. 
+ +[source,shell] +---- +minikube start --cpus 4 --memory 10240 --addons registry --addons metrics-server --insecure-registry "10.0.0.0/24" --insecure-registry "localhost:5000" +---- + +To verify that the registry addon was properly added you can execute this command: + +[source,shell] +---- +minikube addons list | grep registry +---- + +---- +| registry | minikube | enabled ✅ | Google | +| registry-aliases | minikube | disabled | 3rd party (unknown) | +| registry-creds | minikube | disabled | 3rd party (UPMC Enterprises) | +---- +==== \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/data-index/common/_querying_dataindex.adoc b/serverlessworkflow/modules/ROOT/pages/data-index/common/_querying_dataindex.adoc new file mode 100644 index 000000000..7effdb6fe --- /dev/null +++ b/serverlessworkflow/modules/ROOT/pages/data-index/common/_querying_dataindex.adoc @@ -0,0 +1,105 @@ +[[querying-dataindex-minikube]] +== Querying Data Index service on Minikube + +You can use the public Data Index endpoint to play around with the GraphiQL interface. + +.Procedure +This procedure applies to all use cases that deploy the Data Index Service. + +* Get the Data Index URL: +[source,shell] +---- +minikube service data-index-service-postgresql --url -n my_usecase +---- + +* Open the GraphiQL UI + +Using the URL returned, open a browser window at the following URL: http://192.168.49.2:32409/graphiql/. + +[NOTE] +==== +The IP and port will be different in your installation, and don't forget to add the last slash "/" to the URL, otherwise the GraphiQL UI won't be opened. 
+==== + + +To see the process instances information you can execute this query: + +[source,shell] +---- +{ + ProcessInstances { + id, + processId, + processName, + variables, + state, + endpoint, + serviceUrl, + start, + end + } +} +---- + +The results should be something like: + +[source] +---- +{ + "data": { + "ProcessInstances": [ + { + "id": "3ed8bf63-85c9-425d-9099-49bfb63608cb", + "processId": "greeting", + "processName": "workflow", + "variables": "{\"workflowdata\":{\"name\":\"John\",\"greeting\":\"Hello from JSON Workflow, \",\"language\":\"English\"}}", + "state": "COMPLETED", + "endpoint": "/greeting", + "serviceUrl": "http://greeting", + "start": "2023-09-13T06:59:24.319Z", + "end": "2023-09-13T06:59:24.400Z" + } + ] + } +} +---- + +To see the jobs instances information, if any, you can execute this query: + +[source] +---- +{ + Jobs { + id, + processId, + processInstanceId, + status, + expirationTime, + retries, + endpoint, + callbackEndpoint + } +} +---- + +The results should be something like: + +[source] +---- +{ + "data": { + "Jobs": [ + { + "id": "55c7aadb-3dff-4b97-af8e-cc45014b1c0d", + "processId": "callbackstatetimeouts", + "processInstanceId": "299886b7-2b78-4965-a701-16783c4162d8", + "status": "EXECUTED", + "expirationTime": null, + "retries": 0, + "endpoint": "http://jobs-service-postgresql/jobs", + "callbackEndpoint": "http://callbackstatetimeouts:80/management/jobs/callbackstatetimeouts/instances/299886b7-2b78-4965-a701-16783c4162d8/timers/-1" + } + ] + } +} +---- \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/data-index/data-index-quarkus-extension.adoc b/serverlessworkflow/modules/ROOT/pages/data-index/data-index-quarkus-extension.adoc index dd8ebf882..0304013b1 100644 --- a/serverlessworkflow/modules/ROOT/pages/data-index/data-index-quarkus-extension.adoc +++ b/serverlessworkflow/modules/ROOT/pages/data-index/data-index-quarkus-extension.adoc @@ -1,4 +1,4 @@ -= Data Index Quarkus extension += Data Index 
Quarkus extensions :compat-mode!: // Metadata: :description: Data Index Service to allow to index and query audit data in {product_name} @@ -10,6 +10,8 @@ // External pages :kogito_sw_timeouts_showcase_embedded_example_url: {kogito_sw_examples_url}/serverless-workflow-timeouts-showcase-embedded :kogito_sw_timeouts_showcase_embedded_example_application_properties_url: {kogito_sw_timeouts_showcase_embedded_example_url}/src/main/resources/application.properties +:kogito_sw_dataindex_persistence_example_url: {kogito_sw_examples_url}/serverless-workflow-data-index-persistence-addon-quarkus + :infinispan_url: https://infinispan.org/ :mongo_url: https://www.mongodb.com/ :postgresql_url: https://www.postgresql.org/ @@ -30,8 +32,10 @@ The example described in this document is based on the link:{kogito_sw_timeouts_ The {data_index_ref} service has been designed to store and manage data from different workflow instances. Communication with the service is through events that contain the workflows related data and the service is responsible for storing them and exposing a GraphQL endpoint to allow queries and maintenance operations on the different workflow instances. +image::data-index/data-index-addon.png[Image of data-index as a Quarkus Extension] + In specific use cases, to avoid deploying the service separately, it could be useful to have the indexing functionality and the query capabilities embedded in the same application. -For this purpose, the Quarkus {data_index_ref} extension can be added to any workflow application and incorporates the {data_index_ref} functionality into the same application without needing an external {data_index_ref} service. +For this purpose, the Quarkus {data_index_ref} extension can be added to any workflow application and incorporates the full {data_index_ref} functionality into the same application without needing an external {data_index_ref} service. 
These extensions are distributed as addons ready to work with different types of persistence: * kogito-addons-quarkus-data-index-inmemory (inmemory PostgreSQL) @@ -39,12 +43,21 @@ These extensions are distributed as addons ready to work with different types of * kogito-addons-quarkus-data-index-infinispan * kogito-addons-quarkus-data-index-mongodb +With the same purpose, the Quarkus {data_index_ref} persistence extension can be added to any workflow application and incorporates only the {data_index_ref} indexation and data persistence functionality into the same application without needing an external {data_index_ref} service to do that. +These extensions are distributed as addons ready to work with different types of persistence: + +* kogito-addons-quarkus-data-index-persistence-postgresql +* kogito-addons-quarkus-data-index-persistence-infinispan +* kogito-addons-quarkus-data-index-persistence-mongodb + +In this case to interact with that data and related runtimes using GraphQL you will need an external {data_index_ref} service that makes that endpoint available. + [NOTE] ==== The {data_index_ref} extensions are provided as addons for each kind of supported persistence relying on the link:{quarkus_guides_base_url}/writing-extensions[Quarkus extensions] mechanism. ==== -Once one of these `kogito-addons-quarkus-data-index` addons is added to a workflow, it incorporates the functionality to index and store the workflow data and also incorporates the GraphQL endpoint to perform queries and management operations. +Once one of these `kogito-addons-quarkus-data-index` or `kogito-addons-quarkus-data-index-persistence` addons is added to a workflow, it incorporates the functionality to index and store the workflow data. In case of the `kogito-addons-quarkus-data-index` also incorporates the GraphQL endpoint to perform queries and management operations. 
In the same way as the {data_index_ref} service, there is a specific addon for each type of persistence you want to work with. Currently, you can find {data_index_ref} addons for: link:{postgresql_url}[PostgreSQL], link:{infinispan_url}[Infinispan], and link:{mongo_url}[MongoDB] @@ -54,11 +67,14 @@ The {data_index_ref} addon distribution added to the workflow must match the wor The addon will share the data source used by the workflow where it is added, and it will create separate tables for that purpose. ==== -When any of the {data_index_ref} addons is added: +When any of the `kogito-addons-quarkus-data-index` or `kogito-addons-quarkus-data-index-persistence` addons is added: * The communication with the workflow is direct, the workflow data is *not* transmitted or consumed through events, they are stored directly in the configured database. There is no need to configure the events connection for this purpose. -* A new GraphQL endpoint is added to perform queries and management operations + +Only when any of the `kogito-addons-quarkus-data-index` addons is added: + +* A new GraphQL endpoint is added to perform queries and management operations when `kogito-addons-quarkus-data-index` is added [[data-index-ext-use]] == Adding {data_index_ref} extension to a workflow application @@ -67,14 +83,19 @@ You can add the {data_index_ref} quarkus extension as an addon: .Prerequisites * Your workflow is running and has persistence enabled. +* {data_index_ref} is using the same datasource to store indexed data + For more information about creating a workflow, see {getting_started_create_first_workflow_guide}[Creating your first workflow]. You also can find more details about enabling persistence in {persistence_with_postgresql_guide}[Running a workflow using PostgreSQL] .Procedure -. Add the required {data_index_ref} addon dependencies to the `pom.xml` file of your workflow: +. 
Add the `kogito-addons-quarkus-data-index` extension to your Quarkus Workflow Project using any of the following alternatives: + -- -.Add {data_index_ref} Addon dependencies to `pom.xml` file + +[tabs] +==== +Manually to the POM.xml:: ++ [source,xml] ---- @@ -82,6 +103,19 @@ For more information about creating a workflow, see {getting_started_create_firs kogito-addons-quarkus-data-index-postgresql ---- +Apache Maven:: ++ +[source,shell] +---- +mvn quarkus:add-extension -Dextensions="kogito-addons-quarkus-data-index-postgresql" +---- +Quarkus CLI:: ++ +[source,shell] +---- +quarkus extension add kogito-addons-quarkus-data-index-postgresql +---- +==== -- @@ -103,6 +137,63 @@ When adding the addon to the workflow, you need to disable it by setting `quarku For more information, see `application.properties` file of link:{kogito_sw_timeouts_showcase_embedded_example_application_properties_url}[`serverless-timeouts_showcase_embedded`] example application. -- + +[#kogito-addons-quarkus-dataindex-persistence-extension] +== Adding {data_index_ref} persistence extension to a workflow application + +You can add the {data_index_ref} persistence quarkus extension as an addon: + +.Prerequisites +* Your workflow is running and has persistence enabled +* {data_index_ref} is using the same datasource to store indexed data ++ +For more information about creating a workflow, see {getting_started_create_first_workflow_guide}[Creating your first workflow]. You also can find more details about enabling persistence in {persistence_with_postgresql_guide}[Running a workflow using PostgreSQL] + +.Procedure +. 
Add the `kogito-addons-quarkus-data-index-persistence` extension to your Quarkus Workflow Project using any of the following alternatives: ++ +-- + +[tabs] +==== +Manually to the POM.xml:: ++ +[source,xml] +---- + + org.kie.kogito + kogito-addons-quarkus-data-index-persistence-postgresql + +---- +Apache Maven:: ++ +[source,shell] +---- +mvn quarkus:add-extension -Dextensions="kogito-addons-quarkus-data-index-persistence-postgresql" +---- +Quarkus CLI:: ++ +[source,shell] +---- +quarkus extension add kogito-addons-quarkus-data-index-persistence-postgresql +---- +==== + +-- + +. Add the following configurations to the `application.properties` file of your project. + +.Example adding Data Index addon properties in `application.properties` file +[source,properties] +---- +quarkus.kogito.devservices.enabled=false <1> +---- +<1> By default, when a workflow is running in dev mode, automatically a Data Index Dev Service is started and a temporary dev service Database is created. +When adding the addon to the workflow, you need to disable it by setting `quarkus.kogito.devservices.enabled` to `false` in the `application.properties` file. + + +For more information, see link:{kogito_sw_dataindex_persistence_example_url}[`serverless-workflow-data-index-persistence-addon-quarkus`] example application. 
+ == Additional resources * xref:getting-started/create-your-first-workflow-service.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/data-index/data-index-usecase-multi.adoc b/serverlessworkflow/modules/ROOT/pages/data-index/data-index-usecase-multi.adoc new file mode 100644 index 000000000..c71d1191a --- /dev/null +++ b/serverlessworkflow/modules/ROOT/pages/data-index/data-index-usecase-multi.adoc @@ -0,0 +1,207 @@ += Deploying Data Index and multiple {product_name} applications on Minikube +:compat-mode!: +// Metadata: +:description: Deploying Multiple {product_name} pushing to a single Data Index on Minikube +:keywords: kogito, workflow, quarkus, serverless, kubectl, minikube, operator, dataindex +:table-caption: Data Set +// envs for common content +:kubectl_prereq: command-line tool is installed. Otherwise, Minikube handles it. +//Common constants +:data_index_ref: Data Index +:flow_examples_operator_url: {kogito_sw_operator_examples_url}/serverless-workflow-dataindex-use-cases + + +This document describes how to deploy multiple {product_name} workflow applications and the {data_index_ref} service using a local Kubernetes cluster, such as link:{minikube_url}[Minikube], using the link:{kogito_serverless_operator_url}[{operator_name}]. + +For more information about Minikube and related system requirements, see link:{minikube_url}/docs/start/[Getting started with Minikube] documentation. + +This use case is intended to represent an installation with: + +* A singleton Data Index Service with PostgreSQL persistence +* The `greeting` workflow (no persistence), that is configured to register events to the Data Index Service. +* The `helloworld` workflow (no persistence), that is configured to register events to the Data Index Service. +* Both workflows are configured to register the process events on the {data_index_ref} Service. 
+ +You can directly access the UseCase2 example application we are going to follow at link:{flow_examples_operator_url}[{product_name} Data Index Use Cases with operator]. + +include::common/_prerequisites.adoc[] + +You can check the Minikube installation by entering the following commands in a command terminal: + +.Verify Minikube version +[source,shell] +---- +minikube version +---- + +.Verify `kubectl` CLI version +[source,shell] +---- +kubectl version +---- + +[NOTE] +==== +If `kubectl` is not installed, then Minikube handles it when you execute the following command: + +.`kubectl` is available using Minikube +[source,shell] +---- +alias kubectl="minikube kubectl --" +---- +==== + +.Procedure +. After cloning the link:{kogito_examples_url}[{product_name} examples repository]. Open a terminal and run the following commands + ++ +-- +[source,shell] +---- +cd serverless-operator-examples/serverless-workflow-dataindex-use-cases/ +---- +-- + +. Create the namespace: ++ +-- +[source,shell] +---- +kubectl create namespace usecase2 +---- +-- + +. Deploy the {data_index_ref} Service and postgresql database: ++ +-- +include::common/_dataindex_deployment_operator.adoc[] + +Perform the deployments executing +[source,shell] +---- +kubectl kustomize infra/dataindex | kubectl apply -f - -n usecase2 +---- + +---- +configmap/dataindex-properties-hg9ff8bff5 created +secret/postgres-secrets-22tkgc2dt7 created +service/data-index-service-postgresql created +service/postgres created +persistentvolumeclaim/postgres-pvc created +deployment.apps/data-index-service-postgresql created +deployment.apps/postgres created +---- + +Give some time for the data index to start, you can check that it's running by executing. + +[source,shell] +---- +kubectl get pod -n usecase2 +---- + +---- +NAME READY STATUS RESTARTS AGE +data-index-service-postgresql-5d76dc4468-lb259 1/1 Running 0 2m11s +postgres-7f78499688-lc8n6 1/1 Running 0 2m11s +---- +-- +. 
Deploy the workflow: ++ +-- + +link:{flow_examples_operator_url}/tree/main/usecases/usecase2[Here] you can find the use case kustomization required to deploy the workflow + +.Use case kustomization.yaml resources that deploys the workflow +[source,yaml,subs="attributes+"] +---- +resources: +- ../../infra/service_discovery +- ../../workflows/sonataflow-greeting +- ../../workflows/sonataflow-helloworld +---- + +To see in more detail access to xref:cloud/operator/build-and-deploy-workflows.adoc[] + + +Perform the deployment executing +[source,shell] +---- + kubectl kustomize usecases/usecase2 | kubectl apply -f - -n usecase2 +---- + +---- +configmap/greeting-props created +configmap/helloworld-props created +sonataflow.sonataflow.org/greeting created +sonataflow.sonataflow.org/helloworld created +---- + +Give some time for the sonataflow operator to build and deploy the workflow. +To check that the workflow is ready you can use this command. + +[source,shell] +---- +kubectl get workflow -n usecase2 +---- + +---- +NAME PROFILE VERSION URL READY REASON +greeting 0.0.1 True +helloworld 0.0.1 True +---- +-- + +. Expose the workflows and get the urls: ++ +-- +[source,shell] +---- +kubectl patch svc greeting helloworld -p '{"spec": {"type": "NodePort"}}' -n usecase2 +---- + +[source,shell] +---- +minikube service greeting --url -n usecase2 +---- + +[source,shell] +---- +minikube service helloworld --url -n usecase2 +---- +-- + +. Create a workflow instance: ++ +-- +You must use the URLs calculated in step 5. + +[source,shell] +---- +curl -X POST -H 'Content-Type:application/json' -H 'Accept:application/json' -d '{"name": "John", "language": "English"}' http://192.168.49.2:32407/greeting +---- + +[source,shell] +---- +curl -X POST -H 'Content-Type:application/json' -H 'Accept:application/json' -d '{}' http://192.168.49.2:32327/helloworld +---- +-- + +. 
Clean the use case: ++ +-- +[source,shell] +---- +kubectl delete namespace usecase2 +---- +-- + +include::common/_querying_dataindex.adoc[] + +== Additional resources + +* xref:data-index/data-index-core-concepts.adoc[] +* xref:data-index/data-index-usecase-singleton.adoc[] +* xref:cloud/quarkus/deploying-on-minikube.adoc[] +* xref:cloud/operator/install-serverless-operator.adoc[] + +include::../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/data-index/data-index-usecase-singleton.adoc b/serverlessworkflow/modules/ROOT/pages/data-index/data-index-usecase-singleton.adoc new file mode 100644 index 000000000..e496a93f2 --- /dev/null +++ b/serverlessworkflow/modules/ROOT/pages/data-index/data-index-usecase-singleton.adoc @@ -0,0 +1,196 @@ += Deploying Data Index and {product_name} application on Minikube +:compat-mode!: +// Metadata: +:description: Deploying {product_name} application and Data Index on Minikube with operator +:keywords: kogito, workflow, quarkus, serverless, kn, kubectl, minikube, operator, dataindex +:table-caption: Data Set +// envs for common content +:kubectl_prereq: command-line tool is installed. Otherwise, Minikube handles it. +//Common constants +:data_index_ref: Data Index +:flow_examples_operator_url: {kogito_sw_operator_examples_url}/serverless-workflow-dataindex-use-cases + + +This document describes how to deploy a workflow application and the {data_index_ref} service using a local Kubernetes cluster, such as link:{minikube_url}[Minikube], using the link:{kogito_serverless_operator_url}[{operator_name}]. + +For more information about Minikube and related system requirements, see link:{minikube_url}/docs/start/[Getting started with Minikube] documentation. + +This use case is intended to represent an installation with: + +* A singleton Data Index Service with PostgreSQL persistence +* The `greeting` workflow (no persistence), that is configured to register events to the Data Index Service. 
+ +You can directly access the UseCase1 example application we are going to follow at link:{flow_examples_operator_url}[{product_name} Data Index Use Cases with operator]. + +// shared pre req +include::common/_prerequisites.adoc[] + +You can check the Minikube installation by entering the following commands in a command terminal: + +.Verify Minikube version +[source,shell] +---- +minikube version +---- + +.Verify `kubectl` CLI version +[source,shell] +---- +kubectl version +---- + +[NOTE] +==== +If `kubectl` is not installed, then Minikube handles it when you execute the following command: + +.`kubectl` is available using Minikube +[source,shell] +---- +alias kubectl="minikube kubectl --" +---- +==== + +.Procedure + +. After cloning the link:{kogito_examples_url}[{product_name} examples repository]. Open a terminal and run the following commands + ++ +-- +[source,shell] +---- +cd serverless-operator-examples/serverless-workflow-dataindex-use-cases/ +---- +-- + +. Create the namespace: ++ +-- +[source,shell] +---- +kubectl create namespace usecase1 +---- +-- + +. Deploy the {data_index_ref} Service and postgresql database: ++ +-- +include::common/_dataindex_deployment_operator.adoc[] + +Perform the deployments executing +[source,shell] +---- +kubectl kustomize infra/dataindex | kubectl apply -f - -n usecase1 +---- + +---- +configmap/dataindex-properties-hg9ff8bff5 created +secret/postgres-secrets-22tkgc2dt7 created +service/data-index-service-postgresql created +service/postgres created +persistentvolumeclaim/postgres-pvc created +deployment.apps/data-index-service-postgresql created +deployment.apps/postgres created +---- + +Give some time for the data index to start, you can check that it's running by executing. + +[source,shell] +---- +kubectl get pod -n usecase1 +---- + +---- +NAME READY STATUS RESTARTS AGE +data-index-service-postgresql-5d76dc4468-lb259 1/1 Running 0 2m11s +postgres-7f78499688-lc8n6 1/1 Running 0 2m11s +---- +-- +. 
Deploy the workflow: ++ +-- + +link:{flow_examples_operator_url}/tree/main/usecases/usecase1[Here] you can find the use case kustomization required to deploy the workflow + +.Use case kustomization.yaml resources that deploys the workflow +[source,yaml,subs="attributes+"] +---- +resources: +- ../../infra/service_discovery +- ../../workflows/sonataflow-greeting +---- + +To see in more detail how to deploy the workflow access to xref:cloud/operator/build-and-deploy-workflows.adoc[] + +Perform the deployment executing + +[source,shell] +---- + kubectl kustomize usecases/usecase1 | kubectl apply -f - -n usecase1 +---- + +---- +configmap/greeting-props created +sonataflow.sonataflow.org/greeting created +---- + +To see in more detail how to generate this resources access to xref:cloud/operator/build-and-deploy-workflows.adoc[] + +Give some time for the sonataflow operator to build and deploy the workflow. +To check that the workflow is ready you can use this command. + +[source,shell] +---- +kubectl get workflow -n usecase1 +---- + +---- +NAME PROFILE VERSION URL READY REASON +greeting 0.0.1 True +---- +-- + +. Expose the workflow and get the url: ++ +-- +[source,shell] +---- +kubectl patch svc greeting -p '{"spec": {"type": "NodePort"}}' -n usecase1 +---- + +[source,shell] +---- +minikube service greeting --url -n usecase1 +---- +-- + +. Create a workflow instance: ++ +-- +You must use the URLs calculated in step 5. + +[source,shell] +---- +curl -X POST -H 'Content-Type:application/json' -H 'Accept:application/json' -d '{"name": "John", "language": "English"}' http://192.168.49.2:32407/greeting +---- + +-- + +. 
Clean the use case: ++ +-- +[source,shell] +---- +kubectl delete namespace usecase1 +---- +-- + +include::common/_querying_dataindex.adoc[] + +== Additional resources + +* xref:data-index/data-index-core-concepts.adoc[] +* xref:data-index/data-index-usecase-multi.adoc[] +* xref:cloud/quarkus/deploying-on-minikube.adoc[] +* xref:cloud/operator/install-serverless-operator.adoc[] + +include::../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/index.adoc b/serverlessworkflow/modules/ROOT/pages/index.adoc index 0b3470c0d..ac5b922b1 100644 --- a/serverlessworkflow/modules/ROOT/pages/index.adoc +++ b/serverlessworkflow/modules/ROOT/pages/index.adoc @@ -394,7 +394,15 @@ Go deeper in details about Data Index as standalone service deployment. [.card-title] xref:data-index/data-index-quarkus-extension.adoc[] [.card-description] -Explore Data Index as Quarkus extension in {PRODUCT_NAME} +Explore Data Index as Quarkus extensions in {PRODUCT_NAME} +-- + +[.card] +-- +[.card-title] +xref:data-index/data-index-usecase-singleton.adoc[Operator Data Index Deployment] +[.card-description] +Learn about the options to deploy workflow applications and Data Index using The {PRODUCT_NAME} Operator -- [.card-section]