From fe4ede4ce9cbf9838cbb2aeea5b6cdf192d810a0 Mon Sep 17 00:00:00 2001 From: Ricardo Zanini <1538000+ricardozanini@users.noreply.github.com> Date: Wed, 19 Apr 2023 20:30:07 -0300 Subject: [PATCH] [KOGITO-8818] Rearrange guides to add operator and quarkus cloud (#313) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * [KOGITO-8818] Rearrange guides to add operator and quarkus cloud Signed-off-by: Ricardo Zanini * Refactoring navigation, installation and dev guide Signed-off-by: Ricardo Zanini * Add dev, config, workflow status guides Signed-off-by: Ricardo Zanini * Wrapping up first version before review Signed-off-by: Ricardo Zanini * Fix typos and grammar Signed-off-by: Ricardo Zanini * Apply krisv initial suggestions Co-authored-by: Kris Verlaenen * Adding Swagger UI to verify the workflow, reviewing prod, add cards Signed-off-by: Ricardo Zanini * Fixing typos Co-authored-by: Marián Macik Co-authored-by: Filippe Spolti * Incorporating Marian and spolti reviews Signed-off-by: Ricardo Zanini * Feedback fixes * Feedback review * Apply suggestions from code review Co-authored-by: Tristan Radisson * Apply feedback changes --------- Signed-off-by: Ricardo Zanini Co-authored-by: Kris Verlaenen Co-authored-by: Marián Macik Co-authored-by: Filippe Spolti Co-authored-by: Cristiano Nicolai <570894+cristianonicolai@users.noreply.github.com> Co-authored-by: Tristan Radisson --- serverlessworkflow/antora.yml | 10 +- .../images/cloud/swagger-ui-operator.png | Bin 0 -> 54441 bytes serverlessworkflow/modules/ROOT/nav.adoc | 73 ++--- ...downstream-project-setup-instructions.adoc | 2 +- .../pages/_common-content/report-issue.adoc | 3 +- ...ith-serverless-operator-on-kubernetes.adoc | 191 ------------- .../_create_namespace_and_deploy_info.adoc | 6 +- ...eploy_workflow_application_requisites.adoc | 2 +- .../pages/cloud/common/_prerequisites.adoc | 4 +- .../modules/ROOT/pages/cloud/index.adoc | 107 +++++++ .../operator/build-and-deploy-workflows.adoc | 140 ++++++++++ .../cloud/operator/configuring-workflows.adoc | 81 ++++++ .../cloud/operator/developing-workflows.adoc | 264 ++++++++++++++++++ .../operator/install-serverless-operator.adoc | 140 ++++++++++ .../pages/cloud/operator/known-issues.adoc | 68 +++++ .../operator/workflow-status-conditions.adoc | 150 ++++++++++ ...build-workflow-image-with-quarkus-cli.adoc | 10 +- .../build-workflow-images-with-tekton.adoc | 0 .../deploying-on-kubernetes.adoc | 18 +- .../{ => quarkus}/deploying-on-minikube.adoc | 16 +- .../kubernetes-service-discovery.adoc | 6 +- .../versioning-workflows-in-knative.adoc | 0 .../pages/core/configuration-properties.adoc | 4 +- .../pages/core/custom-functions-support.adoc | 11 +- ...efining-an-input-schema-for-workflows.adoc | 2 +- .../ROOT/pages/core/timeouts-support.adoc | 12 +- .../core/understanding-jq-expressions.adoc | 4 +- ...understanding-workflow-error-handling.adoc | 6 +- .../pages/core/working-with-parallelism.adoc | 4 +- ...-produce-events-with-knative-eventing.adoc | 8 +- .../consume-producing-events-with-kafka.adoc | 8 +- .../event-correlation-with-workflows.adoc | 12 +- .../handling-events-on-workflows.adoc | 4 +- .../eventing/working-with-callbacks.adoc | 6 +- .../working-with-openapi-callbacks.adoc | 6 +- ...erless-workflow-specification-support.adoc | 14 +- .../create-your-first-workflow-service.adoc | 14 +- .../getting-familiar-with-our-tooling.adoc | 6 +- .../modules/ROOT/pages/index.adoc | 72 ++--- .../camel-routes-integration.adoc | 16 +- .../custom-functions-knative.adoc | 
18 +- .../expose-metrics-to-prometheus.adoc | 22 +- .../persistence-with-postgresql.adoc | 4 +- ...thention-support-for-openapi-services.adoc | 4 +- ...ting-third-party-services-with-oauth2.adoc | 4 +- ...onfiguring-openapi-services-endpoints.adoc | 2 +- .../orchestration-of-grpc-services.adoc | 2 +- ...chestration-of-openapi-based-services.adoc | 6 +- ...ic-integration-tests-with-restassured.adoc | 2 +- .../integration-tests-with-postgresql.adoc | 2 +- ...ocking-openapi-services-with-wiremock.adoc | 2 +- .../tooling/kn-plugin-workflow-overview.adoc | 26 +- ...rless-logic-web-tools-deploy-projects.adoc | 18 +- ...ss-logic-web-tools-github-integration.adoc | 2 +- ...logic-web-tools-openshift-integration.adoc | 10 +- .../serverless-logic-web-tools-overview.adoc | 12 +- ...dhat-application-services-integration.adoc | 2 +- .../orchestration-based-saga-pattern.adoc | 12 +- .../use-cases/timeout-showcase-example.adoc | 4 +- 59 files changed, 1203 insertions(+), 451 deletions(-) create mode 100644 serverlessworkflow/modules/ROOT/assets/images/cloud/swagger-ui-operator.png delete mode 100644 serverlessworkflow/modules/ROOT/pages/cloud/build-and-deploy-with-serverless-operator-on-kubernetes.adoc create mode 100644 serverlessworkflow/modules/ROOT/pages/cloud/index.adoc create mode 100644 serverlessworkflow/modules/ROOT/pages/cloud/operator/build-and-deploy-workflows.adoc create mode 100644 serverlessworkflow/modules/ROOT/pages/cloud/operator/configuring-workflows.adoc create mode 100644 serverlessworkflow/modules/ROOT/pages/cloud/operator/developing-workflows.adoc create mode 100644 serverlessworkflow/modules/ROOT/pages/cloud/operator/install-serverless-operator.adoc create mode 100644 serverlessworkflow/modules/ROOT/pages/cloud/operator/known-issues.adoc create mode 100644 serverlessworkflow/modules/ROOT/pages/cloud/operator/workflow-status-conditions.adoc rename serverlessworkflow/modules/ROOT/pages/cloud/{ => quarkus}/build-workflow-image-with-quarkus-cli.adoc (96%) rename serverlessworkflow/modules/ROOT/pages/cloud/{ => quarkus}/build-workflow-images-with-tekton.adoc (100%) rename serverlessworkflow/modules/ROOT/pages/cloud/{ => quarkus}/deploying-on-kubernetes.adoc (91%) rename serverlessworkflow/modules/ROOT/pages/cloud/{ => quarkus}/deploying-on-minikube.adoc (95%) rename serverlessworkflow/modules/ROOT/pages/cloud/{ => quarkus}/kubernetes-service-discovery.adoc (98%) rename serverlessworkflow/modules/ROOT/pages/cloud/{ => quarkus}/versioning-workflows-in-knative.adoc (100%) diff --git a/serverlessworkflow/antora.yml b/serverlessworkflow/antora.yml index b7921def8..9ee32a0b7 100644 --- a/serverlessworkflow/antora.yml +++ b/serverlessworkflow/antora.yml @@ -10,12 +10,12 @@ asciidoc: attributes: # for product names #in Downstream product_name: OpenShift Serverless Logic - product_name: Kogito - #in Downstream context: OpenShift Serverless Logic - context: Serverless Workflow + product_name: Kogito Serverless Workflow # upstream: empty #kogito_version_redhat: 1.24.0.Final-redhat-00001 kogito_version_redhat: "" + # same for upstream and downstream while the operator is community only + operator_name: Kogito Serverless Workflow Operator # upstream: io.quarkus.platform #quarkus_platform: com.redhat.quarkus.platform quarkus_platform: io.quarkus.platform @@ -52,6 +52,7 @@ asciidoc: quarkus_openapi_gen_url: https://github.com/quarkiverse/quarkus-openapi-generator kie_tools_releases_page_url: https://github.com/kiegroup/kie-tools/releases quarkus_guides_base_url: https://quarkus.io/guides + 
quarkus_url: https://quarkus.io/ smallrye_messaging_url: https://smallrye.io/smallrye-reactive-messaging/smallrye-reactive-messaging/3.3 quarkus_config_url: https://quarkus.io/guides/config quarkus_swagger_url: https://quarkus.io/guides/openapi-swaggerui @@ -82,5 +83,6 @@ asciidoc: # must align this version camel_extensions_url: https://camel.apache.org/camel-quarkus/2.14.x/reference/extensions kaoto_url: https://marketplace.visualstudio.com/items?itemName=redhat.vscode-kaoto - minikube_url: https://minikube.sigs.k8s.io + kogito_serverless_operator_url: https://github.com/kiegroup/kogito-serverless-operator/ + docs_issues_url: https://github.com/kiegroup/kogito-docs/issues/new diff --git a/serverlessworkflow/modules/ROOT/assets/images/cloud/swagger-ui-operator.png b/serverlessworkflow/modules/ROOT/assets/images/cloud/swagger-ui-operator.png new file mode 100644 index 0000000000000000000000000000000000000000..a14ed68b276d391d09232576d2a47aadca2880e5 GIT binary patch literal 54441 (binary image data omitted)
zxS1av<0iyw#lLGHzPlak&i72vw~@{by#hE$S0~0!I{%eqCR^o>#uY*f#wNRje$qI| zx2o9NL-8({(zev^+;CbR4!W<3uZy0F6;@lA9}c&Vh&XIxpV;l2eHCNB`LP^7sFB*A zOT_+9K-YX^>2XMo5*<){Pm>U8BPIsq)+NFe3hirvYTpOF89oV9Al9iEQ80Gsn2N`>}glE`Z ziLM-Ulu``Ut}~>1Y{?E)cvAr-i}K+vX813$u4^&@bx;%nX$o{h4>Z8hjHi`TgRSl$bnxvZ{HayOU z@BwaEc7rdBF0Y7Q^z~0^t{d)4Kn4cELM4g2a_Obl0oBBCV8FdI+ML4U0tZ$~!MjR^ z@oy)1emtkjKCXnmR`c{&#P}n{v#Btn88R)Vg%5Ae>KO4@CRn>|IOXaH`g2QuIC(9+2AwiukmB8f zR*6=21n4w&o2NYv7&=NV)zI?JFL=2qx3qJQ&MO_xW^j|aF7WdYrkT0f#c5?)`u^kK z;8z7N>TWtq>trHpnO&YtPKiZ~zV#*^QVQrG#c?$YivgF(={ zJzsS}KV_A?TIYClLUaXkzFC*B`<6(ie#exoapBO&I5>FvfR{0Y|8!?>`RHJoXH+fS zYha|@?ky1&`gsf5;|O-Er-4Wc^0h#Tj+BJd=}fEENx+Etx~3m^QN#@U{vSB=?`KBq zh|-(oLERHS`;ny^##EKRM>4Tuf-^?i+jsx;G%svUsM*SQ3`{Mk2^~3&e=-kt8-A%B z$5T?;HT((g)o1_tS%JpkXPGaC%XorX0a#CpSJk6`D^z)u=@5~B>W9&#dj8D9A&No2 z>G+LZ(t|b!bNe)G-r?nBWL-4h``Cmh^gvL}B|#y6X^_lar)u@{z6oG2atzcN&@X1SLlJ@TpV%U;~DMe$rdCu80dYi2_r1rugfV{2M9uQ;AAu zJhfRxEhxv(G6Vtkr_Jix5}(JkI*y=3So( z)*}}MpCrF$S>q8Zv3sM#{!UszCrz<(ZYryTe`B-$Xs>aSwy7yJ%UETE2ZuD56I8yn zz3pAEH%s;8<;#qv#)fbLl+&-z^3w`MNnUx`$2^#VZ}N^6t9uD7FtYEe@{NVaPCbrv z3kp#ju>uUuH zmk$TpYd<=&vZNP$u5_R)Yf`g|U2Di={6@-2edsucL=R-NDaubVA3$rRVR`)#RAH$&WKF1jClV9715%8}J#%JkE z%5{3gf9CimOm{JdZvn+dJLSPI$V?%G!)qeJ8`g=$hW1W}>bB}rFfDt~(60Ch*!_{p^9#V<*h#RdfIS{MztT(-T(S7C*RZ0@Su`MqV|zcKy=_KMt%!CGZmc zqji3Quf86s|5HNu+c@4#YytP5*AFnIJhtt3IoA_`wlV`hbliUBntIXRyeFjd>Z^iNWg{4gWS9;+2r$6kn1H9VCWWfZH9U`| z3qn2uF{@rbFS6t{*8~e>;!Mus;I`Z%S58`g;wZnV+_^#(Fu1#t`nH_w1knjp>`i}1 zx?v}6z3_Oe1UL~V7+fPXJvDXDvWJ10mDL(bxgc+SmbR4C?9Xa*|J|qq7Z+p^V#(BN zYI0tr7b;Ft1y9xq?f ---- + -. Follow the instructions in the link:{redhat_registry_auth_url}[Red Hat Container Registry Authentication] article to log in to the registry and use the {context} images locally. +. Follow the instructions in the link:{redhat_registry_auth_url}[Red Hat Container Registry Authentication] article to log in to the registry and use the {product_name} images locally. diff --git a/serverlessworkflow/modules/ROOT/pages/_common-content/report-issue.adoc b/serverlessworkflow/modules/ROOT/pages/_common-content/report-issue.adoc index 985970f5c..58a21f61d 100644 --- a/serverlessworkflow/modules/ROOT/pages/_common-content/report-issue.adoc +++ b/serverlessworkflow/modules/ROOT/pages/_common-content/report-issue.adoc @@ -1,5 +1,4 @@ == _**Found an issue?**_ -:new_issue: https://github.com/kiegroup/kogito-docs/issues/new -If you find an issue or any misleading information, please feel free to report it {new_issue}[here]. +If you find an issue or any misleading information, please feel free to report it link:{docs_issues_url}[here]. We really appreciate it! 
diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/build-and-deploy-with-serverless-operator-on-kubernetes.adoc b/serverlessworkflow/modules/ROOT/pages/cloud/build-and-deploy-with-serverless-operator-on-kubernetes.adoc deleted file mode 100644 index abba59aae..000000000 --- a/serverlessworkflow/modules/ROOT/pages/cloud/build-and-deploy-with-serverless-operator-on-kubernetes.adoc +++ /dev/null @@ -1,191 +0,0 @@ -= Building and deploying a {context} application on Kubernetes using the {product_name} Serverless Operator -:compat-mode!: -// Metadata: -:description: Build and deploy using the Kogito Serverless Workflow Operator a serverless workflow application -:keywords: kogito, workflow, serverless, operator, kubernetes, minikube, openshift -// links -:kogito_serverless_operator_url: https://github.com/kiegroup/kogito-serverless-operator/ -:kogito_greeting_example_url: https://github.com/kiegroup/kogito-examples/tree/stable/serverless-workflow-examples/serverless-workflow-greeting-quarkus -:kaniko_issue_url: https://github.com/GoogleContainerTools/kaniko/issues/2201 - -This document describes how to build and deploy your workflow application using a Kubernetes cluster with the link:{kogito_serverless_operator_url}[{product_name} Serverless Operator]. If you don't have any cluster operational, link:{minikube_url}[Minikube] commands are given throughout the guide. - -Using the {product_name} Serverless Operator, you will be able to build and deploy a {product_name} {context} application only by having a workflow definition. - -If you already have a container built and pushed to a container registry and you want to deploy it on the Kubernetes cluster, then you can do it without the operator following the guide xref:cloud/deploying-on-kubernetes.adoc[Deploying your {context} application on Kubernetes]. - -The link:{kogito_serverless_operator_url}[{product_name} Serverless Operator] is currently in Alpha version, is under active development and is at the moment supporting {context} definitions that are using: - -* Functions -* States - - Switch including dataConditions - - Inject including data with a transition -* Operations including Actions containing functionRef with arguments -* KeepActive -* AutoRetries -* ExpressionsLang (jq or jsonpath) - -.Prerequisites -* A workflow definition. -* A Kubernetes cluster with admin privileges. If you haven't got one prepared, you can use a local link:{minikube_url}[Minikube] instance. -* `kubectl` command-line tool is installed. Otherwise, Minikube provides it. - -== Prepare a Minikube instance - -[source,shell,subs="attributes+"] ----- -minikube start --cpus 4 --memory 4096 --addons registry --addons metrics-server --insecure-registry "10.0.0.0/24" --insecure-registry "localhost:5000" ----- - -[NOTE] -==== -To speed up the build time, you can increase CPUs and memory options so that you minikube instance will have more resources. For example, use `--cpus 12 --memory 16384`. -==== - -[TIP] -==== -If it does not work with the default driver, as known as `docker`, you can try to start with the `podman` driver as follows: -==== - -.Start minikube with podman driver -[source,shell,subs="attributes+"] ----- -minikube start [...] --driver podman ----- - - -[IMPORTANT] -==== -There are some issues with the `crio` container runtime and Kaniko that the operator is using. 
Reference: link:{kaniko_issue_url}[ISSUE-2201] -==== - -== Setup {product_name} Serverless Operator - -In order to have an up-and-running instance of the {product_name} Serverless Operator you can use the following command: - -.Install {product_name} Serverless Operator on Kubernetes -[source,shell,subs="attributes+"] ----- -kubectl create -f https://raw.githubusercontent.com/kiegroup/kogito-serverless-operator/{operator_version}/operator.yaml ----- - -You can follow, then, the deployment of the {product_name} Serverless Operator: - -.Watch the {product_name} Serverless Operator pod -[source,shell,subs="attributes+"] ----- -kubectl get pod -n kogito-serverless-operator-system --watch ----- - -You can also follow the operator’s log: - -.Watch the {product_name} Serverless Operator pod logs -[source,shell,subs="attributes+"] ----- -kubectl logs deployment/kogito-serverless-operator-controller-manager -n kogito-serverless-operator-system -f ----- - -Once the operator is running, it will watch for new custom resources (CR) so that you can prepare your environment to be ready to build a new {context} application based on the definitions you will send to the operator. - -== Preparing for the build - -You should follow these steps to create a container that you can deploy as a service on Kubernetes. - -=== Create a namespace for the building phase - -Let's create a new namespace that will hold all the resources that we (or the operator) will create (pods, deployments, services, secretes, config map, and Custom Resources) in this guide. - -.Create a namespace for the application to build & run in -[source,bash,subs="attributes+"] ----- -kubectl create namespace kogito-workflows ----- - -=== Create a secret for the container registry authentication -.Create a secret for the container registry authentication -[source,bash,subs="attributes+"] ----- -kubectl create secret docker-registry regcred --docker-server= --docker-username= --docker-password= --docker-email= -n kogito-workflows ----- - -or you can directly import your local docker config into your Kubernetes cluster: - -.Create a secret for the container registry authentication based on local docker config -[source,bash,subs="attributes+"] ----- -kubectl create secret generic regcred --from-file=.dockerconfigjson=${HOME}/.docker/config.json --type=kubernetes.io/dockerconfigjson -n kogito-workflows ----- - -=== Create a {product_name} Serverless Platform containing the configuration (i.e. registry address, secret) for building your workflows - -The {product_name} Serverless Platform CR is the resource used to control the behavior of the {product_name} Serverless Operator. -It defines the behavior of all Custom Resources (Workflow and Build) in the given namespace. - -Since the {product_name} Serverless Operator is installed in global mode, you will need to specify a {product_name} Serverless Platform CR in each namespace where you want the operator to be executed. -You can find a basic {product_name} Serverless Platform CR example in the `config/samples` folder that you can simply apply to configure your operator. - -.Create a {product_name} Serverless Platform CR -[source,bash,subs="attributes+"] ----- -kubectl apply -f https://raw.githubusercontent.com/kiegroup/kogito-serverless-operator/{operator_version}/config/samples/sw.kogito_v1alpha08_kogitoserverlessplatform.yaml -n kogito-workflows ----- - -Note: In this Custom Resource, spec.platform.registry.secret is the name of the secret you created just before. 
- -[TIP] -==== -You can also update "on-the-fly" the {product_name} Serverless Platform CR registry field with this command (change ) - -.Create a {product_name} Serverless Platform CR with a specific registry -[source,bash,subs="attributes+"] ----- -curl https://raw.githubusercontent.com/kiegroup/kogito-serverless-operator/{operator_version}/config/samples/sw.kogito_v1alpha08_kogitoserverlessplatform.yaml | sed "s|address: .*|address: " | kubectl apply -f - ----- - -In order to retrieve the Cluster IP address of Minikube's internal registry to configure your platform, you can use the following command: - -.Retrieve Minikube registry internal IP -[source,bash,subs="attributes+"] ----- -kubectl get svc registry -n kube-system -ojsonpath='{.spec.clusterIP}' ----- -==== - -== Build and deploy your {context} application - -You can now send your {product_name} {context} Custom Resource to the operator which includes the {context} definition. - -You can find a basic {product_name} {context} Custom Resource in the `config/samples` folder that is defining the link:{kogito_greeting_example_url}[{product_name} {context} Greeting example]. - -[source,bash,subs="attributes+"] ----- -kubectl apply -f https://raw.githubusercontent.com/kiegroup/kogito-serverless-operator/{operator_version}/config/samples/sw.kogito_v1alpha08_kogitoserverlessworkflow.yaml -n kogito-workflows ----- -You can check the logs of the build of your workflow via: - -.Get the {product_name} {context} application pod logs -[source,bash,subs="attributes+"] ----- -kubectl logs kogito-greeting-builder -n kogito-workflows ----- - -The final pushed image must be printed into the logs at the end of the build. - -== Check the {product_name} {context} application is running -In order to check that the {product_name} {context} Greeting application is up and running, you can try to perform a test HTTP call, from the greeting pod. - -.Check the greeting application is running -[source,bash,subs="attributes+"] ----- -kubectl patch svc greeting -n kogito-workflows -p '{"spec": {"type": "NodePort"}}' -GREETING_SVC=$(minikube service greeting -n kogito-workflows --url) -curl -X POST -H 'Content-Type:application/json' -H 'Accept:application/json' -d '{"name": "John", "language": "English"}' $GREETING_SVC/greeting ----- - -If everything is working well you should receive a response like this: - -.Response from the greeting application -[source,json,subs="attributes+"] ----- -{"id":"b5fbfaa3-b125-4e6c-9311-fe5a3577efdd","workflowdata":{"name":"John","language":"English","greeting":"Hello from JSON Workflow, "}} ----- diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/common/_create_namespace_and_deploy_info.adoc b/serverlessworkflow/modules/ROOT/pages/cloud/common/_create_namespace_and_deploy_info.adoc index 126954b0a..bec325f4e 100644 --- a/serverlessworkflow/modules/ROOT/pages/cloud/common/_create_namespace_and_deploy_info.adoc +++ b/serverlessworkflow/modules/ROOT/pages/cloud/common/_create_namespace_and_deploy_info.adoc @@ -26,15 +26,15 @@ kubectl config view --minify -o jsonpath='{..namespace}' ---- -- -. Deploy your {context} application {deploy_application}. +. Deploy your {product_name} application {deploy_application}. + -- The next step is to deploy your workflow application and execute it. -You can read the further sections on the different procedures to deploy your {context} application. +You can read the further sections on the different procedures to deploy your {product_name} application. 
[NOTE] ==== You can use the native image due to the faster startup. + -For more information about installing the workflow application, see xref:cloud/build-workflow-image-with-quarkus-cli.adoc[Building workflow images using Quarkus CLI] document. +For more information about installing the workflow application, see xref:cloud/quarkus/build-workflow-image-with-quarkus-cli.adoc[Building workflow images using Quarkus CLI] document. ==== -- diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/common/_deploy_workflow_application_requisites.adoc b/serverlessworkflow/modules/ROOT/pages/cloud/common/_deploy_workflow_application_requisites.adoc index 50c3c60f1..510b15ac4 100644 --- a/serverlessworkflow/modules/ROOT/pages/cloud/common/_deploy_workflow_application_requisites.adoc +++ b/serverlessworkflow/modules/ROOT/pages/cloud/common/_deploy_workflow_application_requisites.adoc @@ -5,4 +5,4 @@ For more information, see {knative_procedure}. * Knative CLI is installed. * (Optional) Quarkus CLI is installed. + For more information, see link:{quarkus_cli_url}[Building Quarkus Apps with Quarkus command line interface (CLI)]. -* {context} application container is ready. +* {product_name} application container is ready. diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/common/_prerequisites.adoc b/serverlessworkflow/modules/ROOT/pages/cloud/common/_prerequisites.adoc index 50c3f5d6f..231c80a94 100644 --- a/serverlessworkflow/modules/ROOT/pages/cloud/common/_prerequisites.adoc +++ b/serverlessworkflow/modules/ROOT/pages/cloud/common/_prerequisites.adoc @@ -1,8 +1,8 @@ :kn_cli_url: https://knative.dev/docs/client/install-kn/ .Prerequisites -* Your {context} application is ready to use. + -For more information about building the application container, see xref:cloud/build-workflow-image-with-quarkus-cli.adoc[Building workflow images using Quarkus CLI]. +* Your {product_name} application is ready to use. + +For more information about building the application container, see xref:cloud/quarkus/build-workflow-image-with-quarkus-cli.adoc[Building workflow images using Quarkus CLI]. * {environment_prereq} * `kubectl` {kubectl_prereq} diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/index.adoc b/serverlessworkflow/modules/ROOT/pages/cloud/index.adoc new file mode 100644 index 000000000..203f6439d --- /dev/null +++ b/serverlessworkflow/modules/ROOT/pages/cloud/index.adoc @@ -0,0 +1,107 @@ += {product_name} Applications in the Cloud +:compat-mode!: +// Metadata: +:description: Workflow Applications in Kubernetes +:keywords: cloud, kubernetes, docker, image, podman, openshift, pipelines +// other + +{product_name} is evolving as a platform to build and deploy workflow applications in Kubernetes clusters. + +You have two options to deploy your workflow applications: either as a full Quarkus project or using the {operator_name}. + +The cards below list all features included in the platform to deploy workflow applications either with Quarkus or the Operator. + +[NOTE] +==== +Eventually these two options will converge: the {operator_name} will also be able to handle full Quarkus projects. So if you opt to use Quarkus now and manually deploy your workflows, bear in mind that it's on the project's roadmap to integrate the Quarkus experience with the Operator. +==== + +[.card-section] +== Kubernetes with Quarkus + +For Java developers, you can use Quarkus and a few add-ons to help you build and deploy the application in a Kubernetes cluster. {product_name} also generates basic Kubernetes objects YAML files to help you get started. The application should be managed by a Kubernetes administrator.
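+
+As a quick illustration, a typical build and deploy flow with the Quarkus tooling looks like the sketch below. The extension names and the generated manifest path are standard Quarkus defaults rather than anything specific to {product_name}; the cards in this section cover the complete procedures.
+
+[source,bash,subs="attributes+"]
+----
+# Sketch only: assumes the Quarkus CLI and an existing workflow project.
+quarkus extension add kubernetes container-image-jib
+# Package the application; Kubernetes manifests are generated under target/kubernetes/
+quarkus build
+# The container image must be available in a registry your cluster can pull from
+kubectl apply -f target/kubernetes/kubernetes.yml
+----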
{product_name} also generates basic Kubernetes objects YAML files to help you getting started. The application should be managed by a Kubernetes administrator. + +[.card] +-- +[.card-title] +xref:cloud/quarkus/build-workflow-image-with-quarkus-cli.adoc[Building workflow images using Quarkus CLI] +[.card-description] +Learn how to build images for your workflow applications using Quarkus CLI +-- + +[.card] +-- +[.card-title] +xref:cloud/quarkus/kubernetes-service-discovery.adoc[Kubernetes service discovery in {product_name}] +[.card-description] +Learn what is and how the Kubernetes service discovery for workflow application configuration works +-- + +[.card] +-- +[.card-title] +xref:cloud/quarkus/deploying-on-minikube.adoc[Deploying your {product_name} application on Minikube] +[.card-description] +Learn how to deploy your workflow application on Minikube for local tests and development +-- + +[.card] +-- +[.card-title] +xref:cloud/quarkus/deploying-on-kubernetes.adoc[Deploying your {product_name} application on Kubernetes] +[.card-description] +Learn how to deploy your workflow application on Kubernetes +-- + +[.card-section] +== Kubernetes with the Operator + +For developers that are looking for a native Kubernetes approach where you can model workflows using YAML definitions and directly deploy them, you can use the {operator_name}. The operator registers a new Kubernetes resource in the cluster to manage your workflow development iteration cycle and composition of services and events. The application is managed by the operator. + +[.card] +-- +[.card-title] +xref:cloud/operator/install-serverless-operator.adoc[] +[.card-description] +Learn how to install the {operator_name} in a Kubernetes cluster +-- + +[.card] +-- +[.card-title] +xref:cloud/operator/developing-workflows.adoc[] +[.card-description] +Learn how to deploy a workflow for development purposes +-- + +[.card] +-- +[.card-title] +xref:cloud/operator/configuring-workflows.adoc[] +[.card-description] +Learn how to configure workflows deployed with {operator_name} +-- + +[.card] +-- +[.card-title] +xref:cloud/operator/workflow-status-conditions.adoc[] +[.card-description] +Learn to interpret the workflow resource status conditions +-- + +[.card] +-- +[.card-title] +xref:cloud/operator/build-and-deploy-workflows.adoc[] +[.card-description] +Learn how to build and deploy workflow services with {operator_name} +-- + +[.card] +-- +[.card-title] +xref:cloud/operator/known-issues.adoc[] +[.card-description] +Learn about the known issues and feature roadmap of the {operator_name} +-- diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/operator/build-and-deploy-workflows.adoc b/serverlessworkflow/modules/ROOT/pages/cloud/operator/build-and-deploy-workflows.adoc new file mode 100644 index 000000000..e77457b65 --- /dev/null +++ b/serverlessworkflow/modules/ROOT/pages/cloud/operator/build-and-deploy-workflows.adoc @@ -0,0 +1,140 @@ += Building and Deploying Workflows with the Operator +:compat-mode!: +// Metadata: +:description: Build and deploy with {operator_name} +:keywords: kogito, workflow, serverless, operator, kubernetes, minikube, openshift, containers +// links +:kogito_serverless_operator_url: https://github.com/kiegroup/kogito-serverless-operator/ +:kogito_greeting_example_url: https://github.com/kiegroup/kogito-examples/tree/stable/serverless-workflow-examples/serverless-workflow-greeting-quarkus +:kaniko_issue_url: https://github.com/GoogleContainerTools/kaniko/issues/2201 + +This document describes how to build and 
deploy your workflow on a Kubernetes cluster using the link:{kogito_serverless_operator_url}[{operator_name}] only by having a workflow definition. + +[IMPORTANT] +==== +{operator_name} is under active development with features yet to be implemented. Please see xref:cloud/operator/known-issues.adoc[]. +==== + +.Prerequisites +* A workflow definition. +* The {operator_name} installed. See xref:cloud/operator/install-serverless-operator.adoc[] + +== Preparing for the build + +You should follow these steps to create a container that you can deploy as a service on Kubernetes. + +=== Create a namespace for the building phase + +Create a new namespace that will hold all the resources that the operator will create (pods, deployments, services, secretes, config map, and Custom Resources) in this guide. + +.Create a namespace for the application to build & run in +[source,bash,subs="attributes+"] +---- +kubectl create namespace kogito-workflows +# set the kogito-workflows namespace to your context +kubectl config set-context --current --namespace=kogito-workflows +---- + +=== Create a secret for the container registry authentication +.Create a secret for the container registry authentication +[source,bash,subs="attributes+"] +---- +kubectl create secret docker-registry regcred --docker-server= --docker-username= --docker-password= --docker-email= -n kogito-workflows +---- + +or you can directly import your local docker config into your Kubernetes cluster: + +.Create a secret for the container registry authentication based on local docker config +[source,bash,subs="attributes+"] +---- +kubectl create secret generic regcred --from-file=.dockerconfigjson=$\{HOME\}/.docker/config.json --type=kubernetes.io/dockerconfigjson -n kogito-workflows +---- + +[WARNING] +==== +Double check your `$\{HOME\}/.docker/config.json`. If you're using local desktop authentication, this configuration **won't work** in the cluster. You can initialize this by logging in in the target registry, e.g. `docker login`. +==== + +=== Configure the {operator_name} (i.e. registry address, secret) for building your workflows + +The `KogitoServerlessPlatform` is the resource used to control the behavior of the {operator_name}. +It defines the behavior of all Custom Resources (Workflow and Build) in the given namespace. + +Since the operator is installed in global mode, you will need to specify a platform in each namespace where you want to deploy workflows. +You can find a basic `KogitoServerlessPlatform` custom resource example in the link:https://github.com/kiegroup/kogito-serverless-operator/tree/{operator_version}/config/samples[`config/samples` folder] that you can simply apply to configure your operator. + +.Create a `KogitoServerlessPlatform` +[source,bash,subs="attributes+"] +---- +kubectl apply -f https://raw.githubusercontent.com/kiegroup/kogito-serverless-operator/{operator_version}/config/samples/sw.kogito_v1alpha08_kogitoserverlessplatform.yaml -n kogito-workflows +---- + +[NOTE] +==== +In this Custom Resource, `spec.platform.registry.secret` is the name of the secret you created just before. 
+==== + +You can also update "on-the-fly" the `KogitoServerlessPlatform` registry field with this command (change ) + +.Create a `KogitoServerlessPlatform` with a specific registry +[source,bash,subs="attributes+"] +---- +curl https://raw.githubusercontent.com/kiegroup/kogito-serverless-operator/{operator_version}/config/samples/sw.kogito_v1alpha08_kogitoserverlessplatform.yaml | sed "s|address: .*|address: " | kubectl apply -f - +---- + +In order to retrieve the Cluster IP address of Minikube's internal registry to configure your platform, you can use the following command: + +.Retrieve Minikube registry internal IP +[source,bash,subs="attributes+"] +---- +kubectl get svc registry -n kube-system -ojsonpath='{.spec.clusterIP}' +---- + +== Build and deploy your workflow application + +You can now send your workflow definition (`KogitoServerlessWorkflow`) to the operator. + +You can find a basic `KogitoServerlessWorkflow` in the link:https://github.com/kiegroup/kogito-serverless-operator/tree/{operator_version}/config/samples[`config/samples` folder] that is defining the link:{kogito_greeting_example_url}[{product_name} Greeting example]. + +[source,bash,subs="attributes+"] +---- +kubectl apply -f https://raw.githubusercontent.com/kiegroup/kogito-serverless-operator/{operator_version}/config/samples/sw.kogito_v1alpha08_kogitoserverlessworkflow.yaml -n kogito-workflows +---- +You can check the logs of the build of your workflow via: + +.Get the workflow application pod logs +[source,bash,subs="attributes+"] +---- +kubectl logs kogito-greeting-builder -n kogito-workflows +---- + +The final pushed image must be printed into the logs at the end of the build. + +== Check the workflow application is running + +In order to check that the {product_name} Greeting application is up and running, you can try to perform a test HTTP call, from the greeting pod. + +.Check the greeting application is running +[source,bash,subs="attributes+"] +---- +kubectl patch svc greeting -n kogito-workflows -p '{"spec": {"type": "NodePort"}}' +GREETING_SVC=$(minikube service greeting -n kogito-workflows --url) +curl -X POST -H 'Content-Type:application/json' -H 'Accept:application/json' -d '{"name": "John", "language": "English"}' $GREETING_SVC/greeting +---- + +If everything is working well you should receive a response like this: + +.Response from the greeting application +[source,json,subs="attributes+"] +---- +{"id":"b5fbfaa3-b125-4e6c-9311-fe5a3577efdd","workflowdata":{"name":"John","language":"English","greeting":"Hello from JSON Workflow, "}} +---- + +// TODO: add a troubleshooting guide - https://issues.redhat.com/browse/KOGITO-8864 + +== Additional resources + +* xref:cloud/operator/known-issues.adoc[] +* xref:cloud/operator/developing-workflows.adoc[] + +include::../../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/operator/configuring-workflows.adoc b/serverlessworkflow/modules/ROOT/pages/cloud/operator/configuring-workflows.adoc new file mode 100644 index 000000000..f20b8cb68 --- /dev/null +++ b/serverlessworkflow/modules/ROOT/pages/cloud/operator/configuring-workflows.adoc @@ -0,0 +1,81 @@ += Configuring Workflow Services +:compat-mode!: +// Metadata: +:description: Configuration of Workflow Services deployed by the operator +:keywords: kogito, workflow, serverless, operator, kubernetes, minikube, config, openshift, containers + +This document describes how to configure a workflow service with the {operator_name}. 
+
+[IMPORTANT]
+====
+This feature is supported only for the development profile. Configuring workflows outside of the development profile is tracked by link:https://issues.redhat.com/browse/KOGITO-8522[KOGITO-8522]. See xref:cloud/operator/known-issues.adoc[].
+====
+
+== Editing the Workflow Configuration
+
+When the operator deploys the workflow service, it also creates a `ConfigMap` named after the `KogitoServerlessWorkflow` object with the suffix `-props`. For example, if your workflow name is `greeting`, then the `ConfigMap` name is `greeting-props`.
+
+You can use the Kubernetes object editor of your preference to add or edit the link:https://en.wikipedia.org/wiki/.properties[properties] in the workflow configuration. Using `kubectl`, you can run:
+
+.Editing the Workflow Properties
+[source,shell,subs="attributes+"]
+----
+kubectl edit cm <workflow_name>-props
+----
+
+Note that it's important to respect the properties format, otherwise the operator will replace your configuration with the default one.
+
+Here is an example of a workflow properties `ConfigMap`:
+
+.Example of a Workflow ConfigMap Properties
+[source,yaml,subs="attributes+"]
+----
+apiVersion: v1
+kind: ConfigMap
+metadata:
+  labels:
+    app: greeting
+  name: greeting-props
+  namespace: default
+data:
+  application.properties: |
+    my.properties.key = any-value
+----
+
+The underlying runtime engine that executes the workflow service is based on link:{quarkus_url}[Quarkus]. Therefore, you can configure the workflow as you normally would any link:{quarkus_config_url}[Quarkus application].
+
+Any xref:core/configuration-properties.adoc[{product_name} configuration that the documentation describes] or general Quarkus application property can be configured using this method.
+
+== Immutable properties
+
+A few properties cannot be changed through this configuration. They are usually already defined in the properties file. The table below lists them.
+
+.List of immutable properties
+[cols="2,1,1"]
+|===
+|Property Key | Immutable Value | Profile
+
+|quarkus.http.port
+|8080
+|all
+
+|quarkus.http.host
+|0.0.0.0
+|all
+
+|org.kie.kogito.addons.knative.health-enabled
+|false
+|dev
+
+|===
+
+If you try to change any of them, the operator overrides them with the defaults, while preserving your changes to the other property keys.
+
+== Additional resources
+
+* xref:core/configuration-properties.adoc[]
+* xref:cloud/operator/known-issues.adoc[]
+* xref:cloud/operator/developing-workflows.adoc[]
+* xref:cloud/operator/build-and-deploy-workflows.adoc[]
+
+include::../../../pages/_common-content/report-issue.adoc[]
diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/operator/developing-workflows.adoc b/serverlessworkflow/modules/ROOT/pages/cloud/operator/developing-workflows.adoc
new file mode 100644
index 000000000..b4fb22b76
--- /dev/null
+++ b/serverlessworkflow/modules/ROOT/pages/cloud/operator/developing-workflows.adoc
@@ -0,0 +1,264 @@
+= Developing Workflow Services with the Operator
+:compat-mode!:
+// Metadata:
+:description: Developing workflow services with the operator on Kubernetes
+:keywords: kogito, workflow, serverless, operator, kubernetes, minikube, devmode
+
+This document describes how you can develop your workflows directly on Kubernetes with the {operator_name}.
+
+Workflows in the development profile are not tailored for production environments. To build and deploy an immutable workflow application with the operator, see xref:cloud/operator/build-and-deploy-workflows.adoc[].
+ +[IMPORTANT] +==== +{operator_name} is under active development with features yet to be implemented. Please see xref:cloud/operator/known-issues.adoc[]. +==== + +== Introduction to the Development Profile + +The development profile is the easiest way to start playing around with workflows and the operator. + +To get started, you can use an editor of your choice to create a new `KogitoServerlessWorkflow` Custom Resource YAML definition. For example: + +.Example of a Kubernetes {product_name} YAML definition +[source,yaml,subs="attributes+"] +---- +apiVersion: sw.kogito.kie.org/v1alpha08 +kind: KogitoServerlessWorkflow +metadata: + name: greeting + annotations: + sw.kogito.kie.org/description: Greeting example on k8s! + sw.kogito.kie.org/version: 0.0.1 + sw.kogito.kie.org/profile: dev <1> +spec: <2> + start: ChooseOnLanguage + functions: + - name: greetFunction + type: custom + operation: sysout + states: + - name: ChooseOnLanguage + type: switch + dataConditions: + - condition: "${ .language == \"English\" }" + transition: GreetInEnglish + - condition: "${ .language == \"Spanish\" }" + transition: GreetInSpanish + defaultCondition: GreetInEnglish + - name: GreetInEnglish + type: inject + data: + greeting: "Hello from JSON Workflow, " + transition: GreetPerson + - name: GreetInSpanish + type: inject + data: + greeting: "Saludos desde JSON Workflow, " + transition: GreetPerson + - name: GreetPerson + type: operation + actions: + - name: greetAction + functionRef: + refName: greetFunction + arguments: + message: ".greeting+.name" + end: true +---- + +<1> The annotation `sw.kogito.kie.org/profile: dev` tells the operator to deploy your workflow using the development profile. This means that the operator will build a running instance of the workflow ready to receive changes during your development cycle. + +<2> In the `spec` attribute goes the workflow definition as described by the xref:getting-started/cncf-serverless-workflow-specification-support.adoc[CNCF Serverless Workflow specification]. So if you already have a workflow definition, you can use it there. Alternatively, you can use the xref:tooling/serverless-workflow-editor/swf-editor-overview.adoc[editors to create your workflow definition]. 
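+
+If you already maintain a workflow as a standalone Serverless Workflow file, the same content simply moves under the `spec` attribute. The snippet below is a minimal, illustrative sketch of that mapping and assumes a hypothetical `hello.sw.json` definition; it is not an additional requirement of the operator:
+
+.Illustrative mapping of a standalone definition into the Custom Resource `spec`
+[source,yaml,subs="attributes+"]
+----
+# Hypothetical standalone definition (hello.sw.json):
+#   { "id": "hello", "start": "SayHello",
+#     "states": [ { "name": "SayHello", "type": "inject",
+#                   "data": { "greeting": "Hello" }, "end": true } ] }
+#
+# Equivalent Custom Resource: the workflow body goes under `spec`.
+apiVersion: sw.kogito.kie.org/v1alpha08
+kind: KogitoServerlessWorkflow
+metadata:
+  name: hello
+  annotations:
+    sw.kogito.kie.org/profile: dev
+spec:
+  start: SayHello
+  states:
+    - name: SayHello
+      type: inject
+      data:
+        greeting: "Hello"
+      end: true
+----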
+
+== Deploying a New Workflow Service
+
+.Prerequisites
+* You have xref:cloud/operator/install-serverless-operator.adoc[installed the {operator_name}]
+* You have created a new {product_name} Kubernetes YAML file
+
+Having a new Kubernetes workflow definition in a YAML file (you can use the above example), you can deploy it in your cluster with the following command:
+
+.Deploying a new KogitoServerlessWorkflow custom resource in Kubernetes
+[source,bash,subs="attributes+"]
+----
+kubectl apply -f <filename> -n <your_namespace>
+----
+
+Alternatively, you can try one of the examples available in the operator repository:
+
+.Deploying the greeting workflow example
+[source,bash,subs="attributes+"]
+----
+kubectl apply -f https://raw.githubusercontent.com/kiegroup/kogito-serverless-operator/{operator_version}/config/samples/sw.kogito_v1alpha08_kogitoserverlessworkflow_devmode.yaml -n <your_namespace>
+----
+
+[TIP]
+====
+Replace `<your_namespace>` with the namespace you're using to deploy your applications.
+====
+
+You can follow the workflow status to check whether everything is fine with:
+
+.Checking the workflow status
+[source,bash,subs="attributes+"]
+----
+kubectl get workflow -n <your_namespace> -w
+----
+
+You should see the workflow conditions evolving to `READY` in a few seconds:
+
+.Example workflow deployment status
+[source,bash,subs="attributes+"]
+----
+NAME       PROFILE   VERSION   ADDRESS   READY   REASON
+greeting   dev       0.0.1               False   WaitingForDeployment
+greeting   dev       0.0.1               True
+----
+
+[TIP]
+====
+The `REASON` field gives you a cue about the current workflow status.
+====
+
+You can make changes to the workflow YAML using any Kubernetes editor. For example, you can run the following `kubectl` command:
+
+[source,bash,subs="attributes+"]
+----
+kubectl edit workflow/greeting -n <your_namespace>
+----
+Then, change the workflow definition inside the Custom Resource `spec` section.
+
+Otherwise, you can save the Custom Resource definition to a file, edit it with your editor of choice, and re-apply it.
+
+For example, using VS Code, these are the commands needed:
+
+[source,bash,subs="attributes+"]
+----
+curl -S https://raw.githubusercontent.com/kiegroup/kogito-serverless-operator/{operator_version}/config/samples/sw.kogito_v1alpha08_kogitoserverlessworkflow_devmode.yaml > workflow_devmode.yaml
+code workflow_devmode.yaml
+kubectl apply -f workflow_devmode.yaml -n <your_namespace>
+----
+
+The operator ensures that the latest workflow definition is running and ready.
+This way, you can include the workflow application in your development scenario and start making requests to it.
+
+== Check the workflow application is running
+
+In order to check that the {product_name} Greeting application is up and running, you can try to perform a test HTTP call. First, you must expose the service:
+
+.Exposing the workflow application
+[source,bash,subs="attributes+"]
+----
+kubectl patch svc greeting -n <your_namespace> -p '{"spec": {"type": "NodePort"}}'
+minikube service greeting -n <your_namespace> --url
+http://127.0.0.1:57053
+
+# use the above output to get the current workflow URL in your environment
+----
+
+After exposing the workflow service, you can point your browser to the Swagger UI and start making requests with the REST interface.
+
+For example, using the above command execution you can access the Swagger UI via `http://127.0.0.1:57053/q/swagger-ui/`.
+
+At the Swagger UI, click on "POST /greeting", then on "Try it out!".
+Copy the following JSON message and hit execute:
+
+.Operation Greeting input
+[source,json,subs="attributes+"]
+----
+{
+  "name": "Jane Doe"
+}
+----
+
+.The Swagger UI executing the POST /greeting operation
+image::cloud/swagger-ui-operator.png[]
+
+You should see a result similar to this:
+
+.Operation Greeting result
+[source,json,subs="attributes+"]
+----
+{
+  "id": "984b5c6c-36ef-48ba-aa11-89fa54d25e98",
+  "workflowdata": {
+    "name": "Jane Doe",
+    "greeting": "Hello from JSON Workflow, "
+  }
+}
+----
+
+You can even make changes to your `KogitoServerlessWorkflow` YAML file and see the results using the Swagger UI.
+
+[[troubleshooting]]
+== Troubleshooting the Workflow Service
+
+Since during development you are iterating over the deployed workflow service, it's likely that you will need to troubleshoot the application if something goes wrong.
+
+To ensure the workflow is running in a healthy state, the operator deploys the application pod with health check probes.
+If your changes impact the application somehow, the pod will stop responding.
+
+[[basic-troubleshooting]]
+=== Basic Troubleshooting
+
+1. Analyze the workflow status with:
++
+.Get the workflow status conditions
+[source,shell,subs="attributes+"]
+----
+kubectl get workflow <workflow_name> -o jsonpath={.status.conditions} | jq .
+----
++
+It can give you a clue about what might be happening. See xref:cloud/operator/workflow-status-conditions.adoc[] for more information.
++
+2. Fetch the logs and look for `ERROR` messages:
++
+.Watch the application logs
+[source,shell,subs="attributes+"]
+----
+kubectl logs deployment/<workflow_name> -f
+----
++
+If you are planning to open an issue or to ask for help in the {product_name} communication channels, this logging information is always useful for the person who will try to help you.
+
+=== Possible Failure Scenarios
+
+==== Feature Not Yet Supported
+The {operator_name} is under active development. Sometimes a feature might not be available yet. Please see xref:cloud/operator/known-issues.adoc[] for a comprehensive list of available features.
+
+If you identify that you are relying on a feature that is not yet available, please file a new issue so we can prioritize it for you, or ask in the {product_name} communication channels.
+
+==== Wrong Application Configuration
+A wrong configuration, or the lack of one, might prevent your workflow from running correctly.
+The operator deploys a `ConfigMap` that holds the application properties for the workflow.
+
+.Get the properties ConfigMap
+[source,shell,subs="attributes+"]
+----
+kubectl get cm <workflow_name>-props
+----
+
+The `ConfigMap` name pattern is the workflow name followed by `-props`.
+
+Make sure that the configuration is correct and that you are not missing any properties required for a given feature to work.
+If anything needs to change, edit the configuration as you normally would any `ConfigMap`.
+
+The operator ensures that these properties are applied to the application.
+
+See xref:cloud/operator/configuring-workflows.adoc[] for more information.
+
+==== Wrong Workflow Definition
+The {operator_name} validates the workflow definition when you create or edit the YAML file, avoiding persisting a workflow in an invalid state.
+However, the operator is under active development, so errors during the validation might still occur.
+
+In this case, you might have to make a few modifications to the workflow definition to fix any structural error.
+
+You can identify such problems by looking at the deployed workflow application logs, as explained in <<basic-troubleshooting>>.
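+
+Regardless of the failure scenario, plain Kubernetes inspection commands are also helpful while iterating on a workflow. The sketch below only uses generic `kubectl` commands; the `greeting` name and the namespace are placeholders taken from the example above:
+
+.Generic inspection commands while troubleshooting (illustrative)
+[source,shell,subs="attributes+"]
+----
+# Describe the workflow deployment to inspect probe failures and container state
+kubectl describe deployment greeting -n <your_namespace>
+
+# List recent events in the namespace, most recent last
+kubectl get events -n <your_namespace> --sort-by=.metadata.creationTimestamp
+----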
+ +If you found an issue that a cause is not listed in this section, please link:{docs_issues_url}[let us know]. + +== Additional resources + +* xref:cloud/operator/known-issues.adoc[] +* xref:cloud/operator/build-and-deploy-workflows.adoc[] + +include::../../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/operator/install-serverless-operator.adoc b/serverlessworkflow/modules/ROOT/pages/cloud/operator/install-serverless-operator.adoc new file mode 100644 index 000000000..e68744cbb --- /dev/null +++ b/serverlessworkflow/modules/ROOT/pages/cloud/operator/install-serverless-operator.adoc @@ -0,0 +1,140 @@ += Install the {operator_name} +:compat-mode!: +// Metadata: +:description: Install the operator on Kubernetes clusters +:keywords: kogito, workflow, serverless, operator, kubernetes, minikube, openshift, containers + +This guide describes how to install the {operator_name} in a Kubernetes cluster. The operator is in an xref:/cloud/operator/known-issues.adoc[early development stage] (community only) and has been tested on Kubernetes 1.22+, and link:{minikube_url}[Minikube]. + +.Prerequisites +* A Kubernetes cluster with admin privileges. Alternatively, you can use Minikube or KIND. +* `kubectl` command-line tool is installed. Otherwise, Minikube provides it. + +== Prepare a Minikube instance + +[WARNING] +==== +You can safely skip this section if you're not using Minikube. +==== + +.Prerequisites +* A machine with at least 8GB memory and a link:https://en.wikipedia.org/wiki/Multi-core_processor[CPU with 8 cores] +* Docker or Podman installed + +Run the following command to create a new instance capable of installing the operator and deploy workflows: + +[source,shell,subs="attributes+"] +---- +minikube start --cpus 4 --memory 4096 --addons registry --addons metrics-server --insecure-registry "10.0.0.0/24" --insecure-registry "localhost:5000" +---- + +[NOTE] +==== +To speed up the build time, you can increase CPUs and memory options so that your minikube instance will have more resources. For example, use `--cpus 12 --memory 16384`. In order to work, you will have to recreate your instance. +==== + +If it does not work with the default driver, also known as `docker`, you can try to start with the `podman` driver as follows: + +.Start minikube with podman driver +[source,shell,subs="attributes+"] +---- +minikube start [...] --driver podman +---- + +== Install the {product_name} Operator + +In order to have an up-and-running instance of the {product_name} Operator you can use the following command: + +// TODO: replace this URI with the GitHub's artifact release instead. 
+.Install {product_name} Operator on Kubernetes
+[source,shell,subs="attributes+"]
+----
+kubectl create -f https://raw.githubusercontent.com/kiegroup/kogito-serverless-operator/{operator_version}/operator.yaml
+----
+
+You can follow the deployment of the {product_name} Operator:
+
+.Watch the {product_name} Operator pod
+[source,shell,subs="attributes+"]
+----
+kubectl get pod -n kogito-serverless-operator-system --watch
+----
+
+A successful installation should have an output like this:
+
+.Successful Installation Output
+[source]
+----
+NAME                                                             READY   STATUS              RESTARTS   AGE
+kogito-serverless-operator-controller-manager-948547ffd-sr2j2   0/2     ContainerCreating   0          6s
+kogito-serverless-operator-controller-manager-948547ffd-sr2j2   1/2     Running             0          7s
+kogito-serverless-operator-controller-manager-948547ffd-sr2j2   2/2     Running             0          20s
+----
+
+You can also follow the operator’s log:
+
+.Watch the {product_name} Operator pod logs
+[source,shell,subs="attributes+"]
+----
+kubectl logs deployment/kogito-serverless-operator-controller-manager -n kogito-serverless-operator-system -f
+----
+
+Once the operator is running, it watches for new custom resources (CRs), so you can prepare your environment to create a new {product_name} application based on the definitions you send to the operator.
+
+To check if the custom resource definitions (CRDs) are correctly installed, try running:
+
+.Check if the CRDs are correctly installed
+[source,shell,subs="attributes+"]
+----
+kubectl get crds | grep kogito
+kogitoserverlessbuilds.sw.kogito.kie.org      2023-03-08T18:31:15Z
+kogitoserverlessplatforms.sw.kogito.kie.org   2023-03-08T18:31:15Z
+kogitoserverlessworkflows.sw.kogito.kie.org   2023-03-08T18:31:15Z
+----
+
+== Uninstall the Operator
+// TODO: this is super verbose for now because we don't have OLM/OperatorHub. This procedure should be replaced as soon as we have them. Or it should be renamed to "Uninstalling snapshot/local versions", so users that don't rely on OperatorHub/OLM can also install/uninstall their instances.
+
+To uninstall the {operator_name}, first you should remove all the object instances managed by it. Then, you can delete every object created during the installation.
+
+To delete every object instance managed by the operator in your cluster, you can run this series of commands:
+
+.Delete every {product_name} object instance
+[source,shell,subs="attributes+"]
+----
+kubectl delete --all workflow --all-namespaces
+kubectl delete --all kogitoserverlessbuild --all-namespaces
+kubectl delete --all kogitoserverlessplatform --all-namespaces
+----
+
+Alternatively, if you created everything under the same namespace, deleting the given namespace has the same outcome.
+
+To uninstall the correct version of the operator, first you must get the current version by running:
+
+.Getting the operator version
+[source,shell,subs="attributes+"]
+----
+kubectl get deployment kogito-serverless-operator-controller-manager -n kogito-serverless-operator-system -o jsonpath="{.spec.template.spec.containers[?(@.name=='manager')].image}"
+
+quay.io/kiegroup/kogito-serverless-operator-nightly:1.34.0
+----
+
+The operator manager image reflects the current operator's version. Replace the major and minor version names in the command below.
For example, if the image version is `1.34.0` use `1.34` in the placeholder: + +.Uninstalling the operator +[source,shell,subs="attributes+"] +---- +kubectl delete -f https://raw.githubusercontent.com/kiegroup/kogito-serverless-operator/.x/operator.yaml +---- + +[TIP] +==== +If you're running a snapshot version, use this URL instead `https://raw.githubusercontent.com/kiegroup/kogito-serverless-operator/main/operator.yaml`. +==== + +== Additional resources + +* xref:cloud/operator/known-issues.adoc[] +* xref:cloud/operator/developing-workflows.adoc[] + +include::../../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/operator/known-issues.adoc b/serverlessworkflow/modules/ROOT/pages/cloud/operator/known-issues.adoc new file mode 100644 index 000000000..c33a1b473 --- /dev/null +++ b/serverlessworkflow/modules/ROOT/pages/cloud/operator/known-issues.adoc @@ -0,0 +1,68 @@ += {operator_name} Known Issues, Limitations and Roadmap +:compat-mode!: +// Metadata: +:description: Known issues, features, and limitations of the operator +:keywords: kogito, workflow, serverless, operator, kubernetes, minikube, roadmap +:rest_example_url: + +The link:{kogito_serverless_operator_url}[{operator_name}] is currently in Alpha version, is under active development. + +== Supported Features + +* Functions + - xref:core/custom-functions-support.adoc#con-func-sysout[Sysout] + - link:{kogito_sw_examples_url}/serverless-workflow-functions-quarkus/src/main/resources/restfunctions.sw.json[Rest Custom Functions] + - xref:core/understanding-jq-expressions.adoc[Expression functions] +* Events + - xref:eventing/consume-produce-events-with-knative-eventing.adoc[Knative Eventing integration]. Every Knative Eventing object (brokers, sources, sinks) must be manually configured. +* States + - Switch including dataConditions + - Inject including data with a transition + - States with Operations including Actions containing functionRef with arguments +* ExpressionsLang (jq or jsonpath) +* xref:cloud/operator/configuring-workflows.adoc[Configuring the workflow application in development profile] + +== Not Supported Features + +Every other feature from the xref:getting-started/cncf-serverless-workflow-specification-support.adoc[CNCF Serverless Workflow Specification] not listed in the section above, is not supported or tested at the moment. + +Any feature requiring external file configuration such as OpenAPI or Camel Routes is not supported at the moment. + +== Known Bugs + +- link:https://issues.redhat.com/browse/KOGITO-8805[Workflow Dev Profile Builder Image lacking permissions to write on mvn dir on OpenShift] + +== Roadmap + +The following issues is currently being prioritized. 
+ +=== CNCF Specification v0.8 Alignment + +- link:https://issues.redhat.com/browse/KOGITO-8452[Alignment with Serverless Workflow v0.8 model] +- link:https://issues.redhat.com/browse/KOGITO-7840[Implement admission webhooks for workflow validation] + +=== Workflow Development Profile + +- link:https://issues.redhat.com/browse/KOGITO-8675[Make Workflow Dev Profile builder image configurable via Platform] +- link:https://issues.redhat.com/browse/KOGITO-8517[Support external resources configuration for Workflow CR in devmode] +- link:https://issues.redhat.com/browse/KOGITO-8643[Expose Workflow Dev Profile application endpoint externally] +- link:https://issues.redhat.com/browse/KOGITO-8650[Ensure that Data Index is embedded in the workflow dev profile] +- link:https://issues.redhat.com/browse/KOGITO-8651[Ensure that the Management Console is embedded in the workflow dev profile] +- link:https://issues.redhat.com/browse/KOGITO-8866[Ensure that Jobs Service is embedded in the workflow dev profile] + +=== Workflow Productization Profile + +- link:https://issues.redhat.com/browse/KOGITO-8522[Map an external ConfigMap for application properties on Serverless Workflow services] +- link:https://issues.redhat.com/browse/KOGITO-7755[Manage the Functions included in a Workflow with Operator] +- link:https://issues.redhat.com/browse/KOGITO-8524[Enable toggle Workflow CR from devmode to production mode and vice-versa] +- link:https://issues.redhat.com/browse/KOGITO-8792[Review build failures and signal the reasoning in the Events API] +- link:https://issues.redhat.com/browse/KOGITO-8794[Handle deployment failures in prod profile] +- link:https://issues.redhat.com/browse/KOGITO-8806[Evaluate internal registry integration on OpenShift, Kubernetes and Minikube] + +=== Knative Integration + +- link:https://issues.redhat.com/browse/KOGITO-8648[Implement the Knative Addressable interface in dev profile] +- link:https://issues.redhat.com/browse/KOGITO-8409[Add support to Cloud Events to Knative custom function] +- link:https://issues.redhat.com/browse/KOGITO-8410[Add support to GET method to Knative custom function] +- link:https://issues.redhat.com/browse/KOGITO-8766[Adjust the Knative Function definition to use the same interface as defined by the extension] +- link:https://issues.redhat.com/browse/KOGITO-8646[Review the need of the knative/kubernetes addons inside the dev profile builder image] diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/operator/workflow-status-conditions.adoc b/serverlessworkflow/modules/ROOT/pages/cloud/operator/workflow-status-conditions.adoc new file mode 100644 index 000000000..cbe0ac22c --- /dev/null +++ b/serverlessworkflow/modules/ROOT/pages/cloud/operator/workflow-status-conditions.adoc @@ -0,0 +1,150 @@ += Understanding Workflow Services Status Conditions +:compat-mode!: +// Metadata: +:description: Description of the status and conditions of a workflow deployed by the operator +:keywords: kogito, workflow, serverless, operator, kubernetes, minikube, status, conditions + +This document describes the status and conditions of the `KogitoServerlessWorkflow` object deployed by the {operator_name}. + +link:https://github.com/kubernetes/community/blob/master/contributors/devel/sig-architecture/api-conventions.md#typical-status-properties[Kubernetes Status] is an important property to observe in order to understand what is currently happening with the object. It can also help you troubleshoot or integrate with other objects in the cluster. 
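+
+For example, you can inspect or wait on these conditions with plain `kubectl`. The commands below are a minimal sketch that assumes a workflow named `greeting`; they rely only on the conditions being exposed under `.status.conditions`, as described in this guide:
+
+.Reading and waiting on workflow conditions (illustrative)
+[source,shell,subs="attributes+"]
+----
+# Print the current conditions of the workflow
+kubectl get workflow greeting -o jsonpath='{.status.conditions}' | jq .
+
+# Block until the Running condition becomes True (or the timeout expires)
+kubectl wait workflow/greeting --for=condition=Running --timeout=120s
+----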
+ +== General Status + +The table below lists the general structure of a workflow status: + +.Description of KogitoServerlessWorkflow status object +[cols="1,2"] +|=== +|Status | Description + +| `ObservedGeneration` +| Last object generation observed by the status + +| `Conditions` +| Current observed workflow conditions + +| `Address` +| External or internal workflow access endpoint. Implements the link:https://github.com/knative/specs/blob/main/specs/eventing/overview.md#addressable[Knative Addressable Interface] + +| `RecoverFailureAttempts` +| How many attempts the operator tried to recover from a failure + +|=== + +The `Conditions` property might vary depending on the workflow profile. The next sections describe the current implementation. + +== Development Profile Conditions + +When you deploy a workflow with the xref:cloud/operator/developing-workflows.adoc[development profile], the operator deploys a ready-to-use container with a running workflow instance. + +The following table lists the possible conditions. + +.Conditions Scenarios in Development +[cols="0,0,1,2"] +|=== +|Condition | Status | Reason | Description + +| Built +| Unknown +| +| In development profile there's no build process, so this state remains in `Unknown` status + +| Running +| True +| +| The workflow is running and in healthy state + +| Running +| False +| WaitingForDeployment +| The workflow is waiting for the underlying deployment object to have a minimum availability + +| Running +| False +| DeploymentFailure +| There was a problem with the underlying deployment object. Check the message in this condition and the workflow pod logs for more info + +| Running +| False +| DeploymentIsUnavailable +| The underlying deployment object doesn't have the minimum availability for this workflow. Check the message in this condition and the workflow pod logs for more info + +| Running +| False +| AttemptToRedeployFailed +| If the workflow deployment is not available, the operator will try to rollout the deployment three times before entering in this stage. Check the message in this condition and the workflow pod logs for more info + +|=== + +In normal conditions, the workflow will transition from `Running`, `WaitingForDeployment` condition to `Running`. In case something wrong happens, consult the section xref:cloud/operator/developing-workflows.adoc#troubleshooting[Workflow Troubleshooting in Development]. + +== Production Profile Conditions + +Deploying the workflow in xref:cloud/operator/build-and-deploy-workflows.adoc[Production profile] makes the operator to build an immutable image for the workflow service. The build step can be followed by observing the workflow conditions. 
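+
+One way to follow that progression is to watch both the workflow and the build resource created by the operator. This is a minimal sketch that assumes a workflow named `greeting`; the exact columns printed may differ between operator versions:
+
+.Following the build and deployment conditions (illustrative)
+[source,shell,subs="attributes+"]
+----
+# Watch the workflow conditions move from the build phase to the deployment phase
+kubectl get workflow greeting -n <your_namespace> -w
+
+# Watch the corresponding KogitoServerlessBuild instance
+kubectl get kogitoserverlessbuild -n <your_namespace> -w
+----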
+ +.Condition Scenarios in Production +[cols="0,0,1,2"] +|=== +|Condition | Status | Reason | Description + +| Built +| False +| BuildIsRunningReason +| The build is currently running + +| Built +| False +| BuildFailedReason +| The build has failed and any attempts to have it run failed + +| Built +| True +| +| Last build has finished successfully + +| Running +| True +| +| The workflow is running and in healthy state + +| Running +| False +| WaitingForPlatform +| The workflow can't have a pod running until a `KogitoServerlessPlatform` is ready + +| Running +| False +| WaitingForBuildReason +| The workflow is waiting for the build to finish to start running + +| Running +| False +| WaitingForDeployment +| The workflow is waiting for the underlying deployment object to have a minimum availability + +| Running +| False +| DeploymentFailure +| There was a problem with the underlying deployment object. Check the message in this condition and the workflow pod logs for more info + +| Running +| False +| DeploymentIsUnavailable +| The underlying deployment object doesn't have the minimum availability for this workflow. Check the message in this condition and the workflow pod logs for more info + +| Running +| False +| AttemptToRedeployFailed +| If the workflow deployment is not available, the operator will try to roll out the deployment three times before entering this stage. Check the message in this condition and the workflow pod logs for more info + +|=== + +The normal conditions for the workflow resource are to place a `KogitoServerlessBuild` to run and wait for it to finish. As soon as the image is ready, the workflow transitions to the deployment phase, which is to provision a new workflow service pod to run with the built image. + +== Additional resources + +* xref:cloud/operator/known-issues.adoc[] +* xref:cloud/operator/developing-workflows.adoc[] +* xref:cloud/operator/build-and-deploy-workflows.adoc[] + +include::../../../pages/_common-content/report-issue.adoc[] \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/build-workflow-image-with-quarkus-cli.adoc b/serverlessworkflow/modules/ROOT/pages/cloud/quarkus/build-workflow-image-with-quarkus-cli.adoc similarity index 96% rename from serverlessworkflow/modules/ROOT/pages/cloud/build-workflow-image-with-quarkus-cli.adoc rename to serverlessworkflow/modules/ROOT/pages/cloud/quarkus/build-workflow-image-with-quarkus-cli.adoc index b88133eb6..e469eceac 100644 --- a/serverlessworkflow/modules/ROOT/pages/cloud/build-workflow-image-with-quarkus-cli.adoc +++ b/serverlessworkflow/modules/ROOT/pages/cloud/quarkus/build-workflow-image-with-quarkus-cli.adoc @@ -12,13 +12,13 @@ This document describes how to build a Serverless Application Container image using the link:{quarkus_cli_url}[Quarkus CLI]. .Prerequisites -include::../../pages/_common-content/getting-started-requirement.adoc[] +include::../../../pages/_common-content/getting-started-requirement.adoc[] * Latest version of Docker is installed. Alternatively, you can use link:{google_jib_url}[Jib] to build container images. However, Docker is required to build GraalVM native image using the Quarkus native builder image. * Optionally, GraalVM {graalvm_min_version} is installed. Quarkus provides a few extensions to build container images, such as `Jib`, `docker`, `s2i`, and `buildpacks`. For more information about the Quarkus extensions, see the link:{quarkus_container_images_url}[Quarkus documentation]. 
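+
+As a quick reference, a container image build with any of these extensions is driven by `quarkus.container-image.*` properties. The command below is only a generic sketch of that mechanism and not the exact invocation used later in this guide; adjust the registry and group to your environment:
+
+.Generic Quarkus container image build (illustrative)
+[source,shell,subs="attributes+"]
+----
+# Build the application and create a container image with the configured extension (for example, Jib)
+quarkus build -Dquarkus.container-image.build=true \
+  -Dquarkus.container-image.registry=quay.io \
+  -Dquarkus.container-image.group=<your_org> \
+  -Dquarkus.container-image.push=true
+----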
-The examples in this document assume that you have the Quarkus tooling installed. For more information about the tooling, see xref:getting-started/getting-familiar-with-our-tooling.adoc[Getting familiar with {context} tooling]. +The examples in this document assume that you have the Quarkus tooling installed. For more information about the tooling, see xref:getting-started/getting-familiar-with-our-tooling.adoc[Getting familiar with {product_name} tooling]. [[proc-using-example-application]] == Using an example application @@ -262,7 +262,7 @@ Example response:: == Additional resources -* xref:cloud/deploying-on-minikube.adoc[Deploying your {context} application on Minikube] -* xref:cloud/deploying-on-kubernetes.adoc[Deploying your {context} application on Kubernetes] +* xref:cloud/quarkus/deploying-on-minikube.adoc[Deploying your {product_name} application on Minikube] +* xref:cloud/quarkus/deploying-on-kubernetes.adoc[Deploying your {product_name} application on Kubernetes] -include::../../pages/_common-content/report-issue.adoc[] +include::../../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/build-workflow-images-with-tekton.adoc b/serverlessworkflow/modules/ROOT/pages/cloud/quarkus/build-workflow-images-with-tekton.adoc similarity index 100% rename from serverlessworkflow/modules/ROOT/pages/cloud/build-workflow-images-with-tekton.adoc rename to serverlessworkflow/modules/ROOT/pages/cloud/quarkus/build-workflow-images-with-tekton.adoc diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/deploying-on-kubernetes.adoc b/serverlessworkflow/modules/ROOT/pages/cloud/quarkus/deploying-on-kubernetes.adoc similarity index 91% rename from serverlessworkflow/modules/ROOT/pages/cloud/deploying-on-kubernetes.adoc rename to serverlessworkflow/modules/ROOT/pages/cloud/quarkus/deploying-on-kubernetes.adoc index 8925427ce..52e1922be 100644 --- a/serverlessworkflow/modules/ROOT/pages/cloud/deploying-on-kubernetes.adoc +++ b/serverlessworkflow/modules/ROOT/pages/cloud/quarkus/deploying-on-kubernetes.adoc @@ -20,10 +20,10 @@ :quarkus_k8s_deploy_url: https://github.com/quarkusio/quarkus/issues/26385 -This document describes how to deploy a {context} application using a Kubernetes cluster, along with a procedure to run the Knative platform. +This document describes how to deploy a {product_name} application using a Kubernetes cluster, along with a procedure to run the Knative platform. // shared pre req -include::common/_prerequisites.adoc[subs=quotes+] +include::../common/_prerequisites.adoc[subs=quotes+] Before proceeding further, make sure that you have access to the Kubernetes cluster with Knative available. @@ -55,11 +55,11 @@ If not, follow the installation steps described in the Knative link:{knative_kub [[proc-deploy-sw-application-kubernetes]] == Deploying your workflow application on Kubernetes -Once Knative is ready, you can initiate the process of deploying your {context} application on Kubernetes. +Once Knative is ready, you can initiate the process of deploying your {product_name} application on Kubernetes. // shared app req -include::common/_deploy_workflow_application_requisites.adoc[] +include::../common/_deploy_workflow_application_requisites.adoc[] [IMPORTANT] ==== @@ -72,7 +72,7 @@ information please take a look in this link:{k8s_pull_secret_url}[link]. .Procedure . 
Create `serverless-workflow-greeting-quarkus` namespace using the following command: + -include::common/_create_namespace_and_deploy_info.adoc[] +include::../common/_create_namespace_and_deploy_info.adoc[] In the following procedures, you can find two examples of deploying your workflow application, including: @@ -88,10 +88,10 @@ the Magic DNS for naming resolution, for more details please check the Knative l ==== // deploy with kn-cli -include::common/_proc_deploy_sw_kn_cli.adoc[] +include::../common/_proc_deploy_sw_kn_cli.adoc[] // deploy with kubectl -include::common/_proc_deploy_sw_kubectl.adoc[] +include::../common/_proc_deploy_sw_kubectl.adoc[] [[proc-deploy-sw-application-quarkus-cli]] === Deploying your workflow application using Quarkus CLI @@ -161,6 +161,6 @@ Note that the maven profile activated is named as `container`, which provides th target container image. // verify deployed swf -include::common/_verify_if_swf_is_deployed.adoc[] +include::../common/_verify_if_swf_is_deployed.adoc[] -include::../../pages/_common-content/report-issue.adoc[] \ No newline at end of file +include::../../../pages/_common-content/report-issue.adoc[] \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/deploying-on-minikube.adoc b/serverlessworkflow/modules/ROOT/pages/cloud/quarkus/deploying-on-minikube.adoc similarity index 95% rename from serverlessworkflow/modules/ROOT/pages/cloud/deploying-on-minikube.adoc rename to serverlessworkflow/modules/ROOT/pages/cloud/quarkus/deploying-on-minikube.adoc index 2671a521d..9f557c85e 100644 --- a/serverlessworkflow/modules/ROOT/pages/cloud/deploying-on-minikube.adoc +++ b/serverlessworkflow/modules/ROOT/pages/cloud/quarkus/deploying-on-minikube.adoc @@ -1,4 +1,4 @@ -= Deploying your {context} application on Minikube += Deploying your {product_name} application on Minikube :compat-mode!: // Metadata: :description: Deploying Serverless Application on Minikube @@ -25,7 +25,7 @@ This document describes how to deploy your workflow application using a local Ku For more information about Minikube and related system requirements, see link:{minikube_url}/docs/start/[Getting started with Minikube] documentation. // shared pre req -include::common/_prerequisites.adoc[] +include::../common/_prerequisites.adoc[] To deploy your workflow application on Minikube, you need to install Knative on Minikube. However, first you need to ensure that Minikube is installed correctly. @@ -123,7 +123,7 @@ To follow the manual process of installing Knative on Minikube, see link:{knativ Once you install Knative on Minikube, you can initiate the process of deploying your workflow application on Minikube. // shared app req -include::common/_deploy_workflow_application_requisites.adoc[] +include::../common/_deploy_workflow_application_requisites.adoc[] .Procedure @@ -198,7 +198,7 @@ Status: . 
After starting the Minikube tunnel, create `serverless-workflow-greeting-quarkus` namespace using the following command: + -include::common/_create_namespace_and_deploy_info.adoc[] +include::../common/_create_namespace_and_deploy_info.adoc[] In the following procedures, you can find two examples of deploying your workflow application, including: @@ -208,10 +208,10 @@ In the following procedures, you can find two examples of deploying your workflo // deploy with kn-cli -include::common/_proc_deploy_sw_kn_cli.adoc[] +include::../common/_proc_deploy_sw_kn_cli.adoc[] // deploy with kubectl -include::common/_proc_deploy_sw_kubectl.adoc[] +include::../common/_proc_deploy_sw_kubectl.adoc[] [[proc-deploy-sw-application-quarkus-cli]] === Deploying your workflow application using Quarkus CLI @@ -272,7 +272,7 @@ Note that the maven profile activated is named as `container`, which provides th target container image. // verify deployed swf -include::common/_verify_if_swf_is_deployed.adoc[] +include::../common/_verify_if_swf_is_deployed.adoc[] -include::../../pages/_common-content/report-issue.adoc[] +include::../../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/kubernetes-service-discovery.adoc b/serverlessworkflow/modules/ROOT/pages/cloud/quarkus/kubernetes-service-discovery.adoc similarity index 98% rename from serverlessworkflow/modules/ROOT/pages/cloud/kubernetes-service-discovery.adoc rename to serverlessworkflow/modules/ROOT/pages/cloud/quarkus/kubernetes-service-discovery.adoc index b2fe9f22d..26e42623a 100644 --- a/serverlessworkflow/modules/ROOT/pages/cloud/kubernetes-service-discovery.adoc +++ b/serverlessworkflow/modules/ROOT/pages/cloud/quarkus/kubernetes-service-discovery.adoc @@ -1,4 +1,4 @@ -= Kubernetes service discovery in {context} += Kubernetes service discovery in {product_name} :compat-mode!: // Metadata: :description: Explain what is and how the service discovery works @@ -105,7 +105,7 @@ Based on the resource to be discovered, the Kubernetes service discovery follows image::cloud/sw-discovery-flow.jpg[] [[ref-example-kubernetes-service-discovery]] -== Example of Kubernetes service discovery in {context} +== Example of Kubernetes service discovery in {product_name} The Kubernetes service discovery is performed at the _STATIC_INIT_ time of Quarkus during the workflow application startup. First, the service discovery scans the Quarkus configuration values and searches for the Kubernetes URI pattern. If the URI pattern is found, the engine parses the URI, queries the Kubernetes API searching for the given resource, and overrides the given application property. 
@@ -203,4 +203,4 @@ You can disable the Kubernetes service discovery by removing the `kogito-addons- * xref:service-orchestration/configuring-openapi-services-endpoints.adoc[Configuring the OpenAPI services endpoints] -include::../../pages/_common-content/report-issue.adoc[] +include::../../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/cloud/versioning-workflows-in-knative.adoc b/serverlessworkflow/modules/ROOT/pages/cloud/quarkus/versioning-workflows-in-knative.adoc similarity index 100% rename from serverlessworkflow/modules/ROOT/pages/cloud/versioning-workflows-in-knative.adoc rename to serverlessworkflow/modules/ROOT/pages/cloud/quarkus/versioning-workflows-in-knative.adoc diff --git a/serverlessworkflow/modules/ROOT/pages/core/configuration-properties.adoc b/serverlessworkflow/modules/ROOT/pages/core/configuration-properties.adoc index 826c57889..1c7eebac4 100644 --- a/serverlessworkflow/modules/ROOT/pages/core/configuration-properties.adoc +++ b/serverlessworkflow/modules/ROOT/pages/core/configuration-properties.adoc @@ -1,11 +1,11 @@ -= Configuration properties in {context} += Configuration properties in {product_name} :compat-mode!: // Metadata: :description: Configuration Properties :keywords: kogito, workflow, serverless, configuration, properties -The following table serves as a quick reference for commonly used configuration properties supported in {context}. You can define the following properties in the `src/main/resources/application.properties` file of your project. +The following table serves as a quick reference for commonly used configuration properties supported in {product_name}. You can define the following properties in the `src/main/resources/application.properties` file of your project. .Common configuration properties [cols="20%,30%,20%,15%,15%", options="header"] diff --git a/serverlessworkflow/modules/ROOT/pages/core/custom-functions-support.adoc b/serverlessworkflow/modules/ROOT/pages/core/custom-functions-support.adoc index f15ae1835..4f3b243fa 100644 --- a/serverlessworkflow/modules/ROOT/pages/core/custom-functions-support.adoc +++ b/serverlessworkflow/modules/ROOT/pages/core/custom-functions-support.adoc @@ -1,4 +1,4 @@ -= Custom functions for your {context} service += Custom functions for your {product_name} service :compat-mode!: // Metadata: @@ -201,6 +201,7 @@ public class MyInterfaceOrClass { Avoid using `java` functions to call the external services, instead, you can use the xref:service-orchestration/orchestration-of-openapi-based-services.adoc[services orchestration features]. ==== +[[con-func-camel]] == Camel custom function Kogito supports the link:{camel_url}[Camel Routes] functions within an Apache Maven project, in which you define your workflow service. @@ -229,7 +230,7 @@ The following example shows the declaration of a Camel function: <1> `myCamelEndpoint` is the function name <2> `custom` is the function type -<3> `camel:direct:myendpoint` is the custom operation definition. In this definition, `camel` is the reserved keyword followed by the `direct` endpoint. link:{camel_extensions_url}/direct.html[Camel Direct] is the only supported consumer by {context}. Finally, `myendpoint` is the endpoint URI name found in the route within your project's context. +<3> `camel:direct:myendpoint` is the custom operation definition. In this definition, `camel` is the reserved keyword followed by the `direct` endpoint. 
link:{camel_extensions_url}/direct.html[Camel Direct] is the only supported consumer by {product_name}. Finally, `myendpoint` is the endpoint URI name found in the route within your project's context. === Function arguments @@ -299,7 +300,7 @@ The Camel route is responsible to produce the return value in a way that the wor include::../../pages/_common-content/camel-valid-responses.adoc[] -[#knative-custom-function] +[[con-func-knative]] == Knative custom function {product_name} provides an implementation of a custom function through the `knative-serving` add-on to invoke Knative services. It allows you to have a static URI, defining a Knative service, which is used to perform HTTP requests. The Knative service defined in the URI is queried in the current Knative cluster and translated to a valid URL. @@ -320,7 +321,7 @@ NAME URL custom-function-knative-service http://custom-function-knative-service.default.10.109.169.193.sslip.io custom-function-knative-service-00001 3h16m 3 OK / 3 True ---- -You can declare a {context} custom function using the Knative service name, like the following: +You can declare a {product_name} custom function using the Knative service name, like the following: [source,json] ---- @@ -695,6 +696,6 @@ void init () { == Additional resources * xref:getting-started/cncf-serverless-workflow-specification-support.adoc[CNCF Serverless Workflow specification] -* xref:core/understanding-jq-expressions.adoc[jq expressions in {context}] +* xref:core/understanding-jq-expressions.adoc[jq expressions in {product_name}] include::../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/core/defining-an-input-schema-for-workflows.adoc b/serverlessworkflow/modules/ROOT/pages/core/defining-an-input-schema-for-workflows.adoc index 8a106bd30..2bfe4a4a0 100644 --- a/serverlessworkflow/modules/ROOT/pages/core/defining-an-input-schema-for-workflows.adoc +++ b/serverlessworkflow/modules/ROOT/pages/core/defining-an-input-schema-for-workflows.adoc @@ -1,4 +1,4 @@ -= Input schema definition for {context} += Input schema definition for {product_name} :compat-mode!: // Metadata: :description: Defining input schema for Serverless Workflow diff --git a/serverlessworkflow/modules/ROOT/pages/core/timeouts-support.adoc b/serverlessworkflow/modules/ROOT/pages/core/timeouts-support.adoc index ea3feb9ad..1d56134d1 100644 --- a/serverlessworkflow/modules/ROOT/pages/core/timeouts-support.adoc +++ b/serverlessworkflow/modules/ROOT/pages/core/timeouts-support.adoc @@ -1,7 +1,7 @@ -= Timeouts in {context} += Timeouts in {product_name} :compat-mode!: // Metadata: -:description: Using timeouts in {context} +:description: Using timeouts in {product_name} :keywords: kogito, workflow, serverless, timeout, timer, expiration @@ -41,7 +41,7 @@ Event-based states can use the sub-property `eventTimeout` to configure the maxi === Callback state timeout Callback state can be used when you need to execute an action, in general to call an external service, and wait for an asynchronous response in form of an event, the callback. -Once the response event is consumed, the workflow continues the execution, in general moving to the next state defined in the `transition` property. See more on xref:eventing/working-with-callbacks.adoc[Callback state in {context}]. +Once the response event is consumed, the workflow continues the execution, in general moving to the next state defined in the `transition` property. 
See more on xref:eventing/working-with-callbacks.adoc[Callback state in {product_name}]. Since the callback state halts the execution util the event is consumed, you can define an `eventTimeout` for it, and in case the event does not arrive in the defined duration time, the workflow continues the execution moving to the next state defined in the transition, see the <>. @@ -227,12 +227,12 @@ xref:eventing/consume-produce-events-with-knative-eventing.adoc[Consuming and pr [#timeout-example] == Timeout showcase example -You can check xref:use-cases/timeout-showcase-example.adoc[Timeout example in {context}] +You can check xref:use-cases/timeout-showcase-example.adoc[Timeout example in {product_name}] to see how to use and configure workflows with timeouts. == Additional resources -* xref:eventing/working-with-callbacks.adoc[Callback state in {context}] -* xref:use-cases/timeout-showcase-example.adoc[Timeout example in {context}] +* xref:eventing/working-with-callbacks.adoc[Callback state in {product_name}] +* xref:use-cases/timeout-showcase-example.adoc[Timeout example in {product_name}] include::../../pages/_common-content/report-issue.adoc[] \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/core/understanding-jq-expressions.adoc b/serverlessworkflow/modules/ROOT/pages/core/understanding-jq-expressions.adoc index 539126e2d..aa4d0b8ad 100644 --- a/serverlessworkflow/modules/ROOT/pages/core/understanding-jq-expressions.adoc +++ b/serverlessworkflow/modules/ROOT/pages/core/understanding-jq-expressions.adoc @@ -1,4 +1,4 @@ -= jq expressions in {context} += jq expressions in {product_name} :compat-mode!: // Metadata: :description: JQ expressions in Serverless Workflow @@ -46,7 +46,7 @@ The switch state in the `serverless-workflow-greeting-quarkus` example applicati [NOTE] ==== -The Serverless Workflow specification requires all the expressions to be embedded within `${… }`. However, {context} figures out whether or not a string is an expression. Therefore, you can save characters and skip `${` in the beginning and `}` in the end. In case of portability, you must embed the expressions within `${… }`. +The Serverless Workflow specification requires all the expressions to be embedded within `${… }`. However, {product_name} figures out whether or not a string is an expression. Therefore, you can save characters and skip `${` in the beginning and `}` in the end. In case of portability, you must embed the expressions within `${… }`. ==== [[ref-example-jq-expression-function-arguments]] diff --git a/serverlessworkflow/modules/ROOT/pages/core/understanding-workflow-error-handling.adoc b/serverlessworkflow/modules/ROOT/pages/core/understanding-workflow-error-handling.adoc index 6f3f350c3..f413313c8 100644 --- a/serverlessworkflow/modules/ROOT/pages/core/understanding-workflow-error-handling.adoc +++ b/serverlessworkflow/modules/ROOT/pages/core/understanding-workflow-error-handling.adoc @@ -1,4 +1,4 @@ -= Error handling in {context} += Error handling in {product_name} :compat-mode!: // Metadata: :description: Understanding workflow error handling @@ -8,13 +8,13 @@ :java_regex_url: https://docs.oracle.com/javase/tutorial/essential/regex/index.html :java_regex_pattern_url: https://docs.oracle.com/javase/tutorial/essential/regex/pattern.html -This document describes how you can handle the errors that might occur in {context}. +This document describes how you can handle the errors that might occur in {product_name}. 
The Serverless Workflow specification provides an link:{spec_doc_url}#workflow-error-handling[error handling] mechanism, enabling you to handle the errors that might happen during the interactions between the workflow and external systems. When an error occurs, it changes the regular workflow sequence. In such cases, a workflow state transitions to an alternative state that can potentially handle the error, instead of transitioning to the predefined state. -Note that error definition for a workflow is optional. If error handling is not defined, then the workflow execution is aborted when an error occurs. As a developer, you can consider the error handling in {context} as a `try-catch` or a `goto` construct. +Note that error definition for a workflow is optional. If error handling is not defined, then the workflow execution is aborted when an error occurs. As a developer, you can consider the error handling in {product_name} as a `try-catch` or a `goto` construct. [[con-error-definition]] == Error definition diff --git a/serverlessworkflow/modules/ROOT/pages/core/working-with-parallelism.adoc b/serverlessworkflow/modules/ROOT/pages/core/working-with-parallelism.adoc index 67ecaf8c8..7ff1fd3a6 100644 --- a/serverlessworkflow/modules/ROOT/pages/core/working-with-parallelism.adoc +++ b/serverlessworkflow/modules/ROOT/pages/core/working-with-parallelism.adoc @@ -1,10 +1,10 @@ -= Parallelism in {context} += Parallelism in {product_name} :compat-mode!: // Metadata: :description: Working with parallelism in Serverless Workflow :keywords: kogito, workflow, quarkus, serverless, parallelism -This document describes how you can run parallel tasks in {context}. +This document describes how you can run parallel tasks in {product_name}. The testing procedure described in this document is based on the `serverless-workflow-service-calls-quarkus` example application in link:{kogito_sw_examples_url}/serverless-workflow-service-calls-quarkus[GitHub repository]. diff --git a/serverlessworkflow/modules/ROOT/pages/eventing/consume-produce-events-with-knative-eventing.adoc b/serverlessworkflow/modules/ROOT/pages/eventing/consume-produce-events-with-knative-eventing.adoc index f9357def9..c2edf9810 100644 --- a/serverlessworkflow/modules/ROOT/pages/eventing/consume-produce-events-with-knative-eventing.adoc +++ b/serverlessworkflow/modules/ROOT/pages/eventing/consume-produce-events-with-knative-eventing.adoc @@ -43,7 +43,7 @@ Manually:: [TIP] ==== -If you have used the Knative workflow CLI to create your project, then the Kogito Knative Eventing extension is already present. For more information about creating a project using Knative workflow CLI, see xref:tooling/kn-plugin-workflow-overview.adoc[{context} plug-in for Knative CLI]. +If you have used the Knative workflow CLI to create your project, then the Kogito Knative Eventing extension is already present. For more information about creating a project using Knative workflow CLI, see xref:tooling/kn-plugin-workflow-overview.adoc[{product_name} plug-in for Knative CLI]. ==== The Kogito Knative Eventing add-on takes care of the required dependencies and additional configuration that the workflow application needs, to interact with the Knative Eventing platform. @@ -296,7 +296,7 @@ kn workflow deploy ---- ==== -For more information about building and deploying the workflow application, see xref:cloud/build-workflow-image-with-quarkus-cli.adoc[Building workflow images using Quarkus CLI]. 
+For more information about building and deploying the workflow application, see xref:cloud/quarkus/build-workflow-image-with-quarkus-cli.adoc[Building workflow images using Quarkus CLI]. -- [[ref-example-sw-event-definition-knative]] @@ -377,7 +377,7 @@ For each consumed event definition, the Knative Eventing add-on generates one Kn * xref:testing-and-troubleshooting/mocking-http-cloudevents-with-wiremock.adoc[Mocking HTTP CloudEvents sink using WireMock] * xref:eventing/consume-producing-events-with-kafka.adoc[Consuming and producing events using Apache Kafka] -* xref:eventing/event-correlation-with-workflows.adoc[Event correlation in {context}] -* xref:eventing/working-with-callbacks.adoc[Callback state in {context}] +* xref:eventing/event-correlation-with-workflows.adoc[Event correlation in {product_name}] +* xref:eventing/working-with-callbacks.adoc[Callback state in {product_name}] include::../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/eventing/consume-producing-events-with-kafka.adoc b/serverlessworkflow/modules/ROOT/pages/eventing/consume-producing-events-with-kafka.adoc index 61682ab18..5375ddda4 100644 --- a/serverlessworkflow/modules/ROOT/pages/eventing/consume-producing-events-with-kafka.adoc +++ b/serverlessworkflow/modules/ROOT/pages/eventing/consume-producing-events-with-kafka.adoc @@ -28,14 +28,14 @@ You need to add the Kafka Quarkus Smallrye connector dependency to indicate that ---- -The messaging capabilities are included in the Quarkus Serverless Workflow extension, even though the messaging capabilities are optional. This means you do not need to explicitly add the messaging add-on dependency when using {context}. +The messaging capabilities are included in the Quarkus Serverless Workflow extension, even though the messaging capabilities are optional. This means you do not need to explicitly add the messaging add-on dependency when using {product_name}. [[con-sw-smallrye-channel-configuration]] == Smallrye channels configuration for a workflow You can configure Smallrye channels for a workflow using event definitions. The Smallrye channels are defined using link:{quarkus_config_url}[Quarkus configuration]. The format for Smallrye channel properties is `mp.messaging.[incoming|outgoing]..`. -{context} allows the following channel mapping strategies: +{product_name} allows the following channel mapping strategies: * Define one default incoming channel to receive all the incoming messages and one default outgoing channel to store all the published messages. * Define a channel for each link:{cloud_events_url}[CloudEvent] type so that every message type has a dedicated channel. 
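For illustration, a minimal `application.properties` sketch of the two channel mapping strategies above follows, using the `mp.messaging.[incoming|outgoing].<channel>.<property>` format already described; the channel name `kogito_incoming_stream`, the event type `move`, and the topic names are assumptions for this example rather than values mandated by the add-on. [source,properties] ---- # Strategy 1: a single default incoming channel that receives every consumed event # (the channel name is an assumption for this sketch) mp.messaging.incoming.kogito_incoming_stream.connector=smallrye-kafka mp.messaging.incoming.kogito_incoming_stream.topic=workflow-events mp.messaging.incoming.kogito_incoming_stream.value.deserializer=org.apache.kafka.common.serialization.StringDeserializer # Strategy 2: one dedicated channel per CloudEvent type, assuming a consumed event type named `move` mp.messaging.incoming.move.connector=smallrye-kafka mp.messaging.incoming.move.topic=move mp.messaging.incoming.move.value.deserializer=org.apache.kafka.common.serialization.StringDeserializer ---- Outgoing events follow the same pattern with `mp.messaging.outgoing.<channel>.<property>`.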
@@ -132,8 +132,8 @@ If all your channels use the same strategy and this strategy differs from the `B == Additional resources * xref:eventing/consume-produce-events-with-knative-eventing.adoc[Consuming and producing events on Knative Eventing] -* xref:eventing/event-correlation-with-workflows.adoc[Event correlation in {context}] -* xref:eventing/working-with-callbacks.adoc[Callback state in {context}] +* xref:eventing/event-correlation-with-workflows.adoc[Event correlation in {product_name}] +* xref:eventing/working-with-callbacks.adoc[Callback state in {product_name}] include::../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/eventing/event-correlation-with-workflows.adoc b/serverlessworkflow/modules/ROOT/pages/eventing/event-correlation-with-workflows.adoc index 2aa1c2462..40cf91a0e 100644 --- a/serverlessworkflow/modules/ROOT/pages/eventing/event-correlation-with-workflows.adoc +++ b/serverlessworkflow/modules/ROOT/pages/eventing/event-correlation-with-workflows.adoc @@ -1,4 +1,4 @@ -= Event correlation in {context} += Event correlation in {product_name} :compat-mode!: // Metadata: :description: Event Correlation in Serverless Workflow @@ -15,7 +15,7 @@ You can optionally set the `contextAttributeValue` property, which matches the v The incoming events consumed by the engine must contain the correlation attributes, set in the definition as extension context attributes. The correlation attributes are compliant with the link:{cloud_events_url}[CloudEvent] format, therefore, the attributes are not part of the event payload. ==== -A new workflow instance must be created using an event, which must be declared in the workflow definition file, containing correlation attributes in the event definition section. For more information about events, see xref:eventing/handling-events-on-workflows.adoc[Event state in {context}]. Once the event is consumed, the engine extracts the correlation attributes and associates the attributes with the created workflow instance. +A new workflow instance must be created using an event, which must be declared in the workflow definition file, containing correlation attributes in the event definition section. For more information about events, see xref:eventing/handling-events-on-workflows.adoc[Event state in {product_name}]. Once the event is consumed, the engine extracts the correlation attributes and associates the attributes with the created workflow instance. A start event does not trigger a correlation evaluation, but acts as a moment in which correlation attributes and values are set. The correlation attributes and values are evaluated against other incoming events that might trigger the given instance. Therefore, when a non-start event is consumed and correlation attributes are evaluated, then the engine continues the execution of the matched instances (if any). @@ -124,12 +124,12 @@ When the workflow consumes a new event of <>. This means that once the workflow execution reaches the Callback state, the workflow publishes an event of <> type and waits to receive an event of <> type. For more information about callback state, see xref:eventing/working-with-callbacks.adoc[Callback state in {context}]. +The `serverless-workflow-correlation-quarkus` example application uses Callback states, such as <>. This means that once the workflow execution reaches the Callback state, the workflow publishes an event of <> type and waits to receive an event of <> type. 
For more information about callback state, see xref:eventing/working-with-callbacks.adoc[Callback state in {product_name}]. [[ref-validade-user-email-state]] .Example Callback state definition @@ -203,7 +203,7 @@ Currently, only `kogito-addons-quarkus-persistence-jdbc` persistence add-on supp == Additional resources -* xref:eventing/handling-events-on-workflows.adoc[Event state in {context}] -* xref:eventing/working-with-callbacks.adoc[Callback state in {context}] +* xref:eventing/handling-events-on-workflows.adoc[Event state in {product_name}] +* xref:eventing/working-with-callbacks.adoc[Callback state in {product_name}] include::../../pages/_common-content/report-issue.adoc[] \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/eventing/handling-events-on-workflows.adoc b/serverlessworkflow/modules/ROOT/pages/eventing/handling-events-on-workflows.adoc index f8d2eb9cb..4d796b18f 100644 --- a/serverlessworkflow/modules/ROOT/pages/eventing/handling-events-on-workflows.adoc +++ b/serverlessworkflow/modules/ROOT/pages/eventing/handling-events-on-workflows.adoc @@ -1,4 +1,4 @@ -= Event state in {context} += Event state in {product_name} :compat-mode!: // Metadata: @@ -132,7 +132,7 @@ To start a new workflow instance, set the `start` property to the event state na An event state can also be used to pause an existing workflow instance. When the workflow execution reaches an event state, which is not starting, then the execution is paused until there is an event match for that workflow instance. -Similar to the callback state in a workflow, the workflow instance to be resumed is identified by `kogitoprocrefid` CloudEvent attribute or calculated according to the xref:eventing/event-correlation-with-workflows.adoc[event correlation] functionality. While callback state is used for _fire&wait_ scenaiors, event state covers _wait&fire_ scenarios. For more information about callback state, see xref:eventing/working-with-callbacks.adoc[Callback state in {context}]. +Similar to the callback state in a workflow, the workflow instance to be resumed is identified by `kogitoprocrefid` CloudEvent attribute or calculated according to the xref:eventing/event-correlation-with-workflows.adoc[event correlation] functionality. While callback state is used for _fire&wait_ scenarios, event state covers _wait&fire_ scenarios. For more information about the callback state, see xref:eventing/working-with-callbacks.adoc[Callback state in {product_name}]. == Additional resources diff --git a/serverlessworkflow/modules/ROOT/pages/eventing/working-with-callbacks.adoc b/serverlessworkflow/modules/ROOT/pages/eventing/working-with-callbacks.adoc index 0b1fd9713..f794ce00f 100644 --- a/serverlessworkflow/modules/ROOT/pages/eventing/working-with-callbacks.adoc +++ b/serverlessworkflow/modules/ROOT/pages/eventing/working-with-callbacks.adoc @@ -1,4 +1,4 @@ -= Callback state in {context} += Callback state in {product_name} :compat-mode!: // Metadata: @@ -97,7 +97,7 @@ An link:{kogito_sw_examples_url}/serverless-workflow-callback-quarkus/src/main/j After that, the workflow application consumes the event published by the listener and sets the result field. The consumed CloudEvent contains an attribute named `kogitoprocrefid`, which holds the workflow instance ID of the workflow. -The `kogitoprocrefid` attribute is crucial because when the correlation is not used, then this attribute is the only way for the Callback state to identify that the related CloudEvent needs to be used to resume the workflow. 
For more information about correlation, see xref:eventing/event-correlation-with-workflows.adoc[Event correlation in {context}]. +The `kogitoprocrefid` attribute is crucial because when the correlation is not used, then this attribute is the only way for the Callback state to identify that the related CloudEvent needs to be used to resume the workflow. For more information about correlation, see xref:eventing/event-correlation-with-workflows.adoc[Event correlation in {product_name}]. Note that each workflow is identified by a unique instance ID, which is automatically included in any published CloudEvent, as `kogitoprocinstanceid` CloudEvent extension. @@ -129,7 +129,7 @@ For more information about using Apache Kafka with events, see link:xref:consume == Additional resources * xref:getting-started/create-your-first-workflow-service.adoc[Creating your first workflow service] -* xref:eventing/event-correlation-with-workflows.adoc[Event correlation in {context}] +* xref:eventing/event-correlation-with-workflows.adoc[Event correlation in {product_name}] include::../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/eventing/working-with-openapi-callbacks.adoc b/serverlessworkflow/modules/ROOT/pages/eventing/working-with-openapi-callbacks.adoc index b233adb9c..9c997e9b7 100644 --- a/serverlessworkflow/modules/ROOT/pages/eventing/working-with-openapi-callbacks.adoc +++ b/serverlessworkflow/modules/ROOT/pages/eventing/working-with-openapi-callbacks.adoc @@ -1,4 +1,4 @@ -= OpenAPI Callback in {context} += OpenAPI Callback in {product_name} :compat-mode!: // Metadata: @@ -103,7 +103,7 @@ An link:{kogito_sw_examples_url}/serverless-workflow-callback-events-over-http-q The callback-workflow-service consumes the CloudEvent, it contains an attribute named `kogitoprocrefid`, which holds the instance ID of the workflow. -The `kogitoprocrefid` attribute is crucial because when the correlation is not used, then this attribute is the only way for the Callback state to identify that the related CloudEvent needs to be used to resume the workflow. For more information about correlation, see xref:eventing/event-correlation-with-workflows.adoc[Event correlation in {context}]. +The `kogitoprocrefid` attribute is crucial because when the correlation is not used, then this attribute is the only way for the Callback state to identify that the related CloudEvent needs to be used to resume the workflow. For more information about correlation, see xref:eventing/event-correlation-with-workflows.adoc[Event correlation in {product_name}]. Note that each workflow is identified by a unique instance ID, which is automatically included in any published CloudEvent, as `kogitoprocinstanceid` CloudEvent extension. 
@@ -120,7 +120,7 @@ mp.messaging.incoming.wait.path=/wait == Additional resources * xref:getting-started/create-your-first-workflow-service.adoc[Creating your first workflow service] -* xref:eventing/event-correlation-with-workflows.adoc[Event correlation in {context}] +* xref:eventing/event-correlation-with-workflows.adoc[Event correlation in {product_name}] * link:{open_api_swagger_spec_url}#callbacks[OpenAPI Callback Example] include::../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/getting-started/cncf-serverless-workflow-specification-support.adoc b/serverlessworkflow/modules/ROOT/pages/getting-started/cncf-serverless-workflow-specification-support.adoc index 0902075a9..5457eab0f 100644 --- a/serverlessworkflow/modules/ROOT/pages/getting-started/cncf-serverless-workflow-specification-support.adoc +++ b/serverlessworkflow/modules/ROOT/pages/getting-started/cncf-serverless-workflow-specification-support.adoc @@ -174,7 +174,7 @@ The following table shows the status of the workflow functions that {product_nam | link:{spec_doc_url}#defining-custom-function-types[Defining custom function types] |=== -For additional functions, the Serverless Workflow specification support the `custom` function type, such as `sysout` and `java`. For more information about these custom function types, see xref:core/custom-functions-support.adoc[Custom functions for your {context} service]. +For additional functions, the Serverless Workflow specification support the `custom` function type, such as `sysout` and `java`. For more information about these custom function types, see xref:core/custom-functions-support.adoc[Custom functions for your {product_name} service]. [[events]] == Events @@ -182,7 +182,7 @@ For additional functions, the Serverless Workflow specification support the `cus {product_name} supports events of the workflow model as defined in the link:{spec_doc_url}#Event-Definition[Serverless Workflow specification definition], except the following: * `resultEventRef` property in link:{spec_doc_url}#eventref-definition[`EventRefDefinition`] is not implemented and, if specified, this property is ignored. Same functionality can be achieved by using xref:eventing/working-with-callbacks.adoc[Callback] state. -* link:{spec_doc_url}#correlation-definition[Correlation] has limited support, that evaluates correlation rules matching a single event consumed per time with a workflow instance. The correlation among `N` different events to be consumed and matched with a workflow instance is not supported in {product_name}. For more information about event correlation, see xref:eventing/event-correlation-with-workflows.adoc[Event correlation in {context}]. +* link:{spec_doc_url}#correlation-definition[Correlation] has limited support, that evaluates correlation rules matching a single event consumed per time with a workflow instance. The correlation among `N` different events to be consumed and matched with a workflow instance is not supported in {product_name}. For more information about event correlation, see xref:eventing/event-correlation-with-workflows.adoc[Event correlation in {product_name}]. [[workflow_data]] == Serverless Workflow data @@ -221,14 +221,14 @@ For additional functions, the Serverless Workflow specification support the `cus {product_name} supports the error handling feature as described in the link:{spec_doc_url}#workflow-error-handling[Serverless Workflow specification definition]. 
-For more information about error handling, see xref:core/understanding-workflow-error-handling.adoc[Error handling in {context}]. +For more information about error handling, see xref:core/understanding-workflow-error-handling.adoc[Error handling in {product_name}]. [[retries]] == Retries {product_name} does not support Retries feature, however, it will be implemented in a future release. -Alternatively, you can use xref:core/understanding-workflow-error-handling.adoc[Error handling in {context}]. +Alternatively, you can use xref:core/understanding-workflow-error-handling.adoc[Error handling in {product_name}]. [[timeouts]] == Timeouts @@ -237,14 +237,14 @@ Alternatively, you can use xref:core/understanding-workflow-error-handling.adoc[ For start event state the `exclusive` property is not supported if set to `false`, therefore the timeout is not supported for the event state when starting a workflow. -For more information about timeouts, see xref:core/timeouts-support.adoc[Timeouts on events for {context}]. +For more information about timeouts, see xref:core/timeouts-support.adoc[Timeouts on events for {product_name}]. [[compensation]] == Compensation {product_name} supports workflow compensation as described in the link:{spec_doc_url}#Workflow-Compensation[Serverless Workflow specification definition]. -For more information about compensations, see xref:use-cases/orchestration-based-saga-pattern.adoc[Saga orchestration example in {context}]. +For more information about compensations, see xref:use-cases/orchestration-based-saga-pattern.adoc[Saga orchestration example in {product_name}]. [[constants]] == Constants @@ -267,6 +267,6 @@ Secrets are associated with the link:{quarkus_config_guide_url}[Quarkus Configur == Additional resources * xref:getting-started/create-your-first-workflow-service.adoc[Creating your first workflow service] -* xref:getting-started/getting-familiar-with-our-tooling.adoc[Getting familiar with {context} tooling] +* xref:getting-started/getting-familiar-with-our-tooling.adoc[Getting familiar with {product_name} tooling] include::../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/getting-started/create-your-first-workflow-service.adoc b/serverlessworkflow/modules/ROOT/pages/getting-started/create-your-first-workflow-service.adoc index 713f57293..8bf2502f4 100644 --- a/serverlessworkflow/modules/ROOT/pages/getting-started/create-your-first-workflow-service.adoc +++ b/serverlessworkflow/modules/ROOT/pages/getting-started/create-your-first-workflow-service.adoc @@ -1,6 +1,6 @@ = Creating your first workflow service -As a developer, you can use {context} and create a `Hello World` application, which includes the following procedures: +As a developer, you can use {product_name} and create a `Hello World` application, which includes the following procedures: * <> * <> @@ -24,7 +24,7 @@ image::getting-started/hello-world-workflow.png[] * Visual Studio Code with https://marketplace.visualstudio.com/items?itemName=redhat.java[Red Hat Java Extension] and https://marketplace.visualstudio.com/items?itemName=redhat.vscode-extension-serverless-workflow-editor[Red Hat Serverless Workflow Editor] is installed to edit your workflows. -For more information about the tooling and the required dependencies, see xref:getting-started/getting-familiar-with-our-tooling.adoc[Getting familiar with {context} tooling]. 
+For more information about the tooling and the required dependencies, see xref:getting-started/getting-familiar-with-our-tooling.adoc[Getting familiar with {product_name} tooling]. ifeval::["{kogito_version_redhat}" != ""] include::../../pages/_common-content/downstream-project-setup-instructions.adoc[] @@ -93,7 +93,7 @@ kn workflow create \ --quarkus-version={quarkus_platform_version} ---- -For more information about Knative workflow CLI, see xref:tooling/kn-plugin-workflow-overview.adoc[{context} plug-in for Knative CLI]. +For more information about Knative workflow CLI, see xref:tooling/kn-plugin-workflow-overview.adoc[{product_name} plug-in for Knative CLI]. -- ==== @@ -209,7 +209,7 @@ kn workflow build --image dev.local/serverless-workflow-hello-world --verbose ---- The `--verbose` flag is used to display the output of the build command. This flag is optional. -For more information about Knative workflow CLI, see xref:tooling/kn-plugin-workflow-overview.adoc[{context} plug-in for Knative CLI]. +For more information about Knative workflow CLI, see xref:tooling/kn-plugin-workflow-overview.adoc[{product_name} plug-in for Knative CLI]. -- ==== @@ -259,9 +259,9 @@ mvn clean quarkus:dev quarkus dev ---- -For more information about Knative workflow CLI, see xref:tooling/kn-plugin-workflow-overview.adoc[{context} plug-in for Knative CLI]. +For more information about Knative workflow CLI, see xref:tooling/kn-plugin-workflow-overview.adoc[{product_name} plug-in for Knative CLI]. -Also, to deploy and run your workflow application, see xref:cloud/deploying-on-minikube.adoc[Deploying workflow application on Minikube] +Also, to deploy and run your workflow application, see xref:cloud/quarkus/deploying-on-minikube.adoc[Deploying workflow application on Minikube] -- ==== + @@ -364,7 +364,7 @@ xref:testing-and-troubleshooting/basic-integration-tests-with-restassured.adoc[T == Additional resources -* xref:getting-started/getting-familiar-with-our-tooling.adoc[Getting familiar with {context} tooling] +* xref:getting-started/getting-familiar-with-our-tooling.adoc[Getting familiar with {product_name} tooling] * xref:service-orchestration/orchestration-of-openapi-based-services.adoc[Orchestrating the OpenAPI services] include::../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/getting-started/getting-familiar-with-our-tooling.adoc b/serverlessworkflow/modules/ROOT/pages/getting-started/getting-familiar-with-our-tooling.adoc index c6a64bec9..5b9768b42 100644 --- a/serverlessworkflow/modules/ROOT/pages/getting-started/getting-familiar-with-our-tooling.adoc +++ b/serverlessworkflow/modules/ROOT/pages/getting-started/getting-familiar-with-our-tooling.adoc @@ -1,4 +1,4 @@ -= Getting familiar with {context} tooling += Getting familiar with {product_name} tooling :compat-mode!: // Metadata: @@ -7,12 +7,12 @@ // links :kubesmarts_url: https://start.kubesmarts.org/ -The tooling in {context} provides the best developer experience for the workflow ecosystem. The following tools are provided that you can use to author your workflow assets: +The tooling in {product_name} provides the best developer experience for the workflow ecosystem. The following tools are provided that you can use to author your workflow assets: * xref:tooling/serverless-workflow-editor/swf-editor-vscode-extension.adoc[*VS Code extension*]: Use the Serverless Workflow editor and edit the link:{spec_website_url}[CNCF Serverless Workflow specification] files in Visual Studio Code. 
* xref:tooling/serverless-workflow-editor/swf-editor-chrome-extension.adoc[*Chrome GitHub extension*]: View and edit the CNCF Serverless Workflow specification files in GitHub. * xref:tooling/quarkus-dev-ui-extension/quarkus-dev-ui-overview.adoc[*Kogito Serverless Workflow Tools extension in Quarkus Dev UI*]: View, manage, and start the workflow instances. -* xref:tooling/kn-plugin-workflow-overview.adoc[*{context} plug-in for Knative CLI*]: Set up a local workflow project using the command line. +* xref:tooling/kn-plugin-workflow-overview.adoc[*{product_name} plug-in for Knative CLI*]: Set up a local workflow project using the command line. * link:{kubesmarts_url}[*Serverless Logic online tooling*]: Try and run the Serverless Workflow example applications in a web environment. include::../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/index.adoc b/serverlessworkflow/modules/ROOT/pages/index.adoc index 06867e2cd..af3604cd9 100644 --- a/serverlessworkflow/modules/ROOT/pages/index.adoc +++ b/serverlessworkflow/modules/ROOT/pages/index.adoc @@ -1,10 +1,10 @@ = {page-component-title} -{product_name} {context} is a tool for building cloud-native workflow applications. You can use it to do the services and events orchestration and choreography. Currently, with {product_name} {context} you can integrate with services and events in your architecture using: +{product_name} is a tool for building cloud-native workflow applications. You can use it to do the services and events orchestration and choreography. Currently, with {product_name} you can integrate with services and events in your architecture using: 1. **CloudEvents**. Ideal for an Event-Driven architecture where the services are ready to consume and produce events working in a more reactive way. 2. **Sync or Async REST services invocations via OpenAPI/Async API**. There are options even to directly call a REST service in the architecture or ecosystem. Either async or sync methods are supported depending on your requirements. -3. **Internal Service execution or invocation**. {product_name} {context} is also a workflow framework to build applications. You can use it to create custom services in the same thread to run a lightweight workflow-based application within the same instance. +3. **Internal Service execution or invocation**. {product_name} is also a workflow framework to build applications. You can use it to create custom services in the same thread to run a lightweight workflow-based application within the same instance. You can learn how to create, manage, and deploy your workflow applications with the following guides. 
@@ -30,7 +30,7 @@ Learn about the CNCF Serverless Workflow Specification implementation [.card] -- [.card-title] -xref:getting-started/getting-familiar-with-our-tooling.adoc[Getting familiar with {context} tooling] +xref:getting-started/getting-familiar-with-our-tooling.adoc[Getting familiar with {product_name} tooling] [.card-description] Learn which tools you can use to author your workflow assets -- @@ -41,7 +41,7 @@ Learn which tools you can use to author your workflow assets [.card] -- [.card-title] -xref:core/custom-functions-support.adoc[Custom functions for your {context} service] +xref:core/custom-functions-support.adoc[Custom functions for your {product_name} service] [.card-description] Learn about the custom functions supported by Serverless Workflow -- @@ -49,7 +49,7 @@ Learn about the custom functions supported by Serverless Workflow [.card] -- [.card-title] -xref:core/understanding-jq-expressions.adoc[jq expressions in {context}] +xref:core/understanding-jq-expressions.adoc[jq expressions in {product_name}] [.card-description] Learn how to create jq expressions to manipulate data within a workflow execution -- @@ -57,7 +57,7 @@ Learn how to create jq expressions to manipulate data within a workflow executio [.card] -- [.card-title] -xref:core/understanding-workflow-error-handling.adoc[Error handling in {context}] +xref:core/understanding-workflow-error-handling.adoc[Error handling in {product_name}] [.card-description] Learn how to handle errors in your workflow application -- @@ -65,7 +65,7 @@ Learn how to handle errors in your workflow application [.card] -- [.card-title] -xref:core/working-with-parallelism.adoc[Parallelism in {context}] +xref:core/working-with-parallelism.adoc[Parallelism in {product_name}] [.card-description] Working with parallelism in your workflow service -- @@ -73,7 +73,7 @@ Working with parallelism in your workflow service [.card] -- [.card-title] -xref:core/configuration-properties.adoc[Configuration properties in {context}] +xref:core/configuration-properties.adoc[Configuration properties in {product_name}] [.card-description] Quick reference of configuration properties in workflow -- @@ -81,7 +81,7 @@ Quick reference of configuration properties in workflow [.card] -- [.card-title] -xref:core/defining-an-input-schema-for-workflows.adoc[Input schema definition for {context}] +xref:core/defining-an-input-schema-for-workflows.adoc[Input schema definition for {product_name}] [.card-description] Learn about the input schema definition used to validate the workflow data input against a defined JSON Schema -- @@ -89,7 +89,7 @@ Learn about the input schema definition used to validate the workflow data input [.card] -- [.card-title] -xref:core/timeouts-support.adoc[Timeouts in {context}] +xref:core/timeouts-support.adoc[Timeouts in {product_name}] [.card-description] Learn how to configure timeouts in the workflow -- @@ -116,9 +116,9 @@ Learn how to use the Serverless Workflow extension in Quarkus Dev UI [.card] -- [.card-title] -xref:tooling/kn-plugin-workflow-overview.adoc[{context} plug-in for Knative CLI] +xref:tooling/kn-plugin-workflow-overview.adoc[{product_name} plug-in for Knative CLI] [.card-description] -Learn how to install the {context} plug-in for Knative CLI +Learn how to install the {product_name} plug-in for Knative CLI -- [.card] @@ -169,7 +169,7 @@ Learn about orchestrating gRPC services [.card] -- [.card-title] -xref:eventing/handling-events-on-workflows.adoc[Event state in {context}] 
+xref:eventing/handling-events-on-workflows.adoc[Event state in {product_name}] [.card-description] Learn how to use the Event state in your workflow application -- @@ -177,7 +177,7 @@ Learn how to use the Event state in your workflow application [.card] -- [.card-title] -xref:eventing/working-with-callbacks.adoc[Callback state in {context}] +xref:eventing/working-with-callbacks.adoc[Callback state in {product_name}] [.card-description] Learn how to use the Callback state in your workflow application -- @@ -201,7 +201,7 @@ Learn how to configure your Serverless Workflow application to produce and consu [.card] -- [.card-title] -xref:eventing/event-correlation-with-workflows.adoc[Event correlation in {context}] +xref:eventing/event-correlation-with-workflows.adoc[Event correlation in {product_name}] [.card-description] Learn how to configure event correlation in your workflow application -- @@ -209,7 +209,7 @@ Learn how to configure event correlation in your workflow application [.card] -- [.card-title] -xref:eventing/working-with-openapi-callbacks.adoc[OpenAPI Callback in {context}] +xref:eventing/working-with-openapi-callbacks.adoc[OpenAPI Callback in {product_name}] [.card-description] Learn how to use the OpenAPI Callback in your workflow application -- @@ -220,7 +220,7 @@ Learn how to use the OpenAPI Callback in your workflow application [.card] -- [.card-title] -xref:security/authention-support-for-openapi-services.adoc[Authentication for OpenAPI services in {context}] +xref:security/authention-support-for-openapi-services.adoc[Authentication for OpenAPI services in {product_name}] [.card-description] Learn how to use authentication methods when calling REST services using OpenAPI specification -- @@ -247,7 +247,7 @@ Learn how to add unit tests in your workflow application using RestAssured [.card] -- [.card-title] -xref:testing-and-troubleshooting/integration-tests-with-postgresql.adoc[{context} integration test using PostgreSQL] +xref:testing-and-troubleshooting/integration-tests-with-postgresql.adoc[{product_name} integration test using PostgreSQL] [.card-description] Learn how to integrate tests on workflow applications that use PostgreSQL as a persistence storage -- @@ -293,41 +293,25 @@ Migrating your existing PostgreSQL Database with changes from the Kogito upgrade [.card] -- [.card-title] -xref:cloud/build-workflow-image-with-quarkus-cli.adoc[Building workflow images using Quarkus CLI] +xref:cloud/index.adoc[{product_name} in the Cloud] [.card-description] -Learn how to build images for your workflow applications using Quarkus CLI +Learn about the options to deploy workflow applications in Kubernetes -- [.card] -- [.card-title] -xref:cloud/kubernetes-service-discovery.adoc[Kubernetes service discovery in {context}] +xref:use-cases/orchestration-based-saga-pattern.adoc[Saga orchestration example in {product_name}] [.card-description] -Learn what is and how the Kubernetes service discovery for workflow application configuration works --- - -[.card] --- -[.card-title] -xref:cloud/deploying-on-minikube.adoc[Deploying your {context} application on Minikube] -[.card-description] -Learn how to deploy your workflow application on Minikube for local tests and development --- - -[.card] --- -[.card-title] -xref:cloud/deploying-on-kubernetes.adoc[Deploying your {context} application on Kubernetes] -[.card-description] -Learn how to deploy your workflow application on Kubernetes +Learn how and when to use the SAGA pattern in your workflow projects -- [.card] -- [.card-title] 
-xref:cloud/build-and-deploy-with-serverless-operator-on-kubernetes.adoc[Building and deploying a {context} application on Kubernetes using the {product_name} Serverless Operator] +xref:use-cases/timeout-showcase-example.adoc[Timeout example in {product_name}] [.card-description] -Learn how to build and deploy your workflow application on Kubernetes using the Kogito Serverless Workflow Operator +Learn how and when to use timeout in your workflow projects -- [.card-section] @@ -344,9 +328,9 @@ Learn how to use Camel Routes within your workflow application [.card] -- [.card-title] -xref:integrations/custom-functions-knative.adoc[Invoking Knative services from {context}] +xref:integrations/custom-functions-knative.adoc[Invoking Knative services from {product_name}] [.card-description] -Learn how to invoke Knative services from {context} custom functions +Learn how to invoke Knative services from {product_name} custom functions -- [.card] @@ -382,7 +366,7 @@ Details about Job Service to control timers in {PRODUCT_NAME} [.card] -- [.card-title] -xref:use-cases/orchestration-based-saga-pattern.adoc[Saga orchestration example in {context}] +xref:use-cases/orchestration-based-saga-pattern.adoc[Saga orchestration example in {product_name}] [.card-description] Learn how and when to use the SAGA pattern in your workflow projects -- @@ -390,7 +374,7 @@ Learn how and when to use the SAGA pattern in your workflow projects [.card] -- [.card-title] -xref:use-cases/timeout-showcase-example.adoc[Timeout example in {context}] +xref:use-cases/timeout-showcase-example.adoc[Timeout example in {product_name}] [.card-description] Learn how and when to use timeout in your workflow projects -- \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/integrations/camel-routes-integration.adoc b/serverlessworkflow/modules/ROOT/pages/integrations/camel-routes-integration.adoc index bdfc7da9b..009cc550e 100644 --- a/serverlessworkflow/modules/ROOT/pages/integrations/camel-routes-integration.adoc +++ b/serverlessworkflow/modules/ROOT/pages/integrations/camel-routes-integration.adoc @@ -1,9 +1,9 @@ = Integrating with Camel routes -{context} can integrate with link:{camel_url}[Apache Camel Routes] by adding the Kogito Quarkus Camel Add-on to your project. It enables the workflow engine to identify and call Camel routes declared in YAML or XML in the same workflow project context. +{product_name} can integrate with link:{camel_url}[Apache Camel Routes] by adding the Kogito Quarkus Camel Add-on to your project. It enables the workflow engine to identify and call Camel routes declared in YAML or XML in the same workflow project context. [[proc-enable-quarkus-camel]] -== Enabling Quarkus Camel in {context} +== Enabling Quarkus Camel in {product_name} You can enable Quarkus Camel in your project. @@ -27,7 +27,7 @@ For more information about creating a workflow, see xref:getting-started/create- -- [[con-creating-camel-routes]] -== Creating Camel routes in {context} +== Creating Camel routes in {product_name} You can add YAML or XML Camel routes to your workflow project. @@ -82,7 +82,7 @@ You can define and reference your Camel functions in the workflow definition. } ---- + -The operation description must have the prefix `camel:direct:`, indicating that you want to produce a message to this route via the link:{camel_extensions_url}/direct.html[Camel Direct Component]. Direct is the only component supported by {context} at the moment. 
+The operation description must have the prefix `camel:direct:`, indicating that you want to produce a message to this route via the link:{camel_extensions_url}/direct.html[Camel Direct Component]. Direct is the only component supported by {product_name} at the moment. + The `operation` suffix contains the name of the route endpoint. In the case of this example, `logRouteReplaceHeader`. + @@ -91,7 +91,7 @@ The `operation` suffix contains the name of the route endpoint. In the case of t The Camel route defined in the workflow must be available in your project during runtime, otherwise, an `IllegalArgumentException` will be thrown. ==== + -. To use the Camel function definition in a workflow action, you can simply reference it as you normally would with any other {context} function definitions. For example: +. To use the Camel function definition in a workflow action, you can simply reference it as you normally would with any other {product_name} function definitions. For example: + .Example of a workflow state action referencing [source,json] @@ -134,11 +134,11 @@ Once a message is received back from the Camel route, the data is merged into th == Example project -There is an link:{kogito_sw_examples_url}/serverless-workflow-camel-routes[example project available on GitHub] using this new feature. You can use it as a reference to have a better understanding of the Camel integration with {context}. +There is an link:{kogito_sw_examples_url}/serverless-workflow-camel-routes[example project available on GitHub] using this new feature. You can use it as a reference to have a better understanding of the Camel integration with {product_name}. == Additional resources -* xref:core/custom-functions-support.adoc[Custom functions for your {context} service] -* xref:core/understanding-jq-expressions.adoc[jq expressions in {context}] +* xref:core/custom-functions-support.adoc[Custom functions for your {product_name} service] +* xref:core/understanding-jq-expressions.adoc[jq expressions in {product_name}] include::../../pages/_common-content/report-issue.adoc[] \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/integrations/custom-functions-knative.adoc b/serverlessworkflow/modules/ROOT/pages/integrations/custom-functions-knative.adoc index b86065eb6..bcf9e0025 100644 --- a/serverlessworkflow/modules/ROOT/pages/integrations/custom-functions-knative.adoc +++ b/serverlessworkflow/modules/ROOT/pages/integrations/custom-functions-knative.adoc @@ -1,4 +1,4 @@ -= Invoking Knative services from {context} += Invoking Knative services from {product_name} :compat-mode!: // Metadata: :description: Explain what is and how the Knative service discovery works @@ -7,9 +7,9 @@ :environment_prereq: Minikube is installed :kubectl_prereq: command-line tool is installed. Otherwise, Minikube handles it. -This document describes how to call Knative services using {context} custom functions. The procedure described in this document is based on the link:{kogito_sw_examples_url}/serverless-workflow-custom-function-knative[`serverless-workflow-custom-function-knative`] example application. +This document describes how to call Knative services using {product_name} custom functions. The procedure described in this document is based on the link:{kogito_sw_examples_url}/serverless-workflow-custom-function-knative[`serverless-workflow-custom-function-knative`] example application. 
-For more details about the Knative custom function, see xref:core/custom-functions-support.adoc#knative-custom-function[Custom functions for your {context} service]. +For more details about the Knative custom function, see xref:core/custom-functions-support.adoc#con-func-knative[Custom functions for your {product_name} service]. .Prerequisites @@ -54,7 +54,7 @@ Save the Knative service name (`custom-function-knative-service`) to use it in t -- -. Declare the Knative {context} custom function. In the `functions` section of your workflow, add the following: +. Declare the Knative {product_name} custom function. In the `functions` section of your workflow, add the following: + -- @@ -71,7 +71,7 @@ Save the Knative service name (`custom-function-knative-service`) to use it in t } ---- -<1> The name of the {context} function +<1> The name of the {product_name} function <2> Indicates that this function is a custom one <3> Indicates that your custom function is of type `knative` and it will invoke the `custom-function-knative-service` service. <4> The resource path you want to access @@ -101,7 +101,7 @@ Save the Knative service name (`custom-function-knative-service`) to use it in t -- -. Deploy your workflow service to Knative. For more information on how to deploy a {product_name} {context} project to Knative, see the xref:cloud/deploying-on-kubernetes.adoc[Deploying on Kubernetes]. +. Deploy your workflow service to Knative. For more information on how to deploy a {product_name} project to Knative, see the xref:cloud/quarkus/deploying-on-kubernetes.adoc[Deploying on Kubernetes]. . Submit a request to the workflow service @@ -133,8 +133,8 @@ Knative functions support https://github.com/knative/func/blob/main/docs/functio == Additional resources -* xref:core/custom-functions-support.adoc[Custom functions for your {context} service] -* xref:cloud/deploying-on-minikube.adoc[Deploying your Serverless Workflow application on Minikube] -* xref:cloud/deploying-on-kubernetes.adoc[Deploying your Serverless Workflow application on Kubernetes] +* xref:core/custom-functions-support.adoc[Custom functions for your {product_name} service] +* xref:cloud/quarkus/deploying-on-minikube.adoc[Deploying your Serverless Workflow application on Minikube] +* xref:cloud/quarkus/deploying-on-kubernetes.adoc[Deploying your Serverless Workflow application on Kubernetes] include::../_common-content/report-issue.adoc[] \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/integrations/expose-metrics-to-prometheus.adoc b/serverlessworkflow/modules/ROOT/pages/integrations/expose-metrics-to-prometheus.adoc index c28475bb6..5cd75397b 100644 --- a/serverlessworkflow/modules/ROOT/pages/integrations/expose-metrics-to-prometheus.adoc +++ b/serverlessworkflow/modules/ROOT/pages/integrations/expose-metrics-to-prometheus.adoc @@ -12,12 +12,12 @@ :prometheus_operator_url: https://prometheus-operator.dev/ :prometheus_operator_getting_started_guide: https://prometheus.io/docs/prometheus/latest/getting_started/#configure-prometheus-to-monitor-the-sample-targets -{context} generates metrics that can be consumed by Prometheus and visualized by dashboard tools, such as link:{openshift_micrometer_url}[OpenShift], link:{dashbuilder_url}[Dashbuilder], and link:{grafana_url}[Grafana].
+{product_name} generates metrics that can be consumed by Prometheus and visualized by dashboard tools, such as link:{openshift_micrometer_url}[OpenShift], link:{dashbuilder_url}[Dashbuilder], and link:{grafana_url}[Grafana]. This document describes how you can enable and expose the generated metrics to Prometheus. [[proc-enable-metrics-sw]] -== Enabling metrics in {context} +== Enabling metrics in {product_name} You can enable the metrics in your workflow application. @@ -45,9 +45,9 @@ For more information about creating a workflow, see xref:getting-started/create- The metrics is available at `/q/metrics` endpoint. [[con-consume-metrics-sw]] -== Metrics consumption in {context} +== Metrics consumption in {product_name} -After enabling the metrics in {context}, the generated metrics can be consumed from OpenShift, Kubernetes, and Prometheus to visualize on different dashboard tools. +After enabling the metrics in {product_name}, the generated metrics can be consumed from OpenShift, Kubernetes, and Prometheus to visualize on different dashboard tools. [[proc-consume-metrics-openshift]] === Consuming metrics from OpenShift @@ -55,9 +55,9 @@ After enabling the metrics in {context}, the generated metrics can be consumed f If your workflow server is running on OpenShift, then you can use the server to monitor your workflow application. Also, you can perform the task of consuming metrics from OpenShift. .Prerequisites -* Metrics is enabled in {context}. +* Metrics is enabled in {product_name}. + -For more information, see <>. +For more information, see <>. .Procedure . To consume metrics from OpenShift, enable monitoring for user-defined projects. @@ -120,9 +120,9 @@ For more information about installing Prometheus Operator, see link:{prometheus_ If your workflow server is running on Prometheus, then you can perform the task of consuming metrics from Prometheus and visualize the workflow on different dashboard tools. .Prerequisites -* Metrics is enabled in {context}. +* Metrics is enabled in {product_name}. + -For more information, see <>. +For more information, see <>. .Procedure . Use the following configuration to enable Prometheus to remove metrics directly from the workflow application: @@ -145,9 +145,9 @@ For more information, see </src/main/resources` directory. .Prerequisites -* {context} plug-in for Knative CLI is installed. +* {product_name} plug-in for Knative CLI is installed. + -For more information about installing the plug-in, see <>. +For more information about installing the plug-in, see <>. ifeval::["{kogito_version_redhat}" != ""] * You followed the steps in xref:getting-started/create-your-first-workflow-service.adoc#proc-configuring-maven-rhbq[Configuring your Maven project to Red Hat build of Quarkus and OpenShift Serverless Logic] endif::[] @@ -145,9 +145,9 @@ After creating your workflow project, you can use the `build` command with `kn w The process of building your workflow project produces a `knative.yml` file in the `./target/kubernetes` folder. If your workflow contains events, then the building process also generates a `kogito.yml` file. .Prerequisites -* {context} plug-in for Knative CLI is installed. +* {product_name} plug-in for Knative CLI is installed. + -For more information about installing the plug-in, see <>. +For more information about installing the plug-in, see <>. * A workflow project is created. 
+ @@ -165,7 +165,7 @@ kn workflow build --image dev.local/my-project [NOTE] ==== -By using `dev.local` as repository, you can deploy your {context} project in a local environment without having to push the image to a container registry. +By using `dev.local` as repository, you can deploy your {product_name} project in a local environment without having to push the image to a container registry. ==== To use the `build` command, you need to provide either `--image` or `--image-name` flag. In the previous command, you can use the `[-i|--image]` in several ways, such as: @@ -258,9 +258,9 @@ kn workflow build --image my-project --push You can use the `deploy` command combined with `kn workflow` to deploy your workflow project in your current directory. However, before deploying the project, you must build your workflow project as the build process produces deployment files, such as `knative.yml` and `kogito.yml` (In case of events) in the `./target/kubernetes` folder. .Prerequisites -* {context} plug-in for Knative CLI is installed. +* {product_name} plug-in for Knative CLI is installed. + -For more information about installing the plug-in, see <>. +For more information about installing the plug-in, see <>. * A workflow project is created. + diff --git a/serverlessworkflow/modules/ROOT/pages/tooling/serverless-logic-web-tools/serverless-logic-web-tools-deploy-projects.adoc b/serverlessworkflow/modules/ROOT/pages/tooling/serverless-logic-web-tools/serverless-logic-web-tools-deploy-projects.adoc index b7340532a..88d89d2c3 100644 --- a/serverlessworkflow/modules/ROOT/pages/tooling/serverless-logic-web-tools/serverless-logic-web-tools-deploy-projects.adoc +++ b/serverlessworkflow/modules/ROOT/pages/tooling/serverless-logic-web-tools/serverless-logic-web-tools-deploy-projects.adoc @@ -1,10 +1,10 @@ -= Deploying your {context} projects using {serverless_logic_web_tools_name} += Deploying your {product_name} projects using {serverless_logic_web_tools_name} :compat-mode!: // Metadata: :description: {serverless_logic_web_tools_name} deploying your projects :keywords: kogito, workflow, serverless, editor, web, tools, settings, openshift, deploy, project -You can deploy your {context} projects to an OpenShift instance using the OpenShift integration. The OpenShift integration allows you test your implementations in a live environment. +You can deploy your {product_name} projects to an OpenShift instance using the OpenShift integration. The OpenShift integration allows you test your implementations in a live environment. [NOTE] ==== @@ -12,14 +12,14 @@ The deployments described in this document are for development purposes, but not ==== [[proc-deploy-first-serverless-project-serverless-logic-web-tools]] -== Deploying your first {context} project +== Deploying your first {product_name} project -You can deploy your first {context} project to an OpenShift instance and run the project in a live environment. +You can deploy your first {product_name} project to an OpenShift instance and run the project in a live environment. .Prerequisites * OpenShift integration is configured correctly. + -For more information, see xref:tooling/serverless-logic-web-tools/serverless-logic-web-tools-openshift-integration.adoc[Integrating your {context} project with OpenShift using {serverless_logic_web_tools_name}]. +For more information, see xref:tooling/serverless-logic-web-tools/serverless-logic-web-tools-openshift-integration.adoc[Integrating your {product_name} project with OpenShift using {serverless_logic_web_tools_name}]. 
.Procedure . Create a project using an example application in link:{kogito_sw_examples_url}[GitHub]. @@ -31,7 +31,7 @@ The `serverless-workflow-greeting-quarkus` example application contains a single -- . On the editor page, click *Try on OpenShift* button. -. On the context menu, click *Deploy "greetings" ({context})*. +. On the context menu, click *Deploy "greetings" ({product_name})*. + -- A modal appears, displaying the following deployment options: @@ -58,13 +58,13 @@ image:tooling/serverless-logic-web-tools/serverless-logic-web-tools-openshift-de [[proc-verify-deploy-status-serverless-logic-web-tools]] == Verifying the deployment status of your first project -After the deployment of your {context} project is successful, you can verify various information about the deployed service. +After the deployment of your {product_name} project is successful, you can verify various information about the deployed service. .Prerequisites * OpenShift integration is configured correctly. + -For more information, see xref:tooling/serverless-logic-web-tools/serverless-logic-web-tools-openshift-integration.adoc[Integrating your {context} project with OpenShift using {serverless_logic_web_tools_name}]. -* Your {context} project is deployed successfully. +For more information, see xref:tooling/serverless-logic-web-tools/serverless-logic-web-tools-openshift-integration.adoc[Integrating your {product_name} project with OpenShift using {serverless_logic_web_tools_name}]. +* Your {product_name} project is deployed successfully. * The project must be deployed with the *Deploy as a project* option unchecked, as the deployment page is only available when using the pre-built image container. If the *Deploy as a project* option is checked, the tool opens the `index.html` file your project provides, if any.
.Procedure diff --git a/serverlessworkflow/modules/ROOT/pages/tooling/serverless-logic-web-tools/serverless-logic-web-tools-github-integration.adoc b/serverlessworkflow/modules/ROOT/pages/tooling/serverless-logic-web-tools/serverless-logic-web-tools-github-integration.adoc index 67e714f99..bc33fa159 100644 --- a/serverlessworkflow/modules/ROOT/pages/tooling/serverless-logic-web-tools/serverless-logic-web-tools-github-integration.adoc +++ b/serverlessworkflow/modules/ROOT/pages/tooling/serverless-logic-web-tools/serverless-logic-web-tools-github-integration.adoc @@ -1,4 +1,4 @@ -= Integrating your {context} project in GitHub using {serverless_logic_web_tools_name} += Integrating your {product_name} project in GitHub using {serverless_logic_web_tools_name} :compat-mode!: // Metadata: :description: {serverless_logic_web_tools_name} github integration diff --git a/serverlessworkflow/modules/ROOT/pages/tooling/serverless-logic-web-tools/serverless-logic-web-tools-openshift-integration.adoc b/serverlessworkflow/modules/ROOT/pages/tooling/serverless-logic-web-tools/serverless-logic-web-tools-openshift-integration.adoc index 8cb908b92..47e813b96 100644 --- a/serverlessworkflow/modules/ROOT/pages/tooling/serverless-logic-web-tools/serverless-logic-web-tools-openshift-integration.adoc +++ b/serverlessworkflow/modules/ROOT/pages/tooling/serverless-logic-web-tools/serverless-logic-web-tools-openshift-integration.adoc @@ -1,15 +1,15 @@ -= Integrating your {context} project with OpenShift using {serverless_logic_web_tools_name} += Integrating your {product_name} project with OpenShift using {serverless_logic_web_tools_name} :compat-mode!: // Metadata: :description: {serverless_logic_web_tools_name} openshift integration :keywords: kogito, workflow, serverless, editor, web, tools, settings, openshift, integration -You can integrate your {context} project with Red Hat OpenShift. OpenShift is an enterprise-ready Kubernetes container platform, enabling your {context} projects to be deployed and tested online. +You can integrate your {product_name} project with Red Hat OpenShift. OpenShift is an enterprise-ready Kubernetes container platform, enabling your {product_name} projects to be deployed and tested online. [[proc-setting-kie-sandbox-extended-services-serverless-logic-web-tools]] == Setting KIE Sandbox Extended Services -The KIE Sandbox Extended Services tool is required to proxy requests to an OpenShift instance. Therefore, setting the KIE Sandbox Extended Services enables you to deploy and monitor your {context} projects. +The KIE Sandbox Extended Services tool is required to proxy requests to an OpenShift instance. Therefore, setting the KIE Sandbox Extended Services enables you to deploy and monitor your {product_name} projects. .Procedure . In the {serverless_logic_web_tools_name} web application, click on the *Cog wheel* (⚙️) on the top-right corner and go to the *KIE Sandbox Extended Services* tab. @@ -29,7 +29,7 @@ After executing the KIE Sandbox Extended Services the content in the *KIE Sandbo [[proc-connecting-openshift-instance-serverless-logic-web-tools]] == Connecting to OpenShift instance using {serverless_logic_web_tools_name} -After setting the KIE Sandbox Extended Services, you can connect to your OpenShift instance to deploy your {context} projects with {serverless_logic_web_tools_name}. +After setting the KIE Sandbox Extended Services, you can connect to your OpenShift instance to deploy your {product_name} projects with {serverless_logic_web_tools_name}. 
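If you want to look up the values that the connection form asks for before filling it in, you can usually read them from an active `oc` session. The following snippet is a sketch based on a typical OpenShift login, not a required step from this guide; it assumes the `oc` CLI is already logged in to the target cluster and that the form expects the cluster host and an access token:

[source,shell]
----
# API server URL of the cluster you are currently logged in to (assumed to match the host field)
oc whoami --show-server

# Access token of the currently logged-in user (assumed to match the token field)
oc whoami --show-token
----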
.Prerequisites * KIE Sandbox Extended Services tool installed and running. @@ -63,7 +63,7 @@ image::tooling/serverless-logic-web-tools/serverless-logic-web-tools-openshift-i + If the entered values are correct, then the tab updates and displays *You're connected to OpenShift* message. -After connecting to OpenShift, you are ready to deploy your {context} projects using {serverless_logic_web_tools_name}. For more information about deploying your projects, see xref:tooling/serverless-logic-web-tools/serverless-logic-web-tools-deploy-projects.adoc[Deploying your {context} projects using {serverless_logic_web_tools_name}]. +After connecting to OpenShift, you are ready to deploy your {product_name} projects using {serverless_logic_web_tools_name}. For more information about deploying your projects, see xref:tooling/serverless-logic-web-tools/serverless-logic-web-tools-deploy-projects.adoc[Deploying your {product_name} projects using {serverless_logic_web_tools_name}]. [NOTE] ==== diff --git a/serverlessworkflow/modules/ROOT/pages/tooling/serverless-logic-web-tools/serverless-logic-web-tools-overview.adoc b/serverlessworkflow/modules/ROOT/pages/tooling/serverless-logic-web-tools/serverless-logic-web-tools-overview.adoc index 89803cd46..a97b1590e 100644 --- a/serverlessworkflow/modules/ROOT/pages/tooling/serverless-logic-web-tools/serverless-logic-web-tools-overview.adoc +++ b/serverlessworkflow/modules/ROOT/pages/tooling/serverless-logic-web-tools/serverless-logic-web-tools-overview.adoc @@ -4,18 +4,18 @@ :description: Kogito {serverless_logic_web_tools_name} :keywords: kogito, workflow, serverless, editor, logic, web, tools -The link:{serverless_logic_web_tools_url}[{serverless_logic_web_tools_name}] is a web application that enables you to create and synchronize your {context}, decision files, and dashbuilder files in a single interface. Also, the {serverless_logic_web_tools_name} application provides the integrations that are needed to deploy and test the {context} models in development mode. +The link:{serverless_logic_web_tools_url}[{serverless_logic_web_tools_name}] is a web application that enables you to create and synchronize your {product_name}, decision files, and dashbuilder files in a single interface. Also, the {serverless_logic_web_tools_name} application provides the integrations that are needed to deploy and test the {product_name} models in development mode. .Home page of {serverless_logic_web_tools_name} image::tooling/serverless-logic-web-tools/serverless-logic-web-tools-overview.png[] The {serverless_logic_web_tools_name} provides three different editors for your projects, including -* {context} editor for `.sw.json` or `.sw.yaml|yml` files +* {product_name} editor for `.sw.json` or `.sw.yaml|yml` files * Serverless Decision editor for `.yard.json` or `.yard.yaml|yml` files * Dashbuilder editor for `dash.yaml|yml` files -For an improved experience when previewing your {context}, see xref:tooling/serverless-logic-web-tools/serverless-logic-web-tools-enable-kogito-swf-visualization.adoc[Enabling Kogito Serverless Workflow Visualization in {serverless_logic_web_tools_name}]. +For an improved experience when previewing your {product_name}, see xref:tooling/serverless-logic-web-tools/serverless-logic-web-tools-enable-kogito-swf-visualization.adoc[Enabling Kogito Serverless Workflow Visualization in {serverless_logic_web_tools_name}]. 
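If you want a file to start from, a minimal workflow definition that the {product_name} editor can open might look like the following sketch. The file name `hello.sw.json` and the definition itself are illustrative only and are not taken from these guides:

[source,json]
----
{
  "id": "hello",
  "version": "1.0",
  "specVersion": "0.8",
  "name": "Hello workflow",
  "description": "Illustrative hello-world definition, not part of the product examples",
  "start": "SayHello",
  "states": [
    {
      "name": "SayHello",
      "type": "inject",
      "data": {
        "message": "Hello World"
      },
      "end": true
    }
  ]
}
----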
[[proc-create-workflow-model-web-tools]] == Creating a workflow model in {serverless_logic_web_tools_name} @@ -36,14 +36,14 @@ A new workspace is created, containing a single file named as _Untitled_ of the . Edit your workflow file, which updates the preview on the right-side of the editor. + -- -The workflow files in {serverless_logic_web_tools_name} are saved automatically after each change. The files are persisted in the browser, but you can sychronize the file using GitHub integration. For more information about GitHub integration, see xref:tooling/serverless-logic-web-tools/serverless-logic-web-tools-github-integration.adoc[Integrating your {context} project in GitHub using {serverless_logic_web_tools_name}]. +The workflow files in {serverless_logic_web_tools_name} are saved automatically after each change. The files are persisted in the browser, but you can synchronize the file using GitHub integration. For more information about GitHub integration, see xref:tooling/serverless-logic-web-tools/serverless-logic-web-tools-github-integration.adoc[Integrating your {product_name} project in GitHub using {serverless_logic_web_tools_name}]. -- == Additional resources -* xref:tooling/serverless-logic-web-tools/serverless-logic-web-tools-openshift-integration.adoc[Integrating your {context} project with OpenShift using {serverless_logic_web_tools_name}] +* xref:tooling/serverless-logic-web-tools/serverless-logic-web-tools-openshift-integration.adoc[Integrating your {product_name} project with OpenShift using {serverless_logic_web_tools_name}] * xref:tooling/serverless-logic-web-tools/serverless-logic-web-tools-redhat-application-services-integration.adoc[Integrating with Red Hat OpenShift Application and Data Services] -* xref:tooling/serverless-logic-web-tools/serverless-logic-web-tools-deploy-projects.adoc[Deploying your {context} projects using {serverless_logic_web_tools_name}] +* xref:tooling/serverless-logic-web-tools/serverless-logic-web-tools-deploy-projects.adoc[Deploying your {product_name} projects using {serverless_logic_web_tools_name}] include::../../../pages/_common-content/report-issue.adoc[] diff --git a/serverlessworkflow/modules/ROOT/pages/tooling/serverless-logic-web-tools/serverless-logic-web-tools-redhat-application-services-integration.adoc b/serverlessworkflow/modules/ROOT/pages/tooling/serverless-logic-web-tools/serverless-logic-web-tools-redhat-application-services-integration.adoc index 3c60a0c93..a28168faa 100644 --- a/serverlessworkflow/modules/ROOT/pages/tooling/serverless-logic-web-tools/serverless-logic-web-tools-redhat-application-services-integration.adoc +++ b/serverlessworkflow/modules/ROOT/pages/tooling/serverless-logic-web-tools/serverless-logic-web-tools-redhat-application-services-integration.adoc @@ -4,7 +4,7 @@ :description: {serverless_logic_web_tools_name} Red Hat OpenShift Application and Data Services integration :keywords: kogito, workflow, serverless, editor, web, tools, settings, red hat, application, data, services, integration -Some of the features in {serverless_logic_web_tools_name} require integration with Red Hat OpenShift Application and Data Services. Consider uploading OpenAPI specifications to a service registry and deploying {context} that requires Apache Kafka as examples of integration with Red Hat OpenShift Application and Data Services. +Some of the features in {serverless_logic_web_tools_name} require integration with Red Hat OpenShift Application and Data Services. 
Consider uploading OpenAPI specifications to a service registry and deploying {product_name} that requires Apache Kafka as examples of integration with Red Hat OpenShift Application and Data Services. This document describes how you can configure the required settings to complete the integration with Red Hat OpenShift Application and Data Services. diff --git a/serverlessworkflow/modules/ROOT/pages/use-cases/orchestration-based-saga-pattern.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/orchestration-based-saga-pattern.adoc index 233bf71a9..1a51d6ab2 100644 --- a/serverlessworkflow/modules/ROOT/pages/use-cases/orchestration-based-saga-pattern.adoc +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/orchestration-based-saga-pattern.adoc @@ -1,4 +1,4 @@ -= Saga orchestration example in {context} += Saga orchestration example in {product_name} :compat-mode!: // Metadata: @@ -19,7 +19,7 @@ The Saga pattern manages the transactions using a sequence of steps. If a failur To understand the implementation of Saga pattern in a workflow, you can use the link:{kogito_sw_examples_url}/serverless-workflow-saga-quarkus[`serverless-workflow-saga-quarkus`] example application in GitHub repository. -The `serverless-workflow-saga-quarkus` example application is based on the order fulfillment process and describes how to define Saga pattern using {context}. In the order fulfillment example, a user buys an item from an e-commerce application. The user adds the delivery information and payment details, and waits for the item to be delivered. The following figure shows the sequence of steps that are executed to complete an order: +The `serverless-workflow-saga-quarkus` example application is based on the order fulfillment process and describes how to define the Saga pattern using {product_name}. In the order fulfillment example, a user buys an item from an e-commerce application. The user adds the delivery information and payment details, and waits for the item to be delivered. The following figure shows the sequence of steps that are executed to complete an order: .Example of order fulfillment process image::use-cases/orchestration-based-saga-pattern/order-fulfillment-example.png[Example of order fulfillment process] @@ -98,7 +98,7 @@ Compensation actions:: -- When designing a Saga pattern, compensation actions for each step are considered as a core functionality, which is executed by a participant. -In {context} each workflow state must define a compensation action using `compensatedBy` attribute, indicating another workflow state that performs the compensation action. For example, in `serverless-workflow-saga-quarkus`, `processPayment` state defines `CancelPayment` as a compensation action in the <>. +In {product_name}, each workflow state must define a compensation action using the `compensatedBy` attribute, indicating another workflow state that performs the compensation action. For example, in `serverless-workflow-saga-quarkus`, `processPayment` state defines `CancelPayment` as a compensation action in the <>. .Example of defining a compensation action [source,json] ---- @@ -108,7 +108,7 @@ In {context} each workflow state must define a compen Errors:: + -- -In {context} errors are identified by a name and can be associated with a workflow state. For example, a `process payment failed` error is associated with the `processPayment` state. +In {product_name}, errors are identified by a name and can be associated with a workflow state.
For example, a `process payment failed` error is associated with the `processPayment` state. Following is an example of error declaration in the workflow definition: @@ -149,7 +149,7 @@ An error definition uses the fully qualified class name (FQCN) for Java exceptio } ---- -The function that are throwing errors can be any type of functions, such as REST, OpenAPI, or gRPC. For information about error handling, see xref:core/understanding-workflow-error-handling.adoc[Error handling in {context}]. +The functions that throw errors can be of any type, such as REST, OpenAPI, or gRPC. For information about error handling, see xref:core/understanding-workflow-error-handling.adoc[Error handling in {product_name}]. -- The workflow engine controls the execution of the flow and keeps track of the steps that need to be compensated. Also, the engine ensures that compensated states are executed in reverse order of each completed step. @@ -282,6 +282,6 @@ When executing the application, you can also verify the log with information rel == Additional resources -* xref:core/understanding-workflow-error-handling.adoc[Error handling in {context}] +* xref:core/understanding-workflow-error-handling.adoc[Error handling in {product_name}] include::../../pages/_common-content/report-issue.adoc[] \ No newline at end of file diff --git a/serverlessworkflow/modules/ROOT/pages/use-cases/timeout-showcase-example.adoc b/serverlessworkflow/modules/ROOT/pages/use-cases/timeout-showcase-example.adoc index 8fe95c3a1..067b89ca2 100644 --- a/serverlessworkflow/modules/ROOT/pages/use-cases/timeout-showcase-example.adoc +++ b/serverlessworkflow/modules/ROOT/pages/use-cases/timeout-showcase-example.adoc @@ -1,5 +1,5 @@ [#timeout-example] -= Timeout example in {context} += Timeout example in {product_name} :compat-mode!: // Metadata: :description: Timeout example use case example in Serverless Workflow @@ -238,6 +238,6 @@ As you can see there are no active workflow instances, indicating the timeout wa == Additional resources -* xref:core/timeouts-support.adoc[Timeout support in {context}] +* xref:core/timeouts-support.adoc[Timeout support in {product_name}] include::../../pages/_common-content/report-issue.adoc[] \ No newline at end of file
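For quick reference while reading the timeout example above, a workflow-level timeout in the Serverless Workflow specification is typically declared with the top-level `timeouts` property, as in the following sketch. The identifiers and durations here are illustrative only and are not taken from the timeout showcase application:

[source,json]
----
{
  "id": "timeout_sketch",
  "version": "1.0",
  "specVersion": "0.8",
  "name": "Timeout sketch",
  "description": "Illustrative only, not taken from the timeout showcase example",
  "start": "LongWait",
  "timeouts": {
    "workflowExecTimeout": {
      "duration": "PT30S"
    }
  },
  "states": [
    {
      "name": "LongWait",
      "type": "sleep",
      "duration": "PT5M",
      "end": true
    }
  ]
}
----

In this sketch the workflow is allowed to run for 30 seconds, while its single state sleeps for 5 minutes, so the instance is ended by the workflow-level timeout rather than by the state completing.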