diff --git a/antora.yml b/antora.yml index 0c7de21d..ec2c0f17 100644 --- a/antora.yml +++ b/antora.yml @@ -19,11 +19,12 @@ asciidoc: # Serverless Logic - Names, labels and similar # product_name: OpenShift Serverless Logic - kogito_version_redhat: 9.100.0.redhat-00004 - kogito_branch: 9.100.x-prod + kogito_branch: 9.102.x-prod operator_name: Serverless Logic Operator operator_installation_namespace: openshift-serverless-logic + sonataflow_vscode_extension_name: KIE Serverless Workflow Editor operator_controller_config: sonataflow-operator-controllers-config + operator_controller_manager_deployment_name: logic-operator-rhel8-controller-manager quarkus_platform: com.redhat.quarkus.platform kogito_sw_ga: org.apache.kie.sonataflow:sonataflow-quarkus data_index_ref: Data Index @@ -33,10 +34,12 @@ asciidoc: operator_openshift_catalog: logic-rhel8-operator operator_k8s_keyword: sonataflow operator_k8s_subscription: my-sonataflow-operator + container_image_registry_base: registry.redhat.io/openshift-serverless-1 + sonataflow_operator_imagename: registry.redhat.io/openshift-serverless-1/logic-rhel8-operator osl_kn_cli_imagename: registry.redhat.io/openshift-serverless-1/kn-workflow-cli-artifacts-rhel8 - kogito_devservices_imagename: registry.redhat.io/openshift-serverless-1/logic-data-index-ephemeral-rhel8 sonataflow_devmode_imagename: registry.redhat.io/openshift-serverless-1/logic-swf-devmode-rhel8 sonataflow_builder_imagename: registry.redhat.io/openshift-serverless-1/logic-swf-builder-rhel8 + sonataflow_builder_configmap_name: logic-operator-rhel8-builder-config sonataflow_dataindex_ephemeral_imagename: registry.redhat.io/openshift-serverless-1/logic-data-index-ephemeral-rhel8 sonataflow_dataindex_postgresql_imagename: registry.redhat.io/openshift-serverless-1/logic-data-index-postgresql-rhel8 sonataflow_devmode_devui_url: /q/dev-ui/org.apache.kie.sonataflow.sonataflow-quarkus-devui/ @@ -51,30 +54,34 @@ asciidoc: jobs_service_image_postgresql_name: 
logic-jobs-service-postgresql-rhel8 jobs_service_image_postgresql: registry.redhat.io/openshift-serverless-1/logic-jobs-service-postgresql-rhel8 jobs_service_image_postgresql_url: https://catalog.redhat.com/software/containers/openshift-serverless-1/logic-jobs-service-postgresql-rhel8/6614eddbaeb155f6aae45385 - jobs_service_image_usage_url: https://github.com/kiegroup/kogito-images/tree/9.100.x-prod#jobs-services-all-in-one # # Versions # - quarkus_version: 3.8.4.redhat-00002 - quarkus_platform_version: 3.8.4.redhat-00002 + quarkus_version: 3.8.6.redhat-00004 + quarkus_platform_version: 3.8.6.SP2-redhat-00002 java_min_version: 17+ maven_min_version: 3.9.3 graalvm_min_version: 22.3.0 spec_version: 0.8 vscode_version: 1.84.0 - kn_cli_version: 1.33.0 + kn_cli_version: 1.35.0 docker_min_version: 20.10.7 docker_compose_min_version: 1.27.2 kubernetes_version: 1.26 openshift_version_min: 4.12 - openshift_version_max: 4.15 - knative_version: 1.13 - knative_serving_version: 1.13 - knative_eventing_version: 1.13 - kogito_version: 9.100.0.redhat-00004 + openshift_version_max: 4.17 + knative_version: 1.15 + knative_serving_version: 1.15 + knative_eventing_version: 1.15 + apache_kie_latest_version: 10.0.0 + kogito_version: 9.102.0.redhat-00005 + kogito_version_redhat: 9.102.0.redhat-00005 + product_version_short: 1.35 + product_version_long: 1.35.0 + product_version_previous_long: 1.34.0 # only used in downstream - operator_version: 1.33.0 + operator_version: 1.35.0 # Persistence extensions for the kogito-swf-builder groupId_quarkus-agroal: io.quarkus @@ -89,14 +96,17 @@ asciidoc: # # URLs # - kogito_examples_repository_url: https://github.com/kiegroup/kogito-examples/tree/9.100.x-prod - kogito_sw_examples_url: https://github.com/kiegroup/kogito-examples/tree/9.100.x-prod/serverless-workflow-examples - kogito_sw_operator_examples_url: https://github.com/kiegroup/kogito-examples/tree/9.100.x-prod/serverless-operator-examples + images_distributions_url: 
https://catalog.redhat.com/ + apple_support_url: https://support.apple.com/guide/mac-help/open-a-mac-app-from-an-unknown-developer-mh40616/mac + kogito_examples_repository_url: https://github.com/kiegroup/kogito-examples/tree/{kogito_branch} + kogito_operator_repository_rawcontent_url: https://raw.githubusercontent.com/kiegroup/kogito-serverless-operator/ + kogito_sw_examples_url: https://github.com/kiegroup/kogito-examples/tree/{kogito_branch}/serverless-workflow-examples + kogito_sw_operator_examples_url: https://github.com/kiegroup/kogito-examples/tree/{kogito_branch}/serverless-operator-examples kogito_examples_url: https://github.com/kiegroup/kogito-examples.git - kogito_apps_url: https://github.com/kiegroup/kogito-apps/tree/9.100.x-prod - kogito_runtimes_url: https://github.com/kiegroup/kogito-runtimes/tree/9.100.x-prod - kogito_runtimes_swf_url: https://github.com/kiegroup/kogito-runtimes/tree/9.100.x-prod/kogito-serverless-workflow/ - kogito_runtimes_swf_test_url: https://github.com/kiegroup/kogito-runtimes/tree/9.100.x-prod/kogito-serverless-workflow/kogito-serverless-workflow-executor-tests/src/test/java/org/kie/kogito/serverless/workflow/executor + kogito_apps_url: https://github.com/kiegroup/kogito-apps/tree/{kogito_branch} + kogito_runtimes_url: https://github.com/kiegroup/kogito-runtimes/tree/{kogito_branch} + kogito_runtimes_swf_url: https://github.com/kiegroup/kogito-runtimes/tree/{kogito_branch}/kogito-serverless-workflow/ + kogito_runtimes_swf_test_url: https://github.com/kiegroup/kogito-runtimes/tree/{kogito_branch}/kogito-serverless-workflow/kogito-serverless-workflow-executor-tests/src/test/java/org/kie/kogito/serverless/workflow/executor quarkus_cli_url: https://quarkus.io/guides/cli-tooling spec_website_url: https://serverlessworkflow.io/ spec_doc_url: https://github.com/serverlessworkflow/specification/blob/0.8.x/specification.md @@ -106,7 +116,7 @@ asciidoc: open_api_spec_url: https://spec.openapis.org/oas/v3.1.0.html 
open_api_swagger_spec_url: https://swagger.io/docs/specification quarkus_openapi_gen_url: https://github.com/quarkiverse/quarkus-openapi-generator - kn_workflow_plugin_releases_url: https://developers.redhat.com/content-gateway/rest/mirror/pub/cgw/serverless-logic/1.33.0/ + kn_workflow_plugin_releases_url: https://developers.redhat.com/content-gateway/rest/mirror/pub/cgw/serverless-logic/{product_version_long}/ quarkus_guides_base_url: https://quarkus.io/guides quarkus_guides_kafka_url: https://quarkus.io/guides/kafka quarkus_guides_building_native: https://quarkus.io/guides/building-native-image @@ -130,15 +140,15 @@ asciidoc: podman_install_url: https://docs.podman.io/en/latest/ kubectl_install_url: https://kubernetes.io/docs/tasks/tools/install-kubectl docker_compose_install_url: https://docs.docker.com/compose/install/ - kn_cli_install_url: https://docs.openshift.com/serverless/1.33/install/installing-kn.html - knative_eventing_url: https://docs.openshift.com/serverless/1.33/eventing/knative-eventing.html - knative_eventing_broker_url: https://docs.openshift.com/serverless/1.33/eventing/brokers/serverless-brokers.html - knative_eventing_kafka_broker_url: https://docs.openshift.com/serverless/1.33/eventing/brokers/serverless-broker-types.html - knative_eventing_trigger_url: https://docs.openshift.com/serverless/1.33/eventing/triggers/serverless-triggers.html - knative_eventing_sink_binding_url: https://docs.openshift.com/serverless/1.33/eventing/event-sinks/serverless-event-sinks.html - knative_quickstart_url: https://docs.openshift.com/serverless/1.33/install/installing-kn.html - knative_serving_install_yaml_url: https://docs.openshift.com/serverless/1.33/install/installing-knative-serving.html - knative_eventing_install_yaml_url: https://docs.openshift.com/serverless/1.33/install/installing-knative-eventing.html + kn_cli_install_url: https://docs.openshift.com/serverless/{product_version_short}/install/installing-kn.html + knative_eventing_url: 
https://docs.openshift.com/serverless/{product_version_short}/eventing/knative-eventing.html + knative_eventing_broker_url: https://docs.openshift.com/serverless/{product_version_short}/eventing/brokers/serverless-brokers.html + knative_eventing_kafka_broker_url: https://docs.openshift.com/serverless/{product_version_short}/eventing/brokers/serverless-broker-types.html + knative_eventing_trigger_url: https://docs.openshift.com/serverless/{product_version_short}/eventing/triggers/serverless-triggers.html + knative_eventing_sink_binding_url: https://docs.openshift.com/serverless/{product_version_short}/eventing/event-sinks/serverless-event-sinks.html + knative_quickstart_url: https://docs.openshift.com/serverless/{product_version_short}/install/installing-kn.html + knative_serving_install_yaml_url: https://docs.openshift.com/serverless/{product_version_short}/install/installing-knative-serving.html + knative_eventing_install_yaml_url: https://docs.openshift.com/serverless/{product_version_short}/install/installing-knative-eventing.html kafka_doc_url: https://kafka.apache.org/documentation/ node_install_url: https://nodejs.org/en/download/package-manager/ pnpm_install_url: https://pnpm.io/installation @@ -187,9 +197,8 @@ asciidoc: job_service_xref: xref:job-services/core-concepts.adoc # Tag to use with community-only images - operator_community_prod_yaml: https://raw.githubusercontent.com/kiegroup/kogito-serverless-operator/9.100.x-prod/operator.yaml - operator_community_prod_root: https://raw.githubusercontent.com/kiegroup/kogito-serverless-operator/9.100.x-prod - sonataflow_operator_imagename: registry.redhat.io/openshift-serverless-1/logic-rhel8-operator + operator_community_prod_yaml: https://raw.githubusercontent.com/kiegroup/kogito-serverless-operator/{kogito_branch}/operator.yaml + operator_community_prod_root: https://raw.githubusercontent.com/kiegroup/kogito-serverless-operator/{kogito_branch} java_install: 
'https://www.java.com/en/download/help/download_options.html' maven_install: 'https://maven.apache.org/install.html' docker_install: 'https://docs.docker.com/engine/install/' diff --git a/modules/serverless-logic/pages/cloud/common/platform-scoped-eventing-system-configuration-example.adoc b/modules/serverless-logic/pages/cloud/common/platform-scoped-eventing-system-configuration-example.adoc new file mode 100644 index 00000000..c8a3440b --- /dev/null +++ b/modules/serverless-logic/pages/cloud/common/platform-scoped-eventing-system-configuration-example.adoc @@ -0,0 +1,24 @@ +[source,yaml] +---- +apiVersion: sonataflow.org/v1alpha08 +kind: SonataFlowPlatform +metadata: + name: sonataflow-platform-example + namespace: example-namespace +spec: + eventing: + broker: + ref: + name: example-broker <1> + namespace: example-broker-namespace <2> + apiVersion: eventing.knative.dev/v1 + kind: Broker +---- + +<1> Name of the Knative Eventing Broker. +<2> Optional: Defines the namespace of the Knative Eventing Broker. Defaults to the SonataFlowPlatform namespace. We recommend creating the Knative Eventing Broker in the same namespace as the SonataFlowPlatform. + +[NOTE] +==== +In production environments, you must use a production-ready broker, like the link:{knative_eventing_kafka_broker_url}[Knative Kafka Broker]. 
+==== diff --git a/modules/serverless-logic/pages/cloud/index.adoc b/modules/serverless-logic/pages/cloud/index.adoc index e8cf3bbd..5a5509f3 100644 --- a/modules/serverless-logic/pages/cloud/index.adoc +++ b/modules/serverless-logic/pages/cloud/index.adoc @@ -128,6 +128,14 @@ xref:cloud/operator/using-persistence.adoc[] Learn how to define the workflow `Persistence` field to allow the workflow to store its context -- +[.card] +-- +[.card-title] +xref:cloud/operator/enabling-jobs-service.adoc[] +[.card-description] +Learn how to enable the Jobs Service with Operator +-- + [.card] -- [.card-title] diff --git a/modules/serverless-logic/pages/cloud/operator/add-custom-ca-to-a-workflow-pod.adoc b/modules/serverless-logic/pages/cloud/operator/add-custom-ca-to-a-workflow-pod.adoc index c9e1d308..d9430310 100644 --- a/modules/serverless-logic/pages/cloud/operator/add-custom-ca-to-a-workflow-pod.adoc +++ b/modules/serverless-logic/pages/cloud/operator/add-custom-ca-to-a-workflow-pod.adoc @@ -136,6 +136,16 @@ spec: Similar to a deployment spec, a serverless workflow has a spec.podTemplate, with minor differences, but the change is almost identical. In this case, we are mounting some ingress ca-bundle because we want our workflow to reach the `.apps.my-cluster-name.my-cluster-domain` SSL endpoint. + +In this example, we pull the ingress CA of OpenShift's ingress deployment because this is the CA that signs the target routes' certificates. It can be any CA that is signing the target service certificate. 
+Here's how to copy the ingress ca cert to the desired namespace: + +[source,shell] +---- +kubectl config set-context --current --namespace=my-namespace +kubectl get cm -n openshift-config-managed default-ingress-cert -o yaml | awk '!/namespace:.*$/' | sed 's/default-ingress-cert/ingress-ca/' | kubectl create -f - +---- + Here is the relevant spec section of a workflow with the changes: [source,yaml] diff --git a/modules/serverless-logic/pages/cloud/operator/build-and-deploy-workflows.adoc b/modules/serverless-logic/pages/cloud/operator/build-and-deploy-workflows.adoc index c850c73e..f9b85424 100644 --- a/modules/serverless-logic/pages/cloud/operator/build-and-deploy-workflows.adoc +++ b/modules/serverless-logic/pages/cloud/operator/build-and-deploy-workflows.adoc @@ -57,7 +57,7 @@ kubectl patch sonataflowplatform --patch 'spec:\n build:\n config: [#customize-base-build] === Customize the base build Dockerfile -The operator uses the `ConfigMap` named `logic-operator-builder-config` in the operator's installation namespace ({operator_installation_namespace}) to configure and run the workflow build process. +The operator uses the `ConfigMap` named `{sonataflow_builder_configmap_name}` in the operator's installation namespace ({operator_installation_namespace}) to configure and run the workflow build process. You can change the `Dockerfile` entry in this `ConfigMap` to tailor the Dockerfile to your needs. Just be aware that this can break the build process. 
.Example of the sonataflow-operator-builder-config `ConfigMap` @@ -83,8 +83,8 @@ data: -Djava.util.logging.manager=org.jboss.logmanager.LogManager\"\nENV JAVA_APP_JAR=\"/deployments/quarkus-run.jar\"\n" kind: ConfigMap metadata: - name: sonataflow-operator-builder-config - namespace: sonataflow-operator-system + name: {sonataflow_builder_configmap_name} + namespace: {operator_installation_namespace} ---- [WARNING] @@ -355,7 +355,7 @@ spec: strategyOptions: KanikoBuildCacheEnabled: "true" registry: - address: registry.redhat.io/openshift-serverless-1 <1> + address: {container_image_registry_base} <1> secret: regcred <2> ---- @@ -429,7 +429,7 @@ If you are running on OpenShift, you have access to the Red Hat's supported regi [source,bash,subs="attributes+"] ---- -kubectl edit cm/sonataflow-operator-builder-config -n {operator_installation_namespace} +kubectl edit cm/{sonataflow_builder_configmap_name} -n {operator_installation_namespace} ---- In your editor, change the first line in the `Dockerfile` entry where it reads `FROM {sonataflow_builder_imagename}:{operator_version}` to the desired image. diff --git a/modules/serverless-logic/pages/cloud/operator/configuring-workflow-eventing-system.adoc b/modules/serverless-logic/pages/cloud/operator/configuring-workflow-eventing-system.adoc new file mode 100644 index 00000000..d34a6b4c --- /dev/null +++ b/modules/serverless-logic/pages/cloud/operator/configuring-workflow-eventing-system.adoc @@ -0,0 +1,315 @@ += Configuring the Workflow Eventing system +:compat-mode!: +// Metadata: +:description: Workflows eventing system configuration +:keywords: kogito, sonataflow, workflow, serverless, operator, kubernetes, knative, knative-eventing, events + +This document describes how to configure the eventing system for a {product_name} workflow. + +In general, the following events are produced in a {product_name} installation: + +* Workflow outgoing and incoming business events. 
+ +* SonataFlow system events sent from the workflow to the Data Index and Job Service respectively. + +* SonataFlow system events sent from the Jobs Service to the Data Index Service. + +The {operator_name} is designed to use the link:{knative_eventing_url}[Knative Eventing] system to resolve all the event communication between these services. + +[NOTE] +==== +In a regular SonataFlow installation, the preferred method is to use the <<platform-scoped-eventing-system-configuration, platform-scoped eventing system configuration>>, while the <<workflow-scoped-eventing-system-configuration, workflow-scoped eventing system configuration>> is reserved only for advanced use cases. +==== + +[#platform-scoped-eventing-system-configuration] +== Platform-scoped Eventing system configuration + +To configure a platform-scoped eventing system, you must use the field `spec.eventing.broker.ref` in the `SonataFlowPlatform` CR to refer to a Knative Eventing broker. + +This information signals the {operator_name} to automatically link every workflow deployed in that namespace, with the `preview` or `gitops` profile, to `produce` and `consume` the events by using that Broker. + +Additionally, the supporting services deployed in that namespace, that do not provide a custom eventing system configuration, will be linked to that broker. +For more information about configuring the supporting services eventing system, xref:cloud/operator/supporting-services.adoc#configuring-supporting-services-eventing-system[see]. + +The following `SonataFlowPlatform` CR fragment shows an example of such configuration: + +.Platform scoped eventing system configuration example + +include::../common/platform-scoped-eventing-system-configuration-example.adoc[] + +[#workflow-scoped-eventing-system-configuration] +== Workflow-scoped Eventing system configuration + +A workflow-scoped eventing system configuration provides the ability to do a fine-grained configuration of the eventing system for the events `produced` and `consumed` by a workflow. + +To configure a workflow-scoped eventing system you must use the fields `spec.sink.ref` and `spec.sources[]` in the `SonataFlow` CR. 
+ +[#outgoing-eventing-system-configuration] +=== Outgoing Eventing system configuration + +To configure a workflow-scoped eventing system for the workflow outgoing events, you must use the field `spec.sink.ref` in the `SonataFlow` CR. + +This information signals the {operator_name} to automatically link the current workflow, to produce the events by using that Broker. +That includes, the {product_name} system events, and the workflow business events. + +The following `SonataFlow` CR fragment shows an example of such configuration: + +.Workflow-scoped outgoing eventing system configuration example +[source, yaml] +---- +apiVersion: sonataflow.org/v1alpha08 +kind: SonataFlow +metadata: + name: example-workflow + namespace: example-workflow-namespace + annotations: + sonataflow.org/description: Example Workflow + sonataflow.org/version: 0.0.1 + sonataflow.org/profile: preview +spec: + sink: + ref: + name: outgoing-example-broker <1> + namespace: outgoing-example-broker-namespace <2> + apiVersion: eventing.knative.dev/v1 + kind: Broker + flow: <3> + start: ExampleStartState + events: <4> + - name: outEvent1 <5> + source: '' + kind: produced + type: out-event-type1 <6> + + ... <7> +---- + +<1> Name of the Knative Eventing Broker to use for all the events produced by the workflow, including the {product_name} system events. +<2> Optional: Defines the namespace of the Knative Eventing Broker. Defaults to the SonataFlow namespace. We recommend creating the Knative Eventing Broker in the same namespace as the SonataFlow. +<3> Flow definition field in the `SonataFlow` CR. +<4> Events definition field in the `SonataFlow` CR. +<5> Example of an outgoing event `outEvent1` definition. +<6> Event type for the outgoing event `outEvent1` +<7> Only a fragment of the workflow is shown for simplicity. 
+ +[#incoming-eventing-system-configuration] +=== Incoming Eventing system configuration + +To configure a workflow-scoped eventing system for the workflow incoming events, you must use the field `spec.sources[]` in the `SonataFlow` CR. +And, you must add an entry in the array, for every event type that requires an individual configuration. + +This information signals the SonataFlow Operator to automatically link the current workflow, to potentially `consume` the events from different Brokers, depending on the event type. + +Incoming event types not configured with a particular Broker, are consumed by applying <>. + +The following `SonataFlow` CR fragment shows an example of such configuration: + +.Workflow-scoped incoming eventing system configuration example + +[source, yaml] +---- +apiVersion: sonataflow.org/v1alpha08 +kind: SonataFlow +metadata: + name: example-workflow + namespace: example-workflow-namespace + annotations: + sonataflow.org/description: Example Workflow + sonataflow.org/version: 0.0.1 + sonataflow.org/profile: preview +spec: + sources: + - eventType: in-event-type1 <1> + ref: + name: incoming-example-broker1 <2> + namespace: incoming-example-broker1-namespace <3> + apiVersion: eventing.knative.dev/v1 + kind: Broker + - eventType: in-event-type2 <4> + ref: + name: incoming-example-broker2 <5> + namespace: incoming-example-broker2-namespace <6> + apiVersion: eventing.knative.dev/v1 + kind: Broker + flow: <7> + start: ExampleStartState + events: <8> + - name: inEvent1 <9> + source: '' + kind: consumed + type: in-event-type1 <10> + - name: inEvent2 <11> + source: '' + kind: consumed + type: in-event-type2 <12> + ... <13> + +---- + +<1> Eventing system configuration entry for the workflow incoming events of type `in-event-type1`. +<2> Name of the Knative Eventing Broker to use for the consumption of the events of type `in-event-type1` sent to this workflow. +<3> Optional: Defines the namespace of the Knative Eventing Broker. 
Defaults to the SonataFlow namespace. We recommend creating the Knative Eventing Broker in the same namespace as the SonataFlow. + +<4> Eventing system configuration entry for the workflow incoming events of type `in-event-type2`. +<5> Name of the Knative Eventing Broker to use for the consumption of the events of type `in-event-type2` sent to this workflow. +<6> Optional: Defines the namespace of the Knative Eventing Broker. Defaults to the SonataFlow namespace. We recommend creating the Knative Eventing Broker in the same namespace as the SonataFlow. + +<7> Flow definition field in the `SonataFlow` CR. +<8> Events definition field in the `SonataFlow` CR. + +<9> Example of an incoming event `inEvent1` definition. +<10> Event type for the incoming event `inEvent1`. +The link of the workflow event, with the corresponding `spec.sources[]` entry, is by using the event type name `in-event-type1`. + +<11> Example of an incoming event `inEvent2` definition. +<12> Event type for the incoming event `inEvent2`. +The link of the workflow event, with the corresponding `spec.sources[]` entry, is by using the event type name `in-event-type2`. + +<13> Only a fragment of the workflow is shown for simplicity. + +[NOTE] +==== +* Remember that the link between a `spec.sources[]` entry and the workflow event is by using the event type. +* Incoming events without a corresponding `spec.sources[]` entry are consumed by applying the <<eventing-system-configuration-precedence-rules, eventing system configuration precedence rules>>. +==== + +[#cluster-scoped-eventing-system-configuration] +== Cluster-scoped Eventing system configuration + +When you use a SonataFlowClusterPlatform, the workflows are automatically linked to the `Broker` configured in the `SonataFlowPlatform` CR referred to by the given `SonataFlowClusterPlatform` CR, according to the <<eventing-system-configuration-precedence-rules, eventing system configuration precedence rules>>. + +[#eventing-system-configuration-precedence-rules] +== Eventing system configuration precedence rules + +To configure the eventing system for a workflow, the {operator_name} uses the following precedence rules: + +. 
If the workflow has a configured eventing system, by using any of the <<outgoing-eventing-system-configuration, workflow-scoped outgoing eventing system>> or <<#incoming-eventing-system-configuration, workflow-scoped incoming eventing system>> configurations, that configuration applies. + +. If the `SonataFlowPlatform` CR enclosing the workflow is configured with a <<platform-scoped-eventing-system-configuration, platform-scoped eventing system>>, that configuration applies. + +. If the current cluster is configured with a <<cluster-scoped-eventing-system-configuration, cluster-scoped eventing system>>, that configuration applies. + +. If none of the previous configurations exist, the workflow is configured to: +** Produce direct HTTP calls to deliver the {product_name} system events to the supporting services. +** Consume the workflow incoming events in the workflow service root path `/` via HTTP POST calls. +** No eventing system is configured to produce the workflow business events, and thus, an attempt to produce such event will fail. + +== Eventing System linking objects + +The linking of the workflow with the eventing system is produced by using Knative Eventing SinkBindings and Triggers. +These objects are automatically created by the {operator_name}, and facilitate workflow events production and consumption. + +The following example shows the Knative Eventing objects created for an `example-workflow` configured with a platform-scoped eventing system. + +.Platform-scoped eventing system configuration example +[source,yaml] +---- +apiVersion: sonataflow.org/v1alpha08 +kind: SonataFlowPlatform +metadata: + name: sonataflow-platform-example + namespace: example-namespace +spec: + eventing: + broker: + ref: + name: example-broker <1> + apiVersion: eventing.knative.dev/v1 + kind: Broker + services: + dataIndex: <2> + enabled: true + jobService: <3> + enabled: true +---- + +<1> Platform Broker configuration used by the Data Index, Jobs Service, and the `example-workflow`. +<2> Data Index ephemeral deployment. +<3> Jobs Service ephemeral deployment. 
+ +.Knative Kafka Broker example used by the SonataFlowPlatform +[source,yaml] +---- +apiVersion: eventing.knative.dev/v1 +kind: Broker +metadata: + annotations: + eventing.knative.dev/broker.class: Kafka <1> + name: example-broker + namespace: example-namespace +spec: + config: + apiVersion: v1 + kind: ConfigMap + name: kafka-broker-config + namespace: knative-eventing +---- + +<1> Use the Kafka class to create a Kafka Knative Broker + +.The `example-workflow` is automatically linked to the `example-broker` in the `example-namespace` +[source, yaml] +---- +apiVersion: sonataflow.org/v1alpha08 +kind: SonataFlow +metadata: + name: example-workflow + namespace: example-namespace + annotations: + sonataflow.org/description: Example Workflow + sonataflow.org/version: 0.0.1 + sonataflow.org/profile: preview +spec: + flow: + start: ExampleStartState + events: + - name: outEvent1 + source: '' + kind: produced + type: out-event-type1 <1> + - name: inEvent1 + source: '' + kind: consumed + type: in-event-type1 <2> + - name: inEvent2 + source: '' + kind: consumed + type: in-event-type2 <3> + states: + - name: ExampleStartState + ... <4> + +---- + +<1> The `example-workflow` outgoing events are produced by using the SinkBinding `example-workflow-sb`, <<#workflow-knative-eventing-sinkbindings, see>>. +<2> The `example-workflow` events of type `in-event-type1` are consumed by using the Trigger `example-workflow-inevent1-b40c067c-595b-4913-81a4-c8efa980bc11`, <<#workflow-knative-eventing-triggers, see>>. +<3> The `example-workflow` events of type `in-event-type2` are consumed by using the Trigger `example-workflow-inevent2-b40c067c-595b-4913-81a4-c8efa980bc11`, <<#workflow-knative-eventing-triggers, see>>. +<4> Only a fragment of the workflow is shown for simplicity. 
+ +[#workflow-knative-eventing-sinkbindings] +.Knative Eventing SinkBinding created for the `example-workflow` events production +[source,bash] +---- +kn source list -n example-namespace + +NAME TYPE RESOURCE SINK READY +example-workflow-sb SinkBinding sinkbindings.sources.knative.dev broker:example-broker True +---- + +[#workflow-knative-eventing-triggers] +.Knative Eventing Triggers created for the `example-workflow` events consumption +[source,bash] +---- +kn trigger list -n example-namespace + +NAME BROKER SINK AGE CONDITIONS READY REASON +example-workflow-inevent1-b40c067c-595b-4913-81a4-c8efa980bc11 example-broker service:example-workflow 16m 7 OK / 7 True +example-workflow-inevent2-b40c067c-595b-4913-81a4-c8efa980bc11 example-broker service:example-workflow 16m 7 OK / 7 True +---- + +== Additional resources + +* xref:cloud/operator/using-persistence.adoc[Workflow Persistence] +* xref:cloud/operator/supporting-services.adoc[Deploying Supporting Services] + +include::../../../pages/_common-content/report-issue.adoc[] \ No newline at end of file diff --git a/modules/serverless-logic/pages/cloud/operator/global-configuration.adoc b/modules/serverless-logic/pages/cloud/operator/global-configuration.adoc index b26a7cba..d104ffdb 100644 --- a/modules/serverless-logic/pages/cloud/operator/global-configuration.adoc +++ b/modules/serverless-logic/pages/cloud/operator/global-configuration.adoc @@ -45,9 +45,19 @@ You can freely edit any of the options in the key `controllers_cfg.yaml` entry. {groupId_kie-addons-quarkus-persistence-jdbc}:{artifactId_kie-addons-quarkus-persistence-jdbc}:{kogito_version} +| `kogitoEventsGrouping` | true | When true, configures every workflow deployment with the `gitops` or `preview` profiles, to send accumulated workflow status change events to the Data Index. Reducing the number of produced events. + +Set to false to send individual events. 
+ +| `kogitoEventsGroupingBinary` | true | When true, the accumulated workflow status change events are sent in binary mode. Reducing the size of the produced events. + +Set to false to send plain JSON events. + +| `kogitoEventsGroupingCompress` | false | When true, the accumulated workflow status change events, if sent in binary mode, are gzipped at the cost of some performance. + |=== -To edit this file, update the ConfigMap `logic-operator-controllers-config` using your preferred tool such as `kubectl`. +To edit this file, update the ConfigMap `{operator_controller_config}` using your preferred tool such as `kubectl`. [#config-changes] == Configuration Changes Impact @@ -93,7 +103,7 @@ The order of precedence is: 1. The `SonataFlowPlatform` in the current context 2. The global configuration entry -3. The `FROM` clause in the Dockerfile in the operator's namespace `sonataflow-operator-builder-config` ConfigMap +3. The `FROM` clause in the Dockerfile in the operator's namespace `{sonataflow_builder_configmap_name}` ConfigMap In summary, the entry in `SonataFlowPlatform` will always override any other value. diff --git a/modules/serverless-logic/pages/cloud/operator/install-kn-workflow-cli.adoc b/modules/serverless-logic/pages/cloud/operator/install-kn-workflow-cli.adoc index d7f8acc4..6c8bcb6c 100644 --- a/modules/serverless-logic/pages/cloud/operator/install-kn-workflow-cli.adoc +++ b/modules/serverless-logic/pages/cloud/operator/install-kn-workflow-cli.adoc @@ -10,7 +10,7 @@ * You have first installed the link:{kn_cli_install_url}[Knative CLI]. * link:{docker_install_url}[Docker] or {podman_install_url}[Podman] is installed. -[proc-install-sw-plugin-kn-cli]] +[[proc-install-sw-plugin-kn-cli]] == Installing the {product_name} plug-in for Knative CLI You can use the {product_name} plug-in to set up your local workflow project quickly using Knative CLI. 
@@ -110,7 +110,7 @@ To verify that the installation was successful, you can execute the following co kn workflow version ---- output: -[source,shell] +[source,shell,subs="attributes+"] ---- -1.33.0 +{product_version_long} ---- diff --git a/modules/serverless-logic/pages/cloud/operator/install-serverless-operator.adoc b/modules/serverless-logic/pages/cloud/operator/install-serverless-operator.adoc index db9d2b28..2820ed13 100644 --- a/modules/serverless-logic/pages/cloud/operator/install-serverless-operator.adoc +++ b/modules/serverless-logic/pages/cloud/operator/install-serverless-operator.adoc @@ -11,7 +11,7 @@ :kubernetes_operator_uninstall_url: https://olm.operatorframework.io/docs/tasks/uninstall-operator/ :operatorhub_url: https://operatorhub.io/ -This guide describes how to install the {operator_name} in a Kubernetes or OpenShift cluster. The operator has been tested on OpenShift {openshift_version_min}+ and link:{minikube_url}[Minikube]. +This guide describes how to install the {operator_name} in a Kubernetes or OpenShift cluster. The operator is in an xref:cloud/operator/known-issues.adoc[early development stage] (community only) and has been tested on OpenShift {openshift_version_min}+, Kubernetes {kubernetes_version}+, and link:{minikube_url}[Minikube]. .Prerequisites * A Kubernetes or OpenShift cluster with admin privileges and `kubectl` installed. @@ -29,6 +29,20 @@ When searching for the operator in the *Filter by keyword* field, use the word ` To remove the operator on OpenShift refer to the "link:{openshift_operator_uninstall_url}[Deleting Operators from a cluster]" from the OpenShift's documentation. +== {product_name} Operator Kubernetes installation + +=== Install + +To install the operator on Kubernetes refer to the "link:{kubernetes_operator_install_url}[How to install an Operator from OperatorHub.io]" from the OperatorHub's documentation. 
+ +When link:{operatorhub_url}[searching for the operator in the *Search OperatorHub* field], use the word `{operator_k8s_keyword}`. + +=== Uninstall + +To remove the operator on Kubernetes follow the document "link:{kubernetes_operator_uninstall_url}[Uninstall your operator]" from the OLM's documentation. + +When searching for the subscription to remove, use the word `{operator_k8s_subscription}`. + == {product_name} Operator Manual Installation [WARNING] @@ -48,20 +62,20 @@ To install the {product_name} Operator, you can use the following command: .Install {product_name} Operator on Kubernetes [source,shell,subs="attributes+"] ---- -kubectl create -f {operator_community_prod_yaml} +kubectl create -f {kogito_operator_repository_rawcontent_url}/{operator_version}/operator.yaml ---- -Replace with specific version if needed: +Replace `` with specific version if needed: ---- -kubectl create -f https://raw.githubusercontent.com/kiegroup/kogito-serverless-operator//operator.yaml +kubectl create -f {kogito_operator_repository_rawcontent_url}//operator.yaml ---- -`` could be `10.0.0` for instance. +`` could be `{product_version_long}` for instance. 
You can follow the deployment of the {product_name} Operator: .Watch the {product_name} Operator pod [source,shell,subs="attributes+"] ---- -kubectl get pod -n sonataflow-operator-system --watch +kubectl get pod -n {operator_installation_namespace} --watch ---- A successful installation should have an output like this: @@ -70,9 +84,9 @@ A successful installation should have an output like this: [source] ---- NAME READY STATUS RESTARTS AGE -sonataflow-operator-controller-manager-948547ffd-sr2j2 0/2 ContainerCreating 0 6s -sonataflow-operator-controller-manager-948547ffd-sr2j2 1/2 Running 0 7s -sonataflow-operator-controller-manager-948547ffd-sr2j2 2/2 Running 0 20s +{operator_controller_manager_deployment_name}-948547ffd-sr2j2 0/2 ContainerCreating 0 6s +{operator_controller_manager_deployment_name}-948547ffd-sr2j2 1/2 Running 0 7s +{operator_controller_manager_deployment_name}-948547ffd-sr2j2 2/2 Running 0 20s ---- You can also follow the operator’s log: @@ -80,7 +94,7 @@ You can also follow the operator’s log: .Watch the {product_name} Operator pod logs [source,shell,subs="attributes+"] ---- -kubectl logs deployment/sonataflow-operator-controller-manager -n sonataflow-operator-system -f +kubectl logs deployment/{operator_controller_manager_deployment_name} -n {operator_installation_namespace} -f ---- Once the operator is running, it will watch for instances of the {product_name} Custom Resources (CR). Using CRs, you can configure your {product_name} environment and define Workflows and builds to be handled by the operator. 
@@ -106,7 +120,7 @@ To uninstall the correct version of the operator, first you must get the current .Getting the operator version [source,shell,subs="attributes+"] ---- -kubectl get deployment sonataflow-operator-controller-manager -n sonataflow-operator-system -o jsonpath="{.spec.template.spec.containers[?(@.name=='manager')].image}" +kubectl get deployment {operator_controller_manager_deployment_name} -n {operator_installation_namespace} -o jsonpath="{.spec.template.spec.containers[?(@.name=='manager')].image}" {sonataflow_operator_imagename}:{operator_version} ---- @@ -114,7 +128,7 @@ kubectl get deployment sonataflow-operator-controller-manager -n sonataflow-oper .Uninstalling the operator [source,shell,subs="attributes+"] ---- -kubectl delete -f {operator_community_prod_yaml} +kubectl delete -f {kogito_operator_repository_rawcontent_url}//operator.yaml ---- [TIP] diff --git a/modules/serverless-logic/pages/cloud/operator/supporting-services.adoc b/modules/serverless-logic/pages/cloud/operator/supporting-services.adoc index 67675ad4..a19b00c8 100644 --- a/modules/serverless-logic/pages/cloud/operator/supporting-services.adoc +++ b/modules/serverless-logic/pages/cloud/operator/supporting-services.adoc @@ -180,6 +180,220 @@ When you use the common PostgreSQL configuration, the database schema for each s For example, `sonataflow-platform-example-data-index-service`. ==== +[#configuring-supporting-services-eventing-system] +== Configuring the supporting services Eventing system + +In general, the following events are produced in a {product_name} installation: + +* Workflow outgoing and incoming business events. +* {product_name} system events sent from the workflow to the Data Index and Job Service respectively. +* {product_name} system events sent from the Jobs Service to the Data Index Service. + +The {operator_name} is designed to use the link:{knative_eventing_url}[Knative Eventing] system to resolve all the event communication between these services. 
+ +[NOTE] +==== +In a regular {product_name} installation, the preferred method is to use the <>, while the <> is reserved only for advanced use cases. +==== + +[#platform-scoped-eventing-system-configuration] +=== Platform-scoped Eventing system configuration + +To configure a platform-scoped eventing system, you must use the field `spec.eventing.broker.ref` in the `SonataFlowPlatform` CR to refer to a Knative Eventing Broker. + +This information signals the {operator_name} to automatically link the supporting services to `produce` and `consume` the events by using that Broker. + +Additionally, workflows deployed in that namespace, with the `preview` or `gitops` profile, that don't provide a custom eventing system configuration, will be linked to that Broker. +For more information about configuring the workflow eventing system, xref:cloud/operator/configuring-workflow-eventing-system.adoc[see]. + +The following `SonataFlowPlatform` CR fragment shows an example of such configuration: + +.Platform scoped eventing system configuration example +include::../common/platform-scoped-eventing-system-configuration-example.adoc[] + + +[#service-scoped-eventing-system-configuration] +=== Service-scoped Eventing system configuration + +A service scoped eventing system configuration provides the ability to do a fine-grained configuration of the Eventing system for the <> or the <>. + +[NOTE] +==== +In a regular {product_name} installation, the preferred method is to use a <>, while the service-scoped configuration is reserved only for advanced use cases. +==== + +[#data-index-eventing-system-configuration] +=== Data Index Eventing system configuration + +To configure a service-scoped eventing system for the Data Index, you must use the field `spec.services.dataIndex.source.ref` in the `SonataFlowPlatform` CR to refer to a specific Knative Eventing Broker. 
+ +This information signals the {operator_name} to automatically link the Data Index to `consume` the {product_name} system events from that Broker. + +.Data Index service scoped eventing system configuration example +[source,yaml] +---- +apiVersion: sonataflow.org/v1alpha08 +kind: SonataFlowPlatform +metadata: + name: sonataflow-platform-example +spec: + services: + dataIndex: + source: + ref: + name: data-index-source-example-broker <1> + namespace: data-index-source-example-broker-namespace <2> + apiVersion: eventing.knative.dev/v1 + kind: Broker +---- + +<1> Name of the Knative Eventing Broker to `consume` events from. +<2> Optional: Defines the namespace of the Knative Eventing Broker. Defaults to the SonataFlowPlatform namespace. In general, we recommend creating the Knative Eventing Broker in the same namespace as the SonataFlowPlatform. + +[NOTE] +==== +In production environments, you must use a production-ready broker, like the link:{knative_eventing_kafka_broker_url}[Knative Kafka Broker]. +==== + +[#jos-service-eventing-system-configuration] +=== Jobs Service Eventing system configuration + +To configure a service-scoped eventing system for the Jobs Service, you must use the fields `spec.services.jobService.source.ref` and `spec.services.jobService.sink.ref` in the `SonataFlowPlatform` CR. + +This information signals the {operator_name} to automatically link the Jobs Service to `consume` and `produce` the {product_name} system events from that configuration respectively.
+ +.Jobs Service scoped eventing system configuration example +[source,yaml] +---- +apiVersion: sonataflow.org/v1alpha08 +kind: SonataFlowPlatform +metadata: + name: sonataflow-platform-example +spec: + services: + jobService: + source: + ref: + name: jobs-service-source-example-broker <1> + namespace: jobs-service-source-example-broker-namespace <2> + apiVersion: eventing.knative.dev/v1 + kind: Broker + sink: + ref: + name: jobs-service-sink-example-broker <3> + namespace: jobs-service-sink-example-broker-namespace <4> + apiVersion: eventing.knative.dev/v1 + kind: Broker +---- + +<1> Name of the Knative Eventing Broker to `consume` events from. +<2> Optional: Defines the namespace of the Knative Eventing Broker. Defaults to the SonataFlowPlatform namespace. In general, we recommend creating the Knative Eventing Broker in the same namespace as the SonataFlowPlatform. +<3> Name of the Knative Eventing Broker to `produce` events on. +<4> Optional: Defines the namespace of the Knative Eventing Broker. Defaults to the SonataFlowPlatform namespace. In general, we recommend creating the Knative Eventing Broker in the same namespace as the SonataFlowPlatform. + +[NOTE] +==== +In production environments, you must use production-ready brokers, like the link:{knative_eventing_kafka_broker_url}[Knative Kafka Broker]. +==== + +[#cluster-scoped-eventing-system-configuration] +=== Cluster-scoped Eventing system configuration + +When you use a <> deployment, the supporting services are automatically linked to the `Broker` configured in the `SonataFlowPlatform` CR referred to by the `SonataFlowClusterPlatform` CR. + +=== Eventing system configuration precedence rules + +To configure the eventing system for a supporting service, the {operator_name} uses the following precedence rules: + +. If the supporting service has a configured eventing system, by using any of the <> or <> respectively, that configuration applies. + +.
If the `SonataFlowPlatform` CR enclosing the supporting service is configured with a <>, that configuration applies. + +. If the current cluster is configured with a <>, that configuration applies. + +. If none of the previous configurations exists, the supporting service is configured to produce direct HTTP calls to deliver events. + +=== Eventing System linking objects + +The linking of the supporting services with the Eventing System is produced by using Knative Eventing SinkBindings and Triggers. +These objects are automatically created by the {operator_name}, and facilitate the events production and consumption from the supporting services. + +The following example shows the Knative Eventing objects created for the `SonataFlowPlatform` CR: + +.SonataFlowPlatform eventing system configuration example +[source,yaml] +---- +apiVersion: sonataflow.org/v1alpha08 +kind: SonataFlowPlatform +metadata: + name: sonataflow-platform-example + namespace: example-namespace +spec: + eventing: + broker: + ref: + name: example-broker <1> + apiVersion: eventing.knative.dev/v1 + kind: Broker + services: + dataIndex: <2> + enabled: true + jobService: <3> + enabled: true +---- + +<1> Platform Broker configuration used by the Data Index, Jobs Service, and the workflows if not overridden. +<2> Data Index ephemeral deployment. +<3> Jobs Service ephemeral deployment.
+ +.Knative Kafka Broker example used by the SonataFlowPlatform +[source,yaml] +---- +apiVersion: eventing.knative.dev/v1 +kind: Broker +metadata: + annotations: + eventing.knative.dev/broker.class: Kafka <1> + name: example-broker + namespace: example-namespace +spec: + config: + apiVersion: v1 + kind: ConfigMap + name: kafka-broker-config + namespace: knative-eventing +---- + +<1> Use the Kafka class to create a Kafka Knative Broker + +.Knative Eventing Triggers created for the Data Index and Jobs Service events consumption +[source,bash] +---- +kn trigger list -n example-namespace + +NAME BROKER SINK AGE CONDITIONS READY REASON +data-index-jobs-fbf285df-c0a4-4545-b77a-c232ec2890e2 example-broker service:sonataflow-platform-example-data-index-service 106s 7 OK / 7 True +data-index-process-definition-e48b4e4bf73e22b90ecf7e093ff6b1eaf example-broker service:sonataflow-platform-example-data-index-service 106s 7 OK / 7 True +data-index-process-error-fbf285df-c0a4-4545-b77a-c232ec2890e2 example-broker service:sonataflow-platform-example-data-index-service 106s 7 OK / 7 True +data-index-process-instance-mul35f055c67a626f51bb8d2752606a6b54 example-broker service:sonataflow-platform-example-data-index-service 106s 7 OK / 7 True +data-index-process-node-fbf285df-c0a4-4545-b77a-c232ec2890e2 example-broker service:sonataflow-platform-example-data-index-service 106s 7 OK / 7 True +data-index-process-sla-fbf285df-c0a4-4545-b77a-c232ec2890e2 example-broker service:sonataflow-platform-example-data-index-service 106s 7 OK / 7 True +data-index-process-state-fbf285df-c0a4-4545-b77a-c232ec2890e2 example-broker service:sonataflow-platform-example-data-index-service 106s 7 OK / 7 True +data-index-process-variable-ac727d6051750888dedb72f697737c0dfbf example-broker service:sonataflow-platform-example-data-index-service 106s 7 OK / 7 True + +jobs-service-create-job-fbf285df-c0a4-4545-b77a-c232ec2890e2 example-broker service:sonataflow-platform-example-jobs-service 106s 7 OK / 7 True 
+jobs-service-delete-job-fbf285df-c0a4-4545-b77a-c232ec2890e2 example-broker service:sonataflow-platform-example-jobs-service 106s 7 OK / 7 True +---- + +.Knative Eventing SinkBinding created for the Jobs Service events production +[source,bash] +---- +kn source list -n example-namespace + +NAME TYPE RESOURCE SINK READY +sonataflow-platform-example-jobs-service-sb SinkBinding sinkbindings.sources.knative.dev broker:example-broker True +---- + == Advanced Supporting Services Configurations To configure the advanced options for any of the supporting services you must use the `podTemplate` field respectively, for example `dataIndex.podTemplate`: @@ -258,7 +472,8 @@ Additionally, it can manage different persistence options for each service, and * xref:data-index/data-index-core-concepts.adoc[] * xref:job-services/core-concepts.adoc[Job Service Core Concepts] -* xref:cloud/operator/using-persistence.adoc[] +* xref:cloud/operator/using-persistence.adoc[Workflow Persistence] +* xref:cloud/operator/configuring-workflow-eventing-system.adoc[Workflow Eventing System] * xref:cloud/operator/known-issues.adoc[] include::../../../pages/_common-content/report-issue.adoc[] diff --git a/modules/serverless-logic/pages/cloud/operator/upgrade-serverless-operator/upgrade_1_34_0_to_1_35_0.adoc b/modules/serverless-logic/pages/cloud/operator/upgrade-serverless-operator/upgrade_1_34_0_to_1_35_0.adoc index 1ebccf1d..0d6081e3 100644 --- a/modules/serverless-logic/pages/cloud/operator/upgrade-serverless-operator/upgrade_1_34_0_to_1_35_0.adoc +++ b/modules/serverless-logic/pages/cloud/operator/upgrade-serverless-operator/upgrade_1_34_0_to_1_35_0.adoc @@ -11,13 +11,12 @@ :kubernetes_operator_uninstall_url: https://olm.operatorframework.io/docs/tasks/uninstall-operator/ :operatorhub_url: https://operatorhub.io/ +// NOTE: Do not parametrize this guide, this is version specific migration guide, hence the versions are hardcoded. 
This guide describes how to upgrade the {operator_name} 1.34.0 installed in an OpenShift cluster to the version 1.35.0. .Prerequisites * An OpenShift cluster with admin privileges and `oc` installed. - - == Procedure To upgrade an OSL 1.34.0 installation to the OSL 1.35.0 version, you must execute this procedure: diff --git a/modules/serverless-logic/pages/cloud/operator/using-persistence.adoc b/modules/serverless-logic/pages/cloud/operator/using-persistence.adoc index d11ea82a..feea5d6e 100644 --- a/modules/serverless-logic/pages/cloud/operator/using-persistence.adoc +++ b/modules/serverless-logic/pages/cloud/operator/using-persistence.adoc @@ -267,9 +267,9 @@ metadata: name: sonataflow-platform spec: properties: - flow: - - name: quarkus.flyway.migrate-at-start - value: true + flow: + - name: quarkus.flyway.migrate-at-start + value: true ---- [NOTE] @@ -299,4 +299,4 @@ And, by using the `SonataFlow` CR you can enable the persistence of a particular * xref:cloud/operator/developing-workflows.adoc[] * xref:persistence/core-concepts.adoc[] -include::../../../pages/_common-content/report-issue.adoc[] \ No newline at end of file +include::../../../pages/_common-content/report-issue.adoc[] diff --git a/modules/serverless-logic/pages/core/handling-events-on-workflows.adoc b/modules/serverless-logic/pages/core/handling-events-on-workflows.adoc index e988610a..5f51ad3d 100644 --- a/modules/serverless-logic/pages/core/handling-events-on-workflows.adoc +++ b/modules/serverless-logic/pages/core/handling-events-on-workflows.adoc @@ -141,4 +141,3 @@ Similar to the callback state in a workflow, the workflow instance to be resumed * xref:use-cases/advanced-developer-use-cases/event-orchestration/consume-produce-events-with-knative-eventing.adoc[Consuming and producing events on Knative Eventing] include::../../pages/_common-content/report-issue.adoc[] - diff --git a/modules/serverless-logic/pages/core/understanding-jq-expressions.adoc 
b/modules/serverless-logic/pages/core/understanding-jq-expressions.adoc index 4bc255bc..e22d2cde 100644 --- a/modules/serverless-logic/pages/core/understanding-jq-expressions.adoc +++ b/modules/serverless-logic/pages/core/understanding-jq-expressions.adoc @@ -239,6 +239,59 @@ In the previous example, a CloudEvent was published when the state transitioned. data={"gitRepo":"ssh://bitbucket.org/m2k-test","branch":"aaaaaaasssss","token":null,"workspaceId":"b93980cb-3943-4223-9441-8694c098eeb9","projectId":"9b305fe3-d441-48ce-b01b-d314e86e14ec","transformId":"723dce89-c25c-4c7b-9ef3-842de92e6fe6","workflowCallerId":"7ddb5193-bedc-4942-a857-596b31f377ed"} ---- +== ForEach state + +ForEach `iteratiomParam` should be accessed as a variable, using a `$` prefix, not as a JSON property, since the loop variable is not part of the workflow model the expression is evaluated against. Therefore, instead of accessing it like a JSON property (with a `.` prefix), the loop variable should be referenced with a `$` prefix + +For instance, this link:{spec_doc_url}#foreach-state[ForEach specification example] + +[source,json] +---- + "states": [ + { + "name":"SendConfirmState", + "type":"foreach", + "inputCollection": "${ [.orders[] | select(.completed == true)] }", + "iterationParam": "completedorder", + "outputCollection": "${ .confirmationresults }", + "actions":[ + { + "functionRef": { + "refName": "sendConfirmationFunction", + "arguments": { + "orderNumber": "${ .completedorder.orderNumber }", + "email": "${ .completedorder.email }" + } + } + }], + "end": true + }] +---- + +should be modified to + +---- + "states": [ + { + "name":"SendConfirmState", + "type":"foreach", + "inputCollection": "${ [.orders[] | select(.completed == true)] }", + "iterationParam": "completedorder", + "outputCollection": "${ .confirmationresults }", + "actions":[ + { + "functionRef": { + "refName": "sendConfirmationFunction", + "arguments": { + "orderNumber": "${ $completedorder.orderNumber }", + "email": 
"${ $completedorder.email }" + } + } + }], + "end": true + }] +---- + == Workflow secrets, constants and context diff --git a/modules/serverless-logic/pages/data-index/data-index-core-concepts.adoc b/modules/serverless-logic/pages/data-index/data-index-core-concepts.adoc index afd69aa3..511cf551 100644 --- a/modules/serverless-logic/pages/data-index/data-index-core-concepts.adoc +++ b/modules/serverless-logic/pages/data-index/data-index-core-concepts.adoc @@ -256,9 +256,7 @@ Depending on the attribute type, the following operators are also available: ** `greaterThanEqual`: Integer ** `lessThan`: Integer ** `lessThanEqual`: Integer -** `between`: Numeric range -** `from`: Integer -** `to`: Integer +** `between`: Numeric range (with `from` and `to` Integer values required) * Date argument: ** `isNull`: Boolean (`true` or `false`) @@ -267,9 +265,11 @@ Depending on the attribute type, the following operators are also available: ** `greaterThanEqual`: Date time ** `lessThan`: Date time ** `lessThanEqual`: Date time -** `between`: Date range -** `from`: Date time -** `to`: Date time +** `between`: Date range (with `from` and `to` Date time values required) + +* Enum argument: +** `equal`: Enum value +** `in`: Array of enum values -- Sort query results using the `orderBy` parameter:: diff --git a/modules/serverless-logic/pages/data-index/data-index-service.adoc b/modules/serverless-logic/pages/data-index/data-index-service.adoc index cbeac180..21409c66 100644 --- a/modules/serverless-logic/pages/data-index/data-index-service.adoc +++ b/modules/serverless-logic/pages/data-index/data-index-service.adoc @@ -4,7 +4,6 @@ :description: Data Index Service use as an independent service that allow to index and query audit data in {product_name} :keywords: workflow, serverless, data, dataindex, data-index, index, service, standalone // External pages -:distributions_url: https://quay.io/organization/kiegroup :dev_services_url: https://quarkus.io/guides/dev-services 
:test_containers_url: https://www.testcontainers.org/ :kubernetes_configmap_url: https://kubernetes.io/docs/concepts/configuration/configmap/ @@ -19,9 +18,7 @@ The goal is to configure the container to allow to process ProcessInstances and [#data-index-service-distributions] === {data_index_ref} distributions -{data_index_ref} service can be deployed referencing directly a distributed {data_index_ref} image. Here there are the different {data_index_ref} image distributions that can be found in link:{distributions_url}[Quay.io/kiegroup]: - -image::data-index/data-index-distributions.png[Image of data-index different available distributions] +{data_index_ref} service can be deployed referencing directly a distributed {data_index_ref} image. There are the different {data_index_ref} image distributions that can be found in link:{images_distributions_url}[here]. === {data_index_ref} standalone service deployment @@ -63,7 +60,7 @@ Here you can see in example, how the {data_index_ref} resource definition can be ---- data-index: container_name: data-index - image: {sonataflow_dataindex_postgresql_imagename}:latest <1> + image: quay.io/kiegroup/kogito-data-index-postgresql:latest <1> ports: - "8180:8080" depends_on: @@ -81,7 +78,7 @@ Here you can see in example, how the {data_index_ref} resource definition can be QUARKUS_HIBERNATE_ORM_DATABASE_GENERATION: update ---- -<1> Reference the right {data_index_ref} image to match with the type of Database, in this case `{sonataflow_dataindex_postgresql_imagename}:latest` +<1> Reference the right {data_index_ref} image to match with the type of Database, in this case `{sonataflow_dataindex_postgresql_imagename}:{operator_version}` <2> Provide the database connection properties. <3> When `KOGITO_DATA_INDEX_QUARKUS_PROFILE` is not present, the {data_index_ref} is configured to use Kafka eventing. <4> To initialize the database schema at start using flyway. 
@@ -157,7 +154,7 @@ spec: spec: containers: - name: data-index-service-postgresql - image: {sonataflow_dataindex_postgresql_imagename}:latest <1> + image: quay.io/kiegroup/kogito-data-index-postgresql:latest <1> imagePullPolicy: Always ports: - containerPort: 8080 @@ -223,7 +220,7 @@ spec: name: data-index-service-postgresql uri: /jobs <7> ---- -<1> Reference the right {data_index_ref} image to match with the type of Database, in this case `{sonataflow_dataindex_postgresql_imagename}:latest` +<1> Reference the right {data_index_ref} image to match with the type of Database, in this case `{sonataflow_dataindex_postgresql_imagename}:{operator_version}` <2> Provide the database connection properties <3> KOGITO_DATA_INDEX_QUARKUS_PROFILE: http-events-support to use the http-connector with Knative eventing. <4> To initialize the database schema at start using flyway diff --git a/modules/serverless-logic/pages/migration-guide/index.adoc b/modules/serverless-logic/pages/migration-guide/index.adoc deleted file mode 100644 index 05569fcf..00000000 --- a/modules/serverless-logic/pages/migration-guide/index.adoc +++ /dev/null @@ -1,5 +0,0 @@ -= Migration guide for {product_name} components - -In this chapter you will find guide focusing on migration of our components across version. -We provide migration guides only when needed, so if there is no guide available it means the migration -is seamless. \ No newline at end of file diff --git a/modules/serverless-logic/pages/migration-guide/operator/to-1.43.0-migration-guide.adoc b/modules/serverless-logic/pages/migration-guide/operator/to-1.43.0-migration-guide.adoc deleted file mode 100644 index 43680122..00000000 --- a/modules/serverless-logic/pages/migration-guide/operator/to-1.43.0-migration-guide.adoc +++ /dev/null @@ -1,103 +0,0 @@ -# Migration Guide for Operator - - -## Version 1.43.0 - -When migrating to version 1.43.0, you must review the `SonataFlowPlatform` instances. - -.Procedure - -1. 
Change the `.spec.platform` to `.spec.build.config`. For example, given that you have: -+ -[source,yaml,subs="attributes+"] ----- -apiVersion: sonataflow.org/v1alpha08 -kind: SonataFlowPlatform -metadata: - name: sonataflow-platform -spec: - platform: - registry: - address: quay.io/kiegroup - secret: regcred ----- -+ -You should change it to: -+ -[source,yaml,subs="attributes+"] ----- -apiVersion: sonataflow.org/v1alpha08 -kind: SonataFlowPlatform -metadata: - name: sonataflow-platform -spec: - build: - config: - registry: - address: quay.io/kiegroup - secret: regcred ----- - -2. Change the `.spec.build` to `.spec.build.template`. For example, given that you have: -+ -[source,yaml,subs="attributes+"] ----- -apiVersion: sonataflow.org/v1alpha08 -kind: SonataFlowPlatform -metadata: - name: sonataflow-platform -spec: - build: - resources: - requests: - memory: "64Mi" - cpu: "250m" - limits: - memory: "128Mi" - cpu: "500m" ----- -+ -You should change it to: -+ -[source,yaml,subs="attributes+"] ----- -apiVersion: sonataflow.org/v1alpha08 -kind: SonataFlowPlatform -metadata: - name: sonataflow-platform -spec: - build: - template: - resources: - requests: - memory: "64Mi" - cpu: "250m" - limits: - memory: "128Mi" - cpu: "500m" ----- - -3. Change the `.spec.devModeBaseImage` to `spec.devMode.baseImage`. 
For example, given that you have: -+ -[source,yaml,subs="attributes+"] ----- -apiVersion: sonataflow.org/v1alpha08 -kind: SonataFlowPlatform -metadata: - name: sonataflow-platform -spec: - devModeBaseImage: quay.io/johndoe/myworkflow:latest ----- -+ -You should change it to: -+ -[source,yaml,subs="attributes+"] ----- -apiVersion: sonataflow.org/v1alpha08 -kind: SonataFlowPlatform -metadata: - name: sonataflow-platform -spec: - devMode: - baseImage: quay.io/johndoe/myworkflow:latest ----- \ No newline at end of file diff --git a/modules/serverless-logic/pages/testing-and-troubleshooting/kn-plugin-workflow-overview.adoc b/modules/serverless-logic/pages/testing-and-troubleshooting/kn-plugin-workflow-overview.adoc index ad02a8a1..1c445e5a 100644 --- a/modules/serverless-logic/pages/testing-and-troubleshooting/kn-plugin-workflow-overview.adoc +++ b/modules/serverless-logic/pages/testing-and-troubleshooting/kn-plugin-workflow-overview.adoc @@ -7,9 +7,16 @@ // links :apple_support_url: https://support.apple.com/guide/mac-help/mh40616/mac -{product_name} provides a plug-in named `kn-plugin-workflow` for Knative CLI, which enables you to set up a local workflow project quickly using the command line. +{product_name} provides a plug-in named `kn-workflow` for Knative CLI, which provides command line features that help you develop a local workflow project quickly. -This document describes fatures of the workflow plugin for KN CLI. See xref:cloud/operator/install-kn-workflow-cli.adoc[] for currently supported installation procedure. +This document describes how you can install and use the `kn-workflow` plug-in in {product_name}. + +You can also find brief introduction for some of the commands that the plugin provides. + +[[proc-install-sw-plugin-kn-cli]] +== Installing the {product_name} plug-in for Knative CLI + +You can use the {product_name} plug-in to set up your local workflow project quickly using Knative CLI. 
.Prerequisites * link:{java_install_url}[Java] {java_min_version} is installed. @@ -17,9 +24,36 @@ This document describes fatures of the workflow plugin for KN CLI. See xref:clou * (Optional) link:{docker_install_url}[Docker] is installed. * (Optional) link:{podman_install_url}[Podman] is installed. * link:{kubectl_install_url}[Kubernetes CLI] is installed. +* link:{kn_cli_install_url}[Knative CLI] is installed. -After installing the plug-in, you can use `kn workflow` to run the related subcommands as follows: +.Procedure +. Download the latest binary file, suitable for your environment, from the link:{kie_tools_releases_page_url}[KIE Tooling Releases] page. +. Install the `kn workflow` command as a plug-in of the Knative CLI using the following steps: ++ +-- +.. Rename the downloaded binary as follows: ++ +mv kn-workflow-linux-amd64 kn-workflow ++ +.. Make the binary file executable as follows: ++ +`chmod +x kn-workflow` ++ +[WARNING] +==== +On Mac, some systems might block the application to run due to Apple enforcing policies. To fix this problem, check the *Security & Privacy* section in the *System Preferences* -> *General* tab to approve the application to run. For more information, see link:{apple_support_url}[Apple support article: Open a Mac app from an unidentified developer]. +==== +.. Copy the `kn-workflow` binary file to `/usr/local/bin`. +.. Run the following command to verify that `kn-workflow` plug-in is installed successfully: ++ +`kn plugin list` +After installing the plug-in, you can use `kn workflow` to run the related subcommands. +-- + +. Use the `workflow` subcommand in Knative CLI as follows: ++ +-- .Aliases to use workflow subcommand [source,shell] ---- @@ -27,38 +61,34 @@ kn workflow kn-workflow ---- -[WARNING] -==== -On Mac, some systems might block the application to run due to Apple enforcing policies. 
To fix this problem, check the *Security & Privacy* section in the *System Preferences* -> *General* tab to approve the application to run. For more information, see link:{apple_support_url}[Apple support article: Open a Mac app from an unidentified developer]. -==== - .Example output [source,text] ---- -Manage SonataFlow projects + Manage SonataFlow projects -Currently, SonataFlow targets use cases with a single Serverless Workflow main -file definition (i.e. workflow.sw.{json|yaml|yml}). + Currently, SonataFlow targets use cases with a single Serverless Workflow main + file definition (i.e. workflow.sw.{json|yaml|yml}). -Additionally, you can define the configurable parameters of your application in the -"application.properties" file (inside the root project directory). -You can also store your spec files (i.e., Open API files) inside the "specs" folder, - schemas file inside "schemas" folder and also subflows inside "subflows" folder. + Additionally, you can define the configurable parameters of your application in the + "application.properties" file (inside the root project directory). + You can also store your spec files (i.e., OpenAPI files) inside the "specs" folder, + schemas file inside "schemas" folder and also subflows inside "subflows" folder. 
-A SonataFlow project, as the following structure by default: + A SonataFlow project, as the following structure by default: -Workflow project root - /specs (optional) - /schemas (optional) - /subflows (optional) - workflow.sw.{json|yaml|yml} (mandatory) + Workflow project root + /specs (optional) + /schemas (optional) + /subflows (optional) + workflow.sw.{json|yaml|yml} (mandatory) + Usage: - kn workflow [command] + kn workflow [command] Aliases: - kn workflow, kn-workflow + kn workflow, kn-workflow Available Commands: completion Generate the autocompletion script for the specified shell @@ -72,11 +102,12 @@ Available Commands: version Show the version Flags: - -h, --help help for kn workflow - -v, --version version for kn workflow + -h, --help help for kn workflow + -v, --version version for kn workflow -Use "kn [command] --help" for more information about a command. +Use "kn workflow [command] --help" for more information about a command. ---- +-- [[proc-create-sw-project-kn-cli]] == Creating a workflow project using Knative CLI @@ -89,12 +120,9 @@ The `create` command sets up {product_name} project containing a minimal "hello * {product_name} plug-in for Knative CLI is installed. + For more information about installing the plug-in, see <>. -ifeval::["{kogito_version_redhat}" != ""] -* You followed the steps in xref:use-cases/advanced-developer-use-cases/getting-started/create-your-first-workflow-service.adoc#proc-configuring-maven-rhbq[Configuring your Maven project to Red Hat build of Quarkus and OpenShift Serverless Logic] -endif::[] .Procedure -. In Knative CLI, enter the following command to create a new project: +. You can use the following command to create a new project: + -- .Creates a project named `new-project` @@ -112,12 +140,21 @@ kn workflow create --name my-project ---- -- +This will scaffold a directory named `my-project` with a simple workflow in JSON format. 
You can overwrite the format of the workflow to YAML by using the `[--yaml-workflow]` flag as follows: + +.Create a project named `my-project` with default workflow in YAML format +[source,shell] +---- +kn workflow create --name my-project --yaml-workflow +---- +-- + [[proc-build-sw-project-kn-cli]] == Running a workflow project using Knative CLI -After creating your workflow project, you can use the `run` command with `kn workflow` to build & run your workflow project in your current directory. +After creating your workflow project, you can use the `run` command with `kn workflow` to build & run your workflow project. You must be in the root folder of your workflow project. -This will start a {product_name} docker image and map your local folder to this image. +This plugin will build your project and start a {product_name} container image that will be mapped to your local folder. .Prerequisites * {product_name} plug-in for Knative CLI is installed. @@ -127,11 +164,10 @@ For more information about installing the plug-in, see <>. -* Minikube cluster is running locally. .Procedure -. In Knative CLI, enter the following command to build and run your workflow project: +. Enter the following command to build and run your workflow project: + -- .Run the project and start a local development image. @@ -140,14 +176,24 @@ For more information about creating a workflow project, see <]` flag as follows: + +.Run a project and start a local development image using different port +[source,shell] +---- +kn workflow run --port 8081 +---- +-- [[proc-gen-manifests-sw-project-kn-cli]] == Generating a list of Operator manifests using Knative CLI After creating your workflow project, you can use the `gen-manifest` command with `kn workflow` to generate operator manifest files for your workflow project in your current directory. -This will create a new file in `./manifests` directory in your project. +This will create a new file in `./manifests` directory in your project. 
.Prerequisites * {product_name} plug-in for Knative CLI is installed. @@ -157,11 +203,12 @@ For more information about installing the plug-in, see <>. -* Minikube cluster is running locally. + +* You have set up your environment according to the xref:getting-started/preparing-environment.adoc#proc-minimal-local-environment-setup[minimal environment setup] guide. .Procedure -. In Knative CLI, enter the following command to generate operator manifests for your workflow project: +. Enter the following command to generate operator manifests for your workflow project: + -- .Generate the operator manifest files for your project. @@ -180,11 +227,12 @@ kubectl apply -f manifests/01-sonataflow_hello.yaml -n ---- -- +For more options with `gen-manifest` command use `[-h|--help]`. [[proc-deploy-sw-project-kn-cli]] == Deploying a workflow project using Knative CLI -You can use the `deploy` command combined with `kn workflow` to deploy your workflow project in your current directory. +You can use the `deploy` command combined with `kn workflow` to deploy your workflow project. You must be in the root folder of your workflow project. .Prerequisites * {product_name} plug-in for Knative CLI is installed. @@ -195,16 +243,18 @@ For more information about installing the plug-in, see <>. -* A minikube cluster is running locally. +* You have set up your environment according to the xref:getting-started/preparing-environment.adoc#proc-minimal-local-environment-setup[minimal environment setup] guide. + +* You have installed {operator_name} in your kubernetes cluster according to xref:cloud/operator/install-serverless-operator.adoc[operator installation] guide. .Procedure -. In Knative CLI, enter the following command to deploy your workflow project: +. 
Enter the following command to deploy your workflow project: + -- -.Deploy a workflow project +.Deploy a workflow project, you must specify a namespace [source,shell] ---- -kn workflow deploy +kn workflow deploy --namespace ---- Also, ensure that you have access to your cluster and your cluster can access the generated container image. @@ -231,7 +281,7 @@ ifeval::["{kogito_version_redhat}" != ""] endif::[] .Procedure -. In Knative CLI, enter the following command to create a new project: +. Enter the following command to create a new project: + -- .Creates a project named `new-project` @@ -264,6 +314,8 @@ When you run the `create` command for the first time, it might take a while due ==== -- +For more options with `quarkus create` command use `[-h|--help]`. + [[proc-build-quarkus-sw-project-kn-cli]] == Building a Quarkus workflow project using Knative CLI @@ -281,7 +333,7 @@ For more information about installing the plug-in, see <>. .Procedure -. In Knative CLI, enter the following command to build your workflow project: +. Enter the following command to build your workflow project: + -- .Build the project and generate a local image named `dev.local/my-project` @@ -379,6 +431,8 @@ kn workflow quarkus build --image my-project --push ---- -- +For more options with `quarkus build` command use `[-h|--help]`. + [[proc-deploy-quarkus-sw-project-kn-cli]] == Deploying a Quarkus workflow project using Knative CLI @@ -397,8 +451,10 @@ For more information about created a workflow project, see <>. +* You have set up your environment according to the xref:getting-started/preparing-environment.adoc#proc-minimal-local-environment-setup[minimal environment setup] guide. + .Procedure -. In Knative CLI, enter the following command to deploy your workflow project: +. 
Enter the following command to deploy your workflow project: + -- .Deploy a workflow project @@ -409,10 +465,10 @@ kn workflow quarkus deploy If the deployment files (`knative.yml` and `kogito.yml`) are saved in any other folder instead of `./target/kubernetes`, then you can override the path using the `--path` flag with deployment command as follows: -.Deploy a workflow project using `--path` +.Deploy a workflow project using `--path` with `knative.yml` [source,shell] ---- -kn workflow quarkus deploy --path other-path +kn workflow quarkus deploy --path ---- Also, ensure that you have access to your cluster and your cluster can access the generated container image. @@ -427,6 +483,6 @@ You can use the `kubectl` command line if you want to use a complex deployment s == Additional resources * xref:getting-started/create-your-first-workflow-service-with-kn-cli-and-vscode.adoc[Creating your first SonataFlow project] -* xref:use-cases/advanced-developer-use-cases/getting-started/create-your-first-workflow-service.adoc[Creating your first Quarkus Workflow project] +* xref:use-cases/advanced-developer-use-cases/getting-started/create-your-first-workflow-service.adoc[Creating your first SonataFlow project with Quarkus] include::../../pages/_common-content/report-issue.adoc[] diff --git a/modules/serverless-logic/pages/tooling/serverless-logic-web-tools/serverless-logic-web-tools-overview.adoc b/modules/serverless-logic/pages/tooling/serverless-logic-web-tools/serverless-logic-web-tools-overview.adoc index 9ad80083..44e1a328 100644 --- a/modules/serverless-logic/pages/tooling/serverless-logic-web-tools/serverless-logic-web-tools-overview.adoc +++ b/modules/serverless-logic/pages/tooling/serverless-logic-web-tools/serverless-logic-web-tools-overview.adoc @@ -6,6 +6,11 @@ The link:{serverless_logic_web_tools_url}[{serverless_logic_web_tools_name}] is a web application that enables you to create and synchronize your {product_name}, decision files, and Dashbuilder files in a 
single interface. Also, the {serverless_logic_web_tools_name} application provides the integrations that are needed to deploy and test the {product_name} models in development mode. +[IMPORTANT] +==== +{serverless_logic_web_tools_name} is Development Support Scope only. For more information about the support scope of Red Hat Development Support, see https://access.redhat.com/solutions/7031210. +==== + .Home page of {serverless_logic_web_tools_name} image::tooling/serverless-logic-web-tools/serverless-logic-web-tools-overview.png[] diff --git a/modules/serverless-logic/pages/tooling/serverless-workflow-editor/swf-editor-chrome-extension.adoc b/modules/serverless-logic/pages/tooling/serverless-workflow-editor/swf-editor-chrome-extension.adoc index b059c115..ef3917d1 100644 --- a/modules/serverless-logic/pages/tooling/serverless-workflow-editor/swf-editor-chrome-extension.adoc +++ b/modules/serverless-logic/pages/tooling/serverless-workflow-editor/swf-editor-chrome-extension.adoc @@ -10,6 +10,11 @@ The Google Chrome extension for the Serverless Workflow editor enables you to vi This document describes how to install and use the Chrome extension for Serverless Workflow editor on GitHub. +[IMPORTANT] +==== +The Chrome extension for Serverless Workflow editor on GitHub is Development Support Scope only. For more information about the support scope of Red Hat Development Support, see https://access.redhat.com/solutions/7031210. 
+==== + [[proc-install-chrome-extension-sw-editor]] == Installing the Chrome extension for Serverless Workflow editor on GitHub diff --git a/modules/serverless-logic/pages/tooling/serverless-workflow-editor/swf-editor-vscode-extension.adoc b/modules/serverless-logic/pages/tooling/serverless-workflow-editor/swf-editor-vscode-extension.adoc index e865eda8..ee5958ba 100644 --- a/modules/serverless-logic/pages/tooling/serverless-workflow-editor/swf-editor-vscode-extension.adoc +++ b/modules/serverless-logic/pages/tooling/serverless-workflow-editor/swf-editor-vscode-extension.adoc @@ -12,6 +12,11 @@ The VS Code extension for the Serverless Workflow editor enables you to view and This document describes how to install and use the VS Code extension for Serverless Workflow editor, along with how to load OpenAPI files using the extension. +[IMPORTANT] +==== +{sonataflow_vscode_extension_name} Visual Studio extension is Development Support Scope only. For more information about the support scope of Red Hat Development Support, see https://access.redhat.com/solutions/7031210. +==== + [[proc-install-vscode-extension-sw-editor]] == Installing the VS Code Extension for Serverless Workflow editor diff --git a/modules/serverless-logic/pages/use-cases/advanced-developer-use-cases/integrations/expose-metrics-to-prometheus.adoc b/modules/serverless-logic/pages/use-cases/advanced-developer-use-cases/integrations/expose-metrics-to-prometheus.adoc index 915dd318..ee6ce25d 100644 --- a/modules/serverless-logic/pages/use-cases/advanced-developer-use-cases/integrations/expose-metrics-to-prometheus.adoc +++ b/modules/serverless-logic/pages/use-cases/advanced-developer-use-cases/integrations/expose-metrics-to-prometheus.adoc @@ -145,14 +145,29 @@ For more information, see < + org.kie + kie-addons-quarkus-monitoring-sonataflow + +---- + +The metric `sonataflow_input_parameters_counter`, which tracks input parameters, will be available. 
This metric is optional because it counts every workflow input parameter key value combination, which might be space consuming. -* `kogito_process_instance_completed_total`: Completed workflows -* `kogito_process_instance_started_total`: Started workflows -* `kogito_process_instance_running_total`: Running workflows -* `kogito_process_instance_duration_seconds_sum`: Workflows total duration [NOTE] ====