diff --git a/.github/workflows/cypress-e2e-reporting-test.yml b/.github/workflows/cypress-e2e-reporting-test.yml index 35d51b64..f44d323b 100644 --- a/.github/workflows/cypress-e2e-reporting-test.yml +++ b/.github/workflows/cypress-e2e-reporting-test.yml @@ -7,8 +7,8 @@ env: # avoid warnings like "tput: No value for $TERM and no -T specified" TERM: xterm OPENSEARCH_DASHBOARDS_VERSION: '2.x' - OPENSEARCH_VERSION: '2.14.0' - OPENSEARCH_PLUGIN_VERSION: '2.14.0.0' + OPENSEARCH_VERSION: '2.17.1' + OPENSEARCH_PLUGIN_VERSION: '2.17.1.0' PLUGIN_NAME: dashboards-reporting jobs: @@ -154,14 +154,14 @@ jobs: working-directory: OpenSearch-Dashboards/plugins/${{ env.PLUGIN_NAME }} - name: Capture failure screenshots - uses: actions/upload-artifact@v1 + uses: actions/upload-artifact@v4 if: failure() with: name: cypress-screenshots-${{ matrix.os }} path: OpenSearch-Dashboards/plugins/${{ env.PLUGIN_NAME }}/.cypress/screenshots - name: Capture test video - uses: actions/upload-artifact@v1 + uses: actions/upload-artifact@v4 if: failure() with: name: cypress-videos-${{ matrix.os }} diff --git a/.github/workflows/dashboards-reports-test-and-build-workflow.yml b/.github/workflows/dashboards-reports-test-and-build-workflow.yml index 046add6b..34c6669d 100644 --- a/.github/workflows/dashboards-reports-test-and-build-workflow.yml +++ b/.github/workflows/dashboards-reports-test-and-build-workflow.yml @@ -6,7 +6,7 @@ env: PLUGIN_NAME: reportsDashboards ARTIFACT_NAME: reports-dashboards OPENSEARCH_VERSION: 'main' - OPENSEARCH_PLUGIN_VERSION: 2.14.0.0 + OPENSEARCH_PLUGIN_VERSION: 2.17.1.0 jobs: @@ -66,126 +66,127 @@ jobs: whoami && yarn build && mv -v ./build/*.zip ./build/${{ env.ARTIFACT_NAME }}-${{ env.OPENSEARCH_PLUGIN_VERSION }}.zip" - name: Upload Artifact For Linux - uses: actions/upload-artifact@v1 + uses: actions/upload-artifact@v4 with: name: dashboards-reports-linux - path: ./OpenSearch-Dashboards/plugins/${{ env.PLUGIN_NAME }}/build/${{ env.ARTIFACT_NAME }}-${{ env.OPENSEARCH_PLUGIN_VERSION }}.zip - - windows-build: - runs-on: windows-latest - steps: - - # Enable longer filenames for windows - - name: Enable longer filenames - run: git config --system core.longpaths true - - - name: Checkout OpenSearch Dashboards - uses: actions/checkout@v1 - with: - repository: opensearch-project/Opensearch-Dashboards - ref: ${{ env.OPENSEARCH_VERSION }} - path: OpenSearch-Dashboards - - - name: Setup Node - uses: actions/setup-node@v3 - with: - node-version-file: '../OpenSearch-Dashboards/.nvmrc' - registry-url: 'https://registry.npmjs.org' - - - name: Install Yarn - # Need to use bash to avoid having a windows/linux specific step - shell: bash - run: | - YARN_VERSION=$(node -p "require('../OpenSearch-Dashboards/package.json').engines.yarn") - echo "Installing yarn@$YARN_VERSION" - npm i -g yarn@$YARN_VERSION - - - run: node -v - - run: yarn -v - - - name: Checkout Plugin - uses: actions/checkout@v1 - with: - path: OpenSearch-Dashboards/plugins/${{ env.PLUGIN_NAME }} - - - name: OpenSearch Dashboards Plugin Bootstrap - uses: nick-fields/retry@v1 - with: - timeout_minutes: 30 - max_attempts: 3 - command: yarn osd bootstrap --single-version=loose - - - name: Test - uses: nick-fields/retry@v1 - with: - timeout_minutes: 30 - max_attempts: 3 - command: yarn test - - - name: Build Artifact - run: | - yarn build - mv ./build/*.zip ./build/${{ env.ARTIFACT_NAME }}-${{ env.OPENSEARCH_PLUGIN_VERSION }}.zip - - - name: Upload Artifact For Windows - uses: actions/upload-artifact@v1 - with: - name: dashboards-reports-windows - path: 
./build/${{ env.ARTIFACT_NAME }}-${{ env.OPENSEARCH_PLUGIN_VERSION }}.zip - - macos-build: - runs-on: macos-latest - steps: - - name: Checkout OpenSearch Dashboards - uses: actions/checkout@v1 - with: - repository: opensearch-project/Opensearch-Dashboards - ref: ${{ env.OPENSEARCH_VERSION }} - path: OpenSearch-Dashboards - - - name: Setup Node - uses: actions/setup-node@v3 - with: - node-version-file: '../OpenSearch-Dashboards/.nvmrc' - registry-url: 'https://registry.npmjs.org' - - - name: Install Yarn - # Need to use bash to avoid having a windows/linux specific step - shell: bash - run: | - YARN_VERSION=$(node -p "require('../OpenSearch-Dashboards/package.json').engines.yarn") - echo "Installing yarn@$YARN_VERSION" - npm i -g yarn@$YARN_VERSION - - - run: node -v - - run: yarn -v - - - name: Checkout Plugin - uses: actions/checkout@v1 - with: - path: OpenSearch-Dashboards/plugins/${{ env.PLUGIN_NAME }} - - - name: OpenSearch Dashboards Plugin Bootstrap - uses: nick-fields/retry@v1 - with: - timeout_minutes: 30 - max_attempts: 3 - command: yarn osd bootstrap --single-version=loose - - - name: Test - uses: nick-fields/retry@v1 - with: - timeout_minutes: 30 - max_attempts: 3 - command: yarn test - - - name: Build Artifact - run: | - yarn build - mv ./build/*.zip ./build/${{ env.ARTIFACT_NAME }}-${{ env.OPENSEARCH_PLUGIN_VERSION }}.zip - - - name: Upload Artifact For MacOS - uses: actions/upload-artifact@v1 - with: - name: dashboards-reports-macosx - path: ./build/${{ env.ARTIFACT_NAME }}-${{ env.OPENSEARCH_PLUGIN_VERSION }}.zip + path: ./OpenSearch-Dashboards/plugins/${{ env.PLUGIN_NAME }}/build/${{ env.ARTIFACT_NAME }}-${{ env.OPENSEARCH_PLUGIN_VERSION }}.zip + + # Commenting out windows and macos build jobs as they are not required for now. + # windows-build: + # runs-on: windows-latest + # steps: + + # # Enable longer filenames for windows + # - name: Enable longer filenames + # run: git config --system core.longpaths true + + # - name: Checkout OpenSearch Dashboards + # uses: actions/checkout@v1 + # with: + # repository: opensearch-project/Opensearch-Dashboards + # ref: ${{ env.OPENSEARCH_VERSION }} + # path: OpenSearch-Dashboards + + # - name: Setup Node + # uses: actions/setup-node@v3 + # with: + # node-version-file: '../OpenSearch-Dashboards/.nvmrc' + # registry-url: 'https://registry.npmjs.org' + + # - name: Install Yarn + # # Need to use bash to avoid having a windows/linux specific step + # shell: bash + # run: | + # YARN_VERSION=$(node -p "require('../OpenSearch-Dashboards/package.json').engines.yarn") + # echo "Installing yarn@$YARN_VERSION" + # npm i -g yarn@$YARN_VERSION + + # - run: node -v + # - run: yarn -v + + # - name: Checkout Plugin + # uses: actions/checkout@v1 + # with: + # path: OpenSearch-Dashboards/plugins/${{ env.PLUGIN_NAME }} + + # - name: OpenSearch Dashboards Plugin Bootstrap + # uses: nick-fields/retry@v1 + # with: + # timeout_minutes: 30 + # max_attempts: 3 + # command: yarn osd bootstrap --single-version=loose + + # - name: Test + # uses: nick-fields/retry@v1 + # with: + # timeout_minutes: 30 + # max_attempts: 3 + # command: yarn test + + # - name: Build Artifact + # run: | + # yarn build + # mv ./build/*.zip ./build/${{ env.ARTIFACT_NAME }}-${{ env.OPENSEARCH_PLUGIN_VERSION }}.zip + + # - name: Upload Artifact For Windows + # uses: actions/upload-artifact@v4 + # with: + # name: dashboards-reports-windows + # path: ./build/${{ env.ARTIFACT_NAME }}-${{ env.OPENSEARCH_PLUGIN_VERSION }}.zip + + # macos-build: + # runs-on: macos-latest + # steps: + # - name: 
Checkout OpenSearch Dashboards + # uses: actions/checkout@v1 + # with: + # repository: opensearch-project/Opensearch-Dashboards + # ref: ${{ env.OPENSEARCH_VERSION }} + # path: OpenSearch-Dashboards + + # - name: Setup Node + # uses: actions/setup-node@v3 + # with: + # node-version-file: '../OpenSearch-Dashboards/.nvmrc' + # registry-url: 'https://registry.npmjs.org' + + # - name: Install Yarn + # # Need to use bash to avoid having a windows/linux specific step + # shell: bash + # run: | + # YARN_VERSION=$(node -p "require('../OpenSearch-Dashboards/package.json').engines.yarn") + # echo "Installing yarn@$YARN_VERSION" + # npm i -g yarn@$YARN_VERSION + + # - run: node -v + # - run: yarn -v + + # - name: Checkout Plugin + # uses: actions/checkout@v1 + # with: + # path: OpenSearch-Dashboards/plugins/${{ env.PLUGIN_NAME }} + + # - name: OpenSearch Dashboards Plugin Bootstrap + # uses: nick-fields/retry@v1 + # with: + # timeout_minutes: 30 + # max_attempts: 3 + # command: yarn osd bootstrap --single-version=loose + + # - name: Test + # uses: nick-fields/retry@v1 + # with: + # timeout_minutes: 30 + # max_attempts: 3 + # command: yarn test + + # - name: Build Artifact + # run: | + # yarn build + # mv ./build/*.zip ./build/${{ env.ARTIFACT_NAME }}-${{ env.OPENSEARCH_PLUGIN_VERSION }}.zip + + # - name: Upload Artifact For MacOS + # uses: actions/upload-artifact@v4 + # with: + # name: dashboards-reports-macosx + # path: ./build/${{ env.ARTIFACT_NAME }}-${{ env.OPENSEARCH_PLUGIN_VERSION }}.zip diff --git a/.github/workflows/dev-environment.yml b/.github/workflows/dev-environment.yml new file mode 100644 index 00000000..471c633c --- /dev/null +++ b/.github/workflows/dev-environment.yml @@ -0,0 +1,96 @@ +# This workflow downloads the source code at the given git reference +# (branch, tag or commit), an sets up an environment (Kibana or OpenSearch) +# to run this code and a command (build, test, ...). +# +# This workflow is used as a base for other workflows. + +name: Base workflow - Environment + +on: + workflow_call: + inputs: + reference: + required: true + type: string + default: master + description: Source code reference (branch, tag or commit SHA). + command: + required: true + type: string + default: 'yarn build' + description: Command to run in the environment + docker_run_extra_args: + type: string + default: '' + description: Additional paramaters for the docker run command. + required: false + artifact_name: + type: string + default: '' + description: Artifact name (will be automatically suffixed with .zip) + required: false + artifact_path: + type: string + default: '' + description: Folder to include in the archive. + required: false + notify_jest_coverage_summary: + type: boolean + default: false + required: false + +jobs: + # Deploy the plugin in a development environment and run a command + # using a pre-built Docker image, hosted in Quay.io. + deploy_and_run_command: + name: Deploy and run command + runs-on: ubuntu-latest + steps: + - name: Step 01 - Download the plugin's source code + uses: actions/checkout@v3 + with: + repository: wazuh/wazuh-dashboards-reporting + ref: ${{ inputs.reference }} + path: wazuh-dashboards-reporting + + # Fix source code ownership so the internal user of the Docker + # container is also owner. 
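+      # The 1000:1000 owner is assumed to match the non-root node user inside
+      # the quay.io/wazuh/osd-dev image used below; adjust it if the image
+      # runs as a different UID/GID.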
+ - name: Step 02 - Change code ownership + run: sudo chown 1000:1000 -R wazuh-dashboards-reporting; + + - name: Step 03 - Set up the environment and run the command + run: | + # Read the platform version from the package.json file + echo "Reading the platform version from the package.json..."; + platform_version=$(jq -r '.opensearchDashboards.version | select(. != null)' wazuh-dashboards-reporting/package.json); + echo "Plugin platform version: $platform_version"; + + # Up the environment and run the command + docker run -t --rm \ + -e OPENSEARCH_DASHBOARDS_VERSION=${platform_version} \ + -v `pwd`/wazuh-dashboards-reporting:/home/node/kbn/plugins/wazuh-dashboards-reporting \ + ${{ inputs.docker_run_extra_args }} \ + quay.io/wazuh/osd-dev:${platform_version} \ + bash -c ' + yarn config set registry https://registry.yarnpkg.com; + cd /home/node/kbn/plugins/wazuh-dashboards-reporting && yarn && ${{ inputs.command }}; + ' + - name: Get the plugin version + run: | + echo "version=$(jq -r '.wazuh.version' $(pwd)/wazuh-dashboards-reporting/package.json)" >> $GITHUB_ENV + echo "revision=$(jq -r '.wazuh.revision' $(pwd)/wazuh-dashboards-reporting/package.json)" >> $GITHUB_ENV + + - name: Step 04 - Upload artifact to GitHub + if: ${{ inputs.artifact_name && inputs.artifact_path }} + uses: actions/upload-artifact@v3 + with: + name: ${{ inputs.artifact_name }}_${{ env.version }}-${{ env.revision }}_${{ inputs.reference }}.zip + path: ${{ inputs.artifact_path }} + + - name: Step 05 - Upload coverage results to GitHub + if: ${{ inputs.notify_jest_coverage_summary && github.event_name == 'pull_request' }} + uses: AthleticNet/comment-test-coverage@1.2.2 + with: + token: ${{ secrets.GITHUB_TOKEN }} + path: ./wazuh-dashboards-reporting/target/test-coverage/coverage-summary.json + title: "Code coverage (Jest)" \ No newline at end of file diff --git a/.github/workflows/ftr-e2e-reporting-test.yml b/.github/workflows/ftr-e2e-reporting-test.yml index 63a1f4a0..da5d2aa8 100644 --- a/.github/workflows/ftr-e2e-reporting-test.yml +++ b/.github/workflows/ftr-e2e-reporting-test.yml @@ -7,8 +7,8 @@ env: # avoid warnings like "tput: No value for $TERM and no -T specified" TERM: xterm OPENSEARCH_DASHBOARDS_VERSION: '2.x' - OPENSEARCH_VERSION: '2.14.0' - OPENSEARCH_PLUGIN_VERSION: '2.14.0.0' + OPENSEARCH_VERSION: '2.17.1' + OPENSEARCH_PLUGIN_VERSION: '2.17.1.0' PLUGIN_NAME: dashboards-reporting jobs: @@ -162,14 +162,14 @@ jobs: working-directory: opensearch-dashboards-functional-test - name: Capture failure screenshots - uses: actions/upload-artifact@v1 + uses: actions/upload-artifact@v4 if: failure() with: name: cypress-screenshots-${{ matrix.os }} path: opensearch-dashboards-functional-test/cypress/screenshots - name: Capture test video - uses: actions/upload-artifact@v1 + uses: actions/upload-artifact@v4 if: failure() with: name: cypress-videos-${{ matrix.os }} diff --git a/.github/workflows/manual-build.yml b/.github/workflows/manual-build.yml new file mode 100644 index 00000000..09144d39 --- /dev/null +++ b/.github/workflows/manual-build.yml @@ -0,0 +1,34 @@ +# This workflow builds a production-ready package from the given Git reference. +# Any branch, tag or commit SHA existing in the origin can be used. +# +# This workflow is based on the `dev-environment` workflow. 
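+#
+# For example, once this workflow is available on the default branch, a build
+# can be dispatched from the GitHub CLI with an invocation along the lines of:
+#
+#   gh workflow run manual-build.yml -f reference=4.10.0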
+ +name: Manual build + +on: + workflow_call: + inputs: + reference: + required: true + type: string + description: Source code reference (branch, tag or commit SHA) + default: 4.10.0 + workflow_dispatch: + inputs: + reference: + required: true + type: string + default: master + description: Source code reference (branch, tag or commit SHA) + +jobs: + # Build an app package from the given source code reference. + build: + name: Build app package + uses: ./.github/workflows/dev-environment.yml + with: + reference: ${{ inputs.reference }} + command: 'yarn build' + artifact_name: 'reports-dashboards' + artifact_path: './wazuh-dashboards-reporting/build' + secrets: inherit diff --git a/common/index.ts b/common/index.ts index ce180a09..7b322767 100644 --- a/common/index.ts +++ b/common/index.ts @@ -21,6 +21,7 @@ export const OPENSEARCH_REPORTS_API = { const REPORTING_NOTIFICATIONS_API_PREFIX = '/api/reporting_notifications'; export const REPORTING_NOTIFICATIONS_DASHBOARDS_API = Object.freeze({ GET_CONFIGS: `${REPORTING_NOTIFICATIONS_API_PREFIX}/get_configs`, + GET_CONFIG: `${REPORTING_NOTIFICATIONS_API_PREFIX}/get_config`, GET_EVENT: `${REPORTING_NOTIFICATIONS_API_PREFIX}/get_event`, SEND_TEST_MESSAGE: `${REPORTING_NOTIFICATIONS_API_PREFIX}/test_message`, }); @@ -28,6 +29,6 @@ export const REPORTING_NOTIFICATIONS_DASHBOARDS_API = Object.freeze({ const NOTIFICATIONS_API_BASE_PATH = '/_plugins/_notifications'; export const NOTIFICATIONS_API = Object.freeze({ CONFIGS: `${NOTIFICATIONS_API_BASE_PATH}/configs`, - EVENTS: `${NOTIFICATIONS_API_BASE_PATH}/events`, + EVENTS: `${NOTIFICATIONS_API_BASE_PATH}/get_event`, TEST_MESSAGE: `${NOTIFICATIONS_API_BASE_PATH}/feature/test`, }); diff --git a/docker/README.md b/docker/README.md new file mode 100644 index 00000000..b4965257 --- /dev/null +++ b/docker/README.md @@ -0,0 +1,77 @@ +# Wazuh development with Wazuh Stack + +## Requirements + +- vm.max_map_count=262144 + + To modify the vm.max_map_count, you can run this command: + `sudo sysctl -w vm.max_map_count=262144` + +- jq + + To install jq, you can run this command: + + - In Debian/Ubuntu os: + `sudo apt-get install jq` + - In RedHat/CentOS: + `sudo yum install jq` + - In Arch: + `sudo pacman -Sy --noconfirm jq` + - In MAC: + `brew install jq` + +## Usage + +Use always the provided script to bring up or down the development +environment. For example: + +```bash +./dev.sh [-o 1.2.4] [-d 1.2.0] $WZ_HOME up [saml] +``` + +The script will ask you all the required parameters to bring up the +environment, including the version of the elastic stack you want to +develop for, and the source code folder where the wazuh-dashboard-plugins is +located. + +Use the `saml` flag to bring up KeyCloak IDP. **Add idp to your hosts and start +the server using the `--no-base-path`**. + +```apacheconf +# Linux systems: /etc/hosts +# Windows systems: C:\Windows\System32\drivers\etc\hosts +127.0.0.1 idp +``` + +**The script will not select the appropriate version of the +wazuh-dashboard-plugins to use, so be sure to check out the appropriate version +before bringing up the environment!** + +### UI Credentials + +The default user and password to access the UI at https://0.0.0.0:5601/ are: + +``` +admin:admin +``` + +## Notes + +`Wazuh Indexer` and `Wazuh Dashboard` are both a redistribution of a +version of the OpenSearch Stack. We will only create environments for +the versions of OpenSearch which will be included into a Wazuh +version. + +We must use official `Wazuh Indexer` and `Wazuh Dashboard` images for +testing! 
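+
+Once the environment is up, a quick sanity check is to query the indexer's
+REST API with the demo credentials (this assumes the default `admin:admin`
+user and that port 9200 is published on localhost; adjust as needed):
+
+```bash
+curl -k -u admin:admin "https://localhost:9200/_cluster/health?pretty"
+```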
+ +This environment will start a working deployment with: + +- Imposter - a mock server. +- Elasticsearch-exporter - Elasticsearch metrics to Prometheus adapter. +- OpenSearch single-node cluster. +- OpenSearch Dashboards development environment. + +The OpenSearch Dashboards development environment includes an already +bootstrapped Kibana, with all the node modules precompiled and ready to +use in a development session. diff --git a/docker/config/1.x/certs/ca.json b/docker/config/1.x/certs/ca.json new file mode 100644 index 00000000..8a96a70a --- /dev/null +++ b/docker/config/1.x/certs/ca.json @@ -0,0 +1,15 @@ +{ + "CN": "Wazuh", + "key": { + "algo": "rsa", + "size": 2048 + }, + "names": [ + { + "C": "US", + "L": "San Francisco", + "O": "Wazuh", + "OU": "Wazuh Root CA" + } + ] +} diff --git a/docker/config/1.x/certs/cfssl.json b/docker/config/1.x/certs/cfssl.json new file mode 100644 index 00000000..d23daf76 --- /dev/null +++ b/docker/config/1.x/certs/cfssl.json @@ -0,0 +1,58 @@ +{ + "signing": { + "default": { + "expiry": "8760h" + }, + "profiles": { + "intermediate_ca": { + "usages": [ + "signing", + "digital signature", + "key encipherment", + "cert sign", + "crl sign", + "server auth", + "client auth" + ], + "expiry": "8760h", + "ca_constraint": { + "is_ca": true, + "max_path_len": 0, + "max_path_len_zero": true + } + }, + "peer": { + "usages": [ + "signing", + "digital signature", + "key encipherment", + "data encipherment", + "client auth", + "server auth" + ], + "expiry": "8760h" + }, + "server": { + "usages": [ + "signing", + "digital signing", + "key encipherment", + "data encipherment", + "server auth" + ], + "expiry": "8760h" + }, + "client": { + "usages": [ + "signing", + "digital signature", + "key encipherment", + "data encipherment", + "client auth" + ], + "expiry": "8760h" + } + } + } +} + diff --git a/docker/config/1.x/certs/host.json b/docker/config/1.x/certs/host.json new file mode 100644 index 00000000..27805da5 --- /dev/null +++ b/docker/config/1.x/certs/host.json @@ -0,0 +1,19 @@ +{ + "CN": "HOST", + "key": { + "algo": "rsa", + "size": 2048 + }, + "names": [ + { + "C": "US", + "L": "California", + "O": "Wazuh", + "OU": "Wazuh" + } + ], + "hosts": [ + "HOST", + "localhost" + ] +} diff --git a/docker/config/1.x/filebeat/filebeat.yml b/docker/config/1.x/filebeat/filebeat.yml new file mode 100644 index 00000000..92f55312 --- /dev/null +++ b/docker/config/1.x/filebeat/filebeat.yml @@ -0,0 +1,21 @@ +# Wazuh - Filebeat configuration file +filebeat.modules: + - module: wazuh + alerts: + enabled: true + archives: + enabled: false + +setup.template.json.enabled: true +setup.template.json.path: "/etc/filebeat/wazuh-template.json" +setup.template.json.name: "wazuh" +setup.template.overwrite: true +setup.ilm.enabled: false +output.elasticsearch: + hosts: ["https://os1:9200"] + username: "admin" + password: "admin" + ssl.verification_mode: full + ssl.certificate_authorities: ["/etc/ssl/elastic/ca.pem"] + ssl.certificate: "/etc/ssl/elastic/filebeat.pem" + ssl.key: "/etc/ssl/elastic/filebeat-key.pem" diff --git a/docker/config/1.x/os/config-saml.yml b/docker/config/1.x/os/config-saml.yml new file mode 100644 index 00000000..74fc91c8 --- /dev/null +++ b/docker/config/1.x/os/config-saml.yml @@ -0,0 +1,40 @@ +--- +_meta: + type: "config" + config_version: 2 + +config: + dynamic: + http: + anonymous_auth_enabled: false + authc: + internal_auth: + order: 0 + description: "HTTP basic authentication using the internal user database" + http_enabled: true + transport_enabled: true + 
http_authenticator: + type: basic + challenge: false + authentication_backend: + type: internal + saml_auth: + order: 1 + description: "Keycloack SAML provider" + http_enabled: true + transport_enabled: false + http_authenticator: + type: saml + challenge: true + config: + idp: + metadata_url: http://idp:8080/realms/wazuh/protocol/saml/descriptor + entity_id: http://idp:8080/realms/wazuh + sp: + entity_id: wazuh + signature_private_key_filepath: "certs/admin-key.pem" + kibana_url: https://localhost:5601 + roles_key: Role + exchange_key: 1a2a3a4a5a6a7a8a9a0a1b2b3b4b5b6b + authentication_backend: + type: noop diff --git a/docker/config/1.x/os/config.yml b/docker/config/1.x/os/config.yml new file mode 100644 index 00000000..c1385bfb --- /dev/null +++ b/docker/config/1.x/os/config.yml @@ -0,0 +1,20 @@ +--- +_meta: + type: "config" + config_version: 2 + +config: + dynamic: + http: + anonymous_auth_enabled: false + authc: + internal_auth: + order: 0 + description: "HTTP basic authentication using the internal user database" + http_enabled: true + transport_enabled: true + http_authenticator: + type: basic + challenge: false + authentication_backend: + type: internal diff --git a/docker/config/1.x/os/internal_users.yml b/docker/config/1.x/os/internal_users.yml new file mode 100755 index 00000000..21036aaa --- /dev/null +++ b/docker/config/1.x/os/internal_users.yml @@ -0,0 +1,74 @@ +--- +# This is the internal user database +# The hash value is a bcrypt hash and can be generated with plugin/tools/hash.sh + +_meta: + type: "internalusers" + config_version: 2 + +# Define your internal users here + +wazuh_admin: + hash: "$2y$12$d2awHiOYvZjI88VfsDON.u6buoBol0gYPJEgdG1ArKVE0OMxViFfu" + reserved: true + hidden: false + backend_roles: [] + attributes: {} + opendistro_security_roles: ["wazuh_ui_admin"] + static: false + +wazuh_user: + hash: "$2y$12$BQixeoQdRubZdVf/7sq1suHwiVRnSst1.lPI2M0.GPZms4bq2D9vO" + reserved: true + hidden: false + backend_roles: [] + attributes: {} + opendistro_security_roles: ["wazuh_ui_user"] + static: false + +## Demo users + +admin: + hash: "$2a$12$VcCDgh2NDk07JGN0rjGbM.Ad41qVR/YFJcgHp0UGns5JDymv..TOG" + reserved: true + backend_roles: + - "admin" + description: "Demo admin user" + +kibanaserver: + hash: "$2a$12$4AcgAt3xwOWadA5s5blL6ev39OXDNhmOesEoo33eZtrq2N0YrU3H." 
+ reserved: true + description: "Demo kibanaserver user" + +kibanaro: + hash: "$2a$12$JJSXNfTowz7Uu5ttXfeYpeYE0arACvcwlPBStB1F.MI7f0U9Z4DGC" + reserved: false + backend_roles: + - "kibanauser" + - "readall" + attributes: + attribute1: "value1" + attribute2: "value2" + attribute3: "value3" + description: "Demo kibanaro user" + +logstash: + hash: "$2a$12$u1ShR4l4uBS3Uv59Pa2y5.1uQuZBrZtmNfqB3iM/.jL0XoV9sghS2" + reserved: false + backend_roles: + - "logstash" + description: "Demo logstash user" + +readall: + hash: "$2a$12$ae4ycwzwvLtZxwZ82RmiEunBbIPiAmGZduBAjKN0TXdwQFtCwARz2" + reserved: false + backend_roles: + - "readall" + description: "Demo readall user" + +snapshotrestore: + hash: "$2y$12$DpwmetHKwgYnorbgdvORCenv4NAK8cPUg8AI6pxLCuWf/ALc0.v7W" + reserved: false + backend_roles: + - "snapshotrestore" + description: "Demo snapshotrestore user" diff --git a/docker/config/1.x/os/opensearch.yml b/docker/config/1.x/os/opensearch.yml new file mode 100644 index 00000000..ee1dbf59 --- /dev/null +++ b/docker/config/1.x/os/opensearch.yml @@ -0,0 +1,42 @@ +network.host: "0.0.0.0" +node.name: "os1" +path.data: /var/lib/os1 +path.logs: /var/log/os1 +# comment compatibility.override_main_response_version for 2.0.0 +compatibility.override_main_response_version: true +plugins.security.ssl.http.pemcert_filepath: ${OPENSEARCH_PATH_CONF}/certs/os1.pem +plugins.security.ssl.http.pemkey_filepath: ${OPENSEARCH_PATH_CONF}/certs/os1.key +plugins.security.ssl.http.pemtrustedcas_filepath: ${OPENSEARCH_PATH_CONF}/certs/ca.pem +plugins.security.ssl.transport.pemcert_filepath: ${OPENSEARCH_PATH_CONF}/certs/os1.pem +plugins.security.ssl.transport.pemkey_filepath: ${OPENSEARCH_PATH_CONF}/certs/os1.key +plugins.security.ssl.transport.pemtrustedcas_filepath: ${OPENSEARCH_PATH_CONF}/certs/ca.pem +plugins.security.ssl.http.enabled: true +plugins.security.ssl.transport.enforce_hostname_verification: false +plugins.security.ssl.transport.resolve_hostname: false +plugins.security.authcz.admin_dn: + - "CN=admin,OU=Wazuh,O=Wazuh,L=California,C=US" +plugins.security.check_snapshot_restore_write_privileges: true +plugins.security.enable_snapshot_restore_privilege: true +plugins.security.nodes_dn: + - "CN=os1,OU=Wazuh,O=Wazuh,L=California,C=US" +plugins.security.restapi.roles_enabled: + - "all_access" + - "security_rest_api_access" +plugins.security.system_indices.enabled: true +plugins.security.system_indices.indices: + [ + ".opendistro-alerting-config", + ".opendistro-alerting-alert*", + ".opendistro-anomaly-results*", + ".opendistro-anomaly-detector*", + ".opendistro-anomaly-checkpoints", + ".opendistro-anomaly-detection-state", + ".opendistro-reports-*", + ".opendistro-notifications-*", + ".opendistro-notebooks", + ".opensearch-observability", + ".opendistro-asynchronous-search-response*", + ".replication-metadata-store", + ] +plugins.security.allow_default_init_securityindex: true +cluster.routing.allocation.disk.threshold_enabled: false diff --git a/docker/config/1.x/os/roles.yml b/docker/config/1.x/os/roles.yml new file mode 100644 index 00000000..16894a9f --- /dev/null +++ b/docker/config/1.x/os/roles.yml @@ -0,0 +1,163 @@ +_meta: + type: "roles" + config_version: 2 + +# Restrict users so they can only view visualization and dashboards on kibana +kibana_read_only: + reserved: true + +# The security REST API access role is used to assign specific users access to change the security settings through the REST API. 
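+# In this environment the role takes effect because it is listed under
+# plugins.security.restapi.roles_enabled in opensearch.yml.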
+security_rest_api_access: + reserved: true + +# Allows users to view monitors, destinations and alerts +alerting_read_access: + reserved: true + cluster_permissions: + - "cluster:admin/opendistro/alerting/alerts/get" + - "cluster:admin/opendistro/alerting/destination/get" + - "cluster:admin/opendistro/alerting/monitor/get" + - "cluster:admin/opendistro/alerting/monitor/search" + +# Allows users to view and acknowledge alerts +alerting_ack_alerts: + reserved: true + cluster_permissions: + - "cluster:admin/opendistro/alerting/alerts/*" + +# Allows users to use all alerting functionality +alerting_full_access: + reserved: true + cluster_permissions: + - "cluster_monitor" + - "cluster:admin/opendistro/alerting/*" + index_permissions: + - index_patterns: + - "*" + allowed_actions: + - "indices_monitor" + - "indices:admin/aliases/get" + - "indices:admin/mappings/get" + +# Allow users to read Anomaly Detection detectors and results +anomaly_read_access: + reserved: true + cluster_permissions: + - "cluster:admin/opendistro/ad/detector/info" + - "cluster:admin/opendistro/ad/detector/search" + - "cluster:admin/opendistro/ad/detectors/get" + - "cluster:admin/opendistro/ad/result/search" + - "cluster:admin/opendistro/ad/tasks/search" + +# Allows users to use all Anomaly Detection functionality +anomaly_full_access: + reserved: true + cluster_permissions: + - "cluster_monitor" + - "cluster:admin/opendistro/ad/*" + index_permissions: + - index_patterns: + - "*" + allowed_actions: + - "indices_monitor" + - "indices:admin/aliases/get" + - "indices:admin/mappings/get" + +# Allows users to read Notebooks +notebooks_read_access: + reserved: true + cluster_permissions: + - "cluster:admin/opendistro/notebooks/list" + - "cluster:admin/opendistro/notebooks/get" + +# Allows users to all Notebooks functionality +notebooks_full_access: + reserved: true + cluster_permissions: + - "cluster:admin/opendistro/notebooks/create" + - "cluster:admin/opendistro/notebooks/update" + - "cluster:admin/opendistro/notebooks/delete" + - "cluster:admin/opendistro/notebooks/get" + - "cluster:admin/opendistro/notebooks/list" + +# Allows users to read and download Reports +reports_instances_read_access: + reserved: true + cluster_permissions: + - "cluster:admin/opendistro/reports/instance/list" + - "cluster:admin/opendistro/reports/instance/get" + - "cluster:admin/opendistro/reports/menu/download" + +# Allows users to read and download Reports and Report-definitions +reports_read_access: + reserved: true + cluster_permissions: + - "cluster:admin/opendistro/reports/definition/get" + - "cluster:admin/opendistro/reports/definition/list" + - "cluster:admin/opendistro/reports/instance/list" + - "cluster:admin/opendistro/reports/instance/get" + - "cluster:admin/opendistro/reports/menu/download" + +# Allows users to all Reports functionality +reports_full_access: + reserved: true + cluster_permissions: + - "cluster:admin/opendistro/reports/definition/create" + - "cluster:admin/opendistro/reports/definition/update" + - "cluster:admin/opendistro/reports/definition/on_demand" + - "cluster:admin/opendistro/reports/definition/delete" + - "cluster:admin/opendistro/reports/definition/get" + - "cluster:admin/opendistro/reports/definition/list" + - "cluster:admin/opendistro/reports/instance/list" + - "cluster:admin/opendistro/reports/instance/get" + - "cluster:admin/opendistro/reports/menu/download" + +# Allows users to use all asynchronous-search functionality +asynchronous_search_full_access: + reserved: true + cluster_permissions: + - 
"cluster:admin/opendistro/asynchronous_search/*" + index_permissions: + - index_patterns: + - "*" + allowed_actions: + - "indices:data/read/search*" + +# Allows users to read stored asynchronous-search results +asynchronous_search_read_access: + reserved: true + cluster_permissions: + - "cluster:admin/opendistro/asynchronous_search/get" + +wazuh_ui_user: + reserved: true + hidden: false + cluster_permissions: [] + index_permissions: + - index_patterns: + - "wazuh-*" + dls: "" + fls: [] + masked_fields: [] + allowed_actions: + - "read" + tenant_permissions: [] + static: false + +wazuh_ui_admin: + reserved: true + hidden: false + cluster_permissions: [] + index_permissions: + - index_patterns: + - "wazuh-*" + dls: "" + fls: [] + masked_fields: [] + allowed_actions: + - "read" + - "delete" + - "manage" + - "index" + tenant_permissions: [] + static: false diff --git a/docker/config/1.x/os/roles_mapping.yml b/docker/config/1.x/os/roles_mapping.yml new file mode 100644 index 00000000..4eca848c --- /dev/null +++ b/docker/config/1.x/os/roles_mapping.yml @@ -0,0 +1,71 @@ +--- +# In this file users, backendroles and hosts can be mapped to Wazuh indexer Security roles. +# Permissions for Wazuh indexer roles are configured in roles.yml + +_meta: + type: "rolesmapping" + config_version: 2 + +# Define your roles mapping here + +## Demo roles mapping + +all_access: + reserved: false + backend_roles: + - "admin" + description: "Maps admin to all_access" + +own_index: + reserved: false + users: + - "*" + description: "Allow full access to an index named like the username" + +logstash: + reserved: false + backend_roles: + - "logstash" + +kibana_user: + reserved: false + backend_roles: + - "kibanauser" + users: + - "wazuh_user" + - "wazuh_admin" + description: "Maps kibanauser to kibana_user" + +readall: + reserved: false + backend_roles: + - "readall" + +manage_snapshots: + reserved: false + backend_roles: + - "snapshotrestore" + +kibana_server: + reserved: true + users: + - "kibanaserver" + +wazuh_ui_admin: + reserved: true + hidden: false + backend_roles: [] + hosts: [] + users: + - "wazuh_admin" + - "kibanaserver" + and_backend_roles: [] + +wazuh_ui_user: + reserved: true + hidden: false + backend_roles: [] + hosts: [] + users: + - "wazuh_user" + and_backend_roles: [] diff --git a/docker/config/1.x/osd/opensearch_dashboards.yml b/docker/config/1.x/osd/opensearch_dashboards.yml new file mode 100755 index 00000000..a7804f54 --- /dev/null +++ b/docker/config/1.x/osd/opensearch_dashboards.yml @@ -0,0 +1,14 @@ +server.host: 0.0.0.0 +server.port: 5601 +opensearch.hosts: https://os1:9200 +opensearch.ssl.verificationMode: certificate +opensearch.requestHeadersWhitelist: ["securitytenant", "Authorization"] +opensearch_security.multitenancy.enabled: false +opensearch_security.readonly_mode.roles: ["kibana_read_only"] +server.ssl.enabled: true +server.ssl.key: "/home/node/kbn/certs/osd.key" +server.ssl.certificate: "/home/node/kbn/certs/osd.pem" +opensearch.ssl.certificateAuthorities: ["/home/node/kbn/certs/ca.pem"] +uiSettings.overrides.defaultRoute: /app/wazuh +opensearch.username: "kibanaserver" +opensearch.password: "kibanaserver" diff --git a/docker/config/1.x/osd/opensearch_dashboards_saml.yml b/docker/config/1.x/osd/opensearch_dashboards_saml.yml new file mode 100755 index 00000000..f5b38f37 --- /dev/null +++ b/docker/config/1.x/osd/opensearch_dashboards_saml.yml @@ -0,0 +1,25 @@ +server.host: 0.0.0.0 +server.port: 5601 +opensearch.hosts: https://os1:9200 +opensearch.ssl.verificationMode: certificate 
+opensearch.requestHeadersWhitelist: ["securitytenant", "Authorization"] +opensearch_security.multitenancy.enabled: false +opensearch_security.readonly_mode.roles: ["kibana_read_only"] +server.ssl.enabled: true +server.ssl.key: "/home/node/kbn/certs/osd.key" +server.ssl.certificate: "/home/node/kbn/certs/osd.pem" +opensearch.ssl.certificateAuthorities: ["/home/node/kbn/certs/ca.pem"] +uiSettings.overrides.defaultRoute: /app/wazuh +opensearch.username: "kibanaserver" +opensearch.password: "kibanaserver" + +opensearch_security.auth.type: "saml" +server.xsrf.whitelist: + [ + /_plugins/_security/saml/acs, + /_opendistro/_security/saml/acs, + /_plugins/_security/saml/acs/idpinitiated, + /_opendistro/_security/saml/acs/idpinitiated, + /_plugins/_security/saml/logout, + /_opendistro/_security/saml/logout, + ] diff --git a/docker/config/1.x/osd/wazuh.yml b/docker/config/1.x/osd/wazuh.yml new file mode 100755 index 00000000..3f3bc90b --- /dev/null +++ b/docker/config/1.x/osd/wazuh.yml @@ -0,0 +1,19 @@ +hosts: + - manager: + url: 'https://wazuh.manager' + port: 55000 + username: wazuh-wui + password: MyS3cr37P450r.*- + run_as: false + - imposter: + url: 'http://imposter' + port: 8080 + username: wazuh-wui + password: MyS3cr37P450r.*- + run_as: false + - imposter-cli: + url: 'http://' + port: 8080 + username: wazuh-wui + password: MyS3cr37P450r.*- + run_as: false diff --git a/docker/config/1.x/wm/wazuh_manager.conf b/docker/config/1.x/wm/wazuh_manager.conf new file mode 100755 index 00000000..aff1af9d --- /dev/null +++ b/docker/config/1.x/wm/wazuh_manager.conf @@ -0,0 +1,353 @@ + + + yes + yes + no + no + no + smtp.example.wazuh.com + wazuh@example.wazuh.com + recipient@example.wazuh.com + 12 + alerts.log + 10m + 0 + + + + 3 + 12 + + + + + plain + + + + secure + 1514 + tcp + 131072 + + + + + no + yes + yes + yes + yes + yes + yes + yes + + + 43200 + + etc/rootcheck/rootkit_files.txt + etc/rootcheck/rootkit_trojans.txt + + yes + + + + yes + 1800 + 1d + yes + + wodles/java + wodles/ciscat + + + + + yes + yes + /var/log/osquery/osqueryd.results.log + /etc/osquery/osquery.conf + yes + + + + + no + 1h + yes + yes + yes + yes + yes + yes + yes + + + + 10 + + + + + yes + yes + 12h + yes + + + + no + 5m + 6h + yes + + + + no + trusty + xenial + bionic + focal + 1h + + + + + no + stretch + buster + bullseye + 1h + + + + + no + 5 + 6 + 7 + 8 + 1h + + + + + no + amazon-linux + amazon-linux-2 + 1h + + + + + no + 1h + + + + + yes + 1h + + + + + yes + 2010 + 1h + + + + + + + no + + + 43200 + + yes + + + yes + + + no + + + /etc,/usr/bin,/usr/sbin + /bin,/sbin,/boot + + + /etc/mtab + /etc/hosts.deny + /etc/mail/statistics + /etc/random-seed + /etc/random.seed + /etc/adjtime + /etc/httpd/logs + /etc/utmpx + /etc/wtmpx + /etc/cups/certs + /etc/dumpdates + /etc/svc/volatile + + + .log$|.swp$ + + + /etc/ssl/private.key + + yes + yes + yes + yes + + + 10 + + + 100 + + + + yes + 5m + 1h + 10 + + + + + + 127.0.0.1 + ^localhost.localdomain$ + 10.0.0.106 + + + + disable-account + disable-account + yes + + + + restart-wazuh + restart-wazuh + + + + firewall-drop + firewall-drop + yes + + + + host-deny + host-deny + yes + + + + route-null + route-null + yes + + + + win_route-null + route-null.exe + yes + + + + netsh + netsh.exe + yes + + + + + + + command + df -P + 360 + + + + full_command + netstat -tulpn | sed 's/\([[:alnum:]]\+\)\ \+[[:digit:]]\+\ \+[[:digit:]]\+\ \+\(.*\):\([[:digit:]]*\)\ \+\([0-9\.\:\*]\+\).\+\ \([[:digit:]]*\/[[:alnum:]\-]*\).*/\1 \2 == \3 == \4 \5/' | sort -k 4 -g | sed 's/ == \(.*\) ==/:\1/' | sed 1,2d 
+ netstat listening ports + 360 + + + + full_command + last -n 20 + 360 + + + + + ruleset/decoders + ruleset/rules + 0215-policy_rules.xml + etc/lists/audit-keys + etc/lists/amazon/aws-eventnames + etc/lists/security-eventchannel + + + etc/decoders + etc/rules + + + + yes + 1 + 64 + 15m + + + + + no + 1515 + no + yes + no + HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH + + no + etc/sslmanager.cert + etc/sslmanager.key + no + + + + wazuh + node01 + master + + 1516 + 0.0.0.0 + + NODE_IP + + no + yes + + + + + + + syslog + /var/ossec/logs/active-responses.log + + + diff --git a/docker/config/2.x/certs/ca.json b/docker/config/2.x/certs/ca.json new file mode 100644 index 00000000..8a96a70a --- /dev/null +++ b/docker/config/2.x/certs/ca.json @@ -0,0 +1,15 @@ +{ + "CN": "Wazuh", + "key": { + "algo": "rsa", + "size": 2048 + }, + "names": [ + { + "C": "US", + "L": "San Francisco", + "O": "Wazuh", + "OU": "Wazuh Root CA" + } + ] +} diff --git a/docker/config/2.x/certs/cfssl.json b/docker/config/2.x/certs/cfssl.json new file mode 100644 index 00000000..d23daf76 --- /dev/null +++ b/docker/config/2.x/certs/cfssl.json @@ -0,0 +1,58 @@ +{ + "signing": { + "default": { + "expiry": "8760h" + }, + "profiles": { + "intermediate_ca": { + "usages": [ + "signing", + "digital signature", + "key encipherment", + "cert sign", + "crl sign", + "server auth", + "client auth" + ], + "expiry": "8760h", + "ca_constraint": { + "is_ca": true, + "max_path_len": 0, + "max_path_len_zero": true + } + }, + "peer": { + "usages": [ + "signing", + "digital signature", + "key encipherment", + "data encipherment", + "client auth", + "server auth" + ], + "expiry": "8760h" + }, + "server": { + "usages": [ + "signing", + "digital signing", + "key encipherment", + "data encipherment", + "server auth" + ], + "expiry": "8760h" + }, + "client": { + "usages": [ + "signing", + "digital signature", + "key encipherment", + "data encipherment", + "client auth" + ], + "expiry": "8760h" + } + } + } +} + diff --git a/docker/config/2.x/certs/host.json b/docker/config/2.x/certs/host.json new file mode 100644 index 00000000..27805da5 --- /dev/null +++ b/docker/config/2.x/certs/host.json @@ -0,0 +1,19 @@ +{ + "CN": "HOST", + "key": { + "algo": "rsa", + "size": 2048 + }, + "names": [ + { + "C": "US", + "L": "California", + "O": "Wazuh", + "OU": "Wazuh" + } + ], + "hosts": [ + "HOST", + "localhost" + ] +} diff --git a/docker/config/2.x/filebeat/filebeat.yml b/docker/config/2.x/filebeat/filebeat.yml new file mode 100644 index 00000000..92f55312 --- /dev/null +++ b/docker/config/2.x/filebeat/filebeat.yml @@ -0,0 +1,21 @@ +# Wazuh - Filebeat configuration file +filebeat.modules: + - module: wazuh + alerts: + enabled: true + archives: + enabled: false + +setup.template.json.enabled: true +setup.template.json.path: "/etc/filebeat/wazuh-template.json" +setup.template.json.name: "wazuh" +setup.template.overwrite: true +setup.ilm.enabled: false +output.elasticsearch: + hosts: ["https://os1:9200"] + username: "admin" + password: "admin" + ssl.verification_mode: full + ssl.certificate_authorities: ["/etc/ssl/elastic/ca.pem"] + ssl.certificate: "/etc/ssl/elastic/filebeat.pem" + ssl.key: "/etc/ssl/elastic/filebeat-key.pem" diff --git a/docker/config/2.x/os/config-saml.yml b/docker/config/2.x/os/config-saml.yml new file mode 100644 index 00000000..74fc91c8 --- /dev/null +++ b/docker/config/2.x/os/config-saml.yml @@ -0,0 +1,40 @@ +--- +_meta: + type: "config" + config_version: 2 + +config: + dynamic: + http: + anonymous_auth_enabled: false + authc: + 
internal_auth: + order: 0 + description: "HTTP basic authentication using the internal user database" + http_enabled: true + transport_enabled: true + http_authenticator: + type: basic + challenge: false + authentication_backend: + type: internal + saml_auth: + order: 1 + description: "Keycloack SAML provider" + http_enabled: true + transport_enabled: false + http_authenticator: + type: saml + challenge: true + config: + idp: + metadata_url: http://idp:8080/realms/wazuh/protocol/saml/descriptor + entity_id: http://idp:8080/realms/wazuh + sp: + entity_id: wazuh + signature_private_key_filepath: "certs/admin-key.pem" + kibana_url: https://localhost:5601 + roles_key: Role + exchange_key: 1a2a3a4a5a6a7a8a9a0a1b2b3b4b5b6b + authentication_backend: + type: noop diff --git a/docker/config/2.x/os/config.yml b/docker/config/2.x/os/config.yml new file mode 100644 index 00000000..c1385bfb --- /dev/null +++ b/docker/config/2.x/os/config.yml @@ -0,0 +1,20 @@ +--- +_meta: + type: "config" + config_version: 2 + +config: + dynamic: + http: + anonymous_auth_enabled: false + authc: + internal_auth: + order: 0 + description: "HTTP basic authentication using the internal user database" + http_enabled: true + transport_enabled: true + http_authenticator: + type: basic + challenge: false + authentication_backend: + type: internal diff --git a/docker/config/2.x/os/internal_users.yml b/docker/config/2.x/os/internal_users.yml new file mode 100755 index 00000000..bf0becfc --- /dev/null +++ b/docker/config/2.x/os/internal_users.yml @@ -0,0 +1,56 @@ +--- +# This is the internal user database +# The hash value is a bcrypt hash and can be generated with plugin/tools/hash.sh + +_meta: + type: "internalusers" + config_version: 2 + +# Define your internal users here + +## Demo users + +admin: + hash: "$2a$12$VcCDgh2NDk07JGN0rjGbM.Ad41qVR/YFJcgHp0UGns5JDymv..TOG" + reserved: true + backend_roles: + - "admin" + description: "Demo admin user" + +kibanaserver: + hash: "$2a$12$4AcgAt3xwOWadA5s5blL6ev39OXDNhmOesEoo33eZtrq2N0YrU3H." 
+ reserved: true + description: "Demo kibanaserver user" + +kibanaro: + hash: "$2a$12$JJSXNfTowz7Uu5ttXfeYpeYE0arACvcwlPBStB1F.MI7f0U9Z4DGC" + reserved: false + backend_roles: + - "kibanauser" + - "readall" + attributes: + attribute1: "value1" + attribute2: "value2" + attribute3: "value3" + description: "Demo kibanaro user" + +logstash: + hash: "$2a$12$u1ShR4l4uBS3Uv59Pa2y5.1uQuZBrZtmNfqB3iM/.jL0XoV9sghS2" + reserved: false + backend_roles: + - "logstash" + description: "Demo logstash user" + +readall: + hash: "$2a$12$ae4ycwzwvLtZxwZ82RmiEunBbIPiAmGZduBAjKN0TXdwQFtCwARz2" + reserved: false + backend_roles: + - "readall" + description: "Demo readall user" + +snapshotrestore: + hash: "$2y$12$DpwmetHKwgYnorbgdvORCenv4NAK8cPUg8AI6pxLCuWf/ALc0.v7W" + reserved: false + backend_roles: + - "snapshotrestore" + description: "Demo snapshotrestore user" diff --git a/docker/config/2.x/os/opensearch.yml b/docker/config/2.x/os/opensearch.yml new file mode 100644 index 00000000..ee1dbf59 --- /dev/null +++ b/docker/config/2.x/os/opensearch.yml @@ -0,0 +1,42 @@ +network.host: "0.0.0.0" +node.name: "os1" +path.data: /var/lib/os1 +path.logs: /var/log/os1 +# comment compatibility.override_main_response_version for 2.0.0 +compatibility.override_main_response_version: true +plugins.security.ssl.http.pemcert_filepath: ${OPENSEARCH_PATH_CONF}/certs/os1.pem +plugins.security.ssl.http.pemkey_filepath: ${OPENSEARCH_PATH_CONF}/certs/os1.key +plugins.security.ssl.http.pemtrustedcas_filepath: ${OPENSEARCH_PATH_CONF}/certs/ca.pem +plugins.security.ssl.transport.pemcert_filepath: ${OPENSEARCH_PATH_CONF}/certs/os1.pem +plugins.security.ssl.transport.pemkey_filepath: ${OPENSEARCH_PATH_CONF}/certs/os1.key +plugins.security.ssl.transport.pemtrustedcas_filepath: ${OPENSEARCH_PATH_CONF}/certs/ca.pem +plugins.security.ssl.http.enabled: true +plugins.security.ssl.transport.enforce_hostname_verification: false +plugins.security.ssl.transport.resolve_hostname: false +plugins.security.authcz.admin_dn: + - "CN=admin,OU=Wazuh,O=Wazuh,L=California,C=US" +plugins.security.check_snapshot_restore_write_privileges: true +plugins.security.enable_snapshot_restore_privilege: true +plugins.security.nodes_dn: + - "CN=os1,OU=Wazuh,O=Wazuh,L=California,C=US" +plugins.security.restapi.roles_enabled: + - "all_access" + - "security_rest_api_access" +plugins.security.system_indices.enabled: true +plugins.security.system_indices.indices: + [ + ".opendistro-alerting-config", + ".opendistro-alerting-alert*", + ".opendistro-anomaly-results*", + ".opendistro-anomaly-detector*", + ".opendistro-anomaly-checkpoints", + ".opendistro-anomaly-detection-state", + ".opendistro-reports-*", + ".opendistro-notifications-*", + ".opendistro-notebooks", + ".opensearch-observability", + ".opendistro-asynchronous-search-response*", + ".replication-metadata-store", + ] +plugins.security.allow_default_init_securityindex: true +cluster.routing.allocation.disk.threshold_enabled: false diff --git a/docker/config/2.x/os/roles.yml b/docker/config/2.x/os/roles.yml new file mode 100644 index 00000000..5b35df44 --- /dev/null +++ b/docker/config/2.x/os/roles.yml @@ -0,0 +1,149 @@ +_meta: + type: "roles" + config_version: 2 + +# Restrict users so they can only view visualization and dashboard on kibana +kibana_read_only: + reserved: true + +# The security REST API access role is used to assign specific users access to change the security settings through the REST API. 
+security_rest_api_access: + reserved: true + +# Allows users to view monitors, destinations and alerts +alerting_read_access: + reserved: true + cluster_permissions: + - "cluster:admin/opendistro/alerting/alerts/get" + - "cluster:admin/opendistro/alerting/destination/get" + - "cluster:admin/opendistro/alerting/monitor/get" + - "cluster:admin/opendistro/alerting/monitor/search" + +# Allows users to view and acknowledge alerts +alerting_ack_alerts: + reserved: true + cluster_permissions: + - "cluster:admin/opendistro/alerting/alerts/*" + +# Allows users to use all alerting functionality +alerting_full_access: + reserved: true + cluster_permissions: + - "cluster_monitor" + - "cluster:admin/opendistro/alerting/*" + index_permissions: + - index_patterns: + - "*" + allowed_actions: + - "indices_monitor" + - "indices:admin/aliases/get" + - "indices:admin/mappings/get" + +# Allow users to read Anomaly Detection detectors and results +anomaly_read_access: + reserved: true + cluster_permissions: + - "cluster:admin/opendistro/ad/detector/info" + - "cluster:admin/opendistro/ad/detector/search" + - "cluster:admin/opendistro/ad/detectors/get" + - "cluster:admin/opendistro/ad/result/search" + - "cluster:admin/opendistro/ad/tasks/search" + +# Allows users to use all Anomaly Detection functionality +anomaly_full_access: + reserved: true + cluster_permissions: + - "cluster_monitor" + - "cluster:admin/opendistro/ad/*" + index_permissions: + - index_patterns: + - "*" + allowed_actions: + - "indices_monitor" + - "indices:admin/aliases/get" + - "indices:admin/mappings/get" + +# Allows users to read Notebooks +notebooks_read_access: + reserved: true + cluster_permissions: + - "cluster:admin/opendistro/notebooks/list" + - "cluster:admin/opendistro/notebooks/get" + +# Allows users to all Notebooks functionality +notebooks_full_access: + reserved: true + cluster_permissions: + - "cluster:admin/opendistro/notebooks/create" + - "cluster:admin/opendistro/notebooks/update" + - "cluster:admin/opendistro/notebooks/delete" + - "cluster:admin/opendistro/notebooks/get" + - "cluster:admin/opendistro/notebooks/list" + +# Allows users to read and download Reports +reports_instances_read_access: + reserved: true + cluster_permissions: + - "cluster:admin/opendistro/reports/instance/list" + - "cluster:admin/opendistro/reports/instance/get" + - "cluster:admin/opendistro/reports/menu/download" + +# Allows users to read and download Reports and Report-definitions +reports_read_access: + reserved: true + cluster_permissions: + - "cluster:admin/opendistro/reports/definition/get" + - "cluster:admin/opendistro/reports/definition/list" + - "cluster:admin/opendistro/reports/instance/list" + - "cluster:admin/opendistro/reports/instance/get" + - "cluster:admin/opendistro/reports/menu/download" + +# Allows users to all Reports functionality +reports_full_access: + reserved: true + cluster_permissions: + - "cluster:admin/opendistro/reports/definition/create" + - "cluster:admin/opendistro/reports/definition/update" + - "cluster:admin/opendistro/reports/definition/on_demand" + - "cluster:admin/opendistro/reports/definition/delete" + - "cluster:admin/opendistro/reports/definition/get" + - "cluster:admin/opendistro/reports/definition/list" + - "cluster:admin/opendistro/reports/instance/list" + - "cluster:admin/opendistro/reports/instance/get" + - "cluster:admin/opendistro/reports/menu/download" + +# Allows users to use all asynchronous-search functionality +asynchronous_search_full_access: + reserved: true + cluster_permissions: + - 
"cluster:admin/opendistro/asynchronous_search/*" + index_permissions: + - index_patterns: + - "*" + allowed_actions: + - "indices:data/read/search*" + +# Allows users to read stored asynchronous-search results +asynchronous_search_read_access: + reserved: true + cluster_permissions: + - "cluster:admin/opendistro/asynchronous_search/get" + +# Wazuh monitoring and statistics index permissions +manage_wazuh_index: + reserved: true + hidden: false + cluster_permissions: [] + index_permissions: + - index_patterns: + - "wazuh-*" + dls: "" + fls: [] + masked_fields: [] + allowed_actions: + - "read" + - "delete" + - "manage" + - "index" + tenant_permissions: [] + static: false diff --git a/docker/config/2.x/os/roles_mapping.yml b/docker/config/2.x/os/roles_mapping.yml new file mode 100644 index 00000000..94c2b466 --- /dev/null +++ b/docker/config/2.x/os/roles_mapping.yml @@ -0,0 +1,88 @@ +--- +# In this file users, backendroles and hosts can be mapped to Open Distro Security roles. +# Permissions for Opendistro roles are configured in roles.yml + +_meta: + type: "rolesmapping" + config_version: 2 + +# Define your roles mapping here + +## Default roles mapping + +all_access: + reserved: true + hidden: false + backend_roles: + - "admin" + hosts: [] + users: [] + and_backend_roles: [] + description: "Maps admin to all_access" + +own_index: + reserved: false + hidden: false + backend_roles: [] + hosts: [] + users: + - "*" + and_backend_roles: [] + description: "Allow full access to an index named like the username" + +logstash: + reserved: false + hidden: false + backend_roles: + - "logstash" + hosts: [] + users: [] + and_backend_roles: [] + +readall: + reserved: true + hidden: false + backend_roles: + - "readall" + hosts: [] + users: [] + and_backend_roles: [] + +manage_snapshots: + reserved: true + hidden: false + backend_roles: + - "snapshotrestore" + hosts: [] + users: [] + and_backend_roles: [] + +kibana_server: + reserved: true + hidden: false + backend_roles: [] + hosts: [] + users: + - "kibanaserver" + and_backend_roles: [] + +kibana_user: + reserved: false + hidden: false + backend_roles: + - "kibanauser" + hosts: [] + users: [] + and_backend_roles: [] + description: "Maps kibanauser to kibana_user" + + # Wazuh monitoring and statistics index permissions +manage_wazuh_index: + reserved: true + hidden: false + backend_roles: [] + hosts: [] + users: + - "kibanaserver" + - "admin" + and_backend_roles: [] diff --git a/docker/config/2.x/osd/opensearch_dashboards.yml b/docker/config/2.x/osd/opensearch_dashboards.yml new file mode 100755 index 00000000..ecf0203e --- /dev/null +++ b/docker/config/2.x/osd/opensearch_dashboards.yml @@ -0,0 +1,22 @@ +server.host: 0.0.0.0 +server.port: 5601 +opensearch.hosts: https://os1:9200 +opensearch.ssl.verificationMode: certificate +#osd 1.2.4 +# opensearch.requestHeadersWhitelist: ["securitytenant","Authorization"] +# +# osd 2.0 +opensearch.requestHeadersAllowlist: ["securitytenant", "Authorization"] +# +opensearch_security.multitenancy.enabled: false +opensearch_security.readonly_mode.roles: ["kibana_read_only"] +server.ssl.enabled: true +server.ssl.key: "/home/node/kbn/certs/osd.key" +server.ssl.certificate: "/home/node/kbn/certs/osd.pem" +opensearch.ssl.certificateAuthorities: ["/home/node/kbn/certs/ca.pem"] +uiSettings.overrides.defaultRoute: /app/reports-dashboards +opensearch.username: "kibanaserver" +opensearch.password: "kibanaserver" +opensearchDashboards.branding: + useExpandedHeader: false + diff --git 
a/docker/config/2.x/osd/opensearch_dashboards_saml.yml b/docker/config/2.x/osd/opensearch_dashboards_saml.yml new file mode 100755 index 00000000..10c10f03 --- /dev/null +++ b/docker/config/2.x/osd/opensearch_dashboards_saml.yml @@ -0,0 +1,30 @@ +server.host: 0.0.0.0 +server.port: 5601 +opensearch.hosts: https://os1:9200 +opensearch.ssl.verificationMode: certificate +#osd 1.2.4 +# opensearch.requestHeadersWhitelist: ["securitytenant","Authorization"] +# +# osd 2.0 +opensearch.requestHeadersAllowlist: ["securitytenant", "Authorization"] +# +opensearch_security.multitenancy.enabled: false +opensearch_security.readonly_mode.roles: ["kibana_read_only"] +server.ssl.enabled: true +server.ssl.key: "/home/node/kbn/certs/osd.key" +server.ssl.certificate: "/home/node/kbn/certs/osd.pem" +opensearch.ssl.certificateAuthorities: ["/home/node/kbn/certs/ca.pem"] +uiSettings.overrides.defaultRoute: /app/reports-dashboards +opensearch.username: "kibanaserver" +opensearch.password: "kibanaserver" + +opensearch_security.auth.type: "saml" +server.xsrf.allowlist: + [ + /_plugins/_security/saml/acs, + /_opendistro/_security/saml/acs, + /_plugins/_security/saml/acs/idpinitiated, + /_opendistro/_security/saml/acs/idpinitiated, + /_plugins/_security/saml/logout, + /_opendistro/_security/saml/logout, + ] diff --git a/docker/config/2.x/osd/wazuh.yml b/docker/config/2.x/osd/wazuh.yml new file mode 100755 index 00000000..421c58b1 --- /dev/null +++ b/docker/config/2.x/osd/wazuh.yml @@ -0,0 +1,22 @@ +hosts: + - manager: + url: 'https://wazuh.manager' + port: 55000 + username: wazuh-wui + password: MyS3cr37P450r.*- + run_as: false + - imposter: + url: 'http://imposter' + port: 8080 + username: wazuh-wui + password: MyS3cr37P450r.*- + run_as: false + - imposter-cli: + url: 'http://' + port: 8080 + username: wazuh-wui + password: MyS3cr37P450r.*- + run_as: false + + +wazuh.updates.disabled: true diff --git a/docker/config/2.x/wm/wazuh_manager.conf b/docker/config/2.x/wm/wazuh_manager.conf new file mode 100755 index 00000000..aff1af9d --- /dev/null +++ b/docker/config/2.x/wm/wazuh_manager.conf @@ -0,0 +1,353 @@ + + + yes + yes + no + no + no + smtp.example.wazuh.com + wazuh@example.wazuh.com + recipient@example.wazuh.com + 12 + alerts.log + 10m + 0 + + + + 3 + 12 + + + + + plain + + + + secure + 1514 + tcp + 131072 + + + + + no + yes + yes + yes + yes + yes + yes + yes + + + 43200 + + etc/rootcheck/rootkit_files.txt + etc/rootcheck/rootkit_trojans.txt + + yes + + + + yes + 1800 + 1d + yes + + wodles/java + wodles/ciscat + + + + + yes + yes + /var/log/osquery/osqueryd.results.log + /etc/osquery/osquery.conf + yes + + + + + no + 1h + yes + yes + yes + yes + yes + yes + yes + + + + 10 + + + + + yes + yes + 12h + yes + + + + no + 5m + 6h + yes + + + + no + trusty + xenial + bionic + focal + 1h + + + + + no + stretch + buster + bullseye + 1h + + + + + no + 5 + 6 + 7 + 8 + 1h + + + + + no + amazon-linux + amazon-linux-2 + 1h + + + + + no + 1h + + + + + yes + 1h + + + + + yes + 2010 + 1h + + + + + + + no + + + 43200 + + yes + + + yes + + + no + + + /etc,/usr/bin,/usr/sbin + /bin,/sbin,/boot + + + /etc/mtab + /etc/hosts.deny + /etc/mail/statistics + /etc/random-seed + /etc/random.seed + /etc/adjtime + /etc/httpd/logs + /etc/utmpx + /etc/wtmpx + /etc/cups/certs + /etc/dumpdates + /etc/svc/volatile + + + .log$|.swp$ + + + /etc/ssl/private.key + + yes + yes + yes + yes + + + 10 + + + 100 + + + + yes + 5m + 1h + 10 + + + + + + 127.0.0.1 + ^localhost.localdomain$ + 10.0.0.106 + + + + disable-account + disable-account + yes + + + + 
restart-wazuh + restart-wazuh + + + + firewall-drop + firewall-drop + yes + + + + host-deny + host-deny + yes + + + + route-null + route-null + yes + + + + win_route-null + route-null.exe + yes + + + + netsh + netsh.exe + yes + + + + + + + command + df -P + 360 + + + + full_command + netstat -tulpn | sed 's/\([[:alnum:]]\+\)\ \+[[:digit:]]\+\ \+[[:digit:]]\+\ \+\(.*\):\([[:digit:]]*\)\ \+\([0-9\.\:\*]\+\).\+\ \([[:digit:]]*\/[[:alnum:]\-]*\).*/\1 \2 == \3 == \4 \5/' | sort -k 4 -g | sed 's/ == \(.*\) ==/:\1/' | sed 1,2d + netstat listening ports + 360 + + + + full_command + last -n 20 + 360 + + + + + ruleset/decoders + ruleset/rules + 0215-policy_rules.xml + etc/lists/audit-keys + etc/lists/amazon/aws-eventnames + etc/lists/security-eventchannel + + + etc/decoders + etc/rules + + + + yes + 1 + 64 + 15m + + + + + no + 1515 + no + yes + no + HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH + + no + etc/sslmanager.cert + etc/sslmanager.key + no + + + + wazuh + node01 + master + + 1516 + 0.0.0.0 + + NODE_IP + + no + yes + + + + + + + syslog + /var/ossec/logs/active-responses.log + + + diff --git a/docker/config/enable_saml.sh b/docker/config/enable_saml.sh new file mode 100755 index 00000000..89d77617 --- /dev/null +++ b/docker/config/enable_saml.sh @@ -0,0 +1,160 @@ +#!/bin/bash + +# idp container launches and docker-compose returns too quickly, do not wait +# for container to be healthy as it has no dependencies, so we wait before +# continuing +sleep 7 + +indexer="$1-os1-1" +dashboard="$1-osd-1" + +# Setup keycloack to be used with wazuh-dashboards + +# Connection +U="admin" +P="admin" +B="http://idp:8080" + +# Realm +REALM="master" + +# Get ACCESS_TOKEN from default install +ACCESS_TOKEN=$(curl -sS \ + -d 'client_id=admin-cli' \ + -d 'username=admin' \ + -d 'password=admin' \ + -d 'grant_type=password' \ + "${B}/realms/master/protocol/openid-connect/token" | jq -r '.access_token') + +H=('-H' 'Content-Type: application/json' '-H' "Authorization: Bearer $ACCESS_TOKEN") + +# Create new REALM +REALM="wazuh" +P='{ + "id": "wazuh", + "realm": "wazuh", + "enabled": true +}' + +curl -sS -L -X POST "${B}/admin/realms" "${H[@]}" -d "$P" | grep -v "Conflict detected" + +# Add admin certificates to keycloak as these are used by indexer to sign saml +# messages. These should be uploaded to keycloak if we want it to verify indexer +# messages. 
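+# The grep/tr pipeline below strips the BEGIN/END PEM markers and the newlines
+# so that the raw base64 body can be embedded in the Keycloak client
+# attributes saml.signing.certificate and saml.signing.private.key.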
+key=$(cat /certs/wi/admin-key.pem | grep -v "PRIVATE KEY" | tr -d "\n") +cert=$(cat /certs/wi/admin.pem | grep -v CERTIFICATE | tr -d "\n") + +# Create client +# By default the client does not verify the client signature on saml messages +# but it could be enabled for testing purposes +PC="{ + \"protocol\": \"saml\", + \"name\": \"wazuh\", + \"clientId\": \"wazuh\", + \"description\": \"wazuh saml integration\", + \"baseUrl\": \"https://localhost:5601\", + \"rootUrl\": \"https://localhost:5601\", + \"redirectUris\": [\"https://localhost:5601/*\"], + \"attributes\" : { + \"saml_single_logout_service_url_redirect\": \"https://localhost:5601/_opendistro/_security/saml/logout\", + \"saml_assertion_consumer_url_post\": \"https://localhost:5601/_opendistro/_security/saml/acs/idpinitiated\", + \"saml_single_logout_service_url_post\": \"https://wazuh.dashboard:5601/_opendistro/_security/saml/logout\", + \"saml.force.post.binding\": \"false\", + \"saml.signing.certificate\": \"$cert\", + \"saml.signing.private.key\": \"$key\", + \"saml.client.signature\": \"true\", + \"saml_single_logout_service_url_redirect\": \"https://localhost:5601\", + \"post.logout.redirect.uris\": \"https://localhost:5601*\" + } +}" + +curl -sS -L -X POST "${B}/admin/realms/${REALM}/clients" "${H[@]}" -d "$PC" | grep -v "Client wazuh already exists" + +# Get a client json representation +CLIENT=$(curl -sS -L -X GET "${B}/admin/realms/${REALM}/clients" "${H[@]}" -G -d 'clientId=wazuh' | jq '.[] | select(.clientId=="wazuh")') + +# Get client id +CID=$(echo $CLIENT | jq -r '.id') + +# Generate all-access and admin role for the realm +PR1='{ + "name":"all-access" +}' + +curl -sS -L -X POST "${B}/admin/realms/${REALM}/roles" "${H[@]}" -d "$PR1" | grep -v "Role with name all-access already exists" + +PR2='{ + "name":"admin" +}' + +curl -sS -L -X POST "${B}/admin/realms/${REALM}/roles" "${H[@]}" -d "$PR2" | grep -v "Role with name admin already exists" + +## create new user +PU='{ + "username": "wazuh", + "email": "hello@wazuh.com", + "firstName": "Wazuh", + "lastName": "Wazuh", + "emailVerified": true, + "enabled": true, + "credentials": [{"temporary":false,"type":"password","value":"wazuh"}], + "realmRoles": ["admin", "all-access"] +}' + +curl -sS -L -X POST "${B}/admin/realms/${REALM}/users" "${H[@]}" -d "$PU" | grep -v "User exists with same username" + +## Get a user json representation +USER=$(curl -sS -L -X GET "${B}/admin/realms/${REALM}/users" "${H[@]}" -G -d 'username=wazuh' | jq '.[] | select(.username=="wazuh")') + +### Get user id +USERID=$(echo $USER | jq -r '.id') + +# Get roles +ROLES=$(curl -sS -L -X GET "${B}/admin/realms/${REALM}/roles" "${H[@]}" -d "$PR2") + +## Assign role +ADMINID=$(echo $ROLES | jq -r '.[] | select(.name=="admin").id') +ALLACCESSID=$(echo $ROLES | jq -r '.[] | select(.name=="all-access").id') + +PA1="[ + { + \"id\": \"$ADMINID\", + \"name\": \"admin\", + \"composite\": false, + \"clientRole\": false, + \"containerId\": \"wazuh\" + }, + { + \"id\": \"$ALLACCESSID\", + \"name\": \"all-access\", + \"description\": \"\", + \"composite\": false, + \"clientRole\": false, + \"containerId\": \"wazuh\" + } +]" + +curl -sS -L -X POST "${B}/admin/realms/${REALM}/users/${USERID}/role-mappings/realm" "${H[@]}" -d "$PA1" + +# Get list of client scopes +CSCOPES=$(curl -sS -L -X GET "${B}/admin/realms/${REALM}/client-scopes" "${H[@]}") +CSID=$(echo $CSCOPES | jq -r '.[] | select(.name=="role_list").id ') +CSR=$(echo $CSCOPES | jq -r '.[] | select(.name=="role_list") ') + +# Set single to true, so opensearch 
works +UPDATE=$(echo $CSR | jq '.protocolMappers[] | select(.name=="role list").config.single |= "true" ') +PMID=$(echo $CSR | jq -r '.protocolMappers[] | select(.name=="role list").id') + +curl -sS -L -X PUT "${B}/admin/realms/${REALM}/client-scopes/$CSID/protocol-mappers/models/$PMID" "${H[@]}" -d "$UPDATE" + +# Set up auth realm on opensearch +certs="/usr/share/opensearch/config/certs" +ca="$certs/ca.pem" +cert="$certs/admin.pem" +key="$certs/admin-key.pem" + +securityadmin="bash /usr/share/opensearch/plugins/opensearch-security/tools/securityadmin.sh" +config_path="/usr/share/opensearch/config/opensearch-security" + +echo "To update configuration in indexer, you can run:" +echo docker exec -e JAVA_HOME=/usr/share/opensearch/jdk $indexer $securityadmin -cacert $ca -cert $cert -key $key -cd $config_path diff --git a/docker/dev.sh b/docker/dev.sh new file mode 100755 index 00000000..e16ce7d5 --- /dev/null +++ b/docker/dev.sh @@ -0,0 +1,155 @@ +#!/bin/bash + +usage() { + echo + echo "./dev.sh [-os os_version] [-osd osd_version] /wazuh_app_src action [saml/server] [server_version]" + echo + echo "where" + echo " -o os_version Specify the OS version (optional)" + echo " -d osd_version Specify the OSD version (optional)" + echo " wazuh_app_src is the path to the wazuh application source code" + echo " action is one of up | down | stop" + echo " saml to deploy a saml enabled environment (optional)" + echo " server to deploy a real server enabled environment (optional)" + exit -1 +} + +exit_with_message() { + echo $1 + exit -1 +} + +if ! command -v jq &> /dev/null; then + echo "[ERROR] jq is not installed. Please install jq to continue." + echo "sudo apt-get install jq in Debian/Ubuntu OS" + echo "sudo yum install jq in RedHat/CentOS OS" + echo "sudo pacman -Sy --noconfirm jq in Arch OS" + echo "brew install jq in MAC OS" + exit 1 +fi + +PACKAGE_PATH="../opensearch_dashboards.json" +os_version="" +osd_version="" + +while getopts ":o:d:" opt; do + case ${opt} in + o) + os_version=$OPTARG + ;; + d) + osd_version=$OPTARG + ;; + \?) + echo "Invalid option: -$OPTARG" >&2 + exit 1 + ;; + :) + echo "The -$OPTARG option requires an argument." >&2 + exit 1 + ;; + esac +done +shift $((OPTIND - 1)) + +if [ -z "$os_version" ] || [ -z "$osd_version" ]; then + if [ ! -f $PACKAGE_PATH ]; then + echo "[ERROR] The file package.json was not found." + exit 1 + fi + + if [ -z "$os_version" ]; then + echo "[INFO] OS Version not received via flag, getting the version from $PACKAGE_PATH" + os_version=$(jq -r '.opensearchDashboardsVersion' $PACKAGE_PATH) + if [ -z "$os_version" ]; then + echo "[ERROR] Could not retrieve the OS version from package.json." + exit 1 + fi + fi + + if [ -z "$osd_version" ]; then + echo "[INFO] OSD Version not received via flag, getting the version from $PACKAGE_PATH" + osd_version=$(jq -r '.opensearchDashboardsVersion' $PACKAGE_PATH) + if [ -z "$osd_version" ]; then + echo "[ERROR] Could not retrieve the OSD version from package.json." + exit 1 + fi + fi +fi + +if [ $# -lt 2 ]; then + echo "[ERROR] Incorrect number of arguments " $# ", got " $@ + echo + usage +fi + +if [[ $1 != /* ]]; then + echo "[ERROR] Source path must be absolute, and start with /" + echo + usage + exit +fi + +export PASSWORD=${PASSWORD:-admin} +export OS_VERSION=$os_version +export OSD_VERSION=$osd_version +export OSD_PORT=${PORT:-5601} +export IMPOSTER_VERSION=3.44.1 +export SRC=$1 +export OSD_MAJOR_NUMBER=$(echo $OSD_VERSION | cut -d. 
-f1) +export COMPOSE_PROJECT_NAME=os-dev-${OSD_VERSION//./} +export WAZUH_STACK="" + +if [[ "$OSD_MAJOR_NUMBER" -ge 2 ]]; then + export OSD_MAJOR="2.x" +else + export OSD_MAJOR="1.x" +fi + +profile="standard" +export WAZUH_DASHBOARD_CONF=./config/${OSD_MAJOR}/osd/opensearch_dashboards.yml +export SEC_CONFIG_FILE=./config/${OSD_MAJOR}/os/config.yml + +export SEC_CONFIG_PATH=/usr/share/opensearch/plugins/opensearch-security/securityconfig +if [[ "$OSD_MAJOR" == "2.x" ]]; then + export SEC_CONFIG_PATH=/usr/share/opensearch/config/opensearch-security +fi + +case "$2" in +up) + /bin/bash ../scripts/create_docker_networks.sh + docker compose --profile $profile -f dev.yml up -Vd + + # Display a command to deploy an agent when using the real server + if [[ "$3" =~ "server" ]]; then + echo + echo "**************WARNING**************" + echo "The agent version must be a published one. This uses only released versions." + echo "If you need to change de version, edit the command as you see fit." + echo "***********************************" + echo "1. (Optional) Enroll an agent (Ubuntu 20.04):" + echo "docker run --name ${COMPOSE_PROJECT_NAME}-agent-\$(date +%s) --network os-dev-${OS_VERSION} --label com.docker.compose.project=${COMPOSE_PROJECT_NAME} --env WAZUH_AGENT_VERSION=${WAZUH_STACK} -d ubuntu:20.04 bash -c '" + echo " apt update -y" + echo " apt install -y curl lsb-release" + echo " curl -so \wazuh-agent-\${WAZUH_AGENT_VERSION}.deb \\" + echo " https://packages.wazuh.com/4.x/apt/pool/main/w/wazuh-agent/wazuh-agent_\${WAZUH_AGENT_VERSION}-1_amd64.deb \\" + echo " && WAZUH_MANAGER='wazuh.manager' WAZUH_AGENT_GROUP='default' dpkg -i ./wazuh-agent-\${WAZUH_AGENT_VERSION}.deb" + echo + echo " /etc/init.d/wazuh-agent start" + echo " tail -f /var/ossec/logs/ossec.log" + echo "'" + echo + fi + ;; + down) + docker compose --profile $profile -f dev.yml down -v --remove-orphans + ;; + stop) + docker compose --profile $profile -f dev.yml -p ${COMPOSE_PROJECT_NAME} stop + ;; + *) + echo "[ERROR] Action must be up | down | stop: " + echo + usage + ;; +esac diff --git a/docker/dev.yml b/docker/dev.yml new file mode 100755 index 00000000..023fde73 --- /dev/null +++ b/docker/dev.yml @@ -0,0 +1,242 @@ +version: '2.2' + +# x-logging: &logging +# logging: +# driver: loki +# options: +# loki-url: 'http://host.docker.internal:3100/loki/api/v1/push' + +services: + exporter: + image: quay.io/prometheuscommunity/elasticsearch-exporter:latest + + # <<: *logging + + hostname: exporter-osd-${OS_VERSION} + profiles: + - 'standard' + networks: + - os-dev + - mon + command: + - '--es.uri=https://admin:${PASSWORD}@os1:9200' + - '--es.ssl-skip-verify' + - '--es.all' + + generator: + image: cfssl/cfssl + + # <<: *logging + profiles: + - 'server' + - 'saml' + - 'standard' + volumes: + - wi_certs:/certs/wi + - wd_certs:/certs/wd + - wm_certs:/certs/wm + - idp_certs:/certs/idp + - ./config/${OSD_MAJOR}/certs:/conf + - os_logs:/logs + - os_data:/data + # Included to avoid docker from creating duplicated networks + networks: + - os-dev + entrypoint: /bin/bash + command: > + -c ' + export certs=/tmp/certs + mkdir $$certs + cd $$certs + + echo "Generating CA" + cfssl gencert -initca /conf/ca.json | cfssljson -bare ca + + echo "Generating servers certificates" + for i in os1 osd imposter; do + echo "Generating cert for $$i" + cat /conf/host.json | \ + sed "s/HOST/$$i/g" | \ + cfssl gencert \ + -ca $$certs/ca.pem \ + -ca-key $$certs/ca-key.pem \ + -config /conf/cfssl.json \ + -profile=server - | \ + cfssljson -bare $$i + openssl pkcs8 
-topk8 -inform pem -in $$i-key.pem -outform pem -nocrypt -out $$i.key + done + + echo "Generating clients certificates" + for i in admin filebeat saml; do + echo "Generating cert for $$i" + cat /conf/host.json | \ + sed "s/HOST/$$i/g" | \ + cfssl gencert \ + -ca $$certs/ca.pem \ + -ca-key $$certs/ca-key.pem \ + -config /conf/cfssl.json \ + -profile=client - | \ + cfssljson -bare $$i + openssl pkcs8 -topk8 -inform pem -in $$i-key.pem -outform pem -nocrypt -out $$i.key + done + + echo "Setting up permissions" + + rm /certs/wi/* /certs/wd/* /certs/wm/* + + mv $$certs/os1* /certs/wi + mv $$certs/admin* /certs/wi + mv /certs/wi/admin.key /certs/wi/admin-key.pem + cp $$certs/*ca* /certs/wi + + mv $$certs/osd* /certs/wd + cp $$certs/*ca* /certs/wd + + mv $$certs/saml* /certs/idp + mv /certs/idp/saml.key /certs/idp/saml-key.pem + cp $$certs/*ca* /certs/idp + + mv $$certs/*.* /certs/wm + + chmod 640 /certs/wi/* /certs/wd/* /certs/wm/* + chown -R 1000:1000 /certs/* + ls -alR /certs/ + + chown 1000:1000 /data /logs + chmod 775 /data /logs + sleep 300 + ' + healthcheck: + test: ['CMD-SHELL', '[ -r /certs/wi/os1.pem ]'] + interval: 2s + timeout: 5s + retries: 10 + + os1: + image: opensearchproject/opensearch:${OS_VERSION} + + # <<: *logging + profiles: + - 'standard' + environment: + - cluster.name=os-dev-cluster + - node.name=os1 + - discovery.seed_hosts=os1 + - cluster.initial_master_nodes=os1 + - bootstrap.memory_lock=true # along with the memlock settings below, disables swapping + - 'OPENSEARCH_JAVA_OPTS=-Xms512m -Xmx512m' # minimum and maximum Java heap size, recommend setting both to 50% of system RAM + - OPENSEARCH_PATH_CONF=/usr/share/opensearch/config/ + ulimits: + memlock: + soft: -1 + hard: -1 + nofile: + soft: 65536 # maximum number of open files for the OpenSearch user, set to at least 65536 on modern systems + hard: 65536 + volumes: + - wi_certs:/usr/share/opensearch/config/certs/ + - ./config/${OSD_MAJOR}/os/opensearch.yml:/usr/share/opensearch/config/opensearch.yml + - ./config/${OSD_MAJOR}/os/internal_users.yml:${SEC_CONFIG_PATH}/internal_users.yml + - ${SEC_CONFIG_FILE}:${SEC_CONFIG_PATH}/config.yml + - ./config/${OSD_MAJOR}/os/roles_mapping.yml:${SEC_CONFIG_PATH}/roles_mapping.yml + - ./config/${OSD_MAJOR}/os/roles.yml:${SEC_CONFIG_PATH}/roles.yml + + - os_logs:/var/log/os1 + - os_data:/var/lib/os1 + ports: + - 9200:9200 + - 9300:9300 + networks: + - os-dev + - mon + healthcheck: + test: + [ + 'CMD-SHELL', + "curl -v --cacert config/certs/ca.pem https://os1:9200 2>&1 | grep -q '401 Unauthorized'", + ] + interval: 1s + timeout: 5s + retries: 120 + + filebeat: + depends_on: + os1: + condition: service_healthy + image: elastic/filebeat:7.10.2 + profiles: + - 'standard' + hostname: filebeat + user: '0:0' + networks: + - os-dev + - mon + + # <<: *logging + # restart: always + entrypoint: + - '/bin/bash' + command: > + -c ' + mkdir -p /etc/filebeat + echo admin | filebeat keystore add username --stdin --force + echo ${PASSWORD}| filebeat keystore add password --stdin --force + curl -so /etc/filebeat/wazuh-template.json https://raw.githubusercontent.com/wazuh/wazuh/v4.7.2/extensions/elasticsearch/7.x/wazuh-template.json + curl -s https://packages.wazuh.com/4.x/filebeat/wazuh-filebeat-0.3.tar.gz | tar -xvz -C /usr/share/filebeat/module + # copy filebeat to preserve correct permissions without + # affecting host filesystem + cp /tmp/filebeat.yml /usr/share/filebeat/filebeat.yml + chown root.root /usr/share/filebeat/filebeat.yml + chmod go-w /usr/share/filebeat/filebeat.yml + filebeat setup 
-e + filebeat + ' + volumes: + - wm_certs:/etc/ssl/elastic + - ./config/${OSD_MAJOR}/filebeat/filebeat.yml:/tmp/filebeat.yml + + osd: + depends_on: + os1: + condition: service_healthy + image: quay.io/wazuh/osd-dev:${OSD_VERSION} + profiles: + - 'standard' + hostname: osd + networks: + - os-dev + - devel + - mon + user: '1000:1000' + + # <<: *logging + ports: + - ${OSD_PORT}:5601 + environment: + - 'LOGS=/proc/1/fd/1' + volumes: + - osd_cache:/home/node/.cache + - '${SRC}:/home/node/kbn/plugins/wazuh-dashboards-reporting' + - wd_certs:/home/node/kbn/certs/ + - ${WAZUH_DASHBOARD_CONF}:/home/node/kbn/config/opensearch_dashboards.yml + - ./config/${OSD_MAJOR}/osd/wazuh.yml:/home/node/kbn/data/wazuh/config/wazuh.yml + +networks: + os-dev: + name: os-dev-${OS_VERSION} + driver: bridge + mon: + external: true + devel: + external: true + +volumes: + osd_cache: + certs: + os_logs: + os_data: + wi_certs: + wd_certs: + wm_certs: + idp_certs: + keycloak-data: diff --git a/package.json b/package.json index ad89389b..357ca1dc 100644 --- a/package.json +++ b/package.json @@ -4,6 +4,14 @@ "description": "OpenSearch Dashboards Reports Plugin", "license": "Apache-2.0", "main": "index.ts", + "opensearchDashboards": { + "version": "2.17.1", + "templateVersion": "2.17.1" + }, + "wazuh": { + "version": "5.0.0", + "revision": "00" + }, "scripts": { "osd": "node ../../scripts/osd", "opensearch": "node ../../scripts/opensearch", @@ -92,4 +100,4 @@ "braces": "^3.0.3", "micromatch": "^4.0.8" } -} \ No newline at end of file +} diff --git a/public/components/main/__tests__/__snapshots__/main.test.tsx.snap b/public/components/main/__tests__/__snapshots__/main.test.tsx.snap index 0b025ce1..b22d054d 100644 --- a/public/components/main/__tests__/__snapshots__/main.test.tsx.snap +++ b/public/components/main/__tests__/__snapshots__/main.test.tsx.snap @@ -808,7 +808,32 @@ exports[`
panel render component 1`] = ` aria-live="polite" aria-sort="none" class="euiTableHeaderCell" - data-test-subj="tableHeaderCell_status_5" + data-test-subj="tableHeaderCell_notificationsEnabled_5" + role="columnheader" + scope="col" + > + + + @@ -837,7 +862,7 @@ exports[`
panel render component 1`] = ` >
panel render component after create success 1`] = ` aria-live="polite" aria-sort="none" class="euiTableHeaderCell" - data-test-subj="tableHeaderCell_status_5" + data-test-subj="tableHeaderCell_notificationsEnabled_5" + role="columnheader" + scope="col" + > + + + @@ -1784,7 +1834,7 @@ exports[`
panel render component after create success 1`] = ` >
panel render component after delete success 1`] = ` aria-live="polite" aria-sort="none" class="euiTableHeaderCell" - data-test-subj="tableHeaderCell_status_5" + data-test-subj="tableHeaderCell_notificationsEnabled_5" + role="columnheader" + scope="col" + > + + + @@ -2787,7 +2862,7 @@ exports[`
panel render component after delete success 1`] = ` >
panel render component after edit success 1`] = ` aria-live="polite" aria-sort="none" class="euiTableHeaderCell" - data-test-subj="tableHeaderCell_status_5" + data-test-subj="tableHeaderCell_notificationsEnabled_5" + role="columnheader" + scope="col" + > + + + @@ -3791,7 +3891,7 @@ exports[`
panel render component after edit success 1`] = ` >
panel render component 1`] = ` aria-live="polite" aria-sort="none" class="euiTableHeaderCell" - data-test-subj="tableHeaderCell_status_5" + data-test-subj="tableHeaderCell_notificationsEnabled_5" + role="columnheader" + scope="col" + > + + + @@ -342,6 +367,22 @@ exports[` panel render component 1`] = `
+ Notifications
+ @@ -455,6 +496,22 @@ exports[` panel render component 1`] = `
+ Notifications
+ @@ -838,7 +895,32 @@ exports[` panel render empty table 1`] = ` aria-live="polite" aria-sort="none" class="euiTableHeaderCell" - data-test-subj="tableHeaderCell_status_5" + data-test-subj="tableHeaderCell_notificationsEnabled_5" + role="columnheader" + scope="col" + > + + + @@ -867,7 +949,7 @@ exports[` panel render empty table 1`] = ` >
New report available to view

', + configIds: ['VnOVQ5IBH5EsCNGPWgec'], }, report_params: { report_name: 'Test report table response', @@ -47,12 +47,16 @@ export const reportTableMockResponse = [ export const mockReportsTableItems = [ { + channel: ['VnOVQ5IBH5EsCNGPWgec'], + emailRecipients: undefined, + textDescription: 'New report available to view', + htmldescription: '

New report available to view

', + title: 'New report', id: '123456', reportName: 'Test report table response', type: 'On demand', - sender: '—', + sender: undefined, opensearchDashboardsRecipients: '—', - emailRecipients: '—', reportSource: 'Dashboard', timeCreated: undefined, state: undefined, @@ -84,8 +88,10 @@ export const reportDefinitionsTableMockResponse = [ }, }, delivery: { - delivery_type: 'OpenSearch Dashboards user', - delivery_params: { opensearch_dashboards_recipients: [] }, + title: 'New report', + textDescription: 'New report available to view', + htmlDescription: '

New report available to view

', + configIds: ['VnOVQ5IBH5EsCNGPWgec'], }, trigger: { trigger_type: 'Schedule', @@ -114,6 +120,7 @@ export const reportDefinitionsTableMockContent = [ type: 'Schedule', owner: '—', source: 'Dashboard', + notificationsEnabled: 'Enabled', baseUrl: 'test_base_url.com', lastUpdated: 1602713211007, details: 'Recurring', diff --git a/public/components/main/main.tsx b/public/components/main/main.tsx index 8116f634..d69799a7 100644 --- a/public/components/main/main.tsx +++ b/public/components/main/main.tsx @@ -240,13 +240,13 @@ export function Main(props) { const { httpClient } = props; await httpClient .get('../api/reporting/reports') - .then((response) => { + .then(async (response) => { setReportsTableContent(addReportsTableContent(response.data)); }) .catch((error) => { console.log('error when fetching all reports: ', error); // permission denied error - if (error.body.statusCode === 403) { + if (error?.body?.statusCode === 403) { handleReportsTableErrorToast('permissions'); } else { handleReportsTableErrorToast('API'); @@ -265,7 +265,7 @@ export function Main(props) { }) .catch((error) => { console.log('error when fetching all report definitions: ', error); - if (error.body.statusCode === 403) { + if (error?.body?.statusCode === 403) { handleReportDefinitionsTableErrorToast('permissions'); } else { handleReportDefinitionsTableErrorToast('API'); diff --git a/public/components/main/main_utils.tsx b/public/components/main/main_utils.tsx index 0bb75b18..26caa31c 100644 --- a/public/components/main/main_utils.tsx +++ b/public/components/main/main_utils.tsx @@ -8,6 +8,7 @@ import 'babel-polyfill'; import { HttpSetup } from '../../../../../src/core/public'; import { uiSettingsService } from '../utils/settings_service'; import { GENERATE_REPORT_PARAM } from '../visual_report/constants'; +import { REPORTING_NOTIFICATIONS_DASHBOARDS_API } from '../../../common'; export const getAvailableNotificationsChannels = (configList: any) => { let availableChannels = []; @@ -72,15 +73,19 @@ export const addReportsTableContent = (data: string | any[]) => { id: item._id, reportName: reportParams.report_name, type: trigger.trigger_type, - sender: `\u2014`, + channel: reportDefinition.delivery.configIds, + sender: reportDefinition.delivery.emailSender, opensearchDashboardsRecipients: `\u2014`, - emailRecipients: `\u2014`, + emailRecipients: reportDefinition.delivery.emailRecipients, reportSource: reportParams.report_source, //TODO: wrong name timeCreated: report.time_created, state: report.state, url: report.query_url, format: reportParams.core_params.report_format, + htmldescription: reportDefinition.delivery.htmlDescription, + textDescription: reportDefinition.delivery.textDescription, + title: reportDefinition.delivery.title, }; reportsTableItems.push(reportsTableEntry); } @@ -92,6 +97,7 @@ export const addReportDefinitionsTableContent = (data: any) => { for (let index = 0; index < data.length; ++index) { let item = data[index]; let reportDefinition = item._source.report_definition; + let reportNotification = reportDefinition.delivery; let reportParams = reportDefinition.report_params; let trigger = reportDefinition.trigger; let triggerParams = trigger.trigger_params; @@ -108,6 +114,8 @@ export const addReportDefinitionsTableContent = (data: any) => { ? `\u2014` : triggerParams.schedule_type, // e.g. recurring, cron based status: reportDefinition.status, + notificationsEnabled: + reportNotification.configIds.length > 0 ? 
'Enabled' : 'Disabled' }; reportDefinitionsTableItems.push(reportDefinitionsTableEntry); } @@ -264,3 +272,45 @@ export const generateReportById = async ( } }); }; +export const sendTestNotificationsMessage = async ( + id: string, + httpClientProps: HttpSetup, + item: any +) => { + try { + const eventId = await httpClientProps + .get( + `${REPORTING_NOTIFICATIONS_DASHBOARDS_API.SEND_TEST_MESSAGE}/${item.channel[0]}`, + { + query: { feature: 'report' }, + } + ) + .then((response) => response.event_source.reference_id); + const configId = await httpClientProps.get( + `${REPORTING_NOTIFICATIONS_DASHBOARDS_API.GET_CONFIG}/${eventId}` + ); + } catch (error) { + console.log('error', error); + } +}; +export const getChannelsDetails = async (data: any, httpClient: HttpSetup) => { + try { + const arrayData = data.data; + for (let i = 0; i < arrayData.length; i++) { + const id = arrayData[i]._source.report_definition.delivery.configIds[0]; + const channel = await httpClient.get( + `${REPORTING_NOTIFICATIONS_DASHBOARDS_API.GET_CONFIG}/${id}` + ); + const sender = await httpClient.get( + `${REPORTING_NOTIFICATIONS_DASHBOARDS_API.GET_CONFIG}/${channel.config_list[0].config.email.email_account_id}` + ); + arrayData[i]._source.report_definition.delivery.emailRecipients = + channel.config_list[0].config.email.recipient_list; + arrayData[i]._source.report_definition.delivery.emailSender = + sender.config_list[0].config.smtp_account.from_address; + } + return arrayData; + } catch (error) { + console.log('error', error); + } +}; diff --git a/public/components/main/report_definition_details/__tests__/__snapshots__/report_definition_details.test.tsx.snap b/public/components/main/report_definition_details/__tests__/__snapshots__/report_definition_details.test.tsx.snap index f4bc2bfc..08c0c9bb 100644 --- a/public/components/main/report_definition_details/__tests__/__snapshots__/report_definition_details.test.tsx.snap +++ b/public/components/main/report_definition_details/__tests__/__snapshots__/report_definition_details.test.tsx.snap @@ -22,4 +22,4 @@ exports[` panel render on demand definition details 1 > Report definition details -`; \ No newline at end of file +`; diff --git a/public/components/main/report_definition_details/report_definition_details.tsx b/public/components/main/report_definition_details/report_definition_details.tsx index db81768d..a2043aa4 100644 --- a/public/components/main/report_definition_details/report_definition_details.tsx +++ b/public/components/main/report_definition_details/report_definition_details.tsx @@ -19,7 +19,7 @@ import { EuiLink, EuiGlobalToastList, EuiOverlayMask, - EuiConfirmModal, + EuiConfirmModal } from '@elastic/eui'; import { ReportDetailsComponent, @@ -27,15 +27,16 @@ import { } from '../report_details/report_details'; import { fileFormatsUpper, - generateReportFromDefinitionId, + generateReportFromDefinitionId } from '../main_utils'; import { ReportDefinitionSchemaType } from '../../../../server/model'; import moment from 'moment'; import { permissionsMissingToast, - permissionsMissingActions, + permissionsMissingActions } from '../../utils/utils'; import { GenerateReportLoadingModal } from '../loading_modal'; +import { REPORTING_NOTIFICATIONS_DASHBOARDS_API } from '../../../../common'; const ON_DEMAND = 'On demand'; @@ -54,11 +55,19 @@ interface ReportDefinitionDetails { triggerType: string; scheduleDetails: string; baseUrl: string; + channelName: string; } -export function ReportDefinitionDetails(props: { match?: any; setBreadcrumbs?: any; httpClient?: any; 
chrome: any }) { - const { chrome } = props; - const [reportDefinitionDetails, setReportDefinitionDetails] = useState({ +export function ReportDefinitionDetails(props: { + match?: any; + setBreadcrumbs?: any; + httpClient?: any; + chrome: any; +}) { + const { chrome } = props; + const [reportDefinitionDetails, setReportDefinitionDetails] = useState< + ReportDefinitionDetails + >({ name: '', description: '', created: '', @@ -72,11 +81,12 @@ export function ReportDefinitionDetails(props: { match?: any; setBreadcrumbs?: a reportFooter: '', triggerType: '', scheduleDetails: '', - baseUrl: '' + baseUrl: '', + emailrecipients: [] }); const [ reportDefinitionRawResponse, - setReportDefinitionRawResponse, + setReportDefinitionRawResponse ] = useState({}); const [toasts, setToasts] = useState([]); const [showDeleteModal, setShowDeleteModal] = useState(false); @@ -88,7 +98,9 @@ export function ReportDefinitionDetails(props: { match?: any; setBreadcrumbs?: a setShowLoading(e); }; - const handleShowDeleteModal = (e: boolean | ((prevState: boolean) => boolean)) => { + const handleShowDeleteModal = ( + e: boolean | ((prevState: boolean) => boolean) + ) => { setShowDeleteModal(e); }; @@ -128,7 +140,7 @@ export function ReportDefinitionDetails(props: { match?: any; setBreadcrumbs?: a ), color: 'danger', iconType: 'alert', - id: 'reportDefinitionDetailsErrorToast', + id: 'reportDefinitionDetailsErrorToast' }; // @ts-ignore setToasts(toasts.concat(errorToast)); @@ -146,7 +158,7 @@ export function ReportDefinitionDetails(props: { match?: any; setBreadcrumbs?: a ), color: 'success', iconType: 'check', - id: 'generateReportSuccessToast', + id: 'generateReportSuccessToast' }; // @ts-ignore setToasts(toasts.concat(successToast)); @@ -164,7 +176,7 @@ export function ReportDefinitionDetails(props: { match?: any; setBreadcrumbs?: a ), color: 'danger', iconType: 'alert', - id: 'generateReportErrorToast', + id: 'generateReportErrorToast' }; // @ts-ignore setToasts(toasts.concat(errorToast)); @@ -186,7 +198,7 @@ export function ReportDefinitionDetails(props: { match?: any; setBreadcrumbs?: a ), color: 'success', iconType: 'check', - id: 'successEnableToast', + id: 'successEnableToast' }; // @ts-ignore setToasts(toasts.concat(successToast)); @@ -200,7 +212,7 @@ export function ReportDefinitionDetails(props: { match?: any; setBreadcrumbs?: a ), color: 'danger', iconType: 'alert', - id: 'errorToast', + id: 'errorToast' }; // @ts-ignore setToasts(toasts.concat(errorToast)); @@ -214,7 +226,7 @@ export function ReportDefinitionDetails(props: { match?: any; setBreadcrumbs?: a ), color: 'success', iconType: 'check', - id: 'successDisableToast', + id: 'successDisableToast' }; // @ts-ignore setToasts(toasts.concat(successToast)); @@ -236,7 +248,7 @@ export function ReportDefinitionDetails(props: { match?: any; setBreadcrumbs?: a ), color: 'danger', iconType: 'alert', - id: 'errorDisableToast', + id: 'errorDisableToast' }; // @ts-ignore setToasts(toasts.concat(errorToast)); @@ -260,7 +272,7 @@ export function ReportDefinitionDetails(props: { match?: any; setBreadcrumbs?: a ), color: 'danger', iconType: 'alert', - id: 'errorDeleteToast', + id: 'errorDeleteToast' }; // @ts-ignore setToasts(toasts.concat(errorToast)); @@ -270,7 +282,7 @@ export function ReportDefinitionDetails(props: { match?: any; setBreadcrumbs?: a addErrorDeletingReportDefinitionToastHandler(); }; - const removeToast = (removedToast: { id: string; }) => { + const removeToast = (removedToast: { id: string }) => { setToasts(toasts.filter((toast: any) => toast.id 
!== removedToast.id)); }; @@ -278,7 +290,7 @@ export function ReportDefinitionDetails(props: { match?: any; setBreadcrumbs?: a setReportDefinitionDetails(e); }; - const handleReportDefinitionRawResponse = (e: {} ) => { + const handleReportDefinitionRawResponse = (e: {}) => { setReportDefinitionRawResponse(e); }; @@ -313,7 +325,7 @@ export function ReportDefinitionDetails(props: { match?: any; setBreadcrumbs?: a 'opensearch.reports.reportDefinitionsDetails.button.delete.query', { defaultMessage: 'Are you sure you want to delete "{name}"?', - values: { name: reportDefinitionDetails.name }, + values: { name: reportDefinitionDetails.name } } )}

@@ -323,7 +335,7 @@ export function ReportDefinitionDetails(props: { match?: any; setBreadcrumbs?: a ); }; - const humanReadableScheduleDetails = (trigger) => { + const humanReadableScheduleDetails = trigger => { let scheduleDetails = ''; if (trigger.trigger_type === 'Schedule') { if (trigger.trigger_params.schedule_type === 'Recurring') { @@ -339,12 +351,11 @@ export function ReportDefinitionDetails(props: { match?: any; setBreadcrumbs?: a 'opensearch.reports.reportDefinitionsDetails.schedule.dailyAt', { defaultMessage: 'Daily @ {time}', - values: { time: date.toTimeString() }, + values: { time: date.toTimeString() } } ); - } - // By interval - else { + } else { + // By interval const date = new Date( trigger.trigger_params.schedule.interval.start_time ); @@ -356,22 +367,21 @@ export function ReportDefinitionDetails(props: { match?: any; setBreadcrumbs?: a values: { period: trigger.trigger_params.schedule.interval.period, unit: trigger.trigger_params.schedule.interval.unit.toLowerCase(), - time: date.toTimeString(), - }, + time: date.toTimeString() + } } ); } - } - // Cron - else if (trigger.trigger_params.schedule_type === 'Cron based') { + } else if (trigger.trigger_params.schedule_type === 'Cron based') { + // Cron scheduleDetails = i18n.translate( 'opensearch.reports.reportDefinitionsDetails.schedule.cronBased', { defaultMessage: 'Cron based: {expression} ({timezone})', values: { expression: trigger.trigger_params.schedule.cron.expression, - timezone: trigger.trigger_params.schedule.cron.timezone, - }, + timezone: trigger.trigger_params.schedule.cron.timezone + } } ); } @@ -379,27 +389,27 @@ export function ReportDefinitionDetails(props: { match?: any; setBreadcrumbs?: a return scheduleDetails; }; - const getReportDefinitionDetailsMetadata = ( + const getReportDefinitionDetailsMetadata = async ( data: ReportDefinitionSchemaType - ) : ReportDefinitionDetails => { + ): Promise => { const reportDefinition: ReportDefinitionSchemaType = data; const { report_params: reportParams, trigger, delivery, time_created: timeCreated, - last_updated: lastUpdated, + last_updated: lastUpdated } = reportDefinition; const { trigger_type: triggerType, - trigger_params: triggerParams, + trigger_params: triggerParams } = trigger; const { core_params: { base_url: baseUrl, report_format: reportFormat, - time_duration: timeDuration, - }, + time_duration: timeDuration + } } = reportParams; let readableDate = new Date(timeCreated); @@ -442,51 +452,64 @@ export function ReportDefinitionDetails(props: { match?: any; setBreadcrumbs?: a ? 
humanReadableScheduleDetails(data.trigger) : `\u2014`, status: reportDefinition.status, + channelName: '', + emailSubject: delivery.title, + emailBody: delivery.textDescription }; + + if (delivery.configIds.length > 0) { + const [{ config: { name } }] = await getConfigChannel(delivery.configIds); + + reportDefinitionDetails.channelName = name; + } return reportDefinitionDetails; }; useEffect(() => { const { httpClient } = props; httpClient - .get(`../api/reporting/reportDefinitions/${reportDefinitionId}`) - .then((response: {report_definition: ReportDefinitionSchemaType}) => { - handleReportDefinitionRawResponse(response); - handleReportDefinitionDetails(getReportDefinitionDetailsMetadata(response.report_definition)); - props.setBreadcrumbs([ - { - text: i18n.translate( - 'opensearch.reports.reportDefinitionsDetails.schedule.breadcrumb.reporting', - { defaultMessage: 'Reporting' } - ), - href: '#', - }, - { - text: i18n.translate( - 'opensearch.reports.reportDefinitionsDetails.schedule.breadcrumb.reportDefinitionDetails', + .get(`../api/reporting/reportDefinitions/${reportDefinitionId}`) + .then( + async (response: { report_definition: ReportDefinitionSchemaType }) => { + handleReportDefinitionRawResponse(response); + handleReportDefinitionDetails( + await getReportDefinitionDetailsMetadata(response.report_definition) + ); + props.setBreadcrumbs([ { - defaultMessage: 'Report definition details: {name}', - values: { - name: response.report_definition.report_params.report_name, - }, + text: i18n.translate( + 'opensearch.reports.reportDefinitionsDetails.schedule.breadcrumb.reporting', + { defaultMessage: 'Reporting' } + ), + href: '#' + }, + { + text: i18n.translate( + 'opensearch.reports.reportDefinitionsDetails.schedule.breadcrumb.reportDefinitionDetails', + { + defaultMessage: 'Report definition details: {name}', + values: { + name: response.report_definition.report_params.report_name + } + } + ) } - ), - }, - ]); - }) - .catch((error: any) => { - console.error( - i18n.translate( - 'opensearch.reports.reportDefinitionsDetails.schedule.breadcrumb.error', - { - defaultMessage: - 'error when getting report definition details: {error}', - values: { error: error }, - } - ) - ); - handleDetailsErrorToast(); - }); + ]); + } + ) + .catch((error: any) => { + console.error( + i18n.translate( + 'opensearch.reports.reportDefinitionsDetails.schedule.breadcrumb.error', + { + defaultMessage: + 'error when getting report definition details: {error}', + values: { error: error } + } + ) + ); + handleDetailsErrorToast(); + }); }, []); const downloadIconDownload = async () => { @@ -495,7 +518,7 @@ export function ReportDefinitionDetails(props: { match?: any; setBreadcrumbs?: a handleLoading(false); }; - const fileFormatDownload = (data: { [x: string]: any; }) => { + const fileFormatDownload = (data: { [x: string]: any }) => { let formatUpper = data['fileFormat']; formatUpper = fileFormatsUpper[formatUpper]; return ( @@ -513,7 +536,8 @@ export function ReportDefinitionDetails(props: { match?: any; setBreadcrumbs?: a return ( {data['source']} @@ -533,14 +557,14 @@ export function ReportDefinitionDetails(props: { match?: any; setBreadcrumbs?: a httpClient .put(`../api/reporting/reportDefinitions/${reportDefinitionId}`, { body: JSON.stringify(updatedReportDefinition), - params: reportDefinitionId.toString(), + params: reportDefinitionId.toString() }) - .then(() => { + .then(async () => { const updatedRawResponse = { report_definition: {} }; updatedRawResponse.report_definition = updatedReportDefinition; 
handleReportDefinitionRawResponse(updatedRawResponse); setReportDefinitionDetails( - getReportDefinitionDetailsMetadata(updatedReportDefinition) + await getReportDefinitionDetailsMetadata(updatedReportDefinition) ); if (statusChange === 'Enable') { handleSuccessChangingScheduleStatusToast('enable'); @@ -548,7 +572,7 @@ export function ReportDefinitionDetails(props: { match?: any; setBreadcrumbs?: a handleSuccessChangingScheduleStatusToast('disable'); } }) - .catch((error: { body: { statusCode: number; }; }) => { + .catch((error: { body: { statusCode: number } }) => { console.error('error in updating report definition status:', error); if (error.body.statusCode === 403) { handleErrorChangingScheduleStatusToast('permissions'); @@ -602,7 +626,7 @@ export function ReportDefinitionDetails(props: { match?: any; setBreadcrumbs?: a .then(() => { window.location.assign(`reports-dashboards#/delete=success`); }) - .catch((error: { body: { statusCode: number; }; }) => { + .catch((error: { body: { statusCode: number } }) => { console.log('error when deleting report definition:', error); if (error.body.statusCode === 403) { handlePermissionsMissingDeleteToast(); @@ -625,53 +649,89 @@ export function ReportDefinitionDetails(props: { match?: any; setBreadcrumbs?: a ); const triggerSection = - reportDefinitionDetails.triggerType === ON_DEMAND ? ( - - ) : ( - - - - - - - ); + : + + + + + ; + + const showDeleteConfirmationModal = showDeleteModal + ? + : null; - const showDeleteConfirmationModal = showDeleteModal ? ( - - ) : null; + const showLoadingModal = showLoading + ? + : null; - const showLoadingModal = showLoading ? ( - - ) : null; + const getConfigChannel = async idChannels => { + const { httpClient } = props; + const configId = idChannels[0]; + const { config_list } = await httpClient.get( + `${REPORTING_NOTIFICATIONS_DASHBOARDS_API.GET_CONFIG}/${configId}` + ); + return config_list; + }; + + const notificationSection = ( + + + + + + + ); return ( <> @@ -834,6 +894,7 @@ export function ReportDefinitionDetails(props: { match?: any; setBreadcrumbs?: a {triggerSection} + {notificationSection} {readable}; }, }, + { + field: 'notificationsEnabled', + name: i18n.translate( + 'opensearch.reports.reportDefinitionsTable.columns.notificationsEnabled', + { defaultMessage: 'Notifications' } + ), + sortable: true, + truncateText: false, + }, { field: 'status', name: i18n.translate( diff --git a/public/components/main/report_details/report_details.tsx b/public/components/main/report_details/report_details.tsx index 209c5ea6..28518b4a 100644 --- a/public/components/main/report_details/report_details.tsx +++ b/public/components/main/report_details/report_details.tsx @@ -21,7 +21,11 @@ import { EuiIcon, EuiGlobalToastList, } from '@elastic/eui'; -import { fileFormatsUpper, generateReportById } from '../main_utils'; +import { + fileFormatsUpper, + generateReportById, + sendTestNotificationsMessage, +} from '../main_utils'; import { GenerateReportLoadingModal } from '../loading_modal'; import { ReportSchemaType } from '../../../../server/model'; import dateMath from '@elastic/datemath'; @@ -289,6 +293,11 @@ export function ReportDetails(props: { match?: any; setBreadcrumbs?: any; httpCl handleErrorToast, handlePermissionsMissingDownloadToast ); + await sendTestNotificationsMessage( + reportId, + props.httpClient, + reportDetails + ); handleLoading(false); }; diff --git a/public/components/main/reports_table.tsx b/public/components/main/reports_table.tsx index 8c5c1516..e5a8a73d 100644 --- 
a/public/components/main/reports_table.tsx +++ b/public/components/main/reports_table.tsx @@ -18,6 +18,7 @@ import { fileFormatsUpper, humanReadableDate, generateReportById, + sendTestNotificationsMessage, } from './main_utils'; import { GenerateReportLoadingModal } from './loading_modal'; @@ -95,7 +96,7 @@ export function ReportsTable(props) { setShowLoading(e); }; - const onDemandDownload = async (id: any) => { + const onDemandDownload = async (id: any, item: any) => { handleLoading(true); await generateReportById( id, @@ -104,6 +105,7 @@ export function ReportsTable(props) { handleErrorToast, handlePermissionsMissingToast ); + await sendTestNotificationsMessage(id, httpClient, item); handleLoading(false); }; @@ -186,7 +188,7 @@ export function ReportsTable(props) { ) : ( onDemandDownload(id)} + onClick={() => onDemandDownload(id, item)} id="landingPageOnDemandDownload" > {fileFormatsUpper[item.format]} diff --git a/public/components/report_definitions/create/create_report_definition.tsx b/public/components/report_definitions/create/create_report_definition.tsx index 2bc43057..1452bc29 100644 --- a/public/components/report_definitions/create/create_report_definition.tsx +++ b/public/components/report_definitions/create/create_report_definition.tsx @@ -13,16 +13,17 @@ import { EuiButton, EuiTitle, EuiPageBody, - EuiSpacer, + EuiSpacer } from '@elastic/eui'; import { ReportSettings } from '../report_settings'; import { generateReportFromDefinitionId } from '../../main/main_utils'; import { converter } from '../utils'; import { permissionsMissingToast, - permissionsMissingActions, + permissionsMissingActions } from '../../utils/utils'; import { definitionInputValidation } from '../utils/utils'; +import { ReportDelivery } from '../delivery'; interface reportParamsType { report_name: string; @@ -89,7 +90,12 @@ export interface timeRangeParams { timeTo: Date; } -export function CreateReport(props: { [x: string]: any; setBreadcrumbs?: any; httpClient?: any; chrome: any }) { +export function CreateReport(props: { + [x: string]: any; + setBreadcrumbs?: any; + httpClient?: any; + chrome: any; +}) { const { chrome } = props; let createReportDefinitionRequest: reportDefinitionParams = { @@ -100,8 +106,8 @@ export function CreateReport(props: { [x: string]: any; setBreadcrumbs?: any; ht core_params: { base_url: '', report_format: '', - time_duration: '', - }, + time_duration: '' + } }, delivery: { configIds: [], @@ -110,8 +116,8 @@ export function CreateReport(props: { [x: string]: any; setBreadcrumbs?: any; ht htmlDescription: '' }, trigger: { - trigger_type: '', - }, + trigger_type: '' + } }; const [toasts, setToasts] = useState([]); @@ -120,23 +126,23 @@ export function CreateReport(props: { [x: string]: any; setBreadcrumbs?: any; ht const [ showSettingsReportNameError, - setShowSettingsReportNameError, + setShowSettingsReportNameError ] = useState(false); const [ settingsReportNameErrorMessage, - setSettingsReportNameErrorMessage, + setSettingsReportNameErrorMessage ] = useState(''); const [ showSettingsReportSourceError, - setShowSettingsReportSourceError, + setShowSettingsReportSourceError ] = useState(false); const [ settingsReportSourceErrorMessage, - setSettingsReportSourceErrorMessage, + setSettingsReportSourceErrorMessage ] = useState(''); const [ showTriggerIntervalNaNError, - setShowTriggerIntervalNaNError, + setShowTriggerIntervalNaNError ] = useState(false); const [showCronError, setShowCronError] = useState(false); const [showTimeRangeError, setShowTimeRangeError] = useState(false); 
@@ -153,12 +159,12 @@ export function CreateReport(props: { [x: string]: any; setBreadcrumbs?: any; ht 'opensearch.reports.createReportDefinition.error.fieldsHaveAnError', { defaultMessage: - 'One or more fields have an error. Please check and try again.', + 'One or more fields have an error. Please check and try again.' } ), color: 'danger', iconType: 'alert', - id: 'errorToast', + id: 'errorToast' }; // @ts-ignore setToasts(toasts.concat(errorToast)); @@ -182,7 +188,7 @@ export function CreateReport(props: { [x: string]: any; setBreadcrumbs?: any; ht ), color: 'danger', iconType: 'alert', - id: 'errorToast', + id: 'errorToast' }; } // @ts-ignore @@ -201,7 +207,7 @@ export function CreateReport(props: { [x: string]: any; setBreadcrumbs?: any; ht ), color: 'danger', iconType: 'alert', - id: 'timeRangeErrorToast', + id: 'timeRangeErrorToast' }; // @ts-ignore setToasts(toasts.concat(errorToast)); @@ -211,13 +217,13 @@ export function CreateReport(props: { [x: string]: any; setBreadcrumbs?: any; ht addInvalidTimeRangeToastHandler(); }; - const removeToast = (removedToast: { id: string; }) => { + const removeToast = (removedToast: { id: string }) => { setToasts(toasts.filter((toast: any) => toast.id !== removedToast.id)); }; let timeRange = { timeFrom: new Date(), - timeTo: new Date(), + timeTo: new Date() }; const createNewReportDefinition = async ( @@ -244,8 +250,8 @@ export function CreateReport(props: { [x: string]: any; setBreadcrumbs?: any; ht setShowTriggerIntervalNaNError, timeRange, setShowTimeRangeError, - setShowCronError, - ).then((response) => { + setShowCronError + ).then(response => { error = response; }); if (error) { @@ -257,19 +263,23 @@ export function CreateReport(props: { [x: string]: any; setBreadcrumbs?: any; ht .post('../api/reporting/reportDefinition', { body: JSON.stringify(metadata), headers: { - 'Content-Type': 'application/json', - }, - }) - .then(async (resp: { scheduler_response: { reportDefinitionId: string; }; }) => { - //TODO: consider handle the on demand report generation from server side instead - if (metadata.trigger.trigger_type === 'On demand') { - const reportDefinitionId = - resp.scheduler_response.reportDefinitionId; - generateReportFromDefinitionId(reportDefinitionId, httpClient); + 'Content-Type': 'application/json' } - window.location.assign(`reports-dashboards#/create=success`); }) - .catch((error: {body: { statusCode: number; }; }) => { + .then( + async (resp: { + scheduler_response: { reportDefinitionId: string }; + }) => { + //TODO: consider handle the on demand report generation from server side instead + if (metadata.trigger.trigger_type === 'On demand') { + const reportDefinitionId = + resp.scheduler_response.reportDefinitionId; + generateReportFromDefinitionId(reportDefinitionId, httpClient); + } + window.location.assign(`reports-dashboards#/create=success`); + } + ) + .catch((error: { body: { statusCode: number } }) => { console.log('error in creating report definition: ' + error); if (error.body.statusCode === 403) { handleErrorOnCreateToast('permissions'); @@ -288,15 +298,15 @@ export function CreateReport(props: { [x: string]: any; setBreadcrumbs?: any; ht 'opensearch.reports.createReportDefinition.breadcrumb.reporting', { defaultMessage: 'Reporting' } ), - href: '#', + href: '#' }, { text: i18n.translate( 'opensearch.reports.createReportDefinition.breadcrumb.createReportDefinition', { defaultMessage: 'Create report definition' } ), - href: '#/create', - }, + href: '#/create' + } ]); }, []); @@ -305,16 +315,22 @@ export function 
CreateReport(props: { [x: string]: any; setBreadcrumbs?: any; ht

- {!getNavGroupEnabled && i18n.translate('opensearch.reports.createReportDefinition.title', { - defaultMessage: 'Create report definition', - })} + {!getNavGroupEnabled && + i18n.translate( + 'opensearch.reports.createReportDefinition.title', + { + defaultMessage: 'Create report definition' + } + )}

- {!getNavGroupEnabled && } + {!getNavGroupEnabled && } + + {i18n.translate( diff --git a/public/components/report_definitions/delivery/delivery.tsx b/public/components/report_definitions/delivery/delivery.tsx index b3ab50f3..938cce9a 100644 --- a/public/components/report_definitions/delivery/delivery.tsx +++ b/public/components/report_definitions/delivery/delivery.tsx @@ -15,14 +15,14 @@ import { EuiCheckbox, EuiComboBox, EuiFieldText, - EuiButton, + EuiButton } from '@elastic/eui'; import CSS from 'csstype'; import { getChannelsQueryObject, noDeliveryChannelsSelectedMessage, testMessageConfirmationMessage, - testMessageFailureMessage, + testMessageFailureMessage } from './delivery_constants'; import 'react-mde/lib/styles/css/react-mde-all.css'; import { reportDefinitionParams } from '../create/create_report_definition'; @@ -32,7 +32,7 @@ import { getAvailableNotificationsChannels } from '../../main/main_utils'; import { REPORTING_NOTIFICATIONS_DASHBOARDS_API } from '../../../../common'; const styles: CSS.Properties = { - maxWidth: '800px', + maxWidth: '800px' }; // TODO: add to schema to avoid need for export @@ -62,7 +62,7 @@ export function ReportDelivery(props: ReportDeliveryProps) { showDeliverySubjectError, deliverySubjectError, showDeliveryTextError, - deliveryTextError, + deliveryTextError } = props; const [isDeliveryHidden, setIsHidden] = useState(false); @@ -81,16 +81,13 @@ export function ReportDelivery(props: ReportDeliveryProps) { const handleSendNotification = (e: { target: { checked: boolean } }) => { setSendNotification(e.target.checked); includeDelivery = e.target.checked; - if (includeDelivery) { + if (!edit) { reportDefinitionRequest.delivery.title = 'New report'; reportDefinitionRequest.delivery.textDescription = 'New report available to view'; reportDefinitionRequest.delivery.htmlDescription = converter.makeHtml( 'New report available to view' ); - } else { - reportDefinitionRequest.delivery.title = `\u2014`; - reportDefinitionRequest.delivery.textDescription = `\u2014`; } }; @@ -125,7 +122,7 @@ export function ReportDelivery(props: ReportDeliveryProps) { configIds: [], title: `\u2014`, // default values before any Notifications settings are configured textDescription: `\u2014`, - htmlDescription: '', + htmlDescription: '' }; }; @@ -144,7 +141,7 @@ export function ReportDelivery(props: ReportDeliveryProps) { event_id: event.event_id, created_time_ms: event.created_time_ms, last_updated_time_ms: event.last_updated_time_ms, - success, + success }; }; @@ -166,23 +163,16 @@ export function ReportDelivery(props: ReportDeliveryProps) { try { const eventId = await httpClientProps .get( - `${REPORTING_NOTIFICATIONS_DASHBOARDS_API.SEND_TEST_MESSAGE}/${selectedChannels[i].id}`, + `${REPORTING_NOTIFICATIONS_DASHBOARDS_API.SEND_TEST_MESSAGE}/${selectedChannels[ + i + ].id}`, { query: { - feature: 'reports', - }, + feature: 'report' + } } ) - .then((response) => response.event_id); - - await getNotification(eventId).then((response) => { - if (!response.success) { - const error = new Error('Failed to send the test message.'); - failedChannels.push(response.status_list[0].config_name); - error.stack = JSON.stringify(response.status_list, null, 2); - throw error; - } - }); + .then(response => response.event_source.reference_id); } catch (error) { testMessageFailures = true; } @@ -202,16 +192,16 @@ export function ReportDelivery(props: ReportDeliveryProps) { headers: { Accept: 'text/plain, */*; q=0.01', 'Accept-Language': 'en-US,en;q=0.5', - 'osd-xsrf': 'true', + 'osd-xsrf': 'true' }, 
method: 'POST', - mode: 'cors', + mode: 'cors' } ) - .then((response) => { + .then(response => { return response.text(); }) - .then(function (data) { + .then(function(data) { if (data.includes('opensearch-notifications')) { setIsHidden(false); return; @@ -224,7 +214,7 @@ export function ReportDelivery(props: ReportDeliveryProps) { checkIfNotificationsPluginIsInstalled(); httpClientProps .get(`${REPORTING_NOTIFICATIONS_DASHBOARDS_API.GET_CONFIGS}`, { - query: getChannelsQueryObject, + query: getChannelsQueryObject }) .then(async (response: any) => { let availableChannels = getAvailableNotificationsChannels( @@ -238,17 +228,17 @@ export function ReportDelivery(props: ReportDeliveryProps) { httpClientProps .get(`../api/reporting/reportDefinitions/${editDefinitionId}`) .then(async (response: any) => { - if (response.report_definition.delivery.configIds.length > 0) { + let delivery = response.report_definition.delivery; + if (delivery.configIds.length > 0) { // add config IDs handleSendNotification({ target: { checked: true } }); - let delivery = response.report_definition.delivery; let editChannelOptions = []; for (let i = 0; i < delivery.configIds.length; ++i) { for (let j = 0; j < availableChannels.length; ++j) { if (delivery.configIds[i] === availableChannels[j].id) { let editChannelOption = { label: availableChannels[j].label, - id: availableChannels[j].id, + id: availableChannels[j].id }; editChannelOptions.push(editChannelOption); break; @@ -256,10 +246,10 @@ export function ReportDelivery(props: ReportDeliveryProps) { } } setSelectedChannels(editChannelOptions); - setNotificationSubject(delivery.title); - setNotificationMessage(delivery.textDescription); - reportDefinitionRequest.delivery = delivery; } + setNotificationSubject(delivery.title); + setNotificationMessage(delivery.textDescription); + reportDefinitionRequest.delivery = delivery; }); } else { defaultCreateDeliveryParams(); @@ -273,68 +263,67 @@ export function ReportDelivery(props: ReportDeliveryProps) { }); }, []); - const showNotificationsBody = sendNotification ? ( -
- Promise.resolve(converter.makeHtml(markdown)) - } - /> - Send test message
- ) : null; + const showNotificationsBody = sendNotification + ?
+ Promise.resolve(converter.makeHtml(markdown))} + /> + Send test message
+ : null; return (