From fbbfc5d887f766a41568fc7149ca66164161ae55 Mon Sep 17 00:00:00 2001 From: Gordon Smith Date: Wed, 8 Nov 2023 02:54:23 +0100 Subject: [PATCH 1/9] HPCC-30710 Refactor Smoketest GH Action Signed-off-by: Gordon Smith --- .github/workflows/build-docker.yml | 179 +++++++ .github/workflows/build-gh_runner.yml | 244 +++++++++ .github/workflows/build-vcpkg.yml | 567 ++++++++------------- .github/workflows/test-smoke-gh_runner.yml | 147 ++++++ .github/workflows/test-ui-gh_runner.yml | 124 +++++ .github/workflows/test-unit-gh_runner.yml | 78 +++ dockerfiles/vcpkg/amazonlinux.dockerfile | 1 + dockerfiles/vcpkg/build.sh | 19 +- dockerfiles/vcpkg/centos-7.dockerfile | 6 +- dockerfiles/vcpkg/centos-8.dockerfile | 6 +- dockerfiles/vcpkg/ubuntu-20.04.dockerfile | 1 + dockerfiles/vcpkg/ubuntu-22.04.dockerfile | 1 + dockerfiles/vcpkg/ubuntu-23.10.dockerfile | 1 + vcpkg | 2 +- 14 files changed, 1018 insertions(+), 358 deletions(-) create mode 100644 .github/workflows/build-docker.yml create mode 100644 .github/workflows/build-gh_runner.yml create mode 100644 .github/workflows/test-smoke-gh_runner.yml create mode 100644 .github/workflows/test-ui-gh_runner.yml create mode 100644 .github/workflows/test-unit-gh_runner.yml diff --git a/.github/workflows/build-docker.yml b/.github/workflows/build-docker.yml new file mode 100644 index 00000000000..56f318be832 --- /dev/null +++ b/.github/workflows/build-docker.yml @@ -0,0 +1,179 @@ +name: Build Package (Docker) + +on: + workflow_call: + inputs: + os: + type: string + description: 'Operating System' + required: false + default: 'ubuntu-22.04' + ln: + type: boolean + description: 'Internal Build' + required: false + default: false + single-package: + type: boolean + description: 'Single Package' + required: false + default: true + build-type: + type: string + description: 'CMake Build Type' + required: false + default: 'RelWithDebInfo' + containerized: + type: boolean + description: 'Containerized Build' + required: false + default: false + 
strip-files: + type: boolean + description: 'Single Package' + required: false + default: true + cmake-configuration: + type: string + description: 'CMake Configuration' + required: false + default: '-DVCPKG_FILES_DIR=/hpcc-dev -DCPACK_THREADS=0 -DUSE_OPTIONAL=OFF -DUSE_CPPUNIT=ON -DSUPPRESS_V8EMBED=ON -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache' + cmake-configuration-ex: + type: string + description: 'CMake Configuration Extra' + required: false + default: '' + update-cache: + type: boolean + description: 'Update Cache' + required: false + default: true + upload-package: + type: boolean + description: 'Upload Package as Asset' + required: false + default: false + asset-name: + type: string + description: 'Asset Name (if upload-package is true)' + required: false + default: 'build-docker-package' + secrets: + LNB_TOKEN: + required: false + +jobs: + + build-docker: + runs-on: ubuntu-22.04 + steps: + - name: Free additional disk space (remove Android SDK + Tools) + run: | + sudo rm -rf /usr/local/lib/android + + - name: Checkout HPCC-Platform + uses: actions/checkout@v3 + with: + submodules: recursive + path: ${{ github.workspace }}/HPCC-Platform + + - name: Checkout LN + if: ${{ inputs.ln == true }} + uses: actions/checkout@v3 + with: + repository: ${{ github.repository_owner }}/LN + token: ${{ secrets.LNB_TOKEN }} + ref: ${{ github.base_ref }} + submodules: recursive + path: ${{ github.workspace }}/LN + + - name: Calculate vars + id: vars + working-directory: ${{ github.workspace }}/HPCC-Platform/vcpkg + shell: "bash" + run: | + branch_label_1=${{ github.base_ref }} + branch_label_2=$(echo ${{ github.ref }} | cut -d'/' -f3) + echo "branch_label=${branch_label_1:-$branch_label_2}" >> $GITHUB_OUTPUT + vcpkg_sha_short=$(git rev-parse --short=8 HEAD) + echo "vcpkg_sha_short=$vcpkg_sha_short" >> $GITHUB_OUTPUT + docker_build_label=hpccsystems/platform-build-base-${{ inputs.os }} + echo "docker_tag=$docker_build_label:$vcpkg_sha_short" >> 
$GITHUB_OUTPUT + + - name: Print vars + shell: "bash" + run: | + echo "${{ toJSON(steps.vars.outputs) }}" + + - uses: docker/setup-buildx-action@v2 + + - name: Pull Build Image + run: | + docker pull ${{ steps.vars.outputs.docker_tag }} || true + + - uses: hendrikmuhs/ccache-action@v1.2 + with: + save: ${{ inputs.update-cache == true }} + key: docker-${{ inputs.os }}-${{ inputs.build-type }}-${{ steps.vars.outputs.branch_label }}-${{ inputs.containerized == true && 'k8s' || 'bare-metal' }}-${{ inputs.ln == true && 'LN' || 'HPCC-Platform' }} + restore-keys: | + docker-${{ inputs.os }}-${{ inputs.build-type }}-${{ steps.vars.outputs.branch_label }}-${{ inputs.containerized == true && 'k8s' || 'bare-metal' }}- + docker-${{ inputs.os }}-${{ inputs.build-type }}-${{ steps.vars.outputs.branch_label }}- + docker-${{ inputs.os }}-${{ inputs.build-type }}- + docker-${{ inputs.os }}- + + - name: CMake Configure and Build + shell: "bash" + run: | + mkdir -p ${{ github.workspace }}/LN + mkdir -p ${{ github.workspace }}/build + mkdir -p ${{ github.workspace }}/.ccache + declare -a plugins + plugins=(${{ inputs.single-package == true && '"PLATFORM"' || '"PLATFORM" "CASSANDRAEMBED" "COUCHBASEEMBED" "ECLBLAS" "H3" "JAVAEMBED" "KAFKA" "MEMCACHED" "MONGODBEMBED" "MYSQLEMBED" "NLP" "PARQUETEMBED" "REDIS" "REMBED" "SQLITE3EMBED" "SQS"' }}) + for plugin in "${plugins[@]}"; do + sudo rm -f ${{ github.workspace }}/build/CMakeCache.txt + sudo rm -rf ${{ github.workspace }}/build/CMakeFiles + docker run --rm \ + --mount source="${{ github.workspace }}/HPCC-Platform",target=/hpcc-dev/HPCC-Platform,type=bind,consistency=delegated \ + --mount source="${{ github.workspace }}/LN",target=/hpcc-dev/LN,type=bind,consistency=delegated \ + --mount source="${{ github.workspace }}/build",target=/hpcc-dev/build,type=bind,consistency=delegated \ + --mount source="${{ github.workspace }}/.ccache",target=/root/.ccache,type=bind,consistency=delegated \ + ${{ steps.vars.outputs.docker_tag }} "\ + cmake -G 
Ninja -S /hpcc-dev/${{ inputs.ln == true && 'LN' || 'HPCC-Platform' }} -B /hpcc-dev/build -DHPCC_SOURCE_DIR=/hpcc-dev/HPCC-Platform -DCMAKE_BUILD_TYPE=${{ inputs.build-type }} -DCONTAINERIZED=${{ inputs.containerized == true && 'ON' || 'OFF' }} -DCPACK_STRIP_FILES=${{ inputs.strip-files == true && 'ON' || 'OFF' }} ${{ inputs.cmake-configuration }} ${{ inputs.cmake-configuration-ex }} ${{ inputs.single-package == true && '-DINCLUDE_PLUGINS=ON' || '-D$plugin=ON' }} && \ + cmake --build /hpcc-dev/build --parallel ${{ inputs.upload-package == true && '--target package' || ''}}" + done + + - name: Upload Package + if: ${{ inputs.upload-package == true }} + uses: actions/upload-artifact@v3 + with: + name: ${{ inputs.asset-name }} + path: | + ${{ github.workspace }}/build/*.deb + ${{ github.workspace }}/build/*.rpm + if-no-files-found: error + + - name: Upload Support Files + if: ${{ inputs.upload-package == true }} + uses: actions/upload-artifact@v3 + with: + name: ${{ inputs.asset-name }}-support-files + path: | + ${{ github.workspace }}/HPCC-Platform/.github/workflows/smoketest-preabort.sh + ${{ github.workspace }}/HPCC-Platform/.github/workflows/timeoutcmd + if-no-files-found: error + + - name: Upload UI Test Files + if: ${{ inputs.upload-package == true }} + uses: actions/upload-artifact@v3 + with: + name: ${{ inputs.asset-name }}-ui_test-files + path: | + ${{ github.workspace }}/HPCC-Platform/esp/src/test-ui/**/* + if-no-files-found: error + + - name: Upload Error Logs + if: ${{ failure() || cancelled() }} + uses: actions/upload-artifact@v3 + with: + name: ${{ inputs.os }}-${{ inputs.ln == true && 'LN' || 'HPCC-Platform' }}-logs + path: ${{ github.workspace }}/build/**/*.log diff --git a/.github/workflows/build-gh_runner.yml b/.github/workflows/build-gh_runner.yml new file mode 100644 index 00000000000..58e0dbc3052 --- /dev/null +++ b/.github/workflows/build-gh_runner.yml @@ -0,0 +1,244 @@ +name: Build Package (gh-runner) + +env: + VCPKG_BINARY_SOURCES: 
"clear;nuget,GitHub,read" + VCPKG_NUGET_REPOSITORY: "https://github.com/hpcc-systems/vcpkg" + +on: + workflow_call: + inputs: + os: + type: string + description: 'Operating System' + required: false + default: 'ubuntu-22.04' + ln: + type: boolean + description: 'Internal Build' + required: false + default: false + single-package: + type: boolean + description: 'Single Package' + required: false + default: true + build-type: + type: string + description: 'CMake Build Type' + required: false + default: 'RelWithDebInfo' + containerized: + type: boolean + description: 'Containerized Build' + required: false + default: false + strip-files: + type: boolean + description: 'Single Package' + required: false + default: true + cmake-configuration: + type: string + description: 'CMake Configuration' + required: false + default: '-DCPACK_THREADS=0 -DUSE_OPTIONAL=OFF -DUSE_CPPUNIT=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_REMBED=ON -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache' + cmake-configuration-ex: + type: string + description: 'CMake Configuration Extra' + required: false + default: '' + update-cache: + type: boolean + description: 'Update Cache' + required: false + default: true + upload-package: + type: boolean + description: 'Upload Package as Asset' + required: false + default: false + asset-name: + type: string + description: 'Asset Name (if upload-package is true)' + required: false + default: 'build-docker-package' + secrets: + LNB_TOKEN: + required: false + +jobs: + + build-gh_runner: + runs-on: ${{ inputs.os }} + steps: + - name: Ubuntu Dependencies + if: ${{ contains(inputs.os, 'ubuntu') }} + shell: "bash" + run: | + sudo apt-get update -y + sudo apt-get install -yq --no-install-recommends \ + apt-transport-https \ + autoconf \ + autoconf-archive \ + automake \ + autotools-dev \ + binutils-dev \ + bison \ + build-essential \ + ca-certificates \ + curl \ + dirmngr \ + flex \ + git \ + gnupg \ + groff-base \ + libtool \ + pkg-config \ + 
software-properties-common \ + tar \ + unzip \ + uuid-dev \ + zip + + - name: Ubuntu Dependencies 2 + if: ${{ contains(inputs.os, 'ubuntu') }} + shell: "bash" + run: | + sudo apt-get install -yq --no-install-recommends \ + ccache \ + default-jdk \ + ninja-build \ + python3-dev \ + rsync \ + fop \ + libsaxonb-java + + - name: Ubuntu Mono Dependencies + if: ${{ inputs.os == 'ubuntu-22.04' }} + shell: "bash" + run: | + sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 3FA7E0328081BFF6A14DA29AA6A19B38D3D831EF + echo "deb https://download.mono-project.com/repo/ubuntu stable-focal main" | sudo tee /etc/apt/sources.list.d/mono-official-stable.list + sudo apt-get update -y + sudo apt-get -yq install -f mono-complete + + - name: OSX Dependencies + if: ${{ contains(inputs.os, 'macos') }} + shell: "bash" + run: | + brew install bison flex pkg-config autoconf autoconf-archive automake libtool cmake openjdk@11 ninja + + - name: "Remove builtin vcpkg (old)" + working-directory: . 
+ shell: "bash" + run: | + ${{ !contains(inputs.os, 'windows') && 'sudo' || '' }} rm -rf "$VCPKG_INSTALLATION_ROOT" + + - name: Checkout HPCC-Platform + uses: actions/checkout@v3 + with: + submodules: recursive + path: ${{ github.workspace }}/HPCC-Platform + + - name: Checkout LN + if: ${{ inputs.ln == true }} + uses: actions/checkout@v3 + with: + repository: ${{ github.repository_owner }}/LN + token: ${{ secrets.LNB_TOKEN }} + ref: ${{ github.base_ref }} + submodules: recursive + path: ${{ github.workspace }}/LN + + - name: "vcpkg Bootstrap" + shell: "bash" + run: | + ./HPCC-Platform/vcpkg/bootstrap-vcpkg.sh + + - name: "Setup NuGet credentials" + shell: "bash" + run: | + ${{ !contains(inputs.os, 'windows') && 'mono' || '' }} `./HPCC-Platform/vcpkg/vcpkg fetch nuget | tail -n 1` \ + sources add \ + -name "GitHub" \ + -source "https://nuget.pkg.github.com/hpcc-systems/index.json" \ + -storepasswordincleartext \ + -username "${{ github.actor }}" \ + -password "${{ secrets.GITHUB_TOKEN }}" + + - name: Calculate vars + id: vars + working-directory: ${{ github.workspace }}/HPCC-Platform/vcpkg + shell: "bash" + run: | + branch_label_1=${{ github.base_ref }} + branch_label_2=$(echo ${{ github.ref }} | cut -d'/' -f3) + echo "branch_label=${branch_label_1:-$branch_label_2}" >> $GITHUB_OUTPUT + vcpkg_sha_short=$(git rev-parse --short=8 HEAD) + echo "vcpkg_sha_short=$vcpkg_sha_short" >> $GITHUB_OUTPUT + + - name: Print vars + shell: "bash" + run: | + echo "${{ toJSON(steps.vars.outputs) }}" + + - uses: actions/cache@v3 + id: cache + with: + path: | + ${{github.workspace}}/build/vcpkg_installed + key: vcpkg-${{ inputs.os }}-${{ steps.vars.outputs.vcpkg_sha_short }} + + - uses: hendrikmuhs/ccache-action@v1.2 + with: + save: ${{ inputs.update-cache == true }} + key: gh-runner-${{ inputs.os }}-${{ inputs.build-type }}-${{ steps.vars.outputs.branch_label }}-${{ inputs.containerized == true && 'k8s' || 'bare-metal' }}-${{ inputs.ln == true && 'LN' || 'HPCC-Platform' }} + 
restore-keys: | + gh-runner-${{ inputs.os }}-${{ inputs.build-type }}-${{ steps.vars.outputs.branch_label }}-${{ inputs.containerized == true && 'k8s' || 'bare-metal' }}- + gh-runner-${{ inputs.os }}-${{ inputs.build-type }}-${{ steps.vars.outputs.branch_label }}- + gh-runner-${{ inputs.os }}-${{ inputs.build-type }}- + gh-runner-${{ inputs.os }}- + + - name: CMake Configure and Build + shell: "bash" + run: | + mkdir -p ${{ github.workspace }}/LN + mkdir -p ${{ github.workspace }}/build + cmake ${{ !contains(inputs.os, 'windows') && '-G Ninja' || '' }} -S ./${{ inputs.ln == true && 'LN' || 'HPCC-Platform' }} -B ./build -DCMAKE_BUILD_TYPE=${{ inputs.build-type }} -DCONTAINERIZED=${{ inputs.containerized == true && 'ON' || 'OFF' }} -DCPACK_STRIP_FILES=${{ inputs.strip-files == true && 'ON' || 'OFF' }} ${{ inputs.cmake-configuration }} ${{ inputs.cmake-configuration-ex }} + cmake --build ./build ${{ contains(inputs.os, 'windows') && '--config Release' || ''}} --parallel ${{ inputs.upload-package == true && '--target package' || ''}} + + - name: Upload Package + if: ${{ inputs.upload-package == true }} + uses: actions/upload-artifact@v3 + with: + name: ${{ inputs.asset-name }} + path: | + ${{ github.workspace }}/build/*.deb + ${{ github.workspace }}/build/*.rpm + if-no-files-found: error + + - name: Upload Support Files + if: ${{ inputs.upload-package == true }} + uses: actions/upload-artifact@v3 + with: + name: ${{ inputs.asset-name }}-support-files + path: | + ${{ github.workspace }}/HPCC-Platform/.github/workflows/smoketest-preabort.sh + ${{ github.workspace }}/HPCC-Platform/.github/workflows/timeoutcmd + if-no-files-found: error + + - name: Upload UI Test Files + if: ${{ inputs.upload-package == true }} + uses: actions/upload-artifact@v3 + with: + name: ${{ inputs.asset-name }}-ui_test-files + path: | + ${{ github.workspace }}/HPCC-Platform/esp/src/test-ui/**/* + if-no-files-found: error + + - name: Upload Error Logs + if: ${{ failure() || cancelled() }} + uses: 
actions/upload-artifact@v3 + with: + name: ${{ inputs.os }}-${{ inputs.ln == true && 'LN' || 'HPCC-Platform' }}-logs + path: ${{ github.workspace }}/build/**/*.log diff --git a/.github/workflows/build-vcpkg.yml b/.github/workflows/build-vcpkg.yml index 0bd873d4219..7c79c2d72a6 100644 --- a/.github/workflows/build-vcpkg.yml +++ b/.github/workflows/build-vcpkg.yml @@ -3,8 +3,38 @@ name: Test Build env: VCPKG_BINARY_SOURCES: "clear;nuget,GitHub,read" VCPKG_NUGET_REPOSITORY: "https://github.com/hpcc-systems/vcpkg" + OS_DEPENDENCIES: "bison flex build-essential binutils-dev curl lsb-release libcppunit-dev python3-dev default-jdk + r-base-dev r-cran-rcpp r-cran-rinside r-cran-inline pkg-config libtool autotools-dev automake git cmake" on: + workflow_dispatch: + inputs: + os: + type: choice + options: + - 'ubuntu-23.10' + - 'ubuntu-22.04' + - 'ubuntu-20.04' + - 'centos-8' + - 'centos-7' + - 'amazonlinux' + description: 'Operating System' + required: false + default: 'ubuntu-22.04' + ln: + type: boolean + description: 'Internal Build' + required: false + default: false + smoketest: + type: boolean + description: 'Run Smoketest' + required: false + default: false + + # Do not include "push" in final version + push: + pull_request: branches: - "master" @@ -21,348 +51,195 @@ on: - cron: "0 0 * * *" jobs: - preamble: - name: Check Skip - runs-on: ubuntu-22.04 - outputs: - folder_platform: ${{ steps.vars.outputs.folder_platform }} - mount_platform: ${{ steps.vars.outputs.mount_platform }} - folder_ln: ${{ steps.vars.outputs.folder_ln }} - mount_ln: ${{ steps.vars.outputs.mount_ln }} - folder_build: ${{ steps.vars.outputs.folder_build }} - mount_build: ${{ steps.vars.outputs.mount_build }} - community_ref: ${{ steps.vars.outputs.community_ref }} - internal_ref: ${{ steps.vars.outputs.internal_ref }} - community_tag: ${{ steps.vars.outputs.community_tag }} - internal_tag: ${{ steps.vars.outputs.internal_tag }} - candidate_base_branch: ${{ steps.vars.outputs.candidate_base_branch 
}} - cmake_docker_config: ${{ steps.vars.outputs.cmake_docker_config }} - platform: "1" - platform_testing_do_not_release: ${{ steps.skip_check.outputs.platform }} - include_plugins: ${{ (steps.skip_check.outputs.plugins && 'ON') || 'OFF' }} - steps: - - name: Calculate vars - id: vars - run: | - echo "folder_platform=${{ github.workspace }}/HPCC-Platform" >> $GITHUB_OUTPUT - echo 'mount_platform=source="${{ github.workspace }}/HPCC-Platform",target=/hpcc-dev/HPCC-Platform,type=bind,consistency=cached' >> $GITHUB_OUTPUT - echo "folder_ln=${{ github.workspace }}/LN" >> $GITHUB_OUTPUT - echo 'mount_ln=source="${{ github.workspace }}/LN",target=/hpcc-dev/LN,type=bind,consistency=cached' >> $GITHUB_OUTPUT - echo "folder_build=${{ github.workspace }}/build" >> $GITHUB_OUTPUT - echo 'mount_build=source="${{ github.workspace }}/build",target=/hpcc-dev/build,type=bind,consistency=cached' >> $GITHUB_OUTPUT - community_ref=${{ github.ref }} - echo "community_ref=$community_ref" >> $GITHUB_OUTPUT - echo "internal_ref=${{ github.base_ref }}" >> $GITHUB_OUTPUT - community_tag=$(echo $community_ref | cut -d'/' -f3) - echo "community_tag=$community_tag" >> $GITHUB_OUTPUT - echo "internal_tag=$(echo $community_tag | sed 's/community/internal/')" >> $GITHUB_OUTPUT - echo "candidate_base_branch=${{ github.base_ref }}" >> $GITHUB_OUTPUT - echo "cmake_docker_config=-DCMAKE_BUILD_TYPE=RelWithDebInfo -DVCPKG_FILES_DIR=/hpcc-dev -DCPACK_THREADS=0 -DUSE_OPTIONAL=OFF" >> $GITHUB_OUTPUT - - - id: skip_check - uses: hpcc-systems/github-actions/changed-modules@main - with: - github_token: ${{ github.token }} - - - name: Print vars - run: | - echo "${{ toJSON(steps.vars.outputs) }})" - echo "${{ toJSON(steps.skip_check.outputs) }})" - - build-docker: - name: Build Docker - needs: preamble - runs-on: ubuntu-22.04 - strategy: - matrix: - include: - - os: ubuntu-23.10 - event_name: "pull_request" - - os: ubuntu-22.04 - name: k8s - container: true - event_name: "schedule" - - os: ubuntu-22.04 - 
event_name: "schedule" - - os: ubuntu-20.04 - event_name: "schedule" - - os: centos-8 - event_name: "schedule" - - os: centos-7 - event_name: "pull_request" - cmake_options_extra: "-DVCPKG_TARGET_TRIPLET=x64-centos-7-dynamic" - - os: amazonlinux - event_name: "schedule" - cmake_options_extra: "-DVCPKG_TARGET_TRIPLET=x64-amazonlinux-dynamic" - - os: centos-7 - name: LN - cmake_options_extra: "-DVCPKG_TARGET_TRIPLET=x64-centos-7-dynamic" - ln: true - event_name: "schedule" - fail-fast: false - - steps: - - name: Free additional disk space (remove Android SDK + Tools) - run: | - sudo rm -rf /usr/local/lib/android - - - name: Checkout HPCC-Platform - if: ${{ contains(matrix.event_name, github.event_name) && needs.preamble.outputs.platform }} - uses: actions/checkout@v3 - with: - ref: ${{ needs.preamble.outputs.community_ref }} - submodules: recursive - path: ${{ needs.preamble.outputs.folder_platform }} - - - name: Checkout LN - if: ${{ matrix.ln && contains(matrix.event_name, github.event_name) }} - uses: actions/checkout@v3 - with: - repository: ${{ github.repository_owner }}/LN - token: ${{ secrets.LNB_TOKEN }} - ref: ${{ needs.preamble.outputs.internal_ref }} - submodules: recursive - path: ${{ needs.preamble.outputs.folder_ln }} - - - name: Calculate vars - if: ${{ contains(matrix.event_name, github.event_name) && needs.preamble.outputs.platform }} - id: vars - working-directory: ${{ needs.preamble.outputs.folder_platform }}/vcpkg - run: | - vcpkg_sha_short=$(git rev-parse --short=8 HEAD) - echo "vcpkg_sha_short=$vcpkg_sha_short" >> $GITHUB_OUTPUT - docker_build_label=hpccsystems/platform-build-${{ matrix.os }} - echo "docker_build_label=$docker_build_label" >> $GITHUB_OUTPUT - echo "docker_tag=$docker_build_label:$vcpkg_sha_short" >> $GITHUB_OUTPUT - echo "docker_tag_candidate_base=$docker_build_label:${{ needs.preamble.outputs.candidate_base_branch }}" >> $GITHUB_OUTPUT - - - name: Print vars - run: | - echo "${{ toJSON(needs.preamble.outputs) }})" - echo "${{ 
toJSON(steps.vars.outputs) }})" - - - name: Set up Docker Buildx - if: ${{ contains(matrix.event_name, github.event_name) && needs.preamble.outputs.platform }} - id: buildx - uses: docker/setup-buildx-action@v2 - - - name: Pull previous images - run: | - docker pull ${{ steps.vars.outputs.docker_tag_candidate_base }} || true - docker pull ${{ steps.vars.outputs.docker_tag }} || true - - - name: Create Build Image - if: ${{ contains(matrix.event_name, github.event_name) && needs.preamble.outputs.platform }} - uses: docker/build-push-action@v4 - with: - builder: ${{ steps.buildx.outputs.name }} - file: ${{ needs.preamble.outputs.folder_platform }}/dockerfiles/vcpkg/${{ matrix.os }}.dockerfile - context: ${{ needs.preamble.outputs.folder_platform }}/dockerfiles/vcpkg - load: true - build-args: | - VCPKG_REF=${{ steps.vars.outputs.vcpkg_sha_short }} - tags: | - ${{ steps.vars.outputs.docker_tag_candidate_base }} - cache-from: | - type=registry,ref=${{ steps.vars.outputs.docker_tag_candidate_base }} - type=registry,ref=${{ steps.vars.outputs.docker_tag }} - cache-to: type=inline - - # Communtiy Build - - name: CMake Packages - if: ${{ !matrix.ln && !matrix.container && contains(matrix.event_name, github.event_name) && needs.preamble.outputs.platform }} - run: | - mkdir -p ${{ needs.preamble.outputs.folder_build }} - declare -a plugins - if [ ${{ needs.preamble.outputs.include_plugins }} == "ON" ]; then - plugins=("CASSANDRAEMBED" "COUCHBASEEMBED" "ECLBLAS" "H3" "JAVAEMBED" "KAFKA" "MEMCACHED" "MONGODBEMBED" "MYSQLEMBED" "NLP" "PARQUETEMBED" "REDIS" "REMBED" "SQLITE3EMBED" "SQS" "PLATFORM") - else - plugins=("PLATFORM") - fi - for plugin in "${plugins[@]}"; do - sudo rm -f ${{ needs.preamble.outputs.folder_build }}/CMakeCache.txt - sudo rm -rf ${{ needs.preamble.outputs.folder_build }}/CMakeFiles - docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_build }} ${{ steps.vars.outputs.docker_tag_candidate_base }} "\ - 
cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/build ${{ needs.preamble.outputs.cmake_docker_config }} -D$plugin=ON -DCONTAINERIZED=OFF -DCPACK_STRIP_FILES=OFF ${{ matrix.cmake_options_extra }} && \ - cmake --build /hpcc-dev/build --parallel $(nproc) --target package" - # Disabled as not currently needed --- - # docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_build }} ${{ steps.vars.outputs.docker_tag_candidate_base }} "cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/build ${{ needs.preamble.outputs.cmake_docker_config }} -D$plugin=ON -DCONTAINERIZED=OFF -DCPACK_STRIP_FILES=ON" - # docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_build }} ${{ steps.vars.outputs.docker_tag_candidate_base }} "cmake --build /hpcc-dev/build --parallel $(nproc) --target package" - done - - - name: CMake Containerized Packages - if: ${{ matrix.container && contains(matrix.event_name, github.event_name) && needs.preamble.outputs.platform }} - run: | - mkdir -p ${{ needs.preamble.outputs.folder_build }} - sudo rm -f ${{ needs.preamble.outputs.folder_build }}/CMakeCache.txt - sudo rm -rf ${{ needs.preamble.outputs.folder_build }}/CMakeFiles - docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_build }} ${{ steps.vars.outputs.docker_tag_candidate_base }} "\ - cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/build ${{ needs.preamble.outputs.cmake_docker_config }} -DINCLUDE_PLUGINS=${{ needs.preamble.outputs.include_plugins }} -DCONTAINERIZED=ON -DSUPPRESS_REMBED=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_SPARK=ON -DCPACK_STRIP_FILES=OFF ${{ matrix.cmake_options_extra }} && \ - cmake --build /hpcc-dev/build --parallel $(nproc) --target package" - - # Internal Build --- - - name: CMake LN Packages - if: ${{ matrix.ln && !matrix.container && contains(matrix.event_name, github.event_name) && needs.preamble.outputs.platform }} - 
run: | - mkdir -p ${{ needs.preamble.outputs.folder_build }} - sudo rm -f ${{ needs.preamble.outputs.folder_build }}/CMakeCache.txt - sudo rm -rf ${{ needs.preamble.outputs.folder_build }}/CMakeFiles - docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_ln }} --mount ${{ needs.preamble.outputs.mount_build }} ${{ steps.vars.outputs.docker_tag_candidate_base }} "\ - cmake -S /hpcc-dev/LN -B /hpcc-dev/build -DHPCC_SOURCE_DIR=/hpcc-dev/HPCC-Platform ${{ needs.preamble.outputs.cmake_docker_config }} -DINCLUDE_PLUGINS=${{ needs.preamble.outputs.include_plugins }} -DCONTAINERIZED=OFF -DSUPPRESS_REMBED=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_SPARK=ON -DCPACK_STRIP_FILES=OFF ${{ matrix.cmake_options_extra }} && \ - cmake --build /hpcc-dev/build --parallel $(nproc) --target package" - # Disabled as not currently needed --- - # docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_ln }} --mount ${{ needs.preamble.outputs.mount_build }} ${{ steps.vars.outputs.docker_tag_candidate_base }} "cmake -S /hpcc-dev/LN -B /hpcc-dev/build -DHPCC_SOURCE_DIR=/hpcc-dev/HPCC-Platform ${{ needs.preamble.outputs.cmake_docker_config }} -DINCLUDE_PLUGINS=${{ needs.preamble.outputs.include_plugins }} -DCONTAINERIZED=OFF -DSUPPRESS_REMBED=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_SPARK=ON -DCPACK_STRIP_FILES=ON" - # docker run --rm --mount ${{ needs.preamble.outputs.mount_platform }} --mount ${{ needs.preamble.outputs.mount_ln }} --mount ${{ needs.preamble.outputs.mount_build }} ${{ steps.vars.outputs.docker_tag_candidate_base }} "cmake --build /hpcc-dev/build --parallel $(nproc) --target package" - - # Common --- - - name: Upload error logs - if: ${{ failure() || cancelled() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ matrix.os }}-${{ matrix.ln }}-${{ matrix.container }}-logs - path: ${{ needs.preamble.outputs.folder_build }}/**/*.log - - build-bare-metal: - name: Build Bare 
Metal - # if: github.repository == 'hpcc-systems/HPCC-Platform' - needs: preamble - strategy: - matrix: - include: - - os: "ubuntu-20.04" - mono: "mono" - sudo: "sudo" - cmake_config_options: "-DCMAKE_BUILD_TYPE=RelWithDebInfo -DINCLUDE_PLUGINS=${{ needs.preamble.outputs.include_plugins }} -DSUPPRESS_V8EMBED=ON -DSUPPRESS_REMBED=ON" - cmake_build_options: "-- -j$(nproc) -k" - event_name: "schedule" - - os: "ubuntu-22.04" - mono: "mono" - sudo: "sudo" - cmake_config_options: "-DCMAKE_BUILD_TYPE=RelWithDebInfo -DINCLUDE_PLUGINS=${{ needs.preamble.outputs.include_plugins }} -DSUPPRESS_V8EMBED=ON -DSUPPRESS_REMBED=ON" - cmake_build_options: "-- -j$(nproc) -k" - event_name: "pull_request" - - os: "windows-2019" - mono: "" - sudo: "" - cmake_config_options: '-T host=x64 -A x64 -DUSE_OPTIONAL=OFF -DCLIENTTOOLS_ONLY=ON -DUSE_AZURE=OFF -DUSE_JAVA=OFF -DUSE_OPENLDAP=OFF' - cmake_build_options: "--config Release --parallel $NUMBER_OF_PROCESSORS" - event_name: "schedule" - - os: "windows-2022" - mono: "" - sudo: "" - cmake_config_options: '-T host=x64 -A x64 -DUSE_OPTIONAL=OFF -DCLIENTTOOLS_ONLY=ON -DUSE_AZURE=OFF -DUSE_CASSANDRA=OFF -DUSE_JAVA=OFF -DUSE_OPENLDAP=OFF' - cmake_build_options: "--config Release --parallel $NUMBER_OF_PROCESSORS" - event_name: "pull_request" - - os: "macos-11" - mono: "mono" - sudo: "sudo" - cmake_config_options: "-DCMAKE_BUILD_TYPE=Release -DUSE_OPTIONAL=OFF -DCLIENTTOOLS_ONLY=ON -DUSE_AZURE=OFF -DUSE_JAVA=OFF -DUSE_OPENLDAP=OFF" - cmake_build_options: "--parallel $(nproc)" - event_name: "schedule" - - os: "macos-12" - mono: "mono" - sudo: "sudo" - cmake_config_options: "-DCMAKE_BUILD_TYPE=Release -DUSE_OPTIONAL=OFF -DCLIENTTOOLS_ONLY=ON -DUSE_AZURE=OFF -DUSE_CASSANDRA=OFF -DUSE_JAVA=OFF -DUSE_OPENLDAP=OFF" - cmake_build_options: "--parallel $(nproc)" - event_name: "pull_request" - fail-fast: false - - runs-on: ${{ matrix.os }} - steps: - - name: Print vars - shell: "bash" - run: | - echo ${{ matrix.os }} - echo "${{ toJSON(needs.preamble.outputs) 
}})" - - - name: Ubuntu Dependencies - if: ${{ contains(matrix.event_name, github.event_name) && needs.preamble.outputs.platform && contains(matrix.os, 'ubuntu') }} - shell: "bash" - run: | - sudo apt-get update -y - sudo apt-get install -yq --no-install-recommends \ - apt-transport-https \ - autoconf \ - autoconf-archive \ - automake \ - autotools-dev \ - binutils-dev \ - bison \ - build-essential \ - ca-certificates \ - curl \ - dirmngr \ - flex \ - git \ - gnupg \ - groff-base \ - libtool \ - pkg-config \ - software-properties-common \ - tar \ - unzip \ - uuid-dev \ - zip - - - name: Ubuntu Mono Dependencies - if: ${{ matrix.os == 'ubuntu-22.04' && needs.preamble.outputs.platform && contains(matrix.os, 'ubuntu') }} - shell: "bash" - run: | - sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 3FA7E0328081BFF6A14DA29AA6A19B38D3D831EF - echo "deb https://download.mono-project.com/repo/ubuntu stable-focal main" | sudo tee /etc/apt/sources.list.d/mono-official-stable.list - sudo apt-get update -y - sudo apt-get -yq install -f mono-complete - - - name: OSX Dependencies - if: ${{ contains(matrix.os, 'macos') && contains(matrix.event_name, github.event_name) && needs.preamble.outputs.platform }} - shell: "bash" - run: | - brew install bison flex pkg-config autoconf autoconf-archive automake libtool cmake openjdk@11 - - - name: "Remove builtin vcpkg (old)" - if: ${{ contains(matrix.event_name, github.event_name) && needs.preamble.outputs.platform }} - working-directory: . 
- shell: "bash" - run: | - ${{ matrix.sudo }} rm -rf "$VCPKG_INSTALLATION_ROOT" - - - name: Checkout HPCC-Platform - if: ${{ contains(matrix.event_name, github.event_name) && needs.preamble.outputs.platform }} - uses: actions/checkout@v3 - with: - ref: ${{ needs.preamble.outputs.community_ref }} - submodules: recursive - path: ./HPCC-Platform - - - name: "vcpkg Bootstrap" - if: ${{ contains(matrix.event_name, github.event_name) && needs.preamble.outputs.platform }} - shell: "bash" - run: | - ./HPCC-Platform/vcpkg/bootstrap-vcpkg.sh - - - name: "Setup NuGet credentials" - if: ${{ contains(matrix.event_name, github.event_name) && needs.preamble.outputs.platform }} - shell: "bash" - run: | - ${{ matrix.mono }} `./HPCC-Platform/vcpkg/vcpkg fetch nuget | tail -n 1` \ - sources add \ - -name "GitHub" \ - -source "https://nuget.pkg.github.com/hpcc-systems/index.json" \ - -storepasswordincleartext \ - -username "${{ github.actor }}" \ - -password "${{ secrets.GITHUB_TOKEN }}" - - - name: CMake Packages - if: ${{ contains(matrix.event_name, github.event_name) && needs.preamble.outputs.platform }} - shell: "bash" - run: | - mkdir -p ./build - cmake -S ./HPCC-Platform -B ./build ${{ matrix.cmake_config_options }} - cmake --build ./build ${{ matrix.cmake_build_options }} - - - name: Upload error logs - if: ${{ failure() || cancelled() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ matrix.os }}--${{ matrix.ln }}-${{ matrix.container }}-logs - path: ./build/**/*.log + build-workflow-dispatch: + if: ${{ contains('workflow_dispatch', github.event_name) }} + uses: ./.github/workflows/build-docker.yml + with: + os: ${{ inputs.os }} + ln: ${{ inputs.ln }} + upload-package: true + asset-name: 'docker-package' + secrets: inherit + + test-workflow-dispatch: + if: ${{ contains('workflow_dispatch', github.event_name) && inputs.smoketest == true }} + needs: build-workflow-dispatch + uses: ./.github/workflows/test-smoke-gh_runner.yml + with: + os: ${{ inputs.os }} + asset-name: 
'docker-package' + secrets: inherit + + build-docker-ubuntu-23_10: + if: ${{ contains('pull_request,push', github.event_name) }} + uses: ./.github/workflows/build-docker.yml + with: + os: ubuntu-23.10 + single-package: true + containerized: false + secrets: inherit + + build-docker-ubuntu-22_04: + if: ${{ contains('pull_request,push', github.event_name) }} + uses: ./.github/workflows/build-docker.yml + with: + os: ubuntu-22.04 + single-package: true + containerized: false + upload-package: true + asset-name: 'docker-package' + secrets: inherit + + test-smoke-docker-ubuntu-22_04: + if: ${{ contains('pull_request,push', github.event_name) }} + needs: build-docker-ubuntu-22_04 + uses: ./.github/workflows/test-smoke-gh_runner.yml + with: + os: ubuntu-22.04 + asset-name: 'docker-package' + secrets: inherit + + test-unit-docker-ubuntu-22_04: + if: ${{ contains('pull_request,push', github.event_name) }} + needs: build-docker-ubuntu-22_04 + uses: ./.github/workflows/test-unit-gh_runner.yml + with: + os: ubuntu-22.04 + asset-name: 'docker-package' + secrets: inherit + + test-ui-docker-ubuntu-22_04: + if: ${{ contains('pull_request,push', github.event_name) }} + needs: build-docker-ubuntu-22_04 + uses: ./.github/workflows/test-ui-gh_runner.yml + with: + os: ubuntu-22.04 + asset-name: 'docker-package' + secrets: inherit + + build-docker-ubuntu-20_04: + if: ${{ contains('schedule,push', github.event_name) }} + uses: ./.github/workflows/build-docker.yml + with: + os: ubuntu-20.04 + single-package: true + containerized: false + secrets: inherit + + build-docker-centos-8: + if: ${{ contains('schedule,push', github.event_name) }} + uses: ./.github/workflows/build-docker.yml + with: + os: centos-8 + single-package: true + containerized: false + secrets: inherit + + build-docker-centos-7: + if: ${{ contains('pull_request,push', github.event_name) }} + uses: ./.github/workflows/build-docker.yml + with: + os: centos-7 + single-package: true + containerized: false + 
cmake-configuration-ex: "-DVCPKG_TARGET_TRIPLET=x64-centos-7-dynamic" + secrets: inherit + + build-docker-amazonlinux: + if: ${{ contains('schedule,push', github.event_name) }} + uses: ./.github/workflows/build-docker.yml + with: + os: amazonlinux + single-package: true + containerized: false + cmake-configuration-ex: "-DVCPKG_TARGET_TRIPLET=x64-amazonlinux-dynamic" + secrets: inherit + + build-gh_runner-ubuntu-22_04: + if: ${{ contains('pull_request,push', github.event_name) }} + uses: ./.github/workflows/build-gh_runner.yml + with: + os: ubuntu-22.04 + single-package: true + containerized: false + upload-package: true + asset-name: 'gh_runner-package' + secrets: inherit + + test-smoke-gh_runner-ubuntu-22_04: + if: ${{ contains('pull_request,push', github.event_name) }} + needs: build-gh_runner-ubuntu-22_04 + uses: ./.github/workflows/test-smoke-gh_runner.yml + with: + os: ubuntu-22.04 + asset-name: 'gh_runner-package' + secrets: inherit + + test-unit-gh_runner-ubuntu-22_04: + if: ${{ contains('pull_request,push', github.event_name) }} + needs: build-gh_runner-ubuntu-22_04 + uses: ./.github/workflows/test-unit-gh_runner.yml + with: + os: ubuntu-22.04 + asset-name: 'gh_runner-package' + secrets: inherit + + test-ui-gh_runner-ubuntu-22_04: + if: ${{ contains('pull_request,push', github.event_name) }} + needs: build-gh_runner-ubuntu-22_04 + uses: ./.github/workflows/test-ui-gh_runner.yml + with: + os: ubuntu-22.04 + asset-name: 'gh_runner-package' + secrets: inherit + + build-gh_runner-ubuntu-20_04: + if: ${{ contains('schedule,push', github.event_name) }} + uses: ./.github/workflows/build-gh_runner.yml + with: + os: ubuntu-20.04 + single-package: true + containerized: false + secrets: inherit + + build-gh_runner-windows-2022: + if: ${{ contains('pull_request,push', github.event_name) }} + uses: ./.github/workflows/build-gh_runner.yml + with: + os: windows-2022 + single-package: true + containerized: false + cmake-configuration: '-T host=x64 -A x64 -DUSE_OPTIONAL=OFF 
-DCLIENTTOOLS_ONLY=ON -DUSE_AZURE=OFF -DUSE_CASSANDRA=OFF -DUSE_JAVA=OFF -DUSE_OPENLDAP=OFF' + secrets: inherit + + build-gh_runner-windows-2019: + if: ${{ contains('schedule,push', github.event_name) }} + uses: ./.github/workflows/build-gh_runner.yml + with: + os: windows-2019 + single-package: true + containerized: false + cmake-configuration: '-T host=x64 -A x64 -DUSE_OPTIONAL=OFF -DCLIENTTOOLS_ONLY=ON -DUSE_AZURE=OFF -DUSE_JAVA=OFF -DUSE_OPENLDAP=OFF' + secrets: inherit + + build-gh_runner-macos-12: + if: ${{ contains('pull_request,push', github.event_name) }} + uses: ./.github/workflows/build-gh_runner.yml + with: + os: macos-12 + single-package: true + containerized: false + build-type: 'Release' + cmake-configuration: '-DUSE_OPTIONAL=OFF -DCLIENTTOOLS_ONLY=ON -DUSE_AZURE=OFF -DUSE_CASSANDRA=OFF -DUSE_JAVA=OFF -DUSE_OPENLDAP=OFF' + secrets: inherit + + build-gh_runner-macos-11: + if: ${{ contains('schedule,push', github.event_name) }} + uses: ./.github/workflows/build-gh_runner.yml + with: + os: macos-11 + single-package: true + containerized: false + build-type: 'Release' + cmake-configuration: '-DUSE_OPTIONAL=OFF -DCLIENTTOOLS_ONLY=ON -DUSE_AZURE=OFF -DUSE_CASSANDRA=OFF -DUSE_JAVA=OFF -DUSE_OPENLDAP=OFF' + secrets: inherit diff --git a/.github/workflows/test-smoke-gh_runner.yml b/.github/workflows/test-smoke-gh_runner.yml new file mode 100644 index 00000000000..2d65c3b9199 --- /dev/null +++ b/.github/workflows/test-smoke-gh_runner.yml @@ -0,0 +1,147 @@ +name: Smoketest Package (gh-runner) + +on: + workflow_call: + inputs: + os: + type: string + description: 'Operating System' + required: false + default: 'ubuntu-22.04' + asset-name: + type: string + description: 'Asset Name' + required: false + default: 'build-docker-package' + dependencies: + type: string + description: 'Dependencies' + required: false + default: 'bison flex build-essential binutils-dev curl lsb-release libcppunit-dev python3-dev default-jdk r-base-dev r-cran-rcpp r-cran-rinside 
r-cran-inline pkg-config libtool autotools-dev automake git cmake xmlstarlet' + +jobs: + + main: + runs-on: ${{ inputs.os }} + strategy: + fail-fast: false + matrix: + engine: ['hthor', 'thor', 'roxie'] + match: + - pattern: '[a-d]*.ecl' + - pattern: '[e-h]*.ecl' + - pattern: '[i-l]*.ecl' + - pattern: '[m-q]*.ecl' + - pattern: '[r-u]*.ecl' + exclude: teststdlibrary.ecl + - pattern: '[v-z]*.ecl' + include: + - engine: 'hthor' + match: + pattern: teststdlibrary.ecl + - engine: 'thor' + match: + pattern: teststdlibrary.ecl + - engine: 'roxie' + match: + pattern: teststdlibrary.ecl + + steps: + + - name: Free additional disk space (remove Android SDK + Tools) + run: | + sudo rm -rf /usr/local/lib/android + + - name: Download Package + uses: actions/download-artifact@v3 + with: + name: ${{ inputs.asset-name }} + path: ${{ inputs.asset-name }} + + - name: Download Support Files + uses: actions/download-artifact@v3 + with: + name: ${{ inputs.asset-name }}-support-files + path: ${{ inputs.asset-name }}-support-files + + - name: vars + id: vars + run: | + echo "matrix-setname=$(echo -n ${{ matrix.match.pattern }} | tr -c \"[:alnum:]\" _)" >> $GITHUB_OUTPUT + + - name: Install Dependencies + shell: "bash" + run: | + sudo apt-get update + sudo apt-get install -y ${{ inputs.dependencies }} + sudo apt-get install -y gdb + + curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash - + sudo apt-get install -y nodejs + + echo "node.js version = $(node --version), npm version = $(npm --version), release = $(lsb_release -sc)" + + - name: Install Package + shell: "bash" + run: | + sudo apt-get install -y -f ./${{ inputs.asset-name }}/*.deb + sudo chown -R $USER:$USER /opt/HPCCSystems + + sudo xmlstarlet ed -L -u 'Environment/Software/ThorCluster/@slavesPerNode' -v 2 -u 'Environment/Software/ThorCluster/@channelsPerSlave' -v 1 /etc/HPCCSystems/environment.xml + + chmod +x ./${{ inputs.asset-name }}-support-files/* + sudo cp ./${{ inputs.asset-name }}-support-files/* 
/opt/HPCCSystems/bin + + - name: Start HPCC-Platform + shell: "bash" + run: | + export LANG="en_US.UTF-8" + sudo update-locale + sudo /etc/init.d/hpcc-init start + + - name: Run Setup + working-directory: /opt/HPCCSystems/testing/regress + shell: "bash" + run: | + # todo: run once before matrix + /opt/HPCCSystems/bin/timeoutcmd $((8 * 60)) \ + ./ecl-test setup --preAbort '/opt/HPCCSystems/bin/smoketest-preabort.sh' --pq 2 --generateStackTrace + grep Failure: /home/runner/HPCCSystems-regression/log/setup_*.log + if [[ "$(grep -ohP '(?<=^Failure: )[0-9]+$' /home/runner/HPCCSystems-regression/log/setup_*.log | awk '{s+=$1} END {print s}')" -gt 0 ]]; then exit 1; fi + + - name: Select Tests + id: select-tests + working-directory: /opt/HPCCSystems/testing/regress + shell: "bash" + run: | + (cd ecl; ls ${{ matrix.match.pattern }}) > matches.tmp + echo queries are: + cat matches.tmp + if [[ -z "${{ matrix.match.exclude }}" ]] + then + queries="$(cat matches.tmp | tr '\n' ' ')" + else + queries="$(cd ecl; ls ${{ matrix.match.exclude }} | grep -v -f - ../matches.tmp | tr '\n' ' ')" + fi + echo queries after exclude: ${queries} + echo "queries=${queries}" >> $GITHUB_OUTPUT + + - name: Run Tests + working-directory: /opt/HPCCSystems/testing/regress + shell: "bash" + run: | + # force regression suite to timeout after 60 minutes, so it captures ZAP report of any inflight hung queries + /opt/HPCCSystems/bin/timeoutcmd $((60 * 60)) \ + ./ecl-test query --preAbort '/opt/HPCCSystems/bin/smoketest-preabort.sh' --pq 2 --target ${{ matrix.engine }} --excludeclass python2,embedded-r,embedded-js,3rdpartyservice,mongodb --generateStackTrace ${{ steps.select-tests.outputs.queries }} + grep Failure: /home/runner/HPCCSystems-regression/log/${{ matrix.engine }}.*.log + if [[ "$(grep -oP '(?<=^Failure: )[0-9]+$' /home/runner/HPCCSystems-regression/log/${{ matrix.engine }}.*.log)" -gt 0 ]]; then exit 1; fi + + - name: regression-run-logs-artifact + if: ${{ failure() || cancelled() }} + uses: 
actions/upload-artifact@v3 + with: + name: test-smoke-${{ inputs.asset-name }}-${{ matrix.engine }}-${{ steps.vars.outputs.matrix-setname }} + path: | + /var/log/HPCCSystems + /home/runner/HPCCSystems-regression + if-no-files-found: error diff --git a/.github/workflows/test-ui-gh_runner.yml b/.github/workflows/test-ui-gh_runner.yml new file mode 100644 index 00000000000..ec2a6b79235 --- /dev/null +++ b/.github/workflows/test-ui-gh_runner.yml @@ -0,0 +1,124 @@ +name: UI test Package (gh-runner) + +on: + workflow_call: + inputs: + os: + type: string + description: 'Operating System' + required: false + default: 'ubuntu-22.04' + asset-name: + type: string + description: 'Asset Name' + required: false + default: 'build-docker-package' + dependencies: + type: string + description: 'Dependencies' + required: false + default: 'bison flex build-essential binutils-dev curl lsb-release libcppunit-dev python3-dev default-jdk r-base-dev r-cran-rcpp r-cran-rinside r-cran-inline pkg-config libtool autotools-dev automake git cmake xmlstarlet' + +jobs: + + main: + timeout-minutes: 30 # the ui-tests run step has its own timeout, however the job can sometimes take time to download dependencies + runs-on: ${{ inputs.os }} + steps: + + - name: Free additional disk space (remove Android SDK + Tools) + run: | + sudo rm -rf /usr/local/lib/android + + - name: Download UI Test Files + uses: actions/download-artifact@v3 + with: + name: ${{ inputs.asset-name }}-ui_test-files + path: ${{ inputs.asset-name }}-ui_test-files + + - name: Check-ECLWatch-UI-Test-Directory + id: check + run: | + if [[ ! -d ${{ inputs.asset-name }}-ui_test-files ]] + then + echo "ECLWatch UI ${{ inputs.asset-name }}-ui_test-files directory missing." + else + javaFilesCount=$(find ${{ inputs.asset-name }}-ui_test-files/ -iname '*.java' -type f -print | wc -l ) + echo "Number of test java files is $javaFilesCount" + if [[ ${javaFilesCount} -eq 0 ]] + then + echo "No java files, do nothing." 
+ else + echo "runtests=true" >> $GITHUB_OUTPUT + fi + fi + + - name: Install Dependencies + if: steps.check.outputs.runtests + shell: "bash" + run: | + sudo apt-get update + sudo apt-get install -y git wget net-tools + sudo apt-get install -y tzdata unzip xvfb libxi6 + sudo apt-get install -y default-jdk + sudo apt-get install -y ${{ inputs.dependencies }} + sudo apt-get install -y gdb + + curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash - + sudo apt-get install -y nodejs + + echo "node.js version = $(node --version), npm version = $(npm --version), release = $(lsb_release -sc)" + + - name: UI-Test-Prerequisites + if: steps.check.outputs.runtests + run: | + wget https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb + sudo apt-get install -y ./google-chrome-stable_current_amd64.deb + wget https://chromedriver.storage.googleapis.com/2.41/chromedriver_linux64.zip + unzip chromedriver_linux64.zip + sudo mv chromedriver /usr/bin/chromedriver + sudo chown root:root /usr/bin/chromedriver + sudo chmod +x /usr/bin/chromedriver + wget https://selenium-release.storage.googleapis.com/3.141/selenium-server-standalone-3.141.59.jar + wget http://www.java2s.com/Code/JarDownload/testng/testng-6.8.7.jar.zip + unzip testng-6.8.7.jar.zip + + - name: Download Package + if: steps.check.outputs.runtests + uses: actions/download-artifact@v3 + with: + name: ${{ inputs.asset-name }} + path: ${{ inputs.asset-name }} + + - name: Install Package + if: steps.check.outputs.runtests + shell: "bash" + run: | + sudo apt-get install -y -f ./${{ inputs.asset-name }}/*.deb + sudo chown -R $USER:$USER /opt/HPCCSystems + + sudo xmlstarlet ed -L -u 'Environment/Software/ThorCluster/@slavesPerNode' -v 2 -u 'Environment/Software/ThorCluster/@channelsPerSlave' -v 1 /etc/HPCCSystems/environment.xml + + - name: run + timeout-minutes: 10 # generous, current runtime is ~1min, this should be increased if new tests are added + if: steps.check.outputs.runtests + run: | + export 
LANG="en_US.UTF-8"
+          sudo update-locale
+          source /opt/HPCCSystems/sbin/hpcc_setenv
+          sudo /etc/init.d/hpcc-init start
+          export CLASSPATH=".:$(realpath selenium-server-standalone-3.141.59.jar):$(realpath testng-6.8.7.jar)"
+          pushd ${{ inputs.asset-name }}-ui_test-files
+          ./run.sh tests http://localhost:8010 > eclWatchUiTest.log 2>&1
+          retCode=$?
+          echo "UI test done"
+          [[ $retCode -ne 0 ]] && exit 1
+          popd
+
+      - name: eclwatch-ui-test-logs-artifact
+        if: ${{ failure() || cancelled() }}
+        uses: actions/upload-artifact@v3
+        with:
+          name: ECLWatchUiTest
+          path: ${{ inputs.asset-name }}-ui_test-files/eclWatchUiTest.log
+          if-no-files-found: error
diff --git a/.github/workflows/test-unit-gh_runner.yml b/.github/workflows/test-unit-gh_runner.yml
new file mode 100644
index 00000000000..904187785e8
--- /dev/null
+++ b/.github/workflows/test-unit-gh_runner.yml
@@ -0,0 +1,78 @@
+name: Unittest Package (gh-runner)
+
+on:
+  workflow_call:
+    inputs:
+      os:
+        type: string
+        description: 'Operating System'
+        required: false
+        default: 'ubuntu-22.04'
+      asset-name:
+        type: string
+        description: 'Asset Name'
+        required: false
+        default: 'build-docker-package'
+      dependencies:
+        type: string
+        description: 'Dependencies'
+        required: false
+        default: 'bison flex build-essential binutils-dev curl lsb-release libcppunit-dev python3-dev default-jdk r-base-dev r-cran-rcpp r-cran-rinside r-cran-inline pkg-config libtool autotools-dev automake git cmake xmlstarlet'
+
+jobs:
+
+  main:
+    runs-on: ${{ inputs.os }}
+    steps:
+
+      - name: Free additional disk space (remove Android SDK + Tools)
+        run: |
+          sudo rm -rf /usr/local/lib/android
+
+      - name: Download Package
+        uses: actions/download-artifact@v3
+        with:
+          name: ${{ inputs.asset-name }}
+          path: ${{ inputs.asset-name }}
+
+      - name: Download Support Files
+        uses: actions/download-artifact@v3
+        with:
+          name: ${{ inputs.asset-name }}-support-files
+          path: ${{ inputs.asset-name }}-support-files
+
+      - name: Install Dependencies
+        shell: "bash"
+        run: |
+          
sudo apt-get update + sudo apt-get install -y ${{ inputs.dependencies }} + sudo apt-get install -y gdb + + curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash - + sudo apt-get install -y nodejs + + echo "node.js version = $(node --version), npm version = $(npm --version), release = $(lsb_release -sc)" + + - name: Install Package + shell: "bash" + run: | + sudo apt-get install -y -f ./${{ inputs.asset-name }}/*.deb + sudo chown -R $USER:$USER /opt/HPCCSystems + + sudo xmlstarlet ed -L -u 'Environment/Software/ThorCluster/@slavesPerNode' -v 2 -u 'Environment/Software/ThorCluster/@channelsPerSlave' -v 1 /etc/HPCCSystems/environment.xml + + chmod +x ./${{ inputs.asset-name }}-support-files/* + sudo cp ./${{ inputs.asset-name }}-support-files/* /opt/HPCCSystems/bin + + - name: Start HPCC-Platform + shell: "bash" + run: | + export LANG="en_US.UTF-8" + sudo update-locale + sudo /etc/init.d/hpcc-init start + + - name: Run Tests + timeout-minutes: 10 # generous, expected time is approx 1 min. 
+ shell: "bash" + run: | + /opt/HPCCSystems/bin/unittests diff --git a/dockerfiles/vcpkg/amazonlinux.dockerfile b/dockerfiles/vcpkg/amazonlinux.dockerfile index 5c1aaf941e5..190badf60f4 100644 --- a/dockerfiles/vcpkg/amazonlinux.dockerfile +++ b/dockerfiles/vcpkg/amazonlinux.dockerfile @@ -6,6 +6,7 @@ RUN amazon-linux-extras install java-openjdk11 && yum install -y \ python3-devel \ epel-release RUN yum install -y \ + ccache \ R-core-devel \ R-Rcpp-devel \ R-RInside-devel diff --git a/dockerfiles/vcpkg/build.sh b/dockerfiles/vcpkg/build.sh index 5c3926df63d..2a624617b69 100755 --- a/dockerfiles/vcpkg/build.sh +++ b/dockerfiles/vcpkg/build.sh @@ -29,11 +29,11 @@ docker login -u $DOCKER_USERNAME -p $DOCKER_PASSWORD CMAKE_OPTIONS="-G Ninja -DCMAKE_BUILD_TYPE=RelWithDebInfo -DVCPKG_FILES_DIR=/hpcc-dev -DCPACK_THREADS=0 -DUSE_OPTIONAL=OFF -DINCLUDE_PLUGINS=ON -DSUPPRESS_V8EMBED=ON" function doBuild() { - docker pull "hpccsystems/platform-build-base-$1:$VCPKG_REF" || true - docker pull "hpccsystems/platform-build-$1:$VCPKG_REF" || true - docker pull "hpccsystems/platform-build-$1:$GITHUB_BRANCH" || true + # docker pull "hpccsystems/platform-build-base-$1:$VCPKG_REF" || true + # docker pull "hpccsystems/platform-build-$1:$VCPKG_REF" || true + # docker pull "hpccsystems/platform-build-$1:$GITHUB_BRANCH" || true - docker build --progress plain --pull --rm -f "$SCRIPT_DIR/$1.dockerfile" \ + docker build --progress plain --rm -f "$SCRIPT_DIR/$1.dockerfile" \ --build-arg DOCKER_NAMESPACE=$DOCKER_USERNAME \ --build-arg VCPKG_REF=$VCPKG_REF \ --cache-from hpccsystems/platform-build-$1:$VCPKG_REF \ @@ -51,9 +51,14 @@ function doBuild() { elif [ "$1" == "amazonlinux" ]; then CMAKE_OPTIONS_EXTRA="-DVCPKG_TARGET_TRIPLET=x64-amazonlinux-dynamic" fi - docker run --rm --mount source="$(pwd)",target=/hpcc-dev/HPCC-Platform,type=bind,consistency=cached hpccsystems/platform-build-$1:$VCPKG_REF \ - "cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/HPCC-Platform/build-$1 ${CMAKE_OPTIONS} 
${CMAKE_OPTIONS_EXTRA} && \ - cmake --build /hpcc-dev/HPCC-Platform/build-$1 --target package --parallel $(nproc)" + mkdir -p $HOME/.ccache + docker run --rm \ + --mount source="$(pwd)",target=/hpcc-dev/HPCC-Platform,type=bind,consistency=cached \ + --mount source="$HOME/.ccache",target=/root/.ccache,type=bind,consistency=cached \ + hpccsystems/platform-build-$1:$VCPKG_REF \ + "cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/HPCC-Platform/build-$1 ${CMAKE_OPTIONS} ${CMAKE_OPTIONS_EXTRA} -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache && \ + cmake --build /hpcc-dev/HPCC-Platform/build-$1 --target install --parallel $(nproc) && \ + /etc/init.d/hpcc-init start" sudo chown -R $(id -u):$(id -g) ./build-$1 # docker run -it --mount source="$(pwd)",target=/hpcc-dev/HPCC-Platform,type=bind,consistency=cached build-ubuntu-22.04:latest bash diff --git a/dockerfiles/vcpkg/centos-7.dockerfile b/dockerfiles/vcpkg/centos-7.dockerfile index 15524db7e21..7369d75cdcc 100644 --- a/dockerfiles/vcpkg/centos-7.dockerfile +++ b/dockerfiles/vcpkg/centos-7.dockerfile @@ -1,12 +1,14 @@ ARG VCPKG_REF=latest FROM hpccsystems/platform-build-base-centos-7:$VCPKG_REF -RUN yum install -y \ +RUN yum makecache && yum install -y \ epel-release \ java-11-openjdk-devel \ python3-devel \ wget && \ - yum update -y && yum install -y R-core-devel && \ + yum update -y && yum install -y \ + ccache \ + R-core-devel && \ yum -y clean all && rm -rf /var/cache ENV Rcpp_package=Rcpp_0.12.19.tar.gz diff --git a/dockerfiles/vcpkg/centos-8.dockerfile b/dockerfiles/vcpkg/centos-8.dockerfile index 8d419588d73..35d54ef9567 100644 --- a/dockerfiles/vcpkg/centos-8.dockerfile +++ b/dockerfiles/vcpkg/centos-8.dockerfile @@ -4,9 +4,9 @@ FROM hpccsystems/platform-build-base-centos-8:$VCPKG_REF RUN yum remove -y python3.11 java-1.* && yum install -y \ java-11-openjdk-devel \ python3-devel \ - epel-release - -RUN yum install -y \ + epel-release && \ + yum update -y && yum install -y \ + ccache \ 
R-core-devel \ R-Rcpp-devel \ R-RInside-devel diff --git a/dockerfiles/vcpkg/ubuntu-20.04.dockerfile b/dockerfiles/vcpkg/ubuntu-20.04.dockerfile index 7211cccebde..c5d0585c472 100644 --- a/dockerfiles/vcpkg/ubuntu-20.04.dockerfile +++ b/dockerfiles/vcpkg/ubuntu-20.04.dockerfile @@ -4,6 +4,7 @@ FROM hpccsystems/platform-build-base-ubuntu-20.04:$VCPKG_REF ENV RInside_package=RInside_0.2.14.tar.gz RUN apt-get update && apt-get install --no-install-recommends -y \ + ccache \ default-jdk \ python3-dev \ wget \ diff --git a/dockerfiles/vcpkg/ubuntu-22.04.dockerfile b/dockerfiles/vcpkg/ubuntu-22.04.dockerfile index 768830ad711..9ff881b9bb3 100644 --- a/dockerfiles/vcpkg/ubuntu-22.04.dockerfile +++ b/dockerfiles/vcpkg/ubuntu-22.04.dockerfile @@ -2,6 +2,7 @@ ARG VCPKG_REF=latest FROM hpccsystems/platform-build-base-ubuntu-22.04:$VCPKG_REF RUN apt-get update && apt-get install --no-install-recommends -y \ + ccache \ default-jdk \ ninja-build \ python3-dev \ diff --git a/dockerfiles/vcpkg/ubuntu-23.10.dockerfile b/dockerfiles/vcpkg/ubuntu-23.10.dockerfile index e154dbe5bd5..a9bbac10c3d 100644 --- a/dockerfiles/vcpkg/ubuntu-23.10.dockerfile +++ b/dockerfiles/vcpkg/ubuntu-23.10.dockerfile @@ -2,6 +2,7 @@ ARG VCPKG_REF=latest FROM hpccsystems/platform-build-base-ubuntu-23.10:$VCPKG_REF RUN apt-get update && apt-get install --no-install-recommends -y \ + ccache \ default-jdk \ ninja-build \ python3-dev \ diff --git a/vcpkg b/vcpkg index 4d5501e1c57..fcde93bcb3f 160000 --- a/vcpkg +++ b/vcpkg @@ -1 +1 @@ -Subproject commit 4d5501e1c5796ca7b42f4ad38731f8fd2a8c8d75 +Subproject commit fcde93bcb3fec4ae8b650cec2e4e1b7787308098 From 551169eb8ce2c3b668cb10bac1075d008235d8e8 Mon Sep 17 00:00:00 2001 From: Gavin Halliday Date: Fri, 29 Sep 2023 17:38:14 +0100 Subject: [PATCH 2/9] HPCC-30411 Add support for dynamically updating TLS config HPCC-30754 Allow roxie to use issuer based tls in bare-metal configuration Signed-off-by: Gavin Halliday --- common/thorhelper/thorsoapcall.cpp | 5 +- 
esp/bindings/http/client/httpclient.cpp | 2 +- esp/clients/wsdfuaccess/wsdfuaccess.cpp | 25 +- esp/services/ws_dfu/ws_dfuService.cpp | 6 +- esp/test/httptest/httptest.cpp | 15 +- esp/tools/soapplus/http.cpp | 5 +- esp/tools/soapplus/httpproxy.cpp | 5 +- fs/dafilesrv/dafilesrv.cpp | 3 +- fs/dafsclient/rmtclient.cpp | 30 +- fs/dafsserver/dafsserver.cpp | 26 +- roxie/ccd/ccd.hpp | 2 +- roxie/ccd/ccdlistener.cpp | 4 +- roxie/ccd/ccdmain.cpp | 39 +- roxie/ccd/ccdprotocol.cpp | 31 +- roxie/ccd/hpccprotocol.hpp | 2 +- system/jlib/jptree.cpp | 86 ++++ system/jlib/jptree.hpp | 20 + system/jlib/jsecrets.cpp | 401 +++++++++++------- system/jlib/jsecrets.hpp | 28 +- system/jlib/jsmartsock.cpp | 2 +- system/jlib/jsmartsock.hpp | 4 +- system/jlib/jsmartsock.ipp | 4 +- system/security/securesocket/securesocket.cpp | 350 ++++++++------- system/security/securesocket/securesocket.hpp | 14 +- 24 files changed, 665 insertions(+), 444 deletions(-) diff --git a/common/thorhelper/thorsoapcall.cpp b/common/thorhelper/thorsoapcall.cpp index be9abd2b4b2..98df33c7a32 100644 --- a/common/thorhelper/thorsoapcall.cpp +++ b/common/thorhelper/thorsoapcall.cpp @@ -1242,7 +1242,10 @@ class CWSCHelper : implements IWSCHelper, public CInterface if (!ownedSC) { if (clientCert != NULL) - ownedSC.setown(createSecureSocketContextEx(clientCert->certificate, clientCert->privateKey, clientCert->passphrase, ClientSocket)); + { + Owned config = createSecureSocketConfig(clientCert->certificate, clientCert->privateKey, clientCert->passphrase); + ownedSC.setown(createSecureSocketContextEx2(config, ClientSocket)); + } else if (clientCertIssuer.length()) ownedSC.setown(createSecureSocketContextSecret(clientCertIssuer.str(), ClientSocket)); else diff --git a/esp/bindings/http/client/httpclient.cpp b/esp/bindings/http/client/httpclient.cpp index 3c0f1dd99c9..07829510c71 100644 --- a/esp/bindings/http/client/httpclient.cpp +++ b/esp/bindings/http/client/httpclient.cpp @@ -123,7 +123,7 @@ IHttpClient* 
CHttpClientContext::createHttpClient(const char* proxy, const char* if (xproc) m_ssctx.setown(xproc(m_config.get(),ClientSocket)); else - throw MakeStringException(-1, "procedure createSecureSocketContext can't be loaded"); + throw MakeStringException(-1, "procedure createSecureSocketContextEx2 can't be loaded"); } if(m_ssctx.get() == NULL) diff --git a/esp/clients/wsdfuaccess/wsdfuaccess.cpp b/esp/clients/wsdfuaccess/wsdfuaccess.cpp index 7850464c80f..b8d59e7189e 100644 --- a/esp/clients/wsdfuaccess/wsdfuaccess.cpp +++ b/esp/clients/wsdfuaccess/wsdfuaccess.cpp @@ -534,7 +534,6 @@ StringBuffer &encodeDFUFileMeta(StringBuffer &metaInfoBlob, IPropertyTree *metaI metaInfo->serialize(metaInfoBlob); const char *keyPairName = metaInfo->queryProp("keyPairName"); // NB: in container mode, this is the name of the secret containing the cert. - const char *privateKeyFName = nullptr; Owned metaInfoEnvelope = createPTree(); #ifdef _CONTAINERIZED /* Encode the public certificate in the request. NB: this is an approach used for JWT token delegation. @@ -543,24 +542,24 @@ StringBuffer &encodeDFUFileMeta(StringBuffer &metaInfoBlob, IPropertyTree *metaI * If the size of this initial request was ever a concern, we could consider other ways to ensure a one-off * delivery of this esp public signing cert. to dafilesrv, e.g. by dafilesrv reaching out to esp to request it. 
*/ - Owned info = getIssuerTlsServerConfig(keyPairName); - if (!info) + Owned config = getIssuerTlsSyncedConfig(keyPairName); + if (!config || !config->isValid()) throw makeStringExceptionV(-1, "encodeDFUFileMeta: No '%s' MTLS certificate detected.", keyPairName); - privateKeyFName = info->queryProp("privatekey"); - if (isEmptyString(privateKeyFName)) - throw makeStringException(-1, "encodeDFUFileMeta: MTLS - private path missing"); - const char *certPath = info->queryProp("certificate"); - verifyex(certPath); - StringBuffer certificate; - certificate.loadFile(certPath); - verifyex(certificate.length()); + + Owned info = config->getTree(); + const char *privateKeyText = info->queryProp("privatekey"); + if (isEmptyString(privateKeyText)) + throw makeStringException(-1, "encodeDFUFileMeta: MTLS - private key missing"); + const char *certificate = info->queryProp("certificate"); + verifyex(certificate); metaInfoEnvelope->setProp("certificate", certificate); + Owned privateKey = loadPrivateKeyFromMemory(privateKeyText, nullptr); #else - privateKeyFName = environment->getPrivateKeyPath(keyPairName); + const char *privateKeyFName = environment->getPrivateKeyPath(keyPairName); if (isEmptyString(privateKeyFName)) throw makeStringExceptionV(-1, "Key name '%s' is not found in environment settings: /EnvSettings/Keys/KeyPair.", keyPairName); -#endif Owned privateKey = loadPrivateKeyFromFile(privateKeyFName, nullptr); +#endif StringBuffer metaInfoSignature; digiSign(metaInfoSignature, metaInfoBlob.length(), metaInfoBlob.bytes(), *privateKey); diff --git a/esp/services/ws_dfu/ws_dfuService.cpp b/esp/services/ws_dfu/ws_dfuService.cpp index 343d9cf26e2..97412c42cbb 100644 --- a/esp/services/ws_dfu/ws_dfuService.cpp +++ b/esp/services/ws_dfu/ws_dfuService.cpp @@ -6113,8 +6113,7 @@ void CWsDfuEx::dFUFileAccessCommon(IEspContext &context, const CDfsLogicalFileNa StringBuffer dafilesrvHost; #ifdef _CONTAINERIZED keyPairName.set("signing"); - Owned info = 
getIssuerTlsServerConfig(keyPairName); - if (!info) + if (!hasIssuerTlsConfig(keyPairName)) throw makeStringExceptionV(-1, "dFUFileAccessCommon: file signing certificate ('%s') not defined in configuration.", keyPairName.str()); auto externalService = k8s::getDafileServiceFromConfig("stream"); @@ -6490,8 +6489,7 @@ bool CWsDfuEx::onDFUFileCreateV2(IEspContext &context, IEspDFUFileCreateV2Reques #ifdef _CONTAINERIZED keyPairName.set("signing"); - Owned info = getIssuerTlsServerConfig(keyPairName); - if (!info) + if (!hasIssuerTlsConfig(keyPairName)) throw makeStringExceptionV(-1, "onDFUFileCreateV2: file signing certificate ('%s' ) not defined in configuration.", keyPairName.str()); const char *planeName = clusterName; diff --git a/esp/test/httptest/httptest.cpp b/esp/test/httptest/httptest.cpp index 0888478912e..b72bbd55311 100644 --- a/esp/test/httptest/httptest.cpp +++ b/esp/test/httptest/httptest.cpp @@ -152,10 +152,7 @@ HttpClient::HttpClient(int threads, int times, const char* host, int port, FILE* if(use_ssl) { #ifdef _USE_OPENSSL - if(sslconfig != NULL) - m_ssctx.setown(createSecureSocketContextEx2(sslconfig, ClientSocket)); - else - m_ssctx.setown(createSecureSocketContext(ClientSocket)); + m_ssctx.setown(createSecureSocketContextEx2(sslconfig, ClientSocket)); #else throw MakeStringException(-1, "HttpClient: failure to create SSL connection to host '%s': OpenSSL not enabled in build", host); #endif @@ -614,10 +611,7 @@ HttpServer::HttpServer(int port, const char* in, FILE* ofile, bool use_ssl, IPro if(use_ssl) { #ifdef _USE_OPENSSL - if(sslconfig != NULL) - m_ssctx.setown(createSecureSocketContextEx2(sslconfig, ServerSocket)); - else - m_ssctx.setown(createSecureSocketContext(ServerSocket)); + m_ssctx.setown(createSecureSocketContextEx2(sslconfig, ServerSocket)); #else throw MakeStringException(-1, "HttpServer: failure to create SSL socket - OpenSSL not enabled in build"); #endif @@ -1180,10 +1174,7 @@ HttpProxy::HttpProxy(int localport, const char* host, 
int port, FILE* ofile, boo if(use_ssl) { #if _USE_OPENSSL - if(sslconfig != NULL) - m_ssctx.setown(createSecureSocketContextEx2(sslconfig, ClientSocket)); - else - m_ssctx.setown(createSecureSocketContext(ClientSocket)); + m_ssctx.setown(createSecureSocketContextEx2(sslconfig, ClientSocket)); #else throw MakeStringException(-1, "HttpProxy: failure to create SSL connection to host '%s': OpenSSL not enabled in build", host); #endif diff --git a/esp/tools/soapplus/http.cpp b/esp/tools/soapplus/http.cpp index fd288668ef4..ceb70c6fba8 100644 --- a/esp/tools/soapplus/http.cpp +++ b/esp/tools/soapplus/http.cpp @@ -505,10 +505,7 @@ HttpClient::HttpClient(IProperties* globals, const char* url, const char* inname if (cfg && *cfg) cfgtree.setown(createPTreeFromXMLFile(cfg)); } - if (cfgtree) - m_ssctx.setown(createSecureSocketContextEx2(cfgtree, ClientSocket)); - else - m_ssctx.setown(createSecureSocketContext(ClientSocket)); + m_ssctx.setown(createSecureSocketContextEx2(cfgtree, ClientSocket)); } #else throw MakeStringException(-1, "HttpClient: failure to create SSL socket - OpenSSL not enabled in build"); diff --git a/esp/tools/soapplus/httpproxy.cpp b/esp/tools/soapplus/httpproxy.cpp index b07874d8c7f..4d9a8d303b2 100644 --- a/esp/tools/soapplus/httpproxy.cpp +++ b/esp/tools/soapplus/httpproxy.cpp @@ -582,10 +582,7 @@ HttpProxy::HttpProxy(int localport, const char* host, int port, FILE* ofile, boo if(use_ssl) { #ifdef _USE_OPENSSL - if(sslconfig != NULL) - m_ssctx.setown(createSecureSocketContextEx2(sslconfig, ClientSocket)); - else - m_ssctx.setown(createSecureSocketContext(ClientSocket)); + m_ssctx.setown(createSecureSocketContextEx2(sslconfig, ClientSocket)); #else throw MakeStringException(-1, "HttpProxy: failure to create SSL socket - OpenSSL not enabled in build"); #endif diff --git a/fs/dafilesrv/dafilesrv.cpp b/fs/dafilesrv/dafilesrv.cpp index 182aaa5fbed..0c5a52ee301 100644 --- a/fs/dafilesrv/dafilesrv.cpp +++ b/fs/dafilesrv/dafilesrv.cpp @@ -397,8 +397,7 @@ int 
main(int argc, const char* argv[]) // Use the "public" certificate issuer, unless it's visibility is "cluster" (meaning internal only) const char *visibility = getComponentConfigSP()->queryProp("service/@visibility"); const char *certScope = strsame("cluster", visibility) ? "local" : "public"; - Owned info = getIssuerTlsServerConfig(certScope); - connectMethod = info ? SSLOnly : SSLNone; + connectMethod = hasIssuerTlsConfig(certScope) ? SSLOnly : SSLNone; // NB: connectMethod will direct the CRemoteFileServer on accept to create a secure socket based on the same issuer certificates dedicatedRowServicePort = 0; // row service always runs on same secure ssl port in containerized mode diff --git a/fs/dafsclient/rmtclient.cpp b/fs/dafsclient/rmtclient.cpp index 48fcb1e9c58..4204a1185b5 100644 --- a/fs/dafsclient/rmtclient.cpp +++ b/fs/dafsclient/rmtclient.cpp @@ -112,6 +112,12 @@ static class _securitySettingsClient } } + const IPropertyTree * getSecureConfig() + { + //Later: return a synced tree... 
+ return createSecureSocketConfig(queryCertificate(), queryPrivateKey(), queryPassPhrase()); + } + protected: DAFSConnectCfg connectMethod; unsigned short daFileSrvPort; @@ -156,10 +162,10 @@ static ISecureSocket *createSecureSocket(ISocket *sock, const char *issuer) auto it = secureCtxClientIssuerMap.find(issuer); if (it == secureCtxClientIssuerMap.end()) { - Owned info = getIssuerTlsServerConfig(issuer); - if (!info) + Owned info = getIssuerTlsSyncedConfig(issuer); + if (!info || !info->isValid()) throw makeStringExceptionV(-1, "createSecureSocket() : missing MTLS configuration for issuer: %s", issuer); - secureContext.setown(createSecureSocketContextEx2(info, ClientSocket)); + secureContext.setown(createSecureSocketContextSynced(info, ClientSocket)); secureCtxClientIssuerMap.emplace(issuer, secureContext.getLink()); } else @@ -168,7 +174,10 @@ static ISecureSocket *createSecureSocket(ISocket *sock, const char *issuer) else { if (!secureContextClient) - secureContextClient.setown(createSecureSocketContextEx(securitySettings.queryCertificate(), securitySettings.queryPrivateKey(), securitySettings.queryPassPhrase(), ClientSocket)); + { + Owned config = securitySettings.getSecureConfig(); + secureContextClient.setown(createSecureSocketContextEx2(config, ClientSocket)); + } secureContext.set(secureContextClient); } } @@ -751,17 +760,8 @@ void CRemoteBase::connectSocket(SocketEndpoint &ep, unsigned connectTimeoutMs, u } else { - Owned secretPTree = getSecret("storage", storageSecret); - if (!secretPTree) - throw makeStringExceptionV(-1, "secret %s.%s not found", "storage", storageSecret.str()); - - StringBuffer certSecretBuf; - getSecretKeyValue(certSecretBuf, secretPTree, "tls.crt"); - - StringBuffer privKeySecretBuf; - getSecretKeyValue(privKeySecretBuf, secretPTree, "tls.key"); - - Owned secureContext = createSecureSocketContextEx(certSecretBuf, privKeySecretBuf, nullptr, ClientSocket); + Owned config = createStorageTlsConfig(storageSecret, false); + Owned 
secureContext = createSecureSocketContextSynced(config, ClientSocket); ssock.setown(secureContext->createSecureSocket(socket.getClear(), loglevel)); } } diff --git a/fs/dafsserver/dafsserver.cpp b/fs/dafsserver/dafsserver.cpp index cd0fc21ce62..5cb9a26b118 100644 --- a/fs/dafsserver/dafsserver.cpp +++ b/fs/dafsserver/dafsserver.cpp @@ -112,6 +112,13 @@ static class _securitySettingsServer { queryDafsSecSettings(&connectMethod, &daFileSrvPort, &daFileSrvSSLPort, &certificate, &privateKey, &passPhrase); } + + const IPropertyTree * getSecureConfig() + { + //Later: return a synced tree... + return createSecureSocketConfig(certificate, privateKey, passPhrase); + } + } securitySettings; #endif @@ -133,23 +140,12 @@ static ISecureSocket *createSecureSocket(ISocket *sock, bool disableClientCertVe */ const char *certScope = strsame("cluster", getComponentConfigSP()->queryProp("service/@visibility")) ? "local" : "public"; - Owned info = getIssuerTlsServerConfig(certScope); - if (!info) + Owned info = getIssuerTlsSyncedConfig(certScope, nullptr, disableClientCertVerification); + if (!info || !info->isValid()) throw makeStringException(-1, "createSecureSocket() : missing MTLS configuration"); - Owned cloneInfo; - if (disableClientCertVerification) - { - // do not insist clients provide a cerificate for verification. - // This is used when the connection is TLS, but the authentication is done via other means - // e.g. in the case of the streaming service a opaque signed blob is transmitted and must - // be verified before proceeding. 
- cloneInfo.setown(createPTreeFromIPT(info)); - cloneInfo->setPropBool("verify/@enable", false); - info = cloneInfo; - } - secureContextServer.setown(createSecureSocketContextEx2(info, ServerSocket)); + secureContextServer.setown(createSecureSocketContextSynced(info, ServerSocket)); #else - secureContextServer.setown(createSecureSocketContextEx(securitySettings.certificate, securitySettings.privateKey, securitySettings.passPhrase, ServerSocket)); + secureContextServer.setown(createSecureSocketContextEx2(securitySettings.getSecureConfig(), ServerSocket)); #endif } } diff --git a/roxie/ccd/ccd.hpp b/roxie/ccd/ccd.hpp index ff954e4e619..0537b0f0b06 100644 --- a/roxie/ccd/ccd.hpp +++ b/roxie/ccd/ccd.hpp @@ -410,7 +410,7 @@ extern int backgroundCopyClass; extern int backgroundCopyPrio; extern unsigned roxiePort; // If listening on multiple, this is the first. Used for lock cascading -extern IPropertyTree *roxiePortTlsClientConfig; +extern ISyncedPropertyTree *roxiePortTlsClientConfig; extern unsigned udpMulticastBufferSize; diff --git a/roxie/ccd/ccdlistener.cpp b/roxie/ccd/ccdlistener.cpp index e0830b7edc9..99a48198fe8 100644 --- a/roxie/ccd/ccdlistener.cpp +++ b/roxie/ccd/ccdlistener.cpp @@ -79,7 +79,7 @@ class CascadeManager : public CInterface CriticalSection revisionCrit; int myEndpoint; const IRoxieContextLogger &logctx; - IPropertyTree *tlsConfig = nullptr; + ISyncedPropertyTree *tlsConfig = nullptr; void unlockChildren() { @@ -135,7 +135,7 @@ class CascadeManager : public CInterface assertex(sock); if (tlsConfig) { - Owned secureCtx = createSecureSocketContextEx2(tlsConfig, ClientSocket); + Owned secureCtx = createSecureSocketContextSynced(tlsConfig, ClientSocket); if (!secureCtx) throw makeStringException(ROXIE_TLS_ERROR, "Roxie CascadeManager failed creating secure context for roxie control message"); Owned ssock = secureCtx->createSecureSocket(sock.getClear()); diff --git a/roxie/ccd/ccdmain.cpp b/roxie/ccd/ccdmain.cpp index 9f20d67fc63..159d6f99642 100644 
--- a/roxie/ccd/ccdmain.cpp +++ b/roxie/ccd/ccdmain.cpp @@ -46,6 +46,10 @@ #include "hpccconfig.hpp" #include "udpsha.hpp" +#ifdef _USE_OPENSSL +#include "securesocket.hpp" +#endif + #if defined (__linux__) #include #include "ioprio.h" @@ -230,7 +234,7 @@ unsigned leafCacheMB = 50; unsigned blobCacheMB = 0; unsigned roxiePort = 0; -IPropertyTree *roxiePortTlsClientConfig = nullptr; +ISyncedPropertyTree *roxiePortTlsClientConfig = nullptr; #ifndef _CONTAINERIZED Owned perfMonHook; @@ -1450,7 +1454,7 @@ int CCD_API roxie_main(int argc, const char *argv[], const char * defaultYaml) else { Owned protocolPlugin = loadHpccProtocolPlugin(protocolCtx, NULL); - Owned roxieServer = protocolPlugin->createListener("runOnce", createRoxieProtocolMsgSink(myNode.getIpAddress(), 0, 1, false), 0, 0, nullptr, nullptr, nullptr, nullptr, nullptr); + Owned roxieServer = protocolPlugin->createListener("runOnce", createRoxieProtocolMsgSink(myNode.getIpAddress(), 0, 1, false), 0, 0, nullptr, nullptr); try { const char *format = topology->queryProp("@format"); @@ -1501,7 +1505,7 @@ int CCD_API roxie_main(int argc, const char *argv[], const char * defaultYaml) { roxiePort = port; if (roxieFarm.getPropBool("@tls")) - roxiePortTlsClientConfig = createIssuerTlsClientConfig(roxieFarm.queryProp("@issuer"), roxieFarm.getPropBool("@selfSigned")); + roxiePortTlsClientConfig = createIssuerTlsConfig(roxieFarm.queryProp("@issuer"), nullptr, true, roxieFarm.getPropBool("@selfSigned"), true, false); debugEndpoint.set(roxiePort, ip); } bool suspended = roxieFarm.getPropBool("@suspended", false); @@ -1513,23 +1517,21 @@ int CCD_API roxie_main(int argc, const char *argv[], const char * defaultYaml) StringBuffer certFileName; StringBuffer keyFileName; StringBuffer passPhraseStr; - Owned tlsConfig; + Owned tlsConfig; if (serviceTLS) { - protocol = "ssl"; #ifdef _USE_OPENSSL - if (isContainerized()) + protocol = "ssl"; + const char *certIssuer = roxieFarm.queryProp("@issuer"); + if (isEmptyString(certIssuer)) 
+ certIssuer = roxieFarm.getPropBool("@public", true) ? "public" : "local"; + bool disableMtls = roxieFarm.getPropBool("@disableMtls", false); + tlsConfig.setown(getIssuerTlsSyncedConfig(certIssuer, roxieFarm.queryProp("trusted_peers"), disableMtls)); + if (!tlsConfig || !tlsConfig->isValid()) { - const char *certIssuer = roxieFarm.queryProp("@issuer"); - if (isEmptyString(certIssuer)) - certIssuer = roxieFarm.getPropBool("@public", true) ? "public" : "local"; - tlsConfig.setown(getIssuerTlsServerConfigWithTrustedPeers(certIssuer, roxieFarm.queryProp("trusted_peers"))); - if (!tlsConfig) + if (isContainerized()) throw MakeStringException(ROXIE_FILE_ERROR, "TLS secret for issuer %s not found", certIssuer); - DBGLOG("Roxie service, port(%d) TLS issuer (%s)", port, certIssuer); - } - else - { + const char *passPhrase = roxieFarm.queryProp("@passphrase"); if (!isEmptyString(passPhrase)) decrypt(passPhraseStr, passPhrase); @@ -1555,7 +1557,12 @@ int CCD_API roxie_main(int argc, const char *argv[], const char * defaultYaml) if (!checkFileExists(keyFileName.str())) throw MakeStringException(ROXIE_FILE_ERROR, "Roxie SSL Farm Listener on port %d missing privateKeyFile (%s)", port, keyFileName.str()); + + Owned staticConfig = createSecureSocketConfig(certFileName, keyFileName, passPhraseStr); + tlsConfig.setown(createSyncedPropertyTree(staticConfig)); } + else + DBGLOG("Roxie service, port(%d) TLS issuer (%s)", port, certIssuer); #else OWARNLOG("Skipping Roxie SSL Farm Listener on port %d : OpenSSL disabled in build", port); @@ -1566,7 +1573,7 @@ int CCD_API roxie_main(int argc, const char *argv[], const char * defaultYaml) const char *config = roxieFarm.queryProp("@config"); // NB: leaks - until we fix bug in ensureProtocolPlugin() whereby some paths return a linked object and others do not IHpccProtocolPlugin *protocolPlugin = ensureProtocolPlugin(*protocolCtx, soname); - roxieServer.setown(protocolPlugin->createListener(protocol ? 
protocol : "native", createRoxieProtocolMsgSink(ip, port, numThreads, suspended), port, listenQueue, config, tlsConfig, certFileName, keyFileName, passPhraseStr)); + roxieServer.setown(protocolPlugin->createListener(protocol ? protocol : "native", createRoxieProtocolMsgSink(ip, port, numThreads, suspended), port, listenQueue, config, tlsConfig)); } else roxieServer.setown(createRoxieWorkUnitListener(numThreads, suspended)); diff --git a/roxie/ccd/ccdprotocol.cpp b/roxie/ccd/ccdprotocol.cpp index fe0e1be6567..e5546259936 100644 --- a/roxie/ccd/ccdprotocol.cpp +++ b/roxie/ccd/ccdprotocol.cpp @@ -29,7 +29,7 @@ //================================================================================================================================ -IHpccProtocolListener *createProtocolListener(const char *protocol, IHpccProtocolMsgSink *sink, unsigned port, unsigned listenQueue, const IPropertyTree *tlsConfig, const char *certFile, const char *keyFile, const char *passPhrase); +IHpccProtocolListener *createProtocolListener(const char *protocol, IHpccProtocolMsgSink *sink, unsigned port, unsigned listenQueue, const ISyncedPropertyTree *tlsConfig); class CHpccProtocolPlugin : implements IHpccProtocolPlugin, public CInterface { @@ -60,9 +60,9 @@ class CHpccProtocolPlugin : implements IHpccProtocolPlugin, public CInterface maxHttpConnectionRequests = ctx.ctxGetPropInt("@maxHttpConnectionRequests", 0); maxHttpKeepAliveWait = ctx.ctxGetPropInt("@maxHttpKeepAliveWait", 5000); // In milliseconds } - IHpccProtocolListener *createListener(const char *protocol, IHpccProtocolMsgSink *sink, unsigned port, unsigned listenQueue, const char *config, const IPropertyTree *tlsConfig, const char *certFile, const char *keyFile, const char *passPhrase) + IHpccProtocolListener *createListener(const char *protocol, IHpccProtocolMsgSink *sink, unsigned port, unsigned listenQueue, const char *config, const ISyncedPropertyTree *tlsConfig) { - return createProtocolListener(protocol, sink, port, 
listenQueue, tlsConfig, certFile, keyFile, passPhrase); + return createProtocolListener(protocol, sink, port, listenQueue, tlsConfig); } public: StringArray targetNames; @@ -224,32 +224,23 @@ class ProtocolSocketListener : public ProtocolListener Owned socket; SocketEndpoint ep; StringAttr protocol; - StringAttr certFile; - StringAttr keyFile; - StringAttr passPhrase; Owned secureContext; bool isSSL = false; public: - ProtocolSocketListener(IHpccProtocolMsgSink *_sink, unsigned _port, unsigned _listenQueue, const char *_protocol, const IPropertyTree *_tlsConfig, const char *_certFile, const char *_keyFile, const char *_passPhrase) + ProtocolSocketListener(IHpccProtocolMsgSink *_sink, unsigned _port, unsigned _listenQueue, const char *_protocol, const ISyncedPropertyTree *_tlsConfig) : ProtocolListener(_sink) { port = _port; listenQueue = _listenQueue; ep.set(port, queryHostIP()); protocol.set(_protocol); - certFile.set(_certFile); - keyFile.set(_keyFile); - passPhrase.set(_passPhrase); isSSL = streq(protocol.str(), "ssl"); #ifdef _USE_OPENSSL if (isSSL) { - if (_tlsConfig) - secureContext.setown(createSecureSocketContextEx2(_tlsConfig, ServerSocket)); - else - secureContext.setown(createSecureSocketContextEx(certFile.get(), keyFile.get(), passPhrase.get(), ServerSocket)); + secureContext.setown(createSecureSocketContextSynced(_tlsConfig, ServerSocket)); } #endif } @@ -2212,16 +2203,20 @@ void ProtocolSocketListener::runOnce(const char *query) p->runOnce(query); } -IHpccProtocolListener *createProtocolListener(const char *protocol, IHpccProtocolMsgSink *sink, unsigned port, unsigned listenQueue, const IPropertyTree *tlsConfig, const char *certFile, const char *keyFile, const char *passPhrase) +IHpccProtocolListener *createProtocolListener(const char *protocol, IHpccProtocolMsgSink *sink, unsigned port, unsigned listenQueue, const ISyncedPropertyTree *tlsConfig) { if (traceLevel) { const char *certIssuer = "none"; - if (tlsConfig && tlsConfig->hasProp("@issuer")) - 
certIssuer = tlsConfig->queryProp("@issuer"); + if (tlsConfig) + { + Owned tlsInfo = tlsConfig->getTree(); + if (tlsInfo && tlsInfo->hasProp("@issuer")) + certIssuer = tlsInfo->queryProp("@issuer"); + } DBGLOG("Creating Roxie socket listener, protocol %s, issuer=%s, pool size %d, listen queue %d%s", protocol, certIssuer, sink->getPoolSize(), listenQueue, sink->getIsSuspended() ? " SUSPENDED":""); } - return new ProtocolSocketListener(sink, port, listenQueue, protocol, tlsConfig, certFile, keyFile, passPhrase); + return new ProtocolSocketListener(sink, port, listenQueue, protocol, tlsConfig); } extern IHpccProtocolPlugin *loadHpccProtocolPlugin(IHpccProtocolPluginContext *ctx, IActiveQueryLimiterFactory *_limiterFactory) diff --git a/roxie/ccd/hpccprotocol.hpp b/roxie/ccd/hpccprotocol.hpp index 21380c0a54d..0fbb19c2e0d 100644 --- a/roxie/ccd/hpccprotocol.hpp +++ b/roxie/ccd/hpccprotocol.hpp @@ -134,7 +134,7 @@ interface IActiveQueryLimiterFactory : extends IInterface interface IHpccProtocolPlugin : extends IInterface { - virtual IHpccProtocolListener *createListener(const char *protocol, IHpccProtocolMsgSink *sink, unsigned port, unsigned listenQueue, const char *config, const IPropertyTree *tlsConfig, const char *certFile, const char *keyFile, const char *passPhrase)=0; + virtual IHpccProtocolListener *createListener(const char *protocol, IHpccProtocolMsgSink *sink, unsigned port, unsigned listenQueue, const char *config, const ISyncedPropertyTree *tlsConfig)=0; }; extern IHpccProtocolPlugin *loadHpccProtocolPlugin(IHpccProtocolPluginContext *ctx, IActiveQueryLimiterFactory *limiterFactory); diff --git a/system/jlib/jptree.cpp b/system/jlib/jptree.cpp index 4a3df7ce08f..994d916f422 100644 --- a/system/jlib/jptree.cpp +++ b/system/jlib/jptree.cpp @@ -10138,3 +10138,89 @@ void setExpertOpt(const char *opt, const char *value) getExpertOptPath(opt, xpath.clear()); config->setProp(xpath, value); } + 
+//--------------------------------------------------------------------------------------------------------------------- + +//HPCC-30752 This should move inside PTree to allow a more efficient hash calculation and possible caching. +//Currently the values are not persisted so the implementation could change. That may change in the future. +unsigned getPropertyTreeHash(const IPropertyTree & source, unsigned hashcode) +{ + if (source.isBinary()) + { + MemoryBuffer mb; + source.getPropBin(nullptr, mb); + hashcode = hashc((const byte *)mb.bufferBase(), mb.length(), hashcode); + } + else + { + const char * value = source.queryProp(nullptr); + if (value) + hashcode = hashcz((const byte *)value, hashcode); + } + + Owned aiter = source.getAttributes(); + ForEach(*aiter) + { + hashcode = hashcz((const byte *)aiter->queryName(), hashcode); + hashcode = hashcz((const byte *)aiter->queryValue(), hashcode); + } + + Owned iter = source.getElements("*"); + ForEach(*iter) + { + IPropertyTree & child = iter->query(); + hashcode = hashcz((const byte *)child.queryName(), hashcode); + hashcode = getPropertyTreeHash(child, hashcode); + } + return hashcode; +} + +class SyncedPropertyTreeWrapper : extends CInterfaceOf +{ +public: + SyncedPropertyTreeWrapper(IPropertyTree * _tree) : tree(_tree) + { + } + + virtual const IPropertyTree * getTree() const override + { + return LINK(tree); + } + + virtual bool getProp(MemoryBuffer & result, const char * xpath) const override + { + if (!tree) + return false; + return tree->getPropBin(xpath, result); + } + + virtual bool getProp(StringBuffer & result, const char * xpath) const override + { + if (!tree) + return false; + return tree->getProp(xpath, result); + } + + virtual unsigned getVersion() const override + { + return 0; + } + + virtual bool isStale() const override + { + return false; + } + + virtual bool isValid() const override + { + return tree != nullptr; + } + +protected: + Linked tree; +}; + +ISyncedPropertyTree * 
createSyncedPropertyTree(IPropertyTree * tree) +{ + return new SyncedPropertyTreeWrapper(tree); +} diff --git a/system/jlib/jptree.hpp b/system/jlib/jptree.hpp index b3dc69c6fdd..80888c8218d 100644 --- a/system/jlib/jptree.hpp +++ b/system/jlib/jptree.hpp @@ -430,4 +430,24 @@ extern jlib_decl StringBuffer &getExpertOptString(const char *opt, StringBuffer extern jlib_decl void setExpertOpt(const char *opt, const char *value); +//--------------------------------------------------------------------------------------------------------------------- + +extern jlib_decl unsigned getPropertyTreeHash(const IPropertyTree & source, unsigned hashcode); + +//Interface for encapsulating an IPropertyTree that can be atomically updated. The result of getTree() is guaranteed +//to not be modified and to remain valid and consistent until it is released. +interface ISyncedPropertyTree : extends IInterface +{ + virtual const IPropertyTree * getTree() const = 0; + virtual bool getProp(MemoryBuffer & result, const char * xpath) const = 0; + virtual bool getProp(StringBuffer & result, const char * xpath) const = 0; + //Return a version-hash which changes whenever the property tree changes - so that a caller can determine whether it needs to update + virtual unsigned getVersion() const = 0; + virtual bool isStale() const = 0; // An indication that the property tree may be out of date because it couldn't be resynchronized. + virtual bool isValid() const = 0; // Is the property tree non-null? Typically called at startup to check configuration is provided. 
+}; + +extern jlib_decl ISyncedPropertyTree * createSyncedPropertyTree(IPropertyTree * tree); + + #endif diff --git a/system/jlib/jsecrets.cpp b/system/jlib/jsecrets.cpp index 7fa24b4b794..5951ad6fbb7 100644 --- a/system/jlib/jsecrets.cpp +++ b/system/jlib/jsecrets.cpp @@ -49,6 +49,7 @@ #include #endif +//#define TRACE_SECRETS #include enum class CVaultKind { kv_v1, kv_v2 }; @@ -72,7 +73,7 @@ interface IVaultManager : extends IInterface static CriticalSection secretCacheCS; static Owned secretCache; static CriticalSection mtlsInfoCacheCS; -static Owned mtlsInfoCache; +static std::unordered_map> mtlsInfoCache; static Owned vaultManager; static MemoryAttr udpKey; static bool udpKeyInitialized = false; @@ -80,7 +81,6 @@ static bool udpKeyInitialized = false; MODULE_INIT(INIT_PRIORITY_SYSTEM) { secretCache.setown(createPTree()); - mtlsInfoCache.setown(createPTree()); return true; } @@ -1157,41 +1157,17 @@ IPropertyTree *getSecret(const char *category, const char * name, const char * o bool getSecretKeyValue(MemoryBuffer & result, const IPropertyTree *secret, const char * key) { validateKeyName(key); - if (!secret) return false; - - IPropertyTree *tree = secret->queryPropTree(key); - if (tree) - return tree->getPropBin(nullptr, result); - return false; + return secret->getPropBin(key, result); } bool getSecretKeyValue(StringBuffer & result, const IPropertyTree *secret, const char * key) { validateKeyName(key); - if (!secret) return false; - - IPropertyTree *tree = secret->queryPropTree(key); - if (!tree) - return false; - if (tree->isBinary(nullptr)) - { - MemoryBuffer mb; - tree->getPropBin(nullptr, mb); - //caller implies it's a string - result.append(mb.length(), mb.toByteArray()); - return true; - } - const char *value = tree->queryProp(nullptr); - if (value) - { - result.append(value); - return true; - } - return false; + return secret->getProp(key, result); } extern jlib_decl bool getSecretValue(StringBuffer & result, const char *category, const char * name, const 
char * key, bool required) @@ -1207,7 +1183,7 @@ extern jlib_decl bool getSecretValue(StringBuffer & result, const char *category //--------------------------------------------------------------------------------------------------------------------- -class CSecret final : public CInterfaceOf +class CSecret final : public CInterfaceOf { public: CSecret(const char *_category, const char * _name, const char * _vaultId, const char * _version, const IPropertyTree * _secret) @@ -1216,28 +1192,35 @@ class CSecret final : public CInterfaceOf updateHash(); } - virtual const IPropertyTree * getTree() const; + virtual const IPropertyTree * getTree() const override; - virtual bool getKeyValue(MemoryBuffer & result, const char * key) const + virtual bool getProp(MemoryBuffer & result, const char * key) const override { CriticalBlock block(secretCs); checkStale(); return getSecretKeyValue(result, secret, key); } - virtual bool getKeyValue(StringBuffer & result, const char * key) const + virtual bool getProp(StringBuffer & result, const char * key) const override { CriticalBlock block(secretCs); checkStale(); return getSecretKeyValue(result, secret, key); } - virtual bool isStale() const + virtual bool isStale() const override { return secret && hasCacheExpired(secret); } - virtual unsigned getVersion() const + virtual unsigned getVersion() const override { + CriticalBlock block(secretCs); + checkStale(); return secretHash; } + virtual bool isValid() const override + { + CriticalBlock block(secretCs); + return secret != nullptr; + } protected: void checkStale() const; @@ -1265,6 +1248,9 @@ void CSecret::checkStale() const { if (isStale()) { +#ifdef TRACE_SECRETS + DBGLOG("Secret %s/%s is stale updating from %u...", category.str(), name.str(), secretHash); +#endif //MORE: This could block or fail - in roxie especially it would be better to return the old value try { @@ -1280,48 +1266,15 @@ void CSecret::checkStale() const } } -//This should probably move to jptree.?pp as a 
generally useful function -static unsigned calculateTreeHash(const IPropertyTree & source, unsigned hashcode) -{ - if (source.isBinary()) - { - MemoryBuffer mb; - source.getPropBin(nullptr, mb); - hashcode = hashc((const byte *)mb.bufferBase(), mb.length(), hashcode); - } - else - { - const char * value = source.queryProp(nullptr); - if (value) - hashcode = hashcz((const byte *)value, hashcode); - } - - Owned aiter = source.getAttributes(); - ForEach(*aiter) - { - hashcode = hashcz((const byte *)aiter->queryName(), hashcode); - hashcode = hashcz((const byte *)aiter->queryValue(), hashcode); - } - - Owned iter = source.getElements("*"); - ForEach(*iter) - { - IPropertyTree & child = iter->query(); - hashcode = hashcz((const byte *)child.queryName(), hashcode); - hashcode = calculateTreeHash(child, hashcode); - } - return hashcode; -} - void CSecret::updateHash() const { if (secret) - secretHash = calculateTreeHash(*secret.get(), 0x811C9DC5); + secretHash = getPropertyTreeHash(*secret.get(), 0x811C9DC5); else secretHash = 0; } -ISecret * resolveSecret(const char *category, const char * name, const char * optVaultId, const char * optVersion) +ISyncedPropertyTree * resolveSecret(const char *category, const char * name, const char * optVaultId, const char * optVersion) { Owned resolved = getSecret(category, name, optVaultId, optVersion); return new CSecret(category, name, optVaultId, optVersion, resolved); @@ -1385,106 +1338,241 @@ jlib_decl bool containsEmbeddedKey(const char *certificate) return false; } -IPropertyTree *createIssuerTlsClientConfig(const char *issuer, bool acceptSelfSigned, bool addCACert) -{ - if (isEmptyString(issuer)) - return nullptr; - StringBuffer filepath; - StringBuffer secretpath; - buildSecretPath(secretpath, "certificates", issuer); +//--------------------------------------------------------------------------------------------------------------------- + +class CSyncedCertificateBase : public CInterfaceOf +{ +public: + 
CSyncedCertificateBase(const char *_issuer) + : issuer(_issuer) + { + } - Owned info = createPTree(); + virtual const IPropertyTree * getTree() const override final; - if (strieq(issuer, "remote")||strieq(issuer, "local")) + virtual bool getProp(MemoryBuffer & result, const char * key) const override final { - filepath.set(secretpath).append("tls.crt"); - if (!checkFileExists(filepath)) - return nullptr; + CriticalBlock block(secretCs); + checkStale(); + return getSecretKeyValue(result, config, key); + } + virtual bool getProp(StringBuffer & result, const char * key) const override final + { + CriticalBlock block(secretCs); + checkStale(); + return getSecretKeyValue(result, config, key); + } + virtual bool isStale() const override final + { + return secret->isStale(); + } + virtual bool isValid() const override + { + return secret->isValid(); - info->setProp("certificate", filepath.str()); - filepath.set(secretpath).append("tls.key"); - if (checkFileExists(filepath)) - info->setProp("privatekey", filepath.str()); + } + virtual unsigned getVersion() const override final + { + CriticalBlock block(secretCs); + checkStale(); + //If information that is combined with the secret (e.g. 
trusted peers) can also change dynamically this would + //need to be a separate hash calculated from the config tree + return secretHash; } - IPropertyTree *verify = ensurePTree(info, "verify"); - if (addCACert) +protected: + virtual void updateConfigFromSecret(const IPropertyTree * secretInfo) const = 0; + +protected: + void checkStale() const; + void createConfig() const; + void createDefaultConfigFromSecret(const IPropertyTree * secretInfo, bool addCertificates, bool addCertificateAuthority) const; + void updateCertificateFromSecret(const IPropertyTree * secretInfo) const; + void updateCertificateAuthorityFromSecret(const IPropertyTree * secretInfo) const; + +protected: + StringAttr issuer; + Owned secret; + mutable CriticalSection secretCs; + mutable Linked config; + mutable std::atomic secretHash{0}; +}; + + +const IPropertyTree * CSyncedCertificateBase::getTree() const +{ + CriticalBlock block(secretCs); + checkStale(); + return LINK(config); +} + +void CSyncedCertificateBase::checkStale() const +{ + if (secretHash != secret->getVersion()) + createConfig(); +} + +void CSyncedCertificateBase::createConfig() const +{ + //Update before getting the tree to avoid potential race condition updating the tree at the same time. + //Could alternatively return the version number from the getTree() call. 
+ secretHash = secret->getVersion(); + + Owned secretInfo = secret->getTree(); + if (secretInfo) { - filepath.set(secretpath).append("ca.crt"); - if (checkFileExists(filepath)) - { - IPropertyTree *ca = ensurePTree(verify, "ca_certificates"); - ca->setProp("@path", filepath.str()); - } + config.setown(createPTree(issuer)); + ensurePTree(config, "verify"); + updateConfigFromSecret(secretInfo); } - verify->setPropBool("@enable", true); - verify->setPropBool("@address_match", false); - verify->setPropBool("@accept_selfsigned", acceptSelfSigned); - verify->setProp("trusted_peers", "anyone"); + else + config.clear(); +} + - return info.getClear(); +void CSyncedCertificateBase::updateCertificateFromSecret(const IPropertyTree * secretInfo) const +{ + StringBuffer value; + config->setProp("@issuer", issuer); // server only? + if (secretInfo->getProp("tls.crt", value.clear())) + config->setProp("certificate", value.str()); + if (secretInfo->getProp("tls.key", value.clear())) + config->setProp("privatekey", value.str()); } -IPropertyTree *getIssuerTlsServerConfig(const char *name) +void CSyncedCertificateBase::updateCertificateAuthorityFromSecret(const IPropertyTree * secretInfo) const { - if (isEmptyString(name)) - return nullptr; + StringBuffer value; + if (secretInfo->getProp("ca.crt", value.clear())) + { + IPropertyTree *verify = config->queryPropTree("verify"); + IPropertyTree *ca = ensurePTree(verify, "ca_certificates"); + ca->setProp("pem", value.str()); + } +} - validateSecretName(name); - CriticalBlock block(mtlsInfoCacheCS); - Owned info = mtlsInfoCache->getPropTree(name); - if (info) - return info.getClear(); +//--------------------------------------------------------------------------------------------------------------------- - StringBuffer filepath; - StringBuffer secretpath; +class CIssuerConfig final : public CSyncedCertificateBase +{ +public: + CIssuerConfig(const char *_issuer, const char * _trustedPeers, bool _isClientConnection, bool _acceptSelfSigned, 
bool _addCACert, bool _disableMTLS) + : CSyncedCertificateBase(_issuer), trustedPeers(_trustedPeers), isClientConnection(_isClientConnection), acceptSelfSigned(_acceptSelfSigned), addCACert(_addCACert), disableMTLS(_disableMTLS) + { + secret.setown(resolveSecret("certificates", issuer, nullptr, nullptr)); + createConfig(); + } - buildSecretPath(secretpath, "certificates", name); + virtual void updateConfigFromSecret(const IPropertyTree * secretInfo) const override; - filepath.set(secretpath).append("tls.crt"); - if (!checkFileExists(filepath)) - return nullptr; +protected: + StringAttr trustedPeers; + bool isClientConnection; // required in constructor + bool acceptSelfSigned; // required in constructor + bool addCACert; // required in constructor + bool disableMTLS; +}; - info.set(mtlsInfoCache->setPropTree(name)); - info->setProp("@issuer", name); - info->setProp("certificate", filepath.str()); - filepath.set(secretpath).append("tls.key"); - if (checkFileExists(filepath)) - info->setProp("privatekey", filepath.str()); - IPropertyTree *verify = ensurePTree(info, "verify"); - if (verify) - { - filepath.set(secretpath).append("ca.crt"); - if (checkFileExists(filepath)) - { - IPropertyTree *ca = ensurePTree(verify, "ca_certificates"); - if (ca) - ca->setProp("@path", filepath.str()); - } - //For now only the "public" issuer implies client certificates are not required - verify->setPropBool("@enable", !strieq(name, "public")); - verify->setPropBool("@address_match", false); - verify->setPropBool("@accept_selfsigned", false); + +void CIssuerConfig::updateConfigFromSecret(const IPropertyTree * secretInfo) const +{ + if (!isClientConnection || !strieq(issuer, "public")) + updateCertificateFromSecret(secretInfo); + + + // addCACert is usually true. A client hitting a public issuer is the case where we don't want the ca cert + // defined. Otherwise, for MTLS we want control over our CACert using addCACert. 
When hitting public services + // using public certificate authorities we want the well known (browser compatible) CA list installed on the + // system instead. + if (!isClientConnection || addCACert) + updateCertificateAuthorityFromSecret(secretInfo); + + IPropertyTree *verify = config->queryPropTree("verify"); + assertex(verify); // Should always be defined by this point. + + //For now only the "public" issuer implies client certificates are not required + verify->setPropBool("@enable", !disableMTLS && (isClientConnection || !strieq(issuer, "public"))); + verify->setPropBool("@address_match", false); + verify->setPropBool("@accept_selfsigned", isClientConnection && acceptSelfSigned); + if (trustedPeers) // Allow blank string to mean none, null means anyone + verify->setProp("trusted_peers", trustedPeers); + else verify->setProp("trusted_peers", "anyone"); +} + + +ISyncedPropertyTree * createIssuerTlsConfig(const char * issuer, const char * optTrustedPeers, bool isClientConnection, bool acceptSelfSigned, bool addCACert, bool disableMTLS) +{ + return new CIssuerConfig(issuer, optTrustedPeers, isClientConnection, acceptSelfSigned, addCACert, disableMTLS); + +} +//--------------------------------------------------------------------------------------------------------------------- + +class CCertificateConfig final : public CSyncedCertificateBase +{ +public: + CCertificateConfig(const char * _category, const char * _secretName, bool _addCACert) + : CSyncedCertificateBase(nullptr), addCACert(_addCACert) + { + secret.setown(resolveSecret(_category, _secretName, nullptr, nullptr)); + if (!secret->isValid()) + throw makeStringExceptionV(-1, "secret %s.%s not found", _category, _secretName); + createConfig(); } - return info.getClear(); + + virtual void updateConfigFromSecret(const IPropertyTree * secretInfo) const override; + +protected: + bool addCACert; // required in constructor +}; + +void CCertificateConfig::updateConfigFromSecret(const IPropertyTree * secretInfo) 
const +{ + updateCertificateFromSecret(secretInfo); + + if (addCACert) + updateCertificateAuthorityFromSecret(secretInfo); } -IPropertyTree *getIssuerTlsServerConfigWithTrustedPeers(const char *issuer, const char *trusted_peers) + +ISyncedPropertyTree * createStorageTlsConfig(const char * secretName, bool addCACert) { - Owned issuerConfig = getIssuerTlsServerConfig(issuer); - if (!issuerConfig || isEmptyString(trusted_peers)) - return issuerConfig.getClear(); - //TBD: might cache in the future, but needs thought, lookup must include trusted_peers, but will there be cases where trusted_peers can change dynamically? - Owned tlsConfig = createPTreeFromIPT(issuerConfig); - if (!tlsConfig) + return new CCertificateConfig("storage", secretName, addCACert); + +} + + +const ISyncedPropertyTree * getIssuerTlsSyncedConfig(const char * issuer, const char * optTrustedPeers, bool disableMTLS) +{ + if (isEmptyString(issuer)) return nullptr; - IPropertyTree *verify = ensurePTree(tlsConfig, "verify"); - verify->setProp("trusted_peers", trusted_peers); - return tlsConfig.getClear(); + const char * key; + StringBuffer temp; + if (!isEmptyString(optTrustedPeers) || disableMTLS) + { + temp.append(issuer).append("/").append(optTrustedPeers).append('/').append(disableMTLS); + key = temp.str(); + } + else + key = issuer; + + CriticalBlock block(mtlsInfoCacheCS); + auto match = mtlsInfoCache.find(key); + if (match != mtlsInfoCache.cend()) + return LINK(match->second); + + Owned config = createIssuerTlsConfig(issuer, optTrustedPeers, false, false, true, disableMTLS); + mtlsInfoCache.emplace(key, config); + return config.getClear(); +} + +bool hasIssuerTlsConfig(const char *issuer) +{ + Owned match = getIssuerTlsSyncedConfig(issuer, nullptr, false); + return match && match->isValid(); } enum UseMTLS { UNINIT, DISABLED, ENABLED }; @@ -1518,21 +1606,18 @@ jlib_decl bool queryMtls() if (checkFileExists(cert) && checkFileExists(privKey)) { CriticalBlock block(mtlsInfoCacheCS); - if 
(mtlsInfoCache) - { - IPropertyTree *info = mtlsInfoCache->queryPropTree("local"); - if (!info) - info = mtlsInfoCache->setPropTree("local"); - if (info) - { // always update - info->setProp("certificate", cert); - info->setProp("privatekey", privKey); - if ( (!isEmptyString(pubKey)) && (checkFileExists(pubKey)) ) - info->setProp("publickey", pubKey); - if (!isEmptyString(passPhrase)) - info->setProp("passphrase", passPhrase); // encrypted - } - } + assertex(mtlsInfoCache.find("local") == mtlsInfoCache.cend()); + + Owned info = createPTree("local"); + info->setProp("certificate", cert); + info->setProp("privatekey", privKey); + if ( (!isEmptyString(pubKey)) && (checkFileExists(pubKey)) ) + info->setProp("publickey", pubKey); + if (!isEmptyString(passPhrase)) + info->setProp("passphrase", passPhrase); // encrypted + + Owned entry = createSyncedPropertyTree(info); + mtlsInfoCache.emplace("local", entry); } } } diff --git a/system/jlib/jsecrets.hpp b/system/jlib/jsecrets.hpp index 39dddeeb3d2..9601deb4d7d 100644 --- a/system/jlib/jsecrets.hpp +++ b/system/jlib/jsecrets.hpp @@ -22,22 +22,17 @@ #include "jlib.hpp" #include "jstring.hpp" -interface ISecret : extends IInterface -{ - virtual const IPropertyTree * getTree() const = 0; - virtual bool getKeyValue(MemoryBuffer & result, const char * key) const = 0; - virtual bool getKeyValue(StringBuffer & result, const char * key) const = 0; - virtual bool isStale() const = 0; - //Return a sequence number which changes whenever the secret actually changes - so that a caller can determine - //whether it needs to reload the certificates. - virtual unsigned getVersion() const = 0; -}; +interface ISyncedPropertyTree; extern jlib_decl void setSecretMount(const char * path); extern jlib_decl void setSecretTimeout(unsigned timeoutMs); +//Return the current (cached) value of a secret. If the secret is not defined, return nullptr. 
extern jlib_decl IPropertyTree *getSecret(const char *category, const char * name, const char * optVaultId = nullptr, const char * optVersion = nullptr); -extern jlib_decl ISecret * resolveSecret(const char *category, const char * name, const char * optRequiredVault, const char* optVersion); +// resolveSecret() always returns an object, which will potentially be updated behind the scenes. If no secret is originally +// defined, but it then configured in a vault or Kubernetes secret, it will be bicked up when the cache entry is +// refreshed - allowing missing configuration to be updated for a live system. +extern jlib_decl ISyncedPropertyTree * resolveSecret(const char *category, const char * name, const char * optRequiredVault, const char* optVersion); extern jlib_decl bool getSecretKeyValue(MemoryBuffer & result, const IPropertyTree *secret, const char * key); extern jlib_decl bool getSecretKeyValue(StringBuffer & result, const IPropertyTree *secret, const char * key); @@ -48,11 +43,14 @@ extern jlib_decl const MemoryAttr &getSecretUdpKey(bool required); extern jlib_decl bool containsEmbeddedKey(const char *certificate); -//getIssuerTlsServerConfig must return owned because the internal cache could be updated internally and the return will become invalid, so must be linked -extern jlib_decl IPropertyTree *getIssuerTlsServerConfig(const char *issuer); -extern jlib_decl IPropertyTree *getIssuerTlsServerConfigWithTrustedPeers(const char *issuer, const char *trusted_peers); +//getIssuerTlsConfig must return owned because the internal cache could be updated internally and the return will become invalid, so must be linked +extern jlib_decl const ISyncedPropertyTree * getIssuerTlsSyncedConfig(const char * issuer, const char * optTrustedPeers, bool disableMTLS); +inline const ISyncedPropertyTree * getIssuerTlsSyncedConfig(const char * issuer) { return getIssuerTlsSyncedConfig(issuer, nullptr, false); } -extern jlib_decl IPropertyTree *createIssuerTlsClientConfig(const 
char *issuer, bool acceptSelfSigned, bool addCACert=true); +extern jlib_decl bool hasIssuerTlsConfig(const char *issuer); + +extern jlib_decl ISyncedPropertyTree * createIssuerTlsConfig(const char * issuer, const char * optTrustedPeers, bool isClientConnection, bool acceptSelfSigned, bool addCACert, bool disableMTLS); +extern jlib_decl ISyncedPropertyTree * createStorageTlsConfig(const char * secretName, bool addCACert); extern jlib_decl void splitFullUrl(const char *url, bool &https, StringBuffer &user, StringBuffer &password, StringBuffer &host, StringBuffer &port, StringBuffer &fullpath); extern jlib_decl void splitUrlSchemeHostPort(const char *url, StringBuffer &user, StringBuffer &password, StringBuffer &schemeHostPort, StringBuffer &path); diff --git a/system/jlib/jsmartsock.cpp b/system/jlib/jsmartsock.cpp index f7f0b4190dd..e8644827297 100644 --- a/system/jlib/jsmartsock.cpp +++ b/system/jlib/jsmartsock.cpp @@ -218,7 +218,7 @@ CSmartSocketFactory::CSmartSocketFactory(IPropertyTree &service, bool _retry, un tlsService = service.getPropBool("@tls"); issuer.set(service.queryProp("@issuer")); if (tlsService) - tlsConfig.setown(createIssuerTlsClientConfig(issuer, service.getPropBool("@selfSigned"), service.getPropBool("@caCert"))); + tlsConfig.setown(createIssuerTlsConfig(issuer, nullptr, true, service.getPropBool("@selfSigned"), service.getPropBool("@caCert"), false)); StringBuffer s; s.append(name).append(':').append(port); diff --git a/system/jlib/jsmartsock.hpp b/system/jlib/jsmartsock.hpp index 7e5e21e4c3a..e5139364ef2 100644 --- a/system/jlib/jsmartsock.hpp +++ b/system/jlib/jsmartsock.hpp @@ -37,7 +37,7 @@ interface jlib_decl ISmartSocket : extends IInterface virtual void close() = 0; }; - +interface ISyncedPropertyTree; interface jlib_decl ISmartSocketFactory : extends IInterface { virtual int run()=0; @@ -59,7 +59,7 @@ interface jlib_decl ISmartSocketFactory : extends IInterface virtual StringBuffer & getUrlStr(StringBuffer &str, bool useHostName) = 0; 
virtual bool isTlsService() const = 0; - virtual const IPropertyTree *queryTlsConfig() const = 0; + virtual const ISyncedPropertyTree *queryTlsConfig() const = 0; }; diff --git a/system/jlib/jsmartsock.ipp b/system/jlib/jsmartsock.ipp index e89c7850d1c..0aa065f95f1 100644 --- a/system/jlib/jsmartsock.ipp +++ b/system/jlib/jsmartsock.ipp @@ -68,7 +68,7 @@ protected: unsigned nextEndpointIndex; bool retry; bool tlsService = false; - Owned tlsConfig; + Owned tlsConfig; StringAttr issuer; unsigned retryInterval; @@ -104,7 +104,7 @@ public: virtual StringBuffer & getUrlStr(StringBuffer &str, bool useHostName); virtual bool isTlsService() const override { return tlsService; } - virtual const IPropertyTree *queryTlsConfig() const { return tlsConfig; }; + virtual const ISyncedPropertyTree *queryTlsConfig() const { return tlsConfig; }; const char *queryTlsIssuer() const { return issuer.str(); } }; diff --git a/system/security/securesocket/securesocket.cpp b/system/security/securesocket/securesocket.cpp index 2b416916d7d..7c61a6c9a5f 100644 --- a/system/security/securesocket/securesocket.cpp +++ b/system/security/securesocket/securesocket.cpp @@ -97,6 +97,13 @@ static void readBio(BIO* bio, StringBuffer& buf) } } + +interface ISecureSocketContextCallback : implements IInterface +{ + virtual unsigned getVersion() = 0; // Check the version of the context to see if the SSL context needs to be recreated + virtual SSL * createActiveSSL() = 0; // Must be called after getVersion() +}; + //Use a namespace to prevent clashes with a class of the same name in jhtree namespace securesocket { @@ -137,6 +144,7 @@ class CSecureSocket : implements ISecureSocket, public CInterface { private: SSL* m_ssl; + Linked contextCallback; Owned m_socket; bool m_verify; bool m_address_match; @@ -147,6 +155,7 @@ class CSecureSocket : implements ISecureSocket, public CInterface size32_t nextblocksize = 0; unsigned blockflags = BF_ASYNC_TRANSFER; unsigned blocktimeoutms = WAIT_FOREVER; + unsigned 
contextVersion; #ifdef USERECVSEM static Semaphore receiveblocksem; bool receiveblocksemowned; // owned by this socket @@ -158,8 +167,7 @@ class CSecureSocket : implements ISecureSocket, public CInterface public: IMPLEMENT_IINTERFACE; - CSecureSocket(ISocket* sock, SSL_CTX* ctx, bool verify = false, bool addres_match = false, CStringSet* m_peers = NULL, int loglevel=SSLogNormal, const char *fqdn = nullptr); - CSecureSocket(int sockfd, SSL_CTX* ctx, bool verify = false, bool addres_match = false, CStringSet* m_peers = NULL, int loglevel=SSLogNormal, const char *fqdn = nullptr); + CSecureSocket(ISocket* sock, int sockfd, ISecureSocketContextCallback * callback, bool verify = false, bool addres_match = false, CStringSet* m_peers = NULL, int loglevel=SSLogNormal, const char *fqdn = nullptr); ~CSecureSocket(); virtual int secure_accept(int logLevel); @@ -173,6 +181,22 @@ class CSecureSocket : implements ISecureSocket, public CInterface virtual size32_t writetms(void const* buf, size32_t size, unsigned timeoutms=WAIT_FOREVER); void readTimeout(void* buf, size32_t min_size, size32_t max_size, size32_t &size_read, unsigned timeout, bool useSeconds); + void checkForUpdatedContext() + { + //Check if a new ssl context should be created. + //No need for a critical section because the socket functions are never accessed by multiple threads at the same time + //It is possible that createActiveSSL() may be for a later version - but that will only mean that the same context + //is recreated when the version number is seen to have changed. 
+ unsigned activeVersion = contextCallback->getVersion(); + if (activeVersion != contextVersion) + { + DBGLOG("CSecureSocket: Updating secure socket context from version %u to %u", contextVersion, activeVersion); + contextVersion = activeVersion; + SSL_free(m_ssl); + m_ssl = contextCallback->createActiveSSL(); + } + } + virtual StringBuffer& get_ssl_version(StringBuffer& ver) { @@ -467,38 +491,14 @@ Semaphore CSecureSocket::receiveblocksem(2); /************************************************************************** * CSecureSocket -- secure socket layer implementation using openssl * **************************************************************************/ -CSecureSocket::CSecureSocket(ISocket* sock, SSL_CTX* ctx, bool verify, bool address_match, CStringSet* peers, int loglevel, const char *fqdn) +CSecureSocket::CSecureSocket(ISocket* sock, int sockfd, ISecureSocketContextCallback * callback, bool verify, bool address_match, CStringSet* peers, int loglevel, const char *fqdn) + : contextCallback(callback) { + if (sock) + sockfd = sock->OShandle(); m_socket.setown(sock); - m_ssl = SSL_new(ctx); - - m_verify = verify; - m_address_match = address_match; - m_peers = peers;; - m_loglevel = loglevel; - m_isSecure = false; - - if(m_ssl == NULL) - { - throw MakeStringException(-1, "Can't create ssl"); - } - - // there is no MSG_NOSIGNAL or SO_NOSIGPIPE for SSL_write() ... 
-#ifndef _WIN32 - signal(SIGPIPE, SIG_IGN); -#endif - - SSL_set_fd(m_ssl, sock->OShandle()); - - if (fqdn) - m_fqdn.set(fqdn); -} - -CSecureSocket::CSecureSocket(int sockfd, SSL_CTX* ctx, bool verify, bool address_match, CStringSet* peers, int loglevel, const char *fqdn) -{ - //m_socket.setown(sock); - //m_socket.setown(ISocket::attach(sockfd)); - m_ssl = SSL_new(ctx); + contextVersion = callback->getVersion(); + m_ssl = callback->createActiveSSL(); m_verify = verify; m_address_match = address_match; @@ -652,6 +652,7 @@ bool CSecureSocket::verify_cert(X509* cert) int CSecureSocket::secure_accept(int logLevel) { + checkForUpdatedContext(); int err; err = SSL_accept(m_ssl); if(err == 0) @@ -1268,9 +1269,10 @@ static bool setVerifyCertsPEMBuffer(SSL_CTX *ctx, const char *caCertBuf, int caC return true; } -class CSecureSocketContext : public CInterfaceOf +class CSecureSocketContext : implements ISecureSocketContext, implements ISecureSocketContextCallback, public CInterface { private: + SecureSocketType sockettype; OwnedSSLCTX m_ctx; #if (OPENSSL_VERSION_NUMBER > 0x00909000L) const SSL_METHOD* m_meth = nullptr; @@ -1282,6 +1284,9 @@ class CSecureSocketContext : public CInterfaceOf bool m_address_match = false; Owned m_peers; StringAttr password; + CriticalSection cs; + Owned syncedConfig; + unsigned configVersion = 0; void setSessionIdContext() { @@ -1352,129 +1357,158 @@ class CSecureSocketContext : public CInterfaceOf throw makeStringExceptionV(-1, "Error loading CA certificates from %s", caCertsPathOrBuf); } -public: - CSecureSocketContext(SecureSocketType sockettype) - { - initContext(sockettype); - - SSL_CTX_set_mode(m_ctx, SSL_CTX_get_mode(m_ctx) | SSL_MODE_AUTO_RETRY); - } - - CSecureSocketContext(const char* certFileOrBuf, const char* privKeyFileOrBuf, const char* passphrase, SecureSocketType sockettype) - { - initContext(sockettype); - - // MCK TODO: should we set a default cipherList, as is done in other ctor (below) ? 
- - password.set(passphrase); - SSL_CTX_set_default_passwd_cb_userdata(m_ctx, (void*)password.str()); - SSL_CTX_set_default_passwd_cb(m_ctx, pem_passwd_cb); - - setCertificate(certFileOrBuf); - setPrivateKey(privKeyFileOrBuf); - - SSL_CTX_set_mode(m_ctx, SSL_CTX_get_mode(m_ctx) | SSL_MODE_AUTO_RETRY); - } - - CSecureSocketContext(const IPropertyTree* config, SecureSocketType sockettype) + void createNewContext(const IPropertyTree* config) { - assertex(config); - initContext(sockettype); - const char *cipherList = config->queryProp("cipherList"); - if (!cipherList || !*cipherList) - cipherList = "ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:!aNULL:!MD5"; - SSL_CTX_set_cipher_list(m_ctx, cipherList); - - const char* passphrase = config->queryProp("passphrase"); - if (passphrase && *passphrase) + if (config) { - StringBuffer pwd; - decrypt(pwd, passphrase); - password.set(pwd); - SSL_CTX_set_default_passwd_cb_userdata(m_ctx, (void*)password.str()); - SSL_CTX_set_default_passwd_cb(m_ctx, pem_passwd_cb); - } + const char *cipherList = config->queryProp("cipherList"); + if (!cipherList || !*cipherList) + cipherList = "ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:!aNULL:!MD5"; + SSL_CTX_set_cipher_list(m_ctx, cipherList); - const char *certFileOrBuf = config->queryProp("certificate_pem"); - if (!certFileOrBuf) - certFileOrBuf = config->queryProp("certificate"); - if (certFileOrBuf && *certFileOrBuf) - setCertificate(certFileOrBuf); + const char* passphrase = config->queryProp("passphrase"); + if (passphrase && *passphrase) + { + StringBuffer pwd; + decrypt(pwd, passphrase); + password.set(pwd); + SSL_CTX_set_default_passwd_cb_userdata(m_ctx, (void*)password.str()); + SSL_CTX_set_default_passwd_cb(m_ctx, pem_passwd_cb); + } - const char *privKeyFileOrBuf = config->queryProp("privatekey_pem"); - if (!privKeyFileOrBuf) - privKeyFileOrBuf = 
config->queryProp("privatekey"); - if (privKeyFileOrBuf && *privKeyFileOrBuf) - setPrivateKey(privKeyFileOrBuf); + const char *certFileOrBuf = config->queryProp("certificate_pem"); + if (!certFileOrBuf) + certFileOrBuf = config->queryProp("certificate"); + if (certFileOrBuf && *certFileOrBuf) + setCertificate(certFileOrBuf); - SSL_CTX_set_mode(m_ctx, SSL_CTX_get_mode(m_ctx) | SSL_MODE_AUTO_RETRY); + const char *privKeyFileOrBuf = config->queryProp("privatekey_pem"); + if (!privKeyFileOrBuf) + privKeyFileOrBuf = config->queryProp("privatekey"); + if (privKeyFileOrBuf && *privKeyFileOrBuf) + setPrivateKey(privKeyFileOrBuf); - m_verify = config->getPropBool("verify/@enable"); - m_address_match = config->getPropBool("verify/@address_match"); + m_verify = config->getPropBool("verify/@enable"); + m_address_match = config->getPropBool("verify/@address_match"); - if(m_verify) - { - const char *caCertPathOrBuf = config->queryProp("verify/ca_certificates/pem"); - if (!caCertPathOrBuf) - caCertPathOrBuf = config->queryProp("verify/ca_certificates/@path"); - if (caCertPathOrBuf && *caCertPathOrBuf) - setVerifyCerts(caCertPathOrBuf); - - bool acceptSelfSigned = config->getPropBool("verify/@accept_selfsigned"); - SSL_CTX_set_verify(m_ctx, SSL_VERIFY_PEER | SSL_VERIFY_FAIL_IF_NO_PEER_CERT | SSL_VERIFY_CLIENT_ONCE, (acceptSelfSigned) ? 
verify_callback_allow_selfSigned : verify_callback_reject_selfSigned); - - m_peers.setown(new CStringSet()); - const char* peersstr = config->queryProp("verify/trusted_peers"); - while(peersstr && *peersstr) + if(m_verify) { - StringBuffer onepeerbuf; - peersstr = strtok__(peersstr, "|", onepeerbuf); - if(onepeerbuf.length() == 0) - break; - - char* onepeer = onepeerbuf.detach(); - if (isdigit(*onepeer)) + const char *caCertPathOrBuf = config->queryProp("verify/ca_certificates/pem"); + if (!caCertPathOrBuf) + caCertPathOrBuf = config->queryProp("verify/ca_certificates/@path"); + if (caCertPathOrBuf && *caCertPathOrBuf) + setVerifyCerts(caCertPathOrBuf); + + bool acceptSelfSigned = config->getPropBool("verify/@accept_selfsigned"); + SSL_CTX_set_verify(m_ctx, SSL_VERIFY_PEER | SSL_VERIFY_FAIL_IF_NO_PEER_CERT | SSL_VERIFY_CLIENT_ONCE, (acceptSelfSigned) ? verify_callback_allow_selfSigned : verify_callback_reject_selfSigned); + + m_peers.setown(new CStringSet()); + const char* peersstr = config->queryProp("verify/trusted_peers"); + while(peersstr && *peersstr) { - char *dash = strrchr(onepeer, '-'); - if (dash) + StringBuffer onepeerbuf; + peersstr = strtok__(peersstr, "|", onepeerbuf); + if(onepeerbuf.length() == 0) + break; + + char* onepeer = onepeerbuf.detach(); + if (isdigit(*onepeer)) { - *dash = 0; - int last = atoi(dash+1); - char *dot = strrchr(onepeer, '.'); - *dot = 0; - int first = atoi(dot+1); - for (int i = first; i <= last; i++) + char *dash = strrchr(onepeer, '-'); + if (dash) + { + *dash = 0; + int last = atoi(dash+1); + char *dot = strrchr(onepeer, '.'); + *dot = 0; + int first = atoi(dot+1); + for (int i = first; i <= last; i++) + { + StringBuffer t; + t.append(onepeer).append('.').append(i); + m_peers->add(t.str()); + } + } + else { - StringBuffer t; - t.append(onepeer).append('.').append(i); - m_peers->add(t.str()); + m_peers->add(onepeer); } } else { m_peers->add(onepeer); } + free(onepeer); } - else - { - m_peers->add(onepeer); - } - 
free(onepeer); } } + + SSL_CTX_set_mode(m_ctx, SSL_CTX_get_mode(m_ctx) | SSL_MODE_AUTO_RETRY); + } + + void checkForUpdatedContext() + { + //Check if a new context should be created - it must be called within a critical section + //NOTE: The openssl ctx is reference counted internally, so any existing sockets will still be valid. + if (syncedConfig) + { + unsigned activeVersion = syncedConfig->getVersion(); + if (activeVersion != configVersion) + { + DBGLOG("CSecureSocketContext: Updating secure socket context from version %u to %u", configVersion, activeVersion); + configVersion = activeVersion; + Owned config = syncedConfig->getTree(); + createNewContext(config); + } + } + } + +public: + IMPLEMENT_IINTERFACE + + CSecureSocketContext(const IPropertyTree* config, SecureSocketType _sockettype) : sockettype(_sockettype) + { + createNewContext(config); + } + + CSecureSocketContext(const ISyncedPropertyTree* _syncedConfig, SecureSocketType _sockettype) : syncedConfig(_syncedConfig), sockettype(_sockettype) + { + Owned config; + if (syncedConfig) + { + configVersion = syncedConfig->getVersion(); + config.setown(syncedConfig->getTree()); + } + createNewContext(config); } +//interface ISecureSocketContext ISecureSocket* createSecureSocket(ISocket* sock, int loglevel, const char *fqdn) { - return new CSecureSocket(sock, m_ctx, m_verify, m_address_match, m_peers, loglevel, fqdn); + return new CSecureSocket(sock, 0, this, m_verify, m_address_match, m_peers, loglevel, fqdn); } ISecureSocket* createSecureSocket(int sockfd, int loglevel, const char *fqdn) { - return new CSecureSocket(sockfd, m_ctx, m_verify, m_address_match, m_peers, loglevel, fqdn); + return new CSecureSocket(nullptr, sockfd, this, m_verify, m_address_match, m_peers, loglevel, fqdn); + } + +//interface ISecureSocketContextCallback + virtual unsigned getVersion() + { + CriticalBlock block(cs); + checkForUpdatedContext(); + return configVersion; } + virtual SSL * createActiveSSL() + { + //If this function is 
called it is either a new socket or getVersion() has been called to check it is up to date + CriticalBlock block(cs); + return SSL_new(m_ctx); + } + }; class CRsaCertificate : implements ICertificate, public CInterface @@ -1961,51 +1995,61 @@ extern "C" { SECURESOCKET_API ISecureSocketContext* createSecureSocketContext(SecureSocketType sockettype) { - return new securesocket::CSecureSocketContext(sockettype); + return new securesocket::CSecureSocketContext((ISyncedPropertyTree *)nullptr, sockettype); } -SECURESOCKET_API ISecureSocketContext* createSecureSocketContextEx(const char* certFileOrBuf, const char* privKeyFileOrBuf, const char* passphrase, SecureSocketType sockettype) +SECURESOCKET_API ISecureSocketContext* createSecureSocketContextSynced(const ISyncedPropertyTree * config, SecureSocketType sockettype) { - return new securesocket::CSecureSocketContext(certFileOrBuf, privKeyFileOrBuf, passphrase, sockettype); + return new securesocket::CSecureSocketContext(config, sockettype); } -SECURESOCKET_API ISecureSocketContext* createSecureSocketContextEx2(const IPropertyTree* config, SecureSocketType sockettype) + +SECURESOCKET_API ISecureSocketContext* createSecureSocketContextSecret(const char *issuer, SecureSocketType sockettype) { - if (config == NULL) - return createSecureSocketContext(sockettype); + Owned info = getIssuerTlsSyncedConfig(issuer); + return createSecureSocketContextSynced(info, sockettype); +} - return new securesocket::CSecureSocketContext(config, sockettype); -} -SECURESOCKET_API ISecureSocketContext* createSecureSocketContextSSF(ISmartSocketFactory* ssf) +SECURESOCKET_API ISecureSocketContext* createSecureSocketContextSecretSrv(const char *issuer, bool requireMtlsFlag) { - if (ssf == nullptr || !ssf->queryTlsConfig()) - return createSecureSocketContext(ClientSocket); + if (requireMtlsFlag && !queryMtls()) + throw makeStringException(-100, "TLS secure communication requested but not configured"); - return new 
securesocket::CSecureSocketContext(ssf->queryTlsConfig(), ClientSocket); + Owned info = getIssuerTlsSyncedConfig(issuer); + if (!info->isValid()) + throw makeStringException(-101, "TLS secure communication requested but not configured (2)"); + + return createSecureSocketContextSynced(info, ServerSocket); } -SECURESOCKET_API ISecureSocketContext* createSecureSocketContextSecret(const char *issuer, SecureSocketType sockettype) +IPropertyTree * createSecureSocketConfig(const char* certFileOrBuf, const char* privKeyFileOrBuf, const char* passphrase) { - Owned info = getIssuerTlsServerConfig(issuer); - //if the secret doesn't exist doesn't exist just go on without it. IF it is required the tls connection will fail. - //This is primarily for client side... server side would probably use the explict ptree config or explict cert param at least for now. - if (info) - return createSecureSocketContextEx2(info, sockettype); - else - return createSecureSocketContext(sockettype); + if (!certFileOrBuf && !privKeyFileOrBuf && !passphrase) + return nullptr; + + Owned config = createPTree("ssl"); + if (certFileOrBuf) + config->setProp("certificate", certFileOrBuf); + if (privKeyFileOrBuf) + config->setProp("privatekey", privKeyFileOrBuf); + if (passphrase) + config->setProp("passphrase", passphrase); + return config.getClear(); } -SECURESOCKET_API ISecureSocketContext* createSecureSocketContextSecretSrv(const char *issuer, bool requireMtlsFlag) +SECURESOCKET_API ISecureSocketContext* createSecureSocketContextSSF(ISmartSocketFactory* ssf) { - if (requireMtlsFlag && !queryMtls()) - throw makeStringException(-100, "TLS secure communication requested but not configured"); + if (ssf == nullptr) + return createSecureSocketContext(ClientSocket); - Owned info = getIssuerTlsServerConfig(issuer); - if (!info) - throw makeStringException(-101, "TLS secure communication requested but not configured (2)"); + return createSecureSocketContextSynced(ssf->queryTlsConfig(), ClientSocket); +} - return 
createSecureSocketContextEx2(info, ServerSocket); +//Legacy factory interfaces +SECURESOCKET_API ISecureSocketContext* createSecureSocketContextEx2(const IPropertyTree* config, SecureSocketType sockettype) +{ + return new securesocket::CSecureSocketContext(config, sockettype); } SECURESOCKET_API ICertificate *createCertificate() @@ -2123,7 +2167,7 @@ class CSecureSmartSocketFactory : public CSmartSocketFactory CSecureSmartSocketFactory(IPropertyTree &service, bool _retry, unsigned _retryInterval, unsigned _dnsInterval) : CSmartSocketFactory(service, _retry, _retryInterval, _dnsInterval) { - secureContext.setown(createSecureSocketContextEx2(queryTlsConfig(), ClientSocket)); + secureContext.setown(createSecureSocketContextSynced(queryTlsConfig(), ClientSocket)); } virtual ISmartSocket *connect_timeout(unsigned timeoutms) override diff --git a/system/security/securesocket/securesocket.hpp b/system/security/securesocket/securesocket.hpp index 2bf1758d92e..cc790676f7d 100644 --- a/system/security/securesocket/securesocket.hpp +++ b/system/security/securesocket/securesocket.hpp @@ -87,12 +87,18 @@ typedef ISecureSocketContext* (*createSecureSocketContextSecret_t)(const char *m extern "C" { -SECURESOCKET_API ISecureSocketContext* createSecureSocketContext(SecureSocketType); -SECURESOCKET_API ISecureSocketContext* createSecureSocketContextEx(const char* certFileOrBuf, const char* privKeyFileOrBuf, const char* passphrase, SecureSocketType); -SECURESOCKET_API ISecureSocketContext* createSecureSocketContextEx2(const IPropertyTree* config, SecureSocketType); -SECURESOCKET_API ISecureSocketContext* createSecureSocketContextSSF(ISmartSocketFactory* ssf); +//The following allow the creation of a secure socket context where the certificates will automatically be updated when they expire. 
+SECURESOCKET_API ISecureSocketContext* createSecureSocketContextSynced(const ISyncedPropertyTree * config, SecureSocketType sockettype); // Will become the primary (only) factory method SECURESOCKET_API ISecureSocketContext* createSecureSocketContextSecret(const char *mtlsSecretName, SecureSocketType); SECURESOCKET_API ISecureSocketContext* createSecureSocketContextSecretSrv(const char *mtlsSecretName, bool requireMtlsConfig); +SECURESOCKET_API ISecureSocketContext* createSecureSocketContextSSF(ISmartSocketFactory* ssf); + +//Helper function to aid migration to the functions above. This should eventually be removed. +SECURESOCKET_API IPropertyTree * createSecureSocketConfig(const char* certFileOrBuf, const char* privKeyFileOrBuf, const char* passphrase); + +//Legacy factory methods - should be phased out. +SECURESOCKET_API ISecureSocketContext* createSecureSocketContext(SecureSocketType); +SECURESOCKET_API ISecureSocketContext* createSecureSocketContextEx2(const IPropertyTree* config, SecureSocketType); SECURESOCKET_API ICertificate *createCertificate(); SECURESOCKET_API int signCertificate(const char* csr, const char* ca_certificate, const char* ca_privkey, const char* ca_passphrase, int days, StringBuffer& certificate); }; From 5f49b7f126327be6103cbdfb6ff7fbdce6478fb6 Mon Sep 17 00:00:00 2001 From: Gordon Smith Date: Thu, 9 Nov 2023 23:05:23 +0100 Subject: [PATCH 3/9] HPCC-30710 Refactor Smoketest GH Action Signed-off-by: Gordon Smith WIP WIP WIP --- .github/workflows/build-containers-pr.yml | 141 ----- .../build-containers-target-branch.yml | 112 ---- .github/workflows/build-docker.yml | 4 +- .github/workflows/build-gh_runner.yml | 12 +- .github/workflows/build-test-eclwatch.yml | 7 - .github/workflows/build-vcpkg.yml | 82 +-- .github/workflows/smoketest.yml | 592 ------------------ .github/workflows/test-smoke-gh_runner.yml | 10 +- .github/workflows/test-ui-gh_runner.yml | 44 +- .github/workflows/test-unit-gh_runner.yml | 35 +- cmake_modules/options.cmake | 2 
+- dockerfiles/vcpkg/build.sh | 6 +- dockerfiles/vcpkg/centos-7.dockerfile | 24 - dockerfiles/vcpkg/centos-8.dockerfile | 16 - dockerfiles/vcpkg/ubuntu-20.04.dockerfile | 19 - dockerfiles/vcpkg/ubuntu-22.04.dockerfile | 21 - dockerfiles/vcpkg/ubuntu-23.10.dockerfile | 21 - vcpkg | 2 +- 18 files changed, 67 insertions(+), 1083 deletions(-) delete mode 100644 .github/workflows/build-containers-pr.yml delete mode 100644 .github/workflows/build-containers-target-branch.yml delete mode 100644 .github/workflows/smoketest.yml delete mode 100644 dockerfiles/vcpkg/centos-7.dockerfile delete mode 100644 dockerfiles/vcpkg/centos-8.dockerfile delete mode 100644 dockerfiles/vcpkg/ubuntu-20.04.dockerfile delete mode 100644 dockerfiles/vcpkg/ubuntu-22.04.dockerfile delete mode 100644 dockerfiles/vcpkg/ubuntu-23.10.dockerfile diff --git a/.github/workflows/build-containers-pr.yml b/.github/workflows/build-containers-pr.yml deleted file mode 100644 index c5cca9badd6..00000000000 --- a/.github/workflows/build-containers-pr.yml +++ /dev/null @@ -1,141 +0,0 @@ -name: Docker smoketest build -on: - pull_request: - branches: - - "master" - - "candidate-*.x" - - "!candidate-8.2.*" - - "!candidate-8.0.*" - - "!candidate-7.12.*" - - "!candidate-7.10.*" - - "!candidate-7.8.*" - - "!candidate-7.6.*" - - "!candidate-7.4.*" - - "!candidate-7.2.*" - - "!candidate-7.0.*" - - "!candidate-6.*" - -jobs: - check-skip: - # continue-on-error: true # Uncomment once integration is finished - runs-on: ubuntu-20.04 - # Map a step output to a job output - outputs: - changed: ${{ steps.skip_check.outputs.dockerfiles || steps.skip_check.outputs.platform }} - steps: - - id: skip_check - uses: hpcc-systems/github-actions/changed-modules@main - with: - github_token: ${{ github.token }} - - build-images: - needs: check-skip - if: ${{ needs.check-skip.outputs.changed }} - runs-on: ubuntu-20.04 - steps: - - name: vars - id: vars - run: | - # echo ::set-output name=container_registry::ghcr.io - # echo ::set-output 
name=cr_user::${{ github.repository_owner }} - echo ::set-output name=container_registry::docker.io - echo ::set-output name=cr_user::hpccbuilds - echo ::set-output name=build_prbase_sha::${{ github.event.pull_request.base.sha }} - echo ::set-output name=build_prbase_label::${{ github.base_ref }} - echo ::set-output name=build_user::${{ github.actor }} - echo ::set-output name=build_type::RelWithDebInfo - echo ::set-output name=use_cppunit::1 - echo ::set-output name=platform_build::smoketest-platform-build-vcpkg - - - name: tracing - run: | - echo "Base ref = ${{ github.ref }}" - echo "Action = ${{ github.action }}" - echo "Event = ${{ github.event_name }}" - echo "Actor = ${{ github.actor }}" - echo "Ref = ${{ github.ref }}" - echo "base sha = ${{ github.event.pull_request.base.sha }}" - echo "Sha = ${{ github.sha }}" - echo "github.repository = ${{ github.repository }}" - echo "repository_owner = ${{ github.repository_owner }}" - echo "github.workspace = ${{ github.workspace }}" - echo "runner.workspace = ${{ runner.workspace }}" - echo "github.event.pull_request.head.repo.owner.login = ${{ github.event.pull_request.head.repo.owner.login }}" - echo "build_prbase_label = ${{ steps.vars.outputs.build_prbase_label }}" - echo "platform_build = ${{ steps.vars.outputs.platform_build }}" - - - name: Checkout PR - uses: actions/checkout@v3 - - - name: Calculate vcpkg vars - id: vcpkg_vars - run: | - echo "base_ver=$(git submodule status vcpkg | cut -c2-9)" >> $GITHUB_OUTPUT - - - name: tracing (vcpkg_vars) - run: | - echo "vcpkg_vars.base_ver = ${{ steps.vcpkg_vars.outputs.base_ver }}" - - - name: Set up Docker Buildx - id: buildx - uses: docker/setup-buildx-action@v1 - with: - driver: docker - - - name: Check if PR-Base prebuilt - id: check-images - run: | - prbase_missing=$(docker manifest inspect ${{ steps.vars.outputs.container_registry }}/${{ steps.vars.outputs.cr_user }}/${{ steps.vars.outputs.platform_build }}:${{ steps.vars.outputs.build_prbase_label }}-${{ 
github.event.pull_request.base.sha }} > /dev/null ; echo $?) - echo prbase_missing=${prbase_missing} - if [[ "${prbase_missing}" -eq 1 ]] - then - echo "Current PR target branch image cannot be found, using latest" - prbase_missing=$(docker manifest inspect ${{ steps.vars.outputs.container_registry }}/${{ steps.vars.outputs.cr_user }}/${{ steps.vars.outputs.platform_build }}:${{ steps.vars.outputs.build_prbase_label }}-latest > /dev/null ; echo $?) - if [[ "${prbase_missing}" -eq 1 ]] - then - echo "Cannot find the 'latest' target branch image" - echo ::set-output name=prbase_missing::${prbase_missing} - fi - echo ::set-output name=platform_prbase_ver::${{ steps.vars.outputs.build_prbase_label }}-latest - else - echo ::set-output name=platform_prbase_ver::${{ steps.vars.outputs.build_prbase_label }}-${{ github.event.pull_request.base.sha }} - fi - - # Normal expectation is that the following step would normally be skipped - # NB: This is only for the case where the target branch image doesn't already exist. - # The build-containers-target-branch.yml action would normally have built/published this image, - # or they'll be a "latest" which will have been picked up by the check-images step. - - name: branch image - if: ${{ steps.check-images.outputs.prbase_missing == '1' }} - uses: docker/build-push-action@v2 - with: - context: . 
- file: ./dockerfiles/platform-build/Dockerfile - builder: ${{ steps.buildx.outputs.name }} - tags: | - ${{ steps.vars.outputs.container_registry }}/${{ steps.vars.outputs.cr_user }}/${{ steps.vars.outputs.platform_build }}:${{ steps.vars.outputs.build_prbase_label }}-${{ steps.vars.outputs.build_prbase_sha }} - ${{ steps.vars.outputs.container_registry }}/${{ steps.vars.outputs.cr_user }}/${{ steps.vars.outputs.platform_build }}:${{ steps.vars.outputs.build_prbase_label }}-latest - build-args: | - CR_REPO=${{ steps.vars.outputs.container_registry }} - BASE_VER=${{ steps.vcpkg_vars.outputs.base_ver }} - BUILD_USER=${{ github.repository_owner }} - GITHUB_ACTOR=${{ github.actor }} - BUILD_TAG=${{ steps.vars.outputs.build_prbase_sha }} - BUILD_TYPE=${{ steps.vars.outputs.build_type }} - GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }} - USE_CPPUNIT=${{ steps.vars.outputs.use_cppunit }} - BUILD_THREADS=${{ steps.vars.outputs.build_threads }} - - - name: PR image - uses: docker/build-push-action@v2 - with: - context: . 
- file: ./dockerfiles/platform-build-incremental-container/Dockerfile - builder: ${{ steps.buildx.outputs.name }} - build-args: | - CR_USER=${{ steps.vars.outputs.cr_user }} - CR_REPO=${{ steps.vars.outputs.container_registry }} - PLATFORM_PRBASE_VER=${{ steps.check-images.outputs.platform_prbase_ver }} - GITHUB_REPO=${{ github.repository }} - GITHUB_PRREF=${{ github.ref }} - BUILD_THREADS=${{ steps.vars.outputs.build_threads }} diff --git a/.github/workflows/build-containers-target-branch.yml b/.github/workflows/build-containers-target-branch.yml deleted file mode 100644 index dafe221a23e..00000000000 --- a/.github/workflows/build-containers-target-branch.yml +++ /dev/null @@ -1,112 +0,0 @@ -name: Docker target branch build -on: - push: - branches: - - "master" - - "candidate-*.x" - - "!candidate-8.2.*" - - "!candidate-8.0.*" - - "!candidate-7.12.*" - - "!candidate-7.10.*" - - "!candidate-7.8.*" - - "!candidate-7.6.*" - - "!candidate-7.4.*" - - "!candidate-7.2.*" - - "!candidate-7.0.*" - - "!candidate-6.*" - -jobs: - check-skip: - # continue-on-error: true # Uncomment once integration is finished - runs-on: ubuntu-20.04 - # Map a step output to a job output - outputs: - changed: ${{ steps.skip_check.outputs.dockerfiles || steps.skip_check.outputs.platform }} - steps: - - id: skip_check - uses: hpcc-systems/github-actions/changed-modules@main - with: - github_token: ${{ github.token }} - - build-images: - needs: check-skip - if: ${{ needs.check-skip.outputs.changed }} - runs-on: ubuntu-20.04 - steps: - - name: vars - id: vars - run: | - # echo ::set-output name=container_registry::ghcr.io - # echo ::set-output name=cr_user::${{ github.repository_owner }} - echo ::set-output name=container_registry::docker.io - echo ::set-output name=cr_user::hpccbuilds - echo ::set-output name=build_base_sha::${{ github.sha }} - echo ::set-output name=build_base_label::${GITHUB_REF##*/} - echo ::set-output name=build_user::${{ github.actor }} - echo ::set-output 
name=build_type::RelWithDebInfo - echo ::set-output name=use_cppunit::1 - echo ::set-output name=platform_build::smoketest-platform-build-vcpkg - - - name: tracing - run: | - echo "Action = ${{ github.action }}" - echo "Event = ${{ github.event_name }}" - echo "Actor = ${{ github.actor }}" - echo "Ref = ${{ github.ref }}" - echo "Sha = ${{ github.sha }}" - echo "github.repository = ${{ github.repository }}" - echo "repository_owner = ${{ github.repository_owner }}" - echo "github.workspace = ${{ github.workspace }}" - echo "runner.workspace = ${{ runner.workspace }}" - echo "build_base_sha = ${{ steps.vars.outputs.build_base_sha }}" - echo "build_base_label = ${{ steps.vars.outputs.build_base_label }}" - - - name: Checkout - uses: actions/checkout@v2 - - - name: Calculate vcpkg vars - id: vcpkg_vars - run: | - echo "base_ver=$(git submodule status vcpkg | cut -c2-9)" >> $GITHUB_OUTPUT - - - name: tracing (vcpkg_vars) - run: | - echo "vcpkg_vars.base_ver = ${{ steps.vcpkg_vars.outputs.base_ver }}" - - - name: Set up Docker Buildx - id: buildx - uses: docker/setup-buildx-action@v1 - - # - name: Login to GitHub Container Registry - # uses: docker/login-action@v1 - # with: - # registry: ghcr.io - # username: ${{ github.repository_owner }} - # password: ${{ secrets.GITHUB_TOKEN }} - - - name: Login to DockerHub - uses: docker/login-action@v1 - with: - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} - - # build branch image - - name: branch image - uses: docker/build-push-action@v2 - with: - context: . 
- file: ./dockerfiles/platform-build/Dockerfile - builder: ${{ steps.buildx.outputs.name }} - tags: | - ${{ steps.vars.outputs.container_registry }}/${{ steps.vars.outputs.cr_user }}/${{ steps.vars.outputs.platform_build }}:${{ steps.vars.outputs.build_base_label }}-${{ steps.vars.outputs.build_base_sha }} - ${{ steps.vars.outputs.container_registry }}/${{ steps.vars.outputs.cr_user }}/${{ steps.vars.outputs.platform_build }}:${{ steps.vars.outputs.build_base_label }}-latest - push: true - build-args: | - CR_REPO=${{ steps.vars.outputs.container_registry }} - BASE_VER=${{ steps.vcpkg_vars.outputs.base_ver }} - BUILD_USER=${{ github.repository_owner }} - BUILD_TAG=${{ steps.vars.outputs.build_base_sha }} - BUILD_TYPE=${{ steps.vars.outputs.build_type }} - GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }} - USE_CPPUNIT=${{ steps.vars.outputs.use_cppunit }} - BUILD_THREADS=${{ steps.vars.outputs.build_threads }} diff --git a/.github/workflows/build-docker.yml b/.github/workflows/build-docker.yml index 56f318be832..e7bfab4a02f 100644 --- a/.github/workflows/build-docker.yml +++ b/.github/workflows/build-docker.yml @@ -30,7 +30,7 @@ on: default: false strip-files: type: boolean - description: 'Single Package' + description: 'Strip Debug Symbols' required: false default: true cmake-configuration: @@ -138,7 +138,7 @@ jobs: --mount source="${{ github.workspace }}/build",target=/hpcc-dev/build,type=bind,consistency=delegated \ --mount source="${{ github.workspace }}/.ccache",target=/root/.ccache,type=bind,consistency=delegated \ ${{ steps.vars.outputs.docker_tag }} "\ - cmake -G Ninja -S /hpcc-dev/${{ inputs.ln == true && 'LN' || 'HPCC-Platform' }} -B /hpcc-dev/build -DHPCC_SOURCE_DIR=/hpcc-dev/HPCC-Platform -DCMAKE_BUILD_TYPE=${{ inputs.build-type }} -DCONTAINERIZED=${{ inputs.containerized == true && 'ON' || 'OFF' }} -DCPACK_STRIP_FILES=${{ inputs.strip-files == true && 'ON' || 'OFF' }} ${{ inputs.cmake-configuration }} ${{ inputs.cmake-configuration-ex }} ${{ 
inputs.single-package == true && '-DINCLUDE_PLUGINS=ON' || '-D$plugin=ON' }} && \ + cmake -G Ninja -S /hpcc-dev/${{ inputs.ln == true && 'LN' || 'HPCC-Platform' }} -B /hpcc-dev/build -DHPCC_SOURCE_DIR=/hpcc-dev/HPCC-Platform -DCMAKE_BUILD_TYPE=${{ inputs.build-type }} -DCONTAINERIZED=${{ inputs.containerized == true && 'ON' || 'OFF' }} -DCPACK_STRIP_FILES=${{ inputs.strip-files == true && 'ON' || 'OFF' }} ${{ inputs.single-package == true && '-DINCLUDE_PLUGINS=ON' || '-D$plugin=ON' }} ${{ inputs.cmake-configuration }} ${{ inputs.cmake-configuration-ex }} && \ cmake --build /hpcc-dev/build --parallel ${{ inputs.upload-package == true && '--target package' || ''}}" done diff --git a/.github/workflows/build-gh_runner.yml b/.github/workflows/build-gh_runner.yml index 58e0dbc3052..48823c004c5 100644 --- a/.github/workflows/build-gh_runner.yml +++ b/.github/workflows/build-gh_runner.yml @@ -34,7 +34,7 @@ on: default: false strip-files: type: boolean - description: 'Single Package' + description: 'Strip Debug Symbols' required: false default: true cmake-configuration: @@ -204,8 +204,14 @@ jobs: run: | mkdir -p ${{ github.workspace }}/LN mkdir -p ${{ github.workspace }}/build - cmake ${{ !contains(inputs.os, 'windows') && '-G Ninja' || '' }} -S ./${{ inputs.ln == true && 'LN' || 'HPCC-Platform' }} -B ./build -DCMAKE_BUILD_TYPE=${{ inputs.build-type }} -DCONTAINERIZED=${{ inputs.containerized == true && 'ON' || 'OFF' }} -DCPACK_STRIP_FILES=${{ inputs.strip-files == true && 'ON' || 'OFF' }} ${{ inputs.cmake-configuration }} ${{ inputs.cmake-configuration-ex }} - cmake --build ./build ${{ contains(inputs.os, 'windows') && '--config Release' || ''}} --parallel ${{ inputs.upload-package == true && '--target package' || ''}} + declare -a plugins + plugins=(${{ inputs.single-package == true && '"PLATFORM"' || '"PLATFORM" "CASSANDRAEMBED" "COUCHBASEEMBED" "ECLBLAS" "H3" "JAVAEMBED" "KAFKA" "MEMCACHED" "MONGODBEMBED" "MYSQLEMBED" "NLP" "PARQUETEMBED" "REDIS" "REMBED" "SQLITE3EMBED" 
"SQS"' }}) + for plugin in "${plugins[@]}"; do + rm -f ./build/CMakeCache.txt + rm -rf ./build/CMakeFiles + cmake ${{ !contains(inputs.os, 'windows') && '-G Ninja' || '' }} -S ./${{ inputs.ln == true && 'LN' || 'HPCC-Platform' }} -B ./build -DCMAKE_BUILD_TYPE=${{ inputs.build-type }} -DCONTAINERIZED=${{ inputs.containerized == true && 'ON' || 'OFF' }} -DCPACK_STRIP_FILES=${{ inputs.strip-files == true && 'ON' || 'OFF' }} ${{ inputs.single-package == true && '-DINCLUDE_PLUGINS=ON' || '-D$plugin=ON' }} ${{ inputs.cmake-configuration }} ${{ inputs.cmake-configuration-ex }} + cmake --build ./build ${{ contains(inputs.os, 'windows') && '--config RelWithDebInfo' || ''}} --parallel ${{ inputs.upload-package == true && '--target package' || ''}} + done - name: Upload Package if: ${{ inputs.upload-package == true }} diff --git a/.github/workflows/build-test-eclwatch.yml b/.github/workflows/build-test-eclwatch.yml index 13eee0bfa2a..520fea47ba2 100644 --- a/.github/workflows/build-test-eclwatch.yml +++ b/.github/workflows/build-test-eclwatch.yml @@ -38,13 +38,6 @@ jobs: - uses: actions/setup-node@v2 with: node-version: ${{ matrix.node }} - - name: Cache node modules - uses: actions/cache@v2 - with: - path: ./esp/src/node_modules - key: npm-deps-${{ hashFiles('./esp/src/package-lock.json') }} - restore-keys: | - npm-deps-${{ hashFiles('./esp/src/package-lock.json') }} - name: Install Dependencies working-directory: ./esp/src run: npm ci diff --git a/.github/workflows/build-vcpkg.yml b/.github/workflows/build-vcpkg.yml index 7c79c2d72a6..583cecc5d4e 100644 --- a/.github/workflows/build-vcpkg.yml +++ b/.github/workflows/build-vcpkg.yml @@ -1,11 +1,5 @@ name: Test Build -env: - VCPKG_BINARY_SOURCES: "clear;nuget,GitHub,read" - VCPKG_NUGET_REPOSITORY: "https://github.com/hpcc-systems/vcpkg" - OS_DEPENDENCIES: "bison flex build-essential binutils-dev curl lsb-release libcppunit-dev python3-dev default-jdk - r-base-dev r-cran-rcpp r-cran-rinside r-cran-inline pkg-config libtool 
autotools-dev automake git cmake" - on: workflow_dispatch: inputs: @@ -32,8 +26,8 @@ on: required: false default: false - # Do not include "push" in final version - push: + # Do not include "push" in final version (uncomment for testing on users own repo) + # push: pull_request: branches: @@ -51,6 +45,7 @@ on: - cron: "0 0 * * *" jobs: + build-workflow-dispatch: if: ${{ contains('workflow_dispatch', github.event_name) }} uses: ./.github/workflows/build-docker.yml @@ -75,8 +70,6 @@ jobs: uses: ./.github/workflows/build-docker.yml with: os: ubuntu-23.10 - single-package: true - containerized: false secrets: inherit build-docker-ubuntu-22_04: @@ -84,10 +77,8 @@ jobs: uses: ./.github/workflows/build-docker.yml with: os: ubuntu-22.04 - single-package: true - containerized: false upload-package: true - asset-name: 'docker-package' + asset-name: 'docker-ubuntu-22_04' secrets: inherit test-smoke-docker-ubuntu-22_04: @@ -96,7 +87,7 @@ jobs: uses: ./.github/workflows/test-smoke-gh_runner.yml with: os: ubuntu-22.04 - asset-name: 'docker-package' + asset-name: 'docker-ubuntu-22_04' secrets: inherit test-unit-docker-ubuntu-22_04: @@ -105,7 +96,7 @@ jobs: uses: ./.github/workflows/test-unit-gh_runner.yml with: os: ubuntu-22.04 - asset-name: 'docker-package' + asset-name: 'docker-ubuntu-22_04' secrets: inherit test-ui-docker-ubuntu-22_04: @@ -114,7 +105,7 @@ jobs: uses: ./.github/workflows/test-ui-gh_runner.yml with: os: ubuntu-22.04 - asset-name: 'docker-package' + asset-name: 'docker-ubuntu-22_04' secrets: inherit build-docker-ubuntu-20_04: @@ -122,8 +113,6 @@ jobs: uses: ./.github/workflows/build-docker.yml with: os: ubuntu-20.04 - single-package: true - containerized: false secrets: inherit build-docker-centos-8: @@ -131,8 +120,6 @@ jobs: uses: ./.github/workflows/build-docker.yml with: os: centos-8 - single-package: true - containerized: false secrets: inherit build-docker-centos-7: @@ -140,8 +127,6 @@ jobs: uses: ./.github/workflows/build-docker.yml with: os: centos-7 - 
single-package: true - containerized: false cmake-configuration-ex: "-DVCPKG_TARGET_TRIPLET=x64-centos-7-dynamic" secrets: inherit @@ -150,56 +135,21 @@ jobs: uses: ./.github/workflows/build-docker.yml with: os: amazonlinux - single-package: true - containerized: false cmake-configuration-ex: "-DVCPKG_TARGET_TRIPLET=x64-amazonlinux-dynamic" secrets: inherit build-gh_runner-ubuntu-22_04: - if: ${{ contains('pull_request,push', github.event_name) }} + if: ${{ contains('schedule,push', github.event_name) }} uses: ./.github/workflows/build-gh_runner.yml with: os: ubuntu-22.04 - single-package: true - containerized: false - upload-package: true - asset-name: 'gh_runner-package' secrets: inherit - test-smoke-gh_runner-ubuntu-22_04: - if: ${{ contains('pull_request,push', github.event_name) }} - needs: build-gh_runner-ubuntu-22_04 - uses: ./.github/workflows/test-smoke-gh_runner.yml - with: - os: ubuntu-22.04 - asset-name: 'gh_runner-package' - secrets: inherit - - test-unit-gh_runner-ubuntu-22_04: - if: ${{ contains('pull_request,push', github.event_name) }} - needs: build-gh_runner-ubuntu-22_04 - uses: ./.github/workflows/test-unit-gh_runner.yml - with: - os: ubuntu-22.04 - asset-name: 'gh_runner-package' - secrets: inherit - - test-ui-gh_runner-ubuntu-22_04: - if: ${{ contains('pull_request,push', github.event_name) }} - needs: build-gh_runner-ubuntu-22_04 - uses: ./.github/workflows/test-ui-gh_runner.yml - with: - os: ubuntu-22.04 - asset-name: 'gh_runner-package' - secrets: inherit - build-gh_runner-ubuntu-20_04: if: ${{ contains('schedule,push', github.event_name) }} uses: ./.github/workflows/build-gh_runner.yml with: os: ubuntu-20.04 - single-package: true - containerized: false secrets: inherit build-gh_runner-windows-2022: @@ -207,9 +157,7 @@ jobs: uses: ./.github/workflows/build-gh_runner.yml with: os: windows-2022 - single-package: true - containerized: false - cmake-configuration: '-T host=x64 -A x64 -DUSE_OPTIONAL=OFF -DCLIENTTOOLS_ONLY=ON -DUSE_AZURE=OFF 
-DUSE_CASSANDRA=OFF -DUSE_JAVA=OFF -DUSE_OPENLDAP=OFF' + cmake-configuration-ex: '-T host=x64 -A x64 -DUSE_CPPUNIT=OFF -DCLIENTTOOLS_ONLY=ON -DUSE_AZURE=OFF -DINCLUDE_PLUGINS=OFF -DUSE_CASSANDRA=OFF -DUSE_JAVA=OFF -DUSE_OPENLDAP=OFF' secrets: inherit build-gh_runner-windows-2019: @@ -217,9 +165,7 @@ jobs: uses: ./.github/workflows/build-gh_runner.yml with: os: windows-2019 - single-package: true - containerized: false - cmake-configuration: '-T host=x64 -A x64 -DUSE_OPTIONAL=OFF -DCLIENTTOOLS_ONLY=ON -DUSE_AZURE=OFF -DUSE_JAVA=OFF -DUSE_OPENLDAP=OFF' + cmake-configuration-ex: '-T host=x64 -A x64 -DUSE_CPPUNIT=OFF -DCLIENTTOOLS_ONLY=ON -DINCLUDE_PLUGINS=OFF -DUSE_AZURE=OFF -DUSE_CASSANDRA=OFF -DUSE_JAVA=OFF -DUSE_OPENLDAP=OFF' secrets: inherit build-gh_runner-macos-12: @@ -227,10 +173,8 @@ jobs: uses: ./.github/workflows/build-gh_runner.yml with: os: macos-12 - single-package: true - containerized: false build-type: 'Release' - cmake-configuration: '-DUSE_OPTIONAL=OFF -DCLIENTTOOLS_ONLY=ON -DUSE_AZURE=OFF -DUSE_CASSANDRA=OFF -DUSE_JAVA=OFF -DUSE_OPENLDAP=OFF' + cmake-configuration-ex: '-DUSE_CPPUNIT=OFF -DCLIENTTOOLS_ONLY=ON -DINCLUDE_PLUGINS=OFF -DUSE_AZURE=OFF -DUSE_CASSANDRA=OFF -DSUPPRESS_CASSANDRAEMBED=ON -DUSE_JAVA=OFF -DUSE_OPENLDAP=OFF' secrets: inherit build-gh_runner-macos-11: @@ -238,8 +182,6 @@ jobs: uses: ./.github/workflows/build-gh_runner.yml with: os: macos-11 - single-package: true - containerized: false build-type: 'Release' - cmake-configuration: '-DUSE_OPTIONAL=OFF -DCLIENTTOOLS_ONLY=ON -DUSE_AZURE=OFF -DUSE_CASSANDRA=OFF -DUSE_JAVA=OFF -DUSE_OPENLDAP=OFF' + cmake-configuration-ex: '-DUSE_CPPUNIT=OFF -DCLIENTTOOLS_ONLY=ON -DINCLUDE_PLUGINS=OFF -DUSE_AZURE=OFF -DUSE_CASSANDRA=OFF -DSUPPRESS_CASSANDRAEMBED=ON -DUSE_JAVA=OFF -DUSE_OPENLDAP=OFF' secrets: inherit diff --git a/.github/workflows/smoketest.yml b/.github/workflows/smoketest.yml deleted file mode 100644 index 0428925cbaf..00000000000 --- a/.github/workflows/smoketest.yml +++ /dev/null @@ 
-1,592 +0,0 @@ -# Smoketest github action -# ======================= -# -# Uses cached instances of previous builds to perform partial (and therefore quicker) incremental builds. -# Once the platform is built and installed (make install), -# it runs the regression suite setup stage within the same github job ('build-and-setup'). -# -# On success, an artifact is built, that contains the install binaries and -# the HPCC binaries that have accumulated from the setup stage. -# -# Once the 'build-and-setup' job is complete, the dependent regression suite jobs and unittest job are launched in parallel. -# The regression suite queries are manually sub-divided into chunks (alphabeticaly), and run via a job matrix for parallism. -# If anything fails, all jobs are aborted (fail-fast=true), and the logs are captured into a published artifact. -# -# NOTES: -# + pre-requisite build dependencies (and runtime dependencies) are listed manually, and must be kept up to date with the -# requirements of the platform. MORE: a list of required build dependencies could be kept within the platform source and picked up -# + 'cacheversion' is purely in case it is necessary to force a cache-miss, i.e. all cached items are based on this version -# + Caching is via github's actions/cache, and is limited to 5GB total per repository, with oldest ejected first, and/or >7 days -# cached builds are tagged with: -# 1) base+ref+SHA (exact match, e.g. hpccbuild-1-7.12.10-6c981c48ae6e35b62d86d8e59e42799c5cd14812) -# 2) base_ref (branch match, e.g. hpccbuild-1-7.12.10) -# 3) base-ref major-minor only (e.g. hpccbuild-1-7.12.) -# 4) base-ref major only (e.g. hpccbuild-1-7.) -# 5) generic cacheversion only (e.g. hpccbuild-1-) -# The cache will initially try to match an exact key match (only true if rerunning PR and SHA same), -# and will then failover to trying to find a match of 2-5, in that order. 
- - -name: smoketest -env: - cacheversion: 3 - VCPKG_BINARY_SOURCES: "clear;nuget,GitHub,read" - OS_DEPENDENCIES: "bison flex build-essential binutils-dev curl lsb-release libcppunit-dev python3-dev default-jdk - r-base-dev r-cran-rcpp r-cran-rinside r-cran-inline pkg-config libtool autotools-dev automake git cmake" - -on: - pull_request: - branches: - - "master" - - "newbuild" - - "candidate-*" - - "!candidate-7.6.*" - - "!candidate-7.4.*" - - "!candidate-7.2.*" - - "!candidate-7.0.*" - - "!candidate-6.*" - -# NB: this fails to cancel in-flight actions, with error 'Resource not accessible by integration', -# but it's non-fatal. Appears to be dependent on what permissions the PR owner has, i.e. the github token used -# does not have permission to cancel actions. -jobs: - check-skip: - # continue-on-error: true # Uncomment once integration is finished - runs-on: ubuntu-22.04 - # Map a step output to a job output - outputs: - platform: ${{ contains(github.event.pull_request.labels.*.name, 'smoketest-force-rerun') || steps.skip_check.outputs.platform }} - eclwatch: ${{ contains(github.event.pull_request.labels.*.name, 'smoketest-force-rerun') || steps.skip_check.outputs.eclwatch }} - - steps: - - id: skip_check - uses: hpcc-systems/github-actions/changed-modules@main - with: - github_token: ${{ github.token }} - - build-and-setup: - name: Build platform and regress setup - needs: check-skip - if: ${{ needs.check-skip.outputs.platform || needs.check-skip.outputs.eclwatch }} - runs-on: ubuntu-22.04 - timeout-minutes: 150 # the build step has its own timeout, however the job can sometimes take time to download dependencies - outputs: - cache-state: ${{ steps.check-cache.outputs.state }} - - steps: - - name: Setup and git clone - id: vars - run: | - if [[ "${{ github.event_name }}" = 'pull_request' ]] - then - echo ::set-output name=branch::${{ github.head_ref }} - echo ::set-output name=base_ref::${{ github.base_ref }} - versioned=$(echo ${{ github.base_ref }} | sed -E 
-n 's/^(.*)-([0-9]+)\.([0-9]+)\.([0-9]+|x)(-[0-9]+|).*$/\1 \2 \3 \4 \5/p') - if [[ "${versioned}" ]] - then - echo "matched version = ${versioned}" - IFS=' ' read prefix major minor point cand <<< ${versioned} - echo "name=base-majorminor-restore-key::hpccbuild-${{ env.cacheversion }}-${prefix}-${major}.${minor}." - echo "name=base-majoronly-restore-key::hpccbuild-${{ env.cacheversion }}-${prefix}-${major}." - echo ::set-output name=base-majorminor-restore-key::hpccbuild-${{ env.cacheversion }}-${prefix}-${major}.${minor}. - echo ::set-output name=base-majoronly-restore-key::hpccbuild-${{ env.cacheversion }}-${prefix}-${major}. - fi - else - echo ::set-output name=branch::${GITHUB_REF#refs/heads/} - echo ::set-output name=base_ref::${GITHUB_REF#refs/heads/} - fi - if [[ "${{ github.event.pull_request.head.repo.owner.login }}" = "${{ github.repository_owner }}" ]] - then - repository=${{ github.repository }} - else - repository=$(echo -n ${{ github.event.pull_request.head.repo.owner.login }}/ ; echo -n ${{ github.repository }} | sed -E -n 's@^[^/]+/(.+)$@\1@p') - fi - echo ::set-output name=repository::${repository} - echo "Cloning https://github.com/${repository}" - git clone -n https://github.com/${repository} src - if [[ "${{ github.event_name }}" = 'pull_request' ]] - then - cd src - git remote add upstream https://github.com/${{ github.repository }} - git fetch upstream +${{ github.ref }} - fetch_sha=$(git rev-parse --verify FETCH_HEAD) - git fetch upstream ${{ github.base_ref}} - base_ref_sha=$(git rev-parse upstream/${{ github.base_ref }}) - echo ::set-output name=base_ref_sha::${base_ref_sha} - echo ::set-output name=sha::${fetch_sha} - echo "base_ref_sha = ${base_ref_sha}" - echo "fetch_sha = ${fetch_sha}" - else - echo ::set-output name=sha::${{ github.sha }} - echo ::set-output name=base_ref_sha::${{ github.sha }} - fi - - # NB: restore-keys set to e.g. 
hpccbuild-2-7.12.10, hpccbuild-2-7.12, hpccbuild-2-7, hpccbuild-2- - - name: Fetch cache - id: cache - uses: actions/cache@v2 - with: - path: | - build-cache - merge-patch - src-cache - key: hpccbuild-${{ env.cacheversion }}-${{ steps.vars.outputs.base_ref }}-${{ steps.vars.outputs.sha }} - restore-keys: | - hpccbuild-${{ env.cacheversion }}-${{ steps.vars.outputs.base_ref }} - ${{ steps.vars.outputs.base-majorminor-restore-key }} - ${{ steps.vars.outputs.base-majoronly-restore-key }} - hpccbuild-${{ env.cacheversion }}- - - - name: tracing - run: | - echo "Branch = ${{ steps.vars.outputs.branch }}" - echo "Base ref = ${{ steps.vars.outputs.base_ref }}" - echo "SHA = ${{ steps.vars.outputs.sha}}" - echo "Action = ${{ github.action }}" - echo "Event = ${{ github.event_name }}" - echo "Actor = ${{ github.actor }}" - echo "Ref = ${{ github.ref }}" - echo "Sha = ${{ github.sha }}" - echo "github.repository = ${{ github.repository }}" - echo "repository = ${{ steps.vars.outputs.repository }}" - echo "repository_owner = ${{ github.repository_owner }}" - echo "github.workspace = ${{ github.workspace }}" - echo "runner.workspace = ${{ runner.workspace }}" - echo "github.event.pull_request.head.repo.owner.login = ${{ github.event.pull_request.head.repo.owner.login }}" - - if [[ "${{ github.event_name }}" = 'pull_request' ]] - then - echo "PR base_ref = ${{ github.base_ref }}" - echo "PR head_ref = ${{ github.head_ref }}" - echo "PR base SHA = ${{ github.event.pull_request.base.sha }}" - fi - echo "restore-key1: hpccbuild-${{ env.cacheversion }}-${{ steps.vars.outputs.base_ref }}" - echo "restore-key2: ${{ steps.vars.outputs.base-majorminor-restore-key }}" - echo "restore-key3: ${{ steps.vars.outputs.base-majoronly-restore-key }}" - echo "restore-key4: hpccbuild-${{ env.cacheversion }}-" - - # NB: actions/cache does not set cache-hit to true if restore-keys used. 
- # So here we: - # 1) check if a cache was restored - # 2) check that the SHA stored in the cache can be found in git - # 3) mv cache into place (build-cache->build) - # 4) checkout source @ base_ref - # 5) get submodules - # 6) apply git merge-patch from cache, bringing source up-to-date with the source used in the cached binary build - # 7) copy src-cache files into place - # 8) touch all files so that 'old' [before original make timestamp], i.e. so behind objects in cache - # 9) checkout pr branch - ensuring only pr files that differ are updated (and hence will rebuild) - # 10) list updated files (for tracing only) - # 11) set 'state' output variable to 'miss' if not absolute match, - # and set to 'hit' if cache is usable. i.e. if set at all, run build and tests. - - name: Check cache - id: check-cache - if: ${{ (steps.cache.outputs.cache-hit != 'true') || contains(github.event.pull_request.labels.*.name, 'smoketest-force-rerun') }} - run: | - if [[ ("${{ steps.cache.outputs.cache-hit }}" == 'true') && ("${{ contains(github.event.pull_request.labels.*.name, 'smoketest-force-rerun') }}" == 'true') ]] - then - echo "Exact cache match, but 'smoketest-force-rerun' label forced rerun" - fi - echo ::set-output name=state::miss - if [[ "${{ contains(github.event.pull_request.labels.*.name, 'smoketest-ignore-cache') }}" == 'true' ]] - then - echo "Cache found, but ignored because 'smoketest-ignore-cache' label set" - rm -rf build-cache merge-patch src-cache - else - if [[ -d build-cache ]] - then - echo "Attempting to use existing cache" - stat build-cache merge-patch src-cache - du -sh build-cache merge-patch src-cache - cd src - if [[ "commit" == "$(git cat-file -t $(cat ../build-cache/cache-sha))" ]] - then - mv ../build-cache ../build - echo "Checking out cache SHA (cached github.base_ref SHA): $(cat ../build/cache-sha)" - git checkout $(cat ../build/cache-sha) - git submodule update --init --recursive --jobs 4 - echo "Applying merge patch" - git apply 
../merge-patch - # Add all mods that came from merge-patch into a commit, - # so that the final git checkout will not clash with working tree files - git add -A - git -c user.name='Smoketest' -c user.email='smoke@test.com' commit -m 'merge-patch' - cd ../src-cache - echo "Cached source tree files:" - find . -type f - echo "================" - find . -type f | cpio -p -dum ../src - cd ../src - find . -type f -exec touch -r ../build/cache-timestamp {} + - git checkout ${{ steps.vars.outputs.sha }} - git submodule update --init --recursive --jobs 4 - echo "Changed files (from SHA: $(cat ../build/cache-sha)):" - find -name '.git*' -prune -o -newer ../build/cache-timestamp -type f -print - echo ::set-output name=state::hit - else - echo "Cached SHA $(cat ../build-cache/cache-sha) could not be found). Skipping cache." - fi - fi - fi - - - name: Dependencies22.04 - if: ${{ steps.check-cache.outputs.state != '' }} # if 'state' set then step needed - run: | - sudo apt-get update - sudo apt-get install -y ${{ env.OS_DEPENDENCIES }} - sudo apt-get install -y xmlstarlet - - curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash - - sudo apt-get install -y nodejs - - echo "node.js version = $(node --version), npm version = $(npm --version), release = $(lsb_release -sc)" - - - name: Cache miss prep - if: ${{ steps.check-cache.outputs.state == 'miss' }} - run: | - rm -rf build build-cache merge-patch src-cache - mkdir build - mkdir src-cache - cd src - git checkout ${{ steps.vars.outputs.sha }} - git submodule update --init --recursive --jobs 4 - - - name: "Setup NuGet credentials" - if: ${{ steps.check-cache.outputs.state != '' }} # if 'state' set then step needed - run: | - mono `vcpkg fetch nuget | tail -n 1` \ - sources add \ - -source "https://nuget.pkg.github.com/hpcc-systems/index.json" \ - -storepasswordincleartext \ - -name "GitHub" \ - -username "${{ github.actor }}" \ - -password "${{ secrets.GITHUB_TOKEN }}" - - - name: "Remove builtin vcpkg" - if: ${{ 
github.event_name == matrix.event_name && needs.check-skip.outputs.platform }} - shell: "bash" - run: | - sudo rm -rf "$VCPKG_INSTALLATION_ROOT" - - - name: "vcpkg Bootstrap" - if: ${{ steps.check-cache.outputs.state != '' }} # if 'state' set then step needed - working-directory: ./src/vcpkg - run: | - ./bootstrap-vcpkg.sh - - # Subsequent cache hit builds use cache-timestamp to ensure all sources are marked 'old', - # except those changed between the cache SHA and the SHA being built. - # NB: BUILD_TAG is overridden to a constant "smoketest", this is to prevent the auto-generated build tag being - # used, which would cause it to change per PR (per SHA), and because it's in a common header, cause a lot of undesirable rebuilding. - - name: Build - timeout-minutes: 120 # Generous, typical build time from clean is < 60 - if: ${{ steps.check-cache.outputs.state != '' }} # if 'state' set to anything, then step needed - run: | - cd build - echo "removing CMakeCache.txt" - rm -f CMakeCache.txt - if [[ ! -f cache-timestamp ]]; then touch cache-timestamp; fi - touch ../cmake-timestamp - `../src/vcpkg/vcpkg fetch cmake | tail -n 1` ../src -Wno-dev -DRUNTIME_USER=$(whoami) -DRUNTIME_GROUP=$(id -gn) -DDESTDIR=$(realpath ..)/install -DINCLUDE_PLUGINS=1 -DCMAKE_BUILD_TYPE=Release -DUSE_PYTHON2=0 -DUSE_PYTHON3=1 -DSUPPRESS_SPARK=1 -DUSE_CPPUNIT=1 -DUSE_CASSANDRA=Off -DCMAKE_CXX_FLAGS="-DBUILD_TAG=\\\"dummytag\\\" -DBUILD_VERSION_MAJOR=1 -DBUILD_VERSION_MINOR=1 -DBUILD_VERSION_POINT=0" -DBUILD_TAG="smoketest" -DSTRIP_RELEASE_SYMBOLS=0 -DCMAKE_CXX_FLAGS="-DTRACE_GLOBAL_GROUP=1" - make -j$(nproc) install - - - name: build-logs-artifact - if: ${{ failure() }} - uses: actions/upload-artifact@v2 - with: - name: build-logs - path: | - build/CMakeCache.txt - build/CMakeFiles/CMakeOutput.log - build/CMakeFiles/CMakeError.log - - # Cache contains: - # - make binaries (build-cache) - # - Any sources written since cache-timestamp within the source dir (src-cache) (e.g. 
AWS writes a config header) - # - cache-timestamp, timestamp of cache-miss build. All files except diffs set to this time, ensuring no existing binaries are rebuilt - # - cache-sha, the SHA of the base_ref used to build. On a cache hit, used to find changed files and ensure they are only ones rebuilt. - # - merge-path, the diffs between the base_ref and the PR commit at the time of PR/cache build. - - name: Prepare cache - if: ${{ steps.check-cache.outputs.state != '' }} # if 'state' set to anything, then step needed - run: | - echo ${{ steps.vars.outputs.base_ref_sha }} > build/cache-sha - mv build build-cache - cd src - find -name '.git*' -prune -o -newer ../cmake-timestamp -type f -print | cpio -p -dum ../src-cache - echo "Merge patch files:" - git diff --name-only ${{ steps.vars.outputs.base_ref_sha }} ${{ steps.vars.outputs.sha }} - git diff --binary ${{ steps.vars.outputs.base_ref_sha }} ${{ steps.vars.outputs.sha }} > ../merge-patch - - # - alter stock ecl-test.json, to point to install dir (using jq) - # - create a clean copy of the install directory (for later reuse) - # - rm hpcc-data from copy (the post-setup hpcc-data will be moved in when done) - - name: Prepare install artifact - if: ${{ steps.check-cache.outputs.state != '' && github.event_name == 'pull_request' }} - run: | - # configure ecl-test.json with correct paths - jq --arg rootdir "${{ github.workspace }}" \ - '.Regress.dropzonePath = $rootdir + "/install" + .Regress.dropzonePath | .Regress.setupExtraParams.OriginalTextFilesOsPath = $rootdir + "/install" + .Regress.setupExtraParams.OriginalTextFilesOsPath | .Regress.setupExtraDParams.HPCCBaseDir = $rootdir + "/install" + .Regress.setupExtraDParams.HPCCBaseDir | .Regress.regressionDir = $rootdir + "/regress" | .Regress.maxAttemptCount = "1" | .Regress.logDir = $rootdir + "/regress/log"' \ - install/opt/HPCCSystems/testing/regress/ecl-test.json > ecl-test.json - mv -f ecl-test.json install/opt/HPCCSystems/testing/regress/ecl-test.json - # 
configure environment.xml to slavesPerNode=2, channelsPerNode=1 - xmlstarlet ed -L -u 'Environment/Software/ThorCluster/@slavesPerNode' -v 2 -u 'Environment/Software/ThorCluster/@channelsPerSlave' -v 1 install/etc/HPCCSystems/environment.xml - cp ${{ github.workspace }}/src/.github/workflows/timeoutcmd install/opt/HPCCSystems/bin/ - cp ${{ github.workspace }}/src/.github/workflows/smoketest-preabort.sh install/opt/HPCCSystems/bin/ - mkdir copy - cp -rp install copy/ - rm -rf copy/install/var/lib/HPCCSystems/hpcc-data - rm -rf copy/install/var/lib/HPCCSystems/queries - - - name: Run regression suite setup - if: ${{ steps.check-cache.outputs.state != '' && github.event_name == 'pull_request' }} - timeout-minutes: 10 - run: | - export LANG="en_US.UTF-8" - sudo update-locale - source install/opt/HPCCSystems/sbin/hpcc_setenv - install/opt/HPCCSystems/etc/init.d/hpcc-init start - mkdir -p regress - cd install/opt/HPCCSystems/testing/regress - # force regression suite to timeout after 8 minutes, so it captures ZAP report of any inflight hung queries - timeoutcmd $((8 * 60)) \ - ./ecl-test setup --preAbort 'smoketest-preabort.sh' --pq 2 --generateStackTrace - grep Failure: ${{ github.workspace }}/regress/log/setup_*.log - if [[ "$(grep -ohP '(?<=^Failure: )[0-9]+$' ${{ github.workspace }}/regress/log/setup_*.log | awk '{s+=$1} END {print s}')" -gt 0 ]]; then exit 1; fi - ${{ github.workspace }}/install/opt/HPCCSystems/etc/init.d/hpcc-init stop - - - name: regression-setup-logs-artifact - if: ${{ failure() || cancelled() }} - uses: actions/upload-artifact@v2 - with: - name: regress-setup-logs - path: | - install/var/log/HPCCSystems - regress/ - - # - mv regression suite setup created data from hpcc-data and queries into copy of install - # - create tarball of install for artifact uploading - - name: Finalize install artifact - if: ${{ steps.check-cache.outputs.state != '' && github.event_name == 'pull_request' }} - run: | - mv install/var/lib/HPCCSystems/hpcc-data 
copy/install/var/lib/HPCCSystems/ - mv install/var/lib/HPCCSystems/queries copy/install/var/lib/HPCCSystems/ - [[ -d ${{ github.workspace }}/src/esp/src/test-ui ]] && cp -r ${{ github.workspace }}/src/esp/src/test-ui copy/install/ - cd copy - tar --zstd -cf ../install.tgz install - - - name: Upload install artifact - if: ${{ steps.check-cache.outputs.state != '' && github.event_name == 'pull_request' }} - uses: actions/upload-artifact@v2 - with: - name: installsetup-artifact - path: | - install.tgz - - # Matrix of jobs run in parallel once build+setup above completes successfully. - # All use the post-setup install.tgz artifact, that contains binaries and setup state - # Break each engine run into sets for speed - # NB: each regression suite job, runs these steps: - # - installs dependencies (probably too many for runtime) - # - Starts HPCC - # - Runs regression suite with params { engine, match[pattern,exclude] } - # TODO: needs to process results, capture report, to be assembled by workflow when all jobs done - regression-jobs: - needs: build-and-setup - if: ${{ needs.build-and-setup.outputs.cache-state != '' && github.event_name == 'pull_request' && needs.check-skip.outputs.platform }} - timeout-minutes: 60 # each matrix step has its own timeout, however the job can sometimes take time to download dependencies - runs-on: ubuntu-22.04 - strategy: - fail-fast: true - matrix: - engine: ['hthor', 'thor', 'roxie'] - match: - - pattern: '[a-d]*.ecl' - - pattern: '[e-h]*.ecl' - - pattern: '[i-l]*.ecl' - - pattern: '[m-q]*.ecl' - - pattern: '[r-u]*.ecl' - exclude: teststdlibrary.ecl - - pattern: '[v-z]*.ecl' - include: - - engine: 'hthor' - match: - pattern: teststdlibrary.ecl - - engine: 'thor' - match: - pattern: teststdlibrary.ecl - - engine: 'roxie' - match: - pattern: teststdlibrary.ecl - - steps: - - uses: actions/download-artifact@v2 - with: - name: installsetup-artifact - - - name: vars - id: vars - run: | - echo ::set-output name=matrix-setname::$(echo -n ${{ 
matrix.match.pattern }} | tr -c "[:alnum:]" _) - - - name: Prerequisites - run: | - sudo apt-get update - sudo apt-get install -y ${{ env.OS_DEPENDENCIES }} - sudo apt-get install -y gdb - - curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash - - sudo apt-get install -y nodejs - - echo "node.js version = $(node --version), npm version = $(npm --version), release = $(lsb_release -sc)" - - - name: run - timeout-minutes: 30 # generous, each matrix step shouldn't take more than 10 mins - run: | - export LANG="en_US.UTF-8" - sudo update-locale - tar --zstd -xf install.tgz - source install/opt/HPCCSystems/sbin/hpcc_setenv - install/opt/HPCCSystems/etc/init.d/hpcc-init start - cd install/opt/HPCCSystems/testing/regress - - (cd ecl; ls ${{ matrix.match.pattern }}) > matches.tmp - echo queries are: - cat matches.tmp - if [[ -z "${{ matrix.match.exclude }}" ]] - then - queries="$(cat matches.tmp | tr '\n' ' ')" - else - queries="$(cd ecl; ls ${{ matrix.match.exclude }} | grep -v -f - ../matches.tmp | tr '\n' ' ')" - fi - echo queries after exclude: ${queries} - - # force regression suite to timeout after 28 minutes, so it captures ZAP report of any inflight hung queries - timeoutcmd $((28 * 60)) \ - ./ecl-test query --preAbort 'smoketest-preabort.sh' --pq 2 --target ${{ matrix.engine }} --excludeclass python2,embedded-r,embedded-js,3rdpartyservice,mongodb --generateStackTrace ${queries} - grep Failure: ${{ github.workspace }}/regress/log/${{ matrix.engine }}.*.log - if [[ "$(grep -oP '(?<=^Failure: )[0-9]+$' ${{ github.workspace }}/regress/log/${{ matrix.engine }}.*.log)" -gt 0 ]]; then exit 1; fi - - - name: regression-run-logs-artifact - if: ${{ failure() || cancelled() }} - uses: actions/upload-artifact@v2 - with: - name: regression-run-logs-${{ matrix.engine }}-${{ steps.vars.outputs.matrix-setname }} - path: | - install/var/log/HPCCSystems - regress/ - if-no-files-found: error - - - # NB: this doesn't really need the post-setup data files included in 
install.tgz - # but as this is relatively quick and in parallel with others, it probably doesn't matter - unittests: - needs: build-and-setup - if: ${{ needs.build-and-setup.outputs.cache-state != '' && github.event_name == 'pull_request' }} - timeout-minutes: 30 # the unittests run step has its own timeout, however the job can sometimes take time to download dependencies - runs-on: ubuntu-22.04 - steps: - - uses: actions/download-artifact@v2 - with: - name: installsetup-artifact - - - name: Prerequisites - run: | - sudo apt-get update - sudo apt-get install -y ${{ env.OS_DEPENDENCIES }} - sudo apt-get install -y gdb - - curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash - - sudo apt-get install -y nodejs - - echo "node.js version = $(node --version), npm version = $(npm --version), release = $(lsb_release -sc)" - - - name: run - timeout-minutes: 10 # generous, expected time is approx 1 min. - run: | - export LANG="en_US.UTF-8" - sudo update-locale - tar --zstd -xf install.tgz - install/opt/HPCCSystems/bin/unittests - - # NB: this doesn't really need the post-setup data files included in install.tgz - # but as this is relatively quick and in parallel with others, it probably doesn't matter - eclwatch-ui-tests: - needs: build-and-setup - if: ${{ needs.build-and-setup.outputs.cache-state != '' && github.event_name == 'pull_request' && needs.check-skip.outputs.eclwatch }} - timeout-minutes: 30 # the ui-tests run step has its own timeout, however the job can sometimes take time to download dependencies - runs-on: ubuntu-22.04 - steps: - - uses: actions/download-artifact@v2 - with: - name: installsetup-artifact - - - name: Check-ECLWatch-UI-Test-Directory - id: check - run: | - tar --zstd -xf install.tgz - if [[ ! -d install/test-ui/tests ]] - then - echo "ECLWatch UI test-ui/tests directory missing." 
- else - javaFilesCount=$(find install/test-ui/tests/ -iname '*.java' -type f -print | wc -l ) - echo "Number of test java files is $javaFilesCount" - if [[ ${javaFilesCount} -eq 0 ]] - then - echo "No java files, do nothing." - else - echo ::set-output name=runtests::true - fi - fi - - - name: Prerequisites - if: steps.check.outputs.runtests - run: | - sudo apt-get update - sudo apt-get install -y git wget net-tools - sudo apt-get install -y tzdata unzip xvfb libxi6 - sudo apt-get install -y default-jdk - sudo apt-get install -y ${{ env.OS_DEPENDENCIES }} - sudo apt-get install -y gdb - - curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash - - sudo apt-get install -y nodejs - - echo "node.js version = $(node --version), npm version = $(npm --version), release = $(lsb_release -sc)" - - - name: UI-Test-Prerequisites - if: steps.check.outputs.runtests - run: | - wget https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb - sudo apt-get install -y ./google-chrome-stable_current_amd64.deb - wget https://chromedriver.storage.googleapis.com/2.41/chromedriver_linux64.zip - unzip chromedriver_linux64.zip - sudo mv chromedriver /usr/bin/chromedriver - sudo chown root:root /usr/bin/chromedriver - sudo chmod +x /usr/bin/chromedriver - wget https://selenium-release.storage.googleapis.com/3.141/selenium-server-standalone-3.141.59.jar - wget http://www.java2s.com/Code/JarDownload/testng/testng-6.8.7.jar.zip - unzip testng-6.8.7.jar.zip - - - name: run - timeout-minutes: 10 # generous, current runtime is ~1min, this should be increased if new tests are added - if: steps.check.outputs.runtests - run: | - export LANG="en_US.UTF-8" - sudo update-locale - source install/opt/HPCCSystems/sbin/hpcc_setenv - install/opt/HPCCSystems/etc/init.d/hpcc-init start - export CLASSPATH=".:$(realpath selenium-server-standalone-3.141.59.jar):$(realpath testng-6.8.7.jar)" - pushd install/test-ui - ./run.sh tests http://localhost:8010 > eclWatchUiTest.log 2>&1 - 
retCode=$? - echo "UI test done" - [[ $retCode -ne 0 ]] && exit 1 - popd - - - name: eclwatch-ui-test-logs-artifact - if: ${{ failure() || cancelled() }} - uses: actions/upload-artifact@v2 - with: - name: ECLWatchUiTest - path: install/test-ui/eclWatchUiTest.log - if-no-files-found: error diff --git a/.github/workflows/test-smoke-gh_runner.yml b/.github/workflows/test-smoke-gh_runner.yml index 2d65c3b9199..b01fdd3ebbc 100644 --- a/.github/workflows/test-smoke-gh_runner.yml +++ b/.github/workflows/test-smoke-gh_runner.yml @@ -76,11 +76,6 @@ jobs: sudo apt-get install -y ${{ inputs.dependencies }} sudo apt-get install -y gdb - curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash - - sudo apt-get install -y nodejs - - echo "node.js version = $(node --version), npm version = $(npm --version), release = $(lsb_release -sc)" - - name: Install Package shell: "bash" run: | @@ -103,7 +98,6 @@ jobs: working-directory: /opt/HPCCSystems/testing/regress shell: "bash" run: | - # todo: run once before matrix /opt/HPCCSystems/bin/timeoutcmd $((8 * 60)) \ ./ecl-test setup --preAbort '/opt/HPCCSystems/bin/smoketest-preabort.sh' --pq 2 --generateStackTrace grep Failure: /home/runner/HPCCSystems-regression/log/setup_*.log @@ -130,8 +124,8 @@ jobs: working-directory: /opt/HPCCSystems/testing/regress shell: "bash" run: | - # force regression suite to timeout after 60 minutes, so it captures ZAP report of any inflight hung queries - /opt/HPCCSystems/bin/timeoutcmd $((60 * 60)) \ + # force regression suite to timeout after 28 minutes, so it captures ZAP report of any inflight hung queries + /opt/HPCCSystems/bin/timeoutcmd $((28 * 60)) \ ./ecl-test query --preAbort '/opt/HPCCSystems/bin/smoketest-preabort.sh' --pq 2 --target ${{ matrix.engine }} --excludeclass python2,embedded-r,embedded-js,3rdpartyservice,mongodb --generateStackTrace ${{ steps.select-tests.outputs.queries }} grep Failure: /home/runner/HPCCSystems-regression/log/${{ matrix.engine }}.*.log if [[ "$(grep -oP 
'(?<=^Failure: )[0-9]+$' /home/runner/HPCCSystems-regression/log/${{ matrix.engine }}.*.log)" -gt 0 ]]; then exit 1; fi diff --git a/.github/workflows/test-ui-gh_runner.yml b/.github/workflows/test-ui-gh_runner.yml index ec2a6b79235..0126f2c6e39 100644 --- a/.github/workflows/test-ui-gh_runner.yml +++ b/.github/workflows/test-ui-gh_runner.yml @@ -36,7 +36,7 @@ jobs: name: ${{ inputs.asset-name }}-ui_test-files path: ${{ inputs.asset-name }}-ui_test-files - - name: Check-ECLWatch-UI-Test-Directory + - name: Check ECLWatch UI Test Directory id: check run: | if [[ ! -d ${{ inputs.asset-name }}-ui_test-files ]] @@ -58,18 +58,19 @@ jobs: shell: "bash" run: | sudo apt-get update - sudo apt-get install -y git wget net-tools - sudo apt-get install -y tzdata unzip xvfb libxi6 - sudo apt-get install -y default-jdk - sudo apt-get install -y ${{ inputs.dependencies }} - sudo apt-get install -y gdb + sudo apt-get install -y \ + git \ + wget \ + net-tools \ + tzdata \ + unzip \ + xvfb \ + libxi6 \ + default-jdk \ + gdb \ + ${{ inputs.dependencies }} - curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash - - sudo apt-get install -y nodejs - - echo "node.js version = $(node --version), npm version = $(npm --version), release = $(lsb_release -sc)" - - - name: UI-Test-Prerequisites + - name: Install UI Dependencies if: steps.check.outputs.runtests run: | wget https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb @@ -99,17 +100,22 @@ jobs: sudo xmlstarlet ed -L -u 'Environment/Software/ThorCluster/@slavesPerNode' -v 2 -u 'Environment/Software/ThorCluster/@channelsPerSlave' -v 1 /etc/HPCCSystems/environment.xml - - name: run - timeout-minutes: 10 # generous, current runtime is ~1min, this should be increased if new tests are added - if: steps.check.outputs.runtests + chmod +x ./${{ inputs.asset-name }}-ui_test-files/* + + - name: Start HPCC-Platform + shell: "bash" run: | export LANG="en_US.UTF-8" - update-locale - source 
/opt/HPCCSystems/sbin/hpcc_setenv + sudo update-locale sudo /etc/init.d/hpcc-init start + + - name: Run Tests + timeout-minutes: 10 # generous, current runtime is ~1min, this should be increased if new tests are added + if: steps.check.outputs.runtests + run: | export CLASSPATH=".:$(realpath selenium-server-standalone-3.141.59.jar):$(realpath testng-6.8.7.jar)" pushd ${{ inputs.asset-name }}-ui_test-files - ./run.sh tests http://localhost:8010 > eclWatchUiTest.log 2>&1 + sudo ./run.sh tests http://localhost:8010 > eclWatchUiTest.log 2>&1 retCode=$? echo "UI test done" [[ $retCode -ne 0 ]] && exit 1 @@ -119,6 +125,6 @@ jobs: if: ${{ failure() || cancelled() }} uses: actions/upload-artifact@v2 with: - name: ECLWatchUiTest + name: ${{ inputs.asset-name }}-ui_test-logs path: ${{ inputs.asset-name }}-ui_test-files/eclWatchUiTest.log if-no-files-found: error diff --git a/.github/workflows/test-unit-gh_runner.yml b/.github/workflows/test-unit-gh_runner.yml index 904187785e8..69928aa0811 100644 --- a/.github/workflows/test-unit-gh_runner.yml +++ b/.github/workflows/test-unit-gh_runner.yml @@ -35,12 +35,6 @@ jobs: name: ${{ inputs.asset-name }} path: ${{ inputs.asset-name }} - - name: Download Support Files - uses: actions/download-artifact@v3 - with: - name: ${{ inputs.asset-name }}-support-files - path: ${{ inputs.asset-name }}-support-files - - name: Install Dependencies shell: "bash" run: | @@ -48,31 +42,26 @@ jobs: sudo apt-get install -y ${{ inputs.dependencies }} sudo apt-get install -y gdb - curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash - - sudo apt-get install -y nodejs - - echo "node.js version = $(node --version), npm version = $(npm --version), release = $(lsb_release -sc)" - - name: Install Package shell: "bash" run: | sudo apt-get install -y -f ./${{ inputs.asset-name }}/*.deb sudo chown -R $USER:$USER /opt/HPCCSystems - sudo xmlstarlet ed -L -u 'Environment/Software/ThorCluster/@slavesPerNode' -v 2 -u 
'Environment/Software/ThorCluster/@channelsPerSlave' -v 1 /etc/HPCCSystems/environment.xml - - chmod +x ./${{ inputs.asset-name }}-support-files/* - sudo cp ./${{ inputs.asset-name }}-support-files/* /opt/HPCCSystems/bin - - - name: Start HPCC-Platform + - name: Run Tests + timeout-minutes: 10 # generous, expected time is approx 1 min. shell: "bash" run: | export LANG="en_US.UTF-8" sudo update-locale - sudo /etc/init.d/hpcc-init start + sudo /opt/HPCCSystems/bin/unittests - - name: Run Tests - timeout-minutes: 10 # generous, expected time is approx 1 min. - shell: "bash" - run: | - /opt/HPCCSystems/bin/unittests + - name: unittests-logs-artifact + if: ${{ failure() || cancelled() }} + uses: actions/upload-artifact@v3 + with: + name: test-unit-${{ inputs.asset-name }} + path: | + /var/log/HPCCSystems + /home/runner/HPCCSystems-regression + if-no-files-found: error diff --git a/cmake_modules/options.cmake b/cmake_modules/options.cmake index a744818799d..0d1dbc1f677 100644 --- a/cmake_modules/options.cmake +++ b/cmake_modules/options.cmake @@ -88,7 +88,7 @@ if (NOT USE_JAVA) endif () if ((INCLUDE_PLUGINS AND NOT SUPPRESS_JAVAEMBED) OR INCLUDE_JAVAEMBED) message(WARNING "JAVAEMBED requires USE_JAVA, forcing SUPPRESS_JAVAEMBED ON") - set (SUPRESS_JAVAEMBED ON) + set (SUPPRESS_JAVAEMBED ON) endif () endif () diff --git a/dockerfiles/vcpkg/build.sh b/dockerfiles/vcpkg/build.sh index 2a624617b69..f314770edc2 100755 --- a/dockerfiles/vcpkg/build.sh +++ b/dockerfiles/vcpkg/build.sh @@ -29,9 +29,9 @@ docker login -u $DOCKER_USERNAME -p $DOCKER_PASSWORD CMAKE_OPTIONS="-G Ninja -DCMAKE_BUILD_TYPE=RelWithDebInfo -DVCPKG_FILES_DIR=/hpcc-dev -DCPACK_THREADS=0 -DUSE_OPTIONAL=OFF -DINCLUDE_PLUGINS=ON -DSUPPRESS_V8EMBED=ON" function doBuild() { - # docker pull "hpccsystems/platform-build-base-$1:$VCPKG_REF" || true - # docker pull "hpccsystems/platform-build-$1:$VCPKG_REF" || true - # docker pull "hpccsystems/platform-build-$1:$GITHUB_BRANCH" || true + docker pull 
"hpccsystems/platform-build-base-$1:$VCPKG_REF" || true + docker pull "hpccsystems/platform-build-$1:$VCPKG_REF" || true + docker pull "hpccsystems/platform-build-$1:$GITHUB_BRANCH" || true docker build --progress plain --rm -f "$SCRIPT_DIR/$1.dockerfile" \ --build-arg DOCKER_NAMESPACE=$DOCKER_USERNAME \ diff --git a/dockerfiles/vcpkg/centos-7.dockerfile b/dockerfiles/vcpkg/centos-7.dockerfile deleted file mode 100644 index 7369d75cdcc..00000000000 --- a/dockerfiles/vcpkg/centos-7.dockerfile +++ /dev/null @@ -1,24 +0,0 @@ -ARG VCPKG_REF=latest -FROM hpccsystems/platform-build-base-centos-7:$VCPKG_REF - -RUN yum makecache && yum install -y \ - epel-release \ - java-11-openjdk-devel \ - python3-devel \ - wget && \ - yum update -y && yum install -y \ - ccache \ - R-core-devel && \ - yum -y clean all && rm -rf /var/cache - -ENV Rcpp_package=Rcpp_0.12.19.tar.gz -ENV RInside_package=RInside_0.2.12.tar.gz - -RUN wget https://cran.r-project.org/src/contrib/Archive/Rcpp/${Rcpp_package} -RUN wget https://cran.r-project.org/src/contrib/Archive/RInside/${RInside_package} -RUN R CMD INSTALL ${Rcpp_package} ${RInside_package} -RUN rm -f ${Rcpp_package} ${RInside_package} - -WORKDIR /hpcc-dev - -ENTRYPOINT ["/bin/bash", "--login", "-c"] diff --git a/dockerfiles/vcpkg/centos-8.dockerfile b/dockerfiles/vcpkg/centos-8.dockerfile deleted file mode 100644 index 35d54ef9567..00000000000 --- a/dockerfiles/vcpkg/centos-8.dockerfile +++ /dev/null @@ -1,16 +0,0 @@ -ARG VCPKG_REF=latest -FROM hpccsystems/platform-build-base-centos-8:$VCPKG_REF - -RUN yum remove -y python3.11 java-1.* && yum install -y \ - java-11-openjdk-devel \ - python3-devel \ - epel-release && \ - yum update -y && yum install -y \ - ccache \ - R-core-devel \ - R-Rcpp-devel \ - R-RInside-devel - -WORKDIR /hpcc-dev - -ENTRYPOINT ["/bin/bash", "--login", "-c"] diff --git a/dockerfiles/vcpkg/ubuntu-20.04.dockerfile b/dockerfiles/vcpkg/ubuntu-20.04.dockerfile deleted file mode 100644 index c5d0585c472..00000000000 --- 
a/dockerfiles/vcpkg/ubuntu-20.04.dockerfile +++ /dev/null @@ -1,19 +0,0 @@ -ARG VCPKG_REF=latest -FROM hpccsystems/platform-build-base-ubuntu-20.04:$VCPKG_REF - -ENV RInside_package=RInside_0.2.14.tar.gz - -RUN apt-get update && apt-get install --no-install-recommends -y \ - ccache \ - default-jdk \ - python3-dev \ - wget \ - r-base \ - r-cran-rcpp -RUN wget https://cran.r-project.org/src/contrib/Archive/RInside/${RInside_package} -RUN R CMD INSTALL ${RInside_package} -RUN rm -f ${RInside_package} - -WORKDIR /hpcc-dev - -ENTRYPOINT ["/bin/bash", "--login", "-c"] diff --git a/dockerfiles/vcpkg/ubuntu-22.04.dockerfile b/dockerfiles/vcpkg/ubuntu-22.04.dockerfile deleted file mode 100644 index 9ff881b9bb3..00000000000 --- a/dockerfiles/vcpkg/ubuntu-22.04.dockerfile +++ /dev/null @@ -1,21 +0,0 @@ -ARG VCPKG_REF=latest -FROM hpccsystems/platform-build-base-ubuntu-22.04:$VCPKG_REF - -RUN apt-get update && apt-get install --no-install-recommends -y \ - ccache \ - default-jdk \ - ninja-build \ - python3-dev \ - rsync \ - fop \ - libsaxonb-java \ - r-base \ - r-cran-rcpp \ - r-cran-rinside \ - r-cran-inline - -RUN git config --global --add safe.directory '*' - -WORKDIR /hpcc-dev - -ENTRYPOINT ["/bin/bash", "--login", "-c"] diff --git a/dockerfiles/vcpkg/ubuntu-23.10.dockerfile b/dockerfiles/vcpkg/ubuntu-23.10.dockerfile deleted file mode 100644 index a9bbac10c3d..00000000000 --- a/dockerfiles/vcpkg/ubuntu-23.10.dockerfile +++ /dev/null @@ -1,21 +0,0 @@ -ARG VCPKG_REF=latest -FROM hpccsystems/platform-build-base-ubuntu-23.10:$VCPKG_REF - -RUN apt-get update && apt-get install --no-install-recommends -y \ - ccache \ - default-jdk \ - ninja-build \ - python3-dev \ - rsync \ - fop \ - libsaxonb-java \ - r-base \ - r-cran-rcpp \ - r-cran-rinside \ - r-cran-inline - -RUN git config --global --add safe.directory '*' - -WORKDIR /hpcc-dev - -ENTRYPOINT ["/bin/bash", "--login", "-c"] diff --git a/vcpkg b/vcpkg index fcde93bcb3f..a65a37760ac 160000 --- a/vcpkg +++ b/vcpkg @@ -1 +1 @@ 
-Subproject commit fcde93bcb3fec4ae8b650cec2e4e1b7787308098 +Subproject commit a65a37760ac2a85527b72b2d81aa50d882ec2e70 From a0319213e6f867dd0d93ce68e41473328639430b Mon Sep 17 00:00:00 2001 From: Rodrigo Pastrana Date: Tue, 31 Oct 2023 14:07:28 -0400 Subject: [PATCH 4/9] HPCC-30697 Track passthrough data as attributes - Creates new setContextAttributes function - Utilizes new setContextAttributes - Defines otel attribute names for globalid, and callerid - No unittest since no exporter available to test locally Signed-off-by: Rodrigo Pastrana --- system/jlib/jtrace.cpp | 10 ++++++++++ system/jlib/jtrace.hpp | 2 ++ 2 files changed, 12 insertions(+) diff --git a/system/jlib/jtrace.cpp b/system/jlib/jtrace.cpp index 88ccae0ec6b..dea2ced7a4f 100644 --- a/system/jlib/jtrace.cpp +++ b/system/jlib/jtrace.cpp @@ -655,6 +655,15 @@ class CServerSpan : public CSpan return success; } + void setContextAttributes() + { + if (!isEmptyString(hpccGlobalId)) + setSpanAttribute(kGlobalIdOtelAttributeName, hpccGlobalId.get()); + + if (!isEmptyString(hpccCallerId)) + setSpanAttribute(kCallerIdOtelAttributeName, hpccCallerId.get()); + } + public: CServerSpan(const char * spanName, const char * tracerName_, const IProperties * httpHeaders, SpanFlags flags) : CSpan(spanName, tracerName_) @@ -662,6 +671,7 @@ class CServerSpan : public CSpan opts.kind = opentelemetry::trace::SpanKind::kServer; setSpanContext(httpHeaders, flags); init(); + setContextAttributes(); } void toLog(StringBuffer & out) const override diff --git a/system/jlib/jtrace.hpp b/system/jlib/jtrace.hpp index a9ec14becca..9e9d58dd8fc 100644 --- a/system/jlib/jtrace.hpp +++ b/system/jlib/jtrace.hpp @@ -27,6 +27,8 @@ static constexpr const char *kGlobalIdHttpHeaderName = "Global-Id"; static constexpr const char *kCallerIdHttpHeaderName = "Caller-Id"; static constexpr const char *kLegacyGlobalIdHttpHeaderName = "HPCC-Global-Id"; static constexpr const char *kLegacyCallerIdHttpHeaderName = "HPCC-Caller-Id"; +static constexpr 
const char *kGlobalIdOtelAttributeName = "hpcc.globalid"; +static constexpr const char *kCallerIdOtelAttributeName = "hpcc.callerid"; enum class SpanFlags : unsigned { From 8e965c218bf4d87a66cb538947dfeab3f9d8ddcd Mon Sep 17 00:00:00 2001 From: Jeremy Clements <79224539+jeclrsg@users.noreply.github.com> Date: Thu, 9 Nov 2023 15:18:31 -0500 Subject: [PATCH 5/9] HPCC-30567 ECL Watch WU logs view set default start date - sets the StartDate as the timestamp in the WUID - fixed issue where the logging engine was still queried twice (once for the row count and again for the grid data), by allowing the Logs grid to set the logCount state from WorkunitDetails Signed-off-by: Jeremy Clements <79224539+jeclrsg@users.noreply.github.com> --- esp/src/src-react/components/Logs.tsx | 40 ++++++++++++++----- .../src-react/components/WorkunitDetails.tsx | 13 ++---- .../components/WorkunitsDashboard.tsx | 3 +- esp/src/src/Utility.ts | 8 ++++ 4 files changed, 42 insertions(+), 22 deletions(-) diff --git a/esp/src/src-react/components/Logs.tsx b/esp/src/src-react/components/Logs.tsx index c2c37b119a0..5d857e364fb 100644 --- a/esp/src/src-react/components/Logs.tsx +++ b/esp/src/src-react/components/Logs.tsx @@ -5,7 +5,7 @@ import { GetLogsExRequest, TargetAudience, LogType } from "@hpcc-js/comms"; import { Level } from "@hpcc-js/util"; import { CreateLogsQueryStore } from "src/ESPLog"; import nlsHPCC from "src/nlsHPCC"; -import { logColor } from "src/Utility"; +import { logColor, wuidToDate, wuidToTime } from "src/Utility"; import { HolyGrail } from "../layouts/HolyGrail"; import { pushParams } from "../util/history"; import { FluentPagedGrid, FluentPagedFooter, useCopyButtons, useFluentStoreState, FluentColumns } from "./controls/Grid"; @@ -14,7 +14,8 @@ import { Fields } from "./forms/Fields"; import { ShortVerticalDivider } from "./Common"; const maximumTimeUntilRefresh = 8 * 60 * 60 * 1000; -const startTimeOffset = 6 * 60 * 60 * 1000; +const startTimeOffset = 1 * 60 * 60 * 1000; 
+const endTimeOffset = 23 * 60 * 60 * 1000; const defaultStartDate = new Date(new Date().getTime() - startTimeOffset); const FilterFields: Fields = { @@ -62,7 +63,8 @@ function formatQuery(_request: any): Partial { interface LogsProps { wuid?: string; filter?: Partial; - page?: number + page?: number; + setLogCount?: (count: number | string) => void; } export const defaultFilter: Partial = { StartDate: defaultStartDate }; @@ -83,7 +85,8 @@ const levelMap = (level) => { export const Logs: React.FunctionComponent = ({ wuid, filter = defaultFilter, - page + page, + setLogCount }) => { const hasFilter = React.useMemo(() => Object.keys(filter).length > 0, [filter]); @@ -103,12 +106,21 @@ export const Logs: React.FunctionComponent = ({ const query = React.useMemo(() => { if (wuid !== undefined) { filter.workunits = wuid; - } - if (typeof filter.StartDate === "string") { - filter.StartDate = new Date(filter.StartDate); - } - if (filter.StartDate && now.getTime() - filter.StartDate.getTime() > maximumTimeUntilRefresh) { - filter.StartDate = new Date(now.getTime() - startTimeOffset); + if (typeof filter.StartDate === "string") { + filter.StartDate = new Date(filter.StartDate + ":00Z"); + } else { + filter.StartDate = new Date(`${wuidToDate(wuid)}T${wuidToTime(wuid)}Z`); + } + } else { + if (typeof filter.StartDate === "string") { + filter.StartDate = new Date(filter.StartDate + ":00Z"); + } + if (filter.StartDate && now.getTime() - filter.StartDate.getTime() > maximumTimeUntilRefresh) { + filter.StartDate = new Date(now.getTime() - startTimeOffset); + } + if (!filter.EndDate) { + filter.EndDate = new Date(now.getTime() + endTimeOffset); + } } return formatQuery(filter); }, [filter, now, wuid]); @@ -157,6 +169,7 @@ export const Logs: React.FunctionComponent = ({ for (const field in FilterFields) { retVal[field] = { ...FilterFields[field], value: filter[field] }; if (wuid !== undefined) { + delete filter.workunits; delete retVal.jobId; } } @@ -175,7 +188,12 @@ export const 
Logs: React.FunctionComponent = ({ total={total} columns={columns} setSelection={setSelection} - setTotal={setTotal} + setTotal={(total) => { + setTotal(total); + if (setLogCount) { + setLogCount(total); + } + }} refresh={refreshTable} > diff --git a/esp/src/src-react/components/WorkunitDetails.tsx b/esp/src/src-react/components/WorkunitDetails.tsx index ccfdccccf8c..3ae985ed24c 100644 --- a/esp/src/src-react/components/WorkunitDetails.tsx +++ b/esp/src/src-react/components/WorkunitDetails.tsx @@ -3,7 +3,7 @@ import { IPivotItemProps, Pivot, PivotItem } from "@fluentui/react"; import { scopedLogger } from "@hpcc-js/util"; import { SizeMe } from "react-sizeme"; import nlsHPCC from "src/nlsHPCC"; -import { service, hasLogAccess } from "src/ESPLog"; +import { hasLogAccess } from "src/ESPLog"; import { useWorkunit } from "../hooks/workunit"; import { useUserTheme } from "../hooks/theme"; import { useDeepEffect } from "../hooks/deepHooks"; @@ -64,13 +64,8 @@ export const WorkunitDetails: React.FunctionComponent = ({ hasLogAccess().then(response => { setLogsDisabled(!response); return response; - }).then(hasLogAccess => { - if (hasLogAccess) { - service.GetLogsEx({ ...queryParams, workunits: wuid, LogLineStartFrom: 0, LogLineLimit: 10 }).then(response => { // HPCC-27711 - Requesting LogLineLimit=1 causes issues - setLogCount(response.total); - }).catch((err) => logger.error(err)); - } - }).catch(() => { + }).catch(err => { + logger.warning(err); setLogsDisabled(true); }); }, [wuid], [queryParams]); @@ -116,7 +111,7 @@ export const WorkunitDetails: React.FunctionComponent = ({ } - + diff --git a/esp/src/src-react/components/WorkunitsDashboard.tsx b/esp/src/src-react/components/WorkunitsDashboard.tsx index c994d9ad108..b541ae83afc 100644 --- a/esp/src/src-react/components/WorkunitsDashboard.tsx +++ b/esp/src/src-react/components/WorkunitsDashboard.tsx @@ -8,6 +8,7 @@ import { chain, filter, group, map, sort } from "@hpcc-js/dataflow"; import * as Observable from 
"dojo/store/Observable"; import * as ESPWorkunit from "src/ESPWorkunit"; import nlsHPCC from "src/nlsHPCC"; +import { wuidToDate } from "src/Utility"; import { Memory } from "src/store/Memory"; import { Chip } from "./controls/Chip"; import { pushParamExact } from "../util/history"; @@ -32,8 +33,6 @@ const innerStackTokens: IStackTokens = { const service = new WorkunitsService({ baseUrl: "" }); -const wuidToDate = (wuid: string) => `${wuid.substr(1, 4)}-${wuid.substr(5, 2)}-${wuid.substr(7, 2)}`; - interface WorkunitEx extends WUQuery.ECLWorkunit { Day: string; } diff --git a/esp/src/src/Utility.ts b/esp/src/src/Utility.ts index 04de91c4617..7dc65172d7f 100644 --- a/esp/src/src/Utility.ts +++ b/esp/src/src/Utility.ts @@ -1197,4 +1197,12 @@ export function wrapStringWithTag(string, tag = "span") { export function isSpill(sourceKind: string, targetKind: string): boolean { return sourceKind === "2" || targetKind === "71"; +} + +export function wuidToDate(wuid: string): string { + return `${wuid.substring(1, 5)}-${wuid.substring(5, 7)}-${wuid.substring(7, 9)}`; +} + +export function wuidToTime(wuid: string): string { + return `${wuid.substring(10, 12)}:${wuid.substring(12, 14)}:${wuid.substring(14, 16)}`; } \ No newline at end of file From ba798351c96b39736efdcc2012d28e9dfa6eb6f7 Mon Sep 17 00:00:00 2001 From: Jeremy Clements <79224539+jeclrsg@users.noreply.github.com> Date: Tue, 14 Nov 2023 11:47:11 -0500 Subject: [PATCH 6/9] HPCC-30844 ECL Watch revert IdleWatcher event strings Signed-off-by: Jeremy Clements <79224539+jeclrsg@users.noreply.github.com> --- esp/src/src/ESPUtil.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/esp/src/src/ESPUtil.ts b/esp/src/src/ESPUtil.ts index 8c8a94e4abc..43cf058435d 100644 --- a/esp/src/src/ESPUtil.ts +++ b/esp/src/src/ESPUtil.ts @@ -314,10 +314,10 @@ export function goToPageUserPreference(gridName, key) { export const MonitorLockClick = dojo.declare([Evented], { unlocked() { - this.emit("Unlocked", {}); + 
this.emit("unlocked", {}); }, locked() { - this.emit("Locked", {}); + this.emit("locked", {}); } }); From 6e9d9a9fd00af901b5ac72c6158690bb28c09bd3 Mon Sep 17 00:00:00 2001 From: Jeremy Clements <79224539+jeclrsg@users.noreply.github.com> Date: Tue, 14 Nov 2023 12:09:15 -0500 Subject: [PATCH 7/9] HPCC-30846 ECL Watch fix merge issues in Frame.tsx Signed-off-by: Jeremy Clements <79224539+jeclrsg@users.noreply.github.com> --- esp/src/src-react/components/Frame.tsx | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/esp/src/src-react/components/Frame.tsx b/esp/src/src-react/components/Frame.tsx index 2c4f7b0cecf..594f756080c 100644 --- a/esp/src/src-react/components/Frame.tsx +++ b/esp/src/src-react/components/Frame.tsx @@ -90,24 +90,6 @@ export const Frame: React.FunctionComponent = () => { return () => unlisten(); }, []); - React.useEffect(() => { - initSession(); - - topic.subscribe("hpcc/session_management_status", function (publishedMessage) { - if (publishedMessage.status === "Unlocked") { - unlock(); - } else if (publishedMessage.status === "Locked") { - lock(); - } else if (publishedMessage.status === "DoIdle") { - fireIdle(); - } else if (publishedMessage.status === "Idle") { - window.localStorage.setItem("pageOnLock", window.location.hash.substring(1)); - setUserSession({ ...userSession, Status: "Locked" }); - window.location.reload(); - } - }); - }, [setUserSession, userSession]); - React.useEffect(() => { document.title = `${showEnvironmentTitle && environmentTitle.length ? 
environmentTitle : "ECL Watch v9"}${locationPathname.split("/").join(" | ")}`; }, [environmentTitle, locationPathname, showEnvironmentTitle]); From 4d2fd1acdc8e53043c091bbe35da255ce0db9314 Mon Sep 17 00:00:00 2001 From: Jeremy Clements <79224539+jeclrsg@users.noreply.github.com> Date: Wed, 15 Nov 2023 09:28:38 -0500 Subject: [PATCH 8/9] HPCC-30841 ECL Watch v9 fix DFU WU details incorrect information Signed-off-by: Jeremy Clements <79224539+jeclrsg@users.noreply.github.com> --- .../components/DFUWorkunitDetails.tsx | 93 +++++++++++++++---- esp/src/src-react/components/forms/Fields.tsx | 2 +- 2 files changed, 77 insertions(+), 18 deletions(-) diff --git a/esp/src/src-react/components/DFUWorkunitDetails.tsx b/esp/src/src-react/components/DFUWorkunitDetails.tsx index c03751a0486..e959727a081 100644 --- a/esp/src/src-react/components/DFUWorkunitDetails.tsx +++ b/esp/src/src-react/components/DFUWorkunitDetails.tsx @@ -9,11 +9,43 @@ import { useDfuWorkunit } from "../hooks/workunit"; import { pivotItemStyle } from "../layouts/pivot"; import { pushUrl, replaceUrl } from "../util/history"; import { ShortVerticalDivider } from "./Common"; +import { Field, Fields } from "./forms/Fields"; import { TableGroup } from "./forms/Groups"; import { XMLSourceEditor } from "./SourceEditor"; const logger = scopedLogger("../components/DFUWorkunitDetails.tsx"); +const createField = (label: string, value: any): Field => { + return { label, type: typeof value === "number" ? 
"number" : "string", value, readonly: true }; +}; + +type FieldMap = { key: string, label: string }; +const sourceFieldIds: FieldMap[] = [ + { key: "SourceIP", label: nlsHPCC.IP }, + { key: "SourceDirectory", label: nlsHPCC.Directory }, + { key: "SourceFilePath", label: nlsHPCC.FilePath }, + { key: "SourceLogicalName", label: nlsHPCC.LogicalName }, + { key: "SourceNumParts", label: nlsHPCC.NumberofParts }, + { key: "SourceDali", label: nlsHPCC.Dali }, + { key: "SourceFormat", label: nlsHPCC.Format }, + { key: "SourceRecordSize", label: nlsHPCC.RecordSize }, + { key: "RowTag", label: nlsHPCC.RowTag }, + { key: "SourceCsvSeparate", label: nlsHPCC.Separators }, + { key: "SourceCsvEscape", label: nlsHPCC.Escape }, + { key: "SourceCsvTerminate", label: nlsHPCC.Terminators }, + { key: "SourceCsvQuote", label: nlsHPCC.Quote } +]; +const targetFieldIds: FieldMap[] = [ + { key: "DestIP", label: nlsHPCC.IP }, + { key: "DestDirectory", label: nlsHPCC.Directory }, + { key: "DestFilePath", label: nlsHPCC.FilePath }, + { key: "DestLogicalName", label: nlsHPCC.LogicalName }, + { key: "DestGroupName", label: nlsHPCC.GroupName }, + { key: "DestNumParts", label: nlsHPCC.NumberofParts }, + { key: "DestFormat", label: nlsHPCC.Format }, + { key: "DestRecordSize", label: nlsHPCC.RecordSize } +]; + interface DFUWorkunitDetailsProps { wuid: string; tab?: string; @@ -27,6 +59,10 @@ export const DFUWorkunitDetails: React.FunctionComponent(); + const [targetFormatMessage, setTargetFormatMessage] = React.useState(""); + const [targetFields, setTargetFields] = React.useState(); const [_protected, setProtected] = React.useState(false); const [showMessageBar, setShowMessageBar] = React.useState(false); @@ -53,9 +89,44 @@ export const DFUWorkunitDetails: React.FunctionComponent { + if (!workunit) return; setJobname(workunit?.JobName); setProtected(workunit?.isProtected); - }, [workunit?.JobName, workunit?.isProtected]); + + const sourceFormatMsg = FileSpray.FormatMessages[workunit?.SourceFormat]; 
+ if (sourceFormatMsg === "csv") { + setSourceFormatMessage(`(${nlsHPCC.CSV})`); + } else if (sourceFormatMsg === "fixed") { + setSourceFormatMessage(`(${nlsHPCC.Fixed})`); + } else if (!!workunit?.RowTag) { + setSourceFormatMessage(`(${nlsHPCC.XML}/${nlsHPCC.JSON})`); + } + + const _sourceFields: Fields = {}; + for (const fieldId of sourceFieldIds) { + if (workunit[fieldId.key] !== undefined) { + const value = fieldId.key === "SourceFormat" ? FileSpray.FormatMessages[workunit[fieldId.key]] : workunit[fieldId.key]; + _sourceFields[fieldId.key] = createField(fieldId.label, value ?? null); + } + } + setSourceFields(_sourceFields); + + const destFormatMsg = FileSpray.FormatMessages[workunit?.DestFormat]; + if (destFormatMsg === "csv") { + setTargetFormatMessage(`(${nlsHPCC.CSV})`); + } else if (destFormatMsg === "fixed") { + setTargetFormatMessage(`(${nlsHPCC.Fixed})`); + } + + const _targetFields: Fields = {}; + for (const fieldId of targetFieldIds) { + if (workunit[fieldId.key] !== undefined) { + const value = fieldId.key === "DestFormat" ? FileSpray.FormatMessages[workunit[fieldId.key]] : workunit[fieldId.key]; + _targetFields[fieldId.key] = createField(fieldId.label, value ?? null); + } + } + setTargetFields(_targetFields); + }, [workunit]); const canSave = React.useMemo(() => { return jobname !== workunit?.JobName || _protected !== workunit?.isProtected; @@ -161,23 +232,11 @@ export const DFUWorkunitDetails: React.FunctionComponent
-

{nlsHPCC.Source} ({nlsHPCC.Fixed})

- +

{nlsHPCC.Source} {sourceFormatMessage}

+
-

{nlsHPCC.Target}

- +

{nlsHPCC.Target} {targetFormatMessage}

+

{nlsHPCC.Other}

Date: Thu, 16 Nov 2023 11:09:53 -0500 Subject: [PATCH 9/9] HPCC-30836 ECL Watch add jobname to event scheduler filter Signed-off-by: Jeremy Clements <79224539+jeclrsg@users.noreply.github.com> --- esp/src/eclwatch/templates/EventScheduleWorkunitWidget.html | 5 +++-- esp/src/src-react/components/EventScheduler.tsx | 3 ++- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/esp/src/eclwatch/templates/EventScheduleWorkunitWidget.html b/esp/src/eclwatch/templates/EventScheduleWorkunitWidget.html index 3550aeb1bd2..097538f1aa1 100644 --- a/esp/src/eclwatch/templates/EventScheduleWorkunitWidget.html +++ b/esp/src/eclwatch/templates/EventScheduleWorkunitWidget.html @@ -10,8 +10,9 @@
- - + + +
diff --git a/esp/src/src-react/components/EventScheduler.tsx b/esp/src/src-react/components/EventScheduler.tsx index a10c1391b2b..cbe039a8492 100644 --- a/esp/src/src-react/components/EventScheduler.tsx +++ b/esp/src/src-react/components/EventScheduler.tsx @@ -19,7 +19,8 @@ import { useMyAccount } from "../hooks/user"; const logger = scopedLogger("src-react/components/EventScheduler.tsx"); const FilterFields: Fields = { - "EventName": { type: "string", label: nlsHPCC.EventName }, + "EventName": { type: "string", label: nlsHPCC.EventName, placeholder: nlsHPCC.EventNamePH }, + "JobName": { type: "string", label: nlsHPCC.JobName, placeholder: nlsHPCC.log_analysis_1 }, "State": { type: "workunit-state", label: nlsHPCC.State, placeholder: "" }, "Owner": { type: "string", label: nlsHPCC.Owner, placeholder: nlsHPCC.jsmi }, "EventText": { type: "string", label: nlsHPCC.EventText, placeholder: nlsHPCC.EventTextPH },