diff --git a/.github/workflows/build_centos7.yml b/.github/workflows/build_centos7.yml
deleted file mode 100644
index da87fd5bd..000000000
--- a/.github/workflows/build_centos7.yml
+++ /dev/null
@@ -1,167 +0,0 @@
-name: Centos7 Build
-on:
-  merge_group:
-  push:
-    branches:
-      - develop
-      - dependabot/*
-  pull_request:
-  release:
-    types: [ created ]
-
-env:
-  GITHUB_TOKEN: ${{ github.token }}
-
-jobs:
-  docker_publish:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@master
-        with:
-          fetch-depth: 0
-      - name: Get changed files
-        id: changed-files
-        uses: tj-actions/changed-files@v32
-        with:
-          files: |
-            docker/centos7-system-deps
-
-      - name: Docker file push
-        id: docker_push
-        if: steps.changed-files.outputs.any_changed == 'true'
-        uses: elgohr/Publish-Docker-Github-Action@main
-        with:
-          name: antaresrte/rte-antares
-          username: ${{ secrets.DOCKER_USERNAME }}
-          password: ${{ secrets.DOCKER_PASSWORD }}
-          workdir: docker
-          dockerfile: centos7-system-deps
-          cache: false
-          tags: centos7-system-deps
-
-  versions:
-    runs-on: ubuntu-latest
-    outputs:
-      antares-version: ${{steps.antares-version.outputs.result}}
-      antares-xpansion-version: ${{steps.antares-xpansion-version.outputs.result}}
-      antares-deps-version: ${{steps.antares-deps-version.outputs.result}}
-    steps:
-      - uses: actions/checkout@v3 #Keep at 3. v4 uses node 20 which uses glibc_2.27
-      - name: Read antares-solver version
-        id: antares-version
-        uses: ./.github/actions/read-json-value
-        with:
-          path: 'antares-version.json'
-          key: 'antares_version'
-
-      - name: Read antares-xpansion version
-        id: antares-xpansion-version
-        uses: ./.github/actions/read-json-value
-        with:
-          path: 'antares-version.json'
-          key: 'antares_xpansion_version'
-
-      - name: Read antares-deps version
-        id: antares-deps-version
-        uses: ./.github/actions/read-json-value
-        with:
-          path: 'antares-version.json'
-          key: 'antares_deps_version'
-
-  build:
-    runs-on: ubuntu-latest
-    needs: [ docker_publish, versions ]
-    container: 'antaresrte/rte-antares:centos7-system-deps'
-    strategy:
-      matrix:
-        xprs: [
-          # { value: XPRESS-ON, ref: 8.13a },
-          { value: XPRESS-ON, ref: 9.2.5 },
-          # { value: XPRESS-OFF }
-        ]
-    env:
-      XPRESSDIR: ${{ github.workspace }}/xpress
-      XPRESS: ${{ github.workspace }}/xpress/bin
-      XPRS_LIB_Path: ${{ github.workspace }}/xpress/lib
-      XPRESSDIR_CONTAINER: ${GITHUB_WORKSPACE}/xpress
-      XPRESS_CONTAINER: ${GITHUB_WORKSPACE}/xpress/bin
-      XPRS_LIB_Path_CONTAINER: ${GITHUB_WORKSPACE}/xpress/lib
-
-    steps:
-      - name: Get release
-        if: github.event_name == 'release' && github.event.action == 'created'
-        id: get_release
-        uses: bruceadams/get-release@v1.3.2
-
-      - uses: actions/checkout@v3 #Keep at 3
-        with:
-          submodules: true
-
-      - uses: ./.github/workflows/compile-gtest
-      - name: Checkout xpressmp linux
-        uses: actions/checkout@v3 #keep v3
-        with:
-          token: ${{ secrets.AS_TOKEN }}
-          repository: rte-france/xpress-mp
-          path: ${{ env.XPRESSDIR }}
-          github-server-url: https://github.com
-          ref: ${{matrix.xprs.ref}}
-        if: matrix.xprs.value == 'XPRESS-ON'
-
-      - name: Download pre-compiled libraries
-        uses: ./.github/workflows/download-extract-precompiled-libraries-tgz
-        with:
-          antares-deps-version: ${{needs.versions.outputs.antares-deps-version}}
-          antares-version: ${{needs.versions.outputs.antares-version}}
-          os: centos7
-          os-full-name: CentOS-7.9.2009
-          #variant: -ortools-xpress
-
-      - name: Compile Boost
-        uses: ./.github/workflows/compile-boost
-        with:
-          prefix: "../rte-antares-deps-Release/"
-
-      - name: Compile tbb
-        uses: ./.github/workflows/compile-tbb
-
with: - cmake: 'cmake3' - - - name: Install dependencies - run: | - pip3 install --upgrade pip - pip3 install wheel #Does not work in requirements - pip3 install -r requirements-tests.txt - pip3 install -r requirements-ui.txt - - name: Configure - run: | - [[ ${{ matrix.xprs.value }} == "XPRESS-ON" ]] && XPRESS_VALUE="ON" || XPRESS_VALUE="OFF" - source /opt/rh/devtoolset-10/enable - export LD_LIBRARY_PATH=/usr/lib64/openmpi/lib:$LD_LIBRARY_PATH - export PATH=/usr/lib64/openmpi/bin:$PATH - cmake3 -B _build -S . \ - -DDEPS_INSTALL_DIR=rte-antares-deps-Release \ - -DBUILD_TESTING=ON \ - -DCMAKE_BUILD_TYPE=Release \ - -DCMAKE_INSTALL_PREFIX=_install \ - -DBUILD_UI=ON \ - -DXPRESS=${{ env.XPRESS_VALUE }} \ - -DXPRESS_ROOT=${{ env.XPRESSDIR }} \ - -DALLOW_RUN_AS_ROOT=ON - - name: Build - run: | - source /opt/rh/devtoolset-10/enable - export LD_LIBRARY_PATH=/usr/lib64/openmpi/lib:$LD_LIBRARY_PATH - export PATH=/usr/lib64/openmpi/bin:$PATH - cmake3 --build _build --config Release -j8 --target install - - name: Running unit tests - timeout-minutes: 120 - shell: bash - run: | - source /etc/profile.d/modules.sh - module load mpi - export LD_LIBRARY_PATH=LD_LIBRARY_PATH:${{ env.XPRS_LIB_Path_CONTAINER }} - export XPRESS=${{ env.XPRESS_CONTAINER }} - cd _build - ctest3 -C Release --output-on-failure -L "unit|benders|lpnamer|medium" diff --git a/.github/workflows/build_oracle8.yml b/.github/workflows/build_oracle8.yml deleted file mode 100644 index 132444be4..000000000 --- a/.github/workflows/build_oracle8.yml +++ /dev/null @@ -1,151 +0,0 @@ -name: Oracle 8 Build - -on: - merge_group: - push: - branches: - - develop - - dependabot/* - pull_request: - release: - types: [ created ] - -env: - GITHUB_TOKEN: ${{ github.token }} - -jobs: - versions: - runs-on: ubuntu-latest - outputs: - antares-version: ${{steps.antares-version.outputs.result}} - antares-xpansion-version: ${{steps.antares-xpansion-version.outputs.result}} - antares-deps-version: ${{steps.antares-deps-version.outputs.result}} - steps: - - uses: actions/checkout@v4 - - name: Read antares-solver version - id: antares-version - uses: ./.github/actions/read-json-value - with: - path: 'antares-version.json' - key: 'antares_version' - - - name: Read antares-xpansion version - id: antares-xpansion-version - uses: ./.github/actions/read-json-value - with: - path: 'antares-version.json' - key: 'antares_xpansion_version' - - - name: Read antares-deps version - id: antares-deps-version - uses: ./.github/actions/read-json-value - with: - path: 'antares-version.json' - key: 'antares_deps_version' - - build: - name: Build - runs-on: ubuntu-latest - container: 'oraclelinux:8' - strategy: - matrix: - xprs: [ #{ value: XPRESS-ON, ref: 8.13a }, - { value: XPRESS-ON, ref: 9.2.5 }, - # { value: XPRESS-OFF } - ] - needs: [ versions ] - env: - XPRESSDIR: ${{ github.workspace }}/xpress - XPRESS: ${{ github.workspace }}/xpress/bin - XPRS_LIB_Path: ${{ github.workspace }}/xpress/lib - XPRESSDIR_CONTAINER: ${GITHUB_WORKSPACE}/xpress - XPRESS_CONTAINER: ${GITHUB_WORKSPACE}/xpress/bin - XPRS_LIB_Path_CONTAINER: ${GITHUB_WORKSPACE}/xpress/lib - - steps: - - - name: Install System - run: | - dnf install -y epel-release git cmake wget rpm-build redhat-lsb-core openmpi-devel - dnf install -y unzip libuuid-devel boost-test boost-devel gcc-toolset-10-toolchain zlib-devel python3-devel - - - uses: actions/checkout@v4 - with: - submodules: true - - - uses: ./.github/workflows/compile-gtest - - - name: Checkout xpressmp linux - uses: actions/checkout@v4 - with: - token: ${{ 
secrets.AS_TOKEN }} - repository: rte-france/xpress-mp - path: ${{ env.XPRESSDIR }} - github-server-url: https://github.com - ref: ${{matrix.xprs.ref}} - if: matrix.xprs.value == 'XPRESS-ON' - - - name: Set up Python - run: | - dnf update -y - dnf install -y python3 python3-pip - - - run: - echo ${{needs.versions.outputs.antares-deps-version}} - - - name: Download pre-compiled librairies - uses: ./.github/workflows/download-extract-precompiled-libraries-tgz - with: - antares-deps-version: ${{needs.versions.outputs.antares-deps-version}} - antares-version: ${{needs.versions.outputs.antares-version}} - os: Oracle8 - os-full-name: OracleServer-8.9 - - - name: Compile Boost - uses: ./.github/workflows/compile-boost - with: - prefix: "../rte-antares-deps-Release/" - - name: Compile TBB - uses: ./.github/workflows/compile-tbb - - - name: Install dependencies - run: | - source /opt/rh/gcc-toolset-10/enable - pip3 install wheel #Too late to install in requirements.txt - pip3 install -r requirements-tests.txt - - - name: Configure - run: | - [[ ${{ matrix.xprs.value }} == "XPRESS-ON" ]] && XPRESS_VALUE="ON" || XPRESS_VALUE="OFF" - source /opt/rh/gcc-toolset-10/enable - dnf install jsoncpp-devel - export LD_LIBRARY_PATH=/usr/lib64/openmpi/lib:$LD_LIBRARY_PATH - export PATH=/usr/lib64/openmpi/bin:$PATH - cmake3 -B _build -S . \ - -DDEPS_INSTALL_DIR=rte-antares-deps-Release \ - -DBUILD_TESTING=ON \ - -DCMAKE_BUILD_TYPE=Release \ - -DCMAKE_INSTALL_PREFIX=_install \ - -DBUILD_UI=OFF \ - -DXPRESS=${{ env.XPRESS_VALUE }} \ - -DXPRESS_ROOT=${{ env.XPRESSDIR }} \ - -DALLOW_RUN_AS_ROOT=ON - - - - name: Build - run: | - source /opt/rh/gcc-toolset-10/enable - export LD_LIBRARY_PATH=/usr/lib64/openmpi/lib:$LD_LIBRARY_PATH - export PATH=/usr/lib64/openmpi/bin:$PATH - cmake --build _build --config Release -j2 - - - name: Running unit tests - timeout-minutes: 120 - shell: bash - run: | - source /etc/profile.d/modules.sh - module load mpi - export LD_LIBRARY_PATH=LD_LIBRARY_PATH:${{ env.XPRS_LIB_Path_CONTAINER }} - export XPRESS=${{ env.XPRESS_CONTAINER }} - cd _build - ctest3 -C Release --output-on-failure -L "unit|benders|lpnamer|medium" diff --git a/.github/workflows/build_ubuntu.yml b/.github/workflows/build_ubuntu.yml index 902bbbf97..7417fd034 100644 --- a/.github/workflows/build_ubuntu.yml +++ b/.github/workflows/build_ubuntu.yml @@ -3,10 +3,6 @@ name: Ubuntu build on: merge_group: push: - branches: - - main - - develop - - dependabot/* pull_request: release: types: [ created ] @@ -23,8 +19,8 @@ jobs: os: [ ubuntu-20.04 ] xprs: [ #{ value: XPRESS-ON, ref: 8.13a }, - { value: XPRESS-ON, ref: 9.2.5 }, - # { value: XPRESS-OFF } + { value: XPRESS-ON, ref: 9.2.5 }, + # { value: XPRESS-OFF } ] env: XPRESSDIR: ${{ github.workspace }}/xpress @@ -36,106 +32,7 @@ jobs: with: submodules: true - - name: Checkout xpressmp linux - if: matrix.xprs.value == 'XPRESS-ON' - uses: actions/checkout@v4 - with: - repository: rte-france/xpress-mp - path: ${{ env.XPRESSDIR }} - ref: ${{ matrix.xprs.ref}} - token: ${{ secrets.AS_TOKEN }} #reniew token periodically - - - name: ccache - uses: hendrikmuhs/ccache-action@v1.2.3 - with: - key: ${{ matrix.os }}-${{ matrix.xprs.value }} - - - name: Set up Python - uses: actions/setup-python@v5 - with: - cache: 'pip' - python-version: 3.8 - - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install -r requirements-tests.txt - pip install -r requirements-ui.txt - - - name: Install mandatory system libraries - run: | - sudo apt-get update --fix-missing - sudo 
apt-get install -y ccache cmake libgtest-dev libjsoncpp-dev libtbb-dev libopenmpi-dev - sudo apt-get install -y g++-10 gcc-10 - - - name: Update alternatives - #mpicxx uses "g++" so we need g++ to be symbolic link to g++-10 - run: | - sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-10 10 - sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-10 10 - sudo update-alternatives --install /usr/bin/cc cc /usr/bin/gcc 30 - sudo update-alternatives --set cc /usr/bin/gcc - sudo update-alternatives --install /usr/bin/c++ c++ /usr/bin/g++ 30 - sudo update-alternatives --set c++ /usr/bin/g++ - - - name: Read antares-solver version - id: antares-version - uses: ./.github/actions/read-json-value - with: - path: 'antares-version.json' - key: 'antares_version' - - - name: Read antares-xpansion version - id: antares-xpansion-version - uses: ./.github/actions/read-json-value - with: - path: 'antares-version.json' - key: 'antares_xpansion_version' - - - name: Read antares-deps version - id: antares-deps-version - uses: ./.github/actions/read-json-value - with: - path: 'antares-version.json' - key: 'antares_deps_version' - - - name: Download pre-compiled librairies - uses: ./.github/workflows/download-extract-precompiled-libraries-tgz - with: - antares-deps-version: ${{steps.antares-deps-version.outputs.result}} - antares-version: ${{steps.antares-version.outputs.result}} - os: ${{matrix.os}} - os-full-name: Ubuntu-20.04 - - - name: Compile Boost - uses: ./.github/workflows/compile-boost - with: - prefix: "../rte-antares-deps-Release/" - load-toolset: 'false' - - - name: Configure - shell: bash - #XPRESS_VALUE = ${{ matrix.xprs }} == "XPRESS-ON" ? "ON" : "OFF" - run: | - [[ ${{ matrix.xprs.value }} == "XPRESS-ON" ]] && XPRESS_VALUE="ON" || XPRESS_VALUE="OFF" - cmake -B _build -S . \ - -DCMAKE_C_COMPILER_LAUNCHER=ccache \ - -DCMAKE_C_COMPILER=/usr/bin/gcc-10 \ - -DCMAKE_CXX_COMPILER_LAUNCHER=ccache \ - -DCMAKE_CXX_COMPILER=/usr/bin/g++-10 \ - -DDEPS_INSTALL_DIR=rte-antares-deps-Release \ - -DBUILD_TESTING=ON \ - -DCMAKE_BUILD_TYPE=Release \ - -DCMAKE_INSTALL_PREFIX=_install \ - -DBUILD_UI=ON \ - -DXPRESS=${{ env.XPRESS_VALUE }} \ - -DXPRESS_ROOT=${{ env.XPRESSDIR }} - - - name: Build - run: | - cmake --build _build --config Release -j8 - - - name: Test - run: | - cd _build - ctest -C Release --output-on-failure -L "medium|unit|benders|lpnamer" + - name: Dump GitHub context + env: + GITHUB_CONTEXT: ${{ toJson(github) }} + run: echo "$GITHUB_CONTEXT" diff --git a/.github/workflows/build_windows.yml b/.github/workflows/build_windows.yml deleted file mode 100644 index be2f6cfd8..000000000 --- a/.github/workflows/build_windows.yml +++ /dev/null @@ -1,150 +0,0 @@ -name: Windows build - -on: - merge_group: - push: - branches: - - main - - develop - - dependabot/* - pull_request: - release: - types: [ created ] - -env: - GITHUB_TOKEN: ${{ github.token }} - -jobs: - windows: - runs-on: ${{ matrix.os }} - if: "!contains(github.event.head_commit.message, '[skip ci]')" - strategy: - matrix: - os: [ windows-latest ] - triplet: [ x64-windows ] - xprs: [ #{ value: XPRESS-ON, ref: 8.13a }, - { value: XPRESS-ON, ref: 9.2.5 }, - #{ value: XPRESS-OFF } - ] - env: - XPRESSDIR: ${{ github.workspace }}\xpress - XPRESS: ${{ github.workspace }}\xpress\bin - XPRS_LIB_Path: ${{ github.workspace }}\xpress\lib - # Indicates the location of the vcpkg as a Git submodule of the project repository. 
-      VCPKG_ROOT: ${{ github.workspace }}/vcpkg
-
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          submodules: true
-
-      - name: Checkout xpressmp linux
-        if: matrix.xprs.value == 'XPRESS-ON'
-        uses: actions/checkout@v4
-        with:
-          repository: rte-france/xpress-mp-temp
-          path: ${{ env.XPRESSDIR }}
-          ref: ${{matrix.xprs.ref}}
-          token: ${{ secrets.AS_TOKEN }}
-
-      - name: Get release
-        if: github.event_name == 'release' && github.event.action == 'created'
-        id: get_release
-        uses:
-          bruceadams/get-release@v1.3.2
-
-      - name: Set up Python
-        uses: actions/setup-python@v5
-        with:
-          python-version: '3.x'
-          cache: 'pip'
-          cache-dependency-path: requirements*.txt
-
-      # Restore both vcpkg and its artifacts from the GitHub cache service.
-      - name: Restore vcpkg and its artifacts.
-        uses: actions/cache@v4
-        with:
-          # The first path is the location of vcpkg (it contains the vcpkg executable and data files).
-          # The other paths starting with '!' are exclusions: they contain temporary files generated during the build of the installed packages.
-          path: |
-            ${{ env.VCPKG_ROOT }}
-            !${{ env.VCPKG_ROOT }}/buildtrees
-            !${{ env.VCPKG_ROOT }}/packages
-            !${{ env.VCPKG_ROOT }}/downloads
-          # The key is composed in a way that it gets properly invalidated: this must happen whenever vcpkg's Git commit id changes or the list of packages changes. In that case a cache miss occurs and a new entry with a new key will be pushed to the GitHub cache service.
-          # The key includes: the hash of the vcpkg.json file, the hash of the vcpkg Git commit id, and the vcpkg triplet in use. The vcpkg commit id alone would suffice, but hashing it as well does no harm.
-          # Note: given a key, the cache content is immutable. If a cache entry has been created improperly, then in order to recreate the right content the key must be changed as well, and it must be brand new (i.e. not already existing).
- key: | - ${{ hashFiles( 'vcpkg.json' ) }}-${{ hashFiles( '.git/modules/vcpkg/HEAD' )}}-${{ matrix.triplet }} - - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install -r requirements-tests.txt - pip install -r requirements-ui.txt - - - name: Pre-requisites - shell: cmd - run: | - choco install wget zip unzip --no-progress - wget -nv https://github.com/microsoft/Microsoft-MPI/releases/download/v10.1.1/msmpisetup.exe - msmpisetup.exe -unattend - - - name: Read antares-solver version - id: antares-version - uses: ./.github/actions/read-json-value - with: - path: 'antares-version.json' - key: 'antares_version' - - name: Read antares-deps version - id: antares-deps-version - uses: ./.github/actions/read-json-value - with: - path: 'antares-version.json' - key: 'antares_deps_version' - - - name: Install deps with VCPKG - run: | - cd vcpkg - ./bootstrap-vcpkg.sh - vcpkg install --triplet ${{matrix.triplet}} - rm -rf buildtrees - rm -rf packages - rm -rf downloads - shell: bash - - - name: Compile tbb - uses: ./.github/workflows/compile-tbb - with: - cmake: 'cmake' - - - name: Download pre-compiled librairies - uses: ./.github/workflows/download-extract-precompiled-libraries-zip - with: - antares-deps-version: ${{steps.antares-deps-version.outputs.result}} - antares-version: ${{steps.antares-version.outputs.result}} - os: ${{matrix.os}} - - - name: Expand xpress value in env - #I can't seem to expand the variable in the cmake command line so export it in env - shell: bash - run: | - [[ ${{ matrix.xprs.value }} == "XPRESS-ON" ]] && XPRESS_VALUE="ON" || XPRESS_VALUE="OFF" - echo "XPRESS_VALUE=$XPRESS_VALUE" >> $GITHUB_ENV - - - name: Configure - run: | - $pwd=Get-Location - cmake -B _build -S . -DDEPS_INSTALL_DIR=rte-antares-deps-Release -DCMAKE_PREFIX_PATH="$pwd\rte-antares-${{steps.antares-version.outputs.result}}-installer-64bits" -DBUILD_TESTING=ON -DCMAKE_BUILD_TYPE=Release -DCMAKE_TOOLCHAIN_FILE="${{env.VCPKG_ROOT}}/scripts/buildsystems/vcpkg.cmake" -DVCPKG_TARGET_TRIPLET=${{ matrix.triplet }} -DCMAKE_INSTALL_PREFIX=_install -DBUILD_UI=ON -DXPRESS=${{ env.XPRESS_VALUE }} -DXPRESS_ROOT="${{ env.XPRESSDIR }}" - - name: Build - run: | - cmake --build _build --config Release -j2 --target install - - name: Running unit tests - timeout-minutes: 120 - shell: cmd - run: | - set PATH=%PATH%;C:\Program Files\Microsoft MPI\Bin - set PATH=%PATH%;${{ env.XPRESS }} - set XPRESSDIR=${{ env.XPRESSDIR }} - cd _build - ctest -C Release --output-on-failure -L "medium|unit|benders|lpnamer" \ No newline at end of file diff --git a/.github/workflows/centos-release.yml b/.github/workflows/centos-release.yml deleted file mode 100644 index 509140cc5..000000000 --- a/.github/workflows/centos-release.yml +++ /dev/null @@ -1,297 +0,0 @@ -name: Centos7 release - -on: - push: - branches: - - main - - develop - - ci/* - - dependabot/* - workflow_dispatch: - workflow_run: - workflows: [ "Publish Release" ] - types: - - completed - release: - types: [ created ] - -env: - GITHUB_TOKEN: ${{ github.token }} - -jobs: - - docker_publish: - runs-on: ubuntu-latest - steps: - - - uses: actions/checkout@master - with: - fetch-depth: 0 - - - name: Get changed files - id: changed-files - uses: tj-actions/changed-files@v32 - with: - files: | - docker/centos7-system-deps - - - name: Docker file push - id: docker_push - if: steps.changed-files.outputs.any_changed == 'true' - uses: elgohr/Publish-Docker-Github-Action@main - with: - name: antaresrte/rte-antares - username: ${{ secrets.DOCKER_USERNAME }} - 
password: ${{ secrets.DOCKER_PASSWORD }} - workdir: docker - dockerfile: centos7-system-deps - cache: false - tags: centos7-system-deps - - userguide: - runs-on: ubuntu-latest - outputs: - pdf-name: ${{ steps.create-user-guide.outputs.pdf-name }} - - steps: - - uses: actions/checkout@v3 - - - name: Read antares-xpansion version - id: antares-xpansion-version - uses: ./.github/actions/read-json-value - with: - path: 'antares-version.json' - key: 'antares_xpansion_version' - - - id: create-user-guide - name: user guide pdf creation - uses: ./.github/workflows/generate-userguide-pdf - with: - antares-xpansion-version: ${{steps.antares-xpansion-version.outputs.result}} - - - name: user guide upload - id: userguide_upload - uses: actions/upload-artifact@v3 - with: - name: user-guide - path: ${{ steps.create-user-guide.outputs.pdf-path }} - - versions: - runs-on: ubuntu-latest - outputs: - antares-version: ${{steps.antares-version.outputs.result}} - antares-xpansion-version: ${{steps.antares-xpansion-version.outputs.result}} - antares-deps-version: ${{steps.antares-deps-version.outputs.result}} - steps: - - uses: actions/checkout@v3 - - name: Read antares-solver version - id: antares-version - uses: ./.github/actions/read-json-value - with: - path: 'antares-version.json' - key: 'antares_version' - - - name: Read antares-xpansion version - id: antares-xpansion-version - uses: ./.github/actions/read-json-value - with: - path: 'antares-version.json' - key: 'antares_xpansion_version' - - - name: Read antares-deps version - id: antares-deps-version - uses: ./.github/actions/read-json-value - with: - path: 'antares-version.json' - key: 'antares_deps_version' - - build: - runs-on: ubuntu-latest - needs: [ docker_publish, userguide, versions ] - container: 'antaresrte/rte-antares:centos7-system-deps' - strategy: - matrix: - xprs: [ - XPRESS-ON, - #XPRESS-OFF - ] - env: - XPRESSDIR: ${{ github.workspace }}/xpress - XPRESS: ${{ github.workspace }}/xpress/bin - XPRS_LIB_Path: ${{ github.workspace }}/xpress/lib - XPRESSDIR_CONTAINER: ${GITHUB_WORKSPACE}/xpress - XPRESS_CONTAINER: ${GITHUB_WORKSPACE}/xpress/bin - XPRS_LIB_Path_CONTAINER: ${GITHUB_WORKSPACE}/xpress/lib - outputs: - zip_name: ${{ steps.zip_name.outputs.zip_name }} - singlefile_name: ${{ steps.zip_name.outputs.singlefile_name }} - steps: - - id: branch-name - uses: tj-actions/branch-names@v6 - - - name: Checkout - uses: actions/checkout@v3 - with: - submodules: true - - - name: Download pre-compiled librairies - uses: ./.github/workflows/download-extract-precompiled-libraries-tgz - with: - antares-deps-version: ${{needs.versions.outputs.antares-deps-version}} - antares-version: ${{needs.versions.outputs.antares-version}} - os: centos7 - os-full-name: CentOS-7.9.2009 - #variant: -ortools-xpress - - - uses: ./.github/workflows/compile-gtest - - name: Compile Boost - uses: ./.github/workflows/compile-boost - with: - prefix: "../rte-antares-deps-Release/" - - - name: Compile tbb - uses: ./.github/workflows/compile-tbb - with: - cmake: 'cmake3' - - - name: Install dependencies - run: | - pip3 install --upgrade pip - pip3 install wheel #Does not work in requirements - pip3 install -r requirements-tests.txt - pip3 install -r requirements-ui.txt - - - name: Download userguide - uses: actions/download-artifact@v3 - with: - name: user-guide - path: docs/ - - - name: Checkout xpressmp linux - uses: actions/checkout@v3 - with: - token: ${{ secrets.AS_TOKEN }} - repository: rte-france/xpress-mp - path: ${{ env.XPRESSDIR }} - github-server-url: 
https://github.com - ref: 8.13a - if: matrix.xprs == 'XPRESS-ON' - - - name: Configure - shell: bash - run: | - if [ ${{ matrix.xprs }} == "XPRESS-ON" ]; then - export XPRESS_VALUE="ON" - else - export XPRESS_VALUE="OFF" - fi - source /opt/rh/devtoolset-10/enable - export LD_LIBRARY_PATH=/usr/lib64/openmpi/lib:$LD_LIBRARY_PATH - export PATH=/usr/lib64/openmpi/bin:$PATH - cmake3 -B _build -S . \ - -DDEPS_INSTALL_DIR=rte-antares-deps-Release \ - -DBUILD_TESTING=ON \ - -DCMAKE_BUILD_TYPE=Release \ - -DCMAKE_INSTALL_PREFIX=_install \ - -DBUILD_UI=ON \ - -DUSER_GUIDE_PATH="docs/${{ needs.userguide.outputs.pdf-name }}" \ - -DXPRESS=${XPRESS_VALUE} \ - -DXPRESS_ROOT=${XPRESSDIR} \ - -DALLOW_RUN_AS_ROOT=ON - - - name: Build - shell: bash - run: | - source /opt/rh/devtoolset-10/enable - export LD_LIBRARY_PATH=/usr/lib64/openmpi/lib:$LD_LIBRARY_PATH - export PATH=/usr/lib64/openmpi/bin:$PATH - cmake3 --build _build --config Release -j2 --target install - - - name: Running unit tests - timeout-minutes: 120 - shell: bash - run: | - source /etc/profile.d/modules.sh - module load mpi - export LD_LIBRARY_PATH=LD_LIBRARY_PATH:${{ env.XPRS_LIB_Path_CONTAINER }} - export XPRESS=${{ env.XPRESS_CONTAINER }} - cd _build - ctest3 -C Release --output-on-failure -L "unit|benders|lpnamer|medium" - - - name: set name variables - id: single_file_name - shell: bash - run: | - if [ ${{ matrix.xprs }} == "XPRESS-ON" ]; then - WITH_XPRS="-xpress" - else - WITH_XPRS="" - fi - VERSION=${{needs.versions.outputs.antares-xpansion-version}}${WITH_XPRS} - echo "VERSION_WITH_XPRESS=$VERSION" >> $GITHUB_ENV - - - name: .tar.gz creation - run: | - cd _build - export FILE_NAME="antaresXpansion-${{env.VERSION_WITH_XPRESS}}-CentOS-7.9.2009" - cpack3 -G TGZ -D CPACK_PACKAGE_FILE_NAME=$FILE_NAME - echo "TGZ_NAME=$FILE_NAME.tar.gz" >> $GITHUB_ENV - - - name: Upload .tar.gz - uses: actions/upload-artifact@v3 - with: - name: ${{env.TGZ_NAME}} - path: _build/${{env.TGZ_NAME}} - - - id: create-single-file - name: Single file .tar.gz creation - uses: ./.github/workflows/single-file-creation-tgz - with: - antares-xpansion-version: ${{env.VERSION_WITH_XPRESS}} - - - name: Upload single file - uses: actions/upload-artifact@v3 - with: - name: ${{ steps.create-single-file.outputs.archive-name }} - path: ${{ steps.create-single-file.outputs.archive-path }} - - - id: zip_name - run: | - echo "singlefile_name=${{steps.create-single-file.outputs.archive-name}}" >> "$GITHUB_OUTPUT" - echo "zip_name=${{env.TGZ_NAME}}" >> "$GITHUB_OUTPUT" - ####################### - - upload_asset_to_release: - if: github.event_name == 'release' && github.event.action == 'created' - runs-on: ubuntu-latest - needs: build - env: - ZIP_NAME: ${{needs.build.outputs.zip_name}} - SINGLEFILE_NAME: ${{needs.build.outputs.singlefile_name}} - steps: - - name: Get release - if: github.event_name == 'release' && github.event.action == 'created' - id: get_release - uses: bruceadams/get-release@main - - - name: Download Artifact - uses: actions/download-artifact@v3 - with: - name: ${{ env.ZIP_NAME}} - path: . - - - name: Download Artifact - uses: actions/download-artifact@v3 - with: - name: ${{env.SINGLEFILE_NAME}} - path: . 
- - - name: Upload Release Asset - env: - GH_REPO: ${{ github.repository }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - gh release upload --repo ${{env.GH_REPO}} ${{ steps.get_release.outputs.tag_name }} ${{env.ZIP_NAME}} - gh release upload --repo ${{env.GH_REPO}} ${{ steps.get_release.outputs.tag_name }} ${{env.SINGLEFILE_NAME}} - - ######################## \ No newline at end of file diff --git a/.github/workflows/centos7-system-deps-build.yml b/.github/workflows/centos7-system-deps-build.yml deleted file mode 100644 index 7be482647..000000000 --- a/.github/workflows/centos7-system-deps-build.yml +++ /dev/null @@ -1,139 +0,0 @@ -name: Centos7 CI (build dependencies) - -on: - push: - branches: - - main - - develop - - release/* - - ci/* - - dependabot/* -jobs: - docker_publish: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@master - with: - fetch-depth: 0 - - name: Get changed files - id: changed-files - uses: tj-actions/changed-files@v32 - with: - files: | - docker/centos7-system-deps - - - name: Docker file push - id: docker_push - if: steps.changed-files.outputs.any_changed == 'true' - uses: elgohr/Publish-Docker-Github-Action@main - with: - name: antaresrte/rte-antares - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} - workdir: docker - dockerfile: centos7-system-deps - cache: false - tags: centos7-system-deps - - versions: - runs-on: ubuntu-latest - outputs: - antares-version: ${{steps.antares-version.outputs.result}} - antares-xpansion-version: ${{steps.antares-xpansion-version.outputs.result}} - antares-deps-version: ${{steps.antares-deps-version.outputs.result}} - steps: - - uses: actions/checkout@v3 - - name: Read antares-solver version - id: antares-version - uses: ./.github/actions/read-json-value - with: - path: 'antares-version.json' - key: 'antares_version' - - - name: Read antares-xpansion version - id: antares-xpansion-version - uses: ./.github/actions/read-json-value - with: - path: 'antares-version.json' - key: 'antares_xpansion_version' - - - name: Read antares-deps version - id: antares-deps-version - uses: ./.github/actions/read-json-value - with: - path: 'antares-version.json' - key: 'antares_deps_version' - - build: - - runs-on: ubuntu-latest - needs: [ docker_publish, versions ] - container: 'antaresrte/rte-antares:centos7-system-deps' - - steps: - - id: branch-name - uses: tj-actions/branch-names@v6 - - - uses: actions/checkout@v3 - with: - submodules: true - - - name: Install dependencies - run: | - pip3 install wheel #Does not work in requirements - pip3 install -r requirements-tests.txt - - - uses: ./.github/workflows/compile-gtest - - name: Compile Boost - uses: ./.github/workflows/compile-boost - with: - prefix: "../rte-antares-deps-Release/" - - - name: Compile tbb - uses: ./.github/workflows/compile-tbb - with: - cmake: 'cmake3' - - - name: Setup cmake - uses: jwlawson/actions-setup-cmake@v1.13 - with: - cmake-version: '3.22.x' - - - name: Configure - run: | - source /opt/rh/devtoolset-10/enable - export LD_LIBRARY_PATH=/usr/lib64/openmpi/lib:$LD_LIBRARY_PATH - export PATH=/usr/lib64/openmpi/bin:$PATH - cmake -B _build -S . 
\ - -DDEPS_INSTALL_DIR=rte-antares-deps-Release \ - -DBUILD_TESTING=ON -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=_install -DBUILD_UI=ON -DALLOW_RUN_AS_ROOT=ON - - - name: Build - run: | - source /opt/rh/devtoolset-10/enable - export LD_LIBRARY_PATH=/usr/lib64/openmpi/lib:$LD_LIBRARY_PATH - export PATH=/usr/lib64/openmpi/bin:$PATH - cmake --build _build --config Release -j2 --target install - - - name: Running unit tests - run: | - source /etc/profile.d/modules.sh - module load mpi - cd _build - ctest3 -C Release --output-on-failure -L "unit|benders|lpnamer|medium" - - - name: .tar.gz creation - run: | - cd _build - cpack3 -G TGZ - - - name: Installer .rpm creation - run: | - cd _build - cpack3 -G RPM - - - id: create-single-file - name: Single file .tar.gz creation - uses: ./.github/workflows/single-file-creation-tgz - with: - antares-xpansion-version: ${{needs.version.outputs.antares-xpansion-version}} diff --git a/.github/workflows/doxygen.yml b/.github/workflows/doxygen.yml deleted file mode 100644 index 23c0cab37..000000000 --- a/.github/workflows/doxygen.yml +++ /dev/null @@ -1,45 +0,0 @@ -name: GitHub Pages - -on: - push: - branches: - - develop # Set a branch name to trigger deployment - - dependabot/* - -jobs: - deploy: - runs-on: ubuntu-22.04 - permissions: - contents: write - concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - steps: - - uses: actions/checkout@v4 - with: - submodules: true # Fetch Hugo themes (true OR recursive) - fetch-depth: 0 # Fetch all history for .GitInfo and .Lastmod - - - name: theme - run: | - git clone https://github.com/jothepro/doxygen-awesome-css.git - cd doxygen-awesome-css - git checkout v2.2.1 - git apply ../docs/antares-xpansion.patch - - - name: Doxygen - uses: mattnotmitt/doxygen-action@1.9.5 - with: - doxyfile-path: docs/Doxyfile - - - name: Deploy - uses: peaceiris/actions-gh-pages@v3 - # If you're changing the branch from main, - # also change the `main` in `refs/heads/main` - # below accordingly. 
- if: github.ref == 'refs/heads/develop' - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: ./html - - - diff --git a/.github/workflows/ol8-release.yml b/.github/workflows/ol8-release.yml deleted file mode 100644 index 12883b9f9..000000000 --- a/.github/workflows/ol8-release.yml +++ /dev/null @@ -1,274 +0,0 @@ -name: Oracle-linux8 release - -on: - push: - branches: - - main - - develop - - ci/* - - dependabot/* - workflow_dispatch: - workflow_run: - workflows: [ "Publish Release" ] - types: - - completed - release: - types: [ created ] - -env: - GITHUB_TOKEN: ${{ github.token }} - -jobs: - userguide: - runs-on: ubuntu-latest - outputs: - pdf-name: ${{ steps.create-user-guide.outputs.pdf-name }} - - steps: - - uses: actions/checkout@v4 - - - name: Read antares-xpansion version - id: antares-xpansion-version - uses: ./.github/actions/read-json-value - with: - path: 'antares-version.json' - key: 'antares_xpansion_version' - - - id: create-user-guide - name: user guide pdf creation - uses: ./.github/workflows/generate-userguide-pdf - with: - antares-xpansion-version: ${{steps.antares-xpansion-version.outputs.result}} - - - name: user guide upload - id: userguide_upload - uses: actions/upload-artifact@v3 - with: - name: user-guide - path: ${{ steps.create-user-guide.outputs.pdf-path }} - - versions: - runs-on: ubuntu-latest - outputs: - antares-version: ${{steps.antares-version.outputs.result}} - antares-xpansion-version: ${{steps.antares-xpansion-version.outputs.result}} - antares-deps-version: ${{steps.antares-deps-version.outputs.result}} - steps: - - uses: actions/checkout@v4 - - name: Read antares-solver version - id: antares-version - uses: ./.github/actions/read-json-value - with: - path: 'antares-version.json' - key: 'antares_version' - - - name: Read antares-xpansion version - id: antares-xpansion-version - uses: ./.github/actions/read-json-value - with: - path: 'antares-version.json' - key: 'antares_xpansion_version' - - - name: Read antares-deps version - id: antares-deps-version - uses: ./.github/actions/read-json-value - with: - path: 'antares-version.json' - key: 'antares_deps_version' - - build: - runs-on: ubuntu-latest - needs: [ userguide, versions ] - container: 'oraclelinux:8' - strategy: - matrix: - xprs: [ - { value: XPRESS-ON, ref: 9.2.5 }, - #{ value: XPRESS-OFF } - ] - env: - XPRESSDIR: ${{ github.workspace }}/xpress - XPRESS: ${{ github.workspace }}/xpress/bin - XPRS_LIB_Path: ${{ github.workspace }}/xpress/lib - XPRESSDIR_CONTAINER: ${GITHUB_WORKSPACE}/xpress - XPRESS_CONTAINER: ${GITHUB_WORKSPACE}/xpress/bin - XPRS_LIB_Path_CONTAINER: ${GITHUB_WORKSPACE}/xpress/lib - outputs: - zip_name: ${{ steps.zip_name.outputs.zip_name }} - singlefile_name: ${{ steps.zip_name.outputs.singlefile_name }} - steps: - - id: branch-name - uses: tj-actions/branch-names@v6 - - - name: Install System - run: | - dnf install -y epel-release git cmake wget rpm-build redhat-lsb-core openmpi-devel - dnf install -y unzip libuuid-devel boost-test boost-devel gcc-toolset-10-toolchain zlib-devel python3-devel - source /opt/rh/gcc-toolset-10/enable - dnf install -y jsoncpp-devel - - - name: Checkout - uses: actions/checkout@v4 - with: - submodules: true - - - name: Download pre-compiled librairies - uses: ./.github/workflows/download-extract-precompiled-libraries-tgz - with: - antares-deps-version: ${{needs.versions.outputs.antares-deps-version}} - antares-version: ${{needs.versions.outputs.antares-version}} - os: oracle8 - os-full-name: OracleServer-8.9 - #variant: -ortools-xpress - 
- - name: Compile Boost - uses: ./.github/workflows/compile-boost - with: - prefix: "../rte-antares-deps-Release/" - - - name: Compile tbb - uses: ./.github/workflows/compile-tbb - - - uses: ./.github/workflows/compile-gtest - - - name: Install dependencies - run: | - pip3 install --upgrade pip - pip3 install wheel #Does not work in requirements - pip3 install -r requirements-tests.txt - pip3 install -r requirements-ui.txt - - - name: Download userguide - uses: actions/download-artifact@v3 - with: - name: user-guide - path: docs/ - - - name: Checkout xpressmp linux - uses: actions/checkout@v4 - with: - token: ${{ secrets.AS_TOKEN }} - repository: rte-france/xpress-mp - path: ${{ env.XPRESSDIR }} - github-server-url: https://github.com - ref: ${{matrix.xprs.ref}} - if: matrix.xprs.value == 'XPRESS-ON' - - - name: Configure - shell: bash - run: | - if [ ${{ matrix.xprs.value }} == "XPRESS-ON" ]; then - export XPRESS_VALUE="ON" - else - export XPRESS_VALUE="OFF" - fi - source /opt/rh/gcc-toolset-10/enable - export LD_LIBRARY_PATH=/usr/lib64/openmpi/lib:$LD_LIBRARY_PATH - export PATH=/usr/lib64/openmpi/bin:$PATH - cmake3 -B _build -S . \ - -DDEPS_INSTALL_DIR=rte-antares-deps-Release \ - -DBUILD_TESTING=ON \ - -DCMAKE_BUILD_TYPE=Release \ - -DCMAKE_INSTALL_PREFIX=_install \ - -DBUILD_UI=ON \ - -DUSER_GUIDE_PATH="docs/${{ needs.userguide.outputs.pdf-name }}" \ - -DXPRESS=${XPRESS_VALUE} \ - -DXPRESS_ROOT=${XPRESSDIR} \ - -DALLOW_RUN_AS_ROOT=ON - - - name: Build - shell: bash - run: | - source /opt/rh/gcc-toolset-10/enable - export LD_LIBRARY_PATH=/usr/lib64/openmpi/lib:$LD_LIBRARY_PATH - export PATH=/usr/lib64/openmpi/bin:$PATH - cmake3 --build _build --config Release -j2 --target install - - - name: Running unit tests - timeout-minutes: 120 - shell: bash - run: | - source /etc/profile.d/modules.sh - module load mpi - export LD_LIBRARY_PATH=LD_LIBRARY_PATH:${{ env.XPRS_LIB_Path_CONTAINER }} - export XPRESS=${{ env.XPRESS_CONTAINER }} - cd _build - ctest3 -C Release --output-on-failure -L "unit|benders|lpnamer|medium" - - - name: set name variables - id: single_file_name - shell: bash - run: | - if [ ${{ matrix.xprs.value }} == "XPRESS-ON" ]; then - WITH_XPRS="-xpress" - else - WITH_XPRS="" - fi - VERSION=${{needs.versions.outputs.antares-xpansion-version}}${WITH_XPRS} - echo "VERSION_WITH_XPRESS=$VERSION" >> $GITHUB_ENV - - - name: .tar.gz creation - run: | - cd _build - export FILE_NAME="antaresXpansion-${{env.VERSION_WITH_XPRESS}}-OracleServer-8.9" - cpack3 -G TGZ -D CPACK_PACKAGE_FILE_NAME=$FILE_NAME - echo "TGZ_NAME=$FILE_NAME.tar.gz" >> $GITHUB_ENV - - - name: Upload .tar.gz - uses: actions/upload-artifact@v3 - with: - name: ${{env.TGZ_NAME}} - path: _build/${{env.TGZ_NAME}} - - - id: create-single-file - name: Single file .tar.gz creation - uses: ./.github/workflows/single-file-creation-tgz - with: - antares-xpansion-version: ${{env.VERSION_WITH_XPRESS}} - - - name: Upload single file - uses: actions/upload-artifact@v3 - with: - name: ${{ steps.create-single-file.outputs.archive-name }} - path: ${{ steps.create-single-file.outputs.archive-path }} - - - id: zip_name - run: | - echo "singlefile_name=${{steps.create-single-file.outputs.archive-name}}" >> "$GITHUB_OUTPUT" - echo "zip_name=${{env.TGZ_NAME}}" >> "$GITHUB_OUTPUT" - ####################### - - upload_asset_to_release: - if: github.event_name == 'release' && github.event.action == 'created' - runs-on: ubuntu-latest - needs: build - env: - ZIP_NAME: ${{needs.build.outputs.zip_name}} - SINGLEFILE_NAME: 
${{needs.build.outputs.singlefile_name}} - steps: - - name: Get release - if: github.event_name == 'release' && github.event.action == 'created' - id: get_release - uses: bruceadams/get-release@main - - - name: Download Artifact - uses: actions/download-artifact@v3 - with: - name: ${{env.ZIP_NAME}} - path: . - - - name: Download Artifact - uses: actions/download-artifact@v3 - with: - name: ${{env.SINGLEFILE_NAME}} - path: . - - - name: Upload Release Asset - env: - GH_REPO: ${{ github.repository }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - gh release upload --repo ${{env.GH_REPO}} ${{ steps.get_release.outputs.tag_name }} ${{env.ZIP_NAME}} - gh release upload --repo ${{env.GH_REPO}} ${{ steps.get_release.outputs.tag_name }} ${{env.SINGLEFILE_NAME}} - - ######################## \ No newline at end of file diff --git a/.github/workflows/publish_centos_docker.yml b/.github/workflows/publish_centos_docker.yml deleted file mode 100644 index 12d7aa448..000000000 --- a/.github/workflows/publish_centos_docker.yml +++ /dev/null @@ -1,27 +0,0 @@ -name: Publish docker image - -on: - workflow_dispatch: - -env: - GITHUB_TOKEN: ${{ github.token }} - -jobs: - docker_publish: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@master - with: - fetch-depth: 0 - - - name: Docker file push - id: docker_push - uses: elgohr/Publish-Docker-Github-Action@main - with: - name: antaresrte/rte-antares - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} - workdir: docker - dockerfile: centos7-system-deps - cache: false - tags: centos7-system-deps \ No newline at end of file diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml deleted file mode 100644 index d1ea7a841..000000000 --- a/.github/workflows/release.yml +++ /dev/null @@ -1,17 +0,0 @@ -name: Publish Release - -on: - push: - tags: - - "v*.*.*" - -jobs: - build: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Release - uses: softprops/action-gh-release@v1 - with: - prerelease: ${{ contains(github.ref, '-rc') }} \ No newline at end of file diff --git a/.github/workflows/sonarcloud.yml b/.github/workflows/sonarcloud.yml deleted file mode 100644 index 960fee6e9..000000000 --- a/.github/workflows/sonarcloud.yml +++ /dev/null @@ -1,144 +0,0 @@ -name: SonarCloud - -on: - push: - branches: - - main - - develop - - release/* - - dependabot/* - pull_request: - -jobs: - sonarcloud: - name: SonarCloud - runs-on: ${{ matrix.os }} - if: "!contains(github.event.head_commit.message, '[skip ci]')" - strategy: - matrix: - os: [ ubuntu-20.04 ] - - env: - SONAR_SCANNER_VERSION: 4.7.0.2747 # Find the latest version in the "Linux" link on this page: - # https://sonarcloud.io/documentation/analysis/scan/sonarscanner/ - SONAR_SERVER_URL: "https://sonarcloud.io" - - steps: - - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - - name: Install sonar-scanner and build-wrapper - uses: SonarSource/sonarcloud-github-c-cpp@v2 - - - name: ccache - uses: hendrikmuhs/ccache-action@v1.2 - with: - key: sonarcloud-${{ env.SONAR_SCANNER_VERSION }} - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: 3.8 - - - name: Install gcovr - run: sudo pip install gcovr==5.0 #5.1 generate issues with sonarcloud report parsing - - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip3 install -r requirements-tests.txt - - - name: Install libraries - run: | - sudo apt-get update --fix-missing - sudo apt-get install 
libjsoncpp-dev libgtest-dev libboost-mpi-dev libboost-program-options-dev libtbb-dev - sudo apt-get install g++-10 gcc-10 - cd /usr/src/googletest/ - sudo cmake . - sudo cmake --build . --target install - - - name: Update alternatives - #mpicxx uses "g++" so we need g++ to be symbolic link to g++-10 - run: | - sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-10 10 - sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-10 10 - sudo update-alternatives --install /usr/bin/cc cc /usr/bin/gcc 30 - sudo update-alternatives --set cc /usr/bin/gcc - sudo update-alternatives --install /usr/bin/c++ c++ /usr/bin/g++ 30 - sudo update-alternatives --set c++ /usr/bin/g++ - - - name: Read antares-solver version - id: antares-version - uses: ./.github/actions/read-json-value - with: - path: 'antares-version.json' - key: 'antares_version' - - - name: Read antares-xpansion version - id: antares-xpansion-version - uses: ./.github/actions/read-json-value - with: - path: 'antares-version.json' - key: 'antares_xpansion_version' - - - name: Read antares-deps version - id: antares-deps-version - uses: ./.github/actions/read-json-value - with: - path: 'antares-version.json' - key: 'antares_deps_version' - - - name: Download pre-compiled librairies - uses: ./.github/workflows/download-extract-precompiled-libraries-tgz - with: - antares-deps-version: ${{steps.antares-deps-version.outputs.result}} - antares-version: ${{steps.antares-version.outputs.result}} - os: ${{matrix.os}} - os-full-name: Ubuntu-20.04 - - - name: Compile Boost - uses: ./.github/workflows/compile-boost - with: - prefix: "../rte-antares-deps-Release/" - load-toolset: 'false' - - - - name: Init submodule - run: | - git submodule update --init --recursive . - - - name: Configure - shell: bash - run: | - cmake -B _build -S . 
\ - -DCMAKE_C_COMPILER_LAUNCHER=ccache \ - -DCMAKE_C_COMPILER=/usr/bin/gcc-10 \ - -DCMAKE_CXX_COMPILER_LAUNCHER=ccache \ - -DCMAKE_CXX_COMPILER=/usr/bin/g++-10 \ - -DDEPS_INSTALL_DIR=rte-antares-deps-Release \ - -DCODE_COVERAGE=ON \ - -DBUILD_TESTING=ON \ - -DBUILD_antares_solver=OFF \ - -DCMAKE_BUILD_TYPE=Release \ - -DCMAKE_INSTALL_PREFIX=_install - - - name: Build - run: build-wrapper-linux-x86-64 --out-dir $GITHUB_WORKSPACE/_build/output cmake --build _build --config Release -j2 - - - name: Test and generate coverage - continue-on-error: true - run: | - cd $GITHUB_WORKSPACE/_build - ctest -C Release --output-on-failure -L "unit" - - - name: Compile coverage reports - run: | - cmake --build $GITHUB_WORKSPACE/_build --target code-coverage - - - name: Run sonar-scanner - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - SONAR_TOKEN: ${{ secrets.SONAR_TOKEN_2022 }} - run: sonar-scanner --define sonar.host.url="${{ env.SONAR_SERVER_URL }}" \ No newline at end of file diff --git a/.github/workflows/ubuntu-release.yml b/.github/workflows/ubuntu-release.yml deleted file mode 100644 index 102369ea3..000000000 --- a/.github/workflows/ubuntu-release.yml +++ /dev/null @@ -1,242 +0,0 @@ -name: Ubuntu Release - -on: - push: - branches: - - main - - develop - - ci/* - - dependabot/* - workflow_dispatch: - workflow_run: - workflows: [ "Publish Release" ] - types: - - completed - release: - types: [ created ] - -env: - GITHUB_TOKEN: ${{ github.token }} - -jobs: - - userguide: - runs-on: ubuntu-latest - outputs: - pdf-name: ${{ steps.create-user-guide.outputs.pdf-name }} - - steps: - - uses: actions/checkout@v4 - - - name: Read antares-xpansion version - id: antares-xpansion-version - uses: ./.github/actions/read-json-value - with: - path: 'antares-version.json' - key: 'antares_xpansion_version' - - - id: create-user-guide - name: user guide pdf creation - uses: ./.github/workflows/generate-userguide-pdf - with: - antares-xpansion-version: ${{steps.antares-xpansion-version.outputs.result}} - - - name: user guide upload - id: userguide_upload - uses: actions/upload-artifact@v3 - with: - name: user-guide - path: ${{ steps.create-user-guide.outputs.pdf-path }} - - build: - - needs: userguide - runs-on: ${{ matrix.os }} - if: "!contains(github.event.head_commit.message, '[skip ci]')" - strategy: - matrix: - os: [ ubuntu-20.04 ] - xprs: [ - XPRESS-ON, - #XPRESS-OFF - ] - env: - XPRESSDIR: ${{ github.workspace }}/xpress - XPRESS: ${{ github.workspace }}/xpress/bin - XPRS_LIB_Path: ${{ github.workspace }}/xpress/lib - - steps: - - uses: actions/checkout@v4 - with: - submodules: true - - - name: Checkout xpressmp linux - if: matrix.xprs == 'XPRESS-ON' - uses: actions/checkout@v4 - with: - repository: rte-france/xpress-mp - path: ${{ env.XPRESSDIR }} - ref: 8.13a - token: ${{ secrets.AS_TOKEN }} #reniew token periodically - - - name: ccache - uses: hendrikmuhs/ccache-action@v1.2.3 - with: - key: ${{ matrix.os }}-${{ matrix.xprs }} - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: 3.8 - - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install -r requirements-tests.txt - pip install -r requirements-ui.txt - - - name: Install mandatory system libraries - run: | - sudo apt-get update --fix-missing - sudo apt-get install libjsoncpp-dev libgtest-dev libboost-mpi-dev libboost-program-options-dev libtbb-dev - cd /usr/src/googletest/ - sudo cmake . - sudo cmake --build . 
--target install - sudo apt-get install -y g++-10 gcc-10 - - - name: Update alternatives - #mpicxx uses "g++" so we need g++ to be symbolic link to g++-10 - run: | - sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-10 10 - sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-10 10 - sudo update-alternatives --install /usr/bin/cc cc /usr/bin/gcc 30 - sudo update-alternatives --set cc /usr/bin/gcc - sudo update-alternatives --install /usr/bin/c++ c++ /usr/bin/g++ 30 - sudo update-alternatives --set c++ /usr/bin/g++ - - - name: Read antares-solver version - id: antares-version - uses: ./.github/actions/read-json-value - with: - path: 'antares-version.json' - key: 'antares_version' - - - name: Read antares-xpansion version - id: antares-xpansion-version - uses: ./.github/actions/read-json-value - with: - path: 'antares-version.json' - key: 'antares_xpansion_version' - - - name: Read antares-deps version - id: antares-deps-version - uses: ./.github/actions/read-json-value - with: - path: 'antares-version.json' - key: 'antares_deps_version' - - - name: Download pre-compiled librairies - uses: ./.github/workflows/download-extract-precompiled-libraries-tgz - with: - antares-deps-version: ${{steps.antares-deps-version.outputs.result}} - antares-version: ${{steps.antares-version.outputs.result}} - os: ${{matrix.os}} - os-full-name: Ubuntu-20.04 - - - name: Compile Boost - uses: ./.github/workflows/compile-boost - with: - prefix: "../rte-antares-deps-Release/" - load-toolset: 'false' - - - name: Download userguide - uses: actions/download-artifact@v3 - with: - name: user-guide - path: docs/ - - - name: Configure - shell: bash - run: | - if [ ${{ matrix.xprs }} == "XPRESS-ON" ]; then - XPRESS_VALUE="ON" - else - XPRESS_VALUE="OFF" - fi - cmake -B _build -S . 
\ - -DCMAKE_C_COMPILER_LAUNCHER=ccache \ - -DCMAKE_CXX_COMPILER_LAUNCHER=ccache \ - -DDEPS_INSTALL_DIR=rte-antares-deps-Release \ - -DBUILD_TESTING=ON \ - -DCMAKE_BUILD_TYPE=Release \ - -DCMAKE_INSTALL_PREFIX=_install \ - -DBUILD_UI=ON \ - -DUSER_GUIDE_PATH="docs/${{ needs.userguide.outputs.pdf-name }}" \ - -DXPRESS=${{ env.XPRESS_VALUE }} \ - -DXPRESS_ROOT=${{ env.XPRESSDIR }} - - - name: Build - run: | - cmake --build _build --config Release -j8 --target install - - - name: set name variables - id: single_file_name - shell: bash - run: | - if [ ${{ matrix.xprs }} == "XPRESS-ON" ]; then - WITH_XPRS="-xpress" - else - WITH_XPRS="" - fi - VERSION=${{steps.antares-xpansion-version.outputs.result}}${WITH_XPRS} - echo "VERSION_WITH_XPRESS=$VERSION" >> $GITHUB_ENV - - - id: create-single-file - name: Single file .tar.gz creation - uses: ./.github/workflows/single-file-creation-tgz - with: - antares-xpansion-version: ${{env.VERSION_WITH_XPRESS}} - - - name: Installer .tar.gz creation - run: | - cd _build - export FILE_NAME="antaresXpansion-${{env.VERSION_WITH_XPRESS}}-${{ matrix.os }}" - cpack -G TGZ -D CPACK_PACKAGE_FILE_NAME=$FILE_NAME - #Need to differentiate between xpress/no_xpress files - #Cpack command line doesn't seem to care about -P or -R options - echo "TGZ_NAME=$FILE_NAME.tar.gz" >> $GITHUB_ENV - - - name: Running unit tests - run: | - cd _build - ctest -C Release --output-on-failure -L "medium|unit|benders|lpnamer" - - #Uploads are not necessary for release but useful in other cases - - name: Upload .tar.gz - uses: actions/upload-artifact@v3 - with: - name: ${{env.TGZ_NAME}} - path: _build/${{env.TGZ_NAME}} - - - name: Upload single file - uses: actions/upload-artifact@v3 - with: - name: ${{ steps.create-single-file.outputs.archive-name }} - path: ${{ steps.create-single-file.outputs.archive-path }} - - ####################### - - - name: Get release - if: github.event_name == 'release' && github.event.action == 'created' - id: get_release - uses: bruceadams/get-release@main - - - name: Upload Release Asset - if: github.event_name == 'release' && github.event.action == 'created' - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - gh release upload ${{ steps.get_release.outputs.tag_name }} _build/${{env.TGZ_NAME}} - gh release upload ${{ steps.get_release.outputs.tag_name }} ${{ steps.create-single-file.outputs.archive-path }} - - ######################## \ No newline at end of file diff --git a/.github/workflows/ubuntu-system-deps-build.yml b/.github/workflows/ubuntu-system-deps-build.yml deleted file mode 100644 index 1670a9c88..000000000 --- a/.github/workflows/ubuntu-system-deps-build.yml +++ /dev/null @@ -1,106 +0,0 @@ -name: Ubuntu CI full build - -on: - push: - branches: - - main - - develop - - release/* - - ci/* - - dependabot/* -jobs: - - build: - - runs-on: ${{ matrix.os }} - if: "!contains(github.event.head_commit.message, '[skip ci]')" - strategy: - matrix: - os: [ ubuntu-20.04 ] - - steps: - - uses: actions/checkout@v4 - with: - submodules: true - - - name: ccache - uses: hendrikmuhs/ccache-action@v1.2.3 - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: 3.8 - - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install -r requirements-tests.txt - pip install -r requirements-ui.txt - - - name: Install mandatory system libraries - run: | - sudo apt-get update --fix-missing - sudo apt-get install libjsoncpp-dev libgtest-dev libboost-mpi-dev libboost-program-options-dev libtbb-dev - cd 
/usr/src/googletest/ - sudo cmake . - sudo cmake --build . --target install - sudo apt-get install -y g++-10 gcc-10 - - - name: Update alternatives - #mpicxx uses "g++" so we need g++ to be symbolic link to g++-10 - run: | - sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-10 10 - sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-10 10 - sudo update-alternatives --install /usr/bin/cc cc /usr/bin/gcc 30 - sudo update-alternatives --set cc /usr/bin/gcc - sudo update-alternatives --install /usr/bin/c++ c++ /usr/bin/g++ 30 - sudo update-alternatives --set c++ /usr/bin/g++ - - - name: Compile Boost - uses: ./.github/workflows/compile-boost - with: - prefix: "../rte-antares-deps-Release/" - load-toolset: 'false' - - - - name: Read antares-xpansion version - id: antares-xpansion-version - uses: ./.github/actions/read-json-value - with: - path: 'antares-version.json' - key: 'antares_xpansion_version' - - - name: Configure - run: | - cmake -B _build -S . \ - -DCMAKE_C_COMPILER_LAUNCHER=ccache \ - -DCMAKE_CXX_COMPILER_LAUNCHER=ccache \ - -DBUILD_TESTING=ON -DCMAKE_BUILD_TYPE=Release \ - -DCMAKE_INSTALL_PREFIX=_install \ - -DBUILD_UI=ON - - - name: Build - run: | - cmake --build _build --config Release -j2 --target install - - - name: Running unit tests - run: | - cd _build - ctest -C Release --output-on-failure -L "medium|unit|benders|lpnamer" - - - id: create-single-file - name: Single file .tar.gz creation - uses: ./.github/workflows/single-file-creation-tgz - with: - antares-xpansion-version: ${{steps.antares-xpansion-version.outputs.result}} - - - name: Installer .tar.gz creation - run: | - cd _build - cpack -G TGZ - - - name: Installer .deb creation - run: | - cd _build - cpack -G DEB - diff --git a/.github/workflows/windows-vcpkg-deps-build.yml b/.github/workflows/windows-vcpkg-deps-build.yml deleted file mode 100644 index 594c9a94f..000000000 --- a/.github/workflows/windows-vcpkg-deps-build.yml +++ /dev/null @@ -1,117 +0,0 @@ -name: Windows CI full build - -on: - push: - branches: - - main - - develop - - release/* - - ci/* - - dependabot/* -jobs: - - windows: - - runs-on: ${{ matrix.os }} - if: "!contains(github.event.head_commit.message, '[skip ci]')" - strategy: - matrix: - os: [ windows-latest ] - triplet: [ x64-windows ] - - env: - # Indicates the location of the vcpkg as a Git submodule of the project repository. - VCPKG_ROOT: ${{ github.workspace }}/vcpkg - - steps: - - uses: actions/checkout@v4 - with: - submodules: true - - - name: Enable git longpaths - run: git config --system core.longpaths true - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: '3.x' - - # Restore both vcpkg and its artifacts from the GitHub cache service. - - name: Restore vcpkg and its artifacts. - uses: actions/cache@v4 - with: - # The first path is the location of vcpkg (it contains the vcpkg executable and data files). - # The other paths starting with '!' are exclusions: they contain termporary files generated during the build of the installed packages. - path: | - ${{ env.VCPKG_ROOT }} - !${{ env.VCPKG_ROOT }}/buildtrees - !${{ env.VCPKG_ROOT }}/packages - !${{ env.VCPKG_ROOT }}/downloads - # The key is composed in a way that it gets properly invalidated: this must happen whenever vcpkg's Git commit id changes, or the list of packages changes. In this case a cache miss must happen and a new entry with a new key with be pushed to GitHub the cache service. 
-          # The key includes: the hash of the vcpkg.json file, the hash of the vcpkg Git commit id, and the vcpkg triplet in use. The vcpkg commit id alone would suffice, but hashing it as well does no harm.
-          # Note: given a key, the cache content is immutable. If a cache entry has been created improperly, then in order to recreate the right content the key must be changed as well, and it must be brand new (i.e. not already existing).
-          key: |
-            ${{ hashFiles( 'vcpkg.json' ) }}-${{ hashFiles( '.git/modules/vcpkg/HEAD' )}}-${{ matrix.triplet }}-invalidate
-
-      - name: Install dependencies
-        run: |
-          python -m pip install --upgrade pip
-          pip install -r requirements-tests.txt
-          pip install -r requirements-ui.txt
-
-      - name: Pre-requisites
-        shell: cmd
-        run: |
-          choco install wget zip unzip --no-progress
-          wget -nv https://github.com/microsoft/Microsoft-MPI/releases/download/v10.1.1/msmpisetup.exe
-          msmpisetup.exe -unattend
-
-      - name: Read antares-xpansion version
-        id: antares-xpansion-version
-        uses: ./.github/actions/read-json-value
-        with:
-          path: 'antares-version.json'
-          key: 'antares_xpansion_version'
-
-      - name: Install deps with VCPKG
-        run: |
-          cd vcpkg
-          ./bootstrap-vcpkg.sh
-          vcpkg install --triplet ${{matrix.triplet}}
-          rm -rf buildtrees
-          rm -rf packages
-          rm -rf downloads
-        shell: bash
-
-      - name: Compile tbb
-        uses: ./.github/workflows/compile-tbb
-        with:
-          cmake: 'cmake'
-
-      - name: Configure
-        run: |
-          $pwd=Get-Location
-          cmake -B _build -S . -DBUILD_TESTING=ON -DCMAKE_BUILD_TYPE=Release -DCMAKE_TOOLCHAIN_FILE="vcpkg/scripts/buildsystems/vcpkg.cmake" -DVCPKG_TARGET_TRIPLET=${{ matrix.triplet }} -DCMAKE_INSTALL_PREFIX=_install -DBUILD_UI=ON
-
-      - name: Build
-        run: |
-          cmake --build _build --config Release -j2 --target install
-
-      - name: Running unit tests
-        shell: cmd
-        run: |
-          set PATH=%PATH%;C:\Program Files\Microsoft MPI\Bin\
-          cd _build
-          ctest -C Release --output-on-failure -L "medium|unit|benders|lpnamer"
-
-      - name: Installer .zip creation
-        run: |
-          cd _build
-          cpack -G ZIP
-
-      - id: create-single-file
-        name: Single file .zip creation
-        uses: ./.github/workflows/single-file-creation-zip
-        with:
-          antares-xpansion-version: ${{steps.antares-xpansion-version.outputs.result}}
-
diff --git a/.github/workflows/windows-vcpkg.yml b/.github/workflows/windows-vcpkg.yml
deleted file mode 100644
index aa8418099..000000000
--- a/.github/workflows/windows-vcpkg.yml
+++ /dev/null
@@ -1,280 +0,0 @@
-name: Windows release
-
-on:
-  push:
-    branches:
-      - main
-      - develop
-      - ci/*
-      - dependabot/*
-  workflow_dispatch:
-  workflow_run:
-    workflows: [ "Publish Release" ]
-    types:
-      - completed
-  release:
-    types: [ created ]
-
-env:
-  GITHUB_TOKEN: ${{ github.token }}
-
-jobs:
-
-  userguide:
-    runs-on: ubuntu-latest
-    outputs:
-      pdf-name: ${{ steps.create-user-guide.outputs.pdf-name }}
-
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: Read antares-xpansion version
-        id: antares-xpansion-version
-        uses: ./.github/actions/read-json-value
-        with:
-          path: 'antares-version.json'
-          key: 'antares_xpansion_version'
-
-      - id: create-user-guide
-        name: user guide pdf creation
-        uses: ./.github/workflows/generate-userguide-pdf
-        with:
-          antares-xpansion-version: ${{steps.antares-xpansion-version.outputs.result}}
-
-      - name: user guide upload
-        id: userguide_upload
-        uses: actions/upload-artifact@v3
-        with:
-          name: user-guide
-          path: ${{ steps.create-user-guide.outputs.pdf-path }}
-
-  build:
-    needs: userguide
-    runs-on: ${{ matrix.os }}
-    if: "!contains(github.event.head_commit.message, '[skip ci]')"
-    strategy:
-      matrix:
-        os: [ windows-latest ]
-        triplet: [ x64-windows ]
-        xprs: [
-          XPRESS-ON,
-          #XPRESS-OFF
-        ]
-    env:
-      XPRESSDIR: ${{ github.workspace }}\xpress
-      XPRESS: ${{ github.workspace }}\xpress\bin
-      XPRS_LIB_Path: ${{ github.workspace }}\xpress\lib
-      # Indicates the location of the vcpkg as a Git submodule of the project repository.
-      VCPKG_ROOT: ${{ github.workspace }}/vcpkg
-    outputs:
-      zip_name: ${{ steps.zip_name.outputs.zip_name }}
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          submodules: true
-
-      - name: Enable git longpaths
-        run: git config --system core.longpaths true
-
-      - name: Checkout xpressmp linux
-        if: matrix.xprs == 'XPRESS-ON'
-        uses: actions/checkout@v4
-        with:
-          repository: rte-france/xpress-mp-temp
-          path: ${{ env.XPRESSDIR }}
-          ref: 8.13a
-          token: ${{ secrets.AS_TOKEN }}
-
-      - name: Get release
-        if: github.event_name == 'release' && github.event.action == 'created'
-        id: get_release
-        uses:
-          bruceadams/get-release@main
-
-      - name: Set up Python
-        uses: actions/setup-python@v5
-        with:
-          python-version: '3.x'
-
-      # Restore both vcpkg and its artifacts from the GitHub cache service.
-      - name: Restore vcpkg and its artifacts.
-        uses: actions/cache@v4
-        with:
-          # The first path is the location of vcpkg (it contains the vcpkg executable and data files).
-          # The other paths starting with '!' are exclusions: they contain temporary files generated during the build of the installed packages.
-          path: |
-            ${{ env.VCPKG_ROOT }}
-            !${{ env.VCPKG_ROOT }}/buildtrees
-            !${{ env.VCPKG_ROOT }}/packages
-            !${{ env.VCPKG_ROOT }}/downloads
-          # The key is composed in a way that it gets properly invalidated: this must happen whenever vcpkg's Git commit id changes or the list of packages changes. In that case a cache miss occurs and a new entry with a new key will be pushed to the GitHub cache service.
-          # The key includes: the hash of the vcpkg.json file, the hash of the vcpkg Git commit id, and the vcpkg triplet in use. The vcpkg commit id alone would suffice, but hashing it as well does no harm.
-          # Note: given a key, the cache content is immutable. If a cache entry has been created improperly, then in order to recreate the right content the key must be changed as well, and it must be brand new (i.e. not already existing).
- key: | - ${{ hashFiles( 'vcpkg.json' ) }}-${{ hashFiles( '.git/modules/vcpkg/HEAD' )}}-${{ matrix.triplet }}-invalidate - - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install -r requirements-tests.txt - pip install -r requirements-ui.txt - - - name: Pre-requisites - shell: cmd - run: | - choco install wget zip unzip --no-progress - wget -nv https://github.com/microsoft/Microsoft-MPI/releases/download/v10.1.1/msmpisetup.exe - msmpisetup.exe -unattend - - - name: Read antares-solver version - id: antares-version - uses: ./.github/actions/read-json-value - with: - path: 'antares-version.json' - key: 'antares_version' - - - name: Read antares-xpansion version - id: antares-xpansion-version - uses: ./.github/actions/read-json-value - with: - path: 'antares-version.json' - key: 'antares_xpansion_version' - - - name: Read antares-deps version - id: antares-deps-version - uses: ./.github/actions/read-json-value - with: - path: 'antares-version.json' - key: 'antares_deps_version' - - - name: Install deps with VCPKG - run: | - cd vcpkg - ./bootstrap-vcpkg.sh - vcpkg install --triplet ${{matrix.triplet}} - rm -rf buildtrees - rm -rf packages - rm -rf downloads - shell: bash - - - name: Compile tbb - uses: ./.github/workflows/compile-tbb - with: - cmake: 'cmake' - - - name: Download pre-compiled librairies - uses: ./.github/workflows/download-extract-precompiled-libraries-zip - with: - antares-deps-version: ${{steps.antares-deps-version.outputs.result}} - antares-version: ${{steps.antares-version.outputs.result}} - os: ${{matrix.os}} - - - name: Download userguide - uses: actions/download-artifact@v3 - with: - name: user-guide - path: docs/ - - - name: Expand xpress value in env - #I can't seem to expand the variable in the cmake command line so export it in env - shell: bash - run: | - [[ ${{ matrix.xprs }} == "XPRESS-ON" ]] && XPRESS_VALUE="ON" || XPRESS_VALUE="OFF" - echo "XPRESS_VALUE=$XPRESS_VALUE" >> $GITHUB_ENV - - - name: Configure - #Inverted ternary : @({'condition is false'},{'condition is true'})[$condition] => ({true}, {false})[!$condition] - run: | - $pwd=Get-Location - cmake -B _build -S . 
-DDEPS_INSTALL_DIR=rte-antares-deps-Release -DCMAKE_PREFIX_PATH="$pwd\rte-antares-${{steps.antares-version.outputs.result}}-installer-64bits" -DBUILD_TESTING=ON -DCMAKE_BUILD_TYPE=Release -DCMAKE_TOOLCHAIN_FILE="${{env.VCPKG_ROOT}}/scripts/buildsystems/vcpkg.cmake" -DVCPKG_TARGET_TRIPLET=${{ matrix.triplet }} -DCMAKE_INSTALL_PREFIX=_install -DBUILD_UI=ON -DUSER_GUIDE_PATH="docs/${{ needs.userguide.outputs.pdf-name }}" -DXPRESS=${{ env.XPRESS_VALUE }} -DXPRESS_ROOT="${{ env.XPRESSDIR }}" - - - name: Build - run: | - cmake --build _build --config Release -j2 --target install - - - name: Running unit tests - shell: cmd - run: | - set PATH=%PATH%;C:\Program Files\Microsoft MPI\Bin\ - set PATH=%PATH%;${{ env.XPRESS }} - set XPRESSDIR=${{ env.XPRESSDIR }} - cd _build - ctest -C Release --output-on-failure -L "medium|unit|benders|lpnamer" - - - name: Create installer .zip - shell: bash - run: | - cd _build - cpack -G ZIP - export ZIP_NAME=$(ls *.zip) - echo "ZIP_NAME=$ZIP_NAME" >> $GITHUB_ENV - - - name: set name variables - id: single_file_name - shell: bash - run: | - [[ ${{ matrix.xprs }} == "XPRESS-ON" ]] && XPRESS_VALUE="ON" || XPRESS_VALUE="OFF" - XPRS=${{ env.XPRESS_VALUE }} - [ $XPRS == "ON" ] && WITH_XPRS="-xpress" || WITH_XPRS="" - VERSION=${{steps.antares-xpansion-version.outputs.result}}${WITH_XPRS} - echo "VERSION_WITH_XPRESS=$VERSION" - echo "VERSION_WITH_XPRESS=$VERSION" >> $GITHUB_ENV - - - name: Upload installer - uses: actions/upload-artifact@v3 - with: - name: ${{env.ZIP_NAME}} - path: _build/${{env.ZIP_NAME}} - - - name: Create single file .zip - id: create-single-file - uses: ./.github/workflows/single-file-creation-zip - with: - antares-xpansion-version: ${{env.VERSION_WITH_XPRESS}} - - - name: Upload single file - uses: actions/upload-artifact@v3 - with: - name: ${{ steps.create-single-file.outputs.archive-name }} - path: ${{ steps.create-single-file.outputs.archive-path }} - - - id: zip_name - shell: bash - run: | - echo "zip_name=${{env.ZIP_NAME}}" >> "$GITHUB_OUTPUT" - echo "singlefile_name=${{steps.create-single-file.outputs.archive-name}}" >> "$GITHUB_OUTPUT" - - upload_asset_to_release: - if: github.event_name == 'release' && github.event.action == 'created' - runs-on: ubuntu-latest - needs: build - env: - ZIP_NAME: ${{needs.build.outputs.zip_name}} - SINGLEFILE_NAME: ${{needs.build.outputs.singlefile_name}} - steps: - - name: Get release - if: github.event_name == 'release' && github.event.action == 'created' - id: get_release - uses: bruceadams/get-release@main - - - name: Download Artifact - uses: actions/download-artifact@v3 - with: - name: ${{env.ZIP_NAME}} - path: . - - - name: Download Artifact - uses: actions/download-artifact@v3 - with: - name: ${{env.SINGLEFILE_NAME}} - path: . 
- - - name: Upload Release Asset - env: - GH_REPO: ${{ github.repository }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - gh release upload --repo ${{env.GH_REPO}} ${{ steps.get_release.outputs.tag_name }} ${{env.ZIP_NAME}} - gh release upload --repo ${{env.GH_REPO}} ${{ steps.get_release.outputs.tag_name }} ${{env.SINGLEFILE_NAME}} - - ######################## \ No newline at end of file diff --git a/src/cpp/benders/benders_by_batch/BendersByBatch.cpp b/src/cpp/benders/benders_by_batch/BendersByBatch.cpp index 1f5653ed7..db421c3ec 100644 --- a/src/cpp/benders/benders_by_batch/BendersByBatch.cpp +++ b/src/cpp/benders/benders_by_batch/BendersByBatch.cpp @@ -6,6 +6,7 @@ #include #include "BatchCollection.h" +#include "CustomVector.h" #include "RandomBatchShuffler.h" #include "glog/logging.h" BendersByBatch::BendersByBatch( @@ -46,6 +47,10 @@ void BendersByBatch::InitializeProblems() { problem_count++; } } + + // if (Rank() == rank_0) { + SetSubproblemsVariablesIndex(); + // } init_problems_ = false; } void BendersByBatch::BroadcastSingleSubpbCostsUnderApprox() { @@ -151,6 +156,7 @@ void BendersByBatch::SeparationLoop() { SolveBatches(); if (Rank() == rank_0) { + outer_loop_criterion_.push_back(_data.outer_loop_criterion); UpdateTrace(); SaveCurrentBendersData(); } @@ -195,8 +201,10 @@ void BendersByBatch::SolveBatches() { const auto &batch_sub_problems = batch.sub_problem_names; double batch_subproblems_costs_contribution_in_gap_per_proc = 0; double batch_subproblems_costs_contribution_in_gap = 0; + std::vector external_loop_criterion_current_batch = {}; BuildCut(batch_sub_problems, - &batch_subproblems_costs_contribution_in_gap_per_proc); + &batch_subproblems_costs_contribution_in_gap_per_proc, + external_loop_criterion_current_batch); Reduce(batch_subproblems_costs_contribution_in_gap_per_proc, batch_subproblems_costs_contribution_in_gap, std::plus(), rank_0); @@ -206,6 +214,8 @@ void BendersByBatch::SolveBatches() { _data.number_of_subproblem_solved += batch_sub_problems.size(); _data.cumulative_number_of_subproblem_solved += batch_sub_problems.size(); remaining_epsilon_ -= batch_subproblems_costs_contribution_in_gap; + AddVectors(_data.outer_loop_criterion, + external_loop_criterion_current_batch); } BroadCast(remaining_epsilon_, rank_0); @@ -222,7 +232,8 @@ void BendersByBatch::SolveBatches() { */ void BendersByBatch::BuildCut( const std::vector &batch_sub_problems, - double *batch_subproblems_costs_contribution_in_gap_per_proc) { + double *batch_subproblems_costs_contribution_in_gap_per_proc, + std::vector &external_loop_criterion_current_batch) { SubProblemDataMap subproblem_data_map; Timer subproblems_timer_per_proc; GetSubproblemCut(subproblem_data_map, batch_sub_problems, @@ -235,7 +246,8 @@ void BendersByBatch::BuildCut( misprice_ = global_misprice; Gather(subproblem_data_map, gathered_subproblem_map, rank_0); SetSubproblemsWalltime(subproblems_timer_per_proc.elapsed()); - + external_loop_criterion_current_batch = + ComputeSubproblemsContributionToOuterLoopCriterion(subproblem_data_map); for (const auto &subproblem_map : gathered_subproblem_map) { for (auto &&[sub_problem_name, subproblem_data] : subproblem_map) { SetSubproblemCost(GetSubproblemCost() + subproblem_data.subproblem_cost); diff --git a/src/cpp/benders/benders_by_batch/include/BendersByBatch.h b/src/cpp/benders/benders_by_batch/include/BendersByBatch.h index 06af08e98..03dde786e 100644 --- a/src/cpp/benders/benders_by_batch/include/BendersByBatch.h +++ 
b/src/cpp/benders/benders_by_batch/include/BendersByBatch.h @@ -13,8 +13,8 @@ class BendersByBatch : public BendersMpi { std::shared_ptr mathLoggerDriver); ~BendersByBatch() override = default; void Run() override; - void BuildCut(const std::vector &batch_sub_problems, - double *sum); + void BuildCut(const std::vector &batch_sub_problems, double *sum, + std::vector &external_loop_criterion_current_batch); std::string BendersName() const override { return "Benders By Batch mpi"; } protected: diff --git a/src/cpp/benders/benders_core/BendersBase.cpp b/src/cpp/benders/benders_core/BendersBase.cpp index b0c94599c..6b6ae5e83 100644 --- a/src/cpp/benders/benders_core/BendersBase.cpp +++ b/src/cpp/benders/benders_core/BendersBase.cpp @@ -9,6 +9,7 @@ #include "LastIterationReader.h" #include "LastIterationWriter.h" #include "LogUtils.h" +#include "VariablesGroup.h" #include "glog/logging.h" #include "solver_utils.h" @@ -379,7 +380,7 @@ void BendersBase::GetSubproblemCut(SubProblemDataMap &subproblem_data_map) { worker->solve(subproblem_data.lpstatus, _options.OUTPUTROOT, _options.LAST_MASTER_MPS + MPS_SUFFIX, _writer); worker->get_value(subproblem_data.subproblem_cost); - worker->get_solution(subproblem_data.variables); + worker->get_solution(subproblem_data.solution); worker->get_subgradient(subproblem_data.var_name_and_subgradient); worker->get_splex_num_of_ite_last(subproblem_data.simplex_iter); subproblem_data.subproblem_timer = subproblem_timer.elapsed(); @@ -401,6 +402,7 @@ void BendersBase::GetSubproblemCut(SubProblemDataMap &subproblem_data_map) { * */ void BendersBase::compute_cut(const SubProblemDataMap &subproblem_data_map) { + // current_outer_loop_criterion_ = 0.0; for (auto const &[subproblem_name, subproblem_data] : subproblem_data_map) { _data.ub += subproblem_data.subproblem_cost; @@ -408,7 +410,9 @@ void BendersBase::compute_cut(const SubProblemDataMap &subproblem_data_map) { subproblem_data.var_name_and_subgradient, _data.x_cut, subproblem_data.subproblem_cost); relevantIterationData_.last._cut_trace[subproblem_name] = subproblem_data; + // ComputeOuterLoopCriterion(subproblem_name, subproblem_data); } + // outer_loop_criterion_.push_back(current_outer_loop_criterion_); } void compute_cut_val(const Point &var_name_subgradient, const Point &x_cut, @@ -728,6 +732,16 @@ void BendersBase::MatchProblemToId() { } } +void BendersBase::SetSubproblemsVariablesIndex() { + if (!subproblem_map.empty()) { + auto subproblem = subproblem_map.begin(); + subproblems_vars_names_.clear(); + subproblems_vars_names_ = subproblem->second->_solver->get_col_names(); + VariablesGroup variablesGroup(subproblems_vars_names_, patterns_); + var_indices_ = variablesGroup.Indices(); + } +} + void BendersBase::AddSubproblemName(const std::string &name) { subproblems.push_back(name); } @@ -931,16 +945,44 @@ WorkerMasterData BendersBase::BestIterationWorkerMaster() const { return relevantIterationData_.best; } -void BendersBase::ResetData(double criterion) { - init_data(); - _data.external_loop_criterion = criterion; -} - void BendersBase::InitExternalValues() { - _data.external_loop_criterion = 0; + // _data.outer_loop_criterion = 0; _data.benders_num_run = 0; } CurrentIterationData BendersBase::GetCurrentIterationData() const { return _data; } +std::vector BendersBase::GetOuterLoopCriterion() const { + return _data.outer_loop_criterion; +} + +std::vector BendersBase::ComputeOuterLoopCriterion( + const std::string &subproblem_name, + const PlainData::SubProblemData &sub_problem_data) { + std::vector 
outer_loop_criterion_per_sub_problem(patterns_.size(), + {}); + // for (auto i(0); i < sub_problem_data.variables.names.size(); ++i) { + // auto var_name = sub_problem_data.variables.names[i]; + // auto solution = sub_problem_data.variables.values[i]; + // if (std::regex_search(var_name, rgx_) && + // solution > + // _options.EXTERNAL_LOOP_OPTIONS.EXT_LOOP_CRITERION_COUNT_THRESHOLD) + // { + // // 1h of unsupplied energy + // outer_loop_criterion_per_sub_problem += 1; + // } + // } + for (int pattern_index(0); pattern_index < patterns_.size(); + ++pattern_index) { + auto pattern_variables_indices = var_indices_[pattern_index]; + for (auto variables_index : pattern_variables_indices) { + if (auto solution = sub_problem_data.solution[variables_index]; + solution > + _options.EXTERNAL_LOOP_OPTIONS.EXT_LOOP_CRITERION_COUNT_THRESHOLD) + // 1h of unsupplied energy + outer_loop_criterion_per_sub_problem[pattern_index] += 1; + } + } + return outer_loop_criterion_per_sub_problem; +} diff --git a/src/cpp/benders/benders_core/BendersMathLogger.cpp b/src/cpp/benders/benders_core/BendersMathLogger.cpp index ccf2ace3d..708144b06 100644 --- a/src/cpp/benders/benders_core/BendersMathLogger.cpp +++ b/src/cpp/benders/benders_core/BendersMathLogger.cpp @@ -158,8 +158,11 @@ void PrintExternalLoopData(LogDestination& log_destination, const HEADERSTYPE& type, const BENDERSMETHOD& method) { log_destination << data.benders_num_run; + // TODO + // log_destination << std::scientific << std::setprecision(10) + // << data.outer_loop_criterion; log_destination << std::scientific << std::setprecision(10) - << data.external_loop_criterion; + << data.outer_loop_criterion[0]; PrintBendersData(log_destination, data, type, method); } void MathLoggerBaseExternalLoop::Print(const CurrentIterationData& data) { diff --git a/src/cpp/benders/benders_core/CMakeLists.txt b/src/cpp/benders/benders_core/CMakeLists.txt index e3f38ffac..10d127fbe 100644 --- a/src/cpp/benders/benders_core/CMakeLists.txt +++ b/src/cpp/benders/benders_core/CMakeLists.txt @@ -27,6 +27,7 @@ add_library (benders_core STATIC ${CMAKE_CURRENT_SOURCE_DIR}/LastIterationPrinter.cpp ${CMAKE_CURRENT_SOURCE_DIR}/StartUp.cpp ${CMAKE_CURRENT_SOURCE_DIR}/BendersMathLogger.cpp + ${CMAKE_CURRENT_SOURCE_DIR}/VariablesGroup.cpp ) get_target_property(JSON_INC_PATH jsoncpp_lib INTERFACE_INCLUDE_DIRECTORIES) diff --git a/src/cpp/benders/benders_core/SimulationOptions.cpp b/src/cpp/benders/benders_core/SimulationOptions.cpp index 13cbe07b5..464d98639 100644 --- a/src/cpp/benders/benders_core/SimulationOptions.cpp +++ b/src/cpp/benders/benders_core/SimulationOptions.cpp @@ -168,7 +168,12 @@ BendersBaseOptions SimulationOptions::get_benders_options() const { result.LAST_MASTER_MPS = LAST_MASTER_MPS; result.LAST_MASTER_BASIS = LAST_MASTER_BASIS; result.BATCH_SIZE = BATCH_SIZE; - + result.EXTERNAL_LOOP_OPTIONS.EXT_LOOP_CRITERION_VALUE = + EXT_LOOP_CRITERION_VALUE; + result.EXTERNAL_LOOP_OPTIONS.EXT_LOOP_CRITERION_TOLERANCE = + EXT_LOOP_CRITERION_TOLERANCE; + result.EXTERNAL_LOOP_OPTIONS.EXT_LOOP_CRITERION_COUNT_THRESHOLD = + EXT_LOOP_CRITERION_COUNT_THRESHOLD; return result; } diff --git a/src/cpp/benders/benders_core/SubproblemWorker.cpp b/src/cpp/benders/benders_core/SubproblemWorker.cpp index e88cea2a5..5778710da 100644 --- a/src/cpp/benders/benders_core/SubproblemWorker.cpp +++ b/src/cpp/benders/benders_core/SubproblemWorker.cpp @@ -73,14 +73,12 @@ void SubproblemWorker::get_subgradient(Point &s) const { * * \param lb : reference to a map */ -void 
SubproblemWorker::get_solution(PlainData::Variables &vars) const { - vars.values = std::vector(_solver->get_ncols()); +void SubproblemWorker::get_solution(std::vector &solution) const { + solution = std::vector(_solver->get_ncols()); if (_solver->get_n_integer_vars() > 0) { - _solver->get_mip_sol(vars.values.data()); + _solver->get_mip_sol(solution.data()); } else { - _solver->get_lp_sol(vars.values.data(), NULL, NULL); + _solver->get_lp_sol(solution.data(), NULL, NULL); } - - vars.names = _solver->get_col_names(); } \ No newline at end of file diff --git a/src/cpp/benders/benders_core/VariablesGroup.cpp b/src/cpp/benders/benders_core/VariablesGroup.cpp new file mode 100644 index 000000000..029c381f3 --- /dev/null +++ b/src/cpp/benders/benders_core/VariablesGroup.cpp @@ -0,0 +1,22 @@ +#include "VariablesGroup.h" + +VariablesGroup::VariablesGroup(const std::vector& all_variables, + const std::vector& patterns) + : all_variables_(all_variables), patterns_(patterns) { + Search(); +} + +void VariablesGroup::Search() { + indices_.assign(patterns_.size(), {}); + int var_index(0); + for (const auto& variable : all_variables_) { + int pattern_index(0); + for (const auto& pattern : patterns_) { + if (std::regex_search(variable, pattern)) { + indices_[pattern_index].push_back(var_index); + } + ++pattern_index; + } + ++var_index; + } +} \ No newline at end of file diff --git a/src/cpp/benders/benders_core/include/BendersBase.h b/src/cpp/benders/benders_core/include/BendersBase.h index 3054d3712..40ec9e886 100644 --- a/src/cpp/benders/benders_core/include/BendersBase.h +++ b/src/cpp/benders/benders_core/include/BendersBase.h @@ -2,6 +2,7 @@ #include #include +#include #include "BendersMathLogger.h" #include "BendersStructsDatas.h" @@ -79,11 +80,12 @@ class BendersBase { _options.MAX_ITERATIONS = max_iteration; } BendersBaseOptions Options() const { return _options; } - void ResetData(double criterion); virtual void free() = 0; void InitExternalValues(); int GetBendersRunNumber() const { return _data.benders_num_run; } CurrentIterationData GetCurrentIterationData() const; + std::vector GetOuterLoopCriterion() const; + virtual void init_data(); protected: CurrentIterationData _data; @@ -96,10 +98,21 @@ class BendersBase { bool init_data_ = true; bool init_problems_ = true; bool free_problems_ = true; + const std::string positive_unsupplied_vars_prefix_ = + "^PositiveUnsuppliedEnergy::"; + const std::string negative_unsupplied_vars_prefix_ = + "^NegativeUnsuppliedEnergy::"; + const std::regex rgx_ = std::regex(positive_unsupplied_vars_prefix_); + const std::regex nrgx_ = std::regex(negative_unsupplied_vars_prefix_); + std::vector> outer_loop_criterion_; + std::vector subproblems_vars_names_ = {}; + // tmp + // std::vector patterns_ = {rgx_, nrgx_}; + std::vector patterns_ = {rgx_}; + std::vector> var_indices_; protected: virtual void Run() = 0; - virtual void init_data(); void update_best_ub(); bool ShouldBendersStop(); bool is_initial_relaxation_requested() const; @@ -130,6 +143,15 @@ class BendersBase { void AddSubproblem(const std::pair &kvp); [[nodiscard]] WorkerMasterPtr get_master() const; void MatchProblemToId(); + /** + * for the nth variable name, Subproblems shares the same prefix , only the + suffix is different + * ex variable at index = 0 is named in: + + * subproblems-1-1 --> NTCDirect::link::hour<0> + * subproblems-3-5 --> NTCDirect::link::hour<672> + */ + void SetSubproblemsVariablesIndex(); void AddSubproblemName(const std::string &name); [[nodiscard]] std::string get_master_name() 
const; [[nodiscard]] std::string get_solver_name() const; @@ -192,6 +214,11 @@ class BendersBase { SolverLogManager solver_log_manager_; + // outer loop criterion per pattern + std::vector ComputeOuterLoopCriterion( + const std::string &subproblem_name, + const PlainData::SubProblemData &sub_problem_data); + private: void print_master_and_cut(std::ostream &file, int ite, WorkerMasterData &trace, Point const &xopt); diff --git a/src/cpp/benders/benders_core/include/BendersStructsDatas.h b/src/cpp/benders/benders_core/include/BendersStructsDatas.h index 30318ab6b..990ad5aa9 100644 --- a/src/cpp/benders/benders_core/include/BendersStructsDatas.h +++ b/src/cpp/benders/benders_core/include/BendersStructsDatas.h @@ -40,7 +40,7 @@ struct CurrentIterationData { int max_simplexiter; // ugly int benders_num_run; - double external_loop_criterion; + std::vector outer_loop_criterion; }; // /*! \struct to store benders cuts data diff --git a/src/cpp/benders/benders_core/include/CustomVector.h b/src/cpp/benders/benders_core/include/CustomVector.h new file mode 100644 index 000000000..60844a510 --- /dev/null +++ b/src/cpp/benders/benders_core/include/CustomVector.h @@ -0,0 +1,9 @@ +#pragma once +#include + +template +void AddVectors(std::vector& a, const std::vector& b) { + if (a.size() == b.size()) { + std::transform(a.begin(), a.end(), b.begin(), a.begin(), std::plus()); + } +} diff --git a/src/cpp/benders/benders_core/include/SubproblemCut.h b/src/cpp/benders/benders_core/include/SubproblemCut.h index ac85fbeba..69c0f392c 100644 --- a/src/cpp/benders/benders_core/include/SubproblemCut.h +++ b/src/cpp/benders/benders_core/include/SubproblemCut.h @@ -5,20 +5,11 @@ #include "Worker.h" #include "common.h" namespace PlainData { -struct Variables { - std::vector names; - std::vector values; - template - void serialize(Archive &ar, const unsigned int version) { - ar & names; - ar & values; - } -}; struct SubProblemData { double subproblem_cost; Point var_name_and_subgradient; - Variables variables; + std::vector solution; double single_subpb_costs_under_approx; double subproblem_timer; int simplex_iter; @@ -28,7 +19,7 @@ struct SubProblemData { void serialize(Archive &ar, const unsigned int version) { ar & subproblem_cost; ar & var_name_and_subgradient; - ar & variables; + ar & solution; ar & single_subpb_costs_under_approx; ar & subproblem_timer; ar & simplex_iter; diff --git a/src/cpp/benders/benders_core/include/SubproblemWorker.h b/src/cpp/benders/benders_core/include/SubproblemWorker.h index f84c80d93..1369b2a8d 100644 --- a/src/cpp/benders/benders_core/include/SubproblemWorker.h +++ b/src/cpp/benders/benders_core/include/SubproblemWorker.h @@ -22,7 +22,7 @@ class SubproblemWorker : public Worker { SolverLogManager&solver_log_manager, Logger logger); virtual ~SubproblemWorker() = default; - void get_solution(PlainData::Variables &vars) const; + void get_solution(std::vector &solution) const; public: void fix_to(Point const &x0) const; diff --git a/src/cpp/benders/benders_core/include/VariablesGroup.h b/src/cpp/benders/benders_core/include/VariablesGroup.h new file mode 100644 index 000000000..8059d014c --- /dev/null +++ b/src/cpp/benders/benders_core/include/VariablesGroup.h @@ -0,0 +1,17 @@ +#pragma once +#include +#include +#include + +class VariablesGroup { + public: + explicit VariablesGroup(const std::vector& all_variables, + const std::vector& patterns); + std::vector> Indices() const { return indices_; } + + private: + void Search(); + const std::vector& all_variables_; + std::vector patterns_; // 
pos + zone1 // pos zon 2 + std::vector> indices_; +}; \ No newline at end of file diff --git a/src/cpp/benders/benders_core/include/common.h b/src/cpp/benders/benders_core/include/common.h index 3cfb2d115..7cd3f5d14 100644 --- a/src/cpp/benders/benders_core/include/common.h +++ b/src/cpp/benders/benders_core/include/common.h @@ -141,6 +141,13 @@ struct BaseOptions { Str2Dbl weights; }; typedef BaseOptions MergeMPSOptions; + +struct ExternalLoopOptions { + double EXT_LOOP_CRITERION_VALUE = 1.0; + double EXT_LOOP_CRITERION_TOLERANCE = 1e-1; + double EXT_LOOP_CRITERION_COUNT_THRESHOLD = 1e-1; +}; + struct BendersBaseOptions : public BaseOptions { explicit BendersBaseOptions(const BaseOptions &base_to_copy) : BaseOptions(base_to_copy) {} @@ -164,12 +171,7 @@ struct BendersBaseOptions : public BaseOptions { std::string LAST_MASTER_BASIS; size_t BATCH_SIZE; -}; - -struct ExternalLoopOptions { - double EXT_LOOP_CRITERION_VALUE = 1.0; - double EXT_LOOP_CRITERION_TOLERANCE = 1e-1; - double EXT_LOOP_CRITERION_COUNT_THRESHOLD = 1e-1; + ExternalLoopOptions EXTERNAL_LOOP_OPTIONS; }; void usage(int argc); diff --git a/src/cpp/benders/benders_mpi/BendersMPI.cpp b/src/cpp/benders/benders_mpi/BendersMPI.cpp index bf2859bc7..46662bf75 100644 --- a/src/cpp/benders/benders_mpi/BendersMPI.cpp +++ b/src/cpp/benders/benders_mpi/BendersMPI.cpp @@ -4,6 +4,7 @@ #include #include +#include "CustomVector.h" #include "Timer.h" #include "glog/logging.h" @@ -40,7 +41,11 @@ void BendersMpi::InitializeProblems() { } current_problem_id++; } - init_problems_ = false; + + // if (_world.rank() == rank_0) { + SetSubproblemsVariablesIndex(); + // } + init_problems_ = false; } void BendersMpi::BuildMasterProblem() { if (_world.rank() == rank_0) { @@ -139,11 +144,36 @@ void BendersMpi::gather_subproblems_cut_package_and_build_cuts( Reduce(GetSubproblemsCpuTime(), cumulative_subproblems_timer_per_iter, std::plus(), rank_0); SetSubproblemsCumulativeCpuTime(cumulative_subproblems_timer_per_iter); + _data.outer_loop_criterion = + ComputeSubproblemsContributionToOuterLoopCriterion(subproblem_data_map); + if (_world.rank() == rank_0) { + outer_loop_criterion_.push_back(_data.outer_loop_criterion); + } // only rank_0 receive non-emtpy gathered_subproblem_map master_build_cuts(gathered_subproblem_map); } } +std::vector +BendersMpi::ComputeSubproblemsContributionToOuterLoopCriterion( + const SubProblemDataMap &subproblem_data_map) { + std::vector outer_loop_criterion_per_sub_problem_per_pattern( + patterns_.size(), {}); + std::vector outer_loop_criterion_sub_problems_map_result( + patterns_.size(), {}); + for (const auto &[subproblem_name, subproblem_data] : subproblem_data_map) { + AddVectors( + outer_loop_criterion_per_sub_problem_per_pattern, + ComputeOuterLoopCriterion(subproblem_name, subproblem_data)); + } + Reduce(outer_loop_criterion_per_sub_problem_per_pattern, + outer_loop_criterion_sub_problems_map_result, std::plus(), + rank_0); + // outer_loop_criterion_sub_problems_map_result/=nbyears; + + return outer_loop_criterion_sub_problems_map_result; +} + SubProblemDataMap BendersMpi::get_subproblem_cut_package() { SubProblemDataMap subproblem_data_map; GetSubproblemCut(subproblem_data_map); diff --git a/src/cpp/benders/benders_mpi/include/BendersMPI.h b/src/cpp/benders/benders_mpi/include/BendersMPI.h index fd3bedaa5..2041cebd0 100644 --- a/src/cpp/benders/benders_mpi/include/BendersMPI.h +++ b/src/cpp/benders/benders_mpi/include/BendersMPI.h @@ -85,4 +85,7 @@ class BendersMpi : public BendersBase { void AllReduce(const T &in_value, 
T &out_value, Op op) const { mpi::all_reduce(_world, in_value, out_value, op); } + virtual std::vector + ComputeSubproblemsContributionToOuterLoopCriterion( + const SubProblemDataMap &subproblem_data_map); }; diff --git a/src/cpp/benders/external_loop/MasterUpdateBase.cpp b/src/cpp/benders/external_loop/MasterUpdateBase.cpp index 2217dc88a..4e1e56ab7 100644 --- a/src/cpp/benders/external_loop/MasterUpdateBase.cpp +++ b/src/cpp/benders/external_loop/MasterUpdateBase.cpp @@ -1,29 +1,37 @@ #include "MasterUpdate.h" -MasterUpdateBase::MasterUpdateBase(pBendersBase benders, double tau) - : benders_(std::move(benders)), lambda_(0), lambda_min_(0) { +MasterUpdateBase::MasterUpdateBase(pBendersBase benders, double tau, + double epsilon_lambda) + : benders_(std::move(benders)), + lambda_(0), + lambda_min_(0), + epsilon_lambda_(epsilon_lambda) { CheckTau(tau); } MasterUpdateBase::MasterUpdateBase(pBendersBase benders, double tau, - const std::string &name) - : MasterUpdateBase(benders, tau) { + const std::string &name, + double epsilon_lambda) + : MasterUpdateBase(benders, tau, epsilon_lambda) { min_invest_constraint_name_ = name; } MasterUpdateBase::MasterUpdateBase(pBendersBase benders, double lambda, double lambda_min, double lambda_max, - double tau) + double tau, double epsilon_lambda) : benders_(std::move(benders)), lambda_(lambda), lambda_min_(lambda_min), - lambda_max_(lambda_max) { + lambda_max_(lambda_max), + epsilon_lambda_(epsilon_lambda) { CheckTau(tau); } MasterUpdateBase::MasterUpdateBase(pBendersBase benders, double lambda, double lambda_min, double lambda_max, - double tau, const std::string &name) - : MasterUpdateBase(benders, lambda, lambda_min, lambda_max, tau) { + double tau, const std::string &name, + double epsilon_lambda) + : MasterUpdateBase(benders, lambda, lambda_min, lambda_max, tau, + epsilon_lambda) { min_invest_constraint_name_ = name; } @@ -50,22 +58,21 @@ void MasterUpdateBase::SetLambdaMaxToMaxInvestmentCosts() { lambda_max_ += obj[var_id] * max_invest.at(var_name); } } -void MasterUpdateBase::Update(const CRITERION &criterion) { - switch (criterion) { - case CRITERION::LOW: - lambda_max_ = - std::min(lambda_max_, benders_->GetBestIterationData().invest_cost); - break; - case CRITERION::HIGH: - lambda_min_ = lambda_; - break; +bool MasterUpdateBase::Update(bool is_criterion_high) { + if (is_criterion_high) { + lambda_min_ = lambda_; + } else { + lambda_max_ = + std::min(lambda_max_, benders_->GetBestIterationData().invest_cost); + } - default: - return; + stop_update_ = std::abs(lambda_max_ - lambda_min_) < epsilon_lambda_; + if (!stop_update_) { + lambda_ = dichotomy_weight_coeff_ * lambda_max_ + + (1 - dichotomy_weight_coeff_) * lambda_min_; + UpdateConstraints(); } - lambda_ = dichotomy_weight_coeff_ * lambda_max_ + - (1 - dichotomy_weight_coeff_) * lambda_min_; - UpdateConstraints(); + return stop_update_; } void MasterUpdateBase::UpdateConstraints() { diff --git a/src/cpp/benders/external_loop/OuterLoop.cpp b/src/cpp/benders/external_loop/OuterLoop.cpp index f86ca3d92..85cc31833 100644 --- a/src/cpp/benders/external_loop/OuterLoop.cpp +++ b/src/cpp/benders/external_loop/OuterLoop.cpp @@ -21,7 +21,7 @@ void OuterLoop::Run() { benders_->DoFreeProblems(false); benders_->InitializeProblems(); benders_->InitExternalValues(); - CRITERION criterion = CRITERION::IS_MET; + bool criterion_check = false; std::vector obj_coeff; if (world_.rank() == 0) { obj_coeff = benders_->MasterObjectiveFunctionCoeffs(); @@ -38,9 +38,10 @@ void OuterLoop::Run() { // de-comment for 
general case // cuts_manager_->Save(benders_->AllCuts()); // auto cuts = cuts_manager_->Load(); - criterion = - criterion_->IsCriterionSatisfied(benders_->BestIterationWorkerMaster()); - if (criterion == CRITERION::HIGH) { + criterion_check = + criterion_->IsCriterionHigh(benders_->GetOuterLoopCriterion()); + // High + if (criterion_check) { std::ostringstream err_msg; err_msg << PrefixMessage(LogUtils::LOGLEVEL::FATAL, "External Loop") << "Criterion cannot be satisfied for your study:\n" @@ -51,25 +52,22 @@ void OuterLoop::Run() { master_updater_->Init(); } - mpi::broadcast(world_, criterion, 0); - - while (criterion != CRITERION::IS_MET) { - benders_->ResetData(criterion_->CriterionValue()); + bool stop_update_master = false; + while (!stop_update_master) { + benders_->init_data(); PrintLog(); benders_->launch(); if (world_.rank() == 0) { - criterion = criterion_->IsCriterionSatisfied( - benders_->BestIterationWorkerMaster()); - master_updater_->Update(criterion); + criterion_check = + criterion_->IsCriterionHigh(benders_->GetOuterLoopCriterion()); + stop_update_master = master_updater_->Update(criterion_check); } - mpi::broadcast(world_, criterion, 0); + mpi::broadcast(world_, stop_update_master, 0); } // last prints PrintLog(); - auto benders_data = benders_->GetCurrentIterationData(); - benders_data.external_loop_criterion = criterion_->CriterionValue(); - benders_->mathLoggerDriver_->Print(benders_data); + benders_->mathLoggerDriver_->Print(benders_->GetCurrentIterationData()); // TODO general-case // cuts_manager_->Save(benders_->AllCuts()); @@ -83,8 +81,8 @@ void OuterLoop::PrintLog() { msg << "*** Outer loop: " << benders_->GetBendersRunNumber(); logger->display_message(msg.str()); msg.str(""); - msg << "*** Criterion value: " << std::scientific << std::setprecision(10) - << criterion_->CriterionValue(); + msg << "*** Sum loss: " << std::scientific << std::setprecision(10) + << criterion_->SumCriterions(); logger->display_message(msg.str()); logger->PrintIterationSeparatorEnd(); } \ No newline at end of file diff --git a/src/cpp/benders/external_loop/OuterloopCriterion.cpp b/src/cpp/benders/external_loop/OuterloopCriterion.cpp index 842546b7a..b480fad26 100644 --- a/src/cpp/benders/external_loop/OuterloopCriterion.cpp +++ b/src/cpp/benders/external_loop/OuterloopCriterion.cpp @@ -1,47 +1,52 @@ #include "OuterLoopCriterion.h" -#include "LoggerUtils.h" +#include +#include +#include "LoggerUtils.h" +bool OuterloopCriterionLossOfLoad::DoubleCompare(double a, double b) { + return a > b + options_.EXT_LOOP_CRITERION_TOLERANCE; +} OuterloopCriterionLossOfLoad::OuterloopCriterionLossOfLoad( const ExternalLoopOptions& options) : options_(options) {} -CRITERION OuterloopCriterionLossOfLoad::IsCriterionSatisfied( - const WorkerMasterData& worker_master_data) { - ProcessSum(worker_master_data); +bool OuterloopCriterionLossOfLoad::IsCriterionHigh( + const std::vector& criterion_values) { + // tmp EXT_LOOP_CRITERION_VALUES must be a vector of size + // criterion_values.size() + EXT_LOOP_CRITERION_VALUES_ = std::vector( + criterion_values.size(), options_.EXT_LOOP_CRITERION_VALUE); + // si une zone est depassé sur au moins + criterion_values_ = criterion_values; + // options_.EXT_LOOP_CRITERION_VALUE --> to vect + // options_.EXT_LOOP_CRITERION_TOLERANCE --> to vect - if (sum_loss_ <= options_.EXT_LOOP_CRITERION_VALUE + - options_.EXT_LOOP_CRITERION_TOLERANCE) { - if (sum_loss_ >= options_.EXT_LOOP_CRITERION_VALUE - - options_.EXT_LOOP_CRITERION_TOLERANCE) { - return CRITERION::IS_MET; - } - 
return CRITERION::LOW; - } else { - return CRITERION::HIGH; - } -} + // return std::equal(criterion_value.begin(), criterion_value.end(), + // options_.EXT_LOOP_CRITERION_VALUE.begin(), + // DoubleCompare); + // return std::equal(criterion_values.begin(), criterion_values.end(), + // EXT_LOOP_CRITERION_VALUES_.begin(), + // &OuterloopCriterionLossOfLoad::DoubleCompare); -void OuterloopCriterionLossOfLoad::ProcessSum( - const WorkerMasterData& worker_master_data) { - sum_loss_ = 0; - for (const auto& [sub_problem_name, sub_problem_data] : - worker_master_data._cut_trace) { - for (auto i(0); i < sub_problem_data.variables.names.size(); ++i) { - auto var_name = sub_problem_data.variables.names[i]; - auto solution = sub_problem_data.variables.values[i]; - if (std::regex_search(var_name, rgx_) && - solution > options_.EXT_LOOP_CRITERION_COUNT_THRESHOLD) { - // 1h of unsupplied energy - sum_loss_ += 1; - } + for (int index(0); index < criterion_values_.size(); ++index) { + if (criterion_values_[index] > EXT_LOOP_CRITERION_VALUES_[index] + + options_.EXT_LOOP_CRITERION_TOLERANCE) { + return true; } } + return false; } +double OuterloopCriterionLossOfLoad::SumCriterions() const { + return std::accumulate(criterion_values_.begin(), criterion_values_.end(), + 0.0); +} std::string OuterloopCriterionLossOfLoad::StateAsString() const { std::ostringstream msg; - msg << "Sum loss = " << sum_loss_ << "\n" + auto sum_loss = + std::accumulate(criterion_values_.begin(), criterion_values_.end(), 0.0); + msg << "Sum loss = " << sum_loss << "\n" << "threshold: " << options_.EXT_LOOP_CRITERION_VALUE << "\n" << "epsilon: " << options_.EXT_LOOP_CRITERION_TOLERANCE << "\n"; diff --git a/src/cpp/benders/external_loop/include/MasterUpdate.h b/src/cpp/benders/external_loop/include/MasterUpdate.h index 51f3a746b..15566c893 100644 --- a/src/cpp/benders/external_loop/include/MasterUpdate.h +++ b/src/cpp/benders/external_loop/include/MasterUpdate.h @@ -3,21 +3,23 @@ class IMasterUpdate { public: - virtual void Update(const CRITERION &criterion) = 0; + virtual bool Update(bool is_criterion_high) = 0; virtual void Init() = 0; }; class MasterUpdateBase : public IMasterUpdate { public: explicit MasterUpdateBase(pBendersBase benders, double lambda, - double lambda_min, double lambda_max, double tau); + double lambda_min, double lambda_max, double tau, + double epsilon_lambda); explicit MasterUpdateBase(pBendersBase benders, double lambda, double lambda_min, double lambda_max, double tau, - const std::string &name); + const std::string &name, double epsilon_lambda); + explicit MasterUpdateBase(pBendersBase benders, double tau, + const std::string &name, double epsilon_lambda); explicit MasterUpdateBase(pBendersBase benders, double tau, - const std::string &name); - explicit MasterUpdateBase(pBendersBase benders, double tau); - void Update(const CRITERION &criterion) override; + double epsilon_lambda); + bool Update(bool is_criterion_high) override; void Init() override; private: @@ -34,4 +36,6 @@ class MasterUpdateBase : public IMasterUpdate { double lambda_max_ = -1; // tau double dichotomy_weight_coeff_ = 0.5; + double epsilon_lambda_ = 1e-1; + bool stop_update_ = true; }; diff --git a/src/cpp/benders/external_loop/include/OuterLoopCriterion.h b/src/cpp/benders/external_loop/include/OuterLoopCriterion.h index 128380873..9a214699b 100644 --- a/src/cpp/benders/external_loop/include/OuterLoopCriterion.h +++ b/src/cpp/benders/external_loop/include/OuterLoopCriterion.h @@ -14,26 +14,27 @@ class CriterionCouldNotBeSatisfied enum 
class CRITERION { LOW, IS_MET, HIGH }; class IOuterLoopCriterion { public: - virtual CRITERION IsCriterionSatisfied( - const WorkerMasterData& worker_master_data) = 0; + virtual bool IsCriterionHigh( + const std::vector& criterion_value) = 0; virtual std::string StateAsString() const = 0; - virtual double CriterionValue() const = 0; + virtual std::vector CriterionValues() const = 0; + virtual double SumCriterions() const = 0; }; class OuterloopCriterionLossOfLoad : public IOuterLoopCriterion { public: explicit OuterloopCriterionLossOfLoad(const ExternalLoopOptions& options); - CRITERION IsCriterionSatisfied( - const WorkerMasterData& milp_solution) override; + bool IsCriterionHigh( + const std::vector& criterion_values) override; std::string StateAsString() const override; - double CriterionValue() const override { return sum_loss_; } + std::vector CriterionValues() const override { + return criterion_values_; + } + double SumCriterions() const override; private: - void ProcessSum(const WorkerMasterData& worker_master_data); - const std::string positive_unsupplied_vars_prefix_ = - "^PositiveUnsuppliedEnergy::"; - const std::regex rgx_ = std::regex(positive_unsupplied_vars_prefix_); - + bool DoubleCompare(double a, double b); ExternalLoopOptions options_; - double sum_loss_ = 0.0; + std::vector EXT_LOOP_CRITERION_VALUES_; + std::vector criterion_values_ = {}; }; diff --git a/src/cpp/benders/factories/BendersFactory.cpp b/src/cpp/benders/factories/BendersFactory.cpp index 6231a1f7b..37e68e9fd 100644 --- a/src/cpp/benders/factories/BendersFactory.cpp +++ b/src/cpp/benders/factories/BendersFactory.cpp @@ -168,11 +168,12 @@ int RunExternalLoop_(char** argv, const std::filesystem::path& options_file, auto benders = PrepareForExecution(benders_loggers, options, argv[0], true, env, world); double tau = 0.5; + double epsilon_lambda = 0.1; std::shared_ptr criterion = std::make_shared( options.GetExternalLoopOptions()); std::shared_ptr master_updater = - std::make_shared(benders, tau); + std::make_shared(benders, tau, epsilon_lambda); std::shared_ptr cuts_manager = std::make_shared(); diff --git a/tests/cpp/ext_loop/ext_loop_test.cpp b/tests/cpp/ext_loop/ext_loop_test.cpp index 4b68c7f1f..87d584dd3 100644 --- a/tests/cpp/ext_loop/ext_loop_test.cpp +++ b/tests/cpp/ext_loop/ext_loop_test.cpp @@ -35,25 +35,12 @@ TEST_F(OuterLoopCriterionTest, IsCriterionHigh) { double epsilon = 1e-1; double max_unsup_energy = 0.1; const ExternalLoopOptions options = {threshold, epsilon, max_unsup_energy}; - PlainData::Variables variables = { - {"PositiveUnsuppliedEnergy::1", "PositiveUnsuppliedEnergy::2", "var3"}, - {0.2, 0.3, 68}}; - double criterion_value = 2.0; // two vars named ^PositiveUnsuppliedEnergy - // with value > max_unsup_energy - - PlainData::SubProblemData subProblemData; - subProblemData.variables = variables; - SubProblemDataMap cut_trace = { - std::make_pair(std::string("P1"), subProblemData)}; - - WorkerMasterData worker_master_data; - worker_master_data._cut_trace = cut_trace; + std::vector criterion_value = {2.0}; OuterloopCriterionLossOfLoad criterion(options); - EXPECT_EQ(criterion.IsCriterionSatisfied(worker_master_data), - CRITERION::HIGH); - EXPECT_EQ(criterion.CriterionValue(), criterion_value); + // criterion_value = 2 > threshold+epsilon + EXPECT_EQ(criterion.IsCriterionHigh(criterion_value), true); } TEST_F(OuterLoopCriterionTest, IsCriterionLow) { @@ -61,51 +48,25 @@ TEST_F(OuterLoopCriterionTest, IsCriterionLow) { double epsilon = 1e-1; double max_unsup_energy = 0.1; const 
ExternalLoopOptions options = {threshold, epsilon, max_unsup_energy}; - PlainData::Variables variables = { - {"PositiveUnsuppliedEnergy::1", "PositiveUnsuppliedEnergy::2", "var3"}, - {0.2, 0.3, 68}}; - double criterion_value = 2.0; // two vars named PositiveUnsuppliedEnergy with - // value > max_unsup_energy - - PlainData::SubProblemData subProblemData; - subProblemData.variables = variables; - SubProblemDataMap cut_trace = { - std::make_pair(std::string("P1"), subProblemData)}; - - WorkerMasterData worker_master_data; - worker_master_data._cut_trace = cut_trace; - + std::vector criterion_value = {2.0}; OuterloopCriterionLossOfLoad criterion(options); - EXPECT_EQ(criterion.IsCriterionSatisfied(worker_master_data), CRITERION::LOW); - EXPECT_EQ(criterion.CriterionValue(), criterion_value); + // criterion_value < threshold - epsilon + EXPECT_EQ(criterion.IsCriterionHigh(criterion_value), false); } -TEST_F(OuterLoopCriterionTest, IsMet) { - double threshold = 2.0; - double epsilon = 1e-1; - double max_unsup_energy = 0.1; - const ExternalLoopOptions options = {threshold, epsilon, max_unsup_energy}; - PlainData::Variables variables = { - {"PositiveUnsuppliedEnergy::1", "PositiveUnsuppliedEnergy::2", "var3"}, - {0.2, 0.3, 68}}; - double criterion_value = 2.0; // two vars named PositiveUnsuppliedEnergy with - // value > max_unsup_energy - - PlainData::SubProblemData subProblemData; - subProblemData.variables = variables; - SubProblemDataMap cut_trace = { - std::make_pair(std::string("P1"), subProblemData)}; - - WorkerMasterData worker_master_data; - worker_master_data._cut_trace = cut_trace; +// TEST_F(OuterLoopCriterionTest, IsMet) { +// double threshold = 2.0; +// double epsilon = 1e-1; +// double max_unsup_energy = 0.1; +// const ExternalLoopOptions options = {threshold, epsilon, max_unsup_energy}; +// std::vector criterion_value = {2.0}; - OuterloopCriterionLossOfLoad criterion(options); +// OuterloopCriterionLossOfLoad criterion(options); - EXPECT_EQ(criterion.IsCriterionSatisfied(worker_master_data), - CRITERION::IS_MET); - EXPECT_EQ(criterion.CriterionValue(), criterion_value); -} +// // threshold - epsilon <= criterion_value <= threshold + epsilon +// EXPECT_EQ(criterion.IsCriterionHigh(criterion_value), CRITERION::IS_MET); +// } //-------------------- MasterUpdateBaseTest ------------------------- const auto STUDY_PATH = @@ -176,21 +137,20 @@ TEST_P(MasterUpdateBaseTest, ConstraintIsAddedBendersMPI) { benders->InitializeProblems(); benders->launch(); - MasterUpdateBase master_updater(benders, 0.5); + MasterUpdateBase master_updater(benders, 0.5, 0.1); // update lambda_max master_updater.Init(); - benders->ResetData(3.0); + benders->init_data(); benders->launch(); auto num_constraints_master_before = benders->MasterGetnrows(); - master_updater.Update(CRITERION::LOW); + master_updater.Update(true); auto num_constraints_master_after = benders->MasterGetnrows(); auto master_variables = benders->MasterVariables(); auto expected_coeffs = benders->MasterObjectiveFunctionCoeffs(); // criterion is low <=> lambda_max = min(lambda_max, invest_cost) - auto lambda_max = (std::min)(LambdaMax(benders), - benders->GetBestIterationData().invest_cost); + auto lambda_max = LambdaMax(benders); auto expected_rhs = 0.5 * lambda_max; // @@ -233,13 +193,13 @@ TEST_P(MasterUpdateBaseTest, InitialRhs) { benders->launch(); - MasterUpdateBase master_updater(benders, 0.5); + MasterUpdateBase master_updater(benders, 0.5, 0.1); // update lambda_max master_updater.Init(); auto lambda_max = LambdaMax(benders); - 
benders->ResetData(3.0); + benders->init_data(); benders->launch(); - master_updater.Update(CRITERION::HIGH); + master_updater.Update(true); auto expected_initial_rhs = lambda_max * 0.5; auto added_row_index = benders->MasterGetnrows() - 1;
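
The new CustomVector.h helper accumulates the per-pattern criterion vectors element-wise and silently ignores a size mismatch. A minimal, self-contained sketch of how it is meant to be used when summing per-batch criterion contributions; the sample vectors and the main() driver are illustrative only:

    #include <algorithm>
    #include <cassert>
    #include <functional>
    #include <vector>

    // Mirror of the AddVectors helper added in CustomVector.h:
    // element-wise a += b, doing nothing when the sizes differ.
    template <class T>
    void AddVectors(std::vector<T>& a, const std::vector<T>& b) {
      if (a.size() == b.size()) {
        std::transform(a.begin(), a.end(), b.begin(), a.begin(), std::plus<T>());
      }
    }

    int main() {
      // Hypothetical per-pattern loss-of-load counts from two batches.
      std::vector<double> total = {0.0, 0.0};
      AddVectors(total, std::vector<double>{3.0, 1.0});
      AddVectors(total, std::vector<double>{2.0, 0.0});
      assert(total[0] == 5.0 && total[1] == 1.0);
      // A size mismatch is skipped silently rather than reported:
      AddVectors(total, std::vector<double>{1.0});  // no effect
      return 0;
    }

Because the size mismatch is a silent no-op, a bug in the pattern bookkeeping would go unnoticed in SolveBatches; an assert or a log line there may be worth considering.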
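
VariablesGroup (new in benders_core) maps each regex pattern to the column indices of the subproblem variables that match it, and SetSubproblemsVariablesIndex() relies on every subproblem sharing the same column layout. A standalone sketch of the intended grouping, re-declaring the class locally and using invented column names:

    #include <iostream>
    #include <regex>
    #include <string>
    #include <vector>

    // Local re-declaration with the same contract as VariablesGroup.cpp:
    // Indices()[p] lists the positions of the variables matching patterns[p].
    class VariablesGroup {
     public:
      VariablesGroup(const std::vector<std::string>& all_variables,
                     const std::vector<std::regex>& patterns)
          : all_variables_(all_variables), patterns_(patterns) {
        Search();
      }
      std::vector<std::vector<int>> Indices() const { return indices_; }

     private:
      void Search() {
        indices_.assign(patterns_.size(), {});
        int var_index = 0;
        for (const auto& variable : all_variables_) {
          int pattern_index = 0;
          for (const auto& pattern : patterns_) {
            if (std::regex_search(variable, pattern)) {
              indices_[pattern_index].push_back(var_index);
            }
            ++pattern_index;
          }
          ++var_index;
        }
      }
      // Stored by value here to keep the example self-contained.
      std::vector<std::string> all_variables_;
      std::vector<std::regex> patterns_;
      std::vector<std::vector<int>> indices_;
    };

    int main() {
      // Invented column names; the real ones come from _solver->get_col_names().
      const std::vector<std::string> cols = {
          "NTCDirect::link::hour<0>",
          "PositiveUnsuppliedEnergy::area1::hour<0>",
          "PositiveUnsuppliedEnergy::area2::hour<0>"};
      const std::vector<std::regex> patterns = {
          std::regex("^PositiveUnsuppliedEnergy::")};
      VariablesGroup group(cols, patterns);
      for (int col : group.Indices()[0]) std::cout << col << ' ';  // prints: 1 2
      std::cout << '\n';
      return 0;
    }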
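
BendersBase::ComputeOuterLoopCriterion then counts, per pattern, how many of the grouped variables take a value above EXT_LOOP_CRITERION_COUNT_THRESHOLD in the subproblem solution, each hit being read as one hour of unsupplied energy. A free-function sketch of that counting step, assuming the index groups come from VariablesGroup and the dense solution vector from get_solution():

    #include <cstddef>
    #include <vector>

    // Sketch of the per-pattern counting loop in ComputeOuterLoopCriterion.
    // var_indices[p] holds the columns matching pattern p (see VariablesGroup),
    // solution is the subproblem primal solution, and count_threshold plays the
    // role of EXT_LOOP_CRITERION_COUNT_THRESHOLD.
    std::vector<double> CountLossOfLoad(
        const std::vector<std::vector<int>>& var_indices,
        const std::vector<double>& solution, double count_threshold) {
      std::vector<double> criterion(var_indices.size(), 0.0);
      for (std::size_t pattern = 0; pattern < var_indices.size(); ++pattern) {
        for (int col : var_indices[pattern]) {
          if (solution[col] > count_threshold) {
            criterion[pattern] += 1.0;  // one more hour with unsupplied energy
          }
        }
      }
      return criterion;
    }

The per-subproblem vectors are then summed with AddVectors and reduced across MPI ranks in ComputeSubproblemsContributionToOuterLoopCriterion.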
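
The reworked MasterUpdateBase::Update is a bisection on lambda: a high criterion raises lambda_min, a low one caps lambda_max with the best iteration's investment cost, and the outer loop stops once |lambda_max - lambda_min| < epsilon_lambda (0.1 in BendersFactory.cpp). A toy model of that control flow; the feedback used by the driver ("the criterion is high below 40" and "the feasible plan costs the current lambda") is invented so the example runs to completion:

    #include <algorithm>
    #include <cmath>
    #include <iostream>

    // Minimal model of the dichotomy in MasterUpdateBase::Update; the real class
    // also rewrites the RHS of the min-investment constraint after each move.
    struct LambdaDichotomy {
      double lambda = 0.0;
      double lambda_min = 0.0;
      double lambda_max = 0.0;
      double weight = 0.5;          // dichotomy_weight_coeff_
      double epsilon_lambda = 0.1;  // stopping width of the bracket

      // Returns true when the bracket is tight enough, mirroring stop_update_.
      bool Update(bool is_criterion_high, double best_invest_cost) {
        if (is_criterion_high) {
          lambda_min = lambda;  // more investment is needed
        } else {
          lambda_max = std::min(lambda_max, best_invest_cost);
        }
        const bool stop = std::abs(lambda_max - lambda_min) < epsilon_lambda;
        if (!stop) {
          lambda = weight * lambda_max + (1.0 - weight) * lambda_min;
        }
        return stop;
      }
    };

    int main() {
      LambdaDichotomy d;
      d.lambda_max = 100.0;
      bool stop = false;
      int outer_runs = 0;
      while (!stop) {
        const bool criterion_high = d.lambda < 40.0;  // invented feedback
        const double best_invest_cost = d.lambda;     // invented feedback
        stop = d.Update(criterion_high, best_invest_cost);
        ++outer_runs;
      }
      std::cout << "lambda converged near " << d.lambda << " after "
                << outer_runs << " outer runs\n";
      return 0;
    }

Returning the stop flag from Update is what lets OuterLoop::Run broadcast a single boolean instead of the old three-state CRITERION enum.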
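
OuterloopCriterionLossOfLoad::IsCriterionHigh now flags the run as "high" as soon as any per-pattern count exceeds its target plus the tolerance; in common.h the target defaults to EXT_LOOP_CRITERION_VALUE = 1.0 and the tolerance to 1e-1. A free-function sketch with the target and tolerance passed explicitly instead of read from ExternalLoopOptions:

    #include <vector>

    // Sketch of the "is the loss-of-load criterion violated?" test: true as soon
    // as one pattern's count exceeds target + tolerance, false otherwise.
    bool IsCriterionHigh(const std::vector<double>& criterion_values,
                         double target,       // EXT_LOOP_CRITERION_VALUE
                         double tolerance) {  // EXT_LOOP_CRITERION_TOLERANCE
      for (const double value : criterion_values) {
        if (value > target + tolerance) {
          return true;
        }
      }
      return false;
    }

With the defaults, a single-pattern count of {2.0} is high (2.0 > 1.0 + 0.1), which is the kind of case the updated IsCriterionHigh unit test exercises ("criterion_value = 2 > threshold+epsilon").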