diff --git a/.bazelignore b/.bazelignore index 807b1738f8f..3d8c9b42e33 100644 --- a/.bazelignore +++ b/.bazelignore @@ -4,3 +4,7 @@ install_make build_cross dependencies/install dependencies/sources +temp_cpp +temp_dotnet +temp_java +temp_python diff --git a/.bazelrc b/.bazelrc index bbd40359de6..57502972c07 100644 --- a/.bazelrc +++ b/.bazelrc @@ -22,8 +22,8 @@ build --apple_platform_type=macos build --enable_platform_specific_config build:linux --cxxopt="-std=c++17" --cxxopt=-Wno-sign-compare --host_cxxopt="-std=c++17" --host_cxxopt=-Wno-sign-compare -build:macos --cxxopt="-std=c++17" --cxxopt=-Wno-sign-compare --cxxopt=-mmacos-version-min=10.15 --features=-supports_dynamic_linker -build:macos --host_cxxopt="-std=c++17" --host_cxxopt=-Wno-sign-compare --host_cxxopt=-mmacos-version-min=10.15 +build:macos --cxxopt="-std=c++17" --cxxopt=-Wno-sign-compare --cxxopt=-mmacos-version-min=10.15 --cxxopt=-Wno-dangling-field --features=-supports_dynamic_linker +build:macos --host_cxxopt="-std=c++17" --host_cxxopt=-Wno-sign-compare --host_cxxopt=-mmacos-version-min=10.15 --host_cxxopt=-Wno-dangling-field build:windows --cxxopt="/std:c++20" --host_cxxopt="/std:c++20" # Enable the runfiles symlink tree on Windows. This makes it possible to build @@ -37,7 +37,16 @@ build:windows --enable_runfiles # build --subcommands=pretty_print # Print test logs for failed tests. -test --test_output=errors +test --test_output=errors --test_timeout_filters=-eternal # Put user-specific options in .bazelrc.user try-import %workspace%/.bazelrc.user + +# asan +build:asan --strip=never +build:asan --copt -fsanitize=address +build:asan --copt -DADDRESS_SANITIZER +build:asan --copt -O1 +build:asan --copt -g +build:asan --copt -fno-omit-frame-pointer +build:asan --linkopt -fsanitize=address diff --git a/.github/workflows/aarch64_toolchain.yml b/.github/not_used/aarch64_toolchain.yml similarity index 95% rename from .github/workflows/aarch64_toolchain.yml rename to .github/not_used/aarch64_toolchain.yml index 30402ff6b15..38e4ad4a1ea 100644 --- a/.github/workflows/aarch64_toolchain.yml +++ b/.github/not_used/aarch64_toolchain.yml @@ -1,3 +1,4 @@ +# ref: https://toolchains.bootlin.com/ name: aarch64 Toolchain on: [push, pull_request, workflow_dispatch] diff --git a/.github/workflows/amd64_cmake_glop_cpp.yml b/.github/not_used/amd64_cmake_glop_cpp.yml similarity index 88% rename from .github/workflows/amd64_cmake_glop_cpp.yml rename to .github/not_used/amd64_cmake_glop_cpp.yml index 8971c7753fb..f2048bdc7b2 100644 --- a/.github/workflows/amd64_cmake_glop_cpp.yml +++ b/.github/not_used/amd64_cmake_glop_cpp.yml @@ -39,7 +39,15 @@ jobs: install_target: install }, { - runner: "macos-latest", + runner: "macos-13", # last macos intel based runner + generator: "Xcode", + config: Release, + build_target: ALL_BUILD, + test_target: RUN_TESTS, + install_target: install + }, + { + runner: "macos-latest", # macos arm64 based runner generator: "Xcode", config: Release, build_target: ALL_BUILD, diff --git a/.github/not_used/amd64_docker_bazel.yml b/.github/not_used/amd64_docker_bazel.yml new file mode 100644 index 00000000000..f28c0d1b322 --- /dev/null +++ b/.github/not_used/amd64_docker_bazel.yml @@ -0,0 +1,37 @@ +# ref: https://github.com/docker-library/official-images +name: amd64 Docker Bazel + +on: [push, pull_request, workflow_dispatch] + +jobs: + bazel: + strategy: + matrix: + platform: [amd64] # arm64 riscv64 + distro: [ + almalinux, + # alpine, + archlinux, + debian, + fedora, + opensuse, + rockylinux, + ubuntu + ] + fail-fast: 
false
+    name: ${{ matrix.platform }} • ${{ matrix.distro }} • Bazel
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - name: Check docker
+        run: |
+          docker info
+          docker buildx ls
+      - name: Build env image
+        run: make --directory=bazel ${{ matrix.platform }}_${{ matrix.distro }}_env
+      - name: Build devel image
+        run: make --directory=bazel ${{ matrix.platform }}_${{ matrix.distro }}_devel
+      - name: Build project
+        run: make --directory=bazel ${{ matrix.platform }}_${{ matrix.distro }}_build
+      - name: Test project
+        run: make --directory=bazel ${{ matrix.platform }}_${{ matrix.distro }}_test
diff --git a/.github/workflows/amd64_docker_cmake.yml b/.github/not_used/amd64_docker_cmake.yml
similarity index 90%
rename from .github/workflows/amd64_docker_cmake.yml
rename to .github/not_used/amd64_docker_cmake.yml
index d96a34ad114..8c3f2e55c20 100644
--- a/.github/workflows/amd64_docker_cmake.yml
+++ b/.github/not_used/amd64_docker_cmake.yml
@@ -1,4 +1,4 @@
-# ref: https://github.com/actions/runner-images
+# ref: https://github.com/docker-library/official-images
 name: amd64 Docker CMake
 
 on: [push, pull_request, workflow_dispatch]
@@ -7,7 +7,7 @@ jobs:
   cmake:
     strategy:
       matrix:
-        distro: [alpine, archlinux, centos, debian, fedora, opensuse, ubuntu]
+        distro: [almalinux, alpine, archlinux, debian, fedora, opensuse, rockylinux, ubuntu]
         lang: [cpp, python, dotnet, java]
       fail-fast: false
     name: ${{ matrix.distro }} • CMake • ${{ matrix.lang }}
diff --git a/.github/workflows/amd64_freebsd_cmake.yml b/.github/not_used/amd64_freebsd_cmake.yml
similarity index 88%
rename from .github/workflows/amd64_freebsd_cmake.yml
rename to .github/not_used/amd64_freebsd_cmake.yml
index 84d508717ae..f70f58c2ab2 100644
--- a/.github/workflows/amd64_freebsd_cmake.yml
+++ b/.github/not_used/amd64_freebsd_cmake.yml
@@ -2,7 +2,7 @@ name: amd64 FreeBSD CMake
 
 on: [push, pull_request, workflow_dispatch]
 
-# Only macos runner provide virtualisation with vagrant/virtualbox installed.
+# Only the macos-12 runner provides virtualisation with vagrant/virtualbox installed.
# ref: https://github.com/actions/runner-images/tree/main/images/macos # ref: https://app.vagrantup.com/generic/boxes/freebsd13 jobs: @@ -21,7 +21,7 @@ jobs: lang: java allow_failure: true name: FreeBSD • CMake • ${{ matrix.lang }} - runs-on: macos-latest + runs-on: macos-12 steps: - uses: actions/checkout@v4 - name: vagrant version diff --git a/.github/workflows/amd64_linux_bazel.yml b/.github/not_used/amd64_linux_bazel.yml similarity index 86% rename from .github/workflows/amd64_linux_bazel.yml rename to .github/not_used/amd64_linux_bazel.yml index 07dc21c7aea..7e4818dbbd4 100644 --- a/.github/workflows/amd64_linux_bazel.yml +++ b/.github/not_used/amd64_linux_bazel.yml @@ -24,6 +24,8 @@ jobs: - uses: actions/checkout@v4 - name: Check Java run: java -version + - name: Check mvn + run: mvn --version - name: Setup Python uses: actions/setup-python@v4 with: @@ -39,6 +41,12 @@ jobs: sudo apt-get install bazel - name: Check Bazel run: bazel version + - name: Change Python in WORKSPACE + run: > + sed + -i + 's/\(DEFAULT_PYTHON =\) "3.[0-9]\+"/\1 "${{ matrix.python.version }}"/g' + WORKSPACE - name: Build run: > bazel build diff --git a/.github/workflows/amd64_linux_cmake_coinor_off.yml b/.github/not_used/amd64_linux_cmake_coinor_off.yml similarity index 98% rename from .github/workflows/amd64_linux_cmake_coinor_off.yml rename to .github/not_used/amd64_linux_cmake_coinor_off.yml index 8796f2b3876..28879a37e95 100644 --- a/.github/workflows/amd64_linux_cmake_coinor_off.yml +++ b/.github/not_used/amd64_linux_cmake_coinor_off.yml @@ -26,7 +26,7 @@ jobs: run: swig -version # Install .NET SDKs - name: Setup .NET 6.0 - uses: actions/setup-dotnet@v1 + uses: actions/setup-dotnet@v4 with: dotnet-version: 6.0.x - name: Check dotnet diff --git a/.github/workflows/amd64_linux_cmake_dotnet.yml b/.github/not_used/amd64_linux_cmake_dotnet.yml similarity index 97% rename from .github/workflows/amd64_linux_cmake_dotnet.yml rename to .github/not_used/amd64_linux_cmake_dotnet.yml index 6e7b373a593..2dff4dcb9df 100644 --- a/.github/workflows/amd64_linux_cmake_dotnet.yml +++ b/.github/not_used/amd64_linux_cmake_dotnet.yml @@ -15,7 +15,7 @@ jobs: sudo apt install -y swig swig -version - name: Setup .NET 6.0 - uses: actions/setup-dotnet@v1 + uses: actions/setup-dotnet@v4 with: dotnet-version: 6.0.x - name: Check dotnet diff --git a/.github/workflows/amd64_linux_cmake_scip_off.yml b/.github/not_used/amd64_linux_cmake_scip_off.yml similarity index 98% rename from .github/workflows/amd64_linux_cmake_scip_off.yml rename to .github/not_used/amd64_linux_cmake_scip_off.yml index a77b24046af..6249d73aa87 100644 --- a/.github/workflows/amd64_linux_cmake_scip_off.yml +++ b/.github/not_used/amd64_linux_cmake_scip_off.yml @@ -26,7 +26,7 @@ jobs: run: swig -version # Install .NET SDKs - name: Setup .NET 6.0 - uses: actions/setup-dotnet@v1 + uses: actions/setup-dotnet@v4 with: dotnet-version: 6.0.x - name: Check dotnet diff --git a/.github/workflows/amd64_macos_bazel.yml b/.github/not_used/amd64_macos_bazel.yml similarity index 83% rename from .github/workflows/amd64_macos_bazel.yml rename to .github/not_used/amd64_macos_bazel.yml index cb4ecc4901f..a478b5b6040 100644 --- a/.github/workflows/amd64_macos_bazel.yml +++ b/.github/not_used/amd64_macos_bazel.yml @@ -16,7 +16,7 @@ jobs: ] fail-fast: false name: MacOS • Bazel • Python-${{ matrix.python.version }} - runs-on: macos-latest + runs-on: macos-13 # last macos intel based runner steps: - uses: actions/checkout@v4 - name: Set Java to OpenJDK 17 (Temurin) @@ -32,13 +32,14 @@ jobs: 
python-version: ${{ matrix.python.version }} - name: Check Python run: python --version - - name: Install Bazel - run: | - brew update - brew unlink bazelisk - brew install bazel - name: Check Bazel run: bazel version + - name: Change Python in WORKSPACE + run: > + sed + -i '' + -e 's/\(DEFAULT_PYTHON =\) "3.[0-9]*"/\1 "${{ matrix.python.version }}"/g' + WORKSPACE - name: Build run: > bazel build diff --git a/.github/workflows/amd64_macos_cmake_cpp.yml b/.github/not_used/amd64_macos_cmake_cpp.yml similarity index 96% rename from .github/workflows/amd64_macos_cmake_cpp.yml rename to .github/not_used/amd64_macos_cmake_cpp.yml index 70893e6453e..f89a97db9d0 100644 --- a/.github/workflows/amd64_macos_cmake_cpp.yml +++ b/.github/not_used/amd64_macos_cmake_cpp.yml @@ -14,7 +14,7 @@ jobs: ] fail-fast: false name: MacOS • ${{ matrix.cmake.generator }} • C++ - runs-on: macos-latest + runs-on: macos-13 # last macos intel based runner steps: - uses: actions/checkout@v4 - name: Check cmake diff --git a/.github/workflows/amd64_macos_cmake_dotnet.yml b/.github/not_used/amd64_macos_cmake_dotnet.yml similarity index 95% rename from .github/workflows/amd64_macos_cmake_dotnet.yml rename to .github/not_used/amd64_macos_cmake_dotnet.yml index bbf678ec03c..cbae5400bd5 100644 --- a/.github/workflows/amd64_macos_cmake_dotnet.yml +++ b/.github/not_used/amd64_macos_cmake_dotnet.yml @@ -14,7 +14,7 @@ jobs: ] fail-fast: false name: MacOS • ${{ matrix.cmake.generator }} • .Net - runs-on: macos-latest + runs-on: macos-13 # last macos intel based runner steps: - uses: actions/checkout@v4 - name: Swig install @@ -22,7 +22,7 @@ jobs: brew install swig swig -version - name: Setup .NET 6.0 - uses: actions/setup-dotnet@v1 + uses: actions/setup-dotnet@v4 with: dotnet-version: 6.0.x - name: Check dotnet diff --git a/.github/workflows/amd64_macos_cmake_java.yml b/.github/not_used/amd64_macos_cmake_java.yml similarity index 96% rename from .github/workflows/amd64_macos_cmake_java.yml rename to .github/not_used/amd64_macos_cmake_java.yml index fd09b62d5b9..c5c89a15c7c 100644 --- a/.github/workflows/amd64_macos_cmake_java.yml +++ b/.github/not_used/amd64_macos_cmake_java.yml @@ -14,7 +14,7 @@ jobs: ] fail-fast: false name: MacOS • ${{ matrix.cmake.generator }} • Java - runs-on: macos-latest + runs-on: macos-13 # last macos intel based runner steps: - uses: actions/checkout@v4 - name: Swig install diff --git a/.github/workflows/amd64_macos_cmake_python.yml b/.github/not_used/amd64_macos_cmake_python.yml similarity index 97% rename from .github/workflows/amd64_macos_cmake_python.yml rename to .github/not_used/amd64_macos_cmake_python.yml index 7aa31ab2a00..b1729db0c90 100644 --- a/.github/workflows/amd64_macos_cmake_python.yml +++ b/.github/not_used/amd64_macos_cmake_python.yml @@ -20,7 +20,7 @@ jobs: ] fail-fast: false name: MacOS • ${{ matrix.cmake.generator }} • Python-${{ matrix.python.version }} - runs-on: macos-latest + runs-on: macos-13 # last macos intel based runner steps: - uses: actions/checkout@v4 - name: Swig install diff --git a/.github/workflows/amd64_windows_cmake_dotnet.yml b/.github/not_used/amd64_windows_cmake_dotnet.yml similarity index 77% rename from .github/workflows/amd64_windows_cmake_dotnet.yml rename to .github/not_used/amd64_windows_cmake_dotnet.yml index 9df698847bf..4e8e612092c 100644 --- a/.github/workflows/amd64_windows_cmake_dotnet.yml +++ b/.github/not_used/amd64_windows_cmake_dotnet.yml @@ -14,17 +14,12 @@ jobs: fail-fast: false name: Windows • ${{ matrix.cmake.generator }} • .Net runs-on: 
windows-latest
+    env:
+      CTEST_OUTPUT_ON_FAILURE: 1
     steps:
       - uses: actions/checkout@v4
-      - name: Install SWIG 4.1.1
-        run: |
-          (New-Object System.Net.WebClient).DownloadFile("http://prdownloads.sourceforge.net/swig/swigwin-4.1.1.zip","swigwin-4.1.1.zip");
-          Expand-Archive .\swigwin-4.1.1.zip .;
-          echo "$((Get-Item .).FullName)/swigwin-4.1.1" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
-      - name: Check swig
-        run: swig -version
       - name: Setup .NET 6.0
-        uses: actions/setup-dotnet@v1
+        uses: actions/setup-dotnet@v4
         with:
           dotnet-version: 6.0.x
       - name: Check dotnet
diff --git a/.github/not_used/amd64_windows_cmake_java.yml b/.github/not_used/amd64_windows_cmake_java.yml
new file mode 100644
index 00000000000..190ebf7d3ff
--- /dev/null
+++ b/.github/not_used/amd64_windows_cmake_java.yml
@@ -0,0 +1,75 @@
+# ref: https://github.com/actions/runner-images
+name: amd64 Windows CMake Java
+
+on: [push, pull_request, workflow_dispatch]
+
+# Building using the github runner environment directly.
+jobs:
+  native:
+    strategy:
+      matrix:
+        cmake: [
+          {generator: "Visual Studio 17 2022", config: Release, build_target: ALL_BUILD, test_target: RUN_TESTS, install_target: INSTALL},
+        ]
+        java: [
+          # see https://endoflife.date/azul-zulu
+          {distrib: 'zulu', version: '8'}, # 2030/12
+          {distrib: 'zulu', version: '11'}, # 2026/09
+          {distrib: 'zulu', version: '17'}, # 2029/09
+          {distrib: 'zulu', version: '21'}, # 2031/09
+          # see https://endoflife.date/eclipse-temurin
+          {distrib: 'temurin', version: '8'}, # 2026/11
+          {distrib: 'temurin', version: '11'}, # 2027/10
+          {distrib: 'temurin', version: '17'}, # 2027/10
+          {distrib: 'temurin', version: '21'}, # 2029/12
+          # see https://endoflife.date/microsoft-build-of-openjdk
+          {distrib: 'microsoft', version: '11'}, # 2027/09
+          {distrib: 'microsoft', version: '17'}, # 2027/09
+          {distrib: 'microsoft', version: '21'}, # 2028/09
+        ]
+      fail-fast: false
+    name: Windows • ${{ matrix.cmake.generator }} • ${{ matrix.java.distrib }}-${{ matrix.java.version }}
+    runs-on: windows-latest
+    env:
+      CTEST_OUTPUT_ON_FAILURE: 1
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-java@v4
+        with:
+          distribution: ${{ matrix.java.distrib }}
+          java-version: ${{ matrix.java.version }}
+      - name: Update maven
+        run: |
+          choco upgrade maven
+          echo "C:\ProgramData\chocolatey\lib\maven\apache-maven-3.9.9\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
+      - name: Check java
+        run: |
+          java -version
+          mvn --version
+      - name: Check cmake
+        run: cmake --version
+      - name: Configure
+        run: >
+          cmake -S. 
-Bbuild + -G "${{ matrix.cmake.generator }}" + -DCMAKE_BUILD_TYPE=${{ matrix.cmake.config }} + -DBUILD_CXX_SAMPLES=OFF -DBUILD_CXX_EXAMPLES=OFF + -DBUILD_JAVA=ON + - name: Build + run: > + cmake --build build + --config ${{ matrix.cmake.config }} + --target ${{ matrix.cmake.build_target }} + -v -j2 + - name: Test + run: > + cmake --build build + --config ${{ matrix.cmake.config }} + --target ${{ matrix.cmake.test_target }} + -v + - name: Install + run: > + cmake --build build + --config ${{ matrix.cmake.config }} + --target ${{ matrix.cmake.install_target }} + -v diff --git a/.github/workflows/amd64_windows_cmake_python.yml b/.github/not_used/amd64_windows_cmake_python.yml similarity index 83% rename from .github/workflows/amd64_windows_cmake_python.yml rename to .github/not_used/amd64_windows_cmake_python.yml index 8f40e36d054..a8d0babfd60 100644 --- a/.github/workflows/amd64_windows_cmake_python.yml +++ b/.github/not_used/amd64_windows_cmake_python.yml @@ -20,6 +20,8 @@ jobs: fail-fast: false name: Windows • ${{ matrix.cmake.generator }} • Python-${{ matrix.python.version }} runs-on: windows-latest + env: + CTEST_OUTPUT_ON_FAILURE: 1 steps: - uses: actions/checkout@v4 - uses: actions/setup-python@v4 @@ -27,13 +29,6 @@ jobs: python-version: ${{ matrix.python.version }} - name: Install python3 run: python3 -m pip install --user mypy-protobuf absl-py setuptools wheel numpy pandas - - name: Install SWIG 4.1.1 - run: | - (New-Object System.Net.WebClient).DownloadFile("http://prdownloads.sourceforge.net/swig/swigwin-4.1.1.zip","swigwin-4.1.1.zip"); - Expand-Archive .\swigwin-4.1.1.zip .; - echo "$((Get-Item .).FullName)/swigwin-4.1.1" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append - - name: Check swig - run: swig -version - name: Add Python binaries to path run: > echo "$((Get-Item ~).FullName)/AppData/Roaming/Python/${{ matrix.python.dir }}/Scripts" | diff --git a/.github/workflows/amd64_windows_bazel.yml b/.github/not_used/arm64_macos_bazel.yml similarity index 70% rename from .github/workflows/amd64_windows_bazel.yml rename to .github/not_used/arm64_macos_bazel.yml index f33ca41ab52..59d67a5d1ee 100644 --- a/.github/workflows/amd64_windows_bazel.yml +++ b/.github/not_used/arm64_macos_bazel.yml @@ -1,5 +1,5 @@ # ref: https://github.com/actions/runner-images -name: amd64 Windows Bazel +name: arm64 MacOS Bazel on: [push, pull_request, workflow_dispatch] @@ -8,24 +8,22 @@ jobs: native: strategy: matrix: - runner: [windows-2022] python: [ {version: '3.9'}, {version: '3.10'}, {version: '3.11'}, {version: '3.12'}, ] - fail-fast: false # Don't cancel all jobs if one fails. 
-    name: ${{ matrix.runner }} • Bazel • Python-${{ matrix.python.version }}
-    runs-on: ${{ matrix.runner }}
+      fail-fast: false
+    name: MacOS • Bazel • Python-${{ matrix.python.version }}
+    runs-on: macos-latest # macos arm64 based runner
     steps:
       - uses: actions/checkout@v4
-      - uses: actions/setup-java@v3
+      - name: Set Java to OpenJDK 17 (Temurin)
+        uses: actions/setup-java@v3
         with:
           distribution: 'temurin'
           java-version: '17'
-      - name: Check java
-        run: java -version
       - name: Check mvn
         run: mvn --version
       - name: Setup Python
@@ -34,10 +32,14 @@
           python-version: ${{ matrix.python.version }}
       - name: Check Python
         run: python --version
-      - name: Install Bazel
-        run: choco install bazel
       - name: Check Bazel
         run: bazel version
+      - name: Change Python in WORKSPACE
+        run: >
+          sed
+          -i ''
+          -e 's/\(DEFAULT_PYTHON =\) "3.[0-9]*"/\1 "${{ matrix.python.version }}"/g'
+          WORKSPACE
       - name: Build
         run: >
           bazel build
diff --git a/.github/not_used/arm64_macos_cmake_cpp.yml b/.github/not_used/arm64_macos_cmake_cpp.yml
new file mode 100644
index 00000000000..8388c59aa14
--- /dev/null
+++ b/.github/not_used/arm64_macos_cmake_cpp.yml
@@ -0,0 +1,47 @@
+# ref: https://github.com/actions/runner-images
+name: arm64 MacOS CMake C++
+
+on: [push, pull_request, workflow_dispatch]
+
+# Building using the github runner environment directly.
+jobs:
+  native:
+    strategy:
+      matrix:
+        cmake: [
+          {generator: "Xcode", config: Release, build_target: ALL_BUILD, test_target: RUN_TESTS, install_target: install},
+          {generator: "Unix Makefiles", config: Release, build_target: all, test_target: test, install_target: install},
+        ]
+      fail-fast: false
+    name: MacOS • ${{ matrix.cmake.generator }} • C++
+    runs-on: macos-latest # macos arm64 based runner
+    steps:
+      - uses: actions/checkout@v4
+      - name: Check cmake
+        run: cmake --version
+      - name: Configure
+        run: >
+          cmake -S. -Bbuild
+          -G "${{ matrix.cmake.generator }}"
+          -DCMAKE_BUILD_TYPE=${{ matrix.cmake.config }}
+          -DBUILD_DEPS=ON
+          -DCMAKE_INSTALL_PREFIX=install
+      - name: Build
+        run: >
+          cmake --build build
+          --config ${{ matrix.cmake.config }}
+          --target ${{ matrix.cmake.build_target }}
+          -v -j2
+      - name: Test
+        run: >
+          CTEST_OUTPUT_ON_FAILURE=1
+          cmake --build build
+          --config ${{ matrix.cmake.config }}
+          --target ${{ matrix.cmake.test_target }}
+          -v
+      - name: Install
+        run: >
+          cmake --build build
+          --config ${{ matrix.cmake.config }}
+          --target ${{ matrix.cmake.install_target }}
+          -v
diff --git a/.github/not_used/arm64_macos_cmake_dotnet.yml b/.github/not_used/arm64_macos_cmake_dotnet.yml
new file mode 100644
index 00000000000..193d3675b85
--- /dev/null
+++ b/.github/not_used/arm64_macos_cmake_dotnet.yml
@@ -0,0 +1,58 @@
+# ref: https://github.com/actions/runner-images
+name: arm64 MacOS CMake .Net
+
+on: [push, pull_request, workflow_dispatch]
+
+# Building using the github runner environment directly.
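+# Note: same stages as the amd64 (macos-13, intel) .Net workflow above; only
+# the runner label changes, since macos-latest now maps to arm64 machines.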
+jobs:
+  native:
+    strategy:
+      matrix:
+        cmake: [
+          {generator: "Xcode", config: Release, build_target: ALL_BUILD, test_target: RUN_TESTS, install_target: install},
+          {generator: "Unix Makefiles", config: Release, build_target: all, test_target: test, install_target: install},
+        ]
+      fail-fast: false
+    name: MacOS • ${{ matrix.cmake.generator }} • .Net
+    runs-on: macos-latest # macos arm64 based runner
+    steps:
+      - uses: actions/checkout@v4
+      - name: Swig install
+        run: |
+          brew install swig
+          swig -version
+      - name: Setup .NET 6.0
+        uses: actions/setup-dotnet@v4
+        with:
+          dotnet-version: 6.0.x
+      - name: Check dotnet
+        run: dotnet --info
+      - name: Check cmake
+        run: cmake --version
+      - name: Configure
+        run: >
+          cmake -S. -Bbuild
+          -G "${{ matrix.cmake.generator }}"
+          -DCMAKE_BUILD_TYPE=${{ matrix.cmake.config }}
+          -DBUILD_CXX_SAMPLES=OFF -DBUILD_CXX_EXAMPLES=OFF
+          -DBUILD_DOTNET=ON
+          -DCMAKE_INSTALL_PREFIX=install
+      - name: Build
+        run: >
+          cmake --build build
+          --config ${{ matrix.cmake.config }}
+          --target ${{ matrix.cmake.build_target }}
+          -v -j2
+      - name: Test
+        run: >
+          CTEST_OUTPUT_ON_FAILURE=1
+          cmake --build build
+          --config ${{ matrix.cmake.config }}
+          --target ${{ matrix.cmake.test_target }}
+          -v
+      - name: Install
+        run: >
+          cmake --build build
+          --config ${{ matrix.cmake.config }}
+          --target ${{ matrix.cmake.install_target }}
+          -v
diff --git a/.github/not_used/arm64_macos_cmake_java.yml b/.github/not_used/arm64_macos_cmake_java.yml
new file mode 100644
index 00000000000..21e25b5c811
--- /dev/null
+++ b/.github/not_used/arm64_macos_cmake_java.yml
@@ -0,0 +1,55 @@
+# ref: https://github.com/actions/runner-images
+name: arm64 MacOS CMake Java
+
+on: [push, pull_request, workflow_dispatch]
+
+# Building using the github runner environment directly.
+jobs:
+  native:
+    strategy:
+      matrix:
+        cmake: [
+          {generator: "Xcode", config: Release, build_target: ALL_BUILD, test_target: RUN_TESTS, install_target: install},
+          {generator: "Unix Makefiles", config: Release, build_target: all, test_target: test, install_target: install},
+        ]
+      fail-fast: false
+    name: MacOS • ${{ matrix.cmake.generator }} • Java
+    runs-on: macos-latest # macos arm64 based runner
+    steps:
+      - uses: actions/checkout@v4
+      - name: Swig install
+        run: |
+          brew install swig
+          swig -version
+      - name: Check java
+        run: java -version
+      - name: Check cmake
+        run: cmake --version
+      - name: Configure
+        run: >
+          cmake -S. 
-Bbuild + -G "${{ matrix.cmake.generator }}" + -DCMAKE_BUILD_TYPE=${{ matrix.cmake.config }} + -DBUILD_CXX_SAMPLES=OFF -DBUILD_CXX_EXAMPLES=OFF + -DBUILD_JAVA=ON -DSKIP_GPG=ON + -DCMAKE_INSTALL_PREFIX=install + - name: Build + run: > + cmake --build build + --config ${{ matrix.cmake.config }} + --target ${{ matrix.cmake.build_target }} + -v -j2 + - name: Test + run: > + CTEST_OUTPUT_ON_FAILURE=1 + cmake --build build + --config ${{ matrix.cmake.config }} + --target ${{ matrix.cmake.test_target }} + -v + - name: Install + run: > + cmake --build build + --config ${{ matrix.cmake.config }} + --target ${{ matrix.cmake.install_target }} + -v + \ No newline at end of file diff --git a/.github/workflows/amd64_linux_cmake_python.yml b/.github/not_used/arm64_macos_cmake_python.yml similarity index 74% rename from .github/workflows/amd64_linux_cmake_python.yml rename to .github/not_used/arm64_macos_cmake_python.yml index 68ac73ad65c..7cd90596f4a 100644 --- a/.github/workflows/amd64_linux_cmake_python.yml +++ b/.github/not_used/arm64_macos_cmake_python.yml @@ -1,5 +1,5 @@ # ref: https://github.com/actions/runner-images -name: amd64 Linux CMake Python +name: arm64 MacOS CMake Python on: [push, pull_request, workflow_dispatch] @@ -9,35 +9,32 @@ jobs: strategy: matrix: cmake: [ - {generator: "Ninja", config: Release, build_target: all, test_target: test, install_target: install}, - {generator: "Ninja Multi-Config", config: Release, build_target: all, test_target: test, install_target: install}, + {generator: "Xcode", config: Release, build_target: ALL_BUILD, test_target: RUN_TESTS, install_target: install}, {generator: "Unix Makefiles", config: Release, build_target: all, test_target: test, install_target: install}, ] python: [ {version: "3.9"}, {version: "3.10"}, {version: "3.11"}, - {version: "3.12"}, + {version: "3.12"} ] fail-fast: false - name: Linux • ${{ matrix.cmake.generator }} • Python-${{ matrix.python.version }} - runs-on: ubuntu-latest + name: MacOS • ${{ matrix.cmake.generator }} • Python-${{ matrix.python.version }} + runs-on: macos-latest # macos arm64 based runner steps: - uses: actions/checkout@v4 - - name: Install Ninja - run: | - sudo apt update - sudo apt install -y ninja-build - name: Swig install run: | - sudo apt install -y swig + brew install swig swig -version - name: Setup Python uses: actions/setup-python@v4 with: python-version: ${{ matrix.python.version }} - name: Update Path - run: echo "$HOME/.local/bin" >> $GITHUB_PATH + run: | + echo "$HOME/Library/Python/${{ matrix.python.version }}/bin" >> $GITHUB_PATH + echo "$HOME/.local/bin" >> $GITHUB_PATH - name: Check cmake run: cmake --version - name: Configure diff --git a/.github/workflows/mips_toolchain.yml b/.github/not_used/mips_toolchain.yml similarity index 90% rename from .github/workflows/mips_toolchain.yml rename to .github/not_used/mips_toolchain.yml index b3fbf09cc34..0234f00f21b 100644 --- a/.github/workflows/mips_toolchain.yml +++ b/.github/not_used/mips_toolchain.yml @@ -1,3 +1,4 @@ +# ref: https://codescape.mips.com/components/toolchain/2021.09-01/downloads.html name: mips Toolchain on: [push, pull_request, workflow_dispatch] diff --git a/.github/workflows/powerpc_toolchain.yml b/.github/not_used/powerpc_toolchain.yml similarity index 95% rename from .github/workflows/powerpc_toolchain.yml rename to .github/not_used/powerpc_toolchain.yml index bd210528261..cd64bdb2353 100644 --- a/.github/workflows/powerpc_toolchain.yml +++ b/.github/not_used/powerpc_toolchain.yml @@ -1,3 +1,4 @@ +# ref: 
https://toolchains.bootlin.com/ name: powerpc Toolchain on: [push, pull_request, workflow_dispatch] diff --git a/.github/workflows/Dockerfile b/.github/workflows/Dockerfile deleted file mode 100644 index 4b93054b8d1..00000000000 --- a/.github/workflows/Dockerfile +++ /dev/null @@ -1,8 +0,0 @@ -# Create a virtual environment with all tools installed -# ref: https://hub.docker.com/_/alpine -FROM alpine:edge -# Install system build dependencies -RUN apk add --no-cache git clang-extra-tools -RUN apk add --no-cache python3 py3-pip \ -&& rm -f /usr/lib/python3.*/EXTERNALLY-MANAGED \ -&& python3 -m pip install black diff --git a/.github/workflows/amd64_docker_bazel.yml b/.github/workflows/amd64_docker_bazel.yml deleted file mode 100644 index 69ac67c7fac..00000000000 --- a/.github/workflows/amd64_docker_bazel.yml +++ /dev/null @@ -1,23 +0,0 @@ -# ref: https://github.com/actions/runner-images -name: amd64 Docker Bazel - -on: [push, pull_request, workflow_dispatch] - -jobs: - bazel: - strategy: - matrix: - distro: [archlinux, debian, fedora, opensuse, ubuntu] - fail-fast: false - name: ${{ matrix.distro }} • Bazel • C++ - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Build env image - run: make --directory=bazel ${{ matrix.distro }}_env - - name: Build devel image - run: make --directory=bazel ${{ matrix.distro }}_devel - - name: Build project - run: make --directory=bazel ${{ matrix.distro }}_build - - name: Test project - run: make --directory=bazel ${{ matrix.distro }}_test diff --git a/.github/workflows/amd64_linux_cmake_cpp.yml b/.github/workflows/amd64_linux_cmake_cpp.yml index c54acef4ef9..233f9175473 100644 --- a/.github/workflows/amd64_linux_cmake_cpp.yml +++ b/.github/workflows/amd64_linux_cmake_cpp.yml @@ -6,6 +6,10 @@ on: [push, pull_request, workflow_dispatch] # Building using the github runner environement directly. jobs: native: + env: + KNITRODIR: ${{ github.workspace }}/knitro_distrib/knitro-14.0.0-Linux64 + ARTELYS_LICENSE: ${{ github.workspace }}/knitro_distrib + OR_ROOT: ${{ github.workspace }} strategy: matrix: cmake: [ @@ -18,6 +22,35 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 + - name: Checkout Knitro Linux + uses: actions/checkout@v4 + with: + repository: yojvr/knitro1400-Linux64 + path: knitro_distrib + token: ${{ secrets.KNITRO_DISTRIB }} + - name: Untar large files + run: | + cd ${{ env.KNITRODIR }} + cd knitromatlab + tar -xzf knitromatlab_mex.mexa64.tar.gz + rm knitromatlab_mex.mexa64.tar.gz + cd ../knitroampl + tar -xzf knitroampl.tar.gz + rm knitroampl.tar.gz + cd ../lib + tar -xzf libknitro1400.a.tar.gz + tar -xzf libknitro1400.so.tar.gz + rm libknitro1400.*.tar.gz + - name: Copy license + run: | + echo "${{ secrets.KNITRO_DISTRIB_LICENSE }}" >> ${{ env.ARTELYS_LICENSE }}/artelys_lic_cicd.txt + - name: Check Knitro install + run: | + ls -l ${{ env.ARTELYS_LICENSE }} + ls -l ${{ env.KNITRODIR }} + ls -l ${{ env.KNITRODIR }}/lib + ls -l ${{ env.KNITRODIR }}/knitroampl + ls -l ${{ env.KNITRODIR }}/knitromatlab - name: Install Ninja run: | sudo apt-get update diff --git a/.github/workflows/amd64_linux_cmake_glpk_on.yml b/.github/workflows/amd64_linux_cmake_glpk_on.yml deleted file mode 100644 index ba6acab4fac..00000000000 --- a/.github/workflows/amd64_linux_cmake_glpk_on.yml +++ /dev/null @@ -1,41 +0,0 @@ -# ref: https://github.com/actions/runner-images -name: amd64 Linux CMake GLPK ON - -on: [push, pull_request, workflow_dispatch] - -# Building using the github runner environement directly. 
-jobs: - native: - name: Linux • CMake • GLPK ON - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Check cmake - run: cmake --version - - name: Configure - run: > - cmake -S. -Bbuild - -DCMAKE_BUILD_TYPE=Release - -DBUILD_DEPS=ON - -DUSE_SCIP=OFF -DUSE_COINOR=OFF - -DUSE_GLPK=ON - - name: Build - run: > - cmake --build build - --config Release - --target all - -v -j2 - - name: Test - run: > - CTEST_OUTPUT_ON_FAILURE=1 - cmake --build build - --config Release - --target test - -v - - name: Install - run: > - cmake --build build - --config Release - --target install - -v - -- DESTDIR=install diff --git a/.github/workflows/amd64_linux_cmake_java.yml b/.github/workflows/amd64_linux_cmake_java.yml index 27ef4936389..edf3840ca7a 100644 --- a/.github/workflows/amd64_linux_cmake_java.yml +++ b/.github/workflows/amd64_linux_cmake_java.yml @@ -43,4 +43,4 @@ jobs: cmake --build build --config Release --target install - -v + -v \ No newline at end of file diff --git a/.github/workflows/amd64_linux_cmake_no_lp_parser.yml b/.github/workflows/amd64_linux_cmake_no_lp_parser.yml deleted file mode 100644 index 9da054149ef..00000000000 --- a/.github/workflows/amd64_linux_cmake_no_lp_parser.yml +++ /dev/null @@ -1,40 +0,0 @@ -# ref: https://github.com/actions/runner-images -name: amd64 Linux CMake C++ LP_PARSER OFF - -on: [push, pull_request, workflow_dispatch] - -# Building using the github runner environement directly. -jobs: - native: - name: Linux • CMake • LP_PARSER OFF - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Check cmake - run: cmake --version - - name: Configure - run: > - cmake -S. -Bbuild - -DCMAKE_BUILD_TYPE=Release - -DBUILD_DEPS=ON - -DBUILD_LP_PARSER=OFF - - name: Build - run: > - cmake --build build - --config Release - --target all - -v -j2 - - name: Test - run: > - CTEST_OUTPUT_ON_FAILURE=1 - cmake --build build - --config Release - --target test - -v - - name: Install - run: > - cmake --build build - --config Release - --target install - -v - -- DESTDIR=install diff --git a/.github/workflows/amd64_linux_cmake_system_deps.yml b/.github/workflows/amd64_linux_cmake_system_deps.yml deleted file mode 100644 index c818ca9b149..00000000000 --- a/.github/workflows/amd64_linux_cmake_system_deps.yml +++ /dev/null @@ -1,39 +0,0 @@ -# Test or-tools using system wide install dependencies. 
-name: amd64 Linux CMake System Dependencies - -on: [push, pull_request, workflow_dispatch] - -jobs: - native: - strategy: - matrix: - distro: [system_deps] - lang: [cpp, python, dotnet, java] - fail-fast: false - name: Archlinux • CMake • ${{ matrix.lang }} • BUILD_DEPS OFF - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Build base image - run: make --directory=cmake ${{ matrix.distro }}_base - - name: Build env image - run: make --directory=cmake ${{ matrix.distro }}_${{ matrix.lang }}_env - - name: Build devel image - run: make --directory=cmake ${{ matrix.distro }}_${{ matrix.lang }}_devel - - name: Build project - run: make --directory=cmake ${{ matrix.distro }}_${{ matrix.lang }}_build - - name: Test project - run: make --directory=cmake ${{ matrix.distro }}_${{ matrix.lang }}_test - - - name: Build Install env image - run: make --directory=cmake ${{ matrix.distro }}_${{ matrix.lang }}_install_env - - name: Build Install devel image - run: make --directory=cmake ${{ matrix.distro }}_${{ matrix.lang }}_install_devel - - name: Build Install - run: make --directory=cmake ${{ matrix.distro }}_${{ matrix.lang }}_install_build - - name: Test Install - run: make --directory=cmake ${{ matrix.distro }}_${{ matrix.lang }}_install_test - -# TODO(user): Add macOS + brew job -# TODO(user): Add Windows + choco/vcpkg job - diff --git a/.github/workflows/amd64_web.yml b/.github/workflows/amd64_web.yml deleted file mode 100644 index 06150646dcc..00000000000 --- a/.github/workflows/amd64_web.yml +++ /dev/null @@ -1,23 +0,0 @@ -# ref: https://github.com/docker-library/official-images -name: amd64 Web - -on: [push, pull_request, workflow_dispatch] - -jobs: - emscripten: - name: Linux • Emscripten - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Check docker - run: | - docker info - docker buildx ls - - name: Build env image - run: make --directory=cmake web_env - - name: Build devel project - run: make --directory=cmake web_devel - - name: Build project - run: make --directory=cmake web_build - - name: Test project - run: make --directory=cmake web_test diff --git a/.github/workflows/amd64_windows_cmake_cpp.yml b/.github/workflows/amd64_windows_cmake_cpp.yml index f970d6c4475..334255746b7 100644 --- a/.github/workflows/amd64_windows_cmake_cpp.yml +++ b/.github/workflows/amd64_windows_cmake_cpp.yml @@ -6,6 +6,10 @@ on: [push, pull_request, workflow_dispatch] # Building using the github runner environement directly. 
jobs: native: + env : + KNITRODIR: ${{ github.workspace }}\knitro_distrib\knitro-14.0.0-Win64 + ARTELYS_LICENSE: ${{ github.workspace }}\knitro_distrib + OR_ROOT: ${{ github.workspace }} strategy: matrix: cmake: [ @@ -15,7 +19,36 @@ jobs: name: Windows • ${{ matrix.cmake.generator }} • C++ runs-on: windows-latest steps: - - uses: actions/checkout@v4 + - name: Checkout or-tools + uses: actions/checkout@v4 + - name: Checkout Knitro Windows + uses: actions/checkout@v4 + with: + repository: yojvr/knitro1400-Win64 + path: knitro_distrib + token: ${{ secrets.KNITRO_DISTRIB }} + - name: Unzip + run: | + 7z x "${{ env.KNITRODIR }}/knitroampl/knitroampl.zip" -o"${{ env.KNITRODIR }}/knitroampl" + 7z x "${{ env.KNITRODIR }}/knitromatlab/knitromatlab_mex.zip" -o"${{ env.KNITRODIR }}/knitromatlab" + 7z x "${{ env.KNITRODIR }}/lib/knitro.zip" -o"${{ env.KNITRODIR }}/lib" + rm ${{ env.KNITRODIR }}/knitroampl/knitroampl.zip + rm ${{ env.KNITRODIR }}/knitromatlab/knitromatlab_mex.zip + rm ${{ env.KNITRODIR }}/lib/knitro.zip + - name: Copy license + run: | + echo "${{ secrets.KNITRO_DISTRIB_LICENSE }}" >> ${{ env.ARTELYS_LICENSE }}/artelys_lic_cicd.txt + - name: Grant access to knitro distrib + run: | + icacls ${{ env.KNITRODIR }} /grant Everyone:RX /t + - name: Check what I have + run: | + ls -l . + ls -l ${{ env.ARTELYS_LICENSE }} + ls -l ${{ env.KNITRODIR }} + ls -l ${{ env.KNITRODIR }}/knitroampl/ + ls -l ${{ env.KNITRODIR }}/knitromatlab/ + ls -l ${{ env.KNITRODIR }}/lib/ - name: Check cmake run: | cmake --version @@ -26,6 +59,8 @@ jobs: -G "${{ matrix.cmake.generator }}" -DCMAKE_BUILD_TYPE=${{ matrix.cmake.config }} -DBUILD_DEPS=ON + -DUSE_KNITRO=ON + -DBUILD_TESTING=ON -DCMAKE_INSTALL_PREFIX=install - name: Build run: > diff --git a/.github/workflows/amd64_windows_cmake_java.yml b/.github/workflows/amd64_windows_cmake_java.yml index f1ac34155f5..48dfbff7191 100644 --- a/.github/workflows/amd64_windows_cmake_java.yml +++ b/.github/workflows/amd64_windows_cmake_java.yml @@ -42,6 +42,7 @@ jobs: -v -j2 - name: Test run: > + CTEST_OUTPUT_ON_FAILURE=1 cmake --build build --config ${{ matrix.cmake.config }} --target ${{ matrix.cmake.test_target }} @@ -51,4 +52,4 @@ jobs: cmake --build build --config ${{ matrix.cmake.config }} --target ${{ matrix.cmake.install_target }} - -v + -v \ No newline at end of file diff --git a/.github/workflows/check_format.yml b/.github/workflows/check_format.yml deleted file mode 100644 index 16bedf598bb..00000000000 --- a/.github/workflows/check_format.yml +++ /dev/null @@ -1,24 +0,0 @@ -name: Check Format - -on: [push, pull_request, workflow_dispatch] - -jobs: - # Building using the github runner environement directly. - clang-format: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Fetch origin/main - run: git fetch origin main - - name: List of changed file(s) - run: git diff --name-only FETCH_HEAD - - - name: Build clang-format docker - run: cd .github/workflows && docker build --tag=linter . 
-      - name: Check clang-format
-        run: docker run --rm --init -v $(pwd):/repo linter:latest clang-format --version
-      - name: clang-format help
-        run: docker run --rm --init -v $(pwd):/repo linter:latest clang-format --help
-
-      - name: Check current commit
-        run: docker run --rm --init -v $(pwd):/repo -w /repo linter:latest sh -c "git diff --diff-filter=d --name-only FETCH_HEAD | grep '\.c$\|\.h$\|\.cc$\|\.java$\|\.cs$' | xargs clang-format --verbose --style=file --dry-run --Werror "
diff --git a/.github/workflows/linux_knitro_tests.yml b/.github/workflows/linux_knitro_tests.yml
new file mode 100644
index 00000000000..9dec24e6029
--- /dev/null
+++ b/.github/workflows/linux_knitro_tests.yml
@@ -0,0 +1,68 @@
+# ref: https://github.com/actions/runner-images
+name: Linux Knitro Test
+
+on: [push, pull_request, workflow_dispatch]
+
+# Building using the github runner environment directly.
+jobs:
+  native:
+    env:
+      KNITRODIR: ${{ github.workspace }}/knitro_distrib/knitro-14.0.0-Linux64
+      ARTELYS_LICENSE: ${{ github.workspace }}/knitro_distrib
+      OR_ROOT: ${{ github.workspace }}
+    strategy:
+      matrix:
+        cmake: [
+          {generator: "Unix Makefiles", config: "Release", binDir: ""},
+          {generator: "Ninja", config: "Release", binDir: ""},
+          {generator: "Ninja Multi-Config", config: "Release", binDir: "RELEASE"},
+        ]
+      fail-fast: false
+    name: Linux • Knitro Test • C++
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - name: Checkout Knitro Linux
+        uses: actions/checkout@v4
+        with:
+          repository: yojvr/knitro1400-Linux64
+          path: knitro_distrib
+          token: ${{ secrets.KNITRO_DISTRIB }}
+      - name: Untar large files
+        run: |
+          cd ${{ env.KNITRODIR }}
+          cd knitromatlab
+          tar -xzf knitromatlab_mex.mexa64.tar.gz
+          rm knitromatlab_mex.mexa64.tar.gz
+          cd ../knitroampl
+          tar -xzf knitroampl.tar.gz
+          rm knitroampl.tar.gz
+          cd ../lib
+          tar -xzf libknitro1400.a.tar.gz
+          tar -xzf libknitro1400.so.tar.gz
+          rm libknitro1400.*.tar.gz
+      - name: Copy license
+        run: |
+          echo "${{ secrets.KNITRO_DISTRIB_LICENSE }}" >> ${{ env.ARTELYS_LICENSE }}/artelys_lic_cicd.txt
+      - name: Install Ninja
+        run: |
+          sudo apt-get update
+          sudo apt-get install ninja-build
+      - name: Check cmake
+        run: cmake --version
+      - name: Configure
+        run: >
+          cmake -S. -Bbuild
+          -G "${{ matrix.cmake.generator }}"
+          -DCMAKE_BUILD_TYPE=${{ matrix.cmake.config }}
+          -DBUILD_DEPS=ON
+          -DCMAKE_INSTALL_PREFIX=install
+      - name: Build
+        run: >
+          cmake --build build
+          --config ${{ matrix.cmake.config }}
+          --target all
+          -v -j2
+      - name: Run Knitro Test
+        run: >
+          ./build/${{ matrix.cmake.binDir }}/bin/test_knitro_interface
diff --git a/.github/workflows/windows_knitro_tests.yml b/.github/workflows/windows_knitro_tests.yml
new file mode 100644
index 00000000000..a0e4c83713f
--- /dev/null
+++ b/.github/workflows/windows_knitro_tests.yml
@@ -0,0 +1,72 @@
+# ref: https://github.com/actions/runner-images
+name: Windows Knitro Tests
+
+on: [push, pull_request, workflow_dispatch]
+
+# Building using the github runner environment directly.
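+# Note: as in the Linux variant above, this assumes access to the private
+# yojvr/knitro1400-Win64 distrib repository (via the KNITRO_DISTRIB token)
+# and a license stored in the KNITRO_DISTRIB_LICENSE secret.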
+jobs: + native: + env : + KNITRODIR: ${{ github.workspace }}\knitro_distrib\knitro-14.0.0-Win64 + ARTELYS_LICENSE: ${{ github.workspace }}\knitro_distrib + OR_ROOT: ${{ github.workspace }} + strategy: + matrix: + cmake: [ + {generator: "Visual Studio 17 2022", config: Release, build_target: ALL_BUILD, test_target: RUN_TESTS, install_target: INSTALL}, + ] + fail-fast: false + name: Windows • Knitro Test • C++ + runs-on: windows-latest + steps: + - name: Checkout or-tools + uses: actions/checkout@v4 + - name: Checkout Knitro Windows + uses: actions/checkout@v4 + with: + repository: yojvr/knitro1400-Win64 + path: knitro_distrib + token: ${{ secrets.KNITRO_DISTRIB }} + - name: Unzip + run: | + unzip "${{ env.KNITRODIR }}/knitroampl/knitroampl.zip" -d "${{ env.KNITRODIR }}/knitroampl" + unzip "${{ env.KNITRODIR }}/knitromatlab/knitromatlab_mex.zip" -d "${{ env.KNITRODIR }}/knitromatlab" + unzip "${{ env.KNITRODIR }}/lib/knitro.zip" -d "${{ env.KNITRODIR }}/lib" + rm ${{ env.KNITRODIR }}/knitroampl/knitroampl.zip + rm ${{ env.KNITRODIR }}/knitromatlab/knitromatlab_mex.zip + rm ${{ env.KNITRODIR }}/lib/knitro.zip + - name: Copy license + run: | + echo "${{ secrets.KNITRO_DISTRIB_LICENSE }}" >> ${{ env.ARTELYS_LICENSE }}/artelys_lic_cicd.txt + - name: Grant access to knitro distrib + run: | + icacls ${{ env.KNITRODIR }} /grant Everyone:RX /t + - name: Check cmake + run: | + cmake --version + cmake -G || true + - name: Configure + run: > + cmake -S. -Bbuild + -G "${{ matrix.cmake.generator }}" + -DCMAKE_BUILD_TYPE=${{ matrix.cmake.config }} + -DBUILD_DEPS=ON + -DUSE_KNITRO=ON + -DBUILD_TESTING=ON + - name: Build + run: > + cmake --build build + --config ${{ matrix.cmake.config }} + --target ${{ matrix.cmake.build_target }} + -v -j2 + - name: Check Build dir + run: | + ls -l . + ls -l ${{ env.ARTELYS_LICENSE }} + ls -l ${{ env.KNITRODIR }} + ls -l ${{ env.KNITRODIR }}/knitroampl/ + ls -l ${{ env.KNITRODIR }}/knitromatlab/ + ls -l ${{ env.KNITRODIR }}/lib/ + - name: Run Knitro Test + run: > + ./build/RELEASE/bin/test_knitro_interface diff --git a/.gitignore b/.gitignore index 54694e398c8..596f11b08a6 100644 --- a/.gitignore +++ b/.gitignore @@ -103,3 +103,7 @@ CMakeFiles DartConfiguration.tcl *build*/* build/ + +# workflow stuff +.github/workflows/hyrae/ +.github/workflows/not_used/ \ No newline at end of file diff --git a/CMakeLists.txt b/CMakeLists.txt index 9f254ba5798..3ef4917a2c9 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -12,7 +12,7 @@ # limitations under the License. 
# This file is just an orchestration -cmake_minimum_required(VERSION 3.18) +cmake_minimum_required(VERSION 3.20) list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake") include(utils) @@ -196,8 +196,13 @@ if(BUILD_TESTING) CMAKE_DEPENDENT_OPTION(BUILD_googletest "Build googletest" OFF "NOT BUILD_DEPS" ON) message(STATUS "Build googletest: ${BUILD_googletest}") + + CMAKE_DEPENDENT_OPTION(BUILD_benchmark "Build benchmark" OFF + "NOT BUILD_DEPS" ON) + message(STATUS "Build benchmark: ${BUILD_benchmark}") else() set(BUILD_googletest OFF) + set(BUILD_benchmark OFF) endif() # Optional third party solvers (enabled by default) @@ -254,7 +259,7 @@ message(STATUS "Gurobi support: ${USE_GUROBI}") ## HiGHS # see: https://github.com/ERGO-Code/HiGHS -CMAKE_DEPENDENT_OPTION(USE_HIGHS "Use the HiGHS solver" OFF "BUILD_CXX" OFF) +CMAKE_DEPENDENT_OPTION(USE_HIGHS "Use the HiGHS solver" ON "BUILD_CXX" OFF) message(STATUS "HiGHS support: ${USE_HIGHS}") if(USE_HIGHS) CMAKE_DEPENDENT_OPTION(BUILD_HIGHS "Build the HiGHS dependency Library" OFF @@ -291,6 +296,10 @@ message(STATUS "CPLEX support: ${USE_CPLEX}") CMAKE_DEPENDENT_OPTION(USE_XPRESS "Use the Xpress solver" ON "BUILD_CXX" OFF) message(STATUS "Xpress support: ${USE_XPRESS}") +## KNITRO +CMAKE_DEPENDENT_OPTION(USE_KNITRO "Use the Knitro solver" ON "BUILD_CXX" OFF) +message(STATUS "Knitro support: ${USE_KNITRO}") + # Language specific options if(BUILD_CXX) CMAKE_DEPENDENT_OPTION(BUILD_CXX_DOC "Build the C++ doc" OFF "NOT BUILD_DOC" ON) @@ -393,35 +402,17 @@ CMAKE_DEPENDENT_OPTION(BUILD_PYTHON_DOC "Build the Python doc" OFF "NOT BUILD_DO message(STATUS "Python: Fetch dependencies: ${FETCH_PYTHON_DEPS}") endif() +# Find system deps +include(system_deps) + # Build Needed dependencies add_subdirectory(cmake/dependencies dependencies) list(APPEND CMAKE_PREFIX_PATH ${CMAKE_CURRENT_BINARY_DIR}/dependencies/install) -# Basic type -include(CMakePushCheckState) -cmake_push_check_state(RESET) -set(CMAKE_EXTRA_INCLUDE_FILES "cstdint") -include(CheckTypeSize) -check_type_size("long" SIZEOF_LONG LANGUAGE CXX) -message(STATUS "Found long size: ${SIZEOF_LONG}") -check_type_size("long long" SIZEOF_LONG_LONG LANGUAGE CXX) -message(STATUS "Found long long size: ${SIZEOF_LONG_LONG}") -check_type_size("int64_t" SIZEOF_INT64_T LANGUAGE CXX) -message(STATUS "Found int64_t size: ${SIZEOF_INT64_T}") - -check_type_size("unsigned long" SIZEOF_ULONG LANGUAGE CXX) -message(STATUS "Found unsigned long size: ${SIZEOF_ULONG}") -check_type_size("unsigned long long" SIZEOF_ULONG_LONG LANGUAGE CXX) -message(STATUS "Found unsigned long long size: ${SIZEOF_ULONG_LONG}") -check_type_size("uint64_t" SIZEOF_UINT64_T LANGUAGE CXX) -message(STATUS "Found uint64_t size: ${SIZEOF_UINT64_T}") - -check_type_size("int *" SIZEOF_INT_P LANGUAGE CXX) -message(STATUS "Found int * size: ${SIZEOF_INT_P}") -cmake_pop_check_state() - include(host) -include(deps) +# verify deps +include(check_deps) + include(cpp) include(flatzinc) include(glop) diff --git a/Dependencies.txt b/Dependencies.txt index fea7be5644e..3644d3f8ddb 100644 --- a/Dependencies.txt +++ b/Dependencies.txt @@ -1,18 +1,19 @@ -abseil-cpp=20240116.1 -Protobuf=v25.3 +ZLIB=1.3.1 +abseil-cpp=20240722.0 +Protobuf=v26.1 Eigen=3.4.0 -Re2=2024-02-01 +Re2=2024-04-01 CoinUtils=2.11.6 Osi=0.108.7 Clp=1.17.7 Cgl=0.60.5 Cbc=2.10.7 GLPK=5.0 -HiGHS=v1.6.0 -Scip=v810 +HiGHS=v1.7.2 +Scip=v900 # Python -pybind11=v2.11.1 +pybind11=v2.12.0 pybind11_abseil=52f2739 pybind11_protobuf=3b11990 # Testing -googletest=v1.14.0 +googletest=v1.15.2 diff --git 
a/Version.txt b/Version.txt index 90a08d0df4f..3d5f981049e 100644 --- a/Version.txt +++ b/Version.txt @@ -1,3 +1,3 @@ OR_TOOLS_MAJOR=9 -OR_TOOLS_MINOR=9 +OR_TOOLS_MINOR=11 #PRE_RELEASE=YES diff --git a/WORKSPACE b/WORKSPACE index 2fec7dbcb5d..a399d160b52 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -20,7 +20,7 @@ load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository", "new_git_r ## Bazel Skylib rules. git_repository( name = "bazel_skylib", - tag = "1.5.0", + tag = "1.7.1", remote = "https://github.com/bazelbuild/bazel-skylib.git", ) load("@bazel_skylib//:workspace.bzl", "bazel_skylib_workspace") @@ -29,7 +29,7 @@ bazel_skylib_workspace() ## Bazel rules. git_repository( name = "platforms", - tag = "0.0.8", + tag = "0.0.10", remote = "https://github.com/bazelbuild/platforms.git", ) @@ -47,25 +47,25 @@ git_repository( git_repository( name = "rules_java", - tag = "7.3.2", + tag = "7.7.0", remote = "https://github.com/bazelbuild/rules_java.git", ) git_repository( name = "rules_jvm_external", - tag = "6.0", + tag = "6.2", remote = "https://github.com/bazelbuild/rules_jvm_external.git", ) git_repository( name = "contrib_rules_jvm", - tag = "v0.24.0", + tag = "v0.27.0", remote = "https://github.com/bazel-contrib/rules_jvm.git", ) git_repository( name = "rules_python", - tag = "0.29.0", + tag = "0.34.0", remote = "https://github.com/bazelbuild/rules_python.git", ) @@ -74,35 +74,36 @@ git_repository( new_git_repository( name = "zlib", build_file = "@com_google_protobuf//:third_party/zlib.BUILD", - tag = "v1.2.13", + tag = "v1.3.1", remote = "https://github.com/madler/zlib.git", ) -## Re2 -git_repository( - name = "com_google_re2", - tag = "2024-02-01", - remote = "https://github.com/google/re2.git", -) - ## Abseil-cpp git_repository( name = "com_google_absl", - tag = "20240116.1", - patches = ["//patches:abseil-cpp-20240116.1.patch"], + tag = "20240722.0", + patches = ["//patches:abseil-cpp-20240722.0.patch"], patch_args = ["-p1"], remote = "https://github.com/abseil/abseil-cpp.git", ) +## Re2 +git_repository( + name = "com_google_re2", + tag = "2024-04-01", + remote = "https://github.com/google/re2.git", + repo_mapping = {"@abseil-cpp": "@com_google_absl"}, +) + ## Protobuf # proto_library, cc_proto_library, and java_proto_library rules implicitly # depend on @com_google_protobuf for protoc and proto runtimes. # This statement defines the @com_google_protobuf repo. git_repository( name = "com_google_protobuf", - patches = ["//patches:protobuf-v25.3.patch"], + patches = ["//patches:protobuf-v26.1.patch"], patch_args = ["-p1"], - tag = "v25.3", + tag = "v26.1", remote = "https://github.com/protocolbuffers/protobuf.git", ) # Load common dependencies. 
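The "Change Python in WORKSPACE" steps added to the CI jobs above rewrite the DEFAULT_PYTHON constant that a later hunk of this WORKSPACE introduces (see python_register_toolchains below). A minimal local equivalent, assuming GNU sed at the repository root and 3.11 purely as an example version:

    sed -i 's/\(DEFAULT_PYTHON =\) "3\.[0-9]\+"/\1 "3.11"/' WORKSPACE
    bazel build -c opt //ortools/...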
@@ -129,9 +130,9 @@ http_archive( new_git_repository( name = "scip", build_file = "//bazel:scip.BUILD.bazel", - patches = ["//bazel:scip.patch"], + patches = ["//bazel:scip-v900.patch"], patch_args = ["-p1"], - tag = "v810", + tag = "v900", remote = "https://github.com/scipopt/scip.git", ) @@ -155,7 +156,7 @@ cc_library( git_repository( name = "highs", - branch = "bazel", + branch = "v1.7.2", remote = "https://github.com/ERGO-Code/HiGHS.git", ) @@ -192,6 +193,14 @@ new_git_repository( load("@rules_python//python:repositories.bzl", "py_repositories") py_repositories() +load("@rules_python//python:repositories.bzl", "python_register_toolchains") +DEFAULT_PYTHON = "3.12" +python_register_toolchains( + name = "python3_12", + python_version = DEFAULT_PYTHON, + ignore_root_user_error=True, +) + # Create a central external repo, @pip_deps, that contains Bazel targets for all the # third-party packages specified in the bazel/requirements.txt file. load("@rules_python//python:pip.bzl", "pip_parse") @@ -214,6 +223,13 @@ load("@ortools_notebook_deps//:requirements.bzl", install_notebook_deps="install_deps") install_notebook_deps() +# Protobuf +load("@com_google_protobuf//bazel:system_python.bzl", "system_python") +system_python( + name = "system_python", + minimum_python_version = "3.8", +) + # Absl python library http_archive( name = "com_google_absl_py", @@ -228,7 +244,7 @@ http_archive( ## `pybind11_bazel` git_repository( name = "pybind11_bazel", - commit = "23926b00e2b2eb2fc46b17e587cf0c0cfd2f2c4b", # 2023/11/29 + tag = "v2.12.0", # 2024/04/08 patches = ["//patches:pybind11_bazel.patch"], patch_args = ["-p1"], remote = "https://github.com/pybind/pybind11_bazel.git", @@ -236,14 +252,14 @@ git_repository( new_git_repository( name = "pybind11", - build_file = "@pybind11_bazel//:pybind11.BUILD", - tag = "v2.11.1", + build_file = "@pybind11_bazel//:pybind11-BUILD.bazel", + tag = "v2.13.1", remote = "https://github.com/pybind/pybind11.git", ) new_git_repository( - name = "pybind11_abseil", - commit = "52f27398876a3177049977249e004770bd869e61", # 2024/01/11 + name = "org_pybind11_abseil", + tag = "v202402.0", patches = ["//patches:pybind11_abseil.patch"], patch_args = ["-p1"], remote = "https://github.com/pybind/pybind11_abseil.git", @@ -251,17 +267,10 @@ new_git_repository( new_git_repository( name = "pybind11_protobuf", - commit = "3b11990a99dea5101799e61d98a82c4737d240cc", # 2024/01/04 + commit = "84653a591aea5df482dc2bde42c19efafbd53a57", # 2024/06/28 remote = "https://github.com/pybind/pybind11_protobuf.git", ) -load("@pybind11_bazel//:python_configure.bzl", "python_configure") -python_configure(name = "local_config_python", python_version = "3") -bind( - name = "python_headers", - actual = "@local_config_python//:python_headers", -) - ## Java support (with junit 5) load("@rules_java//java:repositories.bzl", "rules_java_dependencies", "rules_java_toolchains") rules_java_dependencies() @@ -300,12 +309,12 @@ contrib_rules_jvm_setup() ## Testing git_repository( name = "com_google_googletest", - tag = "v1.13.0", + tag = "v1.15.2", remote = "https://github.com/google/googletest.git", ) git_repository( name = "com_google_benchmark", - tag = "v1.8.3", + tag = "v1.8.5", remote = "https://github.com/google/benchmark.git", ) diff --git a/bazel/BUILD.bazel b/bazel/BUILD.bazel index 5a5b1a7416d..21a00f04ab8 100644 --- a/bazel/BUILD.bazel +++ b/bazel/BUILD.bazel @@ -14,22 +14,19 @@ load("@rules_python//python:pip.bzl", "compile_pip_requirements") exports_files([ - "gtest.BUILD", - "glpk.BUILD", - "pcre2.BUILD", 
- "pcre2.patch", - "re2.patch", - "scip.BUILD", - "scip.patch", - "swig.BUILD", - "swig.patch", - "bliss.BUILD", - "bliss-0.73.patch", "archive_helper.bzl", + "bliss-0.73.patch", + "bliss.BUILD.bazel", + "glpk.BUILD.bazel", "notebook_requirements.in", "notebook_requirements.txt", "ortools_requirements.in", "ortools_requirements.txt", + "pcre2.BUILD.bazel", + "scip-v900.patch", + "scip.BUILD.bazel", + "swig.BUILD.bazel", + "swig.patch", ]) compile_pip_requirements( diff --git a/bazel/Makefile b/bazel/Makefile index b047530c34d..e30f70110e2 100644 --- a/bazel/Makefile +++ b/bazel/Makefile @@ -31,32 +31,57 @@ help: @echo -e "${BOLD}MAKE TARGETS${RESET}" @echo -e "\t${BOLD}help${RESET}: display this help and exit." @echo - @echo -e "\t${BOLD}${RESET}: build all docker images." - @echo -e "\t${BOLD}_${RESET}: build the docker image for a specific distro." - @echo -e "\t${BOLD}save_${RESET}: Save all docker images." - @echo -e "\t${BOLD}save__${RESET}: Save the docker image for a specific distro." - @echo -e "\t${BOLD}sh__${RESET}: run a container using the docker image (debug purpose)." + @echo -e "\tBuild using docker and the host platform." + @echo -e "\t${BOLD}_${RESET}: build a docker image for a specific distro." + @echo -e "\t${BOLD}save__${RESET}: Save a docker image for a specific distro." + @echo -e "\t${BOLD}sh__${RESET}: run a container using the docker image specified (debug purpose)." + @echo -e "\t${BOLD}clean__${RESET}: Remove cache and docker image." + @echo -e "\t${BOLD}clean_native${RESET}: Remove ALL caches and docker images." @echo - @echo -e "\t${BOLD}${RESET}:" - @echo -e "\t\t${BOLD}env${RESET}" - @echo -e "\t\t${BOLD}devel${RESET}" - @echo -e "\t\t${BOLD}build${RESET}" - @echo -e "\t\t${BOLD}test${RESET}" - @echo - @echo -e "\t${BOLD}${RESET}:" + @echo -e "\tWith ${BOLD}${RESET}:" + @echo -e "\t\t${BOLD}almalinux${RESET} (latest)" @echo -e "\t\t${BOLD}alpine${RESET} (edge)" @echo -e "\t\t${BOLD}archlinux${RESET} (latest)" - @echo -e "\t\t${BOLD}centos${RESET} (latest)" @echo -e "\t\t${BOLD}debian${RESET} (latest)" @echo -e "\t\t${BOLD}fedora${RESET} (latest)" @echo -e "\t\t${BOLD}opensuse${RESET} (tumbleweed)" + @echo -e "\t\t${BOLD}rockylinux${RESET} (9)" @echo -e "\t\t${BOLD}ubuntu${RESET} (rolling)" + @echo -e "\t\t${BOLD}all${RESET} trigger ALL DISTROS." + @echo + @echo -e "\tWith ${BOLD}${RESET}:" + @echo -e "\t\t${BOLD}env${RESET}" + @echo -e "\t\t${BOLD}devel${RESET}" + @echo -e "\t\t${BOLD}build${RESET}" + @echo -e "\t\t${BOLD}test${RESET}" @echo -e "\te.g. 'make ubuntu_test'" @echo - @echo -e "\t${BOLD}clean${RESET}: Remove cache and ALL docker images." - @echo -e "\t${BOLD}clean_${RESET}: Remove cache and docker images for the specified distro." + @echo -e "\tBuild using docker buildx with a platform specified." + @echo -e "\t${BOLD}_${RESET}: build docker images for ALL DISTROS." + @echo -e "\t${BOLD}__${RESET}: build docker image for a specific distro." + @echo -e "\t${BOLD}save__${RESET}: Save docker images for ALL DISTROS." + @echo -e "\t${BOLD}save___${RESET}: Save the docker image for a specific distro." + @echo -e "\t${BOLD}sh___${RESET}: run a container using the docker image specified (debug purpose)." + @echo -e "\t${BOLD}clean___${RESET}: Remove cache and docker image." + @echo -e "\t${BOLD}clean_platforms${RESET}: Remove ALL cache and docker image." 
+ @echo + @echo -e "\tWith ${BOLD}${RESET}:" + @echo -e "\t\t${BOLD}amd64${RESET}: linux/amd64 (x86_64)" + @echo -e "\t\t${BOLD}arm64${RESET}: linux/arm64 (aarch64, arm64v8)" + @echo -e "\t\t${BOLD}mips64${RESET}: linux/mips64 (mips 64bits)" + @echo -e "\t\t${BOLD}mips64le${RESET}: linux/mips64le (mips 64bits Little Endian)" + @echo -e "\t\t${BOLD}ppc64${RESET}: linux/ppc64 (PowerPC 64Bits)" + @echo -e "\t\t${BOLD}ppc64le${RESET}: linux/ppc64le (PowerPC 64Bits Little Endian)" + @echo -e "\t\t${BOLD}riscv64${RESET}: linux/riscv64 (RISC-V 64bits)" + @echo -e "\te.g. 'make amd64_ubuntu_test'" + @echo -e "\tDocker image unavailable: arm64_archlinux" + @echo + @echo -e "\tGlobal targets." + @echo -e "\t${BOLD}clean${RESET}: Remove ALL caches and docker images." + @echo -e "\t${BOLD}distclean${RESET}: Remove everything." @echo @echo -e "\t${BOLD}NOCACHE=1${RESET}: use 'docker build --no-cache' when building container (default use cache)." + @echo -e "\t${BOLD}VERBOSE=1${RESET}: use 'docker build --progress=plain' when building container." @echo @echo -e "branch: $(BRANCH)" @echo -e "sha1: $(SHA1)" @@ -64,7 +89,7 @@ help: # Need to add cmd_distro to PHONY otherwise target are ignored since they do not # contain recipe (using FORCE do not work here) .PHONY: all -all: build +all: all_test # Delete all implicit rules to speed up makefile MAKEFLAGS += --no-builtin-rules @@ -78,36 +103,52 @@ SUFFIXES := # Docker image name prefix. IMAGE := ${PROJECT}/${BUILD_SYSTEM} -ifdef NOCACHE -DOCKER_BUILD_CMD := docker build --no-cache -else DOCKER_BUILD_CMD := docker build +DOCKER_BUILDX_CMD := docker buildx build +ifdef NOCACHE +DOCKER_BUILD_CMD := ${DOCKER_BUILD_CMD} --no-cache +DOCKER_BUILDX_CMD := ${DOCKER_BUILDX_CMD} --no-cache +endif +ifdef VERBOSE +DOCKER_BUILD_CMD := ${DOCKER_BUILD_CMD} --progress=plain +DOCKER_BUILDX_CMD := ${DOCKER_BUILDX_CMD} --progress=plain endif DOCKER_RUN_CMD := docker run --rm --init --net=host # Currently supported distro -DISTROS := alpine archlinux centos debian fedora opensuse ubuntu +DISTROS := \ + almalinux \ + alpine \ + archlinux \ + debian \ + fedora \ + opensuse \ + rockylinux \ + ubuntu # $* stem # $< first prerequist # $@ target name +############ +## STAGES ## +############ STAGES := env devel build test + define make-stage-target = #$$(info STAGE: $1) -.PHONY: $1 -#$$(info Create targets: $1 $(addsuffix _$1, $(DISTROS)).) +#$$(info Create targets: all_$1 $(addsuffix _$1, $(DISTROS)).) targets_$1 = $(addsuffix _$1, $(DISTROS)) -.PHONY: $(targets_$1) -$1: $$(targets_$1) +.PHONY: all_$1 $$(targets_$1) +all_$1: $$(targets_$1) $$(targets_$1): %_$1: docker/%/Dockerfile #@docker image rm -f ${IMAGE}:$$*_$1 2>/dev/null ${DOCKER_BUILD_CMD} --target=$1 --tag ${IMAGE}:$$*_$1 -f $$< .. -#$$(info Create targets: save_$1 $(addprefix save_, $(addsuffix _$1, $(DISTROS))) (debug).) +#$$(info Create targets: save_all_$1 $(addprefix save_, $(addsuffix _$1, $(DISTROS))) (debug).) save_targets_$1 = $(addprefix save_, $(addsuffix _$1, $(DISTROS))) -.PHONY: save_$1 $(save_targets_$1) -save_$1: $$(save_targets_$1) +.PHONY: save_all_$1 $$(save_targets_$1) +save_all_$1: $$(save_targets_$1) $$(save_targets_$1): save_%_$1: cache/%/docker_$1.tar cache/%/docker_$1.tar: %_$1 @rm -f $$@ @@ -116,14 +157,14 @@ cache/%/docker_$1.tar: %_$1 #$$(info Create targets: $(addprefix sh_, $(addsuffix _$1, $(DISTROS))) (debug).) 
sh_targets_$1 = $(addprefix sh_, $(addsuffix _$1, $(DISTROS))) -.PHONY: $(sh_targets_$1) +.PHONY: $$(sh_targets_$1) $$(sh_targets_$1): sh_%_$1: %_$1 ${DOCKER_RUN_CMD} -it --name ${PROJECT}_${BUILD_SYSTEM}_$$*_$1 ${IMAGE}:$$*_$1 -#$$(info Create targets: clean_$1 $(addprefix clean_, $(addsuffix _$1, $(DISTROS))).) +#$$(info Create targets: clean_all_$1 $(addprefix clean_, $(addsuffix _$1, $(DISTROS))).) clean_targets_$1 = $(addprefix clean_, $(addsuffix _$1, $(DISTROS))) -.PHONY: clean_$1 $(clean_targets_$1) -clean_$1: $$(clean_targets_$1) +.PHONY: clean_all_$1 $$(clean_targets_$1) +clean_all_$1: $$(clean_targets_$1) $$(clean_targets_$1): clean_%_$1: docker image rm -f ${IMAGE}:$$*_$1 2>/dev/null rm -f cache/$$*/docker_$1.tar @@ -131,15 +172,83 @@ endef $(foreach stage,$(STAGES),$(eval $(call make-stage-target,$(stage)))) +## MERGE ## +.PHONY: clean_all +clean_all: $(addprefix clean_all_, $(STAGES)) + rm -f $(addprefix cache/, $(DISTROS)) + +############## +## PLATFORM ## +############## +# ref: https://go.dev/doc/install/source#environment +# ref: https://github.com/containerd/containerd/blob/269548fa27e0089a8b8278fc4fc781d7f65a939b/platforms/platforms.go#L80-L94 +PLATFORMS := \ + amd64 \ + arm64 \ + mips64 mips64le \ + ppc64 ppc64le \ + riscv64 + +define make-platform-stage-target = +#$$(info PLATFORM: '$1' STAGE: '$2') +#$$(info Create targets: $1_all_$2 $(addprefix $1_, $(addsuffix _$2, $(DISTROS))).) +targets_$1_$2 = $(addprefix $1_, $(addsuffix _$2, $(DISTROS))) +.PHONY: $1_all_$2 $$(targets_$1_$2) +$1_all_$2: $$(targets_$1_$2) +$$(targets_$1_$2): $1_%_$2: docker/%/Dockerfile + #@docker image rm -f ${IMAGE}:$1_$$*_$2 2>/dev/null + ${DOCKER_BUILDX_CMD} --platform linux/$1 --target=$2 --tag ${IMAGE}:$1_$$*_$2 -f $$< .. + +#$$(info Create targets: save_$1_all_$2 $(addprefix save_$1_, $(addsuffix _$2, $(DISTROS))) (debug).) +save_targets_$1_$2 = $(addprefix save_$1_, $(addsuffix _$2, $(DISTROS))) +.PHONY: save_$1_all_$2 $$(save_targets_$1_$2) +save_$1_all_$2: $$(save_targets_$1_$2) +$$(save_targets_$1_$2): save_$1_%_$2: cache/$1/%/docker_$2.tar +cache/$1/%/docker_$2.tar: $1_%_$2 + @rm -f $$@ + mkdir -p cache/$1/$$* + docker save ${IMAGE}:$1_$$*_$2 -o $$@ + +#$$(info Create targets: $(addprefix sh_$1_, $(addsuffix _$2, $(DISTROS))) (debug).) +sh_targets_$1_$2 = $(addprefix sh_$1_, $(addsuffix _$2, $(DISTROS))) +.PHONY: $$(sh_targets_$1_$2) +$$(sh_targets_$1_$2): sh_$1_%_$2: $1_%_$2 + ${DOCKER_RUN_CMD} --platform linux/$1 -it --name ${PROJECT}_${BUILD_SYSTEM}_$1_$$*_$2 ${IMAGE}:$1_$$*_$2 + +#$$(info Create targets: clean_$1_all_$2 $(addprefix clean_$1_, $(addsuffix _$2, $(DISTROS))).) 
+clean_targets_$1_$2 = $(addprefix clean_$1_, $(addsuffix _$2, $(DISTROS))) +.PHONY: clean_$1_all_$2 $$(clean_targets_$1_$2) +clean_$1_all_$2: $$(clean_targets_$1_$2) +$$(clean_targets_$1_$2): clean_$1_%_$2: + docker image rm -f ${IMAGE}:$1_$$*_$2 2>/dev/null + rm -f cache/$1/$$*/docker_$2.tar +endef + +define make-platform-target = +#$$(info PLATFORM: $1) +$(foreach stage,$(STAGES),$(eval $(call make-platform-stage-target,$1,$(stage)))) + +# merge +.PHONY: clean_$1 +clean_$1: $(addprefix clean_$1_all_, $(STAGES)) + -rmdir $(addprefix cache/$1/, $(DISTROS)) + -rmdir cache/$1 +endef + +$(foreach platform,$(PLATFORMS),$(eval $(call make-platform-target,$(platform)))) + +## MERGE ## +.PHONY: clean_platforms +clean_platforms: $(addprefix clean_, $(PLATFORMS)) + +########### ## CLEAN ## -clean_targets = $(addprefix clean_, $(DISTROS)) -.PHONY: clean $(clean_targets) -clean: $(clean_targets) +########### +.PHONY: clean +clean: clean_all clean_platforms docker container prune -f docker image prune -f -rmdir cache -$(clean_targets): clean_%: $(addprefix clean_%_, $(STAGES)) - -rmdir cache/$* .PHONY: distclean distclean: clean diff --git a/bazel/docker/almalinux/Dockerfile b/bazel/docker/almalinux/Dockerfile new file mode 100644 index 00000000000..bbd545b078b --- /dev/null +++ b/bazel/docker/almalinux/Dockerfile @@ -0,0 +1,51 @@ +# Create a virtual environment with all tools installed +# ref: https://hub.docker.com/_/almalinux +FROM almalinux:latest AS env + +# Install system build dependencies +ENV PATH=/usr/local/bin:$PATH +RUN dnf -y update \ +&& dnf -y install git wget zlib-devel gcc-toolset-13 \ +&& dnf -y groupinstall "Development Tools" \ +&& dnf clean all \ +&& rm -rf /var/cache/dnf + +RUN echo "source /opt/rh/gcc-toolset-13/enable" >> /etc/bashrc +SHELL ["/bin/bash", "--login", "-c"] + +# Install Bazelisk +RUN wget \ +https://github.com/bazelbuild/bazelisk/releases/download/v1.20.0/bazelisk-linux-amd64 \ +&& chmod +x bazelisk-linux-amd64 \ +&& mv bazelisk-linux-amd64 /usr/local/bin/bazel + +# Install Java +RUN dnf -y update \ +&& dnf -y install java-11-openjdk java-11-openjdk-devel maven \ +&& dnf clean all +ENV JAVA_HOME=/usr/lib/jvm/java-openjdk + +# Install Python +RUN dnf -y update \ +&& dnf -y install python3-devel python3-pip python3-numpy \ +&& dnf clean all \ +&& rm -rf /var/cache/dnf + +# Add the library src to our build env +FROM env AS devel +WORKDIR /home/project +COPY . . +RUN sed -i 's/\(DEFAULT_PYTHON =\) "3.[0-9]\+"/\1 "3.9"/' WORKSPACE + +FROM devel AS build +RUN bazel version +RUN bazel build \ + -c opt \ + --subcommands=true \ + //ortools/... //examples/... + +FROM build AS test +RUN bazel test \ + -c opt \ + --test_output=errors \ + //ortools/... //examples/... 
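For orientation, each distro Dockerfile such as the AlmaLinux one above follows the same four-stage layout — `env` (toolchain), `devel` (env plus the project sources), `build`, and `test` — and the `<distro>_<stage>` Makefile rules simply materialize one stage as a tagged image. A minimal sketch of what `make almalinux_build` expands to, assuming the image prefix resolves to `or-tools/bazel` (the actual prefix comes from `${PROJECT}/${BUILD_SYSTEM}` in the Makefile):

```sh
# Run from the bazel/ directory, as the Makefile rule does: build up to the
# `build` stage of the AlmaLinux image, with the repo root (..) as context.
docker build --target=build \
  --tag or-tools/bazel:almalinux_build \
  -f docker/almalinux/Dockerfile ..
```

Because every stage is derived from the previous one, a later target such as `make almalinux_test` reuses the earlier layers from the local build cache and only re-runs the final `bazel test` layer when the sources change.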
diff --git a/bazel/docker/alpine/Dockerfile b/bazel/docker/alpine/Dockerfile
index f129072f7c4..5908e0fb036 100644
--- a/bazel/docker/alpine/Dockerfile
+++ b/bazel/docker/alpine/Dockerfile
@@ -1,17 +1,18 @@
 # Create a virtual environment with all tools installed
 # ref: https://hub.docker.com/_/alpine
 FROM alpine:edge AS env
+
 # Install system build dependencies
 ENV PATH=/usr/local/bin:$PATH
 RUN apk add --no-cache git build-base linux-headers zlib-dev
-RUN apk add --no-cache -X http://dl-cdn.alpinelinux.org/alpine/edge/testing bazel6
+RUN apk add --no-cache -X http://dl-cdn.alpinelinux.org/alpine/edge/testing bazel7
 
 ENV JAVA_HOME=/usr/lib/jvm/default-jvm
 ENV PATH=$JAVA_HOME/bin:$PATH
 
 # Install Python
-RUN apk add --no-cache python3-dev py3-pip \
-    py3-setuptools py3-wheel
+RUN apk add --no-cache openssl python3-dev py3-pip py3-wheel py3-numpy py3-pandas
+RUN python3 -m pip install --break-system-packages absl-py mypy-protobuf
 
 FROM env AS devel
 WORKDIR /home/project
@@ -19,23 +20,13 @@ COPY . .
 
 FROM devel AS build
 RUN bazel version
-
-# --javabase=@local_jdk_11//:jdk \
-# --host_javabase=@local_jdk_11//:jdk \
-
-# --javabase=@bazel_tools//tools/jdk:local_jdk11 \
-# --host_javabase=@bazel_tools//tools/jdk:local_jdk11 \
 RUN bazel build \
   -c opt \
-  --java_language_version=11 \
-  --java_runtime_version=local_jdk11 \
-  --tool_java_runtime_version=local_jdk11 \
   --subcommands=true \
   //ortools/... //examples/...
 
 FROM build AS test
 RUN bazel test \
-  --host_javabase=@local_jdk//:jdk \
  -c opt \
  --test_output=errors \
  //ortools/... //examples/...
diff --git a/bazel/docker/archlinux/Dockerfile b/bazel/docker/archlinux/Dockerfile
index b47c1236888..727cc90a779 100644
--- a/bazel/docker/archlinux/Dockerfile
+++ b/bazel/docker/archlinux/Dockerfile
@@ -1,23 +1,14 @@
 # Create a virtual environment with all tools installed
 # ref: https://hub.docker.com/_/archlinux/
 FROM archlinux:latest AS env
+
 # Install system build dependencies
 ENV PATH=/usr/local/bin:$PATH
-RUN pacman -Syu --noconfirm git wget base-devel
-
-# Install Bazelisk
-RUN wget \
-https://github.com/bazelbuild/bazelisk/releases/download/v1.19.0/bazelisk-linux-amd64 \
-&& chmod +x bazelisk-linux-amd64 \
-&& mv bazelisk-linux-amd64 /usr/local/bin/bazel
-
-# Java
-RUN pacman -Syu --noconfirm jdk11-openjdk
-ENV JAVA_HOME=/usr/lib/jvm/default
-ENV PATH=${JAVA_HOME}/bin:$PATH
+RUN pacman -Syu --noconfirm git base-devel bazel
 
 # Install Python
-RUN pacman -Syu --noconfirm python python-pip python-setuptools
+RUN pacman -Syu --noconfirm python python-pip python-setuptools \
+ python-numpy python-pandas
 
 FROM env AS devel
 WORKDIR /home/project
diff --git a/bazel/docker/debian/Dockerfile b/bazel/docker/debian/Dockerfile
index 0aaedb68b7b..66237cdca26 100644
--- a/bazel/docker/debian/Dockerfile
+++ b/bazel/docker/debian/Dockerfile
@@ -1,35 +1,29 @@
 # Create a virtual environment with all tools installed
 # ref: https://hub.docker.com/_/debian
 FROM debian:latest AS env
+
 # Install system build dependencies
-ENV PATH=/usr/local/bin:$PATH
 RUN apt-get update -qq \
 && apt-get install -yq git wget curl libssl-dev build-essential \
+&& apt-get install -yq python3-dev python3-pip python3-venv \
+ python3-numpy python3-pandas \
+&& apt-get install -yq default-jdk \
 && apt-get clean \
 && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
+
 # Install Bazel
 RUN curl https://bazel.build/bazel-release.pub.gpg | apt-key add -
-RUN echo "deb [arch=amd64] https://storage.googleapis.com/bazel-apt stable jdk1.8" | tee /etc/apt/sources.list.d/bazel.list
+ARG TARGETARCH=amd64
+RUN echo
"deb [arch=$TARGETARCH] https://storage.googleapis.com/bazel-apt stable jdk1.8" | tee /etc/apt/sources.list.d/bazel.list RUN apt-get update -qq \ && apt-get install -yq bazel \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* -# Install Java -RUN apt-get update -qq \ -&& apt-get install -yq default-jdk \ -&& apt-get clean \ -&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* - -# Install Python -RUN apt-get update -qq \ -&& apt-get install -yq python3-dev python3-pip python3-venv \ -&& apt-get clean \ -&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* - FROM env AS devel WORKDIR /home/project COPY . . +RUN sed -i 's/\(DEFAULT_PYTHON =\) "3.[0-9]\+"/\1 "3.11"/' WORKSPACE FROM devel AS build RUN bazel version diff --git a/bazel/docker/fedora/Dockerfile b/bazel/docker/fedora/Dockerfile index 2b7c480a4da..ed944754dfd 100644 --- a/bazel/docker/fedora/Dockerfile +++ b/bazel/docker/fedora/Dockerfile @@ -14,7 +14,7 @@ RUN dnf -y update \ # Install Bazelisk RUN wget \ -https://github.com/bazelbuild/bazelisk/releases/download/v1.19.0/bazelisk-linux-amd64 \ +https://github.com/bazelbuild/bazelisk/releases/download/v1.20.0/bazelisk-linux-amd64 \ && chmod +x bazelisk-linux-amd64 \ && mv bazelisk-linux-amd64 /usr/local/bin/bazel @@ -26,7 +26,7 @@ ENV JAVA_HOME=/usr/lib/jvm/java-openjdk # Install Python RUN dnf -y update \ -&& dnf -y install python3 python3-devel python3-pip \ +&& dnf -y install python3 python3-devel python3-pip python3-numpy \ && dnf clean all FROM env AS devel diff --git a/bazel/docker/opensuse/Dockerfile b/bazel/docker/opensuse/Dockerfile index 3bf558c6bea..51fbb2ec01e 100644 --- a/bazel/docker/opensuse/Dockerfile +++ b/bazel/docker/opensuse/Dockerfile @@ -11,16 +11,15 @@ ENV CC=gcc CXX=g++ # Install Bazelisk RUN wget \ -https://github.com/bazelbuild/bazelisk/releases/download/v1.19.0/bazelisk-linux-amd64 \ +https://github.com/bazelbuild/bazelisk/releases/download/v1.20.0/bazelisk-linux-amd64 \ && chmod +x bazelisk-linux-amd64 \ && mv bazelisk-linux-amd64 /usr/local/bin/bazel -# Java -RUN zypper install -y java-11-openjdk-devel \ +# Install Java JDK and Maven +RUN zypper refresh \ +&& zypper install -y java-17-openjdk-devel maven \ && zypper clean -a - -ENV JAVA_HOME=/usr/lib64/jvm/java -ENV PATH=$JAVA_HOME/bin:$PATH +ENV PATH=/usr/share/maven/bin:$PATH # Install Python RUN zypper update -y \ @@ -30,6 +29,7 @@ RUN zypper update -y \ FROM env AS devel WORKDIR /home/project COPY . . 
+RUN sed -i 's/\(DEFAULT_PYTHON =\) "3.[0-9]\+"/\1 "3.11"/' WORKSPACE FROM devel AS build RUN bazel version diff --git a/bazel/docker/centos/Dockerfile b/bazel/docker/rockylinux/Dockerfile similarity index 56% rename from bazel/docker/centos/Dockerfile rename to bazel/docker/rockylinux/Dockerfile index 33e4cdd8569..c2d59103250 100644 --- a/bazel/docker/centos/Dockerfile +++ b/bazel/docker/rockylinux/Dockerfile @@ -1,38 +1,41 @@ # Create a virtual environment with all tools installed -# ref: https://quay.io/repository/centos/centos -FROM quay.io/centos/centos:stream AS env +# ref: https://hub.docker.com/_/rockylinux +FROM rockylinux:9 AS env # Install system build dependencies ENV PATH=/usr/local/bin:$PATH RUN dnf -y update \ -&& dnf -y install git wget gcc-toolset-11 \ +&& dnf -y install git wget zlib-devel gcc-toolset-13 \ +&& dnf -y groupinstall "Development Tools" \ && dnf clean all \ && rm -rf /var/cache/dnf -RUN echo "source /opt/rh/gcc-toolset-11/enable" >> /etc/bashrc +RUN echo "source /opt/rh/gcc-toolset-13/enable" >> /etc/bashrc SHELL ["/bin/bash", "--login", "-c"] # Install Bazelisk RUN wget \ -https://github.com/bazelbuild/bazelisk/releases/download/v1.17.0/bazelisk-linux-amd64 \ +https://github.com/bazelbuild/bazelisk/releases/download/v1.20.0/bazelisk-linux-amd64 \ && chmod +x bazelisk-linux-amd64 \ && mv bazelisk-linux-amd64 /usr/local/bin/bazel # Install Java RUN dnf -y update \ -&& dnf -y install java-1.8.0-openjdk java-1.8.0-openjdk-devel maven \ -&& dnf clean all \ -&& rm -rf /var/cache/dnf +&& dnf -y install java-11-openjdk java-11-openjdk-devel maven \ +&& dnf clean all +ENV JAVA_HOME=/usr/lib/jvm/java-openjdk # Install Python RUN dnf -y update \ -&& dnf -y install python3.11-devel \ +&& dnf -y install python3-devel python3-pip python3-numpy \ && dnf clean all \ && rm -rf /var/cache/dnf +# Add the library src to our build env FROM env AS devel WORKDIR /home/project COPY . . 
+RUN sed -i 's/\(DEFAULT_PYTHON =\) "3.[0-9]\+"/\1 "3.9"/' WORKSPACE FROM devel AS build RUN bazel version diff --git a/bazel/docker/ubuntu/Dockerfile b/bazel/docker/ubuntu/Dockerfile index 8b0f4151861..268e79a94bc 100644 --- a/bazel/docker/ubuntu/Dockerfile +++ b/bazel/docker/ubuntu/Dockerfile @@ -1,6 +1,7 @@ # Create a virtual environment with all tools installed # ref: https://hub.docker.com/_/ubuntu FROM ubuntu:rolling AS env + # Install system build dependencies ENV TZ=Europe/Paris RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone @@ -8,29 +9,21 @@ RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone # Install system build dependencies RUN apt-get update -qq \ && apt-get install -yq git wget curl libssl-dev build-essential \ +&& apt-get install -yq python3-dev python3-pip python3-venv \ + python3-numpy python3-pandas \ +&& apt-get install -yq default-jdk \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* # Install Bazel RUN curl https://bazel.build/bazel-release.pub.gpg | apt-key add - -RUN echo "deb [arch=amd64] https://storage.googleapis.com/bazel-apt stable jdk1.8" | tee /etc/apt/sources.list.d/bazel.list +ARG TARGETARCH=amd64 +RUN echo "deb [arch=$TARGETARCH] https://storage.googleapis.com/bazel-apt stable jdk1.8" | tee /etc/apt/sources.list.d/bazel.list RUN apt-get update -qq \ && apt-get install -yq bazel \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* -# Install Java -RUN apt-get update -qq \ -&& apt-get install -yq default-jdk \ -&& apt-get clean \ -&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* - -# Install Python -RUN apt-get update -qq \ -&& apt-get install -yq python3-dev python3-pip \ -&& apt-get clean \ -&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* - FROM env AS devel WORKDIR /home/project COPY . . 
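The `ARG TARGETARCH=amd64` introduced in the Debian and Ubuntu Dockerfiles above is what lets one file serve both build paths: plain `docker build` never populates `TARGETARCH`, so the Bazel apt source falls back to amd64, while `docker buildx build --platform ...` injects the matching value automatically. A sketch of the buildx invocation the `<platform>_<distro>_<stage>` rule issues, with the same assumed `or-tools/bazel` image prefix as in the earlier example:

```sh
# Equivalent of `make arm64_ubuntu_test`, run from the bazel/ directory:
# buildx sets TARGETARCH=arm64, so the apt line picks the arm64 bazel repo.
docker buildx build --platform linux/arm64 --target=test \
  --tag or-tools/bazel:arm64_ubuntu_test \
  -f docker/ubuntu/Dockerfile ..
```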
diff --git a/bazel/notebook_requirements.in b/bazel/notebook_requirements.in index 6fd384e206f..77d68e279e1 100644 --- a/bazel/notebook_requirements.in +++ b/bazel/notebook_requirements.in @@ -1,16 +1,16 @@ # OR-Tools code dependencies absl-py==2.0.0 immutabledict==3.0.0 -numpy==1.26.1 -protobuf==4.25.3 -requests==2.31.0 +numpy==1.26.4 +protobuf==5.26.1 +requests==2.32.0 scipy==1.11.3 # OR-Tools build dependencies mypy==1.6.1 mypy-protobuf==3.5.0 virtualenv==20.24.6 -black==23.10.1 +black==24.3.0 # Example dependencies pandas==2.1.2 @@ -20,8 +20,9 @@ svgwrite==1.4.3 plotly==5.15.0 # Notebook -jupyter==1.0.0 -jupyter-server==2.12.5 -tornado==6.3.3 +jupyterlab==4.2.5 +notebook==7.2.2 +jupyter-server==2.14.2 +tornado==6.4.1 Pygments==2.15.0 jsonschema==4.19.0 diff --git a/bazel/notebook_requirements.txt b/bazel/notebook_requirements.txt index 94630c0026a..0736f7072af 100644 --- a/bazel/notebook_requirements.txt +++ b/bazel/notebook_requirements.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with Python 3.11 +# This file is autogenerated by pip-compile with Python 3.12 # by the following command: # # bazel run //bazel:notebook_requirements.update @@ -7,7 +7,9 @@ absl-py==2.0.0 # via -r bazel/notebook_requirements.in anyio==4.0.0 - # via jupyter-server + # via + # httpx + # jupyter-server argon2-cffi==23.1.0 # via jupyter-server argon2-cffi-bindings==21.2.0 @@ -28,12 +30,15 @@ backcall==0.2.0 # via ipython beautifulsoup4==4.12.2 # via nbconvert -black==23.10.1 +black==24.3.0 # via -r bazel/notebook_requirements.in bleach==6.0.0 # via nbconvert -certifi==2023.7.22 - # via requests +certifi==2024.7.4 + # via + # httpcore + # httpx + # requests cffi==1.15.1 # via argon2-cffi-bindings charset-normalizer==3.2.0 @@ -41,9 +46,7 @@ charset-normalizer==3.2.0 click==8.1.3 # via black comm==0.1.4 - # via - # ipykernel - # ipywidgets + # via ipykernel debugpy==1.6.7.post1 # via ipykernel decorator==5.1.1 @@ -60,28 +63,24 @@ filelock==3.12.2 # via virtualenv fqdn==1.5.1 # via jsonschema -idna==3.4 +h11==0.14.0 + # via httpcore +httpcore==1.0.5 + # via httpx +httpx==0.27.2 + # via jupyterlab +idna==3.7 # via # anyio + # httpx # jsonschema # requests immutabledict==3.0.0 # via -r bazel/notebook_requirements.in ipykernel==6.25.2 - # via - # jupyter - # jupyter-console - # jupyterlab - # qtconsole + # via jupyterlab ipython==8.15.0 - # via - # ipykernel - # ipywidgets - # jupyter-console -ipython-genutils==0.2.0 - # via qtconsole -ipywidgets==8.1.0 - # via jupyter + # via ipykernel isoduration==20.11.0 # via jsonschema jedi==0.19.0 @@ -104,33 +103,25 @@ jsonschema[format-nongpl]==4.19.0 # nbformat jsonschema-specifications==2023.7.1 # via jsonschema -jupyter==1.0.0 - # via -r bazel/notebook_requirements.in jupyter-client==8.3.1 # via # ipykernel - # jupyter-console # jupyter-server # nbclient - # qtconsole -jupyter-console==6.6.3 - # via jupyter jupyter-core==5.3.1 # via # ipykernel # jupyter-client - # jupyter-console # jupyter-server # jupyterlab # nbclient # nbconvert # nbformat - # qtconsole jupyter-events==0.9.0 # via jupyter-server jupyter-lsp==2.2.2 # via jupyterlab -jupyter-server==2.12.5 +jupyter-server==2.14.2 # via # -r bazel/notebook_requirements.in # jupyter-lsp @@ -140,16 +131,16 @@ jupyter-server==2.12.5 # notebook-shim jupyter-server-terminals==0.4.4 # via jupyter-server -jupyterlab==4.0.11 - # via notebook +jupyterlab==4.2.5 + # via + # -r bazel/notebook_requirements.in + # notebook jupyterlab-pygments==0.2.2 # via nbconvert -jupyterlab-server==2.24.0 
+jupyterlab-server==2.27.3 # via # jupyterlab # notebook -jupyterlab-widgets==3.0.8 - # via ipywidgets markupsafe==2.1.3 # via # jinja2 @@ -171,9 +162,7 @@ mypy-protobuf==3.5.0 nbclient==0.8.0 # via nbconvert nbconvert==7.8.0 - # via - # jupyter - # jupyter-server + # via jupyter-server nbformat==5.9.2 # via # jupyter-server @@ -181,13 +170,13 @@ nbformat==5.9.2 # nbconvert nest-asyncio==1.5.7 # via ipykernel -notebook==7.0.7 - # via jupyter +notebook==7.2.2 + # via -r bazel/notebook_requirements.in notebook-shim==0.2.3 # via # jupyterlab # notebook -numpy==1.26.1 +numpy==1.26.4 # via # -r bazel/notebook_requirements.in # pandas @@ -203,8 +192,6 @@ packaging==23.1 # jupyterlab-server # nbconvert # plotly - # qtconsole - # qtpy pandas==2.1.2 # via -r bazel/notebook_requirements.in pandocfilters==1.5.0 @@ -227,10 +214,8 @@ plotly==5.15.0 prometheus-client==0.17.1 # via jupyter-server prompt-toolkit==3.0.39 - # via - # ipython - # jupyter-console -protobuf==4.25.3 + # via ipython +protobuf==5.26.1 # via # -r bazel/notebook_requirements.in # mypy-protobuf @@ -248,9 +233,7 @@ pygments==2.15.0 # via # -r bazel/notebook_requirements.in # ipython - # jupyter-console # nbconvert - # qtconsole python-dateutil==2.8.2 # via # arrow @@ -266,19 +249,13 @@ pyzmq==25.1.1 # via # ipykernel # jupyter-client - # jupyter-console # jupyter-server - # qtconsole -qtconsole==5.4.4 - # via jupyter -qtpy==2.4.0 - # via qtconsole referencing==0.30.2 # via # jsonschema # jsonschema-specifications # jupyter-events -requests==2.31.0 +requests==2.32.0 # via # -r bazel/notebook_requirements.in # jupyterlab-server @@ -305,7 +282,9 @@ six==1.16.0 # python-dateutil # rfc3339-validator sniffio==1.3.0 - # via anyio + # via + # anyio + # httpx soupsieve==2.5 # via beautifulsoup4 stack-data==0.6.2 @@ -320,7 +299,7 @@ terminado==0.17.1 # jupyter-server-terminals tinycss2==1.2.1 # via nbconvert -tornado==6.3.3 +tornado==6.4.1 # via # -r bazel/notebook_requirements.in # ipykernel @@ -334,9 +313,7 @@ traitlets==5.9.0 # comm # ipykernel # ipython - # ipywidgets # jupyter-client - # jupyter-console # jupyter-core # jupyter-events # jupyter-server @@ -345,7 +322,6 @@ traitlets==5.9.0 # nbclient # nbconvert # nbformat - # qtconsole types-protobuf==4.24.0.0 # via mypy-protobuf typing-extensions==4.8.0 @@ -354,7 +330,7 @@ tzdata==2023.3 # via pandas uri-template==1.3.0 # via jsonschema -urllib3==2.0.7 +urllib3==2.2.2 # via requests virtualenv==20.24.6 # via -r bazel/notebook_requirements.in @@ -366,7 +342,9 @@ webencodings==0.5.1 # via # bleach # tinycss2 -websocket-client==1.6.2 +websocket-client==1.8.0 # via jupyter-server -widgetsnbextension==4.0.8 - # via ipywidgets + +# The following packages are considered to be unsafe in a requirements file: +setuptools==74.0.0 + # via jupyterlab diff --git a/bazel/ortools_requirements.in b/bazel/ortools_requirements.in index 71bfe41f46b..37171a5d729 100644 --- a/bazel/ortools_requirements.in +++ b/bazel/ortools_requirements.in @@ -1,16 +1,16 @@ # OR-Tools code dependencies absl-py==2.0.0 immutabledict==3.0.0 -numpy==1.26.1 -protobuf==4.25.3 -requests==2.31.0 +numpy==1.26.4 +protobuf==5.26.1 +requests==2.32.0 scipy==1.11.3 # OR-Tools build dependencies mypy==1.6.1 mypy-protobuf==3.5.0 virtualenv==20.24.6 -black==23.10.1 +black==24.3.0 # Example dependencies pandas==2.1.2 diff --git a/bazel/ortools_requirements.txt b/bazel/ortools_requirements.txt index 8471bcbba8e..bf5f16fa878 100644 --- a/bazel/ortools_requirements.txt +++ b/bazel/ortools_requirements.txt @@ -1,14 +1,14 @@ # -# This file is 
autogenerated by pip-compile with Python 3.11 +# This file is autogenerated by pip-compile with Python 3.12 # by the following command: # # bazel run //bazel:ortools_requirements.update # absl-py==2.0.0 # via -r bazel/ortools_requirements.in -black==23.10.1 +black==24.3.0 # via -r bazel/ortools_requirements.in -certifi==2023.7.22 +certifi==2024.7.4 # via requests charset-normalizer==3.3.2 # via requests @@ -18,7 +18,7 @@ distlib==0.3.7 # via virtualenv filelock==3.12.2 # via virtualenv -idna==3.4 +idna==3.7 # via requests immutabledict==3.0.0 # via -r bazel/ortools_requirements.in @@ -30,7 +30,7 @@ mypy-extensions==1.0.0 # mypy mypy-protobuf==3.5.0 # via -r bazel/ortools_requirements.in -numpy==1.26.1 +numpy==1.26.4 # via # -r bazel/ortools_requirements.in # pandas @@ -45,7 +45,7 @@ platformdirs==3.10.0 # via # black # virtualenv -protobuf==4.25.3 +protobuf==5.26.1 # via # -r bazel/ortools_requirements.in # mypy-protobuf @@ -53,7 +53,7 @@ python-dateutil==2.8.2 # via pandas pytz==2022.7.1 # via pandas -requests==2.31.0 +requests==2.32.0 # via -r bazel/ortools_requirements.in scipy==1.11.3 # via -r bazel/ortools_requirements.in @@ -67,7 +67,7 @@ typing-extensions==4.8.0 # via mypy tzdata==2023.3 # via pandas -urllib3==2.1.0 +urllib3==2.2.2 # via requests virtualenv==20.24.6 # via -r bazel/ortools_requirements.in diff --git a/bazel/scip-v900.patch b/bazel/scip-v900.patch new file mode 100644 index 00000000000..520e019eb9b --- /dev/null +++ b/bazel/scip-v900.patch @@ -0,0 +1,225 @@ +diff --git a/src/lpi/lpi_glop.cpp b/src/lpi/lpi_glop.cpp +index a90120188a..664cb4d097 100644 +--- a/src/lpi/lpi_glop.cpp ++++ b/src/lpi/lpi_glop.cpp +@@ -51,7 +51,6 @@ + #include "ortools/util/time_limit.h" + + #include "ortools/base/logging.h" +-#include "ortools/base/vlog_is_on.h" + + #include "lpi/lpi.h" + #include "scip/pub_message.h" +@@ -2942,12 +2941,12 @@ SCIP_RETCODE SCIPlpiSetIntpar( + SCIPdebugMessage("SCIPlpiSetIntpar: SCIP_LPPAR_LPINFO -> %d.\n", ival); + if ( ival == 0 ) + { +- (void) google::SetVLOGLevel("*", google::GLOG_INFO); ++ absl::SetFlag(&FLAGS_stderrthreshold, 2); + lpi->lp_info = false; + } + else + { +- (void) google::SetVLOGLevel("*", google::GLOG_ERROR); ++ absl::SetFlag(&FLAGS_stderrthreshold, 0); + lpi->lp_info = true; + } + break; +@@ -3190,7 +3189,7 @@ SCIP_RETCODE SCIPlpiReadLP( + + const std::string filespec(fname); + MPModelProto proto; +- if ( ! ReadFileToProto(filespec, &proto) ) ++ if ( ! ReadFileToProto(filespec, &proto).ok() ) + { + SCIPerrorMessage("Could not read <%s>\n", fname); + return SCIP_READERROR; +@@ -3214,7 +3213,7 @@ SCIP_RETCODE SCIPlpiWriteLP( + MPModelProto proto; + LinearProgramToMPModelProto(*lpi->linear_program, &proto); + const std::string filespec(fname); +- if ( ! WriteProtoToFile(filespec, proto, operations_research::ProtoWriteFormat::kProtoText, true) ) ++ if ( ! 
WriteProtoToFile(filespec, proto, operations_research::ProtoWriteFormat::kProtoText, true).ok() ) + { + SCIPerrorMessage("Could not write <%s>\n", fname); + return SCIP_READERROR; +diff --git a/src/scip/config.h b/src/scip/config.h +new file mode 100644 +index 0000000000..871fde8e55 +--- /dev/null ++++ b/src/scip/config.h +@@ -0,0 +1,32 @@ ++#ifndef __CONFIG_H__ ++#define __CONFIG_H__ ++ ++#define CMAKE_BUILD_TYPE "Release" ++#define SCIP_VERSION_MAJOR 9 ++#define SCIP_VERSION_MINOR 0 ++#define SCIP_VERSION_PATCH 0 ++#define SCIP_VERSION_SUB 0 ++#define SCIP_VERSION_API 114 ++/* #undef BMS_NOBLOCKMEM */ ++/* #undef SCIP_NOBUFFERMEM */ ++/* #undef WITH_DEBUG_SOLUTION */ ++/* #undef SCIP_NO_SIGACTION */ ++/* #undef SCIP_NO_STRTOK_R */ ++/* #undef TPI_NONE */ ++#define TPI_NONE ++/* #undef TPI_OMP */ ++#define SCIP_THREADSAFE ++#define WITH_SCIPDEF ++/* #undef SCIP_WITH_LAPACK */ ++/* #undef SCIP_WITH_PAPILO */ ++/* #undef SCIP_WITH_ZLIB */ ++/* #undef SCIP_WITH_READLINE */ ++/* #undef SCIP_WITH_GMP */ ++/* #undef SCIP_WITH_LPSCHECK */ ++/* #undef SCIP_WITH_ZIMPL */ ++/* #undef SCIP_WITH_AMPL */ ++#define SCIP_ROUNDING_FE ++/* #undef SCIP_ROUNDING_FP */ ++/* #undef SCIP_ROUNDING_MS */ ++ ++#endif +diff --git a/src/scip/githash.c b/src/scip/githash.c +new file mode 100644 +index 0000000000..4b1dfc587f +--- /dev/null ++++ b/src/scip/githash.c +@@ -0,0 +1 @@ ++#define SCIP_GITHASH "7205bedd94" +diff --git a/src/scip/scip_export.h b/src/scip/scip_export.h +new file mode 100644 +index 0000000000..8bf2aaefa5 +--- /dev/null ++++ b/src/scip/scip_export.h +@@ -0,0 +1,42 @@ ++ ++#ifndef SCIP_EXPORT_H ++#define SCIP_EXPORT_H ++ ++#ifdef SCIP_STATIC_DEFINE ++# define SCIP_EXPORT ++# define SCIP_NO_EXPORT ++#else ++# ifndef SCIP_EXPORT ++# ifdef libscip_EXPORTS ++/* We are building this library */ ++# define SCIP_EXPORT ++# else ++/* We are using this library */ ++# define SCIP_EXPORT ++# endif ++# endif ++ ++# ifndef SCIP_NO_EXPORT ++# define SCIP_NO_EXPORT ++# endif ++#endif ++ ++#ifndef SCIP_DEPRECATED ++# define SCIP_DEPRECATED __attribute__ ((__deprecated__)) ++#endif ++ ++#ifndef SCIP_DEPRECATED_EXPORT ++# define SCIP_DEPRECATED_EXPORT SCIP_EXPORT SCIP_DEPRECATED ++#endif ++ ++#ifndef SCIP_DEPRECATED_NO_EXPORT ++# define SCIP_DEPRECATED_NO_EXPORT SCIP_NO_EXPORT SCIP_DEPRECATED ++#endif ++ ++#if 0 /* DEFINE_NO_DEPRECATED */ ++# ifndef SCIP_NO_DEPRECATED ++# define SCIP_NO_DEPRECATED ++# endif ++#endif ++ ++#endif /* SCIP_EXPORT_H */ +diff --git a/src/scip/scipbuildflag.c b/src/scip/scipbuildflag.c +new file mode 100644 +index 0000000000..2af785150e +--- /dev/null ++++ b/src/scip/scipbuildflag.c +@@ -0,0 +1,65 @@ ++/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ ++/* */ ++/* This file is part of the program and library */ ++/* SCIP --- Solving Constraint Integer Programs */ ++/* */ ++/* Copyright (c) 2002-2024 Zuse Institute Berlin (ZIB) */ ++/* */ ++/* Licensed under the Apache License, Version 2.0 (the "License"); */ ++/* you may not use this file except in compliance with the License. */ ++/* You may obtain a copy of the License at */ ++/* */ ++/* http://www.apache.org/licenses/LICENSE-2.0 */ ++/* */ ++/* Unless required by applicable law or agreed to in writing, software */ ++/* distributed under the License is distributed on an "AS IS" BASIS, */ ++/* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. */ ++/* See the License for the specific language governing permissions and */ ++/* limitations under the License. 
*/
++/*                                                                     */
++/*  You should have received a copy of the Apache-2.0 license          */
++/*  along with SCIP; see the file LICENSE. If not visit scipopt.org.   */
++/*                                                                     */
++/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
++
++/**@file   scipbuildflags.c
++ * @brief  build flags methods
++ * @author Felipe Serrano
++ */
++
++/*---+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8----+----9----+----0----+----1----+----2*/
++
++#include "scip/scipbuildflags.h"
++
++/** returns the flags that were used to build SCIP */
++const char* SCIPgetBuildFlags(
++   void
++   )
++{
++   return " ARCH=x86_64\n\
++ OSTYPE=Linux-6.7.5-arch1-1\n\
++ COMP=GNU 13.2.1\n\
++ BUILD=Release\n\
++ DEBUGSOL=OFF\n\
++ EXPRINT=none\n\
++ SYM=none\n\
++ GMP=OFF\n\
++ IPOPT=OFF\n\
++ WORHP=OFF\n\
++ LPS=none\n\
++ LPSCHECK=OFF\n\
++ NOBLKBUFMEM=OFF\n\
++ NOBLKMEM=OFF\n\
++ NOBUFMEM=OFF\n\
++ THREADSAFE=ON;FORCE\n\
++ READLINE=OFF\n\
++ SANITIZE_ADDRESS=OFF\n\
++ SANITIZE_MEMORY=OFF\n\
++ SANITIZE_UNDEFINED=OFF\n\
++ SANITIZE_THREAD=OFF\n\
++ SHARED=OFF\n\
++ VERSION=9.0.0.0\n\
++ API_VERSION=114\n\
++ ZIMPL=OFF\n\
++ ZLIB=ON";
++}
+diff --git a/src/symmetry/compute_symmetry_bliss.cpp b/src/symmetry/compute_symmetry_bliss.cpp
+index 0ba5ea060e..10570448a0 100644
+--- a/src/symmetry/compute_symmetry_bliss.cpp
++++ b/src/symmetry/compute_symmetry_bliss.cpp
+@@ -34,8 +34,9 @@
+ #include "compute_symmetry.h"
+ 
+ /* include bliss graph */
+-#include <bliss/defs.hh>
+-#include <bliss/graph.hh>
++#define BLISS_VERSION "0.73"
++#include <defs.hh>
++#include <graph.hh>
+ 
+ #include <string.h>
+ #include <vector>
diff --git a/bazel/scip.BUILD.bazel b/bazel/scip.BUILD.bazel
index 6b11d719e52..8271c09fb9e 100644
--- a/bazel/scip.BUILD.bazel
+++ b/bazel/scip.BUILD.bazel
@@ -40,12 +40,10 @@ PLATFORM_FLAGS = select({
     "on_linux": [
         "-Wunknown-pragmas",
         "-fexceptions",
-        "-DSYM=bliss"
     ],
     "on_macos": [
         "-Wunknown-pragmas",
         "-fexceptions",
-        "-DSYM=bliss"
     ],
     "on_windows": [
         "/DSYM=none",
@@ -82,6 +80,7 @@ cc_library(
             "src/scip/nlpi_filtersqp.c",
             "src/scip/nlpi_worhp.c",
             "src/scip/*_xyz.c",
+            "src/scip/scipbuildflags.c",
             "src/scip/sorttpl.c",
             "src/symmetry/compute_symmetry_*.cpp",
             "src/symmetry/*nauty*",
         ],
     ) + BLISS_FILE + [
         "src/scip/exprinterpret_none.c",
-        "src/tpi/tpi_tnycthrd.c",
+        #"src/tpi/tpi_tnycthrd.c",
+        "src/tpi/tpi_none.c",
     ],
     hdrs = glob(
         [
             "src/*/*.h",
             "src/*/*.hpp",
             "src/scip/githash.c",
             "src/scip/sorttpl.c",
-            "src/scip/buildflags.c",
         ],
         exclude = [
@@ -106,26 +105,12 @@
         ]),
     copts = [
         "$(STACK_FRAME_UNLIMITED)",  # src/scip/reader_cnf.c
-        "-DSCIP_WITH_ZLIB",
-        "-DWITH_SCIPDEF",
-        "-DSCIP_ROUNDING_FE",
-        "-DTPI_TNY",  # src/tpi/type_tpi_tnycthrd.h
-        # Compile in thead-safe mode (required since we use TPI_TNYC). Note,
-        # one does not technically need to add this, as SCIP code always
-        # uses syntax like "#ifndef NPARASCIP". But let's be explicit here.
-        "-DPARASCIP",
+        #"-DTPI_TNY",  # src/tpi/type_tpi_tnycthrd.h
+        "-DTPI_NONE",  # src/tpi/type_tpi_none.h
        "-Isrc",
        "-Isrc/scip",
    ] + PLATFORM_FLAGS,
-    defines = [
-        # Scip v800 optionally depends on scip/config.h and
-        # scip/scip_export.h that are generated by build system.
-        #
-        # We need every library and binary that depends on SCIP libraries to
-        # define this macro. That is why we use `defines' here instead of
-        # `copts' or `local_defines'.
- "NO_CONFIG_HEADER", - ], + defines = [], features = ["-parse_headers"], includes = [ "src", diff --git a/bazel/scip.patch b/bazel/scip.patch deleted file mode 100644 index 03e6c58e7da..00000000000 --- a/bazel/scip.patch +++ /dev/null @@ -1,85 +0,0 @@ -diff --git a/src/lpi/lpi_glop.cpp b/src/lpi/lpi_glop.cpp -index 2471778a8f..17fd1e8c34 100644 ---- a/src/lpi/lpi_glop.cpp -+++ b/src/lpi/lpi_glop.cpp -@@ -51,7 +51,6 @@ - #include "ortools/util/time_limit.h" - - #include "ortools/base/logging.h" --#include "ortools/base/vlog_is_on.h" - - #include "lpi/lpi.h" - #include "scip/pub_message.h" -@@ -2942,12 +2941,12 @@ SCIP_RETCODE SCIPlpiSetIntpar( - SCIPdebugMessage("SCIPlpiSetIntpar: SCIP_LPPAR_LPINFO -> %d.\n", ival); - if ( ival == 0 ) - { -- (void) google::SetVLOGLevel("*", google::GLOG_INFO); -+ absl::SetFlag(&FLAGS_stderrthreshold, 2); - lpi->lp_info = false; - } - else - { -- (void) google::SetVLOGLevel("*", google::GLOG_ERROR); -+ absl::SetFlag(&FLAGS_stderrthreshold, 0); - lpi->lp_info = true; - } - break; -@@ -3190,7 +3189,7 @@ SCIP_RETCODE SCIPlpiReadLP( - - const std::string filespec(fname); - MPModelProto proto; -- if ( ! ReadFileToProto(filespec, &proto) ) -+ if ( ! ReadFileToProto(filespec, &proto).ok() ) - { - SCIPerrorMessage("Could not read <%s>\n", fname); - return SCIP_READERROR; -@@ -3214,7 +3213,7 @@ SCIP_RETCODE SCIPlpiWriteLP( - MPModelProto proto; - LinearProgramToMPModelProto(*lpi->linear_program, &proto); - const std::string filespec(fname); -- if ( ! WriteProtoToFile(filespec, proto, operations_research::ProtoWriteFormat::kProtoText, true) ) -+ if ( ! WriteProtoToFile(filespec, proto, operations_research::ProtoWriteFormat::kProtoText, true).ok() ) - { - SCIPerrorMessage("Could not write <%s>\n", fname); - return SCIP_READERROR; -diff --git a/src/symmetry/compute_symmetry_bliss.cpp b/src/symmetry/compute_symmetry_bliss.cpp -index 484627c4b9..27c2895165 100644 ---- a/src/symmetry/compute_symmetry_bliss.cpp -+++ b/src/symmetry/compute_symmetry_bliss.cpp -@@ -25,5 +25,5 @@ - #include "compute_symmetry.h" - - /* include bliss graph */ --#include --#include -+#include -+#include - - #include - #include - -diff --git a/src/scip/githash.c b/src/scip/githash.c -new file mode 100644 -index 0000000000..2891bc72de ---- /dev/null -+++ b/src/scip/githash.c -@@ -0,0 +1,1 @@ -+#define SCIP_GITHASH "a740f0891e" -diff --git a/src/scip/scipbuildflags.c b/src/scip/scipbuildflags.c -index b54b9112cb..dc8e62b5e0 100644 ---- a/src/scip/scipbuildflags.c -+++ b/src/scip/scipbuildflags.c -@@ -21,10 +21,9 @@ - - /*---+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8----+----9----+----0----+----1----+----2*/ - -+#define SCIP_BUILDFLAGS " ARCH=x86_64\n COMP=gnu\n DEBUGSOL=false\n EXPRINT=none\n GAMS=false\n SYM=bliss\n GMP=false\n IPOPT=false\n IPOPTOPT=opt\n WORHP=false\n WORHPOPT=opt\n LPS=spx2\n LPSCHECK=false\n LPSOPT=opt\n NOBLKBUFMEM=false\n NOBLKMEM=false\n NOBUFMEM=false\n OPT=opt\n OSTYPE=linux\n PARASCIP=true\n READLINE=false\n SANITIZE=\n SHARED=false\n USRARFLAGS=\n USRCFLAGS=-fPIC\n USRCXXFLAGS=-fPIC\n USRDFLAGS=\n USRFLAGS=\n USRLDFLAGS=\n USROFLAGS=\n VERSION=7.0.1\n ZIMPL=false\n ZIMPLOPT=opt\n ZLIB=true" -+ - #include "scip/scipbuildflags.h" --#ifdef NO_CONFIG_HEADER --#include "buildflags.c" --#endif - - /** returns the flags that were used to build SCIP */ - const char* SCIPgetBuildFlags( diff --git a/bazel/swig_java.bzl b/bazel/swig_java.bzl index 877f597c7a3..ec26d1f0f91 100644 --- a/bazel/swig_java.bzl +++ b/bazel/swig_java.bzl @@ -13,6 +13,9 @@ 
"""Build definitions for SWIG Java.""" +load("@rules_java//java:java_library.bzl", "java_library") +load("@rules_java//java/common:java_common.bzl", "java_common") + def _create_src_jar(ctx, java_runtime_info, input_dir, output_jar): jar_args = ctx.actions.args() jar_args.add("cf", output_jar) @@ -198,8 +201,7 @@ def ortools_java_wrap_cc( visibility = visibility, **kwargs ) - - native.java_library( + java_library( name = name, srcs = [srcjar], deps = java_deps, diff --git a/cmake/Findglog.cmake b/cmake/Findglog.cmake deleted file mode 100644 index b6ebdcf1361..00000000000 --- a/cmake/Findglog.cmake +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright 2010-2024 Google LLC -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -#[=======================================================================[.rst: -Findglog --------- - -This module determines the glog library of the system. - -IMPORTED Targets -^^^^^^^^^^^^^^^^ - -This module defines :prop_tgt:`IMPORTED` target ``glog::glog``, if -glog has been found. - -Result Variables -^^^^^^^^^^^^^^^^ - -This module defines the following variables: - -:: - -glog_FOUND - True if glog found. - -#]=======================================================================] -find_package(PkgConfig REQUIRED) - -pkg_check_modules(GLOG REQUIRED libglog IMPORTED_TARGET GLOBAL) -add_library(glog::glog ALIAS PkgConfig::GLOG) diff --git a/cmake/Makefile b/cmake/Makefile index d1044fc80b6..848e9bd1c9f 100644 --- a/cmake/Makefile +++ b/cmake/Makefile @@ -38,12 +38,13 @@ help: @echo -e "\t${BOLD}clean__${RESET}: Remove a docker image for a specific distro." @echo @echo -e "\tWith ${BOLD}${RESET}:" + @echo -e "\t\t${BOLD}almalinux${RESET} (latest)" @echo -e "\t\t${BOLD}alpine${RESET} (edge)" @echo -e "\t\t${BOLD}archlinux${RESET} (latest)" - @echo -e "\t\t${BOLD}centos${RESET} (latest)" @echo -e "\t\t${BOLD}debian${RESET} (latest)" @echo -e "\t\t${BOLD}fedora${RESET} (latest)" @echo -e "\t\t${BOLD}opensuse${RESET} (tumbleweed)" + @echo -e "\t\t${BOLD}rockylinux${RESET} (9)" @echo -e "\t\t${BOLD}ubuntu${RESET} (rolling)" @echo -e "\t\t${BOLD}system_deps${RESET} (archlinux with all deps from pacman)" @echo -e "\t\t${BOLD}all${RESET}: ALL DISTROS" @@ -103,12 +104,15 @@ help: @echo -e "\t${BOLD}clean_toolchains${RESET}: Remove ALL cache and docker image." 
	@echo
	@echo -e "\tWith ${BOLD}<target>${RESET}:"
-	@echo -e "\t\t${BOLD}aarch64${RESET} (bootlin toolchain)"
-	@echo -e "\t\t${BOLD}aarch64be${RESET} (bootlin toolchain)"
-	@echo -e "\t\t${BOLD}mips64${RESET} (codespace toolchain)"
-	@echo -e "\t\t${BOLD}mips64el${RESET} (codespace toolchain)"
-	@echo -e "\t\t${BOLD}ppc64${RESET} (bootlin toolchain)"
-	@echo -e "\t\t${BOLD}ppc64le${RESET} (bootlin toolchain)"
+	@echo -e "\t\t${BOLD}aarch64${RESET} (alias: arm64) (bootlin toolchain)"
+	@echo -e "\t\t${BOLD}aarch64be${RESET} (alias: arm64be) (bootlin toolchain)"
+	@echo -e "\t\t${BOLD}mips64-r6${RESET} (alias: mips64) (codespace toolchain)"
+	@echo -e "\t\t${BOLD}mips64el-r6${RESET} (alias: mips64el) (codespace toolchain)"
+	@echo -e "\t\t${BOLD}mips64-r2${RESET} (codespace toolchain)"
+	@echo -e "\t\t${BOLD}mips64el-r2${RESET} (codespace toolchain)"
+	@echo -e "\t\t${BOLD}ppc64-power8${RESET} (alias: ppc64) (bootlin toolchain)"
+	@echo -e "\t\t${BOLD}ppc64le-power8${RESET} (alias: ppc64le) (bootlin toolchain)"
+	@echo -e "\t\t${BOLD}riscv64${RESET} (bootlin toolchain)"
	@echo
	@echo -e "\tWith ${BOLD}<stage>${RESET}:"
	@echo -e "\t\t${BOLD}env${RESET}"
@@ -116,7 +120,7 @@ help:
	@echo -e "\t\t${BOLD}build${RESET}"
	@echo -e "\t\t${BOLD}test${RESET}"
	@echo -e "\te.g. 'make toolchain_mips64_build'"
-	@echo -e "\te.g. 'make toolchain_aarch64_test'"
+	@echo -e "\te.g. 'make toolchain_arm64_test'"
	@echo
	@echo -e "\tBuild for web using emscripten."
	@echo -e "\t${BOLD}web_<stage>${RESET}: build the emscripten <stage>."
@@ -187,7 +191,16 @@ endif
 DOCKER_RUN_CMD := docker run --rm --init --net=host
 
 # Currently supported distro
-DISTROS := alpine archlinux centos debian fedora opensuse ubuntu system_deps
+DISTROS := \
+  almalinux \
+  alpine \
+  archlinux \
+  debian \
+  fedora \
+  opensuse \
+  rockylinux \
+  ubuntu \
+  system_deps
 
 # $* stem
 # $< first prerequist
@@ -605,10 +618,14 @@ clean_platforms: $(addprefix clean_, $(PLATFORMS))
 ###############
 ## TOOLCHAIN ##
 ###############
 TOOLCHAIN_TARGETS := \
-  aarch64 aarch64be \
-  mips64 mips64el \
-  ppc64 ppc64le
-TOOLCHAIN_STAGES := env devel build test
+  arm64 aarch64 \
+  arm64be aarch64be \
+  mips64 mips64-r6 mips64-r2 \
+  mips64el mips64el-r6 mips64el-r2 \
+  ppc64 ppc64-power8 \
+  ppc64le ppc64le-power8 \
+  riscv64
+TOOLCHAIN_STAGES := env devel toolchain build test
 
 define toolchain-stage-target =
 #$$(info STAGE: $1)
diff --git a/cmake/README.md b/cmake/README.md
index 89b3776cf29..9973076d909 100644
--- a/cmake/README.md
+++ b/cmake/README.md
@@ -43,11 +43,11 @@ Dockers \[Alpine, Archlinux, Centos, Debian, Fedora, OpenSuse, Ubuntu\]x
 [![Build Status][mips_toolchain_status]][mips_toolchain_link]
 [![Build Status][powerpc_toolchain_status]][powerpc_toolchain_link]
 
-[aarch64_toolchain_status]: ./../../../actions/workflows/aarch64_toolchain.yml/badge.svg
+[aarch64_toolchain_status]: ./../../../actions/workflows/aarch64_toolchain.yml/badge.svg?branch=main
 [aarch64_toolchain_link]: ./../../../actions/workflows/aarch64_toolchain.yml
-[mips_toolchain_status]: ./../../../actions/workflows/mips_toolchain.yml/badge.svg
+[mips_toolchain_status]: ./../../../actions/workflows/mips_toolchain.yml/badge.svg?branch=main
 [mips_toolchain_link]: ./../../../actions/workflows/mips_toolchain.yml
-[powerpc_toolchain_status]: ./../../../actions/workflows/powerpc_toolchain.yml/badge.svg
+[powerpc_toolchain_status]: ./../../../actions/workflows/powerpc_toolchain.yml/badge.svg?branch=main
 [powerpc_toolchain_link]: ./../../../actions/workflows/powerpc_toolchain.yml
 
 ## Introduction
diff --git a/cmake/check_deps.cmake
b/cmake/check_deps.cmake new file mode 100644 index 00000000000..36736909ee9 --- /dev/null +++ b/cmake/check_deps.cmake @@ -0,0 +1,113 @@ +# Copyright 2010-2024 Google LLC +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Check dependencies +if(NOT TARGET ZLIB::ZLIB) + message(FATAL_ERROR "Target ZLIB::ZLIB not available.") +endif() + +if(NOT TARGET absl::base) + message(FATAL_ERROR "Target absl::base not available.") +endif() +set(ABSL_DEPS + absl::base + absl::core_headers + absl::absl_check + absl::absl_log + absl::check + absl::die_if_null + absl::flags + absl::flags_commandlineflag + absl::flags_marshalling + absl::flags_parse + absl::flags_reflection + absl::flags_usage + absl::log + absl::log_flags + absl::log_globals + absl::log_initialize + absl::log_internal_message + absl::cord + absl::random_random + absl::raw_hash_set + absl::hash + absl::leak_check + absl::memory + absl::meta + absl::stacktrace + absl::status + absl::statusor + absl::str_format + absl::strings + absl::synchronization + absl::time + absl::any + ) + +if(NOT TARGET protobuf::libprotobuf) + message(FATAL_ERROR "Target protobuf::libprotobuf not available.") +endif() + +if(NOT TARGET Eigen3::Eigen) + message(FATAL_ERROR "Target Eigen3::Eigen not available.") +endif() + +if(BUILD_LP_PARSER OR BUILD_TESTING) + if(NOT TARGET re2::re2) + message(FATAL_ERROR "Target re2::re2 not available.") + endif() + set(RE2_DEPS re2::re2) +endif() + +if(USE_COINOR) + if(NOT TARGET Coin::CbcSolver) + message(FATAL_ERROR "Target Coin::CbcSolver not available.") + endif() + if(NOT TARGET Coin::ClpSolver) + message(FATAL_ERROR "Target Coin::ClpSolver not available.") + endif() + set(COINOR_DEPS Coin::CbcSolver Coin::OsiCbc Coin::ClpSolver Coin::OsiClp) +endif() + +if(USE_PDLP AND BUILD_PDLP) + set(PDLP_DEPS Eigen3::Eigen) +endif() + +if(USE_SCIP AND NOT TARGET libscip) + message(FATAL_ERROR "Target libscip not available.") +endif() + +# Check optional Dependencies +if(USE_CPLEX AND NOT TARGET CPLEX::CPLEX) + message(FATAL_ERROR "Target CPLEX::CPLEX not available.") +endif() + +# CXX Test +if(BUILD_TESTING AND NOT TARGET GTest::gtest_main) + message(FATAL_ERROR "Target GTest::gtest_main not available.") +endif() + +# Check language Dependencies +if(BUILD_PYTHON) + if(NOT TARGET pybind11::pybind11_headers) + message(FATAL_ERROR "Target pybind11::pybind11_headers not available.") + endif() + + if(NOT TARGET pybind11_abseil::absl_casters) + message(FATAL_ERROR "Target pybind11_abseil::absl_casters not available.") + endif() + + if(NOT TARGET pybind11_native_proto_caster) + message(FATAL_ERROR "Target pybind11_native_proto_caster not available.") + endif() +endif() diff --git a/cmake/cpp.cmake b/cmake/cpp.cmake index 705986209af..87f4442da15 100644 --- a/cmake/cpp.cmake +++ b/cmake/cpp.cmake @@ -15,6 +15,29 @@ if(NOT BUILD_CXX) return() endif() +# Basic type +include(CMakePushCheckState) +cmake_push_check_state(RESET) +set(CMAKE_EXTRA_INCLUDE_FILES "cstdint") +include(CheckTypeSize) +check_type_size("long" SIZEOF_LONG LANGUAGE CXX) 
+message(STATUS "Found long size: ${SIZEOF_LONG}") +check_type_size("long long" SIZEOF_LONG_LONG LANGUAGE CXX) +message(STATUS "Found long long size: ${SIZEOF_LONG_LONG}") +check_type_size("int64_t" SIZEOF_INT64_T LANGUAGE CXX) +message(STATUS "Found int64_t size: ${SIZEOF_INT64_T}") + +check_type_size("unsigned long" SIZEOF_ULONG LANGUAGE CXX) +message(STATUS "Found unsigned long size: ${SIZEOF_ULONG}") +check_type_size("unsigned long long" SIZEOF_ULONG_LONG LANGUAGE CXX) +message(STATUS "Found unsigned long long size: ${SIZEOF_ULONG_LONG}") +check_type_size("uint64_t" SIZEOF_UINT64_T LANGUAGE CXX) +message(STATUS "Found uint64_t size: ${SIZEOF_UINT64_T}") + +check_type_size("int *" SIZEOF_INT_P LANGUAGE CXX) +message(STATUS "Found int * size: ${SIZEOF_INT_P}") +cmake_pop_check_state() + ############# ## FLAGS ## ############# @@ -82,7 +105,6 @@ if(MSVC) "/D_CRT_SECURE_NO_DEPRECATE" "/MP" # Build with multiple processes "/Zc:preprocessor" # Enable preprocessor conformance mode - "/DNDEBUG" "/fp:precise" ) # MSVC warning suppressions @@ -232,10 +254,12 @@ if(USE_SCIP OR BUILD_MATH_OPT) list(APPEND OR_TOOLS_PROTO_FILES ${GSCIP_PROTO_FILES}) endif() +# ORTools proto generate_proto_library( - NAME ${PROJECT_NAME} + NAME ortools FILES ${OR_TOOLS_PROTO_FILES}) +# MathOpt proto if(BUILD_MATH_OPT) file(GLOB_RECURSE MATH_OPT_PROTO_FILES RELATIVE ${PROJECT_SOURCE_DIR} "ortools/math_opt/*.proto" @@ -244,7 +268,7 @@ if(BUILD_MATH_OPT) generate_proto_library( NAME math_opt FILES ${MATH_OPT_PROTO_FILES} - LINK_LIBRARIES ${PROJECT_NAMESPACE}::${PROJECT_NAME}_proto) + LINK_LIBRARIES ${PROJECT_NAMESPACE}::ortools_proto) endif() ############### @@ -298,11 +322,17 @@ if(XCODE) target_sources(${PROJECT_NAME} PRIVATE ${PROJECT_BINARY_DIR}/${PROJECT_NAME}/version.cpp) endif() -# Add ${PROJECT_NAMESPACE}::${PROJECT_NAME}_proto to libortools -#target_link_libraries(${PROJECT_NAME} PRIVATE ${PROJECT_NAMESPACE}::proto) +# Add ${PROJECT_NAMESPACE}::ortools_proto to libortools target_sources(${PROJECT_NAME} PRIVATE - $) -add_dependencies(${PROJECT_NAME} ${PROJECT_NAMESPACE}::${PROJECT_NAME}_proto) + $) +add_dependencies(${PROJECT_NAME} ${PROJECT_NAMESPACE}::ortools_proto) + +if(BUILD_MATH_OPT) + # Add ${PROJECT_NAMESPACE}::math_opt_proto to libortools + target_sources(${PROJECT_NAME} PRIVATE + $) + add_dependencies(${PROJECT_NAME} ${PROJECT_NAMESPACE}::math_opt_proto) +endif() foreach(SUBPROJECT IN ITEMS base @@ -319,6 +349,7 @@ foreach(SUBPROJECT IN ITEMS ${PDLP_DIR} sat xpress + knitro lp_data packing scheduling @@ -331,11 +362,6 @@ foreach(SUBPROJECT IN ITEMS endforeach() if(BUILD_MATH_OPT) - #target_link_libraries(${PROJECT_NAME} PRIVATE ${PROJECT_NAMESPACE}::math_opt_proto) - target_sources(${PROJECT_NAME} PRIVATE - $) - add_dependencies(${PROJECT_NAME} ${PROJECT_NAMESPACE}::math_opt_proto) - add_subdirectory(ortools/${MATH_OPT_DIR}) target_link_libraries(${PROJECT_NAME} PRIVATE ${PROJECT_NAME}_math_opt) endif() @@ -349,6 +375,13 @@ target_sources(${PROJECT_NAME} PRIVATE $:CPLEX::CPLEX> $<$:GLPK::GLPK> - $<$:HIGHS::HIGHS> + $<$:highs::highs> ${PDLP_DEPS} $<$:libscip> Threads::Threads) diff --git a/cmake/dependencies/CMakeLists.txt b/cmake/dependencies/CMakeLists.txt index 3ee30fe20be..b6ee347c95f 100644 --- a/cmake/dependencies/CMakeLists.txt +++ b/cmake/dependencies/CMakeLists.txt @@ -60,10 +60,11 @@ set(CMAKE_Fortran_COMPILER OFF) if(BUILD_ZLIB) message(CHECK_START "Fetching ZLIB") list(APPEND CMAKE_MESSAGE_INDENT " ") + set(ZLIB_BUILD_EXAMPLES OFF) FetchContent_Declare( zlib GIT_REPOSITORY 
"https://github.com/madler/ZLIB.git" - GIT_TAG "v1.2.13" + GIT_TAG "v1.3.1" PATCH_COMMAND git apply --ignore-whitespace "${CMAKE_CURRENT_LIST_DIR}/../../patches/ZLIB.patch") FetchContent_MakeAvailable(zlib) list(POP_BACK CMAKE_MESSAGE_INDENT) @@ -81,12 +82,13 @@ if(BUILD_absl) set(ABSL_PROPAGATE_CXX_STD ON) set(ABSL_BUILD_TESTING OFF) FetchContent_Declare( - abseil-cpp + absl GIT_REPOSITORY "https://github.com/abseil/abseil-cpp.git" - GIT_TAG "20240116.1" - PATCH_COMMAND git apply --ignore-whitespace "${CMAKE_CURRENT_LIST_DIR}/../../patches/abseil-cpp-20240116.1.patch" + GIT_TAG "20240722.0" + GIT_SHALLOW TRUE + PATCH_COMMAND git apply --ignore-whitespace "${CMAKE_CURRENT_LIST_DIR}/../../patches/abseil-cpp-20240722.0.patch" ) - FetchContent_MakeAvailable(abseil-cpp) + FetchContent_MakeAvailable(absl) list(POP_BACK CMAKE_MESSAGE_INDENT) message(CHECK_PASS "fetched") endif() @@ -101,12 +103,15 @@ if(BUILD_Protobuf) set(protobuf_BUILD_SHARED_LIBS OFF) set(protobuf_BUILD_EXPORT OFF) set(protobuf_MSVC_STATIC_RUNTIME OFF) + #set(protobuf_BUILD_LIBUPB ON) FetchContent_Declare( Protobuf GIT_REPOSITORY "https://github.com/protocolbuffers/protobuf.git" - GIT_TAG "v25.3" + GIT_TAG "v26.1" + PATCH_COMMAND git apply --ignore-whitespace "${CMAKE_CURRENT_LIST_DIR}/../../patches/protobuf-v26.1.patch" + GIT_SHALLOW TRUE GIT_SUBMODULES "" - PATCH_COMMAND git apply --ignore-whitespace "${CMAKE_CURRENT_LIST_DIR}/../../patches/protobuf-v25.3.patch") + ) FetchContent_MakeAvailable(Protobuf) list(POP_BACK CMAKE_MESSAGE_INDENT) message(CHECK_PASS "fetched") @@ -122,8 +127,9 @@ if(BUILD_re2) FetchContent_Declare( re2 GIT_REPOSITORY "https://github.com/google/re2.git" - GIT_TAG "2024-02-01" - #PATCH_COMMAND git apply --ignore-whitespace "${CMAKE_CURRENT_LIST_DIR}/../../patches/re2-2024-02-01.patch" + GIT_TAG "2024-04-01" + GIT_SHALLOW TRUE + #PATCH_COMMAND git apply --ignore-whitespace "${CMAKE_CURRENT_LIST_DIR}/../../patches/re2-2024-04-01.patch" ) FetchContent_MakeAvailable(re2) list(POP_BACK CMAKE_MESSAGE_INDENT) @@ -144,6 +150,7 @@ if(BUILD_Eigen3) eigen3 GIT_REPOSITORY "https://gitlab.com/libeigen/eigen.git" GIT_TAG "3.4.0" + GIT_SHALLOW TRUE PATCH_COMMAND git apply --ignore-whitespace "${CMAKE_CURRENT_LIST_DIR}/../../patches/eigen3-3.4.0.patch" ) FetchContent_MakeAvailable(eigen3) @@ -162,12 +169,14 @@ endif() if(BUILD_PYTHON AND BUILD_pybind11) message(CHECK_START "Fetching pybind11") list(APPEND CMAKE_MESSAGE_INDENT " ") + set(PYBIND11_FINDPYTHON ON) set(PYBIND11_INSTALL ON) set(PYBIND11_TEST OFF) FetchContent_Declare( pybind11 GIT_REPOSITORY "https://github.com/pybind/pybind11.git" - GIT_TAG "v2.11.1" + GIT_TAG "v2.13.1" + GIT_SHALLOW TRUE PATCH_COMMAND git apply --ignore-whitespace "${CMAKE_CURRENT_LIST_DIR}/../../patches/pybind11.patch" ) FetchContent_MakeAvailable(pybind11) @@ -181,7 +190,8 @@ if(BUILD_PYTHON AND BUILD_pybind11_abseil) FetchContent_Declare( pybind11_abseil GIT_REPOSITORY "https://github.com/pybind/pybind11_abseil.git" # 2024/01/11 - GIT_TAG "52f27398876a3177049977249e004770bd869e61" + GIT_TAG "v202402.0" + GIT_SHALLOW TRUE PATCH_COMMAND git apply --ignore-whitespace "${CMAKE_CURRENT_LIST_DIR}/../../patches/pybind11_abseil.patch" ) FetchContent_MakeAvailable(pybind11_abseil) @@ -195,8 +205,9 @@ if(BUILD_PYTHON AND BUILD_pybind11_protobuf) FetchContent_Declare( pybind11_protobuf GIT_REPOSITORY "https://github.com/pybind/pybind11_protobuf.git" - GIT_TAG "3b11990a99dea5101799e61d98a82c4737d240cc" # 2024/01/04 + GIT_TAG "84653a591aea5df482dc2bde42c19efafbd53a57" # 2024/06/28 PATCH_COMMAND 
git apply --ignore-whitespace "${CMAKE_CURRENT_LIST_DIR}/../../patches/pybind11_protobuf.patch" + #GIT_SHALLOW TRUE ) FetchContent_MakeAvailable(pybind11_protobuf) list(POP_BACK CMAKE_MESSAGE_INDENT) @@ -219,6 +230,7 @@ if(BUILD_GLPK) glpk GIT_REPOSITORY "https://github.com/Mizux/GLPK.git" GIT_TAG "5.0" + GIT_SHALLOW TRUE ) FetchContent_MakeAvailable(glpk) list(POP_BACK CMAKE_MESSAGE_INDENT) @@ -232,10 +244,13 @@ if(BUILD_HIGHS) message(CHECK_START "Fetching HiGHS") list(APPEND CMAKE_MESSAGE_INDENT " ") set(CI OFF) # disable CI tests + set(BUILD_EXAMPLES OFF) FetchContent_Declare( highs GIT_REPOSITORY "https://github.com/ERGO-Code/HiGHS.git" - GIT_TAG "v1.6.0" + GIT_TAG "v1.7.2" + GIT_SHALLOW TRUE + #PATCH_COMMAND git apply --ignore-whitespace "${CMAKE_CURRENT_LIST_DIR}/../../patches/highs.patch" ) FetchContent_MakeAvailable(highs) list(POP_BACK CMAKE_MESSAGE_INDENT) @@ -263,8 +278,9 @@ if(BUILD_SCIP) FetchContent_Declare( scip GIT_REPOSITORY "https://github.com/scipopt/scip.git" - GIT_TAG "v810" - PATCH_COMMAND git apply --ignore-whitespace "${CMAKE_CURRENT_LIST_DIR}/../../patches/scip-v810.patch" + GIT_TAG "v900" + GIT_SHALLOW TRUE + PATCH_COMMAND git apply --ignore-whitespace "${CMAKE_CURRENT_LIST_DIR}/../../patches/scip-v900.patch" ) FetchContent_MakeAvailable(scip) set(LPI_GLOP_SRC ${scip_SOURCE_DIR}/src/lpi/lpi_glop.cpp PARENT_SCOPE) @@ -285,6 +301,7 @@ if(BUILD_CoinUtils) CoinUtils GIT_REPOSITORY "https://github.com/Mizux/CoinUtils.git" GIT_TAG "cmake/2.11.6" + GIT_SHALLOW TRUE PATCH_COMMAND git apply --ignore-whitespace "${CMAKE_CURRENT_LIST_DIR}/../../patches/coinutils-2.11.patch") FetchContent_MakeAvailable(CoinUtils) @@ -302,6 +319,7 @@ if(BUILD_Osi) Osi GIT_REPOSITORY "https://github.com/Mizux/Osi.git" GIT_TAG "cmake/0.108.7" + GIT_SHALLOW TRUE PATCH_COMMAND git apply --ignore-whitespace "${CMAKE_CURRENT_LIST_DIR}/../../patches/osi-0.108.patch") FetchContent_MakeAvailable(Osi) @@ -319,6 +337,7 @@ if(BUILD_Clp) Clp GIT_REPOSITORY "https://github.com/Mizux/Clp.git" GIT_TAG "cmake/1.17.7" + GIT_SHALLOW TRUE PATCH_COMMAND git apply --ignore-whitespace "${CMAKE_CURRENT_LIST_DIR}/../../patches/clp-1.17.4.patch") FetchContent_MakeAvailable(Clp) @@ -336,6 +355,7 @@ if(BUILD_Cgl) Cgl GIT_REPOSITORY "https://github.com/Mizux/Cgl.git" GIT_TAG "cmake/0.60.5" + GIT_SHALLOW TRUE PATCH_COMMAND git apply --ignore-whitespace "${CMAKE_CURRENT_LIST_DIR}/../../patches/cgl-0.60.patch") FetchContent_MakeAvailable(Cgl) @@ -353,6 +373,7 @@ if(BUILD_Cbc) Cbc GIT_REPOSITORY "https://github.com/Mizux/Cbc.git" GIT_TAG "cmake/2.10.7" + GIT_SHALLOW TRUE PATCH_COMMAND git apply --ignore-whitespace "${CMAKE_CURRENT_LIST_DIR}/../../patches/cbc-2.10.patch") FetchContent_MakeAvailable(Cbc) @@ -375,7 +396,10 @@ if(BUILD_googletest) FetchContent_Declare( googletest GIT_REPOSITORY https://github.com/google/googletest.git - GIT_TAG v1.14.0) + GIT_TAG v1.15.2 + GIT_SHALLOW TRUE + #PATCH_COMMAND git apply --ignore-whitespace "" + ) set(gtest_force_shared_crt ON CACHE BOOL "" FORCE) set(INSTALL_GTEST OFF) set(GTEST_HAS_ABSL ON) @@ -383,3 +407,21 @@ if(BUILD_googletest) list(POP_BACK CMAKE_MESSAGE_INDENT) message(CHECK_PASS "fetched") endif() + +if(BUILD_benchmark) + message(CHECK_START "Fetching benchmark") + list(APPEND CMAKE_MESSAGE_INDENT " ") + FetchContent_Declare( + benchmark + GIT_REPOSITORY https://github.com/google/benchmark.git + GIT_TAG v1.8.4 + GIT_SHALLOW TRUE + #PATCH_COMMAND git apply --ignore-whitespace "" + ) + set(BENCHMARK_ENABLE_TESTING OFF) + set(BENCHMARK_ENABLE_WERROR OFF) + 
set(BENCHMARK_ENABLE_INSTALL OFF) + FetchContent_MakeAvailable(benchmark) + list(POP_BACK CMAKE_MESSAGE_INDENT) + message(CHECK_PASS "fetched") +endif() diff --git a/cmake/dependencies/SWIG.CMakeLists.txt.in b/cmake/dependencies/SWIG.CMakeLists.txt.in index 08502562937..89be36cb37e 100644 --- a/cmake/dependencies/SWIG.CMakeLists.txt.in +++ b/cmake/dependencies/SWIG.CMakeLists.txt.in @@ -11,7 +11,7 @@ ExternalProject_Add(SWIG_project SOURCE_DIR "@CMAKE_CURRENT_BINARY_DIR@/${PROJECT_NAME}/source" BUILD_IN_SOURCE 1 - URL "http://prdownloads.sourceforge.net/swig/swigwin-4.1.1.zip" + URL "http://prdownloads.sourceforge.net/swig/swigwin-4.2.1.zip" LOG_DOWNLOAD TRUE UPDATE_COMMAND "" diff --git a/cmake/docker/almalinux/Dockerfile b/cmake/docker/almalinux/Dockerfile new file mode 100644 index 00000000000..4a1268f642e --- /dev/null +++ b/cmake/docker/almalinux/Dockerfile @@ -0,0 +1,27 @@ +# Create a virtual environment with all tools installed +# ref: https://hub.docker.com/_/almalinux +FROM almalinux:latest AS base +# Install system build dependencies +ENV PATH=/usr/local/bin:$PATH +RUN dnf -y update \ +&& dnf -y install git wget openssl-devel cmake \ +&& dnf -y groupinstall "Development Tools" \ +&& dnf clean all \ +&& rm -rf /var/cache/dnf +CMD [ "/usr/bin/bash" ] + +# Install SWIG 4.2.1 +FROM base AS swig +RUN dnf -y update \ +&& dnf -y install pcre2-devel \ +&& dnf clean all \ +&& rm -rf /var/cache/dnf \ +&& wget -q "https://downloads.sourceforge.net/project/swig/swig/swig-4.2.1/swig-4.2.1.tar.gz" \ +&& tar xvf swig-4.2.1.tar.gz \ +&& rm swig-4.2.1.tar.gz \ +&& cd swig-4.2.1 \ +&& ./configure --prefix=/usr \ +&& make -j 4 \ +&& make install \ +&& cd .. \ +&& rm -rf swig-4.2.1 diff --git a/cmake/docker/centos/cpp.Dockerfile b/cmake/docker/almalinux/cpp.Dockerfile similarity index 93% rename from cmake/docker/centos/cpp.Dockerfile rename to cmake/docker/almalinux/cpp.Dockerfile index 36a302dea35..f8043394bf9 100644 --- a/cmake/docker/centos/cpp.Dockerfile +++ b/cmake/docker/almalinux/cpp.Dockerfile @@ -1,4 +1,5 @@ -FROM ortools/cmake:centos_base AS env +FROM ortools/cmake:almalinux_base AS env + RUN cmake -version FROM env AS devel diff --git a/cmake/docker/centos/dotnet.Dockerfile b/cmake/docker/almalinux/dotnet.Dockerfile similarity index 62% rename from cmake/docker/centos/dotnet.Dockerfile rename to cmake/docker/almalinux/dotnet.Dockerfile index 0764f086ab2..3979577eb2b 100644 --- a/cmake/docker/centos/dotnet.Dockerfile +++ b/cmake/docker/almalinux/dotnet.Dockerfile @@ -1,9 +1,11 @@ -FROM ortools/cmake:centos_swig AS env -# see: https://docs.microsoft.com/en-us/dotnet/core/install/linux-package-manager-centos8 -RUN dnf -y update \ -&& dnf -y install dotnet-sdk-6.0 \ -&& dnf clean all \ -&& rm -rf /var/cache/dnf +FROM ortools/cmake:almalinux_swig AS env + +# Install .NET SDK +# see: https://learn.microsoft.com/en-us/dotnet/core/install/linux-scripted-manual#scripted-install +RUN wget -q "https://dot.net/v1/dotnet-install.sh" \ +&& chmod a+x dotnet-install.sh \ +&& ./dotnet-install.sh -c 3.1 -i /usr/local/bin \ +&& ./dotnet-install.sh -c 6.0 -i /usr/local/bin # Trigger first run experience by running arbitrary cmd RUN dotnet --info @@ -11,6 +13,7 @@ RUN dotnet --info FROM env AS devel WORKDIR /home/project COPY . . 
+RUN sed -i 's/\(<SignAssembly>\).*\(<\/SignAssembly>\)/\1false\2/' ortools/dotnet/Google.OrTools*.csproj.in
 
 FROM devel AS build
 RUN cmake -version
diff --git a/cmake/docker/centos/java.Dockerfile b/cmake/docker/almalinux/java.Dockerfile
similarity index 94%
rename from cmake/docker/centos/java.Dockerfile
rename to cmake/docker/almalinux/java.Dockerfile
index c480333b017..73b0d9cde85 100644
--- a/cmake/docker/centos/java.Dockerfile
+++ b/cmake/docker/almalinux/java.Dockerfile
@@ -1,4 +1,5 @@
-FROM ortools/cmake:centos_swig AS env
+FROM ortools/cmake:almalinux_swig AS env
+
 RUN dnf -y update \
 && dnf -y install java-1.8.0-openjdk java-1.8.0-openjdk-devel maven \
 && dnf clean all \
diff --git a/cmake/docker/centos/python.Dockerfile b/cmake/docker/almalinux/python.Dockerfile
similarity index 81%
rename from cmake/docker/centos/python.Dockerfile
rename to cmake/docker/almalinux/python.Dockerfile
index 32ad5979a5d..5bbd43dbae9 100644
--- a/cmake/docker/centos/python.Dockerfile
+++ b/cmake/docker/almalinux/python.Dockerfile
@@ -1,10 +1,12 @@
-FROM ortools/cmake:centos_swig AS env
+FROM ortools/cmake:almalinux_swig AS env
+
 ENV PATH=/root/.local/bin:$PATH
 RUN dnf -y update \
-&& dnf -y install python3.11-devel python3.11-numpy python3.11-pip \
+&& dnf -y install python3-devel python3-pip python3-numpy \
 && dnf clean all \
 && rm -rf /var/cache/dnf
-RUN python3.11 -m pip install absl-py mypy mypy-protobuf pandas
+RUN python3 -m pip install \
+    absl-py mypy mypy-protobuf pandas
 
 FROM env AS devel
 WORKDIR /home/project
diff --git a/cmake/docker/alpine/cpp.Dockerfile b/cmake/docker/alpine/cpp.Dockerfile
index 44ace7b16f9..5eef81dd52b 100644
--- a/cmake/docker/alpine/cpp.Dockerfile
+++ b/cmake/docker/alpine/cpp.Dockerfile
@@ -1,4 +1,5 @@
 FROM ortools/cmake:alpine_base AS env
+
 RUN cmake -version
 
 FROM env AS devel
diff --git a/cmake/docker/alpine/java.Dockerfile b/cmake/docker/alpine/java.Dockerfile
index 11852b2fd89..d04b74ee155 100644
--- a/cmake/docker/alpine/java.Dockerfile
+++ b/cmake/docker/alpine/java.Dockerfile
@@ -1,4 +1,5 @@
 FROM ortools/cmake:alpine_swig AS env
+
 ENV JAVA_HOME=/usr/lib/jvm/java-1.8-openjdk
 RUN apk add --no-cache openjdk8 maven
 
diff --git a/cmake/docker/alpine/python.Dockerfile b/cmake/docker/alpine/python.Dockerfile
index 2fc34f7af3c..93d5e7edf98 100644
--- a/cmake/docker/alpine/python.Dockerfile
+++ b/cmake/docker/alpine/python.Dockerfile
@@ -1,4 +1,5 @@
 FROM ortools/cmake:alpine_swig AS env
+
 ENV PATH=/root/.local/bin:$PATH
 RUN apk add --no-cache python3-dev py3-pip py3-wheel \
     py3-numpy py3-pandas py3-matplotlib py3-scipy
diff --git a/cmake/docker/archlinux/cpp.Dockerfile b/cmake/docker/archlinux/cpp.Dockerfile
index 7d33ce536bd..785754457b1 100644
--- a/cmake/docker/archlinux/cpp.Dockerfile
+++ b/cmake/docker/archlinux/cpp.Dockerfile
@@ -1,4 +1,5 @@
 FROM ortools/cmake:archlinux_base AS env
+
 RUN cmake -version
 
 FROM env AS devel
diff --git a/cmake/docker/archlinux/dotnet.Dockerfile b/cmake/docker/archlinux/dotnet.Dockerfile
index 1e5e540cb51..669139907b6 100644
--- a/cmake/docker/archlinux/dotnet.Dockerfile
+++ b/cmake/docker/archlinux/dotnet.Dockerfile
@@ -1,4 +1,5 @@
 FROM ortools/cmake:archlinux_swig AS env
+
 RUN pacman -Syu --noconfirm dotnet-sdk
 # Trigger first run experience by running arbitrary cmd
 RUN dotnet --info
diff --git a/cmake/docker/archlinux/java.Dockerfile b/cmake/docker/archlinux/java.Dockerfile
index cb313a35964..8fb70b144d7 100644
--- a/cmake/docker/archlinux/java.Dockerfile
+++ b/cmake/docker/archlinux/java.Dockerfile
@@ -1,4 +1,5 @@
-FROM ortools/cmake:archlinux_swig
AS env + RUN pacman -Syu --noconfirm jdk-openjdk maven ENV JAVA_HOME=/usr/lib/jvm/default diff --git a/cmake/docker/archlinux/python.Dockerfile b/cmake/docker/archlinux/python.Dockerfile index 5daff178d60..ab6e85e0ec6 100644 --- a/cmake/docker/archlinux/python.Dockerfile +++ b/cmake/docker/archlinux/python.Dockerfile @@ -1,7 +1,8 @@ FROM ortools/cmake:archlinux_swig AS env + ENV PATH=/root/.local/bin:$PATH RUN pacman -Syu --noconfirm python python-pip \ - python-wheel python-virtualenv \ + python-wheel python-virtualenv python-setuptools \ python-numpy python-pandas RUN python -m pip install --break-system-package \ absl-py mypy mypy-protobuf diff --git a/cmake/docker/centos/Dockerfile b/cmake/docker/centos/Dockerfile deleted file mode 100644 index 68748f2f189..00000000000 --- a/cmake/docker/centos/Dockerfile +++ /dev/null @@ -1,43 +0,0 @@ -# Create a virtual environment with all tools installed -# ref: https://quay.io/repository/centos/centos -FROM quay.io/centos/centos:stream AS base -# Install system build dependencies -ENV PATH=/usr/local/bin:$PATH -RUN dnf -y update \ -&& dnf -y install git wget openssl-devel \ -&& dnf -y groupinstall "Development Tools" \ -&& dnf clean all \ -&& rm -rf /var/cache/dnf - -# Install system build dependencies -ENV PATH=/usr/local/bin:$PATH -RUN dnf -y update \ -&& dnf -y install gcc-toolset-11 \ -&& dnf clean all \ -&& rm -rf /var/cache/dnf - -RUN echo "source /opt/rh/gcc-toolset-11/enable" >> /etc/bashrc -SHELL ["/bin/bash", "--login", "-c"] - -# Install CMake 3.26.4 -RUN wget -q "https://cmake.org/files/v3.26/cmake-3.26.4-linux-x86_64.sh" \ -&& chmod a+x cmake-3.26.4-linux-x86_64.sh \ -&& ./cmake-3.26.4-linux-x86_64.sh --prefix=/usr/local/ --skip-license \ -&& rm cmake-3.26.4-linux-x86_64.sh -CMD [ "/usr/bin/bash" ] - -# Install SWIG 4.2.0 -FROM base AS swig -RUN dnf -y update \ -&& dnf -y install pcre2-devel \ -&& dnf clean all \ -&& rm -rf /var/cache/dnf \ -&& wget -q "https://downloads.sourceforge.net/project/swig/swig/swig-4.2.0/swig-4.2.0.tar.gz" \ -&& tar xvf swig-4.2.0.tar.gz \ -&& rm swig-4.2.0.tar.gz \ -&& cd swig-4.2.0 \ -&& ./configure --prefix=/usr \ -&& make -j 4 \ -&& make install \ -&& cd .. 
\ -&& rm -rf swig-4.2.0 diff --git a/cmake/docker/debian/cpp.Dockerfile b/cmake/docker/debian/cpp.Dockerfile index 73f35672c3d..8e9c1a78f72 100644 --- a/cmake/docker/debian/cpp.Dockerfile +++ b/cmake/docker/debian/cpp.Dockerfile @@ -1,4 +1,5 @@ FROM ortools/cmake:debian_base AS env + RUN cmake -version FROM env AS devel diff --git a/cmake/docker/debian/dotnet.Dockerfile b/cmake/docker/debian/dotnet.Dockerfile index 55a421dee1c..79b958264d5 100644 --- a/cmake/docker/debian/dotnet.Dockerfile +++ b/cmake/docker/debian/dotnet.Dockerfile @@ -1,4 +1,5 @@ FROM ortools/cmake:debian_swig AS env + # see: https://docs.microsoft.com/en-us/dotnet/core/install/linux-debian RUN apt-get update -qq \ && apt-get install -yq wget gpg apt-transport-https \ diff --git a/cmake/docker/debian/java.Dockerfile b/cmake/docker/debian/java.Dockerfile index 300ec5364b2..40d2268ada7 100644 --- a/cmake/docker/debian/java.Dockerfile +++ b/cmake/docker/debian/java.Dockerfile @@ -1,4 +1,5 @@ FROM ortools/cmake:debian_swig AS env + RUN apt-get update -qq \ && apt-get install -yq default-jdk maven \ && apt-get clean \ diff --git a/cmake/docker/debian/python.Dockerfile b/cmake/docker/debian/python.Dockerfile index 010703a184e..a8b75b25c8e 100644 --- a/cmake/docker/debian/python.Dockerfile +++ b/cmake/docker/debian/python.Dockerfile @@ -1,4 +1,5 @@ FROM ortools/cmake:debian_swig AS env + ENV PATH=/root/.local/bin:$PATH RUN apt-get update -qq \ && apt-get install -yq \ diff --git a/cmake/docker/fedora/cpp.Dockerfile b/cmake/docker/fedora/cpp.Dockerfile index 873c5da1d05..dcefd24c5d3 100644 --- a/cmake/docker/fedora/cpp.Dockerfile +++ b/cmake/docker/fedora/cpp.Dockerfile @@ -1,4 +1,5 @@ FROM ortools/cmake:fedora_base AS env + RUN cmake -version FROM env AS devel diff --git a/cmake/docker/fedora/java.Dockerfile b/cmake/docker/fedora/java.Dockerfile index ae8cf08f66f..62ef7b34611 100644 --- a/cmake/docker/fedora/java.Dockerfile +++ b/cmake/docker/fedora/java.Dockerfile @@ -1,4 +1,5 @@ FROM ortools/cmake:fedora_swig AS env + RUN dnf -y update \ && dnf -y install java-11-openjdk java-11-openjdk-devel maven \ && dnf clean all diff --git a/cmake/docker/fedora/python.Dockerfile b/cmake/docker/fedora/python.Dockerfile index 59d33f1c7f0..9c0bc135f83 100644 --- a/cmake/docker/fedora/python.Dockerfile +++ b/cmake/docker/fedora/python.Dockerfile @@ -1,4 +1,5 @@ FROM ortools/cmake:fedora_swig AS env + ENV PATH=/root/.local/bin:$PATH RUN dnf -y update \ && dnf -y install python3 python3-devel python3-pip \ diff --git a/cmake/docker/glop/Dockerfile b/cmake/docker/glop/Dockerfile index 5de7ade82dd..bbd1e2d31d6 100644 --- a/cmake/docker/glop/Dockerfile +++ b/cmake/docker/glop/Dockerfile @@ -1,17 +1,19 @@ # Create a virtual environment with all tools installed # ref: https://hub.docker.com/_/ubuntu FROM ubuntu:rolling AS env + # Install system build dependencies ENV PATH=/usr/local/bin:$PATH RUN apt-get update -qq \ && apt-get install -yq git wget libssl-dev build-essential \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* -# Install CMake 3.27.7 -RUN wget -q "https://cmake.org/files/v3.27/cmake-3.27.7-linux-x86_64.sh" \ -&& chmod a+x cmake-3.27.7-linux-x86_64.sh \ -&& ./cmake-3.27.7-linux-x86_64.sh --prefix=/usr/local/ --skip-license \ -&& rm cmake-3.27.7-linux-x86_64.sh + +# Install CMake 3.28.3 +RUN wget -q "https://cmake.org/files/v3.28/cmake-3.28.3-linux-x86_64.sh" \ +&& chmod a+x cmake-3.28.3-linux-x86_64.sh \ +&& ./cmake-3.28.3-linux-x86_64.sh --prefix=/usr/local/ --skip-license \ +&& rm cmake-3.28.3-linux-x86_64.sh CMD 
[ "/usr/bin/bash" ] FROM env AS devel diff --git a/cmake/docker/opensuse/cpp.Dockerfile b/cmake/docker/opensuse/cpp.Dockerfile index 7a31d94c776..20d89e1f756 100644 --- a/cmake/docker/opensuse/cpp.Dockerfile +++ b/cmake/docker/opensuse/cpp.Dockerfile @@ -1,4 +1,5 @@ FROM ortools/cmake:opensuse_base AS env + RUN cmake -version FROM env AS devel diff --git a/cmake/docker/opensuse/dotnet.Dockerfile b/cmake/docker/opensuse/dotnet.Dockerfile index a18803b92e4..8dcdf9ed4be 100644 --- a/cmake/docker/opensuse/dotnet.Dockerfile +++ b/cmake/docker/opensuse/dotnet.Dockerfile @@ -1,5 +1,4 @@ FROM ortools/cmake:opensuse_swig AS env -# see: https://docs.microsoft.com/en-us/dotnet/core/install/linux-opensuse # .NET install # see: https://docs.microsoft.com/en-us/dotnet/core/install/linux-opensuse diff --git a/cmake/docker/opensuse/java.Dockerfile b/cmake/docker/opensuse/java.Dockerfile index 718e2643f53..0d728bf72e8 100644 --- a/cmake/docker/opensuse/java.Dockerfile +++ b/cmake/docker/opensuse/java.Dockerfile @@ -1,4 +1,5 @@ FROM ortools/cmake:opensuse_swig AS env + # Install Java JDK and Maven RUN zypper refresh \ && zypper install -y java-17-openjdk-devel maven \ diff --git a/cmake/docker/opensuse/python.Dockerfile b/cmake/docker/opensuse/python.Dockerfile index 95c1fdaa445..308c08bcab8 100644 --- a/cmake/docker/opensuse/python.Dockerfile +++ b/cmake/docker/opensuse/python.Dockerfile @@ -1,4 +1,5 @@ FROM ortools/cmake:opensuse_swig AS env + ENV PATH=/root/.local/bin:$PATH RUN zypper refresh \ && zypper install -y python311 python311-devel \ diff --git a/cmake/docker/rockylinux/Dockerfile b/cmake/docker/rockylinux/Dockerfile new file mode 100644 index 00000000000..c80770fc520 --- /dev/null +++ b/cmake/docker/rockylinux/Dockerfile @@ -0,0 +1,27 @@ +# Create a virtual environment with all tools installed +# ref: https://hub.docker.com/_/rockylinux +FROM rockylinux:9 AS base +# Install system build dependencies +ENV PATH=/usr/local/bin:$PATH +RUN dnf -y update \ +&& dnf -y install git wget openssl-devel cmake \ +&& dnf -y groupinstall "Development Tools" \ +&& dnf clean all \ +&& rm -rf /var/cache/dnf +CMD [ "/usr/bin/bash" ] + +# Install SWIG 4.2.1 +FROM base AS swig +RUN dnf -y update \ +&& dnf -y install pcre2-devel \ +&& dnf clean all \ +&& rm -rf /var/cache/dnf \ +&& wget -q "https://downloads.sourceforge.net/project/swig/swig/swig-4.2.1/swig-4.2.1.tar.gz" \ +&& tar xvf swig-4.2.1.tar.gz \ +&& rm swig-4.2.1.tar.gz \ +&& cd swig-4.2.1 \ +&& ./configure --prefix=/usr \ +&& make -j 4 \ +&& make install \ +&& cd .. \ +&& rm -rf swig-4.2.1 diff --git a/cmake/docker/rockylinux/cpp.Dockerfile b/cmake/docker/rockylinux/cpp.Dockerfile new file mode 100644 index 00000000000..959ca4236d8 --- /dev/null +++ b/cmake/docker/rockylinux/cpp.Dockerfile @@ -0,0 +1,30 @@ +FROM ortools/cmake:rockylinux_base AS env + +RUN cmake -version + +FROM env AS devel +WORKDIR /home/project +COPY . . + +FROM devel AS build +RUN cmake -S. -Bbuild -DBUILD_DEPS=ON +RUN cmake --build build --target all -v +RUN cmake --build build --target install + +FROM build AS test +RUN CTEST_OUTPUT_ON_FAILURE=1 cmake --build build --target test + +FROM env AS install_env +COPY --from=build /usr/local /usr/local/ + +FROM install_env AS install_devel +WORKDIR /home/sample +COPY cmake/samples/cpp . + +FROM install_devel AS install_build +RUN cmake -S. 
-Bbuild
+RUN cmake --build build --target all -v
+RUN cmake --build build --target install
+
+FROM install_build AS install_test
+RUN cmake --build build --target test
diff --git a/cmake/docker/rockylinux/dotnet.Dockerfile b/cmake/docker/rockylinux/dotnet.Dockerfile
new file mode 100644
index 00000000000..c973a59f2fb
--- /dev/null
+++ b/cmake/docker/rockylinux/dotnet.Dockerfile
@@ -0,0 +1,38 @@
+FROM ortools/cmake:rockylinux_swig AS env
+
+# Install .NET SDK
+# see: https://learn.microsoft.com/en-us/dotnet/core/install/linux-scripted-manual#scripted-install
+RUN wget -q "https://dot.net/v1/dotnet-install.sh" \
+&& chmod a+x dotnet-install.sh \
+&& ./dotnet-install.sh -c 3.1 -i /usr/local/bin \
+&& ./dotnet-install.sh -c 6.0 -i /usr/local/bin
+# Trigger first run experience by running arbitrary cmd
+RUN dotnet --info
+
+# Add the library src to our build env
+FROM env AS devel
+WORKDIR /home/project
+COPY . .
+RUN sed -i 's/\(<SignAssembly>\).*\(<\/SignAssembly>\)/\1false\2/' ortools/dotnet/Google.OrTools*.csproj.in
+
+FROM devel AS build
+RUN cmake -version
+RUN cmake -S. -Bbuild -DBUILD_DOTNET=ON -DBUILD_CXX_SAMPLES=OFF -DBUILD_CXX_EXAMPLES=OFF
+RUN cmake --build build --target all -v
+RUN cmake --build build --target install -v
+
+FROM build AS test
+RUN CTEST_OUTPUT_ON_FAILURE=1 cmake --build build --target test -v
+
+FROM env AS install_env
+WORKDIR /home/sample
+COPY --from=build /home/project/build/dotnet/packages/*.nupkg ./
+
+FROM install_env AS install_devel
+COPY cmake/samples/dotnet .
+
+FROM install_devel AS install_build
+RUN dotnet build
+
+FROM install_build AS install_test
+RUN dotnet test
diff --git a/cmake/docker/rockylinux/java.Dockerfile b/cmake/docker/rockylinux/java.Dockerfile
new file mode 100644
index 00000000000..957033eafa6
--- /dev/null
+++ b/cmake/docker/rockylinux/java.Dockerfile
@@ -0,0 +1,32 @@
+FROM ortools/cmake:rockylinux_swig AS env
+
+RUN dnf -y update \
+&& dnf -y install java-1.8.0-openjdk java-1.8.0-openjdk-devel maven \
+&& dnf clean all \
+&& rm -rf /var/cache/dnf
+
+FROM env AS devel
+WORKDIR /home/project
+COPY . .
+
+FROM devel AS build
+RUN cmake -S. -Bbuild -DBUILD_JAVA=ON -DSKIP_GPG=ON \
+    -DBUILD_CXX_SAMPLES=OFF -DBUILD_CXX_EXAMPLES=OFF
+RUN cmake --build build --target all -v
+RUN cmake --build build --target install
+
+FROM build AS test
+RUN CTEST_OUTPUT_ON_FAILURE=1 cmake --build build --target test
+
+FROM env AS install_env
+COPY --from=build /usr/local /usr/local/
+
+FROM install_env AS install_devel
+WORKDIR /home/sample
+COPY cmake/samples/java .
+
+FROM install_devel AS install_build
+RUN mvn compile
+
+FROM install_build AS install_test
+RUN mvn test
diff --git a/cmake/docker/rockylinux/python.Dockerfile b/cmake/docker/rockylinux/python.Dockerfile
new file mode 100644
index 00000000000..ada23577b1b
--- /dev/null
+++ b/cmake/docker/rockylinux/python.Dockerfile
@@ -0,0 +1,35 @@
+FROM ortools/cmake:rockylinux_swig AS env
+
+ENV PATH=/root/.local/bin:$PATH
+RUN dnf -y update \
+&& dnf -y install python3-devel python3-pip python3-numpy \
+&& dnf clean all \
+&& rm -rf /var/cache/dnf
+RUN python3 -m pip install \
+    absl-py mypy mypy-protobuf pandas
+
+FROM env AS devel
+WORKDIR /home/project
+COPY . .
+
+FROM devel AS build
+RUN cmake -S.
-Bbuild -DBUILD_PYTHON=ON -DBUILD_CXX_SAMPLES=OFF -DBUILD_CXX_EXAMPLES=OFF +RUN cmake --build build --target all -v +RUN cmake --build build --target install + +FROM build AS test +RUN CTEST_OUTPUT_ON_FAILURE=1 cmake --build build --target test + +FROM env AS install_env +WORKDIR /home/sample +COPY --from=build /home/project/build/python/dist/*.whl . +RUN python3 -m pip install *.whl + +FROM install_env AS install_devel +COPY cmake/samples/python . + +FROM install_devel AS install_build +RUN python3 -m compileall . + +FROM install_build AS install_test +RUN python3 sample.py diff --git a/cmake/docker/system_deps/cpp.Dockerfile b/cmake/docker/system_deps/cpp.Dockerfile index 75140088593..f0845f9746f 100644 --- a/cmake/docker/system_deps/cpp.Dockerfile +++ b/cmake/docker/system_deps/cpp.Dockerfile @@ -1,4 +1,5 @@ FROM ortools/cmake:system_deps_base AS env + RUN cmake -version FROM env AS devel @@ -7,7 +8,10 @@ COPY . . FROM devel AS build RUN cmake -S. -Bbuild -DBUILD_DEPS=OFF \ - -DUSE_COINOR=ON -DUSE_GLPK=ON -DUSE_SCIP=ON + -DUSE_COINOR=ON \ + -DUSE_GLPK=ON \ + -DUSE_HIGHS=OFF \ + -DUSE_SCIP=ON RUN cmake --build build --target all -v RUN cmake --build build --target install diff --git a/cmake/docker/system_deps/dotnet.Dockerfile b/cmake/docker/system_deps/dotnet.Dockerfile index 4eeb00e240f..28f8e0a39c5 100644 --- a/cmake/docker/system_deps/dotnet.Dockerfile +++ b/cmake/docker/system_deps/dotnet.Dockerfile @@ -1,4 +1,5 @@ FROM ortools/cmake:system_deps_swig AS env + RUN pacman -Syu --noconfirm dotnet-sdk # Trigger first run experience by running arbitrary cmd RUN dotnet --info @@ -9,7 +10,10 @@ COPY . . FROM devel AS build RUN cmake -S. -Bbuild -DBUILD_DEPS=OFF \ - -DUSE_COINOR=ON -DUSE_GLPK=ON -DUSE_SCIP=ON \ + -DUSE_COINOR=ON \ + -DUSE_GLPK=ON \ + -DUSE_HIGHS=OFF \ + -DUSE_SCIP=ON \ -DBUILD_DOTNET=ON \ -DBUILD_CXX_SAMPLES=OFF -DBUILD_CXX_EXAMPLES=OFF RUN cmake --build build --target all -v diff --git a/cmake/docker/system_deps/java.Dockerfile b/cmake/docker/system_deps/java.Dockerfile index 03c1de75124..ff39de37019 100644 --- a/cmake/docker/system_deps/java.Dockerfile +++ b/cmake/docker/system_deps/java.Dockerfile @@ -1,4 +1,5 @@ FROM ortools/cmake:system_deps_swig AS env + RUN pacman -Syu --noconfirm jdk-openjdk maven ENV JAVA_HOME=/usr/lib/jvm/default @@ -8,7 +9,10 @@ COPY . . FROM devel AS build RUN cmake -S. -Bbuild -DBUILD_DEPS=OFF \ - -DUSE_COINOR=ON -DUSE_GLPK=ON -DUSE_SCIP=ON \ + -DUSE_COINOR=ON \ + -DUSE_GLPK=ON \ + -DUSE_HIGHS=OFF \ + -DUSE_SCIP=ON \ -DBUILD_JAVA=ON -DSKIP_GPG=ON \ -DBUILD_CXX_SAMPLES=OFF -DBUILD_CXX_EXAMPLES=OFF RUN cmake --build build --target all -v diff --git a/cmake/docker/system_deps/python.Dockerfile b/cmake/docker/system_deps/python.Dockerfile index ee8f4d59045..a09157885aa 100644 --- a/cmake/docker/system_deps/python.Dockerfile +++ b/cmake/docker/system_deps/python.Dockerfile @@ -1,4 +1,5 @@ FROM ortools/cmake:system_deps_swig AS env + ENV PATH=/root/.local/bin:$PATH RUN pacman -Syu --noconfirm pybind11 RUN pacman -Syu --noconfirm python \ @@ -14,8 +15,12 @@ COPY . . FROM devel AS build # Archlinux do not provide pybind11 protobuf package RUN cmake -S. 
-Bbuild -DBUILD_DEPS=OFF \ + -DBUILD_pybind11_abseil=ON \ -DBUILD_pybind11_protobuf=ON \ - -DUSE_COINOR=ON -DUSE_GLPK=ON -DUSE_SCIP=ON \ + -DUSE_COINOR=ON \ + -DUSE_GLPK=ON \ + -DUSE_HIGHS=OFF \ + -DUSE_SCIP=ON \ -DBUILD_PYTHON=ON \ -DBUILD_CXX_SAMPLES=OFF -DBUILD_CXX_EXAMPLES=OFF RUN cmake --build build --target all -v diff --git a/cmake/docker/toolchain/Dockerfile b/cmake/docker/toolchain/Dockerfile index 6ad1382bcb0..7e16dceb4e6 100644 --- a/cmake/docker/toolchain/Dockerfile +++ b/cmake/docker/toolchain/Dockerfile @@ -1,21 +1,22 @@ # Create a virtual environment with all tools installed # ref: https://hub.docker.com/_/ubuntu FROM ubuntu:latest AS env + # Install system build dependencies ENV PATH=/usr/local/bin:$PATH RUN apt-get update -qq \ && DEBIAN_FRONTEND=noninteractive apt-get install -yq git wget libssl-dev build-essential \ - ninja-build python3 pkgconf libglib2.0-dev \ + ninja-build python3 python3-venv pkgconf libglib2.0-dev \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* ENTRYPOINT ["/usr/bin/bash", "-c"] CMD ["/usr/bin/bash"] -# Install CMake 3.25.2 -RUN wget "https://cmake.org/files/v3.25/cmake-3.25.2-linux-x86_64.sh" \ -&& chmod a+x cmake-3.25.2-linux-x86_64.sh \ -&& ./cmake-3.25.2-linux-x86_64.sh --prefix=/usr/local/ --skip-license \ -&& rm cmake-3.25.2-linux-x86_64.sh +# Install CMake 3.28.3 +RUN wget "https://cmake.org/files/v3.28/cmake-3.28.3-linux-x86_64.sh" \ +&& chmod a+x cmake-3.28.3-linux-x86_64.sh \ +&& ./cmake-3.28.3-linux-x86_64.sh --prefix=/usr/local/ --skip-license \ +&& rm cmake-3.28.3-linux-x86_64.sh FROM env AS devel WORKDIR /home/project @@ -23,9 +24,12 @@ COPY . . ENV PROJECT=or-tools ARG TARGET -ENV TARGET ${TARGET:-unknown} +ENV TARGET=${TARGET:-unknown} + +FROM devel AS toolchain +RUN ./tools/cross_compile.sh toolchain -FROM devel AS build +FROM toolchain AS build RUN cmake --version RUN ./tools/cross_compile.sh build diff --git a/cmake/docker/ubuntu/Dockerfile b/cmake/docker/ubuntu/Dockerfile index c584391799d..4eac7c6b096 100644 --- a/cmake/docker/ubuntu/Dockerfile +++ b/cmake/docker/ubuntu/Dockerfile @@ -5,17 +5,10 @@ FROM ubuntu:rolling AS base ENV PATH=/usr/local/bin:$PATH RUN apt-get update -qq \ && DEBIAN_FRONTEND=noninteractive apt-get install -yq \ - git wget libssl-dev build-essential \ + git wget libssl-dev build-essential cmake \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* -# Install CMake 3.25.2 -RUN wget -q "https://cmake.org/files/v3.25/cmake-3.25.2-linux-x86_64.sh" \ -&& chmod a+x cmake-3.25.2-linux-x86_64.sh \ -&& ./cmake-3.25.2-linux-x86_64.sh --prefix=/usr/local/ --skip-license \ -&& rm cmake-3.25.2-linux-x86_64.sh -CMD [ "/usr/bin/bash" ] - FROM base AS swig RUN apt-get update -qq \ && apt-get install -yq swig \ diff --git a/cmake/docker/ubuntu/cpp.Dockerfile b/cmake/docker/ubuntu/cpp.Dockerfile index 4706ffabff3..31822b58569 100644 --- a/cmake/docker/ubuntu/cpp.Dockerfile +++ b/cmake/docker/ubuntu/cpp.Dockerfile @@ -1,4 +1,5 @@ FROM ortools/cmake:ubuntu_base AS env + RUN cmake -version FROM env AS devel diff --git a/cmake/docker/ubuntu/dotnet.Dockerfile b/cmake/docker/ubuntu/dotnet.Dockerfile index a7a2f321712..092513cdfae 100644 --- a/cmake/docker/ubuntu/dotnet.Dockerfile +++ b/cmake/docker/ubuntu/dotnet.Dockerfile @@ -3,7 +3,7 @@ FROM ortools/cmake:ubuntu_swig AS env # Install .NET SDK # see: https://docs.microsoft.com/en-us/dotnet/core/install/linux-ubuntu RUN apt-get update -qq \ -&& apt-get install -yq dotnet-sdk-6.0 \ +&& apt-get install -yq dotnet-sdk-8.0 \ && apt-get clean \ && 
rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* # Trigger first run experience by running arbitrary cmd diff --git a/cmake/docker/ubuntu/java.Dockerfile b/cmake/docker/ubuntu/java.Dockerfile index 35899666b00..994c1900ef4 100644 --- a/cmake/docker/ubuntu/java.Dockerfile +++ b/cmake/docker/ubuntu/java.Dockerfile @@ -1,4 +1,5 @@ FROM ortools/cmake:ubuntu_swig AS env + RUN apt-get update -qq \ && DEBIAN_FRONTEND=noninteractive apt-get install -yq default-jdk maven \ && apt-get clean \ diff --git a/cmake/docker/ubuntu/python.Dockerfile b/cmake/docker/ubuntu/python.Dockerfile index b6d1cb1a5a1..fa4571c5971 100644 --- a/cmake/docker/ubuntu/python.Dockerfile +++ b/cmake/docker/ubuntu/python.Dockerfile @@ -1,4 +1,5 @@ FROM ortools/cmake:ubuntu_swig AS env + ENV PATH=/root/.local/bin:$PATH RUN apt-get update -qq \ && DEBIAN_FRONTEND=noninteractive apt-get install -yq \ diff --git a/cmake/docker/web/Dockerfile b/cmake/docker/web/Dockerfile index b74b8e13aa3..1ebec3a45c2 100644 --- a/cmake/docker/web/Dockerfile +++ b/cmake/docker/web/Dockerfile @@ -1,6 +1,7 @@ # Create a virtual environment with all tools installed # ref: https://hub.docker.com/_/archlinux/ FROM archlinux:latest AS env + # Install system build dependencies ENV PATH=/usr/local/bin:$PATH RUN pacman -Syu --noconfirm git base-devel cmake diff --git a/cmake/dotnet.cmake b/cmake/dotnet.cmake index 3b967abc1fa..c97cc3c7f01 100644 --- a/cmake/dotnet.cmake +++ b/cmake/dotnet.cmake @@ -263,11 +263,27 @@ function(add_dotnet_test) WORKING_DIRECTORY ${DOTNET_TEST_DIR}) if(BUILD_TESTING) - add_test( - NAME dotnet_${COMPONENT_NAME}_${TEST_NAME} - COMMAND ${CMAKE_COMMAND} -E env --unset=TARGETNAME - ${DOTNET_EXECUTABLE} test --nologo -c Release ${TEST_NAME}.csproj - WORKING_DIRECTORY ${DOTNET_TEST_DIR}) + if(USE_DOTNET_6) + add_test( + NAME dotnet_${COMPONENT_NAME}_${TEST_NAME}_net60 + COMMAND ${CMAKE_COMMAND} -E env --unset=TARGETNAME + ${DOTNET_EXECUTABLE} test --nologo --framework net6.0 -c Release + WORKING_DIRECTORY ${DOTNET_TEST_DIR}) + endif() + if(USE_DOTNET_7) + add_test( + NAME dotnet_${COMPONENT_NAME}_${TEST_NAME}_net70 + COMMAND ${CMAKE_COMMAND} -E env --unset=TARGETNAME + ${DOTNET_EXECUTABLE} test --nologo --framework net7.0 -c Release + WORKING_DIRECTORY ${DOTNET_TEST_DIR}) + endif() + if(USE_DOTNET_8) + add_test( + NAME dotnet_${COMPONENT_NAME}_${TEST_NAME}_net80 + COMMAND ${CMAKE_COMMAND} -E env --unset=TARGETNAME + ${DOTNET_EXECUTABLE} test --nologo --framework net8.0 -c Release + WORKING_DIRECTORY ${DOTNET_TEST_DIR}) + endif() endif() message(STATUS "Configuring test ${TEST_FILE_NAME} ...DONE") endfunction() @@ -278,7 +294,14 @@ endfunction() list(APPEND CMAKE_SWIG_FLAGS "-I${PROJECT_SOURCE_DIR}") # Swig wrap all libraries -foreach(SUBPROJECT IN ITEMS algorithms graph init linear_solver constraint_solver sat util) +foreach(SUBPROJECT IN ITEMS + algorithms + graph + init + linear_solver + constraint_solver + sat + util) add_subdirectory(ortools/${SUBPROJECT}/csharp) target_link_libraries(google-ortools-native PRIVATE dotnet_${SUBPROJECT}) endforeach() @@ -294,7 +317,9 @@ configure_file( COPYONLY) set(DOTNET_README_DIR "${PROJECT_BINARY_DIR}/dotnet") -configure_file(${PROJECT_SOURCE_DIR}/ortools/dotnet/Directory.Build.props.in ${PROJECT_BINARY_DIR}/dotnet/Directory.Build.props) +configure_file( + ${PROJECT_SOURCE_DIR}/ortools/dotnet/Directory.Build.props.in + ${PROJECT_BINARY_DIR}/dotnet/Directory.Build.props) ############################ ## .Net SNK file ## @@ -629,7 +654,8 @@ if(NOT EXAMPLE_FILE_NAME) OUTPUT 
${DOTNET_EXAMPLE_DIR}/timestamp COMMAND ${CMAKE_COMMAND} -E env --unset=TARGETNAME ${DOTNET_EXECUTABLE} build --nologo -c Release ${EXAMPLE_NAME}.csproj - COMMAND ${CMAKE_COMMAND} -E env --unset=TARGETNAME ${DOTNET_EXECUTABLE} pack -c Release ${EXAMPLE_NAME}.csproj + COMMAND ${CMAKE_COMMAND} -E env --unset=TARGETNAME + ${DOTNET_EXECUTABLE} pack --nologo -c Release ${EXAMPLE_NAME}.csproj COMMAND ${CMAKE_COMMAND} -E touch ${DOTNET_EXAMPLE_DIR}/timestamp DEPENDS ${DOTNET_EXAMPLE_DIR}/${EXAMPLE_NAME}.csproj diff --git a/cmake/flatzinc.cmake b/cmake/flatzinc.cmake index b651e2e9e2f..55304f89354 100644 --- a/cmake/flatzinc.cmake +++ b/cmake/flatzinc.cmake @@ -27,7 +27,6 @@ if(MSVC) "/D_CRT_SECURE_NO_WARNINGS" "/D_CRT_SECURE_NO_DEPRECATE" "/MP" # Build with multiple processes - "/DNDEBUG" ) # MSVC warning suppressions list(APPEND FLATZINC_COMPILE_OPTIONS @@ -184,8 +183,8 @@ file(RELATIVE_PATH FZ_REL_INSTALL_BINARY ${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_DATADIR}/minizinc/solvers ${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_BINDIR}/fzn-cp-sat) configure_file( - ortools/flatzinc/cpsat.msc.in - ${PROJECT_BINARY_DIR}/cpsat.msc + ortools/flatzinc/cp-sat.msc.in + ${PROJECT_BINARY_DIR}/cp-sat.msc @ONLY) # Install rules @@ -199,7 +198,7 @@ install(TARGETS flatzinc fzn #fzn-parser_test ) install(DIRECTORY ortools/flatzinc/mznlib/ - DESTINATION ${CMAKE_INSTALL_DATAROOTDIR}/minizinc/cpsat + DESTINATION ${CMAKE_INSTALL_DATAROOTDIR}/minizinc/cp-sat FILES_MATCHING PATTERN "*.mzn") -install(FILES ${PROJECT_BINARY_DIR}/cpsat.msc +install(FILES ${PROJECT_BINARY_DIR}/cp-sat.msc DESTINATION ${CMAKE_INSTALL_DATAROOTDIR}/minizinc/solvers) diff --git a/cmake/glop.cmake b/cmake/glop.cmake index 00d3a8da434..3be3a62544f 100644 --- a/cmake/glop.cmake +++ b/cmake/glop.cmake @@ -180,7 +180,6 @@ if(MSVC) "/D_CRT_SECURE_NO_DEPRECATE" "/MP" # Build with multiple processes "/Zc:preprocessor" # Enable preprocessor conformance mode - "/DNDEBUG" ) # MSVC warning suppressions list(APPEND GLOP_COMPILE_OPTIONS @@ -262,8 +261,8 @@ target_link_libraries(glop PUBLIC absl::strings absl::statusor absl::str_format + absl::random_random protobuf::libprotobuf - ${RE2_DEPS} ) if(WIN32) #target_link_libraries(glop PUBLIC psapi.lib ws2_32.lib) diff --git a/cmake/host.CMakeLists.txt b/cmake/host.CMakeLists.txt index 965e1601e82..b58971db9da 100644 --- a/cmake/host.CMakeLists.txt +++ b/cmake/host.CMakeLists.txt @@ -105,8 +105,8 @@ set(ABSL_PROPAGATE_CXX_STD ON) FetchContent_Declare( absl GIT_REPOSITORY "https://github.com/abseil/abseil-cpp.git" - GIT_TAG "20240116.1" - PATCH_COMMAND git apply "${CMAKE_CURRENT_LIST_DIR}/@PATCHES_PATH@/abseil-cpp-20240116.1.patch") + GIT_TAG "20240722.0" + PATCH_COMMAND git apply "${CMAKE_CURRENT_LIST_DIR}/@PATCHES_PATH@/abseil-cpp-20240722.0.patch") FetchContent_MakeAvailable(absl) list(POP_BACK CMAKE_MESSAGE_INDENT) message(CHECK_PASS "fetched") @@ -121,9 +121,9 @@ set(protobuf_WITH_ZLIB OFF) FetchContent_Declare( protobuf GIT_REPOSITORY "https://github.com/protocolbuffers/protobuf.git" - GIT_TAG "v25.3" + GIT_TAG "v26.1" GIT_SUBMODULES "" - PATCH_COMMAND git apply "${CMAKE_CURRENT_LIST_DIR}/@PATCHES_PATH@/protobuf-v25.3.patch") + PATCH_COMMAND git apply "${CMAKE_CURRENT_LIST_DIR}/@PATCHES_PATH@/protobuf-v26.1.patch") FetchContent_MakeAvailable(protobuf) list(POP_BACK CMAKE_MESSAGE_INDENT) message(CHECK_PASS "fetched") diff --git a/cmake/java.cmake b/cmake/java.cmake index dafc950ef4f..c123541ba86 100644 --- a/cmake/java.cmake +++ b/cmake/java.cmake @@ -250,7 +250,14 @@ endfunction() list(APPEND CMAKE_SWIG_FLAGS 
"-I${PROJECT_SOURCE_DIR}") # Swig wrap all libraries -foreach(SUBPROJECT IN ITEMS algorithms graph init linear_solver constraint_solver sat util) +foreach(SUBPROJECT IN ITEMS + algorithms + graph + init + linear_solver + constraint_solver + sat + util) add_subdirectory(ortools/${SUBPROJECT}/java) target_link_libraries(jni${JAVA_ARTIFACT} PRIVATE jni${SUBPROJECT}) endforeach() diff --git a/cmake/ortoolsConfig.cmake.in b/cmake/ortoolsConfig.cmake.in index a893537ebb1..9372ce9ab90 100644 --- a/cmake/ortoolsConfig.cmake.in +++ b/cmake/ortoolsConfig.cmake.in @@ -54,8 +54,8 @@ if(@USE_GLPK@) endif() if(@USE_HIGHS@) - if(NOT TARGET HIGHS::HIGHS) - find_dependency(HIGHS REQUIRED ${CONFIG_FLAG}) + if(NOT TARGET highs::highs) + find_dependency(highs REQUIRED ${CONFIG_FLAG}) endif() endif() diff --git a/cmake/python.cmake b/cmake/python.cmake index 2099134b440..b2e5731427e 100644 --- a/cmake/python.cmake +++ b/cmake/python.cmake @@ -31,7 +31,7 @@ if(${SWIG_VERSION} VERSION_GREATER_EQUAL 4) list(APPEND CMAKE_SWIG_FLAGS "-doxygen") endif() -if(UNIX AND NOT APPLE) +if(UNIX AND NOT APPLE AND NOT (CMAKE_SYSTEM_NAME STREQUAL "OpenBSD")) if (CMAKE_SIZEOF_VOID_P EQUAL 8) list(APPEND CMAKE_SWIG_FLAGS "-DSWIGWORDSIZE64") else() @@ -282,9 +282,9 @@ foreach(SUBPROJECT IN ITEMS init algorithms graph - constraint_solver linear_solver ${PDLP_DIR} + constraint_solver sat scheduling util) @@ -358,12 +358,13 @@ if(BUILD_MATH_OPT) file(COPY ortools/math_opt/python/callback.py ortools/math_opt/python/compute_infeasible_subsystem_result.py + ortools/math_opt/python/errors.py ortools/math_opt/python/expressions.py ortools/math_opt/python/hash_model_storage.py ortools/math_opt/python/mathopt.py ortools/math_opt/python/message_callback.py - ortools/math_opt/python/model_parameters.py ortools/math_opt/python/model.py + ortools/math_opt/python/model_parameters.py ortools/math_opt/python/model_storage.py ortools/math_opt/python/normalize.py ortools/math_opt/python/parameters.py @@ -448,6 +449,8 @@ add_custom_command( $ ${PYTHON_PROJECT}/init/python COMMAND ${CMAKE_COMMAND} -E copy $ ${PYTHON_PROJECT}/algorithms/python + COMMAND ${CMAKE_COMMAND} -E copy + $ ${PYTHON_PROJECT}/algorithms/python COMMAND ${CMAKE_COMMAND} -E copy $ ${PYTHON_PROJECT}/graph/python COMMAND ${CMAKE_COMMAND} -E copy @@ -479,6 +482,7 @@ add_custom_command( DEPENDS init_pybind11 knapsack_solver_pybind11 + set_cover_pybind11 linear_sum_assignment_pybind11 max_flow_pybind11 min_cost_flow_pybind11 @@ -514,6 +518,7 @@ add_custom_command( COMMAND ${CMAKE_COMMAND} -E remove -f stub_timestamp COMMAND ${stubgen_EXECUTABLE} -p ortools.init.python.init --output . COMMAND ${stubgen_EXECUTABLE} -p ortools.algorithms.python.knapsack_solver --output . + COMMAND ${stubgen_EXECUTABLE} -p ortools.algorithms.python.set_cover --output . COMMAND ${stubgen_EXECUTABLE} -p ortools.graph.python.linear_sum_assignment --output . COMMAND ${stubgen_EXECUTABLE} -p ortools.graph.python.max_flow --output . COMMAND ${stubgen_EXECUTABLE} -p ortools.graph.python.min_cost_flow --output . 
diff --git a/cmake/deps.cmake b/cmake/system_deps.cmake similarity index 55% rename from cmake/deps.cmake rename to cmake/system_deps.cmake index 9b65581cec5..d9ae923c150 100644 --- a/cmake/deps.cmake +++ b/cmake/system_deps.cmake @@ -25,73 +25,23 @@ set(CMAKE_FIND_PACKAGE_PREFER_CONFIG TRUE) if(NOT BUILD_ZLIB) find_package(ZLIB REQUIRED) endif() -if(NOT TARGET ZLIB::ZLIB) - message(FATAL_ERROR "Target ZLIB::ZLIB not available.") -endif() if(NOT BUILD_absl) find_package(absl REQUIRED) endif() -set(ABSL_DEPS - absl::base - absl::core_headers - absl::absl_check - absl::absl_log - absl::check - absl::die_if_null - absl::flags - absl::flags_commandlineflag - absl::flags_marshalling - absl::flags_parse - absl::flags_reflection - absl::flags_usage - absl::log - absl::log_flags - absl::log_globals - absl::log_initialize - absl::log_internal_message - absl::cord - absl::random_random - absl::raw_hash_set - absl::hash - absl::leak_check - absl::memory - absl::meta - absl::stacktrace - absl::status - absl::statusor - absl::str_format - absl::strings - absl::synchronization - absl::time - absl::any - ) if(NOT BUILD_Protobuf) find_package(Protobuf REQUIRED) endif() -if(NOT TARGET protobuf::libprotobuf) - message(FATAL_ERROR "Target protobuf::libprotobuf not available.") -endif() if(NOT BUILD_Eigen3) find_package(Eigen3 REQUIRED) endif() -if(NOT TARGET Eigen3::Eigen) - message(FATAL_ERROR "Target Eigen3::Eigen not available.") -endif() if(BUILD_LP_PARSER OR BUILD_TESTING) if(NOT BUILD_re2) find_package(re2 REQUIRED) endif() - if(NOT TARGET re2::re2) - message(FATAL_ERROR "Target re2::re2 not available.") - endif() -endif() - -if(BUILD_LP_PARSER) - set(RE2_DEPS re2::re2) endif() if(USE_COINOR) @@ -114,34 +64,22 @@ if(USE_COINOR) if(NOT BUILD_Cbc) find_package(Cbc REQUIRED) endif() - - set(COINOR_DEPS Coin::CbcSolver Coin::OsiCbc Coin::ClpSolver Coin::OsiClp) endif() -if(USE_GLPK) - if(NOT BUILD_GLPK) - find_package(GLPK REQUIRED) - endif() +if(USE_GLPK AND NOT BUILD_GLPK) + find_package(GLPK REQUIRED) endif() -if(USE_HIGHS) - if(NOT BUILD_HIGHS) - find_package(HIGHS REQUIRED) - endif() +if(USE_HIGHS AND NOT BUILD_HIGHS) + find_package(HIGHS REQUIRED) endif() -if(USE_PDLP) - if(NOT BUILD_PDLP) - find_package(PDLP REQUIRED) - else() - set(PDLP_DEPS Eigen3::Eigen) - endif() +if(USE_PDLP AND NOT BUILD_PDLP) + find_package(PDLP REQUIRED) endif() -if(USE_SCIP) - if(NOT BUILD_SCIP) - find_package(SCIP REQUIRED) - endif() +if(USE_SCIP AND NOT BUILD_SCIP) + find_package(SCIP REQUIRED) endif() # Check optional Dependencies @@ -150,13 +88,8 @@ if(USE_CPLEX) endif() # CXX Test -if(BUILD_TESTING) - if(NOT BUILD_googletest) - find_package(GTest REQUIRED) - endif() - if(NOT TARGET GTest::gtest_main) - message(FATAL_ERROR "Target GTest::gtest_main not available.") - endif() +if(BUILD_TESTING AND NOT BUILD_googletest) + find_package(GTest REQUIRED) endif() # Check language Dependencies @@ -165,6 +98,10 @@ if(BUILD_PYTHON) find_package(pybind11 REQUIRED) endif() + if(NOT BUILD_pybind11_abseil) + find_package(pybind11_abseil REQUIRED) + endif() + if(NOT BUILD_pybind11_protobuf) find_package(pybind11_protobuf REQUIRED) endif() diff --git a/examples/contrib/costas_array.cs b/examples/contrib/costas_array.cs index 97bcfe9807d..15a3d85e829 100644 --- a/examples/contrib/costas_array.cs +++ b/examples/contrib/costas_array.cs @@ -62,7 +62,7 @@ private static void Solve(int n = 6) // Fix the values in the lower triangle in the // difference matrix to -n+1. 
This removes variants
-            // of the difference matrix for the the same Costas array.
+            // of the difference matrix for the same Costas array.
             for (int i = 0; i < n; i++)
             {
                 for (int j = 0; j <= i; j++)
diff --git a/examples/contrib/costas_array.py b/examples/contrib/costas_array.py
index a83970571cb..b6a2d38380e 100644
--- a/examples/contrib/costas_array.py
+++ b/examples/contrib/costas_array.py
@@ -91,7 +91,7 @@ def main(n=6):
   # Fix the values in the lower triangle in the
   # difference matrix to -n+1. This removes variants
-  # of the difference matrix for the the same Costas array.
+  # of the difference matrix for the same Costas array.
   for i in range(n):
     for j in range(i + 1):
       solver.Add(differences[i, j] == -n + 1)
diff --git a/examples/contrib/debruijn_binary.py b/examples/contrib/debruijn_binary.py
index 7fdb00d1909..c90fc3d0c45 100644
--- a/examples/contrib/debruijn_binary.py
+++ b/examples/contrib/debruijn_binary.py
@@ -21,7 +21,7 @@
   base**n.
 
-  Compare with the the web based programs:
+  Compare with the web based programs:
   http://www.hakank.org/comb/debruijn.cgi
   http://www.hakank.org/comb/debruijn_arb.cgi
diff --git a/examples/contrib/kenken2.cs b/examples/contrib/kenken2.cs
index bd1bfe18d82..9420eb6772b 100644
--- a/examples/contrib/kenken2.cs
+++ b/examples/contrib/kenken2.cs
@@ -48,7 +48,7 @@ public static void calc(Solver solver, int[] cc, IntVar[,] x, int res)
     else
     {
       // For length > 2 then res is either the sum
-      // the the product of the segment
+      // the product of the segment
 
       // sum the numbers
       int len = cc.Length / 2;
diff --git a/examples/contrib/lectures.cs b/examples/contrib/lectures.cs
index f6eef390b84..5056a89d2cc 100644
--- a/examples/contrib/lectures.cs
+++ b/examples/contrib/lectures.cs
@@ -27,7 +27,7 @@ public class Lectures
  * Biggs: Discrete Mathematics (2nd ed), page 187.
  * """
  * Suppose we wish to schedule six one-hour lectures, v1, v2, v3, v4, v5, v6.
- * Among the the potential audience there are people who wish to hear both
+ * Among the potential audience there are people who wish to hear both
 *
 * - v1 and v2
 * - v1 and v4
diff --git a/examples/contrib/lectures.py b/examples/contrib/lectures.py
index ee53ffb80bf..cfa75d7bcea 100644
--- a/examples/contrib/lectures.py
+++ b/examples/contrib/lectures.py
@@ -18,7 +18,7 @@
   Biggs: Discrete Mathematics (2nd ed), page 187.
   '''
   Suppose we wish to schedule six one-hour lectures, v1, v2, v3, v4, v5, v6.
-  Among the the potential audience there are people who wish to hear both
+  Among the potential audience there are people who wish to hear both
 
   - v1 and v2
   - v1 and v4
diff --git a/examples/contrib/pandigital_numbers.py b/examples/contrib/pandigital_numbers.py
index 5fb81f1a53a..d180cf3d019 100644
--- a/examples/contrib/pandigital_numbers.py
+++ b/examples/contrib/pandigital_numbers.py
@@ -47,7 +47,7 @@
   * Wikipedia http://en.wikipedia.org/wiki/Pandigital_number
 
-  Compare with the the following models:
+  Compare with the following models:
   * MiniZinc: http://www.hakank.org/minizinc/pandigital_numbers.mzn
   * Comet   : http://www.hakank.org/comet/pandigital_numbers.co
   * ECLiPSe : http://www.hakank.org/eclipse/pandigital_numbers.ecl
diff --git a/examples/contrib/set_covering_deployment.py b/examples/contrib/set_covering_deployment.py
index 5ee5796dc71..92440fc4bdc 100644
--- a/examples/contrib/set_covering_deployment.py
+++ b/examples/contrib/set_covering_deployment.py
@@ -26,7 +26,7 @@
   army placements to secure the Roman Empire.
  '''
 
-  Compare with the the following models:
+  Compare with the following models:
   * MiniZinc: http://www.hakank.org/minizinc/set_covering_deployment.mzn
   * Comet   : http://www.hakank.org/comet/set_covering_deployment.co
   * Gecode  : http://www.hakank.org/gecode/set_covering_deployment.cpp
diff --git a/examples/cpp/BUILD.bazel b/examples/cpp/BUILD.bazel
index ff2e0d1da6f..6cc14900f22 100644
--- a/examples/cpp/BUILD.bazel
+++ b/examples/cpp/BUILD.bazel
@@ -611,79 +611,6 @@ cc_binary(
     ],
 )
 
-cc_library(
-    name = "cvrptw_lib",
-    hdrs = ["cvrptw_lib.h"],
-    deps = [
-        "//ortools/base",
-        "//ortools/constraint_solver:routing",
-        "//ortools/util:random_engine",
-    ],
-)
-
-cc_binary(
-    name = "cvrptw",
-    srcs = ["cvrptw.cc"],
-    deps = [
-        ":cvrptw_lib",
-        "//ortools/base",
-        "//ortools/constraint_solver:routing",
-    ],
-)
-
-cc_binary(
-    name = "cvrp_disjoint_tw",
-    srcs = ["cvrp_disjoint_tw.cc"],
-    deps = [
-        ":cvrptw_lib",
-        "//ortools/base",
-        "//ortools/constraint_solver:routing",
-    ],
-)
-
-cc_binary(
-    name = "cvrptw_with_breaks",
-    srcs = ["cvrptw_with_breaks.cc"],
-    deps = [
-        ":cvrptw_lib",
-        "//ortools/base",
-        "//ortools/constraint_solver:routing",
-        "//ortools/constraint_solver:routing_enums_cc_proto",
-        "@com_google_absl//absl/strings",
-    ],
-)
-
-cc_binary(
-    name = "cvrptw_with_resources",
-    srcs = ["cvrptw_with_resources.cc"],
-    deps = [
-        ":cvrptw_lib",
-        "//ortools/base",
-        "//ortools/constraint_solver:routing",
-    ],
-)
-
-cc_binary(
-    name = "cvrptw_with_stop_times_and_resources",
-    srcs = ["cvrptw_with_stop_times_and_resources.cc"],
-    deps = [
-        ":cvrptw_lib",
-        "//ortools/base",
-        "//ortools/constraint_solver:routing",
-        "@com_google_absl//absl/strings",
-    ],
-)
-
-cc_binary(
-    name = "cvrptw_with_refueling",
-    srcs = ["cvrptw_with_refueling.cc"],
-    deps = [
-        ":cvrptw_lib",
-        "//ortools/base",
-        "//ortools/constraint_solver:routing",
-    ],
-)
-
 cc_binary(
     name = "pdptw",
     srcs = ["pdptw.cc"],
@@ -692,6 +619,7 @@ cc_binary(
         "//ortools/base:file",
         "//ortools/base:mathutil",
         "//ortools/constraint_solver:routing",
+        "//ortools/routing/parsers:lilim_parser",
         "@com_google_absl//absl/flags:flag",
         "@com_google_absl//absl/strings",
         "@com_google_absl//absl/strings:str_format",
@@ -753,6 +681,7 @@ cc_binary(
         "//ortools/base",
         "//ortools/linear_solver",
         "//ortools/linear_solver:linear_solver_cc_proto",
+        "//ortools/linear_solver:solve_mp_model",
     ],
 )
 
@@ -1075,12 +1004,19 @@ cc_binary(
     deps = [
         "//ortools/base",
         "//ortools/linear_solver:linear_solver_cc_proto",
+        "//ortools/pdlp:iteration_stats",
         "//ortools/pdlp:primal_dual_hybrid_gradient",
+        "//ortools/pdlp:quadratic_program",
         "//ortools/pdlp:quadratic_program_io",
         "//ortools/pdlp:solve_log_cc_proto",
         "//ortools/pdlp:solvers_cc_proto",
         "//ortools/port:proto_utils",
+        "//ortools/util:file_util",
+        "//ortools/util:fp_roundtrip_conv",
         "//ortools/util:sigint",
-        "@com_google_absl//absl/time",
+        "@com_google_absl//absl/flags:flag",
+        "@com_google_absl//absl/log:check",
+        "@com_google_absl//absl/log:flags",
+        "@com_google_absl//absl/strings",
     ],
 )
diff --git a/examples/cpp/CMakeLists.txt b/examples/cpp/CMakeLists.txt
index 08320df97ce..7fe5b30fcda 100644
--- a/examples/cpp/CMakeLists.txt
+++ b/examples/cpp/CMakeLists.txt
@@ -42,8 +42,6 @@ file(GLOB CXX_SRCS "*.cc")
 list(FILTER CXX_SRCS EXCLUDE REGEX ".*/binpacking_2d_sat.cc")
 list(FILTER CXX_SRCS EXCLUDE REGEX ".*/course_scheduling_run.cc") # missing proto
 list(FILTER CXX_SRCS EXCLUDE REGEX ".*/course_scheduling.cc") # missing proto
-list(FILTER CXX_SRCS EXCLUDE REGEX ".*/cvrptw_with_breaks.cc") # too long
-list(FILTER CXX_SRCS EXCLUDE REGEX ".*/cvrptw_with_refueling.cc") # too long
 list(FILTER CXX_SRCS EXCLUDE REGEX ".*/dimacs_assignment.cc") # crash
 list(FILTER CXX_SRCS EXCLUDE REGEX ".*/dobble_ls.cc") # Too long
 list(FILTER CXX_SRCS EXCLUDE REGEX ".*/frequency_assignment_problem.cc") # crash
diff --git a/examples/cpp/binpacking_2d_sat.cc b/examples/cpp/binpacking_2d_sat.cc
index 8a197d22914..7e96706e8d5 100644
--- a/examples/cpp/binpacking_2d_sat.cc
+++ b/examples/cpp/binpacking_2d_sat.cc
@@ -18,14 +18,18 @@
 #include 
 #include 
+#include 
 #include 
 #include 
+#include 
 #include 
 
+#include "absl/container/btree_map.h"
 #include "absl/container/btree_set.h"
 #include "absl/flags/flag.h"
 #include "absl/log/check.h"
 #include "absl/strings/str_cat.h"
+#include "absl/types/span.h"
 #include "google/protobuf/text_format.h"
 #include "ortools/base/init_google.h"
 #include "ortools/base/logging.h"
@@ -44,7 +48,7 @@ ABSL_FLAG(std::string, params, "", "Sat parameters in text proto format.");
 ABSL_FLAG(int, max_bins, 0,
           "Maximum number of bins. The 0 default value implies the code will "
           "use some heuristics to compute this number.");
-ABSL_FLAG(bool, symmetry_breaking, true, "Use symmetry breaking constraints");
+ABSL_FLAG(int, symmetry_breaking_level, 2, "Use symmetry breaking constraints");
 ABSL_FLAG(bool, use_global_cumulative, true,
           "Use a global cumulative relaxation");
@@ -171,7 +175,7 @@ absl::btree_set<int> FindFixedItems(
 }
 
 // Solves a subset sum problem to find the maximum reachable max size.
-int64_t MaxSubsetSumSize(const std::vector<int>& sizes, int64_t max_size) {
+int64_t MaxSubsetSumSize(absl::Span<const int> sizes, int64_t max_size) {
   CpModelBuilder builder;
   LinearExpr weighed_sum;
   for (const int size : sizes) {
@@ -280,7 +284,7 @@ void LoadAndSolve(const std::string& file_name, int instance) {
   const absl::btree_set<int> fixed_items = FindFixedItems(problem);
 
   // Fix the fixed_items to the first fixed_items.size() bins.
-  CHECK_LT(fixed_items.size(), max_bins)
+  CHECK_LE(fixed_items.size(), max_bins)
       << "Infeasible problem, increase max_bins";
   int count = 0;
   for (const int item : fixed_items) {
@@ -392,6 +396,36 @@ void LoadAndSolve(const std::string& file_name, int instance) {
     LOG(INFO) << num_items_fixed_on_one_border << " items fixed on one border";
   }
 
+  if (absl::GetFlag(FLAGS_symmetry_breaking_level) >= 2) {
+    // Break symmetry of a permutation of identical items
+    absl::btree_map<std::pair<int64_t, int64_t>, std::vector<int>>
+        item_indexes_for_dimensions;
+    for (int item = 0; item < num_items; ++item) {
+      item_indexes_for_dimensions[{problem.items(item).shapes(0).dimensions(0),
+                                   problem.items(item).shapes(0).dimensions(1)}]
+          .push_back(item);
+    }
+    int num_identical_items = 0;
+    for (const auto& [dim, item_indexes] : item_indexes_for_dimensions) {
+      if (item_indexes.size() == 1) {
+        continue;
+      }
+      ++num_identical_items;
+      for (int i = 1; i < item_indexes.size(); ++i) {
+        const IntVar prev_start_x = starts_by_dimension[item_indexes[i - 1]][0];
+        const IntVar curr_start_x = starts_by_dimension[item_indexes[i]][0];
+
+        const IntVar prev_start_y = starts_by_dimension[item_indexes[i - 1]][1];
+        const IntVar curr_start_y = starts_by_dimension[item_indexes[i]][1];
+        cp_model.AddLessOrEqual(prev_start_x * bin_sizes[1] + prev_start_y,
+                                curr_start_x * bin_sizes[1] + curr_start_y);
+      }
+    }
+    if (num_identical_items > 0) {
+      LOG(INFO) << num_identical_items << " identical items";
+    }
+  }
+
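The block just added orders identical items by the linearized key start_x * bin_sizes[1] + start_y. Since any feasible start_y satisfies 0 <= start_y < bin_sizes[1], that key maps each (start_x, start_y) pair to a distinct integer, so a single linear inequality per consecutive pair of identical items enforces a lexicographic order and removes the factorial permutation symmetry. A minimal self-contained sketch of the same trick with the CP-SAT C++ API follows; the bin and item dimensions are made up for the example.

    #include <cstdint>

    #include "ortools/sat/cp_model.h"

    namespace sat = operations_research::sat;

    void OrderTwoIdenticalRectangles() {
      sat::CpModelBuilder cp_model;
      const int64_t kBinW = 10, kBinH = 8;   // hypothetical bin size
      const int64_t kItemW = 3, kItemH = 2;  // two identical items

      const sat::IntVar x1 = cp_model.NewIntVar({0, kBinW - kItemW});
      const sat::IntVar y1 = cp_model.NewIntVar({0, kBinH - kItemH});
      const sat::IntVar x2 = cp_model.NewIntVar({0, kBinW - kItemW});
      const sat::IntVar y2 = cp_model.NewIntVar({0, kBinH - kItemH});

      // 0 <= y < kBinH, so x * kBinH + y is injective on (x, y); this one
      // inequality enforces (x1, y1) <=_lex (x2, y2) between the two copies.
      cp_model.AddLessOrEqual(x1 * kBinH + y1, x2 * kBinH + y2);
    }

   // Add non overlapping constraint.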
for (int b = 0; b < max_bins; ++b) { NoOverlap2DConstraint no_overlap_2d = cp_model.AddNoOverlap2D(); @@ -437,12 +471,13 @@ void LoadAndSolve(const std::string& file_name, int instance) { // Objective definition. cp_model.Minimize(obj); - for (int b = trivial_lb; b + 1 < max_bins; ++b) { + CHECK_GT(trivial_lb, 0); + for (int b = trivial_lb; b < max_bins; ++b) { cp_model.AddGreaterOrEqual(obj, b + 1).OnlyEnforceIf(bin_is_used[b]); - cp_model.AddImplication(bin_is_used[b + 1], bin_is_used[b]); + cp_model.AddImplication(bin_is_used[b], bin_is_used[b - 1]); } - if (absl::GetFlag(FLAGS_symmetry_breaking)) { + if (absl::GetFlag(FLAGS_symmetry_breaking_level) >= 1) { // First sort the items not yet fixed by area. std::vector not_placed_items; for (int item = 0; item < num_items; ++item) { diff --git a/examples/cpp/constraint_programming_cp.cc b/examples/cpp/constraint_programming_cp.cc index c11cd27b8a3..af3dc8c05bb 100644 --- a/examples/cpp/constraint_programming_cp.cc +++ b/examples/cpp/constraint_programming_cp.cc @@ -43,8 +43,7 @@ void RunConstraintProgrammingExample() { solver.NewSearch(db); while (solver.NextSolution()) { - LOG(INFO) << "Solution" - << ": x = " << x->Value() << "; y = " << y->Value() + LOG(INFO) << "Solution" << ": x = " << x->Value() << "; y = " << y->Value() << "; z = " << z->Value(); } solver.EndSearch(); diff --git a/examples/cpp/costas_array_sat.cc b/examples/cpp/costas_array_sat.cc index b16f715fe49..fc7a4eb558e 100644 --- a/examples/cpp/costas_array_sat.cc +++ b/examples/cpp/costas_array_sat.cc @@ -81,7 +81,7 @@ void CheckConstraintViolators(absl::Span vars, } // Check that all pairwise differences are unique -bool CheckCostas(const std::vector& vars) { +bool CheckCostas(absl::Span vars) { std::vector violators; CheckConstraintViolators(vars, &violators); diff --git a/examples/cpp/cvrp_disjoint_tw.cc b/examples/cpp/cvrp_disjoint_tw.cc deleted file mode 100644 index 1499d48389b..00000000000 --- a/examples/cpp/cvrp_disjoint_tw.cc +++ /dev/null @@ -1,195 +0,0 @@ -// Copyright 2010-2024 Google LLC -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// -// Capacitated Vehicle Routing Problem with Disjoint Time Windows (and optional -// orders). -// A description of the problem can be found here: -// http://en.wikipedia.org/wiki/Vehicle_routing_problem. -// The variant which is tackled by this model includes a capacity dimension, -// disjoint time windows and optional orders, with a penalty cost if orders are -// not performed. For the sake of simplicity, orders are randomly located and -// distances are computed using the Manhattan distance. Distances are assumed -// to be in meters and times in seconds. 
- -#include -#include - -#include "absl/random/random.h" -#include "examples/cpp/cvrptw_lib.h" -#include "google/protobuf/text_format.h" -#include "ortools/base/commandlineflags.h" -#include "ortools/base/init_google.h" -#include "ortools/base/types.h" -#include "ortools/base/logging.h" -#include "ortools/constraint_solver/routing.h" -#include "ortools/constraint_solver/routing_index_manager.h" -#include "ortools/constraint_solver/routing_parameters.h" -#include "ortools/constraint_solver/routing_parameters.pb.h" - -using operations_research::Assignment; -using operations_research::DefaultRoutingSearchParameters; -using operations_research::GetSeed; -using operations_research::LocationContainer; -using operations_research::RandomDemand; -using operations_research::RoutingDimension; -using operations_research::RoutingIndexManager; -using operations_research::RoutingModel; -using operations_research::RoutingNodeIndex; -using operations_research::RoutingSearchParameters; -using operations_research::ServiceTimePlusTransition; -using operations_research::Solver; - -ABSL_FLAG(int, vrp_orders, 100, "Number of nodes in the problem."); -ABSL_FLAG(int, vrp_vehicles, 20, "Number of vehicles in the problem."); -ABSL_FLAG(int, vrp_windows, 5, "Number of disjoint windows per node."); -ABSL_FLAG(bool, vrp_use_deterministic_random_seed, false, - "Use deterministic random seeds."); -ABSL_FLAG(bool, vrp_use_same_vehicle_costs, false, - "Use same vehicle costs in the routing model"); -ABSL_FLAG(std::string, routing_search_parameters, "", - "Text proto RoutingSearchParameters (possibly partial) that will " - "override the DefaultRoutingSearchParameters()"); - -const char* kTime = "Time"; -const char* kCapacity = "Capacity"; -const int64_t kMaxNodesPerGroup = 10; -const int64_t kSameVehicleCost = 1000; - -int main(int argc, char** argv) { - InitGoogle(argv[0], &argc, &argv, true); - CHECK_LT(0, absl::GetFlag(FLAGS_vrp_orders)) - << "Specify an instance size greater than 0."; - CHECK_LT(0, absl::GetFlag(FLAGS_vrp_vehicles)) - << "Specify a non-null vehicle fleet size."; - // VRP of size absl::GetFlag(FLAGS_vrp_size). - // Nodes are indexed from 0 to absl::GetFlag(FLAGS_vrp_orders), the starts and - // ends of the routes are at node 0. - const RoutingIndexManager::NodeIndex kDepot(0); - RoutingIndexManager manager(absl::GetFlag(FLAGS_vrp_orders) + 1, - absl::GetFlag(FLAGS_vrp_vehicles), kDepot); - RoutingModel routing(manager); - - // Setting up locations. - const int64_t kXMax = 100000; - const int64_t kYMax = 100000; - const int64_t kSpeed = 10; - LocationContainer locations( - kSpeed, absl::GetFlag(FLAGS_vrp_use_deterministic_random_seed)); - for (int location = 0; location <= absl::GetFlag(FLAGS_vrp_orders); - ++location) { - locations.AddRandomLocation(kXMax, kYMax); - } - - // Setting the cost function. - const int vehicle_cost = routing.RegisterTransitCallback( - [&locations, &manager](int64_t i, int64_t j) { - return locations.ManhattanDistance(manager.IndexToNode(i), - manager.IndexToNode(j)); - }); - routing.SetArcCostEvaluatorOfAllVehicles(vehicle_cost); - - // Adding capacity dimension constraints. 
- const int64_t kVehicleCapacity = 40; - const int64_t kNullCapacitySlack = 0; - RandomDemand demand(manager.num_nodes(), kDepot, - absl::GetFlag(FLAGS_vrp_use_deterministic_random_seed)); - demand.Initialize(); - routing.AddDimension(routing.RegisterTransitCallback( - [&demand, &manager](int64_t i, int64_t j) { - return demand.Demand(manager.IndexToNode(i), - manager.IndexToNode(j)); - }), - kNullCapacitySlack, kVehicleCapacity, - /*fix_start_cumul_to_zero=*/true, kCapacity); - - // Adding time dimension constraints. - const int64_t kTimePerDemandUnit = 300; - const int64_t kHorizon = 24 * 3600; - ServiceTimePlusTransition time( - kTimePerDemandUnit, - [&demand](RoutingNodeIndex i, RoutingNodeIndex j) { - return demand.Demand(i, j); - }, - [&locations](RoutingNodeIndex i, RoutingNodeIndex j) { - return locations.ManhattanTime(i, j); - }); - routing.AddDimension( - routing.RegisterTransitCallback([&time, &manager](int64_t i, int64_t j) { - return time.Compute(manager.IndexToNode(i), manager.IndexToNode(j)); - }), - kHorizon, kHorizon, /*fix_start_cumul_to_zero=*/false, kTime); - const RoutingDimension& time_dimension = routing.GetDimensionOrDie(kTime); - - // Adding disjoint time windows. - Solver* solver = routing.solver(); - std::mt19937 randomizer( - GetSeed(absl::GetFlag(FLAGS_vrp_use_deterministic_random_seed))); - for (int order = 1; order < manager.num_nodes(); ++order) { - std::vector forbid_points(2 * absl::GetFlag(FLAGS_vrp_windows), 0); - for (int i = 0; i < forbid_points.size(); ++i) { - forbid_points[i] = absl::Uniform(randomizer, 0, kHorizon); - } - std::sort(forbid_points.begin(), forbid_points.end()); - std::vector forbid_starts(1, 0); - std::vector forbid_ends; - for (int i = 0; i < forbid_points.size(); i += 2) { - forbid_ends.push_back(forbid_points[i]); - forbid_starts.push_back(forbid_points[i + 1]); - } - forbid_ends.push_back(kHorizon); - solver->AddConstraint(solver->MakeNotMemberCt( - time_dimension.CumulVar(order), forbid_starts, forbid_ends)); - } - - // Adding penalty costs to allow skipping orders. - const int64_t kPenalty = 10000000; - const RoutingIndexManager::NodeIndex kFirstNodeAfterDepot(1); - for (RoutingIndexManager::NodeIndex order = kFirstNodeAfterDepot; - order < manager.num_nodes(); ++order) { - std::vector orders(1, manager.NodeToIndex(order)); - routing.AddDisjunction(orders, kPenalty); - } - - // Adding same vehicle constraint costs for consecutive nodes. - if (absl::GetFlag(FLAGS_vrp_use_same_vehicle_costs)) { - std::vector group; - for (RoutingIndexManager::NodeIndex order = kFirstNodeAfterDepot; - order < manager.num_nodes(); ++order) { - group.push_back(manager.NodeToIndex(order)); - if (group.size() == kMaxNodesPerGroup) { - routing.AddSoftSameVehicleConstraint(group, kSameVehicleCost); - group.clear(); - } - } - if (!group.empty()) { - routing.AddSoftSameVehicleConstraint(group, kSameVehicleCost); - } - } - - // Solve, returns a solution if any (owned by RoutingModel). 
- RoutingSearchParameters parameters = DefaultRoutingSearchParameters(); - CHECK(google::protobuf::TextFormat::MergeFromString( - absl::GetFlag(FLAGS_routing_search_parameters), ¶meters)); - const Assignment* solution = routing.SolveWithParameters(parameters); - if (solution != nullptr) { - DisplayPlan(manager, routing, *solution, - absl::GetFlag(FLAGS_vrp_use_same_vehicle_costs), - kMaxNodesPerGroup, kSameVehicleCost, - routing.GetDimensionOrDie(kCapacity), - routing.GetDimensionOrDie(kTime)); - } else { - LOG(INFO) << "No solution found."; - } - return EXIT_SUCCESS; -} diff --git a/examples/cpp/cvrptw.cc b/examples/cpp/cvrptw.cc deleted file mode 100644 index 28b62699ecf..00000000000 --- a/examples/cpp/cvrptw.cc +++ /dev/null @@ -1,181 +0,0 @@ -// Copyright 2010-2024 Google LLC -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// -// Capacitated Vehicle Routing Problem with Time Windows (and optional orders). -// A description of the problem can be found here: -// http://en.wikipedia.org/wiki/Vehicle_routing_problem. -// The variant which is tackled by this model includes a capacity dimension, -// time windows and optional orders, with a penalty cost if orders are not -// performed. For the sake of simplicity, orders are randomly located and -// distances are computed using the Manhattan distance. Distances are assumed -// to be in meters and times in seconds. 
- -#include -#include - -#include "absl/random/random.h" -#include "examples/cpp/cvrptw_lib.h" -#include "google/protobuf/text_format.h" -#include "ortools/base/commandlineflags.h" -#include "ortools/base/init_google.h" -#include "ortools/base/types.h" -#include "ortools/base/logging.h" -#include "ortools/constraint_solver/routing.h" -#include "ortools/constraint_solver/routing_index_manager.h" -#include "ortools/constraint_solver/routing_parameters.h" -#include "ortools/constraint_solver/routing_parameters.pb.h" - -using operations_research::Assignment; -using operations_research::DefaultRoutingSearchParameters; -using operations_research::GetSeed; -using operations_research::LocationContainer; -using operations_research::RandomDemand; -using operations_research::RoutingDimension; -using operations_research::RoutingIndexManager; -using operations_research::RoutingModel; -using operations_research::RoutingNodeIndex; -using operations_research::RoutingSearchParameters; -using operations_research::ServiceTimePlusTransition; - -ABSL_FLAG(int, vrp_orders, 100, "Number of nodes in the problem"); -ABSL_FLAG(int, vrp_vehicles, 20, "Number of vehicles in the problem"); -ABSL_FLAG(bool, vrp_use_deterministic_random_seed, false, - "Use deterministic random seeds"); -ABSL_FLAG(bool, vrp_use_same_vehicle_costs, false, - "Use same vehicle costs in the routing model"); -ABSL_FLAG(std::string, routing_search_parameters, "", - "Text proto RoutingSearchParameters (possibly partial) that will " - "override the DefaultRoutingSearchParameters()"); - -const char* kTime = "Time"; -const char* kCapacity = "Capacity"; -const int64_t kMaxNodesPerGroup = 10; -const int64_t kSameVehicleCost = 1000; - -int main(int argc, char** argv) { - InitGoogle(argv[0], &argc, &argv, true); - CHECK_LT(0, absl::GetFlag(FLAGS_vrp_orders)) - << "Specify an instance size greater than 0."; - CHECK_LT(0, absl::GetFlag(FLAGS_vrp_vehicles)) - << "Specify a non-null vehicle fleet size."; - // VRP of size absl::GetFlag(FLAGS_vrp_size). - // Nodes are indexed from 0 to absl::GetFlag(FLAGS_vrp_orders), the starts and - // ends of the routes are at node 0. - const RoutingIndexManager::NodeIndex kDepot(0); - RoutingIndexManager manager(absl::GetFlag(FLAGS_vrp_orders) + 1, - absl::GetFlag(FLAGS_vrp_vehicles), kDepot); - RoutingModel routing(manager); - - // Setting up locations. - const int64_t kXMax = 100000; - const int64_t kYMax = 100000; - const int64_t kSpeed = 10; - LocationContainer locations( - kSpeed, absl::GetFlag(FLAGS_vrp_use_deterministic_random_seed)); - for (int location = 0; location <= absl::GetFlag(FLAGS_vrp_orders); - ++location) { - locations.AddRandomLocation(kXMax, kYMax); - } - - // Setting the cost function. - const int vehicle_cost = routing.RegisterTransitCallback( - [&locations, &manager](int64_t i, int64_t j) { - return locations.ManhattanDistance(manager.IndexToNode(i), - manager.IndexToNode(j)); - }); - routing.SetArcCostEvaluatorOfAllVehicles(vehicle_cost); - - // Adding capacity dimension constraints. 
-  const int64_t kVehicleCapacity = 40;
-  const int64_t kNullCapacitySlack = 0;
-  RandomDemand demand(manager.num_nodes(), kDepot,
-                      absl::GetFlag(FLAGS_vrp_use_deterministic_random_seed));
-  demand.Initialize();
-  routing.AddDimension(routing.RegisterTransitCallback(
-                           [&demand, &manager](int64_t i, int64_t j) {
-                             return demand.Demand(manager.IndexToNode(i),
-                                                  manager.IndexToNode(j));
-                           }),
-                       kNullCapacitySlack, kVehicleCapacity,
-                       /*fix_start_cumul_to_zero=*/true, kCapacity);
-
-  // Adding time dimension constraints.
-  const int64_t kTimePerDemandUnit = 300;
-  const int64_t kHorizon = 24 * 3600;
-  ServiceTimePlusTransition time(
-      kTimePerDemandUnit,
-      [&demand](RoutingNodeIndex i, RoutingNodeIndex j) {
-        return demand.Demand(i, j);
-      },
-      [&locations](RoutingNodeIndex i, RoutingNodeIndex j) {
-        return locations.ManhattanTime(i, j);
-      });
-  routing.AddDimension(
-      routing.RegisterTransitCallback([&time, &manager](int64_t i, int64_t j) {
-        return time.Compute(manager.IndexToNode(i), manager.IndexToNode(j));
-      }),
-      kHorizon, kHorizon, /*fix_start_cumul_to_zero=*/true, kTime);
-  const RoutingDimension& time_dimension = routing.GetDimensionOrDie(kTime);
-
-  // Adding time windows.
-  std::mt19937 randomizer(
-      GetSeed(absl::GetFlag(FLAGS_vrp_use_deterministic_random_seed)));
-  const int64_t kTWDuration = 5 * 3600;
-  for (int order = 1; order < manager.num_nodes(); ++order) {
-    const int64_t start =
-        absl::Uniform<int64_t>(randomizer, 0, kHorizon - kTWDuration);
-    time_dimension.CumulVar(order)->SetRange(start, start + kTWDuration);
-  }
-
-  // Adding penalty costs to allow skipping orders.
-  const int64_t kPenalty = 10000000;
-  const RoutingIndexManager::NodeIndex kFirstNodeAfterDepot(1);
-  for (RoutingIndexManager::NodeIndex order = kFirstNodeAfterDepot;
-       order < manager.num_nodes(); ++order) {
-    std::vector<int64_t> orders(1, manager.NodeToIndex(order));
-    routing.AddDisjunction(orders, kPenalty);
-  }
-
-  // Adding same vehicle constraint costs for consecutive nodes.
-  if (absl::GetFlag(FLAGS_vrp_use_same_vehicle_costs)) {
-    std::vector<int64_t> group;
-    for (RoutingIndexManager::NodeIndex order = kFirstNodeAfterDepot;
-         order < manager.num_nodes(); ++order) {
-      group.push_back(manager.NodeToIndex(order));
-      if (group.size() == kMaxNodesPerGroup) {
-        routing.AddSoftSameVehicleConstraint(group, kSameVehicleCost);
-        group.clear();
-      }
-    }
-    if (!group.empty()) {
-      routing.AddSoftSameVehicleConstraint(group, kSameVehicleCost);
-    }
-  }
-
-  // Solve, returns a solution if any (owned by RoutingModel).
-  RoutingSearchParameters parameters = DefaultRoutingSearchParameters();
-  CHECK(google::protobuf::TextFormat::MergeFromString(
-      absl::GetFlag(FLAGS_routing_search_parameters), &parameters));
-  const Assignment* solution = routing.SolveWithParameters(parameters);
-  if (solution != nullptr) {
-    DisplayPlan(manager, routing, *solution,
-                absl::GetFlag(FLAGS_vrp_use_same_vehicle_costs),
-                kMaxNodesPerGroup, kSameVehicleCost,
-                routing.GetDimensionOrDie(kCapacity),
-                routing.GetDimensionOrDie(kTime));
-  } else {
-    LOG(INFO) << "No solution found.";
-  }
-  return EXIT_SUCCESS;
-}
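A closing note on a pattern that recurs in every one of these deleted files: per-order disjunctions with a penalty are what makes orders optional. A disjunction over a single index means "visit this index, or drop it and pay the penalty in the objective". A sketch under the same API (node number and penalty are hypothetical):

    // Sketch: make node 3 optional at a cost of 1000.
    std::vector<int64_t> indices = {
        manager.NodeToIndex(RoutingIndexManager::NodeIndex(3))};
    routing.AddDisjunction(indices, /*penalty=*/1000);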
diff --git a/examples/cpp/cvrptw_lib.h b/examples/cpp/cvrptw_lib.h
deleted file mode 100644
index edb6e3a9384..00000000000
--- a/examples/cpp/cvrptw_lib.h
+++ /dev/null
@@ -1,348 +0,0 @@
-// Copyright 2010-2024 Google LLC
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// This header provides functions to help create random instances of the
-// vehicle routing problem; random capacities and random time windows.
-#ifndef OR_TOOLS_EXAMPLES_CVRPTW_LIB_H_
-#define OR_TOOLS_EXAMPLES_CVRPTW_LIB_H_
-
-#include
-#include
-#include
-
-#include "absl/strings/str_format.h"
-#include "ortools/base/logging.h"
-#include "ortools/constraint_solver/routing.h"
-#include "ortools/util/random_engine.h"
-
-namespace operations_research {
-
-typedef std::function<int64_t(RoutingIndexManager::NodeIndex,
-                              RoutingIndexManager::NodeIndex)>
-    RoutingNodeEvaluator2;
-
-// Random seed generator.
-int32_t GetSeed(bool deterministic);
-
-// Location container, contains positions of orders and can be used to obtain
-// Manhattan distances/times between locations.
-class LocationContainer {
- public:
-  LocationContainer(int64_t speed, bool use_deterministic_seed);
-  void AddLocation(int64_t x, int64_t y) {
-    locations_.push_back(Location(x, y));
-  }
-  void AddRandomLocation(int64_t x_max, int64_t y_max);
-  void AddRandomLocation(int64_t x_max, int64_t y_max, int duplicates);
-  int64_t ManhattanDistance(RoutingIndexManager::NodeIndex from,
-                            RoutingIndexManager::NodeIndex to) const;
-  int64_t NegManhattanDistance(RoutingIndexManager::NodeIndex from,
-                               RoutingIndexManager::NodeIndex to) const;
-  int64_t ManhattanTime(RoutingIndexManager::NodeIndex from,
-                        RoutingIndexManager::NodeIndex to) const;
-
-  bool SameLocation(RoutingIndexManager::NodeIndex node1,
-                    RoutingIndexManager::NodeIndex node2) const;
-  int64_t SameLocationFromIndex(int64_t node1, int64_t node2) const;
-
- private:
-  class Location {
-   public:
-    Location();
-    Location(int64_t x, int64_t y);
-    int64_t DistanceTo(const Location& location) const;
-    bool IsAtSameLocation(const Location& location) const;
-
-   private:
-    static int64_t Abs(int64_t value);
-
-    int64_t x_;
-    int64_t y_;
-  };
-
-  random_engine_t randomizer_;
-  const int64_t speed_;
-  absl::StrongVector locations_;
-};
-
-// Random demand.
-class RandomDemand {
- public:
-  RandomDemand(int size, RoutingIndexManager::NodeIndex depot,
-               bool use_deterministic_seed);
-  void Initialize();
-  int64_t Demand(RoutingIndexManager::NodeIndex from,
-                 RoutingIndexManager::NodeIndex to) const;
-
- private:
-  std::unique_ptr<int64_t[]> demand_;
-  const int size_;
-  const RoutingIndexManager::NodeIndex depot_;
-  const bool use_deterministic_seed_;
-};
-
-// Service time (proportional to demand) + transition time callback.
-class ServiceTimePlusTransition {
- public:
-  ServiceTimePlusTransition(
-      int64_t time_per_demand_unit,
-      operations_research::RoutingNodeEvaluator2 demand,
-      operations_research::RoutingNodeEvaluator2 transition_time);
-  int64_t Compute(RoutingIndexManager::NodeIndex from,
-                  RoutingIndexManager::NodeIndex to) const;
-
- private:
-  const int64_t time_per_demand_unit_;
-  operations_research::RoutingNodeEvaluator2 demand_;
-  operations_research::RoutingNodeEvaluator2 transition_time_;
-};
-
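ServiceTimePlusTransition, declared above, is just the composition total(i, j) = time_per_demand_unit * demand(i, j) + transition_time(i, j), as its definition later in this header shows. A tiny usage sketch with constant stand-in evaluators (the values are hypothetical):

    // Sketch: 300s of service per demand unit, plus a constant 600s of travel.
    operations_research::ServiceTimePlusTransition time_eval(
        /*time_per_demand_unit=*/300,
        [](operations_research::RoutingNodeIndex,
           operations_research::RoutingNodeIndex) { return int64_t{2}; },
        [](operations_research::RoutingNodeIndex,
           operations_research::RoutingNodeIndex) { return int64_t{600}; });
    // time_eval.Compute(a, b) == 300 * 2 + 600 == 1200 for any pair (a, b).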
-// Stop service time + transition time callback.
-class StopServiceTimePlusTransition {
- public:
-  StopServiceTimePlusTransition(
-      int64_t stop_time, const LocationContainer& location_container,
-      operations_research::RoutingNodeEvaluator2 transition_time);
-  int64_t Compute(RoutingIndexManager::NodeIndex from,
-                  RoutingIndexManager::NodeIndex to) const;
-
- private:
-  const int64_t stop_time_;
-  const LocationContainer& location_container_;
-  operations_research::RoutingNodeEvaluator2 demand_;
-  operations_research::RoutingNodeEvaluator2 transition_time_;
-};
-
-// Route plan displayer.
-// TODO(user): Move the display code to the routing library.
-void DisplayPlan(
-    const operations_research::RoutingIndexManager& manager,
-    const operations_research::RoutingModel& routing,
-    const operations_research::Assignment& plan, bool use_same_vehicle_costs,
-    int64_t max_nodes_per_group, int64_t same_vehicle_cost,
-    const operations_research::RoutingDimension& capacity_dimension,
-    const operations_research::RoutingDimension& time_dimension);
-
-using NodeIndex = RoutingIndexManager::NodeIndex;
-
-int32_t GetSeed(bool deterministic) {
-  if (deterministic) {
-    return 0;
-  } else {
-    return std::random_device()();
-  }
-}
-
-LocationContainer::LocationContainer(int64_t speed,
-                                     bool use_deterministic_seed)
-    : randomizer_(GetSeed(use_deterministic_seed)), speed_(speed) {
-  CHECK_LT(0, speed_);
-}
-
-void LocationContainer::AddRandomLocation(int64_t x_max, int64_t y_max) {
-  AddRandomLocation(x_max, y_max, 1);
-}
-
-void LocationContainer::AddRandomLocation(int64_t x_max, int64_t y_max,
-                                          int duplicates) {
-  const int64_t x = absl::Uniform<int64_t>(randomizer_, 0, x_max + 1);
-  const int64_t y = absl::Uniform<int64_t>(randomizer_, 0, y_max + 1);
-  for (int i = 0; i < duplicates; ++i) {
-    AddLocation(x, y);
-  }
-}
-
-int64_t LocationContainer::ManhattanDistance(NodeIndex from,
-                                             NodeIndex to) const {
-  return locations_[from].DistanceTo(locations_[to]);
-}
-
-int64_t LocationContainer::NegManhattanDistance(NodeIndex from,
-                                                NodeIndex to) const {
-  return -ManhattanDistance(from, to);
-}
-
-int64_t LocationContainer::ManhattanTime(NodeIndex from, NodeIndex to) const {
-  return ManhattanDistance(from, to) / speed_;
-}
-
-bool LocationContainer::SameLocation(NodeIndex node1, NodeIndex node2) const {
-  if (node1 < locations_.size() && node2 < locations_.size()) {
-    return locations_[node1].IsAtSameLocation(locations_[node2]);
-  }
-  return false;
-}
-int64_t LocationContainer::SameLocationFromIndex(int64_t node1,
-                                                 int64_t node2) const {
-  // The direct conversion from constraint model indices to routing model
-  // nodes is correct because the depot is node 0.
-  // TODO(user): Fetch proper indices from routing model.
-  return SameLocation(NodeIndex(node1), NodeIndex(node2));
-}
-
-LocationContainer::Location::Location() : x_(0), y_(0) {}
-
-LocationContainer::Location::Location(int64_t x, int64_t y) : x_(x), y_(y) {}
-
-int64_t LocationContainer::Location::DistanceTo(
-    const Location& location) const {
-  return Abs(x_ - location.x_) + Abs(y_ - location.y_);
-}
-
-bool LocationContainer::Location::IsAtSameLocation(
-    const Location& location) const {
-  return x_ == location.x_ && y_ == location.y_;
-}
-
-int64_t LocationContainer::Location::Abs(int64_t value) {
-  return std::max(value, -value);
-}
-
-RandomDemand::RandomDemand(int size, NodeIndex depot,
-                           bool use_deterministic_seed)
-    : size_(size),
-      depot_(depot),
-      use_deterministic_seed_(use_deterministic_seed) {
-  CHECK_LT(0, size_);
-}
-
-void RandomDemand::Initialize() {
-  const int64_t kDemandMax = 5;
-  const int64_t kDemandMin = 1;
-  demand_ = absl::make_unique<int64_t[]>(size_);
-  random_engine_t randomizer;
-  for (int order = 0; order < size_; ++order) {
-    if (order == depot_) {
-      demand_[order] = 0;
-    } else {
-      demand_[order] =
-          kDemandMin +
-          absl::Uniform<int64_t>(randomizer, 0, kDemandMax - kDemandMin + 1);
-    }
-  }
-}
-
-int64_t RandomDemand::Demand(NodeIndex from, NodeIndex /*to*/) const {
-  return demand_[from.value()];
-}
-
-ServiceTimePlusTransition::ServiceTimePlusTransition(
-    int64_t time_per_demand_unit, RoutingNodeEvaluator2 demand,
-    RoutingNodeEvaluator2 transition_time)
-    : time_per_demand_unit_(time_per_demand_unit),
-      demand_(std::move(demand)),
-      transition_time_(std::move(transition_time)) {}
-
-int64_t ServiceTimePlusTransition::Compute(NodeIndex from,
-                                           NodeIndex to) const {
-  return time_per_demand_unit_ * demand_(from, to) +
-         transition_time_(from, to);
-}
-
-StopServiceTimePlusTransition::StopServiceTimePlusTransition(
-    int64_t stop_time, const LocationContainer& location_container,
-    RoutingNodeEvaluator2 transition_time)
-    : stop_time_(stop_time),
-      location_container_(location_container),
-      transition_time_(std::move(transition_time)) {}
-
-int64_t StopServiceTimePlusTransition::Compute(NodeIndex from,
-                                               NodeIndex to) const {
-  return location_container_.SameLocation(from, to)
-             ? 0
-             : stop_time_ + transition_time_(from, to);
-}
-
-void DisplayPlan(
-    const RoutingIndexManager& manager, const RoutingModel& routing,
-    const operations_research::Assignment& plan, bool use_same_vehicle_costs,
-    int64_t max_nodes_per_group, int64_t same_vehicle_cost,
-    const operations_research::RoutingDimension& capacity_dimension,
-    const operations_research::RoutingDimension& time_dimension) {
-  // Display plan cost.
-  std::string plan_output = absl::StrFormat("Cost %d\n", plan.ObjectiveValue());
-
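The DisplayPlan body in the next chunk leans on a routing-model encoding worth spelling out: a dropped (unperformed) node is its own successor, i.e. `plan.Value(routing.NextVar(i)) == i`. A sketch of the idiom on its own, using the `routing` and `plan` names from the surrounding code:

    // Sketch: collect dropped orders from an Assignment. In the routing
    // model, an inactive node's NextVar points back to the node itself.
    std::vector<int64_t> dropped;
    for (int64_t index = 0; index < routing.Size(); ++index) {
      if (routing.IsStart(index) || routing.IsEnd(index)) continue;
      if (plan.Value(routing.NextVar(index)) == index) {
        dropped.push_back(index);
      }
    }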
-  // Display dropped orders.
-  std::string dropped;
-  for (int64_t order = 0; order < routing.Size(); ++order) {
-    if (routing.IsStart(order) || routing.IsEnd(order)) continue;
-    if (plan.Value(routing.NextVar(order)) == order) {
-      if (dropped.empty()) {
-        absl::StrAppendFormat(&dropped, " %d",
-                              manager.IndexToNode(order).value());
-      } else {
-        absl::StrAppendFormat(&dropped, ", %d",
-                              manager.IndexToNode(order).value());
-      }
-    }
-  }
-  if (!dropped.empty()) {
-    plan_output += "Dropped orders:" + dropped + "\n";
-  }
-
-  if (use_same_vehicle_costs) {
-    int group_size = 0;
-    int64_t group_same_vehicle_cost = 0;
-    std::set<int64_t> visited;
-    for (int64_t order = 0; order < routing.Size(); ++order) {
-      if (routing.IsStart(order) || routing.IsEnd(order)) continue;
-      ++group_size;
-      visited.insert(plan.Value(routing.VehicleVar(order)));
-      if (group_size == max_nodes_per_group) {
-        if (visited.size() > 1) {
-          group_same_vehicle_cost += (visited.size() - 1) * same_vehicle_cost;
-        }
-        group_size = 0;
-        visited.clear();
-      }
-    }
-    if (visited.size() > 1) {
-      group_same_vehicle_cost += (visited.size() - 1) * same_vehicle_cost;
-    }
-    LOG(INFO) << "Same vehicle costs: " << group_same_vehicle_cost;
-  }
-
-  // Display actual output for each vehicle.
-  for (int route_number = 0; route_number < routing.vehicles();
-       ++route_number) {
-    int64_t order = routing.Start(route_number);
-    absl::StrAppendFormat(&plan_output, "Route %d: ", route_number);
-    if (routing.IsEnd(plan.Value(routing.NextVar(order)))) {
-      plan_output += "Empty\n";
-    } else {
-      while (true) {
-        operations_research::IntVar* const load_var =
-            capacity_dimension.CumulVar(order);
-        operations_research::IntVar* const time_var =
-            time_dimension.CumulVar(order);
-        operations_research::IntVar* const slack_var =
-            routing.IsEnd(order) ? nullptr : time_dimension.SlackVar(order);
-        if (slack_var != nullptr && plan.Contains(slack_var)) {
-          absl::StrAppendFormat(
-              &plan_output, "%d Load(%d) Time(%d, %d) Slack(%d, %d)",
-              manager.IndexToNode(order).value(), plan.Value(load_var),
-              plan.Min(time_var), plan.Max(time_var), plan.Min(slack_var),
-              plan.Max(slack_var));
-        } else {
-          absl::StrAppendFormat(&plan_output, "%d Load(%d) Time(%d, %d)",
-                                manager.IndexToNode(order).value(),
-                                plan.Value(load_var), plan.Min(time_var),
-                                plan.Max(time_var));
-        }
-        if (routing.IsEnd(order)) break;
-        plan_output += " -> ";
-        order = plan.Value(routing.NextVar(order));
-      }
-      plan_output += "\n";
-    }
-  }
-  LOG(INFO) << plan_output;
-}
-}  // namespace operations_research
-
-#endif  // OR_TOOLS_EXAMPLES_CVRPTW_LIB_H_
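One more recurring API from the examples above: AddSoftSameVehicleConstraint, whose cost accounting DisplayPlan just reproduced by hand ((number of distinct vehicles - 1) * cost per group). A sketch, with a hypothetical group and cost:

    // Sketch: softly encourage nodes 1..4 to share a vehicle. Each extra
    // distinct vehicle used by the group adds 1000 to the objective.
    std::vector<int64_t> group;
    for (int node = 1; node <= 4; ++node) {
      group.push_back(
          manager.NodeToIndex(RoutingIndexManager::NodeIndex(node)));
    }
    routing.AddSoftSameVehicleConstraint(group, /*cost=*/1000);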
diff --git a/examples/cpp/cvrptw_with_breaks.cc b/examples/cpp/cvrptw_with_breaks.cc
deleted file mode 100644
index 25d790fa019..00000000000
--- a/examples/cpp/cvrptw_with_breaks.cc
+++ /dev/null
@@ -1,235 +0,0 @@
-// Copyright 2010-2024 Google LLC
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-//
-// Capacitated Vehicle Routing Problem with Time Windows and Breaks.
-// A description of the Capacitated Vehicle Routing Problem with Time Windows
-// can be found here:
-// http://en.wikipedia.org/wiki/Vehicle_routing_problem.
-// The variant which is tackled by this model includes a capacity dimension,
-// time windows and optional orders, with a penalty cost if orders are not
-// performed. For the sake of simplicity, orders are randomly located and
-// distances are computed using the Manhattan distance. Distances are assumed
-// to be in meters and times in seconds.
-// This variant also includes vehicle breaks which must happen during the day,
-// with two alternative break schemes: either a long break in the middle of
-// the day, or two shorter ones which can be taken during a longer period of
-// the day.
-
-#include
-#include
-
-#include "absl/random/random.h"
-#include "absl/strings/str_cat.h"
-#include "examples/cpp/cvrptw_lib.h"
-#include "google/protobuf/text_format.h"
-#include "ortools/base/commandlineflags.h"
-#include "ortools/base/init_google.h"
-#include "ortools/base/types.h"
-#include "ortools/base/logging.h"
-#include "ortools/constraint_solver/routing.h"
-#include "ortools/constraint_solver/routing_enums.pb.h"
-#include "ortools/constraint_solver/routing_index_manager.h"
-#include "ortools/constraint_solver/routing_parameters.h"
-#include "ortools/constraint_solver/routing_parameters.pb.h"
-
-using operations_research::Assignment;
-using operations_research::DefaultRoutingSearchParameters;
-using operations_research::FirstSolutionStrategy;
-using operations_research::GetSeed;
-using operations_research::IntervalVar;
-using operations_research::LocationContainer;
-using operations_research::RandomDemand;
-using operations_research::RoutingDimension;
-using operations_research::RoutingIndexManager;
-using operations_research::RoutingModel;
-using operations_research::RoutingNodeIndex;
-using operations_research::RoutingSearchParameters;
-using operations_research::ServiceTimePlusTransition;
-using operations_research::Solver;
-
-ABSL_FLAG(int, vrp_orders, 100, "Nodes in the problem.");
-ABSL_FLAG(int, vrp_vehicles, 20,
-          "Size of Traveling Salesman Problem instance.");
-ABSL_FLAG(bool, vrp_use_deterministic_random_seed, false,
-          "Use deterministic random seeds.");
-ABSL_FLAG(std::string, routing_search_parameters, "",
-          "Text proto RoutingSearchParameters (possibly partial) that will "
-          "override the DefaultRoutingSearchParameters()");
-
-const char* kTime = "Time";
-const char* kCapacity = "Capacity";
-
-int main(int argc, char** argv) {
-  InitGoogle(argv[0], &argc, &argv, true);
-  CHECK_LT(0, absl::GetFlag(FLAGS_vrp_orders))
-      << "Specify an instance size greater than 0.";
-  CHECK_LT(0, absl::GetFlag(FLAGS_vrp_vehicles))
-      << "Specify a non-null vehicle fleet size.";
-  // VRP of size absl::GetFlag(FLAGS_vrp_size).
-  // Nodes are indexed from 0 to absl::GetFlag(FLAGS_vrp_orders), the starts and
-  // ends of the routes are at node 0.
-  const RoutingIndexManager::NodeIndex kDepot(0);
-  RoutingIndexManager manager(absl::GetFlag(FLAGS_vrp_orders) + 1,
-                              absl::GetFlag(FLAGS_vrp_vehicles), kDepot);
-  RoutingModel routing(manager);
-  RoutingSearchParameters parameters = DefaultRoutingSearchParameters();
-  CHECK(google::protobuf::TextFormat::MergeFromString(
-      absl::GetFlag(FLAGS_routing_search_parameters), &parameters));
-  parameters.set_first_solution_strategy(
-      FirstSolutionStrategy::PARALLEL_CHEAPEST_INSERTION);
-
-  // Setting up locations.
-  const int64_t kXMax = 100000;
-  const int64_t kYMax = 100000;
-  const int64_t kSpeed = 10;
-  LocationContainer locations(
-      kSpeed, absl::GetFlag(FLAGS_vrp_use_deterministic_random_seed));
-  for (int location = 0; location <= absl::GetFlag(FLAGS_vrp_orders);
-       ++location) {
-    locations.AddRandomLocation(kXMax, kYMax);
-  }
-
-  // Setting the cost function.
-  const int vehicle_cost = routing.RegisterTransitCallback(
-      [&locations, &manager](int64_t i, int64_t j) {
-        return locations.ManhattanDistance(manager.IndexToNode(i),
-                                           manager.IndexToNode(j));
-      });
-  routing.SetArcCostEvaluatorOfAllVehicles(vehicle_cost);
-
-  // Adding capacity dimension constraints.
-  const int64_t kVehicleCapacity = 40;
-  const int64_t kNullCapacitySlack = 0;
-  RandomDemand demand(manager.num_nodes(), kDepot,
-                      absl::GetFlag(FLAGS_vrp_use_deterministic_random_seed));
-  demand.Initialize();
-  routing.AddDimension(routing.RegisterTransitCallback(
-                           [&demand, &manager](int64_t i, int64_t j) {
-                             return demand.Demand(manager.IndexToNode(i),
-                                                  manager.IndexToNode(j));
-                           }),
-                       kNullCapacitySlack, kVehicleCapacity,
-                       /*fix_start_cumul_to_zero=*/true, kCapacity);
-
-  // Adding time dimension constraints.
-  const int64_t kTimePerDemandUnit = 300;
-  const int64_t kHorizon = 24 * 3600;
-  ServiceTimePlusTransition time(
-      kTimePerDemandUnit,
-      [&demand](RoutingNodeIndex i, RoutingNodeIndex j) {
-        return demand.Demand(i, j);
-      },
-      [&locations](RoutingNodeIndex i, RoutingNodeIndex j) {
-        return locations.ManhattanTime(i, j);
-      });
-  routing.AddDimension(
-      routing.RegisterTransitCallback([&time, &manager](int64_t i, int64_t j) {
-        return time.Compute(manager.IndexToNode(i), manager.IndexToNode(j));
-      }),
-      kHorizon, kHorizon, /*fix_start_cumul_to_zero=*/false, kTime);
-  RoutingDimension* const time_dimension = routing.GetMutableDimension(kTime);
-
-  // Adding time windows.
-  std::mt19937 randomizer(
-      GetSeed(absl::GetFlag(FLAGS_vrp_use_deterministic_random_seed)));
-  const int64_t kTWDuration = 5 * 3600;
-  for (int order = 1; order < manager.num_nodes(); ++order) {
-    const int64_t start =
-        absl::Uniform<int64_t>(randomizer, 0, kHorizon - kTWDuration);
-    time_dimension->CumulVar(order)->SetRange(start, start + kTWDuration);
-    routing.AddToAssignment(time_dimension->SlackVar(order));
-  }
-
-  // Minimize time variables.
-  for (int i = 0; i < routing.Size(); ++i) {
-    routing.AddVariableMinimizedByFinalizer(time_dimension->CumulVar(i));
-  }
-  for (int j = 0; j < absl::GetFlag(FLAGS_vrp_vehicles); ++j) {
-    routing.AddVariableMinimizedByFinalizer(
-        time_dimension->CumulVar(routing.Start(j)));
-    routing.AddVariableMinimizedByFinalizer(
-        time_dimension->CumulVar(routing.End(j)));
-  }
-
-  // Adding vehicle breaks:
-  // - a 40min break between 11:00am and 1:00pm
-  // or
-  // - 2 x 30min breaks between 10:00am and 3:00pm, at least 1h apart
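The break scheme just described is encoded in the next chunk with optional fixed-duration intervals whose Performed expressions are tied together: the two short breaks are performed together, the long break is performed exactly when they are not, and the second short break starts at least an hour after the first ends. A condensed sketch of just that logic, with the file's constants inlined:

    // Sketch: "one long break XOR two short breaks" via optional intervals.
    Solver* const s = routing.solver();
    IntervalVar* const b_long = s->MakeFixedDurationIntervalVar(
        11 * 3600, 13 * 3600, 2400, /*optional=*/true, "long");
    IntervalVar* const b1 = s->MakeFixedDurationIntervalVar(
        10 * 3600, 15 * 3600, 1800, /*optional=*/true, "short1");
    IntervalVar* const b2 = s->MakeFixedDurationIntervalVar(
        10 * 3600, 15 * 3600, 1800, /*optional=*/true, "short2");
    // Short breaks happen together; the long break iff they do not.
    s->AddConstraint(s->MakeEquality(b1->PerformedExpr(), b2->PerformedExpr()));
    s->AddConstraint(
        s->MakeNonEquality(b_long->PerformedExpr(), b2->PerformedExpr()));
    // Second short break starts at least one hour after the first one ends.
    s->AddConstraint(s->MakeIntervalVarRelationWithDelay(
        b2, Solver::STARTS_AFTER_END, b1, 3600));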
-  // First, fill service time vector.
-  std::vector<int64_t> service_times(routing.Size());
-  for (int node = 0; node < routing.Size(); node++) {
-    if (node >= routing.nodes()) {
-      service_times[node] = 0;
-    } else {
-      const RoutingIndexManager::NodeIndex index(node);
-      service_times[node] = kTimePerDemandUnit * demand.Demand(index, index);
-    }
-  }
-  const std::vector<std::vector<int64_t>> break_data = {
-      {/*start_min*/ 11, /*start_max*/ 13, /*duration*/ 2400},
-      {/*start_min*/ 10, /*start_max*/ 15, /*duration*/ 1800},
-      {/*start_min*/ 10, /*start_max*/ 15, /*duration*/ 1800}};
-  Solver* const solver = routing.solver();
-  for (int vehicle = 0; vehicle < absl::GetFlag(FLAGS_vrp_vehicles);
-       ++vehicle) {
-    std::vector<IntervalVar*> breaks;
-    for (int i = 0; i < break_data.size(); ++i) {
-      IntervalVar* const break_interval = solver->MakeFixedDurationIntervalVar(
-          break_data[i][0] * 3600, break_data[i][1] * 3600, break_data[i][2],
-          true, absl::StrCat("Break ", i, " on vehicle ", vehicle));
-      breaks.push_back(break_interval);
-    }
-    // break1 performed iff break2 performed
-    solver->AddConstraint(solver->MakeEquality(breaks[1]->PerformedExpr(),
-                                               breaks[2]->PerformedExpr()));
-    // break2 start 1h after break1.
-    solver->AddConstraint(solver->MakeIntervalVarRelationWithDelay(
-        breaks[2], Solver::STARTS_AFTER_END, breaks[1], 3600));
-    // break0 performed iff break2 unperformed
-    solver->AddConstraint(solver->MakeNonEquality(breaks[0]->PerformedExpr(),
-                                                  breaks[2]->PerformedExpr()));
-
-    time_dimension->SetBreakIntervalsOfVehicle(std::move(breaks), vehicle,
-                                               service_times);
-  }
-
-  // Adding penalty costs to allow skipping orders.
-  const int64_t kPenalty = 10000000;
-  const RoutingIndexManager::NodeIndex kFirstNodeAfterDepot(1);
-  for (RoutingIndexManager::NodeIndex order = kFirstNodeAfterDepot;
-       order < routing.nodes(); ++order) {
-    std::vector<int64_t> orders(1, manager.NodeToIndex(order));
-    routing.AddDisjunction(orders, kPenalty);
-  }
-
-  // Solve, returns a solution if any (owned by RoutingModel).
-  const Assignment* solution = routing.SolveWithParameters(parameters);
-  if (solution != nullptr) {
-    LOG(INFO) << "Breaks: ";
-    for (const auto& break_interval :
-         solution->IntervalVarContainer().elements()) {
-      if (break_interval.PerformedValue() == 1) {
-        LOG(INFO) << break_interval.Var()->name() << " "
-                  << break_interval.DebugString();
-      } else {
-        LOG(INFO) << break_interval.Var()->name() << " unperformed";
-      }
-    }
-    DisplayPlan(manager, routing, *solution, false, 0, 0,
-                routing.GetDimensionOrDie(kCapacity),
-                routing.GetDimensionOrDie(kTime));
-  } else {
-    LOG(INFO) << "No solution found.";
-  }
-  return EXIT_SUCCESS;
-}
diff --git a/examples/cpp/cvrptw_with_refueling.cc b/examples/cpp/cvrptw_with_refueling.cc
deleted file mode 100644
index 5464467771b..00000000000
--- a/examples/cpp/cvrptw_with_refueling.cc
+++ /dev/null
@@ -1,192 +0,0 @@
-// Copyright 2010-2024 Google LLC
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Capacitated Vehicle Routing Problem with Time Windows and refueling
-// constraints.
-// This is an extension to the model in cvrptw.cc so refer to that file for -// more information on the common part of the model. The model implemented here -// takes into account refueling constraints using a specific dimension: vehicles -// must visit certain nodes (refueling nodes) before the quantity of fuel -// reaches zero. Fuel consumption is proportional to the distance traveled. - -#include -#include - -#include "absl/random/random.h" -#include "examples/cpp/cvrptw_lib.h" -#include "google/protobuf/text_format.h" -#include "ortools/base/commandlineflags.h" -#include "ortools/base/init_google.h" -#include "ortools/base/types.h" -#include "ortools/base/logging.h" -#include "ortools/constraint_solver/routing.h" -#include "ortools/constraint_solver/routing_index_manager.h" -#include "ortools/constraint_solver/routing_parameters.h" -#include "ortools/constraint_solver/routing_parameters.pb.h" - -using operations_research::Assignment; -using operations_research::DefaultRoutingSearchParameters; -using operations_research::GetSeed; -using operations_research::LocationContainer; -using operations_research::RandomDemand; -using operations_research::RoutingDimension; -using operations_research::RoutingIndexManager; -using operations_research::RoutingModel; -using operations_research::RoutingNodeIndex; -using operations_research::RoutingSearchParameters; -using operations_research::ServiceTimePlusTransition; - -ABSL_FLAG(int, vrp_orders, 100, "Nodes in the problem."); -ABSL_FLAG(int, vrp_vehicles, 20, - "Size of Traveling Salesman Problem instance."); -ABSL_FLAG(bool, vrp_use_deterministic_random_seed, false, - "Use deterministic random seeds."); -ABSL_FLAG(std::string, routing_search_parameters, "", - "Text proto RoutingSearchParameters (possibly partial) that will " - "override the DefaultRoutingSearchParameters()"); - -const char* kTime = "Time"; -const char* kCapacity = "Capacity"; -const char* kFuel = "Fuel"; - -// Returns true if node is a refueling node (based on node / refuel node ratio). -bool IsRefuelNode(int64_t node) { - const int64_t kRefuelNodeRatio = 10; - return (node % kRefuelNodeRatio == 0); -} - -int main(int argc, char** argv) { - InitGoogle(argv[0], &argc, &argv, true); - CHECK_LT(0, absl::GetFlag(FLAGS_vrp_orders)) - << "Specify an instance size greater than 0."; - CHECK_LT(0, absl::GetFlag(FLAGS_vrp_vehicles)) - << "Specify a non-null vehicle fleet size."; - // VRP of size absl::GetFlag(FLAGS_vrp_size). - // Nodes are indexed from 0 to absl::GetFlag(FLAGS_vrp_orders), the starts and - // ends of the routes are at node 0. - const RoutingIndexManager::NodeIndex kDepot(0); - RoutingIndexManager manager(absl::GetFlag(FLAGS_vrp_orders) + 1, - absl::GetFlag(FLAGS_vrp_vehicles), kDepot); - RoutingModel routing(manager); - - // Setting up locations. - const int64_t kXMax = 100000; - const int64_t kYMax = 100000; - const int64_t kSpeed = 10; - LocationContainer locations( - kSpeed, absl::GetFlag(FLAGS_vrp_use_deterministic_random_seed)); - for (int location = 0; location <= absl::GetFlag(FLAGS_vrp_orders); - ++location) { - locations.AddRandomLocation(kXMax, kYMax); - } - - // Setting the cost function. - const int vehicle_cost = routing.RegisterTransitCallback( - [&locations, &manager](int64_t i, int64_t j) { - return locations.ManhattanDistance(manager.IndexToNode(i), - manager.IndexToNode(j)); - }); - routing.SetArcCostEvaluatorOfAllVehicles(vehicle_cost); - - // Adding capacity dimension constraints. 
-  const int64_t kVehicleCapacity = 40;
-  const int64_t kNullCapacitySlack = 0;
-  RandomDemand demand(manager.num_nodes(), kDepot,
-                      absl::GetFlag(FLAGS_vrp_use_deterministic_random_seed));
-  demand.Initialize();
-  routing.AddDimension(routing.RegisterTransitCallback(
-                           [&demand, &manager](int64_t i, int64_t j) {
-                             return demand.Demand(manager.IndexToNode(i),
-                                                  manager.IndexToNode(j));
-                           }),
-                       kNullCapacitySlack, kVehicleCapacity,
-                       /*fix_start_cumul_to_zero=*/true, kCapacity);
-
-  // Adding time dimension constraints.
-  const int64_t kTimePerDemandUnit = 300;
-  const int64_t kHorizon = 24 * 3600;
-  ServiceTimePlusTransition time(
-      kTimePerDemandUnit,
-      [&demand](RoutingNodeIndex i, RoutingNodeIndex j) {
-        return demand.Demand(i, j);
-      },
-      [&locations](RoutingNodeIndex i, RoutingNodeIndex j) {
-        return locations.ManhattanTime(i, j);
-      });
-  routing.AddDimension(
-      routing.RegisterTransitCallback([&time, &manager](int64_t i, int64_t j) {
-        return time.Compute(manager.IndexToNode(i), manager.IndexToNode(j));
-      }),
-      kHorizon, kHorizon, /*fix_start_cumul_to_zero=*/true, kTime);
-  const RoutingDimension& time_dimension = routing.GetDimensionOrDie(kTime);
-  // Adding time windows.
-  // NOTE(user): This randomized test case is quite sensitive to the seed:
-  // the generated model can be much easier or harder to solve, depending on
-  // the seed. It turns out that most seeds yield pretty slow/bad solver
-  // performance: I got good performance for about 10% of the seeds.
-  std::mt19937 randomizer(
-      144 + GetSeed(absl::GetFlag(FLAGS_vrp_use_deterministic_random_seed)));
-  const int64_t kTWDuration = 5 * 3600;
-  for (int order = 1; order < manager.num_nodes(); ++order) {
-    if (!IsRefuelNode(order)) {
-      const int64_t start =
-          absl::Uniform<int64_t>(randomizer, 0, kHorizon - kTWDuration);
-      time_dimension.CumulVar(order)->SetRange(start, start + kTWDuration);
-    }
-  }
-
-  // Adding fuel dimension. This dimension consumes a quantity equal to the
-  // distance traveled. Only refuel nodes can make the quantity of dimension
-  // increase by letting slack variable replenish the fuel.
-  const int64_t kFuelCapacity = kXMax + kYMax;
-  routing.AddDimension(
-      routing.RegisterTransitCallback(
-          [&locations, &manager](int64_t i, int64_t j) {
-            return locations.NegManhattanDistance(manager.IndexToNode(i),
-                                                  manager.IndexToNode(j));
-          }),
-      kFuelCapacity, kFuelCapacity, /*fix_start_cumul_to_zero=*/false, kFuel);
-  const RoutingDimension& fuel_dimension = routing.GetDimensionOrDie(kFuel);
-  for (int order = 0; order < routing.Size(); ++order) {
-    // Only let slack free for refueling nodes.
-    if (!IsRefuelNode(order) || routing.IsStart(order)) {
-      fuel_dimension.SlackVar(order)->SetValue(0);
-    }
-    // Needed to instantiate fuel quantity at each node.
-    routing.AddVariableMinimizedByFinalizer(fuel_dimension.CumulVar(order));
-  }
-
-  // Adding penalty costs to allow skipping orders.
-  const int64_t kPenalty = 100000;
-  const RoutingIndexManager::NodeIndex kFirstNodeAfterDepot(1);
-  for (RoutingIndexManager::NodeIndex order = kFirstNodeAfterDepot;
-       order < routing.nodes(); ++order) {
-    std::vector<int64_t> orders(1, manager.NodeToIndex(order));
-    routing.AddDisjunction(orders, kPenalty);
-  }
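The fuel dimension above is worth restating. Along a route, a dimension obeys cumul(next) = cumul(node) + transit(node, next) + slack(node); with a negative transit (fuel burned) and slack clamped to zero everywhere except refuel nodes, the cumul behaves exactly like a tank level that can only be refilled at designated stops. A condensed sketch of the idiom, mirroring the deleted code rather than introducing any new API:

    // Sketch of the refueling idiom: the cumul is the fuel level.
    // transit <= 0 (consumption), slack >= 0 (refill, forbidden by default).
    const int fuel_cb = routing.RegisterTransitCallback(
        [&locations, &manager](int64_t i, int64_t j) {
          return locations.NegManhattanDistance(manager.IndexToNode(i),
                                                manager.IndexToNode(j));
        });
    routing.AddDimension(fuel_cb, /*slack_max=*/kFuelCapacity,
                         /*capacity=*/kFuelCapacity,
                         /*fix_start_cumul_to_zero=*/false, "Fuel");
    const RoutingDimension& fuel = routing.GetDimensionOrDie("Fuel");
    for (int64_t index = 0; index < routing.Size(); ++index) {
      // Refueling is positive slack; allow it only at refuel stops.
      if (!IsRefuelNode(index) || routing.IsStart(index)) {
        fuel.SlackVar(index)->SetValue(0);
      }
    }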
-  // Solve, returns a solution if any (owned by RoutingModel).
-  RoutingSearchParameters parameters = DefaultRoutingSearchParameters();
-  CHECK(google::protobuf::TextFormat::MergeFromString(
-      absl::GetFlag(FLAGS_routing_search_parameters), &parameters));
-  const Assignment* solution = routing.SolveWithParameters(parameters);
-  if (solution != nullptr) {
-    DisplayPlan(manager, routing, *solution, /*use_same_vehicle_costs=*/false,
-                /*max_nodes_per_group=*/0, /*same_vehicle_cost=*/0,
-                routing.GetDimensionOrDie(kCapacity),
-                routing.GetDimensionOrDie(kTime));
-  } else {
-    LOG(INFO) << "No solution found.";
-  }
-  return EXIT_SUCCESS;
-}
diff --git a/examples/cpp/cvrptw_with_resources.cc b/examples/cpp/cvrptw_with_resources.cc
deleted file mode 100644
index 5171c781a19..00000000000
--- a/examples/cpp/cvrptw_with_resources.cc
+++ /dev/null
@@ -1,187 +0,0 @@
-// Copyright 2010-2024 Google LLC
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Capacitated Vehicle Routing Problem with Time Windows and capacitated
-// resources.
-// This is an extension to the model in cvrptw.cc so refer to that file for
-// more information on the common part of the model. The model implemented here
-// limits the number of vehicles which can simultaneously leave or enter the
-// depot due to limited resources (or capacity) available.
-// TODO(user): The current model consumes resources even for vehicles with
-// empty routes; fix this when we have an API on the cumulative constraints
-// with variable demands.
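The resource limit just described (implemented further down in this file) is a classic cumulative-scheduling encoding: pin a fixed-duration interval to each route's start and end time, give every interval a demand of 1, and cap the total overlap. A sketch with the file's constants (180s of setup time, depot capacity 5):

    // Sketch: at most 5 vehicles can be (un)loading at the depot at once.
    std::vector<IntVar*> start_end_times;
    for (int v = 0; v < routing.vehicles(); ++v) {
      start_end_times.push_back(time_dimension.CumulVar(routing.Start(v)));
      start_end_times.push_back(time_dimension.CumulVar(routing.End(v)));
    }
    std::vector<IntervalVar*> intervals;
    solver->MakeFixedDurationIntervalVarArray(start_end_times,
                                              /*duration=*/180,
                                              "depot_interval", &intervals);
    std::vector<int64_t> usage(start_end_times.size(), 1);
    solver->AddConstraint(
        solver->MakeCumulative(intervals, usage, /*capacity=*/5, "depot"));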
- -#include -#include - -#include "absl/random/random.h" -#include "examples/cpp/cvrptw_lib.h" -#include "google/protobuf/text_format.h" -#include "ortools/base/commandlineflags.h" -#include "ortools/base/init_google.h" -#include "ortools/base/types.h" -#include "ortools/base/logging.h" -#include "ortools/constraint_solver/routing.h" -#include "ortools/constraint_solver/routing_index_manager.h" -#include "ortools/constraint_solver/routing_parameters.h" -#include "ortools/constraint_solver/routing_parameters.pb.h" - -using operations_research::Assignment; -using operations_research::DefaultRoutingSearchParameters; -using operations_research::GetSeed; -using operations_research::IntervalVar; -using operations_research::IntVar; -using operations_research::LocationContainer; -using operations_research::RandomDemand; -using operations_research::RoutingDimension; -using operations_research::RoutingIndexManager; -using operations_research::RoutingModel; -using operations_research::RoutingNodeIndex; -using operations_research::RoutingSearchParameters; -using operations_research::ServiceTimePlusTransition; -using operations_research::Solver; - -ABSL_FLAG(int, vrp_orders, 100, "Nodes in the problem."); -ABSL_FLAG(int, vrp_vehicles, 20, - "Size of Traveling Salesman Problem instance."); -ABSL_FLAG(bool, vrp_use_deterministic_random_seed, false, - "Use deterministic random seeds."); -ABSL_FLAG(std::string, routing_search_parameters, "", - "Text proto RoutingSearchParameters (possibly partial) that will " - "override the DefaultRoutingSearchParameters()"); - -const char* kTime = "Time"; -const char* kCapacity = "Capacity"; - -int main(int argc, char** argv) { - InitGoogle(argv[0], &argc, &argv, true); - CHECK_LT(0, absl::GetFlag(FLAGS_vrp_orders)) - << "Specify an instance size greater than 0."; - CHECK_LT(0, absl::GetFlag(FLAGS_vrp_vehicles)) - << "Specify a non-null vehicle fleet size."; - // VRP of size absl::GetFlag(FLAGS_vrp_size). - // Nodes are indexed from 0 to absl::GetFlag(FLAGS_vrp_orders), the starts and - // ends of the routes are at node 0. - const RoutingIndexManager::NodeIndex kDepot(0); - RoutingIndexManager manager(absl::GetFlag(FLAGS_vrp_orders) + 1, - absl::GetFlag(FLAGS_vrp_vehicles), kDepot); - RoutingModel routing(manager); - - // Setting up locations. - const int64_t kXMax = 100000; - const int64_t kYMax = 100000; - const int64_t kSpeed = 10; - LocationContainer locations( - kSpeed, absl::GetFlag(FLAGS_vrp_use_deterministic_random_seed)); - for (int location = 0; location <= absl::GetFlag(FLAGS_vrp_orders); - ++location) { - locations.AddRandomLocation(kXMax, kYMax); - } - - // Setting the cost function. - const int vehicle_cost = routing.RegisterTransitCallback( - [&locations, &manager](int64_t i, int64_t j) { - return locations.ManhattanDistance(manager.IndexToNode(i), - manager.IndexToNode(j)); - }); - routing.SetArcCostEvaluatorOfAllVehicles(vehicle_cost); - - // Adding capacity dimension constraints. - const int64_t kVehicleCapacity = 40; - const int64_t kNullCapacitySlack = 0; - RandomDemand demand(manager.num_nodes(), kDepot, - absl::GetFlag(FLAGS_vrp_use_deterministic_random_seed)); - demand.Initialize(); - routing.AddDimension(routing.RegisterTransitCallback( - [&demand, &manager](int64_t i, int64_t j) { - return demand.Demand(manager.IndexToNode(i), - manager.IndexToNode(j)); - }), - kNullCapacitySlack, kVehicleCapacity, - /*fix_start_cumul_to_zero=*/true, kCapacity); - - // Adding time dimension constraints. 
-  const int64_t kTimePerDemandUnit = 300;
-  const int64_t kHorizon = 24 * 3600;
-  ServiceTimePlusTransition time(
-      kTimePerDemandUnit,
-      [&demand](RoutingNodeIndex i, RoutingNodeIndex j) {
-        return demand.Demand(i, j);
-      },
-      [&locations](RoutingNodeIndex i, RoutingNodeIndex j) {
-        return locations.ManhattanTime(i, j);
-      });
-  routing.AddDimension(
-      routing.RegisterTransitCallback([&time, &manager](int64_t i, int64_t j) {
-        return time.Compute(manager.IndexToNode(i), manager.IndexToNode(j));
-      }),
-      kHorizon, kHorizon, /*fix_start_cumul_to_zero=*/false, kTime);
-  const RoutingDimension& time_dimension = routing.GetDimensionOrDie(kTime);
-
-  // Adding time windows.
-  std::mt19937 randomizer(
-      GetSeed(absl::GetFlag(FLAGS_vrp_use_deterministic_random_seed)));
-  const int64_t kTWDuration = 5 * 3600;
-  for (int order = 1; order < manager.num_nodes(); ++order) {
-    const int64_t start =
-        absl::Uniform<int64_t>(randomizer, 0, kHorizon - kTWDuration);
-    time_dimension.CumulVar(order)->SetRange(start, start + kTWDuration);
-  }
-
-  // Adding resource constraints at the depot (start and end location of
-  // routes).
-  std::vector<IntVar*> start_end_times;
-  for (int i = 0; i < absl::GetFlag(FLAGS_vrp_vehicles); ++i) {
-    start_end_times.push_back(time_dimension.CumulVar(routing.End(i)));
-    start_end_times.push_back(time_dimension.CumulVar(routing.Start(i)));
-  }
-  // Build corresponding time intervals.
-  const int64_t kVehicleSetup = 180;
-  Solver* const solver = routing.solver();
-  std::vector<IntervalVar*> intervals;
-  solver->MakeFixedDurationIntervalVarArray(start_end_times, kVehicleSetup,
-                                            "depot_interval", &intervals);
-  // Constrain the number of maximum simultaneous intervals at depot.
-  const int64_t kDepotCapacity = 5;
-  std::vector<int64_t> depot_usage(start_end_times.size(), 1);
-  solver->AddConstraint(
-      solver->MakeCumulative(intervals, depot_usage, kDepotCapacity, "depot"));
-  // Instantiate route start and end times to produce feasible times.
-  for (int i = 0; i < start_end_times.size(); ++i) {
-    routing.AddVariableMinimizedByFinalizer(start_end_times[i]);
-  }
-
-  // Adding penalty costs to allow skipping orders.
-  const int64_t kPenalty = 100000;
-  const RoutingIndexManager::NodeIndex kFirstNodeAfterDepot(1);
-  for (RoutingIndexManager::NodeIndex order = kFirstNodeAfterDepot;
-       order < manager.num_nodes(); ++order) {
-    std::vector<int64_t> orders(1, manager.NodeToIndex(order));
-    routing.AddDisjunction(orders, kPenalty);
-  }
-
-  // Solve, returns a solution if any (owned by RoutingModel).
-  RoutingSearchParameters parameters = DefaultRoutingSearchParameters();
-  CHECK(google::protobuf::TextFormat::MergeFromString(
-      absl::GetFlag(FLAGS_routing_search_parameters), &parameters));
-  const Assignment* solution = routing.SolveWithParameters(parameters);
-  if (solution != nullptr) {
-    DisplayPlan(manager, routing, *solution, /*use_same_vehicle_costs=*/false,
-                /*max_nodes_per_group=*/0, /*same_vehicle_cost=*/0,
-                routing.GetDimensionOrDie(kCapacity),
-                routing.GetDimensionOrDie(kTime));
-  } else {
-    LOG(INFO) << "No solution found.";
-  }
-  return EXIT_SUCCESS;
-}
diff --git a/examples/cpp/cvrptw_with_stop_times_and_resources.cc b/examples/cpp/cvrptw_with_stop_times_and_resources.cc
deleted file mode 100644
index d1c494638b0..00000000000
--- a/examples/cpp/cvrptw_with_stop_times_and_resources.cc
+++ /dev/null
@@ -1,223 +0,0 @@
-// Copyright 2010-2024 Google LLC
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Capacitated Vehicle Routing Problem with Time Windows, fixed stop times and -// capacitated resources. A stop is defined as consecutive nodes at the same -// location. -// This is an extension to the model in cvrptw.cc so refer to that file for -// more information on the common part of the model. The model implemented here -// limits the number of vehicles which can simultaneously leave or enter a node -// to one. - -#include -#include - -#include "absl/random/random.h" -#include "absl/strings/str_cat.h" -#include "examples/cpp/cvrptw_lib.h" -#include "google/protobuf/text_format.h" -#include "ortools/base/commandlineflags.h" -#include "ortools/base/init_google.h" -#include "ortools/base/types.h" -#include "ortools/base/logging.h" -#include "ortools/constraint_solver/routing.h" -#include "ortools/constraint_solver/routing_index_manager.h" -#include "ortools/constraint_solver/routing_parameters.h" -#include "ortools/constraint_solver/routing_parameters.pb.h" - -using operations_research::Assignment; -using operations_research::DefaultRoutingSearchParameters; -using operations_research::GetSeed; -using operations_research::IntervalVar; -using operations_research::IntVar; -using operations_research::LocationContainer; -using operations_research::RandomDemand; -using operations_research::RoutingDimension; -using operations_research::RoutingIndexManager; -using operations_research::RoutingModel; -using operations_research::RoutingNodeIndex; -using operations_research::RoutingSearchParameters; -using operations_research::Solver; -using operations_research::StopServiceTimePlusTransition; - -ABSL_FLAG(int, vrp_stops, 25, "Stop locations in the problem."); -ABSL_FLAG(int, vrp_orders_per_stop, 5, "Nodes for each stop."); -ABSL_FLAG(int, vrp_vehicles, 20, - "Size of Traveling Salesman Problem instance."); -ABSL_FLAG(bool, vrp_use_deterministic_random_seed, false, - "Use deterministic random seeds."); -ABSL_FLAG(std::string, routing_search_parameters, "", - "Text proto RoutingSearchParameters (possibly partial) that will " - "override the DefaultRoutingSearchParameters()"); - -const char* kTime = "Time"; -const char* kCapacity = "Capacity"; - -int main(int argc, char** argv) { - InitGoogle(argv[0], &argc, &argv, true); - CHECK_LT(0, absl::GetFlag(FLAGS_vrp_stops)) - << "Specify an instance size greater than 0."; - CHECK_LT(0, absl::GetFlag(FLAGS_vrp_orders_per_stop)) - << "Specify an instance size greater than 0."; - CHECK_LT(0, absl::GetFlag(FLAGS_vrp_vehicles)) - << "Specify a non-null vehicle fleet size."; - const int vrp_orders = - absl::GetFlag(FLAGS_vrp_stops) * absl::GetFlag(FLAGS_vrp_orders_per_stop); - // Nodes are indexed from 0 to vrp_orders, the starts and ends of the routes - // are at node 0. - const RoutingIndexManager::NodeIndex kDepot(0); - RoutingIndexManager manager(vrp_orders + 1, absl::GetFlag(FLAGS_vrp_vehicles), - kDepot); - RoutingModel routing(manager); - - // Setting up locations. 
-  const int64_t kXMax = 100000;
-  const int64_t kYMax = 100000;
-  const int64_t kSpeed = 10;
-  LocationContainer locations(
-      kSpeed, absl::GetFlag(FLAGS_vrp_use_deterministic_random_seed));
-  for (int stop = 0; stop <= absl::GetFlag(FLAGS_vrp_stops); ++stop) {
-    const int num_orders =
-        stop == 0 ? 1 : absl::GetFlag(FLAGS_vrp_orders_per_stop);
-    locations.AddRandomLocation(kXMax, kYMax, num_orders);
-  }
-
-  // Setting the cost function.
-  const int vehicle_cost = routing.RegisterTransitCallback(
-      [&locations, &manager](int64_t i, int64_t j) {
-        return locations.ManhattanDistance(manager.IndexToNode(i),
-                                           manager.IndexToNode(j));
-      });
-  routing.SetArcCostEvaluatorOfAllVehicles(vehicle_cost);
-
-  // Adding capacity dimension constraints.
-  const int64_t kVehicleCapacity = 40;
-  const int64_t kNullCapacitySlack = 0;
-  RandomDemand demand(manager.num_nodes(), kDepot,
-                      absl::GetFlag(FLAGS_vrp_use_deterministic_random_seed));
-  demand.Initialize();
-  routing.AddDimension(routing.RegisterTransitCallback(
-                           [&demand, &manager](int64_t i, int64_t j) {
-                             return demand.Demand(manager.IndexToNode(i),
-                                                  manager.IndexToNode(j));
-                           }),
-                       kNullCapacitySlack, kVehicleCapacity,
-                       /*fix_start_cumul_to_zero=*/true, kCapacity);
-
-  // Adding time dimension constraints.
-  const int64_t kStopTime = 300;
-  const int64_t kHorizon = 24 * 3600;
-  StopServiceTimePlusTransition time(
-      kStopTime, locations,
-      [&locations](RoutingNodeIndex i, RoutingNodeIndex j) {
-        return locations.ManhattanTime(i, j);
-      });
-  routing.AddDimension(
-      routing.RegisterTransitCallback([&time, &manager](int64_t i, int64_t j) {
-        return time.Compute(manager.IndexToNode(i), manager.IndexToNode(j));
-      }),
-      kHorizon, kHorizon, /*fix_start_cumul_to_zero=*/false, kTime);
-  const RoutingDimension& time_dimension = routing.GetDimensionOrDie(kTime);
-
-  // Adding time windows; for the sake of simplicity, the same for each stop.
-  std::mt19937 randomizer(
-      GetSeed(absl::GetFlag(FLAGS_vrp_use_deterministic_random_seed)));
-  const int64_t kTWDuration = 5 * 3600;
-  for (int stop = 0; stop < absl::GetFlag(FLAGS_vrp_stops); ++stop) {
-    const int64_t start =
-        absl::Uniform<int64_t>(randomizer, 0, kHorizon - kTWDuration);
-    for (int stop_order = 0;
-         stop_order < absl::GetFlag(FLAGS_vrp_orders_per_stop); ++stop_order) {
-      const int order =
-          stop * absl::GetFlag(FLAGS_vrp_orders_per_stop) + stop_order + 1;
-      time_dimension.CumulVar(order)->SetRange(start, start + kTWDuration);
-    }
-  }
-
-  // Adding resource constraints at order locations.
-  Solver* const solver = routing.solver();
-  std::vector<IntervalVar*> intervals;
-  for (int stop = 0; stop < absl::GetFlag(FLAGS_vrp_stops); ++stop) {
-    std::vector<IntervalVar*> stop_intervals;
-    for (int stop_order = 0;
-         stop_order < absl::GetFlag(FLAGS_vrp_orders_per_stop); ++stop_order) {
-      const int order =
-          stop * absl::GetFlag(FLAGS_vrp_orders_per_stop) + stop_order + 1;
-      IntervalVar* const interval = solver->MakeFixedDurationIntervalVar(
-          0, kHorizon, kStopTime, true, absl::StrCat("Order", order));
-      intervals.push_back(interval);
-      stop_intervals.push_back(interval);
-      // Link order and interval.
-      IntVar* const order_start = time_dimension.CumulVar(order);
-      solver->AddConstraint(
-          solver->MakeIsEqualCt(interval->SafeStartExpr(0), order_start,
-                                interval->PerformedExpr()->Var()));
-      // Make interval performed iff corresponding order has service time.
-      // An order has no service time iff it is at the same location as the
-      // next order on the route.
-      IntVar* const is_null_duration =
-          solver
-              ->MakeElement(
-                  [&locations, order](int64_t index) {
-                    return locations.SameLocationFromIndex(order, index);
-                  },
-                  routing.NextVar(order))
-              ->Var();
-      solver->AddConstraint(
-          solver->MakeNonEquality(interval->PerformedExpr(), is_null_duration));
-      routing.AddIntervalToAssignment(interval);
-      // We are minimizing route durations by minimizing route ends; so we can
-      // maximize order starts to pack them together.
-      routing.AddVariableMaximizedByFinalizer(order_start);
-    }
-    // Only one order can happen at the same time at a given location.
-    std::vector<int64_t> location_usage(stop_intervals.size(), 1);
-    solver->AddConstraint(solver->MakeCumulative(
-        stop_intervals, location_usage, 1, absl::StrCat("Client", stop)));
-  }
-  // Minimizing route duration.
-  for (int vehicle = 0; vehicle < manager.num_vehicles(); ++vehicle) {
-    routing.AddVariableMinimizedByFinalizer(
-        time_dimension.CumulVar(routing.End(vehicle)));
-  }
-
-  // Adding penalty costs to allow skipping orders.
-  const int64_t kPenalty = 100000;
-  const RoutingIndexManager::NodeIndex kFirstNodeAfterDepot(1);
-  for (RoutingIndexManager::NodeIndex order = kFirstNodeAfterDepot;
-       order < routing.nodes(); ++order) {
-    std::vector<int64_t> orders(1, manager.NodeToIndex(order));
-    routing.AddDisjunction(orders, kPenalty);
-  }
-
-  // Solve, returns a solution if any (owned by RoutingModel).
-  RoutingSearchParameters parameters = DefaultRoutingSearchParameters();
-  CHECK(google::protobuf::TextFormat::MergeFromString(
-      absl::GetFlag(FLAGS_routing_search_parameters), &parameters));
-  const Assignment* solution = routing.SolveWithParameters(parameters);
-  if (solution != nullptr) {
-    DisplayPlan(manager, routing, *solution, /*use_same_vehicle_costs=*/false,
-                /*max_nodes_per_group=*/0, /*same_vehicle_cost=*/0,
-                routing.GetDimensionOrDie(kCapacity),
-                routing.GetDimensionOrDie(kTime));
-    LOG(INFO) << "Stop intervals:";
-    for (IntervalVar* const interval : intervals) {
-      if (solution->PerformedValue(interval)) {
-        LOG(INFO) << interval->name() << ": " << solution->StartValue(interval);
-      }
-    }
-  } else {
-    LOG(INFO) << "No solution found.";
-  }
-  return EXIT_SUCCESS;
-}
diff --git a/examples/cpp/frequency_assignment_problem.cc b/examples/cpp/frequency_assignment_problem.cc
index 72914889ee7..626e5aec5ee 100644
--- a/examples/cpp/frequency_assignment_problem.cc
+++ b/examples/cpp/frequency_assignment_problem.cc
@@ -54,6 +54,7 @@
 
 #include "absl/container/btree_map.h"
 #include "absl/strings/string_view.h"
+#include "absl/types/span.h"
 #include "examples/cpp/fap_model_printer.h"
 #include "examples/cpp/fap_parser.h"
 #include "examples/cpp/fap_utilities.h"
@@ -100,6 +101,10 @@ class OrderingDecision : public Decision {
         variable2_(variable2),
         value_(value),
         operator_(std::move(operation)) {}
+
+  // This type is neither copyable nor movable.
+  OrderingDecision(const OrderingDecision&) = delete;
+  OrderingDecision& operator=(const OrderingDecision&) = delete;
 
   ~OrderingDecision() override = default;
 
   // Apply will be called first when the decision is executed.
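The frequency_assignment_problem.cc hunks above and below mechanically replace the DISALLOW_COPY_AND_ASSIGN macro with explicitly deleted special members, the portable modern spelling of the same intent. A self-contained illustration of the idiom:

    // Sketch: deleting the copy operations replaces DISALLOW_COPY_AND_ASSIGN.
    // Declaring them deleted also suppresses the implicit move operations,
    // so the type is neither copyable nor movable, and they can now live in
    // the public section next to the other special members.
    class NonCopyable {
     public:
      NonCopyable() = default;
      NonCopyable(const NonCopyable&) = delete;
      NonCopyable& operator=(const NonCopyable&) = delete;
    };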
@@ -131,8 +136,6 @@ class OrderingDecision : public Decision { IntVar* const variable2_; const int value_; const std::string operator_; - - DISALLOW_COPY_AND_ASSIGN(OrderingDecision); }; // Decision on whether a soft constraint will be added to a model @@ -142,6 +145,10 @@ class ConstraintDecision : public Decision { explicit ConstraintDecision(IntVar* const constraint_violation) : constraint_violation_(constraint_violation) {} + // This type is neither copyable nor movable. + ConstraintDecision(const ConstraintDecision&) = delete; + ConstraintDecision& operator=(const ConstraintDecision&) = delete; + ~ConstraintDecision() override = default; // Apply will be called first when the decision is executed. @@ -158,8 +165,6 @@ class ConstraintDecision : public Decision { private: IntVar* const constraint_violation_; - - DISALLOW_COPY_AND_ASSIGN(ConstraintDecision); }; // The ordering builder resolves the relative order of the two variables @@ -192,6 +197,10 @@ class OrderingBuilder : public DecisionBuilder { CHECK_EQ(variable_state_.size(), variables_.size()); } + // This type is neither copyable nor movable. + OrderingBuilder(const OrderingBuilder&) = delete; + OrderingBuilder& operator=(const OrderingBuilder&) = delete; + ~OrderingBuilder() override = default; Decision* Next(Solver* const s) override { @@ -320,8 +329,6 @@ class OrderingBuilder : public DecisionBuilder { // Used by Hint() for indicating the most probable ordering. std::vector variable_state_; std::vector minimum_value_available_; - - DISALLOW_COPY_AND_ASSIGN(OrderingBuilder); }; // A comparator for sorting the constraints depending on their impact. @@ -373,7 +380,7 @@ int64_t ValueEvaluator( // The variables which participate in more constraints and have the // smaller domain should be in higher priority for assignment. int64_t VariableEvaluator( - const std::vector& key_from_index, + absl::Span key_from_index, const absl::btree_map& data_variables, int64_t variable_index) { FapVariable variable = @@ -414,7 +421,7 @@ void CreateModelVariables( } // Creates the constraints of the instance from the parsed data. -void CreateModelConstraints(const std::vector& data_constraints, +void CreateModelConstraints(absl::Span data_constraints, const std::vector& variables, const absl::btree_map& index_from_key, Solver* solver) { @@ -649,7 +656,7 @@ void SplitVariablesHardSoft( } // Splits constraints of the instance to hard and soft. -void SplitConstraintHardSoft(const std::vector& data_constraints, +void SplitConstraintHardSoft(absl::Span data_constraints, std::vector* hard_constraints, std::vector* soft_constraints) { for (const FapConstraint& ct : data_constraints) { @@ -683,8 +690,8 @@ void PenalizeVariablesViolation( // Penalize the violation of soft constraints of the instance. 
void PenalizeConstraintsViolation( - const std::vector<FapConstraint>& constraints, - const std::vector<FapConstraint>& soft_constraints, + absl::Span<const FapConstraint> constraints, + absl::Span<const FapConstraint> soft_constraints, const absl::btree_map<int, int>& index_from_key, const std::vector<IntVar*>& variables, std::vector<IntVar*>* cost, std::vector<IntVar*>* violated_constraints, Solver* solver) { @@ -733,7 +740,7 @@ void PenalizeConstraintsViolation( int SoftFapSolver(const absl::btree_map<int, FapVariable>& data_variables, const std::vector<FapConstraint>& data_constraints, absl::string_view /*data_objective*/, - const std::vector<int>& /*values*/) { + absl::Span<const int> /*values*/) { Solver solver("SoftFapSolver"); std::vector<SearchMonitor*> monitors; diff --git a/examples/cpp/jobshop_sat.cc b/examples/cpp/jobshop_sat.cc index 27b76dc2899..11c64480f6c 100644 --- a/examples/cpp/jobshop_sat.cc +++ b/examples/cpp/jobshop_sat.cc @@ -322,7 +322,7 @@ std::vector> GetDataPerMachine( void CreateMachines( const JsspInputProblem& problem, - const std::vector>>& + absl::Span>> job_task_to_alternatives, IntervalVar makespan_interval, CpModelBuilder& cp_model) { const int num_jobs = problem.jobs_size(); @@ -733,12 +733,6 @@ void Solve(const JsspInputProblem& problem) { // Setup parameters. SatParameters parameters; parameters.set_log_search_progress(true); - // Parse the --params flag. - if (!absl::GetFlag(FLAGS_params).empty()) { - CHECK(google::protobuf::TextFormat::MergeFromString( - absl::GetFlag(FLAGS_params), &parameters)) - << absl::GetFlag(FLAGS_params); - } // Prefer objective_shaving_search over objective_lb_search. if (parameters.num_workers() >= 16 && parameters.num_workers() < 24) { @@ -751,6 +745,13 @@ void Solve(const JsspInputProblem& problem) { parameters.set_push_all_tasks_toward_start(true); parameters.set_use_dynamic_precedence_in_disjunctive(true); + // Parse the --params flag. + if (!absl::GetFlag(FLAGS_params).empty()) { + CHECK(google::protobuf::TextFormat::MergeFromString( + absl::GetFlag(FLAGS_params), &parameters)) + << absl::GetFlag(FLAGS_params); + } + const CpSolverResponse response = SolveWithParameters(cp_model.Build(), parameters); diff --git a/examples/cpp/max_flow.cc b/examples/cpp/max_flow.cc index a7d15298497..a99e9b49833 100644 --- a/examples/cpp/max_flow.cc +++ b/examples/cpp/max_flow.cc @@ -16,6 +16,7 @@ #include #include +#include "absl/flags/flag.h" #include "ortools/base/init_google.h" #include "ortools/base/logging.h" diff --git a/examples/cpp/min_cost_flow.cc b/examples/cpp/min_cost_flow.cc index 8119bbbf68c..7c4dbef571d 100644 --- a/examples/cpp/min_cost_flow.cc +++ b/examples/cpp/min_cost_flow.cc @@ -16,6 +16,7 @@ #include #include +#include "absl/flags/flag.h" #include "ortools/base/init_google.h" #include "ortools/base/logging.h" diff --git a/examples/cpp/pdlp_solve.cc b/examples/cpp/pdlp_solve.cc index 9fbd11dba18..1a86d85eb9a 100644 --- a/examples/cpp/pdlp_solve.cc +++ b/examples/cpp/pdlp_solve.cc @@ -16,30 +16,39 @@ // the input problem.
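The jobshop_sat.cc reorder above is behavioral, not cosmetic: `--params` is now merged after the code sets its own defaults, and since `TextFormat::MergeFromString` only overwrites the fields present in the text, user-supplied settings win. A hedged sketch of the pattern, reusing the parameters seen in the hunk:

#include <string>

#include "absl/flags/flag.h"
#include "absl/log/check.h"
#include "google/protobuf/text_format.h"
#include "ortools/sat/sat_parameters.pb.h"

ABSL_FLAG(std::string, params, "", "SatParameters in text format.");

operations_research::sat::SatParameters BuildParameters() {
  operations_research::sat::SatParameters parameters;
  // 1) Code-level defaults first.
  parameters.set_log_search_progress(true);
  parameters.set_push_all_tasks_toward_start(true);
  // 2) User flag last: MergeFromString only touches the fields present
  //    in the text, so anything the user sets overrides the defaults.
  if (!absl::GetFlag(FLAGS_params).empty()) {
    CHECK(google::protobuf::TextFormat::MergeFromString(
        absl::GetFlag(FLAGS_params), &parameters))
        << absl::GetFlag(FLAGS_params);
  }
  return parameters;
}

With the old order, any default set in code after the merge silently clobbered whatever the user passed on the command line.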
#include +#include +#include +#include #include -#include #include "absl/flags/flag.h" -#include "absl/flags/parse.h" -#include "absl/flags/usage.h" +#include "absl/log/check.h" +#include "absl/log/flags.h" #include "absl/strings/match.h" -#include "absl/strings/str_format.h" -#include "ortools/base/commandlineflags.h" -#include "ortools/base/file.h" +#include "absl/strings/str_cat.h" +#include "absl/strings/string_view.h" +#include "ortools/base/helpers.h" #include "ortools/base/init_google.h" #include "ortools/base/logging.h" +#include "ortools/base/options.h" #include "ortools/pdlp/iteration_stats.h" #include "ortools/pdlp/primal_dual_hybrid_gradient.h" +#include "ortools/pdlp/quadratic_program.h" #include "ortools/pdlp/quadratic_program_io.h" #include "ortools/pdlp/solve_log.pb.h" #include "ortools/pdlp/solvers.pb.h" #include "ortools/port/proto_utils.h" #include "ortools/util/file_util.h" +#include "ortools/util/fp_roundtrip_conv.h" #include "ortools/util/sigint.h" // TODO: .mps.gz files aren't working. As a workaround, use .mps. -ABSL_FLAG(std::string, input, "", "REQUIRED: Input file name."); +ABSL_FLAG( + std::string, input, "", + "REQUIRED: Input file name. The following formats are supported: \n" + " - a .mps, .mps.bz2 file,\n" + " - an MPModelProto [.pb (binary), .textproto (text), *.json, *.json.gz]"); ABSL_FLAG(std::string, params, "", "PrimalDualHybridGradientParams in text format"); ABSL_FLAG(std::string, solve_log_file, "", @@ -49,11 +58,6 @@ ABSL_FLAG( std::string, sol_file, "", "If non-empty, output the final primal solution in Miplib .sol format."); -static const char kUsageStr[] = - "Run PDLP on the given input file. The following formats are supported: \n" - " - a .mps, .mps.gz, .mps.bz2 file,\n" - " - an MPModelProto [.pb (binary), .textproto (text), *.json, *.json.gz]"; - namespace operations_research::pdlp { void WriteSolveLog(const std::string& solve_log_file, const SolveLog& log) { @@ -68,11 +72,12 @@ void WriteSolveLog(const std::string& solve_log_file, const SolveLog& log) { LOG(FATAL) << "Unrecognized file extension for --solve_log_file: " << solve_log_file << ". Expected .textproto, .pb, or .json"; } - QCHECK(WriteProtoToFile(solve_log_file, log, write_format, /*gzipped=*/false, - /*append_extension_to_file_name=*/false).ok()); + QCHECK_OK(WriteProtoToFile(solve_log_file, log, write_format, + /*gzipped=*/false, + /*append_extension_to_file_name=*/false)); } -void Solve(const std::string& input, const std::string& params_str, +void Solve(const std::string& input, absl::string_view params_str, const std::string& solve_log_file, const std::string& sol_file) { QCHECK(!input.empty()) << "--input is required"; PrimalDualHybridGradientParams params; @@ -104,8 +109,9 @@ void Solve(const std::string& input, const std::string& params_str, // TODO: In what format should we write the dual solution? 
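The pdlp_solve.cc hunk that follows wraps every double written to the .sol file in `RoundTripDoubleFormat` (from the newly included ortools/util/fp_roundtrip_conv.h), which prints a decimal string that parses back to exactly the same double. A minimal usage sketch mirroring the hunk:

#include <iostream>
#include <string>

#include "absl/strings/str_cat.h"
#include "ortools/util/fp_roundtrip_conv.h"

int main() {
  const double objective = 1.0 / 3.0;
  std::string sol_string;
  // The wrapper stringifies to a form that round-trips losslessly, so
  // re-reading the .sol file recovers the exact binary double.
  absl::StrAppend(&sol_string, "=obj= ",
                  operations_research::RoundTripDoubleFormat(objective), "\n");
  std::cout << sol_string;
}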
if (!sol_file.empty() && convergence_information.has_value()) { std::string sol_string; - absl::StrAppend(&sol_string, - "=obj= ", convergence_information->primal_objective(), + absl::StrAppend( + &sol_string, "=obj= ", + RoundTripDoubleFormat(convergence_information->primal_objective()), "\n"); for (int64_t i = 0; i < result.primal_solution.size(); ++i) { std::string name; @@ -114,7 +120,8 @@ } else { name = absl::StrCat("var", i); } - absl::StrAppend(&sol_string, name, " ", result.primal_solution(i), "\n"); + absl::StrAppend(&sol_string, name, " ", + RoundTripDoubleFormat(result.primal_solution(i)), "\n"); } LOG(INFO) << "Writing .sol solution to '" << sol_file << "'.\n"; CHECK_OK(file::SetContents(sol_file, sol_string, file::Defaults())); @@ -125,8 +132,7 @@ int main(int argc, char** argv) { absl::SetFlag(&FLAGS_stderrthreshold, 0); - google::InitGoogleLogging(kUsageStr); - absl::ParseCommandLine(argc, argv); + InitGoogle(argv[0], &argc, &argv, /*remove_flags=*/true); operations_research::pdlp::Solve( absl::GetFlag(FLAGS_input), absl::GetFlag(FLAGS_params), diff --git a/examples/cpp/slitherlink_sat.cc b/examples/cpp/slitherlink_sat.cc index c9dba90dc40..e9fba86af60 100644 --- a/examples/cpp/slitherlink_sat.cc +++ b/examples/cpp/slitherlink_sat.cc @@ -66,7 +66,7 @@ void PrintSolution(absl::Span<const std::vector<int>> data, std::cout << last_line << std::endl; } -void SlitherLink(const std::vector<std::vector<int>>& data) { +void SlitherLink(absl::Span<const std::vector<int>> data) { const int num_rows = data.size(); const int num_columns = data[0].size(); diff --git a/examples/cpp/uncapacitated_facility_location.cc b/examples/cpp/uncapacitated_facility_location.cc index e738ce24171..e91707e0c45 100644 --- a/examples/cpp/uncapacitated_facility_location.cc +++ b/examples/cpp/uncapacitated_facility_location.cc @@ -25,6 +25,7 @@ #include #include +#include "absl/flags/flag.h" #include "absl/flags/parse.h" #include "absl/flags/usage.h" #include "absl/log/initialize.h" diff --git a/examples/cpp/weighted_tardiness_sat.cc b/examples/cpp/weighted_tardiness_sat.cc index 1690e7f3241..0462d6e3257 100644 --- a/examples/cpp/weighted_tardiness_sat.cc +++ b/examples/cpp/weighted_tardiness_sat.cc @@ -18,14 +18,20 @@ #include #include "absl/flags/flag.h" +#include "absl/log/check.h" #include "absl/strings/numbers.h" +#include "absl/strings/str_cat.h" #include "absl/strings/str_split.h" #include "absl/types/span.h" #include "ortools/base/init_google.h" #include "ortools/base/logging.h" #include "ortools/sat/cp_model.h" +#include "ortools/sat/cp_model.pb.h" +#include "ortools/sat/cp_model_solver.h" #include "ortools/sat/model.h" +#include "ortools/sat/sat_parameters.pb.h" #include "ortools/util/filelineiter.h" +#include "ortools/util/sorted_interval_list.h" ABSL_FLAG(std::string, input, "examples/cpp/wt40.txt", "wt data file name."); ABSL_FLAG(int, size, 40, "Size of the problem in the wt file."); diff --git a/examples/cpp/xpress_use.cc b/examples/cpp/xpress_use.cc index 600918e7620..aac79913b38 100644 --- a/examples/cpp/xpress_use.cc +++ b/examples/cpp/xpress_use.cc @@ -91,7 +91,6 @@ void useXpressSolver(bool solveAsMip, bool useFactory) { #define ABSL_MIN_LOG_LEVEL INFO; int main(int argc, char** argv) { absl::SetFlag(&FLAGS_stderrthreshold, 0); - absl::SetFlag(&FLAGS_logtostderr, true); InitGoogle(argv[0], &argc, &argv, true); std::cout << "start\n"; LOG(WARNING) << "start"; diff --git a/examples/flatzinc/README
b/examples/flatzinc/README.md similarity index 100% rename from examples/flatzinc/README rename to examples/flatzinc/README.md diff --git a/examples/notebook/algorithms/knapsack.ipynb b/examples/notebook/algorithms/knapsack.ipynb index d0909076d19..9d430ded3b7 100644 --- a/examples/notebook/algorithms/knapsack.ipynb +++ b/examples/notebook/algorithms/knapsack.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/algorithms/simple_knapsack_program.ipynb b/examples/notebook/algorithms/simple_knapsack_program.ipynb index eefae5c088c..2db82375a3b 100644 --- a/examples/notebook/algorithms/simple_knapsack_program.ipynb +++ b/examples/notebook/algorithms/simple_knapsack_program.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/constraint_solver/cp_is_fun_cp.ipynb b/examples/notebook/constraint_solver/cp_is_fun_cp.ipynb index eccca4c8303..bbddc10de84 100644 --- a/examples/notebook/constraint_solver/cp_is_fun_cp.ipynb +++ b/examples/notebook/constraint_solver/cp_is_fun_cp.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/constraint_solver/cvrp_reload.ipynb b/examples/notebook/constraint_solver/cvrp_reload.ipynb index 5684baaea9e..f90b37dbd94 100644 --- a/examples/notebook/constraint_solver/cvrp_reload.ipynb +++ b/examples/notebook/constraint_solver/cvrp_reload.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
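In the cvrp_reload hunks just below, the notebook switches solution reads from assignment.Value(...) to assignment.Min(...): when a dimension has slack, cumul variables can keep a feasible range [Min, Max] even in a finished solution, and Min, the earliest feasible value, is always safe to read. A self-contained C++ sketch of the same read; the 3-node time matrix and dimension name are made up for the example:

#include <cstdint>
#include <vector>

#include "ortools/base/logging.h"
#include "ortools/constraint_solver/routing.h"
#include "ortools/constraint_solver/routing_index_manager.h"
#include "ortools/constraint_solver/routing_parameters.h"

namespace operations_research {
void ReadCumulsWithMin() {
  // Made-up travel-time matrix, one vehicle, depot at node 0.
  const std::vector<std::vector<int64_t>> times = {
      {0, 2, 4}, {2, 0, 3}, {4, 3, 0}};
  RoutingIndexManager manager(/*num_nodes=*/3, /*num_vehicles=*/1,
                              RoutingIndexManager::NodeIndex(0));
  RoutingModel routing(manager);
  const int transit = routing.RegisterTransitCallback(
      [&times, &manager](int64_t from, int64_t to) -> int64_t {
        return times[manager.IndexToNode(from).value()]
                    [manager.IndexToNode(to).value()];
      });
  routing.SetArcCostEvaluatorOfAllVehicles(transit);
  // Slack allows waiting, so time cumuls may keep a range at a solution.
  const bool ok = routing.AddDimension(transit, /*slack_max=*/10,
                                       /*capacity=*/100,
                                       /*fix_start_cumul_to_zero=*/true,
                                       "Time");
  CHECK(ok);
  const Assignment* solution =
      routing.SolveWithParameters(DefaultRoutingSearchParameters());
  CHECK(solution != nullptr);
  const RoutingDimension& time = routing.GetDimensionOrDie("Time");
  // Min() is the earliest feasible value and never trips on an unbound var.
  LOG(INFO) << "Route ends no earlier than "
            << solution->Min(time.CumulVar(routing.End(0)));
}
}  // namespace operations_research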
] }, { @@ -380,7 +380,7 @@ " time_var = time_dimension.CumulVar(index)\n", " plan_output += (\n", " f' {manager.IndexToNode(index)} '\n", - " f'Load({assignment.Value(load_var)}) '\n", + " f'Load({assignment.Min(load_var)}) '\n", " f'Time({assignment.Min(time_var)},{assignment.Max(time_var)}) ->'\n", " )\n", " previous_index = index\n", @@ -391,15 +391,15 @@ " time_var = time_dimension.CumulVar(index)\n", " plan_output += (\n", " f' {manager.IndexToNode(index)} '\n", - " f'Load({assignment.Value(load_var)}) '\n", + " f'Load({assignment.Min(load_var)}) '\n", " f'Time({assignment.Min(time_var)},{assignment.Max(time_var)})\\n')\n", " plan_output += f'Distance of the route: {distance}m\\n'\n", - " plan_output += f'Load of the route: {assignment.Value(load_var)}\\n'\n", - " plan_output += f'Time of the route: {assignment.Value(time_var)}min\\n'\n", + " plan_output += f'Load of the route: {assignment.Min(load_var)}\\n'\n", + " plan_output += f'Time of the route: {assignment.Min(time_var)}min\\n'\n", " print(plan_output)\n", " total_distance += distance\n", - " total_load += assignment.Value(load_var)\n", - " total_time += assignment.Value(time_var)\n", + " total_load += assignment.Min(load_var)\n", + " total_time += assignment.Min(time_var)\n", " print(f'Total Distance of all routes: {total_distance}m')\n", " print(f'Total Load of all routes: {total_load}')\n", " print(f'Total Time of all routes: {total_time}min')\n", diff --git a/examples/notebook/constraint_solver/cvrptw_break.ipynb b/examples/notebook/constraint_solver/cvrptw_break.ipynb index c037cccd8c6..f4b6ecef83b 100644 --- a/examples/notebook/constraint_solver/cvrptw_break.ipynb +++ b/examples/notebook/constraint_solver/cvrptw_break.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/constraint_solver/nqueens_cp.ipynb b/examples/notebook/constraint_solver/nqueens_cp.ipynb index 49b80d9f05b..52e67137ae6 100644 --- a/examples/notebook/constraint_solver/nqueens_cp.ipynb +++ b/examples/notebook/constraint_solver/nqueens_cp.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/constraint_solver/simple_cp_program.ipynb b/examples/notebook/constraint_solver/simple_cp_program.ipynb index 93ab4f5c145..e57f62343fa 100644 --- a/examples/notebook/constraint_solver/simple_cp_program.ipynb +++ b/examples/notebook/constraint_solver/simple_cp_program.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/constraint_solver/simple_routing_program.ipynb b/examples/notebook/constraint_solver/simple_routing_program.ipynb index 3364e5d7643..57ebd7a1e81 100644 --- a/examples/notebook/constraint_solver/simple_routing_program.ipynb +++ b/examples/notebook/constraint_solver/simple_routing_program.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/constraint_solver/tsp.ipynb b/examples/notebook/constraint_solver/tsp.ipynb index 53687bdf719..326c9f83a94 100644 --- a/examples/notebook/constraint_solver/tsp.ipynb +++ b/examples/notebook/constraint_solver/tsp.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." 
+ "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/constraint_solver/tsp_circuit_board.ipynb b/examples/notebook/constraint_solver/tsp_circuit_board.ipynb index 0f70f7e4cca..76e86f1d79b 100644 --- a/examples/notebook/constraint_solver/tsp_circuit_board.ipynb +++ b/examples/notebook/constraint_solver/tsp_circuit_board.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/constraint_solver/tsp_cities.ipynb b/examples/notebook/constraint_solver/tsp_cities.ipynb index 8aee9a21ea2..c759d138b81 100644 --- a/examples/notebook/constraint_solver/tsp_cities.ipynb +++ b/examples/notebook/constraint_solver/tsp_cities.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/constraint_solver/tsp_distance_matrix.ipynb b/examples/notebook/constraint_solver/tsp_distance_matrix.ipynb index 1dc0c99fbe5..0a32bfc5f8c 100644 --- a/examples/notebook/constraint_solver/tsp_distance_matrix.ipynb +++ b/examples/notebook/constraint_solver/tsp_distance_matrix.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/constraint_solver/vrp.ipynb b/examples/notebook/constraint_solver/vrp.ipynb index 602ee59995f..6512432a1d7 100644 --- a/examples/notebook/constraint_solver/vrp.ipynb +++ b/examples/notebook/constraint_solver/vrp.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/constraint_solver/vrp_breaks.ipynb b/examples/notebook/constraint_solver/vrp_breaks.ipynb index 82f30a29b7a..ae15252c87a 100644 --- a/examples/notebook/constraint_solver/vrp_breaks.ipynb +++ b/examples/notebook/constraint_solver/vrp_breaks.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/constraint_solver/vrp_breaks_from_start.ipynb b/examples/notebook/constraint_solver/vrp_breaks_from_start.ipynb index 82119980d14..855235963a1 100644 --- a/examples/notebook/constraint_solver/vrp_breaks_from_start.ipynb +++ b/examples/notebook/constraint_solver/vrp_breaks_from_start.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/constraint_solver/vrp_capacity.ipynb b/examples/notebook/constraint_solver/vrp_capacity.ipynb index e0b7e7d827f..2e4465fa3d6 100644 --- a/examples/notebook/constraint_solver/vrp_capacity.ipynb +++ b/examples/notebook/constraint_solver/vrp_capacity.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/constraint_solver/vrp_drop_nodes.ipynb b/examples/notebook/constraint_solver/vrp_drop_nodes.ipynb index 73e72be25b6..459e2fed81b 100644 --- a/examples/notebook/constraint_solver/vrp_drop_nodes.ipynb +++ b/examples/notebook/constraint_solver/vrp_drop_nodes.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { diff --git a/examples/notebook/constraint_solver/vrp_global_span.ipynb b/examples/notebook/constraint_solver/vrp_global_span.ipynb index a62cfe957ec..b9278678ecd 100644 --- a/examples/notebook/constraint_solver/vrp_global_span.ipynb +++ b/examples/notebook/constraint_solver/vrp_global_span.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/constraint_solver/vrp_initial_routes.ipynb b/examples/notebook/constraint_solver/vrp_initial_routes.ipynb index 5051d3f9327..bfe860c563e 100644 --- a/examples/notebook/constraint_solver/vrp_initial_routes.ipynb +++ b/examples/notebook/constraint_solver/vrp_initial_routes.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/constraint_solver/vrp_items_to_deliver.ipynb b/examples/notebook/constraint_solver/vrp_items_to_deliver.ipynb index a7a0ca637f2..e8602365758 100644 --- a/examples/notebook/constraint_solver/vrp_items_to_deliver.ipynb +++ b/examples/notebook/constraint_solver/vrp_items_to_deliver.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/constraint_solver/vrp_node_max.ipynb b/examples/notebook/constraint_solver/vrp_node_max.ipynb index aff7fe46dff..bcd9e8c958a 100644 --- a/examples/notebook/constraint_solver/vrp_node_max.ipynb +++ b/examples/notebook/constraint_solver/vrp_node_max.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/constraint_solver/vrp_nodes_indices.ipynb b/examples/notebook/constraint_solver/vrp_nodes_indices.ipynb index fd58b697c6a..9e93d6ea8cd 100644 --- a/examples/notebook/constraint_solver/vrp_nodes_indices.ipynb +++ b/examples/notebook/constraint_solver/vrp_nodes_indices.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/constraint_solver/vrp_pickup_delivery.ipynb b/examples/notebook/constraint_solver/vrp_pickup_delivery.ipynb index c0a36bb5b7e..1f6aa3be6cd 100644 --- a/examples/notebook/constraint_solver/vrp_pickup_delivery.ipynb +++ b/examples/notebook/constraint_solver/vrp_pickup_delivery.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/constraint_solver/vrp_pickup_delivery_fifo.ipynb b/examples/notebook/constraint_solver/vrp_pickup_delivery_fifo.ipynb index 624902ef795..6a4154e1135 100644 --- a/examples/notebook/constraint_solver/vrp_pickup_delivery_fifo.ipynb +++ b/examples/notebook/constraint_solver/vrp_pickup_delivery_fifo.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { diff --git a/examples/notebook/constraint_solver/vrp_pickup_delivery_lifo.ipynb b/examples/notebook/constraint_solver/vrp_pickup_delivery_lifo.ipynb index ab1ceadbd0a..369b896a126 100644 --- a/examples/notebook/constraint_solver/vrp_pickup_delivery_lifo.ipynb +++ b/examples/notebook/constraint_solver/vrp_pickup_delivery_lifo.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/constraint_solver/vrp_resources.ipynb b/examples/notebook/constraint_solver/vrp_resources.ipynb index 527bd11da9f..026cc370da4 100644 --- a/examples/notebook/constraint_solver/vrp_resources.ipynb +++ b/examples/notebook/constraint_solver/vrp_resources.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/constraint_solver/vrp_solution_callback.ipynb b/examples/notebook/constraint_solver/vrp_solution_callback.ipynb index 2d2ac653bd3..571575c57fd 100644 --- a/examples/notebook/constraint_solver/vrp_solution_callback.ipynb +++ b/examples/notebook/constraint_solver/vrp_solution_callback.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -91,6 +91,8 @@ "metadata": {}, "outputs": [], "source": [ + "import weakref\n", + "\n", "from ortools.constraint_solver import routing_enums_pb2\n", "from ortools.constraint_solver import pywrapcp\n", "\n", @@ -159,20 +161,21 @@ " model: pywrapcp.RoutingModel,\n", " limit: int,\n", " ):\n", - " self._routing_manager = manager\n", - " self._routing_model = model\n", + " # We need a weak ref on the routing model to avoid a cycle.\n", + " self._routing_manager_ref = weakref.ref(manager)\n", + " self._routing_model_ref = weakref.ref(model)\n", " self._counter = 0\n", " self._counter_limit = limit\n", " self.objectives = []\n", "\n", " def __call__(self):\n", - " objective = int(self._routing_model.CostVar().Value())\n", + " objective = int(self._routing_model_ref().CostVar().Value())\n", " if not self.objectives or objective < self.objectives[-1]:\n", " self.objectives.append(objective)\n", - " print_solution(self._routing_manager, self._routing_model)\n", + " print_solution(self._routing_manager_ref(), self._routing_model_ref())\n", " self._counter += 1\n", " if self._counter > self._counter_limit:\n", - " self._routing_model.solver().FinishCurrentSearch()\n", + " self._routing_model_ref().solver().FinishCurrentSearch()\n", "\n", "\n", "\n", diff --git a/examples/notebook/constraint_solver/vrp_starts_ends.ipynb b/examples/notebook/constraint_solver/vrp_starts_ends.ipynb index 950cde4d9fc..de86e7addd4 100644 --- a/examples/notebook/constraint_solver/vrp_starts_ends.ipynb +++ b/examples/notebook/constraint_solver/vrp_starts_ends.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/constraint_solver/vrp_time_windows.ipynb b/examples/notebook/constraint_solver/vrp_time_windows.ipynb index d23624b6133..1796e779b8e 100644 --- a/examples/notebook/constraint_solver/vrp_time_windows.ipynb +++ b/examples/notebook/constraint_solver/vrp_time_windows.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
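The vrp_solution_callback change above keeps only weakref handles so the Python callback does not close a manager/model reference cycle that would keep the underlying objects alive. The same cycle-breaking idea expressed in C++ with std::weak_ptr; all types here are invented for illustration:

#include <iostream>
#include <memory>

struct Model;

struct SolutionCallback {
  // Weak handle: observing the model must not extend its lifetime.
  std::weak_ptr<Model> model;
  void operator()() const;
};

struct Model {
  SolutionCallback callback;
};

void SolutionCallback::operator()() const {
  if (model.lock()) {
    std::cout << "model is alive\n";
  } else {
    std::cout << "model was destroyed\n";
  }
}

int main() {
  auto model = std::make_shared<Model>();
  SolutionCallback cb;
  cb.model = model;
  model->callback = cb;  // model -> callback -> (weak) model: no strong cycle
  cb();                  // prints "model is alive"
  model.reset();         // the model can actually be destroyed
  cb();                  // prints "model was destroyed"
}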
] }, { diff --git a/examples/notebook/constraint_solver/vrp_time_windows_per_vehicles.ipynb b/examples/notebook/constraint_solver/vrp_time_windows_per_vehicles.ipynb index 7d54cffabe1..f0326c598b6 100644 --- a/examples/notebook/constraint_solver/vrp_time_windows_per_vehicles.ipynb +++ b/examples/notebook/constraint_solver/vrp_time_windows_per_vehicles.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/constraint_solver/vrp_tokens.ipynb b/examples/notebook/constraint_solver/vrp_tokens.ipynb index 1f15db2f8c4..773461b26ea 100644 --- a/examples/notebook/constraint_solver/vrp_tokens.ipynb +++ b/examples/notebook/constraint_solver/vrp_tokens.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/constraint_solver/vrp_with_time_limit.ipynb b/examples/notebook/constraint_solver/vrp_with_time_limit.ipynb index 94a86a864fc..870e8efc7a2 100644 --- a/examples/notebook/constraint_solver/vrp_with_time_limit.ipynb +++ b/examples/notebook/constraint_solver/vrp_with_time_limit.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/constraint_solver/vrptw_store_solution_data.ipynb b/examples/notebook/constraint_solver/vrptw_store_solution_data.ipynb index aa88a632028..41e60a55a1b 100644 --- a/examples/notebook/constraint_solver/vrptw_store_solution_data.ipynb +++ b/examples/notebook/constraint_solver/vrptw_store_solution_data.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/3_jugs_mip.ipynb b/examples/notebook/contrib/3_jugs_mip.ipynb index ee8a8251b88..bc0c4ce2436 100644 --- a/examples/notebook/contrib/3_jugs_mip.ipynb +++ b/examples/notebook/contrib/3_jugs_mip.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/3_jugs_regular.ipynb b/examples/notebook/contrib/3_jugs_regular.ipynb index d09dc39bfcd..4b5cdeeba01 100644 --- a/examples/notebook/contrib/3_jugs_regular.ipynb +++ b/examples/notebook/contrib/3_jugs_regular.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/a_round_of_golf.ipynb b/examples/notebook/contrib/a_round_of_golf.ipynb index f661c46724a..53e4401a64f 100644 --- a/examples/notebook/contrib/a_round_of_golf.ipynb +++ b/examples/notebook/contrib/a_round_of_golf.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/all_interval.ipynb b/examples/notebook/contrib/all_interval.ipynb index 6238de5ca93..54070f36622 100644 --- a/examples/notebook/contrib/all_interval.ipynb +++ b/examples/notebook/contrib/all_interval.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { diff --git a/examples/notebook/contrib/alldifferent_except_0.ipynb b/examples/notebook/contrib/alldifferent_except_0.ipynb index 287ca64cd1a..a4b7f0ef4f8 100644 --- a/examples/notebook/contrib/alldifferent_except_0.ipynb +++ b/examples/notebook/contrib/alldifferent_except_0.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/alphametic.ipynb b/examples/notebook/contrib/alphametic.ipynb index d6184c59a8d..da69bc36e72 100644 --- a/examples/notebook/contrib/alphametic.ipynb +++ b/examples/notebook/contrib/alphametic.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/assignment.ipynb b/examples/notebook/contrib/assignment.ipynb index 8bc379abec7..5c6c316bf74 100644 --- a/examples/notebook/contrib/assignment.ipynb +++ b/examples/notebook/contrib/assignment.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/assignment6_mip.ipynb b/examples/notebook/contrib/assignment6_mip.ipynb index 1fbb2d5243a..81b7a831cff 100644 --- a/examples/notebook/contrib/assignment6_mip.ipynb +++ b/examples/notebook/contrib/assignment6_mip.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/bacp.ipynb b/examples/notebook/contrib/bacp.ipynb index 82ff8d3542f..f7e01f2a3d1 100644 --- a/examples/notebook/contrib/bacp.ipynb +++ b/examples/notebook/contrib/bacp.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/blending.ipynb b/examples/notebook/contrib/blending.ipynb index 4be44edc8b9..8f999b1e98a 100644 --- a/examples/notebook/contrib/blending.ipynb +++ b/examples/notebook/contrib/blending.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/broken_weights.ipynb b/examples/notebook/contrib/broken_weights.ipynb index 8c3e1f44600..641f2dbbace 100644 --- a/examples/notebook/contrib/broken_weights.ipynb +++ b/examples/notebook/contrib/broken_weights.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/bus_schedule.ipynb b/examples/notebook/contrib/bus_schedule.ipynb index eb98ba762a2..d10280c6075 100644 --- a/examples/notebook/contrib/bus_schedule.ipynb +++ b/examples/notebook/contrib/bus_schedule.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/car.ipynb b/examples/notebook/contrib/car.ipynb index 7933e3f9dec..f2582c5aafd 100644 --- a/examples/notebook/contrib/car.ipynb +++ b/examples/notebook/contrib/car.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { diff --git a/examples/notebook/contrib/check_dependencies.ipynb b/examples/notebook/contrib/check_dependencies.ipynb index e5c203bb299..e7460d693fa 100644 --- a/examples/notebook/contrib/check_dependencies.ipynb +++ b/examples/notebook/contrib/check_dependencies.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/circuit.ipynb b/examples/notebook/contrib/circuit.ipynb index 4dc7eec26a7..5c0d3b996de 100644 --- a/examples/notebook/contrib/circuit.ipynb +++ b/examples/notebook/contrib/circuit.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/coins3.ipynb b/examples/notebook/contrib/coins3.ipynb index f22afa819a2..2acc61bf25f 100644 --- a/examples/notebook/contrib/coins3.ipynb +++ b/examples/notebook/contrib/coins3.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/coins_grid.ipynb b/examples/notebook/contrib/coins_grid.ipynb index 9d5286172fc..25ac1f2ef82 100644 --- a/examples/notebook/contrib/coins_grid.ipynb +++ b/examples/notebook/contrib/coins_grid.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/coins_grid_mip.ipynb b/examples/notebook/contrib/coins_grid_mip.ipynb index f0a146e73d7..89be73fe5fa 100644 --- a/examples/notebook/contrib/coins_grid_mip.ipynb +++ b/examples/notebook/contrib/coins_grid_mip.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/coloring_ip.ipynb b/examples/notebook/contrib/coloring_ip.ipynb index bebf939dca9..0c4733271b8 100644 --- a/examples/notebook/contrib/coloring_ip.ipynb +++ b/examples/notebook/contrib/coloring_ip.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/combinatorial_auction2.ipynb b/examples/notebook/contrib/combinatorial_auction2.ipynb index 3513d029b59..fde253c5005 100644 --- a/examples/notebook/contrib/combinatorial_auction2.ipynb +++ b/examples/notebook/contrib/combinatorial_auction2.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/contiguity_regular.ipynb b/examples/notebook/contrib/contiguity_regular.ipynb index 0ba85444d27..33011747c64 100644 --- a/examples/notebook/contrib/contiguity_regular.ipynb +++ b/examples/notebook/contrib/contiguity_regular.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/costas_array.ipynb b/examples/notebook/contrib/costas_array.ipynb index 6d0551af587..a14993b9c9a 100644 --- a/examples/notebook/contrib/costas_array.ipynb +++ b/examples/notebook/contrib/costas_array.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { @@ -160,7 +160,7 @@ "\n", " # Fix the values in the lower triangle in the\n", " # difference matrix to -n+1. This removes variants\n", - " # of the difference matrix for the the same Costas array.\n", + " # of the difference matrix for the same Costas array.\n", " for i in range(n):\n", " for j in range(i + 1):\n", " solver.Add(differences[i, j] == -n + 1)\n", diff --git a/examples/notebook/contrib/covering_opl.ipynb b/examples/notebook/contrib/covering_opl.ipynb index 26dd09df982..45024be9d34 100644 --- a/examples/notebook/contrib/covering_opl.ipynb +++ b/examples/notebook/contrib/covering_opl.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/crew.ipynb b/examples/notebook/contrib/crew.ipynb index a9434f07e43..cff12bc40e3 100644 --- a/examples/notebook/contrib/crew.ipynb +++ b/examples/notebook/contrib/crew.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/crossword2.ipynb b/examples/notebook/contrib/crossword2.ipynb index 9b1d5bec3d8..3bced776ea9 100644 --- a/examples/notebook/contrib/crossword2.ipynb +++ b/examples/notebook/contrib/crossword2.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/crypta.ipynb b/examples/notebook/contrib/crypta.ipynb index ab01f2af06f..7c659f09bd6 100644 --- a/examples/notebook/contrib/crypta.ipynb +++ b/examples/notebook/contrib/crypta.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/crypto.ipynb b/examples/notebook/contrib/crypto.ipynb index 1fb774b8a71..8a4c505b752 100644 --- a/examples/notebook/contrib/crypto.ipynb +++ b/examples/notebook/contrib/crypto.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/curious_set_of_integers.ipynb b/examples/notebook/contrib/curious_set_of_integers.ipynb index 9e3549413e8..882151fc2e7 100644 --- a/examples/notebook/contrib/curious_set_of_integers.ipynb +++ b/examples/notebook/contrib/curious_set_of_integers.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/debruijn_binary.ipynb b/examples/notebook/contrib/debruijn_binary.ipynb index 00f1120fb8f..820c777dba7 100644 --- a/examples/notebook/contrib/debruijn_binary.ipynb +++ b/examples/notebook/contrib/debruijn_binary.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { @@ -82,7 +82,7 @@ " base**n.\n", "\n", "\n", - " Compare with the the web based programs:\n", + " Compare with the web based programs:\n", " http://www.hakank.org/comb/debruijn.cgi\n", " http://www.hakank.org/comb/debruijn_arb.cgi\n", "\n", diff --git a/examples/notebook/contrib/diet1.ipynb b/examples/notebook/contrib/diet1.ipynb index 583e6008954..948da29917d 100644 --- a/examples/notebook/contrib/diet1.ipynb +++ b/examples/notebook/contrib/diet1.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/diet1_b.ipynb b/examples/notebook/contrib/diet1_b.ipynb index 398efbbbd16..b0c692160b6 100644 --- a/examples/notebook/contrib/diet1_b.ipynb +++ b/examples/notebook/contrib/diet1_b.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/diet1_mip.ipynb b/examples/notebook/contrib/diet1_mip.ipynb index a272b6741e8..ba426559fd1 100644 --- a/examples/notebook/contrib/diet1_mip.ipynb +++ b/examples/notebook/contrib/diet1_mip.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/discrete_tomography.ipynb b/examples/notebook/contrib/discrete_tomography.ipynb index 702065109ba..551757a2606 100644 --- a/examples/notebook/contrib/discrete_tomography.ipynb +++ b/examples/notebook/contrib/discrete_tomography.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/divisible_by_9_through_1.ipynb b/examples/notebook/contrib/divisible_by_9_through_1.ipynb index 5c3fa505a4c..e4df8c6810e 100644 --- a/examples/notebook/contrib/divisible_by_9_through_1.ipynb +++ b/examples/notebook/contrib/divisible_by_9_through_1.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/dudeney.ipynb b/examples/notebook/contrib/dudeney.ipynb index a06fa46c92e..841542197d2 100644 --- a/examples/notebook/contrib/dudeney.ipynb +++ b/examples/notebook/contrib/dudeney.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/einav_puzzle.ipynb b/examples/notebook/contrib/einav_puzzle.ipynb index 1e561ba719d..fb10e72c6f7 100644 --- a/examples/notebook/contrib/einav_puzzle.ipynb +++ b/examples/notebook/contrib/einav_puzzle.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/einav_puzzle2.ipynb b/examples/notebook/contrib/einav_puzzle2.ipynb index cf73b64ec74..1c14453eb24 100644 --- a/examples/notebook/contrib/einav_puzzle2.ipynb +++ b/examples/notebook/contrib/einav_puzzle2.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { diff --git a/examples/notebook/contrib/eq10.ipynb b/examples/notebook/contrib/eq10.ipynb index 8d73138164a..59bab8f0572 100644 --- a/examples/notebook/contrib/eq10.ipynb +++ b/examples/notebook/contrib/eq10.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/eq20.ipynb b/examples/notebook/contrib/eq20.ipynb index 14d43152475..c9fc2dabd63 100644 --- a/examples/notebook/contrib/eq20.ipynb +++ b/examples/notebook/contrib/eq20.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/fill_a_pix.ipynb b/examples/notebook/contrib/fill_a_pix.ipynb index 91a8780ac60..88f31c747b0 100644 --- a/examples/notebook/contrib/fill_a_pix.ipynb +++ b/examples/notebook/contrib/fill_a_pix.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/furniture_moving.ipynb b/examples/notebook/contrib/furniture_moving.ipynb index 71ff8a961ac..eb3b35036ed 100644 --- a/examples/notebook/contrib/furniture_moving.ipynb +++ b/examples/notebook/contrib/furniture_moving.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/futoshiki.ipynb b/examples/notebook/contrib/futoshiki.ipynb index 00719e963bf..9789b29beec 100644 --- a/examples/notebook/contrib/futoshiki.ipynb +++ b/examples/notebook/contrib/futoshiki.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/game_theory_taha.ipynb b/examples/notebook/contrib/game_theory_taha.ipynb index d24c0a2f3aa..3de60774aa1 100644 --- a/examples/notebook/contrib/game_theory_taha.ipynb +++ b/examples/notebook/contrib/game_theory_taha.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/grocery.ipynb b/examples/notebook/contrib/grocery.ipynb index 7c342809f09..2fdfbb61d13 100644 --- a/examples/notebook/contrib/grocery.ipynb +++ b/examples/notebook/contrib/grocery.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/hidato.ipynb b/examples/notebook/contrib/hidato.ipynb index 7443716a855..942aa784280 100644 --- a/examples/notebook/contrib/hidato.ipynb +++ b/examples/notebook/contrib/hidato.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/just_forgotten.ipynb b/examples/notebook/contrib/just_forgotten.ipynb index d21153566d7..888f3163f26 100644 --- a/examples/notebook/contrib/just_forgotten.ipynb +++ b/examples/notebook/contrib/just_forgotten.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { diff --git a/examples/notebook/contrib/kakuro.ipynb b/examples/notebook/contrib/kakuro.ipynb index 7d2ee5b163e..fa524843b38 100644 --- a/examples/notebook/contrib/kakuro.ipynb +++ b/examples/notebook/contrib/kakuro.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/kenken2.ipynb b/examples/notebook/contrib/kenken2.ipynb index 156c4628d0a..eace922e2ef 100644 --- a/examples/notebook/contrib/kenken2.ipynb +++ b/examples/notebook/contrib/kenken2.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/killer_sudoku.ipynb b/examples/notebook/contrib/killer_sudoku.ipynb index d4b450e1003..7b8e84e06df 100644 --- a/examples/notebook/contrib/killer_sudoku.ipynb +++ b/examples/notebook/contrib/killer_sudoku.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/knapsack_cp.ipynb b/examples/notebook/contrib/knapsack_cp.ipynb index 79c306fce8c..f4c10df6c23 100644 --- a/examples/notebook/contrib/knapsack_cp.ipynb +++ b/examples/notebook/contrib/knapsack_cp.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/knapsack_mip.ipynb b/examples/notebook/contrib/knapsack_mip.ipynb index ee9d0c12efe..7e3531fb978 100644 --- a/examples/notebook/contrib/knapsack_mip.ipynb +++ b/examples/notebook/contrib/knapsack_mip.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/labeled_dice.ipynb b/examples/notebook/contrib/labeled_dice.ipynb index e7db02b0a7a..b1c3345a498 100644 --- a/examples/notebook/contrib/labeled_dice.ipynb +++ b/examples/notebook/contrib/labeled_dice.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/langford.ipynb b/examples/notebook/contrib/langford.ipynb index 6ac7a6083ae..1ce419ecd29 100644 --- a/examples/notebook/contrib/langford.ipynb +++ b/examples/notebook/contrib/langford.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/least_diff.ipynb b/examples/notebook/contrib/least_diff.ipynb index e41126ba4ed..34ac0ff0168 100644 --- a/examples/notebook/contrib/least_diff.ipynb +++ b/examples/notebook/contrib/least_diff.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/least_square.ipynb b/examples/notebook/contrib/least_square.ipynb index e9d11d9d010..469d07772d4 100644 --- a/examples/notebook/contrib/least_square.ipynb +++ b/examples/notebook/contrib/least_square.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { diff --git a/examples/notebook/contrib/lectures.ipynb b/examples/notebook/contrib/lectures.ipynb index 0635b926f06..dc7c4c43549 100644 --- a/examples/notebook/contrib/lectures.ipynb +++ b/examples/notebook/contrib/lectures.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -79,7 +79,7 @@ " Biggs: Discrete Mathematics (2nd ed), page 187.\n", " '''\n", " Suppose we wish to schedule six one-hour lectures, v1, v2, v3, v4, v5, v6.\n", - " Among the the potential audience there are people who wish to hear both\n", + " Among the potential audience there are people who wish to hear both\n", "\n", " - v1 and v2\n", " - v1 and v4\n", diff --git a/examples/notebook/contrib/magic_sequence_sat.ipynb b/examples/notebook/contrib/magic_sequence_sat.ipynb index d082364a40f..1093abee03a 100644 --- a/examples/notebook/contrib/magic_sequence_sat.ipynb +++ b/examples/notebook/contrib/magic_sequence_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/magic_square.ipynb b/examples/notebook/contrib/magic_square.ipynb index 8d4cf7bb095..e33cf541211 100644 --- a/examples/notebook/contrib/magic_square.ipynb +++ b/examples/notebook/contrib/magic_square.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/magic_square_and_cards.ipynb b/examples/notebook/contrib/magic_square_and_cards.ipynb index 6bc65edccc8..feb9bee0df6 100644 --- a/examples/notebook/contrib/magic_square_and_cards.ipynb +++ b/examples/notebook/contrib/magic_square_and_cards.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/magic_square_mip.ipynb b/examples/notebook/contrib/magic_square_mip.ipynb index 3b4ada67791..7dd7edd998d 100644 --- a/examples/notebook/contrib/magic_square_mip.ipynb +++ b/examples/notebook/contrib/magic_square_mip.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/map.ipynb b/examples/notebook/contrib/map.ipynb index df84f637ef5..6f383791cfd 100644 --- a/examples/notebook/contrib/map.ipynb +++ b/examples/notebook/contrib/map.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/marathon2.ipynb b/examples/notebook/contrib/marathon2.ipynb index 6d351846520..d3581cd2f91 100644 --- a/examples/notebook/contrib/marathon2.ipynb +++ b/examples/notebook/contrib/marathon2.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/max_flow_taha.ipynb b/examples/notebook/contrib/max_flow_taha.ipynb index d52cf2acecc..79139584908 100644 --- a/examples/notebook/contrib/max_flow_taha.ipynb +++ b/examples/notebook/contrib/max_flow_taha.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { diff --git a/examples/notebook/contrib/max_flow_winston1.ipynb b/examples/notebook/contrib/max_flow_winston1.ipynb index 4eb9f3dea2f..462d0999f3e 100644 --- a/examples/notebook/contrib/max_flow_winston1.ipynb +++ b/examples/notebook/contrib/max_flow_winston1.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/minesweeper.ipynb b/examples/notebook/contrib/minesweeper.ipynb index 0e008b1c070..236b8cbc8cf 100644 --- a/examples/notebook/contrib/minesweeper.ipynb +++ b/examples/notebook/contrib/minesweeper.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/mr_smith.ipynb b/examples/notebook/contrib/mr_smith.ipynb index 76f57a97cb1..0767bedb005 100644 --- a/examples/notebook/contrib/mr_smith.ipynb +++ b/examples/notebook/contrib/mr_smith.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/nonogram_default_search.ipynb b/examples/notebook/contrib/nonogram_default_search.ipynb index c9a0abf4c87..17665e777ac 100644 --- a/examples/notebook/contrib/nonogram_default_search.ipynb +++ b/examples/notebook/contrib/nonogram_default_search.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/nonogram_regular.ipynb b/examples/notebook/contrib/nonogram_regular.ipynb index 12b3bc9ece2..01edff0a1e3 100644 --- a/examples/notebook/contrib/nonogram_regular.ipynb +++ b/examples/notebook/contrib/nonogram_regular.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/nonogram_table.ipynb b/examples/notebook/contrib/nonogram_table.ipynb index aba767b93ec..0de937699cf 100644 --- a/examples/notebook/contrib/nonogram_table.ipynb +++ b/examples/notebook/contrib/nonogram_table.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/nonogram_table2.ipynb b/examples/notebook/contrib/nonogram_table2.ipynb index ad25175beb0..d7dfa8fb54a 100644 --- a/examples/notebook/contrib/nonogram_table2.ipynb +++ b/examples/notebook/contrib/nonogram_table2.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/nontransitive_dice.ipynb b/examples/notebook/contrib/nontransitive_dice.ipynb index d5fa172d434..da3cd48537b 100644 --- a/examples/notebook/contrib/nontransitive_dice.ipynb +++ b/examples/notebook/contrib/nontransitive_dice.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/nqueens.ipynb b/examples/notebook/contrib/nqueens.ipynb index b5ca63c2cf0..437a3de4079 100644 --- a/examples/notebook/contrib/nqueens.ipynb +++ b/examples/notebook/contrib/nqueens.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." 
+ "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/nqueens2.ipynb b/examples/notebook/contrib/nqueens2.ipynb index 43ebd964346..492c898c8c8 100644 --- a/examples/notebook/contrib/nqueens2.ipynb +++ b/examples/notebook/contrib/nqueens2.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/nqueens3.ipynb b/examples/notebook/contrib/nqueens3.ipynb index 373dbc126e8..d5b310fc082 100644 --- a/examples/notebook/contrib/nqueens3.ipynb +++ b/examples/notebook/contrib/nqueens3.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/nurse_rostering.ipynb b/examples/notebook/contrib/nurse_rostering.ipynb index 3a0490f896c..ca1f438affa 100644 --- a/examples/notebook/contrib/nurse_rostering.ipynb +++ b/examples/notebook/contrib/nurse_rostering.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/nurses_cp.ipynb b/examples/notebook/contrib/nurses_cp.ipynb index 036eb4d3ab1..4247c3b9b39 100644 --- a/examples/notebook/contrib/nurses_cp.ipynb +++ b/examples/notebook/contrib/nurses_cp.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/olympic.ipynb b/examples/notebook/contrib/olympic.ipynb index fc438a146df..4c7675c3f1a 100644 --- a/examples/notebook/contrib/olympic.ipynb +++ b/examples/notebook/contrib/olympic.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/organize_day.ipynb b/examples/notebook/contrib/organize_day.ipynb index 1cfd0e775da..78884447516 100644 --- a/examples/notebook/contrib/organize_day.ipynb +++ b/examples/notebook/contrib/organize_day.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/p_median.ipynb b/examples/notebook/contrib/p_median.ipynb index 14a71c3b9fc..937e78c03da 100644 --- a/examples/notebook/contrib/p_median.ipynb +++ b/examples/notebook/contrib/p_median.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/pandigital_numbers.ipynb b/examples/notebook/contrib/pandigital_numbers.ipynb index 5252767074d..bf4746b3e56 100644 --- a/examples/notebook/contrib/pandigital_numbers.ipynb +++ b/examples/notebook/contrib/pandigital_numbers.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { @@ -108,7 +108,7 @@ " * Wikipedia http://en.wikipedia.org/wiki/Pandigital_number\n", "\n", "\n", - " Compare with the the following models:\n", + " Compare with the following models:\n", " * MiniZinc: http://www.hakank.org/minizinc/pandigital_numbers.mzn\n", " * Comet : http://www.hakank.org/comet/pandigital_numbers.co\n", " * ECLiPSe : http://www.hakank.org/eclipse/pandigital_numbers.ecl\n", diff --git a/examples/notebook/contrib/photo_problem.ipynb b/examples/notebook/contrib/photo_problem.ipynb index 3c926c0d9e2..9a75a3413f4 100644 --- a/examples/notebook/contrib/photo_problem.ipynb +++ b/examples/notebook/contrib/photo_problem.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/place_number_puzzle.ipynb b/examples/notebook/contrib/place_number_puzzle.ipynb index f0e9472e4a9..8fc2cd1ba81 100644 --- a/examples/notebook/contrib/place_number_puzzle.ipynb +++ b/examples/notebook/contrib/place_number_puzzle.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/post_office_problem2.ipynb b/examples/notebook/contrib/post_office_problem2.ipynb index 7e4b3f90dd7..7271628b0ab 100644 --- a/examples/notebook/contrib/post_office_problem2.ipynb +++ b/examples/notebook/contrib/post_office_problem2.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/production.ipynb b/examples/notebook/contrib/production.ipynb index 90a4ce14097..f450e585750 100644 --- a/examples/notebook/contrib/production.ipynb +++ b/examples/notebook/contrib/production.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/project_scheduling_sat.ipynb b/examples/notebook/contrib/project_scheduling_sat.ipynb index 1705b4dc4f5..328bbdc1349 100644 --- a/examples/notebook/contrib/project_scheduling_sat.ipynb +++ b/examples/notebook/contrib/project_scheduling_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/pyls_api.ipynb b/examples/notebook/contrib/pyls_api.ipynb index f58f9c01dd7..f0d5503213b 100644 --- a/examples/notebook/contrib/pyls_api.ipynb +++ b/examples/notebook/contrib/pyls_api.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/quasigroup_completion.ipynb b/examples/notebook/contrib/quasigroup_completion.ipynb index c7e5a1c2e32..3a897295996 100644 --- a/examples/notebook/contrib/quasigroup_completion.ipynb +++ b/examples/notebook/contrib/quasigroup_completion.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/regular.ipynb b/examples/notebook/contrib/regular.ipynb index db879beecc4..1f53a18eb5a 100644 --- a/examples/notebook/contrib/regular.ipynb +++ b/examples/notebook/contrib/regular.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." 
+ "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/regular_table.ipynb b/examples/notebook/contrib/regular_table.ipynb index 9f671a39450..bad77bc5fa8 100644 --- a/examples/notebook/contrib/regular_table.ipynb +++ b/examples/notebook/contrib/regular_table.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/regular_table2.ipynb b/examples/notebook/contrib/regular_table2.ipynb index 5df0798cadd..73b7fe76eeb 100644 --- a/examples/notebook/contrib/regular_table2.ipynb +++ b/examples/notebook/contrib/regular_table2.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/rogo2.ipynb b/examples/notebook/contrib/rogo2.ipynb index 2acf87de4c9..9bae765dc53 100644 --- a/examples/notebook/contrib/rogo2.ipynb +++ b/examples/notebook/contrib/rogo2.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/rostering_with_travel.ipynb b/examples/notebook/contrib/rostering_with_travel.ipynb index 794bc3e8a30..a5545bbf536 100644 --- a/examples/notebook/contrib/rostering_with_travel.ipynb +++ b/examples/notebook/contrib/rostering_with_travel.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/safe_cracking.ipynb b/examples/notebook/contrib/safe_cracking.ipynb index 8c2184de87e..4baaa5c1254 100644 --- a/examples/notebook/contrib/safe_cracking.ipynb +++ b/examples/notebook/contrib/safe_cracking.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/scheduling_speakers.ipynb b/examples/notebook/contrib/scheduling_speakers.ipynb index 29ec70d428f..8f3de342672 100644 --- a/examples/notebook/contrib/scheduling_speakers.ipynb +++ b/examples/notebook/contrib/scheduling_speakers.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/scheduling_with_transitions_sat.ipynb b/examples/notebook/contrib/scheduling_with_transitions_sat.ipynb index 16275fec822..b67470ee1d7 100644 --- a/examples/notebook/contrib/scheduling_with_transitions_sat.ipynb +++ b/examples/notebook/contrib/scheduling_with_transitions_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/school_scheduling_sat.ipynb b/examples/notebook/contrib/school_scheduling_sat.ipynb index bf5507e2c20..423ff950f07 100644 --- a/examples/notebook/contrib/school_scheduling_sat.ipynb +++ b/examples/notebook/contrib/school_scheduling_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { diff --git a/examples/notebook/contrib/secret_santa.ipynb b/examples/notebook/contrib/secret_santa.ipynb index 789a3884125..1997541e8ad 100644 --- a/examples/notebook/contrib/secret_santa.ipynb +++ b/examples/notebook/contrib/secret_santa.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/secret_santa2.ipynb b/examples/notebook/contrib/secret_santa2.ipynb index 63b73e38273..24314c8a4de 100644 --- a/examples/notebook/contrib/secret_santa2.ipynb +++ b/examples/notebook/contrib/secret_santa2.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/send_more_money_any_base.ipynb b/examples/notebook/contrib/send_more_money_any_base.ipynb index 18ad26402d2..a3039842df8 100644 --- a/examples/notebook/contrib/send_more_money_any_base.ipynb +++ b/examples/notebook/contrib/send_more_money_any_base.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/send_most_money.ipynb b/examples/notebook/contrib/send_most_money.ipynb index 3cfc024843e..418e7dd237c 100644 --- a/examples/notebook/contrib/send_most_money.ipynb +++ b/examples/notebook/contrib/send_most_money.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/seseman.ipynb b/examples/notebook/contrib/seseman.ipynb index e4ba7742e7e..4b6d7a91518 100644 --- a/examples/notebook/contrib/seseman.ipynb +++ b/examples/notebook/contrib/seseman.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/seseman_b.ipynb b/examples/notebook/contrib/seseman_b.ipynb index 35bddea7f32..5d91bb53f81 100644 --- a/examples/notebook/contrib/seseman_b.ipynb +++ b/examples/notebook/contrib/seseman_b.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/set_covering.ipynb b/examples/notebook/contrib/set_covering.ipynb index 31123d37e75..88816144192 100644 --- a/examples/notebook/contrib/set_covering.ipynb +++ b/examples/notebook/contrib/set_covering.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/set_covering2.ipynb b/examples/notebook/contrib/set_covering2.ipynb index ee0af37f978..f68a1563794 100644 --- a/examples/notebook/contrib/set_covering2.ipynb +++ b/examples/notebook/contrib/set_covering2.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/set_covering3.ipynb b/examples/notebook/contrib/set_covering3.ipynb index 99d5ce74812..2464fc62f5a 100644 --- a/examples/notebook/contrib/set_covering3.ipynb +++ b/examples/notebook/contrib/set_covering3.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { diff --git a/examples/notebook/contrib/set_covering4.ipynb b/examples/notebook/contrib/set_covering4.ipynb index aedf1099b5f..30632d5cced 100644 --- a/examples/notebook/contrib/set_covering4.ipynb +++ b/examples/notebook/contrib/set_covering4.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/set_covering_deployment.ipynb b/examples/notebook/contrib/set_covering_deployment.ipynb index 89a308d8e0f..bd6e6fed72b 100644 --- a/examples/notebook/contrib/set_covering_deployment.ipynb +++ b/examples/notebook/contrib/set_covering_deployment.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -87,7 +87,7 @@ " army placements to secure the Roman Empire.\n", " '''\n", "\n", - " Compare with the the following models:\n", + " Compare with the following models:\n", " * MiniZinc: http://www.hakank.org/minizinc/set_covering_deployment.mzn\n", " * Comet : http://www.hakank.org/comet/set_covering_deployment.co\n", " * Gecode : http://www.hakank.org/gecode/set_covering_deployment.cpp\n", diff --git a/examples/notebook/contrib/set_covering_skiena.ipynb b/examples/notebook/contrib/set_covering_skiena.ipynb index 2e9c36e3fb4..01057394493 100644 --- a/examples/notebook/contrib/set_covering_skiena.ipynb +++ b/examples/notebook/contrib/set_covering_skiena.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/set_partition.ipynb b/examples/notebook/contrib/set_partition.ipynb index 41e415e1ed5..d151380bc14 100644 --- a/examples/notebook/contrib/set_partition.ipynb +++ b/examples/notebook/contrib/set_partition.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/sicherman_dice.ipynb b/examples/notebook/contrib/sicherman_dice.ipynb index 11f1f6c91bd..41531e4954e 100644 --- a/examples/notebook/contrib/sicherman_dice.ipynb +++ b/examples/notebook/contrib/sicherman_dice.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/ski_assignment.ipynb b/examples/notebook/contrib/ski_assignment.ipynb index 0fd19263f66..cfa236c1914 100644 --- a/examples/notebook/contrib/ski_assignment.ipynb +++ b/examples/notebook/contrib/ski_assignment.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/slitherlink.ipynb b/examples/notebook/contrib/slitherlink.ipynb index afae049f1f8..4ce404c3910 100644 --- a/examples/notebook/contrib/slitherlink.ipynb +++ b/examples/notebook/contrib/slitherlink.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { diff --git a/examples/notebook/contrib/sports_schedule_sat.ipynb b/examples/notebook/contrib/sports_schedule_sat.ipynb index a8d95bfe384..1425f40d414 100644 --- a/examples/notebook/contrib/sports_schedule_sat.ipynb +++ b/examples/notebook/contrib/sports_schedule_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/stable_marriage.ipynb b/examples/notebook/contrib/stable_marriage.ipynb index 1209a47734e..7d9c87ddb12 100644 --- a/examples/notebook/contrib/stable_marriage.ipynb +++ b/examples/notebook/contrib/stable_marriage.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/stable_marriage_sat.ipynb b/examples/notebook/contrib/stable_marriage_sat.ipynb index 6db62119405..5cd6df58561 100644 --- a/examples/notebook/contrib/stable_marriage_sat.ipynb +++ b/examples/notebook/contrib/stable_marriage_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/steel.ipynb b/examples/notebook/contrib/steel.ipynb index 895afb030fb..73ebdf511a9 100644 --- a/examples/notebook/contrib/steel.ipynb +++ b/examples/notebook/contrib/steel.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/steel_lns.ipynb b/examples/notebook/contrib/steel_lns.ipynb index c8f1937bbe5..df75ee18dfb 100644 --- a/examples/notebook/contrib/steel_lns.ipynb +++ b/examples/notebook/contrib/steel_lns.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/stigler_contrib.ipynb b/examples/notebook/contrib/stigler_contrib.ipynb index 2e8e24bb6a9..a9f73e4444c 100644 --- a/examples/notebook/contrib/stigler_contrib.ipynb +++ b/examples/notebook/contrib/stigler_contrib.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/strimko2.ipynb b/examples/notebook/contrib/strimko2.ipynb index 0255610cc0a..49411e6dc25 100644 --- a/examples/notebook/contrib/strimko2.ipynb +++ b/examples/notebook/contrib/strimko2.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/subset_sum.ipynb b/examples/notebook/contrib/subset_sum.ipynb index 8825f23e148..17f716fc716 100644 --- a/examples/notebook/contrib/subset_sum.ipynb +++ b/examples/notebook/contrib/subset_sum.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/survo_puzzle.ipynb b/examples/notebook/contrib/survo_puzzle.ipynb index 76a96d52fbb..217186c4e6c 100644 --- a/examples/notebook/contrib/survo_puzzle.ipynb +++ b/examples/notebook/contrib/survo_puzzle.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { diff --git a/examples/notebook/contrib/toNum.ipynb b/examples/notebook/contrib/toNum.ipynb index cf9efddccbd..86392c650bd 100644 --- a/examples/notebook/contrib/toNum.ipynb +++ b/examples/notebook/contrib/toNum.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/traffic_lights.ipynb b/examples/notebook/contrib/traffic_lights.ipynb index 2f1a2b379c8..9e695c55cc7 100644 --- a/examples/notebook/contrib/traffic_lights.ipynb +++ b/examples/notebook/contrib/traffic_lights.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/vendor_scheduling.ipynb b/examples/notebook/contrib/vendor_scheduling.ipynb index 15808dcbd19..e819b7a59b6 100644 --- a/examples/notebook/contrib/vendor_scheduling.ipynb +++ b/examples/notebook/contrib/vendor_scheduling.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/volsay.ipynb b/examples/notebook/contrib/volsay.ipynb index 54d49053529..d344e95c0fd 100644 --- a/examples/notebook/contrib/volsay.ipynb +++ b/examples/notebook/contrib/volsay.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/volsay2.ipynb b/examples/notebook/contrib/volsay2.ipynb index baadaa11b7d..c8d6e891299 100644 --- a/examples/notebook/contrib/volsay2.ipynb +++ b/examples/notebook/contrib/volsay2.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/volsay3.ipynb b/examples/notebook/contrib/volsay3.ipynb index b15b62ff299..9b64f26d0cb 100644 --- a/examples/notebook/contrib/volsay3.ipynb +++ b/examples/notebook/contrib/volsay3.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/wedding_optimal_chart.ipynb b/examples/notebook/contrib/wedding_optimal_chart.ipynb index 49f26a266bc..2d25a22f9e5 100644 --- a/examples/notebook/contrib/wedding_optimal_chart.ipynb +++ b/examples/notebook/contrib/wedding_optimal_chart.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/who_killed_agatha.ipynb b/examples/notebook/contrib/who_killed_agatha.ipynb index da0b53b75c1..b35d1a6313e 100644 --- a/examples/notebook/contrib/who_killed_agatha.ipynb +++ b/examples/notebook/contrib/who_killed_agatha.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/contrib/xkcd.ipynb b/examples/notebook/contrib/xkcd.ipynb index 363385a51d1..87e1a351f99 100644 --- a/examples/notebook/contrib/xkcd.ipynb +++ b/examples/notebook/contrib/xkcd.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { diff --git a/examples/notebook/contrib/young_tableaux.ipynb b/examples/notebook/contrib/young_tableaux.ipynb index 86ca0ee19e0..a5aa3cbdc34 100644 --- a/examples/notebook/contrib/young_tableaux.ipynb +++ b/examples/notebook/contrib/young_tableaux.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/examples/appointments.ipynb b/examples/notebook/examples/appointments.ipynb index db4774c348d..e9473305c38 100644 --- a/examples/notebook/examples/appointments.ipynb +++ b/examples/notebook/examples/appointments.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -108,17 +108,19 @@ " self.__variables = variables\n", " self.__collect = []\n", "\n", - " def on_solution_callback(self):\n", + " def on_solution_callback(self) -> None:\n", " \"\"\"Collect a new combination.\"\"\"\n", - " combination = [self.Value(v) for v in self.__variables]\n", + " combination = [self.value(v) for v in self.__variables]\n", " self.__collect.append(combination)\n", "\n", - " def combinations(self):\n", + " def combinations(self) -> list[list[int]]:\n", " \"\"\"Returns all collected combinations.\"\"\"\n", " return self.__collect\n", "\n", "\n", - "def EnumerateAllKnapsacksWithRepetition(item_sizes, total_size_min, total_size_max):\n", + "def enumerate_all_knapsacks_with_repetition(\n", + " item_sizes: list[int], total_size_min: int, total_size_max: int\n", + ") -> list[list[int]]:\n", " \"\"\"Enumerate all possible knapsacks with total size in the given range.\n", "\n", " Args:\n", @@ -132,22 +134,26 @@ " nonnegative integer: the number of times we put item #K in the knapsack.\n", " \"\"\"\n", " model = cp_model.CpModel()\n", - " variables = [model.NewIntVar(0, total_size_max // size, \"\") for size in item_sizes]\n", + " variables = [\n", + " model.new_int_var(0, total_size_max // size, \"\") for size in item_sizes\n", + " ]\n", " load = sum(variables[i] * size for i, size in enumerate(item_sizes))\n", - " model.AddLinearConstraint(load, total_size_min, total_size_max)\n", + " model.add_linear_constraint(load, total_size_min, total_size_max)\n", "\n", " solver = cp_model.CpSolver()\n", " solution_collector = AllSolutionCollector(variables)\n", " # Enumerate all solutions.\n", " solver.parameters.enumerate_all_solutions = True\n", - " # Solve\n", - " solver.Solve(model, solution_collector)\n", + " # solve\n", + " solver.solve(model, solution_collector)\n", " return solution_collector.combinations()\n", "\n", "\n", - "def AggregateItemCollectionsOptimally(\n", - " item_collections, max_num_collections, ideal_item_ratios\n", - "):\n", + "def aggregate_item_collections_optimally(\n", + " item_collections: list[list[int]],\n", + " max_num_collections: int,\n", + " ideal_item_ratios: list[float],\n", + ") -> list[int]:\n", " \"\"\"Selects a set (with repetition) of combination of items optimally.\n", "\n", " Given a set of collections of N possible items (in each collection, an item\n", @@ -237,7 +243,9 @@ " return []\n", "\n", "\n", - "def GetOptimalSchedule(demand):\n", + "def get_optimal_schedule(\n", + " demand: list[tuple[float, str, int]]\n", + ") -> list[tuple[int, list[tuple[int, str]]]]:\n", " \"\"\"Computes the optimal schedule for the installation input.\n", "\n", " Args:\n", @@ -249,8 +257,10 @@ " Returns:\n", " The same output type as EnumerateAllKnapsacksWithRepetition.\n", " 
\"\"\"\n", - " combinations = EnumerateAllKnapsacksWithRepetition(\n", - " [a[2] + _COMMUTE_TIME.value for a in demand], _LOAD_MIN.value, _LOAD_MAX.value\n", + " combinations = enumerate_all_knapsacks_with_repetition(\n", + " [a[2] + _COMMUTE_TIME.value for a in demand],\n", + " _LOAD_MIN.value,\n", + " _LOAD_MAX.value,\n", " )\n", " print(\n", " (\n", @@ -259,18 +269,18 @@ " )\n", " )\n", "\n", - " selection = AggregateItemCollectionsOptimally(\n", + " selection = aggregate_item_collections_optimally(\n", " combinations, _NUM_WORKERS.value, [a[0] / 100.0 for a in demand]\n", " )\n", " output = []\n", - " for i in range(len(selection)):\n", - " if selection[i] != 0:\n", + " for i, s in enumerate(selection):\n", + " if s != 0:\n", " output.append(\n", " (\n", - " selection[i],\n", + " s,\n", " [\n", - " (combinations[i][t], demand[t][1])\n", - " for t in range(len(demand))\n", + " (combinations[i][t], d[1])\n", + " for t, d in enumerate(demand)\n", " if combinations[i][t] != 0\n", " ],\n", " )\n", @@ -291,7 +301,7 @@ " % (_LOAD_MIN.value, _LOAD_MAX.value)\n", " )\n", " print(\"%d workers\" % _NUM_WORKERS.value)\n", - " selection = GetOptimalSchedule(demand)\n", + " selection = get_optimal_schedule(demand)\n", " print()\n", " installed = 0\n", " installed_per_type = {}\n", @@ -315,7 +325,8 @@ " per_type = installed_per_type[name]\n", " if installed != 0:\n", " print(\n", - " f\" {per_type} ({per_type * 100.0 / installed}%) installations of type {name} planned\"\n", + " f\" {per_type} ({per_type * 100.0 / installed}%) installations of\"\n", + " f\" type {name} planned\"\n", " )\n", " else:\n", " print(f\" {per_type} installations of type {name} planned\")\n", diff --git a/examples/notebook/examples/arc_flow_cutting_stock_sat.ipynb b/examples/notebook/examples/arc_flow_cutting_stock_sat.ipynb index e2e0442636e..59374a19589 100644 --- a/examples/notebook/examples/arc_flow_cutting_stock_sat.ipynb +++ b/examples/notebook/examples/arc_flow_cutting_stock_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { @@ -72,8 +72,8 @@ "id": "description", "metadata": {}, "source": [ - "Cutting stock problem with the objective to minimize wasted space.\n", - "\n" + "\n", + "Cutting stock problem with the objective to minimize wasted space.\n" ] }, { @@ -85,37 +85,164 @@ "source": [ "import collections\n", "import time\n", - "import numpy as np\n", "\n", "from ortools.sat.colab import flags\n", + "import numpy as np\n", + "\n", "from google.protobuf import text_format\n", "from ortools.linear_solver.python import model_builder as mb\n", "from ortools.sat.python import cp_model\n", "\n", - "FLAGS = flags.FLAGS\n", "\n", "_OUTPUT_PROTO = flags.define_string(\n", - " 'output_proto', '', 'Output file to write the cp_model proto to.')\n", + " \"output_proto\", \"\", \"Output file to write the cp_model proto to.\"\n", + ")\n", "_PARAMS = flags.define_string(\n", - " 'params',\n", - " 'num_search_workers:8,log_search_progress:true,max_time_in_seconds:10',\n", - " 'Sat solver parameters.')\n", - "_SOLVER = flags.define_string(\n", - " 'solver', 'sat', 'Method used to solve: sat, mip.')\n", + " \"params\",\n", + " \"num_search_workers:8,log_search_progress:true,max_time_in_seconds:10\",\n", + " \"Sat solver parameters.\",\n", + ")\n", + "_SOLVER = flags.define_string(\"solver\", \"sat\", \"Method used to solve: sat, mip.\")\n", "\n", "\n", "DESIRED_LENGTHS = [\n", - " 2490, 3980, 2490, 3980, 2391, 2391, 2391, 596, 596, 596, 2456, 2456, 3018,\n", - " 938, 3018, 938, 943, 3018, 943, 3018, 2490, 3980, 2490, 3980, 2391, 2391,\n", - " 2391, 596, 596, 596, 2456, 2456, 3018, 938, 3018, 938, 943, 3018, 943,\n", - " 3018, 2890, 3980, 2890, 3980, 2391, 2391, 2391, 596, 596, 596, 2856, 2856,\n", - " 3018, 938, 3018, 938, 943, 3018, 943, 3018, 3290, 3980, 3290, 3980, 2391,\n", - " 2391, 2391, 596, 596, 596, 3256, 3256, 3018, 938, 3018, 938, 943, 3018,\n", - " 943, 3018, 3690, 3980, 3690, 3980, 2391, 2391, 2391, 596, 596, 596, 3656,\n", - " 3656, 3018, 938, 3018, 938, 943, 3018, 943, 3018, 2790, 3980, 2790, 3980,\n", - " 2391, 2391, 2391, 596, 596, 596, 2756, 2756, 3018, 938, 3018, 938, 943,\n", - " 3018, 943, 3018, 2790, 3980, 2790, 3980, 2391, 2391, 2391, 596, 596, 596,\n", - " 2756, 2756, 3018, 938, 3018, 938, 943\n", + " 2490,\n", + " 3980,\n", + " 2490,\n", + " 3980,\n", + " 2391,\n", + " 2391,\n", + " 2391,\n", + " 596,\n", + " 596,\n", + " 596,\n", + " 2456,\n", + " 2456,\n", + " 3018,\n", + " 938,\n", + " 3018,\n", + " 938,\n", + " 943,\n", + " 3018,\n", + " 943,\n", + " 3018,\n", + " 2490,\n", + " 3980,\n", + " 2490,\n", + " 3980,\n", + " 2391,\n", + " 2391,\n", + " 2391,\n", + " 596,\n", + " 596,\n", + " 596,\n", + " 2456,\n", + " 2456,\n", + " 3018,\n", + " 938,\n", + " 3018,\n", + " 938,\n", + " 943,\n", + " 3018,\n", + " 943,\n", + " 3018,\n", + " 2890,\n", + " 3980,\n", + " 2890,\n", + " 3980,\n", + " 2391,\n", + " 2391,\n", + " 2391,\n", + " 596,\n", + " 596,\n", + " 596,\n", + " 2856,\n", + " 2856,\n", + " 3018,\n", + " 938,\n", + " 3018,\n", + " 938,\n", + " 943,\n", + " 3018,\n", + " 943,\n", + " 3018,\n", + " 3290,\n", + " 3980,\n", + " 3290,\n", + " 3980,\n", + " 2391,\n", + " 2391,\n", + " 2391,\n", + " 596,\n", + " 596,\n", + " 596,\n", + " 3256,\n", + " 3256,\n", + " 3018,\n", + " 938,\n", + " 3018,\n", + " 938,\n", + " 943,\n", + " 3018,\n", + " 943,\n", + " 3018,\n", + " 3690,\n", + " 3980,\n", + " 3690,\n", + " 3980,\n", + " 2391,\n", + " 2391,\n", + " 2391,\n", + " 596,\n", + " 596,\n", + " 596,\n", + " 3656,\n", + " 3656,\n", + " 3018,\n", + " 938,\n", + " 3018,\n", + " 938,\n", + " 943,\n", 
+ " 3018,\n", + " 943,\n", + " 3018,\n", + " 2790,\n", + " 3980,\n", + " 2790,\n", + " 3980,\n", + " 2391,\n", + " 2391,\n", + " 2391,\n", + " 596,\n", + " 596,\n", + " 596,\n", + " 2756,\n", + " 2756,\n", + " 3018,\n", + " 938,\n", + " 3018,\n", + " 938,\n", + " 943,\n", + " 3018,\n", + " 943,\n", + " 3018,\n", + " 2790,\n", + " 3980,\n", + " 2790,\n", + " 3980,\n", + " 2391,\n", + " 2391,\n", + " 2391,\n", + " 596,\n", + " 596,\n", + " 596,\n", + " 2756,\n", + " 2756,\n", + " 3018,\n", + " 938,\n", + " 3018,\n", + " 938,\n", + " 943,\n", "]\n", "POSSIBLE_CAPACITIES = [4000, 5000, 6000, 7000, 8000]\n", "\n", @@ -165,7 +292,6 @@ " new_state = current_state + size * (card + 1)\n", " if new_state > max_capacity:\n", " break\n", - " new_state_index = -1\n", " if new_state in state_to_index:\n", " new_state_index = state_to_index[new_state]\n", " else:\n", @@ -173,9 +299,9 @@ " states.append(new_state)\n", " state_to_index[new_state] = new_state_index\n", " # Add the transition\n", - " transitions.append([\n", - " current_state_index, new_state_index, item_index, card + 1\n", - " ])\n", + " transitions.append(\n", + " [current_state_index, new_state_index, item_index, card + 1]\n", + " )\n", "\n", " return states, transitions\n", "\n", @@ -183,14 +309,19 @@ "def solve_cutting_stock_with_arc_flow_and_sat(output_proto_file: str, params: str):\n", " \"\"\"Solve the cutting stock with arc-flow and the CP-SAT solver.\"\"\"\n", " items = regroup_and_count(DESIRED_LENGTHS)\n", - " print('Items:', items)\n", + " print(\"Items:\", items)\n", " num_items = len(DESIRED_LENGTHS)\n", "\n", " max_capacity = max(POSSIBLE_CAPACITIES)\n", " states, transitions = create_state_graph(items, max_capacity)\n", "\n", - " print('Dynamic programming has generated', len(states), 'states and',\n", - " len(transitions), 'transitions')\n", + " print(\n", + " \"Dynamic programming has generated\",\n", + " len(states),\n", + " \"states and\",\n", + " len(transitions),\n", + " \"transitions\",\n", + " )\n", "\n", " incoming_vars = collections.defaultdict(list)\n", " outgoing_vars = collections.defaultdict(list)\n", @@ -208,8 +339,8 @@ " count = items[item_index][1]\n", " max_count = count // card\n", " count_var = model.NewIntVar(\n", - " 0, max_count,\n", - " 'i%i_f%i_t%i_C%s' % (item_index, incoming, outgoing, card))\n", + " 0, max_count, \"i%i_f%i_t%i_C%s\" % (item_index, incoming, outgoing, card)\n", + " )\n", " incoming_vars[incoming].append(count_var)\n", " outgoing_vars[outgoing].append(count_var)\n", " item_vars[item_index].append(count_var)\n", @@ -219,7 +350,7 @@ " for state_index, state in enumerate(states):\n", " if state_index == 0:\n", " continue\n", - " exit_var = model.NewIntVar(0, num_items, 'e%i' % state_index)\n", + " exit_var = model.NewIntVar(0, num_items, \"e%i\" % state_index)\n", " outgoing_vars[state_index].append(exit_var)\n", " incoming_sink_vars.append(exit_var)\n", " price = price_usage(state, POSSIBLE_CAPACITIES)\n", @@ -228,8 +359,7 @@ "\n", " # Flow conservation\n", " for state_index in range(1, len(states)):\n", - " model.Add(\n", - " sum(incoming_vars[state_index]) == sum(outgoing_vars[state_index]))\n", + " model.Add(sum(incoming_vars[state_index]) == sum(outgoing_vars[state_index]))\n", "\n", " # Flow going out of the source must go in the sink\n", " model.Add(sum(outgoing_vars[0]) == sum(incoming_sink_vars))\n", @@ -238,13 +368,17 @@ " for item_index, size_and_count in enumerate(items):\n", " num_arcs = len(item_vars[item_index])\n", " model.Add(\n", - " sum(item_vars[item_index][i] * 
item_coeffs[item_index][i]\n", - " for i in range(num_arcs)) == size_and_count[1])\n", + " sum(\n", + " item_vars[item_index][i] * item_coeffs[item_index][i]\n", + " for i in range(num_arcs)\n", + " )\n", + " == size_and_count[1]\n", + " )\n", "\n", " # Objective is the sum of waste\n", " model.Minimize(\n", - " sum(objective_vars[i] * objective_coeffs[i]\n", - " for i in range(len(objective_vars))))\n", + " sum(objective_vars[i] * objective_coeffs[i] for i in range(len(objective_vars)))\n", + " )\n", "\n", " # Output model proto to file.\n", " if output_proto_file:\n", @@ -261,13 +395,18 @@ "def solve_cutting_stock_with_arc_flow_and_mip():\n", " \"\"\"Solve the cutting stock with arc-flow and a MIP solver.\"\"\"\n", " items = regroup_and_count(DESIRED_LENGTHS)\n", - " print('Items:', items)\n", + " print(\"Items:\", items)\n", " num_items = len(DESIRED_LENGTHS)\n", " max_capacity = max(POSSIBLE_CAPACITIES)\n", " states, transitions = create_state_graph(items, max_capacity)\n", "\n", - " print('Dynamic programming has generated', len(states), 'states and',\n", - " len(transitions), 'transitions')\n", + " print(\n", + " \"Dynamic programming has generated\",\n", + " len(states),\n", + " \"states and\",\n", + " len(transitions),\n", + " \"transitions\",\n", + " )\n", "\n", " incoming_vars = collections.defaultdict(list)\n", " outgoing_vars = collections.defaultdict(list)\n", @@ -285,8 +424,10 @@ " for outgoing, incoming, item_index, card in transitions:\n", " count = items[item_index][1]\n", " count_var = model.new_int_var(\n", - " 0, count, 'a%i_i%i_f%i_t%i_c%i' % (var_index, item_index, incoming,\n", - " outgoing, card))\n", + " 0,\n", + " count,\n", + " \"a%i_i%i_f%i_t%i_c%i\" % (var_index, item_index, incoming, outgoing, card),\n", + " )\n", " var_index += 1\n", " incoming_vars[incoming].append(count_var)\n", " outgoing_vars[outgoing].append(count_var)\n", @@ -296,7 +437,7 @@ " for state_index, state in enumerate(states):\n", " if state_index == 0:\n", " continue\n", - " exit_var = model.new_int_var(0, num_items, 'e%i' % state_index)\n", + " exit_var = model.new_int_var(0, num_items, \"e%i\" % state_index)\n", " outgoing_vars[state_index].append(exit_var)\n", " incoming_sink_vars.append(exit_var)\n", " price = price_usage(state, POSSIBLE_CAPACITIES)\n", @@ -306,41 +447,49 @@ " # Flow conservation\n", " for state_index in range(1, len(states)):\n", " model.add(\n", - " mb.LinearExpr.sum(incoming_vars[state_index]) == mb.LinearExpr.sum(\n", - " outgoing_vars[state_index]))\n", + " mb.LinearExpr.sum(incoming_vars[state_index])\n", + " == mb.LinearExpr.sum(outgoing_vars[state_index])\n", + " )\n", "\n", " # Flow going out of the source must go in the sink\n", " model.add(\n", - " mb.LinearExpr.sum(outgoing_vars[0]) == mb.LinearExpr.sum(\n", - " incoming_sink_vars))\n", + " mb.LinearExpr.sum(outgoing_vars[0]) == mb.LinearExpr.sum(incoming_sink_vars)\n", + " )\n", "\n", " # Items must be placed\n", " for item_index, size_and_count in enumerate(items):\n", " num_arcs = len(item_vars[item_index])\n", " model.add(\n", - " mb.LinearExpr.sum([item_vars[item_index][i] * item_coeffs[item_index][i]\n", - " for i in range(num_arcs)]) == size_and_count[1])\n", + " mb.LinearExpr.sum(\n", + " [\n", + " item_vars[item_index][i] * item_coeffs[item_index][i]\n", + " for i in range(num_arcs)\n", + " ]\n", + " )\n", + " == size_and_count[1]\n", + " )\n", "\n", " # Objective is the sum of waste\n", " model.minimize(np.dot(objective_vars, objective_coeffs))\n", "\n", - " solver = mb.ModelSolver('scip')\n", + " 
solver = mb.ModelSolver(\"scip\")\n", " solver.enable_output(True)\n", " status = solver.solve(model)\n", "\n", " ### Output the solution.\n", " if status == mb.SolveStatus.OPTIMAL or status == mb.SolveStatus.FEASIBLE:\n", - " print('Objective value = %f found in %.2f s' %\n", - " (solver.objective_value, time.time() - start_time))\n", + " print(\n", + " \"Objective value = %f found in %.2f s\"\n", + " % (solver.objective_value, time.time() - start_time)\n", + " )\n", " else:\n", - " print('No solution')\n", + " print(\"No solution\")\n", "\n", "\n", "def main(_):\n", - " \"\"\"Main function\"\"\"\n", - " if _SOLVER.value == 'sat':\n", - " solve_cutting_stock_with_arc_flow_and_sat(_OUTPUT_PROTO.value,\n", - " _PARAMS.value)\n", + " \"\"\"Main function.\"\"\"\n", + " if _SOLVER.value == \"sat\":\n", + " solve_cutting_stock_with_arc_flow_and_sat(_OUTPUT_PROTO.value, _PARAMS.value)\n", " else: # 'mip'\n", " solve_cutting_stock_with_arc_flow_and_mip()\n", "\n", diff --git a/examples/notebook/examples/assignment_with_constraints_sat.ipynb b/examples/notebook/examples/assignment_with_constraints_sat.ipynb index e25ca989cd6..4faf8406e83 100644 --- a/examples/notebook/examples/assignment_with_constraints_sat.ipynb +++ b/examples/notebook/examples/assignment_with_constraints_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -73,7 +73,7 @@ "metadata": {}, "source": [ "\n", - "Solve an assignment problem with combination constraints on workers.\n" + "solve an assignment problem with combination constraints on workers.\n" ] }, { @@ -88,7 +88,7 @@ "\n", "\n", "def solve_assignment():\n", - " \"\"\"Solve the assignment problem.\"\"\"\n", + " \"\"\"solve the assignment problem.\"\"\"\n", " # Data.\n", " cost = [\n", " [90, 76, 75, 70, 50, 74],\n", @@ -110,24 +110,24 @@ " [0, 1, 0, 1], # Workers 1, 3\n", " [0, 1, 1, 0], # Workers 1, 2\n", " [1, 1, 0, 0], # Workers 0, 1\n", - " [1, 0, 1, 0],\n", - " ] # Workers 0, 2\n", + " [1, 0, 1, 0], # Workers 0, 2\n", + " ]\n", "\n", " group2 = [\n", " [0, 0, 1, 1], # Workers 6, 7\n", " [0, 1, 0, 1], # Workers 5, 7\n", " [0, 1, 1, 0], # Workers 5, 6\n", " [1, 1, 0, 0], # Workers 4, 5\n", - " [1, 0, 0, 1],\n", - " ] # Workers 4, 7\n", + " [1, 0, 0, 1], # Workers 4, 7\n", + " ]\n", "\n", " group3 = [\n", " [0, 0, 1, 1], # Workers 10, 11\n", " [0, 1, 0, 1], # Workers 9, 11\n", " [0, 1, 1, 0], # Workers 9, 10\n", " [1, 0, 1, 0], # Workers 8, 10\n", - " [1, 0, 0, 1],\n", - " ] # Workers 8, 11\n", + " [1, 0, 0, 1], # Workers 8, 11\n", + " ]\n", "\n", " sizes = [10, 7, 3, 12, 15, 4, 11, 5]\n", " total_size_max = 15\n", @@ -141,54 +141,49 @@ " model = cp_model.CpModel()\n", " # Variables\n", " selected = [\n", - " [model.NewBoolVar(\"x[%i,%i]\" % (i, j)) for j in all_tasks] for i in all_workers\n", + " [model.new_bool_var(f\"x[{i},{j}]\") for j in all_tasks] for i in all_workers\n", " ]\n", - " works = [model.NewBoolVar(\"works[%i]\" % i) for i in all_workers]\n", + " works = [model.new_bool_var(f\"works[{i}]\") for i in all_workers]\n", "\n", " # Constraints\n", "\n", " # Link selected and workers.\n", " for i in range(num_workers):\n", - " model.AddMaxEquality(works[i], selected[i])\n", + " model.add_max_equality(works[i], selected[i])\n", "\n", " # Each task is assigned to at least one worker.\n", " for j in all_tasks:\n", - " model.Add(sum(selected[i][j] for i in all_workers) >= 1)\n", + " model.add(sum(selected[i][j] for i in all_workers) >= 1)\n", "\n", " # Total task 
size for each worker is at most total_size_max\n", " for i in all_workers:\n", - " model.Add(sum(sizes[j] * selected[i][j] for j in all_tasks) <= total_size_max)\n", + " model.add(sum(sizes[j] * selected[i][j] for j in all_tasks) <= total_size_max)\n", "\n", " # Group constraints.\n", - " model.AddAllowedAssignments([works[0], works[1], works[2], works[3]], group1)\n", - " model.AddAllowedAssignments([works[4], works[5], works[6], works[7]], group2)\n", - " model.AddAllowedAssignments([works[8], works[9], works[10], works[11]], group3)\n", + " model.add_allowed_assignments([works[0], works[1], works[2], works[3]], group1)\n", + " model.add_allowed_assignments([works[4], works[5], works[6], works[7]], group2)\n", + " model.add_allowed_assignments([works[8], works[9], works[10], works[11]], group3)\n", "\n", " # Objective\n", - " model.Minimize(\n", + " model.minimize(\n", " sum(selected[i][j] * cost[i][j] for j in all_tasks for i in all_workers)\n", " )\n", "\n", " # Solve and output solution.\n", " solver = cp_model.CpSolver()\n", - " status = solver.Solve(model)\n", + " status = solver.solve(model)\n", "\n", " if status == cp_model.OPTIMAL:\n", - " print(\"Total cost = %i\" % solver.ObjectiveValue())\n", + " print(f\"Total cost = {solver.objective_value}\")\n", " print()\n", " for i in all_workers:\n", " for j in all_tasks:\n", - " if solver.BooleanValue(selected[i][j]):\n", - " print(\n", - " \"Worker \", i, \" assigned to task \", j, \" Cost = \", cost[i][j]\n", - " )\n", + " if solver.boolean_value(selected[i][j]):\n", + " print(f\"Worker {i} assigned to task {j} with Cost = {cost[i][j]}\")\n", "\n", " print()\n", "\n", - " print(\"Statistics\")\n", - " print(\" - conflicts : %i\" % solver.NumConflicts())\n", - " print(\" - branches : %i\" % solver.NumBranches())\n", - " print(\" - wall time : %f s\" % solver.WallTime())\n", + " print(solver.response_stats())\n", "\n", "\n", "def main(argv: Sequence[str]) -> None:\n", diff --git a/examples/notebook/examples/balance_group_sat.ipynb b/examples/notebook/examples/balance_group_sat.ipynb index 97b60aa89ec..2e966e0e289 100644 --- a/examples/notebook/examples/balance_group_sat.ipynb +++ b/examples/notebook/examples/balance_group_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { @@ -88,7 +88,8 @@ "metadata": {}, "outputs": [], "source": [ - "from typing import Sequence\n", + "from typing import Dict, Sequence\n", + "\n", "from ortools.sat.python import cp_model\n", "\n", "\n", @@ -106,27 +107,27 @@ " self.__item_in_group = item_in_group\n", "\n", " def on_solution_callback(self):\n", - " print(\"Solution %i\" % self.__solution_count)\n", + " print(f\"Solution {self.__solution_count}\")\n", " self.__solution_count += 1\n", "\n", - " print(\" objective value = %i\" % self.ObjectiveValue())\n", + " print(f\" objective value = {self.objective_value}\")\n", " groups = {}\n", " sums = {}\n", " for g in self.__all_groups:\n", " groups[g] = []\n", " sums[g] = 0\n", " for item in self.__all_items:\n", - " if self.BooleanValue(self.__item_in_group[(item, g)]):\n", + " if self.boolean_value(self.__item_in_group[(item, g)]):\n", " groups[g].append(item)\n", " sums[g] += self.__values[item]\n", "\n", " for g in self.__all_groups:\n", " group = groups[g]\n", - " print(\"group %i: sum = %0.2f [\" % (g, sums[g]), end=\"\")\n", + " print(f\"group {g}: sum = {sums[g]:0.2f} [\", end=\"\")\n", " for item in group:\n", " value = self.__values[item]\n", " color = self.__colors[item]\n", - " print(\" (%i, %i, %i)\" % (item, value, color), end=\"\")\n", + " print(f\" ({item}, {value}, {color})\", end=\"\")\n", " print(\"]\")\n", "\n", "\n", @@ -145,7 +146,7 @@ " all_items = range(num_items)\n", " all_colors = range(num_colors)\n", "\n", - " # Values for each items.\n", + " # Values for each item.\n", " values = [1 + i + (i * i // 200) for i in all_items]\n", " # Color for each item (simple modulo).\n", " colors = [i % num_colors for i in all_items]\n", @@ -156,18 +157,17 @@ " num_items_per_group = num_items // num_groups\n", "\n", " # Collect all items in a given color.\n", - " items_per_color = {}\n", - " for c in all_colors:\n", - " items_per_color[c] = []\n", + " items_per_color: Dict[int, list[int]] = {}\n", + " for color in all_colors:\n", + " items_per_color[color] = []\n", " for i in all_items:\n", - " if colors[i] == c:\n", - " items_per_color[c].append(i)\n", + " if colors[i] == color:\n", + " items_per_color[color].append(i)\n", "\n", " print(\n", - " \"Model has %i items, %i groups, and %i colors\"\n", - " % (num_items, num_groups, num_colors)\n", + " f\"Model has {num_items} items, {num_groups} groups, and\" f\" {num_colors} colors\"\n", " )\n", - " print(\" average sum per group = %i\" % average_sum_per_group)\n", + " print(f\" average sum per group = {average_sum_per_group}\")\n", "\n", " # Model.\n", "\n", @@ -176,26 +176,26 @@ " item_in_group = {}\n", " for i in all_items:\n", " for g in all_groups:\n", - " item_in_group[(i, g)] = model.NewBoolVar(\"item %d in group %d\" % (i, g))\n", + " item_in_group[(i, g)] = model.new_bool_var(f\"item {i} in group {g}\")\n", "\n", " # Each group must have the same size.\n", " for g in all_groups:\n", - " model.Add(sum(item_in_group[(i, g)] for i in all_items) == num_items_per_group)\n", + " model.add(sum(item_in_group[(i, g)] for i in all_items) == num_items_per_group)\n", "\n", " # One item must belong to exactly one group.\n", " for i in all_items:\n", - " model.Add(sum(item_in_group[(i, g)] for g in all_groups) == 1)\n", + " model.add(sum(item_in_group[(i, g)] for g in all_groups) == 1)\n", "\n", " # The deviation of the sum of the items in a group from the average.\n", - " e = model.NewIntVar(0, 550, \"epsilon\")\n", + " e = model.new_int_var(0, 550, \"epsilon\")\n", "\n", " # Constrain the sum of values in one group 
around the average sum per group.\n", " for g in all_groups:\n", - " model.Add(\n", + " model.add(\n", " sum(item_in_group[(i, g)] * values[i] for i in all_items)\n", " <= average_sum_per_group + e\n", " )\n", - " model.Add(\n", + " model.add(\n", " sum(item_in_group[(i, g)] * values[i] for i in all_items)\n", " >= average_sum_per_group - e\n", " )\n", @@ -204,24 +204,22 @@ " color_in_group = {}\n", " for g in all_groups:\n", " for c in all_colors:\n", - " color_in_group[(c, g)] = model.NewBoolVar(\n", - " \"color %d is in group %d\" % (c, g)\n", - " )\n", + " color_in_group[(c, g)] = model.new_bool_var(f\"color {c} is in group {g}\")\n", "\n", " # Item is in a group implies its color is in that group.\n", " for i in all_items:\n", " for g in all_groups:\n", - " model.AddImplication(item_in_group[(i, g)], color_in_group[(colors[i], g)])\n", + " model.add_implication(item_in_group[(i, g)], color_in_group[(colors[i], g)])\n", "\n", " # If a color is in a group, it must contain at least\n", " # min_items_of_same_color_per_group items from that color.\n", " for c in all_colors:\n", " for g in all_groups:\n", " literal = color_in_group[(c, g)]\n", - " model.Add(\n", + " model.add(\n", " sum(item_in_group[(i, g)] for i in items_per_color[c])\n", " >= min_items_of_same_color_per_group\n", - " ).OnlyEnforceIf(literal)\n", + " ).only_enforce_if(literal)\n", "\n", " # Compute the maximum number of colors in a group.\n", " max_color = num_items_per_group // min_items_of_same_color_per_group\n", @@ -229,10 +227,10 @@ " # Redundant constraint, it helps with solving time.\n", " if max_color < num_colors:\n", " for g in all_groups:\n", - " model.Add(sum(color_in_group[(c, g)] for c in all_colors) <= max_color)\n", + " model.add(sum(color_in_group[(c, g)] for c in all_colors) <= max_color)\n", "\n", - " # Minimize epsilon\n", - " model.Minimize(e)\n", + " # minimize epsilon\n", + " model.minimize(e)\n", "\n", " solver = cp_model.CpSolver()\n", " # solver.parameters.log_search_progress = True\n", @@ -240,14 +238,11 @@ " solution_printer = SolutionPrinter(\n", " values, colors, all_groups, all_items, item_in_group\n", " )\n", - " status = solver.Solve(model, solution_printer)\n", + " status = solver.solve(model, solution_printer)\n", "\n", " if status == cp_model.OPTIMAL:\n", - " print(\"Optimal epsilon: %i\" % solver.ObjectiveValue())\n", - " print(\"Statistics\")\n", - " print(\" - conflicts : %i\" % solver.NumConflicts())\n", - " print(\" - branches : %i\" % solver.NumBranches())\n", - " print(\" - wall time : %f s\" % solver.WallTime())\n", + " print(f\"Optimal epsilon: {solver.objective_value}\")\n", + " print(solver.response_stats())\n", " else:\n", " print(\"No solution found\")\n", "\n", diff --git a/examples/notebook/examples/bus_driver_scheduling_flow_sat.ipynb b/examples/notebook/examples/bus_driver_scheduling_flow_sat.ipynb index 3f4b5cd2fd1..59a31620011 100644 --- a/examples/notebook/examples/bus_driver_scheduling_flow_sat.ipynb +++ b/examples/notebook/examples/bus_driver_scheduling_flow_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { @@ -99,14 +99,13 @@ "from ortools.sat.python import cp_model\n", "\n", "PARSER = argparse.ArgumentParser()\n", + "PARSER.add_argument(\"--instance\", default=1, type=int, help=\"Instance number (1..3).\")\n", "PARSER.add_argument(\n", - " '--instance', default=1, type=int, help='Instance number (1..3).')\n", - "PARSER.add_argument(\n", - " '--output_proto_file',\n", + " \"--output_proto_file\",\n", " default=\"\",\n", - " help='Output file to write the cp_model'\n", - " 'proto to.')\n", - "PARSER.add_argument('--params', default=\"\", help='Sat solver parameters.')\n", + " help=\"Output file to write the cp_model\" \"proto to.\",\n", + ")\n", + "PARSER.add_argument(\"--params\", default=\"\", help=\"Sat solver parameters.\")\n", "\n", "SAMPLE_SHIFTS_SMALL = [\n", " #\n", @@ -118,1618 +117,1618 @@ " # - shift end minute\n", " # - shift duration in minutes\n", " #\n", - " [0, '05:18', '06:00', 318, 360, 42],\n", - " [1, '05:26', '06:08', 326, 368, 42],\n", - " [2, '05:40', '05:56', 340, 356, 16],\n", - " [3, '06:06', '06:51', 366, 411, 45],\n", - " [4, '06:40', '07:52', 400, 472, 72],\n", - " [5, '06:42', '07:13', 402, 433, 31],\n", - " [6, '06:48', '08:15', 408, 495, 87],\n", - " [7, '06:59', '08:07', 419, 487, 68],\n", - " [8, '07:20', '07:36', 440, 456, 16],\n", - " [9, '07:35', '08:22', 455, 502, 47],\n", - " [10, '07:50', '08:55', 470, 535, 65],\n", - " [11, '08:00', '09:05', 480, 545, 65],\n", - " [12, '08:00', '08:35', 480, 515, 35],\n", - " [13, '08:11', '09:41', 491, 581, 90],\n", - " [14, '08:28', '08:50', 508, 530, 22],\n", - " [15, '08:35', '08:45', 515, 525, 10],\n", - " [16, '08:40', '08:50', 520, 530, 10],\n", - " [17, '09:03', '10:28', 543, 628, 85],\n", - " [18, '09:23', '09:49', 563, 589, 26],\n", - " [19, '09:30', '09:40', 570, 580, 10],\n", - " [20, '09:57', '10:20', 597, 620, 23],\n", - " [21, '10:09', '11:03', 609, 663, 54],\n", - " [22, '10:20', '10:30', 620, 630, 10],\n", - " [23, '11:00', '11:10', 660, 670, 10],\n", - " [24, '11:45', '12:24', 705, 744, 39],\n", - " [25, '12:18', '13:00', 738, 780, 42],\n", - " [26, '13:18', '14:44', 798, 884, 86],\n", - " [27, '13:53', '14:49', 833, 889, 56],\n", - " [28, '14:03', '14:50', 843, 890, 47],\n", - " [29, '14:28', '15:15', 868, 915, 47],\n", - " [30, '14:30', '15:41', 870, 941, 71],\n", - " [31, '14:48', '15:35', 888, 935, 47],\n", - " [32, '15:03', '15:50', 903, 950, 47],\n", - " [33, '15:28', '16:54', 928, 1014, 86],\n", - " [34, '15:38', '16:25', 938, 985, 47],\n", - " [35, '15:40', '15:56', 940, 956, 16],\n", - " [36, '15:58', '16:45', 958, 1005, 47],\n", - " [37, '16:04', '17:30', 964, 1050, 86],\n", - " [38, '16:28', '17:15', 988, 1035, 47],\n", - " [39, '16:36', '17:21', 996, 1041, 45],\n", - " [40, '16:50', '17:00', 1010, 1020, 10],\n", - " [41, '16:54', '18:20', 1014, 1100, 86],\n", - " [42, '17:01', '17:13', 1021, 1033, 12],\n", - " [43, '17:19', '18:31', 1039, 1111, 72],\n", - " [44, '17:23', '18:10', 1043, 1090, 47],\n", - " [45, '17:34', '18:15', 1054, 1095, 41],\n", - " [46, '18:04', '19:29', 1084, 1169, 85],\n", - " [47, '18:34', '19:58', 1114, 1198, 84],\n", - " [48, '19:56', '20:34', 1196, 1234, 38],\n", - " [49, '20:05', '20:48', 1205, 1248, 43]\n", + " [0, \"05:18\", \"06:00\", 318, 360, 42],\n", + " [1, \"05:26\", \"06:08\", 326, 368, 42],\n", + " [2, \"05:40\", \"05:56\", 340, 356, 16],\n", + " [3, \"06:06\", \"06:51\", 366, 411, 45],\n", + " [4, \"06:40\", \"07:52\", 400, 472, 72],\n", + " [5, \"06:42\", \"07:13\", 402, 433, 31],\n", + " [6, \"06:48\", \"08:15\", 408, 495, 87],\n", + " [7, 
\"06:59\", \"08:07\", 419, 487, 68],\n", + " [8, \"07:20\", \"07:36\", 440, 456, 16],\n", + " [9, \"07:35\", \"08:22\", 455, 502, 47],\n", + " [10, \"07:50\", \"08:55\", 470, 535, 65],\n", + " [11, \"08:00\", \"09:05\", 480, 545, 65],\n", + " [12, \"08:00\", \"08:35\", 480, 515, 35],\n", + " [13, \"08:11\", \"09:41\", 491, 581, 90],\n", + " [14, \"08:28\", \"08:50\", 508, 530, 22],\n", + " [15, \"08:35\", \"08:45\", 515, 525, 10],\n", + " [16, \"08:40\", \"08:50\", 520, 530, 10],\n", + " [17, \"09:03\", \"10:28\", 543, 628, 85],\n", + " [18, \"09:23\", \"09:49\", 563, 589, 26],\n", + " [19, \"09:30\", \"09:40\", 570, 580, 10],\n", + " [20, \"09:57\", \"10:20\", 597, 620, 23],\n", + " [21, \"10:09\", \"11:03\", 609, 663, 54],\n", + " [22, \"10:20\", \"10:30\", 620, 630, 10],\n", + " [23, \"11:00\", \"11:10\", 660, 670, 10],\n", + " [24, \"11:45\", \"12:24\", 705, 744, 39],\n", + " [25, \"12:18\", \"13:00\", 738, 780, 42],\n", + " [26, \"13:18\", \"14:44\", 798, 884, 86],\n", + " [27, \"13:53\", \"14:49\", 833, 889, 56],\n", + " [28, \"14:03\", \"14:50\", 843, 890, 47],\n", + " [29, \"14:28\", \"15:15\", 868, 915, 47],\n", + " [30, \"14:30\", \"15:41\", 870, 941, 71],\n", + " [31, \"14:48\", \"15:35\", 888, 935, 47],\n", + " [32, \"15:03\", \"15:50\", 903, 950, 47],\n", + " [33, \"15:28\", \"16:54\", 928, 1014, 86],\n", + " [34, \"15:38\", \"16:25\", 938, 985, 47],\n", + " [35, \"15:40\", \"15:56\", 940, 956, 16],\n", + " [36, \"15:58\", \"16:45\", 958, 1005, 47],\n", + " [37, \"16:04\", \"17:30\", 964, 1050, 86],\n", + " [38, \"16:28\", \"17:15\", 988, 1035, 47],\n", + " [39, \"16:36\", \"17:21\", 996, 1041, 45],\n", + " [40, \"16:50\", \"17:00\", 1010, 1020, 10],\n", + " [41, \"16:54\", \"18:20\", 1014, 1100, 86],\n", + " [42, \"17:01\", \"17:13\", 1021, 1033, 12],\n", + " [43, \"17:19\", \"18:31\", 1039, 1111, 72],\n", + " [44, \"17:23\", \"18:10\", 1043, 1090, 47],\n", + " [45, \"17:34\", \"18:15\", 1054, 1095, 41],\n", + " [46, \"18:04\", \"19:29\", 1084, 1169, 85],\n", + " [47, \"18:34\", \"19:58\", 1114, 1198, 84],\n", + " [48, \"19:56\", \"20:34\", 1196, 1234, 38],\n", + " [49, \"20:05\", \"20:48\", 1205, 1248, 43],\n", "] # yapf:disable\n", "\n", "SAMPLE_SHIFTS_MEDIUM = [\n", - " [0, '04:30', '04:53', 270, 293, 23],\n", - " [1, '04:46', '04:56', 286, 296, 10],\n", - " [2, '04:52', '05:56', 292, 356, 64],\n", - " [3, '04:53', '05:23', 293, 323, 30],\n", - " [4, '05:07', '05:44', 307, 344, 37],\n", - " [5, '05:10', '06:06', 310, 366, 56],\n", - " [6, '05:18', '06:03', 318, 363, 45],\n", - " [7, '05:30', '05:40', 330, 340, 10],\n", - " [8, '05:30', '05:40', 330, 340, 10],\n", - " [9, '05:33', '06:15', 333, 375, 42],\n", - " [10, '05:40', '05:50', 340, 350, 10],\n", - " [11, '05:43', '06:08', 343, 368, 25],\n", - " [12, '05:54', '07:20', 354, 440, 86],\n", - " [13, '06:04', '06:37', 364, 397, 33],\n", - " [14, '06:13', '06:58', 373, 418, 45],\n", - " [15, '06:14', '07:40', 374, 460, 86],\n", - " [16, '06:15', '07:15', 375, 435, 60],\n", - " [17, '06:16', '06:26', 376, 386, 10],\n", - " [18, '06:17', '06:34', 377, 394, 17],\n", - " [19, '06:20', '06:36', 380, 396, 16],\n", - " [20, '06:22', '07:06', 382, 426, 44],\n", - " [21, '06:24', '07:50', 384, 470, 86],\n", - " [22, '06:27', '06:44', 387, 404, 17],\n", - " [23, '06:30', '06:40', 390, 400, 10],\n", - " [24, '06:31', '06:43', 391, 403, 12],\n", - " [25, '06:33', '07:53', 393, 473, 80],\n", - " [26, '06:34', '07:09', 394, 429, 35],\n", - " [27, '06:40', '06:56', 400, 416, 16],\n", - " [28, '06:44', '07:17', 404, 437, 33],\n", - " [29, 
'06:46', '06:58', 406, 418, 12],\n", - " [30, '06:49', '07:43', 409, 463, 54],\n", - " [31, '06:50', '07:05', 410, 425, 15],\n", - " [32, '06:52', '07:36', 412, 456, 44],\n", - " [33, '06:54', '07:27', 414, 447, 33],\n", - " [34, '06:56', '08:23', 416, 503, 87],\n", - " [35, '07:04', '07:44', 424, 464, 40],\n", - " [36, '07:11', '08:36', 431, 516, 85],\n", - " [37, '07:17', '07:35', 437, 455, 18],\n", - " [38, '07:22', '08:06', 442, 486, 44],\n", - " [39, '07:27', '08:15', 447, 495, 48],\n", - " [40, '07:35', '07:45', 455, 465, 10],\n", - " [41, '07:43', '08:08', 463, 488, 25],\n", - " [42, '07:50', '08:37', 470, 517, 47],\n", - " [43, '07:58', '08:45', 478, 525, 47],\n", - " [44, '08:00', '08:35', 480, 515, 35],\n", - " [45, '08:06', '08:51', 486, 531, 45],\n", - " [46, '08:10', '08:45', 490, 525, 35],\n", - " [47, '08:15', '08:30', 495, 510, 15],\n", - " [48, '08:16', '09:00', 496, 540, 44],\n", - " [49, '08:18', '09:16', 498, 556, 58],\n", - " [50, '08:20', '08:36', 500, 516, 16],\n", - " [51, '08:27', '09:07', 507, 547, 40],\n", - " [52, '08:30', '08:45', 510, 525, 15],\n", - " [53, '08:35', '09:15', 515, 555, 40],\n", - " [54, '08:46', '09:30', 526, 570, 44],\n", - " [55, '08:51', '09:17', 531, 557, 26],\n", - " [56, '08:55', '09:15', 535, 555, 20],\n", - " [57, '08:58', '09:38', 538, 578, 40],\n", - " [58, '09:00', '09:35', 540, 575, 35],\n", - " [59, '09:00', '09:16', 540, 556, 16],\n", - " [60, '09:20', '09:36', 560, 576, 16],\n", - " [61, '09:31', '09:43', 571, 583, 12],\n", - " [62, '09:33', '10:15', 573, 615, 42],\n", - " [63, '09:54', '10:05', 594, 605, 11],\n", - " [64, '10:11', '10:38', 611, 638, 27],\n", - " [65, '10:18', '11:00', 618, 660, 42],\n", - " [66, '10:21', '10:47', 621, 647, 26],\n", - " [67, '10:25', '11:04', 625, 664, 39],\n", - " [68, '10:26', '11:08', 626, 668, 42],\n", - " [69, '10:44', '12:11', 644, 731, 87],\n", - " [70, '11:00', '11:16', 660, 676, 16],\n", - " [71, '11:15', '11:54', 675, 714, 39],\n", - " [72, '11:16', '11:28', 676, 688, 12],\n", - " [73, '11:20', '11:30', 680, 690, 10],\n", - " [74, '11:21', '11:47', 681, 707, 26],\n", - " [75, '11:25', '12:04', 685, 724, 39],\n", - " [76, '11:34', '11:45', 694, 705, 11],\n", - " [77, '11:35', '12:14', 695, 734, 39],\n", - " [78, '11:41', '12:23', 701, 743, 42],\n", - " [79, '11:44', '12:35', 704, 755, 51],\n", - " [80, '11:46', '11:58', 706, 718, 12],\n", - " [81, '12:00', '12:10', 720, 730, 10],\n", - " [82, '12:04', '12:15', 724, 735, 11],\n", - " [83, '12:04', '13:04', 724, 784, 60],\n", - " [84, '12:11', '12:38', 731, 758, 27],\n", - " [85, '12:15', '12:54', 735, 774, 39],\n", - " [86, '12:25', '13:10', 745, 790, 45],\n", - " [87, '12:30', '12:40', 750, 760, 10],\n", - " [88, '12:34', '13:58', 754, 838, 84],\n", - " [89, '12:38', '13:25', 758, 805, 47],\n", - " [90, '12:48', '13:35', 768, 815, 47],\n", - " [91, '13:00', '13:16', 780, 796, 16],\n", - " [92, '13:05', '13:44', 785, 824, 39],\n", - " [93, '13:08', '13:55', 788, 835, 47],\n", - " [94, '13:14', '14:38', 794, 878, 84],\n", - " [95, '13:23', '13:49', 803, 829, 26],\n", - " [96, '13:25', '14:04', 805, 844, 39],\n", - " [97, '13:28', '14:54', 808, 894, 86],\n", - " [98, '13:31', '13:43', 811, 823, 12],\n", - " [99, '13:34', '14:58', 814, 898, 84],\n", - " [100, '13:38', '14:25', 818, 865, 47],\n", - " [101, '13:38', '15:04', 818, 904, 86],\n", - " [102, '13:39', '14:33', 819, 873, 54],\n", - " [103, '13:40', '13:50', 820, 830, 10],\n", - " [104, '13:43', '14:10', 823, 850, 27],\n", - " [105, '13:48', '14:35', 828, 875, 47],\n", - " [106, 
'13:48', '14:35', 828, 875, 47],\n", - " [107, '13:53', '14:40', 833, 880, 47],\n", - " [108, '13:58', '15:24', 838, 924, 86],\n", - " [109, '13:58', '14:25', 838, 865, 27],\n", - " [110, '14:00', '14:16', 840, 856, 16],\n", - " [111, '14:13', '15:00', 853, 900, 47],\n", - " [112, '14:20', '15:31', 860, 931, 71],\n", - " [113, '14:25', '15:02', 865, 902, 37],\n", - " [114, '14:34', '14:45', 874, 885, 11],\n", - " [115, '14:40', '15:51', 880, 951, 71],\n", - " [116, '14:40', '14:56', 880, 896, 16],\n", - " [117, '14:46', '14:58', 886, 898, 12],\n", - " [118, '14:49', '15:43', 889, 943, 54],\n", - " [119, '14:52', '15:21', 892, 921, 29],\n", - " [120, '14:58', '16:24', 898, 984, 86],\n", - " [121, '14:59', '15:53', 899, 953, 54],\n", - " [122, '15:00', '15:10', 900, 910, 10],\n", - " [123, '15:00', '15:35', 900, 935, 35],\n", - " [124, '15:08', '15:45', 908, 945, 37],\n", - " [125, '15:12', '15:36', 912, 936, 24],\n", - " [126, '15:18', '16:05', 918, 965, 47],\n", - " [127, '15:24', '16:05', 924, 965, 41],\n", - " [128, '15:31', '15:43', 931, 943, 12],\n", - " [129, '15:35', '15:54', 935, 954, 19],\n", - " [130, '15:36', '16:21', 936, 981, 45],\n", - " [131, '15:39', '16:33', 939, 993, 54],\n", - " [132, '15:48', '16:35', 948, 995, 47],\n", - " [133, '15:50', '17:01', 950, 1021, 71],\n", - " [134, '16:03', '16:50', 963, 1010, 47],\n", - " [135, '16:18', '17:44', 978, 1064, 86],\n", - " [136, '16:24', '17:05', 984, 1025, 41],\n", - " [137, '16:28', '17:15', 988, 1035, 47],\n", - " [138, '16:34', '17:15', 994, 1035, 41],\n", - " [139, '16:38', '17:25', 998, 1045, 47],\n", - " [140, '16:40', '16:56', 1000, 1016, 16],\n", - " [141, '16:45', '17:04', 1005, 1024, 19],\n", - " [142, '16:52', '17:36', 1012, 1056, 44],\n", - " [143, '16:58', '17:45', 1018, 1065, 47],\n", - " [144, '17:04', '18:30', 1024, 1110, 86],\n", - " [145, '17:04', '17:45', 1024, 1065, 41],\n", - " [146, '17:09', '18:03', 1029, 1083, 54],\n", - " [147, '17:18', '18:44', 1038, 1124, 86],\n", - " [148, '17:28', '18:15', 1048, 1095, 47],\n", - " [149, '17:29', '18:41', 1049, 1121, 72],\n", - " [150, '17:36', '18:21', 1056, 1101, 45],\n", - " [151, '17:38', '18:25', 1058, 1105, 47],\n", - " [152, '17:40', '17:56', 1060, 1076, 16],\n", - " [153, '17:45', '18:04', 1065, 1084, 19],\n", - " [154, '17:46', '17:58', 1066, 1078, 12],\n", - " [155, '17:48', '18:35', 1068, 1115, 47],\n", - " [156, '17:49', '18:43', 1069, 1123, 54],\n", - " [157, '17:55', '18:14', 1075, 1094, 19],\n", - " [158, '17:58', '18:45', 1078, 1125, 47],\n", - " [159, '18:00', '19:11', 1080, 1151, 71],\n", - " [160, '18:04', '18:45', 1084, 1125, 41],\n", - " [161, '18:09', '19:03', 1089, 1143, 54],\n", - " [162, '18:13', '19:00', 1093, 1140, 47],\n", - " [163, '18:13', '18:40', 1093, 1120, 27],\n", - " [164, '18:19', '19:13', 1099, 1153, 54],\n", - " [165, '18:28', '19:25', 1108, 1165, 57],\n", - " [166, '18:48', '19:28', 1128, 1168, 40],\n", - " [167, '19:03', '19:45', 1143, 1185, 42],\n", - " [168, '19:20', '19:36', 1160, 1176, 16],\n", - " [169, '19:21', '19:31', 1161, 1171, 10],\n", - " [170, '19:25', '20:04', 1165, 1204, 39],\n", - " [171, '19:26', '20:08', 1166, 1208, 42],\n", - " [172, '19:30', '19:40', 1170, 1180, 10],\n", - " [173, '19:44', '20:33', 1184, 1233, 49],\n", - " [174, '19:48', '21:09', 1188, 1269, 81],\n", - " [175, '19:53', '21:02', 1193, 1262, 69],\n", - " [176, '20:04', '20:29', 1204, 1229, 25],\n", - " [177, '20:17', '21:03', 1217, 1263, 46],\n", - " [178, '20:20', '20:57', 1220, 1257, 37],\n", - " [179, '20:29', '21:18', 1229, 1278, 49],\n", 
- " [180, '20:35', '21:54', 1235, 1314, 79],\n", - " [181, '20:40', '20:50', 1240, 1250, 10],\n", - " [182, '20:47', '21:42', 1247, 1302, 55],\n", - " [183, '21:00', '21:10', 1260, 1270, 10],\n", - " [184, '21:07', '21:44', 1267, 1304, 37],\n", - " [185, '21:14', '22:03', 1274, 1323, 49],\n", - " [186, '21:39', '21:55', 1299, 1315, 16],\n", - " [187, '21:40', '22:17', 1300, 1337, 37],\n", - " [188, '21:40', '21:50', 1300, 1310, 10],\n", - " [189, '21:48', '22:03', 1308, 1323, 15],\n", - " [190, '22:17', '23:03', 1337, 1383, 46],\n", - " [191, '22:43', '23:08', 1363, 1388, 25],\n", - " [192, '23:35', '01:05', 1415, 1505, 90],\n", - " [193, '23:46', '00:01', 1426, 1441, 15],\n", - " [194, '23:47', '00:33', 1427, 1473, 46],\n", - " [195, '23:52', '00:24', 1432, 1464, 32],\n", - " [196, '23:58', '00:38', 1438, 1478, 40],\n", - " [197, '00:02', '00:12', 1442, 1452, 10],\n", - " [198, '00:07', '00:39', 1447, 1479, 32],\n", - " [199, '00:25', '01:12', 1465, 1512, 47]\n", + " [0, \"04:30\", \"04:53\", 270, 293, 23],\n", + " [1, \"04:46\", \"04:56\", 286, 296, 10],\n", + " [2, \"04:52\", \"05:56\", 292, 356, 64],\n", + " [3, \"04:53\", \"05:23\", 293, 323, 30],\n", + " [4, \"05:07\", \"05:44\", 307, 344, 37],\n", + " [5, \"05:10\", \"06:06\", 310, 366, 56],\n", + " [6, \"05:18\", \"06:03\", 318, 363, 45],\n", + " [7, \"05:30\", \"05:40\", 330, 340, 10],\n", + " [8, \"05:30\", \"05:40\", 330, 340, 10],\n", + " [9, \"05:33\", \"06:15\", 333, 375, 42],\n", + " [10, \"05:40\", \"05:50\", 340, 350, 10],\n", + " [11, \"05:43\", \"06:08\", 343, 368, 25],\n", + " [12, \"05:54\", \"07:20\", 354, 440, 86],\n", + " [13, \"06:04\", \"06:37\", 364, 397, 33],\n", + " [14, \"06:13\", \"06:58\", 373, 418, 45],\n", + " [15, \"06:14\", \"07:40\", 374, 460, 86],\n", + " [16, \"06:15\", \"07:15\", 375, 435, 60],\n", + " [17, \"06:16\", \"06:26\", 376, 386, 10],\n", + " [18, \"06:17\", \"06:34\", 377, 394, 17],\n", + " [19, \"06:20\", \"06:36\", 380, 396, 16],\n", + " [20, \"06:22\", \"07:06\", 382, 426, 44],\n", + " [21, \"06:24\", \"07:50\", 384, 470, 86],\n", + " [22, \"06:27\", \"06:44\", 387, 404, 17],\n", + " [23, \"06:30\", \"06:40\", 390, 400, 10],\n", + " [24, \"06:31\", \"06:43\", 391, 403, 12],\n", + " [25, \"06:33\", \"07:53\", 393, 473, 80],\n", + " [26, \"06:34\", \"07:09\", 394, 429, 35],\n", + " [27, \"06:40\", \"06:56\", 400, 416, 16],\n", + " [28, \"06:44\", \"07:17\", 404, 437, 33],\n", + " [29, \"06:46\", \"06:58\", 406, 418, 12],\n", + " [30, \"06:49\", \"07:43\", 409, 463, 54],\n", + " [31, \"06:50\", \"07:05\", 410, 425, 15],\n", + " [32, \"06:52\", \"07:36\", 412, 456, 44],\n", + " [33, \"06:54\", \"07:27\", 414, 447, 33],\n", + " [34, \"06:56\", \"08:23\", 416, 503, 87],\n", + " [35, \"07:04\", \"07:44\", 424, 464, 40],\n", + " [36, \"07:11\", \"08:36\", 431, 516, 85],\n", + " [37, \"07:17\", \"07:35\", 437, 455, 18],\n", + " [38, \"07:22\", \"08:06\", 442, 486, 44],\n", + " [39, \"07:27\", \"08:15\", 447, 495, 48],\n", + " [40, \"07:35\", \"07:45\", 455, 465, 10],\n", + " [41, \"07:43\", \"08:08\", 463, 488, 25],\n", + " [42, \"07:50\", \"08:37\", 470, 517, 47],\n", + " [43, \"07:58\", \"08:45\", 478, 525, 47],\n", + " [44, \"08:00\", \"08:35\", 480, 515, 35],\n", + " [45, \"08:06\", \"08:51\", 486, 531, 45],\n", + " [46, \"08:10\", \"08:45\", 490, 525, 35],\n", + " [47, \"08:15\", \"08:30\", 495, 510, 15],\n", + " [48, \"08:16\", \"09:00\", 496, 540, 44],\n", + " [49, \"08:18\", \"09:16\", 498, 556, 58],\n", + " [50, \"08:20\", \"08:36\", 500, 516, 16],\n", + " [51, \"08:27\", \"09:07\", 507, 
547, 40],\n", + " [52, \"08:30\", \"08:45\", 510, 525, 15],\n", + " [53, \"08:35\", \"09:15\", 515, 555, 40],\n", + " [54, \"08:46\", \"09:30\", 526, 570, 44],\n", + " [55, \"08:51\", \"09:17\", 531, 557, 26],\n", + " [56, \"08:55\", \"09:15\", 535, 555, 20],\n", + " [57, \"08:58\", \"09:38\", 538, 578, 40],\n", + " [58, \"09:00\", \"09:35\", 540, 575, 35],\n", + " [59, \"09:00\", \"09:16\", 540, 556, 16],\n", + " [60, \"09:20\", \"09:36\", 560, 576, 16],\n", + " [61, \"09:31\", \"09:43\", 571, 583, 12],\n", + " [62, \"09:33\", \"10:15\", 573, 615, 42],\n", + " [63, \"09:54\", \"10:05\", 594, 605, 11],\n", + " [64, \"10:11\", \"10:38\", 611, 638, 27],\n", + " [65, \"10:18\", \"11:00\", 618, 660, 42],\n", + " [66, \"10:21\", \"10:47\", 621, 647, 26],\n", + " [67, \"10:25\", \"11:04\", 625, 664, 39],\n", + " [68, \"10:26\", \"11:08\", 626, 668, 42],\n", + " [69, \"10:44\", \"12:11\", 644, 731, 87],\n", + " [70, \"11:00\", \"11:16\", 660, 676, 16],\n", + " [71, \"11:15\", \"11:54\", 675, 714, 39],\n", + " [72, \"11:16\", \"11:28\", 676, 688, 12],\n", + " [73, \"11:20\", \"11:30\", 680, 690, 10],\n", + " [74, \"11:21\", \"11:47\", 681, 707, 26],\n", + " [75, \"11:25\", \"12:04\", 685, 724, 39],\n", + " [76, \"11:34\", \"11:45\", 694, 705, 11],\n", + " [77, \"11:35\", \"12:14\", 695, 734, 39],\n", + " [78, \"11:41\", \"12:23\", 701, 743, 42],\n", + " [79, \"11:44\", \"12:35\", 704, 755, 51],\n", + " [80, \"11:46\", \"11:58\", 706, 718, 12],\n", + " [81, \"12:00\", \"12:10\", 720, 730, 10],\n", + " [82, \"12:04\", \"12:15\", 724, 735, 11],\n", + " [83, \"12:04\", \"13:04\", 724, 784, 60],\n", + " [84, \"12:11\", \"12:38\", 731, 758, 27],\n", + " [85, \"12:15\", \"12:54\", 735, 774, 39],\n", + " [86, \"12:25\", \"13:10\", 745, 790, 45],\n", + " [87, \"12:30\", \"12:40\", 750, 760, 10],\n", + " [88, \"12:34\", \"13:58\", 754, 838, 84],\n", + " [89, \"12:38\", \"13:25\", 758, 805, 47],\n", + " [90, \"12:48\", \"13:35\", 768, 815, 47],\n", + " [91, \"13:00\", \"13:16\", 780, 796, 16],\n", + " [92, \"13:05\", \"13:44\", 785, 824, 39],\n", + " [93, \"13:08\", \"13:55\", 788, 835, 47],\n", + " [94, \"13:14\", \"14:38\", 794, 878, 84],\n", + " [95, \"13:23\", \"13:49\", 803, 829, 26],\n", + " [96, \"13:25\", \"14:04\", 805, 844, 39],\n", + " [97, \"13:28\", \"14:54\", 808, 894, 86],\n", + " [98, \"13:31\", \"13:43\", 811, 823, 12],\n", + " [99, \"13:34\", \"14:58\", 814, 898, 84],\n", + " [100, \"13:38\", \"14:25\", 818, 865, 47],\n", + " [101, \"13:38\", \"15:04\", 818, 904, 86],\n", + " [102, \"13:39\", \"14:33\", 819, 873, 54],\n", + " [103, \"13:40\", \"13:50\", 820, 830, 10],\n", + " [104, \"13:43\", \"14:10\", 823, 850, 27],\n", + " [105, \"13:48\", \"14:35\", 828, 875, 47],\n", + " [106, \"13:48\", \"14:35\", 828, 875, 47],\n", + " [107, \"13:53\", \"14:40\", 833, 880, 47],\n", + " [108, \"13:58\", \"15:24\", 838, 924, 86],\n", + " [109, \"13:58\", \"14:25\", 838, 865, 27],\n", + " [110, \"14:00\", \"14:16\", 840, 856, 16],\n", + " [111, \"14:13\", \"15:00\", 853, 900, 47],\n", + " [112, \"14:20\", \"15:31\", 860, 931, 71],\n", + " [113, \"14:25\", \"15:02\", 865, 902, 37],\n", + " [114, \"14:34\", \"14:45\", 874, 885, 11],\n", + " [115, \"14:40\", \"15:51\", 880, 951, 71],\n", + " [116, \"14:40\", \"14:56\", 880, 896, 16],\n", + " [117, \"14:46\", \"14:58\", 886, 898, 12],\n", + " [118, \"14:49\", \"15:43\", 889, 943, 54],\n", + " [119, \"14:52\", \"15:21\", 892, 921, 29],\n", + " [120, \"14:58\", \"16:24\", 898, 984, 86],\n", + " [121, \"14:59\", \"15:53\", 899, 953, 54],\n", + " [122, 
\"15:00\", \"15:10\", 900, 910, 10],\n", + " [123, \"15:00\", \"15:35\", 900, 935, 35],\n", + " [124, \"15:08\", \"15:45\", 908, 945, 37],\n", + " [125, \"15:12\", \"15:36\", 912, 936, 24],\n", + " [126, \"15:18\", \"16:05\", 918, 965, 47],\n", + " [127, \"15:24\", \"16:05\", 924, 965, 41],\n", + " [128, \"15:31\", \"15:43\", 931, 943, 12],\n", + " [129, \"15:35\", \"15:54\", 935, 954, 19],\n", + " [130, \"15:36\", \"16:21\", 936, 981, 45],\n", + " [131, \"15:39\", \"16:33\", 939, 993, 54],\n", + " [132, \"15:48\", \"16:35\", 948, 995, 47],\n", + " [133, \"15:50\", \"17:01\", 950, 1021, 71],\n", + " [134, \"16:03\", \"16:50\", 963, 1010, 47],\n", + " [135, \"16:18\", \"17:44\", 978, 1064, 86],\n", + " [136, \"16:24\", \"17:05\", 984, 1025, 41],\n", + " [137, \"16:28\", \"17:15\", 988, 1035, 47],\n", + " [138, \"16:34\", \"17:15\", 994, 1035, 41],\n", + " [139, \"16:38\", \"17:25\", 998, 1045, 47],\n", + " [140, \"16:40\", \"16:56\", 1000, 1016, 16],\n", + " [141, \"16:45\", \"17:04\", 1005, 1024, 19],\n", + " [142, \"16:52\", \"17:36\", 1012, 1056, 44],\n", + " [143, \"16:58\", \"17:45\", 1018, 1065, 47],\n", + " [144, \"17:04\", \"18:30\", 1024, 1110, 86],\n", + " [145, \"17:04\", \"17:45\", 1024, 1065, 41],\n", + " [146, \"17:09\", \"18:03\", 1029, 1083, 54],\n", + " [147, \"17:18\", \"18:44\", 1038, 1124, 86],\n", + " [148, \"17:28\", \"18:15\", 1048, 1095, 47],\n", + " [149, \"17:29\", \"18:41\", 1049, 1121, 72],\n", + " [150, \"17:36\", \"18:21\", 1056, 1101, 45],\n", + " [151, \"17:38\", \"18:25\", 1058, 1105, 47],\n", + " [152, \"17:40\", \"17:56\", 1060, 1076, 16],\n", + " [153, \"17:45\", \"18:04\", 1065, 1084, 19],\n", + " [154, \"17:46\", \"17:58\", 1066, 1078, 12],\n", + " [155, \"17:48\", \"18:35\", 1068, 1115, 47],\n", + " [156, \"17:49\", \"18:43\", 1069, 1123, 54],\n", + " [157, \"17:55\", \"18:14\", 1075, 1094, 19],\n", + " [158, \"17:58\", \"18:45\", 1078, 1125, 47],\n", + " [159, \"18:00\", \"19:11\", 1080, 1151, 71],\n", + " [160, \"18:04\", \"18:45\", 1084, 1125, 41],\n", + " [161, \"18:09\", \"19:03\", 1089, 1143, 54],\n", + " [162, \"18:13\", \"19:00\", 1093, 1140, 47],\n", + " [163, \"18:13\", \"18:40\", 1093, 1120, 27],\n", + " [164, \"18:19\", \"19:13\", 1099, 1153, 54],\n", + " [165, \"18:28\", \"19:25\", 1108, 1165, 57],\n", + " [166, \"18:48\", \"19:28\", 1128, 1168, 40],\n", + " [167, \"19:03\", \"19:45\", 1143, 1185, 42],\n", + " [168, \"19:20\", \"19:36\", 1160, 1176, 16],\n", + " [169, \"19:21\", \"19:31\", 1161, 1171, 10],\n", + " [170, \"19:25\", \"20:04\", 1165, 1204, 39],\n", + " [171, \"19:26\", \"20:08\", 1166, 1208, 42],\n", + " [172, \"19:30\", \"19:40\", 1170, 1180, 10],\n", + " [173, \"19:44\", \"20:33\", 1184, 1233, 49],\n", + " [174, \"19:48\", \"21:09\", 1188, 1269, 81],\n", + " [175, \"19:53\", \"21:02\", 1193, 1262, 69],\n", + " [176, \"20:04\", \"20:29\", 1204, 1229, 25],\n", + " [177, \"20:17\", \"21:03\", 1217, 1263, 46],\n", + " [178, \"20:20\", \"20:57\", 1220, 1257, 37],\n", + " [179, \"20:29\", \"21:18\", 1229, 1278, 49],\n", + " [180, \"20:35\", \"21:54\", 1235, 1314, 79],\n", + " [181, \"20:40\", \"20:50\", 1240, 1250, 10],\n", + " [182, \"20:47\", \"21:42\", 1247, 1302, 55],\n", + " [183, \"21:00\", \"21:10\", 1260, 1270, 10],\n", + " [184, \"21:07\", \"21:44\", 1267, 1304, 37],\n", + " [185, \"21:14\", \"22:03\", 1274, 1323, 49],\n", + " [186, \"21:39\", \"21:55\", 1299, 1315, 16],\n", + " [187, \"21:40\", \"22:17\", 1300, 1337, 37],\n", + " [188, \"21:40\", \"21:50\", 1300, 1310, 10],\n", + " [189, \"21:48\", \"22:03\", 1308, 
1323, 15],\n", + " [190, \"22:17\", \"23:03\", 1337, 1383, 46],\n", + " [191, \"22:43\", \"23:08\", 1363, 1388, 25],\n", + " [192, \"23:35\", \"01:05\", 1415, 1505, 90],\n", + " [193, \"23:46\", \"00:01\", 1426, 1441, 15],\n", + " [194, \"23:47\", \"00:33\", 1427, 1473, 46],\n", + " [195, \"23:52\", \"00:24\", 1432, 1464, 32],\n", + " [196, \"23:58\", \"00:38\", 1438, 1478, 40],\n", + " [197, \"00:02\", \"00:12\", 1442, 1452, 10],\n", + " [198, \"00:07\", \"00:39\", 1447, 1479, 32],\n", + " [199, \"00:25\", \"01:12\", 1465, 1512, 47],\n", "] # yapf:disable\n", "\n", "SAMPLE_SHIFTS_LARGE = [\n", - " [0, '04:18', '05:00', 258, 300, 42],\n", - " [1, '04:27', '05:08', 267, 308, 41],\n", - " [2, '04:29', '05:26', 269, 326, 57],\n", - " [3, '04:29', '04:55', 269, 295, 26],\n", - " [4, '04:30', '04:53', 270, 293, 23],\n", - " [5, '04:30', '04:51', 270, 291, 21],\n", - " [6, '04:31', '04:53', 271, 293, 22],\n", - " [7, '04:33', '05:15', 273, 315, 42],\n", - " [8, '04:34', '04:44', 274, 284, 10],\n", - " [9, '04:34', '05:03', 274, 303, 29],\n", - " [10, '04:35', '04:50', 275, 290, 15],\n", - " [11, '04:36', '04:46', 276, 286, 10],\n", - " [12, '04:37', '05:18', 277, 318, 41],\n", - " [13, '04:41', '05:13', 281, 313, 32],\n", - " [14, '04:42', '05:23', 282, 323, 41],\n", - " [15, '04:43', '04:53', 283, 293, 10],\n", - " [16, '04:44', '05:45', 284, 345, 61],\n", - " [17, '04:45', '05:11', 285, 311, 26],\n", - " [18, '04:46', '05:01', 286, 301, 15],\n", - " [19, '04:46', '04:56', 286, 296, 10],\n", - " [20, '04:47', '05:14', 287, 314, 27],\n", - " [21, '04:48', '05:30', 288, 330, 42],\n", - " [22, '04:49', '05:41', 289, 341, 52],\n", - " [23, '04:49', '05:18', 289, 318, 29],\n", - " [24, '04:50', '05:33', 290, 333, 43],\n", - " [25, '04:52', '05:56', 292, 356, 64],\n", - " [26, '04:52', '05:07', 292, 307, 15],\n", - " [27, '04:53', '05:19', 293, 319, 26],\n", - " [28, '04:53', '05:23', 293, 323, 30],\n", - " [29, '04:55', '05:27', 295, 327, 32],\n", - " [30, '04:57', '05:38', 297, 338, 41],\n", - " [31, '05:00', '06:00', 300, 360, 60],\n", - " [32, '05:00', '05:54', 300, 354, 54],\n", - " [33, '05:01', '05:33', 301, 333, 32],\n", - " [34, '05:01', '05:26', 301, 326, 25],\n", - " [35, '05:02', '05:29', 302, 329, 27],\n", - " [36, '05:02', '05:12', 302, 312, 10],\n", - " [37, '05:03', '05:45', 303, 345, 42],\n", - " [38, '05:03', '05:18', 303, 318, 15],\n", - " [39, '05:03', '06:28', 303, 388, 85],\n", - " [40, '05:03', '05:13', 303, 313, 10],\n", - " [41, '05:04', '06:24', 304, 384, 80],\n", - " [42, '05:07', '05:44', 307, 344, 37],\n", - " [43, '05:08', '05:48', 308, 348, 40],\n", - " [44, '05:10', '06:06', 310, 366, 56],\n", - " [45, '05:11', '05:37', 311, 337, 26],\n", - " [46, '05:11', '05:53', 311, 353, 42],\n", - " [47, '05:13', '06:15', 313, 375, 62],\n", - " [48, '05:13', '05:38', 313, 338, 25],\n", - " [49, '05:16', '05:44', 316, 344, 28],\n", - " [50, '05:17', '05:27', 317, 327, 10],\n", - " [51, '05:18', '06:40', 318, 400, 82],\n", - " [52, '05:18', '06:03', 318, 363, 45],\n", - " [53, '05:18', '06:11', 318, 371, 53],\n", - " [54, '05:18', '06:00', 318, 360, 42],\n", - " [55, '05:19', '06:34', 319, 394, 75],\n", - " [56, '05:20', '06:17', 320, 377, 57],\n", - " [57, '05:22', '05:59', 322, 359, 37],\n", - " [58, '05:24', '05:48', 324, 348, 24],\n", - " [59, '05:25', '05:40', 325, 340, 15],\n", - " [60, '05:26', '06:08', 326, 368, 42],\n", - " [61, '05:27', '06:30', 327, 390, 63],\n", - " [62, '05:27', '05:54', 327, 354, 27],\n", - " [63, '05:28', '05:53', 328, 353, 25],\n", - " [64, '05:29', 
'05:44', 329, 344, 15],\n", - " [65, '05:30', '05:40', 330, 340, 10],\n", - " [66, '05:30', '05:40', 330, 340, 10],\n", - " [67, '05:30', '05:40', 330, 340, 10],\n", - " [68, '05:32', '06:53', 332, 413, 81],\n", - " [69, '05:33', '07:00', 333, 420, 87],\n", - " [70, '05:33', '06:15', 333, 375, 42],\n", - " [71, '05:33', '05:47', 333, 347, 14],\n", - " [72, '05:37', '06:13', 337, 373, 36],\n", - " [73, '05:37', '06:05', 337, 365, 28],\n", - " [74, '05:38', '06:33', 338, 393, 55],\n", - " [75, '05:38', '06:04', 338, 364, 26],\n", - " [76, '05:38', '06:18', 338, 378, 40],\n", - " [77, '05:39', '05:54', 339, 354, 15],\n", - " [78, '05:40', '05:56', 340, 356, 16],\n", - " [79, '05:40', '06:41', 340, 401, 61],\n", - " [80, '05:40', '05:50', 340, 350, 10],\n", - " [81, '05:41', '06:23', 341, 383, 42],\n", - " [82, '05:41', '06:01', 341, 361, 20],\n", - " [83, '05:43', '06:08', 343, 368, 25],\n", - " [84, '05:44', '07:10', 344, 430, 86],\n", - " [85, '05:44', '05:55', 344, 355, 11],\n", - " [86, '05:45', '06:44', 345, 404, 59],\n", - " [87, '05:47', '06:17', 347, 377, 30],\n", - " [88, '05:48', '07:08', 348, 428, 80],\n", - " [89, '05:48', '06:30', 348, 390, 42],\n", - " [90, '05:50', '06:50', 350, 410, 60],\n", - " [91, '05:50', '06:00', 350, 360, 10],\n", - " [92, '05:50', '06:00', 350, 360, 10],\n", - " [93, '05:50', '06:51', 350, 411, 61],\n", - " [94, '05:52', '06:33', 352, 393, 41],\n", - " [95, '05:52', '06:36', 352, 396, 44],\n", - " [96, '05:52', '06:23', 352, 383, 31],\n", - " [97, '05:54', '06:14', 354, 374, 20],\n", - " [98, '05:54', '07:20', 354, 440, 86],\n", - " [99, '05:55', '06:40', 355, 400, 45],\n", - " [100, '05:55', '06:27', 355, 387, 32],\n", - " [101, '05:56', '06:35', 356, 395, 39],\n", - " [102, '05:56', '06:06', 356, 366, 10],\n", - " [103, '05:57', '06:21', 357, 381, 24],\n", - " [104, '05:58', '07:23', 358, 443, 85],\n", - " [105, '05:58', '06:23', 358, 383, 25],\n", - " [106, '05:58', '06:08', 358, 368, 10],\n", - " [107, '05:58', '06:43', 358, 403, 45],\n", - " [108, '06:00', '06:10', 360, 370, 10],\n", - " [109, '06:00', '06:16', 360, 376, 16],\n", - " [110, '06:00', '07:01', 360, 421, 61],\n", - " [111, '06:01', '07:00', 361, 420, 59],\n", - " [112, '06:01', '06:13', 361, 373, 12],\n", - " [113, '06:01', '06:45', 361, 405, 44],\n", - " [114, '06:03', '06:50', 363, 410, 47],\n", - " [115, '06:04', '06:37', 364, 397, 33],\n", - " [116, '06:04', '07:30', 364, 450, 86],\n", - " [117, '06:05', '06:24', 365, 384, 19],\n", - " [118, '06:06', '06:51', 366, 411, 45],\n", - " [119, '06:07', '06:43', 367, 403, 36],\n", - " [120, '06:08', '07:30', 368, 450, 82],\n", - " [121, '06:10', '06:20', 370, 380, 10],\n", - " [122, '06:10', '07:17', 370, 437, 67],\n", - " [123, '06:11', '06:54', 371, 414, 43],\n", - " [124, '06:11', '06:21', 371, 381, 10],\n", - " [125, '06:13', '06:38', 373, 398, 25],\n", - " [126, '06:13', '06:58', 373, 418, 45],\n", - " [127, '06:13', '06:53', 373, 413, 40],\n", - " [128, '06:14', '07:03', 374, 423, 49],\n", - " [129, '06:14', '06:47', 374, 407, 33],\n", - " [130, '06:14', '07:40', 374, 460, 86],\n", - " [131, '06:15', '07:15', 375, 435, 60],\n", - " [132, '06:16', '06:28', 376, 388, 12],\n", - " [133, '06:16', '06:26', 376, 386, 10],\n", - " [134, '06:17', '06:34', 377, 394, 17],\n", - " [135, '06:18', '07:06', 378, 426, 48],\n", - " [136, '06:18', '07:38', 378, 458, 80],\n", - " [137, '06:18', '07:02', 378, 422, 44],\n", - " [138, '06:19', '06:53', 379, 413, 34],\n", - " [139, '06:20', '07:25', 380, 445, 65],\n", - " [140, '06:20', '06:36', 380, 
396, 16],\n", - " [141, '06:20', '06:30', 380, 390, 10],\n", - " [142, '06:20', '06:30', 380, 390, 10],\n", - " [143, '06:21', '06:49', 381, 409, 28],\n", - " [144, '06:22', '07:06', 382, 426, 44],\n", - " [145, '06:24', '07:50', 384, 470, 86],\n", - " [146, '06:24', '06:57', 384, 417, 33],\n", - " [147, '06:26', '07:45', 386, 465, 79],\n", - " [148, '06:26', '07:10', 386, 430, 44],\n", - " [149, '06:27', '06:44', 387, 404, 17],\n", - " [150, '06:28', '06:53', 388, 413, 25],\n", - " [151, '06:28', '07:14', 388, 434, 46],\n", - " [152, '06:29', '07:03', 389, 423, 34],\n", - " [153, '06:30', '06:40', 390, 400, 10],\n", - " [154, '06:30', '07:37', 390, 457, 67],\n", - " [155, '06:31', '06:43', 391, 403, 12],\n", - " [156, '06:33', '07:14', 393, 434, 41],\n", - " [157, '06:33', '07:53', 393, 473, 80],\n", - " [158, '06:34', '08:16', 394, 496, 102],\n", - " [159, '06:34', '07:09', 394, 429, 35],\n", - " [160, '06:34', '07:07', 394, 427, 33],\n", - " [161, '06:36', '07:21', 396, 441, 45],\n", - " [162, '06:37', '07:22', 397, 442, 45],\n", - " [163, '06:37', '06:54', 397, 414, 17],\n", - " [164, '06:38', '07:30', 398, 450, 52],\n", - " [165, '06:38', '07:18', 398, 438, 40],\n", - " [166, '06:39', '07:33', 399, 453, 54],\n", - " [167, '06:40', '07:52', 400, 472, 72],\n", - " [168, '06:40', '06:50', 400, 410, 10],\n", - " [169, '06:40', '07:22', 400, 442, 42],\n", - " [170, '06:40', '06:56', 400, 416, 16],\n", - " [171, '06:41', '08:00', 401, 480, 79],\n", - " [172, '06:42', '07:26', 402, 446, 44],\n", - " [173, '06:42', '07:13', 402, 433, 31],\n", - " [174, '06:43', '07:08', 403, 428, 25],\n", - " [175, '06:43', '07:30', 403, 450, 47],\n", - " [176, '06:43', '07:23', 403, 443, 40],\n", - " [177, '06:44', '07:17', 404, 437, 33],\n", - " [178, '06:44', '08:13', 404, 493, 89],\n", - " [179, '06:46', '07:01', 406, 421, 15],\n", - " [180, '06:46', '06:58', 406, 418, 12],\n", - " [181, '06:47', '07:04', 407, 424, 17],\n", - " [182, '06:48', '08:15', 408, 495, 87],\n", - " [183, '06:48', '07:34', 408, 454, 46],\n", - " [184, '06:48', '07:37', 408, 457, 49],\n", - " [185, '06:49', '07:43', 409, 463, 54],\n", - " [186, '06:50', '08:00', 410, 480, 70],\n", - " [187, '06:50', '07:00', 410, 420, 10],\n", - " [188, '06:50', '07:05', 410, 425, 15],\n", - " [189, '06:51', '07:18', 411, 438, 27],\n", - " [190, '06:52', '07:36', 412, 456, 44],\n", - " [191, '06:53', '07:37', 413, 457, 44],\n", - " [192, '06:54', '08:20', 414, 500, 86],\n", - " [193, '06:54', '07:27', 414, 447, 33],\n", - " [194, '06:54', '07:20', 414, 440, 26],\n", - " [195, '06:56', '08:23', 416, 503, 87],\n", - " [196, '06:57', '07:12', 417, 432, 15],\n", - " [197, '06:57', '07:58', 417, 478, 61],\n", - " [198, '06:57', '07:45', 417, 465, 48],\n", - " [199, '06:57', '07:40', 417, 460, 43],\n", - " [200, '06:58', '07:23', 418, 443, 25],\n", - " [201, '06:59', '07:53', 419, 473, 54],\n", - " [202, '06:59', '08:07', 419, 487, 68],\n", - " [203, '07:00', '07:10', 420, 430, 10],\n", - " [204, '07:00', '07:16', 420, 436, 16],\n", - " [205, '07:01', '08:30', 421, 510, 89],\n", - " [206, '07:01', '07:13', 421, 433, 12],\n", - " [207, '07:01', '07:43', 421, 463, 42],\n", - " [208, '07:03', '08:30', 423, 510, 87],\n", - " [209, '07:04', '07:37', 424, 457, 33],\n", - " [210, '07:04', '07:44', 424, 464, 40],\n", - " [211, '07:05', '07:52', 425, 472, 47],\n", - " [212, '07:05', '08:05', 425, 485, 60],\n", - " [213, '07:05', '07:46', 425, 466, 41],\n", - " [214, '07:06', '07:51', 426, 471, 45],\n", - " [215, '07:07', '08:08', 427, 488, 61],\n", - " [216, 
'07:07', '07:52', 427, 472, 45],\n", - " [217, '07:07', '08:16', 427, 496, 69],\n", - " [218, '07:07', '07:27', 427, 447, 20],\n", - " [219, '07:09', '07:50', 429, 470, 41],\n", - " [220, '07:09', '08:40', 429, 520, 91],\n", - " [221, '07:09', '08:03', 429, 483, 54],\n", - " [222, '07:10', '07:20', 430, 440, 10],\n", - " [223, '07:11', '08:36', 431, 516, 85],\n", - " [224, '07:12', '08:00', 432, 480, 48],\n", - " [225, '07:12', '07:47', 432, 467, 35],\n", - " [226, '07:13', '07:54', 433, 474, 41],\n", - " [227, '07:13', '07:38', 433, 458, 25],\n", - " [228, '07:14', '07:59', 434, 479, 45],\n", - " [229, '07:16', '08:50', 436, 530, 94],\n", - " [230, '07:16', '07:28', 436, 448, 12],\n", - " [231, '07:17', '07:35', 437, 455, 18],\n", - " [232, '07:17', '07:58', 437, 478, 41],\n", - " [233, '07:18', '08:06', 438, 486, 48],\n", - " [234, '07:18', '08:44', 438, 524, 86],\n", - " [235, '07:19', '08:13', 439, 493, 54],\n", - " [236, '07:20', '08:02', 440, 482, 42],\n", - " [237, '07:20', '08:07', 440, 487, 47],\n", - " [238, '07:20', '07:30', 440, 450, 10],\n", - " [239, '07:20', '07:57', 440, 477, 37],\n", - " [240, '07:20', '07:36', 440, 456, 16],\n", - " [241, '07:21', '07:48', 441, 468, 27],\n", - " [242, '07:22', '08:06', 442, 486, 44],\n", - " [243, '07:22', '08:25', 442, 505, 63],\n", - " [244, '07:24', '08:27', 444, 507, 63],\n", - " [245, '07:24', '08:05', 444, 485, 41],\n", - " [246, '07:26', '08:23', 446, 503, 57],\n", - " [247, '07:26', '08:52', 446, 532, 86],\n", - " [248, '07:27', '08:07', 447, 487, 40],\n", - " [249, '07:27', '07:42', 447, 462, 15],\n", - " [250, '07:27', '08:15', 447, 495, 48],\n", - " [251, '07:28', '07:53', 448, 473, 25],\n", - " [252, '07:28', '08:09', 448, 489, 41],\n", - " [253, '07:28', '07:38', 448, 458, 10],\n", - " [254, '07:30', '08:35', 450, 515, 65],\n", - " [255, '07:31', '07:43', 451, 463, 12],\n", - " [256, '07:32', '08:13', 452, 493, 41],\n", - " [257, '07:34', '09:00', 454, 540, 86],\n", - " [258, '07:34', '08:33', 454, 513, 59],\n", - " [259, '07:34', '09:04', 454, 544, 90],\n", - " [260, '07:35', '08:22', 455, 502, 47],\n", - " [261, '07:35', '07:45', 455, 465, 10],\n", - " [262, '07:35', '08:16', 455, 496, 41],\n", - " [263, '07:36', '08:17', 456, 497, 41],\n", - " [264, '07:36', '08:36', 456, 516, 60],\n", - " [265, '07:37', '07:50', 457, 470, 13],\n", - " [266, '07:40', '07:56', 460, 476, 16],\n", - " [267, '07:40', '08:20', 460, 500, 40],\n", - " [268, '07:40', '08:45', 460, 525, 65],\n", - " [269, '07:41', '08:39', 461, 519, 58],\n", - " [270, '07:41', '07:51', 461, 471, 10],\n", - " [271, '07:42', '08:30', 462, 510, 48],\n", - " [272, '07:42', '08:21', 462, 501, 39],\n", - " [273, '07:43', '08:08', 463, 488, 25],\n", - " [274, '07:43', '08:24', 463, 504, 41],\n", - " [275, '07:44', '09:10', 464, 550, 86],\n", - " [276, '07:44', '08:43', 464, 523, 59],\n", - " [277, '07:46', '08:28', 466, 508, 42],\n", - " [278, '07:46', '07:58', 466, 478, 12],\n", - " [279, '07:47', '08:00', 467, 480, 13],\n", - " [280, '07:48', '09:14', 468, 554, 86],\n", - " [281, '07:49', '08:32', 469, 512, 43],\n", - " [282, '07:50', '08:55', 470, 535, 65],\n", - " [283, '07:50', '08:00', 470, 480, 10],\n", - " [284, '07:50', '08:37', 470, 517, 47],\n", - " [285, '07:50', '08:26', 470, 506, 36],\n", - " [286, '07:51', '08:18', 471, 498, 27],\n", - " [287, '07:52', '08:21', 472, 501, 29],\n", - " [288, '07:53', '08:35', 473, 515, 42],\n", - " [289, '07:54', '09:19', 474, 559, 85],\n", - " [290, '07:55', '08:53', 475, 533, 58],\n", - " [291, '07:56', '08:54', 476, 534, 
58],\n", - " [292, '07:57', '08:39', 477, 519, 42],\n", - " [293, '07:57', '08:10', 477, 490, 13],\n", - " [294, '07:58', '08:45', 478, 525, 47],\n", - " [295, '07:58', '08:23', 478, 503, 25],\n", - " [296, '08:00', '08:10', 480, 490, 10],\n", - " [297, '08:00', '09:05', 480, 545, 65],\n", - " [298, '08:00', '08:16', 480, 496, 16],\n", - " [299, '08:00', '08:35', 480, 515, 35],\n", - " [300, '08:01', '08:13', 481, 493, 12],\n", - " [301, '08:01', '08:43', 481, 523, 42],\n", - " [302, '08:03', '09:26', 483, 566, 83],\n", - " [303, '08:04', '09:29', 484, 569, 85],\n", - " [304, '08:05', '08:21', 485, 501, 16],\n", - " [305, '08:05', '08:47', 485, 527, 42],\n", - " [306, '08:06', '08:51', 486, 531, 45],\n", - " [307, '08:06', '09:03', 486, 543, 57],\n", - " [308, '08:07', '08:20', 487, 500, 13],\n", - " [309, '08:08', '08:55', 488, 535, 47],\n", - " [310, '08:08', '08:50', 488, 530, 42],\n", - " [311, '08:10', '08:45', 490, 525, 35],\n", - " [312, '08:10', '09:15', 490, 555, 65],\n", - " [313, '08:10', '08:20', 490, 500, 10],\n", - " [314, '08:11', '09:41', 491, 581, 90],\n", - " [315, '08:12', '08:55', 492, 535, 43],\n", - " [316, '08:13', '08:38', 493, 518, 25],\n", - " [317, '08:14', '09:38', 494, 578, 84],\n", - " [318, '08:15', '08:30', 495, 510, 15],\n", - " [319, '08:16', '08:30', 496, 510, 14],\n", - " [320, '08:16', '08:28', 496, 508, 12],\n", - " [321, '08:16', '09:00', 496, 540, 44],\n", - " [322, '08:17', '09:13', 497, 553, 56],\n", - " [323, '08:18', '09:16', 498, 556, 58],\n", - " [324, '08:18', '09:05', 498, 545, 47],\n", - " [325, '08:20', '08:36', 500, 516, 16],\n", - " [326, '08:20', '08:55', 500, 535, 35],\n", - " [327, '08:20', '09:05', 500, 545, 45],\n", - " [328, '08:20', '08:30', 500, 510, 10],\n", - " [329, '08:20', '09:25', 500, 565, 65],\n", - " [330, '08:21', '08:38', 501, 518, 17],\n", - " [331, '08:21', '08:47', 501, 527, 26],\n", - " [332, '08:22', '08:45', 502, 525, 23],\n", - " [333, '08:23', '09:10', 503, 550, 47],\n", - " [334, '08:24', '09:48', 504, 588, 84],\n", - " [335, '08:26', '08:46', 506, 526, 20],\n", - " [336, '08:27', '09:07', 507, 547, 40],\n", - " [337, '08:28', '08:50', 508, 530, 22],\n", - " [338, '08:28', '09:56', 508, 596, 88],\n", - " [339, '08:28', '09:23', 508, 563, 55],\n", - " [340, '08:29', '09:20', 509, 560, 51],\n", - " [341, '08:30', '09:05', 510, 545, 35],\n", - " [342, '08:30', '08:45', 510, 525, 15],\n", - " [343, '08:30', '08:40', 510, 520, 10],\n", - " [344, '08:30', '09:35', 510, 575, 65],\n", - " [345, '08:31', '08:43', 511, 523, 12],\n", - " [346, '08:31', '09:13', 511, 553, 42],\n", - " [347, '08:34', '09:58', 514, 598, 84],\n", - " [348, '08:35', '08:55', 515, 535, 20],\n", - " [349, '08:35', '09:15', 515, 555, 40],\n", - " [350, '08:35', '08:45', 515, 525, 10],\n", - " [351, '08:36', '08:46', 516, 526, 10],\n", - " [352, '08:36', '09:00', 516, 540, 24],\n", - " [353, '08:38', '09:20', 518, 560, 42],\n", - " [354, '08:38', '09:35', 518, 575, 57],\n", - " [355, '08:38', '09:14', 518, 554, 36],\n", - " [356, '08:39', '09:33', 519, 573, 54],\n", - " [357, '08:40', '09:45', 520, 585, 65],\n", - " [358, '08:40', '08:50', 520, 530, 10],\n", - " [359, '08:40', '08:56', 520, 536, 16],\n", - " [360, '08:42', '09:25', 522, 565, 43],\n", - " [361, '08:43', '09:08', 523, 548, 25],\n", - " [362, '08:44', '09:35', 524, 575, 51],\n", - " [363, '08:45', '09:00', 525, 540, 15],\n", - " [364, '08:45', '09:05', 525, 545, 20],\n", - " [365, '08:46', '09:24', 526, 564, 38],\n", - " [366, '08:46', '08:58', 526, 538, 12],\n", - " [367, '08:46', 
'09:30', 526, 570, 44],\n", - " [368, '08:48', '10:11', 528, 611, 83],\n", - " [369, '08:48', '10:13', 528, 613, 85],\n", - " [370, '08:49', '09:43', 529, 583, 54],\n", - " [371, '08:50', '09:30', 530, 570, 40],\n", - " [372, '08:50', '10:00', 530, 600, 70],\n", - " [373, '08:50', '09:00', 530, 540, 10],\n", - " [374, '08:51', '09:17', 531, 557, 26],\n", - " [375, '08:53', '09:20', 533, 560, 27],\n", - " [376, '08:53', '09:35', 533, 575, 42],\n", - " [377, '08:55', '09:34', 535, 574, 39],\n", - " [378, '08:55', '09:15', 535, 555, 20],\n", - " [379, '08:58', '09:38', 538, 578, 40],\n", - " [380, '08:58', '10:26', 538, 626, 88],\n", - " [381, '08:59', '09:53', 539, 593, 54],\n", - " [382, '08:59', '09:50', 539, 590, 51],\n", - " [383, '09:00', '09:35', 540, 575, 35],\n", - " [384, '09:00', '09:16', 540, 556, 16],\n", - " [385, '09:00', '09:10', 540, 550, 10],\n", - " [386, '09:00', '09:16', 540, 556, 16],\n", - " [387, '09:01', '09:13', 541, 553, 12],\n", - " [388, '09:03', '09:45', 543, 585, 42],\n", - " [389, '09:03', '10:28', 543, 628, 85],\n", - " [390, '09:05', '09:44', 545, 584, 39],\n", - " [391, '09:05', '09:25', 545, 565, 20],\n", - " [392, '09:08', '09:53', 548, 593, 45],\n", - " [393, '09:08', '10:04', 548, 604, 56],\n", - " [394, '09:09', '10:03', 549, 603, 54],\n", - " [395, '09:10', '10:15', 550, 615, 65],\n", - " [396, '09:10', '09:20', 550, 560, 10],\n", - " [397, '09:11', '09:38', 551, 578, 27],\n", - " [398, '09:13', '10:00', 553, 600, 47],\n", - " [399, '09:14', '09:39', 554, 579, 25],\n", - " [400, '09:14', '10:05', 554, 605, 51],\n", - " [401, '09:15', '09:54', 555, 594, 39],\n", - " [402, '09:16', '09:28', 556, 568, 12],\n", - " [403, '09:18', '10:43', 558, 643, 85],\n", - " [404, '09:18', '10:41', 558, 641, 83],\n", - " [405, '09:18', '09:58', 558, 598, 40],\n", - " [406, '09:19', '10:13', 559, 613, 54],\n", - " [407, '09:20', '09:30', 560, 570, 10],\n", - " [408, '09:20', '09:36', 560, 576, 16],\n", - " [409, '09:21', '09:47', 561, 587, 26],\n", - " [410, '09:23', '10:30', 563, 630, 67],\n", - " [411, '09:23', '10:05', 563, 605, 42],\n", - " [412, '09:23', '09:49', 563, 589, 26],\n", - " [413, '09:24', '09:35', 564, 575, 11],\n", - " [414, '09:25', '09:35', 565, 575, 10],\n", - " [415, '09:25', '10:04', 565, 604, 39],\n", - " [416, '09:28', '10:08', 568, 608, 40],\n", - " [417, '09:29', '09:45', 569, 585, 16],\n", - " [418, '09:29', '10:20', 569, 620, 51],\n", - " [419, '09:29', '10:56', 569, 656, 87],\n", - " [420, '09:29', '10:23', 569, 623, 54],\n", - " [421, '09:30', '09:40', 570, 580, 10],\n", - " [422, '09:31', '09:43', 571, 583, 12],\n", - " [423, '09:33', '10:58', 573, 658, 85],\n", - " [424, '09:33', '10:15', 573, 615, 42],\n", - " [425, '09:34', '09:45', 574, 585, 11],\n", - " [426, '09:35', '10:14', 575, 614, 39],\n", - " [427, '09:38', '10:45', 578, 645, 67],\n", - " [428, '09:39', '10:33', 579, 633, 54],\n", - " [429, '09:40', '09:56', 580, 596, 16],\n", - " [430, '09:40', '09:50', 580, 590, 10],\n", - " [431, '09:41', '10:08', 581, 608, 27],\n", - " [432, '09:41', '10:23', 581, 623, 42],\n", - " [433, '09:44', '10:35', 584, 635, 51],\n", - " [434, '09:44', '11:11', 584, 671, 87],\n", - " [435, '09:44', '09:55', 584, 595, 11],\n", - " [436, '09:45', '10:24', 585, 624, 39],\n", - " [437, '09:46', '09:58', 586, 598, 12],\n", - " [438, '09:48', '10:30', 588, 630, 42],\n", - " [439, '09:48', '11:13', 588, 673, 85],\n", - " [440, '09:48', '10:04', 588, 604, 16],\n", - " [441, '09:49', '10:43', 589, 643, 54],\n", - " [442, '09:50', '10:00', 590, 600, 10],\n", - 
" [443, '09:51', '10:17', 591, 617, 26],\n", - " [444, '09:53', '10:49', 593, 649, 56],\n", - " [445, '09:53', '11:00', 593, 660, 67],\n", - " [446, '09:54', '10:05', 594, 605, 11],\n", - " [447, '09:55', '10:34', 595, 634, 39],\n", - " [448, '09:56', '10:38', 596, 638, 42],\n", - " [449, '09:57', '10:20', 597, 620, 23],\n", - " [450, '09:59', '11:26', 599, 686, 87],\n", - " [451, '09:59', '10:50', 599, 650, 51],\n", - " [452, '09:59', '10:53', 599, 653, 54],\n", - " [453, '10:00', '10:16', 600, 616, 16],\n", - " [454, '10:00', '10:10', 600, 610, 10],\n", - " [455, '10:01', '10:13', 601, 613, 12],\n", - " [456, '10:03', '11:28', 603, 688, 85],\n", - " [457, '10:03', '10:45', 603, 645, 42],\n", - " [458, '10:04', '10:15', 604, 615, 11],\n", - " [459, '10:05', '10:44', 605, 644, 39],\n", - " [460, '10:08', '11:15', 608, 675, 67],\n", - " [461, '10:09', '11:03', 609, 663, 54],\n", - " [462, '10:10', '10:20', 610, 620, 10],\n", - " [463, '10:11', '10:38', 611, 638, 27],\n", - " [464, '10:11', '10:53', 611, 653, 42],\n", - " [465, '10:14', '11:05', 614, 665, 51],\n", - " [466, '10:14', '11:41', 614, 701, 87],\n", - " [467, '10:14', '10:25', 614, 625, 11],\n", - " [468, '10:15', '10:54', 615, 654, 39],\n", - " [469, '10:16', '10:28', 616, 628, 12],\n", - " [470, '10:18', '11:43', 618, 703, 85],\n", - " [471, '10:18', '11:00', 618, 660, 42],\n", - " [472, '10:19', '11:13', 619, 673, 54],\n", - " [473, '10:20', '10:30', 620, 630, 10],\n", - " [474, '10:20', '10:36', 620, 636, 16],\n", - " [475, '10:21', '10:47', 621, 647, 26],\n", - " [476, '10:23', '11:30', 623, 690, 67],\n", - " [477, '10:23', '10:45', 623, 645, 22],\n", - " [478, '10:24', '10:35', 624, 635, 11],\n", - " [479, '10:25', '11:04', 625, 664, 39],\n", - " [480, '10:26', '11:08', 626, 668, 42],\n", - " [481, '10:29', '11:20', 629, 680, 51],\n", - " [482, '10:29', '11:23', 629, 683, 54],\n", - " [483, '10:29', '11:56', 629, 716, 87],\n", - " [484, '10:30', '10:40', 630, 640, 10],\n", - " [485, '10:31', '10:43', 631, 643, 12],\n", - " [486, '10:33', '11:15', 633, 675, 42],\n", - " [487, '10:33', '11:58', 633, 718, 85],\n", - " [488, '10:34', '10:45', 634, 645, 11],\n", - " [489, '10:35', '11:14', 635, 674, 39],\n", - " [490, '10:38', '11:45', 638, 705, 67],\n", - " [491, '10:39', '11:33', 639, 693, 54],\n", - " [492, '10:40', '10:50', 640, 650, 10],\n", - " [493, '10:40', '10:56', 640, 656, 16],\n", - " [494, '10:41', '11:23', 641, 683, 42],\n", - " [495, '10:41', '11:08', 641, 668, 27],\n", - " [496, '10:44', '12:11', 644, 731, 87],\n", - " [497, '10:44', '11:35', 644, 695, 51],\n", - " [498, '10:44', '10:55', 644, 655, 11],\n", - " [499, '10:45', '11:24', 645, 684, 39],\n", - " [500, '10:46', '10:58', 646, 658, 12],\n", - " [501, '10:48', '12:13', 648, 733, 85],\n", - " [502, '10:48', '11:30', 648, 690, 42],\n", - " [503, '10:49', '11:43', 649, 703, 54],\n", - " [504, '10:50', '11:00', 650, 660, 10],\n", - " [505, '10:51', '11:17', 651, 677, 26],\n", - " [506, '10:53', '12:00', 653, 720, 67],\n", - " [507, '10:53', '11:20', 653, 680, 27],\n", - " [508, '10:54', '11:05', 654, 665, 11],\n", - " [509, '10:55', '11:34', 655, 694, 39],\n", - " [510, '10:56', '11:38', 656, 698, 42],\n", - " [511, '10:59', '11:14', 659, 674, 15],\n", - " [512, '10:59', '12:26', 659, 746, 87],\n", - " [513, '10:59', '11:53', 659, 713, 54],\n", - " [514, '10:59', '11:50', 659, 710, 51],\n", - " [515, '11:00', '11:16', 660, 676, 16],\n", - " [516, '11:00', '11:10', 660, 670, 10],\n", - " [517, '11:01', '11:13', 661, 673, 12],\n", - " [518, '11:03', '11:45', 
663, 705, 42],\n", - " [519, '11:03', '12:28', 663, 748, 85],\n", - " [520, '11:04', '11:15', 664, 675, 11],\n", - " [521, '11:05', '11:44', 665, 704, 39],\n", - " [522, '11:08', '12:15', 668, 735, 67],\n", - " [523, '11:09', '12:03', 669, 723, 54],\n", - " [524, '11:10', '11:20', 670, 680, 10],\n", - " [525, '11:11', '11:38', 671, 698, 27],\n", - " [526, '11:11', '11:53', 671, 713, 42],\n", - " [527, '11:14', '11:25', 674, 685, 11],\n", - " [528, '11:14', '12:05', 674, 725, 51],\n", - " [529, '11:14', '12:38', 674, 758, 84],\n", - " [530, '11:14', '12:41', 674, 761, 87],\n", - " [531, '11:15', '11:54', 675, 714, 39],\n", - " [532, '11:16', '11:28', 676, 688, 12],\n", - " [533, '11:18', '12:00', 678, 720, 42],\n", - " [534, '11:19', '12:13', 679, 733, 54],\n", - " [535, '11:20', '11:30', 680, 690, 10],\n", - " [536, '11:20', '11:36', 680, 696, 16],\n", - " [537, '11:21', '11:47', 681, 707, 26],\n", - " [538, '11:23', '12:30', 683, 750, 67],\n", - " [539, '11:23', '11:49', 683, 709, 26],\n", - " [540, '11:24', '12:48', 684, 768, 84],\n", - " [541, '11:24', '11:35', 684, 695, 11],\n", - " [542, '11:25', '12:04', 685, 724, 39],\n", - " [543, '11:26', '12:08', 686, 728, 42],\n", - " [544, '11:29', '11:44', 689, 704, 15],\n", - " [545, '11:29', '12:23', 689, 743, 54],\n", - " [546, '11:29', '12:20', 689, 740, 51],\n", - " [547, '11:29', '12:54', 689, 774, 85],\n", - " [548, '11:30', '11:40', 690, 700, 10],\n", - " [549, '11:31', '11:43', 691, 703, 12],\n", - " [550, '11:33', '12:15', 693, 735, 42],\n", - " [551, '11:34', '12:58', 694, 778, 84],\n", - " [552, '11:34', '11:45', 694, 705, 11],\n", - " [553, '11:35', '12:14', 695, 734, 39],\n", - " [554, '11:38', '12:45', 698, 765, 67],\n", - " [555, '11:39', '12:33', 699, 753, 54],\n", - " [556, '11:40', '11:56', 700, 716, 16],\n", - " [557, '11:40', '11:50', 700, 710, 10],\n", - " [558, '11:41', '12:08', 701, 728, 27],\n", - " [559, '11:41', '12:23', 701, 743, 42],\n", - " [560, '11:44', '11:55', 704, 715, 11],\n", - " [561, '11:44', '13:14', 704, 794, 90],\n", - " [562, '11:44', '13:08', 704, 788, 84],\n", - " [563, '11:44', '12:35', 704, 755, 51],\n", - " [564, '11:45', '12:24', 705, 744, 39],\n", - " [565, '11:46', '11:58', 706, 718, 12],\n", - " [566, '11:48', '12:30', 708, 750, 42],\n", - " [567, '11:49', '12:43', 709, 763, 54],\n", - " [568, '11:50', '12:00', 710, 720, 10],\n", - " [569, '11:51', '12:17', 711, 737, 26],\n", - " [570, '11:53', '12:49', 713, 769, 56],\n", - " [571, '11:53', '13:00', 713, 780, 67],\n", - " [572, '11:54', '13:18', 714, 798, 84],\n", - " [573, '11:54', '12:05', 714, 725, 11],\n", - " [574, '11:55', '12:40', 715, 760, 45],\n", - " [575, '11:55', '12:34', 715, 754, 39],\n", - " [576, '11:56', '12:35', 716, 755, 39],\n", - " [577, '11:57', '12:20', 717, 740, 23],\n", - " [578, '11:58', '12:29', 718, 749, 31],\n", - " [579, '11:59', '12:50', 719, 770, 51],\n", - " [580, '11:59', '12:53', 719, 773, 54],\n", - " [581, '11:59', '13:24', 719, 804, 85],\n", - " [582, '11:59', '12:14', 719, 734, 15],\n", - " [583, '12:00', '12:16', 720, 736, 16],\n", - " [584, '12:00', '12:10', 720, 730, 10],\n", - " [585, '12:01', '12:45', 721, 765, 44],\n", - " [586, '12:01', '12:13', 721, 733, 12],\n", - " [587, '12:03', '12:50', 723, 770, 47],\n", - " [588, '12:04', '12:15', 724, 735, 11],\n", - " [589, '12:04', '13:04', 724, 784, 60],\n", - " [590, '12:04', '13:28', 724, 808, 84],\n", - " [591, '12:05', '12:44', 725, 764, 39],\n", - " [592, '12:08', '13:11', 728, 791, 63],\n", - " [593, '12:08', '12:39', 728, 759, 31],\n", - " [594, 
'12:09', '13:03', 729, 783, 54],\n", - " [595, '12:10', '12:20', 730, 740, 10],\n", - " [596, '12:11', '12:55', 731, 775, 44],\n", - " [597, '12:11', '12:38', 731, 758, 27],\n", - " [598, '12:14', '13:05', 734, 785, 51],\n", - " [599, '12:14', '12:25', 734, 745, 11],\n", - " [600, '12:14', '13:44', 734, 824, 90],\n", - " [601, '12:14', '13:38', 734, 818, 84],\n", - " [602, '12:15', '12:54', 735, 774, 39],\n", - " [603, '12:16', '12:28', 736, 748, 12],\n", - " [604, '12:18', '13:00', 738, 780, 42],\n", - " [605, '12:19', '13:13', 739, 793, 54],\n", - " [606, '12:20', '12:30', 740, 750, 10],\n", - " [607, '12:20', '13:31', 740, 811, 71],\n", - " [608, '12:20', '12:30', 740, 750, 10],\n", - " [609, '12:20', '12:36', 740, 756, 16],\n", - " [610, '12:21', '12:47', 741, 767, 26],\n", - " [611, '12:23', '12:45', 743, 765, 22],\n", - " [612, '12:24', '12:35', 744, 755, 11],\n", - " [613, '12:24', '13:48', 744, 828, 84],\n", - " [614, '12:25', '13:10', 745, 790, 45],\n", - " [615, '12:25', '13:04', 745, 784, 39],\n", - " [616, '12:26', '13:05', 746, 785, 39],\n", - " [617, '12:28', '13:54', 748, 834, 86],\n", - " [618, '12:28', '12:38', 748, 758, 10],\n", - " [619, '12:28', '13:15', 748, 795, 47],\n", - " [620, '12:29', '13:23', 749, 803, 54],\n", - " [621, '12:30', '13:41', 750, 821, 71],\n", - " [622, '12:30', '12:40', 750, 760, 10],\n", - " [623, '12:31', '13:15', 751, 795, 44],\n", - " [624, '12:31', '12:43', 751, 763, 12],\n", - " [625, '12:33', '12:48', 753, 768, 15],\n", - " [626, '12:33', '13:20', 753, 800, 47],\n", - " [627, '12:34', '13:58', 754, 838, 84],\n", - " [628, '12:34', '13:34', 754, 814, 60],\n", - " [629, '12:34', '12:45', 754, 765, 11],\n", - " [630, '12:35', '13:14', 755, 794, 39],\n", - " [631, '12:38', '13:25', 758, 805, 47],\n", - " [632, '12:38', '13:25', 758, 805, 47],\n", - " [633, '12:38', '14:04', 758, 844, 86],\n", - " [634, '12:39', '13:33', 759, 813, 54],\n", - " [635, '12:40', '13:51', 760, 831, 71],\n", - " [636, '12:40', '12:50', 760, 770, 10],\n", - " [637, '12:40', '12:56', 760, 776, 16],\n", - " [638, '12:41', '13:08', 761, 788, 27],\n", - " [639, '12:43', '13:30', 763, 810, 47],\n", - " [640, '12:44', '12:55', 764, 775, 11],\n", - " [641, '12:44', '14:08', 764, 848, 84],\n", - " [642, '12:45', '13:24', 765, 804, 39],\n", - " [643, '12:46', '12:58', 766, 778, 12],\n", - " [644, '12:46', '13:21', 766, 801, 35],\n", - " [645, '12:48', '14:14', 768, 854, 86],\n", - " [646, '12:48', '13:35', 768, 815, 47],\n", - " [647, '12:48', '12:58', 768, 778, 10],\n", - " [648, '12:48', '13:35', 768, 815, 47],\n", - " [649, '12:49', '13:43', 769, 823, 54],\n", - " [650, '12:50', '14:01', 770, 841, 71],\n", - " [651, '12:50', '13:00', 770, 780, 10],\n", - " [652, '12:50', '13:00', 770, 780, 10],\n", - " [653, '12:51', '13:17', 771, 797, 26],\n", - " [654, '12:53', '13:20', 773, 800, 27],\n", - " [655, '12:53', '13:24', 773, 804, 31],\n", - " [656, '12:53', '13:40', 773, 820, 47],\n", - " [657, '12:54', '14:18', 774, 858, 84],\n", - " [658, '12:54', '13:05', 774, 785, 11],\n", - " [659, '12:55', '13:34', 775, 814, 39],\n", - " [660, '12:58', '14:24', 778, 864, 86],\n", - " [661, '12:58', '13:25', 778, 805, 27],\n", - " [662, '12:58', '13:45', 778, 825, 47],\n", - " [663, '12:58', '13:45', 778, 825, 47],\n", - " [664, '12:59', '13:53', 779, 833, 54],\n", - " [665, '13:00', '13:10', 780, 790, 10],\n", - " [666, '13:00', '13:16', 780, 796, 16],\n", - " [667, '13:00', '14:11', 780, 851, 71],\n", - " [668, '13:01', '13:13', 781, 793, 12],\n", - " [669, '13:03', '13:34', 783, 814, 
31],\n", - " [670, '13:03', '13:50', 783, 830, 47],\n", - " [671, '13:04', '13:15', 784, 795, 11],\n", - " [672, '13:04', '14:28', 784, 868, 84],\n", - " [673, '13:05', '13:44', 785, 824, 39],\n", - " [674, '13:08', '13:55', 788, 835, 47],\n", - " [675, '13:08', '14:34', 788, 874, 86],\n", - " [676, '13:08', '13:55', 788, 835, 47],\n", - " [677, '13:09', '14:03', 789, 843, 54],\n", - " [678, '13:10', '13:20', 790, 800, 10],\n", - " [679, '13:10', '14:21', 790, 861, 71],\n", - " [680, '13:13', '14:00', 793, 840, 47],\n", - " [681, '13:13', '13:40', 793, 820, 27],\n", - " [682, '13:14', '14:38', 794, 878, 84],\n", - " [683, '13:14', '13:25', 794, 805, 11],\n", - " [684, '13:15', '13:54', 795, 834, 39],\n", - " [685, '13:16', '13:28', 796, 808, 12],\n", - " [686, '13:18', '14:05', 798, 845, 47],\n", - " [687, '13:18', '14:44', 798, 884, 86],\n", - " [688, '13:18', '14:05', 798, 845, 47],\n", - " [689, '13:19', '14:13', 799, 853, 54],\n", - " [690, '13:20', '13:36', 800, 816, 16],\n", - " [691, '13:20', '14:31', 800, 871, 71],\n", - " [692, '13:20', '13:30', 800, 810, 10],\n", - " [693, '13:21', '13:47', 801, 827, 26],\n", - " [694, '13:23', '14:10', 803, 850, 47],\n", - " [695, '13:23', '13:49', 803, 829, 26],\n", - " [696, '13:24', '14:48', 804, 888, 84],\n", - " [697, '13:24', '13:35', 804, 815, 11],\n", - " [698, '13:25', '14:04', 805, 844, 39],\n", - " [699, '13:28', '14:15', 808, 855, 47],\n", - " [700, '13:28', '14:54', 808, 894, 86],\n", - " [701, '13:28', '13:55', 808, 835, 27],\n", - " [702, '13:28', '14:15', 808, 855, 47],\n", - " [703, '13:29', '14:23', 809, 863, 54],\n", - " [704, '13:30', '13:40', 810, 820, 10],\n", - " [705, '13:30', '14:41', 810, 881, 71],\n", - " [706, '13:31', '13:43', 811, 823, 12],\n", - " [707, '13:33', '14:20', 813, 860, 47],\n", - " [708, '13:34', '14:58', 814, 898, 84],\n", - " [709, '13:34', '13:45', 814, 825, 11],\n", - " [710, '13:35', '14:14', 815, 854, 39],\n", - " [711, '13:38', '14:25', 818, 865, 47],\n", - " [712, '13:38', '14:25', 818, 865, 47],\n", - " [713, '13:38', '15:04', 818, 904, 86],\n", - " [714, '13:39', '14:33', 819, 873, 54],\n", - " [715, '13:40', '13:50', 820, 830, 10],\n", - " [716, '13:40', '13:56', 820, 836, 16],\n", - " [717, '13:40', '14:51', 820, 891, 71],\n", - " [718, '13:43', '14:30', 823, 870, 47],\n", - " [719, '13:43', '14:10', 823, 850, 27],\n", - " [720, '13:44', '15:09', 824, 909, 85],\n", - " [721, '13:44', '13:55', 824, 835, 11],\n", - " [722, '13:45', '14:24', 825, 864, 39],\n", - " [723, '13:46', '13:58', 826, 838, 12],\n", - " [724, '13:48', '14:35', 828, 875, 47],\n", - " [725, '13:48', '15:14', 828, 914, 86],\n", - " [726, '13:48', '14:35', 828, 875, 47],\n", - " [727, '13:49', '14:43', 829, 883, 54],\n", - " [728, '13:50', '14:00', 830, 840, 10],\n", - " [729, '13:50', '15:01', 830, 901, 71],\n", - " [730, '13:51', '14:17', 831, 857, 26],\n", - " [731, '13:53', '14:40', 833, 880, 47],\n", - " [732, '13:53', '14:49', 833, 889, 56],\n", - " [733, '13:54', '14:05', 834, 845, 11],\n", - " [734, '13:54', '15:19', 834, 919, 85],\n", - " [735, '13:55', '14:34', 835, 874, 39],\n", - " [736, '13:57', '14:20', 837, 860, 23],\n", - " [737, '13:58', '15:24', 838, 924, 86],\n", - " [738, '13:58', '14:45', 838, 885, 47],\n", - " [739, '13:58', '14:45', 838, 885, 47],\n", - " [740, '13:58', '14:25', 838, 865, 27],\n", - " [741, '13:59', '14:53', 839, 893, 54],\n", - " [742, '14:00', '14:16', 840, 856, 16],\n", - " [743, '14:00', '14:10', 840, 850, 10],\n", - " [744, '14:00', '15:11', 840, 911, 71],\n", - " [745, '14:01', 
'14:13', 841, 853, 12],\n", - " [746, '14:03', '14:50', 843, 890, 47],\n", - " [747, '14:04', '14:15', 844, 855, 11],\n", - " [748, '14:04', '15:29', 844, 929, 85],\n", - " [749, '14:05', '14:44', 845, 884, 39],\n", - " [750, '14:08', '14:55', 848, 895, 47],\n", - " [751, '14:08', '14:55', 848, 895, 47],\n", - " [752, '14:08', '15:34', 848, 934, 86],\n", - " [753, '14:09', '15:03', 849, 903, 54],\n", - " [754, '14:10', '15:21', 850, 921, 71],\n", - " [755, '14:10', '14:20', 850, 860, 10],\n", - " [756, '14:13', '15:00', 853, 900, 47],\n", - " [757, '14:13', '14:40', 853, 880, 27],\n", - " [758, '14:14', '15:40', 854, 940, 86],\n", - " [759, '14:14', '14:25', 854, 865, 11],\n", - " [760, '14:15', '14:54', 855, 894, 39],\n", - " [761, '14:16', '14:28', 856, 868, 12],\n", - " [762, '14:18', '15:05', 858, 905, 47],\n", - " [763, '14:18', '15:44', 858, 944, 86],\n", - " [764, '14:18', '15:05', 858, 905, 47],\n", - " [765, '14:19', '15:13', 859, 913, 54],\n", - " [766, '14:20', '15:31', 860, 931, 71],\n", - " [767, '14:20', '14:30', 860, 870, 10],\n", - " [768, '14:20', '14:36', 860, 876, 16],\n", - " [769, '14:21', '14:47', 861, 887, 26],\n", - " [770, '14:23', '15:10', 863, 910, 47],\n", - " [771, '14:23', '14:45', 863, 885, 22],\n", - " [772, '14:24', '15:50', 864, 950, 86],\n", - " [773, '14:24', '14:35', 864, 875, 11],\n", - " [774, '14:25', '15:02', 865, 902, 37],\n", - " [775, '14:26', '14:52', 866, 892, 26],\n", - " [776, '14:28', '15:15', 868, 915, 47],\n", - " [777, '14:28', '14:55', 868, 895, 27],\n", - " [778, '14:28', '15:54', 868, 954, 86],\n", - " [779, '14:28', '15:15', 868, 915, 47],\n", - " [780, '14:29', '15:23', 869, 923, 54],\n", - " [781, '14:30', '15:41', 870, 941, 71],\n", - " [782, '14:30', '14:40', 870, 880, 10],\n", - " [783, '14:31', '14:43', 871, 883, 12],\n", - " [784, '14:33', '15:20', 873, 920, 47],\n", - " [785, '14:34', '16:00', 874, 960, 86],\n", - " [786, '14:34', '14:45', 874, 885, 11],\n", - " [787, '14:35', '15:11', 875, 911, 36],\n", - " [788, '14:38', '15:25', 878, 925, 47],\n", - " [789, '14:38', '15:25', 878, 925, 47],\n", - " [790, '14:38', '16:04', 878, 964, 86],\n", - " [791, '14:39', '15:33', 879, 933, 54],\n", - " [792, '14:40', '14:50', 880, 890, 10],\n", - " [793, '14:40', '15:51', 880, 951, 71],\n", - " [794, '14:40', '14:56', 880, 896, 16],\n", - " [795, '14:43', '15:30', 883, 930, 47],\n", - " [796, '14:43', '15:10', 883, 910, 27],\n", - " [797, '14:44', '15:00', 884, 900, 16],\n", - " [798, '14:44', '16:10', 884, 970, 86],\n", - " [799, '14:45', '15:19', 885, 919, 34],\n", - " [800, '14:46', '14:58', 886, 898, 12],\n", - " [801, '14:48', '15:35', 888, 935, 47],\n", - " [802, '14:48', '15:35', 888, 935, 47],\n", - " [803, '14:48', '17:04', 888, 1024, 136],\n", - " [804, '14:49', '15:43', 889, 943, 54],\n", - " [805, '14:50', '16:01', 890, 961, 71],\n", - " [806, '14:50', '15:00', 890, 900, 10],\n", - " [807, '14:51', '15:17', 891, 917, 26],\n", - " [808, '14:52', '15:27', 892, 927, 35],\n", - " [809, '14:52', '15:21', 892, 921, 29],\n", - " [810, '14:53', '15:40', 893, 940, 47],\n", - " [811, '14:54', '15:08', 894, 908, 14],\n", - " [812, '14:54', '16:20', 894, 980, 86],\n", - " [813, '14:58', '16:24', 898, 984, 86],\n", - " [814, '14:58', '15:45', 898, 945, 47],\n", - " [815, '14:58', '15:25', 898, 925, 27],\n", - " [816, '14:58', '15:45', 898, 945, 47],\n", - " [817, '14:59', '15:53', 899, 953, 54],\n", - " [818, '15:00', '15:10', 900, 910, 10],\n", - " [819, '15:00', '15:35', 900, 935, 35],\n", - " [820, '15:00', '16:11', 900, 971, 71],\n", 
- " [821, '15:00', '15:16', 900, 916, 16],\n", - " [822, '15:01', '15:13', 901, 913, 12],\n", - " [823, '15:02', '15:16', 902, 916, 14],\n", - " [824, '15:03', '15:50', 903, 950, 47],\n", - " [825, '15:04', '16:30', 904, 990, 86],\n", - " [826, '15:08', '16:34', 908, 994, 86],\n", - " [827, '15:08', '15:55', 908, 955, 47],\n", - " [828, '15:08', '15:55', 908, 955, 47],\n", - " [829, '15:08', '15:45', 908, 945, 37],\n", - " [830, '15:09', '16:14', 909, 974, 65],\n", - " [831, '15:09', '16:03', 909, 963, 54],\n", - " [832, '15:10', '16:21', 910, 981, 71],\n", - " [833, '15:10', '15:20', 910, 920, 10],\n", - " [834, '15:11', '15:24', 911, 924, 13],\n", - " [835, '15:12', '15:36', 912, 936, 24],\n", - " [836, '15:13', '16:00', 913, 960, 47],\n", - " [837, '15:13', '15:40', 913, 940, 27],\n", - " [838, '15:14', '16:40', 914, 1000, 86],\n", - " [839, '15:16', '15:28', 916, 928, 12],\n", - " [840, '15:16', '15:55', 916, 955, 39],\n", - " [841, '15:18', '16:05', 918, 965, 47],\n", - " [842, '15:18', '16:44', 918, 1004, 86],\n", - " [843, '15:18', '16:05', 918, 965, 47],\n", - " [844, '15:19', '16:13', 919, 973, 54],\n", - " [845, '15:19', '15:34', 919, 934, 15],\n", - " [846, '15:20', '15:30', 920, 930, 10],\n", - " [847, '15:20', '16:31', 920, 991, 71],\n", - " [848, '15:20', '15:36', 920, 936, 16],\n", - " [849, '15:21', '15:47', 921, 947, 26],\n", - " [850, '15:21', '16:06', 921, 966, 45],\n", - " [851, '15:23', '16:10', 923, 970, 47],\n", - " [852, '15:24', '16:50', 924, 1010, 86],\n", - " [853, '15:24', '16:05', 924, 965, 41],\n", - " [854, '15:27', '15:51', 927, 951, 24],\n", - " [855, '15:27', '15:44', 927, 944, 17],\n", - " [856, '15:28', '16:15', 928, 975, 47],\n", - " [857, '15:28', '16:54', 928, 1014, 86],\n", - " [858, '15:28', '16:15', 928, 975, 47],\n", - " [859, '15:28', '15:55', 928, 955, 27],\n", - " [860, '15:29', '16:23', 929, 983, 54],\n", - " [861, '15:30', '16:41', 930, 1001, 71],\n", - " [862, '15:30', '15:40', 930, 940, 10],\n", - " [863, '15:31', '15:43', 931, 943, 12],\n", - " [864, '15:33', '16:20', 933, 980, 47],\n", - " [865, '15:34', '17:00', 934, 1020, 86],\n", - " [866, '15:34', '16:15', 934, 975, 41],\n", - " [867, '15:35', '15:54', 935, 954, 19],\n", - " [868, '15:36', '16:21', 936, 981, 45],\n", - " [869, '15:38', '16:25', 938, 985, 47],\n", - " [870, '15:38', '16:25', 938, 985, 47],\n", - " [871, '15:38', '16:39', 938, 999, 61],\n", - " [872, '15:39', '16:33', 939, 993, 54],\n", - " [873, '15:40', '15:50', 940, 950, 10],\n", - " [874, '15:40', '16:51', 940, 1011, 71],\n", - " [875, '15:40', '15:56', 940, 956, 16],\n", - " [876, '15:43', '16:10', 943, 970, 27],\n", - " [877, '15:43', '16:30', 943, 990, 47],\n", - " [878, '15:44', '17:10', 944, 1030, 86],\n", - " [879, '15:44', '16:25', 944, 985, 41],\n", - " [880, '15:45', '16:04', 945, 964, 19],\n", - " [881, '15:46', '15:58', 946, 958, 12],\n", - " [882, '15:48', '16:35', 948, 995, 47],\n", - " [883, '15:48', '16:35', 948, 995, 47],\n", - " [884, '15:48', '17:14', 948, 1034, 86],\n", - " [885, '15:49', '16:43', 949, 1003, 54],\n", - " [886, '15:50', '16:00', 950, 960, 10],\n", - " [887, '15:50', '17:01', 950, 1021, 71],\n", - " [888, '15:51', '16:18', 951, 978, 27],\n", - " [889, '15:52', '16:36', 952, 996, 44],\n", - " [890, '15:53', '16:40', 953, 1000, 47],\n", - " [891, '15:54', '17:20', 954, 1040, 86],\n", - " [892, '15:54', '16:35', 954, 995, 41],\n", - " [893, '15:55', '16:14', 955, 974, 19],\n", - " [894, '15:58', '16:25', 958, 985, 27],\n", - " [895, '15:58', '16:45', 958, 1005, 47],\n", - " [896, 
'15:58', '16:45', 958, 1005, 47],\n", - " [897, '15:58', '17:24', 958, 1044, 86],\n", - " [898, '15:59', '17:11', 959, 1031, 72],\n", - " [899, '15:59', '16:53', 959, 1013, 54],\n", - " [900, '16:00', '16:10', 960, 970, 10],\n", - " [901, '16:00', '16:16', 960, 976, 16],\n", - " [902, '16:01', '16:13', 961, 973, 12],\n", - " [903, '16:03', '16:50', 963, 1010, 47],\n", - " [904, '16:04', '17:30', 964, 1050, 86],\n", - " [905, '16:04', '16:45', 964, 1005, 41],\n", - " [906, '16:05', '16:24', 965, 984, 19],\n", - " [907, '16:06', '16:51', 966, 1011, 45],\n", - " [908, '16:08', '16:55', 968, 1015, 47],\n", - " [909, '16:08', '17:34', 968, 1054, 86],\n", - " [910, '16:08', '16:55', 968, 1015, 47],\n", - " [911, '16:09', '17:03', 969, 1023, 54],\n", - " [912, '16:09', '17:21', 969, 1041, 72],\n", - " [913, '16:10', '16:20', 970, 980, 10],\n", - " [914, '16:13', '16:40', 973, 1000, 27],\n", - " [915, '16:13', '17:00', 973, 1020, 47],\n", - " [916, '16:14', '16:55', 974, 1015, 41],\n", - " [917, '16:14', '17:40', 974, 1060, 86],\n", - " [918, '16:15', '16:34', 975, 994, 19],\n", - " [919, '16:16', '16:28', 976, 988, 12],\n", - " [920, '16:18', '17:05', 978, 1025, 47],\n", - " [921, '16:18', '17:05', 978, 1025, 47],\n", - " [922, '16:18', '17:44', 978, 1064, 86],\n", - " [923, '16:19', '17:31', 979, 1051, 72],\n", - " [924, '16:19', '17:13', 979, 1033, 54],\n", - " [925, '16:20', '16:30', 980, 990, 10],\n", - " [926, '16:20', '16:36', 980, 996, 16],\n", - " [927, '16:21', '16:48', 981, 1008, 27],\n", - " [928, '16:22', '17:06', 982, 1026, 44],\n", - " [929, '16:23', '17:10', 983, 1030, 47],\n", - " [930, '16:24', '17:05', 984, 1025, 41],\n", - " [931, '16:24', '17:50', 984, 1070, 86],\n", - " [932, '16:25', '16:44', 985, 1004, 19],\n", - " [933, '16:28', '17:15', 988, 1035, 47],\n", - " [934, '16:28', '17:15', 988, 1035, 47],\n", - " [935, '16:28', '16:55', 988, 1015, 27],\n", - " [936, '16:28', '17:54', 988, 1074, 86],\n", - " [937, '16:29', '17:23', 989, 1043, 54],\n", - " [938, '16:29', '17:41', 989, 1061, 72],\n", - " [939, '16:30', '16:40', 990, 1000, 10],\n", - " [940, '16:31', '16:43', 991, 1003, 12],\n", - " [941, '16:33', '17:20', 993, 1040, 47],\n", - " [942, '16:34', '17:15', 994, 1035, 41],\n", - " [943, '16:34', '18:00', 994, 1080, 86],\n", - " [944, '16:35', '16:54', 995, 1014, 19],\n", - " [945, '16:36', '17:21', 996, 1041, 45],\n", - " [946, '16:38', '17:25', 998, 1045, 47],\n", - " [947, '16:38', '17:25', 998, 1045, 47],\n", - " [948, '16:38', '18:04', 998, 1084, 86],\n", - " [949, '16:39', '17:33', 999, 1053, 54],\n", - " [950, '16:39', '17:51', 999, 1071, 72],\n", - " [951, '16:40', '16:56', 1000, 1016, 16],\n", - " [952, '16:40', '16:50', 1000, 1010, 10],\n", - " [953, '16:43', '17:10', 1003, 1030, 27],\n", - " [954, '16:43', '17:30', 1003, 1050, 47],\n", - " [955, '16:44', '17:25', 1004, 1045, 41],\n", - " [956, '16:44', '18:10', 1004, 1090, 86],\n", - " [957, '16:45', '17:04', 1005, 1024, 19],\n", - " [958, '16:46', '16:58', 1006, 1018, 12],\n", - " [959, '16:48', '18:14', 1008, 1094, 86],\n", - " [960, '16:48', '17:35', 1008, 1055, 47],\n", - " [961, '16:48', '17:35', 1008, 1055, 47],\n", - " [962, '16:49', '18:01', 1009, 1081, 72],\n", - " [963, '16:49', '17:43', 1009, 1063, 54],\n", - " [964, '16:50', '17:00', 1010, 1020, 10],\n", - " [965, '16:51', '17:18', 1011, 1038, 27],\n", - " [966, '16:52', '17:36', 1012, 1056, 44],\n", - " [967, '16:53', '17:40', 1013, 1060, 47],\n", - " [968, '16:54', '18:20', 1014, 1100, 86],\n", - " [969, '16:54', '17:35', 1014, 1055, 41],\n", - 
" [970, '16:55', '17:14', 1015, 1034, 19],\n", - " [971, '16:58', '17:25', 1018, 1045, 27],\n", - " [972, '16:58', '17:45', 1018, 1065, 47],\n", - " [973, '16:58', '17:45', 1018, 1065, 47],\n", - " [974, '16:58', '18:24', 1018, 1104, 86],\n", - " [975, '16:59', '18:11', 1019, 1091, 72],\n", - " [976, '16:59', '17:53', 1019, 1073, 54],\n", - " [977, '17:00', '17:16', 1020, 1036, 16],\n", - " [978, '17:00', '17:10', 1020, 1030, 10],\n", - " [979, '17:01', '17:13', 1021, 1033, 12],\n", - " [980, '17:03', '17:50', 1023, 1070, 47],\n", - " [981, '17:04', '18:30', 1024, 1110, 86],\n", - " [982, '17:04', '17:45', 1024, 1065, 41],\n", - " [983, '17:05', '17:24', 1025, 1044, 19],\n", - " [984, '17:06', '17:51', 1026, 1071, 45],\n", - " [985, '17:08', '17:55', 1028, 1075, 47],\n", - " [986, '17:08', '17:55', 1028, 1075, 47],\n", - " [987, '17:08', '18:34', 1028, 1114, 86],\n", - " [988, '17:09', '18:03', 1029, 1083, 54],\n", - " [989, '17:09', '18:21', 1029, 1101, 72],\n", - " [990, '17:10', '17:20', 1030, 1040, 10],\n", - " [991, '17:13', '17:40', 1033, 1060, 27],\n", - " [992, '17:13', '18:00', 1033, 1080, 47],\n", - " [993, '17:14', '17:55', 1034, 1075, 41],\n", - " [994, '17:14', '18:40', 1034, 1120, 86],\n", - " [995, '17:15', '17:34', 1035, 1054, 19],\n", - " [996, '17:16', '17:28', 1036, 1048, 12],\n", - " [997, '17:18', '18:05', 1038, 1085, 47],\n", - " [998, '17:18', '18:05', 1038, 1085, 47],\n", - " [999, '17:18', '18:44', 1038, 1124, 86],\n", - " [1000, '17:19', '18:31', 1039, 1111, 72],\n", - " [1001, '17:19', '18:13', 1039, 1093, 54],\n", - " [1002, '17:20', '17:36', 1040, 1056, 16],\n", - " [1003, '17:20', '17:30', 1040, 1050, 10],\n", - " [1004, '17:21', '17:47', 1041, 1067, 26],\n", - " [1005, '17:22', '18:06', 1042, 1086, 44],\n", - " [1006, '17:23', '18:10', 1043, 1090, 47],\n", - " [1007, '17:24', '18:50', 1044, 1130, 86],\n", - " [1008, '17:24', '18:05', 1044, 1085, 41],\n", - " [1009, '17:25', '17:44', 1045, 1064, 19],\n", - " [1010, '17:28', '17:55', 1048, 1075, 27],\n", - " [1011, '17:28', '18:15', 1048, 1095, 47],\n", - " [1012, '17:28', '18:15', 1048, 1095, 47],\n", - " [1013, '17:28', '18:54', 1048, 1134, 86],\n", - " [1014, '17:29', '18:41', 1049, 1121, 72],\n", - " [1015, '17:29', '18:23', 1049, 1103, 54],\n", - " [1016, '17:30', '17:40', 1050, 1060, 10],\n", - " [1017, '17:31', '17:43', 1051, 1063, 12],\n", - " [1018, '17:33', '18:20', 1053, 1100, 47],\n", - " [1019, '17:34', '18:15', 1054, 1095, 41],\n", - " [1020, '17:34', '19:00', 1054, 1140, 86],\n", - " [1021, '17:35', '17:54', 1055, 1074, 19],\n", - " [1022, '17:36', '18:21', 1056, 1101, 45],\n", - " [1023, '17:38', '18:25', 1058, 1105, 47],\n", - " [1024, '17:38', '19:04', 1058, 1144, 86],\n", - " [1025, '17:38', '18:25', 1058, 1105, 47],\n", - " [1026, '17:39', '18:51', 1059, 1131, 72],\n", - " [1027, '17:39', '18:33', 1059, 1113, 54],\n", - " [1028, '17:40', '17:56', 1060, 1076, 16],\n", - " [1029, '17:40', '17:50', 1060, 1070, 10],\n", - " [1030, '17:43', '18:10', 1063, 1090, 27],\n", - " [1031, '17:43', '18:30', 1063, 1110, 47],\n", - " [1032, '17:44', '18:25', 1064, 1105, 41],\n", - " [1033, '17:44', '19:14', 1064, 1154, 90],\n", - " [1034, '17:45', '18:04', 1065, 1084, 19],\n", - " [1035, '17:46', '17:58', 1066, 1078, 12],\n", - " [1036, '17:48', '18:35', 1068, 1115, 47],\n", - " [1037, '17:48', '18:35', 1068, 1115, 47],\n", - " [1038, '17:48', '19:14', 1068, 1154, 86],\n", - " [1039, '17:49', '19:01', 1069, 1141, 72],\n", - " [1040, '17:49', '18:43', 1069, 1123, 54],\n", - " [1041, '17:50', '18:00', 1070, 
1080, 10],\n", - " [1042, '17:51', '18:17', 1071, 1097, 26],\n", - " [1043, '17:52', '18:36', 1072, 1116, 44],\n", - " [1044, '17:53', '18:40', 1073, 1120, 47],\n", - " [1045, '17:54', '18:35', 1074, 1115, 41],\n", - " [1046, '17:54', '18:57', 1074, 1137, 63],\n", - " [1047, '17:55', '18:14', 1075, 1094, 19],\n", - " [1048, '17:58', '18:45', 1078, 1125, 47],\n", - " [1049, '17:58', '18:45', 1078, 1125, 47],\n", - " [1050, '17:58', '18:25', 1078, 1105, 27],\n", - " [1051, '17:58', '19:26', 1078, 1166, 88],\n", - " [1052, '17:59', '18:53', 1079, 1133, 54],\n", - " [1053, '18:00', '19:11', 1080, 1151, 71],\n", - " [1054, '18:00', '18:10', 1080, 1090, 10],\n", - " [1055, '18:00', '18:16', 1080, 1096, 16],\n", - " [1056, '18:01', '18:13', 1081, 1093, 12],\n", - " [1057, '18:03', '18:50', 1083, 1130, 47],\n", - " [1058, '18:04', '18:45', 1084, 1125, 41],\n", - " [1059, '18:04', '19:29', 1084, 1169, 85],\n", - " [1060, '18:05', '18:24', 1085, 1104, 19],\n", - " [1061, '18:06', '18:51', 1086, 1131, 45],\n", - " [1062, '18:08', '18:55', 1088, 1135, 47],\n", - " [1063, '18:08', '19:06', 1088, 1146, 58],\n", - " [1064, '18:08', '18:55', 1088, 1135, 47],\n", - " [1065, '18:09', '19:03', 1089, 1143, 54],\n", - " [1066, '18:10', '18:20', 1090, 1100, 10],\n", - " [1067, '18:10', '19:21', 1090, 1161, 71],\n", - " [1068, '18:13', '19:00', 1093, 1140, 47],\n", - " [1069, '18:13', '18:40', 1093, 1120, 27],\n", - " [1070, '18:14', '19:43', 1094, 1183, 89],\n", - " [1071, '18:14', '18:55', 1094, 1135, 41],\n", - " [1072, '18:15', '18:34', 1095, 1114, 19],\n", - " [1073, '18:16', '18:28', 1096, 1108, 12],\n", - " [1074, '18:17', '18:27', 1097, 1107, 10],\n", - " [1075, '18:18', '19:41', 1098, 1181, 83],\n", - " [1076, '18:18', '18:58', 1098, 1138, 40],\n", - " [1077, '18:18', '19:05', 1098, 1145, 47],\n", - " [1078, '18:19', '19:13', 1099, 1153, 54],\n", - " [1079, '18:20', '19:31', 1100, 1171, 71],\n", - " [1080, '18:20', '18:36', 1100, 1116, 16],\n", - " [1081, '18:20', '18:30', 1100, 1110, 10],\n", - " [1082, '18:22', '19:05', 1102, 1145, 43],\n", - " [1083, '18:23', '19:05', 1103, 1145, 42],\n", - " [1084, '18:24', '19:27', 1104, 1167, 63],\n", - " [1085, '18:24', '19:05', 1104, 1145, 41],\n", - " [1086, '18:25', '18:44', 1105, 1124, 19],\n", - " [1087, '18:28', '19:25', 1108, 1165, 57],\n", - " [1088, '18:28', '18:55', 1108, 1135, 27],\n", - " [1089, '18:28', '19:08', 1108, 1148, 40],\n", - " [1090, '18:28', '19:15', 1108, 1155, 47],\n", - " [1091, '18:29', '19:23', 1109, 1163, 54],\n", - " [1092, '18:30', '19:05', 1110, 1145, 35],\n", - " [1093, '18:30', '18:40', 1110, 1120, 10],\n", - " [1094, '18:31', '18:43', 1111, 1123, 12],\n", - " [1095, '18:33', '19:15', 1113, 1155, 42],\n", - " [1096, '18:34', '19:58', 1114, 1198, 84],\n", - " [1097, '18:34', '19:14', 1114, 1154, 40],\n", - " [1098, '18:35', '18:55', 1115, 1135, 20],\n", - " [1099, '18:36', '19:20', 1116, 1160, 44],\n", - " [1100, '18:38', '19:25', 1118, 1165, 47],\n", - " [1101, '18:38', '19:23', 1118, 1163, 45],\n", - " [1102, '18:38', '19:56', 1118, 1196, 78],\n", - " [1103, '18:39', '19:33', 1119, 1173, 54],\n", - " [1104, '18:40', '18:50', 1120, 1130, 10],\n", - " [1105, '18:40', '19:45', 1120, 1185, 65],\n", - " [1106, '18:40', '18:56', 1120, 1136, 16],\n", - " [1107, '18:43', '19:10', 1123, 1150, 27],\n", - " [1108, '18:43', '19:30', 1123, 1170, 47],\n", - " [1109, '18:44', '19:24', 1124, 1164, 40],\n", - " [1110, '18:45', '19:05', 1125, 1145, 20],\n", - " [1111, '18:46', '18:58', 1126, 1138, 12],\n", - " [1112, '18:48', '19:35', 1128, 
1175, 47],\n", - " [1113, '18:48', '20:12', 1128, 1212, 84],\n", - " [1114, '18:48', '20:11', 1128, 1211, 83],\n", - " [1115, '18:48', '19:28', 1128, 1168, 40],\n", - " [1116, '18:49', '19:43', 1129, 1183, 54],\n", - " [1117, '18:50', '19:00', 1130, 1140, 10],\n", - " [1118, '18:51', '19:01', 1131, 1141, 10],\n", - " [1119, '18:53', '19:35', 1133, 1175, 42],\n", - " [1120, '18:53', '19:15', 1133, 1155, 22],\n", - " [1121, '18:53', '20:00', 1133, 1200, 67],\n", - " [1122, '18:55', '19:15', 1135, 1155, 20],\n", - " [1123, '18:55', '19:34', 1135, 1174, 39],\n", - " [1124, '18:58', '19:38', 1138, 1178, 40],\n", - " [1125, '18:59', '19:53', 1139, 1193, 54],\n", - " [1126, '18:59', '19:50', 1139, 1190, 51],\n", - " [1127, '18:59', '19:53', 1139, 1193, 54],\n", - " [1128, '19:00', '19:16', 1140, 1156, 16],\n", - " [1129, '19:00', '19:10', 1140, 1150, 10],\n", - " [1130, '19:00', '19:16', 1140, 1156, 16],\n", - " [1131, '19:01', '19:13', 1141, 1153, 12],\n", - " [1132, '19:03', '20:26', 1143, 1226, 83],\n", - " [1133, '19:03', '19:45', 1143, 1185, 42],\n", - " [1134, '19:05', '19:44', 1145, 1184, 39],\n", - " [1135, '19:05', '19:25', 1145, 1165, 20],\n", - " [1136, '19:08', '20:15', 1148, 1215, 67],\n", - " [1137, '19:08', '19:35', 1148, 1175, 27],\n", - " [1138, '19:09', '19:49', 1149, 1189, 40],\n", - " [1139, '19:09', '20:03', 1149, 1203, 54],\n", - " [1140, '19:10', '19:20', 1150, 1160, 10],\n", - " [1141, '19:10', '19:20', 1150, 1160, 10],\n", - " [1142, '19:11', '19:53', 1151, 1193, 42],\n", - " [1143, '19:14', '20:26', 1154, 1226, 72],\n", - " [1144, '19:14', '19:35', 1154, 1175, 21],\n", - " [1145, '19:14', '19:24', 1154, 1164, 10],\n", - " [1146, '19:14', '20:05', 1154, 1205, 51],\n", - " [1147, '19:15', '19:30', 1155, 1170, 15],\n", - " [1148, '19:15', '19:54', 1155, 1194, 39],\n", - " [1149, '19:18', '20:39', 1158, 1239, 81],\n", - " [1150, '19:18', '20:00', 1158, 1200, 42],\n", - " [1151, '19:19', '20:14', 1159, 1214, 55],\n", - " [1152, '19:20', '19:30', 1160, 1170, 10],\n", - " [1153, '19:20', '19:36', 1160, 1176, 16],\n", - " [1154, '19:21', '19:31', 1161, 1171, 10],\n", - " [1155, '19:23', '20:30', 1163, 1230, 67],\n", - " [1156, '19:23', '19:35', 1163, 1175, 12],\n", - " [1157, '19:24', '19:45', 1164, 1185, 21],\n", - " [1158, '19:24', '19:45', 1164, 1185, 21],\n", - " [1159, '19:25', '20:04', 1165, 1204, 39],\n", - " [1160, '19:26', '20:08', 1166, 1208, 42],\n", - " [1161, '19:29', '20:02', 1169, 1202, 33],\n", - " [1162, '19:29', '20:18', 1169, 1218, 49],\n", - " [1163, '19:29', '20:41', 1169, 1241, 72],\n", - " [1164, '19:30', '19:40', 1170, 1180, 10],\n", - " [1165, '19:33', '20:54', 1173, 1254, 81],\n", - " [1166, '19:33', '20:17', 1173, 1217, 44],\n", - " [1167, '19:34', '19:55', 1174, 1195, 21],\n", - " [1168, '19:35', '20:14', 1175, 1214, 39],\n", - " [1169, '19:38', '20:05', 1178, 1205, 27],\n", - " [1170, '19:38', '20:45', 1178, 1245, 67],\n", - " [1171, '19:39', '20:12', 1179, 1212, 33],\n", - " [1172, '19:40', '19:50', 1180, 1190, 10],\n", - " [1173, '19:40', '19:56', 1180, 1196, 16],\n", - " [1174, '19:41', '20:27', 1181, 1227, 46],\n", - " [1175, '19:43', '19:55', 1183, 1195, 12],\n", - " [1176, '19:44', '20:05', 1184, 1205, 21],\n", - " [1177, '19:44', '20:33', 1184, 1233, 49],\n", - " [1178, '19:44', '21:00', 1184, 1260, 76],\n", - " [1179, '19:45', '20:24', 1185, 1224, 39],\n", - " [1180, '19:48', '20:37', 1188, 1237, 49],\n", - " [1181, '19:48', '21:09', 1188, 1269, 81],\n", - " [1182, '19:50', '20:00', 1190, 1200, 10],\n", - " [1183, '19:52', '20:29', 1192, 
1229, 37],\n", - " [1184, '19:53', '20:08', 1193, 1208, 15],\n", - " [1185, '19:53', '21:02', 1193, 1262, 69],\n", - " [1186, '19:53', '20:20', 1193, 1220, 27],\n", - " [1187, '19:54', '20:19', 1194, 1219, 25],\n", - " [1188, '19:55', '20:34', 1195, 1234, 39],\n", - " [1189, '19:56', '20:34', 1196, 1234, 38],\n", - " [1190, '19:59', '20:48', 1199, 1248, 49],\n", - " [1191, '19:59', '21:20', 1199, 1280, 81],\n", - " [1192, '20:00', '20:16', 1200, 1216, 16],\n", - " [1193, '20:00', '20:10', 1200, 1210, 10],\n", - " [1194, '20:03', '20:42', 1203, 1242, 39],\n", - " [1195, '20:03', '21:24', 1203, 1284, 81],\n", - " [1196, '20:04', '20:29', 1204, 1229, 25],\n", - " [1197, '20:05', '20:48', 1205, 1248, 43],\n", - " [1198, '20:07', '20:44', 1207, 1244, 37],\n", - " [1199, '20:08', '20:40', 1208, 1240, 32],\n", - " [1200, '20:08', '20:35', 1208, 1235, 27],\n", - " [1201, '20:10', '20:20', 1210, 1220, 10],\n", - " [1202, '20:10', '20:22', 1210, 1222, 12],\n", - " [1203, '20:11', '20:47', 1211, 1247, 36],\n", - " [1204, '20:14', '21:04', 1214, 1264, 50],\n", - " [1205, '20:14', '21:03', 1214, 1263, 49],\n", - " [1206, '20:17', '21:03', 1217, 1263, 46],\n", - " [1207, '20:18', '21:39', 1218, 1299, 81],\n", - " [1208, '20:20', '20:30', 1220, 1230, 10],\n", - " [1209, '20:20', '20:57', 1220, 1257, 37],\n", - " [1210, '20:20', '20:36', 1220, 1236, 16],\n", - " [1211, '20:22', '20:59', 1222, 1259, 37],\n", - " [1212, '20:22', '20:42', 1222, 1242, 20],\n", - " [1213, '20:24', '20:49', 1224, 1249, 25],\n", - " [1214, '20:27', '21:22', 1227, 1282, 55],\n", - " [1215, '20:29', '21:18', 1229, 1278, 49],\n", - " [1216, '20:30', '21:07', 1230, 1267, 37],\n", - " [1217, '20:30', '20:40', 1230, 1240, 10],\n", - " [1218, '20:30', '20:40', 1230, 1240, 10],\n", - " [1219, '20:30', '21:40', 1230, 1300, 70],\n", - " [1220, '20:32', '21:18', 1232, 1278, 46],\n", - " [1221, '20:35', '21:54', 1235, 1314, 79],\n", - " [1222, '20:37', '21:14', 1237, 1274, 37],\n", - " [1223, '20:38', '21:08', 1238, 1268, 30],\n", - " [1224, '20:40', '20:50', 1240, 1250, 10],\n", - " [1225, '20:40', '21:17', 1240, 1277, 37],\n", - " [1226, '20:40', '20:56', 1240, 1256, 16],\n", - " [1227, '20:44', '21:33', 1244, 1293, 49],\n", - " [1228, '20:47', '21:33', 1247, 1293, 46],\n", - " [1229, '20:47', '21:42', 1247, 1302, 55],\n", - " [1230, '20:50', '21:00', 1250, 1260, 10],\n", - " [1231, '20:50', '22:00', 1250, 1320, 70],\n", - " [1232, '20:50', '22:09', 1250, 1329, 79],\n", - " [1233, '20:50', '21:27', 1250, 1287, 37],\n", - " [1234, '20:52', '21:29', 1252, 1289, 37],\n", - " [1235, '20:53', '21:20', 1253, 1280, 27],\n", - " [1236, '20:56', '21:11', 1256, 1271, 15],\n", - " [1237, '20:59', '21:48', 1259, 1308, 49],\n", - " [1238, '21:00', '21:10', 1260, 1270, 10],\n", - " [1239, '21:00', '21:37', 1260, 1297, 37],\n", - " [1240, '21:02', '21:48', 1262, 1308, 46],\n", - " [1241, '21:05', '22:24', 1265, 1344, 79],\n", - " [1242, '21:07', '21:44', 1267, 1304, 37],\n", - " [1243, '21:07', '22:02', 1267, 1322, 55],\n", - " [1244, '21:08', '21:38', 1268, 1298, 30],\n", - " [1245, '21:10', '22:25', 1270, 1345, 75],\n", - " [1246, '21:10', '21:20', 1270, 1280, 10],\n", - " [1247, '21:10', '21:47', 1270, 1307, 37],\n", - " [1248, '21:14', '22:03', 1274, 1323, 49],\n", - " [1249, '21:17', '22:03', 1277, 1323, 46],\n", - " [1250, '21:20', '22:18', 1280, 1338, 58],\n", - " [1251, '21:20', '21:57', 1280, 1317, 37],\n", - " [1252, '21:20', '21:30', 1280, 1290, 10],\n", - " [1253, '21:22', '21:59', 1282, 1319, 37],\n", - " [1254, '21:24', '21:49', 1284, 
1309, 25],\n", - " [1255, '21:27', '22:21', 1287, 1341, 54],\n", - " [1256, '21:30', '22:07', 1290, 1327, 37],\n", - " [1257, '21:30', '22:20', 1290, 1340, 50],\n", - " [1258, '21:30', '21:40', 1290, 1300, 10],\n", - " [1259, '21:32', '22:18', 1292, 1338, 46],\n", - " [1260, '21:32', '22:01', 1292, 1321, 29],\n", - " [1261, '21:35', '22:54', 1295, 1374, 79],\n", - " [1262, '21:37', '22:14', 1297, 1334, 37],\n", - " [1263, '21:39', '21:55', 1299, 1315, 16],\n", - " [1264, '21:40', '22:17', 1300, 1337, 37],\n", - " [1265, '21:40', '21:50', 1300, 1310, 10],\n", - " [1266, '21:41', '22:08', 1301, 1328, 27],\n", - " [1267, '21:47', '22:16', 1307, 1336, 29],\n", - " [1268, '21:47', '22:51', 1307, 1371, 64],\n", - " [1269, '21:47', '22:33', 1307, 1353, 46],\n", - " [1270, '21:48', '22:03', 1308, 1323, 15],\n", - " [1271, '21:50', '22:55', 1310, 1375, 65],\n", - " [1272, '21:50', '22:27', 1310, 1347, 37],\n", - " [1273, '21:50', '22:00', 1310, 1320, 10],\n", - " [1274, '21:52', '22:29', 1312, 1349, 37],\n", - " [1275, '21:53', '22:19', 1313, 1339, 26],\n", - " [1276, '22:00', '22:38', 1320, 1358, 38],\n", - " [1277, '22:00', '22:10', 1320, 1330, 10],\n", - " [1278, '22:02', '22:12', 1322, 1332, 10],\n", - " [1279, '22:02', '22:48', 1322, 1368, 46],\n", - " [1280, '22:04', '22:31', 1324, 1351, 27],\n", - " [1281, '22:05', '23:24', 1325, 1404, 79],\n", - " [1282, '22:07', '22:44', 1327, 1364, 37],\n", - " [1283, '22:07', '22:39', 1327, 1359, 32],\n", - " [1284, '22:09', '22:25', 1329, 1345, 16],\n", - " [1285, '22:10', '23:25', 1330, 1405, 75],\n", - " [1286, '22:13', '22:38', 1333, 1358, 25],\n", - " [1287, '22:13', '22:53', 1333, 1373, 40],\n", - " [1288, '22:17', '22:27', 1337, 1347, 10],\n", - " [1289, '22:17', '23:03', 1337, 1383, 46],\n", - " [1290, '22:19', '22:46', 1339, 1366, 27],\n", - " [1291, '22:22', '22:59', 1342, 1379, 37],\n", - " [1292, '22:24', '22:48', 1344, 1368, 24],\n", - " [1293, '22:27', '22:52', 1347, 1372, 25],\n", - " [1294, '22:27', '23:21', 1347, 1401, 54],\n", - " [1295, '22:28', '23:08', 1348, 1388, 40],\n", - " [1296, '22:30', '23:17', 1350, 1397, 47],\n", - " [1297, '22:32', '22:42', 1352, 1362, 10],\n", - " [1298, '22:32', '23:11', 1352, 1391, 39],\n", - " [1299, '22:34', '23:01', 1354, 1381, 27],\n", - " [1300, '22:35', '23:54', 1355, 1434, 79],\n", - " [1301, '22:37', '23:14', 1357, 1394, 37],\n", - " [1302, '22:43', '23:23', 1363, 1403, 40],\n", - " [1303, '22:43', '23:08', 1363, 1388, 25],\n", - " [1304, '22:47', '23:33', 1367, 1413, 46],\n", - " [1305, '22:47', '22:57', 1367, 1377, 10],\n", - " [1306, '22:49', '23:16', 1369, 1396, 27],\n", - " [1307, '22:52', '23:29', 1372, 1409, 37],\n", - " [1308, '22:53', '23:15', 1373, 1395, 22],\n", - " [1309, '22:55', '23:55', 1375, 1435, 60],\n", - " [1310, '22:57', '23:51', 1377, 1431, 54],\n", - " [1311, '22:58', '23:38', 1378, 1418, 40],\n", - " [1312, '23:02', '23:41', 1382, 1421, 39],\n", - " [1313, '23:02', '23:12', 1382, 1392, 10],\n", - " [1314, '23:04', '23:31', 1384, 1411, 27],\n", - " [1315, '23:05', '00:24', 1385, 1464, 79],\n", - " [1316, '23:07', '23:44', 1387, 1424, 37],\n", - " [1317, '23:13', '23:53', 1393, 1433, 40],\n", - " [1318, '23:13', '23:38', 1393, 1418, 25],\n", - " [1319, '23:17', '00:03', 1397, 1443, 46],\n", - " [1320, '23:17', '23:27', 1397, 1407, 10],\n", - " [1321, '23:19', '23:46', 1399, 1426, 27],\n", - " [1322, '23:22', '23:59', 1402, 1439, 37],\n", - " [1323, '23:25', '00:25', 1405, 1465, 60],\n", - " [1324, '23:27', '00:21', 1407, 1461, 54],\n", - " [1325, '23:28', '00:08', 1408, 
1448, 40],\n", - " [1326, '23:32', '23:42', 1412, 1422, 10],\n", - " [1327, '23:34', '00:01', 1414, 1441, 27],\n", - " [1328, '23:35', '01:05', 1415, 1505, 90],\n", - " [1329, '23:37', '00:09', 1417, 1449, 32],\n", - " [1330, '23:43', '00:23', 1423, 1463, 40],\n", - " [1331, '23:43', '00:08', 1423, 1448, 25],\n", - " [1332, '23:46', '00:01', 1426, 1441, 15],\n", - " [1333, '23:47', '23:57', 1427, 1437, 10],\n", - " [1334, '23:47', '00:33', 1427, 1473, 46],\n", - " [1335, '23:52', '00:24', 1432, 1464, 32],\n", - " [1336, '23:55', '00:49', 1435, 1489, 54],\n", - " [1337, '23:57', '00:57', 1437, 1497, 60],\n", - " [1338, '23:58', '00:38', 1438, 1478, 40],\n", - " [1339, '00:02', '00:12', 1442, 1452, 10],\n", - " [1340, '00:07', '00:39', 1447, 1479, 32],\n", - " [1341, '00:13', '00:38', 1453, 1478, 25],\n", - " [1342, '00:13', '00:51', 1453, 1491, 38],\n", - " [1343, '00:15', '01:14', 1455, 1514, 59],\n", - " [1344, '00:17', '01:23', 1457, 1523, 66],\n", - " [1345, '00:23', '00:33', 1463, 1473, 10],\n", - " [1346, '00:24', '00:40', 1464, 1480, 16],\n", - " [1347, '00:25', '01:12', 1465, 1512, 47],\n", - " [1348, '00:28', '01:07', 1468, 1507, 39],\n", - " [1349, '00:33', '01:05', 1473, 1505, 32],\n", - " [1350, '00:43', '01:21', 1483, 1521, 38],\n", - " [1351, '00:44', '00:54', 1484, 1494, 10],\n", - " [1352, '00:47', '01:09', 1487, 1509, 22],\n", - " [1353, '00:47', '01:26', 1487, 1526, 39],\n", - " [1354, '00:54', '01:04', 1494, 1504, 10],\n", - " [1355, '00:57', '01:07', 1497, 1507, 10]\n", + " [0, \"04:18\", \"05:00\", 258, 300, 42],\n", + " [1, \"04:27\", \"05:08\", 267, 308, 41],\n", + " [2, \"04:29\", \"05:26\", 269, 326, 57],\n", + " [3, \"04:29\", \"04:55\", 269, 295, 26],\n", + " [4, \"04:30\", \"04:53\", 270, 293, 23],\n", + " [5, \"04:30\", \"04:51\", 270, 291, 21],\n", + " [6, \"04:31\", \"04:53\", 271, 293, 22],\n", + " [7, \"04:33\", \"05:15\", 273, 315, 42],\n", + " [8, \"04:34\", \"04:44\", 274, 284, 10],\n", + " [9, \"04:34\", \"05:03\", 274, 303, 29],\n", + " [10, \"04:35\", \"04:50\", 275, 290, 15],\n", + " [11, \"04:36\", \"04:46\", 276, 286, 10],\n", + " [12, \"04:37\", \"05:18\", 277, 318, 41],\n", + " [13, \"04:41\", \"05:13\", 281, 313, 32],\n", + " [14, \"04:42\", \"05:23\", 282, 323, 41],\n", + " [15, \"04:43\", \"04:53\", 283, 293, 10],\n", + " [16, \"04:44\", \"05:45\", 284, 345, 61],\n", + " [17, \"04:45\", \"05:11\", 285, 311, 26],\n", + " [18, \"04:46\", \"05:01\", 286, 301, 15],\n", + " [19, \"04:46\", \"04:56\", 286, 296, 10],\n", + " [20, \"04:47\", \"05:14\", 287, 314, 27],\n", + " [21, \"04:48\", \"05:30\", 288, 330, 42],\n", + " [22, \"04:49\", \"05:41\", 289, 341, 52],\n", + " [23, \"04:49\", \"05:18\", 289, 318, 29],\n", + " [24, \"04:50\", \"05:33\", 290, 333, 43],\n", + " [25, \"04:52\", \"05:56\", 292, 356, 64],\n", + " [26, \"04:52\", \"05:07\", 292, 307, 15],\n", + " [27, \"04:53\", \"05:19\", 293, 319, 26],\n", + " [28, \"04:53\", \"05:23\", 293, 323, 30],\n", + " [29, \"04:55\", \"05:27\", 295, 327, 32],\n", + " [30, \"04:57\", \"05:38\", 297, 338, 41],\n", + " [31, \"05:00\", \"06:00\", 300, 360, 60],\n", + " [32, \"05:00\", \"05:54\", 300, 354, 54],\n", + " [33, \"05:01\", \"05:33\", 301, 333, 32],\n", + " [34, \"05:01\", \"05:26\", 301, 326, 25],\n", + " [35, \"05:02\", \"05:29\", 302, 329, 27],\n", + " [36, \"05:02\", \"05:12\", 302, 312, 10],\n", + " [37, \"05:03\", \"05:45\", 303, 345, 42],\n", + " [38, \"05:03\", \"05:18\", 303, 318, 15],\n", + " [39, \"05:03\", \"06:28\", 303, 388, 85],\n", + " [40, \"05:03\", \"05:13\", 303, 313, 10],\n", 
+ " [41, \"05:04\", \"06:24\", 304, 384, 80],\n", + " [42, \"05:07\", \"05:44\", 307, 344, 37],\n", + " [43, \"05:08\", \"05:48\", 308, 348, 40],\n", + " [44, \"05:10\", \"06:06\", 310, 366, 56],\n", + " [45, \"05:11\", \"05:37\", 311, 337, 26],\n", + " [46, \"05:11\", \"05:53\", 311, 353, 42],\n", + " [47, \"05:13\", \"06:15\", 313, 375, 62],\n", + " [48, \"05:13\", \"05:38\", 313, 338, 25],\n", + " [49, \"05:16\", \"05:44\", 316, 344, 28],\n", + " [50, \"05:17\", \"05:27\", 317, 327, 10],\n", + " [51, \"05:18\", \"06:40\", 318, 400, 82],\n", + " [52, \"05:18\", \"06:03\", 318, 363, 45],\n", + " [53, \"05:18\", \"06:11\", 318, 371, 53],\n", + " [54, \"05:18\", \"06:00\", 318, 360, 42],\n", + " [55, \"05:19\", \"06:34\", 319, 394, 75],\n", + " [56, \"05:20\", \"06:17\", 320, 377, 57],\n", + " [57, \"05:22\", \"05:59\", 322, 359, 37],\n", + " [58, \"05:24\", \"05:48\", 324, 348, 24],\n", + " [59, \"05:25\", \"05:40\", 325, 340, 15],\n", + " [60, \"05:26\", \"06:08\", 326, 368, 42],\n", + " [61, \"05:27\", \"06:30\", 327, 390, 63],\n", + " [62, \"05:27\", \"05:54\", 327, 354, 27],\n", + " [63, \"05:28\", \"05:53\", 328, 353, 25],\n", + " [64, \"05:29\", \"05:44\", 329, 344, 15],\n", + " [65, \"05:30\", \"05:40\", 330, 340, 10],\n", + " [66, \"05:30\", \"05:40\", 330, 340, 10],\n", + " [67, \"05:30\", \"05:40\", 330, 340, 10],\n", + " [68, \"05:32\", \"06:53\", 332, 413, 81],\n", + " [69, \"05:33\", \"07:00\", 333, 420, 87],\n", + " [70, \"05:33\", \"06:15\", 333, 375, 42],\n", + " [71, \"05:33\", \"05:47\", 333, 347, 14],\n", + " [72, \"05:37\", \"06:13\", 337, 373, 36],\n", + " [73, \"05:37\", \"06:05\", 337, 365, 28],\n", + " [74, \"05:38\", \"06:33\", 338, 393, 55],\n", + " [75, \"05:38\", \"06:04\", 338, 364, 26],\n", + " [76, \"05:38\", \"06:18\", 338, 378, 40],\n", + " [77, \"05:39\", \"05:54\", 339, 354, 15],\n", + " [78, \"05:40\", \"05:56\", 340, 356, 16],\n", + " [79, \"05:40\", \"06:41\", 340, 401, 61],\n", + " [80, \"05:40\", \"05:50\", 340, 350, 10],\n", + " [81, \"05:41\", \"06:23\", 341, 383, 42],\n", + " [82, \"05:41\", \"06:01\", 341, 361, 20],\n", + " [83, \"05:43\", \"06:08\", 343, 368, 25],\n", + " [84, \"05:44\", \"07:10\", 344, 430, 86],\n", + " [85, \"05:44\", \"05:55\", 344, 355, 11],\n", + " [86, \"05:45\", \"06:44\", 345, 404, 59],\n", + " [87, \"05:47\", \"06:17\", 347, 377, 30],\n", + " [88, \"05:48\", \"07:08\", 348, 428, 80],\n", + " [89, \"05:48\", \"06:30\", 348, 390, 42],\n", + " [90, \"05:50\", \"06:50\", 350, 410, 60],\n", + " [91, \"05:50\", \"06:00\", 350, 360, 10],\n", + " [92, \"05:50\", \"06:00\", 350, 360, 10],\n", + " [93, \"05:50\", \"06:51\", 350, 411, 61],\n", + " [94, \"05:52\", \"06:33\", 352, 393, 41],\n", + " [95, \"05:52\", \"06:36\", 352, 396, 44],\n", + " [96, \"05:52\", \"06:23\", 352, 383, 31],\n", + " [97, \"05:54\", \"06:14\", 354, 374, 20],\n", + " [98, \"05:54\", \"07:20\", 354, 440, 86],\n", + " [99, \"05:55\", \"06:40\", 355, 400, 45],\n", + " [100, \"05:55\", \"06:27\", 355, 387, 32],\n", + " [101, \"05:56\", \"06:35\", 356, 395, 39],\n", + " [102, \"05:56\", \"06:06\", 356, 366, 10],\n", + " [103, \"05:57\", \"06:21\", 357, 381, 24],\n", + " [104, \"05:58\", \"07:23\", 358, 443, 85],\n", + " [105, \"05:58\", \"06:23\", 358, 383, 25],\n", + " [106, \"05:58\", \"06:08\", 358, 368, 10],\n", + " [107, \"05:58\", \"06:43\", 358, 403, 45],\n", + " [108, \"06:00\", \"06:10\", 360, 370, 10],\n", + " [109, \"06:00\", \"06:16\", 360, 376, 16],\n", + " [110, \"06:00\", \"07:01\", 360, 421, 61],\n", + " [111, \"06:01\", \"07:00\", 361, 420, 
59],\n", + " [112, \"06:01\", \"06:13\", 361, 373, 12],\n", + " [113, \"06:01\", \"06:45\", 361, 405, 44],\n", + " [114, \"06:03\", \"06:50\", 363, 410, 47],\n", + " [115, \"06:04\", \"06:37\", 364, 397, 33],\n", + " [116, \"06:04\", \"07:30\", 364, 450, 86],\n", + " [117, \"06:05\", \"06:24\", 365, 384, 19],\n", + " [118, \"06:06\", \"06:51\", 366, 411, 45],\n", + " [119, \"06:07\", \"06:43\", 367, 403, 36],\n", + " [120, \"06:08\", \"07:30\", 368, 450, 82],\n", + " [121, \"06:10\", \"06:20\", 370, 380, 10],\n", + " [122, \"06:10\", \"07:17\", 370, 437, 67],\n", + " [123, \"06:11\", \"06:54\", 371, 414, 43],\n", + " [124, \"06:11\", \"06:21\", 371, 381, 10],\n", + " [125, \"06:13\", \"06:38\", 373, 398, 25],\n", + " [126, \"06:13\", \"06:58\", 373, 418, 45],\n", + " [127, \"06:13\", \"06:53\", 373, 413, 40],\n", + " [128, \"06:14\", \"07:03\", 374, 423, 49],\n", + " [129, \"06:14\", \"06:47\", 374, 407, 33],\n", + " [130, \"06:14\", \"07:40\", 374, 460, 86],\n", + " [131, \"06:15\", \"07:15\", 375, 435, 60],\n", + " [132, \"06:16\", \"06:28\", 376, 388, 12],\n", + " [133, \"06:16\", \"06:26\", 376, 386, 10],\n", + " [134, \"06:17\", \"06:34\", 377, 394, 17],\n", + " [135, \"06:18\", \"07:06\", 378, 426, 48],\n", + " [136, \"06:18\", \"07:38\", 378, 458, 80],\n", + " [137, \"06:18\", \"07:02\", 378, 422, 44],\n", + " [138, \"06:19\", \"06:53\", 379, 413, 34],\n", + " [139, \"06:20\", \"07:25\", 380, 445, 65],\n", + " [140, \"06:20\", \"06:36\", 380, 396, 16],\n", + " [141, \"06:20\", \"06:30\", 380, 390, 10],\n", + " [142, \"06:20\", \"06:30\", 380, 390, 10],\n", + " [143, \"06:21\", \"06:49\", 381, 409, 28],\n", + " [144, \"06:22\", \"07:06\", 382, 426, 44],\n", + " [145, \"06:24\", \"07:50\", 384, 470, 86],\n", + " [146, \"06:24\", \"06:57\", 384, 417, 33],\n", + " [147, \"06:26\", \"07:45\", 386, 465, 79],\n", + " [148, \"06:26\", \"07:10\", 386, 430, 44],\n", + " [149, \"06:27\", \"06:44\", 387, 404, 17],\n", + " [150, \"06:28\", \"06:53\", 388, 413, 25],\n", + " [151, \"06:28\", \"07:14\", 388, 434, 46],\n", + " [152, \"06:29\", \"07:03\", 389, 423, 34],\n", + " [153, \"06:30\", \"06:40\", 390, 400, 10],\n", + " [154, \"06:30\", \"07:37\", 390, 457, 67],\n", + " [155, \"06:31\", \"06:43\", 391, 403, 12],\n", + " [156, \"06:33\", \"07:14\", 393, 434, 41],\n", + " [157, \"06:33\", \"07:53\", 393, 473, 80],\n", + " [158, \"06:34\", \"08:16\", 394, 496, 102],\n", + " [159, \"06:34\", \"07:09\", 394, 429, 35],\n", + " [160, \"06:34\", \"07:07\", 394, 427, 33],\n", + " [161, \"06:36\", \"07:21\", 396, 441, 45],\n", + " [162, \"06:37\", \"07:22\", 397, 442, 45],\n", + " [163, \"06:37\", \"06:54\", 397, 414, 17],\n", + " [164, \"06:38\", \"07:30\", 398, 450, 52],\n", + " [165, \"06:38\", \"07:18\", 398, 438, 40],\n", + " [166, \"06:39\", \"07:33\", 399, 453, 54],\n", + " [167, \"06:40\", \"07:52\", 400, 472, 72],\n", + " [168, \"06:40\", \"06:50\", 400, 410, 10],\n", + " [169, \"06:40\", \"07:22\", 400, 442, 42],\n", + " [170, \"06:40\", \"06:56\", 400, 416, 16],\n", + " [171, \"06:41\", \"08:00\", 401, 480, 79],\n", + " [172, \"06:42\", \"07:26\", 402, 446, 44],\n", + " [173, \"06:42\", \"07:13\", 402, 433, 31],\n", + " [174, \"06:43\", \"07:08\", 403, 428, 25],\n", + " [175, \"06:43\", \"07:30\", 403, 450, 47],\n", + " [176, \"06:43\", \"07:23\", 403, 443, 40],\n", + " [177, \"06:44\", \"07:17\", 404, 437, 33],\n", + " [178, \"06:44\", \"08:13\", 404, 493, 89],\n", + " [179, \"06:46\", \"07:01\", 406, 421, 15],\n", + " [180, \"06:46\", \"06:58\", 406, 418, 12],\n", + " [181, \"06:47\", 
\"07:04\", 407, 424, 17],\n", + " [182, \"06:48\", \"08:15\", 408, 495, 87],\n", + " [183, \"06:48\", \"07:34\", 408, 454, 46],\n", + " [184, \"06:48\", \"07:37\", 408, 457, 49],\n", + " [185, \"06:49\", \"07:43\", 409, 463, 54],\n", + " [186, \"06:50\", \"08:00\", 410, 480, 70],\n", + " [187, \"06:50\", \"07:00\", 410, 420, 10],\n", + " [188, \"06:50\", \"07:05\", 410, 425, 15],\n", + " [189, \"06:51\", \"07:18\", 411, 438, 27],\n", + " [190, \"06:52\", \"07:36\", 412, 456, 44],\n", + " [191, \"06:53\", \"07:37\", 413, 457, 44],\n", + " [192, \"06:54\", \"08:20\", 414, 500, 86],\n", + " [193, \"06:54\", \"07:27\", 414, 447, 33],\n", + " [194, \"06:54\", \"07:20\", 414, 440, 26],\n", + " [195, \"06:56\", \"08:23\", 416, 503, 87],\n", + " [196, \"06:57\", \"07:12\", 417, 432, 15],\n", + " [197, \"06:57\", \"07:58\", 417, 478, 61],\n", + " [198, \"06:57\", \"07:45\", 417, 465, 48],\n", + " [199, \"06:57\", \"07:40\", 417, 460, 43],\n", + " [200, \"06:58\", \"07:23\", 418, 443, 25],\n", + " [201, \"06:59\", \"07:53\", 419, 473, 54],\n", + " [202, \"06:59\", \"08:07\", 419, 487, 68],\n", + " [203, \"07:00\", \"07:10\", 420, 430, 10],\n", + " [204, \"07:00\", \"07:16\", 420, 436, 16],\n", + " [205, \"07:01\", \"08:30\", 421, 510, 89],\n", + " [206, \"07:01\", \"07:13\", 421, 433, 12],\n", + " [207, \"07:01\", \"07:43\", 421, 463, 42],\n", + " [208, \"07:03\", \"08:30\", 423, 510, 87],\n", + " [209, \"07:04\", \"07:37\", 424, 457, 33],\n", + " [210, \"07:04\", \"07:44\", 424, 464, 40],\n", + " [211, \"07:05\", \"07:52\", 425, 472, 47],\n", + " [212, \"07:05\", \"08:05\", 425, 485, 60],\n", + " [213, \"07:05\", \"07:46\", 425, 466, 41],\n", + " [214, \"07:06\", \"07:51\", 426, 471, 45],\n", + " [215, \"07:07\", \"08:08\", 427, 488, 61],\n", + " [216, \"07:07\", \"07:52\", 427, 472, 45],\n", + " [217, \"07:07\", \"08:16\", 427, 496, 69],\n", + " [218, \"07:07\", \"07:27\", 427, 447, 20],\n", + " [219, \"07:09\", \"07:50\", 429, 470, 41],\n", + " [220, \"07:09\", \"08:40\", 429, 520, 91],\n", + " [221, \"07:09\", \"08:03\", 429, 483, 54],\n", + " [222, \"07:10\", \"07:20\", 430, 440, 10],\n", + " [223, \"07:11\", \"08:36\", 431, 516, 85],\n", + " [224, \"07:12\", \"08:00\", 432, 480, 48],\n", + " [225, \"07:12\", \"07:47\", 432, 467, 35],\n", + " [226, \"07:13\", \"07:54\", 433, 474, 41],\n", + " [227, \"07:13\", \"07:38\", 433, 458, 25],\n", + " [228, \"07:14\", \"07:59\", 434, 479, 45],\n", + " [229, \"07:16\", \"08:50\", 436, 530, 94],\n", + " [230, \"07:16\", \"07:28\", 436, 448, 12],\n", + " [231, \"07:17\", \"07:35\", 437, 455, 18],\n", + " [232, \"07:17\", \"07:58\", 437, 478, 41],\n", + " [233, \"07:18\", \"08:06\", 438, 486, 48],\n", + " [234, \"07:18\", \"08:44\", 438, 524, 86],\n", + " [235, \"07:19\", \"08:13\", 439, 493, 54],\n", + " [236, \"07:20\", \"08:02\", 440, 482, 42],\n", + " [237, \"07:20\", \"08:07\", 440, 487, 47],\n", + " [238, \"07:20\", \"07:30\", 440, 450, 10],\n", + " [239, \"07:20\", \"07:57\", 440, 477, 37],\n", + " [240, \"07:20\", \"07:36\", 440, 456, 16],\n", + " [241, \"07:21\", \"07:48\", 441, 468, 27],\n", + " [242, \"07:22\", \"08:06\", 442, 486, 44],\n", + " [243, \"07:22\", \"08:25\", 442, 505, 63],\n", + " [244, \"07:24\", \"08:27\", 444, 507, 63],\n", + " [245, \"07:24\", \"08:05\", 444, 485, 41],\n", + " [246, \"07:26\", \"08:23\", 446, 503, 57],\n", + " [247, \"07:26\", \"08:52\", 446, 532, 86],\n", + " [248, \"07:27\", \"08:07\", 447, 487, 40],\n", + " [249, \"07:27\", \"07:42\", 447, 462, 15],\n", + " [250, \"07:27\", \"08:15\", 447, 495, 48],\n", + " 
[251, \"07:28\", \"07:53\", 448, 473, 25],\n", + " [252, \"07:28\", \"08:09\", 448, 489, 41],\n", + " [253, \"07:28\", \"07:38\", 448, 458, 10],\n", + " [254, \"07:30\", \"08:35\", 450, 515, 65],\n", + " [255, \"07:31\", \"07:43\", 451, 463, 12],\n", + " [256, \"07:32\", \"08:13\", 452, 493, 41],\n", + " [257, \"07:34\", \"09:00\", 454, 540, 86],\n", + " [258, \"07:34\", \"08:33\", 454, 513, 59],\n", + " [259, \"07:34\", \"09:04\", 454, 544, 90],\n", + " [260, \"07:35\", \"08:22\", 455, 502, 47],\n", + " [261, \"07:35\", \"07:45\", 455, 465, 10],\n", + " [262, \"07:35\", \"08:16\", 455, 496, 41],\n", + " [263, \"07:36\", \"08:17\", 456, 497, 41],\n", + " [264, \"07:36\", \"08:36\", 456, 516, 60],\n", + " [265, \"07:37\", \"07:50\", 457, 470, 13],\n", + " [266, \"07:40\", \"07:56\", 460, 476, 16],\n", + " [267, \"07:40\", \"08:20\", 460, 500, 40],\n", + " [268, \"07:40\", \"08:45\", 460, 525, 65],\n", + " [269, \"07:41\", \"08:39\", 461, 519, 58],\n", + " [270, \"07:41\", \"07:51\", 461, 471, 10],\n", + " [271, \"07:42\", \"08:30\", 462, 510, 48],\n", + " [272, \"07:42\", \"08:21\", 462, 501, 39],\n", + " [273, \"07:43\", \"08:08\", 463, 488, 25],\n", + " [274, \"07:43\", \"08:24\", 463, 504, 41],\n", + " [275, \"07:44\", \"09:10\", 464, 550, 86],\n", + " [276, \"07:44\", \"08:43\", 464, 523, 59],\n", + " [277, \"07:46\", \"08:28\", 466, 508, 42],\n", + " [278, \"07:46\", \"07:58\", 466, 478, 12],\n", + " [279, \"07:47\", \"08:00\", 467, 480, 13],\n", + " [280, \"07:48\", \"09:14\", 468, 554, 86],\n", + " [281, \"07:49\", \"08:32\", 469, 512, 43],\n", + " [282, \"07:50\", \"08:55\", 470, 535, 65],\n", + " [283, \"07:50\", \"08:00\", 470, 480, 10],\n", + " [284, \"07:50\", \"08:37\", 470, 517, 47],\n", + " [285, \"07:50\", \"08:26\", 470, 506, 36],\n", + " [286, \"07:51\", \"08:18\", 471, 498, 27],\n", + " [287, \"07:52\", \"08:21\", 472, 501, 29],\n", + " [288, \"07:53\", \"08:35\", 473, 515, 42],\n", + " [289, \"07:54\", \"09:19\", 474, 559, 85],\n", + " [290, \"07:55\", \"08:53\", 475, 533, 58],\n", + " [291, \"07:56\", \"08:54\", 476, 534, 58],\n", + " [292, \"07:57\", \"08:39\", 477, 519, 42],\n", + " [293, \"07:57\", \"08:10\", 477, 490, 13],\n", + " [294, \"07:58\", \"08:45\", 478, 525, 47],\n", + " [295, \"07:58\", \"08:23\", 478, 503, 25],\n", + " [296, \"08:00\", \"08:10\", 480, 490, 10],\n", + " [297, \"08:00\", \"09:05\", 480, 545, 65],\n", + " [298, \"08:00\", \"08:16\", 480, 496, 16],\n", + " [299, \"08:00\", \"08:35\", 480, 515, 35],\n", + " [300, \"08:01\", \"08:13\", 481, 493, 12],\n", + " [301, \"08:01\", \"08:43\", 481, 523, 42],\n", + " [302, \"08:03\", \"09:26\", 483, 566, 83],\n", + " [303, \"08:04\", \"09:29\", 484, 569, 85],\n", + " [304, \"08:05\", \"08:21\", 485, 501, 16],\n", + " [305, \"08:05\", \"08:47\", 485, 527, 42],\n", + " [306, \"08:06\", \"08:51\", 486, 531, 45],\n", + " [307, \"08:06\", \"09:03\", 486, 543, 57],\n", + " [308, \"08:07\", \"08:20\", 487, 500, 13],\n", + " [309, \"08:08\", \"08:55\", 488, 535, 47],\n", + " [310, \"08:08\", \"08:50\", 488, 530, 42],\n", + " [311, \"08:10\", \"08:45\", 490, 525, 35],\n", + " [312, \"08:10\", \"09:15\", 490, 555, 65],\n", + " [313, \"08:10\", \"08:20\", 490, 500, 10],\n", + " [314, \"08:11\", \"09:41\", 491, 581, 90],\n", + " [315, \"08:12\", \"08:55\", 492, 535, 43],\n", + " [316, \"08:13\", \"08:38\", 493, 518, 25],\n", + " [317, \"08:14\", \"09:38\", 494, 578, 84],\n", + " [318, \"08:15\", \"08:30\", 495, 510, 15],\n", + " [319, \"08:16\", \"08:30\", 496, 510, 14],\n", + " [320, \"08:16\", \"08:28\", 496, 
508, 12],\n", + " [321, \"08:16\", \"09:00\", 496, 540, 44],\n", + " [322, \"08:17\", \"09:13\", 497, 553, 56],\n", + " [323, \"08:18\", \"09:16\", 498, 556, 58],\n", + " [324, \"08:18\", \"09:05\", 498, 545, 47],\n", + " [325, \"08:20\", \"08:36\", 500, 516, 16],\n", + " [326, \"08:20\", \"08:55\", 500, 535, 35],\n", + " [327, \"08:20\", \"09:05\", 500, 545, 45],\n", + " [328, \"08:20\", \"08:30\", 500, 510, 10],\n", + " [329, \"08:20\", \"09:25\", 500, 565, 65],\n", + " [330, \"08:21\", \"08:38\", 501, 518, 17],\n", + " [331, \"08:21\", \"08:47\", 501, 527, 26],\n", + " [332, \"08:22\", \"08:45\", 502, 525, 23],\n", + " [333, \"08:23\", \"09:10\", 503, 550, 47],\n", + " [334, \"08:24\", \"09:48\", 504, 588, 84],\n", + " [335, \"08:26\", \"08:46\", 506, 526, 20],\n", + " [336, \"08:27\", \"09:07\", 507, 547, 40],\n", + " [337, \"08:28\", \"08:50\", 508, 530, 22],\n", + " [338, \"08:28\", \"09:56\", 508, 596, 88],\n", + " [339, \"08:28\", \"09:23\", 508, 563, 55],\n", + " [340, \"08:29\", \"09:20\", 509, 560, 51],\n", + " [341, \"08:30\", \"09:05\", 510, 545, 35],\n", + " [342, \"08:30\", \"08:45\", 510, 525, 15],\n", + " [343, \"08:30\", \"08:40\", 510, 520, 10],\n", + " [344, \"08:30\", \"09:35\", 510, 575, 65],\n", + " [345, \"08:31\", \"08:43\", 511, 523, 12],\n", + " [346, \"08:31\", \"09:13\", 511, 553, 42],\n", + " [347, \"08:34\", \"09:58\", 514, 598, 84],\n", + " [348, \"08:35\", \"08:55\", 515, 535, 20],\n", + " [349, \"08:35\", \"09:15\", 515, 555, 40],\n", + " [350, \"08:35\", \"08:45\", 515, 525, 10],\n", + " [351, \"08:36\", \"08:46\", 516, 526, 10],\n", + " [352, \"08:36\", \"09:00\", 516, 540, 24],\n", + " [353, \"08:38\", \"09:20\", 518, 560, 42],\n", + " [354, \"08:38\", \"09:35\", 518, 575, 57],\n", + " [355, \"08:38\", \"09:14\", 518, 554, 36],\n", + " [356, \"08:39\", \"09:33\", 519, 573, 54],\n", + " [357, \"08:40\", \"09:45\", 520, 585, 65],\n", + " [358, \"08:40\", \"08:50\", 520, 530, 10],\n", + " [359, \"08:40\", \"08:56\", 520, 536, 16],\n", + " [360, \"08:42\", \"09:25\", 522, 565, 43],\n", + " [361, \"08:43\", \"09:08\", 523, 548, 25],\n", + " [362, \"08:44\", \"09:35\", 524, 575, 51],\n", + " [363, \"08:45\", \"09:00\", 525, 540, 15],\n", + " [364, \"08:45\", \"09:05\", 525, 545, 20],\n", + " [365, \"08:46\", \"09:24\", 526, 564, 38],\n", + " [366, \"08:46\", \"08:58\", 526, 538, 12],\n", + " [367, \"08:46\", \"09:30\", 526, 570, 44],\n", + " [368, \"08:48\", \"10:11\", 528, 611, 83],\n", + " [369, \"08:48\", \"10:13\", 528, 613, 85],\n", + " [370, \"08:49\", \"09:43\", 529, 583, 54],\n", + " [371, \"08:50\", \"09:30\", 530, 570, 40],\n", + " [372, \"08:50\", \"10:00\", 530, 600, 70],\n", + " [373, \"08:50\", \"09:00\", 530, 540, 10],\n", + " [374, \"08:51\", \"09:17\", 531, 557, 26],\n", + " [375, \"08:53\", \"09:20\", 533, 560, 27],\n", + " [376, \"08:53\", \"09:35\", 533, 575, 42],\n", + " [377, \"08:55\", \"09:34\", 535, 574, 39],\n", + " [378, \"08:55\", \"09:15\", 535, 555, 20],\n", + " [379, \"08:58\", \"09:38\", 538, 578, 40],\n", + " [380, \"08:58\", \"10:26\", 538, 626, 88],\n", + " [381, \"08:59\", \"09:53\", 539, 593, 54],\n", + " [382, \"08:59\", \"09:50\", 539, 590, 51],\n", + " [383, \"09:00\", \"09:35\", 540, 575, 35],\n", + " [384, \"09:00\", \"09:16\", 540, 556, 16],\n", + " [385, \"09:00\", \"09:10\", 540, 550, 10],\n", + " [386, \"09:00\", \"09:16\", 540, 556, 16],\n", + " [387, \"09:01\", \"09:13\", 541, 553, 12],\n", + " [388, \"09:03\", \"09:45\", 543, 585, 42],\n", + " [389, \"09:03\", \"10:28\", 543, 628, 85],\n", + " [390, \"09:05\", 
\"09:44\", 545, 584, 39],\n", + " [391, \"09:05\", \"09:25\", 545, 565, 20],\n", + " [392, \"09:08\", \"09:53\", 548, 593, 45],\n", + " [393, \"09:08\", \"10:04\", 548, 604, 56],\n", + " [394, \"09:09\", \"10:03\", 549, 603, 54],\n", + " [395, \"09:10\", \"10:15\", 550, 615, 65],\n", + " [396, \"09:10\", \"09:20\", 550, 560, 10],\n", + " [397, \"09:11\", \"09:38\", 551, 578, 27],\n", + " [398, \"09:13\", \"10:00\", 553, 600, 47],\n", + " [399, \"09:14\", \"09:39\", 554, 579, 25],\n", + " [400, \"09:14\", \"10:05\", 554, 605, 51],\n", + " [401, \"09:15\", \"09:54\", 555, 594, 39],\n", + " [402, \"09:16\", \"09:28\", 556, 568, 12],\n", + " [403, \"09:18\", \"10:43\", 558, 643, 85],\n", + " [404, \"09:18\", \"10:41\", 558, 641, 83],\n", + " [405, \"09:18\", \"09:58\", 558, 598, 40],\n", + " [406, \"09:19\", \"10:13\", 559, 613, 54],\n", + " [407, \"09:20\", \"09:30\", 560, 570, 10],\n", + " [408, \"09:20\", \"09:36\", 560, 576, 16],\n", + " [409, \"09:21\", \"09:47\", 561, 587, 26],\n", + " [410, \"09:23\", \"10:30\", 563, 630, 67],\n", + " [411, \"09:23\", \"10:05\", 563, 605, 42],\n", + " [412, \"09:23\", \"09:49\", 563, 589, 26],\n", + " [413, \"09:24\", \"09:35\", 564, 575, 11],\n", + " [414, \"09:25\", \"09:35\", 565, 575, 10],\n", + " [415, \"09:25\", \"10:04\", 565, 604, 39],\n", + " [416, \"09:28\", \"10:08\", 568, 608, 40],\n", + " [417, \"09:29\", \"09:45\", 569, 585, 16],\n", + " [418, \"09:29\", \"10:20\", 569, 620, 51],\n", + " [419, \"09:29\", \"10:56\", 569, 656, 87],\n", + " [420, \"09:29\", \"10:23\", 569, 623, 54],\n", + " [421, \"09:30\", \"09:40\", 570, 580, 10],\n", + " [422, \"09:31\", \"09:43\", 571, 583, 12],\n", + " [423, \"09:33\", \"10:58\", 573, 658, 85],\n", + " [424, \"09:33\", \"10:15\", 573, 615, 42],\n", + " [425, \"09:34\", \"09:45\", 574, 585, 11],\n", + " [426, \"09:35\", \"10:14\", 575, 614, 39],\n", + " [427, \"09:38\", \"10:45\", 578, 645, 67],\n", + " [428, \"09:39\", \"10:33\", 579, 633, 54],\n", + " [429, \"09:40\", \"09:56\", 580, 596, 16],\n", + " [430, \"09:40\", \"09:50\", 580, 590, 10],\n", + " [431, \"09:41\", \"10:08\", 581, 608, 27],\n", + " [432, \"09:41\", \"10:23\", 581, 623, 42],\n", + " [433, \"09:44\", \"10:35\", 584, 635, 51],\n", + " [434, \"09:44\", \"11:11\", 584, 671, 87],\n", + " [435, \"09:44\", \"09:55\", 584, 595, 11],\n", + " [436, \"09:45\", \"10:24\", 585, 624, 39],\n", + " [437, \"09:46\", \"09:58\", 586, 598, 12],\n", + " [438, \"09:48\", \"10:30\", 588, 630, 42],\n", + " [439, \"09:48\", \"11:13\", 588, 673, 85],\n", + " [440, \"09:48\", \"10:04\", 588, 604, 16],\n", + " [441, \"09:49\", \"10:43\", 589, 643, 54],\n", + " [442, \"09:50\", \"10:00\", 590, 600, 10],\n", + " [443, \"09:51\", \"10:17\", 591, 617, 26],\n", + " [444, \"09:53\", \"10:49\", 593, 649, 56],\n", + " [445, \"09:53\", \"11:00\", 593, 660, 67],\n", + " [446, \"09:54\", \"10:05\", 594, 605, 11],\n", + " [447, \"09:55\", \"10:34\", 595, 634, 39],\n", + " [448, \"09:56\", \"10:38\", 596, 638, 42],\n", + " [449, \"09:57\", \"10:20\", 597, 620, 23],\n", + " [450, \"09:59\", \"11:26\", 599, 686, 87],\n", + " [451, \"09:59\", \"10:50\", 599, 650, 51],\n", + " [452, \"09:59\", \"10:53\", 599, 653, 54],\n", + " [453, \"10:00\", \"10:16\", 600, 616, 16],\n", + " [454, \"10:00\", \"10:10\", 600, 610, 10],\n", + " [455, \"10:01\", \"10:13\", 601, 613, 12],\n", + " [456, \"10:03\", \"11:28\", 603, 688, 85],\n", + " [457, \"10:03\", \"10:45\", 603, 645, 42],\n", + " [458, \"10:04\", \"10:15\", 604, 615, 11],\n", + " [459, \"10:05\", \"10:44\", 605, 644, 39],\n", + " 
[460, \"10:08\", \"11:15\", 608, 675, 67],\n", + " [461, \"10:09\", \"11:03\", 609, 663, 54],\n", + " [462, \"10:10\", \"10:20\", 610, 620, 10],\n", + " [463, \"10:11\", \"10:38\", 611, 638, 27],\n", + " [464, \"10:11\", \"10:53\", 611, 653, 42],\n", + " [465, \"10:14\", \"11:05\", 614, 665, 51],\n", + " [466, \"10:14\", \"11:41\", 614, 701, 87],\n", + " [467, \"10:14\", \"10:25\", 614, 625, 11],\n", + " [468, \"10:15\", \"10:54\", 615, 654, 39],\n", + " [469, \"10:16\", \"10:28\", 616, 628, 12],\n", + " [470, \"10:18\", \"11:43\", 618, 703, 85],\n", + " [471, \"10:18\", \"11:00\", 618, 660, 42],\n", + " [472, \"10:19\", \"11:13\", 619, 673, 54],\n", + " [473, \"10:20\", \"10:30\", 620, 630, 10],\n", + " [474, \"10:20\", \"10:36\", 620, 636, 16],\n", + " [475, \"10:21\", \"10:47\", 621, 647, 26],\n", + " [476, \"10:23\", \"11:30\", 623, 690, 67],\n", + " [477, \"10:23\", \"10:45\", 623, 645, 22],\n", + " [478, \"10:24\", \"10:35\", 624, 635, 11],\n", + " [479, \"10:25\", \"11:04\", 625, 664, 39],\n", + " [480, \"10:26\", \"11:08\", 626, 668, 42],\n", + " [481, \"10:29\", \"11:20\", 629, 680, 51],\n", + " [482, \"10:29\", \"11:23\", 629, 683, 54],\n", + " [483, \"10:29\", \"11:56\", 629, 716, 87],\n", + " [484, \"10:30\", \"10:40\", 630, 640, 10],\n", + " [485, \"10:31\", \"10:43\", 631, 643, 12],\n", + " [486, \"10:33\", \"11:15\", 633, 675, 42],\n", + " [487, \"10:33\", \"11:58\", 633, 718, 85],\n", + " [488, \"10:34\", \"10:45\", 634, 645, 11],\n", + " [489, \"10:35\", \"11:14\", 635, 674, 39],\n", + " [490, \"10:38\", \"11:45\", 638, 705, 67],\n", + " [491, \"10:39\", \"11:33\", 639, 693, 54],\n", + " [492, \"10:40\", \"10:50\", 640, 650, 10],\n", + " [493, \"10:40\", \"10:56\", 640, 656, 16],\n", + " [494, \"10:41\", \"11:23\", 641, 683, 42],\n", + " [495, \"10:41\", \"11:08\", 641, 668, 27],\n", + " [496, \"10:44\", \"12:11\", 644, 731, 87],\n", + " [497, \"10:44\", \"11:35\", 644, 695, 51],\n", + " [498, \"10:44\", \"10:55\", 644, 655, 11],\n", + " [499, \"10:45\", \"11:24\", 645, 684, 39],\n", + " [500, \"10:46\", \"10:58\", 646, 658, 12],\n", + " [501, \"10:48\", \"12:13\", 648, 733, 85],\n", + " [502, \"10:48\", \"11:30\", 648, 690, 42],\n", + " [503, \"10:49\", \"11:43\", 649, 703, 54],\n", + " [504, \"10:50\", \"11:00\", 650, 660, 10],\n", + " [505, \"10:51\", \"11:17\", 651, 677, 26],\n", + " [506, \"10:53\", \"12:00\", 653, 720, 67],\n", + " [507, \"10:53\", \"11:20\", 653, 680, 27],\n", + " [508, \"10:54\", \"11:05\", 654, 665, 11],\n", + " [509, \"10:55\", \"11:34\", 655, 694, 39],\n", + " [510, \"10:56\", \"11:38\", 656, 698, 42],\n", + " [511, \"10:59\", \"11:14\", 659, 674, 15],\n", + " [512, \"10:59\", \"12:26\", 659, 746, 87],\n", + " [513, \"10:59\", \"11:53\", 659, 713, 54],\n", + " [514, \"10:59\", \"11:50\", 659, 710, 51],\n", + " [515, \"11:00\", \"11:16\", 660, 676, 16],\n", + " [516, \"11:00\", \"11:10\", 660, 670, 10],\n", + " [517, \"11:01\", \"11:13\", 661, 673, 12],\n", + " [518, \"11:03\", \"11:45\", 663, 705, 42],\n", + " [519, \"11:03\", \"12:28\", 663, 748, 85],\n", + " [520, \"11:04\", \"11:15\", 664, 675, 11],\n", + " [521, \"11:05\", \"11:44\", 665, 704, 39],\n", + " [522, \"11:08\", \"12:15\", 668, 735, 67],\n", + " [523, \"11:09\", \"12:03\", 669, 723, 54],\n", + " [524, \"11:10\", \"11:20\", 670, 680, 10],\n", + " [525, \"11:11\", \"11:38\", 671, 698, 27],\n", + " [526, \"11:11\", \"11:53\", 671, 713, 42],\n", + " [527, \"11:14\", \"11:25\", 674, 685, 11],\n", + " [528, \"11:14\", \"12:05\", 674, 725, 51],\n", + " [529, \"11:14\", \"12:38\", 674, 
758, 84],\n", + " [530, \"11:14\", \"12:41\", 674, 761, 87],\n", + " [531, \"11:15\", \"11:54\", 675, 714, 39],\n", + " [532, \"11:16\", \"11:28\", 676, 688, 12],\n", + " [533, \"11:18\", \"12:00\", 678, 720, 42],\n", + " [534, \"11:19\", \"12:13\", 679, 733, 54],\n", + " [535, \"11:20\", \"11:30\", 680, 690, 10],\n", + " [536, \"11:20\", \"11:36\", 680, 696, 16],\n", + " [537, \"11:21\", \"11:47\", 681, 707, 26],\n", + " [538, \"11:23\", \"12:30\", 683, 750, 67],\n", + " [539, \"11:23\", \"11:49\", 683, 709, 26],\n", + " [540, \"11:24\", \"12:48\", 684, 768, 84],\n", + " [541, \"11:24\", \"11:35\", 684, 695, 11],\n", + " [542, \"11:25\", \"12:04\", 685, 724, 39],\n", + " [543, \"11:26\", \"12:08\", 686, 728, 42],\n", + " [544, \"11:29\", \"11:44\", 689, 704, 15],\n", + " [545, \"11:29\", \"12:23\", 689, 743, 54],\n", + " [546, \"11:29\", \"12:20\", 689, 740, 51],\n", + " [547, \"11:29\", \"12:54\", 689, 774, 85],\n", + " [548, \"11:30\", \"11:40\", 690, 700, 10],\n", + " [549, \"11:31\", \"11:43\", 691, 703, 12],\n", + " [550, \"11:33\", \"12:15\", 693, 735, 42],\n", + " [551, \"11:34\", \"12:58\", 694, 778, 84],\n", + " [552, \"11:34\", \"11:45\", 694, 705, 11],\n", + " [553, \"11:35\", \"12:14\", 695, 734, 39],\n", + " [554, \"11:38\", \"12:45\", 698, 765, 67],\n", + " [555, \"11:39\", \"12:33\", 699, 753, 54],\n", + " [556, \"11:40\", \"11:56\", 700, 716, 16],\n", + " [557, \"11:40\", \"11:50\", 700, 710, 10],\n", + " [558, \"11:41\", \"12:08\", 701, 728, 27],\n", + " [559, \"11:41\", \"12:23\", 701, 743, 42],\n", + " [560, \"11:44\", \"11:55\", 704, 715, 11],\n", + " [561, \"11:44\", \"13:14\", 704, 794, 90],\n", + " [562, \"11:44\", \"13:08\", 704, 788, 84],\n", + " [563, \"11:44\", \"12:35\", 704, 755, 51],\n", + " [564, \"11:45\", \"12:24\", 705, 744, 39],\n", + " [565, \"11:46\", \"11:58\", 706, 718, 12],\n", + " [566, \"11:48\", \"12:30\", 708, 750, 42],\n", + " [567, \"11:49\", \"12:43\", 709, 763, 54],\n", + " [568, \"11:50\", \"12:00\", 710, 720, 10],\n", + " [569, \"11:51\", \"12:17\", 711, 737, 26],\n", + " [570, \"11:53\", \"12:49\", 713, 769, 56],\n", + " [571, \"11:53\", \"13:00\", 713, 780, 67],\n", + " [572, \"11:54\", \"13:18\", 714, 798, 84],\n", + " [573, \"11:54\", \"12:05\", 714, 725, 11],\n", + " [574, \"11:55\", \"12:40\", 715, 760, 45],\n", + " [575, \"11:55\", \"12:34\", 715, 754, 39],\n", + " [576, \"11:56\", \"12:35\", 716, 755, 39],\n", + " [577, \"11:57\", \"12:20\", 717, 740, 23],\n", + " [578, \"11:58\", \"12:29\", 718, 749, 31],\n", + " [579, \"11:59\", \"12:50\", 719, 770, 51],\n", + " [580, \"11:59\", \"12:53\", 719, 773, 54],\n", + " [581, \"11:59\", \"13:24\", 719, 804, 85],\n", + " [582, \"11:59\", \"12:14\", 719, 734, 15],\n", + " [583, \"12:00\", \"12:16\", 720, 736, 16],\n", + " [584, \"12:00\", \"12:10\", 720, 730, 10],\n", + " [585, \"12:01\", \"12:45\", 721, 765, 44],\n", + " [586, \"12:01\", \"12:13\", 721, 733, 12],\n", + " [587, \"12:03\", \"12:50\", 723, 770, 47],\n", + " [588, \"12:04\", \"12:15\", 724, 735, 11],\n", + " [589, \"12:04\", \"13:04\", 724, 784, 60],\n", + " [590, \"12:04\", \"13:28\", 724, 808, 84],\n", + " [591, \"12:05\", \"12:44\", 725, 764, 39],\n", + " [592, \"12:08\", \"13:11\", 728, 791, 63],\n", + " [593, \"12:08\", \"12:39\", 728, 759, 31],\n", + " [594, \"12:09\", \"13:03\", 729, 783, 54],\n", + " [595, \"12:10\", \"12:20\", 730, 740, 10],\n", + " [596, \"12:11\", \"12:55\", 731, 775, 44],\n", + " [597, \"12:11\", \"12:38\", 731, 758, 27],\n", + " [598, \"12:14\", \"13:05\", 734, 785, 51],\n", + " [599, \"12:14\", 
\"12:25\", 734, 745, 11],\n", + " [600, \"12:14\", \"13:44\", 734, 824, 90],\n", + " [601, \"12:14\", \"13:38\", 734, 818, 84],\n", + " [602, \"12:15\", \"12:54\", 735, 774, 39],\n", + " [603, \"12:16\", \"12:28\", 736, 748, 12],\n", + " [604, \"12:18\", \"13:00\", 738, 780, 42],\n", + " [605, \"12:19\", \"13:13\", 739, 793, 54],\n", + " [606, \"12:20\", \"12:30\", 740, 750, 10],\n", + " [607, \"12:20\", \"13:31\", 740, 811, 71],\n", + " [608, \"12:20\", \"12:30\", 740, 750, 10],\n", + " [609, \"12:20\", \"12:36\", 740, 756, 16],\n", + " [610, \"12:21\", \"12:47\", 741, 767, 26],\n", + " [611, \"12:23\", \"12:45\", 743, 765, 22],\n", + " [612, \"12:24\", \"12:35\", 744, 755, 11],\n", + " [613, \"12:24\", \"13:48\", 744, 828, 84],\n", + " [614, \"12:25\", \"13:10\", 745, 790, 45],\n", + " [615, \"12:25\", \"13:04\", 745, 784, 39],\n", + " [616, \"12:26\", \"13:05\", 746, 785, 39],\n", + " [617, \"12:28\", \"13:54\", 748, 834, 86],\n", + " [618, \"12:28\", \"12:38\", 748, 758, 10],\n", + " [619, \"12:28\", \"13:15\", 748, 795, 47],\n", + " [620, \"12:29\", \"13:23\", 749, 803, 54],\n", + " [621, \"12:30\", \"13:41\", 750, 821, 71],\n", + " [622, \"12:30\", \"12:40\", 750, 760, 10],\n", + " [623, \"12:31\", \"13:15\", 751, 795, 44],\n", + " [624, \"12:31\", \"12:43\", 751, 763, 12],\n", + " [625, \"12:33\", \"12:48\", 753, 768, 15],\n", + " [626, \"12:33\", \"13:20\", 753, 800, 47],\n", + " [627, \"12:34\", \"13:58\", 754, 838, 84],\n", + " [628, \"12:34\", \"13:34\", 754, 814, 60],\n", + " [629, \"12:34\", \"12:45\", 754, 765, 11],\n", + " [630, \"12:35\", \"13:14\", 755, 794, 39],\n", + " [631, \"12:38\", \"13:25\", 758, 805, 47],\n", + " [632, \"12:38\", \"13:25\", 758, 805, 47],\n", + " [633, \"12:38\", \"14:04\", 758, 844, 86],\n", + " [634, \"12:39\", \"13:33\", 759, 813, 54],\n", + " [635, \"12:40\", \"13:51\", 760, 831, 71],\n", + " [636, \"12:40\", \"12:50\", 760, 770, 10],\n", + " [637, \"12:40\", \"12:56\", 760, 776, 16],\n", + " [638, \"12:41\", \"13:08\", 761, 788, 27],\n", + " [639, \"12:43\", \"13:30\", 763, 810, 47],\n", + " [640, \"12:44\", \"12:55\", 764, 775, 11],\n", + " [641, \"12:44\", \"14:08\", 764, 848, 84],\n", + " [642, \"12:45\", \"13:24\", 765, 804, 39],\n", + " [643, \"12:46\", \"12:58\", 766, 778, 12],\n", + " [644, \"12:46\", \"13:21\", 766, 801, 35],\n", + " [645, \"12:48\", \"14:14\", 768, 854, 86],\n", + " [646, \"12:48\", \"13:35\", 768, 815, 47],\n", + " [647, \"12:48\", \"12:58\", 768, 778, 10],\n", + " [648, \"12:48\", \"13:35\", 768, 815, 47],\n", + " [649, \"12:49\", \"13:43\", 769, 823, 54],\n", + " [650, \"12:50\", \"14:01\", 770, 841, 71],\n", + " [651, \"12:50\", \"13:00\", 770, 780, 10],\n", + " [652, \"12:50\", \"13:00\", 770, 780, 10],\n", + " [653, \"12:51\", \"13:17\", 771, 797, 26],\n", + " [654, \"12:53\", \"13:20\", 773, 800, 27],\n", + " [655, \"12:53\", \"13:24\", 773, 804, 31],\n", + " [656, \"12:53\", \"13:40\", 773, 820, 47],\n", + " [657, \"12:54\", \"14:18\", 774, 858, 84],\n", + " [658, \"12:54\", \"13:05\", 774, 785, 11],\n", + " [659, \"12:55\", \"13:34\", 775, 814, 39],\n", + " [660, \"12:58\", \"14:24\", 778, 864, 86],\n", + " [661, \"12:58\", \"13:25\", 778, 805, 27],\n", + " [662, \"12:58\", \"13:45\", 778, 825, 47],\n", + " [663, \"12:58\", \"13:45\", 778, 825, 47],\n", + " [664, \"12:59\", \"13:53\", 779, 833, 54],\n", + " [665, \"13:00\", \"13:10\", 780, 790, 10],\n", + " [666, \"13:00\", \"13:16\", 780, 796, 16],\n", + " [667, \"13:00\", \"14:11\", 780, 851, 71],\n", + " [668, \"13:01\", \"13:13\", 781, 793, 12],\n", + " 
[669, \"13:03\", \"13:34\", 783, 814, 31],\n", + " [670, \"13:03\", \"13:50\", 783, 830, 47],\n", + " [671, \"13:04\", \"13:15\", 784, 795, 11],\n", + " [672, \"13:04\", \"14:28\", 784, 868, 84],\n", + " [673, \"13:05\", \"13:44\", 785, 824, 39],\n", + " [674, \"13:08\", \"13:55\", 788, 835, 47],\n", + " [675, \"13:08\", \"14:34\", 788, 874, 86],\n", + " [676, \"13:08\", \"13:55\", 788, 835, 47],\n", + " [677, \"13:09\", \"14:03\", 789, 843, 54],\n", + " [678, \"13:10\", \"13:20\", 790, 800, 10],\n", + " [679, \"13:10\", \"14:21\", 790, 861, 71],\n", + " [680, \"13:13\", \"14:00\", 793, 840, 47],\n", + " [681, \"13:13\", \"13:40\", 793, 820, 27],\n", + " [682, \"13:14\", \"14:38\", 794, 878, 84],\n", + " [683, \"13:14\", \"13:25\", 794, 805, 11],\n", + " [684, \"13:15\", \"13:54\", 795, 834, 39],\n", + " [685, \"13:16\", \"13:28\", 796, 808, 12],\n", + " [686, \"13:18\", \"14:05\", 798, 845, 47],\n", + " [687, \"13:18\", \"14:44\", 798, 884, 86],\n", + " [688, \"13:18\", \"14:05\", 798, 845, 47],\n", + " [689, \"13:19\", \"14:13\", 799, 853, 54],\n", + " [690, \"13:20\", \"13:36\", 800, 816, 16],\n", + " [691, \"13:20\", \"14:31\", 800, 871, 71],\n", + " [692, \"13:20\", \"13:30\", 800, 810, 10],\n", + " [693, \"13:21\", \"13:47\", 801, 827, 26],\n", + " [694, \"13:23\", \"14:10\", 803, 850, 47],\n", + " [695, \"13:23\", \"13:49\", 803, 829, 26],\n", + " [696, \"13:24\", \"14:48\", 804, 888, 84],\n", + " [697, \"13:24\", \"13:35\", 804, 815, 11],\n", + " [698, \"13:25\", \"14:04\", 805, 844, 39],\n", + " [699, \"13:28\", \"14:15\", 808, 855, 47],\n", + " [700, \"13:28\", \"14:54\", 808, 894, 86],\n", + " [701, \"13:28\", \"13:55\", 808, 835, 27],\n", + " [702, \"13:28\", \"14:15\", 808, 855, 47],\n", + " [703, \"13:29\", \"14:23\", 809, 863, 54],\n", + " [704, \"13:30\", \"13:40\", 810, 820, 10],\n", + " [705, \"13:30\", \"14:41\", 810, 881, 71],\n", + " [706, \"13:31\", \"13:43\", 811, 823, 12],\n", + " [707, \"13:33\", \"14:20\", 813, 860, 47],\n", + " [708, \"13:34\", \"14:58\", 814, 898, 84],\n", + " [709, \"13:34\", \"13:45\", 814, 825, 11],\n", + " [710, \"13:35\", \"14:14\", 815, 854, 39],\n", + " [711, \"13:38\", \"14:25\", 818, 865, 47],\n", + " [712, \"13:38\", \"14:25\", 818, 865, 47],\n", + " [713, \"13:38\", \"15:04\", 818, 904, 86],\n", + " [714, \"13:39\", \"14:33\", 819, 873, 54],\n", + " [715, \"13:40\", \"13:50\", 820, 830, 10],\n", + " [716, \"13:40\", \"13:56\", 820, 836, 16],\n", + " [717, \"13:40\", \"14:51\", 820, 891, 71],\n", + " [718, \"13:43\", \"14:30\", 823, 870, 47],\n", + " [719, \"13:43\", \"14:10\", 823, 850, 27],\n", + " [720, \"13:44\", \"15:09\", 824, 909, 85],\n", + " [721, \"13:44\", \"13:55\", 824, 835, 11],\n", + " [722, \"13:45\", \"14:24\", 825, 864, 39],\n", + " [723, \"13:46\", \"13:58\", 826, 838, 12],\n", + " [724, \"13:48\", \"14:35\", 828, 875, 47],\n", + " [725, \"13:48\", \"15:14\", 828, 914, 86],\n", + " [726, \"13:48\", \"14:35\", 828, 875, 47],\n", + " [727, \"13:49\", \"14:43\", 829, 883, 54],\n", + " [728, \"13:50\", \"14:00\", 830, 840, 10],\n", + " [729, \"13:50\", \"15:01\", 830, 901, 71],\n", + " [730, \"13:51\", \"14:17\", 831, 857, 26],\n", + " [731, \"13:53\", \"14:40\", 833, 880, 47],\n", + " [732, \"13:53\", \"14:49\", 833, 889, 56],\n", + " [733, \"13:54\", \"14:05\", 834, 845, 11],\n", + " [734, \"13:54\", \"15:19\", 834, 919, 85],\n", + " [735, \"13:55\", \"14:34\", 835, 874, 39],\n", + " [736, \"13:57\", \"14:20\", 837, 860, 23],\n", + " [737, \"13:58\", \"15:24\", 838, 924, 86],\n", + " [738, \"13:58\", \"14:45\", 838, 
885, 47],\n", + " [739, \"13:58\", \"14:45\", 838, 885, 47],\n", + " [740, \"13:58\", \"14:25\", 838, 865, 27],\n", + " [741, \"13:59\", \"14:53\", 839, 893, 54],\n", + " [742, \"14:00\", \"14:16\", 840, 856, 16],\n", + " [743, \"14:00\", \"14:10\", 840, 850, 10],\n", + " [744, \"14:00\", \"15:11\", 840, 911, 71],\n", + " [745, \"14:01\", \"14:13\", 841, 853, 12],\n", + " [746, \"14:03\", \"14:50\", 843, 890, 47],\n", + " [747, \"14:04\", \"14:15\", 844, 855, 11],\n", + " [748, \"14:04\", \"15:29\", 844, 929, 85],\n", + " [749, \"14:05\", \"14:44\", 845, 884, 39],\n", + " [750, \"14:08\", \"14:55\", 848, 895, 47],\n", + " [751, \"14:08\", \"14:55\", 848, 895, 47],\n", + " [752, \"14:08\", \"15:34\", 848, 934, 86],\n", + " [753, \"14:09\", \"15:03\", 849, 903, 54],\n", + " [754, \"14:10\", \"15:21\", 850, 921, 71],\n", + " [755, \"14:10\", \"14:20\", 850, 860, 10],\n", + " [756, \"14:13\", \"15:00\", 853, 900, 47],\n", + " [757, \"14:13\", \"14:40\", 853, 880, 27],\n", + " [758, \"14:14\", \"15:40\", 854, 940, 86],\n", + " [759, \"14:14\", \"14:25\", 854, 865, 11],\n", + " [760, \"14:15\", \"14:54\", 855, 894, 39],\n", + " [761, \"14:16\", \"14:28\", 856, 868, 12],\n", + " [762, \"14:18\", \"15:05\", 858, 905, 47],\n", + " [763, \"14:18\", \"15:44\", 858, 944, 86],\n", + " [764, \"14:18\", \"15:05\", 858, 905, 47],\n", + " [765, \"14:19\", \"15:13\", 859, 913, 54],\n", + " [766, \"14:20\", \"15:31\", 860, 931, 71],\n", + " [767, \"14:20\", \"14:30\", 860, 870, 10],\n", + " [768, \"14:20\", \"14:36\", 860, 876, 16],\n", + " [769, \"14:21\", \"14:47\", 861, 887, 26],\n", + " [770, \"14:23\", \"15:10\", 863, 910, 47],\n", + " [771, \"14:23\", \"14:45\", 863, 885, 22],\n", + " [772, \"14:24\", \"15:50\", 864, 950, 86],\n", + " [773, \"14:24\", \"14:35\", 864, 875, 11],\n", + " [774, \"14:25\", \"15:02\", 865, 902, 37],\n", + " [775, \"14:26\", \"14:52\", 866, 892, 26],\n", + " [776, \"14:28\", \"15:15\", 868, 915, 47],\n", + " [777, \"14:28\", \"14:55\", 868, 895, 27],\n", + " [778, \"14:28\", \"15:54\", 868, 954, 86],\n", + " [779, \"14:28\", \"15:15\", 868, 915, 47],\n", + " [780, \"14:29\", \"15:23\", 869, 923, 54],\n", + " [781, \"14:30\", \"15:41\", 870, 941, 71],\n", + " [782, \"14:30\", \"14:40\", 870, 880, 10],\n", + " [783, \"14:31\", \"14:43\", 871, 883, 12],\n", + " [784, \"14:33\", \"15:20\", 873, 920, 47],\n", + " [785, \"14:34\", \"16:00\", 874, 960, 86],\n", + " [786, \"14:34\", \"14:45\", 874, 885, 11],\n", + " [787, \"14:35\", \"15:11\", 875, 911, 36],\n", + " [788, \"14:38\", \"15:25\", 878, 925, 47],\n", + " [789, \"14:38\", \"15:25\", 878, 925, 47],\n", + " [790, \"14:38\", \"16:04\", 878, 964, 86],\n", + " [791, \"14:39\", \"15:33\", 879, 933, 54],\n", + " [792, \"14:40\", \"14:50\", 880, 890, 10],\n", + " [793, \"14:40\", \"15:51\", 880, 951, 71],\n", + " [794, \"14:40\", \"14:56\", 880, 896, 16],\n", + " [795, \"14:43\", \"15:30\", 883, 930, 47],\n", + " [796, \"14:43\", \"15:10\", 883, 910, 27],\n", + " [797, \"14:44\", \"15:00\", 884, 900, 16],\n", + " [798, \"14:44\", \"16:10\", 884, 970, 86],\n", + " [799, \"14:45\", \"15:19\", 885, 919, 34],\n", + " [800, \"14:46\", \"14:58\", 886, 898, 12],\n", + " [801, \"14:48\", \"15:35\", 888, 935, 47],\n", + " [802, \"14:48\", \"15:35\", 888, 935, 47],\n", + " [803, \"14:48\", \"17:04\", 888, 1024, 136],\n", + " [804, \"14:49\", \"15:43\", 889, 943, 54],\n", + " [805, \"14:50\", \"16:01\", 890, 961, 71],\n", + " [806, \"14:50\", \"15:00\", 890, 900, 10],\n", + " [807, \"14:51\", \"15:17\", 891, 917, 26],\n", + " [808, 
\"14:52\", \"15:27\", 892, 927, 35],\n", + " [809, \"14:52\", \"15:21\", 892, 921, 29],\n", + " [810, \"14:53\", \"15:40\", 893, 940, 47],\n", + " [811, \"14:54\", \"15:08\", 894, 908, 14],\n", + " [812, \"14:54\", \"16:20\", 894, 980, 86],\n", + " [813, \"14:58\", \"16:24\", 898, 984, 86],\n", + " [814, \"14:58\", \"15:45\", 898, 945, 47],\n", + " [815, \"14:58\", \"15:25\", 898, 925, 27],\n", + " [816, \"14:58\", \"15:45\", 898, 945, 47],\n", + " [817, \"14:59\", \"15:53\", 899, 953, 54],\n", + " [818, \"15:00\", \"15:10\", 900, 910, 10],\n", + " [819, \"15:00\", \"15:35\", 900, 935, 35],\n", + " [820, \"15:00\", \"16:11\", 900, 971, 71],\n", + " [821, \"15:00\", \"15:16\", 900, 916, 16],\n", + " [822, \"15:01\", \"15:13\", 901, 913, 12],\n", + " [823, \"15:02\", \"15:16\", 902, 916, 14],\n", + " [824, \"15:03\", \"15:50\", 903, 950, 47],\n", + " [825, \"15:04\", \"16:30\", 904, 990, 86],\n", + " [826, \"15:08\", \"16:34\", 908, 994, 86],\n", + " [827, \"15:08\", \"15:55\", 908, 955, 47],\n", + " [828, \"15:08\", \"15:55\", 908, 955, 47],\n", + " [829, \"15:08\", \"15:45\", 908, 945, 37],\n", + " [830, \"15:09\", \"16:14\", 909, 974, 65],\n", + " [831, \"15:09\", \"16:03\", 909, 963, 54],\n", + " [832, \"15:10\", \"16:21\", 910, 981, 71],\n", + " [833, \"15:10\", \"15:20\", 910, 920, 10],\n", + " [834, \"15:11\", \"15:24\", 911, 924, 13],\n", + " [835, \"15:12\", \"15:36\", 912, 936, 24],\n", + " [836, \"15:13\", \"16:00\", 913, 960, 47],\n", + " [837, \"15:13\", \"15:40\", 913, 940, 27],\n", + " [838, \"15:14\", \"16:40\", 914, 1000, 86],\n", + " [839, \"15:16\", \"15:28\", 916, 928, 12],\n", + " [840, \"15:16\", \"15:55\", 916, 955, 39],\n", + " [841, \"15:18\", \"16:05\", 918, 965, 47],\n", + " [842, \"15:18\", \"16:44\", 918, 1004, 86],\n", + " [843, \"15:18\", \"16:05\", 918, 965, 47],\n", + " [844, \"15:19\", \"16:13\", 919, 973, 54],\n", + " [845, \"15:19\", \"15:34\", 919, 934, 15],\n", + " [846, \"15:20\", \"15:30\", 920, 930, 10],\n", + " [847, \"15:20\", \"16:31\", 920, 991, 71],\n", + " [848, \"15:20\", \"15:36\", 920, 936, 16],\n", + " [849, \"15:21\", \"15:47\", 921, 947, 26],\n", + " [850, \"15:21\", \"16:06\", 921, 966, 45],\n", + " [851, \"15:23\", \"16:10\", 923, 970, 47],\n", + " [852, \"15:24\", \"16:50\", 924, 1010, 86],\n", + " [853, \"15:24\", \"16:05\", 924, 965, 41],\n", + " [854, \"15:27\", \"15:51\", 927, 951, 24],\n", + " [855, \"15:27\", \"15:44\", 927, 944, 17],\n", + " [856, \"15:28\", \"16:15\", 928, 975, 47],\n", + " [857, \"15:28\", \"16:54\", 928, 1014, 86],\n", + " [858, \"15:28\", \"16:15\", 928, 975, 47],\n", + " [859, \"15:28\", \"15:55\", 928, 955, 27],\n", + " [860, \"15:29\", \"16:23\", 929, 983, 54],\n", + " [861, \"15:30\", \"16:41\", 930, 1001, 71],\n", + " [862, \"15:30\", \"15:40\", 930, 940, 10],\n", + " [863, \"15:31\", \"15:43\", 931, 943, 12],\n", + " [864, \"15:33\", \"16:20\", 933, 980, 47],\n", + " [865, \"15:34\", \"17:00\", 934, 1020, 86],\n", + " [866, \"15:34\", \"16:15\", 934, 975, 41],\n", + " [867, \"15:35\", \"15:54\", 935, 954, 19],\n", + " [868, \"15:36\", \"16:21\", 936, 981, 45],\n", + " [869, \"15:38\", \"16:25\", 938, 985, 47],\n", + " [870, \"15:38\", \"16:25\", 938, 985, 47],\n", + " [871, \"15:38\", \"16:39\", 938, 999, 61],\n", + " [872, \"15:39\", \"16:33\", 939, 993, 54],\n", + " [873, \"15:40\", \"15:50\", 940, 950, 10],\n", + " [874, \"15:40\", \"16:51\", 940, 1011, 71],\n", + " [875, \"15:40\", \"15:56\", 940, 956, 16],\n", + " [876, \"15:43\", \"16:10\", 943, 970, 27],\n", + " [877, \"15:43\", \"16:30\", 943, 
990, 47],\n", + " [878, \"15:44\", \"17:10\", 944, 1030, 86],\n", + " [879, \"15:44\", \"16:25\", 944, 985, 41],\n", + " [880, \"15:45\", \"16:04\", 945, 964, 19],\n", + " [881, \"15:46\", \"15:58\", 946, 958, 12],\n", + " [882, \"15:48\", \"16:35\", 948, 995, 47],\n", + " [883, \"15:48\", \"16:35\", 948, 995, 47],\n", + " [884, \"15:48\", \"17:14\", 948, 1034, 86],\n", + " [885, \"15:49\", \"16:43\", 949, 1003, 54],\n", + " [886, \"15:50\", \"16:00\", 950, 960, 10],\n", + " [887, \"15:50\", \"17:01\", 950, 1021, 71],\n", + " [888, \"15:51\", \"16:18\", 951, 978, 27],\n", + " [889, \"15:52\", \"16:36\", 952, 996, 44],\n", + " [890, \"15:53\", \"16:40\", 953, 1000, 47],\n", + " [891, \"15:54\", \"17:20\", 954, 1040, 86],\n", + " [892, \"15:54\", \"16:35\", 954, 995, 41],\n", + " [893, \"15:55\", \"16:14\", 955, 974, 19],\n", + " [894, \"15:58\", \"16:25\", 958, 985, 27],\n", + " [895, \"15:58\", \"16:45\", 958, 1005, 47],\n", + " [896, \"15:58\", \"16:45\", 958, 1005, 47],\n", + " [897, \"15:58\", \"17:24\", 958, 1044, 86],\n", + " [898, \"15:59\", \"17:11\", 959, 1031, 72],\n", + " [899, \"15:59\", \"16:53\", 959, 1013, 54],\n", + " [900, \"16:00\", \"16:10\", 960, 970, 10],\n", + " [901, \"16:00\", \"16:16\", 960, 976, 16],\n", + " [902, \"16:01\", \"16:13\", 961, 973, 12],\n", + " [903, \"16:03\", \"16:50\", 963, 1010, 47],\n", + " [904, \"16:04\", \"17:30\", 964, 1050, 86],\n", + " [905, \"16:04\", \"16:45\", 964, 1005, 41],\n", + " [906, \"16:05\", \"16:24\", 965, 984, 19],\n", + " [907, \"16:06\", \"16:51\", 966, 1011, 45],\n", + " [908, \"16:08\", \"16:55\", 968, 1015, 47],\n", + " [909, \"16:08\", \"17:34\", 968, 1054, 86],\n", + " [910, \"16:08\", \"16:55\", 968, 1015, 47],\n", + " [911, \"16:09\", \"17:03\", 969, 1023, 54],\n", + " [912, \"16:09\", \"17:21\", 969, 1041, 72],\n", + " [913, \"16:10\", \"16:20\", 970, 980, 10],\n", + " [914, \"16:13\", \"16:40\", 973, 1000, 27],\n", + " [915, \"16:13\", \"17:00\", 973, 1020, 47],\n", + " [916, \"16:14\", \"16:55\", 974, 1015, 41],\n", + " [917, \"16:14\", \"17:40\", 974, 1060, 86],\n", + " [918, \"16:15\", \"16:34\", 975, 994, 19],\n", + " [919, \"16:16\", \"16:28\", 976, 988, 12],\n", + " [920, \"16:18\", \"17:05\", 978, 1025, 47],\n", + " [921, \"16:18\", \"17:05\", 978, 1025, 47],\n", + " [922, \"16:18\", \"17:44\", 978, 1064, 86],\n", + " [923, \"16:19\", \"17:31\", 979, 1051, 72],\n", + " [924, \"16:19\", \"17:13\", 979, 1033, 54],\n", + " [925, \"16:20\", \"16:30\", 980, 990, 10],\n", + " [926, \"16:20\", \"16:36\", 980, 996, 16],\n", + " [927, \"16:21\", \"16:48\", 981, 1008, 27],\n", + " [928, \"16:22\", \"17:06\", 982, 1026, 44],\n", + " [929, \"16:23\", \"17:10\", 983, 1030, 47],\n", + " [930, \"16:24\", \"17:05\", 984, 1025, 41],\n", + " [931, \"16:24\", \"17:50\", 984, 1070, 86],\n", + " [932, \"16:25\", \"16:44\", 985, 1004, 19],\n", + " [933, \"16:28\", \"17:15\", 988, 1035, 47],\n", + " [934, \"16:28\", \"17:15\", 988, 1035, 47],\n", + " [935, \"16:28\", \"16:55\", 988, 1015, 27],\n", + " [936, \"16:28\", \"17:54\", 988, 1074, 86],\n", + " [937, \"16:29\", \"17:23\", 989, 1043, 54],\n", + " [938, \"16:29\", \"17:41\", 989, 1061, 72],\n", + " [939, \"16:30\", \"16:40\", 990, 1000, 10],\n", + " [940, \"16:31\", \"16:43\", 991, 1003, 12],\n", + " [941, \"16:33\", \"17:20\", 993, 1040, 47],\n", + " [942, \"16:34\", \"17:15\", 994, 1035, 41],\n", + " [943, \"16:34\", \"18:00\", 994, 1080, 86],\n", + " [944, \"16:35\", \"16:54\", 995, 1014, 19],\n", + " [945, \"16:36\", \"17:21\", 996, 1041, 45],\n", + " [946, \"16:38\", 
\"17:25\", 998, 1045, 47],\n", + " [947, \"16:38\", \"17:25\", 998, 1045, 47],\n", + " [948, \"16:38\", \"18:04\", 998, 1084, 86],\n", + " [949, \"16:39\", \"17:33\", 999, 1053, 54],\n", + " [950, \"16:39\", \"17:51\", 999, 1071, 72],\n", + " [951, \"16:40\", \"16:56\", 1000, 1016, 16],\n", + " [952, \"16:40\", \"16:50\", 1000, 1010, 10],\n", + " [953, \"16:43\", \"17:10\", 1003, 1030, 27],\n", + " [954, \"16:43\", \"17:30\", 1003, 1050, 47],\n", + " [955, \"16:44\", \"17:25\", 1004, 1045, 41],\n", + " [956, \"16:44\", \"18:10\", 1004, 1090, 86],\n", + " [957, \"16:45\", \"17:04\", 1005, 1024, 19],\n", + " [958, \"16:46\", \"16:58\", 1006, 1018, 12],\n", + " [959, \"16:48\", \"18:14\", 1008, 1094, 86],\n", + " [960, \"16:48\", \"17:35\", 1008, 1055, 47],\n", + " [961, \"16:48\", \"17:35\", 1008, 1055, 47],\n", + " [962, \"16:49\", \"18:01\", 1009, 1081, 72],\n", + " [963, \"16:49\", \"17:43\", 1009, 1063, 54],\n", + " [964, \"16:50\", \"17:00\", 1010, 1020, 10],\n", + " [965, \"16:51\", \"17:18\", 1011, 1038, 27],\n", + " [966, \"16:52\", \"17:36\", 1012, 1056, 44],\n", + " [967, \"16:53\", \"17:40\", 1013, 1060, 47],\n", + " [968, \"16:54\", \"18:20\", 1014, 1100, 86],\n", + " [969, \"16:54\", \"17:35\", 1014, 1055, 41],\n", + " [970, \"16:55\", \"17:14\", 1015, 1034, 19],\n", + " [971, \"16:58\", \"17:25\", 1018, 1045, 27],\n", + " [972, \"16:58\", \"17:45\", 1018, 1065, 47],\n", + " [973, \"16:58\", \"17:45\", 1018, 1065, 47],\n", + " [974, \"16:58\", \"18:24\", 1018, 1104, 86],\n", + " [975, \"16:59\", \"18:11\", 1019, 1091, 72],\n", + " [976, \"16:59\", \"17:53\", 1019, 1073, 54],\n", + " [977, \"17:00\", \"17:16\", 1020, 1036, 16],\n", + " [978, \"17:00\", \"17:10\", 1020, 1030, 10],\n", + " [979, \"17:01\", \"17:13\", 1021, 1033, 12],\n", + " [980, \"17:03\", \"17:50\", 1023, 1070, 47],\n", + " [981, \"17:04\", \"18:30\", 1024, 1110, 86],\n", + " [982, \"17:04\", \"17:45\", 1024, 1065, 41],\n", + " [983, \"17:05\", \"17:24\", 1025, 1044, 19],\n", + " [984, \"17:06\", \"17:51\", 1026, 1071, 45],\n", + " [985, \"17:08\", \"17:55\", 1028, 1075, 47],\n", + " [986, \"17:08\", \"17:55\", 1028, 1075, 47],\n", + " [987, \"17:08\", \"18:34\", 1028, 1114, 86],\n", + " [988, \"17:09\", \"18:03\", 1029, 1083, 54],\n", + " [989, \"17:09\", \"18:21\", 1029, 1101, 72],\n", + " [990, \"17:10\", \"17:20\", 1030, 1040, 10],\n", + " [991, \"17:13\", \"17:40\", 1033, 1060, 27],\n", + " [992, \"17:13\", \"18:00\", 1033, 1080, 47],\n", + " [993, \"17:14\", \"17:55\", 1034, 1075, 41],\n", + " [994, \"17:14\", \"18:40\", 1034, 1120, 86],\n", + " [995, \"17:15\", \"17:34\", 1035, 1054, 19],\n", + " [996, \"17:16\", \"17:28\", 1036, 1048, 12],\n", + " [997, \"17:18\", \"18:05\", 1038, 1085, 47],\n", + " [998, \"17:18\", \"18:05\", 1038, 1085, 47],\n", + " [999, \"17:18\", \"18:44\", 1038, 1124, 86],\n", + " [1000, \"17:19\", \"18:31\", 1039, 1111, 72],\n", + " [1001, \"17:19\", \"18:13\", 1039, 1093, 54],\n", + " [1002, \"17:20\", \"17:36\", 1040, 1056, 16],\n", + " [1003, \"17:20\", \"17:30\", 1040, 1050, 10],\n", + " [1004, \"17:21\", \"17:47\", 1041, 1067, 26],\n", + " [1005, \"17:22\", \"18:06\", 1042, 1086, 44],\n", + " [1006, \"17:23\", \"18:10\", 1043, 1090, 47],\n", + " [1007, \"17:24\", \"18:50\", 1044, 1130, 86],\n", + " [1008, \"17:24\", \"18:05\", 1044, 1085, 41],\n", + " [1009, \"17:25\", \"17:44\", 1045, 1064, 19],\n", + " [1010, \"17:28\", \"17:55\", 1048, 1075, 27],\n", + " [1011, \"17:28\", \"18:15\", 1048, 1095, 47],\n", + " [1012, \"17:28\", \"18:15\", 1048, 1095, 47],\n", + " [1013, 
\"17:28\", \"18:54\", 1048, 1134, 86],\n", + " [1014, \"17:29\", \"18:41\", 1049, 1121, 72],\n", + " [1015, \"17:29\", \"18:23\", 1049, 1103, 54],\n", + " [1016, \"17:30\", \"17:40\", 1050, 1060, 10],\n", + " [1017, \"17:31\", \"17:43\", 1051, 1063, 12],\n", + " [1018, \"17:33\", \"18:20\", 1053, 1100, 47],\n", + " [1019, \"17:34\", \"18:15\", 1054, 1095, 41],\n", + " [1020, \"17:34\", \"19:00\", 1054, 1140, 86],\n", + " [1021, \"17:35\", \"17:54\", 1055, 1074, 19],\n", + " [1022, \"17:36\", \"18:21\", 1056, 1101, 45],\n", + " [1023, \"17:38\", \"18:25\", 1058, 1105, 47],\n", + " [1024, \"17:38\", \"19:04\", 1058, 1144, 86],\n", + " [1025, \"17:38\", \"18:25\", 1058, 1105, 47],\n", + " [1026, \"17:39\", \"18:51\", 1059, 1131, 72],\n", + " [1027, \"17:39\", \"18:33\", 1059, 1113, 54],\n", + " [1028, \"17:40\", \"17:56\", 1060, 1076, 16],\n", + " [1029, \"17:40\", \"17:50\", 1060, 1070, 10],\n", + " [1030, \"17:43\", \"18:10\", 1063, 1090, 27],\n", + " [1031, \"17:43\", \"18:30\", 1063, 1110, 47],\n", + " [1032, \"17:44\", \"18:25\", 1064, 1105, 41],\n", + " [1033, \"17:44\", \"19:14\", 1064, 1154, 90],\n", + " [1034, \"17:45\", \"18:04\", 1065, 1084, 19],\n", + " [1035, \"17:46\", \"17:58\", 1066, 1078, 12],\n", + " [1036, \"17:48\", \"18:35\", 1068, 1115, 47],\n", + " [1037, \"17:48\", \"18:35\", 1068, 1115, 47],\n", + " [1038, \"17:48\", \"19:14\", 1068, 1154, 86],\n", + " [1039, \"17:49\", \"19:01\", 1069, 1141, 72],\n", + " [1040, \"17:49\", \"18:43\", 1069, 1123, 54],\n", + " [1041, \"17:50\", \"18:00\", 1070, 1080, 10],\n", + " [1042, \"17:51\", \"18:17\", 1071, 1097, 26],\n", + " [1043, \"17:52\", \"18:36\", 1072, 1116, 44],\n", + " [1044, \"17:53\", \"18:40\", 1073, 1120, 47],\n", + " [1045, \"17:54\", \"18:35\", 1074, 1115, 41],\n", + " [1046, \"17:54\", \"18:57\", 1074, 1137, 63],\n", + " [1047, \"17:55\", \"18:14\", 1075, 1094, 19],\n", + " [1048, \"17:58\", \"18:45\", 1078, 1125, 47],\n", + " [1049, \"17:58\", \"18:45\", 1078, 1125, 47],\n", + " [1050, \"17:58\", \"18:25\", 1078, 1105, 27],\n", + " [1051, \"17:58\", \"19:26\", 1078, 1166, 88],\n", + " [1052, \"17:59\", \"18:53\", 1079, 1133, 54],\n", + " [1053, \"18:00\", \"19:11\", 1080, 1151, 71],\n", + " [1054, \"18:00\", \"18:10\", 1080, 1090, 10],\n", + " [1055, \"18:00\", \"18:16\", 1080, 1096, 16],\n", + " [1056, \"18:01\", \"18:13\", 1081, 1093, 12],\n", + " [1057, \"18:03\", \"18:50\", 1083, 1130, 47],\n", + " [1058, \"18:04\", \"18:45\", 1084, 1125, 41],\n", + " [1059, \"18:04\", \"19:29\", 1084, 1169, 85],\n", + " [1060, \"18:05\", \"18:24\", 1085, 1104, 19],\n", + " [1061, \"18:06\", \"18:51\", 1086, 1131, 45],\n", + " [1062, \"18:08\", \"18:55\", 1088, 1135, 47],\n", + " [1063, \"18:08\", \"19:06\", 1088, 1146, 58],\n", + " [1064, \"18:08\", \"18:55\", 1088, 1135, 47],\n", + " [1065, \"18:09\", \"19:03\", 1089, 1143, 54],\n", + " [1066, \"18:10\", \"18:20\", 1090, 1100, 10],\n", + " [1067, \"18:10\", \"19:21\", 1090, 1161, 71],\n", + " [1068, \"18:13\", \"19:00\", 1093, 1140, 47],\n", + " [1069, \"18:13\", \"18:40\", 1093, 1120, 27],\n", + " [1070, \"18:14\", \"19:43\", 1094, 1183, 89],\n", + " [1071, \"18:14\", \"18:55\", 1094, 1135, 41],\n", + " [1072, \"18:15\", \"18:34\", 1095, 1114, 19],\n", + " [1073, \"18:16\", \"18:28\", 1096, 1108, 12],\n", + " [1074, \"18:17\", \"18:27\", 1097, 1107, 10],\n", + " [1075, \"18:18\", \"19:41\", 1098, 1181, 83],\n", + " [1076, \"18:18\", \"18:58\", 1098, 1138, 40],\n", + " [1077, \"18:18\", \"19:05\", 1098, 1145, 47],\n", + " [1078, \"18:19\", \"19:13\", 1099, 1153, 54],\n", + 
" [1079, \"18:20\", \"19:31\", 1100, 1171, 71],\n", + " [1080, \"18:20\", \"18:36\", 1100, 1116, 16],\n", + " [1081, \"18:20\", \"18:30\", 1100, 1110, 10],\n", + " [1082, \"18:22\", \"19:05\", 1102, 1145, 43],\n", + " [1083, \"18:23\", \"19:05\", 1103, 1145, 42],\n", + " [1084, \"18:24\", \"19:27\", 1104, 1167, 63],\n", + " [1085, \"18:24\", \"19:05\", 1104, 1145, 41],\n", + " [1086, \"18:25\", \"18:44\", 1105, 1124, 19],\n", + " [1087, \"18:28\", \"19:25\", 1108, 1165, 57],\n", + " [1088, \"18:28\", \"18:55\", 1108, 1135, 27],\n", + " [1089, \"18:28\", \"19:08\", 1108, 1148, 40],\n", + " [1090, \"18:28\", \"19:15\", 1108, 1155, 47],\n", + " [1091, \"18:29\", \"19:23\", 1109, 1163, 54],\n", + " [1092, \"18:30\", \"19:05\", 1110, 1145, 35],\n", + " [1093, \"18:30\", \"18:40\", 1110, 1120, 10],\n", + " [1094, \"18:31\", \"18:43\", 1111, 1123, 12],\n", + " [1095, \"18:33\", \"19:15\", 1113, 1155, 42],\n", + " [1096, \"18:34\", \"19:58\", 1114, 1198, 84],\n", + " [1097, \"18:34\", \"19:14\", 1114, 1154, 40],\n", + " [1098, \"18:35\", \"18:55\", 1115, 1135, 20],\n", + " [1099, \"18:36\", \"19:20\", 1116, 1160, 44],\n", + " [1100, \"18:38\", \"19:25\", 1118, 1165, 47],\n", + " [1101, \"18:38\", \"19:23\", 1118, 1163, 45],\n", + " [1102, \"18:38\", \"19:56\", 1118, 1196, 78],\n", + " [1103, \"18:39\", \"19:33\", 1119, 1173, 54],\n", + " [1104, \"18:40\", \"18:50\", 1120, 1130, 10],\n", + " [1105, \"18:40\", \"19:45\", 1120, 1185, 65],\n", + " [1106, \"18:40\", \"18:56\", 1120, 1136, 16],\n", + " [1107, \"18:43\", \"19:10\", 1123, 1150, 27],\n", + " [1108, \"18:43\", \"19:30\", 1123, 1170, 47],\n", + " [1109, \"18:44\", \"19:24\", 1124, 1164, 40],\n", + " [1110, \"18:45\", \"19:05\", 1125, 1145, 20],\n", + " [1111, \"18:46\", \"18:58\", 1126, 1138, 12],\n", + " [1112, \"18:48\", \"19:35\", 1128, 1175, 47],\n", + " [1113, \"18:48\", \"20:12\", 1128, 1212, 84],\n", + " [1114, \"18:48\", \"20:11\", 1128, 1211, 83],\n", + " [1115, \"18:48\", \"19:28\", 1128, 1168, 40],\n", + " [1116, \"18:49\", \"19:43\", 1129, 1183, 54],\n", + " [1117, \"18:50\", \"19:00\", 1130, 1140, 10],\n", + " [1118, \"18:51\", \"19:01\", 1131, 1141, 10],\n", + " [1119, \"18:53\", \"19:35\", 1133, 1175, 42],\n", + " [1120, \"18:53\", \"19:15\", 1133, 1155, 22],\n", + " [1121, \"18:53\", \"20:00\", 1133, 1200, 67],\n", + " [1122, \"18:55\", \"19:15\", 1135, 1155, 20],\n", + " [1123, \"18:55\", \"19:34\", 1135, 1174, 39],\n", + " [1124, \"18:58\", \"19:38\", 1138, 1178, 40],\n", + " [1125, \"18:59\", \"19:53\", 1139, 1193, 54],\n", + " [1126, \"18:59\", \"19:50\", 1139, 1190, 51],\n", + " [1127, \"18:59\", \"19:53\", 1139, 1193, 54],\n", + " [1128, \"19:00\", \"19:16\", 1140, 1156, 16],\n", + " [1129, \"19:00\", \"19:10\", 1140, 1150, 10],\n", + " [1130, \"19:00\", \"19:16\", 1140, 1156, 16],\n", + " [1131, \"19:01\", \"19:13\", 1141, 1153, 12],\n", + " [1132, \"19:03\", \"20:26\", 1143, 1226, 83],\n", + " [1133, \"19:03\", \"19:45\", 1143, 1185, 42],\n", + " [1134, \"19:05\", \"19:44\", 1145, 1184, 39],\n", + " [1135, \"19:05\", \"19:25\", 1145, 1165, 20],\n", + " [1136, \"19:08\", \"20:15\", 1148, 1215, 67],\n", + " [1137, \"19:08\", \"19:35\", 1148, 1175, 27],\n", + " [1138, \"19:09\", \"19:49\", 1149, 1189, 40],\n", + " [1139, \"19:09\", \"20:03\", 1149, 1203, 54],\n", + " [1140, \"19:10\", \"19:20\", 1150, 1160, 10],\n", + " [1141, \"19:10\", \"19:20\", 1150, 1160, 10],\n", + " [1142, \"19:11\", \"19:53\", 1151, 1193, 42],\n", + " [1143, \"19:14\", \"20:26\", 1154, 1226, 72],\n", + " [1144, \"19:14\", \"19:35\", 1154, 1175, 
21],\n", + " [1145, \"19:14\", \"19:24\", 1154, 1164, 10],\n", + " [1146, \"19:14\", \"20:05\", 1154, 1205, 51],\n", + " [1147, \"19:15\", \"19:30\", 1155, 1170, 15],\n", + " [1148, \"19:15\", \"19:54\", 1155, 1194, 39],\n", + " [1149, \"19:18\", \"20:39\", 1158, 1239, 81],\n", + " [1150, \"19:18\", \"20:00\", 1158, 1200, 42],\n", + " [1151, \"19:19\", \"20:14\", 1159, 1214, 55],\n", + " [1152, \"19:20\", \"19:30\", 1160, 1170, 10],\n", + " [1153, \"19:20\", \"19:36\", 1160, 1176, 16],\n", + " [1154, \"19:21\", \"19:31\", 1161, 1171, 10],\n", + " [1155, \"19:23\", \"20:30\", 1163, 1230, 67],\n", + " [1156, \"19:23\", \"19:35\", 1163, 1175, 12],\n", + " [1157, \"19:24\", \"19:45\", 1164, 1185, 21],\n", + " [1158, \"19:24\", \"19:45\", 1164, 1185, 21],\n", + " [1159, \"19:25\", \"20:04\", 1165, 1204, 39],\n", + " [1160, \"19:26\", \"20:08\", 1166, 1208, 42],\n", + " [1161, \"19:29\", \"20:02\", 1169, 1202, 33],\n", + " [1162, \"19:29\", \"20:18\", 1169, 1218, 49],\n", + " [1163, \"19:29\", \"20:41\", 1169, 1241, 72],\n", + " [1164, \"19:30\", \"19:40\", 1170, 1180, 10],\n", + " [1165, \"19:33\", \"20:54\", 1173, 1254, 81],\n", + " [1166, \"19:33\", \"20:17\", 1173, 1217, 44],\n", + " [1167, \"19:34\", \"19:55\", 1174, 1195, 21],\n", + " [1168, \"19:35\", \"20:14\", 1175, 1214, 39],\n", + " [1169, \"19:38\", \"20:05\", 1178, 1205, 27],\n", + " [1170, \"19:38\", \"20:45\", 1178, 1245, 67],\n", + " [1171, \"19:39\", \"20:12\", 1179, 1212, 33],\n", + " [1172, \"19:40\", \"19:50\", 1180, 1190, 10],\n", + " [1173, \"19:40\", \"19:56\", 1180, 1196, 16],\n", + " [1174, \"19:41\", \"20:27\", 1181, 1227, 46],\n", + " [1175, \"19:43\", \"19:55\", 1183, 1195, 12],\n", + " [1176, \"19:44\", \"20:05\", 1184, 1205, 21],\n", + " [1177, \"19:44\", \"20:33\", 1184, 1233, 49],\n", + " [1178, \"19:44\", \"21:00\", 1184, 1260, 76],\n", + " [1179, \"19:45\", \"20:24\", 1185, 1224, 39],\n", + " [1180, \"19:48\", \"20:37\", 1188, 1237, 49],\n", + " [1181, \"19:48\", \"21:09\", 1188, 1269, 81],\n", + " [1182, \"19:50\", \"20:00\", 1190, 1200, 10],\n", + " [1183, \"19:52\", \"20:29\", 1192, 1229, 37],\n", + " [1184, \"19:53\", \"20:08\", 1193, 1208, 15],\n", + " [1185, \"19:53\", \"21:02\", 1193, 1262, 69],\n", + " [1186, \"19:53\", \"20:20\", 1193, 1220, 27],\n", + " [1187, \"19:54\", \"20:19\", 1194, 1219, 25],\n", + " [1188, \"19:55\", \"20:34\", 1195, 1234, 39],\n", + " [1189, \"19:56\", \"20:34\", 1196, 1234, 38],\n", + " [1190, \"19:59\", \"20:48\", 1199, 1248, 49],\n", + " [1191, \"19:59\", \"21:20\", 1199, 1280, 81],\n", + " [1192, \"20:00\", \"20:16\", 1200, 1216, 16],\n", + " [1193, \"20:00\", \"20:10\", 1200, 1210, 10],\n", + " [1194, \"20:03\", \"20:42\", 1203, 1242, 39],\n", + " [1195, \"20:03\", \"21:24\", 1203, 1284, 81],\n", + " [1196, \"20:04\", \"20:29\", 1204, 1229, 25],\n", + " [1197, \"20:05\", \"20:48\", 1205, 1248, 43],\n", + " [1198, \"20:07\", \"20:44\", 1207, 1244, 37],\n", + " [1199, \"20:08\", \"20:40\", 1208, 1240, 32],\n", + " [1200, \"20:08\", \"20:35\", 1208, 1235, 27],\n", + " [1201, \"20:10\", \"20:20\", 1210, 1220, 10],\n", + " [1202, \"20:10\", \"20:22\", 1210, 1222, 12],\n", + " [1203, \"20:11\", \"20:47\", 1211, 1247, 36],\n", + " [1204, \"20:14\", \"21:04\", 1214, 1264, 50],\n", + " [1205, \"20:14\", \"21:03\", 1214, 1263, 49],\n", + " [1206, \"20:17\", \"21:03\", 1217, 1263, 46],\n", + " [1207, \"20:18\", \"21:39\", 1218, 1299, 81],\n", + " [1208, \"20:20\", \"20:30\", 1220, 1230, 10],\n", + " [1209, \"20:20\", \"20:57\", 1220, 1257, 37],\n", + " [1210, \"20:20\", \"20:36\", 
1220, 1236, 16],\n", + " [1211, \"20:22\", \"20:59\", 1222, 1259, 37],\n", + " [1212, \"20:22\", \"20:42\", 1222, 1242, 20],\n", + " [1213, \"20:24\", \"20:49\", 1224, 1249, 25],\n", + " [1214, \"20:27\", \"21:22\", 1227, 1282, 55],\n", + " [1215, \"20:29\", \"21:18\", 1229, 1278, 49],\n", + " [1216, \"20:30\", \"21:07\", 1230, 1267, 37],\n", + " [1217, \"20:30\", \"20:40\", 1230, 1240, 10],\n", + " [1218, \"20:30\", \"20:40\", 1230, 1240, 10],\n", + " [1219, \"20:30\", \"21:40\", 1230, 1300, 70],\n", + " [1220, \"20:32\", \"21:18\", 1232, 1278, 46],\n", + " [1221, \"20:35\", \"21:54\", 1235, 1314, 79],\n", + " [1222, \"20:37\", \"21:14\", 1237, 1274, 37],\n", + " [1223, \"20:38\", \"21:08\", 1238, 1268, 30],\n", + " [1224, \"20:40\", \"20:50\", 1240, 1250, 10],\n", + " [1225, \"20:40\", \"21:17\", 1240, 1277, 37],\n", + " [1226, \"20:40\", \"20:56\", 1240, 1256, 16],\n", + " [1227, \"20:44\", \"21:33\", 1244, 1293, 49],\n", + " [1228, \"20:47\", \"21:33\", 1247, 1293, 46],\n", + " [1229, \"20:47\", \"21:42\", 1247, 1302, 55],\n", + " [1230, \"20:50\", \"21:00\", 1250, 1260, 10],\n", + " [1231, \"20:50\", \"22:00\", 1250, 1320, 70],\n", + " [1232, \"20:50\", \"22:09\", 1250, 1329, 79],\n", + " [1233, \"20:50\", \"21:27\", 1250, 1287, 37],\n", + " [1234, \"20:52\", \"21:29\", 1252, 1289, 37],\n", + " [1235, \"20:53\", \"21:20\", 1253, 1280, 27],\n", + " [1236, \"20:56\", \"21:11\", 1256, 1271, 15],\n", + " [1237, \"20:59\", \"21:48\", 1259, 1308, 49],\n", + " [1238, \"21:00\", \"21:10\", 1260, 1270, 10],\n", + " [1239, \"21:00\", \"21:37\", 1260, 1297, 37],\n", + " [1240, \"21:02\", \"21:48\", 1262, 1308, 46],\n", + " [1241, \"21:05\", \"22:24\", 1265, 1344, 79],\n", + " [1242, \"21:07\", \"21:44\", 1267, 1304, 37],\n", + " [1243, \"21:07\", \"22:02\", 1267, 1322, 55],\n", + " [1244, \"21:08\", \"21:38\", 1268, 1298, 30],\n", + " [1245, \"21:10\", \"22:25\", 1270, 1345, 75],\n", + " [1246, \"21:10\", \"21:20\", 1270, 1280, 10],\n", + " [1247, \"21:10\", \"21:47\", 1270, 1307, 37],\n", + " [1248, \"21:14\", \"22:03\", 1274, 1323, 49],\n", + " [1249, \"21:17\", \"22:03\", 1277, 1323, 46],\n", + " [1250, \"21:20\", \"22:18\", 1280, 1338, 58],\n", + " [1251, \"21:20\", \"21:57\", 1280, 1317, 37],\n", + " [1252, \"21:20\", \"21:30\", 1280, 1290, 10],\n", + " [1253, \"21:22\", \"21:59\", 1282, 1319, 37],\n", + " [1254, \"21:24\", \"21:49\", 1284, 1309, 25],\n", + " [1255, \"21:27\", \"22:21\", 1287, 1341, 54],\n", + " [1256, \"21:30\", \"22:07\", 1290, 1327, 37],\n", + " [1257, \"21:30\", \"22:20\", 1290, 1340, 50],\n", + " [1258, \"21:30\", \"21:40\", 1290, 1300, 10],\n", + " [1259, \"21:32\", \"22:18\", 1292, 1338, 46],\n", + " [1260, \"21:32\", \"22:01\", 1292, 1321, 29],\n", + " [1261, \"21:35\", \"22:54\", 1295, 1374, 79],\n", + " [1262, \"21:37\", \"22:14\", 1297, 1334, 37],\n", + " [1263, \"21:39\", \"21:55\", 1299, 1315, 16],\n", + " [1264, \"21:40\", \"22:17\", 1300, 1337, 37],\n", + " [1265, \"21:40\", \"21:50\", 1300, 1310, 10],\n", + " [1266, \"21:41\", \"22:08\", 1301, 1328, 27],\n", + " [1267, \"21:47\", \"22:16\", 1307, 1336, 29],\n", + " [1268, \"21:47\", \"22:51\", 1307, 1371, 64],\n", + " [1269, \"21:47\", \"22:33\", 1307, 1353, 46],\n", + " [1270, \"21:48\", \"22:03\", 1308, 1323, 15],\n", + " [1271, \"21:50\", \"22:55\", 1310, 1375, 65],\n", + " [1272, \"21:50\", \"22:27\", 1310, 1347, 37],\n", + " [1273, \"21:50\", \"22:00\", 1310, 1320, 10],\n", + " [1274, \"21:52\", \"22:29\", 1312, 1349, 37],\n", + " [1275, \"21:53\", \"22:19\", 1313, 1339, 26],\n", + " [1276, \"22:00\", 
\"22:38\", 1320, 1358, 38],\n", + " [1277, \"22:00\", \"22:10\", 1320, 1330, 10],\n", + " [1278, \"22:02\", \"22:12\", 1322, 1332, 10],\n", + " [1279, \"22:02\", \"22:48\", 1322, 1368, 46],\n", + " [1280, \"22:04\", \"22:31\", 1324, 1351, 27],\n", + " [1281, \"22:05\", \"23:24\", 1325, 1404, 79],\n", + " [1282, \"22:07\", \"22:44\", 1327, 1364, 37],\n", + " [1283, \"22:07\", \"22:39\", 1327, 1359, 32],\n", + " [1284, \"22:09\", \"22:25\", 1329, 1345, 16],\n", + " [1285, \"22:10\", \"23:25\", 1330, 1405, 75],\n", + " [1286, \"22:13\", \"22:38\", 1333, 1358, 25],\n", + " [1287, \"22:13\", \"22:53\", 1333, 1373, 40],\n", + " [1288, \"22:17\", \"22:27\", 1337, 1347, 10],\n", + " [1289, \"22:17\", \"23:03\", 1337, 1383, 46],\n", + " [1290, \"22:19\", \"22:46\", 1339, 1366, 27],\n", + " [1291, \"22:22\", \"22:59\", 1342, 1379, 37],\n", + " [1292, \"22:24\", \"22:48\", 1344, 1368, 24],\n", + " [1293, \"22:27\", \"22:52\", 1347, 1372, 25],\n", + " [1294, \"22:27\", \"23:21\", 1347, 1401, 54],\n", + " [1295, \"22:28\", \"23:08\", 1348, 1388, 40],\n", + " [1296, \"22:30\", \"23:17\", 1350, 1397, 47],\n", + " [1297, \"22:32\", \"22:42\", 1352, 1362, 10],\n", + " [1298, \"22:32\", \"23:11\", 1352, 1391, 39],\n", + " [1299, \"22:34\", \"23:01\", 1354, 1381, 27],\n", + " [1300, \"22:35\", \"23:54\", 1355, 1434, 79],\n", + " [1301, \"22:37\", \"23:14\", 1357, 1394, 37],\n", + " [1302, \"22:43\", \"23:23\", 1363, 1403, 40],\n", + " [1303, \"22:43\", \"23:08\", 1363, 1388, 25],\n", + " [1304, \"22:47\", \"23:33\", 1367, 1413, 46],\n", + " [1305, \"22:47\", \"22:57\", 1367, 1377, 10],\n", + " [1306, \"22:49\", \"23:16\", 1369, 1396, 27],\n", + " [1307, \"22:52\", \"23:29\", 1372, 1409, 37],\n", + " [1308, \"22:53\", \"23:15\", 1373, 1395, 22],\n", + " [1309, \"22:55\", \"23:55\", 1375, 1435, 60],\n", + " [1310, \"22:57\", \"23:51\", 1377, 1431, 54],\n", + " [1311, \"22:58\", \"23:38\", 1378, 1418, 40],\n", + " [1312, \"23:02\", \"23:41\", 1382, 1421, 39],\n", + " [1313, \"23:02\", \"23:12\", 1382, 1392, 10],\n", + " [1314, \"23:04\", \"23:31\", 1384, 1411, 27],\n", + " [1315, \"23:05\", \"00:24\", 1385, 1464, 79],\n", + " [1316, \"23:07\", \"23:44\", 1387, 1424, 37],\n", + " [1317, \"23:13\", \"23:53\", 1393, 1433, 40],\n", + " [1318, \"23:13\", \"23:38\", 1393, 1418, 25],\n", + " [1319, \"23:17\", \"00:03\", 1397, 1443, 46],\n", + " [1320, \"23:17\", \"23:27\", 1397, 1407, 10],\n", + " [1321, \"23:19\", \"23:46\", 1399, 1426, 27],\n", + " [1322, \"23:22\", \"23:59\", 1402, 1439, 37],\n", + " [1323, \"23:25\", \"00:25\", 1405, 1465, 60],\n", + " [1324, \"23:27\", \"00:21\", 1407, 1461, 54],\n", + " [1325, \"23:28\", \"00:08\", 1408, 1448, 40],\n", + " [1326, \"23:32\", \"23:42\", 1412, 1422, 10],\n", + " [1327, \"23:34\", \"00:01\", 1414, 1441, 27],\n", + " [1328, \"23:35\", \"01:05\", 1415, 1505, 90],\n", + " [1329, \"23:37\", \"00:09\", 1417, 1449, 32],\n", + " [1330, \"23:43\", \"00:23\", 1423, 1463, 40],\n", + " [1331, \"23:43\", \"00:08\", 1423, 1448, 25],\n", + " [1332, \"23:46\", \"00:01\", 1426, 1441, 15],\n", + " [1333, \"23:47\", \"23:57\", 1427, 1437, 10],\n", + " [1334, \"23:47\", \"00:33\", 1427, 1473, 46],\n", + " [1335, \"23:52\", \"00:24\", 1432, 1464, 32],\n", + " [1336, \"23:55\", \"00:49\", 1435, 1489, 54],\n", + " [1337, \"23:57\", \"00:57\", 1437, 1497, 60],\n", + " [1338, \"23:58\", \"00:38\", 1438, 1478, 40],\n", + " [1339, \"00:02\", \"00:12\", 1442, 1452, 10],\n", + " [1340, \"00:07\", \"00:39\", 1447, 1479, 32],\n", + " [1341, \"00:13\", \"00:38\", 1453, 1478, 25],\n", + " [1342, 
\"00:13\", \"00:51\", 1453, 1491, 38],\n", + " [1343, \"00:15\", \"01:14\", 1455, 1514, 59],\n", + " [1344, \"00:17\", \"01:23\", 1457, 1523, 66],\n", + " [1345, \"00:23\", \"00:33\", 1463, 1473, 10],\n", + " [1346, \"00:24\", \"00:40\", 1464, 1480, 16],\n", + " [1347, \"00:25\", \"01:12\", 1465, 1512, 47],\n", + " [1348, \"00:28\", \"01:07\", 1468, 1507, 39],\n", + " [1349, \"00:33\", \"01:05\", 1473, 1505, 32],\n", + " [1350, \"00:43\", \"01:21\", 1483, 1521, 38],\n", + " [1351, \"00:44\", \"00:54\", 1484, 1494, 10],\n", + " [1352, \"00:47\", \"01:09\", 1487, 1509, 22],\n", + " [1353, \"00:47\", \"01:26\", 1487, 1526, 39],\n", + " [1354, \"00:54\", \"01:04\", 1494, 1504, 10],\n", + " [1355, \"00:57\", \"01:07\", 1497, 1507, 10],\n", "] # yapf:disable\n", "\n", "\n", @@ -1750,17 +1749,16 @@ "\n", " # Computed data.\n", " total_driving_time = sum(shift[5] for shift in shifts)\n", - " min_num_drivers = int(\n", - " math.ceil(total_driving_time * 1.0 / max_driving_time))\n", + " min_num_drivers = int(math.ceil(total_driving_time * 1.0 / max_driving_time))\n", " min_start_time = min(shift[3] for shift in shifts)\n", " max_end_time = max(shift[4] for shift in shifts)\n", "\n", - " print('Bus driver scheduling')\n", - " print(' num shifts =', num_shifts)\n", - " print(' total driving time =', total_driving_time, 'minutes')\n", - " print(' min num drivers =', min_num_drivers)\n", - " print(' min start time =', min_start_time)\n", - " print(' max end time =', max_end_time)\n", + " print(\"Bus driver scheduling\")\n", + " print(\" num shifts =\", num_shifts)\n", + " print(\" total driving time =\", total_driving_time, \"minutes\")\n", + " print(\" min num drivers =\", min_num_drivers)\n", + " print(\" min start time =\", min_start_time)\n", + " print(\" max end time =\", max_end_time)\n", "\n", " # We are going to build a flow from a the start of the day to the end\n", " # of the day.\n", @@ -1785,11 +1783,11 @@ " # Create all the shift variables before iterating on the transitions\n", " # between these shifts.\n", " for shift in range(num_shifts):\n", - " driving_time[shift] = model.NewIntVar(0, max_driving_time, 'dt_%i' % shift)\n", + " driving_time[shift] = model.NewIntVar(0, max_driving_time, \"dt_%i\" % shift)\n", " no_break_driving_time[shift] = model.NewIntVar(\n", - " 0, max_driving_time_without_pauses, 'nbdt_%i' % shift)\n", - " working_time[shift] = model.NewIntVar(\n", - " 0, max_working_time, 'wt_%i' % shift)\n", + " 0, max_driving_time_without_pauses, \"nbdt_%i\" % shift\n", + " )\n", + " working_time[shift] = model.NewIntVar(0, max_working_time, \"wt_%i\" % shift)\n", "\n", " for shift in range(num_shifts):\n", " duration = shifts[shift][5]\n", @@ -1797,19 +1795,19 @@ " # Arc from source to shift.\n", " # - set the working time of the driver\n", " # - increase driving time and driving time since the last break\n", - " source_lit = model.NewBoolVar('from source to %i' % shift)\n", + " source_lit = model.NewBoolVar(\"from source to %i\" % shift)\n", " all_literals.append(source_lit)\n", " outgoing_source_literals.append(source_lit)\n", " incoming_literals[shift].append(source_lit)\n", " model.Add(driving_time[shift] == duration).OnlyEnforceIf(source_lit)\n", - " model.Add(no_break_driving_time[shift] == duration).OnlyEnforceIf(\n", - " source_lit)\n", + " model.Add(no_break_driving_time[shift] == duration).OnlyEnforceIf(source_lit)\n", " model.Add(working_time[shift] == duration + extra_time).OnlyEnforceIf(\n", - " source_lit)\n", + " source_lit\n", + " )\n", "\n", " # Arc from shift 
to sink\n", " # - checks that working time is greater than min_working_time\n", - " sink_lit = model.NewBoolVar('from %i to sink' % shift)\n", + " sink_lit = model.NewBoolVar(\"from %i to sink\" % shift)\n", " all_literals.append(sink_lit)\n", " outgoing_literals[shift].append(sink_lit)\n", " incoming_sink_literals.append(sink_lit)\n", @@ -1822,25 +1820,29 @@ " if delay > max_break:\n", " break # Assumes start times are sorted.\n", " other_duration = shifts[other][5]\n", - " lit = model.NewBoolVar('from %i to %i' % (shift, other))\n", + " lit = model.NewBoolVar(\"from %i to %i\" % (shift, other))\n", " all_literals.append(lit)\n", "\n", " # Increase driving time\n", - " model.Add(driving_time[other] ==\n", - " driving_time[shift] + other_duration).OnlyEnforceIf(lit)\n", + " model.Add(\n", + " driving_time[other] == driving_time[shift] + other_duration\n", + " ).OnlyEnforceIf(lit)\n", "\n", " # Increase no_break_driving or reset it to 0 depending on the delay\n", " if delay >= min_pause_after_4h:\n", - " model.Add(no_break_driving_time[other] ==\n", - " other_duration).OnlyEnforceIf(lit)\n", + " model.Add(no_break_driving_time[other] == other_duration).OnlyEnforceIf(\n", + " lit\n", + " )\n", " else:\n", " model.Add(\n", - " no_break_driving_time[other] ==\n", - " no_break_driving_time[shift] + other_duration).OnlyEnforceIf(lit)\n", + " no_break_driving_time[other]\n", + " == no_break_driving_time[shift] + other_duration\n", + " ).OnlyEnforceIf(lit)\n", "\n", " # Increase working time\n", - " model.Add(working_time[other] == working_time[shift] + delay +\n", - " other_duration).OnlyEnforceIf(lit)\n", + " model.Add(\n", + " working_time[other] == working_time[shift] + delay + other_duration\n", + " ).OnlyEnforceIf(lit)\n", "\n", " # Add arc\n", " outgoing_literals[shift].append(lit)\n", @@ -1852,16 +1854,16 @@ " model.Add(sum(incoming_literals[shift]) == 1)\n", "\n", " # Num drivers\n", - " num_drivers = model.NewIntVar(min_num_drivers, min_num_drivers * 3, 'num_drivers')\n", + " num_drivers = model.NewIntVar(min_num_drivers, min_num_drivers * 3, \"num_drivers\")\n", " model.Add(sum(incoming_sink_literals) == num_drivers)\n", " model.Add(sum(outgoing_source_literals) == num_drivers)\n", "\n", - " model.Minimize(num_drivers) \n", + " model.Minimize(num_drivers)\n", "\n", " # Solve model.\n", " solver = cp_model.CpSolver()\n", " solver.parameters.log_search_progress = True\n", - " #solver.parameters.num_search_workers = 16\n", + " # solver.parameters.num_search_workers = 16\n", " # solver.parameters.boolean_encoding_level = 0\n", " # solver.parameters.lns_focus_on_decision_variables = True\n", " status = solver.Solve(model)\n", @@ -1871,13 +1873,13 @@ "\n", " # Display solution\n", " optimal_num_drivers = int(solver.ObjectiveValue())\n", - " print('minimal number of drivers =', optimal_num_drivers)\n", + " print(\"minimal number of drivers =\", optimal_num_drivers)\n", " return optimal_num_drivers\n", "\n", "\n", "def main(args):\n", " \"\"\"Optimize the bus driver allocation in two passes.\"\"\"\n", - " print('----------- first pass: minimize the number of drivers')\n", + " print(\"----------- first pass: minimize the number of drivers\")\n", " shifts = []\n", " if args.instance == 1:\n", " shifts = SAMPLE_SHIFTS_SMALL\n", @@ -1887,8 +1889,8 @@ " shifts = SAMPLE_SHIFTS_LARGE\n", " num_drivers = find_minimum_number_of_drivers(shifts, args.params)\n", "\n", - " print('----------- second pass: minimize the sum of working times')\n", - " #bus_driver_scheduling(False, num_drivers)\n", + " 
print(\"----------- second pass: minimize the sum of working times\")\n", + " # bus_driver_scheduling(False, num_drivers)\n", "\n", "\n", "main(PARSER.parse_args())\n", diff --git a/examples/notebook/examples/bus_driver_scheduling_sat.ipynb b/examples/notebook/examples/bus_driver_scheduling_sat.ipynb index 1e7d4c66429..bb04ad260cb 100644 --- a/examples/notebook/examples/bus_driver_scheduling_sat.ipynb +++ b/examples/notebook/examples/bus_driver_scheduling_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -1776,7 +1776,7 @@ "] # yapf:disable\n", "\n", "\n", - "def bus_driver_scheduling(minimize_drivers, max_num_drivers):\n", + "def bus_driver_scheduling(minimize_drivers: bool, max_num_drivers: int) -> int:\n", " \"\"\"Optimize the bus driver scheduling problem.\n", "\n", " This model has two modes.\n", @@ -1874,14 +1874,14 @@ "\n", " for d in range(num_drivers):\n", " start_times.append(\n", - " model.NewIntVar(min_start_time - setup_time, max_end_time, \"start_%i\" % d)\n", + " model.new_int_var(min_start_time - setup_time, max_end_time, \"start_%i\" % d)\n", " )\n", " end_times.append(\n", - " model.NewIntVar(min_start_time, max_end_time + cleanup_time, \"end_%i\" % d)\n", + " model.new_int_var(min_start_time, max_end_time + cleanup_time, \"end_%i\" % d)\n", " )\n", - " driving_times.append(model.NewIntVar(0, max_driving_time, \"driving_%i\" % d))\n", + " driving_times.append(model.new_int_var(0, max_driving_time, \"driving_%i\" % d))\n", " working_times.append(\n", - " model.NewIntVar(0, max_working_time, \"working_times_%i\" % d)\n", + " model.new_int_var(0, max_working_time, \"working_times_%i\" % d)\n", " )\n", "\n", " incoming_literals = collections.defaultdict(list)\n", @@ -1892,13 +1892,13 @@ " # Create all the shift variables before iterating on the transitions\n", " # between these shifts.\n", " for s in range(num_shifts):\n", - " total_driving[d, s] = model.NewIntVar(\n", + " total_driving[d, s] = model.new_int_var(\n", " 0, max_driving_time, \"dr_%i_%i\" % (d, s)\n", " )\n", - " no_break_driving[d, s] = model.NewIntVar(\n", + " no_break_driving[d, s] = model.new_int_var(\n", " 0, max_driving_time_without_pauses, \"mdr_%i_%i\" % (d, s)\n", " )\n", - " performed[d, s] = model.NewBoolVar(\"performed_%i_%i\" % (d, s))\n", + " performed[d, s] = model.new_bool_var(\"performed_%i_%i\" % (d, s))\n", "\n", " for s in range(num_shifts):\n", " shift = shifts[s]\n", @@ -1907,42 +1907,44 @@ " # Arc from source to shift.\n", " # - set the start time of the driver\n", " # - increase driving time and driving time since break\n", - " source_lit = model.NewBoolVar(\"%i from source to %i\" % (d, s))\n", + " source_lit = model.new_bool_var(\"%i from source to %i\" % (d, s))\n", " outgoing_source_literals.append(source_lit)\n", " incoming_literals[s].append(source_lit)\n", " shared_incoming_literals[s].append(source_lit)\n", - " model.Add(start_times[d] == shift[3] - setup_time).OnlyEnforceIf(source_lit)\n", - " model.Add(total_driving[d, s] == duration).OnlyEnforceIf(source_lit)\n", - " model.Add(no_break_driving[d, s] == duration).OnlyEnforceIf(source_lit)\n", + " model.add(start_times[d] == shift[3] - setup_time).only_enforce_if(\n", + " source_lit\n", + " )\n", + " model.add(total_driving[d, s] == duration).only_enforce_if(source_lit)\n", + " model.add(no_break_driving[d, s] == duration).only_enforce_if(source_lit)\n", " starting_shifts[d, s] = source_lit\n", "\n", " # Arc from shift to 
sink\n", " # - set the end time of the driver\n", " # - set the driving times of the driver\n", - " sink_lit = model.NewBoolVar(\"%i from %i to sink\" % (d, s))\n", + " sink_lit = model.new_bool_var(\"%i from %i to sink\" % (d, s))\n", " outgoing_literals[s].append(sink_lit)\n", " shared_outgoing_literals[s].append(sink_lit)\n", " incoming_sink_literals.append(sink_lit)\n", - " model.Add(end_times[d] == shift[4] + cleanup_time).OnlyEnforceIf(sink_lit)\n", - " model.Add(driving_times[d] == total_driving[d, s]).OnlyEnforceIf(sink_lit)\n", + " model.add(end_times[d] == shift[4] + cleanup_time).only_enforce_if(sink_lit)\n", + " model.add(driving_times[d] == total_driving[d, s]).only_enforce_if(sink_lit)\n", "\n", " # Node not performed\n", " # - set both driving times to 0\n", " # - add a looping arc on the node\n", - " model.Add(total_driving[d, s] == 0).OnlyEnforceIf(performed[d, s].Not())\n", - " model.Add(no_break_driving[d, s] == 0).OnlyEnforceIf(performed[d, s].Not())\n", - " incoming_literals[s].append(performed[d, s].Not())\n", - " outgoing_literals[s].append(performed[d, s].Not())\n", - " # Not adding to the shared lists, because, globally, each node will have\n", - " # one incoming literal, and one outgoing literal.\n", + " model.add(total_driving[d, s] == 0).only_enforce_if(~performed[d, s])\n", + " model.add(no_break_driving[d, s] == 0).only_enforce_if(~performed[d, s])\n", + " incoming_literals[s].append(~performed[d, s])\n", + " outgoing_literals[s].append(~performed[d, s])\n", + " # negated adding to the shared lists, because, globally, each node will\n", + " # have one incoming literal, and one outgoing literal.\n", "\n", " # Node performed:\n", " # - add upper bound on start_time\n", " # - add lower bound on end_times\n", - " model.Add(start_times[d] <= shift[3] - setup_time).OnlyEnforceIf(\n", + " model.add(start_times[d] <= shift[3] - setup_time).only_enforce_if(\n", " performed[d, s]\n", " )\n", - " model.Add(end_times[d] >= shift[4] + cleanup_time).OnlyEnforceIf(\n", + " model.add(end_times[d] >= shift[4] + cleanup_time).only_enforce_if(\n", " performed[d, s]\n", " )\n", "\n", @@ -1951,22 +1953,22 @@ " delay = other[3] - shift[4]\n", " if delay < min_delay_between_shifts:\n", " continue\n", - " lit = model.NewBoolVar(\"%i from %i to %i\" % (d, s, o))\n", + " lit = model.new_bool_var(\"%i from %i to %i\" % (d, s, o))\n", "\n", " # Increase driving time\n", - " model.Add(\n", + " model.add(\n", " total_driving[d, o] == total_driving[d, s] + other[5]\n", - " ).OnlyEnforceIf(lit)\n", + " ).only_enforce_if(lit)\n", "\n", " # Increase no_break_driving or reset it to 0 depending on the delay\n", " if delay >= min_pause_after_4h:\n", - " model.Add(no_break_driving[d, o] == other[5]).OnlyEnforceIf(lit)\n", + " model.add(no_break_driving[d, o] == other[5]).only_enforce_if(lit)\n", " else:\n", - " model.Add(\n", + " model.add(\n", " no_break_driving[d, o] == no_break_driving[d, s] + other[5]\n", - " ).OnlyEnforceIf(lit)\n", + " ).only_enforce_if(lit)\n", "\n", - " # Add arc\n", + " # add arc\n", " outgoing_literals[s].append(lit)\n", " shared_outgoing_literals[s].append(lit)\n", " incoming_literals[o].append(lit)\n", @@ -1976,68 +1978,68 @@ " delay_literals.append(lit)\n", " delay_weights.append(delay)\n", "\n", - " model.Add(working_times[d] == end_times[d] - start_times[d])\n", + " model.add(working_times[d] == end_times[d] - start_times[d])\n", "\n", " if minimize_drivers:\n", " # Driver is not working.\n", - " working = model.NewBoolVar(\"working_%i\" % d)\n", - " 
model.Add(start_times[d] == min_start_time).OnlyEnforceIf(working.Not())\n", - " model.Add(end_times[d] == min_start_time).OnlyEnforceIf(working.Not())\n", - " model.Add(driving_times[d] == 0).OnlyEnforceIf(working.Not())\n", + " working = model.new_bool_var(\"working_%i\" % d)\n", + " model.add(start_times[d] == min_start_time).only_enforce_if(~working)\n", + " model.add(end_times[d] == min_start_time).only_enforce_if(~working)\n", + " model.add(driving_times[d] == 0).only_enforce_if(~working)\n", " working_drivers.append(working)\n", - " outgoing_source_literals.append(working.Not())\n", - " incoming_sink_literals.append(working.Not())\n", + " outgoing_source_literals.append(~working)\n", + " incoming_sink_literals.append(~working)\n", " # Conditional working time constraints\n", - " model.Add(working_times[d] >= min_working_time).OnlyEnforceIf(working)\n", - " model.Add(working_times[d] == 0).OnlyEnforceIf(working.Not())\n", + " model.add(working_times[d] >= min_working_time).only_enforce_if(working)\n", + " model.add(working_times[d] == 0).only_enforce_if(~working)\n", " else:\n", " # Working time constraints\n", - " model.Add(working_times[d] >= min_working_time)\n", + " model.add(working_times[d] >= min_working_time)\n", "\n", " # Create circuit constraint.\n", - " model.AddExactlyOne(outgoing_source_literals)\n", + " model.add_exactly_one(outgoing_source_literals)\n", " for s in range(num_shifts):\n", - " model.AddExactlyOne(outgoing_literals[s])\n", - " model.AddExactlyOne(incoming_literals[s])\n", - " model.AddExactlyOne(incoming_sink_literals)\n", + " model.add_exactly_one(outgoing_literals[s])\n", + " model.add_exactly_one(incoming_literals[s])\n", + " model.add_exactly_one(incoming_sink_literals)\n", "\n", " # Each shift is covered.\n", " for s in range(num_shifts):\n", - " model.AddExactlyOne(performed[d, s] for d in range(num_drivers))\n", + " model.add_exactly_one(performed[d, s] for d in range(num_drivers))\n", " # Globally, each node has one incoming and one outgoing literal\n", - " model.AddExactlyOne(shared_incoming_literals[s])\n", - " model.AddExactlyOne(shared_outgoing_literals[s])\n", + " model.add_exactly_one(shared_incoming_literals[s])\n", + " model.add_exactly_one(shared_outgoing_literals[s])\n", "\n", " # Symmetry breaking\n", "\n", " # The first 3 shifts must be performed by 3 different drivers.\n", " # Let's assign them to the first 3 drivers in sequence\n", - " model.Add(starting_shifts[0, 0] == 1)\n", - " model.Add(starting_shifts[1, 1] == 1)\n", - " model.Add(starting_shifts[2, 2] == 1)\n", + " model.add(starting_shifts[0, 0] == 1)\n", + " model.add(starting_shifts[1, 1] == 1)\n", + " model.add(starting_shifts[2, 2] == 1)\n", "\n", " if minimize_drivers:\n", " # Push non working drivers to the end\n", " for d in range(num_drivers - 1):\n", - " model.AddImplication(working_drivers[d].Not(), working_drivers[d + 1].Not())\n", + " model.add_implication(~working_drivers[d], ~working_drivers[d + 1])\n", "\n", " # Redundant constraints: sum of driving times = sum of shift driving times\n", - " model.Add(cp_model.LinearExpr.Sum(driving_times) == total_driving_time)\n", + " model.add(cp_model.LinearExpr.sum(driving_times) == total_driving_time)\n", " if not minimize_drivers:\n", - " model.Add(\n", - " cp_model.LinearExpr.Sum(working_times)\n", + " model.add(\n", + " cp_model.LinearExpr.sum(working_times)\n", " == total_driving_time\n", " + num_drivers * (setup_time + cleanup_time)\n", - " + cp_model.LinearExpr.WeightedSum(delay_literals, delay_weights)\n", + " + 
cp_model.LinearExpr.weighted_sum(delay_literals, delay_weights)\n", "        )\n", "\n", "    if minimize_drivers:\n", - "        # Minimize the number of working drivers\n", - "        model.Minimize(cp_model.LinearExpr.Sum(working_drivers))\n", + "        # minimize the number of working drivers\n", + "        model.minimize(cp_model.LinearExpr.sum(working_drivers))\n", "    else:\n", - "        # Minimize the sum of delays between tasks, which in turns minimize the\n", + "        # minimize the sum of delays between tasks, which in turn minimizes the\n", "        # sum of working times as the total driving time is fixed\n", - "        model.Minimize(cp_model.LinearExpr.WeightedSum(delay_literals, delay_weights))\n", + "        model.minimize(cp_model.LinearExpr.weighted_sum(delay_literals, delay_weights))\n", "\n", "    if not minimize_drivers and _OUTPUT_PROTO.value:\n", "        print(\"Writing proto to %s\" % _OUTPUT_PROTO.value)\n", @@ -2049,41 +2051,41 @@ "    if _PARAMS.value:\n", "        text_format.Parse(_PARAMS.value, solver.parameters)\n", "\n", - "    status = solver.Solve(model)\n", + "    status = solver.solve(model)\n", "\n", "    if status != cp_model.OPTIMAL and status != cp_model.FEASIBLE:\n", "        return -1\n", "\n", "    # Display solution\n", "    if minimize_drivers:\n", - "        max_num_drivers = int(solver.ObjectiveValue())\n", + "        max_num_drivers = int(solver.objective_value)\n", "        print(\"minimal number of drivers =\", max_num_drivers)\n", "        return max_num_drivers\n", "\n", "    for d in range(num_drivers):\n", "        print(\"Driver %i: \" % (d + 1))\n", - "        print(\"  total driving time =\", solver.Value(driving_times[d]))\n", + "        print(\"  total driving time =\", solver.value(driving_times[d]))\n", "        print(\n", "            \"  working time =\",\n", - "            solver.Value(working_times[d]) + setup_time + cleanup_time,\n", + "            solver.value(working_times[d]) + setup_time + cleanup_time,\n", "        )\n", "\n", "        first = True\n", "        for s in range(num_shifts):\n", "            shift = shifts[s]\n", "\n", - "            if not solver.BooleanValue(performed[d, s]):\n", + "            if not solver.boolean_value(performed[d, s]):\n", "                continue\n", "\n", "            # Hack to detect if the waiting time between the last shift and\n", "            # this one exceeds 30 minutes. For this, we look at the\n", "            # no_break_driving which was reinitialized in that case.\n", - "            if solver.Value(no_break_driving[d, s]) == shift[5] and not first:\n", + "            if solver.value(no_break_driving[d, s]) == shift[5] and not first:\n", "                print(\"        **break**\")\n", "            print(\"  shift \", shift[0], \":\", shift[1], \"-\", shift[2])\n", "            first = False\n", "\n", - "    return int(solver.ObjectiveValue())\n", + "    return int(solver.objective_value)\n", "\n", "\n", "def main(_):\n", diff --git a/examples/notebook/examples/chemical_balance_lp.ipynb b/examples/notebook/examples/chemical_balance_lp.ipynb index 61cf55e97d6..2e0538ecbe9 100644 --- a/examples/notebook/examples/chemical_balance_lp.ipynb +++ b/examples/notebook/examples/chemical_balance_lp.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/examples/chemical_balance_sat.ipynb b/examples/notebook/examples/chemical_balance_sat.ipynb index 77703e01223..4cc19e2a37e 100644 --- a/examples/notebook/examples/chemical_balance_sat.ipynb +++ b/examples/notebook/examples/chemical_balance_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC."
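Most of the notebook hunks above and below are a mechanical migration from the legacy PascalCase CP-SAT Python API to the snake_case one: NewIntVar becomes new_int_var, OnlyEnforceIf becomes only_enforce_if, lit.Not() becomes ~lit, Solve becomes solve, and getters such as ObjectiveValue() become properties such as objective_value. A minimal sketch of the new spelling on a toy model (the model itself is illustrative, not taken from these notebooks):

    # Toy model written against the snake_case CP-SAT API this diff migrates to.
    from ortools.sat.python import cp_model

    model = cp_model.CpModel()
    x = model.new_int_var(0, 10, "x")      # was model.NewIntVar(...)
    b = model.new_bool_var("b")            # was model.NewBoolVar(...)
    model.add(x == 7).only_enforce_if(b)   # was .OnlyEnforceIf(b)
    model.add(x <= 3).only_enforce_if(~b)  # ~b replaces b.Not()
    model.maximize(x)                      # was model.Maximize(x)

    solver = cp_model.CpSolver()
    status = solver.solve(model)           # was solver.Solve(model)
    if status == cp_model.OPTIMAL:
        print(solver.value(x), solver.objective_value)

Both spellings build the same underlying model proto; the snake_case forms are the PEP 8 style names that recent or-tools releases expose alongside the legacy ones.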
] }, { @@ -138,40 +138,43 @@ " for s in all_sets\n", " ]\n", "\n", - " set_vars = [model.NewIntVar(0, max_set[s], f\"set_{s}\") for s in all_sets]\n", + " set_vars = [model.new_int_var(0, max_set[s], f\"set_{s}\") for s in all_sets]\n", "\n", - " epsilon = model.NewIntVar(0, 10000000, \"epsilon\")\n", + " epsilon = model.new_int_var(0, 10000000, \"epsilon\")\n", "\n", " for p in all_products:\n", - " model.Add(\n", + " model.add(\n", " sum(int(chemical_set[s][p + 1] * 10) * set_vars[s] for s in all_sets)\n", " <= int(max_quantities[p][1] * 10000)\n", " )\n", - " model.Add(\n", + " model.add(\n", " sum(int(chemical_set[s][p + 1] * 10) * set_vars[s] for s in all_sets)\n", " >= int(max_quantities[p][1] * 10000) - epsilon\n", " )\n", "\n", - " model.Minimize(epsilon)\n", + " model.minimize(epsilon)\n", "\n", " # Creates a solver and solves.\n", " solver = cp_model.CpSolver()\n", - " status = solver.Solve(model)\n", - " print(f\"Status = {solver.StatusName(status)}\")\n", - " # The objective value of the solution.\n", - " print(f\"Optimal objective value = {solver.ObjectiveValue() / 10000.0}\")\n", - "\n", - " for s in all_sets:\n", - " print(f\" {chemical_set[s][0]} = {solver.Value(set_vars[s]) / 1000.0}\", end=\" \")\n", - " print()\n", - " for p in all_products:\n", - " name = max_quantities[p][0]\n", - " max_quantity = max_quantities[p][1]\n", - " quantity = sum(\n", - " solver.Value(set_vars[s]) / 1000.0 * chemical_set[s][p + 1]\n", - " for s in all_sets\n", - " )\n", - " print(f\"{name}: {quantity} out of {max_quantity}\")\n", + " status = solver.solve(model)\n", + " if status == cp_model.OPTIMAL:\n", + " # The objective value of the solution.\n", + " print(f\"Optimal objective value = {solver.objective_value / 10000.0}\")\n", + "\n", + " for s in all_sets:\n", + " print(\n", + " f\" {chemical_set[s][0]} = {solver.value(set_vars[s]) / 1000.0}\",\n", + " end=\" \",\n", + " )\n", + " print()\n", + " for p in all_products:\n", + " name = max_quantities[p][0]\n", + " max_quantity = max_quantities[p][1]\n", + " quantity = sum(\n", + " solver.value(set_vars[s]) / 1000.0 * chemical_set[s][p + 1]\n", + " for s in all_sets\n", + " )\n", + " print(f\"{name}: {quantity:.3f} out of {max_quantity}\")\n", "\n", "\n", "def main(argv: Sequence[str]) -> None:\n", diff --git a/examples/notebook/examples/clustering_sat.ipynb b/examples/notebook/examples/clustering_sat.ipynb index d29efbf5fac..18636b3f4fd 100644 --- a/examples/notebook/examples/clustering_sat.ipynb +++ b/examples/notebook/examples/clustering_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
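The chemical_balance_sat hunk above preserves the model's fixed-point arithmetic: CP-SAT accepts only integer coefficients, so the fractional set contents are scaled by 10 and the quantity bounds by 10000 before being added, and the objective is divided by 10000.0 for display. A self-contained sketch of the same trick with made-up numbers (SCALE and the coefficients are illustrative, not the notebook's data):

    # Fixed-point scaling: encode 0.4*x + 1.5*y <= 7.25 for an integer-only solver.
    from ortools.sat.python import cp_model

    SCALE = 100  # keep two decimal digits of precision

    model = cp_model.CpModel()
    x = model.new_int_var(0, 20, "x")
    y = model.new_int_var(0, 20, "y")

    # Multiply the whole constraint by SCALE so every coefficient is an integer:
    # 40*x + 150*y <= 725.
    model.add(round(0.4 * SCALE) * x + round(1.5 * SCALE) * y <= round(7.25 * SCALE))
    model.maximize(x + y)

    solver = cp_model.CpSolver()
    if solver.solve(model) == cp_model.OPTIMAL:
        print(solver.value(x), solver.value(y))

The chosen scale fixes the precision of the whole model; the notebook's factors of 10 and 10000 play exactly this role.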
] }, { @@ -133,7 +133,7 @@ "]\n", "\n", "\n", - "def clustering_sat():\n", + "def clustering_sat() -> None:\n", "    \"\"\"Entry point of the program.\"\"\"\n", "    num_nodes = len(distance_matrix)\n", "    print(\"Num nodes =\", num_nodes)\n", @@ -151,14 +151,14 @@ "    obj_coeffs = []\n", "    for n1 in range(num_nodes - 1):\n", "        for n2 in range(n1 + 1, num_nodes):\n", - "            same = model.NewBoolVar(\"neighbors_%i_%i\" % (n1, n2))\n", + "            same = model.new_bool_var(\"neighbors_%i_%i\" % (n1, n2))\n", "            neighbors[n1, n2] = same\n", "            obj_vars.append(same)\n", "            obj_coeffs.append(distance_matrix[n1][n2] + distance_matrix[n2][n1])\n", "\n", "    # Number of neighbors:\n", "    for n in range(num_nodes):\n", - "        model.Add(\n", + "        model.add(\n", "            sum(neighbors[m, n] for m in range(n))\n", "            + sum(neighbors[n, m] for m in range(n + 1, num_nodes))\n", "            == group_size - 1\n", @@ -168,23 +168,23 @@ "    for n1 in range(num_nodes - 2):\n", "        for n2 in range(n1 + 1, num_nodes - 1):\n", "            for n3 in range(n2 + 1, num_nodes):\n", - "                model.Add(\n", + "                model.add(\n", "                    neighbors[n1, n3] + neighbors[n2, n3] + neighbors[n1, n2] != 2\n", "                )\n", "\n", "    # Redundant constraints on total sum of neighbors.\n", - "    model.Add(sum(obj_vars) == num_groups * group_size * (group_size - 1) // 2)\n", + "    model.add(sum(obj_vars) == num_groups * group_size * (group_size - 1) // 2)\n", "\n", "    # Minimize weighted sum of arcs.\n", - "    model.Minimize(sum(obj_vars[i] * obj_coeffs[i] for i in range(len(obj_vars))))\n", + "    model.minimize(sum(obj_vars[i] * obj_coeffs[i] for i in range(len(obj_vars))))\n", "\n", "    # Solve and print out the solution.\n", "    solver = cp_model.CpSolver()\n", "    solver.parameters.log_search_progress = True\n", "    solver.parameters.num_search_workers = 8\n", "\n", - "    status = solver.Solve(model)\n", - "    print(solver.ResponseStats())\n", + "    status = solver.solve(model)\n", + "    print(solver.response_stats())\n", "\n", "    if status == cp_model.FEASIBLE or status == cp_model.OPTIMAL:\n", "        visited = set()\n", @@ -194,7 +194,7 @@ "            visited.add(n)\n", "            output = str(n)\n", "            for o in range(n + 1, num_nodes):\n", - "                if solver.BooleanValue(neighbors[n, o]):\n", + "                if solver.boolean_value(neighbors[n, o]):\n", "                    visited.add(o)\n", "                    output += \" \" + str(o)\n", "            print(\"Group\", g, \":\", output)\n", diff --git a/examples/notebook/examples/cover_rectangle_sat.ipynb b/examples/notebook/examples/cover_rectangle_sat.ipynb index 6d6c06201fe..e98d7890b5a 100644 --- a/examples/notebook/examples/cover_rectangle_sat.ipynb +++ b/examples/notebook/examples/cover_rectangle_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC."
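In the clustering model above, "n1 and n2 belong to the same group" is a symmetric Boolean relation, and the neighbors[n1, n3] + neighbors[n2, n3] + neighbors[n1, n2] != 2 constraint forbids exactly the transitivity violations: for any three nodes, an equivalence relation can make zero, one, or all three of the pairs equal, but never exactly two. A tiny standalone check of that claim (plain Python, no solver involved):

    # Enumerate all 0/1 assignments of the three pairwise "same group" literals
    # and verify that "sum != 2" accepts exactly the transitive ones.
    from itertools import product

    for ab, bc, ac in product([0, 1], repeat=3):
        transitive = (
            not (ab and bc and not ac)
            and not (ab and ac and not bc)
            and not (bc and ac and not ab)
        )
        assert transitive == (ab + bc + ac != 2)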
] }, { @@ -87,7 +87,7 @@ "from ortools.sat.python import cp_model\n", "\n", "\n", - "def cover_rectangle(num_squares):\n", + "def cover_rectangle(num_squares: int) -> bool:\n", " \"\"\"Try to fill the rectangle with a given number of squares.\"\"\"\n", " size_x = 60\n", " size_y = 50\n", @@ -103,17 +103,17 @@ "\n", " # Creates intervals for the NoOverlap2D and size variables.\n", " for i in range(num_squares):\n", - " size = model.NewIntVar(1, size_y, \"size_%i\" % i)\n", - " start_x = model.NewIntVar(0, size_x, \"sx_%i\" % i)\n", - " end_x = model.NewIntVar(0, size_x, \"ex_%i\" % i)\n", - " start_y = model.NewIntVar(0, size_y, \"sy_%i\" % i)\n", - " end_y = model.NewIntVar(0, size_y, \"ey_%i\" % i)\n", + " size = model.new_int_var(1, size_y, \"size_%i\" % i)\n", + " start_x = model.new_int_var(0, size_x, \"sx_%i\" % i)\n", + " end_x = model.new_int_var(0, size_x, \"ex_%i\" % i)\n", + " start_y = model.new_int_var(0, size_y, \"sy_%i\" % i)\n", + " end_y = model.new_int_var(0, size_y, \"ey_%i\" % i)\n", "\n", - " interval_x = model.NewIntervalVar(start_x, size, end_x, \"ix_%i\" % i)\n", - " interval_y = model.NewIntervalVar(start_y, size, end_y, \"iy_%i\" % i)\n", + " interval_x = model.new_interval_var(start_x, size, end_x, \"ix_%i\" % i)\n", + " interval_y = model.new_interval_var(start_y, size, end_y, \"iy_%i\" % i)\n", "\n", - " area = model.NewIntVar(1, size_y * size_y, \"area_%i\" % i)\n", - " model.AddMultiplicationEquality(area, [size, size])\n", + " area = model.new_int_var(1, size_y * size_y, \"area_%i\" % i)\n", + " model.add_multiplication_equality(area, [size, size])\n", "\n", " areas.append(area)\n", " x_intervals.append(interval_x)\n", @@ -123,47 +123,46 @@ " y_starts.append(start_y)\n", "\n", " # Main constraint.\n", - " model.AddNoOverlap2D(x_intervals, y_intervals)\n", + " model.add_no_overlap_2d(x_intervals, y_intervals)\n", "\n", " # Redundant constraints.\n", - " model.AddCumulative(x_intervals, sizes, size_y)\n", - " model.AddCumulative(y_intervals, sizes, size_x)\n", + " model.add_cumulative(x_intervals, sizes, size_y)\n", + " model.add_cumulative(y_intervals, sizes, size_x)\n", "\n", " # Forces the rectangle to be exactly covered.\n", - " model.Add(sum(areas) == size_x * size_y)\n", + " model.add(sum(areas) == size_x * size_y)\n", "\n", " # Symmetry breaking 1: sizes are ordered.\n", " for i in range(num_squares - 1):\n", - " model.Add(sizes[i] <= sizes[i + 1])\n", + " model.add(sizes[i] <= sizes[i + 1])\n", "\n", " # Define same to be true iff sizes[i] == sizes[i + 1]\n", - " same = model.NewBoolVar(\"\")\n", - " model.Add(sizes[i] == sizes[i + 1]).OnlyEnforceIf(same)\n", - " model.Add(sizes[i] < sizes[i + 1]).OnlyEnforceIf(same.Not())\n", + " same = model.new_bool_var(\"\")\n", + " model.add(sizes[i] == sizes[i + 1]).only_enforce_if(same)\n", + " model.add(sizes[i] < sizes[i + 1]).only_enforce_if(~same)\n", "\n", " # Tie break with starts.\n", - " model.Add(x_starts[i] <= x_starts[i + 1]).OnlyEnforceIf(same)\n", + " model.add(x_starts[i] <= x_starts[i + 1]).only_enforce_if(same)\n", "\n", " # Symmetry breaking 2: first square in one quadrant.\n", - " model.Add(x_starts[0] < (size_x + 1) // 2)\n", - " model.Add(y_starts[0] < (size_y + 1) // 2)\n", + " model.add(x_starts[0] < (size_x + 1) // 2)\n", + " model.add(y_starts[0] < (size_y + 1) // 2)\n", "\n", " # Creates a solver and solves.\n", " solver = cp_model.CpSolver()\n", - " solver.parameters.num_workers = 16\n", - " # solver.parameters.log_search_progress = True\n", + " solver.parameters.num_workers = 8\n", " 
solver.parameters.max_time_in_seconds = 10.0\n", - " status = solver.Solve(model)\n", - " print(\"%s found in %0.2fs\" % (solver.StatusName(status), solver.WallTime()))\n", + " status = solver.solve(model)\n", + " print(\"%s found in %0.2fs\" % (solver.status_name(status), solver.wall_time))\n", "\n", " # Prints solution.\n", " solution_found = status == cp_model.OPTIMAL or status == cp_model.FEASIBLE\n", " if solution_found:\n", " display = [[\" \" for _ in range(size_x)] for _ in range(size_y)]\n", " for i in range(num_squares):\n", - " sol_x = solver.Value(x_starts[i])\n", - " sol_y = solver.Value(y_starts[i])\n", - " sol_s = solver.Value(sizes[i])\n", + " sol_x = solver.value(x_starts[i])\n", + " sol_y = solver.value(y_starts[i])\n", + " sol_s = solver.value(sizes[i])\n", " char = format(i, \"01x\")\n", " for j in range(sol_s):\n", " for k in range(sol_s):\n", diff --git a/examples/notebook/examples/cryptarithm_sat.ipynb b/examples/notebook/examples/cryptarithm_sat.ipynb index 90bb5d1901f..a4488987670 100644 --- a/examples/notebook/examples/cryptarithm_sat.ipynb +++ b/examples/notebook/examples/cryptarithm_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -87,62 +87,62 @@ "from ortools.sat.python import cp_model\n", "\n", "\n", - "def send_more_money():\n", - " \"\"\"Solve the cryptarithmic puzzle SEND+MORE=MONEY.\"\"\"\n", + "def send_more_money() -> None:\n", + " \"\"\"solve the cryptarithmic puzzle SEND+MORE=MONEY.\"\"\"\n", " model = cp_model.CpModel()\n", "\n", " # Create variables.\n", " # Since s is a leading digit, it can't be 0.\n", - " s = model.NewIntVar(1, 9, \"s\")\n", - " e = model.NewIntVar(0, 9, \"e\")\n", - " n = model.NewIntVar(0, 9, \"n\")\n", - " d = model.NewIntVar(0, 9, \"d\")\n", + " s = model.new_int_var(1, 9, \"s\")\n", + " e = model.new_int_var(0, 9, \"e\")\n", + " n = model.new_int_var(0, 9, \"n\")\n", + " d = model.new_int_var(0, 9, \"d\")\n", " # Since m is a leading digit, it can't be 0.\n", - " m = model.NewIntVar(1, 9, \"m\")\n", - " o = model.NewIntVar(0, 9, \"o\")\n", - " r = model.NewIntVar(0, 9, \"r\")\n", - " y = model.NewIntVar(0, 9, \"y\")\n", + " m = model.new_int_var(1, 9, \"m\")\n", + " o = model.new_int_var(0, 9, \"o\")\n", + " r = model.new_int_var(0, 9, \"r\")\n", + " y = model.new_int_var(0, 9, \"y\")\n", "\n", " # Create carry variables. 
c0 is true if the first column of addends carries\n",
    "    # a 1, c1 is true if the second column carries a 1, and so on.\n",
-    "    c0 = model.NewBoolVar(\"c0\")\n",
-    "    c1 = model.NewBoolVar(\"c1\")\n",
-    "    c2 = model.NewBoolVar(\"c2\")\n",
-    "    c3 = model.NewBoolVar(\"c3\")\n",
+    "    c0 = model.new_bool_var(\"c0\")\n",
+    "    c1 = model.new_bool_var(\"c1\")\n",
+    "    c2 = model.new_bool_var(\"c2\")\n",
+    "    c3 = model.new_bool_var(\"c3\")\n",
    "\n",
    "    # Force all letters to take on different values.\n",
-    "    model.AddAllDifferent(s, e, n, d, m, o, r, y)\n",
+    "    model.add_all_different(s, e, n, d, m, o, r, y)\n",
    "\n",
    "    # Column 0:\n",
-    "    model.Add(c0 == m)\n",
+    "    model.add(c0 == m)\n",
    "\n",
    "    # Column 1:\n",
-    "    model.Add(c1 + s + m == o + 10 * c0)\n",
+    "    model.add(c1 + s + m == o + 10 * c0)\n",
    "\n",
    "    # Column 2:\n",
-    "    model.Add(c2 + e + o == n + 10 * c1)\n",
+    "    model.add(c2 + e + o == n + 10 * c1)\n",
    "\n",
    "    # Column 3:\n",
-    "    model.Add(c3 + n + r == e + 10 * c2)\n",
+    "    model.add(c3 + n + r == e + 10 * c2)\n",
    "\n",
    "    # Column 4:\n",
-    "    model.Add(d + e == y + 10 * c3)\n",
+    "    model.add(d + e == y + 10 * c3)\n",
    "\n",
-    "    # Solve model.\n",
+    "    # solve model.\n",
    "    solver = cp_model.CpSolver()\n",
-    "    if solver.Solve(model) == cp_model.OPTIMAL:\n",
+    "    if solver.solve(model) == cp_model.OPTIMAL:\n",
    "        print(\"Optimal solution found!\")\n",
-    "        print(\"s:\", solver.Value(s))\n",
-    "        print(\"e:\", solver.Value(e))\n",
-    "        print(\"n:\", solver.Value(n))\n",
-    "        print(\"d:\", solver.Value(d))\n",
-    "        print(\"m:\", solver.Value(m))\n",
-    "        print(\"o:\", solver.Value(o))\n",
-    "        print(\"r:\", solver.Value(r))\n",
-    "        print(\"y:\", solver.Value(y))\n",
+    "        print(\"s:\", solver.value(s))\n",
+    "        print(\"e:\", solver.value(e))\n",
+    "        print(\"n:\", solver.value(n))\n",
+    "        print(\"d:\", solver.value(d))\n",
+    "        print(\"m:\", solver.value(m))\n",
+    "        print(\"o:\", solver.value(o))\n",
+    "        print(\"r:\", solver.value(r))\n",
+    "        print(\"y:\", solver.value(y))\n",
    "\n",
    "\n",
-    "def main(_):\n",
+    "def main(_) -> None:\n",
    "    send_more_money()\n",
    "\n",
    "\n",
diff --git a/examples/notebook/examples/cvrptw_plot.ipynb b/examples/notebook/examples/cvrptw_plot.ipynb
index 1ef4eec93b9..4c4599c99f3 100644
--- a/examples/notebook/examples/cvrptw_plot.ipynb
+++ b/examples/notebook/examples/cvrptw_plot.ipynb
@@ -5,7 +5,7 @@
    "id": "google",
    "metadata": {},
    "source": [
-    "##### Copyright 2023 Google LLC."
+    "##### Copyright 2024 Google LLC."
   ]
  },
  {
diff --git a/examples/notebook/examples/flexible_job_shop_sat.ipynb b/examples/notebook/examples/flexible_job_shop_sat.ipynb
index 19d5db8fbc1..d76f41b56e4 100644
--- a/examples/notebook/examples/flexible_job_shop_sat.ipynb
+++ b/examples/notebook/examples/flexible_job_shop_sat.ipynb
@@ -5,7 +5,7 @@
    "id": "google",
    "metadata": {},
    "source": [
-    "##### Copyright 2023 Google LLC."
+    "##### Copyright 2024 Google LLC."
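Reviewer note: the carry-based column encoding in cryptarithm_sat.ipynb above can be sanity-checked by hand against the puzzle's well-known unique solution (O=0, M=1, Y=2, E=5, N=6, D=7, R=8, S=9). A standalone plain-Python check, values hard-coded for reference:

# 9567 + 1085 == 10652, i.e. SEND + MORE == MONEY.
s, e, n, d, m, o, r, y = 9, 5, 6, 7, 1, 0, 8, 2
send = 1000 * s + 100 * e + 10 * n + d               # 9567
more = 1000 * m + 100 * o + 10 * r + e               # 1085
money = 10000 * m + 1000 * o + 100 * n + 10 * e + y  # 10652
assert send + more == money
# Units column: d + e = 12, so y = 2 and the carry c3 = 1,
# matching the model's constraint d + e == y + 10 * c3.
assert (d + e) % 10 == y and (d + e) // 10 == 1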
] }, { @@ -101,21 +101,21 @@ "class SolutionPrinter(cp_model.CpSolverSolutionCallback):\n", " \"\"\"Print intermediate solutions.\"\"\"\n", "\n", - " def __init__(self):\n", + " def __init__(self) -> None:\n", " cp_model.CpSolverSolutionCallback.__init__(self)\n", " self.__solution_count = 0\n", "\n", - " def on_solution_callback(self):\n", + " def on_solution_callback(self) -> None:\n", " \"\"\"Called at each new solution.\"\"\"\n", " print(\n", - " \"Solution %i, time = %f s, objective = %i\"\n", - " % (self.__solution_count, self.WallTime(), self.ObjectiveValue())\n", + " f\"Solution {self.__solution_count}, time = {self.wall_time} s,\"\n", + " f\" objective = {self.objective_value}\"\n", " )\n", " self.__solution_count += 1\n", "\n", "\n", - "def flexible_jobshop():\n", - " \"\"\"Solve a small flexible jobshop problem.\"\"\"\n", + "def flexible_jobshop() -> None:\n", + " \"\"\"solve a small flexible jobshop problem.\"\"\"\n", " # Data part.\n", " jobs = [ # task = (processing_time, machine_id)\n", " [ # Job 0\n", @@ -152,13 +152,13 @@ " max_task_duration = max(max_task_duration, alternative[0])\n", " horizon += max_task_duration\n", "\n", - " print(\"Horizon = %i\" % horizon)\n", + " print(f\"Horizon = {horizon}\")\n", "\n", " # Global storage of variables.\n", " intervals_per_resources = collections.defaultdict(list)\n", " starts = {} # indexed by (job_id, task_id).\n", " presences = {} # indexed by (job_id, task_id, alt_id).\n", - " job_ends = []\n", + " job_ends: list[cp_model.IntVar] = []\n", "\n", " # Scan the jobs and create the relevant variables and intervals.\n", " for job_id in all_jobs:\n", @@ -180,13 +180,13 @@ " max_duration = max(max_duration, alt_duration)\n", "\n", " # Create main interval for the task.\n", - " suffix_name = \"_j%i_t%i\" % (job_id, task_id)\n", - " start = model.NewIntVar(0, horizon, \"start\" + suffix_name)\n", - " duration = model.NewIntVar(\n", + " suffix_name = f\"_j{job_id}_t{task_id}\"\n", + " start = model.new_int_var(0, horizon, \"start\" + suffix_name)\n", + " duration = model.new_int_var(\n", " min_duration, max_duration, \"duration\" + suffix_name\n", " )\n", - " end = model.NewIntVar(0, horizon, \"end\" + suffix_name)\n", - " interval = model.NewIntervalVar(\n", + " end = model.new_int_var(0, horizon, \"end\" + suffix_name)\n", + " interval = model.new_interval_var(\n", " start, duration, end, \"interval\" + suffix_name\n", " )\n", "\n", @@ -195,27 +195,27 @@ "\n", " # Add precedence with previous task in the same job.\n", " if previous_end is not None:\n", - " model.Add(start >= previous_end)\n", + " model.add(start >= previous_end)\n", " previous_end = end\n", "\n", " # Create alternative intervals.\n", " if num_alternatives > 1:\n", " l_presences = []\n", " for alt_id in all_alternatives:\n", - " alt_suffix = \"_j%i_t%i_a%i\" % (job_id, task_id, alt_id)\n", - " l_presence = model.NewBoolVar(\"presence\" + alt_suffix)\n", - " l_start = model.NewIntVar(0, horizon, \"start\" + alt_suffix)\n", + " alt_suffix = f\"_j{job_id}_t{task_id}_a{alt_id}\"\n", + " l_presence = model.new_bool_var(\"presence\" + alt_suffix)\n", + " l_start = model.new_int_var(0, horizon, \"start\" + alt_suffix)\n", " l_duration = task[alt_id][0]\n", - " l_end = model.NewIntVar(0, horizon, \"end\" + alt_suffix)\n", - " l_interval = model.NewOptionalIntervalVar(\n", + " l_end = model.new_int_var(0, horizon, \"end\" + alt_suffix)\n", + " l_interval = model.new_optional_interval_var(\n", " l_start, l_duration, l_end, l_presence, \"interval\" + alt_suffix\n", " )\n", " 
l_presences.append(l_presence)\n", "\n", " # Link the primary/global variables with the local ones.\n", - " model.Add(start == l_start).OnlyEnforceIf(l_presence)\n", - " model.Add(duration == l_duration).OnlyEnforceIf(l_presence)\n", - " model.Add(end == l_end).OnlyEnforceIf(l_presence)\n", + " model.add(start == l_start).only_enforce_if(l_presence)\n", + " model.add(duration == l_duration).only_enforce_if(l_presence)\n", + " model.add(end == l_end).only_enforce_if(l_presence)\n", "\n", " # Add the local interval to the right machine.\n", " intervals_per_resources[task[alt_id][1]].append(l_interval)\n", @@ -224,53 +224,50 @@ " presences[(job_id, task_id, alt_id)] = l_presence\n", "\n", " # Select exactly one presence variable.\n", - " model.AddExactlyOne(l_presences)\n", + " model.add_exactly_one(l_presences)\n", " else:\n", " intervals_per_resources[task[0][1]].append(interval)\n", - " presences[(job_id, task_id, 0)] = model.NewConstant(1)\n", + " presences[(job_id, task_id, 0)] = model.new_constant(1)\n", "\n", - " job_ends.append(previous_end)\n", + " if previous_end is not None:\n", + " job_ends.append(previous_end)\n", "\n", " # Create machines constraints.\n", " for machine_id in all_machines:\n", " intervals = intervals_per_resources[machine_id]\n", " if len(intervals) > 1:\n", - " model.AddNoOverlap(intervals)\n", + " model.add_no_overlap(intervals)\n", "\n", " # Makespan objective\n", - " makespan = model.NewIntVar(0, horizon, \"makespan\")\n", - " model.AddMaxEquality(makespan, job_ends)\n", - " model.Minimize(makespan)\n", + " makespan = model.new_int_var(0, horizon, \"makespan\")\n", + " model.add_max_equality(makespan, job_ends)\n", + " model.minimize(makespan)\n", "\n", " # Solve model.\n", " solver = cp_model.CpSolver()\n", " solution_printer = SolutionPrinter()\n", - " status = solver.Solve(model, solution_printer)\n", + " status = solver.solve(model, solution_printer)\n", "\n", " # Print final solution.\n", - " for job_id in all_jobs:\n", - " print(\"Job %i:\" % job_id)\n", - " for task_id in range(len(jobs[job_id])):\n", - " start_value = solver.Value(starts[(job_id, task_id)])\n", - " machine = -1\n", - " duration = -1\n", - " selected = -1\n", - " for alt_id in range(len(jobs[job_id][task_id])):\n", - " if solver.Value(presences[(job_id, task_id, alt_id)]):\n", - " duration = jobs[job_id][task_id][alt_id][0]\n", - " machine = jobs[job_id][task_id][alt_id][1]\n", - " selected = alt_id\n", - " print(\n", - " \" task_%i_%i starts at %i (alt %i, machine %i, duration %i)\"\n", - " % (job_id, task_id, start_value, selected, machine, duration)\n", - " )\n", - "\n", - " print(\"Solve status: %s\" % solver.StatusName(status))\n", - " print(\"Optimal objective value: %i\" % solver.ObjectiveValue())\n", - " print(\"Statistics\")\n", - " print(\" - conflicts : %i\" % solver.NumConflicts())\n", - " print(\" - branches : %i\" % solver.NumBranches())\n", - " print(\" - wall time : %f s\" % solver.WallTime())\n", + " if status in (cp_model.OPTIMAL, cp_model.FEASIBLE):\n", + " print(f\"Optimal objective value: {solver.objective_value}\")\n", + " for job_id in all_jobs:\n", + " print(f\"Job {job_id}\")\n", + " for task_id, task in enumerate(jobs[job_id]):\n", + " start_value = solver.value(starts[(job_id, task_id)])\n", + " machine: int = -1\n", + " task_duration: int = -1\n", + " selected: int = -1\n", + " for alt_id, alt in enumerate(task):\n", + " if solver.boolean_value(presences[(job_id, task_id, alt_id)]):\n", + " task_duration, machine = alt\n", + " selected = alt_id\n", + " 
print(\n",
+    "                    f\"  task_{job_id}_{task_id} starts at {start_value} (alt\"\n",
+    "                    f\" {selected}, machine {machine}, duration {task_duration})\"\n",
+    "                )\n",
+    "\n",
+    "        print(solver.response_stats())\n",
    "\n",
    "\n",
    "flexible_jobshop()\n",
diff --git a/examples/notebook/examples/gate_scheduling_sat.ipynb b/examples/notebook/examples/gate_scheduling_sat.ipynb
index 5a64af6250b..985b8f9c6ba 100644
--- a/examples/notebook/examples/gate_scheduling_sat.ipynb
+++ b/examples/notebook/examples/gate_scheduling_sat.ipynb
@@ -5,7 +5,7 @@
    "id": "google",
    "metadata": {},
    "source": [
-    "##### Copyright 2023 Google LLC."
+    "##### Copyright 2024 Google LLC."
   ]
  },
  {
@@ -96,7 +96,7 @@
    "from ortools.sat.python import cp_model\n",
    "\n",
    "\n",
-    "def main(_):\n",
+    "def main(_) -> None:\n",
    "    \"\"\"Solves the gate scheduling problem.\"\"\"\n",
    "    model = cp_model.CpModel()\n",
    "\n",
@@ -134,71 +134,81 @@
    "\n",
    "    for i in all_jobs:\n",
    "        # Create main interval.\n",
-    "        start = model.NewIntVar(0, horizon, \"start_%i\" % i)\n",
+    "        start = model.new_int_var(0, horizon, f\"start_{i}\")\n",
    "        duration = jobs[i][0]\n",
-    "        end = model.NewIntVar(0, horizon, \"end_%i\" % i)\n",
-    "        interval = model.NewIntervalVar(start, duration, end, \"interval_%i\" % i)\n",
+    "        end = model.new_int_var(0, horizon, f\"end_{i}\")\n",
+    "        interval = model.new_interval_var(start, duration, end, f\"interval_{i}\")\n",
    "        starts.append(start)\n",
    "        intervals.append(interval)\n",
    "        ends.append(end)\n",
    "        demands.append(jobs[i][1])\n",
    "\n",
    "        # Create an optional copy of interval to be executed on machine 0.\n",
-    "        performed_on_m0 = model.NewBoolVar(\"perform_%i_on_m0\" % i)\n",
+    "        performed_on_m0 = model.new_bool_var(f\"perform_{i}_on_m0\")\n",
    "        performed.append(performed_on_m0)\n",
-    "        start0 = model.NewIntVar(0, horizon, \"start_%i_on_m0\" % i)\n",
-    "        end0 = model.NewIntVar(0, horizon, \"end_%i_on_m0\" % i)\n",
-    "        interval0 = model.NewOptionalIntervalVar(\n",
-    "            start0, duration, end0, performed_on_m0, \"interval_%i_on_m0\" % i\n",
+    "        start0 = model.new_int_var(0, horizon, f\"start_{i}_on_m0\")\n",
+    "        end0 = model.new_int_var(0, horizon, f\"end_{i}_on_m0\")\n",
+    "        interval0 = model.new_optional_interval_var(\n",
+    "            start0, duration, end0, performed_on_m0, f\"interval_{i}_on_m0\"\n",
    "        )\n",
    "        intervals0.append(interval0)\n",
    "\n",
    "        # Create an optional copy of interval to be executed on machine 1.\n",
-    "        start1 = model.NewIntVar(0, horizon, \"start_%i_on_m1\" % i)\n",
-    "        end1 = model.NewIntVar(0, horizon, \"end_%i_on_m1\" % i)\n",
-    "        interval1 = model.NewOptionalIntervalVar(\n",
-    "            start1, duration, end1, performed_on_m0.Not(), \"interval_%i_on_m1\" % i\n",
+    "        start1 = model.new_int_var(0, horizon, f\"start_{i}_on_m1\")\n",
+    "        end1 = model.new_int_var(0, horizon, f\"end_{i}_on_m1\")\n",
+    "        interval1 = model.new_optional_interval_var(\n",
+    "            start1,\n",
+    "            duration,\n",
+    "            end1,\n",
+    "            ~performed_on_m0,\n",
+    "            f\"interval_{i}_on_m1\",\n",
    "        )\n",
    "        intervals1.append(interval1)\n",
    "\n",
    "        # We only propagate the constraint if the task is performed on the machine.\n",
-    "        model.Add(start0 == start).OnlyEnforceIf(performed_on_m0)\n",
-    "        model.Add(start1 == start).OnlyEnforceIf(performed_on_m0.Not())\n",
+    "        model.add(start0 == start).only_enforce_if(performed_on_m0)\n",
+    "        model.add(start1 == start).only_enforce_if(~performed_on_m0)\n",
    "\n",
    "    # Width constraint (modeled as a cumulative)\n",
-    "    model.AddCumulative(intervals, demands, max_width)\n",
+    "    model.add_cumulative(intervals, demands, max_width)\n",
    "\n",
" # Choose which machine to perform the jobs on.\n", - " model.AddNoOverlap(intervals0)\n", - " model.AddNoOverlap(intervals1)\n", + " model.add_no_overlap(intervals0)\n", + " model.add_no_overlap(intervals1)\n", "\n", " # Objective variable.\n", - " makespan = model.NewIntVar(0, horizon, \"makespan\")\n", - " model.AddMaxEquality(makespan, ends)\n", - " model.Minimize(makespan)\n", + " makespan = model.new_int_var(0, horizon, \"makespan\")\n", + " model.add_max_equality(makespan, ends)\n", + " model.minimize(makespan)\n", "\n", " # Symmetry breaking.\n", - " model.Add(performed[0] == 0)\n", + " model.add(performed[0] == 0)\n", "\n", " # Solve model.\n", " solver = cp_model.CpSolver()\n", - " solver.Solve(model)\n", + " solver.solve(model)\n", "\n", " # Output solution.\n", " if visualization.RunFromIPython():\n", - " output = visualization.SvgWrapper(solver.ObjectiveValue(), max_width, 40.0)\n", - " output.AddTitle(\"Makespan = %i\" % solver.ObjectiveValue())\n", + " output = visualization.SvgWrapper(solver.objective_value, max_width, 40.0)\n", + " output.AddTitle(f\"Makespan = {solver.objective_value}\")\n", " color_manager = visualization.ColorManager()\n", " color_manager.SeedRandomColor(0)\n", "\n", " for i in all_jobs:\n", - " performed_machine = 1 - solver.Value(performed[i])\n", - " start = solver.Value(starts[i])\n", + " performed_machine = 1 - solver.value(performed[i])\n", + " start_of_task = solver.value(starts[i])\n", " d_x = jobs[i][0]\n", " d_y = jobs[i][1]\n", " s_y = performed_machine * (max_width - d_y)\n", " output.AddRectangle(\n", - " start, s_y, d_x, d_y, color_manager.RandomColor(), \"black\", \"j%i\" % i\n", + " start_of_task,\n", + " s_y,\n", + " d_x,\n", + " d_y,\n", + " color_manager.RandomColor(),\n", + " \"black\",\n", + " f\"j{i}\",\n", " )\n", "\n", " output.AddXScale()\n", @@ -206,17 +216,15 @@ " output.Display()\n", " else:\n", " print(\"Solution\")\n", - " print(\" - makespan = %i\" % solver.ObjectiveValue())\n", + " print(f\" - makespan = {solver.objective_value}\")\n", " for i in all_jobs:\n", - " performed_machine = 1 - solver.Value(performed[i])\n", - " start = solver.Value(starts[i])\n", + " performed_machine = 1 - solver.value(performed[i])\n", + " start_of_task = solver.value(starts[i])\n", " print(\n", - " \" - Job %i starts at %i on machine %i\" % (i, start, performed_machine)\n", + " f\" - Job {i} starts at {start_of_task} on machine\"\n", + " f\" {performed_machine}\"\n", " )\n", - " print(\"Statistics\")\n", - " print(\" - conflicts : %i\" % solver.NumConflicts())\n", - " print(\" - branches : %i\" % solver.NumBranches())\n", - " print(\" - wall time : %f s\" % solver.WallTime())\n", + " print(solver.response_stats())\n", "\n", "\n", "main()\n", diff --git a/examples/notebook/examples/golomb8.ipynb b/examples/notebook/examples/golomb8.ipynb index 6d05000633f..0414ee36c55 100644 --- a/examples/notebook/examples/golomb8.ipynb +++ b/examples/notebook/examples/golomb8.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { @@ -91,17 +91,14 @@ "metadata": {}, "outputs": [], "source": [ - "from ortools.sat.colab import flags\n", "from ortools.constraint_solver import pywrapcp\n", "\n", - "FLAGS = flags.FLAGS\n", - "\n", "# We disable the following warning because it is a false positive on constraints\n", "# like: solver.Add(x == 0)\n", "# pylint: disable=g-explicit-bool-comparison\n", "\n", "\n", - "def main(_):\n", + "def main(_) -> None:\n", " # Create the solver.\n", " solver = pywrapcp.Solver(\"golomb ruler\")\n", "\n", diff --git a/examples/notebook/examples/golomb_sat.ipynb b/examples/notebook/examples/golomb_sat.ipynb index 12be270dc9a..cdd120961fa 100644 --- a/examples/notebook/examples/golomb_sat.ipynb +++ b/examples/notebook/examples/golomb_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -105,7 +105,7 @@ ")\n", "\n", "\n", - "def solve_golomb_ruler(order, params):\n", + "def solve_golomb_ruler(order: int, params: str) -> None:\n", " \"\"\"Solve the Golomb ruler problem.\"\"\"\n", " # Create the model.\n", " model = cp_model.CpModel()\n", @@ -113,26 +113,26 @@ " var_max = order * order\n", " all_vars = list(range(0, order))\n", "\n", - " marks = [model.NewIntVar(0, var_max, f\"marks_{i}\") for i in all_vars]\n", + " marks = [model.new_int_var(0, var_max, f\"marks_{i}\") for i in all_vars]\n", "\n", - " model.Add(marks[0] == 0)\n", + " model.add(marks[0] == 0)\n", " for i in range(order - 2):\n", - " model.Add(marks[i + 1] > marks[i])\n", + " model.add(marks[i + 1] > marks[i])\n", "\n", " diffs = []\n", " for i in range(order - 1):\n", " for j in range(i + 1, order):\n", - " diff = model.NewIntVar(0, var_max, f\"diff [{j},{i}]\")\n", - " model.Add(diff == marks[j] - marks[i])\n", + " diff = model.new_int_var(0, var_max, f\"diff [{j},{i}]\")\n", + " model.add(diff == marks[j] - marks[i])\n", " diffs.append(diff)\n", - " model.AddAllDifferent(diffs)\n", + " model.add_all_different(diffs)\n", "\n", " # symmetry breaking\n", " if order > 2:\n", - " model.Add(marks[order - 1] - marks[order - 2] > marks[1] - marks[0])\n", + " model.add(marks[order - 1] - marks[order - 2] > marks[1] - marks[0])\n", "\n", " # Objective\n", - " model.Minimize(marks[order - 1])\n", + " model.minimize(marks[order - 1])\n", "\n", " # Solve the model.\n", " solver = cp_model.CpSolver()\n", @@ -140,21 +140,16 @@ " text_format.Parse(params, solver.parameters)\n", " solution_printer = cp_model.ObjectiveSolutionPrinter()\n", " print(f\"Golomb ruler(order={order})\")\n", - " status = solver.Solve(model, solution_printer)\n", + " status = solver.solve(model, solution_printer)\n", "\n", " # Print solution.\n", - " print(f\"status: {solver.StatusName(status)}\")\n", " if status in (cp_model.OPTIMAL, cp_model.FEASIBLE):\n", " for idx, var in enumerate(marks):\n", - " print(f\"mark[{idx}]: {solver.Value(var)}\")\n", - " intervals = [solver.Value(diff) for diff in diffs]\n", + " print(f\"mark[{idx}]: {solver.value(var)}\")\n", + " intervals = [solver.value(diff) for diff in diffs]\n", " intervals.sort()\n", " print(f\"intervals: {intervals}\")\n", - "\n", - " print(\"Statistics:\")\n", - " print(f\"- conflicts: {solver.NumConflicts()}\")\n", - " print(f\"- branches : {solver.NumBranches()}\")\n", - " print(f\"- wall time: {solver.WallTime()}s\\n\")\n", + " print(solver.response_stats())\n", "\n", "\n", "def main(argv: Sequence[str]) -> None:\n", diff --git a/examples/notebook/examples/hidato_sat.ipynb 
b/examples/notebook/examples/hidato_sat.ipynb
index c400624b5d3..49640e66c3c 100644
--- a/examples/notebook/examples/hidato_sat.ipynb
+++ b/examples/notebook/examples/hidato_sat.ipynb
@@ -5,7 +5,7 @@
    "id": "google",
    "metadata": {},
    "source": [
-    "##### Copyright 2023 Google LLC."
+    "##### Copyright 2024 Google LLC."
   ]
  },
  {
@@ -83,11 +83,12 @@
    "metadata": {},
    "outputs": [],
    "source": [
+    "from typing import Union\n",
    "from ortools.sat.colab import visualization\n",
    "from ortools.sat.python import cp_model\n",
    "\n",
    "\n",
-    "def build_pairs(rows, cols):\n",
+    "def build_pairs(rows: int, cols: int) -> list[tuple[int, int]]:\n",
    "    \"\"\"Build closeness pairs for consecutive numbers.\n",
    "\n",
    "    Build set of allowed pairs such that two consecutive numbers touch\n",
@@ -116,7 +117,7 @@
    "    return result\n",
    "\n",
    "\n",
-    "def print_solution(positions, rows, cols):\n",
+    "def print_solution(positions: list[int], rows: int, cols: int):\n",
    "    \"\"\"Print a current solution.\"\"\"\n",
    "    # Create empty board.\n",
    "    board = []\n",
@@ -131,7 +132,7 @@
    "    print_matrix(board)\n",
    "\n",
    "\n",
-    "def print_matrix(game):\n",
+    "def print_matrix(game: list[list[int]]) -> None:\n",
    "    \"\"\"Pretty print of a matrix.\"\"\"\n",
    "    rows = len(game)\n",
    "    cols = len(game[0])\n",
@@ -141,11 +142,11 @@
    "            if game[i][j] == 0:\n",
    "                line += \" .\"\n",
    "            else:\n",
-    "                line += \"% 3s\" % game[i][j]\n",
+    "                line += f\"{game[i][j]:3}\"\n",
    "        print(line)\n",
    "\n",
    "\n",
-    "def build_puzzle(problem):\n",
+    "def build_puzzle(problem: int) -> Union[None, list[list[int]]]:\n",
    "    \"\"\"Build the problem from its index.\"\"\"\n",
    "    #\n",
    "    # models, a 0 indicates an open cell whose number is not yet known.\n",
    "    #\n",
@@ -169,7 +170,7 @@
    "\n",
    "    elif problem == 3:\n",
    "        # Problems from the book:\n",
-    "        # Gyora Bededek: \"Hidato: 2000 Pure Logic Puzzles\"\n",
+    "        # Gyora Benedek: 'Hidato: 2000 Pure Logic Puzzles'\n",
    "        # Problem 1 (Practice)\n",
    "        puzzle = [\n",
    "            [0, 0, 20, 0, 0],\n",
@@ -214,8 +215,8 @@
    "    return puzzle\n",
    "\n",
    "\n",
-    "def solve_hidato(puzzle, index):\n",
-    "    \"\"\"Solve the given hidato table.\"\"\"\n",
+    "def solve_hidato(puzzle: list[list[int]], index: int) -> None:\n",
+    "    \"\"\"solve the given hidato table.\"\"\"\n",
    "    # Create the model.\n",
    "    model = cp_model.CpModel()\n",
    "\n",
@@ -223,64 +224,61 @@
    "    c = len(puzzle[0])\n",
    "    if not visualization.RunFromIPython():\n",
    "        print(\"\")\n",
-    "        print(\"----- Solving problem %i -----\" % index)\n",
+    "        print(f\"----- Solving problem {index} -----\")\n",
    "        print(\"\")\n",
-    "        print((\"Initial game (%i x %i)\" % (r, c)))\n",
+    "        print(f\"Initial game ({r} x {c})\")\n",
    "        print_matrix(puzzle)\n",
    "\n",
    "    #\n",
-    "    # declare variables\n",
+    "    # Declare variables.\n",
    "    #\n",
-    "    positions = [model.NewIntVar(0, r * c - 1, \"p[%i]\" % i) for i in range(r * c)]\n",
+    "    positions = [model.new_int_var(0, r * c - 1, f\"p[{i}]\") for i in range(r * c)]\n",
    "\n",
    "    #\n",
-    "    # constraints\n",
+    "    # Constraints.\n",
    "    #\n",
-    "    model.AddAllDifferent(positions)\n",
+    "    model.add_all_different(positions)\n",
    "\n",
    "    #\n",
-    "    # Fill in the clues\n",
+    "    # Fill in the clues.\n",
    "    #\n",
    "    for i in range(r):\n",
    "        for j in range(c):\n",
    "            if puzzle[i][j] > 0:\n",
-    "                model.Add(positions[puzzle[i][j] - 1] == i * c + j)\n",
+    "                model.add(positions[puzzle[i][j] - 1] == i * c + j)\n",
    "\n",
    "    # Consecutive numbers must touch each other in the grid.\n",
    "    # We use an allowed assignment constraint to model it.\n",
    "    close_tuples = build_pairs(r, c)\n",
    "    for k in range(0, r * c - 1):\n",
-    "        
model.AddAllowedAssignments([positions[k], positions[k + 1]], close_tuples)\n", + " model.add_allowed_assignments([positions[k], positions[k + 1]], close_tuples)\n", "\n", " #\n", - " # solution and search\n", + " # Solution and search.\n", " #\n", "\n", " solver = cp_model.CpSolver()\n", - " status = solver.Solve(model)\n", + " status = solver.solve(model)\n", "\n", " if status == cp_model.OPTIMAL:\n", " if visualization.RunFromIPython():\n", " output = visualization.SvgWrapper(10, r, 40.0)\n", " for i, var in enumerate(positions):\n", - " val = solver.Value(var)\n", + " val = solver.value(var)\n", " x = val % c\n", " y = val // c\n", " color = \"white\" if puzzle[y][x] == 0 else \"lightgreen\"\n", " output.AddRectangle(x, r - y - 1, 1, 1, color, \"black\", str(i + 1))\n", "\n", - " output.AddTitle(\"Puzzle %i solved in %f s\" % (index, solver.WallTime()))\n", + " output.AddTitle(f\"Puzzle {index} solved in {solver.wall_time:.2f} s\")\n", " output.Display()\n", " else:\n", " print_solution(\n", - " [solver.Value(x) for x in positions],\n", + " [solver.value(x) for x in positions],\n", " r,\n", " c,\n", " )\n", - " print(\"Statistics\")\n", - " print(\" - conflicts : %i\" % solver.NumConflicts())\n", - " print(\" - branches : %i\" % solver.NumBranches())\n", - " print(\" - wall time : %f s\" % solver.WallTime())\n", + " print(solver.response_stats())\n", "\n", "\n", "def main(_):\n", diff --git a/examples/notebook/examples/integer_programming.ipynb b/examples/notebook/examples/integer_programming.ipynb index 70931b481ae..3dd97a4aae9 100644 --- a/examples/notebook/examples/integer_programming.ipynb +++ b/examples/notebook/examples/integer_programming.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -171,6 +171,7 @@ " # RunIntegerExampleNaturalLanguageAPI('CBC')\n", " RunIntegerExampleNaturalLanguageAPI(\"SCIP\")\n", " RunIntegerExampleNaturalLanguageAPI(\"SAT\")\n", + " RunIntegerExampleNaturalLanguageAPI(\"XPRESS\")\n", "\n", "\n", "def RunAllIntegerExampleCppStyleAPI():\n", @@ -179,6 +180,7 @@ " # RunIntegerExampleCppStyleAPI('CBC')\n", " RunIntegerExampleCppStyleAPI(\"SCIP\")\n", " RunIntegerExampleCppStyleAPI(\"SAT\")\n", + " RunIntegerExampleCppStyleAPI(\"XPRESS\")\n", "\n", "\n", "def main():\n", diff --git a/examples/notebook/examples/jobshop_ft06_distance_sat.ipynb b/examples/notebook/examples/jobshop_ft06_distance_sat.ipynb index 8c5bd6d9926..dc5ee45b5b8 100644 --- a/examples/notebook/examples/jobshop_ft06_distance_sat.ipynb +++ b/examples/notebook/examples/jobshop_ft06_distance_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
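Reviewer note: the hidato_sat.ipynb hunks above rely on add_allowed_assignments to force consecutive numbers onto touching cells. A tiny standalone example of the renamed constraint, with an invented pair table:

from ortools.sat.python import cp_model

model = cp_model.CpModel()
a = model.new_int_var(0, 3, "a")
b = model.new_int_var(0, 3, "b")
# (a, b) must take one of the listed pairs; anything else is infeasible.
model.add_allowed_assignments([a, b], [(0, 1), (1, 2), (2, 3)])

solver = cp_model.CpSolver()
if solver.solve(model) == cp_model.OPTIMAL:
    print(solver.value(a), solver.value(b))  # one of the allowed pairs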
] }, { @@ -100,12 +100,12 @@ "from ortools.sat.python import cp_model\n", "\n", "\n", - "def distance_between_jobs(x, y):\n", + "def distance_between_jobs(x: int, y: int) -> int:\n", " \"\"\"Returns the distance between tasks of job x and tasks of job y.\"\"\"\n", " return abs(x - y)\n", "\n", "\n", - "def jobshop_ft06_distance():\n", + "def jobshop_ft06_distance() -> None:\n", " \"\"\"Solves the ft06 jobshop with distances between tasks.\"\"\"\n", " # Creates the model.\n", " model = cp_model.CpModel()\n", @@ -142,11 +142,11 @@ " all_tasks = {}\n", " for i in all_jobs:\n", " for j in all_machines:\n", - " start_var = model.NewIntVar(0, horizon, \"start_%i_%i\" % (i, j))\n", + " start_var = model.new_int_var(0, horizon, f\"start_{i}_{j}\")\n", " duration = durations[i][j]\n", - " end_var = model.NewIntVar(0, horizon, \"end_%i_%i\" % (i, j))\n", - " interval_var = model.NewIntervalVar(\n", - " start_var, duration, end_var, \"interval_%i_%i\" % (i, j)\n", + " end_var = model.new_int_var(0, horizon, f\"end_{i}_{j}\")\n", + " interval_var = model.new_interval_var(\n", + " start_var, duration, end_var, f\"interval_{i}_{j}\"\n", " )\n", " all_tasks[(i, j)] = task_type(\n", " start=start_var, end=end_var, interval=interval_var\n", @@ -165,51 +165,52 @@ " job_indices.append(j)\n", " job_starts.append(all_tasks[(j, k)].start)\n", " job_ends.append(all_tasks[(j, k)].end)\n", - " model.AddNoOverlap(job_intervals)\n", + " model.add_no_overlap(job_intervals)\n", "\n", " arcs = []\n", " for j1 in range(len(job_intervals)):\n", " # Initial arc from the dummy node (0) to a task.\n", - " start_lit = model.NewBoolVar(\"%i is first job\" % j1)\n", + " start_lit = model.new_bool_var(f\"{j1} is first job\")\n", " arcs.append((0, j1 + 1, start_lit))\n", " # Final arc from an arc to the dummy node.\n", - " arcs.append((j1 + 1, 0, model.NewBoolVar(\"%i is last job\" % j1)))\n", + " arcs.append((j1 + 1, 0, model.new_bool_var(f\"{j1} is last job\")))\n", "\n", " for j2 in range(len(job_intervals)):\n", " if j1 == j2:\n", " continue\n", "\n", - " lit = model.NewBoolVar(\"%i follows %i\" % (j2, j1))\n", + " lit = model.new_bool_var(f\"{j2} follows {j1}\")\n", " arcs.append((j1 + 1, j2 + 1, lit))\n", "\n", " # We add the reified precedence to link the literal with the\n", " # times of the two tasks.\n", " min_distance = distance_between_jobs(j1, j2)\n", - " model.Add(job_starts[j2] >= job_ends[j1] + min_distance).OnlyEnforceIf(\n", - " lit\n", - " )\n", + " model.add(\n", + " job_starts[j2] >= job_ends[j1] + min_distance\n", + " ).only_enforce_if(lit)\n", "\n", - " model.AddCircuit(arcs)\n", + " model.add_circuit(arcs)\n", "\n", " # Precedences inside a job.\n", " for i in all_jobs:\n", " for j in range(0, machines_count - 1):\n", - " model.Add(all_tasks[(i, j + 1)].start >= all_tasks[(i, j)].end)\n", + " model.add(all_tasks[(i, j + 1)].start >= all_tasks[(i, j)].end)\n", "\n", " # Makespan objective.\n", - " obj_var = model.NewIntVar(0, horizon, \"makespan\")\n", - " model.AddMaxEquality(\n", + " obj_var = model.new_int_var(0, horizon, \"makespan\")\n", + " model.add_max_equality(\n", " obj_var, [all_tasks[(i, machines_count - 1)].end for i in all_jobs]\n", " )\n", - " model.Minimize(obj_var)\n", + " model.minimize(obj_var)\n", "\n", " # Solve model.\n", " solver = cp_model.CpSolver()\n", - " status = solver.Solve(model)\n", + " status = solver.solve(model)\n", "\n", " # Output solution.\n", " if status == cp_model.OPTIMAL:\n", - " print(\"Optimal makespan: %i\" % solver.ObjectiveValue())\n", + " print(f\"Optimal 
makespan: {solver.objective_value}\")\n", + " print(solver.response_stats())\n", "\n", "\n", "jobshop_ft06_distance()\n", diff --git a/examples/notebook/examples/jobshop_ft06_sat.ipynb b/examples/notebook/examples/jobshop_ft06_sat.ipynb index 2ba0409dc53..792fd66ff5f 100644 --- a/examples/notebook/examples/jobshop_ft06_sat.ipynb +++ b/examples/notebook/examples/jobshop_ft06_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -98,7 +98,7 @@ "from ortools.sat.python import cp_model\n", "\n", "\n", - "def jobshop_ft06():\n", + "def jobshop_ft06() -> None:\n", " \"\"\"Solves the ft06 jobshop.\"\"\"\n", " # Creates the solver.\n", " model = cp_model.CpModel()\n", @@ -135,11 +135,11 @@ " all_tasks = {}\n", " for i in all_jobs:\n", " for j in all_machines:\n", - " start_var = model.NewIntVar(0, horizon, \"start_%i_%i\" % (i, j))\n", + " start_var = model.new_int_var(0, horizon, f\"start_{i}_{j}\")\n", " duration = durations[i][j]\n", - " end_var = model.NewIntVar(0, horizon, \"end_%i_%i\" % (i, j))\n", - " interval_var = model.NewIntervalVar(\n", - " start_var, duration, end_var, \"interval_%i_%i\" % (i, j)\n", + " end_var = model.new_int_var(0, horizon, f\"end_{i}_{j}\")\n", + " interval_var = model.new_interval_var(\n", + " start_var, duration, end_var, f\"interval_{i}_{j}\"\n", " )\n", " all_tasks[(i, j)] = task_type(\n", " start=start_var, end=end_var, interval=interval_var\n", @@ -154,35 +154,35 @@ " if machines[j][k] == i:\n", " machines_jobs.append(all_tasks[(j, k)].interval)\n", " machine_to_jobs[i] = machines_jobs\n", - " model.AddNoOverlap(machines_jobs)\n", + " model.add_no_overlap(machines_jobs)\n", "\n", " # Precedences inside a job.\n", " for i in all_jobs:\n", " for j in range(0, machines_count - 1):\n", - " model.Add(all_tasks[(i, j + 1)].start >= all_tasks[(i, j)].end)\n", + " model.add(all_tasks[(i, j + 1)].start >= all_tasks[(i, j)].end)\n", "\n", " # Makespan objective.\n", - " obj_var = model.NewIntVar(0, horizon, \"makespan\")\n", - " model.AddMaxEquality(\n", + " obj_var = model.new_int_var(0, horizon, \"makespan\")\n", + " model.add_max_equality(\n", " obj_var, [all_tasks[(i, machines_count - 1)].end for i in all_jobs]\n", " )\n", - " model.Minimize(obj_var)\n", + " model.minimize(obj_var)\n", "\n", - " # Solve model.\n", + " # Solve the model.\n", " solver = cp_model.CpSolver()\n", " solver.parameters.log_search_progress = True\n", - " status = solver.Solve(model)\n", + " status = solver.solve(model)\n", "\n", - " # Output solution.\n", + " # Output the solution.\n", " if status == cp_model.OPTIMAL:\n", " if visualization.RunFromIPython():\n", " starts = [\n", - " [solver.Value(all_tasks[(i, j)][0]) for j in all_machines]\n", + " [solver.value(all_tasks[(i, j)][0]) for j in all_machines]\n", " for i in all_jobs\n", " ]\n", " visualization.DisplayJobshop(starts, durations, machines, \"FT06\")\n", " else:\n", - " print(\"Optimal makespan: %i\" % solver.ObjectiveValue())\n", + " print(f\"Optimal makespan: {solver.objective_value}\")\n", "\n", "\n", "jobshop_ft06()\n", diff --git a/examples/notebook/examples/jobshop_with_maintenance_sat.ipynb b/examples/notebook/examples/jobshop_with_maintenance_sat.ipynb index 2382b0fe857..a40167ca522 100644 --- a/examples/notebook/examples/jobshop_with_maintenance_sat.ipynb +++ b/examples/notebook/examples/jobshop_with_maintenance_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." 
+ "##### Copyright 2024 Google LLC." ] }, { @@ -91,20 +91,20 @@ "class SolutionPrinter(cp_model.CpSolverSolutionCallback):\n", " \"\"\"Print intermediate solutions.\"\"\"\n", "\n", - " def __init__(self):\n", + " def __init__(self) -> None:\n", " cp_model.CpSolverSolutionCallback.__init__(self)\n", " self.__solution_count = 0\n", "\n", - " def on_solution_callback(self):\n", + " def on_solution_callback(self) -> None:\n", " \"\"\"Called at each new solution.\"\"\"\n", " print(\n", - " \"Solution %i, time = %f s, objective = %i\"\n", - " % (self.__solution_count, self.WallTime(), self.ObjectiveValue())\n", + " f\"Solution {self.__solution_count}, time = {self.wall_time} s,\"\n", + " f\" objective = {self.objective_value}\"\n", " )\n", " self.__solution_count += 1\n", "\n", "\n", - "def jobshop_with_maintenance():\n", + "def jobshop_with_maintenance() -> None:\n", " \"\"\"Solves a jobshop with maintenance on one machine.\"\"\"\n", " # Create the model.\n", " model = cp_model.CpModel()\n", @@ -122,7 +122,7 @@ " horizon = sum(task[1] for job in jobs_data for task in job)\n", "\n", " # Named tuple to store information about created variables.\n", - " task_type = collections.namedtuple(\"Task\", \"start end interval\")\n", + " task_type = collections.namedtuple(\"task_type\", \"start end interval\")\n", " # Named tuple to manipulate solution information.\n", " assigned_task_type = collections.namedtuple(\n", " \"assigned_task_type\", \"start job index duration\"\n", @@ -133,13 +133,13 @@ " machine_to_intervals = collections.defaultdict(list)\n", "\n", " for job_id, job in enumerate(jobs_data):\n", - " for task_id, task in enumerate(job):\n", - " machine = task[0]\n", - " duration = task[1]\n", - " suffix = \"_%i_%i\" % (job_id, task_id)\n", - " start_var = model.NewIntVar(0, horizon, \"start\" + suffix)\n", - " end_var = model.NewIntVar(0, horizon, \"end\" + suffix)\n", - " interval_var = model.NewIntervalVar(\n", + " for entry in enumerate(job):\n", + " task_id, task = entry\n", + " machine, duration = task\n", + " suffix = f\"_{job_id}_{task_id}\"\n", + " start_var = model.new_int_var(0, horizon, \"start\" + suffix)\n", + " end_var = model.new_int_var(0, horizon, \"end\" + suffix)\n", + " interval_var = model.new_interval_var(\n", " start_var, duration, end_var, \"interval\" + suffix\n", " )\n", " all_tasks[job_id, task_id] = task_type(\n", @@ -148,31 +148,31 @@ " machine_to_intervals[machine].append(interval_var)\n", "\n", " # Add maintenance interval (machine 0 is not available on time {4, 5, 6, 7}).\n", - " machine_to_intervals[0].append(model.NewIntervalVar(4, 4, 8, \"weekend_0\"))\n", + " machine_to_intervals[0].append(model.new_interval_var(4, 4, 8, \"weekend_0\"))\n", "\n", " # Create and add disjunctive constraints.\n", " for machine in all_machines:\n", - " model.AddNoOverlap(machine_to_intervals[machine])\n", + " model.add_no_overlap(machine_to_intervals[machine])\n", "\n", " # Precedences inside a job.\n", " for job_id, job in enumerate(jobs_data):\n", " for task_id in range(len(job) - 1):\n", - " model.Add(\n", + " model.add(\n", " all_tasks[job_id, task_id + 1].start >= all_tasks[job_id, task_id].end\n", " )\n", "\n", " # Makespan objective.\n", - " obj_var = model.NewIntVar(0, horizon, \"makespan\")\n", - " model.AddMaxEquality(\n", + " obj_var = model.new_int_var(0, horizon, \"makespan\")\n", + " model.add_max_equality(\n", " obj_var,\n", " [all_tasks[job_id, len(job) - 1].end for job_id, job in enumerate(jobs_data)],\n", " )\n", - " model.Minimize(obj_var)\n", + " 
model.minimize(obj_var)\n", "\n", " # Solve model.\n", " solver = cp_model.CpSolver()\n", " solution_printer = SolutionPrinter()\n", - " status = solver.Solve(model, solution_printer)\n", + " status = solver.solve(model, solution_printer)\n", "\n", " # Output solution.\n", " if status == cp_model.OPTIMAL:\n", @@ -183,7 +183,7 @@ " machine = task[0]\n", " assigned_jobs[machine].append(\n", " assigned_task_type(\n", - " start=solver.Value(all_tasks[job_id, task_id].start),\n", + " start=solver.value(all_tasks[job_id, task_id].start),\n", " job=job_id,\n", " index=task_id,\n", " duration=task[1],\n", @@ -199,15 +199,15 @@ " sol_line = \" \"\n", "\n", " for assigned_task in assigned_jobs[machine]:\n", - " name = \"job_%i_%i\" % (assigned_task.job, assigned_task.index)\n", - " # Add spaces to output to align columns.\n", - " sol_line_tasks += \"%-10s\" % name\n", + " name = f\"job_{assigned_task.job}_{assigned_task.index}\"\n", + " # add spaces to output to align columns.\n", + " sol_line_tasks += f\"{name:>10}\"\n", " start = assigned_task.start\n", " duration = assigned_task.duration\n", "\n", - " sol_tmp = \"[%i,%i]\" % (start, start + duration)\n", - " # Add spaces to output to align columns.\n", - " sol_line += \"%-10s\" % sol_tmp\n", + " sol_tmp = f\"[{start}, {start + duration}]\"\n", + " # add spaces to output to align columns.\n", + " sol_line += f\"{sol_tmp:>10}\"\n", "\n", " sol_line += \"\\n\"\n", " sol_line_tasks += \"\\n\"\n", @@ -215,12 +215,9 @@ " output += sol_line\n", "\n", " # Finally print the solution found.\n", - " print(\"Optimal Schedule Length: %i\" % solver.ObjectiveValue())\n", + " print(f\"Optimal Schedule Length: {solver.objective_value}\")\n", " print(output)\n", - " print(\"Statistics\")\n", - " print(\" - conflicts : %i\" % solver.NumConflicts())\n", - " print(\" - branches : %i\" % solver.NumBranches())\n", - " print(\" - wall time : %f s\" % solver.WallTime())\n", + " print(solver.response_stats())\n", "\n", "\n", "def main(argv: Sequence[str]) -> None:\n", diff --git a/examples/notebook/examples/knapsack_2d_sat.ipynb b/examples/notebook/examples/knapsack_2d_sat.ipynb index 0695e7d5e86..09efe314988 100644 --- a/examples/notebook/examples/knapsack_2d_sat.ipynb +++ b/examples/notebook/examples/knapsack_2d_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
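Reviewer note: jobshop_with_maintenance_sat.ipynb above models machine downtime by appending a fixed interval (weekend_0 over [4, 8)) to the machine's no-overlap list, which carves that window out of the schedule. A minimal sketch of the same trick, with invented durations:

from ortools.sat.python import cp_model

model = cp_model.CpModel()
start = model.new_int_var(0, 20, "start")
end = model.new_int_var(0, 20, "end")
task = model.new_interval_var(start, 6, end, "task")

# Fixed interval over [4, 8): the machine is down, and the task is too
# long to finish before time 4, so it must start at 8 or later.
downtime = model.new_interval_var(4, 4, 8, "downtime")
model.add_no_overlap([task, downtime])

solver = cp_model.CpSolver()
if solver.solve(model) == cp_model.OPTIMAL:
    print("task starts at", solver.value(start))  # some value >= 8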
] }, { @@ -97,6 +97,7 @@ "\n", "from ortools.sat.python import cp_model\n", "\n", + "\n", "_OUTPUT_PROTO = flags.define_string(\n", " \"output_proto\", \"\", \"Output file to write the cp_model proto to.\"\n", ")\n", @@ -110,7 +111,7 @@ ")\n", "\n", "\n", - "def build_data():\n", + "def build_data() -> tuple[pd.Series, int, int]:\n", " \"\"\"Build the data frame.\"\"\"\n", " data = \"\"\"\n", " item width height available value color\n", @@ -138,8 +139,10 @@ " return (data, max_height, max_width)\n", "\n", "\n", - "def solve_with_duplicate_items(data, max_height, max_width):\n", - " \"\"\"Solve the problem by building 2 items (rotated or not) for each item.\"\"\"\n", + "def solve_with_duplicate_items(\n", + " data: pd.Series, max_height: int, max_width: int\n", + ") -> None:\n", + " \"\"\"solve the problem by building 2 items (rotated or not) for each item.\"\"\"\n", " # Derived data (expanded to individual items).\n", " data_widths = data[\"width\"].to_numpy()\n", " data_heights = data[\"height\"].to_numpy()\n", @@ -173,41 +176,47 @@ "\n", " for i in range(num_items):\n", " ## Is the item used?\n", - " is_used.append(model.NewBoolVar(f\"is_used{i}\"))\n", + " is_used.append(model.new_bool_var(f\"is_used{i}\"))\n", "\n", " ## Item coordinates.\n", - " x_starts.append(model.NewIntVar(0, max_width, f\"x_start{i}\"))\n", - " x_ends.append(model.NewIntVar(0, max_width, f\"x_end{i}\"))\n", - " y_starts.append(model.NewIntVar(0, max_height, f\"y_start{i}\"))\n", - " y_ends.append(model.NewIntVar(0, max_height, f\"y_end{i}\"))\n", + " x_starts.append(model.new_int_var(0, max_width, f\"x_start{i}\"))\n", + " x_ends.append(model.new_int_var(0, max_width, f\"x_end{i}\"))\n", + " y_starts.append(model.new_int_var(0, max_height, f\"y_start{i}\"))\n", + " y_ends.append(model.new_int_var(0, max_height, f\"y_end{i}\"))\n", "\n", " ## Interval variables.\n", " x_intervals.append(\n", - " model.NewIntervalVar(\n", - " x_starts[i], item_widths[i] * is_used[i], x_ends[i], f\"x_interval{i}\"\n", + " model.new_interval_var(\n", + " x_starts[i],\n", + " item_widths[i] * is_used[i],\n", + " x_ends[i],\n", + " f\"x_interval{i}\",\n", " )\n", " )\n", " y_intervals.append(\n", - " model.NewIntervalVar(\n", - " y_starts[i], item_heights[i] * is_used[i], y_ends[i], f\"y_interval{i}\"\n", + " model.new_interval_var(\n", + " y_starts[i],\n", + " item_heights[i] * is_used[i],\n", + " y_ends[i],\n", + " f\"y_interval{i}\",\n", " )\n", " )\n", "\n", " # Unused boxes are fixed at (0.0).\n", - " model.Add(x_starts[i] == 0).OnlyEnforceIf(is_used[i].Not())\n", - " model.Add(y_starts[i] == 0).OnlyEnforceIf(is_used[i].Not())\n", + " model.add(x_starts[i] == 0).only_enforce_if(~is_used[i])\n", + " model.add(y_starts[i] == 0).only_enforce_if(~is_used[i])\n", "\n", " # Constraints.\n", "\n", " ## Only one of non-rotated/rotated pair can be used.\n", " for i in range(num_data_items):\n", - " model.Add(is_used[i] + is_used[i + num_data_items] <= 1)\n", + " model.add(is_used[i] + is_used[i + num_data_items] <= 1)\n", "\n", " ## 2D no overlap.\n", - " model.AddNoOverlap2D(x_intervals, y_intervals)\n", + " model.add_no_overlap_2d(x_intervals, y_intervals)\n", "\n", " ## Objective.\n", - " model.Maximize(cp_model.LinearExpr.WeightedSum(is_used, item_values))\n", + " model.maximize(cp_model.LinearExpr.weighted_sum(is_used, item_values))\n", "\n", " # Output proto to file.\n", " if _OUTPUT_PROTO.value:\n", @@ -220,27 +229,29 @@ " if _PARAMS.value:\n", " text_format.Parse(_PARAMS.value, solver.parameters)\n", "\n", - " status = 
solver.Solve(model)\n", + " status = solver.solve(model)\n", "\n", " # Report solution.\n", - " if status == cp_model.OPTIMAL or status == cp_model.FEASIBLE:\n", - " used = {i for i in range(num_items) if solver.BooleanValue(is_used[i])}\n", + " if status in (cp_model.OPTIMAL, cp_model.FEASIBLE):\n", + " used = {i for i in range(num_items) if solver.boolean_value(is_used[i])}\n", " data = pd.DataFrame(\n", " {\n", - " \"x_start\": [solver.Value(x_starts[i]) for i in used],\n", - " \"y_start\": [solver.Value(y_starts[i]) for i in used],\n", + " \"x_start\": [solver.value(x_starts[i]) for i in used],\n", + " \"y_start\": [solver.value(y_starts[i]) for i in used],\n", " \"item_width\": [item_widths[i] for i in used],\n", " \"item_height\": [item_heights[i] for i in used],\n", - " \"x_end\": [solver.Value(x_ends[i]) for i in used],\n", - " \"y_end\": [solver.Value(y_ends[i]) for i in used],\n", + " \"x_end\": [solver.value(x_ends[i]) for i in used],\n", + " \"y_end\": [solver.value(y_ends[i]) for i in used],\n", " \"item_value\": [item_values[i] for i in used],\n", " }\n", " )\n", " print(data)\n", "\n", "\n", - "def solve_with_duplicate_optional_items(data, max_height, max_width):\n", - " \"\"\"Solve the problem by building 2 optional items (rotated or not) for each item.\"\"\"\n", + "def solve_with_duplicate_optional_items(\n", + " data: pd.Series, max_height: int, max_width: int\n", + "):\n", + " \"\"\"solve the problem by building 2 optional items (rotated or not) for each item.\"\"\"\n", " # Derived data (expanded to individual items).\n", " data_widths = data[\"width\"].to_numpy()\n", " data_heights = data[\"height\"].to_numpy()\n", @@ -272,42 +283,42 @@ "\n", " for i in range(num_items):\n", " ## Is the item used?\n", - " is_used.append(model.NewBoolVar(f\"is_used{i}\"))\n", + " is_used.append(model.new_bool_var(f\"is_used{i}\"))\n", "\n", " ## Item coordinates.\n", " x_starts.append(\n", - " model.NewIntVar(0, max_width - int(item_widths[i]), f\"x_start{i}\")\n", + " model.new_int_var(0, max_width - int(item_widths[i]), f\"x_start{i}\")\n", " )\n", " y_starts.append(\n", - " model.NewIntVar(0, max_height - int(item_heights[i]), f\"y_start{i}\")\n", + " model.new_int_var(0, max_height - int(item_heights[i]), f\"y_start{i}\")\n", " )\n", "\n", " ## Interval variables.\n", " x_intervals.append(\n", - " model.NewOptionalFixedSizeIntervalVar(\n", + " model.new_optional_fixed_size_interval_var(\n", " x_starts[i], item_widths[i], is_used[i], f\"x_interval{i}\"\n", " )\n", " )\n", " y_intervals.append(\n", - " model.NewOptionalFixedSizeIntervalVar(\n", + " model.new_optional_fixed_size_interval_var(\n", " y_starts[i], item_heights[i], is_used[i], f\"y_interval{i}\"\n", " )\n", " )\n", " # Unused boxes are fixed at (0.0).\n", - " model.Add(x_starts[i] == 0).OnlyEnforceIf(is_used[i].Not())\n", - " model.Add(y_starts[i] == 0).OnlyEnforceIf(is_used[i].Not())\n", + " model.add(x_starts[i] == 0).only_enforce_if(~is_used[i])\n", + " model.add(y_starts[i] == 0).only_enforce_if(~is_used[i])\n", "\n", " # Constraints.\n", "\n", " ## Only one of non-rotated/rotated pair can be used.\n", " for i in range(num_data_items):\n", - " model.Add(is_used[i] + is_used[i + num_data_items] <= 1)\n", + " model.add(is_used[i] + is_used[i + num_data_items] <= 1)\n", "\n", " ## 2D no overlap.\n", - " model.AddNoOverlap2D(x_intervals, y_intervals)\n", + " model.add_no_overlap_2d(x_intervals, y_intervals)\n", "\n", " ## Objective.\n", - " model.Maximize(cp_model.LinearExpr.WeightedSum(is_used, item_values))\n", + " 
model.maximize(cp_model.LinearExpr.weighted_sum(is_used, item_values))\n", "\n", " # Output proto to file.\n", " if _OUTPUT_PROTO.value:\n", @@ -315,32 +326,32 @@ " with open(_OUTPUT_PROTO.value, \"w\") as text_file:\n", " text_file.write(str(model))\n", "\n", - " # Solve model.\n", + " # solve model.\n", " solver = cp_model.CpSolver()\n", " if _PARAMS.value:\n", " text_format.Parse(_PARAMS.value, solver.parameters)\n", "\n", - " status = solver.Solve(model)\n", + " status = solver.solve(model)\n", "\n", " # Report solution.\n", - " if status == cp_model.OPTIMAL or status == cp_model.FEASIBLE:\n", - " used = {i for i in range(num_items) if solver.BooleanValue(is_used[i])}\n", + " if status in (cp_model.OPTIMAL, cp_model.FEASIBLE):\n", + " used = {i for i in range(num_items) if solver.boolean_value(is_used[i])}\n", " data = pd.DataFrame(\n", " {\n", - " \"x_start\": [solver.Value(x_starts[i]) for i in used],\n", - " \"y_start\": [solver.Value(y_starts[i]) for i in used],\n", + " \"x_start\": [solver.value(x_starts[i]) for i in used],\n", + " \"y_start\": [solver.value(y_starts[i]) for i in used],\n", " \"item_width\": [item_widths[i] for i in used],\n", " \"item_height\": [item_heights[i] for i in used],\n", - " \"x_end\": [solver.Value(x_starts[i]) + item_widths[i] for i in used],\n", - " \"y_end\": [solver.Value(y_starts[i]) + item_heights[i] for i in used],\n", + " \"x_end\": [solver.value(x_starts[i]) + item_widths[i] for i in used],\n", + " \"y_end\": [solver.value(y_starts[i]) + item_heights[i] for i in used],\n", " \"item_value\": [item_values[i] for i in used],\n", " }\n", " )\n", " print(data)\n", "\n", "\n", - "def solve_with_rotations(data, max_height, max_width):\n", - " \"\"\"Solve the problem by rotating items.\"\"\"\n", + "def solve_with_rotations(data: pd.Series, max_height: int, max_width: int):\n", + " \"\"\"solve the problem by rotating items.\"\"\"\n", " # Derived data (expanded to individual items).\n", " data_widths = data[\"width\"].to_numpy()\n", " data_heights = data[\"height\"].to_numpy()\n", @@ -369,25 +380,29 @@ " for i in range(num_items):\n", " sizes = [0, int(item_widths[i]), int(item_heights[i])]\n", " # X coordinates.\n", - " x_starts.append(model.NewIntVar(0, max_width, f\"x_start{i}\"))\n", + " x_starts.append(model.new_int_var(0, max_width, f\"x_start{i}\"))\n", " x_sizes.append(\n", - " model.NewIntVarFromDomain(cp_model.Domain.FromValues(sizes), f\"x_size{i}\")\n", + " model.new_int_var_from_domain(\n", + " cp_model.Domain.FromValues(sizes), f\"x_size{i}\"\n", + " )\n", " )\n", - " x_ends.append(model.NewIntVar(0, max_width, f\"x_end{i}\"))\n", + " x_ends.append(model.new_int_var(0, max_width, f\"x_end{i}\"))\n", "\n", " # Y coordinates.\n", - " y_starts.append(model.NewIntVar(0, max_height, f\"y_start{i}\"))\n", + " y_starts.append(model.new_int_var(0, max_height, f\"y_start{i}\"))\n", " y_sizes.append(\n", - " model.NewIntVarFromDomain(cp_model.Domain.FromValues(sizes), f\"y_size{i}\")\n", + " model.new_int_var_from_domain(\n", + " cp_model.Domain.FromValues(sizes), f\"y_size{i}\"\n", + " )\n", " )\n", - " y_ends.append(model.NewIntVar(0, max_height, f\"y_end{i}\"))\n", + " y_ends.append(model.new_int_var(0, max_height, f\"y_end{i}\"))\n", "\n", " ## Interval variables\n", " x_intervals.append(\n", - " model.NewIntervalVar(x_starts[i], x_sizes[i], x_ends[i], f\"x_interval{i}\")\n", + " model.new_interval_var(x_starts[i], x_sizes[i], x_ends[i], f\"x_interval{i}\")\n", " )\n", " y_intervals.append(\n", - " model.NewIntervalVar(y_starts[i], y_sizes[i], 
y_ends[i], f\"y_interval{i}\")\n", + " model.new_interval_var(y_starts[i], y_sizes[i], y_ends[i], f\"y_interval{i}\")\n", " )\n", "\n", " # is_used[i] == True if and only if item i is selected.\n", @@ -397,34 +412,34 @@ "\n", " ## for each item, decide is unselected, no_rotation, rotated.\n", " for i in range(num_items):\n", - " not_selected = model.NewBoolVar(f\"not_selected_{i}\")\n", - " no_rotation = model.NewBoolVar(f\"no_rotation_{i}\")\n", - " rotated = model.NewBoolVar(f\"rotated_{i}\")\n", + " not_selected = model.new_bool_var(f\"not_selected_{i}\")\n", + " no_rotation = model.new_bool_var(f\"no_rotation_{i}\")\n", + " rotated = model.new_bool_var(f\"rotated_{i}\")\n", "\n", " ### Exactly one state must be chosen.\n", - " model.AddExactlyOne(not_selected, no_rotation, rotated)\n", + " model.add_exactly_one(not_selected, no_rotation, rotated)\n", "\n", " ### Define height and width according to the state.\n", " dim1 = item_widths[i]\n", " dim2 = item_heights[i]\n", " # Unused boxes are fixed at (0.0).\n", - " model.Add(x_sizes[i] == 0).OnlyEnforceIf(not_selected)\n", - " model.Add(y_sizes[i] == 0).OnlyEnforceIf(not_selected)\n", - " model.Add(x_starts[i] == 0).OnlyEnforceIf(not_selected)\n", - " model.Add(y_starts[i] == 0).OnlyEnforceIf(not_selected)\n", + " model.add(x_sizes[i] == 0).only_enforce_if(not_selected)\n", + " model.add(y_sizes[i] == 0).only_enforce_if(not_selected)\n", + " model.add(x_starts[i] == 0).only_enforce_if(not_selected)\n", + " model.add(y_starts[i] == 0).only_enforce_if(not_selected)\n", " # Sizes are fixed by the rotation.\n", - " model.Add(x_sizes[i] == dim1).OnlyEnforceIf(no_rotation)\n", - " model.Add(y_sizes[i] == dim2).OnlyEnforceIf(no_rotation)\n", - " model.Add(x_sizes[i] == dim2).OnlyEnforceIf(rotated)\n", - " model.Add(y_sizes[i] == dim1).OnlyEnforceIf(rotated)\n", + " model.add(x_sizes[i] == dim1).only_enforce_if(no_rotation)\n", + " model.add(y_sizes[i] == dim2).only_enforce_if(no_rotation)\n", + " model.add(x_sizes[i] == dim2).only_enforce_if(rotated)\n", + " model.add(y_sizes[i] == dim1).only_enforce_if(rotated)\n", "\n", - " is_used.append(not_selected.Not())\n", + " is_used.append(~not_selected)\n", "\n", " ## 2D no overlap.\n", - " model.AddNoOverlap2D(x_intervals, y_intervals)\n", + " model.add_no_overlap_2d(x_intervals, y_intervals)\n", "\n", " # Objective.\n", - " model.Maximize(cp_model.LinearExpr.WeightedSum(is_used, item_values))\n", + " model.maximize(cp_model.LinearExpr.weighted_sum(is_used, item_values))\n", "\n", " # Output proto to file.\n", " if _OUTPUT_PROTO.value:\n", @@ -432,24 +447,24 @@ " with open(_OUTPUT_PROTO.value, \"w\") as text_file:\n", " text_file.write(str(model))\n", "\n", - " # Solve model.\n", + " # solve model.\n", " solver = cp_model.CpSolver()\n", " if _PARAMS.value:\n", " text_format.Parse(_PARAMS.value, solver.parameters)\n", "\n", - " status = solver.Solve(model)\n", + " status = solver.solve(model)\n", "\n", " # Report solution.\n", - " if status == cp_model.OPTIMAL or status == cp_model.FEASIBLE:\n", - " used = {i for i in range(num_items) if solver.BooleanValue(is_used[i])}\n", + " if status in (cp_model.OPTIMAL, cp_model.FEASIBLE):\n", + " used = {i for i in range(num_items) if solver.boolean_value(is_used[i])}\n", " data = pd.DataFrame(\n", " {\n", - " \"x_start\": [solver.Value(x_starts[i]) for i in used],\n", - " \"y_start\": [solver.Value(y_starts[i]) for i in used],\n", - " \"item_width\": [solver.Value(x_sizes[i]) for i in used],\n", - " \"item_height\": [solver.Value(y_sizes[i]) for i in 
used],\n", - " \"x_end\": [solver.Value(x_ends[i]) for i in used],\n", - " \"y_end\": [solver.Value(y_ends[i]) for i in used],\n", + " \"x_start\": [solver.value(x_starts[i]) for i in used],\n", + " \"y_start\": [solver.value(y_starts[i]) for i in used],\n", + " \"item_width\": [solver.value(x_sizes[i]) for i in used],\n", + " \"item_height\": [solver.value(y_sizes[i]) for i in used],\n", + " \"x_end\": [solver.value(x_ends[i]) for i in used],\n", + " \"y_end\": [solver.value(y_ends[i]) for i in used],\n", " \"item_value\": [item_values[i] for i in used],\n", " }\n", " )\n", @@ -457,7 +472,7 @@ "\n", "\n", "def main(_):\n", - " \"\"\"Solve the problem with all models.\"\"\"\n", + " \"\"\"solve the problem with all models.\"\"\"\n", " data, max_height, max_width = build_data()\n", " if _MODEL.value == \"duplicate\":\n", " solve_with_duplicate_items(data, max_height, max_width)\n", diff --git a/examples/notebook/examples/line_balancing_sat.ipynb b/examples/notebook/examples/line_balancing_sat.ipynb index 3535f21cc00..07c2c82ab93 100644 --- a/examples/notebook/examples/line_balancing_sat.ipynb +++ b/examples/notebook/examples/line_balancing_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -98,7 +98,7 @@ "source": [ "import collections\n", "import re\n", - "from typing import Sequence\n", + "from typing import Dict, Sequence\n", "\n", "from ortools.sat.colab import flags\n", "from google.protobuf import text_format\n", @@ -115,8 +115,8 @@ ")\n", "\n", "\n", - "class SectionInfo(object):\n", - " \"\"\"Store model information for each section of the input file.\"\"\"\n", + "class SectionInfo:\n", + " \"\"\"Store problem information for each section of the input file.\"\"\"\n", "\n", " def __init__(self):\n", " self.value = None\n", @@ -134,44 +134,43 @@ " return \"SectionInfo()\"\n", "\n", "\n", - "def read_model(filename):\n", - " \"\"\"Reads a .alb file and returns the model.\"\"\"\n", + "def read_problem(filename: str) -> Dict[str, SectionInfo]:\n", + " \"\"\"Reads a .alb file and returns the problem.\"\"\"\n", "\n", " current_info = SectionInfo()\n", "\n", - " model = {}\n", + " problem: Dict[str, SectionInfo] = {}\n", " with open(filename, \"r\") as input_file:\n", - " print(f\"Reading model from '{filename}'\")\n", - " section_name = \"\"\n", + " print(f\"Reading problem from '{filename}'\")\n", "\n", " for line in input_file:\n", " stripped_line = line.strip()\n", " if not stripped_line:\n", " continue\n", "\n", - " match_section_def = re.match(r\"<([\\w\\s]+)>\", stripped_line)\n", + " match_section_def = re.fullmatch(r\"<([\\w\\s]+)>\", stripped_line)\n", " if match_section_def:\n", " section_name = match_section_def.group(1)\n", " if section_name == \"end\":\n", " continue\n", "\n", " current_info = SectionInfo()\n", - " model[section_name] = current_info\n", + " problem[section_name] = current_info\n", " continue\n", "\n", - " match_single_number = re.match(r\"^([0-9]+)$\", stripped_line)\n", + " match_single_number = re.fullmatch(r\"^([0-9]+)$\", stripped_line)\n", " if match_single_number:\n", " current_info.value = int(match_single_number.group(1))\n", " continue\n", "\n", - " match_key_value = re.match(r\"^([0-9]+)\\s+([0-9]+)$\", stripped_line)\n", + " match_key_value = re.fullmatch(r\"^([0-9]+)\\s+([0-9]+)$\", stripped_line)\n", " if match_key_value:\n", " key = int(match_key_value.group(1))\n", " value = int(match_key_value.group(2))\n", " current_info.index_map[key] = 
value\n", " continue\n", "\n", - " match_pair = re.match(r\"^([0-9]+),([0-9]+)$\", stripped_line)\n", + " match_pair = re.fullmatch(r\"^([0-9]+),([0-9]+)$\", stripped_line)\n", " if match_pair:\n", " left = int(match_pair.group(1))\n", " right = int(match_pair.group(2))\n", @@ -180,24 +179,26 @@ "\n", " print(f\"Unrecognized line '{stripped_line}'\")\n", "\n", - " return model\n", + " return problem\n", "\n", "\n", - "def print_stats(model):\n", - " print(\"Model Statistics\")\n", - " for key, value in model.items():\n", + "def print_stats(problem: Dict[str, SectionInfo]) -> None:\n", + " print(\"Problem Statistics\")\n", + " for key, value in problem.items():\n", " print(f\" - {key}: {value}\")\n", "\n", "\n", - "def solve_model_greedily(model):\n", + "def solve_problem_greedily(problem: Dict[str, SectionInfo]) -> Dict[int, int]:\n", " \"\"\"Compute a greedy solution.\"\"\"\n", " print(\"Solving using a Greedy heuristics\")\n", "\n", - " num_tasks = model[\"number of tasks\"].value\n", + " num_tasks = problem[\"number of tasks\"].value\n", + " if num_tasks is None:\n", + " return {}\n", " all_tasks = range(1, num_tasks + 1) # Tasks are 1 based in the data.\n", - " precedences = model[\"precedence relations\"].set_of_pairs\n", - " durations = model[\"task times\"].index_map\n", - " cycle_time = model[\"cycle time\"].value\n", + " precedences = problem[\"precedence relations\"].set_of_pairs\n", + " durations = problem[\"task times\"].index_map\n", + " cycle_time = problem[\"cycle time\"].value\n", "\n", " weights = collections.defaultdict(int)\n", " successors = collections.defaultdict(list)\n", @@ -210,7 +211,7 @@ " if after in candidates:\n", " candidates.remove(after)\n", "\n", - " assignment = {}\n", + " assignment: Dict[int, int] = {}\n", " current_pod = 0\n", " residual_capacity = cycle_time\n", "\n", @@ -251,16 +252,20 @@ " return assignment\n", "\n", "\n", - "def solve_boolean_model(model, hint):\n", - " \"\"\"Solve the given model.\"\"\"\n", + "def solve_problem_with_boolean_model(\n", + " problem: Dict[str, SectionInfo], hint: Dict[int, int]\n", + ") -> None:\n", + " \"\"\"solve the given problem.\"\"\"\n", "\n", " print(\"Solving using the Boolean model\")\n", - " # Model data\n", - " num_tasks = model[\"number of tasks\"].value\n", - " all_tasks = range(1, num_tasks + 1) # Tasks are 1 based in the model.\n", - " durations = model[\"task times\"].index_map\n", - " precedences = model[\"precedence relations\"].set_of_pairs\n", - " cycle_time = model[\"cycle time\"].value\n", + " # problem data\n", + " num_tasks = problem[\"number of tasks\"].value\n", + " if num_tasks is None:\n", + " return\n", + " all_tasks = range(1, num_tasks + 1) # Tasks are 1 based in the problem.\n", + " durations = problem[\"task times\"].index_map\n", + " precedences = problem[\"precedence relations\"].set_of_pairs\n", + " cycle_time = problem[\"cycle time\"].value\n", "\n", " num_pods = max(p for _, p in hint.items()) + 1 if hint else num_tasks - 1\n", " all_pods = range(num_pods)\n", @@ -275,81 +280,85 @@ " # Create the variables\n", " for t in all_tasks:\n", " for p in all_pods:\n", - " assign[t, p] = model.NewBoolVar(f\"assign_{t}_{p}\")\n", - " possible[t, p] = model.NewBoolVar(f\"possible_{t}_{p}\")\n", + " assign[t, p] = model.new_bool_var(f\"assign_{t}_{p}\")\n", + " possible[t, p] = model.new_bool_var(f\"possible_{t}_{p}\")\n", "\n", " # active[p] indicates if pod p is active.\n", - " active = [model.NewBoolVar(f\"active_{p}\") for p in all_pods]\n", + " active = 
[model.new_bool_var(f\"active_{p}\") for p in all_pods]\n", "\n", " # Each task is done on exactly one pod.\n", " for t in all_tasks:\n", - " model.AddExactlyOne([assign[t, p] for p in all_pods])\n", + " model.add_exactly_one([assign[t, p] for p in all_pods])\n", "\n", " # Total tasks assigned to one pod cannot exceed cycle time.\n", " for p in all_pods:\n", - " model.Add(sum(assign[t, p] * durations[t] for t in all_tasks) <= cycle_time)\n", + " model.add(sum(assign[t, p] * durations[t] for t in all_tasks) <= cycle_time)\n", "\n", " # Maintain the possible variables:\n", " # possible at pod p -> possible at any pod after p\n", " for t in all_tasks:\n", " for p in range(num_pods - 1):\n", - " model.AddImplication(possible[t, p], possible[t, p + 1])\n", + " model.add_implication(possible[t, p], possible[t, p + 1])\n", "\n", " # Link possible and active variables.\n", " for t in all_tasks:\n", " for p in all_pods:\n", - " model.AddImplication(assign[t, p], possible[t, p])\n", + " model.add_implication(assign[t, p], possible[t, p])\n", " if p > 1:\n", - " model.AddImplication(assign[t, p], possible[t, p - 1].Not())\n", + " model.add_implication(assign[t, p], ~possible[t, p - 1])\n", "\n", " # Precedences.\n", " for before, after in precedences:\n", " for p in range(1, num_pods):\n", - " model.AddImplication(assign[before, p], possible[after, p - 1].Not())\n", + " model.add_implication(assign[before, p], ~possible[after, p - 1])\n", "\n", " # Link active variables with the assign one.\n", " for p in all_pods:\n", " all_assign_vars = [assign[t, p] for t in all_tasks]\n", " for a in all_assign_vars:\n", - " model.AddImplication(a, active[p])\n", - " model.AddBoolOr(all_assign_vars + [active[p].Not()])\n", + " model.add_implication(a, active[p])\n", + " model.add_bool_or(all_assign_vars + [~active[p]])\n", "\n", " # Force pods to be contiguous. 
This is critical to get good lower bounds\n", " # on the objective, even if it makes feasibility harder.\n", " for p in range(1, num_pods):\n", - " model.AddImplication(active[p - 1].Not(), active[p].Not())\n", + " model.add_implication(~active[p - 1], ~active[p])\n", " for t in all_tasks:\n", - " model.AddImplication(active[p].Not(), possible[t, p - 1])\n", + " model.add_implication(~active[p], possible[t, p - 1])\n", "\n", " # Objective.\n", - " model.Minimize(sum(active))\n", + " model.minimize(sum(active))\n", "\n", - " # Add search hinting from the greedy solution.\n", + " # add search hinting from the greedy solution.\n", " for t in all_tasks:\n", - " model.AddHint(assign[t, hint[t]], 1)\n", + " model.add_hint(assign[t, hint[t]], 1)\n", "\n", " if _OUTPUT_PROTO.value:\n", " print(f\"Writing proto to {_OUTPUT_PROTO.value}\")\n", - " model.ExportToFile(_OUTPUT_PROTO.value)\n", + " model.export_to_file(_OUTPUT_PROTO.value)\n", "\n", - " # Solve model.\n", + " # solve model.\n", " solver = cp_model.CpSolver()\n", " if _PARAMS.value:\n", " text_format.Parse(_PARAMS.value, solver.parameters)\n", " solver.parameters.log_search_progress = True\n", - " solver.Solve(model)\n", + " solver.solve(model)\n", "\n", "\n", - "def solve_scheduling_model(model, hint):\n", - " \"\"\"Solve the given model using a cumutive model.\"\"\"\n", + "def solve_problem_with_scheduling_model(\n", + " problem: Dict[str, SectionInfo], hint: Dict[int, int]\n", + ") -> None:\n", + " \"\"\"solve the given problem using a cumulative model.\"\"\"\n", "\n", " print(\"Solving using the scheduling model\")\n", - " # Model data\n", - " num_tasks = model[\"number of tasks\"].value\n", + " # Problem data\n", + " num_tasks = problem[\"number of tasks\"].value\n", + " if num_tasks is None:\n", + " return\n", " all_tasks = range(1, num_tasks + 1) # Tasks are 1 based in the data.\n", - " durations = model[\"task times\"].index_map\n", - " precedences = model[\"precedence relations\"].set_of_pairs\n", - " cycle_time = model[\"cycle time\"].value\n", + " durations = problem[\"task times\"].index_map\n", + " precedences = problem[\"precedence relations\"].set_of_pairs\n", + " cycle_time = problem[\"cycle time\"].value\n", "\n", " num_pods = max(p for _, p in hint.items()) + 1 if hint else num_tasks\n", "\n", @@ -358,61 +367,63 @@ " # pod[t] indicates on which pod the task is performed.\n", " pods = {}\n", " for t in all_tasks:\n", - " pods[t] = model.NewIntVar(0, num_pods - 1, f\"pod_{t}\")\n", + " pods[t] = model.new_int_var(0, num_pods - 1, f\"pod_{t}\")\n", "\n", " # Create the variables\n", " intervals = []\n", " demands = []\n", " for t in all_tasks:\n", - " interval = model.NewFixedSizeIntervalVar(pods[t], 1, \"\")\n", + " interval = model.new_fixed_size_interval_var(pods[t], 1, \"\")\n", " intervals.append(interval)\n", " demands.append(durations[t])\n", "\n", - " # Add terminating interval as the objective.\n", - " obj_var = model.NewIntVar(1, num_pods, \"obj_var\")\n", - " obj_size = model.NewIntVar(1, num_pods, \"obj_duration\")\n", - " obj_interval = model.NewIntervalVar(obj_var, obj_size, num_pods + 1, \"obj_interval\")\n", + " # add terminating interval as the objective.\n", + " obj_var = model.new_int_var(1, num_pods, \"obj_var\")\n", + " obj_size = model.new_int_var(1, num_pods, \"obj_duration\")\n", + " obj_interval = model.new_interval_var(\n", + " obj_var, obj_size, num_pods + 1, \"obj_interval\"\n", + " )\n", " intervals.append(obj_interval)\n", " demands.append(cycle_time)\n", "\n", " # Cumulative constraint.\n", - 
" model.AddCumulative(intervals, demands, cycle_time)\n", + " model.add_cumulative(intervals, demands, cycle_time)\n", "\n", " # Precedences.\n", " for before, after in precedences:\n", - " model.Add(pods[after] >= pods[before])\n", + " model.add(pods[after] >= pods[before])\n", "\n", " # Objective.\n", - " model.Minimize(obj_var)\n", + " model.minimize(obj_var)\n", "\n", - " # Add search hinting from the greedy solution.\n", + " # add search hinting from the greedy solution.\n", " for t in all_tasks:\n", - " model.AddHint(pods[t], hint[t])\n", + " model.add_hint(pods[t], hint[t])\n", "\n", " if _OUTPUT_PROTO.value:\n", " print(f\"Writing proto to{_OUTPUT_PROTO.value}\")\n", - " model.ExportToFile(_OUTPUT_PROTO.value)\n", + " model.export_to_file(_OUTPUT_PROTO.value)\n", "\n", - " # Solve model.\n", + " # solve model.\n", " solver = cp_model.CpSolver()\n", " if _PARAMS.value:\n", " text_format.Parse(_PARAMS.value, solver.parameters)\n", " solver.parameters.log_search_progress = True\n", - " solver.Solve(model)\n", + " solver.solve(model)\n", "\n", "\n", "def main(argv: Sequence[str]) -> None:\n", " if len(argv) > 1:\n", " raise app.UsageError(\"Too many command-line arguments.\")\n", "\n", - " model = read_model(_INPUT.value)\n", - " print_stats(model)\n", - " greedy_solution = solve_model_greedily(model)\n", + " problem = read_problem(_INPUT.value)\n", + " print_stats(problem)\n", + " greedy_solution = solve_problem_greedily(problem)\n", "\n", " if _MODEL.value == \"boolean\":\n", - " solve_boolean_model(model, greedy_solution)\n", + " solve_problem_with_boolean_model(problem, greedy_solution)\n", " elif _MODEL.value == \"scheduling\":\n", - " solve_scheduling_model(model, greedy_solution)\n", + " solve_problem_with_scheduling_model(problem, greedy_solution)\n", "\n", "\n", "main()\n", diff --git a/examples/notebook/examples/linear_assignment_api.ipynb b/examples/notebook/examples/linear_assignment_api.ipynb index 55c1d39baa1..093c87cf6fc 100644 --- a/examples/notebook/examples/linear_assignment_api.ipynb +++ b/examples/notebook/examples/linear_assignment_api.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/examples/linear_programming.ipynb b/examples/notebook/examples/linear_programming.ipynb index 474d8cc683a..ea83ff3c899 100644 --- a/examples/notebook/examples/linear_programming.ipynb +++ b/examples/notebook/examples/linear_programming.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -207,11 +207,13 @@ " RunLinearExampleNaturalLanguageAPI(\"GLPK_LP\")\n", " RunLinearExampleNaturalLanguageAPI(\"CLP\")\n", " RunLinearExampleNaturalLanguageAPI(\"PDLP\")\n", + " RunLinearExampleNaturalLanguageAPI(\"XPRESS_LP\")\n", "\n", " RunLinearExampleCppStyleAPI(\"GLOP\")\n", " RunLinearExampleCppStyleAPI(\"GLPK_LP\")\n", " RunLinearExampleCppStyleAPI(\"CLP\")\n", " RunLinearExampleCppStyleAPI(\"PDLP\")\n", + " RunLinearExampleCppStyleAPI(\"XPRESS_LP\")\n", "\n", "\n", "main()\n", diff --git a/examples/notebook/examples/magic_sequence_distribute.ipynb b/examples/notebook/examples/magic_sequence_distribute.ipynb index ed8cd1d9098..d3ec65460f9 100644 --- a/examples/notebook/examples/magic_sequence_distribute.ipynb +++ b/examples/notebook/examples/magic_sequence_distribute.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." 
+ "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/examples/maximize_combinations_sat.ipynb b/examples/notebook/examples/maximize_combinations_sat.ipynb new file mode 100644 index 00000000000..a30e3d7166f --- /dev/null +++ b/examples/notebook/examples/maximize_combinations_sat.ipynb @@ -0,0 +1,149 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "google", + "metadata": {}, + "source": [ + "##### Copyright 2024 Google LLC." + ] + }, + { + "cell_type": "markdown", + "id": "apache", + "metadata": {}, + "source": [ + "Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "you may not use this file except in compliance with the License.\n", + "You may obtain a copy of the License at\n", + "\n", + " http://www.apache.org/licenses/LICENSE-2.0\n", + "\n", + "Unless required by applicable law or agreed to in writing, software\n", + "distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "See the License for the specific language governing permissions and\n", + "limitations under the License.\n" + ] + }, + { + "cell_type": "markdown", + "id": "basename", + "metadata": {}, + "source": [ + "# maximize_combinations_sat" + ] + }, + { + "cell_type": "markdown", + "id": "link", + "metadata": {}, + "source": [ + "\n", + "\n", + "\n", + "
\n", + "Run in Google Colab\n", + "\n", + "View source on GitHub\n", + "
" + ] + }, + { + "cell_type": "markdown", + "id": "doc", + "metadata": {}, + "source": [ + "First, you must install [ortools](https://pypi.org/project/ortools/) package in this colab." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "install", + "metadata": {}, + "outputs": [], + "source": [ + "%pip install ortools" + ] + }, + { + "cell_type": "markdown", + "id": "description", + "metadata": {}, + "source": [ + "\n", + "Maximize the number of valid combinations of Boolean variables.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "code", + "metadata": {}, + "outputs": [], + "source": [ + "from typing import Sequence\n", + "from ortools.sat.python import cp_model\n", + "\n", + "\n", + "def maximize_combinations_sat() -> None:\n", + " \"\"\"Maximize the number of valid combinations of Boolean variables.\"\"\"\n", + " model = cp_model.CpModel()\n", + " cards: list[cp_model.IntVar] = [\n", + " model.new_bool_var(\"card1\"),\n", + " model.new_bool_var(\"card2\"),\n", + " model.new_bool_var(\"card3\"),\n", + " model.new_bool_var(\"card4\"),\n", + " ]\n", + "\n", + " combos: list[list[cp_model.IntVar]] = [\n", + " [cards[0], cards[1]],\n", + " [cards[0], cards[2]],\n", + " [cards[1], cards[3]],\n", + " [cards[0], cards[2], cards[3]],\n", + " ]\n", + "\n", + " deck_size: int = 3\n", + " model.add(sum(cards) == deck_size)\n", + "\n", + " valid_combos: list[cp_model.IntVar] = []\n", + " for combination in combos:\n", + " is_valid = model.new_bool_var(\"\")\n", + "\n", + " # All true implies is_valid.\n", + " model.add_bool_and(is_valid).only_enforce_if(combination)\n", + "\n", + " # is_valid implies all true.\n", + " for literal in combination:\n", + " model.add_implication(is_valid, literal)\n", + " valid_combos.append(is_valid)\n", + "\n", + " model.maximize(sum(valid_combos))\n", + "\n", + " solver = cp_model.CpSolver()\n", + " solver.parameters.log_search_progress = True\n", + " status = solver.solve(model)\n", + "\n", + " if status == cp_model.OPTIMAL:\n", + " print(\n", + " \"chosen cards:\",\n", + " [card.name for card in cards if solver.boolean_value(card)],\n", + " )\n", + "\n", + "\n", + "def main(argv: Sequence[str]) -> None:\n", + " if len(argv) > 1:\n", + " raise app.UsageError(\"Too many command-line arguments.\")\n", + " maximize_combinations_sat()\n", + "\n", + "\n", + "main()\n", + "\n" + ] + } + ], + "metadata": {}, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/notebook/examples/maze_escape_sat.ipynb b/examples/notebook/examples/maze_escape_sat.ipynb index 6d79c9613b9..2ff03b9f05b 100644 --- a/examples/notebook/examples/maze_escape_sat.ipynb +++ b/examples/notebook/examples/maze_escape_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { @@ -89,7 +89,7 @@ "metadata": {}, "outputs": [], "source": [ - "from typing import Sequence\n", + "from typing import Dict, Sequence, Tuple\n", "\n", "from ortools.sat.colab import flags\n", "from google.protobuf import text_format\n", @@ -99,11 +99,25 @@ " \"output_proto\", \"\", \"Output file to write the cp_model proto to.\"\n", ")\n", "_PARAMS = flags.define_string(\n", - " \"params\", \"num_search_workers:8,log_search_progress:true\", \"Sat solver parameters.\"\n", + " \"params\",\n", + " \"num_search_workers:8,log_search_progress:true\",\n", + " \"Sat solver parameters.\",\n", ")\n", "\n", "\n", - "def add_neighbor(size, x, y, z, dx, dy, dz, model, index_map, position_to_rank, arcs):\n", + "def add_neighbor(\n", + " size: int,\n", + " x: int,\n", + " y: int,\n", + " z: int,\n", + " dx: int,\n", + " dy: int,\n", + " dz: int,\n", + " model: cp_model.CpModel,\n", + " index_map: Dict[Tuple[int, int, int], int],\n", + " position_to_rank: Dict[Tuple[int, int, int], cp_model.IntVar],\n", + " arcs: list[Tuple[int, int, cp_model.LiteralT]],\n", + ") -> None:\n", " \"\"\"Checks if the neighbor is valid, and adds it to the model.\"\"\"\n", " if (\n", " x + dx < 0\n", @@ -118,12 +132,12 @@ " before_rank = position_to_rank[(x, y, z)]\n", " after_index = index_map[(x + dx, y + dy, z + dz)]\n", " after_rank = position_to_rank[(x + dx, y + dy, z + dz)]\n", - " move_literal = model.NewBoolVar(\"\")\n", - " model.Add(after_rank == before_rank + 1).OnlyEnforceIf(move_literal)\n", + " move_literal = model.new_bool_var(\"\")\n", + " model.add(after_rank == before_rank + 1).only_enforce_if(move_literal)\n", " arcs.append((before_index, after_index, move_literal))\n", "\n", "\n", - "def escape_the_maze(params, output_proto):\n", + "def escape_the_maze(params: str, output_proto: str) -> None:\n", " \"\"\"Escapes the maze.\"\"\"\n", " size = 4\n", " boxes = [(0, 1, 0), (2, 0, 1), (1, 3, 1), (3, 1, 3)]\n", @@ -147,17 +161,17 @@ " position_to_rank = {}\n", "\n", " for coord in reverse_map:\n", - " position_to_rank[coord] = model.NewIntVar(0, counter - 1, f\"rank_{coord}\")\n", + " position_to_rank[coord] = model.new_int_var(0, counter - 1, f\"rank_{coord}\")\n", "\n", " # Path constraints.\n", - " model.Add(position_to_rank[start] == 0)\n", - " model.Add(position_to_rank[end] == counter - 1)\n", + " model.add(position_to_rank[start] == 0)\n", + " model.add(position_to_rank[end] == counter - 1)\n", " for i in range(len(boxes) - 1):\n", - " model.Add(position_to_rank[boxes[i]] < position_to_rank[boxes[i + 1]])\n", + " model.add(position_to_rank[boxes[i]] < position_to_rank[boxes[i + 1]])\n", "\n", " # Circuit constraint: visit all blocks exactly once, and maintains the rank\n", " # of each block.\n", - " arcs = []\n", + " arcs: list[Tuple[int, int, cp_model.LiteralT]] = []\n", " for x in range(size):\n", " for y in range(size):\n", " for z in range(size):\n", @@ -184,18 +198,18 @@ " arcs.append((index_map[end], index_map[start], True))\n", "\n", " # Adds the circuit (hamiltonian path) constraint.\n", - " model.AddCircuit(arcs)\n", + " model.add_circuit(arcs)\n", "\n", " # Exports the model if required.\n", " if output_proto:\n", - " model.ExportToFile(output_proto)\n", + " model.export_to_file(output_proto)\n", "\n", " # Solve model.\n", " solver = cp_model.CpSolver()\n", " if params:\n", " text_format.Parse(params, solver.parameters)\n", " solver.parameters.log_search_progress = True\n", - " result = solver.Solve(model)\n", + " result = solver.solve(model)\n", "\n", " # Prints solution.\n", " if result 
== cp_model.OPTIMAL:\n", @@ -204,15 +218,15 @@ " for y in range(size):\n", " for z in range(size):\n", " position = (x, y, z)\n", - " rank = solver.Value(position_to_rank[position])\n", + " rank = solver.value(position_to_rank[position])\n", " msg = f\"({x}, {y}, {z})\"\n", " if position == start:\n", " msg += \" [start]\"\n", " elif position == end:\n", " msg += \" [end]\"\n", " else:\n", - " for b in range(len(boxes)):\n", - " if position == boxes[b]:\n", + " for b, box in enumerate(boxes):\n", + " if position == box:\n", " msg += f\" [boxes {b}]\"\n", " path[rank] = msg\n", " print(path)\n", diff --git a/examples/notebook/examples/memory_layout_and_infeasibility_sat.ipynb b/examples/notebook/examples/memory_layout_and_infeasibility_sat.ipynb new file mode 100644 index 00000000000..5512d91f34e --- /dev/null +++ b/examples/notebook/examples/memory_layout_and_infeasibility_sat.ipynb @@ -0,0 +1,255 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "google", + "metadata": {}, + "source": [ + "##### Copyright 2024 Google LLC." + ] + }, + { + "cell_type": "markdown", + "id": "apache", + "metadata": {}, + "source": [ + "Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "you may not use this file except in compliance with the License.\n", + "You may obtain a copy of the License at\n", + "\n", + " http://www.apache.org/licenses/LICENSE-2.0\n", + "\n", + "Unless required by applicable law or agreed to in writing, software\n", + "distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "See the License for the specific language governing permissions and\n", + "limitations under the License.\n" + ] + }, + { + "cell_type": "markdown", + "id": "basename", + "metadata": {}, + "source": [ + "# memory_layout_and_infeasibility_sat" + ] + }, + { + "cell_type": "markdown", + "id": "link", + "metadata": {}, + "source": [ + "\n", + "\n", + "\n", + "
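The `escape_the_maze` model above rests on `add_circuit` over `(tail, head, literal)` arcs: each move literal also forces `rank(head) == rank(tail) + 1` through `only_enforce_if`, and a fixed `True` arc from the end cell back to the start closes the Hamiltonian path into a circuit. The skeleton of that construction on an invented 3-node graph:

```python
from ortools.sat.python import cp_model

# Sketch of the add_circuit + rank channeling used in escape_the_maze above,
# on an invented 3-node graph with all pairwise moves allowed.
model = cp_model.CpModel()
n = 3
rank = [model.new_int_var(0, n - 1, f"rank_{i}") for i in range(n)]

arcs = []
for i in range(n):
    for j in range(n):
        if i == j:
            continue
        move = model.new_bool_var(f"move_{i}_{j}")
        # Taking arc i -> j means j is visited right after i.
        model.add(rank[j] == rank[i] + 1).only_enforce_if(move)
        arcs.append((i, j, move))

# Pin the endpoints and close the path into a circuit with a constant arc.
model.add(rank[0] == 0)
model.add(rank[n - 1] == n - 1)
arcs.append((n - 1, 0, True))
model.add_circuit(arcs)

solver = cp_model.CpSolver()
if solver.solve(model) == cp_model.OPTIMAL:
    print([solver.value(r) for r in rank])  # a valid visiting order
```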
\n", + "Run in Google Colab\n", + "\n", + "View source on GitHub\n", + "
" + ] + }, + { + "cell_type": "markdown", + "id": "doc", + "metadata": {}, + "source": [ + "First, you must install [ortools](https://pypi.org/project/ortools/) package in this colab." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "install", + "metadata": {}, + "outputs": [], + "source": [ + "%pip install ortools" + ] + }, + { + "cell_type": "markdown", + "id": "description", + "metadata": {}, + "source": [ + "\n", + "Solves the memory allocation problem, and returns a minimal set of demands to explain infeasibility.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "code", + "metadata": {}, + "outputs": [], + "source": [ + "from collections.abc import Sequence\n", + "from typing import List\n", + "\n", + "from ortools.sat.colab import flags\n", + "from google.protobuf import text_format\n", + "from ortools.sat.python import cp_model\n", + "\n", + "\n", + "_OUTPUT_PROTO = flags.define_string(\n", + " \"output_proto\", \"\", \"Output file to write the cp_model proto to.\"\n", + ")\n", + "_PARAMS = flags.define_string(\n", + " \"params\", \"num_workers:1,linearization_level:2\", \"Sat solver parameters.\"\n", + ")\n", + "\n", + "\n", + "# Input of the problem.\n", + "DEMANDS = [\n", + " [1578, 1583, 43008, 1],\n", + " [1588, 1589, 11264, 1],\n", + " [1590, 1595, 43008, 1],\n", + " [1583, 1588, 47872, 1],\n", + " [1589, 1590, 22848, 1],\n", + " [1586, 1590, 22848, 1],\n", + " [1591, 1594, 43008, 1],\n", + "]\n", + "CAPACITY = 98304\n", + "\n", + "\n", + "def solve_hard_model(output_proto: str, params: str) -> bool:\n", + " \"\"\"Solves the hard assignment model.\"\"\"\n", + " print(\"Solving the hard assignment model\")\n", + " model = cp_model.CpModel()\n", + "\n", + " x_intervals: List[cp_model.IntervalVar] = []\n", + " y_starts: List[cp_model.IntVar] = []\n", + " y_intervals: List[cp_model.IntervalVar] = []\n", + "\n", + " for start_time, end_time, demand, _ in DEMANDS:\n", + " x_interval = model.new_fixed_size_interval_var(\n", + " start_time, end_time - start_time + 1, \"\"\n", + " )\n", + " y_start = model.new_int_var(0, CAPACITY - demand, \"\")\n", + " y_interval = model.new_fixed_size_interval_var(y_start, demand, \"\")\n", + "\n", + " x_intervals.append(x_interval)\n", + " y_starts.append(y_start)\n", + " y_intervals.append(y_interval)\n", + "\n", + " model.add_no_overlap_2d(x_intervals, y_intervals)\n", + "\n", + " if output_proto:\n", + " model.export_to_file(output_proto)\n", + "\n", + " solver = cp_model.CpSolver()\n", + " if params:\n", + " text_format.Parse(params, solver.parameters)\n", + " status = solver.solve(model)\n", + " print(solver.response_stats())\n", + "\n", + " if status in (cp_model.FEASIBLE, cp_model.OPTIMAL):\n", + " for index, start_var in enumerate(y_starts):\n", + " print(f\"task {index} buffer starts at {solver.value(start_var)}\")\n", + "\n", + " return status != cp_model.INFEASIBLE\n", + "\n", + "\n", + "def solve_soft_model_with_assumptions() -> None:\n", + " \"\"\"Solves the soft model using assumptions.\"\"\"\n", + " print(\"Solving the soft model using assumptions\")\n", + "\n", + " model = cp_model.CpModel()\n", + "\n", + " presences: List[cp_model.IntVar] = []\n", + " x_intervals: List[cp_model.IntervalVar] = []\n", + " y_starts: List[cp_model.IntVar] = []\n", + " y_intervals: List[cp_model.IntervalVar] = []\n", + "\n", + " for start, end, demand, unused_alignment in DEMANDS:\n", + " presence = model.new_bool_var(\"\")\n", + " x_interval = model.new_optional_fixed_size_interval_var(\n", + " start, end 
- start + 1, presence, \"\"\n", + " )\n", + " y_start = model.new_int_var(0, CAPACITY - demand, \"\")\n", + " y_interval = model.new_optional_fixed_size_interval_var(\n", + " y_start, demand, presence, \"\"\n", + " )\n", + "\n", + " presences.append(presence)\n", + " x_intervals.append(x_interval)\n", + " y_starts.append(y_start)\n", + " y_intervals.append(y_interval)\n", + "\n", + " model.add_no_overlap_2d(x_intervals, y_intervals)\n", + " model.add_assumptions(presences)\n", + "\n", + " solver = cp_model.CpSolver()\n", + " status = solver.solve(model)\n", + " print(solver.response_stats())\n", + " if status == cp_model.INFEASIBLE:\n", + " # The list actually contains the indices of the variables sufficient to\n", + " # explain infeasibility.\n", + " infeasible_variable_indices = solver.sufficient_assumptions_for_infeasibility()\n", + " infeasible_variable_indices_set = set(infeasible_variable_indices)\n", + "\n", + " for index, presence in enumerate(presences):\n", + " if presence.index in infeasible_variable_indices_set:\n", + " print(f\"using task {index} is sufficient to explain infeasibility\")\n", + "\n", + "\n", + "def solve_soft_model_with_maximization(params: str) -> None:\n", + " \"\"\"Solves the soft model using maximization.\"\"\"\n", + " print(\"Solving the soft model using minimization\")\n", + "\n", + " model = cp_model.CpModel()\n", + "\n", + " presences: List[cp_model.IntVar] = []\n", + " x_intervals: List[cp_model.IntervalVar] = []\n", + " y_starts: List[cp_model.IntVar] = []\n", + " y_intervals: List[cp_model.IntervalVar] = []\n", + "\n", + " for start, end, demand, unused_alignment in DEMANDS:\n", + " presence = model.new_bool_var(\"\")\n", + " x_interval = model.new_optional_fixed_size_interval_var(\n", + " start, end - start + 1, presence, \"\"\n", + " )\n", + " y_start = model.new_int_var(0, CAPACITY - demand, \"\")\n", + " y_interval = model.new_optional_fixed_size_interval_var(\n", + " y_start, demand, presence, \"\"\n", + " )\n", + "\n", + " presences.append(presence)\n", + " x_intervals.append(x_interval)\n", + " y_starts.append(y_start)\n", + " y_intervals.append(y_interval)\n", + "\n", + " model.add_no_overlap_2d(x_intervals, y_intervals)\n", + "\n", + " model.maximize(sum(presences))\n", + "\n", + " solver = cp_model.CpSolver()\n", + " if params:\n", + " text_format.Parse(params, solver.parameters)\n", + " status = solver.solve(model)\n", + " print(solver.response_stats())\n", + " if status == cp_model.OPTIMAL or status == cp_model.FEASIBLE:\n", + " for index, presence in enumerate(presences):\n", + " if not solver.boolean_value(presence):\n", + " print(f\"task {index} does not fit\")\n", + " else:\n", + " print(f\"task {index} buffer starts at {solver.value(y_starts[index])}\")\n", + "\n", + "\n", + "def main(argv: Sequence[str]) -> None:\n", + " if len(argv) > 1:\n", + " raise app.UsageError(\"Too many command-line arguments.\")\n", + " if not solve_hard_model(_OUTPUT_PROTO.value, _PARAMS.value):\n", + " solve_soft_model_with_assumptions()\n", + " solve_soft_model_with_maximization(_PARAMS.value)\n", + "\n", + "\n", + "main()\n", + "\n" + ] + } + ], + "metadata": {}, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/notebook/examples/no_wait_baking_scheduling_sat.ipynb b/examples/notebook/examples/no_wait_baking_scheduling_sat.ipynb index 66996192946..e82657ba4a1 100644 --- a/examples/notebook/examples/no_wait_baking_scheduling_sat.ipynb +++ b/examples/notebook/examples/no_wait_baking_scheduling_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", 
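The assumption mechanism used by `solve_soft_model_with_assumptions` above is worth isolating: `add_assumptions` pins literals for a single solve, and when the result is INFEASIBLE, `sufficient_assumptions_for_infeasibility` returns variable indices forming a core that explains the conflict. A deliberately contradictory toy model showing the round trip:

```python
from ortools.sat.python import cp_model

# Sketch of assumption-based infeasibility explanation, as used above,
# on an invented, deliberately contradictory model.
model = cp_model.CpModel()
x = model.new_int_var(0, 10, "x")
a = model.new_bool_var("a")
b = model.new_bool_var("b")
model.add(x <= 3).only_enforce_if(a)
model.add(x >= 7).only_enforce_if(b)

# Assume both literals; together they are contradictory.
model.add_assumptions([a, b])

solver = cp_model.CpSolver()
if solver.solve(model) == cp_model.INFEASIBLE:
    core = set(solver.sufficient_assumptions_for_infeasibility())
    for lit in (a, b):
        if lit.index in core:
            print(f"{lit.name} is part of the infeasibility explanation")
```

This is cheaper than the maximization fallback when all you need is a small explanation rather than a best-effort packing.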
"metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -90,16 +90,16 @@ "outputs": [], "source": [ "import collections\n", - "from typing import Sequence\n", + "from typing import List, Sequence, Tuple\n", + "\n", "from ortools.sat.colab import flags\n", "from google.protobuf import text_format\n", "from ortools.sat.python import cp_model\n", "\n", "_PARAMS = flags.define_string(\n", - " \"params\", \"num_search_workers:16, max_time_in_seconds:30\", \"Sat solver parameters.\"\n", - ")\n", - "_PROTO_FILE = flags.define_string(\n", - " \"proto_file\", \"\", \"If not empty, output the proto to this file.\"\n", + " \"params\",\n", + " \"num_search_workers:16, max_time_in_seconds:30\",\n", + " \"Sat solver parameters.\",\n", ")\n", "\n", "# Recipes\n", @@ -117,7 +117,7 @@ "DISPLAY = \"display\"\n", "\n", "\n", - "class Task(object):\n", + "class Task:\n", " \"\"\"A unit baking task.\n", "\n", " - Simple baking tasks have a fixed duration. They are performed by workers.\n", @@ -131,7 +131,7 @@ " self.max_duration = max_duration\n", "\n", "\n", - "class Skill(object):\n", + "class Skill:\n", " \"\"\"The skill of a worker or the capability of a machine.\"\"\"\n", "\n", " def __init__(self, name, efficiency):\n", @@ -140,19 +140,21 @@ " self.efficiency = efficiency\n", "\n", "\n", - "class Recipe(object):\n", + "class Recipe:\n", " \"\"\"A recipe is a sequence of cooking tasks.\"\"\"\n", "\n", " def __init__(self, name):\n", " self.name = name\n", " self.tasks = []\n", "\n", - " def add_task(self, resource_name, min_duration, max_duration):\n", + " def add_task(\n", + " self, resource_name: str, min_duration: int, max_duration: int\n", + " ) -> \"Recipe\":\n", " self.tasks.append(Task(resource_name, min_duration, max_duration))\n", " return self\n", "\n", "\n", - "class Resource(object):\n", + "class Resource:\n", " \"\"\"A resource is a worker, a machine, or just some space for cakes to rest.\n", "\n", " - Workers have a capacity of 1 and can have variable efficiency.\n", @@ -168,12 +170,12 @@ " self.capacity = capacity\n", " self.skills = []\n", "\n", - " def add_skill(self, skill_name, efficiency):\n", + " def add_skill(self, skill_name: str, efficiency: float) -> \"Resource\":\n", " self.skills.append(Skill(skill_name, efficiency))\n", " return self\n", "\n", "\n", - "class Order(object):\n", + "class Order:\n", " \"\"\"An order is a recipe that should be delivered at a given due date.\"\"\"\n", "\n", " def __init__(self, unique_id, recipe_name, due_date, quantity):\n", @@ -191,7 +193,7 @@ " self.quantity = quantity\n", "\n", "\n", - "def set_up_data():\n", + "def set_up_data() -> Tuple[List[Recipe], List[Resource], List[Order]]:\n", " \"\"\"Set up the bakery problem data.\"\"\"\n", "\n", " # Recipes.\n", @@ -260,7 +262,9 @@ " return recipes, resources, orders\n", "\n", "\n", - "def solve_with_cp_sat(recipes, resources, orders):\n", + "def solve_with_cp_sat(\n", + " recipes: List[Recipe], resources: List[Resource], orders: List[Order]\n", + ") -> None:\n", " \"\"\"Build the optimization model, and solve the problem.\"\"\"\n", "\n", " model = cp_model.CpModel()\n", @@ -297,70 +301,68 @@ " skill_name = task.name\n", " suffix = f\"_{order.unique_id}_batch{batch}_{skill_name}\"\n", "\n", - " start = None\n", " if previous_end is None:\n", - " start = model.NewIntVar(start_work, horizon, f\"start{suffix}\")\n", + " start = model.new_int_var(start_work, horizon, f\"start{suffix}\")\n", " orders_sequence_of_events[order_id].append(\n", " (start, 
f\"start{suffix}\")\n", " )\n", " else:\n", " start = previous_end\n", "\n", - " size = model.NewIntVar(\n", + " size = model.new_int_var(\n", " task.min_duration, task.max_duration, f\"size{suffix}\"\n", " )\n", - " end = None\n", " if task == recipe.tasks[-1]:\n", " # The order must end after the due_date. Ideally, exactly at the\n", " # due_date.\n", - " tardiness = model.NewIntVar(0, horizon - due_date, f\"end{suffix}\")\n", + " tardiness = model.new_int_var(0, horizon - due_date, f\"end{suffix}\")\n", " end = tardiness + due_date\n", "\n", " # Store the end_var for the objective.\n", " tardiness_vars.append(tardiness)\n", " else:\n", - " end = model.NewIntVar(start_work, horizon, f\"end{suffix}\")\n", + " end = model.new_int_var(start_work, horizon, f\"end{suffix}\")\n", " orders_sequence_of_events[order_id].append((end, f\"end{suffix}\"))\n", " previous_end = end\n", "\n", " # Per resource copy.\n", " presence_literals = []\n", " for resource in resource_list_by_skill_name[skill_name]:\n", - " presence = model.NewBoolVar(f\"presence{suffix}_{resource.name}\")\n", - " copy = model.NewOptionalIntervalVar(\n", + " presence = model.new_bool_var(f\"presence{suffix}_{resource.name}\")\n", + " copy = model.new_optional_interval_var(\n", " start, size, end, presence, f\"interval{suffix}_{resource.name}\"\n", " )\n", " interval_list_by_resource_name[resource.name].append(copy)\n", " presence_literals.append(presence)\n", "\n", " # Only one copy will be performed.\n", - " model.AddExactlyOne(presence_literals)\n", + " model.add_exactly_one(presence_literals)\n", "\n", " # Create resource constraints.\n", " for resource in resources:\n", " intervals = interval_list_by_resource_name[resource.name]\n", " if resource.capacity == 1:\n", - " model.AddNoOverlap(intervals)\n", + " model.add_no_overlap(intervals)\n", " else:\n", - " model.AddCumulative(intervals, [1] * len(intervals), resource.capacity)\n", + " model.add_cumulative(intervals, [1] * len(intervals), resource.capacity)\n", "\n", " # The objective is to minimize the sum of the tardiness values of each jobs.\n", " # The tardiness is difference between the end time of an order and its\n", " # due date.\n", - " model.Minimize(sum(tardiness_vars))\n", + " model.minimize(sum(tardiness_vars))\n", "\n", " # Solve model.\n", " solver = cp_model.CpSolver()\n", " if _PARAMS.value:\n", " text_format.Parse(_PARAMS.value, solver.parameters)\n", " solver.parameters.log_search_progress = True\n", - " status = solver.Solve(model)\n", + " status = solver.solve(model)\n", "\n", " if status == cp_model.OPTIMAL or status == cp_model.FEASIBLE:\n", " for order_id in sorted_orders:\n", " print(f\"{order_id}:\")\n", " for time_expr, event_id in orders_sequence_of_events[order_id]:\n", - " time = solver.Value(time_expr)\n", + " time = solver.value(time_expr)\n", " print(f\" {event_id} at {time // 60}:{time % 60:02}\")\n", "\n", "\n", diff --git a/examples/notebook/examples/nqueens_sat.ipynb b/examples/notebook/examples/nqueens_sat.ipynb index ae32b07a04e..391be2abc69 100644 --- a/examples/notebook/examples/nqueens_sat.ipynb +++ b/examples/notebook/examples/nqueens_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { @@ -94,27 +94,28 @@ "class NQueenSolutionPrinter(cp_model.CpSolverSolutionCallback):\n", " \"\"\"Print intermediate solutions.\"\"\"\n", "\n", - " def __init__(self, queens):\n", + " def __init__(self, queens: list[cp_model.IntVar]):\n", " cp_model.CpSolverSolutionCallback.__init__(self)\n", - " self.__queens = queens\n", - " self.__solution_count = 0\n", - " self.__start_time = time.time()\n", + " self._queens = queens\n", + " self._solution_count = 0\n", + " self._start_time = time.time()\n", "\n", - " def SolutionCount(self):\n", - " return self.__solution_count\n", + " @property\n", + " def solution_count(self) -> int:\n", + " return self._solution_count\n", "\n", - " def on_solution_callback(self):\n", + " def on_solution_callback(self) -> None:\n", " current_time = time.time()\n", " print(\n", - " \"Solution %i, time = %f s\"\n", - " % (self.__solution_count, current_time - self.__start_time)\n", + " f\"Solution{self._solution_count}, time =\"\n", + " f\" {current_time - self._start_time} s\"\n", " )\n", - " self.__solution_count += 1\n", + " self._solution_count += 1\n", "\n", - " all_queens = range(len(self.__queens))\n", + " all_queens = range(len(self._queens))\n", " for i in all_queens:\n", " for j in all_queens:\n", - " if self.Value(self.__queens[j]) == i:\n", + " if self.value(self._queens[j]) == i:\n", " # There is a queen in column j, row i.\n", " print(\"Q\", end=\" \")\n", " else:\n", @@ -131,41 +132,43 @@ "\n", " ### Creates the variables.\n", " # The array index is the column, and the value is the row.\n", - " queens = [model.NewIntVar(0, board_size - 1, \"x%i\" % i) for i in range(board_size)]\n", + " queens = [\n", + " model.new_int_var(0, board_size - 1, \"x%i\" % i) for i in range(board_size)\n", + " ]\n", "\n", " ### Creates the constraints.\n", "\n", " # All columns must be different because the indices of queens are all\n", " # different, so we just add the all different constraint on the rows.\n", - " model.AddAllDifferent(queens)\n", + " model.add_all_different(queens)\n", "\n", " # No two queens can be on the same diagonal.\n", " diag1 = []\n", " diag2 = []\n", " for i in range(board_size):\n", - " q1 = model.NewIntVar(0, 2 * board_size, \"diag1_%i\" % i)\n", - " q2 = model.NewIntVar(-board_size, board_size, \"diag2_%i\" % i)\n", + " q1 = model.new_int_var(0, 2 * board_size, \"diag1_%i\" % i)\n", + " q2 = model.new_int_var(-board_size, board_size, \"diag2_%i\" % i)\n", " diag1.append(q1)\n", " diag2.append(q2)\n", - " model.Add(q1 == queens[i] + i)\n", - " model.Add(q2 == queens[i] - i)\n", - " model.AddAllDifferent(diag1)\n", - " model.AddAllDifferent(diag2)\n", + " model.add(q1 == queens[i] + i)\n", + " model.add(q2 == queens[i] - i)\n", + " model.add_all_different(diag1)\n", + " model.add_all_different(diag2)\n", "\n", " ### Solve model.\n", " solver = cp_model.CpSolver()\n", " solution_printer = NQueenSolutionPrinter(queens)\n", " # Enumerate all solutions.\n", " solver.parameters.enumerate_all_solutions = True\n", - " # Solve.\n", - " solver.Solve(model, solution_printer)\n", + " # solve.\n", + " solver.solve(model, solution_printer)\n", "\n", " print()\n", " print(\"Statistics\")\n", - " print(\" - conflicts : %i\" % solver.NumConflicts())\n", - " print(\" - branches : %i\" % solver.NumBranches())\n", - " print(\" - wall time : %f s\" % solver.WallTime())\n", - " print(\" - solutions found : %i\" % solution_printer.SolutionCount())\n", + " print(\" - conflicts : %i\" % solver.num_conflicts)\n", + " print(\" - branches : %i\" % 
solver.num_branches)\n", + " print(\" - wall time : %f s\" % solver.wall_time)\n", + " print(\" - solutions found : %i\" % solution_printer.solution_count)\n", "\n", "\n", "main()\n", diff --git a/examples/notebook/examples/pell_equation_sat.ipynb b/examples/notebook/examples/pell_equation_sat.ipynb new file mode 100644 index 00000000000..d136340121a --- /dev/null +++ b/examples/notebook/examples/pell_equation_sat.ipynb @@ -0,0 +1,141 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "google", + "metadata": {}, + "source": [ + "##### Copyright 2024 Google LLC." + ] + }, + { + "cell_type": "markdown", + "id": "apache", + "metadata": {}, + "source": [ + "Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "you may not use this file except in compliance with the License.\n", + "You may obtain a copy of the License at\n", + "\n", + " http://www.apache.org/licenses/LICENSE-2.0\n", + "\n", + "Unless required by applicable law or agreed to in writing, software\n", + "distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "See the License for the specific language governing permissions and\n", + "limitations under the License.\n" + ] + }, + { + "cell_type": "markdown", + "id": "basename", + "metadata": {}, + "source": [ + "# pell_equation_sat" + ] + }, + { + "cell_type": "markdown", + "id": "link", + "metadata": {}, + "source": [ + "\n", + "\n", + "\n", + "
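The rewritten `NQueenSolutionPrinter` above shows the new callback conventions: a read-only `solution_count` property instead of a getter method, and snake_case `self.value(...)` inside `on_solution_callback`. A minimal callback following that shape, on a throwaway all-different model:

```python
from ortools.sat.python import cp_model

# Minimal solution callback in the snake_case style of NQueenSolutionPrinter
# above; the 4-variable all_different model is just a vehicle for it.
class SolutionCounter(cp_model.CpSolverSolutionCallback):
    def __init__(self, variables: list[cp_model.IntVar]):
        cp_model.CpSolverSolutionCallback.__init__(self)
        self._variables = variables
        self._solution_count = 0

    @property
    def solution_count(self) -> int:
        return self._solution_count

    def on_solution_callback(self) -> None:
        self._solution_count += 1
        print([self.value(v) for v in self._variables])

model = cp_model.CpModel()
xs = [model.new_int_var(0, 3, f"x{i}") for i in range(4)]
model.add_all_different(xs)

solver = cp_model.CpSolver()
solver.parameters.enumerate_all_solutions = True
counter = SolutionCounter(xs)
solver.solve(model, counter)
print("solutions:", counter.solution_count)
```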
\n", + "Run in Google Colab\n", + "\n", + "View source on GitHub\n", + "
" + ] + }, + { + "cell_type": "markdown", + "id": "doc", + "metadata": {}, + "source": [ + "First, you must install [ortools](https://pypi.org/project/ortools/) package in this colab." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "install", + "metadata": {}, + "outputs": [], + "source": [ + "%pip install ortools" + ] + }, + { + "cell_type": "markdown", + "id": "description", + "metadata": {}, + "source": [ + "\n", + "Solves Pell's equation x^2 - coeff * y^2 = 1.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "code", + "metadata": {}, + "outputs": [], + "source": [ + "from collections.abc import Sequence\n", + "\n", + "from ortools.sat.colab import flags\n", + "from ortools.sat.python import cp_model\n", + "\n", + "\n", + "_COEFF = flags.define_integer(\"coeff\", 1, \"The Pell equation coefficient.\")\n", + "_MAX_VALUE = flags.define_integer(\"max_value\", 5000_000, \"The maximum value.\")\n", + "\n", + "\n", + "def solve_pell(coeff: int, max_value: int) -> None:\n", + " \"\"\"Solves Pell's equation x^2 - coeff * y^2 = 1.\"\"\"\n", + " model = cp_model.CpModel()\n", + "\n", + " x = model.new_int_var(1, max_value, \"x\")\n", + " y = model.new_int_var(1, max_value, \"y\")\n", + "\n", + " # Pell's equation:\n", + " x_square = model.new_int_var(1, max_value * max_value, \"x_square\")\n", + " y_square = model.new_int_var(1, max_value * max_value, \"y_square\")\n", + " model.add_multiplication_equality(x_square, x, x)\n", + " model.add_multiplication_equality(y_square, y, y)\n", + " model.add(x_square - coeff * y_square == 1)\n", + "\n", + " model.add_decision_strategy(\n", + " [x, y], cp_model.CHOOSE_MIN_DOMAIN_SIZE, cp_model.SELECT_MIN_VALUE\n", + " )\n", + "\n", + " solver = cp_model.CpSolver()\n", + " solver.parameters.num_workers = 12\n", + " solver.parameters.log_search_progress = True\n", + " solver.parameters.cp_model_presolve = True\n", + " solver.parameters.cp_model_probing_level = 0\n", + "\n", + " result = solver.solve(model)\n", + " if result == cp_model.OPTIMAL:\n", + " print(f\"x={solver.value(x)} y={solver.value(y)} coeff={coeff}\")\n", + " if solver.value(x) ** 2 - coeff * (solver.value(y) ** 2) != 1:\n", + " raise ValueError(\"Pell equation not satisfied.\")\n", + "\n", + "\n", + "def main(argv: Sequence[str]) -> None:\n", + " if len(argv) > 1:\n", + " raise app.UsageError(\"Too many command-line arguments.\")\n", + " solve_pell(_COEFF.value, _MAX_VALUE.value)\n", + "\n", + "\n", + "main()\n", + "\n" + ] + } + ], + "metadata": {}, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/notebook/examples/pentominoes_sat.ipynb b/examples/notebook/examples/pentominoes_sat.ipynb new file mode 100644 index 00000000000..8f47a9623c7 --- /dev/null +++ b/examples/notebook/examples/pentominoes_sat.ipynb @@ -0,0 +1,280 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "google", + "metadata": {}, + "source": [ + "##### Copyright 2024 Google LLC." 
+ ] + }, + { + "cell_type": "markdown", + "id": "apache", + "metadata": {}, + "source": [ + "Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "you may not use this file except in compliance with the License.\n", + "You may obtain a copy of the License at\n", + "\n", + " http://www.apache.org/licenses/LICENSE-2.0\n", + "\n", + "Unless required by applicable law or agreed to in writing, software\n", + "distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "See the License for the specific language governing permissions and\n", + "limitations under the License.\n" + ] + }, + { + "cell_type": "markdown", + "id": "basename", + "metadata": {}, + "source": [ + "# pentominoes_sat" + ] + }, + { + "cell_type": "markdown", + "id": "link", + "metadata": {}, + "source": [ + "\n", + "\n", + "\n", + "
\n", + "Run in Google Colab\n", + "\n", + "View source on GitHub\n", + "
" + ] + }, + { + "cell_type": "markdown", + "id": "doc", + "metadata": {}, + "source": [ + "First, you must install [ortools](https://pypi.org/project/ortools/) package in this colab." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "install", + "metadata": {}, + "outputs": [], + "source": [ + "%pip install ortools" + ] + }, + { + "cell_type": "markdown", + "id": "description", + "metadata": {}, + "source": [ + "\n", + "Example to solves a pentomino paving problem.\n", + "\n", + "Given a subset of n different pentomino, the problem is to pave a square of\n", + "size 5 x n. The problem is reduced to an exact set cover problem and encoded\n", + "as a linear boolean problem.\n", + "\n", + "This problem comes from the game Katamino:\n", + "http://boardgamegeek.com/boardgame/6931/katamino\n", + "\n", + "This example also includes suggestions from\n", + "https://web.ma.utexas.edu/users/smmg/archive/1997/radin.html\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "code", + "metadata": {}, + "outputs": [], + "source": [ + "from collections.abc import Sequence\n", + "from typing import Dict, List\n", + "\n", + "from ortools.sat.colab import flags\n", + "from google.protobuf import text_format\n", + "from ortools.sat.python import cp_model\n", + "\n", + "\n", + "_PARAMS = flags.define_string(\n", + " \"params\",\n", + " \"num_search_workers:16,log_search_progress:false,max_time_in_seconds:45\",\n", + " \"Sat solver parameters.\",\n", + ")\n", + "\n", + "_PIECES = flags.define_string(\n", + " \"pieces\", \"FILNPTUVWXYZ\", \"The subset of pieces to consider.\"\n", + ")\n", + "\n", + "_HEIGHT = flags.define_integer(\"height\", 5, \"The height of the box.\")\n", + "\n", + "\n", + "def is_one(mask: List[List[int]], x: int, y: int, orientation: int) -> bool:\n", + " \"\"\"Returns true if the oriented piece is 1 at position [i][j].\n", + "\n", + " The 3 bits in orientation respectively mean: transposition, symmetry by\n", + " x axis, symmetry by y axis.\n", + "\n", + " Args:\n", + " mask: The shape of the piece.\n", + " x: position.\n", + " y: position.\n", + " orientation: between 0 and 7.\n", + " \"\"\"\n", + " if orientation & 1:\n", + " tmp: int = x\n", + " x = y\n", + " y = tmp\n", + " if orientation & 2:\n", + " x = len(mask[0]) - 1 - x\n", + " if orientation & 4:\n", + " y = len(mask) - 1 - y\n", + " return mask[y][x] == 1\n", + "\n", + "\n", + "def get_height(mask: List[List[int]], orientation: int) -> int:\n", + " if orientation & 1:\n", + " return len(mask[0])\n", + " return len(mask)\n", + "\n", + "\n", + "def get_width(mask: List[List[int]], orientation: int) -> int:\n", + " if orientation & 1:\n", + " return len(mask)\n", + " return len(mask[0])\n", + "\n", + "\n", + "def orientation_is_redundant(mask: List[List[int]], orientation: int) -> bool:\n", + " \"\"\"Checks if the current rotated figure is the same as a previous rotation.\"\"\"\n", + " size_i: int = get_width(mask, orientation)\n", + " size_j: int = get_height(mask, orientation)\n", + " for o in range(orientation):\n", + " if size_i != get_width(mask, o):\n", + " continue\n", + " if size_j != get_height(mask, o):\n", + " continue\n", + "\n", + " is_the_same: bool = True\n", + " for k in range(size_i):\n", + " if not is_the_same:\n", + " break\n", + " for l in range(size_j):\n", + " if not is_the_same:\n", + " break\n", + " if is_one(mask, k, l, orientation) != is_one(mask, k, l, o):\n", + " is_the_same = False\n", + " if is_the_same:\n", + " return True\n", + " return False\n", + 
"\n", + "\n", + "def generate_and_solve_problem(pieces: Dict[str, List[List[int]]]) -> None:\n", + " \"\"\"Solves the pentominoes problem.\"\"\"\n", + " box_height = _HEIGHT.value\n", + " box_width = 5 * len(pieces) // box_height\n", + " print(f\"Box has dimension {box_height} * {box_width}\")\n", + "\n", + " model = cp_model.CpModel()\n", + " position_to_variables: List[List[List[cp_model.IntVar]]] = [\n", + " [[] for _ in range(box_width)] for _ in range(box_height)\n", + " ]\n", + "\n", + " for name, mask in pieces.items():\n", + " all_position_variables = []\n", + " for orientation in range(8):\n", + " if orientation_is_redundant(mask, orientation):\n", + " continue\n", + " piece_width = get_width(mask, orientation)\n", + " piece_height = get_height(mask, orientation)\n", + " for i in range(box_width - piece_width + 1):\n", + " for j in range(box_height - piece_height + 1):\n", + " v = model.new_bool_var(name)\n", + " all_position_variables.append(v)\n", + " for k in range(piece_width):\n", + " for l in range(piece_height):\n", + " if is_one(mask, k, l, orientation):\n", + " position_to_variables[j + l][i + k].append(v)\n", + "\n", + " # Only one combination is selected.\n", + " model.add_exactly_one(all_position_variables)\n", + "\n", + " for one_column in position_to_variables:\n", + " for all_pieces_in_one_position in one_column:\n", + " model.add_exactly_one(all_pieces_in_one_position)\n", + "\n", + " # Solve the model.\n", + " solver = cp_model.CpSolver()\n", + " if _PARAMS.value:\n", + " text_format.Parse(_PARAMS.value, solver.parameters)\n", + " status = solver.solve(model)\n", + "\n", + " print(\n", + " f\"Problem {_PIECES.value} box {box_height}*{box_width} solved in\"\n", + " f\" {solver.wall_time}s with status {solver.status_name(status)}\"\n", + " )\n", + "\n", + " # Print the solution.\n", + " if status == cp_model.OPTIMAL:\n", + " for y in range(box_height):\n", + " line = \"\"\n", + " for x in range(box_width):\n", + " for v in position_to_variables[y][x]:\n", + " if solver.BooleanValue(v):\n", + " line += v.name\n", + " break\n", + " print(line)\n", + "\n", + "\n", + "def main(argv: Sequence[str]) -> None:\n", + " if len(argv) > 1:\n", + " raise app.UsageError(\"Too many command-line arguments.\")\n", + "\n", + " # Pieces are stored in a matrix. 
mask[height][width]\n", + " pieces: Dict[str, List[List[int]]] = {\n", + " \"F\": [[0, 1, 1], [1, 1, 0], [0, 1, 0]],\n", + " \"I\": [[1, 1, 1, 1, 1]],\n", + " \"L\": [[1, 1, 1, 1], [1, 0, 0, 0]],\n", + " \"N\": [[1, 1, 1, 0], [0, 0, 1, 1]],\n", + " \"P\": [[1, 1, 1], [1, 1, 0]],\n", + " \"T\": [[1, 1, 1], [0, 1, 0], [0, 1, 0]],\n", + " \"U\": [[1, 0, 1], [1, 1, 1]],\n", + " \"V\": [[1, 0, 0], [1, 0, 0], [1, 1, 1]],\n", + " \"W\": [[1, 0, 0], [1, 1, 0], [0, 1, 1]],\n", + " \"X\": [[0, 1, 0], [1, 1, 1], [0, 1, 0]],\n", + " \"Y\": [[1, 1, 1, 1], [0, 1, 0, 0]],\n", + " \"Z\": [[1, 1, 0], [0, 1, 0], [0, 1, 1]],\n", + " }\n", + " selected_pieces: Dict[str, List[List[int]]] = {}\n", + " for p in _PIECES.value:\n", + " if p not in pieces:\n", + " print(f\"Piece {p} not found in the list of pieces\")\n", + " return\n", + " selected_pieces[p] = pieces[p]\n", + " if (len(selected_pieces) * 5) % _HEIGHT.value != 0:\n", + " print(\n", + " f\"The height {_HEIGHT.value} does not divide the total area\"\n", + " f\" {5 * len(selected_pieces)}\"\n", + " )\n", + " return\n", + " if _HEIGHT.value < 3 or 5 * len(selected_pieces) // _HEIGHT.value < 3:\n", + " print(f\"The height {_HEIGHT.value} is not compatible with the pieces.\")\n", + " return\n", + "\n", + " generate_and_solve_problem(selected_pieces)\n", + "\n", + "\n", + "main()\n", + "\n" + ] + } + ], + "metadata": {}, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/notebook/examples/prize_collecting_tsp.ipynb b/examples/notebook/examples/prize_collecting_tsp.ipynb index 0a22c80eef5..c18f94325eb 100644 --- a/examples/notebook/examples/prize_collecting_tsp.ipynb +++ b/examples/notebook/examples/prize_collecting_tsp.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/examples/prize_collecting_tsp_sat.ipynb b/examples/notebook/examples/prize_collecting_tsp_sat.ipynb index b963f36a002..df2d25e7c15 100644 --- a/examples/notebook/examples/prize_collecting_tsp_sat.ipynb +++ b/examples/notebook/examples/prize_collecting_tsp_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
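`generate_and_solve_problem` above reduces the paving to exact set cover: every feasible placement (piece, non-redundant orientation, offset) becomes one Boolean, with `add_exactly_one` both per piece (placed once) and per cell (covered once). The reduction shrunk to a miniature invented instance, paving a 2x2 box with two 1x2 dominoes:

```python
from ortools.sat.python import cp_model

# Exact-cover sketch of the pentomino encoding above, shrunk to paving a
# 2x2 box with two 1x2 dominoes (an invented miniature instance).
model = cp_model.CpModel()
cells = {(x, y): [] for x in range(2) for y in range(2)}

for name in ("d1", "d2"):
    options = []
    for covered in ([(0, 0), (1, 0)], [(0, 1), (1, 1)],
                    [(0, 0), (0, 1)], [(1, 0), (1, 1)]):
        v = model.new_bool_var(name)
        for cell in covered:
            cells[cell].append(v)
        options.append(v)
    # Each piece is placed exactly once.
    model.add_exactly_one(options)

# Each cell is covered by exactly one placement.
for vars_in_cell in cells.values():
    model.add_exactly_one(vars_in_cell)

solver = cp_model.CpSolver()
print(solver.status_name(solver.solve(model)))
```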
] }, { @@ -139,14 +139,19 @@ "\n", "\n", "# Create a console solution printer.\n", - "def print_solution(solver, visited_nodes, used_arcs, num_nodes):\n", + "def print_solution(\n", + " solver: cp_model.CpSolver,\n", + " visited_nodes: list[cp_model.IntVar],\n", + " used_arcs: dict[tuple[int, int], cp_model.IntVar],\n", + " num_nodes: int,\n", + ") -> None:\n", " \"\"\"Prints solution on console.\"\"\"\n", " # Display dropped nodes.\n", " dropped_nodes = \"Dropped nodes:\"\n", " for i in range(num_nodes):\n", " if i == 0:\n", " continue\n", - " if not solver.BooleanValue(visited_nodes[i]):\n", + " if not solver.boolean_value(visited_nodes[i]):\n", " dropped_nodes += f\" {i}({VISIT_VALUES[i]})\"\n", " print(dropped_nodes)\n", " # Display routes\n", @@ -162,7 +167,7 @@ " for node in range(num_nodes):\n", " if node == current_node:\n", " continue\n", - " if solver.BooleanValue(used_arcs[current_node, node]):\n", + " if solver.boolean_value(used_arcs[current_node, node]):\n", " route_distance += DISTANCE_MATRIX[current_node][node]\n", " current_node = node\n", " if current_node == 0:\n", @@ -170,7 +175,7 @@ " break\n", " plan_output += f\" {current_node}\\n\"\n", " plan_output += f\"Distance of the route: {route_distance}m\\n\"\n", - " plan_output += f\"Value collected: {value_collected}/{sum(VISIT_VALUES)}\\n\"\n", + " plan_output += f\"value collected: {value_collected}/{sum(VISIT_VALUES)}\\n\"\n", " print(plan_output)\n", "\n", "\n", @@ -191,8 +196,8 @@ " # Create the circuit constraint.\n", " arcs = []\n", " for i in all_nodes:\n", - " is_visited = model.NewBoolVar(f\"{i} is visited\")\n", - " arcs.append((i, i, is_visited.Not()))\n", + " is_visited = model.new_bool_var(f\"{i} is visited\")\n", + " arcs.append((i, i, ~is_visited))\n", "\n", " obj_vars.append(is_visited)\n", " obj_coeffs.append(VISIT_VALUES[i])\n", @@ -200,22 +205,22 @@ "\n", " for j in all_nodes:\n", " if i == j:\n", - " used_arcs[i, j] = is_visited.Not()\n", + " used_arcs[i, j] = ~is_visited\n", " continue\n", - " arc_is_used = model.NewBoolVar(f\"{j} follows {i}\")\n", + " arc_is_used = model.new_bool_var(f\"{j} follows {i}\")\n", " arcs.append((i, j, arc_is_used))\n", "\n", " obj_vars.append(arc_is_used)\n", " obj_coeffs.append(-DISTANCE_MATRIX[i][j])\n", " used_arcs[i, j] = arc_is_used\n", "\n", - " model.AddCircuit(arcs)\n", + " model.add_circuit(arcs)\n", "\n", " # Node 0 must be visited.\n", - " model.Add(visited_nodes[0] == 1)\n", + " model.add(visited_nodes[0] == 1)\n", "\n", " # limit the route distance\n", - " model.Add(\n", + " model.add(\n", " sum(\n", " used_arcs[i, j] * DISTANCE_MATRIX[i][j]\n", " for i in all_nodes\n", @@ -225,7 +230,7 @@ " )\n", "\n", " # Maximize visited node values minus the travelled distance.\n", - " model.Maximize(sum(obj_vars[i] * obj_coeffs[i] for i in range(len(obj_vars))))\n", + " model.maximize(sum(obj_vars[i] * obj_coeffs[i] for i in range(len(obj_vars))))\n", "\n", " # Solve and print out the solution.\n", " solver = cp_model.CpSolver()\n", @@ -234,7 +239,7 @@ " solver.parameters.num_search_workers = 8\n", " solver.parameters.log_search_progress = True\n", "\n", - " status = solver.Solve(model)\n", + " status = solver.solve(model)\n", " if status == cp_model.FEASIBLE or status == cp_model.OPTIMAL:\n", " print_solution(solver, visited_nodes, used_arcs, num_nodes)\n", "\n", diff --git a/examples/notebook/examples/prize_collecting_vrp.ipynb b/examples/notebook/examples/prize_collecting_vrp.ipynb index 9d2199ef035..59f667c0731 100644 --- 
a/examples/notebook/examples/prize_collecting_vrp.ipynb +++ b/examples/notebook/examples/prize_collecting_vrp.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/examples/prize_collecting_vrp_sat.ipynb b/examples/notebook/examples/prize_collecting_vrp_sat.ipynb index d5ff50aa6d1..e0565da8394 100644 --- a/examples/notebook/examples/prize_collecting_vrp_sat.ipynb +++ b/examples/notebook/examples/prize_collecting_vrp_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -139,7 +139,13 @@ "\n", "\n", "# Create a console solution printer.\n", - "def print_solution(solver, visited_nodes, used_arcs, num_nodes, num_vehicles):\n", + "def print_solution(\n", + " solver: cp_model.CpSolver,\n", + " visited_nodes: dict[int, list[cp_model.IntVar]],\n", + " used_arcs: dict[int, dict[tuple[int, int], cp_model.IntVar]],\n", + " num_nodes: int,\n", + " num_vehicles: int,\n", + ") -> None:\n", " \"\"\"Prints solution on console.\"\"\"\n", " # Display dropped nodes.\n", " dropped_nodes = \"Dropped nodes:\"\n", @@ -147,7 +153,7 @@ " if node == 0:\n", " continue\n", " is_visited = sum(\n", - " [solver.BooleanValue(visited_nodes[v][node]) for v in range(num_vehicles)]\n", + " [solver.boolean_value(visited_nodes[v][node]) for v in range(num_vehicles)]\n", " )\n", " if not is_visited:\n", " dropped_nodes += f\" {node}({VISIT_VALUES[node]})\"\n", @@ -168,7 +174,7 @@ " for node in range(num_nodes):\n", " if node == current_node:\n", " continue\n", - " if solver.BooleanValue(used_arcs[v][current_node, node]):\n", + " if solver.boolean_value(used_arcs[v][current_node, node]):\n", " route_distance += DISTANCE_MATRIX[current_node][node]\n", " current_node = node\n", " if current_node == 0:\n", @@ -176,12 +182,12 @@ " break\n", " plan_output += f\" {current_node}\\n\"\n", " plan_output += f\"Distance of the route: {route_distance}m\\n\"\n", - " plan_output += f\"Value collected: {value_collected}\\n\"\n", + " plan_output += f\"value collected: {value_collected}\\n\"\n", " print(plan_output)\n", " total_distance += route_distance\n", " total_value_collected += value_collected\n", " print(f\"Total Distance: {total_distance}m\")\n", - " print(f\"Total Value collected: {total_value_collected}/{sum(VISIT_VALUES)}\")\n", + " print(f\"Total value collected: {total_value_collected}/{sum(VISIT_VALUES)}\")\n", "\n", "\n", "def prize_collecting_vrp():\n", @@ -205,8 +211,8 @@ " used_arcs[v] = {}\n", " arcs = []\n", " for i in all_nodes:\n", - " is_visited = model.NewBoolVar(f\"{i} is visited\")\n", - " arcs.append((i, i, is_visited.Not()))\n", + " is_visited = model.new_bool_var(f\"{i} is visited\")\n", + " arcs.append((i, i, ~is_visited))\n", "\n", " obj_vars.append(is_visited)\n", " obj_coeffs.append(VISIT_VALUES[i])\n", @@ -214,22 +220,22 @@ "\n", " for j in all_nodes:\n", " if i == j:\n", - " used_arcs[v][i, j] = is_visited.Not()\n", + " used_arcs[v][i, j] = ~is_visited\n", " continue\n", - " arc_is_used = model.NewBoolVar(f\"{j} follows {i}\")\n", + " arc_is_used = model.new_bool_var(f\"{j} follows {i}\")\n", " arcs.append((i, j, arc_is_used))\n", "\n", " obj_vars.append(arc_is_used)\n", " obj_coeffs.append(-DISTANCE_MATRIX[i][j])\n", " used_arcs[v][i, j] = arc_is_used\n", "\n", - " model.AddCircuit(arcs)\n", + " model.add_circuit(arcs)\n", "\n", " # Node 0 must be visited.\n", - " 
model.Add(visited_nodes[v][0] == 1)\n", + " model.add(visited_nodes[v][0] == 1)\n", "\n", " # limit the route distance\n", - " model.Add(\n", + " model.add(\n", " sum(\n", " used_arcs[v][i, j] * DISTANCE_MATRIX[i][j]\n", " for i in all_nodes\n", @@ -240,10 +246,10 @@ "\n", " # Each node is visited at most once\n", " for node in range(1, num_nodes):\n", - " model.AddAtMostOne([visited_nodes[v][node] for v in range(num_vehicles)])\n", + " model.add_at_most_one([visited_nodes[v][node] for v in range(num_vehicles)])\n", "\n", " # Maximize visited node values minus the travelled distance.\n", - " model.Maximize(sum(obj_vars[i] * obj_coeffs[i] for i in range(len(obj_vars))))\n", + " model.maximize(sum(obj_vars[i] * obj_coeffs[i] for i in range(len(obj_vars))))\n", "\n", " # Solve and print out the solution.\n", " solver = cp_model.CpSolver()\n", @@ -251,7 +257,7 @@ " solver.parameters.max_time_in_seconds = 15.0\n", " solver.parameters.log_search_progress = True\n", "\n", - " status = solver.Solve(model)\n", + " status = solver.solve(model)\n", " if status == cp_model.FEASIBLE or status == cp_model.OPTIMAL:\n", " print_solution(solver, visited_nodes, used_arcs, num_nodes, num_vehicles)\n", "\n", diff --git a/examples/notebook/examples/proto_solve.ipynb b/examples/notebook/examples/proto_solve.ipynb index fa1efb7b530..8ef8b466e28 100644 --- a/examples/notebook/examples/proto_solve.ipynb +++ b/examples/notebook/examples/proto_solve.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -94,7 +94,7 @@ "\n", " # Create solver.\n", " solver = model_builder.ModelSolver(_SOLVER.value)\n", - " if not solver:\n", + " if not solver.solver_is_supported():\n", " print(f'Cannot create solver with name \\'{_SOLVER.value}\\'')\n", " return\n", "\n", @@ -109,7 +109,8 @@ " solver.solve(model)\n", "\n", "\n", - "main()\n" + "main()\n", + "\n" ] } ], diff --git a/examples/notebook/examples/pyflow_example.ipynb b/examples/notebook/examples/pyflow_example.ipynb index 920b6a6e6e7..d90c8fc9926 100644 --- a/examples/notebook/examples/pyflow_example.ipynb +++ b/examples/notebook/examples/pyflow_example.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/examples/qubo_sat.ipynb b/examples/notebook/examples/qubo_sat.ipynb index 9b160b3a57f..23720414661 100644 --- a/examples/notebook/examples/qubo_sat.ipynb +++ b/examples/notebook/examples/qubo_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
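Both prize-collecting rewrites above lean on the same CP-SAT idiom: add_circuit takes (tail, head, literal) arcs, and giving every node a self-loop guarded by the negation of its visit literal is what makes nodes optional. A minimal sketch of that pattern in the new snake_case API (the three-node distance matrix is invented for illustration):

from ortools.sat.python import cp_model

DIST = [[0, 2, 9], [2, 0, 4], [9, 4, 0]]  # toy distances

model = cp_model.CpModel()
num_nodes = len(DIST)
visited = [model.new_bool_var(f"visit_{i}") for i in range(num_nodes)]
arcs = []
terms = []
for i in range(num_nodes):
    # A true self-loop means node i is skipped by the circuit.
    arcs.append((i, i, ~visited[i]))
    for j in range(num_nodes):
        if i != j:
            lit = model.new_bool_var(f"{j} follows {i}")
            arcs.append((i, j, lit))
            terms.append(lit * DIST[i][j])
model.add_circuit(arcs)
model.add(visited[0] == 1)  # the depot stays mandatory, as in both examples
model.minimize(sum(terms))

solver = cp_model.CpSolver()
if solver.solve(model) in (cp_model.OPTIMAL, cp_model.FEASIBLE):
    print([i for i in range(num_nodes) if solver.boolean_value(visited[i])])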
] }, { @@ -83,10 +83,10 @@ "metadata": {}, "outputs": [], "source": [ - "from typing import Sequence\n", + "from typing import List, Sequence\n", "from ortools.sat.python import cp_model\n", "\n", - "RAW_DATA = [\n", + "RAW_DATA: List[List[float]] = [\n", " # fmt:off\n", " [\n", " 0, 0, 49.774821, -59.5968886, -46.0773896, 0, -65.166109, 0, 0, 0, 0, 0,\n", @@ -720,15 +720,15 @@ "]\n", "\n", "\n", - "def solve_qubo():\n", - " \"\"\"Solve the Qubo problem.\"\"\"\n", + "def solve_qubo() -> None:\n", + " \"\"\"solve the Qubo problem.\"\"\"\n", "\n", - " # Constraint programming engine\n", + " # Build the model.\n", " model = cp_model.CpModel()\n", "\n", " num_vars = len(RAW_DATA)\n", " all_vars = range(num_vars)\n", - " variables = [model.NewBoolVar(\"x_%i\" % i) for i in all_vars]\n", + " variables = [model.new_bool_var(\"x_%i\" % i) for i in all_vars]\n", "\n", " obj_vars = []\n", " obj_coeffs = []\n", @@ -740,10 +740,10 @@ " if coeff == 0.0:\n", " continue\n", " x_j = variables[j]\n", - " var = model.NewBoolVar(\"\")\n", - " model.AddBoolOr([x_i.Not(), x_j.Not(), var])\n", - " model.AddImplication(var, x_i)\n", - " model.AddImplication(var, x_j)\n", + " var = model.new_bool_var(\"\")\n", + " model.add_bool_or([~x_i, ~x_j, var])\n", + " model.add_implication(var, x_i)\n", + " model.add_implication(var, x_j)\n", " obj_vars.append(var)\n", " obj_coeffs.append(coeff)\n", "\n", @@ -753,14 +753,14 @@ " obj_vars.append(variables[i])\n", " obj_coeffs.append(self_coeff)\n", "\n", - " model.Minimize(sum(obj_vars[i] * obj_coeffs[i] for i in range(len(obj_vars))))\n", + " model.minimize(sum(obj_vars[i] * obj_coeffs[i] for i in range(len(obj_vars))))\n", "\n", " ### Solve model.\n", " solver = cp_model.CpSolver()\n", " solver.parameters.num_search_workers = 16\n", " solver.parameters.log_search_progress = True\n", " solver.parameters.max_time_in_seconds = 30\n", - " solver.Solve(model)\n", + " solver.solve(model)\n", "\n", "\n", "def main(argv: Sequence[str]) -> None:\n", diff --git a/examples/notebook/examples/random_tsp.ipynb b/examples/notebook/examples/random_tsp.ipynb index 42c6be37769..179aad74771 100644 --- a/examples/notebook/examples/random_tsp.ipynb +++ b/examples/notebook/examples/random_tsp.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/examples/rcpsp_sat.ipynb b/examples/notebook/examples/rcpsp_sat.ipynb index f2efe0e1ee8..ee96bdd88b3 100644 --- a/examples/notebook/examples/rcpsp_sat.ipynb +++ b/examples/notebook/examples/rcpsp_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
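Worth spelling out from the qubo_sat hunk above: each quadratic term x_i * x_j is linearized with one auxiliary Boolean forced to equal the conjunction, via a single clause plus two implications. The same three constraints in isolation, verified by enumerating the truth table:

from ortools.sat.python import cp_model

model = cp_model.CpModel()
x_i = model.new_bool_var("x_i")
x_j = model.new_bool_var("x_j")
prod = model.new_bool_var("prod")  # will equal x_i AND x_j

model.add_bool_or([~x_i, ~x_j, prod])  # x_i and x_j  =>  prod
model.add_implication(prod, x_i)       # prod  =>  x_i
model.add_implication(prod, x_j)       # prod  =>  x_j

class TruthTable(cp_model.CpSolverSolutionCallback):
    def __init__(self) -> None:
        cp_model.CpSolverSolutionCallback.__init__(self)

    def on_solution_callback(self) -> None:
        print(self.boolean_value(x_i), self.boolean_value(x_j),
              self.boolean_value(prod))

solver = cp_model.CpSolver()
solver.parameters.enumerate_all_solutions = True
solver.solve(model, TruthTable())  # prints exactly the four AND rows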
] }, { @@ -80,7 +80,6 @@ "\n", "Data use in flags:\n", " http://www.om-db.wi.tum.de/psplib/data.html\n", - "\n", "\n" ] }, @@ -92,6 +91,8 @@ "outputs": [], "source": [ "import collections\n", + "import time\n", + "from typing import Optional\n", "\n", "from ortools.sat.colab import flags\n", "from google.protobuf import text_format\n", @@ -113,13 +114,13 @@ "_ADD_REDUNDANT_ENERGETIC_CONSTRAINTS = flags.define_bool(\n", " \"add_redundant_energetic_constraints\",\n", " False,\n", - " \"Add redundant energetic constraints on the pairs of tasks extracted from\"\n", + " \"add redundant energetic constraints on the pairs of tasks extracted from\"\n", " + \" precedence graph.\",\n", ")\n", "_DELAY_TIME_LIMIT = flags.define_float(\n", - " \"delay_time_limit\",\n", - " 20.0,\n", - " \"Time limit when computing min delay between tasks.\"\n", + " \"pairwise_delay_total_time_limit\",\n", + " 120.0,\n", + " \"Total time limit when computing min delay between tasks.\"\n", " + \" A non-positive time limit disable min delays computation.\",\n", ")\n", "_PREEMPTIVE_LB_TIME_LIMIT = flags.define_float(\n", @@ -130,7 +131,7 @@ ")\n", "\n", "\n", - "def PrintProblemStatistics(problem):\n", + "def print_problem_statistics(problem: rcpsp_pb2.RcpspProblem):\n", " \"\"\"Display various statistics on the problem.\"\"\"\n", "\n", " # Determine problem type.\n", @@ -175,7 +176,9 @@ " print(f\" - {tasks_with_delay} tasks with successor delays\")\n", "\n", "\n", - "def AnalyseDependencyGraph(problem):\n", + "def analyse_dependency_graph(\n", + " problem: rcpsp_pb2.RcpspProblem,\n", + ") -> tuple[list[tuple[int, int, list[int]]], dict[int, list[int]]]:\n", " \"\"\"Analyses the dependency graph to improve the model.\n", "\n", " Args:\n", @@ -211,7 +214,7 @@ " # Search for pair of tasks, containing at least two parallel branch between\n", " # them in the precedence graph.\n", " num_candidates = 0\n", - " result = []\n", + " result: list[tuple[int, int, list[int]]] = []\n", " for source, start_outs in outs.items():\n", " if len(start_outs) <= 1:\n", " # Starting with the unique successor of source will be as good.\n", @@ -244,27 +247,27 @@ " result.append((source, sink, common))\n", "\n", " # Sort entries lexicographically by (len(common), source, sink)\n", - " def Price(entry):\n", + " def price(entry):\n", " return num_nodes * num_nodes * len(entry[2]) + num_nodes * entry[0] + entry[1]\n", "\n", - " result.sort(key=Price)\n", + " result.sort(key=price)\n", " print(f\" - created {len(result)} pairs of nodes to examine\", flush=True)\n", " return result, after\n", "\n", "\n", - "def SolveRcpsp(\n", - " problem,\n", - " proto_file,\n", - " params,\n", - " active_tasks,\n", - " source,\n", - " sink,\n", - " intervals_of_tasks,\n", - " delays,\n", - " in_main_solve=False,\n", - " initial_solution=None,\n", - " lower_bound=0,\n", - "):\n", + "def solve_rcpsp(\n", + " problem: rcpsp_pb2.RcpspProblem,\n", + " proto_file: str,\n", + " params: str,\n", + " active_tasks: set[int],\n", + " source: int,\n", + " sink: int,\n", + " intervals_of_tasks: list[tuple[int, int, list[int]]],\n", + " delays: dict[tuple[int, int], tuple[int, int]],\n", + " in_main_solve: bool = False,\n", + " initial_solution: Optional[rcpsp_pb2.RcpspAssignment] = None,\n", + " lower_bound: int = 0,\n", + ") -> tuple[int, int, Optional[rcpsp_pb2.RcpspAssignment]]:\n", " \"\"\"Parse and solve a given RCPSP problem in proto format.\n", "\n", " The model will only look at the tasks {source} + {sink} + active_tasks, and\n", @@ -291,7 +294,7 @@ " \"\"\"\n", " # 
Create the model.\n", " model = cp_model.CpModel()\n", - " model.SetName(problem.name)\n", + " model.name = problem.name\n", "\n", " num_resources = len(problem.resources)\n", "\n", @@ -336,16 +339,16 @@ " num_recipes = len(task.recipes)\n", " all_recipes = range(num_recipes)\n", "\n", - " start_var = model.NewIntVar(0, horizon, f\"start_of_task_{t}\")\n", - " end_var = model.NewIntVar(0, horizon, f\"end_of_task_{t}\")\n", + " start_var = model.new_int_var(0, horizon, f\"start_of_task_{t}\")\n", + " end_var = model.new_int_var(0, horizon, f\"end_of_task_{t}\")\n", "\n", " literals = []\n", " if num_recipes > 1:\n", " # Create one literal per recipe.\n", - " literals = [model.NewBoolVar(f\"is_present_{t}_{r}\") for r in all_recipes]\n", + " literals = [model.new_bool_var(f\"is_present_{t}_{r}\") for r in all_recipes]\n", "\n", " # Exactly one recipe must be performed.\n", - " model.AddExactlyOne(literals)\n", + " model.add_exactly_one(literals)\n", "\n", " else:\n", " literals = [1]\n", @@ -360,19 +363,19 @@ " demand_matrix[(resource, recipe_index)] = demand\n", "\n", " # Create the duration variable from the accumulated durations.\n", - " duration_var = model.NewIntVarFromDomain(\n", - " cp_model.Domain.FromValues(task_to_recipe_durations[t]),\n", + " duration_var = model.new_int_var_from_domain(\n", + " cp_model.Domain.from_values(task_to_recipe_durations[t]),\n", " f\"duration_of_task_{t}\",\n", " )\n", "\n", " # Link the recipe literals and the duration_var.\n", " for r in range(num_recipes):\n", - " model.Add(duration_var == task_to_recipe_durations[t][r]).OnlyEnforceIf(\n", + " model.add(duration_var == task_to_recipe_durations[t][r]).only_enforce_if(\n", " literals[r]\n", " )\n", "\n", " # Create the interval of the task.\n", - " task_interval = model.NewIntervalVar(\n", + " task_interval = model.new_interval_var(\n", " start_var, duration_var, end_var, f\"task_interval_{t}\"\n", " )\n", "\n", @@ -387,14 +390,14 @@ " for res in all_resources:\n", " demands = [demand_matrix[(res, recipe)] for recipe in all_recipes]\n", " task_resource_to_fixed_demands[(t, res)] = demands\n", - " demand_var = model.NewIntVarFromDomain(\n", - " cp_model.Domain.FromValues(demands), f\"demand_{t}_{res}\"\n", + " demand_var = model.new_int_var_from_domain(\n", + " cp_model.Domain.from_values(demands), f\"demand_{t}_{res}\"\n", " )\n", " task_to_resource_demands[t].append(demand_var)\n", "\n", " # Link the recipe literals and the demand_var.\n", " for r in all_recipes:\n", - " model.Add(demand_var == demand_matrix[(res, r)]).OnlyEnforceIf(\n", + " model.add(demand_var == demand_matrix[(res, r)]).only_enforce_if(\n", " literals[r]\n", " )\n", "\n", @@ -415,10 +418,13 @@ " )\n", "\n", " # Create makespan variable\n", - " makespan = model.NewIntVar(lower_bound, horizon, \"makespan\")\n", - " makespan_size = model.NewIntVar(1, horizon, \"interval_makespan_size\")\n", - " interval_makespan = model.NewIntervalVar(\n", - " makespan, makespan_size, model.NewConstant(horizon + 1), \"interval_makespan\"\n", + " makespan = model.new_int_var(lower_bound, horizon, \"makespan\")\n", + " makespan_size = model.new_int_var(1, horizon, \"interval_makespan_size\")\n", + " interval_makespan = model.new_interval_var(\n", + " makespan,\n", + " makespan_size,\n", + " model.new_constant(horizon + 1),\n", + " \"interval_makespan\",\n", " )\n", "\n", " # Add precedences.\n", @@ -437,21 +443,21 @@ " p1 = task_to_presence_literals[task_id][m1]\n", " if next_id == sink:\n", " delay = delay_matrix.recipe_delays[m1].min_delays[0]\n", - " 
model.Add(s1 + delay <= makespan).OnlyEnforceIf(p1)\n", + " model.add(s1 + delay <= makespan).only_enforce_if(p1)\n", " else:\n", " for m2 in range(num_next_modes):\n", " delay = delay_matrix.recipe_delays[m1].min_delays[m2]\n", " s2 = task_starts[next_id]\n", " p2 = task_to_presence_literals[next_id][m2]\n", - " model.Add(s1 + delay <= s2).OnlyEnforceIf([p1, p2])\n", + " model.add(s1 + delay <= s2).only_enforce_if([p1, p2])\n", " else:\n", " # Normal dependencies (task ends before the start of successors).\n", " for t in all_active_tasks:\n", " for n in problem.tasks[t].successors:\n", " if n == sink:\n", - " model.Add(task_ends[t] <= makespan)\n", + " model.add(task_ends[t] <= makespan)\n", " elif n in active_tasks:\n", - " model.Add(task_ends[t] <= task_starts[n])\n", + " model.add(task_ends[t] <= task_starts[n])\n", "\n", " # Containers for resource investment problems.\n", " capacities = [] # Capacity variables for all resources.\n", @@ -471,8 +477,8 @@ " demands = [task_to_resource_demands[t][res] for t in all_active_tasks]\n", "\n", " if problem.is_resource_investment:\n", - " capacity = model.NewIntVar(0, c, f\"capacity_of_{res}\")\n", - " model.AddCumulative(intervals, demands, capacity)\n", + " capacity = model.new_int_var(0, c, f\"capacity_of_{res}\")\n", + " model.add_cumulative(intervals, demands, capacity)\n", " capacities.append(capacity)\n", " max_cost += c * resource.unit_cost\n", " else: # Standard renewable resource.\n", @@ -480,7 +486,7 @@ " intervals.append(interval_makespan)\n", " demands.append(c)\n", "\n", - " model.AddCumulative(intervals, demands, c)\n", + " model.add_cumulative(intervals, demands, c)\n", " else: # Non empty non renewable resource. (single mode only)\n", " if problem.is_consumer_producer:\n", " reservoir_starts = []\n", @@ -491,15 +497,15 @@ " reservoir_demands.append(\n", " task_resource_to_fixed_demands[(t, res)][0]\n", " )\n", - " model.AddReservoirConstraint(\n", + " model.add_reservoir_constraint(\n", " reservoir_starts,\n", " reservoir_demands,\n", " resource.min_capacity,\n", " resource.max_capacity,\n", " )\n", " else: # No producer-consumer. 
We just sum the demands.\n", - " model.add(\n", - " cp_model.LinearExpr.Sum(\n", " [task_to_resource_demands[t][res] for t in all_active_tasks]\n", " )\n", " <= c\n", @@ -507,8 +513,8 @@ "\n", " # Objective.\n", " if problem.is_resource_investment:\n", - " objective = model.NewIntVar(0, max_cost, \"capacity_costs\")\n", - " model.Add(\n", + " objective = model.new_int_var(0, max_cost, \"capacity_costs\")\n", + " model.add(\n", " objective\n", " == sum(\n", " problem.resources[i].unit_cost * capacities[i]\n", @@ -518,17 +524,17 @@ " else:\n", " objective = makespan\n", "\n", - " model.Minimize(objective)\n", + " model.minimize(objective)\n", "\n", " # Add min delay constraints.\n", " if delays is not None:\n", " for (local_start, local_end), (min_delay, _) in delays.items():\n", " if local_start == source and local_end in active_tasks:\n", - " model.Add(task_starts[local_end] >= min_delay)\n", + " model.add(task_starts[local_end] >= min_delay)\n", " elif local_start in active_tasks and local_end == sink:\n", - " model.Add(makespan >= task_ends[local_start] + min_delay)\n", + " model.add(makespan >= task_ends[local_start] + min_delay)\n", " elif local_start in active_tasks and local_end in active_tasks:\n", - " model.Add(task_starts[local_end] >= task_ends[local_start] + min_delay)\n", + " model.add(task_starts[local_end] >= task_ends[local_start] + min_delay)\n", "\n", " problem_is_single_mode = True\n", " for t in all_active_tasks:\n", @@ -571,7 +577,7 @@ " if sum_of_max_energies <= c * min_delay:\n", " ignored_constraits += 1\n", " continue\n", - " model.Add(\n", + " model.add(\n", " c * (task_starts[local_end] - task_ends[local_start])\n", " >= sum(task_resource_to_energy[(t, res)] for t in common)\n", " )\n", @@ -585,15 +591,15 @@ " # Add solution hint.\n", " if initial_solution:\n", " for t in all_active_tasks:\n", - " model.AddHint(task_starts[t], initial_solution.start_of_task[t])\n", + " model.add_hint(task_starts[t], initial_solution.start_of_task[t])\n", " if len(task_to_presence_literals[t]) > 1:\n", " selected = initial_solution.selected_recipe_of_task[t]\n", - " model.AddHint(task_to_presence_literals[t][selected], 1)\n", + " model.add_hint(task_to_presence_literals[t][selected], 1)\n", "\n", " # Write model to file.\n", " if proto_file:\n", " print(f\"Writing proto to {proto_file}\")\n", - " model.ExportToFile(proto_file)\n", + " model.export_to_file(proto_file)\n", "\n", " # Solve model.\n", " solver = cp_model.CpSolver()\n", @@ -615,28 +621,35 @@ " if in_main_solve:\n", " solver.parameters.log_search_progress = True\n", " #\n", - " status = solver.Solve(model)\n", + " status = solver.solve(model)\n", " if status == cp_model.OPTIMAL or status == cp_model.FEASIBLE:\n", " assignment = rcpsp_pb2.RcpspAssignment()\n", " for t, _ in enumerate(problem.tasks):\n", " if t in task_starts:\n", - " assignment.start_of_task.append(solver.Value(task_starts[t]))\n", + " assignment.start_of_task.append(solver.value(task_starts[t]))\n", " for r, recipe_literal in enumerate(task_to_presence_literals[t]):\n", - " if solver.BooleanValue(recipe_literal):\n", + " if solver.boolean_value(recipe_literal):\n", " assignment.selected_recipe_of_task.append(r)\n", " break\n", " else: # t is not an active task.\n", " assignment.start_of_task.append(0)\n", " assignment.selected_recipe_of_task.append(0)\n", " return (\n", - " int(solver.BestObjectiveBound()),\n", - " int(solver.ObjectiveValue()),\n", + " int(solver.best_objective_bound),\n", + " 
int(solver.objective_value),\n", " assignment,\n", " )\n", " return -1, -1, None\n", "\n", "\n", - "def ComputeDelaysBetweenNodes(problem, task_intervals):\n", + "def compute_delays_between_nodes(\n", + " problem: rcpsp_pb2.RcpspProblem,\n", + " task_intervals: list[tuple[int, int, list[int]]],\n", + ") -> tuple[\n", + " dict[tuple[int, int], tuple[int, int]],\n", + " Optional[rcpsp_pb2.RcpspAssignment],\n", + " bool,\n", + "]:\n", " \"\"\"Computes the min delays between all pairs of tasks in 'task_intervals'.\n", "\n", " Args:\n", @@ -656,21 +669,30 @@ " ):\n", " return delays, None, False\n", "\n", + " time_limit = _DELAY_TIME_LIMIT.value\n", " complete_problem_assignment = None\n", " num_optimal_delays = 0\n", " num_delays_not_found = 0\n", " optimal_found = True\n", " for start_task, end_task, active_tasks in task_intervals:\n", - " min_delay, feasible_delay, assignment = SolveRcpsp(\n", + " if time_limit <= 0:\n", + " optimal_found = False\n", + " print(f\" - #timeout ({_DELAY_TIME_LIMIT.value}s) reached\", flush=True)\n", + " break\n", + "\n", + " start_time = time.time()\n", + " min_delay, feasible_delay, assignment = solve_rcpsp(\n", " problem,\n", " \"\",\n", - " f\"num_search_workers:16,max_time_in_seconds:{_DELAY_TIME_LIMIT.value}\",\n", - " active_tasks,\n", + " f\"num_search_workers:16,max_time_in_seconds:{time_limit}\",\n", + " set(active_tasks),\n", " start_task,\n", " end_task,\n", " [],\n", " delays,\n", " )\n", + " time_limit -= time.time() - start_time\n", + "\n", " if min_delay != -1:\n", " delays[(start_task, end_task)] = min_delay, feasible_delay\n", " if start_task == 0 and end_task == len(problem.tasks) - 1:\n", @@ -690,7 +712,13 @@ " return delays, complete_problem_assignment, optimal_found\n", "\n", "\n", - "def AcceptNewCandidate(problem, after, demand_map, current, candidate):\n", + "def accept_new_candidate(\n", + " problem: rcpsp_pb2.RcpspProblem,\n", + " after: dict[int, list[int]],\n", + " demand_map: dict[tuple[int, int], int],\n", + " current: list[int],\n", + " candidate: int,\n", + ") -> bool:\n", " \"\"\"Check if candidate is compatible with the tasks in current.\"\"\"\n", " for c in current:\n", " if candidate in after[c] or c in after[candidate]:\n", @@ -710,7 +738,11 @@ " return True\n", "\n", "\n", - "def ComputePreemptiveLowerBound(problem, after, lower_bound):\n", + "def compute_preemptive_lower_bound(\n", + " problem: rcpsp_pb2.RcpspProblem,\n", + " after: dict[int, list[int]],\n", + " lower_bound: int,\n", + ") -> int:\n", " \"\"\"Computes a preemtive lower bound for the makespan statically.\n", "\n", " For this, it breaks all intervals into a set of intervals of size one.\n", @@ -763,7 +795,7 @@ " new_combinations = [[t]]\n", "\n", " for c in all_combinations:\n", - " if AcceptNewCandidate(problem, after, demand_map, c, t):\n", + " if accept_new_candidate(problem, after, demand_map, c, t):\n", " new_combinations.append(c + [t])\n", "\n", " all_combinations.extend(new_combinations)\n", @@ -772,14 +804,14 @@ " if len(all_combinations) > 5000000:\n", " return lower_bound # Abort if too large.\n", "\n", - " # Solve the selection model.\n", + " # solve the selection model.\n", "\n", " # TODO(user): a few possible improvements:\n", " # 1/ use \"dominating\" columns, i.e. 
if you can add a task to a column, then\n", " # do not use that column.\n", " # 2/ Merge all task with exactly same demands into one.\n", " model = cp_model.CpModel()\n", - " model.SetName(f\"lower_bound_{problem.name}\")\n", + " model.name = f\"lower_bound_{problem.name}\"\n", "\n", " vars_per_task = collections.defaultdict(list)\n", " all_vars = []\n", @@ -787,29 +819,29 @@ " min_duration = max_duration\n", " for t in c:\n", " min_duration = min(min_duration, duration_map[t])\n", - " count = model.NewIntVar(0, min_duration, f\"count_{c}\")\n", + " count = model.new_int_var(0, min_duration, f\"count_{c}\")\n", " all_vars.append(count)\n", " for t in c:\n", " vars_per_task[t].append(count)\n", "\n", " # Each task must be performed.\n", " for t in all_active_tasks:\n", - " model.Add(sum(vars_per_task[t]) >= duration_map[t])\n", + " model.add(sum(vars_per_task[t]) >= duration_map[t])\n", "\n", " # Objective\n", - " objective_var = model.NewIntVar(lower_bound, sum_of_demands, \"objective_var\")\n", - " model.Add(objective_var == sum(all_vars))\n", + " objective_var = model.new_int_var(lower_bound, sum_of_demands, \"objective_var\")\n", + " model.add(objective_var == sum(all_vars))\n", "\n", - " model.Minimize(objective_var)\n", + " model.minimize(objective_var)\n", "\n", - " # Solve model.\n", + " # solve model.\n", " solver = cp_model.CpSolver()\n", " solver.parameters.num_search_workers = 16\n", " solver.parameters.max_time_in_seconds = _PREEMPTIVE_LB_TIME_LIMIT.value\n", - " status = solver.Solve(model)\n", + " status = solver.solve(model)\n", " if status == cp_model.OPTIMAL or status == cp_model.FEASIBLE:\n", " status_str = \"optimal\" if status == cp_model.OPTIMAL else \"\"\n", - " lower_bound = max(lower_bound, int(solver.BestObjectiveBound()))\n", + " lower_bound = max(lower_bound, int(solver.best_objective_bound))\n", " print(f\" - {status_str} static lower bound = {lower_bound}\", flush=True)\n", "\n", " return lower_bound\n", @@ -820,10 +852,10 @@ " rcpsp_parser.parse_file(_INPUT.value)\n", "\n", " problem = rcpsp_parser.problem()\n", - " PrintProblemStatistics(problem)\n", + " print_problem_statistics(problem)\n", "\n", - " intervals_of_tasks, after = AnalyseDependencyGraph(problem)\n", - " delays, initial_solution, optimal_found = ComputeDelaysBetweenNodes(\n", + " intervals_of_tasks, after = analyse_dependency_graph(problem)\n", + " delays, initial_solution, optimal_found = compute_delays_between_nodes(\n", " problem, intervals_of_tasks\n", " )\n", "\n", @@ -831,9 +863,9 @@ " key = (0, last_task)\n", " lower_bound = delays[key][0] if key in delays else 0\n", " if not optimal_found and _PREEMPTIVE_LB_TIME_LIMIT.value > 0.0:\n", - " lower_bound = ComputePreemptiveLowerBound(problem, after, lower_bound)\n", + " lower_bound = compute_preemptive_lower_bound(problem, after, lower_bound)\n", "\n", - " SolveRcpsp(\n", + " solve_rcpsp(\n", " problem=problem,\n", " proto_file=_OUTPUT_PROTO.value,\n", " params=_PARAMS.value,\n", diff --git a/examples/notebook/examples/reallocate_sat.ipynb b/examples/notebook/examples/reallocate_sat.ipynb index cf54de82091..3a1aa56091d 100644 --- a/examples/notebook/examples/reallocate_sat.ipynb +++ b/examples/notebook/examples/reallocate_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
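The preemptive lower bound in the rcpsp hunk above is a covering model: one counter per feasible set of pairwise-compatible tasks says how many unit time slices that set occupies, each task must accumulate at least its duration over the sets containing it, and the minimized total is a valid makespan bound because allowing preemption only relaxes the original problem. A toy instance of the same model (durations and compatible sets invented for illustration):

from ortools.sat.python import cp_model

durations = {0: 3, 1: 2, 2: 4}
# Sets of tasks allowed to run in parallel (toy data; the real code
# enumerates them with accept_new_candidate).
combos = [[0], [1], [2], [0, 1], [1, 2]]

model = cp_model.CpModel()
horizon = sum(durations.values())
counts = [model.new_int_var(0, horizon, f"count_{c}") for c in combos]
for task, duration in durations.items():
    model.add(sum(counts[i] for i, c in enumerate(combos) if task in c)
              >= duration)
model.minimize(sum(counts))

solver = cp_model.CpSolver()
if solver.solve(model) == cp_model.OPTIMAL:
    print("preemptive makespan lower bound:", int(solver.objective_value))  # 7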
] }, { @@ -89,7 +89,6 @@ "\n", "\n", "def main():\n", - "\n", " # Data\n", " data_0 = [\n", " [107, 107, 107, 0, 0], # pr1\n", @@ -105,7 +104,7 @@ " [298836792, 0, 0, 0],\n", " [3713428, 4118530, 4107277, 3072018],\n", " [6477273, 7183884, 5358471, 0],\n", - " [1485371, 1647412, 1642911, 1228807]\n", + " [1485371, 1647412, 1642911, 1228807],\n", " ]\n", "\n", " data_2 = [\n", @@ -115,7 +114,7 @@ " [2988367, 0, 0, 0],\n", " [37134, 41185, 41072, 30720],\n", " [64772, 71838, 53584, 0],\n", - " [14853, 16474, 16429, 12288]\n", + " [14853, 16474, 16429, 12288],\n", " ]\n", "\n", " pr = data_0\n", @@ -129,7 +128,7 @@ " model = cp_model.CpModel()\n", "\n", " # Variables\n", - " delta = model.NewIntVar(0, total, 'delta')\n", + " delta = model.NewIntVar(0, total, \"delta\")\n", "\n", " contributions_per_years = collections.defaultdict(list)\n", " contributions_per_prs = collections.defaultdict(list)\n", @@ -138,14 +137,12 @@ " for p, inner_l in enumerate(pr):\n", " for y, item in enumerate(inner_l):\n", " if item != 0:\n", - " contrib = model.NewIntVar(0, total, 'r%d c%d' % (p, y))\n", + " contrib = model.NewIntVar(0, total, \"r%d c%d\" % (p, y))\n", " contributions_per_years[y].append(contrib)\n", " contributions_per_prs[p].append(contrib)\n", " all_contribs[p, y] = contrib\n", "\n", - " year_var = [\n", - " model.NewIntVar(0, total, 'y[%i]' % i) for i in range(num_years)\n", - " ]\n", + " year_var = [model.NewIntVar(0, total, \"y[%i]\" % i) for i in range(num_years)]\n", "\n", " # Constraints\n", "\n", @@ -173,32 +170,32 @@ "\n", " # Output solution.\n", " if status == cp_model.OPTIMAL:\n", - " print('Data')\n", - " print(' - total = ', total)\n", - " print(' - year_average = ', avg)\n", - " print(' - number of projects = ', num_pr)\n", - " print(' - number of years = ', num_years)\n", + " print(\"Data\")\n", + " print(\" - total = \", total)\n", + " print(\" - year_average = \", avg)\n", + " print(\" - number of projects = \", num_pr)\n", + " print(\" - number of years = \", num_years)\n", "\n", - " print(' - input production')\n", + " print(\" - input production\")\n", " for p in range(num_pr):\n", " for y in range(num_years):\n", " if pr[p][y] == 0:\n", - " print(' ', end='')\n", + " print(\" \", end=\"\")\n", " else:\n", - " print('%10i' % pr[p][y], end='')\n", + " print(\"%10i\" % pr[p][y], end=\"\")\n", " print()\n", "\n", - " print('Solution')\n", + " print(\"Solution\")\n", " for p in range(num_pr):\n", " for y in range(num_years):\n", " if pr[p][y] == 0:\n", - " print(' ', end='')\n", + " print(\" \", end=\"\")\n", " else:\n", - " print('%10i' % solver.Value(all_contribs[p, y]), end='')\n", + " print(\"%10i\" % solver.Value(all_contribs[p, y]), end=\"\")\n", " print()\n", "\n", " for y in range(num_years):\n", - " print('%10i' % solver.Value(year_var[y]), end='')\n", + " print(\"%10i\" % solver.Value(year_var[y]), end=\"\")\n", " print()\n", "\n", "\n", diff --git a/examples/notebook/examples/shift_scheduling_sat.ipynb b/examples/notebook/examples/shift_scheduling_sat.ipynb index 8d715fd8e7c..68ae241e11f 100644 --- a/examples/notebook/examples/shift_scheduling_sat.ipynb +++ b/examples/notebook/examples/shift_scheduling_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { @@ -84,8 +84,8 @@ "outputs": [], "source": [ "from ortools.sat.colab import flags\n", - "from ortools.sat.python import cp_model\n", "from google.protobuf import text_format\n", + "from ortools.sat.python import cp_model\n", "\n", "_OUTPUT_PROTO = flags.define_string(\n", " \"output_proto\", \"\", \"Output file to write the cp_model proto to.\"\n", ")\n", @@ -95,7 +95,9 @@ ")\n", "\n", "\n", - "def negated_bounded_span(works, start, length):\n", + "def negated_bounded_span(\n", + " works: list[cp_model.BoolVarT], start: int, length: int\n", + ") -> list[cp_model.BoolVarT]:\n", " \"\"\"Filters an isolated sub-sequence of variables assigned to True.\n", "\n", " Extract the span of Boolean variables [start, start + length), negate them,\n", @@ -113,20 +115,28 @@ " or by the start or end of works.\n", " \"\"\"\n", " sequence = []\n", - " # Left border (start of works, or works[start - 1])\n", + " # left border (start of works, or works[start - 1])\n", " if start > 0:\n", " sequence.append(works[start - 1])\n", " for i in range(length):\n", - " sequence.append(works[start + i].Not())\n", - " # Right border (end of works or works[start + length])\n", + " sequence.append(~works[start + i])\n", + " # right border (end of works or works[start + length])\n", " if start + length < len(works):\n", " sequence.append(works[start + length])\n", " return sequence\n", "\n", "\n", "def add_soft_sequence_constraint(\n", - " model, works, hard_min, soft_min, min_cost, soft_max, hard_max, max_cost, prefix\n", - "):\n", + " model: cp_model.CpModel,\n", + " works: list[cp_model.BoolVarT],\n", + " hard_min: int,\n", + " soft_min: int,\n", + " min_cost: int,\n", + " soft_max: int,\n", + " hard_max: int,\n", + " max_cost: int,\n", + " prefix: str,\n", + ") -> tuple[list[cp_model.BoolVarT], list[int]]:\n", " \"\"\"Sequence constraint on true variables with soft and hard bounds.\n", "\n", " This constraint looks at every maximal contiguous sequence of variables\n", @@ -160,17 +170,17 @@ " # Forbid sequences that are too short.\n", " for length in range(1, hard_min):\n", " for start in range(len(works) - length + 1):\n", - " model.AddBoolOr(negated_bounded_span(works, start, length))\n", + " model.add_bool_or(negated_bounded_span(works, start, length))\n", "\n", " # Penalize sequences that are below the soft limit.\n", " if min_cost > 0:\n", " for length in range(hard_min, soft_min):\n", " for start in range(len(works) - length + 1):\n", " span = negated_bounded_span(works, start, length)\n", - " name = \": under_span(start=%i, length=%i)\" % (start, length)\n", - " lit = model.NewBoolVar(prefix + name)\n", + " name = f\": under_span(start={start}, length={length})\"\n", + " lit = model.new_bool_var(prefix + name)\n", " span.append(lit)\n", - " model.AddBoolOr(span)\n", + " model.add_bool_or(span)\n", " cost_literals.append(lit)\n", " # We filter exactly the sequence with a short length.\n", " # The penalty is proportional to the delta with soft_min.\n", @@ -181,24 +191,32 @@ " for length in range(soft_max + 1, hard_max + 1):\n", " for start in range(len(works) - length + 1):\n", " span = negated_bounded_span(works, start, length)\n", - " name = \": over_span(start=%i, length=%i)\" % (start, length)\n", - " lit = model.NewBoolVar(prefix + name)\n", + " name = f\": over_span(start={start}, length={length})\"\n", + " lit = model.new_bool_var(prefix + name)\n", " span.append(lit)\n", - " model.AddBoolOr(span)\n", + " model.add_bool_or(span)\n", " cost_literals.append(lit)\n", " # Cost paid is max_cost * excess length.\n", 
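A compact restatement of the trick used throughout the shift-scheduling hunk above: the clause returned by negated_bounded_span is violated exactly when works[start : start + length] is an isolated block of trues, so posting it with add_bool_or forbids that block, and appending a fresh literal before posting turns the ban into a priced penalty. A minimal sketch that hard-forbids isolated work stretches shorter than three days:

from ortools.sat.python import cp_model

model = cp_model.CpModel()
num_days = 7
works = [model.new_bool_var(f"work_{d}") for d in range(num_days)]

def negated_bounded_span(works, start, length):
    # Same clause as in the notebook: borders kept positive, span negated.
    sequence = []
    if start > 0:
        sequence.append(works[start - 1])
    for i in range(length):
        sequence.append(~works[start + i])
    if start + length < len(works):
        sequence.append(works[start + length])
    return sequence

hard_min = 3  # no isolated run of one or two worked days
for length in range(1, hard_min):
    for start in range(num_days - length + 1):
        model.add_bool_or(negated_bounded_span(works, start, length))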
" cost_coefficients.append(max_cost * (length - soft_max))\n", "\n", " # Just forbid any sequence of true variables with length hard_max + 1\n", " for start in range(len(works) - hard_max):\n", - " model.AddBoolOr([works[i].Not() for i in range(start, start + hard_max + 1)])\n", + " model.add_bool_or([~works[i] for i in range(start, start + hard_max + 1)])\n", " return cost_literals, cost_coefficients\n", "\n", "\n", "def add_soft_sum_constraint(\n", - " model, works, hard_min, soft_min, min_cost, soft_max, hard_max, max_cost, prefix\n", - "):\n", - " \"\"\"Sum constraint with soft and hard bounds.\n", + " model: cp_model.CpModel,\n", + " works: list[cp_model.BoolVarT],\n", + " hard_min: int,\n", + " soft_min: int,\n", + " min_cost: int,\n", + " soft_max: int,\n", + " hard_max: int,\n", + " max_cost: int,\n", + " prefix: str,\n", + ") -> tuple[list[cp_model.IntVar], list[int]]:\n", + " \"\"\"sum constraint with soft and hard bounds.\n", "\n", " This constraint counts the variables assigned to true from works.\n", " If forbids sum < hard_min or > hard_max.\n", @@ -227,33 +245,33 @@ " \"\"\"\n", " cost_variables = []\n", " cost_coefficients = []\n", - " sum_var = model.NewIntVar(hard_min, hard_max, \"\")\n", + " sum_var = model.new_int_var(hard_min, hard_max, \"\")\n", " # This adds the hard constraints on the sum.\n", - " model.Add(sum_var == sum(works))\n", + " model.add(sum_var == sum(works))\n", "\n", " # Penalize sums below the soft_min target.\n", " if soft_min > hard_min and min_cost > 0:\n", - " delta = model.NewIntVar(-len(works), len(works), \"\")\n", - " model.Add(delta == soft_min - sum_var)\n", + " delta = model.new_int_var(-len(works), len(works), \"\")\n", + " model.add(delta == soft_min - sum_var)\n", " # TODO(user): Compare efficiency with only excess >= soft_min - sum_var.\n", - " excess = model.NewIntVar(0, 7, prefix + \": under_sum\")\n", - " model.AddMaxEquality(excess, [delta, 0])\n", + " excess = model.new_int_var(0, 7, prefix + \": under_sum\")\n", + " model.add_max_equality(excess, [delta, 0])\n", " cost_variables.append(excess)\n", " cost_coefficients.append(min_cost)\n", "\n", " # Penalize sums above the soft_max target.\n", " if soft_max < hard_max and max_cost > 0:\n", - " delta = model.NewIntVar(-7, 7, \"\")\n", - " model.Add(delta == sum_var - soft_max)\n", - " excess = model.NewIntVar(0, 7, prefix + \": over_sum\")\n", - " model.AddMaxEquality(excess, [delta, 0])\n", + " delta = model.new_int_var(-7, 7, \"\")\n", + " model.add(delta == sum_var - soft_max)\n", + " excess = model.new_int_var(0, 7, prefix + \": over_sum\")\n", + " model.add_max_equality(excess, [delta, 0])\n", " cost_variables.append(excess)\n", " cost_coefficients.append(max_cost)\n", "\n", " return cost_variables, cost_coefficients\n", "\n", "\n", - "def solve_shift_scheduling(params, output_proto):\n", + "def solve_shift_scheduling(params: str, output_proto: str):\n", " \"\"\"Solves the shift scheduling problem.\"\"\"\n", " # Data\n", " num_employees = 8\n", @@ -348,22 +366,22 @@ " for e in range(num_employees):\n", " for s in range(num_shifts):\n", " for d in range(num_days):\n", - " work[e, s, d] = model.NewBoolVar(\"work%i_%i_%i\" % (e, s, d))\n", + " work[e, s, d] = model.new_bool_var(f\"work{e}_{s}_{d}\")\n", "\n", " # Linear terms of the objective in a minimization context.\n", - " obj_int_vars = []\n", - " obj_int_coeffs = []\n", - " obj_bool_vars = []\n", - " obj_bool_coeffs = []\n", + " obj_int_vars: list[cp_model.IntVar] = []\n", + " obj_int_coeffs: list[int] = []\n", + " 
obj_bool_vars: list[cp_model.BoolVarT] = []\n", + " obj_bool_coeffs: list[int] = []\n", "\n", " # Exactly one shift per day.\n", " for e in range(num_employees):\n", " for d in range(num_days):\n", - " model.AddExactlyOne(work[e, s, d] for s in range(num_shifts))\n", + " model.add_exactly_one(work[e, s, d] for s in range(num_shifts))\n", "\n", " # Fixed assignments.\n", " for e, s, d in fixed_assignments:\n", - " model.Add(work[e, s, d] == 1)\n", + " model.add(work[e, s, d] == 1)\n", "\n", " # Employee requests\n", " for e, s, d, w in requests:\n", @@ -384,7 +402,7 @@ " soft_max,\n", " hard_max,\n", " max_cost,\n", - " \"shift_constraint(employee %i, shift %i)\" % (e, shift),\n", + " f\"shift_constraint(employee {e}, shift {shift})\",\n", " )\n", " obj_bool_vars.extend(variables)\n", " obj_bool_coeffs.extend(coeffs)\n", @@ -404,8 +422,7 @@ " soft_max,\n", " hard_max,\n", " max_cost,\n", - " \"weekly_sum_constraint(employee %i, shift %i, week %i)\"\n", - " % (e, shift, w),\n", + " f\"weekly_sum_constraint(employee {e}, shift {shift}, week {w})\",\n", " )\n", " obj_int_vars.extend(variables)\n", " obj_int_coeffs.extend(coeffs)\n", @@ -415,17 +432,17 @@ " for e in range(num_employees):\n", " for d in range(num_days - 1):\n", " transition = [\n", - " work[e, previous_shift, d].Not(),\n", - " work[e, next_shift, d + 1].Not(),\n", + " ~work[e, previous_shift, d],\n", + " ~work[e, next_shift, d + 1],\n", " ]\n", " if cost == 0:\n", - " model.AddBoolOr(transition)\n", + " model.add_bool_or(transition)\n", " else:\n", - " trans_var = model.NewBoolVar(\n", - " \"transition (employee=%i, day=%i)\" % (e, d)\n", + " trans_var = model.new_bool_var(\n", + " f\"transition (employee={e}, day={d})\"\n", " )\n", " transition.append(trans_var)\n", - " model.AddBoolOr(transition)\n", + " model.add_bool_or(transition)\n", " obj_bool_vars.append(trans_var)\n", " obj_bool_coeffs.append(cost)\n", "\n", @@ -436,24 +453,24 @@ " works = [work[e, s, w * 7 + d] for e in range(num_employees)]\n", " # Ignore Off shift.\n", " min_demand = weekly_cover_demands[d][s - 1]\n", - " worked = model.NewIntVar(min_demand, num_employees, \"\")\n", - " model.Add(worked == sum(works))\n", + " worked = model.new_int_var(min_demand, num_employees, \"\")\n", + " model.add(worked == sum(works))\n", " over_penalty = excess_cover_penalties[s - 1]\n", " if over_penalty > 0:\n", - " name = \"excess_demand(shift=%i, week=%i, day=%i)\" % (s, w, d)\n", - " excess = model.NewIntVar(0, num_employees - min_demand, name)\n", - " model.Add(excess == worked - min_demand)\n", + " name = f\"excess_demand(shift={s}, week={w}, day={d})\"\n", + " excess = model.new_int_var(0, num_employees - min_demand, name)\n", + " model.add(excess == worked - min_demand)\n", " obj_int_vars.append(excess)\n", " obj_int_coeffs.append(over_penalty)\n", "\n", " # Objective\n", - " model.Minimize(\n", + " model.minimize(\n", " sum(obj_bool_vars[i] * obj_bool_coeffs[i] for i in range(len(obj_bool_vars)))\n", " + sum(obj_int_vars[i] * obj_int_coeffs[i] for i in range(len(obj_int_vars)))\n", " )\n", "\n", " if output_proto:\n", - " print(\"Writing proto to %s\" % output_proto)\n", + " print(f\"Writing proto to {output_proto}\")\n", " with open(output_proto, \"w\") as text_file:\n", " text_file.write(str(model))\n", "\n", @@ -462,7 +479,7 @@ " if params:\n", " text_format.Parse(params, solver.parameters)\n", " solution_printer = cp_model.ObjectiveSolutionPrinter()\n", - " status = solver.Solve(model, solution_printer)\n", + " status = solver.solve(model, solution_printer)\n", 
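Two small conveniences in the solve call above deserve a note: solver parameters can be set from a text-format SatParameters string (the same mechanism behind the params flags in these examples), and cp_model.ObjectiveSolutionPrinter is a ready-made callback that logs each improving solution. Minimal usage sketch:

from google.protobuf import text_format
from ortools.sat.python import cp_model

model = cp_model.CpModel()
x = model.new_int_var(0, 10, "x")
y = model.new_int_var(0, 10, "y")
model.add(x + y <= 12)
model.maximize(2 * x + y)

solver = cp_model.CpSolver()
# Same mechanism as the `params` flag in the notebooks above.
text_format.Parse("num_search_workers:8,max_time_in_seconds:10.0",
                  solver.parameters)
status = solver.solve(model, cp_model.ObjectiveSolutionPrinter())
print(solver.status_name(status))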
"\n", " # Print solution.\n", " if status == cp_model.OPTIMAL or status == cp_model.FEASIBLE:\n", @@ -475,32 +492,28 @@ " schedule = \"\"\n", " for d in range(num_days):\n", " for s in range(num_shifts):\n", - " if solver.BooleanValue(work[e, s, d]):\n", + " if solver.boolean_value(work[e, s, d]):\n", " schedule += shifts[s] + \" \"\n", - " print(\"worker %i: %s\" % (e, schedule))\n", + " print(f\"worker {e}: {schedule}\")\n", " print()\n", " print(\"Penalties:\")\n", " for i, var in enumerate(obj_bool_vars):\n", - " if solver.BooleanValue(var):\n", + " if solver.boolean_value(var):\n", " penalty = obj_bool_coeffs[i]\n", " if penalty > 0:\n", - " print(\" %s violated, penalty=%i\" % (var.Name(), penalty))\n", + " print(f\" {var.name} violated, penalty={penalty}\")\n", " else:\n", - " print(\" %s fulfilled, gain=%i\" % (var.Name(), -penalty))\n", + " print(f\" {var.name} fulfilled, gain={-penalty}\")\n", "\n", " for i, var in enumerate(obj_int_vars):\n", - " if solver.Value(var) > 0:\n", + " if solver.value(var) > 0:\n", " print(\n", - " \" %s violated by %i, linear penalty=%i\"\n", - " % (var.Name(), solver.Value(var), obj_int_coeffs[i])\n", + " f\" {var.name} violated by {solver.value(var)}, linear\"\n", + " f\" penalty={obj_int_coeffs[i]}\"\n", " )\n", "\n", " print()\n", - " print(\"Statistics\")\n", - " print(\" - status : %s\" % solver.StatusName(status))\n", - " print(\" - conflicts : %i\" % solver.NumConflicts())\n", - " print(\" - branches : %i\" % solver.NumBranches())\n", - " print(\" - wall time : %f s\" % solver.WallTime())\n", + " print(solver.response_stats())\n", "\n", "\n", "def main(_):\n", diff --git a/examples/notebook/examples/single_machine_scheduling_with_setup_release_due_dates_sat.ipynb b/examples/notebook/examples/single_machine_scheduling_with_setup_release_due_dates_sat.ipynb index 5b10d50caa2..b8f6fede11f 100644 --- a/examples/notebook/examples/single_machine_scheduling_with_setup_release_due_dates_sat.ipynb +++ b/examples/notebook/examples/single_machine_scheduling_with_setup_release_due_dates_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { @@ -108,17 +108,16 @@ "class SolutionPrinter(cp_model.CpSolverSolutionCallback):\n", " \"\"\"Print intermediate solutions.\"\"\"\n", "\n", - " def __init__(self):\n", + " def __init__(self) -> None:\n", " cp_model.CpSolverSolutionCallback.__init__(self)\n", " self.__solution_count = 0\n", "\n", - " def on_solution_callback(self):\n", - " \"\"\"Called after each new solution found.\"\"\"\n", + " def on_solution_callback(self) -> None:\n", + " \"\"\"Called at each new solution.\"\"\"\n", " print(\n", - " \"Solution %i, time = %f s, objective = %i\"\n", - " % (self.__solution_count, self.WallTime(), self.ObjectiveValue())\n", + " f\"Solution {self.__solution_count}, time = {self.wall_time} s,\"\n", + " f\" objective = {self.objective_value}\"\n", " )\n", - " self.__solution_count += 1\n", "\n", "\n", "def single_machine_scheduling():\n", @@ -461,9 +460,7 @@ " if min_incoming_setup == 0:\n", " continue\n", "\n", - " print(\n", - " \"job %i has a min incoming setup of %i\" % (job_id, min_incoming_setup)\n", - " )\n", + " print(f\"job {job_id} has a min incoming setup of {min_incoming_setup}\")\n", " # We can transfer some setup times to the duration of the job.\n", " job_durations[job_id] += min_incoming_setup\n", " # Decrease corresponding incoming setup times.\n", @@ -482,7 +479,7 @@ " horizon = sum(job_durations) + sum(\n", " max(setup_times[i][j] for i in range(num_jobs + 1)) for j in range(num_jobs)\n", " )\n", - " print(\"Greedy horizon =\", horizon)\n", + " print(f\"Greedy horizon = {horizon}\")\n", "\n", " # ----------------------------------------------------------------------------\n", " # Global storage of variables.\n", @@ -497,38 +494,38 @@ " release_date = release_dates[job_id]\n", " due_date = due_dates[job_id] if due_dates[job_id] != -1 else horizon\n", " print(\n", - " \"job %2i: start = %5i, duration = %4i, end = %6i\"\n", - " % (job_id, release_date, duration, due_date)\n", + " f\"job {job_id:2}: start = {release_date:5}, duration = {duration:4},\"\n", + " f\" end = {due_date:6}\"\n", " )\n", - " name_suffix = \"_%i\" % job_id\n", - " start = model.NewIntVar(release_date, due_date, \"s\" + name_suffix)\n", - " end = model.NewIntVar(release_date, due_date, \"e\" + name_suffix)\n", - " interval = model.NewIntervalVar(start, duration, end, \"i\" + name_suffix)\n", + " name_suffix = f\"_{job_id}\"\n", + " start = model.new_int_var(release_date, due_date, \"s\" + name_suffix)\n", + " end = model.new_int_var(release_date, due_date, \"e\" + name_suffix)\n", + " interval = model.new_interval_var(start, duration, end, \"i\" + name_suffix)\n", " starts.append(start)\n", " ends.append(end)\n", " intervals.append(interval)\n", "\n", " # No overlap constraint.\n", - " model.AddNoOverlap(intervals)\n", + " model.add_no_overlap(intervals)\n", "\n", " # ----------------------------------------------------------------------------\n", " # Transition times using a circuit constraint.\n", " arcs = []\n", " for i in all_jobs:\n", " # Initial arc from the dummy node (0) to a task.\n", - " start_lit = model.NewBoolVar(\"\")\n", + " start_lit = model.new_bool_var(\"\")\n", " arcs.append((0, i + 1, start_lit))\n", " # If this task is the first, set to minimum starting time.\n", " min_start_time = max(release_dates[i], setup_times[0][i])\n", - " model.Add(starts[i] == min_start_time).OnlyEnforceIf(start_lit)\n", + " model.add(starts[i] == min_start_time).only_enforce_if(start_lit)\n", " # Final arc from an arc to the dummy node.\n", - " arcs.append((i + 1, 0, model.NewBoolVar(\"\")))\n", + " 
arcs.append((i + 1, 0, model.new_bool_var(\"\")))\n", "\n", " for j in all_jobs:\n", " if i == j:\n", " continue\n", "\n", - " lit = model.NewBoolVar(\"%i follows %i\" % (j, i))\n", + " lit = model.new_bool_var(f\"{j} follows {i}\")\n", " arcs.append((i + 1, j + 1, lit))\n", "\n", " # We add the reified precedence to link the literal with the times of the\n", @@ -536,32 +533,32 @@ " # If release_dates[j] == 0, we can strengthen this precedence into an\n", " # equality as we are minimizing the makespan.\n", " if release_dates[j] == 0:\n", - " model.Add(starts[j] == ends[i] + setup_times[i + 1][j]).OnlyEnforceIf(\n", + " model.add(starts[j] == ends[i] + setup_times[i + 1][j]).only_enforce_if(\n", " lit\n", " )\n", " else:\n", - " model.Add(starts[j] >= ends[i] + setup_times[i + 1][j]).OnlyEnforceIf(\n", + " model.add(starts[j] >= ends[i] + setup_times[i + 1][j]).only_enforce_if(\n", " lit\n", " )\n", "\n", - " model.AddCircuit(arcs)\n", + " model.add_circuit(arcs)\n", "\n", " # ----------------------------------------------------------------------------\n", " # Precedences.\n", " for before, after in precedences:\n", - " print(\"job %i is after job %i\" % (after, before))\n", - " model.Add(ends[before] <= starts[after])\n", + " print(f\"job {after} is after job {before}\")\n", + " model.add(ends[before] <= starts[after])\n", "\n", " # ----------------------------------------------------------------------------\n", " # Objective.\n", - " makespan = model.NewIntVar(0, horizon, \"makespan\")\n", - " model.AddMaxEquality(makespan, ends)\n", - " model.Minimize(makespan)\n", + " makespan = model.new_int_var(0, horizon, \"makespan\")\n", + " model.add_max_equality(makespan, ends)\n", + " model.minimize(makespan)\n", "\n", " # ----------------------------------------------------------------------------\n", " # Write problem to file.\n", " if output_proto_file:\n", - " print(\"Writing proto to %s\" % output_proto_file)\n", + " print(f\"Writing proto to {output_proto_file}\")\n", " with open(output_proto_file, \"w\") as text_file:\n", " text_file.write(str(model))\n", "\n", @@ -571,11 +568,12 @@ " if parameters:\n", " text_format.Parse(parameters, solver.parameters)\n", " solution_printer = SolutionPrinter()\n", - " solver.Solve(model, solution_printer)\n", + " solver.best_bound_callback = lambda a: print(f\"New objective lower bound: {a}\")\n", + " solver.solve(model, solution_printer)\n", " for job_id in all_jobs:\n", " print(\n", - " \"job %i starts at %i end ends at %i\"\n", - " % (job_id, solver.Value(starts[job_id]), solver.Value(ends[job_id]))\n", + " f\"job {job_id} starts at {solver.value(starts[job_id])} and ends at\"\n", + " f\" {solver.value(ends[job_id])}\"\n", " )\n", "\n", "\n", diff --git a/examples/notebook/examples/spread_robots_sat.ipynb b/examples/notebook/examples/spread_robots_sat.ipynb index 8fa00b68162..f59067cf5dd 100644 --- a/examples/notebook/examples/spread_robots_sat.ipynb +++ b/examples/notebook/examples/spread_robots_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
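One genuinely new hook appears in the single-machine hunk above: assigning solver.best_bound_callback registers a function that is invoked with every improvement of the dual bound during search, complementing the solution callback on the primal side. A minimal sketch:

from ortools.sat.python import cp_model

model = cp_model.CpModel()
xs = [model.new_int_var(0, 9, f"x_{i}") for i in range(4)]
model.add(sum(xs) >= 14)
model.minimize(sum(xs))

solver = cp_model.CpSolver()
solver.best_bound_callback = lambda bound: print(f"New objective lower bound: {bound}")
status = solver.solve(model)
if status == cp_model.OPTIMAL:
    print("objective:", solver.objective_value)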
] }, { @@ -73,7 +73,7 @@ "metadata": {}, "source": [ "\n", - "Maximize the minimum of pairwise distances between n robots in a square space.\n" + "maximize the minimum of pairwise distances between n robots in a square space.\n" ] }, { @@ -100,13 +100,13 @@ ")\n", "\n", "\n", - "def spread_robots(num_robots: int, room_size: int, params: str):\n", + "def spread_robots(num_robots: int, room_size: int, params: str) -> None:\n", " \"\"\"Optimize robots placement.\"\"\"\n", " model = cp_model.CpModel()\n", "\n", " # Create the list of coordinates (x, y) for each robot.\n", - " x = [model.NewIntVar(1, room_size, f\"x_{i}\") for i in range(num_robots)]\n", - " y = [model.NewIntVar(1, room_size, f\"y_{i}\") for i in range(num_robots)]\n", + " x = [model.new_int_var(1, room_size, f\"x_{i}\") for i in range(num_robots)]\n", + " y = [model.new_int_var(1, room_size, f\"y_{i}\") for i in range(num_robots)]\n", "\n", " # The specification of the problem is to maximize the minimum euclidian\n", " # distance between any two robots. Unfortunately, the euclidian distance\n", @@ -125,7 +125,7 @@ " # forall i:\n", " # scaled_min_square_distance <= scaling * (x_diff_sq[i] + y_diff_sq[i])\n", " scaling = 1000\n", - " scaled_min_square_distance = model.NewIntVar(\n", + " scaled_min_square_distance = model.new_int_var(\n", " 0, 2 * scaling * room_size**2, \"scaled_min_square_distance\"\n", " )\n", "\n", @@ -134,45 +134,45 @@ " for i in range(num_robots - 1):\n", " for j in range(i + 1, num_robots):\n", " # Compute the distance on each dimension between robot i and robot j.\n", - " x_diff = model.NewIntVar(-room_size, room_size, f\"x_diff{i}\")\n", - " y_diff = model.NewIntVar(-room_size, room_size, f\"y_diff{i}\")\n", - " model.Add(x_diff == x[i] - x[j])\n", - " model.Add(y_diff == y[i] - y[j])\n", + " x_diff = model.new_int_var(-room_size, room_size, f\"x_diff{i}\")\n", + " y_diff = model.new_int_var(-room_size, room_size, f\"y_diff{i}\")\n", + " model.add(x_diff == x[i] - x[j])\n", + " model.add(y_diff == y[i] - y[j])\n", "\n", " # Compute the square of the previous differences.\n", - " x_diff_sq = model.NewIntVar(0, room_size**2, f\"x_diff_sq{i}\")\n", - " y_diff_sq = model.NewIntVar(0, room_size**2, f\"y_diff_sq{i}\")\n", - " model.AddMultiplicationEquality(x_diff_sq, x_diff, x_diff)\n", - " model.AddMultiplicationEquality(y_diff_sq, y_diff, y_diff)\n", + " x_diff_sq = model.new_int_var(0, room_size**2, f\"x_diff_sq{i}\")\n", + " y_diff_sq = model.new_int_var(0, room_size**2, f\"y_diff_sq{i}\")\n", + " model.add_multiplication_equality(x_diff_sq, x_diff, x_diff)\n", + " model.add_multiplication_equality(y_diff_sq, y_diff, y_diff)\n", "\n", " # We just need to be <= to the scaled square distance as we are\n", " # maximizing the min distance, which is equivalent as maximizing the min\n", " # square distance.\n", - " model.Add(scaled_min_square_distance <= scaling * (x_diff_sq + y_diff_sq))\n", + " model.add(scaled_min_square_distance <= scaling * (x_diff_sq + y_diff_sq))\n", "\n", " # Naive symmetry breaking.\n", " for i in range(1, num_robots):\n", - " model.Add(x[0] <= x[i])\n", - " model.Add(y[0] <= y[i])\n", + " model.add(x[0] <= x[i])\n", + " model.add(y[0] <= y[i])\n", "\n", " # Objective\n", - " model.Maximize(scaled_min_square_distance)\n", + " model.maximize(scaled_min_square_distance)\n", "\n", " # Creates a solver and solves the model.\n", " solver = cp_model.CpSolver()\n", " if params:\n", " text_format.Parse(params, solver.parameters)\n", " solver.parameters.log_search_progress = True\n", - " 
status = solver.Solve(model)\n", + " status = solver.solve(model)\n", "\n", " # Prints the solution.\n", " if status == cp_model.OPTIMAL or status == cp_model.FEASIBLE:\n", " print(\n", " f\"Spread {num_robots} with a min pairwise distance of\"\n", - " f\" {math.sqrt(solver.ObjectiveValue() / scaling)}\"\n", + " f\" {math.sqrt(solver.objective_value / scaling)}\"\n", " )\n", " for i in range(num_robots):\n", - " print(f\"robot {i}: x={solver.Value(x[i])} y={solver.Value(y[i])}\")\n", + " print(f\"robot {i}: x={solver.value(x[i])} y={solver.value(y[i])}\")\n", " else:\n", " print(\"No solution found.\")\n", "\n", diff --git a/examples/notebook/examples/steel_mill_slab_sat.ipynb b/examples/notebook/examples/steel_mill_slab_sat.ipynb index aae1b2b54d8..effe92258de 100644 --- a/examples/notebook/examples/steel_mill_slab_sat.ipynb +++ b/examples/notebook/examples/steel_mill_slab_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -106,13 +106,10 @@ ")\n", "\n", "\n", - "def build_problem(problem_id):\n", + "def build_problem(\n", + " problem_id: int,\n", + ") -> tuple[int, list[int], int, list[tuple[int, int]]]:\n", " \"\"\"Build problem data.\"\"\"\n", - " capacities = None\n", - " num_colors = None\n", - " num_slabs = None\n", - " orders = None\n", - "\n", " if problem_id == 0:\n", " capacities = [\n", " # fmt:off\n", @@ -167,15 +164,22 @@ " # fmt:on\n", " ]\n", "\n", - " elif problem_id == 3:\n", + " else: # problem_id == 3, default problem.\n", " capacities = [0, 17, 44]\n", " num_colors = 8\n", " num_slabs = 10\n", " orders = [ # (size, color)\n", - " # fmt:off\n", - " (4, 1), (22, 2), (9, 3), (5, 4), (8, 5), (3, 6), (3, 4), (4, 7),\n", - " (7, 4), (7, 8), (3, 6),\n", - " # fmt:on\n", + " (4, 1),\n", + " (22, 2),\n", + " (9, 3),\n", + " (5, 4),\n", + " (8, 5),\n", + " (3, 6),\n", + " (3, 4),\n", + " (4, 7),\n", + " (7, 4),\n", + " (7, 8),\n", + " (3, 6),\n", " ]\n", "\n", " return (num_slabs, capacities, num_colors, orders)\n", @@ -184,7 +188,7 @@ "class SteelMillSlabSolutionPrinter(cp_model.CpSolverSolutionCallback):\n", " \"\"\"Print intermediate solutions.\"\"\"\n", "\n", - " def __init__(self, orders, assign, load, loss):\n", + " def __init__(self, orders, assign, load, loss) -> None:\n", " cp_model.CpSolverSolutionCallback.__init__(self)\n", " self.__orders = orders\n", " self.__assign = assign\n", @@ -195,40 +199,35 @@ " self.__all_slabs = range(len(assign[0]))\n", " self.__start_time = time.time()\n", "\n", - " def on_solution_callback(self):\n", + " def on_solution_callback(self) -> None:\n", " \"\"\"Called on each new solution.\"\"\"\n", " current_time = time.time()\n", - " objective = sum(self.Value(l) for l in self.__loss)\n", + " objective = sum(self.value(l) for l in self.__loss)\n", " print(\n", - " \"Solution %i, time = %f s, objective = %i\"\n", - " % (self.__solution_count, current_time - self.__start_time, objective)\n", + " f\"Solution {self.__solution_count}, time =\"\n", + " f\" {current_time - self.__start_time} s, objective = {objective}\"\n", " )\n", " self.__solution_count += 1\n", " orders_in_slab = [\n", - " [o for o in self.__all_orders if self.Value(self.__assign[o][s])]\n", + " [o for o in self.__all_orders if self.value(self.__assign[o][s])]\n", " for s in self.__all_slabs\n", " ]\n", " for s in self.__all_slabs:\n", " if orders_in_slab[s]:\n", - " line = \" - slab %i, load = %i, loss = %i, orders = [\" % (\n", - " s,\n", - " self.Value(self.__load[s]),\n", - 
" self.Value(self.__loss[s]),\n", + " line = (\n", + " f\" - slab {s}, load = {self.value(self.__load[s])}, loss =\"\n", + " f\" {self.value(self.__loss[s])}, orders = [\"\n", " )\n", " for o in orders_in_slab[s]:\n", - " line += \"#%i(w%i, c%i) \" % (\n", - " o,\n", - " self.__orders[o][0],\n", - " self.__orders[o][1],\n", - " )\n", + " line += f\"#{o}(w{self.__orders[o][0]}, c{self.__orders[o][1]})\"\n", " line += \"]\"\n", " print(line)\n", "\n", "\n", - "def steel_mill_slab(problem, break_symmetries):\n", + "def steel_mill_slab(problem_id: int, break_symmetries: bool) -> None:\n", " \"\"\"Solves the Steel Mill Slab Problem.\"\"\"\n", " ### Load problem.\n", - " (num_slabs, capacities, num_colors, orders) = build_problem(problem)\n", + " num_slabs, capacities, num_colors, orders = build_problem(problem_id)\n", "\n", " num_orders = len(orders)\n", " num_capacities = len(capacities)\n", @@ -236,8 +235,8 @@ " all_colors = range(num_colors)\n", " all_orders = range(len(orders))\n", " print(\n", - " \"Solving steel mill with %i orders, %i slabs, and %i capacities\"\n", - " % (num_orders, num_slabs, num_capacities - 1)\n", + " f\"Solving steel mill with {num_orders} orders, {num_slabs} slabs, and\"\n", + " f\" {num_capacities - 1} capacities\"\n", " )\n", "\n", " # Compute auxiliary data.\n", @@ -260,44 +259,44 @@ " # Create the model and the decision variables.\n", " model = cp_model.CpModel()\n", " assign = [\n", - " [model.NewBoolVar(\"assign_%i_to_slab_%i\" % (o, s)) for s in all_slabs]\n", + " [model.new_bool_var(f\"assign_{o}_to_slab_{s}\") for s in all_slabs]\n", " for o in all_orders\n", " ]\n", - " loads = [model.NewIntVar(0, max_capacity, \"load_of_slab_%i\" % s) for s in all_slabs]\n", + " loads = [model.new_int_var(0, max_capacity, f\"load_of_slab_{s}\") for s in all_slabs]\n", " color_is_in_slab = [\n", - " [model.NewBoolVar(\"color_%i_in_slab_%i\" % (c + 1, s)) for c in all_colors]\n", + " [model.new_bool_var(f\"color_{c + 1}_in_slab_{s}\") for c in all_colors]\n", " for s in all_slabs\n", " ]\n", "\n", " # Compute load of all slabs.\n", " for s in all_slabs:\n", - " model.Add(sum(assign[o][s] * widths[o] for o in all_orders) == loads[s])\n", + " model.add(sum(assign[o][s] * widths[o] for o in all_orders) == loads[s])\n", "\n", " # Orders are assigned to one slab.\n", " for o in all_orders:\n", - " model.AddExactlyOne(assign[o])\n", + " model.add_exactly_one(assign[o])\n", "\n", " # Redundant constraint (sum of loads == sum of widths).\n", - " model.Add(sum(loads) == sum(widths))\n", + " model.add(sum(loads) == sum(widths))\n", "\n", " # Link present_colors and assign.\n", " for c in all_colors:\n", " for s in all_slabs:\n", " for o in orders_per_color[c]:\n", - " model.AddImplication(assign[o][s], color_is_in_slab[s][c])\n", - " model.AddImplication(color_is_in_slab[s][c].Not(), assign[o][s].Not())\n", + " model.add_implication(assign[o][s], color_is_in_slab[s][c])\n", + " model.add_implication(~color_is_in_slab[s][c], ~assign[o][s])\n", "\n", " # At most two colors per slab.\n", " for s in all_slabs:\n", - " model.Add(sum(color_is_in_slab[s]) <= 2)\n", + " model.add(sum(color_is_in_slab[s]) <= 2)\n", "\n", " # Project previous constraint on unique_color_orders\n", " for s in all_slabs:\n", - " model.Add(sum(assign[o][s] for o in unique_color_orders) <= 2)\n", + " model.add(sum(assign[o][s] for o in unique_color_orders) <= 2)\n", "\n", " # Symmetry breaking.\n", " for s in range(num_slabs - 1):\n", - " model.Add(loads[s] >= loads[s + 1])\n", + " model.add(loads[s] >= loads[s + 
1])\n", "\n", "    # Collect equivalent orders.\n", "    width_to_unique_color_order = {}\n", @@ -332,50 +331,55 @@ "    # Create position variables if there are symmetries to be broken.\n", "    if break_symmetries and ordered_equivalent_orders:\n", "        print(\n", -    " \" - creating %i symmetry breaking constraints\"\n", -    "            % len(ordered_equivalent_orders)\n", +    " f\" - creating {len(ordered_equivalent_orders)} symmetry breaking\"\n", +    "            \" constraints\"\n", "        )\n", "        positions = {}\n", "        for p in ordered_equivalent_orders:\n", "            if p[0] not in positions:\n", -    " positions[p[0]] = model.NewIntVar(\n", -    "                    0, num_slabs - 1, \"position_of_slab_%i\" % p[0]\n", +    " positions[p[0]] = model.new_int_var(\n", +    "                    0, num_slabs - 1, f\"position_of_slab_{p[0]}\"\n", "                )\n", -    " model.AddMapDomain(positions[p[0]], assign[p[0]])\n", +    " model.add_map_domain(positions[p[0]], assign[p[0]])\n", "            if p[1] not in positions:\n", -    " positions[p[1]] = model.NewIntVar(\n", -    "                    0, num_slabs - 1, \"position_of_slab_%i\" % p[1]\n", +    " positions[p[1]] = model.new_int_var(\n", +    "                    0, num_slabs - 1, f\"position_of_slab_{p[1]}\"\n", "                )\n", -    " model.AddMapDomain(positions[p[1]], assign[p[1]])\n", +    " model.add_map_domain(positions[p[1]], assign[p[1]])\n", "            # Finally add the symmetry breaking constraint.\n", -    " model.Add(positions[p[0]] <= positions[p[1]])\n", +    " model.add(positions[p[0]] <= positions[p[1]])\n", "\n", "    # Objective.\n", -    " obj = model.NewIntVar(0, num_slabs * max_loss, \"obj\")\n", -    " losses = [model.NewIntVar(0, max_loss, \"loss_%i\" % s) for s in all_slabs]\n", +    " obj = model.new_int_var(0, num_slabs * max_loss, \"obj\")\n", +    " losses = [model.new_int_var(0, max_loss, f\"loss_{s}\") for s in all_slabs]\n", "    for s in all_slabs:\n", -    " model.AddElement(loads[s], loss_array, losses[s])\n", -    " model.Add(obj == sum(losses))\n", -    " model.Minimize(obj)\n", +    " model.add_element(loads[s], loss_array, losses[s])\n", +    " model.add(obj == sum(losses))\n", +    " model.minimize(obj)\n", "\n", "    ### Solve model.\n", "    solver = cp_model.CpSolver()\n", "    if _PARAMS.value:\n", "        text_format.Parse(_PARAMS.value, solver.parameters)\n", "    objective_printer = cp_model.ObjectiveSolutionPrinter()\n", -    " status = solver.Solve(model, objective_printer)\n", +    " status = solver.solve(model, objective_printer)\n", "\n", "    ### Output the solution.\n", "    if status in (cp_model.OPTIMAL, cp_model.FEASIBLE):\n", "        print(\n", -    " \"Loss = %i, time = %f s, %i conflicts\"\n", -    "            % (solver.ObjectiveValue(), solver.WallTime(), solver.NumConflicts())\n", +    " f\"Loss = {solver.objective_value}, time = {solver.wall_time} s,\"\n", +    "            f\" {solver.num_conflicts} conflicts\"\n", "        )\n", "    else:\n", "        print(\"No solution\")\n", "\n", "\n", -    "def collect_valid_slabs_dp(capacities, colors, widths, loss_array):\n", +    "def collect_valid_slabs_dp(\n", +    "    capacities: list[int],\n", +    "    colors: list[int],\n", +    "    widths: list[int],\n", +    "    loss_array: list[int],\n", +    ") -> list[list[int]]:\n", "    \"\"\"Collect valid columns (assign, loss) for one slab.\"\"\"\n", "    start_time = time.time()\n", "\n", @@ -404,8 +408,8 @@ "        all_valid_assignments.extend(new_assignments)\n", "\n", "    print(\n", -    " \"%i assignments created in %.2f s\"\n", -    "        % (len(all_valid_assignments), time.time() - start_time)\n", +    " f\"{len(all_valid_assignments)} assignments created in\"\n", +    "        f\" {time.time() - start_time:.2f} s\"\n", "    )\n", "    tuples = []\n", "    for assignment in all_valid_assignments:\n", @@ -419,10 +423,10 @@ "    return tuples\n", "\n", "\n", -    "def 
steel_mill_slab_with_valid_slabs(problem, break_symmetries):\n", +    "def steel_mill_slab_with_valid_slabs(problem_id: int, break_symmetries: bool) -> None:\n", "    \"\"\"Solves the Steel Mill Slab Problem.\"\"\"\n", "    ### Load problem.\n", -    " (num_slabs, capacities, num_colors, orders) = build_problem(problem)\n", +    " (num_slabs, capacities, num_colors, orders) = build_problem(problem_id)\n", "\n", "    num_orders = len(orders)\n", "    num_capacities = len(capacities)\n", @@ -430,8 +434,8 @@ "    all_colors = range(num_colors)\n", "    all_orders = range(len(orders))\n", "    print(\n", -    " \"Solving steel mill with %i orders, %i slabs, and %i capacities\"\n", -    "        % (num_orders, num_slabs, num_capacities - 1)\n", +    " f\"Solving steel mill with {num_orders} orders, {num_slabs} slabs, and\"\n", +    "        f\" {num_capacities - 1} capacities\"\n", "    )\n", "\n", "    # Compute auxiliary data.\n", @@ -448,11 +452,11 @@ "    # Create the model and the decision variables.\n", "    model = cp_model.CpModel()\n", "    assign = [\n", -    " [model.NewBoolVar(\"assign_%i_to_slab_%i\" % (o, s)) for s in all_slabs]\n", +    " [model.new_bool_var(f\"assign_{o}_to_slab_{s}\") for s in all_slabs]\n", "        for o in all_orders\n", "    ]\n", -    " loads = [model.NewIntVar(0, max_capacity, \"load_%i\" % s) for s in all_slabs]\n", -    " losses = [model.NewIntVar(0, max_loss, \"loss_%i\" % s) for s in all_slabs]\n", +    " loads = [model.new_int_var(0, max_capacity, f\"load_{s}\") for s in all_slabs]\n", +    " losses = [model.new_int_var(0, max_loss, f\"loss_{s}\") for s in all_slabs]\n", "\n", "    unsorted_valid_slabs = collect_valid_slabs_dp(\n", "        capacities, colors, widths, loss_array\n", @@ -461,20 +465,20 @@ "    valid_slabs = sorted(unsorted_valid_slabs, key=lambda c: 1000 * c[-1] + c[-2])\n", "\n", "    for s in all_slabs:\n", -    " model.AddAllowedAssignments(\n", +    " model.add_allowed_assignments(\n", "            [assign[o][s] for o in all_orders] + [losses[s], loads[s]], valid_slabs\n", "        )\n", "\n", "    # Orders are assigned to one slab.\n", "    for o in all_orders:\n", -    " model.AddExactlyOne(assign[o])\n", +    " model.add_exactly_one(assign[o])\n", "\n", "    # Redundant constraint (sum of loads == sum of widths).\n", -    " model.Add(sum(loads) == sum(widths))\n", +    " model.add(sum(loads) == sum(widths))\n", "\n", "    # Symmetry breaking.\n", "    for s in range(num_slabs - 1):\n", -    " model.Add(loads[s] >= loads[s + 1])\n", +    " model.add(loads[s] >= loads[s + 1])\n", "\n", "    # Collect equivalent orders.\n", "    if break_symmetries:\n", @@ -514,26 +518,26 @@ "        # Create position variables if there are symmetries to be broken.\n", "        if ordered_equivalent_orders:\n", "            print(\n", -    " \" - creating %i symmetry breaking constraints\"\n", -    "                % len(ordered_equivalent_orders)\n", +    " f\" - creating {len(ordered_equivalent_orders)} symmetry breaking\"\n", +    "                \" constraints\"\n", "            )\n", "            positions = {}\n", "            for p in ordered_equivalent_orders:\n", "                if p[0] not in positions:\n", -    " positions[p[0]] = model.NewIntVar(\n", -    "                        0, num_slabs - 1, \"position_of_slab_%i\" % p[0]\n", +    " positions[p[0]] = model.new_int_var(\n", +    "                        0, num_slabs - 1, f\"position_of_slab_{p[0]}\"\n", "                    )\n", -    " model.AddMapDomain(positions[p[0]], assign[p[0]])\n", +    " model.add_map_domain(positions[p[0]], assign[p[0]])\n", "                if p[1] not in positions:\n", -    " positions[p[1]] = model.NewIntVar(\n", -    "                        0, num_slabs - 1, \"position_of_slab_%i\" % p[1]\n", +    " positions[p[1]] = model.new_int_var(\n", +    "                        0, num_slabs - 1, f\"position_of_slab_{p[1]}\"\n", "                    )\n", -    " model.AddMapDomain(positions[p[1]], assign[p[1]])\n", +    " 
model.add_map_domain(positions[p[1]], assign[p[1]])\n", "                # Finally add the symmetry breaking constraint.\n", -    " model.Add(positions[p[0]] <= positions[p[1]])\n", +    " model.add(positions[p[0]] <= positions[p[1]])\n", "\n", "    # Objective.\n", -    " model.Minimize(sum(losses))\n", +    " model.minimize(sum(losses))\n", "\n", "    print(\"Model created\")\n", "\n", @@ -543,29 +547,29 @@ "        text_format.Parse(_PARAMS.value, solver.parameters)\n", "\n", "    solution_printer = SteelMillSlabSolutionPrinter(orders, assign, loads, losses)\n", -    " status = solver.Solve(model, solution_printer)\n", +    " status = solver.solve(model, solution_printer)\n", "\n", "    ### Output the solution.\n", "    if status == cp_model.OPTIMAL:\n", "        print(\n", -    " \"Loss = %i, time = %.2f s, %i conflicts\"\n", -    "            % (solver.ObjectiveValue(), solver.WallTime(), solver.NumConflicts())\n", +    " f\"Loss = {solver.objective_value}, time = {solver.wall_time:.2f} s,\"\n", +    "            f\" {solver.num_conflicts} conflicts\"\n", "        )\n", "    else:\n", "        print(\"No solution\")\n", "\n", "\n", -    "def steel_mill_slab_with_column_generation(problem):\n", +    "def steel_mill_slab_with_column_generation(problem_id: int) -> None:\n", "    \"\"\"Solves the Steel Mill Slab Problem.\"\"\"\n", "    ### Load problem.\n", -    " (num_slabs, capacities, _, orders) = build_problem(problem)\n", +    " (num_slabs, capacities, _, orders) = build_problem(problem_id)\n", "\n", "    num_orders = len(orders)\n", "    num_capacities = len(capacities)\n", "    all_orders = range(len(orders))\n", "    print(\n", -    " \"Solving steel mill with %i orders, %i slabs, and %i capacities\"\n", -    "        % (num_orders, num_slabs, num_capacities - 1)\n", +    " f\"Solving steel mill with {num_orders} orders, {num_slabs} slabs, and\"\n", +    "        f\" {num_capacities - 1} capacities\"\n", "    )\n", "\n", "    # Compute auxiliary data.\n", @@ -589,21 +593,21 @@ "\n", "    # create model and decision variables.\n", "    model = cp_model.CpModel()\n", -    " selected = [model.NewBoolVar(\"selected_%i\" % i) for i in all_valid_slabs]\n", +    " selected = [model.new_bool_var(f\"selected_{i}\") for i in all_valid_slabs]\n", "\n", "    for order_id in all_orders:\n", -    " model.Add(\n", +    " model.add(\n", "            sum(selected[i] for i, slab in enumerate(valid_slabs) if slab[order_id])\n", "            == 1\n", "        )\n", "\n", "    # Redundant constraint (sum of loads == sum of widths).\n", -    " model.Add(\n", +    " model.add(\n", "        sum(selected[i] * valid_slabs[i][-1] for i in all_valid_slabs) == sum(widths)\n", "    )\n", "\n", "    # Objective.\n", -    " model.Minimize(sum(selected[i] * valid_slabs[i][-2] for i in all_valid_slabs))\n", +    " model.minimize(sum(selected[i] * valid_slabs[i][-2] for i in all_valid_slabs))\n", "\n", "    print(\"Model created\")\n", "\n", @@ -612,13 +616,13 @@ "    if _PARAMS.value:\n", "        text_format.Parse(_PARAMS.value, solver.parameters)\n", "    solution_printer = cp_model.ObjectiveSolutionPrinter()\n", -    " status = solver.Solve(model, solution_printer)\n", +    " status = solver.solve(model, solution_printer)\n", "\n", "    ### Output the solution.\n", "    if status in (cp_model.OPTIMAL, cp_model.FEASIBLE):\n", "        print(\n", -    " \"Loss = %i, time = %.2f s, %i conflicts\"\n", -    "            % (solver.ObjectiveValue(), solver.WallTime(), solver.NumConflicts())\n", +    " f\"Loss = {solver.objective_value}, time = {solver.wall_time:.2f} s,\"\n", +    "            f\" {solver.num_conflicts} conflicts\"\n", "        )\n", "    else:\n", "        print(\"No solution\")\n", diff --git a/examples/notebook/examples/sudoku_sat.ipynb b/examples/notebook/examples/sudoku_sat.ipynb index 97370b3c599..85bf780cb12 100644 --- 
a/examples/notebook/examples/sudoku_sat.ipynb +++ b/examples/notebook/examples/sudoku_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -86,7 +86,7 @@ "from ortools.sat.python import cp_model\n", "\n", "\n", - "def solve_sudoku():\n", + "def solve_sudoku() -> None:\n", " \"\"\"Solves the sudoku problem with the CP-SAT solver.\"\"\"\n", " # Create the model.\n", " model = cp_model.CpModel()\n", @@ -111,15 +111,15 @@ " grid = {}\n", " for i in line:\n", " for j in line:\n", - " grid[(i, j)] = model.NewIntVar(1, line_size, \"grid %i %i\" % (i, j))\n", + " grid[(i, j)] = model.new_int_var(1, line_size, \"grid %i %i\" % (i, j))\n", "\n", " # AllDifferent on rows.\n", " for i in line:\n", - " model.AddAllDifferent(grid[(i, j)] for j in line)\n", + " model.add_all_different(grid[(i, j)] for j in line)\n", "\n", " # AllDifferent on columns.\n", " for j in line:\n", - " model.AddAllDifferent(grid[(i, j)] for i in line)\n", + " model.add_all_different(grid[(i, j)] for i in line)\n", "\n", " # AllDifferent on cells.\n", " for i in cell:\n", @@ -129,20 +129,20 @@ " for dj in cell:\n", " one_cell.append(grid[(i * cell_size + di, j * cell_size + dj)])\n", "\n", - " model.AddAllDifferent(one_cell)\n", + " model.add_all_different(one_cell)\n", "\n", " # Initial values.\n", " for i in line:\n", " for j in line:\n", " if initial_grid[i][j]:\n", - " model.Add(grid[(i, j)] == initial_grid[i][j])\n", + " model.add(grid[(i, j)] == initial_grid[i][j])\n", "\n", - " # Solve and print out the solution.\n", + " # Solves and prints out the solution.\n", " solver = cp_model.CpSolver()\n", - " status = solver.Solve(model)\n", + " status = solver.solve(model)\n", " if status == cp_model.OPTIMAL:\n", " for i in line:\n", - " print([int(solver.Value(grid[(i, j)])) for j in line])\n", + " print([int(solver.value(grid[(i, j)])) for j in line])\n", "\n", "\n", "solve_sudoku()\n", diff --git a/examples/notebook/examples/task_allocation_sat.ipynb b/examples/notebook/examples/task_allocation_sat.ipynb index a4e9d043ac2..ab7e77752d1 100644 --- a/examples/notebook/examples/task_allocation_sat.ipynb +++ b/examples/notebook/examples/task_allocation_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
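The sudoku_sat hunks above swap `AddAllDifferent` for `add_all_different` on rows, columns, and cells. For readers tracking the rename, a minimal self-contained sketch of the snake-case form (a toy 4-variable model, not taken from the notebook):

```python
from ortools.sat.python import cp_model

# Four cells over 1..4 that must all take distinct values, using the
# snake_case API these notebooks migrate to.
model = cp_model.CpModel()
cells = [model.new_int_var(1, 4, f"cell_{i}") for i in range(4)]
model.add_all_different(cells)

solver = cp_model.CpSolver()
if solver.solve(model) in (cp_model.OPTIMAL, cp_model.FEASIBLE):
    print([solver.value(c) for c in cells])
```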
] }, { @@ -91,7 +91,7 @@ "from ortools.sat.python import cp_model\n", "\n", "\n", -    "def task_allocation_sat():\n", +    "def task_allocation_sat() -> None:\n", "    \"\"\"Solves the task allocation problem.\"\"\"\n", "    # Availability matrix.\n", "    available = [\n", @@ -314,12 +314,12 @@ "    assign = {}\n", "    for task in all_tasks:\n", "        for slot in all_slots:\n", -    " assign[(task, slot)] = model.NewBoolVar(\"x[%i][%i]\" % (task, slot))\n", -    " count = model.NewIntVar(0, nslots, \"count\")\n", -    " slot_used = [model.NewBoolVar(\"slot_used[%i]\" % s) for s in all_slots]\n", +    " assign[(task, slot)] = model.new_bool_var(f\"x[{task}][{slot}]\")\n", +    " count = model.new_int_var(0, nslots, \"count\")\n", +    " slot_used = [model.new_bool_var(f\"slot_used[{s}]\") for s in all_slots]\n", "\n", "    for task in all_tasks:\n", -    " model.Add(\n", +    " model.add(\n", "            sum(\n", "                assign[(task, slot)] for slot in all_slots if available[task][slot] == 1\n", "            )\n", @@ -327,38 +327,33 @@ "        )\n", "\n", "    for slot in all_slots:\n", -    " model.Add(\n", +    " model.add(\n", "            sum(\n", "                assign[(task, slot)] for task in all_tasks if available[task][slot] == 1\n", "            )\n", "            <= capacity\n", "        )\n", -    " model.AddBoolOr(\n", +    " model.add_bool_or(\n", "            [assign[(task, slot)] for task in all_tasks if available[task][slot] == 1]\n", -    " ).OnlyEnforceIf(slot_used[slot])\n", +    " ).only_enforce_if(slot_used[slot])\n", "        for task in all_tasks:\n", "            if available[task][slot] == 1:\n", -    " model.AddImplication(slot_used[slot].Not(), assign[(task, slot)].Not())\n", +    " model.add_implication(~slot_used[slot], ~assign[(task, slot)])\n", "            else:\n", -    " model.Add(assign[(task, slot)] == 0)\n", +    " model.add(assign[(task, slot)] == 0)\n", "\n", -    " model.Add(count == sum(slot_used))\n", +    " model.add(count == sum(slot_used))\n", "    # Redundant constraint. This instance is easier if we add this constraint.\n", -    " # model.Add(count >= (nslots + capacity - 1) // capacity)\n", +    " # model.add(count >= (nslots + capacity - 1) // capacity)\n", "\n", -    " model.Minimize(count)\n", +    " model.minimize(count)\n", "\n", "    # Create a solver and solve the problem.\n", "    solver = cp_model.CpSolver()\n", "    # Uses the portfolio of heuristics.\n", "    solver.parameters.log_search_progress = True\n", "    solver.parameters.num_search_workers = 16\n", -    " status = solver.Solve(model)\n", -    "\n", -    " print(\"Statistics\")\n", -    " print(\" - status =\", solver.StatusName(status))\n", -    " print(\" - optimal solution =\", solver.ObjectiveValue())\n", -    " print(\" - wall time : %f s\" % solver.WallTime())\n", +    " solver.solve(model)\n", "\n", "\n", "def main(argv: Sequence[str]) -> None:\n", diff --git a/examples/notebook/examples/tasks_and_workers_assignment_sat.ipynb b/examples/notebook/examples/tasks_and_workers_assignment_sat.ipynb index c5a309712fe..2bfdd2250f9 100644 --- a/examples/notebook/examples/tasks_and_workers_assignment_sat.ipynb +++ b/examples/notebook/examples/tasks_and_workers_assignment_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ -    "##### Copyright 2023 Google LLC." +    "##### Copyright 2024 Google LLC."
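The task_allocation_sat hunk above is the clearest illustration in this change of the new literal negation: `slot_used[slot].Not()` becomes `~slot_used[slot]`, and `OnlyEnforceIf` becomes `only_enforce_if`. A minimal sketch of that channeling pattern in isolation (a hypothetical three-literal slot, not lifted from the notebook):

```python
from ortools.sat.python import cp_model

# slot_used must be true iff at least one assignment literal is true.
model = cp_model.CpModel()
assigns = [model.new_bool_var(f"assign_{t}") for t in range(3)]
slot_used = model.new_bool_var("slot_used")

# If the slot is used, some task must be assigned to it...
model.add_bool_or(assigns).only_enforce_if(slot_used)
# ...and if it is unused, no task may be assigned (~lit replaces lit.Not()).
for lit in assigns:
    model.add_implication(~slot_used, ~lit)

solver = cp_model.CpSolver()
solver.solve(model)
```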
] }, { @@ -97,13 +97,13 @@ "    def on_solution_callback(self):\n", "        print(\n", "            \"Solution %i, time = %f s, objective = %i\"\n", -    "            % (self.__solution_count, self.WallTime(), self.ObjectiveValue())\n", +    "            % (self.__solution_count, self.wall_time, self.objective_value)\n", "        )\n", "        self.__solution_count += 1\n", "\n", "\n", -    "def tasks_and_workers_assignment_sat():\n", -    " \"\"\"Solve the assignment problem.\"\"\"\n", +    "def tasks_and_workers_assignment_sat() -> None:\n", +    " \"\"\"Solve the assignment problem.\"\"\"\n", "    model = cp_model.CpModel()\n", "\n", "    # CP-SAT solver is integer only.\n", @@ -121,71 +121,71 @@ "    x = {}\n", "    for i in all_workers:\n", "        for j in all_groups:\n", -    " x[i, j] = model.NewBoolVar(\"x[%i,%i]\" % (i, j))\n", +    " x[i, j] = model.new_bool_var(\"x[%i,%i]\" % (i, j))\n", "\n", "    ## y_kj is 1 if task k is assigned to group j\n", "    y = {}\n", "    for k in all_tasks:\n", "        for j in all_groups:\n", -    " y[k, j] = model.NewBoolVar(\"x[%i,%i]\" % (k, j))\n", +    " y[k, j] = model.new_bool_var(\"y[%i,%i]\" % (k, j))\n", "\n", "    # Constraints\n", "\n", "    # Each task k is assigned to a group and only one.\n", "    for k in all_tasks:\n", -    " model.Add(sum(y[k, j] for j in all_groups) == 1)\n", +    " model.add(sum(y[k, j] for j in all_groups) == 1)\n", "\n", "    # Each worker i is assigned to a group and only one.\n", "    for i in all_workers:\n", -    " model.Add(sum(x[i, j] for j in all_groups) == 1)\n", +    " model.add(sum(x[i, j] for j in all_groups) == 1)\n", "\n", -    " # cost per group\n", +    " # Cost per group\n", "    sum_of_costs = sum(task_cost)\n", "    averages = []\n", "    num_workers_in_group = []\n", "    scaled_sum_of_costs_in_group = []\n", "    scaling = 1000  # We introduce scaling to deal with floating point average.\n", "    for j in all_groups:\n", -    " n = model.NewIntVar(1, num_workers, \"num_workers_in_group_%i\" % j)\n", -    " model.Add(n == sum(x[i, j] for i in all_workers))\n", -    " c = model.NewIntVar(0, sum_of_costs * scaling, \"sum_of_costs_of_group_%i\" % j)\n", -    " model.Add(c == sum(y[k, j] * task_cost[k] * scaling for k in all_tasks))\n", -    " a = model.NewIntVar(0, sum_of_costs * scaling, \"average_cost_of_group_%i\" % j)\n", -    " model.AddDivisionEquality(a, c, n)\n", +    " n = model.new_int_var(1, num_workers, \"num_workers_in_group_%i\" % j)\n", +    " model.add(n == sum(x[i, j] for i in all_workers))\n", +    " c = model.new_int_var(0, sum_of_costs * scaling, \"sum_of_costs_of_group_%i\" % j)\n", +    " model.add(c == sum(y[k, j] * task_cost[k] * scaling for k in all_tasks))\n", +    " a = model.new_int_var(0, sum_of_costs * scaling, \"average_cost_of_group_%i\" % j)\n", +    " model.add_division_equality(a, c, n)\n", "\n", "        averages.append(a)\n", "        num_workers_in_group.append(n)\n", "        scaled_sum_of_costs_in_group.append(c)\n", "\n", "    # All workers are assigned.\n", -    " model.Add(sum(num_workers_in_group) == num_workers)\n", +    " model.add(sum(num_workers_in_group) == num_workers)\n", "\n", "    # Objective.\n", -    " obj = model.NewIntVar(0, sum_of_costs * scaling, \"obj\")\n", -    " model.AddMaxEquality(obj, averages)\n", -    " model.Minimize(obj)\n", +    " obj = model.new_int_var(0, sum_of_costs * scaling, \"obj\")\n", +    " model.add_max_equality(obj, averages)\n", +    " model.minimize(obj)\n", "\n", "    # Solve and print out the solution.\n", "    solver = cp_model.CpSolver()\n", "    solver.parameters.max_time_in_seconds = 60 * 60 * 2\n", "    objective_printer = ObjectivePrinter()\n", -    " status = solver.Solve(model, objective_printer)\n", -    " print(solver.ResponseStats())\n", +    " status = 
solver.solve(model, objective_printer)\n", + " print(solver.response_stats())\n", "\n", " if status == cp_model.OPTIMAL:\n", " for j in all_groups:\n", " print(\"Group %i\" % j)\n", " for i in all_workers:\n", - " if solver.BooleanValue(x[i, j]):\n", + " if solver.boolean_value(x[i, j]):\n", " print(\" - worker %i\" % i)\n", " for k in all_tasks:\n", - " if solver.BooleanValue(y[k, j]):\n", + " if solver.boolean_value(y[k, j]):\n", " print(\" - task %i with cost %i\" % (k, task_cost[k]))\n", " print(\n", " \" - sum_of_costs = %i\"\n", - " % (solver.Value(scaled_sum_of_costs_in_group[j]) // scaling)\n", + " % (solver.value(scaled_sum_of_costs_in_group[j]) // scaling)\n", " )\n", - " print(\" - average cost = %f\" % (solver.Value(averages[j]) * 1.0 / scaling))\n", + " print(\" - average cost = %f\" % (solver.value(averages[j]) * 1.0 / scaling))\n", "\n", "\n", "tasks_and_workers_assignment_sat()\n", diff --git a/examples/notebook/examples/test_scheduling_sat.ipynb b/examples/notebook/examples/test_scheduling_sat.ipynb new file mode 100644 index 00000000000..fd94dc99cd0 --- /dev/null +++ b/examples/notebook/examples/test_scheduling_sat.ipynb @@ -0,0 +1,251 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "google", + "metadata": {}, + "source": [ + "##### Copyright 2024 Google LLC." + ] + }, + { + "cell_type": "markdown", + "id": "apache", + "metadata": {}, + "source": [ + "Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "you may not use this file except in compliance with the License.\n", + "You may obtain a copy of the License at\n", + "\n", + " http://www.apache.org/licenses/LICENSE-2.0\n", + "\n", + "Unless required by applicable law or agreed to in writing, software\n", + "distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "See the License for the specific language governing permissions and\n", + "limitations under the License.\n" + ] + }, + { + "cell_type": "markdown", + "id": "basename", + "metadata": {}, + "source": [ + "# test_scheduling_sat" + ] + }, + { + "cell_type": "markdown", + "id": "link", + "metadata": {}, + "source": [ + "\n", + "\n", + "\n", + "
\n", + "Run in Google Colab\n", + "\n", + "View source on GitHub\n", + "
" + ] + }, + { + "cell_type": "markdown", + "id": "doc", + "metadata": {}, + "source": [ + "First, you must install [ortools](https://pypi.org/project/ortools/) package in this colab." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "install", + "metadata": {}, + "outputs": [], + "source": [ + "%pip install ortools" + ] + }, + { + "cell_type": "markdown", + "id": "description", + "metadata": {}, + "source": [ + "\n", + "Solves a test scheduling problem.\n", + "\n", + "Tests must be run by an operator. Tests have a duration and a power consumption.\n", + "\n", + "Operators draw power from power supplies. The mapping between operators and\n", + "power supplies is given.\n", + "\n", + "Power supplies have a maximum power they can deliver.\n", + "\n", + "Can we schedule the tests so that the power consumption of each power supply is\n", + "always below its maximum power, and the total makespan is minimized?\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "code", + "metadata": {}, + "outputs": [], + "source": [ + "from collections.abc import Sequence\n", + "import io\n", + "from typing import Dict, Tuple\n", + "\n", + "from ortools.sat.colab import flags\n", + "import pandas as pd\n", + "\n", + "from google.protobuf import text_format\n", + "from ortools.sat.python import cp_model\n", + "\n", + "\n", + "_PARAMS = flags.define_string(\n", + " \"params\",\n", + " \"num_search_workers:16,log_search_progress:true,max_time_in_seconds:45\",\n", + " \"Sat solver parameters.\",\n", + ")\n", + "\n", + "\n", + "def build_data() -> tuple[pd.DataFrame, pd.DataFrame, pd.DataFrame]:\n", + " \"\"\"Build the data frame.\"\"\"\n", + " tests_str = \"\"\"\n", + " Name Operator TestTime AveragePower\n", + " T1 O1 300 200\n", + " T2 O1 150 40\n", + " T3 O2 100 65\n", + " T4 O2 250 150\n", + " T5 O3 210 140\n", + " \"\"\"\n", + "\n", + " operators_str = \"\"\"\n", + " Operator Supply\n", + " O1 S1\n", + " O2 S2\n", + " O3 S2\n", + " \"\"\"\n", + "\n", + " supplies_str = \"\"\"\n", + " Supply MaxAllowedPower\n", + " S1 230\n", + " S2 210\n", + " \"\"\"\n", + "\n", + " tests_data = pd.read_table(io.StringIO(tests_str), sep=r\"\\s+\")\n", + " operators_data = pd.read_table(io.StringIO(operators_str), sep=r\"\\s+\")\n", + " supplies_data = pd.read_table(io.StringIO(supplies_str), sep=r\"\\s+\")\n", + "\n", + " return (tests_data, operators_data, supplies_data)\n", + "\n", + "\n", + "def solve(\n", + " tests_data: pd.DataFrame,\n", + " operator_data: pd.DataFrame,\n", + " supplies_data: pd.DataFrame,\n", + ") -> None:\n", + " \"\"\"Solve the scheduling of tests problem.\"\"\"\n", + "\n", + " # Parses data.\n", + " operator_to_supply: Dict[str, str] = {}\n", + " for _, row in operator_data.iterrows():\n", + " operator_to_supply[row[\"Operator\"]] = row[\"Supply\"]\n", + "\n", + " supply_to_max_power: Dict[str, int] = {}\n", + " for _, row in supplies_data.iterrows():\n", + " supply_to_max_power[row[\"Supply\"]] = row[\"MaxAllowedPower\"]\n", + "\n", + " horizon = tests_data[\"TestTime\"].sum()\n", + "\n", + " # OR-Tools model.\n", + " model = cp_model.CpModel()\n", + "\n", + " # Create containers.\n", + " tests_per_supply: Dict[str, Tuple[list[cp_model.IntervalVar], list[int]]] = {}\n", + " test_supply: Dict[str, str] = {}\n", + " test_starts: Dict[str, cp_model.IntVar] = {}\n", + " test_durations: Dict[str, int] = {}\n", + " test_powers: Dict[str, int] = {}\n", + " all_ends = []\n", + "\n", + " # Creates intervals.\n", + " for _, row in tests_data.iterrows():\n", + 
" name: str = row[\"Name\"]\n", + " operator: str = row[\"Operator\"]\n", + " test_time: int = row[\"TestTime\"]\n", + " average_power: int = row[\"AveragePower\"]\n", + " supply: str = operator_to_supply[operator]\n", + "\n", + " start = model.new_int_var(0, horizon - test_time, f\"start_{name}\")\n", + " interval = model.new_fixed_size_interval_var(\n", + " start, test_time, f\"interval_{name}\"\n", + " )\n", + "\n", + " # Bookkeeping.\n", + " test_starts[name] = start\n", + " test_durations[name] = test_time\n", + " test_powers[name] = average_power\n", + " test_supply[name] = supply\n", + " if supply not in tests_per_supply.keys():\n", + " tests_per_supply[supply] = ([], [])\n", + " tests_per_supply[supply][0].append(interval)\n", + " tests_per_supply[supply][1].append(average_power)\n", + " all_ends.append(start + test_time)\n", + "\n", + " # Create supply cumulative constraints.\n", + " for supply, (intervals, demands) in tests_per_supply.items():\n", + " model.add_cumulative(intervals, demands, supply_to_max_power[supply])\n", + "\n", + " # Objective.\n", + " makespan = model.new_int_var(0, horizon, \"makespan\")\n", + " for end in all_ends:\n", + " model.add(makespan >= end)\n", + " model.minimize(makespan)\n", + "\n", + " # Solve model.\n", + " solver = cp_model.CpSolver()\n", + " if _PARAMS.value:\n", + " text_format.Parse(_PARAMS.value, solver.parameters)\n", + " status = solver.solve(model)\n", + "\n", + " # Report solution.\n", + " if status == cp_model.OPTIMAL or status == cp_model.FEASIBLE:\n", + " print(f\"Makespan = {solver.value(makespan)}\")\n", + " for name, start in test_starts.items():\n", + " print(\n", + " f\"{name}: start:{solver.value(start)} duration:{test_durations[name]}\"\n", + " f\" power:{test_powers[name]} on supply {test_supply[name]}\"\n", + " )\n", + "\n", + "\n", + "def main(argv: Sequence[str]) -> None:\n", + " \"\"\"Builds the data and solve the scheduling problem.\"\"\"\n", + " if len(argv) > 1:\n", + " raise app.UsageError(\"Too many command-line arguments.\")\n", + "\n", + " tests_data, operators_data, supplies_data = build_data()\n", + " print(\"Tests data\")\n", + " print(tests_data)\n", + " print()\n", + " print(\"Operators data\")\n", + " print(operators_data)\n", + " print()\n", + " print(\"Supplies data\")\n", + " print(supplies_data)\n", + "\n", + " solve(tests_data, operators_data, supplies_data)\n", + "\n", + "\n", + "main()\n", + "\n" + ] + } + ], + "metadata": {}, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/notebook/examples/transit_time.ipynb b/examples/notebook/examples/transit_time.ipynb index bcd56870890..073c4d9aa77 100644 --- a/examples/notebook/examples/transit_time.ipynb +++ b/examples/notebook/examples/transit_time.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/examples/tsp_sat.ipynb b/examples/notebook/examples/tsp_sat.ipynb index 9030d272c94..8704e021603 100644 --- a/examples/notebook/examples/tsp_sat.ipynb +++ b/examples/notebook/examples/tsp_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { @@ -151,17 +151,17 @@ " if i == j:\n", " continue\n", "\n", - " lit = model.NewBoolVar(\"%i follows %i\" % (j, i))\n", + " lit = model.new_bool_var(\"%i follows %i\" % (j, i))\n", " arcs.append((i, j, lit))\n", " arc_literals[i, j] = lit\n", "\n", " obj_vars.append(lit)\n", " obj_coeffs.append(DISTANCE_MATRIX[i][j])\n", "\n", - " model.AddCircuit(arcs)\n", + " model.add_circuit(arcs)\n", "\n", " # Minimize weighted sum of arcs. Because this s\n", - " model.Minimize(sum(obj_vars[i] * obj_coeffs[i] for i in range(len(obj_vars))))\n", + " model.minimize(sum(obj_vars[i] * obj_coeffs[i] for i in range(len(obj_vars))))\n", "\n", " # Solve and print out the solution.\n", " solver = cp_model.CpSolver()\n", @@ -169,8 +169,8 @@ " # To benefit from the linearization of the circuit constraint.\n", " solver.parameters.linearization_level = 2\n", "\n", - " solver.Solve(model)\n", - " print(solver.ResponseStats())\n", + " solver.solve(model)\n", + " print(solver.response_stats())\n", "\n", " current_node = 0\n", " str_route = \"%i\" % current_node\n", @@ -180,7 +180,7 @@ " for i in all_nodes:\n", " if i == current_node:\n", " continue\n", - " if solver.BooleanValue(arc_literals[current_node, i]):\n", + " if solver.boolean_value(arc_literals[current_node, i]):\n", " str_route += \" -> %i\" % i\n", " route_distance += DISTANCE_MATRIX[current_node][i]\n", " current_node = i\n", diff --git a/examples/notebook/examples/vendor_scheduling_sat.ipynb b/examples/notebook/examples/vendor_scheduling_sat.ipynb index 40cf9603c6f..f23d5dc94b3 100644 --- a/examples/notebook/examples/vendor_scheduling_sat.ipynb +++ b/examples/notebook/examples/vendor_scheduling_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
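The tsp_sat hunk above keeps the (tail, head, literal) arc encoding and only renames `AddCircuit` to `add_circuit`. A minimal sketch of that encoding on a hypothetical 3-node instance:

```python
from ortools.sat.python import cp_model

# Tiny asymmetric TSP: add_circuit forces the chosen arcs into one tour.
DISTANCE = [[0, 2, 9], [1, 0, 6], [15, 7, 0]]
model = cp_model.CpModel()
arcs = []
obj_terms = []
for i in range(3):
    for j in range(3):
        if i == j:
            continue
        lit = model.new_bool_var(f"{j} follows {i}")
        arcs.append((i, j, lit))
        obj_terms.append(DISTANCE[i][j] * lit)

model.add_circuit(arcs)
model.minimize(sum(obj_terms))

solver = cp_model.CpSolver()
if solver.solve(model) == cp_model.OPTIMAL:
    print("tour length:", solver.objective_value)
```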
] }, { @@ -116,13 +116,13 @@ "        for i in range(self.__num_vendors):\n", "            print(\n", "                \"  - vendor %i: \" % i,\n", -    "                self.__possible_schedules[self.Value(self.__selected_schedules[i])],\n", +    "                self.__possible_schedules[self.value(self.__selected_schedules[i])],\n", "            )\n", "        print()\n", "\n", "        for j in range(self.__num_hours):\n", "            print(\"  - # workers on hour%2i: \" % j, end=\" \")\n", -    "            print(self.Value(self.__hours_stat[j]), end=\" \")\n", +    "            print(self.value(self.__hours_stat[j]), end=\" \")\n", "            print()\n", "        print()\n", "\n", @@ -131,7 +131,7 @@ "        return self.__solution_count\n", "\n", "\n", -    "def vendor_scheduling_sat():\n", +    "def vendor_scheduling_sat() -> None:\n", "    \"\"\"Create the shift scheduling model and solve it.\"\"\"\n", "    # Create the model.\n", "    model = cp_model.CpModel()\n", @@ -169,38 +169,40 @@ "    all_hours = range(num_hours)\n", "\n", "    #\n", -    " # declare variables\n", +    " # Declare variables\n", "    #\n", "    x = {}\n", "\n", "    for v in all_vendors:\n", "        tmp = []\n", "        for h in all_hours:\n", -    " x[v, h] = model.NewIntVar(0, num_work_types, \"x[%i,%i]\" % (v, h))\n", +    " x[v, h] = model.new_int_var(0, num_work_types, \"x[%i,%i]\" % (v, h))\n", "            tmp.append(x[v, h])\n", -    " selected_schedule = model.NewIntVar(0, num_possible_schedules - 1, \"s[%i]\" % v)\n", -    " hours = model.NewIntVar(0, num_hours, \"h[%i]\" % v)\n", +    " selected_schedule = model.new_int_var(\n", +    "            0, num_possible_schedules - 1, \"s[%i]\" % v\n", +    "        )\n", +    " hours = model.new_int_var(0, num_hours, \"h[%i]\" % v)\n", "        selected_schedules.append(selected_schedule)\n", "        vendors_stat.append(hours)\n", "        tmp.append(selected_schedule)\n", "        tmp.append(hours)\n", "\n", -    " model.AddAllowedAssignments(tmp, possible_schedules)\n", +    " model.add_allowed_assignments(tmp, possible_schedules)\n", "\n", "    #\n", "    # Statistics and constraints for each hour\n", "    #\n", "    for h in all_hours:\n", -    " workers = model.NewIntVar(0, 1000, \"workers[%i]\" % h)\n", -    " model.Add(workers == sum(x[v, h] for v in all_vendors))\n", +    " workers = model.new_int_var(0, 1000, \"workers[%i]\" % h)\n", +    " model.add(workers == sum(x[v, h] for v in all_vendors))\n", "        hours_stat.append(workers)\n", -    " model.Add(workers * max_traffic_per_vendor >= traffic[h])\n", +    " model.add(workers * max_traffic_per_vendor >= traffic[h])\n", "\n", "    #\n", "    # Redundant constraint: sort selected_schedules\n", "    #\n", "    for v in range(num_vendors - 1):\n", -    " model.Add(selected_schedules[v] <= selected_schedules[v + 1])\n", +    " model.add(selected_schedules[v] <= selected_schedules[v + 1])\n", "\n", "    # Solve model.\n", "    solver = cp_model.CpSolver()\n", @@ -213,13 +215,13 @@ "        hours_stat,\n", "        min_vendors,\n", "    )\n", -    " status = solver.Solve(model, solution_printer)\n", -    " print(\"Status = %s\" % solver.StatusName(status))\n", +    " status = solver.solve(model, solution_printer)\n", +    " print(\"Status = %s\" % solver.status_name(status))\n", "\n", "    print(\"Statistics\")\n", -    " print(\" - conflicts : %i\" % solver.NumConflicts())\n", -    " print(\" - branches : %i\" % solver.NumBranches())\n", -    " print(\" - wall time : %f s\" % solver.WallTime())\n", +    " print(\" - conflicts : %i\" % solver.num_conflicts)\n", +    " print(\" - branches : %i\" % solver.num_branches)\n", +    " print(\" - wall time : %f s\" % solver.wall_time)\n", "    print(\" - number of solutions found: %i\" % solution_printer.solution_count())\n", "\n", "\n", diff --git a/examples/notebook/examples/wedding_optimal_chart_sat.ipynb 
b/examples/notebook/examples/wedding_optimal_chart_sat.ipynb index 745f65c58bf..c27f1c2c783 100644 --- a/examples/notebook/examples/wedding_optimal_chart_sat.ipynb +++ b/examples/notebook/examples/wedding_optimal_chart_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -124,7 +124,7 @@ "\n", " def on_solution_callback(self):\n", " current_time = time.time()\n", - " objective = self.ObjectiveValue()\n", + " objective = self.objective_value\n", " print(\n", " \"Solution %i, time = %f s, objective = %i\"\n", " % (self.__solution_count, current_time - self.__start_time, objective)\n", @@ -134,10 +134,10 @@ " for t in range(self.__num_tables):\n", " print(\"Table %d: \" % t)\n", " for g in range(self.__num_guests):\n", - " if self.Value(self.__seats[(t, g)]):\n", + " if self.value(self.__seats[(t, g)]):\n", " print(\" \" + self.__names[g])\n", "\n", - " def num_solutions(self):\n", + " def num_solutions(self) -> int:\n", " return self.__solution_count\n", "\n", "\n", @@ -198,7 +198,7 @@ " return num_tables, table_capacity, min_known_neighbors, connections, names\n", "\n", "\n", - "def solve_with_discrete_model():\n", + "def solve_with_discrete_model() -> None:\n", " \"\"\"Discrete approach.\"\"\"\n", " num_tables, table_capacity, min_known_neighbors, connections, names = build_data()\n", "\n", @@ -216,12 +216,12 @@ " seats = {}\n", " for t in all_tables:\n", " for g in all_guests:\n", - " seats[(t, g)] = model.NewBoolVar(\"guest %i seats on table %i\" % (g, t))\n", + " seats[(t, g)] = model.new_bool_var(\"guest %i seats on table %i\" % (g, t))\n", "\n", " colocated = {}\n", " for g1 in range(num_guests - 1):\n", " for g2 in range(g1 + 1, num_guests):\n", - " colocated[(g1, g2)] = model.NewBoolVar(\n", + " colocated[(g1, g2)] = model.new_bool_var(\n", " \"guest %i seats with guest %i\" % (g1, g2)\n", " )\n", "\n", @@ -229,12 +229,12 @@ " for g1 in range(num_guests - 1):\n", " for g2 in range(g1 + 1, num_guests):\n", " for t in all_tables:\n", - " same_table[(g1, g2, t)] = model.NewBoolVar(\n", + " same_table[(g1, g2, t)] = model.new_bool_var(\n", " \"guest %i seats with guest %i on table %i\" % (g1, g2, t)\n", " )\n", "\n", " # Objective\n", - " model.Maximize(\n", + " model.maximize(\n", " sum(\n", " connections[g1][g2] * colocated[g1, g2]\n", " for g1 in range(num_guests - 1)\n", @@ -249,35 +249,35 @@ "\n", " # Everybody seats at one table.\n", " for g in all_guests:\n", - " model.Add(sum(seats[(t, g)] for t in all_tables) == 1)\n", + " model.add(sum(seats[(t, g)] for t in all_tables) == 1)\n", "\n", " # Tables have a max capacity.\n", " for t in all_tables:\n", - " model.Add(sum(seats[(t, g)] for g in all_guests) <= table_capacity)\n", + " model.add(sum(seats[(t, g)] for g in all_guests) <= table_capacity)\n", "\n", " # Link colocated with seats\n", " for g1 in range(num_guests - 1):\n", " for g2 in range(g1 + 1, num_guests):\n", " for t in all_tables:\n", " # Link same_table and seats.\n", - " model.AddBoolOr(\n", + " model.add_bool_or(\n", " [\n", - " seats[(t, g1)].Not(),\n", - " seats[(t, g2)].Not(),\n", + " ~seats[(t, g1)],\n", + " ~seats[(t, g2)],\n", " same_table[(g1, g2, t)],\n", " ]\n", " )\n", - " model.AddImplication(same_table[(g1, g2, t)], seats[(t, g1)])\n", - " model.AddImplication(same_table[(g1, g2, t)], seats[(t, g2)])\n", + " model.add_implication(same_table[(g1, g2, t)], seats[(t, g1)])\n", + " model.add_implication(same_table[(g1, g2, t)], seats[(t, g2)])\n", "\n", " # 
Link colocated and same_table.\n", -    " model.Add(\n", +    " model.add(\n", "                sum(same_table[(g1, g2, t)] for t in all_tables) == colocated[(g1, g2)]\n", "            )\n", "\n", "    # Min known neighbors rule.\n", "    for g in all_guests:\n", -    " model.Add(\n", +    " model.add(\n", "            sum(\n", "                same_table[(g, g2, t)]\n", "                for g2 in range(g + 1, num_guests)\n", @@ -294,17 +294,17 @@ "    )\n", "\n", "    # Symmetry breaking. First guest seats on the first table.\n", -    " model.Add(seats[(0, 0)] == 1)\n", +    " model.add(seats[(0, 0)] == 1)\n", "\n", "    ### Solve model.\n", "    solver = cp_model.CpSolver()\n", "    solution_printer = WeddingChartPrinter(seats, names, num_tables, num_guests)\n", -    " solver.Solve(model, solution_printer)\n", +    " solver.solve(model, solution_printer)\n", "\n", "    print(\"Statistics\")\n", -    " print(\" - conflicts : %i\" % solver.NumConflicts())\n", -    " print(\" - branches : %i\" % solver.NumBranches())\n", -    " print(\" - wall time : %f s\" % solver.WallTime())\n", +    " print(\" - conflicts : %i\" % solver.num_conflicts)\n", +    " print(\" - branches : %i\" % solver.num_branches)\n", +    " print(\" - wall time : %f s\" % solver.wall_time)\n", "    print(\" - num solutions: %i\" % solution_printer.num_solutions())\n", "\n", "\n", diff --git a/examples/notebook/examples/weighted_latency_problem_sat.ipynb b/examples/notebook/examples/weighted_latency_problem_sat.ipynb index 6baf3f9073c..5267fe8663a 100644 --- a/examples/notebook/examples/weighted_latency_problem_sat.ipynb +++ b/examples/notebook/examples/weighted_latency_problem_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ -    "##### Copyright 2023 Google LLC." +    "##### Copyright 2024 Google LLC." ] }, { @@ -122,16 +122,18 @@ "    return x, y, profits\n", "\n", "\n", -    "def solve_with_cp_sat(x, y, profits):\n", +    "def solve_with_cp_sat(x, y, profits) -> None:\n", "    \"\"\"Solves the problem with the CP-SAT solver.\"\"\"\n", "    model = cp_model.CpModel()\n", "\n", "    # because of the manhattan distance, the sum of distances is bounded by this.\n", "    horizon = _GRID_SIZE.value * 2 * _NUM_NODES.value\n", -    " times = [model.NewIntVar(0, horizon, f\"x_{i}\") for i in range(_NUM_NODES.value + 1)]\n", +    " times = [\n", +    "        model.new_int_var(0, horizon, f\"x_{i}\") for i in range(_NUM_NODES.value + 1)\n", +    "    ]\n", "\n", "    # Node 0 is the start node.\n", -    " model.Add(times[0] == 0)\n", +    " model.add(times[0] == 0)\n", "\n", "    # Create the circuit constraint.\n", "    arcs = []\n", @@ -141,29 +143,29 @@ "                continue\n", "            # We use a manhattan distance between nodes.\n", "            distance = abs(x[i] - x[j]) + abs(y[i] - y[j])\n", -    " lit = model.NewBoolVar(f\"{i}_to_{j}\")\n", +    " lit = model.new_bool_var(f\"{i}_to_{j}\")\n", "            arcs.append((i, j, lit))\n", "\n", "            # Add transitions between nodes.\n", "            if i == 0:\n", "                # Initial transition\n", -    " model.Add(times[j] == distance).OnlyEnforceIf(lit)\n", +    " model.add(times[j] == distance).only_enforce_if(lit)\n", "            elif j != 0:\n", "                # We do not care for the last transition.\n", -    " model.Add(times[j] == times[i] + distance).OnlyEnforceIf(lit)\n", -    " model.AddCircuit(arcs)\n", +    " model.add(times[j] == times[i] + distance).only_enforce_if(lit)\n", +    " model.add_circuit(arcs)\n", "\n", -    " model.Minimize(cp_model.LinearExpr.WeightedSum(times, profits))\n", +    " 
model.minimize(cp_model.LinearExpr.weighted_sum(times, profits))\n", "\n", " if _PROTO_FILE.value:\n", - " model.ExportToFile(_PROTO_FILE.value)\n", + " model.export_to_file(_PROTO_FILE.value)\n", "\n", " # Solve model.\n", " solver = cp_model.CpSolver()\n", " if _PARAMS.value:\n", " text_format.Parse(_PARAMS.value, solver.parameters)\n", " solver.parameters.log_search_progress = True\n", - " solver.Solve(model)\n", + " solver.solve(model)\n", "\n", "\n", "def main(argv: Sequence[str]) -> None:\n", diff --git a/examples/notebook/examples/zebra_sat.ipynb b/examples/notebook/examples/zebra_sat.ipynb index 3e8cfdfd1d2..3b784974789 100644 --- a/examples/notebook/examples/zebra_sat.ipynb +++ b/examples/notebook/examples/zebra_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -113,77 +113,77 @@ " # Create the model.\n", " model = cp_model.CpModel()\n", "\n", - " red = model.NewIntVar(1, 5, \"red\")\n", - " green = model.NewIntVar(1, 5, \"green\")\n", - " yellow = model.NewIntVar(1, 5, \"yellow\")\n", - " blue = model.NewIntVar(1, 5, \"blue\")\n", - " ivory = model.NewIntVar(1, 5, \"ivory\")\n", - "\n", - " englishman = model.NewIntVar(1, 5, \"englishman\")\n", - " spaniard = model.NewIntVar(1, 5, \"spaniard\")\n", - " japanese = model.NewIntVar(1, 5, \"japanese\")\n", - " ukrainian = model.NewIntVar(1, 5, \"ukrainian\")\n", - " norwegian = model.NewIntVar(1, 5, \"norwegian\")\n", - "\n", - " dog = model.NewIntVar(1, 5, \"dog\")\n", - " snails = model.NewIntVar(1, 5, \"snails\")\n", - " fox = model.NewIntVar(1, 5, \"fox\")\n", - " zebra = model.NewIntVar(1, 5, \"zebra\")\n", - " horse = model.NewIntVar(1, 5, \"horse\")\n", - "\n", - " tea = model.NewIntVar(1, 5, \"tea\")\n", - " coffee = model.NewIntVar(1, 5, \"coffee\")\n", - " water = model.NewIntVar(1, 5, \"water\")\n", - " milk = model.NewIntVar(1, 5, \"milk\")\n", - " fruit_juice = model.NewIntVar(1, 5, \"fruit juice\")\n", - "\n", - " old_gold = model.NewIntVar(1, 5, \"old gold\")\n", - " kools = model.NewIntVar(1, 5, \"kools\")\n", - " chesterfields = model.NewIntVar(1, 5, \"chesterfields\")\n", - " lucky_strike = model.NewIntVar(1, 5, \"lucky strike\")\n", - " parliaments = model.NewIntVar(1, 5, \"parliaments\")\n", - "\n", - " model.AddAllDifferent(red, green, yellow, blue, ivory)\n", - " model.AddAllDifferent(englishman, spaniard, japanese, ukrainian, norwegian)\n", - " model.AddAllDifferent(dog, snails, fox, zebra, horse)\n", - " model.AddAllDifferent(tea, coffee, water, milk, fruit_juice)\n", - " model.AddAllDifferent(parliaments, kools, chesterfields, lucky_strike, old_gold)\n", - "\n", - " model.Add(englishman == red)\n", - " model.Add(spaniard == dog)\n", - " model.Add(coffee == green)\n", - " model.Add(ukrainian == tea)\n", - " model.Add(green == ivory + 1)\n", - " model.Add(old_gold == snails)\n", - " model.Add(kools == yellow)\n", - " model.Add(milk == 3)\n", - " model.Add(norwegian == 1)\n", - "\n", - " diff_fox_chesterfields = model.NewIntVar(-4, 4, \"diff_fox_chesterfields\")\n", - " model.Add(diff_fox_chesterfields == fox - chesterfields)\n", - " model.AddAbsEquality(1, diff_fox_chesterfields)\n", - "\n", - " diff_horse_kools = model.NewIntVar(-4, 4, \"diff_horse_kools\")\n", - " model.Add(diff_horse_kools == horse - kools)\n", - " model.AddAbsEquality(1, diff_horse_kools)\n", - "\n", - " model.Add(lucky_strike == fruit_juice)\n", - " model.Add(japanese == parliaments)\n", - "\n", - " diff_norwegian_blue = 
model.NewIntVar(-4, 4, \"diff_norwegian_blue\")\n", - " model.Add(diff_norwegian_blue == norwegian - blue)\n", - " model.AddAbsEquality(1, diff_norwegian_blue)\n", + " red = model.new_int_var(1, 5, \"red\")\n", + " green = model.new_int_var(1, 5, \"green\")\n", + " yellow = model.new_int_var(1, 5, \"yellow\")\n", + " blue = model.new_int_var(1, 5, \"blue\")\n", + " ivory = model.new_int_var(1, 5, \"ivory\")\n", + "\n", + " englishman = model.new_int_var(1, 5, \"englishman\")\n", + " spaniard = model.new_int_var(1, 5, \"spaniard\")\n", + " japanese = model.new_int_var(1, 5, \"japanese\")\n", + " ukrainian = model.new_int_var(1, 5, \"ukrainian\")\n", + " norwegian = model.new_int_var(1, 5, \"norwegian\")\n", + "\n", + " dog = model.new_int_var(1, 5, \"dog\")\n", + " snails = model.new_int_var(1, 5, \"snails\")\n", + " fox = model.new_int_var(1, 5, \"fox\")\n", + " zebra = model.new_int_var(1, 5, \"zebra\")\n", + " horse = model.new_int_var(1, 5, \"horse\")\n", + "\n", + " tea = model.new_int_var(1, 5, \"tea\")\n", + " coffee = model.new_int_var(1, 5, \"coffee\")\n", + " water = model.new_int_var(1, 5, \"water\")\n", + " milk = model.new_int_var(1, 5, \"milk\")\n", + " fruit_juice = model.new_int_var(1, 5, \"fruit juice\")\n", + "\n", + " old_gold = model.new_int_var(1, 5, \"old gold\")\n", + " kools = model.new_int_var(1, 5, \"kools\")\n", + " chesterfields = model.new_int_var(1, 5, \"chesterfields\")\n", + " lucky_strike = model.new_int_var(1, 5, \"lucky strike\")\n", + " parliaments = model.new_int_var(1, 5, \"parliaments\")\n", + "\n", + " model.add_all_different(red, green, yellow, blue, ivory)\n", + " model.add_all_different(englishman, spaniard, japanese, ukrainian, norwegian)\n", + " model.add_all_different(dog, snails, fox, zebra, horse)\n", + " model.add_all_different(tea, coffee, water, milk, fruit_juice)\n", + " model.add_all_different(parliaments, kools, chesterfields, lucky_strike, old_gold)\n", + "\n", + " model.add(englishman == red)\n", + " model.add(spaniard == dog)\n", + " model.add(coffee == green)\n", + " model.add(ukrainian == tea)\n", + " model.add(green == ivory + 1)\n", + " model.add(old_gold == snails)\n", + " model.add(kools == yellow)\n", + " model.add(milk == 3)\n", + " model.add(norwegian == 1)\n", + "\n", + " diff_fox_chesterfields = model.new_int_var(-4, 4, \"diff_fox_chesterfields\")\n", + " model.add(diff_fox_chesterfields == fox - chesterfields)\n", + " model.add_abs_equality(1, diff_fox_chesterfields)\n", + "\n", + " diff_horse_kools = model.new_int_var(-4, 4, \"diff_horse_kools\")\n", + " model.add(diff_horse_kools == horse - kools)\n", + " model.add_abs_equality(1, diff_horse_kools)\n", + "\n", + " model.add(lucky_strike == fruit_juice)\n", + " model.add(japanese == parliaments)\n", + "\n", + " diff_norwegian_blue = model.new_int_var(-4, 4, \"diff_norwegian_blue\")\n", + " model.add(diff_norwegian_blue == norwegian - blue)\n", + " model.add_abs_equality(1, diff_norwegian_blue)\n", "\n", " # Solve and print out the solution.\n", " solver = cp_model.CpSolver()\n", - " status = solver.Solve(model)\n", + " status = solver.solve(model)\n", "\n", " if status == cp_model.OPTIMAL:\n", " people = [englishman, spaniard, japanese, ukrainian, norwegian]\n", - " water_drinker = [p for p in people if solver.Value(p) == solver.Value(water)][0]\n", - " zebra_owner = [p for p in people if solver.Value(p) == solver.Value(zebra)][0]\n", - " print(\"The\", water_drinker.Name(), \"drinks water.\")\n", - " print(\"The\", zebra_owner.Name(), \"owns the zebra.\")\n", + " 
water_drinker = [p for p in people if solver.value(p) == solver.value(water)][0]\n", +    "        zebra_owner = [p for p in people if solver.value(p) == solver.value(zebra)][0]\n", +    "        print(\"The\", water_drinker.name, \"drinks water.\")\n", +    "        print(\"The\", zebra_owner.name, \"owns the zebra.\")\n", "    else:\n", "        print(\"No solutions to the zebra problem, this is unusual!\")\n", "\n", diff --git a/examples/notebook/graph/assignment_linear_sum_assignment.ipynb b/examples/notebook/graph/assignment_linear_sum_assignment.ipynb index 4d9fcd76457..c0edfe84441 100644 --- a/examples/notebook/graph/assignment_linear_sum_assignment.ipynb +++ b/examples/notebook/graph/assignment_linear_sum_assignment.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ -    "##### Copyright 2023 Google LLC." +    "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/graph/assignment_min_flow.ipynb b/examples/notebook/graph/assignment_min_flow.ipynb index 444aa73f72a..e8a998a10d0 100644 --- a/examples/notebook/graph/assignment_min_flow.ipynb +++ b/examples/notebook/graph/assignment_min_flow.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ -    "##### Copyright 2023 Google LLC." +    "##### Copyright 2024 Google LLC." ] }, { @@ -130,6 +130,7 @@ "    for arc in range(smcf.num_arcs()):\n", "        # Can ignore arcs leading out of source or into sink.\n", "        if smcf.tail(arc) != source and smcf.head(arc) != sink:\n", +    "\n", "            # Arcs in the solution have a flow value of 1. Their start and end nodes\n", "            # give an assignment of worker to task.\n", "            if smcf.flow(arc) > 0:\n", diff --git a/examples/notebook/graph/balance_min_flow.ipynb b/examples/notebook/graph/balance_min_flow.ipynb index 238b878e278..b1c9d3fbd9a 100644 --- a/examples/notebook/graph/balance_min_flow.ipynb +++ b/examples/notebook/graph/balance_min_flow.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ -    "##### Copyright 2023 Google LLC." +    "##### Copyright 2024 Google LLC." ] }, { @@ -161,6 +161,7 @@ "            and smcf.tail(arc) != 12\n", "            and smcf.head(arc) != sink\n", "        ):\n", +    "\n", "            # Arcs in the solution will have a flow value of 1.\n", "            # Their start and end nodes give an assignment of worker to task.\n", "            if smcf.flow(arc) > 0:\n", diff --git a/examples/notebook/graph/simple_max_flow_program.ipynb b/examples/notebook/graph/simple_max_flow_program.ipynb index 7fd3732f68f..565b87b392f 100644 --- a/examples/notebook/graph/simple_max_flow_program.ipynb +++ b/examples/notebook/graph/simple_max_flow_program.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ -    "##### Copyright 2023 Google LLC." +    "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/graph/simple_min_cost_flow_program.ipynb b/examples/notebook/graph/simple_min_cost_flow_program.ipynb index ab10714e41e..0f754965ee6 100644 --- a/examples/notebook/graph/simple_min_cost_flow_program.ipynb +++ b/examples/notebook/graph/simple_min_cost_flow_program.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ -    "##### Copyright 2023 Google LLC." +    "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/linear_solver/assignment_groups_mip.ipynb b/examples/notebook/linear_solver/assignment_groups_mip.ipynb index 38966588ee9..e2c72d3bd54 100644 --- a/examples/notebook/linear_solver/assignment_groups_mip.ipynb +++ b/examples/notebook/linear_solver/assignment_groups_mip.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ -    "##### Copyright 2023 Google LLC." +    "##### Copyright 2024 Google LLC."
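The zebra_sat hunks earlier encode "the two houses are next door" as |a - b| == 1 through an auxiliary difference variable and `add_abs_equality`. A standalone sketch of that pattern (toy variables, not from the notebook):

```python
from ortools.sat.python import cp_model

# "Neighbouring houses" pattern from zebra_sat: |a - b| == 1 via an
# auxiliary difference variable and add_abs_equality(target, expr).
model = cp_model.CpModel()
a = model.new_int_var(1, 5, "a")
b = model.new_int_var(1, 5, "b")

diff = model.new_int_var(-4, 4, "diff")
model.add(diff == a - b)
model.add_abs_equality(1, diff)  # constrains 1 == |diff|

solver = cp_model.CpSolver()
if solver.solve(model) == cp_model.OPTIMAL:
    print(solver.value(a), solver.value(b))  # always adjacent values
```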
] }, { @@ -165,39 +165,39 @@ "\n", " # Group1\n", " constraint_g1 = solver.Constraint(1, 1)\n", - " for i in range(len(group1)):\n", + " for index, _ in enumerate(group1):\n", " # a*b can be transformed into 0 <= a + b - 2*p <= 1 with p in [0,1]\n", " # p is True if a AND b, False otherwise\n", " constraint = solver.Constraint(0, 1)\n", - " constraint.SetCoefficient(work[group1[i][0]], 1)\n", - " constraint.SetCoefficient(work[group1[i][1]], 1)\n", - " p = solver.BoolVar(f\"g1_p{i}\")\n", + " constraint.SetCoefficient(work[group1[index][0]], 1)\n", + " constraint.SetCoefficient(work[group1[index][1]], 1)\n", + " p = solver.BoolVar(f\"g1_p{index}\")\n", " constraint.SetCoefficient(p, -2)\n", "\n", " constraint_g1.SetCoefficient(p, 1)\n", "\n", " # Group2\n", " constraint_g2 = solver.Constraint(1, 1)\n", - " for i in range(len(group2)):\n", + " for index, _ in enumerate(group2):\n", " # a*b can be transformed into 0 <= a + b - 2*p <= 1 with p in [0,1]\n", " # p is True if a AND b, False otherwise\n", " constraint = solver.Constraint(0, 1)\n", - " constraint.SetCoefficient(work[group2[i][0]], 1)\n", - " constraint.SetCoefficient(work[group2[i][1]], 1)\n", - " p = solver.BoolVar(f\"g2_p{i}\")\n", + " constraint.SetCoefficient(work[group2[index][0]], 1)\n", + " constraint.SetCoefficient(work[group2[index][1]], 1)\n", + " p = solver.BoolVar(f\"g2_p{index}\")\n", " constraint.SetCoefficient(p, -2)\n", "\n", " constraint_g2.SetCoefficient(p, 1)\n", "\n", " # Group3\n", " constraint_g3 = solver.Constraint(1, 1)\n", - " for i in range(len(group3)):\n", + " for index, _ in enumerate(group3):\n", " # a*b can be transformed into 0 <= a + b - 2*p <= 1 with p in [0,1]\n", " # p is True if a AND b, False otherwise\n", " constraint = solver.Constraint(0, 1)\n", - " constraint.SetCoefficient(work[group3[i][0]], 1)\n", - " constraint.SetCoefficient(work[group3[i][1]], 1)\n", - " p = solver.BoolVar(f\"g3_p{i}\")\n", + " constraint.SetCoefficient(work[group3[index][0]], 1)\n", + " constraint.SetCoefficient(work[group3[index][1]], 1)\n", + " p = solver.BoolVar(f\"g3_p{index}\")\n", " constraint.SetCoefficient(p, -2)\n", "\n", " constraint_g3.SetCoefficient(p, 1)\n", diff --git a/examples/notebook/linear_solver/assignment_mb.ipynb b/examples/notebook/linear_solver/assignment_mb.ipynb index 34b7ac0a9d2..a6808aeacdc 100644 --- a/examples/notebook/linear_solver/assignment_mb.ipynb +++ b/examples/notebook/linear_solver/assignment_mb.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/linear_solver/assignment_mip.ipynb b/examples/notebook/linear_solver/assignment_mip.ipynb index 6540ed8f047..f814693fa91 100644 --- a/examples/notebook/linear_solver/assignment_mip.ipynb +++ b/examples/notebook/linear_solver/assignment_mip.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/linear_solver/assignment_task_sizes_mip.ipynb b/examples/notebook/linear_solver/assignment_task_sizes_mip.ipynb index 2b1f0b69f0d..3bfb3141f4b 100644 --- a/examples/notebook/linear_solver/assignment_task_sizes_mip.ipynb +++ b/examples/notebook/linear_solver/assignment_task_sizes_mip.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
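The assignment_groups_mip hunk above leans on the identity stated in its comments: for booleans a and b, `0 <= a + b - 2*p <= 1` forces `p == a AND b` (the upper bound pushes p to 1 when both are 1; the lower bound pushes it to 0 otherwise). A toy pywraplp sketch checking that, minimizing p so the constraint alone does the forcing:

```python
from ortools.linear_solver import pywraplp

solver = pywraplp.Solver.CreateSolver("CBC")
a = solver.BoolVar("a")
b = solver.BoolVar("b")
p = solver.BoolVar("p")  # should end up equal to a AND b

# 0 <= a + b - 2*p <= 1, the linearization used for each pair in the groups.
ct = solver.Constraint(0, 1)
ct.SetCoefficient(a, 1)
ct.SetCoefficient(b, 1)
ct.SetCoefficient(p, -2)

# Pin a = b = 1; even while minimizing p, the constraint forces p = 1.
solver.Add(a == 1)
solver.Add(b == 1)
solver.Minimize(p)
solver.Solve()
print(p.solution_value())  # 1.0
```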
] }, { diff --git a/examples/notebook/linear_solver/assignment_teams_mip.ipynb b/examples/notebook/linear_solver/assignment_teams_mip.ipynb index 75911a8447c..5726a5d5efb 100644 --- a/examples/notebook/linear_solver/assignment_teams_mip.ipynb +++ b/examples/notebook/linear_solver/assignment_teams_mip.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/linear_solver/basic_example.ipynb b/examples/notebook/linear_solver/basic_example.ipynb index 97cc416ab3f..536f091e80d 100644 --- a/examples/notebook/linear_solver/basic_example.ipynb +++ b/examples/notebook/linear_solver/basic_example.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -83,43 +83,66 @@ "metadata": {}, "outputs": [], "source": [ + "from ortools.init.python import init\n", "from ortools.linear_solver import pywraplp\n", "\n", "\n", "def main():\n", + " print(\"Google OR-Tools version:\", init.OrToolsVersion.version_string())\n", + "\n", " # Create the linear solver with the GLOP backend.\n", " solver = pywraplp.Solver.CreateSolver(\"GLOP\")\n", " if not solver:\n", + " print(\"Could not create solver GLOP\")\n", " return\n", "\n", " # Create the variables x and y.\n", - " x = solver.NumVar(0, 1, \"x\")\n", - " y = solver.NumVar(0, 2, \"y\")\n", + " x_var = solver.NumVar(0, 1, \"x\")\n", + " y_var = solver.NumVar(0, 2, \"y\")\n", "\n", " print(\"Number of variables =\", solver.NumVariables())\n", "\n", - " # Create a linear constraint, 0 <= x + y <= 2.\n", - " ct = solver.Constraint(0, 2, \"ct\")\n", - " ct.SetCoefficient(x, 1)\n", - " ct.SetCoefficient(y, 1)\n", + " infinity = solver.infinity()\n", + " # Create a linear constraint, x + y <= 2.\n", + " constraint = solver.Constraint(-infinity, 2, \"ct\")\n", + " constraint.SetCoefficient(x_var, 1)\n", + " constraint.SetCoefficient(y_var, 1)\n", "\n", " print(\"Number of constraints =\", solver.NumConstraints())\n", "\n", " # Create the objective function, 3 * x + y.\n", " objective = solver.Objective()\n", - " objective.SetCoefficient(x, 3)\n", - " objective.SetCoefficient(y, 1)\n", + " objective.SetCoefficient(x_var, 3)\n", + " objective.SetCoefficient(y_var, 1)\n", " objective.SetMaximization()\n", "\n", " print(f\"Solving with {solver.SolverVersion()}\")\n", - " solver.Solve()\n", + " result_status = solver.Solve()\n", + "\n", + " print(f\"Status: {result_status}\")\n", + " if result_status != pywraplp.Solver.OPTIMAL:\n", + " print(\"The problem does not have an optimal solution!\")\n", + " if result_status == pywraplp.Solver.FEASIBLE:\n", + " print(\"A potentially suboptimal solution was found\")\n", + " else:\n", + " print(\"The solver could not solve the problem.\")\n", + " return\n", "\n", " print(\"Solution:\")\n", " print(\"Objective value =\", objective.Value())\n", - " print(\"x =\", x.solution_value())\n", - " print(\"y =\", y.solution_value())\n", + " print(\"x =\", x_var.solution_value())\n", + " print(\"y =\", y_var.solution_value())\n", + "\n", + " print(\"Advanced usage:\")\n", + " print(f\"Problem solved in {solver.wall_time():d} milliseconds\")\n", + " print(f\"Problem solved in {solver.iterations():d} iterations\")\n", "\n", "\n", + "init.CppBridge.init_logging(\"basic_example.py\")\n", + "cpp_flags = init.CppFlags()\n", + "cpp_flags.stderrthreshold = True\n", + "cpp_flags.log_prefix = False\n", + "init.CppBridge.set_flags(cpp_flags)\n", 
"main()\n", "\n" ] diff --git a/examples/notebook/linear_solver/bin_packing_mb.ipynb b/examples/notebook/linear_solver/bin_packing_mb.ipynb index cab95ab24ba..e848fa7fe92 100644 --- a/examples/notebook/linear_solver/bin_packing_mb.ipynb +++ b/examples/notebook/linear_solver/bin_packing_mb.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -90,7 +90,7 @@ "from ortools.linear_solver.python import model_builder\n", "\n", "\n", - "def create_data_model():\n", + "def create_data_model() -> tuple[pd.DataFrame, pd.DataFrame]:\n", " \"\"\"Create the data for the example.\"\"\"\n", "\n", " items_str = \"\"\"\n", diff --git a/examples/notebook/linear_solver/bin_packing_mip.ipynb b/examples/notebook/linear_solver/bin_packing_mip.ipynb index 1b43da8711d..58fc818c3d4 100644 --- a/examples/notebook/linear_solver/bin_packing_mip.ipynb +++ b/examples/notebook/linear_solver/bin_packing_mip.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/linear_solver/clone_model_mb.ipynb b/examples/notebook/linear_solver/clone_model_mb.ipynb index 17478cf4ab7..350ecc4c4bd 100644 --- a/examples/notebook/linear_solver/clone_model_mb.ipynb +++ b/examples/notebook/linear_solver/clone_model_mb.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/linear_solver/integer_programming_example.ipynb b/examples/notebook/linear_solver/integer_programming_example.ipynb index 8db2b7c3364..7dd5b31fdb8 100644 --- a/examples/notebook/linear_solver/integer_programming_example.ipynb +++ b/examples/notebook/linear_solver/integer_programming_example.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/linear_solver/linear_programming_example.ipynb b/examples/notebook/linear_solver/linear_programming_example.ipynb index 00cb97c3b83..7ee26107cdc 100644 --- a/examples/notebook/linear_solver/linear_programming_example.ipynb +++ b/examples/notebook/linear_solver/linear_programming_example.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/linear_solver/mip_var_array.ipynb b/examples/notebook/linear_solver/mip_var_array.ipynb index c20c57d4569..f0f8664fddf 100644 --- a/examples/notebook/linear_solver/mip_var_array.ipynb +++ b/examples/notebook/linear_solver/mip_var_array.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -73,7 +73,7 @@ "metadata": {}, "source": [ "\n", - "MIP example that uses a variable array." + "MIP example that uses a variable array.\n" ] }, { diff --git a/examples/notebook/linear_solver/multiple_knapsack_mip.ipynb b/examples/notebook/linear_solver/multiple_knapsack_mip.ipynb index dd829b3a21a..8e8125b9afd 100644 --- a/examples/notebook/linear_solver/multiple_knapsack_mip.ipynb +++ b/examples/notebook/linear_solver/multiple_knapsack_mip.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { @@ -144,7 +144,8 @@ " for i in data[\"all_items\"]:\n", " if x[i, b].solution_value() > 0:\n", " print(\n", - " f\"Item {i} weight: {data['weights'][i]} value: {data['values'][i]}\"\n", + " f\"Item {i} weight: {data['weights'][i]} value:\"\n", + " f\" {data['values'][i]}\"\n", " )\n", " bin_weight += data[\"weights\"][i]\n", " bin_value += data[\"values\"][i]\n", diff --git a/examples/notebook/linear_solver/simple_lp_program.ipynb b/examples/notebook/linear_solver/simple_lp_program.ipynb index 3094e7382e8..4775ecdfbd1 100644 --- a/examples/notebook/linear_solver/simple_lp_program.ipynb +++ b/examples/notebook/linear_solver/simple_lp_program.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/linear_solver/simple_lp_program_mb.ipynb b/examples/notebook/linear_solver/simple_lp_program_mb.ipynb index 2d434121d86..bee79cf01f5 100644 --- a/examples/notebook/linear_solver/simple_lp_program_mb.ipynb +++ b/examples/notebook/linear_solver/simple_lp_program_mb.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/linear_solver/simple_mip_program.ipynb b/examples/notebook/linear_solver/simple_mip_program.ipynb index 8529c64c2d9..b3c6b8ad242 100644 --- a/examples/notebook/linear_solver/simple_mip_program.ipynb +++ b/examples/notebook/linear_solver/simple_mip_program.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -87,7 +87,7 @@ "\n", "\n", "def main():\n", - " # Create the mip solver with the SCIP backend.\n", + " # Create the mip solver with the CP-SAT backend.\n", " solver = pywraplp.Solver.CreateSolver(\"SAT\")\n", " if not solver:\n", " return\n", diff --git a/examples/notebook/linear_solver/simple_mip_program_mb.ipynb b/examples/notebook/linear_solver/simple_mip_program_mb.ipynb index ef4151b6368..9fb3f76d991 100644 --- a/examples/notebook/linear_solver/simple_mip_program_mb.ipynb +++ b/examples/notebook/linear_solver/simple_mip_program_mb.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/linear_solver/stigler_diet.ipynb b/examples/notebook/linear_solver/stigler_diet.ipynb index 7acaf3b7079..d57c2dafa54 100644 --- a/examples/notebook/linear_solver/stigler_diet.ipynb +++ b/examples/notebook/linear_solver/stigler_diet.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { diff --git a/examples/notebook/pdlp/simple_pdlp_program.ipynb b/examples/notebook/pdlp/simple_pdlp_program.ipynb index 7a0464fed0f..4cb305fd493 100644 --- a/examples/notebook/pdlp/simple_pdlp_program.ipynb +++ b/examples/notebook/pdlp/simple_pdlp_program.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
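One note on the simple_mip_program hunk above, where the comment is corrected to match the `"SAT"` (CP-SAT) backend actually requested: `CreateSolver` returns `None` when the requested backend is unavailable, which is why all of these samples guard the result. A hedged sketch of a fallback chain (illustrative, not part of this commit; note the CP-SAT backend accepts only pure-integer models):

```python
from ortools.linear_solver import pywraplp

# Prefer SCIP if present, else fall back to the always-bundled CP-SAT backend.
solver = pywraplp.Solver.CreateSolver("SCIP") or pywraplp.Solver.CreateSolver("SAT")
if not solver:
    raise RuntimeError("No MIP backend available")
print("Using:", solver.SolverVersion())
```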
] }, { diff --git a/examples/notebook/sat/all_different_except_zero_sample_sat.ipynb b/examples/notebook/sat/all_different_except_zero_sample_sat.ipynb new file mode 100644 index 00000000000..deb2a3f1bb3 --- /dev/null +++ b/examples/notebook/sat/all_different_except_zero_sample_sat.ipynb @@ -0,0 +1,161 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "google", + "metadata": {}, + "source": [ + "##### Copyright 2024 Google LLC." + ] + }, + { + "cell_type": "markdown", + "id": "apache", + "metadata": {}, + "source": [ + "Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "you may not use this file except in compliance with the License.\n", + "You may obtain a copy of the License at\n", + "\n", + " http://www.apache.org/licenses/LICENSE-2.0\n", + "\n", + "Unless required by applicable law or agreed to in writing, software\n", + "distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "See the License for the specific language governing permissions and\n", + "limitations under the License.\n" + ] + }, + { + "cell_type": "markdown", + "id": "basename", + "metadata": {}, + "source": [ + "# all_different_except_zero_sample_sat" + ] + }, + { + "cell_type": "markdown", + "id": "link", + "metadata": {}, + "source": [ + "\n", + "\n", + "\n", + "
\n", + "Run in Google Colab\n", + "\n", + "View source on GitHub\n", + "
" + ] + }, + { + "cell_type": "markdown", + "id": "doc", + "metadata": {}, + "source": [ + "First, you must install [ortools](https://pypi.org/project/ortools/) package in this colab." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "install", + "metadata": {}, + "outputs": [], + "source": [ + "%pip install ortools" + ] + }, + { + "cell_type": "markdown", + "id": "description", + "metadata": {}, + "source": [ + "\n", + "Implements AllDifferentExcept0 using atomic constraints.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "code", + "metadata": {}, + "outputs": [], + "source": [ + "import collections\n", + "\n", + "from ortools.sat.python import cp_model\n", + "\n", + "\n", + "def all_different_except_0():\n", + " \"\"\"Encode the AllDifferentExcept0 constraint.\"\"\"\n", + "\n", + " # Model.\n", + " model = cp_model.CpModel()\n", + "\n", + " # Declare our primary variable.\n", + " x = [model.new_int_var(0, 10, f\"x{i}\") for i in range(5)]\n", + "\n", + " # Expand the AllDifferentExcept0 constraint.\n", + " variables_per_value = collections.defaultdict(list)\n", + " all_values = set()\n", + "\n", + " for var in x:\n", + " all_encoding_literals = []\n", + " # Domains of variables are represented by flat intervals.\n", + " for i in range(0, len(var.proto.domain), 2):\n", + " start = var.proto.domain[i]\n", + " end = var.proto.domain[i + 1]\n", + " for value in range(start, end + 1): # Intervals are inclusive.\n", + " # Create the literal attached to var == value.\n", + " bool_var = model.new_bool_var(f\"{var} == {value}\")\n", + " model.add(var == value).only_enforce_if(bool_var)\n", + "\n", + " # Collect all encoding literals for a given variable.\n", + " all_encoding_literals.append(bool_var)\n", + "\n", + " # Collect all encoding literals for a given value.\n", + " variables_per_value[value].append(bool_var)\n", + "\n", + " # Collect all different values.\n", + " all_values.add(value)\n", + "\n", + " # One variable must have exactly one value.\n", + " model.add_exactly_one(all_encoding_literals)\n", + "\n", + " # Add the all_different constraints.\n", + " for value, literals in variables_per_value.items():\n", + " if value == 0:\n", + " continue\n", + " model.add_at_most_one(literals)\n", + "\n", + " model.add(x[0] == 0)\n", + " model.add(x[1] == 0)\n", + "\n", + " model.maximize(sum(x))\n", + "\n", + " # Create a solver and solve.\n", + " solver = cp_model.CpSolver()\n", + " status = solver.solve(model)\n", + "\n", + " # Checks and prints the output.\n", + " if status == cp_model.OPTIMAL:\n", + " print(f\"Optimal solution: {solver.objective_value}, expected: 27.0\")\n", + " elif status == cp_model.FEASIBLE:\n", + " print(f\"Feasible solution: {solver.objective_value}, optimal 27.0\")\n", + " elif status == cp_model.INFEASIBLE:\n", + " print(\"The model is infeasible\")\n", + " else:\n", + " print(\"Something went wrong. Please check the status and the log\")\n", + "\n", + "\n", + "all_different_except_0()\n", + "\n" + ] + } + ], + "metadata": {}, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/notebook/sat/assignment_groups_sat.ipynb b/examples/notebook/sat/assignment_groups_sat.ipynb index 8a36cfac501..5bebc34dcf7 100644 --- a/examples/notebook/sat/assignment_groups_sat.ipynb +++ b/examples/notebook/sat/assignment_groups_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { @@ -73,7 +73,7 @@ "metadata": {}, "source": [ "\n", - "Solve assignment problem for given group of workers." + "Solves an assignment problem for given group of workers." ] }, { @@ -86,7 +86,7 @@ "from ortools.sat.python import cp_model\n", "\n", "\n", - "def main():\n", + "def main() -> None:\n", " # Data\n", " costs = [\n", " [90, 76, 75, 70, 50, 74],\n", @@ -137,48 +137,48 @@ " x = {}\n", " for worker in range(num_workers):\n", " for task in range(num_tasks):\n", - " x[worker, task] = model.NewBoolVar(f\"x[{worker},{task}]\")\n", + " x[worker, task] = model.new_bool_var(f\"x[{worker},{task}]\")\n", "\n", " # Constraints\n", " # Each worker is assigned to at most one task.\n", " for worker in range(num_workers):\n", - " model.AddAtMostOne(x[worker, task] for task in range(num_tasks))\n", + " model.add_at_most_one(x[worker, task] for task in range(num_tasks))\n", "\n", " # Each task is assigned to exactly one worker.\n", " for task in range(num_tasks):\n", - " model.AddExactlyOne(x[worker, task] for worker in range(num_workers))\n", + " model.add_exactly_one(x[worker, task] for worker in range(num_workers))\n", "\n", " # Create variables for each worker, indicating whether they work on some task.\n", " work = {}\n", " for worker in range(num_workers):\n", - " work[worker] = model.NewBoolVar(f\"work[{worker}]\")\n", + " work[worker] = model.new_bool_var(f\"work[{worker}]\")\n", "\n", " for worker in range(num_workers):\n", " for task in range(num_tasks):\n", - " model.Add(work[worker] == sum(x[worker, task] for task in range(num_tasks)))\n", + " model.add(work[worker] == sum(x[worker, task] for task in range(num_tasks)))\n", "\n", " # Define the allowed groups of worders\n", - " model.AddAllowedAssignments([work[0], work[1], work[2], work[3]], group1)\n", - " model.AddAllowedAssignments([work[4], work[5], work[6], work[7]], group2)\n", - " model.AddAllowedAssignments([work[8], work[9], work[10], work[11]], group3)\n", + " model.add_allowed_assignments([work[0], work[1], work[2], work[3]], group1)\n", + " model.add_allowed_assignments([work[4], work[5], work[6], work[7]], group2)\n", + " model.add_allowed_assignments([work[8], work[9], work[10], work[11]], group3)\n", "\n", " # Objective\n", " objective_terms = []\n", " for worker in range(num_workers):\n", " for task in range(num_tasks):\n", " objective_terms.append(costs[worker][task] * x[worker, task])\n", - " model.Minimize(sum(objective_terms))\n", + " model.minimize(sum(objective_terms))\n", "\n", " # Solve\n", " solver = cp_model.CpSolver()\n", - " status = solver.Solve(model)\n", + " status = solver.solve(model)\n", "\n", " # Print solution.\n", " if status == cp_model.OPTIMAL or status == cp_model.FEASIBLE:\n", - " print(f\"Total cost = {solver.ObjectiveValue()}\\n\")\n", + " print(f\"Total cost = {solver.objective_value}\\n\")\n", " for worker in range(num_workers):\n", " for task in range(num_tasks):\n", - " if solver.BooleanValue(x[worker, task]):\n", + " if solver.boolean_value(x[worker, task]):\n", " print(\n", " f\"Worker {worker} assigned to task {task}.\"\n", " + f\" Cost = {costs[worker][task]}\"\n", diff --git a/examples/notebook/sat/assignment_sat.ipynb b/examples/notebook/sat/assignment_sat.ipynb index 87a7fad908c..012e9219af3 100644 --- a/examples/notebook/sat/assignment_sat.ipynb +++ b/examples/notebook/sat/assignment_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
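For readers new to `add_allowed_assignments`, used above to restrict which combinations of workers may be active in each group: the listed tuples are the only joint values the variable vector may take (a table constraint). A minimal sketch with illustrative data:

```python
from ortools.sat.python import cp_model

model = cp_model.CpModel()
a = model.new_bool_var("a")
b = model.new_bool_var("b")
c = model.new_bool_var("c")
# Exactly these three on/off patterns are feasible for (a, b, c).
model.add_allowed_assignments([a, b, c], [(1, 0, 0), (0, 1, 0), (0, 0, 1)])

solver = cp_model.CpSolver()
solver.parameters.enumerate_all_solutions = True
# Prints the three allowed assignments, one per line.
solver.solve(model, cp_model.VarArraySolutionPrinter([a, b, c]))
```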
] }, { @@ -73,7 +73,7 @@ "metadata": {}, "source": [ "\n", - "Solve a simple assignment problem with CP-SAT." + "Solves a simple assignment problem with CP-SAT.\n" ] }, { @@ -90,7 +90,7 @@ "from ortools.sat.python import cp_model\n", "\n", "\n", - "def main():\n", + "def main() -> None:\n", " # Data\n", " data_str = \"\"\"\n", " worker task cost\n", @@ -122,28 +122,28 @@ " model = cp_model.CpModel()\n", "\n", " # Variables\n", - " x = model.NewBoolVarSeries(name=\"x\", index=data.index)\n", + " x = model.new_bool_var_series(name=\"x\", index=data.index)\n", "\n", " # Constraints\n", " # Each worker is assigned to at most one task.\n", " for unused_name, tasks in data.groupby(\"worker\"):\n", - " model.AddAtMostOne(x[tasks.index])\n", + " model.add_at_most_one(x[tasks.index])\n", "\n", " # Each task is assigned to exactly one worker.\n", " for unused_name, workers in data.groupby(\"task\"):\n", - " model.AddExactlyOne(x[workers.index])\n", + " model.add_exactly_one(x[workers.index])\n", "\n", " # Objective\n", - " model.Minimize(data.cost.dot(x))\n", + " model.minimize(data.cost.dot(x))\n", "\n", " # Solve\n", " solver = cp_model.CpSolver()\n", - " status = solver.Solve(model)\n", + " status = solver.solve(model)\n", "\n", " # Print solution.\n", " if status == cp_model.OPTIMAL or status == cp_model.FEASIBLE:\n", - " print(f\"Total cost = {solver.ObjectiveValue()}\\n\")\n", - " selected = data.loc[solver.BooleanValues(x).loc[lambda x: x].index]\n", + " print(f\"Total cost = {solver.objective_value}\\n\")\n", + " selected = data.loc[solver.boolean_values(x).loc[lambda x: x].index]\n", " for unused_index, row in selected.iterrows():\n", " print(f\"{row.task} assigned to {row.worker} with a cost of {row.cost}\")\n", " elif status == cp_model.INFEASIBLE:\n", diff --git a/examples/notebook/sat/assignment_task_sizes_sat.ipynb b/examples/notebook/sat/assignment_task_sizes_sat.ipynb index f71ac5221f4..aa2c528af51 100644 --- a/examples/notebook/sat/assignment_task_sizes_sat.ipynb +++ b/examples/notebook/sat/assignment_task_sizes_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -73,7 +73,7 @@ "metadata": {}, "source": [ "\n", - "Solve a simple assignment problem." + "Solves a simple assignment problem." 
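A pandas aside on the selection idiom above: `solver.boolean_values(x)` returns a Boolean `pd.Series` aligned with `x`, `.loc[lambda s: s]` keeps only the rows whose value is True, and `.index` then picks out the matching rows of `data`. With illustrative data:

```python
import pandas as pd

flags = pd.Series({10: False, 11: True, 12: True})
print(list(flags.loc[lambda s: s].index))  # [11, 12]
```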
] }, { @@ -86,7 +86,7 @@ "from ortools.sat.python import cp_model\n", "\n", "\n", - "def main():\n", + "def main() -> None:\n", " # Data\n", " costs = [\n", " [90, 76, 75, 70, 50, 74, 12, 68],\n", @@ -114,37 +114,37 @@ " x = {}\n", " for worker in range(num_workers):\n", " for task in range(num_tasks):\n", - " x[worker, task] = model.NewBoolVar(f\"x[{worker},{task}]\")\n", + " x[worker, task] = model.new_bool_var(f\"x[{worker},{task}]\")\n", "\n", " # Constraints\n", " # Each worker is assigned to at most one task.\n", " for worker in range(num_workers):\n", - " model.Add(\n", + " model.add(\n", " sum(task_sizes[task] * x[worker, task] for task in range(num_tasks))\n", " <= total_size_max\n", " )\n", "\n", " # Each task is assigned to exactly one worker.\n", " for task in range(num_tasks):\n", - " model.AddExactlyOne(x[worker, task] for worker in range(num_workers))\n", + " model.add_exactly_one(x[worker, task] for worker in range(num_workers))\n", "\n", " # Objective\n", " objective_terms = []\n", " for worker in range(num_workers):\n", " for task in range(num_tasks):\n", " objective_terms.append(costs[worker][task] * x[worker, task])\n", - " model.Minimize(sum(objective_terms))\n", + " model.minimize(sum(objective_terms))\n", "\n", " # Solve\n", " solver = cp_model.CpSolver()\n", - " status = solver.Solve(model)\n", + " status = solver.solve(model)\n", "\n", " # Print solution.\n", " if status == cp_model.OPTIMAL or status == cp_model.FEASIBLE:\n", - " print(f\"Total cost = {solver.ObjectiveValue()}\\n\")\n", + " print(f\"Total cost = {solver.objective_value}\\n\")\n", " for worker in range(num_workers):\n", " for task in range(num_tasks):\n", - " if solver.BooleanValue(x[worker, task]):\n", + " if solver.boolean_value(x[worker, task]):\n", " print(\n", " f\"Worker {worker} assigned to task {task}.\"\n", " + f\" Cost = {costs[worker][task]}\"\n", diff --git a/examples/notebook/sat/assignment_teams_sat.ipynb b/examples/notebook/sat/assignment_teams_sat.ipynb index cad6a24205b..5e50e35875b 100644 --- a/examples/notebook/sat/assignment_teams_sat.ipynb +++ b/examples/notebook/sat/assignment_teams_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -73,7 +73,7 @@ "metadata": {}, "source": [ "\n", - "Solve a simple assignment problem." + "Solves a simple assignment problem." 
] }, { @@ -86,7 +86,7 @@ "from ortools.sat.python import cp_model\n", "\n", "\n", - "def main():\n", + "def main() -> None:\n", " # Data\n", " costs = [\n", " [90, 76, 75, 70],\n", @@ -111,47 +111,47 @@ " x = {}\n", " for worker in range(num_workers):\n", " for task in range(num_tasks):\n", - " x[worker, task] = model.NewBoolVar(f\"x[{worker},{task}]\")\n", + " x[worker, task] = model.new_bool_var(f\"x[{worker},{task}]\")\n", "\n", " # Constraints\n", " # Each worker is assigned to at most one task.\n", " for worker in range(num_workers):\n", - " model.AddAtMostOne(x[worker, task] for task in range(num_tasks))\n", + " model.add_at_most_one(x[worker, task] for task in range(num_tasks))\n", "\n", " # Each task is assigned to exactly one worker.\n", " for task in range(num_tasks):\n", - " model.AddExactlyOne(x[worker, task] for worker in range(num_workers))\n", + " model.add_exactly_one(x[worker, task] for worker in range(num_workers))\n", "\n", " # Each team takes at most two tasks.\n", " team1_tasks = []\n", " for worker in team1:\n", " for task in range(num_tasks):\n", " team1_tasks.append(x[worker, task])\n", - " model.Add(sum(team1_tasks) <= team_max)\n", + " model.add(sum(team1_tasks) <= team_max)\n", "\n", " team2_tasks = []\n", " for worker in team2:\n", " for task in range(num_tasks):\n", " team2_tasks.append(x[worker, task])\n", - " model.Add(sum(team2_tasks) <= team_max)\n", + " model.add(sum(team2_tasks) <= team_max)\n", "\n", " # Objective\n", " objective_terms = []\n", " for worker in range(num_workers):\n", " for task in range(num_tasks):\n", " objective_terms.append(costs[worker][task] * x[worker, task])\n", - " model.Minimize(sum(objective_terms))\n", + " model.minimize(sum(objective_terms))\n", "\n", " # Solve\n", " solver = cp_model.CpSolver()\n", - " status = solver.Solve(model)\n", + " status = solver.solve(model)\n", "\n", " # Print solution.\n", " if status == cp_model.OPTIMAL or status == cp_model.FEASIBLE:\n", - " print(f\"Total cost = {solver.ObjectiveValue()}\\n\")\n", + " print(f\"Total cost = {solver.objective_value}\\n\")\n", " for worker in range(num_workers):\n", " for task in range(num_tasks):\n", - " if solver.BooleanValue(x[worker, task]):\n", + " if solver.boolean_value(x[worker, task]):\n", " print(\n", " f\"Worker {worker} assigned to task {task}.\"\n", " + f\" Cost = {costs[worker][task]}\"\n", diff --git a/examples/notebook/sat/assumptions_sample_sat.ipynb b/examples/notebook/sat/assumptions_sample_sat.ipynb index b6ba7d5b244..b76394e5799 100644 --- a/examples/notebook/sat/assumptions_sample_sat.ipynb +++ b/examples/notebook/sat/assumptions_sample_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { @@ -86,37 +86,37 @@ "from ortools.sat.python import cp_model\n", "\n", "\n", - "def main():\n", + "def main() -> None:\n", " \"\"\"Showcases assumptions.\"\"\"\n", " # Creates the model.\n", " model = cp_model.CpModel()\n", "\n", " # Creates the variables.\n", - " x = model.NewIntVar(0, 10, \"x\")\n", - " y = model.NewIntVar(0, 10, \"y\")\n", - " z = model.NewIntVar(0, 10, \"z\")\n", - " a = model.NewBoolVar(\"a\")\n", - " b = model.NewBoolVar(\"b\")\n", - " c = model.NewBoolVar(\"c\")\n", + " x = model.new_int_var(0, 10, \"x\")\n", + " y = model.new_int_var(0, 10, \"y\")\n", + " z = model.new_int_var(0, 10, \"z\")\n", + " a = model.new_bool_var(\"a\")\n", + " b = model.new_bool_var(\"b\")\n", + " c = model.new_bool_var(\"c\")\n", "\n", " # Creates the constraints.\n", - " model.Add(x > y).OnlyEnforceIf(a)\n", - " model.Add(y > z).OnlyEnforceIf(b)\n", - " model.Add(z > x).OnlyEnforceIf(c)\n", + " model.add(x > y).only_enforce_if(a)\n", + " model.add(y > z).only_enforce_if(b)\n", + " model.add(z > x).only_enforce_if(c)\n", "\n", " # Add assumptions\n", - " model.AddAssumptions([a, b, c])\n", + " model.add_assumptions([a, b, c])\n", "\n", " # Creates a solver and solves.\n", " solver = cp_model.CpSolver()\n", - " status = solver.Solve(model)\n", + " status = solver.solve(model)\n", "\n", " # Print solution.\n", - " print(f\"Status = {solver.StatusName(status)}\")\n", + " print(f\"Status = {solver.status_name(status)}\")\n", " if status == cp_model.INFEASIBLE:\n", " print(\n", - " \"SufficientAssumptionsForInfeasibility = \"\n", - " f\"{solver.SufficientAssumptionsForInfeasibility()}\"\n", + " \"sufficient_assumptions_for_infeasibility = \"\n", + " f\"{solver.sufficient_assumptions_for_infeasibility()}\"\n", " )\n", "\n", "\n", diff --git a/examples/notebook/sat/bin_packing_sat.ipynb b/examples/notebook/sat/bin_packing_sat.ipynb index 2b8642ef4cc..638ea767a97 100644 --- a/examples/notebook/sat/bin_packing_sat.ipynb +++ b/examples/notebook/sat/bin_packing_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -73,7 +73,7 @@ "metadata": {}, "source": [ "\n", - "Solve a simple bin packing problem using CP-SAT." 
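On the assumptions sample above: when the solve comes back INFEASIBLE, `sufficient_assumptions_for_infeasibility()` reports a subset of the assumption literals that is already enough to make the model infeasible (for plain Boolean assumptions the returned values index into `model.proto.variables`; treat that mapping as an assumption here). A smaller self-contained sketch of the mechanism:

```python
from ortools.sat.python import cp_model

model = cp_model.CpModel()
x = model.new_int_var(0, 10, "x")
y = model.new_int_var(0, 10, "y")
a = model.new_bool_var("a")
b = model.new_bool_var("b")
model.add(x > y).only_enforce_if(a)
model.add(y > x).only_enforce_if(b)
model.add_assumptions([a, b])  # jointly contradictory

solver = cp_model.CpSolver()
status = solver.solve(model)
print(solver.status_name(status))  # INFEASIBLE
print(solver.sufficient_assumptions_for_infeasibility())
```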
+ "Solves a simple bin packing problem using CP-SAT.\n" ] }, { @@ -124,7 +124,7 @@ " return items, bins\n", "\n", "\n", - "def main():\n", + "def main() -> None:\n", " items, bins = create_data_model()\n", "\n", " # Create the model.\n", @@ -135,49 +135,52 @@ " items_x_bins = pd.MultiIndex.from_product(\n", " [items.index, bins.index], names=[\"item\", \"bin\"]\n", " )\n", - " x = model.NewBoolVarSeries(name=\"x\", index=items_x_bins)\n", + " x = model.new_bool_var_series(name=\"x\", index=items_x_bins)\n", "\n", " # y[j] = 1 if bin j is used.\n", - " y = model.NewBoolVarSeries(name=\"y\", index=bins.index)\n", + " y = model.new_bool_var_series(name=\"y\", index=bins.index)\n", "\n", " # Constraints\n", " # Each item must be in exactly one bin.\n", " for unused_name, all_copies in x.groupby(\"item\"):\n", - " model.AddExactlyOne(x[all_copies.index])\n", + " model.add_exactly_one(x[all_copies.index])\n", "\n", " # The amount packed in each bin cannot exceed its capacity.\n", " for selected_bin in bins.index:\n", " items_in_bin = x.xs(selected_bin, level=\"bin\")\n", - " model.Add(\n", + " model.add(\n", " items_in_bin.dot(items.weight)\n", " <= bins.loc[selected_bin].capacity * y[selected_bin]\n", " )\n", "\n", " # Objective: minimize the number of bins used.\n", - " model.Minimize(y.sum())\n", + " model.minimize(y.sum())\n", "\n", " # Create the solver and solve the model.\n", " solver = cp_model.CpSolver()\n", - " status = solver.Solve(model)\n", + " status = solver.solve(model)\n", "\n", " if status == cp_model.OPTIMAL or status == cp_model.FEASIBLE:\n", - " print(f\"Number of bins used = {solver.ObjectiveValue()}\")\n", + " print(f\"Number of bins used = {solver.objective_value}\")\n", "\n", - " x_values = solver.BooleanValues(x)\n", - " y_values = solver.BooleanValues(y)\n", + " x_values = solver.boolean_values(x)\n", + " y_values = solver.boolean_values(y)\n", " active_bins = y_values.loc[lambda x: x].index\n", "\n", " for b in active_bins:\n", " print(f\"Bin {b}\")\n", - " items_in_bin = x_values.xs(b, level=\"bin\").loc[lambda x: x].index\n", - " for item in items_in_bin:\n", + " items_in_active_bin = x_values.xs(b, level=\"bin\").loc[lambda x: x].index\n", + " for item in items_in_active_bin:\n", " print(f\" Item {item} - weight {items.loc[item].weight}\")\n", - " print(f\" Packed items weight: {items.loc[items_in_bin].sum().to_string()}\")\n", + " print(\n", + " \" Packed items weight:\"\n", + " f\" {items.loc[items_in_active_bin].sum().to_string()}\"\n", + " )\n", " print()\n", "\n", " print(f\"Total packed weight: {items.weight.sum()}\")\n", " print()\n", - " print(f\"Time = {solver.WallTime()} seconds\")\n", + " print(f\"Time = {solver.wall_time} seconds\")\n", " elif status == cp_model.INFEASIBLE:\n", " print(\"No solution found\")\n", " else:\n", diff --git a/examples/notebook/sat/binpacking_problem_sat.ipynb b/examples/notebook/sat/binpacking_problem_sat.ipynb index 53a3b71f962..81a5b31467a 100644 --- a/examples/notebook/sat/binpacking_problem_sat.ipynb +++ b/examples/notebook/sat/binpacking_problem_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { @@ -87,7 +87,7 @@ "from ortools.sat.python import cp_model\n", "\n", "\n", - "def BinpackingProblemSat():\n", + "def binpacking_problem_sat():\n", " \"\"\"Solves a bin-packing problem using the CP-SAT solver.\"\"\"\n", " # Data.\n", " bin_capacity = 100\n", @@ -107,46 +107,46 @@ " for i in all_items:\n", " num_copies = items[i][1]\n", " for b in all_bins:\n", - " x[(i, b)] = model.NewIntVar(0, num_copies, f\"x[{i},{b}]\")\n", + " x[(i, b)] = model.new_int_var(0, num_copies, f\"x[{i},{b}]\")\n", "\n", " # Load variables.\n", - " load = [model.NewIntVar(0, bin_capacity, f\"load[{b}]\") for b in all_bins]\n", + " load = [model.new_int_var(0, bin_capacity, f\"load[{b}]\") for b in all_bins]\n", "\n", " # Slack variables.\n", - " slacks = [model.NewBoolVar(f\"slack[{b}]\") for b in all_bins]\n", + " slacks = [model.new_bool_var(f\"slack[{b}]\") for b in all_bins]\n", "\n", " # Links load and x.\n", " for b in all_bins:\n", - " model.Add(load[b] == sum(x[(i, b)] * items[i][0] for i in all_items))\n", + " model.add(load[b] == sum(x[(i, b)] * items[i][0] for i in all_items))\n", "\n", " # Place all items.\n", " for i in all_items:\n", - " model.Add(sum(x[(i, b)] for b in all_bins) == items[i][1])\n", + " model.add(sum(x[(i, b)] for b in all_bins) == items[i][1])\n", "\n", " # Links load and slack through an equivalence relation.\n", " safe_capacity = bin_capacity - slack_capacity\n", " for b in all_bins:\n", " # slack[b] => load[b] <= safe_capacity.\n", - " model.Add(load[b] <= safe_capacity).OnlyEnforceIf(slacks[b])\n", + " model.add(load[b] <= safe_capacity).only_enforce_if(slacks[b])\n", " # not(slack[b]) => load[b] > safe_capacity.\n", - " model.Add(load[b] > safe_capacity).OnlyEnforceIf(slacks[b].Not())\n", + " model.add(load[b] > safe_capacity).only_enforce_if(~slacks[b])\n", "\n", " # Maximize sum of slacks.\n", - " model.Maximize(sum(slacks))\n", + " model.maximize(sum(slacks))\n", "\n", " # Solves and prints out the solution.\n", " solver = cp_model.CpSolver()\n", - " status = solver.Solve(model)\n", - " print(f\"Solve status: {solver.StatusName(status)}\")\n", + " status = solver.solve(model)\n", + " print(f\"solve status: {solver.status_name(status)}\")\n", " if status == cp_model.OPTIMAL:\n", - " print(f\"Optimal objective value: {solver.ObjectiveValue()}\")\n", + " print(f\"Optimal objective value: {solver.objective_value}\")\n", " print(\"Statistics\")\n", - " print(f\" - conflicts : {solver.NumConflicts()}\")\n", - " print(f\" - branches : {solver.NumBranches()}\")\n", - " print(f\" - wall time : {solver.WallTime()}s\")\n", + " print(f\" - conflicts : {solver.num_conflicts}\")\n", + " print(f\" - branches : {solver.num_branches}\")\n", + " print(f\" - wall time : {solver.wall_time}s\")\n", "\n", "\n", - "BinpackingProblemSat()\n", + "binpacking_problem_sat()\n", "\n" ] } diff --git a/examples/notebook/sat/bool_and_int_var_product_sample_sat.ipynb b/examples/notebook/sat/bool_and_int_var_product_sample_sat.ipynb new file mode 100644 index 00000000000..741e862cdee --- /dev/null +++ b/examples/notebook/sat/bool_and_int_var_product_sample_sat.ipynb @@ -0,0 +1,150 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "google", + "metadata": {}, + "source": [ + "##### Copyright 2024 Google LLC." 
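The slack linking in binpacking_problem_sat above is the standard reification pattern: the half-reified pair `slack => load <= safe_capacity` and `not(slack) => load > safe_capacity` together make the Boolean equivalent to the condition, not just an implication. A minimal sketch:

```python
from ortools.sat.python import cp_model

model = cp_model.CpModel()
load = model.new_int_var(0, 100, "load")
slack = model.new_bool_var("slack")
model.add(load <= 80).only_enforce_if(slack)   # slack  => load <= 80
model.add(load > 80).only_enforce_if(~slack)   # ~slack => load >  80
model.add(load == 90)

solver = cp_model.CpSolver()
solver.solve(model)
print(solver.boolean_value(slack))  # False, because 90 > 80
```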
+ ] + }, + { + "cell_type": "markdown", + "id": "apache", + "metadata": {}, + "source": [ + "Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "you may not use this file except in compliance with the License.\n", + "You may obtain a copy of the License at\n", + "\n", + " http://www.apache.org/licenses/LICENSE-2.0\n", + "\n", + "Unless required by applicable law or agreed to in writing, software\n", + "distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "See the License for the specific language governing permissions and\n", + "limitations under the License.\n" + ] + }, + { + "cell_type": "markdown", + "id": "basename", + "metadata": {}, + "source": [ + "# bool_and_int_var_product_sample_sat" + ] + }, + { + "cell_type": "markdown", + "id": "link", + "metadata": {}, + "source": [ + "\n", + "\n", + "\n", + "
\n", + "Run in Google Colab\n", + "\n", + "View source on GitHub\n", + "
" + ] + }, + { + "cell_type": "markdown", + "id": "doc", + "metadata": {}, + "source": [ + "First, you must install [ortools](https://pypi.org/project/ortools/) package in this colab." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "install", + "metadata": {}, + "outputs": [], + "source": [ + "%pip install ortools" + ] + }, + { + "cell_type": "markdown", + "id": "description", + "metadata": {}, + "source": [ + "\n", + "Code sample that encodes the product of a Boolean and an integer variable.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "code", + "metadata": {}, + "outputs": [], + "source": [ + "from ortools.sat.python import cp_model\n", + "\n", + "\n", + "class VarArraySolutionPrinter(cp_model.CpSolverSolutionCallback):\n", + " \"\"\"Print intermediate solutions.\"\"\"\n", + "\n", + " def __init__(self, variables: list[cp_model.IntVar]):\n", + " cp_model.CpSolverSolutionCallback.__init__(self)\n", + " self.__variables = variables\n", + "\n", + " def on_solution_callback(self) -> None:\n", + " for v in self.__variables:\n", + " print(f\"{v}={self.value(v)}\", end=\" \")\n", + " print()\n", + "\n", + "\n", + "def build_product_var(\n", + " model: cp_model.CpModel, b: cp_model.IntVar, x: cp_model.IntVar, name: str\n", + ") -> cp_model.IntVar:\n", + " \"\"\"Builds the product of a Boolean variable and an integer variable.\"\"\"\n", + " p = model.new_int_var_from_domain(\n", + " cp_model.Domain.from_flat_intervals(x.proto.domain).union_with(\n", + " cp_model.Domain(0, 0)\n", + " ),\n", + " name,\n", + " )\n", + " model.add(p == x).only_enforce_if(b)\n", + " model.add(p == 0).only_enforce_if(~b)\n", + " return p\n", + "\n", + "\n", + "def bool_and_int_var_product_sample_sat():\n", + " \"\"\"Encoding of the product of two Boolean variables.\n", + "\n", + " p == x * y, which is the same as p <=> x and y\n", + " \"\"\"\n", + " model = cp_model.CpModel()\n", + " b = model.new_bool_var(\"b\")\n", + " x = model.new_int_var_from_domain(\n", + " cp_model.Domain.from_values([1, 2, 3, 5, 6, 7, 9, 10]), \"x\"\n", + " )\n", + " p = build_product_var(model, b, x, \"p\")\n", + "\n", + " # Search for x and b values in increasing order.\n", + " model.add_decision_strategy(\n", + " [b, x], cp_model.CHOOSE_FIRST, cp_model.SELECT_MIN_VALUE\n", + " )\n", + "\n", + " # Create a solver and solve.\n", + " solver = cp_model.CpSolver()\n", + " solution_printer = VarArraySolutionPrinter([x, b, p])\n", + " solver.parameters.enumerate_all_solutions = True\n", + " solver.parameters.search_branching = cp_model.FIXED_SEARCH\n", + " solver.solve(model, solution_printer)\n", + "\n", + "\n", + "bool_and_int_var_product_sample_sat()\n", + "\n" + ] + } + ], + "metadata": {}, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/notebook/sat/bool_or_sample_sat.ipynb b/examples/notebook/sat/bool_or_sample_sat.ipynb index 1aeb480cf4f..004c5706420 100644 --- a/examples/notebook/sat/bool_or_sample_sat.ipynb +++ b/examples/notebook/sat/bool_or_sample_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { @@ -87,16 +87,19 @@ "from ortools.sat.python import cp_model\n", "\n", "\n", - "def BoolOrSampleSat():\n", + "def bool_or_sample_sat():\n", " model = cp_model.CpModel()\n", "\n", - " x = model.NewBoolVar(\"x\")\n", - " y = model.NewBoolVar(\"y\")\n", + " x = model.new_bool_var(\"x\")\n", + " y = model.new_bool_var(\"y\")\n", "\n", - " model.AddBoolOr([x, y.Not()])\n", + " model.add_bool_or([x, y.negated()])\n", + " # The [] is not mandatory.\n", + " # ~y is equivalent to y.negated()\n", + " model.add_bool_or(x, ~y)\n", "\n", "\n", - "BoolOrSampleSat()\n", + "bool_or_sample_sat()\n", "\n" ] } diff --git a/examples/notebook/sat/boolean_product_sample_sat.ipynb b/examples/notebook/sat/boolean_product_sample_sat.ipynb index 2994aac57ac..206189e2dcd 100644 --- a/examples/notebook/sat/boolean_product_sample_sat.ipynb +++ b/examples/notebook/sat/boolean_product_sample_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -87,31 +87,31 @@ "from ortools.sat.python import cp_model\n", "\n", "\n", - "def BooleanProductSampleSat():\n", + "def boolean_product_sample_sat():\n", " \"\"\"Encoding of the product of two Boolean variables.\n", "\n", " p == x * y, which is the same as p <=> x and y\n", " \"\"\"\n", " model = cp_model.CpModel()\n", - " x = model.NewBoolVar(\"x\")\n", - " y = model.NewBoolVar(\"y\")\n", - " p = model.NewBoolVar(\"p\")\n", + " x = model.new_bool_var(\"x\")\n", + " y = model.new_bool_var(\"y\")\n", + " p = model.new_bool_var(\"p\")\n", "\n", " # x and y implies p, rewrite as not(x and y) or p.\n", - " model.AddBoolOr(x.Not(), y.Not(), p)\n", + " model.add_bool_or(~x, ~y, p)\n", "\n", " # p implies x and y, expanded into two implications.\n", - " model.AddImplication(p, x)\n", - " model.AddImplication(p, y)\n", + " model.add_implication(p, x)\n", + " model.add_implication(p, y)\n", "\n", " # Create a solver and solve.\n", " solver = cp_model.CpSolver()\n", " solution_printer = cp_model.VarArraySolutionPrinter([x, y, p])\n", " solver.parameters.enumerate_all_solutions = True\n", - " solver.Solve(model, solution_printer)\n", + " solver.solve(model, solution_printer)\n", "\n", "\n", - "BooleanProductSampleSat()\n", + "boolean_product_sample_sat()\n", "\n" ] } diff --git a/examples/notebook/sat/channeling_sample_sat.ipynb b/examples/notebook/sat/channeling_sample_sat.ipynb index c71ff48cc95..87c91888ff6 100644 --- a/examples/notebook/sat/channeling_sample_sat.ipynb +++ b/examples/notebook/sat/channeling_sample_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { @@ -90,46 +90,41 @@ "class VarArraySolutionPrinter(cp_model.CpSolverSolutionCallback):\n", " \"\"\"Print intermediate solutions.\"\"\"\n", "\n", - " def __init__(self, variables):\n", + " def __init__(self, variables: list[cp_model.IntVar]):\n", " cp_model.CpSolverSolutionCallback.__init__(self)\n", " self.__variables = variables\n", - " self.__solution_count = 0\n", "\n", - " def on_solution_callback(self):\n", - " self.__solution_count += 1\n", + " def on_solution_callback(self) -> None:\n", " for v in self.__variables:\n", - " print(f\"{v}={self.Value(v)}\", end=\" \")\n", + " print(f\"{v}={self.value(v)}\", end=\" \")\n", " print()\n", "\n", - " def solution_count(self):\n", - " return self.__solution_count\n", "\n", - "\n", - "def ChannelingSampleSat():\n", + "def channeling_sample_sat():\n", " \"\"\"Demonstrates how to link integer constraints together.\"\"\"\n", "\n", " # Create the CP-SAT model.\n", " model = cp_model.CpModel()\n", "\n", " # Declare our two primary variables.\n", - " x = model.NewIntVar(0, 10, \"x\")\n", - " y = model.NewIntVar(0, 10, \"y\")\n", + " x = model.new_int_var(0, 10, \"x\")\n", + " y = model.new_int_var(0, 10, \"y\")\n", "\n", " # Declare our intermediate boolean variable.\n", - " b = model.NewBoolVar(\"b\")\n", + " b = model.new_bool_var(\"b\")\n", "\n", " # Implement b == (x >= 5).\n", - " model.Add(x >= 5).OnlyEnforceIf(b)\n", - " model.Add(x < 5).OnlyEnforceIf(b.Not())\n", + " model.add(x >= 5).only_enforce_if(b)\n", + " model.add(x < 5).only_enforce_if(~b)\n", "\n", " # Create our two half-reified constraints.\n", " # First, b implies (y == 10 - x).\n", - " model.Add(y == 10 - x).OnlyEnforceIf(b)\n", + " model.add(y == 10 - x).only_enforce_if(b)\n", " # Second, not(b) implies y == 0.\n", - " model.Add(y == 0).OnlyEnforceIf(b.Not())\n", + " model.add(y == 0).only_enforce_if(~b)\n", "\n", " # Search for x values in increasing order.\n", - " model.AddDecisionStrategy([x], cp_model.CHOOSE_FIRST, cp_model.SELECT_MIN_VALUE)\n", + " model.add_decision_strategy([x], cp_model.CHOOSE_FIRST, cp_model.SELECT_MIN_VALUE)\n", "\n", " # Create a solver and solve with a fixed search.\n", " solver = cp_model.CpSolver()\n", @@ -141,10 +136,10 @@ "\n", " # Search and print out all solutions.\n", " solution_printer = VarArraySolutionPrinter([x, y, b])\n", - " solver.Solve(model, solution_printer)\n", + " solver.solve(model, solution_printer)\n", "\n", "\n", - "ChannelingSampleSat()\n", + "channeling_sample_sat()\n", "\n" ] } diff --git a/examples/notebook/sat/clone_model_sample_sat.ipynb b/examples/notebook/sat/clone_model_sample_sat.ipynb index 06e98df05a5..0f07c81c5cd 100644 --- a/examples/notebook/sat/clone_model_sample_sat.ipynb +++ b/examples/notebook/sat/clone_model_sample_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { @@ -86,44 +86,44 @@ "from ortools.sat.python import cp_model\n", "\n", "\n", - "def CloneModelSampleSat():\n", + "def clone_model_sample_sat():\n", " \"\"\"Showcases cloning a model.\"\"\"\n", " # Creates the model.\n", " model = cp_model.CpModel()\n", "\n", " # Creates the variables.\n", " num_vals = 3\n", - " x = model.NewIntVar(0, num_vals - 1, \"x\")\n", - " y = model.NewIntVar(0, num_vals - 1, \"y\")\n", - " z = model.NewIntVar(0, num_vals - 1, \"z\")\n", + " x = model.new_int_var(0, num_vals - 1, \"x\")\n", + " y = model.new_int_var(0, num_vals - 1, \"y\")\n", + " z = model.new_int_var(0, num_vals - 1, \"z\")\n", "\n", " # Creates the constraints.\n", - " model.Add(x != y)\n", + " model.add(x != y)\n", "\n", - " model.Maximize(x + 2 * y + 3 * z)\n", + " model.maximize(x + 2 * y + 3 * z)\n", "\n", " # Creates a solver and solves.\n", " solver = cp_model.CpSolver()\n", - " status = solver.Solve(model)\n", + " status = solver.solve(model)\n", "\n", " if status == cp_model.OPTIMAL:\n", - " print(\"Optimal value of the original model: {}\".format(solver.ObjectiveValue()))\n", + " print(\"Optimal value of the original model: {}\".format(solver.objective_value))\n", "\n", - " # Clone the model.\n", - " copy = model.Clone()\n", + " # Clones the model.\n", + " copy = model.clone()\n", "\n", - " copy_x = copy.GetIntVarFromProtoIndex(x.Index())\n", - " copy_y = copy.GetIntVarFromProtoIndex(y.Index())\n", + " copy_x = copy.get_int_var_from_proto_index(x.index)\n", + " copy_y = copy.get_int_var_from_proto_index(y.index)\n", "\n", - " copy.Add(copy_x + copy_y <= 1)\n", + " copy.add(copy_x + copy_y <= 1)\n", "\n", - " status = solver.Solve(copy)\n", + " status = solver.solve(copy)\n", "\n", " if status == cp_model.OPTIMAL:\n", - " print(\"Optimal value of the modified model: {}\".format(solver.ObjectiveValue()))\n", + " print(\"Optimal value of the modified model: {}\".format(solver.objective_value))\n", "\n", "\n", - "CloneModelSampleSat()\n", + "clone_model_sample_sat()\n", "\n" ] } diff --git a/examples/notebook/sat/cp_is_fun_sat.ipynb b/examples/notebook/sat/cp_is_fun_sat.ipynb index 0165acf5374..d8c3facd9a2 100644 --- a/examples/notebook/sat/cp_is_fun_sat.ipynb +++ b/examples/notebook/sat/cp_is_fun_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { @@ -94,38 +94,39 @@ "class VarArraySolutionPrinter(cp_model.CpSolverSolutionCallback):\n", " \"\"\"Print intermediate solutions.\"\"\"\n", "\n", - " def __init__(self, variables):\n", + " def __init__(self, variables: list[cp_model.IntVar]):\n", " cp_model.CpSolverSolutionCallback.__init__(self)\n", " self.__variables = variables\n", " self.__solution_count = 0\n", "\n", - " def on_solution_callback(self):\n", + " def on_solution_callback(self) -> None:\n", " self.__solution_count += 1\n", " for v in self.__variables:\n", - " print(f\"{v}={self.Value(v)}\", end=\" \")\n", + " print(f\"{v}={self.value(v)}\", end=\" \")\n", " print()\n", "\n", - " def solution_count(self):\n", + " @property\n", + " def solution_count(self) -> int:\n", " return self.__solution_count\n", "\n", "\n", - "def main():\n", - " \"\"\"Solve the CP+IS+FUN==TRUE cryptarithm.\"\"\"\n", + "def main() -> None:\n", + " \"\"\"solve the CP+IS+FUN==TRUE cryptarithm.\"\"\"\n", " # Constraint programming engine\n", " model = cp_model.CpModel()\n", "\n", " base = 10\n", "\n", - " c = model.NewIntVar(1, base - 1, \"C\")\n", - " p = model.NewIntVar(0, base - 1, \"P\")\n", - " i = model.NewIntVar(1, base - 1, \"I\")\n", - " s = model.NewIntVar(0, base - 1, \"S\")\n", - " f = model.NewIntVar(1, base - 1, \"F\")\n", - " u = model.NewIntVar(0, base - 1, \"U\")\n", - " n = model.NewIntVar(0, base - 1, \"N\")\n", - " t = model.NewIntVar(1, base - 1, \"T\")\n", - " r = model.NewIntVar(0, base - 1, \"R\")\n", - " e = model.NewIntVar(0, base - 1, \"E\")\n", + " c = model.new_int_var(1, base - 1, \"C\")\n", + " p = model.new_int_var(0, base - 1, \"P\")\n", + " i = model.new_int_var(1, base - 1, \"I\")\n", + " s = model.new_int_var(0, base - 1, \"S\")\n", + " f = model.new_int_var(1, base - 1, \"F\")\n", + " u = model.new_int_var(0, base - 1, \"U\")\n", + " n = model.new_int_var(0, base - 1, \"N\")\n", + " t = model.new_int_var(1, base - 1, \"T\")\n", + " r = model.new_int_var(0, base - 1, \"R\")\n", + " e = model.new_int_var(0, base - 1, \"E\")\n", "\n", " # We need to group variables in a list to use the constraint AllDifferent.\n", " letters = [c, p, i, s, f, u, n, t, r, e]\n", @@ -134,10 +135,10 @@ " assert base >= len(letters)\n", "\n", " # Define constraints.\n", - " model.AddAllDifferent(letters)\n", + " model.add_all_different(letters)\n", "\n", " # CP + IS + FUN = TRUE\n", - " model.Add(\n", + " model.add(\n", " c * base + p + i * base + s + f * base * base + u * base + n\n", " == t * base * base * base + r * base * base + u * base + e\n", " )\n", @@ -148,15 +149,15 @@ " # Enumerate all solutions.\n", " solver.parameters.enumerate_all_solutions = True\n", " # Solve.\n", - " status = solver.Solve(model, solution_printer)\n", + " status = solver.solve(model, solution_printer)\n", "\n", " # Statistics.\n", " print(\"\\nStatistics\")\n", - " print(f\" status : {solver.StatusName(status)}\")\n", - " print(f\" conflicts: {solver.NumConflicts()}\")\n", - " print(f\" branches : {solver.NumBranches()}\")\n", - " print(f\" wall time: {solver.WallTime()} s\")\n", - " print(f\" sol found: {solution_printer.solution_count()}\")\n", + " print(f\" status : {solver.status_name(status)}\")\n", + " print(f\" conflicts: {solver.num_conflicts}\")\n", + " print(f\" branches : {solver.num_branches}\")\n", + " print(f\" wall time: {solver.wall_time} s\")\n", + " print(f\" sol found: {solution_printer.solution_count}\")\n", "\n", "\n", "main()\n", diff --git a/examples/notebook/sat/cp_sat_example.ipynb b/examples/notebook/sat/cp_sat_example.ipynb 
index 552331f9e83..130032976b8 100644 --- a/examples/notebook/sat/cp_sat_example.ipynb +++ b/examples/notebook/sat/cp_sat_example.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -86,42 +86,42 @@ "from ortools.sat.python import cp_model\n", "\n", "\n", - "def main():\n", + "def main() -> None:\n", " \"\"\"Minimal CP-SAT example to showcase calling the solver.\"\"\"\n", " # Creates the model.\n", " model = cp_model.CpModel()\n", "\n", " # Creates the variables.\n", " var_upper_bound = max(50, 45, 37)\n", - " x = model.NewIntVar(0, var_upper_bound, \"x\")\n", - " y = model.NewIntVar(0, var_upper_bound, \"y\")\n", - " z = model.NewIntVar(0, var_upper_bound, \"z\")\n", + " x = model.new_int_var(0, var_upper_bound, \"x\")\n", + " y = model.new_int_var(0, var_upper_bound, \"y\")\n", + " z = model.new_int_var(0, var_upper_bound, \"z\")\n", "\n", " # Creates the constraints.\n", - " model.Add(2 * x + 7 * y + 3 * z <= 50)\n", - " model.Add(3 * x - 5 * y + 7 * z <= 45)\n", - " model.Add(5 * x + 2 * y - 6 * z <= 37)\n", + " model.add(2 * x + 7 * y + 3 * z <= 50)\n", + " model.add(3 * x - 5 * y + 7 * z <= 45)\n", + " model.add(5 * x + 2 * y - 6 * z <= 37)\n", "\n", - " model.Maximize(2 * x + 2 * y + 3 * z)\n", + " model.maximize(2 * x + 2 * y + 3 * z)\n", "\n", " # Creates a solver and solves the model.\n", " solver = cp_model.CpSolver()\n", - " status = solver.Solve(model)\n", + " status = solver.solve(model)\n", "\n", " if status == cp_model.OPTIMAL or status == cp_model.FEASIBLE:\n", - " print(f\"Maximum of objective function: {solver.ObjectiveValue()}\\n\")\n", - " print(f\"x = {solver.Value(x)}\")\n", - " print(f\"y = {solver.Value(y)}\")\n", - " print(f\"z = {solver.Value(z)}\")\n", + " print(f\"Maximum of objective function: {solver.objective_value}\\n\")\n", + " print(f\"x = {solver.value(x)}\")\n", + " print(f\"y = {solver.value(y)}\")\n", + " print(f\"z = {solver.value(z)}\")\n", " else:\n", " print(\"No solution found.\")\n", "\n", " # Statistics.\n", " print(\"\\nStatistics\")\n", - " print(f\" status : {solver.StatusName(status)}\")\n", - " print(f\" conflicts: {solver.NumConflicts()}\")\n", - " print(f\" branches : {solver.NumBranches()}\")\n", - " print(f\" wall time: {solver.WallTime()} s\")\n", + " print(f\" status : {solver.status_name(status)}\")\n", + " print(f\" conflicts: {solver.num_conflicts}\")\n", + " print(f\" branches : {solver.num_branches}\")\n", + " print(f\" wall time: {solver.wall_time} s\")\n", "\n", "\n", "main()\n", diff --git a/examples/notebook/sat/cumulative_variable_profile_sample_sat.ipynb b/examples/notebook/sat/cumulative_variable_profile_sample_sat.ipynb index 43c1601e5fd..236fdd83b44 100644 --- a/examples/notebook/sat/cumulative_variable_profile_sample_sat.ipynb +++ b/examples/notebook/sat/cumulative_variable_profile_sample_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -73,7 +73,7 @@ "metadata": {}, "source": [ "\n", - "Solve a simple scheduling problem with a variable work load." 
+ "Solves a scheduling problem with a min and max profile for the work load.\n" ] }, { @@ -90,22 +90,38 @@ "from ortools.sat.python import cp_model\n", "\n", "\n", - "def create_data_model() -> tuple[pd.DataFrame, pd.DataFrame]:\n", - " \"\"\"Creates the two dataframes that describes the model.\"\"\"\n", + "def create_data_model() -> tuple[pd.DataFrame, pd.DataFrame, pd.DataFrame]:\n", + " \"\"\"Creates the dataframes that describes the model.\"\"\"\n", "\n", - " capacity_str: str = \"\"\"\n", - " start_hour capacity\n", + " max_load_str: str = \"\"\"\n", + " start_hour max_load\n", " 0 0\n", " 2 0\n", - " 4 1\n", - " 6 3\n", - " 8 6\n", + " 4 3\n", + " 6 6\n", + " 8 8\n", " 10 12\n", " 12 8\n", " 14 12\n", " 16 10\n", - " 18 4\n", - " 20 2\n", + " 18 6\n", + " 20 4\n", + " 22 0\n", + " \"\"\"\n", + "\n", + " min_load_str: str = \"\"\"\n", + " start_hour min_load\n", + " 0 0\n", + " 2 0\n", + " 4 0\n", + " 6 0\n", + " 8 3\n", + " 10 3\n", + " 12 1\n", + " 14 3\n", + " 16 3\n", + " 18 1\n", + " 20 1\n", " 22 0\n", " \"\"\"\n", "\n", @@ -143,33 +159,78 @@ " t30 90 4 2\n", " \"\"\"\n", "\n", - " capacity_df = pd.read_table(io.StringIO(capacity_str), sep=r\"\\s+\")\n", + " max_load_df = pd.read_table(io.StringIO(max_load_str), sep=r\"\\s+\")\n", + " min_load_df = pd.read_table(io.StringIO(min_load_str), sep=r\"\\s+\")\n", " tasks_df = pd.read_table(io.StringIO(tasks_str), index_col=0, sep=r\"\\s+\")\n", - " return capacity_df, tasks_df\n", + " return max_load_df, min_load_df, tasks_df\n", + "\n", "\n", + "def check_solution(\n", + " tasks: list[tuple[int, int, int]],\n", + " min_load_df: pd.DataFrame,\n", + " max_load_df: pd.DataFrame,\n", + " period_length: int,\n", + " horizon: int,\n", + ") -> bool:\n", + " \"\"\"Checks the solution validity against the min and max load constraints.\"\"\"\n", + " minutes_per_hour = 60\n", + " actual_load_profile = [0 for _ in range(horizon)]\n", + " min_load_profile = [0 for _ in range(horizon)]\n", + " max_load_profile = [0 for _ in range(horizon)]\n", "\n", - "def main():\n", + " # The complexity of the checker is linear in the number of time points, and\n", + " # should be improved.\n", + " for task in tasks:\n", + " for t in range(task[1]):\n", + " actual_load_profile[task[0] + t] += task[2]\n", + " for row in max_load_df.itertuples():\n", + " for t in range(period_length):\n", + " max_load_profile[row.start_hour * minutes_per_hour + t] = row.max_load\n", + " for row in min_load_df.itertuples():\n", + " for t in range(period_length):\n", + " min_load_profile[row.start_hour * minutes_per_hour + t] = row.min_load\n", + "\n", + " for time in range(horizon):\n", + " if actual_load_profile[time] > max_load_profile[time]:\n", + " print(\n", + " f\"actual load {actual_load_profile[time]} at time {time} is greater\"\n", + " f\" than max load {max_load_profile[time]}\"\n", + " )\n", + " return False\n", + " if actual_load_profile[time] < min_load_profile[time]:\n", + " print(\n", + " f\"actual load {actual_load_profile[time]} at time {time} is\"\n", + " f\" less than min load {min_load_profile[time]}\"\n", + " )\n", + " return False\n", + " return True\n", + "\n", + "\n", + "def main(_) -> None:\n", " \"\"\"Create the model and solves it.\"\"\"\n", - " capacity_df, tasks_df = create_data_model()\n", + " max_load_df, min_load_df, tasks_df = create_data_model()\n", "\n", " # Create the model.\n", " model = cp_model.CpModel()\n", "\n", " # Get the max capacity from the capacity dataframe.\n", - " max_capacity = capacity_df.capacity.max()\n", - " print(f\"Max 
capacity = {max_capacity}\")\n", + " max_load = max_load_df.max_load.max()\n", + " print(f\"Max capacity = {max_load}\")\n", " print(f\"#tasks = {len(tasks_df)}\")\n", "\n", - " minutes_per_period: int = 120\n", + " minutes_per_hour: int = 60\n", " horizon: int = 24 * 60\n", "\n", " # Variables\n", - " starts = model.NewIntVarSeries(\n", - " name=\"starts\", lower_bounds=0, upper_bounds=horizon, index=tasks_df.index\n", + " starts = model.new_int_var_series(\n", + " name=\"starts\",\n", + " lower_bounds=0,\n", + " upper_bounds=horizon - tasks_df.duration,\n", + " index=tasks_df.index,\n", " )\n", - " performed = model.NewBoolVarSeries(name=\"performed\", index=tasks_df.index)\n", + " performed = model.new_bool_var_series(name=\"performed\", index=tasks_df.index)\n", "\n", - " intervals = model.NewOptionalFixedSizeIntervalVarSeries(\n", + " intervals = model.new_optional_fixed_size_interval_var_series(\n", " name=\"intervals\",\n", " index=tasks_df.index,\n", " starts=starts,\n", @@ -177,43 +238,108 @@ " are_present=performed,\n", " )\n", "\n", - " # Set up the profile. We use fixed (intervals, demands) to fill in the space\n", - " # between the actual load profile and the max capacity.\n", - " time_period_intervals = model.NewFixedSizeIntervalVarSeries(\n", - " name=\"time_period_intervals\",\n", - " index=capacity_df.index,\n", - " starts=capacity_df.start_hour * minutes_per_period,\n", - " sizes=minutes_per_period,\n", + " # Set up the max profile. We use fixed (intervals, demands) to fill in the\n", + " # space between the actual max load profile and the max capacity.\n", + " time_period_max_intervals = model.new_fixed_size_interval_var_series(\n", + " name=\"time_period_max_intervals\",\n", + " index=max_load_df.index,\n", + " starts=max_load_df.start_hour * minutes_per_hour,\n", + " sizes=minutes_per_hour * 2,\n", + " )\n", + " time_period_max_heights = max_load - max_load_df.max_load\n", + "\n", + " # Cumulative constraint for the max profile.\n", + " model.add_cumulative(\n", + " intervals.to_list() + time_period_max_intervals.to_list(),\n", + " tasks_df.load.to_list() + time_period_max_heights.to_list(),\n", + " max_load,\n", " )\n", - " time_period_heights = max_capacity - capacity_df.capacity\n", "\n", - " # Cumulative constraint.\n", - " model.AddCumulative(\n", - " intervals.to_list() + time_period_intervals.to_list(),\n", - " tasks_df.load.to_list() + time_period_heights.to_list(),\n", - " max_capacity,\n", + " # Set up complemented intervals (from 0 to start, and from start + size to\n", + " # horizon).\n", + " prefix_intervals = model.new_optional_interval_var_series(\n", + " name=\"prefix_intervals\",\n", + " index=tasks_df.index,\n", + " starts=0,\n", + " sizes=starts,\n", + " ends=starts,\n", + " are_present=performed,\n", + " )\n", + "\n", + " suffix_intervals = model.new_optional_interval_var_series(\n", + " name=\"suffix_intervals\",\n", + " index=tasks_df.index,\n", + " starts=starts + tasks_df.duration,\n", + " sizes=horizon - starts - tasks_df.duration,\n", + " ends=horizon,\n", + " are_present=performed,\n", + " )\n", + "\n", + " # Set up the min profile. 
We use complemented intervals to maintain the\n", + " # complement of the work load, and fixed intervals to enforce the min\n", + " # number of active workers per time period.\n", + " #\n", + " # Note that this works only if the max load cumulative is also added to the\n", + " # model.\n", + " time_period_min_intervals = model.new_fixed_size_interval_var_series(\n", + " name=\"time_period_min_intervals\",\n", + " index=min_load_df.index,\n", + " starts=min_load_df.start_hour * minutes_per_hour,\n", + " sizes=minutes_per_hour * 2,\n", + " )\n", + " time_period_min_heights = min_load_df.min_load\n", + "\n", + " # We take into account optional intervals. The actual capacity of the min load\n", + " # cumulative is the sum of all the active demands.\n", + " sum_of_demands = sum(tasks_df.load)\n", + " complement_capacity = model.new_int_var(0, sum_of_demands, \"complement_capacity\")\n", + " model.add(complement_capacity == performed.dot(tasks_df.load))\n", + "\n", + " # Cumulative constraint for the min profile.\n", + " model.add_cumulative(\n", + " prefix_intervals.to_list()\n", + " + suffix_intervals.to_list()\n", + " + time_period_min_intervals.to_list(),\n", + " tasks_df.load.to_list()\n", + " + tasks_df.load.to_list()\n", + " + time_period_min_heights.to_list(),\n", + " complement_capacity,\n", " )\n", "\n", " # Objective: maximize the value of performed intervals.\n", " # 1 is the max priority.\n", " max_priority = max(tasks_df.priority)\n", - " model.Maximize(sum(performed * (max_priority + 1 - tasks_df.priority)))\n", + " model.maximize(sum(performed * (max_priority + 1 - tasks_df.priority)))\n", "\n", " # Create the solver and solve the model.\n", " solver = cp_model.CpSolver()\n", " solver.parameters.log_search_progress = True\n", - " solver.parameters.num_workers = 8\n", + " solver.parameters.num_workers = 16\n", " solver.parameters.max_time_in_seconds = 30.0\n", - " status = solver.Solve(model)\n", + " status = solver.solve(model)\n", "\n", " if status == cp_model.OPTIMAL or status == cp_model.FEASIBLE:\n", - " start_values = solver.Values(starts)\n", - " performed_values = solver.BooleanValues(performed)\n", + " start_values = solver.values(starts)\n", + " performed_values = solver.boolean_values(performed)\n", + " tasks: list[tuple[int, int, int]] = []\n", " for task in tasks_df.index:\n", " if performed_values[task]:\n", - " print(f\"task {task} starts at {start_values[task]}\")\n", + " print(\n", + " f'task {task} duration={tasks_df[\"duration\"][task]} '\n", + " f'load={tasks_df[\"load\"][task]} starts at {start_values[task]}'\n", + " )\n", + " tasks.append(\n", + " (start_values[task], tasks_df.duration[task], tasks_df.load[task])\n", + " )\n", " else:\n", " print(f\"task {task} is not performed\")\n", + " assert check_solution(\n", + " tasks=tasks,\n", + " min_load_df=min_load_df,\n", + " max_load_df=max_load_df,\n", + " period_length=2 * minutes_per_hour,\n", + " horizon=horizon,\n", + " )\n", " elif status == cp_model.INFEASIBLE:\n", " print(\"No solution found\")\n", " else:\n", diff --git a/examples/notebook/sat/earliness_tardiness_cost_sample_sat.ipynb b/examples/notebook/sat/earliness_tardiness_cost_sample_sat.ipynb index c7f150d9df7..3f2d6595c2d 100644 --- a/examples/notebook/sat/earliness_tardiness_cost_sample_sat.ipynb +++ b/examples/notebook/sat/earliness_tardiness_cost_sample_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
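The min-load construction in cumulative_variable_profile_sample_sat.ipynb above rests on a complement argument: the prefix and suffix intervals of a performed task cover exactly the instants where that task is not running, so the complement cumulative carries complement_capacity - actual_load(t) at every time t, and adding a fixed interval of height min_load(t) forces actual_load(t) >= min_load(t). A plain-Python check of that algebra, with assumed toy numbers:

```python
# Algebra behind the min-load cumulative above; toy numbers, not the sample's.
complement_capacity = 10  # sum of loads of all performed tasks
actual_load_t = 4         # load actually running at some time t
min_load_t = 3            # required minimum load at time t

# Prefix/suffix intervals of performed tasks contribute exactly the
# complement of the running load at time t.
complement_load_t = complement_capacity - actual_load_t

# The cumulative bounds complement_load(t) + min_load(t) by the capacity...
assert complement_load_t + min_load_t <= complement_capacity
# ...which rearranges to the intended lower bound on the real profile.
assert actual_load_t >= min_load_t
```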
] }, { @@ -90,20 +90,15 @@ "class VarArraySolutionPrinter(cp_model.CpSolverSolutionCallback):\n", " \"\"\"Print intermediate solutions.\"\"\"\n", "\n", - " def __init__(self, variables):\n", + " def __init__(self, variables: list[cp_model.IntVar]):\n", " cp_model.CpSolverSolutionCallback.__init__(self)\n", " self.__variables = variables\n", - " self.__solution_count = 0\n", "\n", - " def on_solution_callback(self):\n", - " self.__solution_count += 1\n", + " def on_solution_callback(self) -> None:\n", " for v in self.__variables:\n", - " print(f\"{v}={self.Value(v)}\", end=\" \")\n", + " print(f\"{v}={self.value(v)}\", end=\" \")\n", " print()\n", "\n", - " def solution_count(self):\n", - " return self.__solution_count\n", - "\n", "\n", "def earliness_tardiness_cost_sample_sat():\n", " \"\"\"Encode the piecewise linear expression.\"\"\"\n", @@ -117,7 +112,7 @@ " model = cp_model.CpModel()\n", "\n", " # Declare our primary variable.\n", - " x = model.NewIntVar(0, 20, \"x\")\n", + " x = model.new_int_var(0, 20, \"x\")\n", "\n", " # Create the expression variable and implement the piecewise linear function.\n", " #\n", @@ -126,24 +121,24 @@ " # ed ld\n", " #\n", " large_constant = 1000\n", - " expr = model.NewIntVar(0, large_constant, \"expr\")\n", + " expr = model.new_int_var(0, large_constant, \"expr\")\n", "\n", " # First segment.\n", - " s1 = model.NewIntVar(-large_constant, large_constant, \"s1\")\n", - " model.Add(s1 == earliness_cost * (earliness_date - x))\n", + " s1 = model.new_int_var(-large_constant, large_constant, \"s1\")\n", + " model.add(s1 == earliness_cost * (earliness_date - x))\n", "\n", " # Second segment.\n", " s2 = 0\n", "\n", " # Third segment.\n", - " s3 = model.NewIntVar(-large_constant, large_constant, \"s3\")\n", - " model.Add(s3 == lateness_cost * (x - lateness_date))\n", + " s3 = model.new_int_var(-large_constant, large_constant, \"s3\")\n", + " model.add(s3 == lateness_cost * (x - lateness_date))\n", "\n", " # Link together expr and x through s1, s2, and s3.\n", - " model.AddMaxEquality(expr, [s1, s2, s3])\n", + " model.add_max_equality(expr, [s1, s2, s3])\n", "\n", " # Search for x values in increasing order.\n", - " model.AddDecisionStrategy([x], cp_model.CHOOSE_FIRST, cp_model.SELECT_MIN_VALUE)\n", + " model.add_decision_strategy([x], cp_model.CHOOSE_FIRST, cp_model.SELECT_MIN_VALUE)\n", "\n", " # Create a solver and solve with a fixed search.\n", " solver = cp_model.CpSolver()\n", @@ -155,7 +150,7 @@ "\n", " # Search and print out all solutions.\n", " solution_printer = VarArraySolutionPrinter([x, expr])\n", - " solver.Solve(model, solution_printer)\n", + " solver.solve(model, solution_printer)\n", "\n", "\n", "earliness_tardiness_cost_sample_sat()\n", diff --git a/examples/notebook/sat/index_first_boolvar_true_sample_sat.ipynb b/examples/notebook/sat/index_first_boolvar_true_sample_sat.ipynb new file mode 100644 index 00000000000..4c6e816d8c8 --- /dev/null +++ b/examples/notebook/sat/index_first_boolvar_true_sample_sat.ipynb @@ -0,0 +1,150 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "google", + "metadata": {}, + "source": [ + "##### Copyright 2024 Google LLC." 
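earliness_tardiness_cost_sample_sat.ipynb above encodes a convex piecewise-linear cost as the maximum of three affine segments, which add_max_equality then ties to expr. The same function in plain Python, with illustrative dates and unit costs (the sample's actual constants sit outside the hunk):

```python
# Convex piecewise-linear cost as a max of affine segments; the window
# bounds and unit costs below are illustrative, not the sample's values.
def earliness_tardiness_cost(
    x: int, ed: int = 5, ld: int = 15, e_cost: int = 8, l_cost: int = 12
) -> int:
    return max(e_cost * (ed - x), 0, l_cost * (x - ld))

assert earliness_tardiness_cost(3) == 16   # two units early, 8 per unit
assert earliness_tardiness_cost(10) == 0   # inside the [ed, ld] window
assert earliness_tardiness_cost(17) == 24  # two units late, 12 per unit
```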
+ ] + }, + { + "cell_type": "markdown", + "id": "apache", + "metadata": {}, + "source": [ + "Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "you may not use this file except in compliance with the License.\n", + "You may obtain a copy of the License at\n", + "\n", + " http://www.apache.org/licenses/LICENSE-2.0\n", + "\n", + "Unless required by applicable law or agreed to in writing, software\n", + "distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "See the License for the specific language governing permissions and\n", + "limitations under the License.\n" + ] + }, + { + "cell_type": "markdown", + "id": "basename", + "metadata": {}, + "source": [ + "# index_first_boolvar_true_sample_sat" + ] + }, + { + "cell_type": "markdown", + "id": "link", + "metadata": {}, + "source": [ + "\n", + "\n", + "\n", + "
\n", + "Run in Google Colab\n", + "\n", + "View source on GitHub\n", + "
" + ] + }, + { + "cell_type": "markdown", + "id": "doc", + "metadata": {}, + "source": [ + "First, you must install [ortools](https://pypi.org/project/ortools/) package in this colab." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "install", + "metadata": {}, + "outputs": [], + "source": [ + "%pip install ortools" + ] + }, + { + "cell_type": "markdown", + "id": "description", + "metadata": {}, + "source": [ + "\n", + "Compute the index of the first Boolean variable set to true.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "code", + "metadata": {}, + "outputs": [], + "source": [ + "from ortools.sat.python import cp_model\n", + "\n", + "\n", + "class VarArraySolutionPrinter(cp_model.CpSolverSolutionCallback):\n", + " \"\"\"Print intermediate solutions.\"\"\"\n", + "\n", + " def __init__(self, index: cp_model.IntVar, boolvars: list[cp_model.IntVar]):\n", + " cp_model.CpSolverSolutionCallback.__init__(self)\n", + " self.__index = index\n", + " self.__boolvars = boolvars\n", + "\n", + " def on_solution_callback(self) -> None:\n", + " line = \"\"\n", + " for v in self.__boolvars:\n", + " line += f\"{self.value(v)}\"\n", + " line += f\" -> {self.value(self.__index)}\"\n", + " print(line)\n", + "\n", + "\n", + "def index_of_first_bool_at_true_sample_sat():\n", + " \"\"\"Compute the index of the first Boolean variable set to true.\"\"\"\n", + "\n", + " # Model.\n", + " model = cp_model.CpModel()\n", + "\n", + " # Variables\n", + " num_bool_vars = 5\n", + " bool_vars = [model.new_bool_var(f\"{i}\") for i in range(num_bool_vars)]\n", + " index = model.new_int_var(0, num_bool_vars, \"index\")\n", + "\n", + " # Channeling between the index and the Boolean variables.\n", + " model.add_min_equality(\n", + " index,\n", + " [\n", + " num_bool_vars - bool_vars[i] * (num_bool_vars - i)\n", + " for i in range(num_bool_vars)\n", + " ],\n", + " )\n", + "\n", + " # Flip bool_vars in increasing order.\n", + " model.add_decision_strategy(\n", + " bool_vars, cp_model.CHOOSE_FIRST, cp_model.SELECT_MIN_VALUE\n", + " )\n", + "\n", + " # Create a solver and solve with a fixed search.\n", + " solver = cp_model.CpSolver()\n", + "\n", + " # Force the solver to follow the decision strategy exactly.\n", + " solver.parameters.search_branching = cp_model.FIXED_SEARCH\n", + "\n", + " # Search and print out all solutions.\n", + " solver.parameters.enumerate_all_solutions = True\n", + " solution_printer = VarArraySolutionPrinter(index, bool_vars)\n", + " solver.solve(model, solution_printer)\n", + "\n", + "\n", + "index_of_first_bool_at_true_sample_sat()\n", + "\n" + ] + } + ], + "metadata": {}, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/notebook/sat/interval_relations_sample_sat.ipynb b/examples/notebook/sat/interval_relations_sample_sat.ipynb new file mode 100644 index 00000000000..397249ba5e0 --- /dev/null +++ b/examples/notebook/sat/interval_relations_sample_sat.ipynb @@ -0,0 +1,156 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "google", + "metadata": {}, + "source": [ + "##### Copyright 2024 Google LLC." 
+ ] + }, + { + "cell_type": "markdown", + "id": "apache", + "metadata": {}, + "source": [ + "Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "you may not use this file except in compliance with the License.\n", + "You may obtain a copy of the License at\n", + "\n", + " http://www.apache.org/licenses/LICENSE-2.0\n", + "\n", + "Unless required by applicable law or agreed to in writing, software\n", + "distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "See the License for the specific language governing permissions and\n", + "limitations under the License.\n" + ] + }, + { + "cell_type": "markdown", + "id": "basename", + "metadata": {}, + "source": [ + "# interval_relations_sample_sat" + ] + }, + { + "cell_type": "markdown", + "id": "link", + "metadata": {}, + "source": [ + "\n", + "\n", + "\n", + "
\n", + "Run in Google Colab\n", + "\n", + "View source on GitHub\n", + "
" + ] + }, + { + "cell_type": "markdown", + "id": "doc", + "metadata": {}, + "source": [ + "First, you must install [ortools](https://pypi.org/project/ortools/) package in this colab." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "install", + "metadata": {}, + "outputs": [], + "source": [ + "%pip install ortools" + ] + }, + { + "cell_type": "markdown", + "id": "description", + "metadata": {}, + "source": [ + "\n", + "Builds temporal relations between intervals.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "code", + "metadata": {}, + "outputs": [], + "source": [ + "from ortools.sat.python import cp_model\n", + "\n", + "\n", + "def interval_relations_sample_sat():\n", + " \"\"\"Showcases how to build temporal relations between intervals.\"\"\"\n", + " model = cp_model.CpModel()\n", + " horizon = 100\n", + "\n", + " # An interval can be created from three 1-var affine expressions.\n", + " start_var = model.new_int_var(0, horizon, \"start\")\n", + " duration = 10 # Python CP-SAT code accept integer variables or constants.\n", + " end_var = model.new_int_var(0, horizon, \"end\")\n", + " interval_var = model.new_interval_var(start_var, duration, end_var, \"interval\")\n", + "\n", + " # If the size is fixed, a simpler version uses the start expression and the\n", + " # size.\n", + " fixed_size_start_var = model.new_int_var(0, horizon, \"fixed_start\")\n", + " fixed_size_duration = 10\n", + " fixed_size_interval_var = model.new_fixed_size_interval_var(\n", + " fixed_size_start_var,\n", + " fixed_size_duration,\n", + " \"fixed_size_interval_var\",\n", + " )\n", + "\n", + " # An optional interval can be created from three 1-var affine expressions and\n", + " # a literal.\n", + " opt_start_var = model.new_int_var(0, horizon, \"opt_start\")\n", + " opt_duration = model.new_int_var(2, 6, \"opt_size\")\n", + " opt_end_var = model.new_int_var(0, horizon, \"opt_end\")\n", + " opt_presence_var = model.new_bool_var(\"opt_presence\")\n", + " opt_interval_var = model.new_optional_interval_var(\n", + " opt_start_var, opt_duration, opt_end_var, opt_presence_var, \"opt_interval\"\n", + " )\n", + "\n", + " # If the size is fixed, a simpler version uses the start expression, the\n", + " # size, and the presence literal.\n", + " opt_fixed_size_start_var = model.new_int_var(0, horizon, \"opt_fixed_start\")\n", + " opt_fixed_size_duration = 10\n", + " opt_fixed_size_presence_var = model.new_bool_var(\"opt_fixed_presence\")\n", + " opt_fixed_size_interval_var = model.new_optional_fixed_size_interval_var(\n", + " opt_fixed_size_start_var,\n", + " opt_fixed_size_duration,\n", + " opt_fixed_size_presence_var,\n", + " \"opt_fixed_size_interval_var\",\n", + " )\n", + "\n", + " # Simple precedence between two non optional intervals.\n", + " model.add(interval_var.start_expr() >= fixed_size_interval_var.end_expr())\n", + "\n", + " # Synchronize start between two intervals (one optional, one not)\n", + " model.add(\n", + " interval_var.start_expr() == opt_interval_var.start_expr()\n", + " ).only_enforce_if(opt_presence_var)\n", + "\n", + " # Exact delay between two optional intervals.\n", + " exact_delay: int = 5\n", + " model.add(\n", + " opt_interval_var.start_expr()\n", + " == opt_fixed_size_interval_var.end_expr() + exact_delay\n", + " ).only_enforce_if(opt_presence_var, opt_fixed_size_presence_var)\n", + "\n", + "\n", + "interval_relations_sample_sat()\n", + "\n" + ] + } + ], + "metadata": {}, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git 
a/examples/notebook/sat/interval_sample_sat.ipynb b/examples/notebook/sat/interval_sample_sat.ipynb index 49877710f1a..493c78ace51 100644 --- a/examples/notebook/sat/interval_sample_sat.ipynb +++ b/examples/notebook/sat/interval_sample_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -87,32 +87,32 @@ "from ortools.sat.python import cp_model\n", "\n", "\n", - "def IntervalSampleSat():\n", + "def interval_sample_sat():\n", " \"\"\"Showcases how to build interval variables.\"\"\"\n", " model = cp_model.CpModel()\n", " horizon = 100\n", "\n", " # An interval can be created from three affine expressions.\n", - " start_var = model.NewIntVar(0, horizon, \"start\")\n", + " start_var = model.new_int_var(0, horizon, \"start\")\n", " duration = 10 # Python cp/sat code accept integer variables or constants.\n", - " end_var = model.NewIntVar(0, horizon, \"end\")\n", - " interval_var = model.NewIntervalVar(start_var, duration, end_var + 2, \"interval\")\n", + " end_var = model.new_int_var(0, horizon, \"end\")\n", + " interval_var = model.new_interval_var(start_var, duration, end_var + 2, \"interval\")\n", "\n", " print(f\"interval = {repr(interval_var)}\")\n", "\n", " # If the size is fixed, a simpler version uses the start expression and the\n", " # size.\n", - " fixed_size_interval_var = model.NewFixedSizeIntervalVar(\n", + " fixed_size_interval_var = model.new_fixed_size_interval_var(\n", " start_var, 10, \"fixed_size_interval_var\"\n", " )\n", " print(f\"fixed_size_interval_var = {repr(fixed_size_interval_var)}\")\n", "\n", " # A fixed interval can be created using the same API.\n", - " fixed_interval = model.NewFixedSizeIntervalVar(5, 10, \"fixed_interval\")\n", + " fixed_interval = model.new_fixed_size_interval_var(5, 10, \"fixed_interval\")\n", " print(f\"fixed_interval = {repr(fixed_interval)}\")\n", "\n", "\n", - "IntervalSampleSat()\n", + "interval_sample_sat()\n", "\n" ] } diff --git a/examples/notebook/sat/literal_sample_sat.ipynb b/examples/notebook/sat/literal_sample_sat.ipynb index 611bb47535b..be8ff9ab741 100644 --- a/examples/notebook/sat/literal_sample_sat.ipynb +++ b/examples/notebook/sat/literal_sample_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -87,15 +87,15 @@ "from ortools.sat.python import cp_model\n", "\n", "\n", - "def LiteralSampleSat():\n", + "def literal_sample_sat():\n", " model = cp_model.CpModel()\n", - " x = model.NewBoolVar(\"x\")\n", - " not_x = x.Not()\n", + " x = model.new_bool_var(\"x\")\n", + " not_x = ~x\n", " print(x)\n", " print(not_x)\n", "\n", "\n", - "LiteralSampleSat()\n", + "literal_sample_sat()\n", "\n" ] } diff --git a/examples/notebook/sat/minimal_jobshop_sat.ipynb b/examples/notebook/sat/minimal_jobshop_sat.ipynb index 6ea0dc9b20a..1fc48868739 100644 --- a/examples/notebook/sat/minimal_jobshop_sat.ipynb +++ b/examples/notebook/sat/minimal_jobshop_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
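As literal_sample_sat.ipynb above shows, the migration also replaces x.Not() with the ~ operator; the result is an ordinary literal, usable wherever a Boolean is expected. A minimal self-contained sketch:

```python
# ~x denotes the negation of a Boolean variable and is itself a literal;
# minimal sketch, independent of the samples above.
from ortools.sat.python import cp_model

model = cp_model.CpModel()
x = model.new_bool_var("x")
y = model.new_bool_var("y")
model.add_implication(~x, y)  # x false => y true

solver = cp_model.CpSolver()
assert solver.solve(model) == cp_model.OPTIMAL
```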
] }, { @@ -87,7 +87,7 @@ "from ortools.sat.python import cp_model\n", "\n", "\n", - "def main():\n", + "def main() -> None:\n", " \"\"\"Minimal jobshop problem.\"\"\"\n", " # Data.\n", " jobs_data = [ # task = (machine_id, processing_time).\n", @@ -119,9 +119,9 @@ " for task_id, task in enumerate(job):\n", " machine, duration = task\n", " suffix = f\"_{job_id}_{task_id}\"\n", - " start_var = model.NewIntVar(0, horizon, \"start\" + suffix)\n", - " end_var = model.NewIntVar(0, horizon, \"end\" + suffix)\n", - " interval_var = model.NewIntervalVar(\n", + " start_var = model.new_int_var(0, horizon, \"start\" + suffix)\n", + " end_var = model.new_int_var(0, horizon, \"end\" + suffix)\n", + " interval_var = model.new_interval_var(\n", " start_var, duration, end_var, \"interval\" + suffix\n", " )\n", " all_tasks[job_id, task_id] = task_type(\n", @@ -131,26 +131,26 @@ "\n", " # Create and add disjunctive constraints.\n", " for machine in all_machines:\n", - " model.AddNoOverlap(machine_to_intervals[machine])\n", + " model.add_no_overlap(machine_to_intervals[machine])\n", "\n", " # Precedences inside a job.\n", " for job_id, job in enumerate(jobs_data):\n", " for task_id in range(len(job) - 1):\n", - " model.Add(\n", + " model.add(\n", " all_tasks[job_id, task_id + 1].start >= all_tasks[job_id, task_id].end\n", " )\n", "\n", " # Makespan objective.\n", - " obj_var = model.NewIntVar(0, horizon, \"makespan\")\n", - " model.AddMaxEquality(\n", + " obj_var = model.new_int_var(0, horizon, \"makespan\")\n", + " model.add_max_equality(\n", " obj_var,\n", " [all_tasks[job_id, len(job) - 1].end for job_id, job in enumerate(jobs_data)],\n", " )\n", - " model.Minimize(obj_var)\n", + " model.minimize(obj_var)\n", "\n", " # Creates the solver and solve.\n", " solver = cp_model.CpSolver()\n", - " status = solver.Solve(model)\n", + " status = solver.solve(model)\n", "\n", " if status == cp_model.OPTIMAL or status == cp_model.FEASIBLE:\n", " print(\"Solution:\")\n", @@ -161,7 +161,7 @@ " machine = task[0]\n", " assigned_jobs[machine].append(\n", " assigned_task_type(\n", - " start=solver.Value(all_tasks[job_id, task_id].start),\n", + " start=solver.value(all_tasks[job_id, task_id].start),\n", " job=job_id,\n", " index=task_id,\n", " duration=task[1],\n", @@ -178,13 +178,13 @@ "\n", " for assigned_task in assigned_jobs[machine]:\n", " name = f\"job_{assigned_task.job}_task_{assigned_task.index}\"\n", - " # Add spaces to output to align columns.\n", + " # add spaces to output to align columns.\n", " sol_line_tasks += f\"{name:15}\"\n", "\n", " start = assigned_task.start\n", " duration = assigned_task.duration\n", " sol_tmp = f\"[{start},{start + duration}]\"\n", - " # Add spaces to output to align columns.\n", + " # add spaces to output to align columns.\n", " sol_line += f\"{sol_tmp:15}\"\n", "\n", " sol_line += \"\\n\"\n", @@ -193,16 +193,16 @@ " output += sol_line\n", "\n", " # Finally print the solution found.\n", - " print(f\"Optimal Schedule Length: {solver.ObjectiveValue()}\")\n", + " print(f\"Optimal Schedule Length: {solver.objective_value}\")\n", " print(output)\n", " else:\n", " print(\"No solution found.\")\n", "\n", " # Statistics.\n", " print(\"\\nStatistics\")\n", - " print(f\" - conflicts: {solver.NumConflicts()}\")\n", - " print(f\" - branches : {solver.NumBranches()}\")\n", - " print(f\" - wall time: {solver.WallTime()}s\")\n", + " print(f\" - conflicts: {solver.num_conflicts}\")\n", + " print(f\" - branches : {solver.num_branches}\")\n", + " print(f\" - wall time: {solver.wall_time}s\")\n", "\n", 
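One detail of minimal_jobshop_sat.ipynb that the hunk elides is the horizon: a safe upper bound for every start and end is the sum of all processing times, i.e. the schedule where every task runs back to back. A sketch with illustrative data shaped like the sample's jobs_data:

```python
# Safe horizon for a job-shop model: the sum of all durations. The data
# below is illustrative; the sample's actual jobs_data is outside the hunk.
jobs_data = [  # task = (machine_id, processing_time)
    [(0, 3), (1, 2), (2, 2)],
    [(0, 2), (2, 1), (1, 4)],
    [(1, 4), (2, 3)],
]
horizon = sum(duration for job in jobs_data for _, duration in job)
assert horizon == 21
```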
"\n", "main()\n", diff --git a/examples/notebook/sat/multiple_knapsack_sat.ipynb b/examples/notebook/sat/multiple_knapsack_sat.ipynb index a50e758d084..95eec40688d 100644 --- a/examples/notebook/sat/multiple_knapsack_sat.ipynb +++ b/examples/notebook/sat/multiple_knapsack_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -86,61 +86,61 @@ "from ortools.sat.python import cp_model\n", "\n", "\n", - "def main():\n", + "def main() -> None:\n", " data = {}\n", " data[\"weights\"] = [48, 30, 42, 36, 36, 48, 42, 42, 36, 24, 30, 30, 42, 36, 36]\n", " data[\"values\"] = [10, 30, 25, 50, 35, 30, 15, 40, 30, 35, 45, 10, 20, 30, 25]\n", " assert len(data[\"weights\"]) == len(data[\"values\"])\n", - " data[\"num_items\"] = len(data[\"weights\"])\n", - " data[\"all_items\"] = range(data[\"num_items\"])\n", + " num_items = len(data[\"weights\"])\n", + " all_items = range(num_items)\n", "\n", " data[\"bin_capacities\"] = [100, 100, 100, 100, 100]\n", - " data[\"num_bins\"] = len(data[\"bin_capacities\"])\n", - " data[\"all_bins\"] = range(data[\"num_bins\"])\n", + " num_bins = len(data[\"bin_capacities\"])\n", + " all_bins = range(num_bins)\n", "\n", " model = cp_model.CpModel()\n", "\n", " # Variables.\n", " # x[i, b] = 1 if item i is packed in bin b.\n", " x = {}\n", - " for i in data[\"all_items\"]:\n", - " for b in data[\"all_bins\"]:\n", - " x[i, b] = model.NewBoolVar(f\"x_{i}_{b}\")\n", + " for i in all_items:\n", + " for b in all_bins:\n", + " x[i, b] = model.new_bool_var(f\"x_{i}_{b}\")\n", "\n", " # Constraints.\n", " # Each item is assigned to at most one bin.\n", - " for i in data[\"all_items\"]:\n", - " model.AddAtMostOne(x[i, b] for b in data[\"all_bins\"])\n", + " for i in all_items:\n", + " model.add_at_most_one(x[i, b] for b in all_bins)\n", "\n", " # The amount packed in each bin cannot exceed its capacity.\n", - " for b in data[\"all_bins\"]:\n", - " model.Add(\n", - " sum(x[i, b] * data[\"weights\"][i] for i in data[\"all_items\"])\n", + " for b in all_bins:\n", + " model.add(\n", + " sum(x[i, b] * data[\"weights\"][i] for i in all_items)\n", " <= data[\"bin_capacities\"][b]\n", " )\n", "\n", " # Objective.\n", - " # Maximize total value of packed items.\n", + " # maximize total value of packed items.\n", " objective = []\n", - " for i in data[\"all_items\"]:\n", - " for b in data[\"all_bins\"]:\n", - " objective.append(cp_model.LinearExpr.Term(x[i, b], data[\"values\"][i]))\n", - " model.Maximize(cp_model.LinearExpr.Sum(objective))\n", + " for i in all_items:\n", + " for b in all_bins:\n", + " objective.append(cp_model.LinearExpr.term(x[i, b], data[\"values\"][i]))\n", + " model.maximize(cp_model.LinearExpr.sum(objective))\n", "\n", " solver = cp_model.CpSolver()\n", - " status = solver.Solve(model)\n", + " status = solver.solve(model)\n", "\n", " if status == cp_model.OPTIMAL:\n", - " print(f\"Total packed value: {solver.ObjectiveValue()}\")\n", + " print(f\"Total packed value: {solver.objective_value}\")\n", " total_weight = 0\n", - " for b in data[\"all_bins\"]:\n", + " for b in all_bins:\n", " print(f\"Bin {b}\")\n", " bin_weight = 0\n", " bin_value = 0\n", - " for i in data[\"all_items\"]:\n", - " if solver.Value(x[i, b]) > 0:\n", + " for i in all_items:\n", + " if solver.value(x[i, b]) > 0:\n", " print(\n", - " f\"Item {i} weight: {data['weights'][i]} value: {data['values'][i]}\"\n", + " f'Item:{i} weight:{data[\"weights\"][i]} value:{data[\"values\"][i]}'\n", " )\n", " 
bin_weight += data[\"weights\"][i]\n", " bin_value += data[\"values\"][i]\n", diff --git a/examples/notebook/sat/no_overlap_sample_sat.ipynb b/examples/notebook/sat/no_overlap_sample_sat.ipynb index cc5e8b312cb..2cdb3ec36ca 100644 --- a/examples/notebook/sat/no_overlap_sample_sat.ipynb +++ b/examples/notebook/sat/no_overlap_sample_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -86,56 +86,56 @@ "from ortools.sat.python import cp_model\n", "\n", "\n", - "def NoOverlapSampleSat():\n", + "def no_overlap_sample_sat():\n", " \"\"\"No overlap sample with fixed activities.\"\"\"\n", " model = cp_model.CpModel()\n", " horizon = 21 # 3 weeks.\n", "\n", " # Task 0, duration 2.\n", - " start_0 = model.NewIntVar(0, horizon, \"start_0\")\n", + " start_0 = model.new_int_var(0, horizon, \"start_0\")\n", " duration_0 = 2 # Python cp/sat code accepts integer variables or constants.\n", - " end_0 = model.NewIntVar(0, horizon, \"end_0\")\n", - " task_0 = model.NewIntervalVar(start_0, duration_0, end_0, \"task_0\")\n", + " end_0 = model.new_int_var(0, horizon, \"end_0\")\n", + " task_0 = model.new_interval_var(start_0, duration_0, end_0, \"task_0\")\n", " # Task 1, duration 4.\n", - " start_1 = model.NewIntVar(0, horizon, \"start_1\")\n", + " start_1 = model.new_int_var(0, horizon, \"start_1\")\n", " duration_1 = 4 # Python cp/sat code accepts integer variables or constants.\n", - " end_1 = model.NewIntVar(0, horizon, \"end_1\")\n", - " task_1 = model.NewIntervalVar(start_1, duration_1, end_1, \"task_1\")\n", + " end_1 = model.new_int_var(0, horizon, \"end_1\")\n", + " task_1 = model.new_interval_var(start_1, duration_1, end_1, \"task_1\")\n", "\n", " # Task 2, duration 3.\n", - " start_2 = model.NewIntVar(0, horizon, \"start_2\")\n", + " start_2 = model.new_int_var(0, horizon, \"start_2\")\n", " duration_2 = 3 # Python cp/sat code accepts integer variables or constants.\n", - " end_2 = model.NewIntVar(0, horizon, \"end_2\")\n", - " task_2 = model.NewIntervalVar(start_2, duration_2, end_2, \"task_2\")\n", + " end_2 = model.new_int_var(0, horizon, \"end_2\")\n", + " task_2 = model.new_interval_var(start_2, duration_2, end_2, \"task_2\")\n", "\n", " # Weekends.\n", - " weekend_0 = model.NewIntervalVar(5, 2, 7, \"weekend_0\")\n", - " weekend_1 = model.NewIntervalVar(12, 2, 14, \"weekend_1\")\n", - " weekend_2 = model.NewIntervalVar(19, 2, 21, \"weekend_2\")\n", + " weekend_0 = model.new_interval_var(5, 2, 7, \"weekend_0\")\n", + " weekend_1 = model.new_interval_var(12, 2, 14, \"weekend_1\")\n", + " weekend_2 = model.new_interval_var(19, 2, 21, \"weekend_2\")\n", "\n", " # No Overlap constraint.\n", - " model.AddNoOverlap([task_0, task_1, task_2, weekend_0, weekend_1, weekend_2])\n", + " model.add_no_overlap([task_0, task_1, task_2, weekend_0, weekend_1, weekend_2])\n", "\n", " # Makespan objective.\n", - " obj = model.NewIntVar(0, horizon, \"makespan\")\n", - " model.AddMaxEquality(obj, [end_0, end_1, end_2])\n", - " model.Minimize(obj)\n", + " obj = model.new_int_var(0, horizon, \"makespan\")\n", + " model.add_max_equality(obj, [end_0, end_1, end_2])\n", + " model.minimize(obj)\n", "\n", " # Solve model.\n", " solver = cp_model.CpSolver()\n", - " status = solver.Solve(model)\n", + " status = solver.solve(model)\n", "\n", " if status == cp_model.OPTIMAL:\n", " # Print out makespan and the start times for all tasks.\n", - " print(f\"Optimal Schedule Length: {solver.ObjectiveValue()}\")\n", - " print(f\"Task 0 
starts at {solver.Value(start_0)}\")\n", - " print(f\"Task 1 starts at {solver.Value(start_1)}\")\n", - " print(f\"Task 2 starts at {solver.Value(start_2)}\")\n", + " print(f\"Optimal Schedule Length: {solver.objective_value}\")\n", + " print(f\"Task 0 starts at {solver.value(start_0)}\")\n", + " print(f\"Task 1 starts at {solver.value(start_1)}\")\n", + " print(f\"Task 2 starts at {solver.value(start_2)}\")\n", " else:\n", " print(f\"Solver exited with nonoptimal status: {status}\")\n", "\n", "\n", - "NoOverlapSampleSat()\n", + "no_overlap_sample_sat()\n", "\n" ] } diff --git a/examples/notebook/sat/non_linear_sat.ipynb b/examples/notebook/sat/non_linear_sat.ipynb index 1995b21c7bc..6a612e344ba 100644 --- a/examples/notebook/sat/non_linear_sat.ipynb +++ b/examples/notebook/sat/non_linear_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -76,7 +76,7 @@ "Non linear example.\n", "\n", "Finds a rectangle with maximum available area for given perimeter using\n", - "AddMultiplicationEquality().\n", + "add_multiplication_equality().\n", "\n" ] }, @@ -96,23 +96,23 @@ "\n", " model = cp_model.CpModel()\n", "\n", - " x = model.NewIntVar(0, perimeter, \"x\")\n", - " y = model.NewIntVar(0, perimeter, \"y\")\n", - " model.Add(2 * (x + y) == perimeter)\n", + " x = model.new_int_var(0, perimeter, \"x\")\n", + " y = model.new_int_var(0, perimeter, \"y\")\n", + " model.add(2 * (x + y) == perimeter)\n", "\n", - " area = model.NewIntVar(0, perimeter * perimeter, \"s\")\n", - " model.AddMultiplicationEquality(area, x, y)\n", + " area = model.new_int_var(0, perimeter * perimeter, \"s\")\n", + " model.add_multiplication_equality(area, x, y)\n", "\n", - " model.Maximize(area)\n", + " model.maximize(area)\n", "\n", " solver = cp_model.CpSolver()\n", "\n", - " status = solver.Solve(model)\n", + " status = solver.solve(model)\n", "\n", " if status == cp_model.OPTIMAL or status == cp_model.FEASIBLE:\n", - " print(f\"x = {solver.Value(x)}\")\n", - " print(f\"y = {solver.Value(y)}\")\n", - " print(f\"s = {solver.Value(area)}\")\n", + " print(f\"x = {solver.value(x)}\")\n", + " print(f\"y = {solver.value(y)}\")\n", + " print(f\"s = {solver.value(area)}\")\n", " else:\n", " print(\"No solution found.\")\n", "\n", diff --git a/examples/notebook/sat/nqueens_sat.ipynb b/examples/notebook/sat/nqueens_sat.ipynb index 2f889fbc25c..019fb947775 100644 --- a/examples/notebook/sat/nqueens_sat.ipynb +++ b/examples/notebook/sat/nqueens_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
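For non_linear_sat.ipynb above, the expected optimum is easy to cross-check: with the perimeter fixed, the integer rectangle of maximum area is as square as possible. A brute-force check with an illustrative perimeter (the sample's value sits outside the hunk):

```python
# Brute-force cross-check of the rectangle sample's optimum; the perimeter
# value here is illustrative.
perimeter = 20
best_area, best_x, best_y = max(
    (x * y, x, y)
    for x in range(perimeter + 1)
    for y in range(perimeter + 1)
    if 2 * (x + y) == perimeter
)
assert (best_area, best_x, best_y) == (25, 5, 5)  # x == y == perimeter / 4
```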
] }, { @@ -91,13 +91,14 @@ "class NQueenSolutionPrinter(cp_model.CpSolverSolutionCallback):\n", " \"\"\"Print intermediate solutions.\"\"\"\n", "\n", - " def __init__(self, queens):\n", + " def __init__(self, queens: list[cp_model.IntVar]):\n", " cp_model.CpSolverSolutionCallback.__init__(self)\n", " self.__queens = queens\n", " self.__solution_count = 0\n", " self.__start_time = time.time()\n", "\n", - " def solution_count(self):\n", + " @property\n", + " def solution_count(self) -> int:\n", " return self.__solution_count\n", "\n", " def on_solution_callback(self):\n", @@ -111,7 +112,7 @@ " all_queens = range(len(self.__queens))\n", " for i in all_queens:\n", " for j in all_queens:\n", - " if self.Value(self.__queens[j]) == i:\n", + " if self.value(self.__queens[j]) == i:\n", " # There is a queen in column j, row i.\n", " print(\"Q\", end=\" \")\n", " else:\n", @@ -121,35 +122,35 @@ "\n", "\n", "\n", - "def main(board_size):\n", + "def main(board_size: int) -> None:\n", " # Creates the solver.\n", " model = cp_model.CpModel()\n", "\n", " # Creates the variables.\n", " # There are `board_size` number of variables, one for a queen in each column\n", " # of the board. The value of each variable is the row that the queen is in.\n", - " queens = [model.NewIntVar(0, board_size - 1, f\"x_{i}\") for i in range(board_size)]\n", + " queens = [model.new_int_var(0, board_size - 1, f\"x_{i}\") for i in range(board_size)]\n", "\n", " # Creates the constraints.\n", " # All rows must be different.\n", - " model.AddAllDifferent(queens)\n", + " model.add_all_different(queens)\n", "\n", " # No two queens can be on the same diagonal.\n", - " model.AddAllDifferent(queens[i] + i for i in range(board_size))\n", - " model.AddAllDifferent(queens[i] - i for i in range(board_size))\n", + " model.add_all_different(queens[i] + i for i in range(board_size))\n", + " model.add_all_different(queens[i] - i for i in range(board_size))\n", "\n", " # Solve the model.\n", " solver = cp_model.CpSolver()\n", " solution_printer = NQueenSolutionPrinter(queens)\n", " solver.parameters.enumerate_all_solutions = True\n", - " solver.Solve(model, solution_printer)\n", + " solver.solve(model, solution_printer)\n", "\n", " # Statistics.\n", " print(\"\\nStatistics\")\n", - " print(f\" conflicts : {solver.NumConflicts()}\")\n", - " print(f\" branches : {solver.NumBranches()}\")\n", - " print(f\" wall time : {solver.WallTime()} s\")\n", - " print(f\" solutions found: {solution_printer.solution_count()}\")\n", + " print(f\" conflicts : {solver.num_conflicts}\")\n", + " print(f\" branches : {solver.num_branches}\")\n", + " print(f\" wall time : {solver.wall_time} s\")\n", + " print(f\" solutions found: {solution_printer.solution_count}\")\n", "\n", "\n", "# By default, solve the 8x8 problem.\n", diff --git a/examples/notebook/sat/nurses_sat.ipynb b/examples/notebook/sat/nurses_sat.ipynb index 1c8e74e03ac..02cf4d5aa2f 100644 --- a/examples/notebook/sat/nurses_sat.ipynb +++ b/examples/notebook/sat/nurses_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
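The two diagonal constraints in nqueens_sat.ipynb rest on a small identity: queens in columns i and j attack diagonally exactly when |row_i - row_j| == |i - j|, which is equivalent to row_i + i == row_j + j (one diagonal direction) or row_i - i == row_j - j (the other). Hence the add_all_different over queens[i] + i and over queens[i] - i. A quick check:

```python
# Diagonal-attack test behind the two add_all_different constraints above.
def attacks_diagonally(col_i: int, row_i: int, col_j: int, row_j: int) -> bool:
    return abs(row_i - row_j) == abs(col_i - col_j)

# Same "\" diagonal: row - col is equal.
assert attacks_diagonally(0, 0, 3, 3) and (0 - 0 == 3 - 3)
# Same "/" diagonal: row + col is equal.
assert attacks_diagonally(1, 4, 3, 2) and (4 + 1 == 2 + 3)
# Knight-like offset: no diagonal attack; sums and differences both differ.
assert not attacks_diagonally(0, 0, 2, 1)
```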
] }, { @@ -86,7 +86,7 @@ "from ortools.sat.python import cp_model\n", "\n", "\n", - "def main():\n", + "def main() -> None:\n", " # Data.\n", " num_nurses = 4\n", " num_shifts = 3\n", @@ -104,17 +104,17 @@ " for n in all_nurses:\n", " for d in all_days:\n", " for s in all_shifts:\n", - " shifts[(n, d, s)] = model.NewBoolVar(f\"shift_n{n}_d{d}_s{s}\")\n", + " shifts[(n, d, s)] = model.new_bool_var(f\"shift_n{n}_d{d}_s{s}\")\n", "\n", " # Each shift is assigned to exactly one nurse in the schedule period.\n", " for d in all_days:\n", " for s in all_shifts:\n", - " model.AddExactlyOne(shifts[(n, d, s)] for n in all_nurses)\n", + " model.add_exactly_one(shifts[(n, d, s)] for n in all_nurses)\n", "\n", " # Each nurse works at most one shift per day.\n", " for n in all_nurses:\n", " for d in all_days:\n", - " model.AddAtMostOne(shifts[(n, d, s)] for s in all_shifts)\n", + " model.add_at_most_one(shifts[(n, d, s)] for s in all_shifts)\n", "\n", " # Try to distribute the shifts evenly, so that each nurse works\n", " # min_shifts_per_nurse shifts. If this is not possible, because the total\n", @@ -130,8 +130,8 @@ " for d in all_days:\n", " for s in all_shifts:\n", " shifts_worked.append(shifts[(n, d, s)])\n", - " model.Add(min_shifts_per_nurse <= sum(shifts_worked))\n", - " model.Add(sum(shifts_worked) <= max_shifts_per_nurse)\n", + " model.add(min_shifts_per_nurse <= sum(shifts_worked))\n", + " model.add(sum(shifts_worked) <= max_shifts_per_nurse)\n", "\n", " # Creates the solver and solve.\n", " solver = cp_model.CpSolver()\n", @@ -159,16 +159,16 @@ " for n in range(self._num_nurses):\n", " is_working = False\n", " for s in range(self._num_shifts):\n", - " if self.Value(self._shifts[(n, d, s)]):\n", + " if self.value(self._shifts[(n, d, s)]):\n", " is_working = True\n", " print(f\" Nurse {n} works shift {s}\")\n", " if not is_working:\n", " print(f\" Nurse {n} does not work\")\n", " if self._solution_count >= self._solution_limit:\n", " print(f\"Stop search after {self._solution_limit} solutions\")\n", - " self.StopSearch()\n", + " self.stop_search()\n", "\n", - " def solution_count(self):\n", + " def solutionCount(self):\n", " return self._solution_count\n", "\n", " # Display the first five solutions.\n", @@ -177,14 +177,14 @@ " shifts, num_nurses, num_days, num_shifts, solution_limit\n", " )\n", "\n", - " solver.Solve(model, solution_printer)\n", + " solver.solve(model, solution_printer)\n", "\n", " # Statistics.\n", " print(\"\\nStatistics\")\n", - " print(f\" - conflicts : {solver.NumConflicts()}\")\n", - " print(f\" - branches : {solver.NumBranches()}\")\n", - " print(f\" - wall time : {solver.WallTime()} s\")\n", - " print(f\" - solutions found: {solution_printer.solution_count()}\")\n", + " print(f\" - conflicts : {solver.num_conflicts}\")\n", + " print(f\" - branches : {solver.num_branches}\")\n", + " print(f\" - wall time : {solver.wall_time} s\")\n", + " print(f\" - solutions found: {solution_printer.solutionCount()}\")\n", "\n", "\n", "main()\n", diff --git a/examples/notebook/sat/optional_interval_sample_sat.ipynb b/examples/notebook/sat/optional_interval_sample_sat.ipynb index 29fc57d1a2f..f356012ecf7 100644 --- a/examples/notebook/sat/optional_interval_sample_sat.ipynb +++ b/examples/notebook/sat/optional_interval_sample_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
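The fairness bounds in nurses_sat.ipynb (their computation is outside the hunk) follow the comment quoted above: every nurse works at least total // num_nurses shifts, and at most one more when the division is not exact. With the sample's four nurses and three shifts per day, and assuming the usual three-day horizon:

```python
# Fair-distribution bounds sketched from the comment in nurses_sat; the
# num_days value is assumed, and the floor/ceil rule mirrors the comment.
num_nurses, num_shifts, num_days = 4, 3, 3  # num_days assumed
total_shifts = num_shifts * num_days
min_shifts_per_nurse = total_shifts // num_nurses
if total_shifts % num_nurses == 0:
    max_shifts_per_nurse = min_shifts_per_nurse
else:
    max_shifts_per_nurse = min_shifts_per_nurse + 1
assert (min_shifts_per_nurse, max_shifts_per_nurse) == (2, 3)
```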
] }, { @@ -86,17 +86,17 @@ "from ortools.sat.python import cp_model\n", "\n", "\n", - "def OptionalIntervalSampleSat():\n", + "def optional_interval_sample_sat():\n", " \"\"\"Showcases how to build optional interval variables.\"\"\"\n", " model = cp_model.CpModel()\n", " horizon = 100\n", "\n", " # An interval can be created from three affine expressions.\n", - " start_var = model.NewIntVar(0, horizon, \"start\")\n", + " start_var = model.new_int_var(0, horizon, \"start\")\n", " duration = 10 # Python cp/sat code accept integer variables or constants.\n", - " end_var = model.NewIntVar(0, horizon, \"end\")\n", - " presence_var = model.NewBoolVar(\"presence\")\n", - " interval_var = model.NewOptionalIntervalVar(\n", + " end_var = model.new_int_var(0, horizon, \"end\")\n", + " presence_var = model.new_bool_var(\"presence\")\n", + " interval_var = model.new_optional_interval_var(\n", " start_var, duration, end_var + 2, presence_var, \"interval\"\n", " )\n", "\n", @@ -104,19 +104,19 @@ "\n", " # If the size is fixed, a simpler version uses the start expression and the\n", " # size.\n", - " fixed_size_interval_var = model.NewOptionalFixedSizeIntervalVar(\n", + " fixed_size_interval_var = model.new_optional_fixed_size_interval_var(\n", " start_var, 10, presence_var, \"fixed_size_interval_var\"\n", " )\n", " print(f\"fixed_size_interval_var = {repr(fixed_size_interval_var)}\")\n", "\n", " # A fixed interval can be created using the same API.\n", - " fixed_interval = model.NewOptionalFixedSizeIntervalVar(\n", + " fixed_interval = model.new_optional_fixed_size_interval_var(\n", " 5, 10, presence_var, \"fixed_interval\"\n", " )\n", " print(f\"fixed_interval = {repr(fixed_interval)}\")\n", "\n", "\n", - "OptionalIntervalSampleSat()\n", + "optional_interval_sample_sat()\n", "\n" ] } diff --git a/examples/notebook/sat/overlapping_intervals_sample_sat.ipynb b/examples/notebook/sat/overlapping_intervals_sample_sat.ipynb index b542a5802a9..0eab0653205 100644 --- a/examples/notebook/sat/overlapping_intervals_sample_sat.ipynb +++ b/examples/notebook/sat/overlapping_intervals_sample_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
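A note on the optional intervals showcased above: an optional interval constrains the schedule only while its presence literal is true; when absent, it drops out of no_overlap and cumulative reasoning entirely. A minimal self-contained sketch:

```python
# An absent optional interval imposes nothing: forcing presence to false
# lets it coexist with a busy fixed interval under add_no_overlap.
from ortools.sat.python import cp_model

model = cp_model.CpModel()
start = model.new_int_var(0, 100, "start")
presence = model.new_bool_var("presence")
optional = model.new_optional_fixed_size_interval_var(start, 10, presence, "opt")
busy = model.new_fixed_size_interval_var(0, 100, "busy")  # occupies [0, 100)

model.add_no_overlap([optional, busy])
model.add(presence == 0)  # force absence; the model stays feasible

solver = cp_model.CpSolver()
assert solver.solve(model) == cp_model.OPTIMAL
```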
] }, { @@ -89,67 +89,64 @@ "class VarArraySolutionPrinter(cp_model.CpSolverSolutionCallback):\n", " \"\"\"Print intermediate solutions.\"\"\"\n", "\n", - " def __init__(self, variables):\n", + " def __init__(self, variables: list[cp_model.IntVar]):\n", " cp_model.CpSolverSolutionCallback.__init__(self)\n", " self.__variables = variables\n", - " self.__solution_count = 0\n", "\n", - " def on_solution_callback(self):\n", - " self.__solution_count += 1\n", + " def on_solution_callback(self) -> None:\n", " for v in self.__variables:\n", - " print(f\"{v}={self.Value(v)}\", end=\" \")\n", + " print(f\"{v}={self.value(v)}\", end=\" \")\n", " print()\n", "\n", - " def solution_count(self):\n", - " return self.__solution_count\n", "\n", - "\n", - "def OverlappingIntervals():\n", + "def overlapping_interval_sample_sat():\n", " \"\"\"Create the overlapping Boolean variables and enumerate all states.\"\"\"\n", " model = cp_model.CpModel()\n", "\n", " horizon = 7\n", "\n", " # First interval.\n", - " start_var_a = model.NewIntVar(0, horizon, \"start_a\")\n", + " start_var_a = model.new_int_var(0, horizon, \"start_a\")\n", " duration_a = 3\n", - " end_var_a = model.NewIntVar(0, horizon, \"end_a\")\n", - " unused_interval_var_a = model.NewIntervalVar(\n", + " end_var_a = model.new_int_var(0, horizon, \"end_a\")\n", + " unused_interval_var_a = model.new_interval_var(\n", " start_var_a, duration_a, end_var_a, \"interval_a\"\n", " )\n", "\n", " # Second interval.\n", - " start_var_b = model.NewIntVar(0, horizon, \"start_b\")\n", + " start_var_b = model.new_int_var(0, horizon, \"start_b\")\n", " duration_b = 2\n", - " end_var_b = model.NewIntVar(0, horizon, \"end_b\")\n", - " unused_interval_var_b = model.NewIntervalVar(\n", + " end_var_b = model.new_int_var(0, horizon, \"end_b\")\n", + " unused_interval_var_b = model.new_interval_var(\n", " start_var_b, duration_b, end_var_b, \"interval_b\"\n", " )\n", "\n", " # a_after_b Boolean variable.\n", - " a_after_b = model.NewBoolVar(\"a_after_b\")\n", - " model.Add(start_var_a >= end_var_b).OnlyEnforceIf(a_after_b)\n", - " model.Add(start_var_a < end_var_b).OnlyEnforceIf(a_after_b.Not())\n", + " a_after_b = model.new_bool_var(\"a_after_b\")\n", + " model.add(start_var_a >= end_var_b).only_enforce_if(a_after_b)\n", + " model.add(start_var_a < end_var_b).only_enforce_if(~a_after_b)\n", "\n", " # b_after_a Boolean variable.\n", - " b_after_a = model.NewBoolVar(\"b_after_a\")\n", - " model.Add(start_var_b >= end_var_a).OnlyEnforceIf(b_after_a)\n", - " model.Add(start_var_b < end_var_a).OnlyEnforceIf(b_after_a.Not())\n", + " b_after_a = model.new_bool_var(\"b_after_a\")\n", + " model.add(start_var_b >= end_var_a).only_enforce_if(b_after_a)\n", + " model.add(start_var_b < end_var_a).only_enforce_if(~b_after_a)\n", "\n", " # Result Boolean variable.\n", - " a_overlaps_b = model.NewBoolVar(\"a_overlaps_b\")\n", + " a_overlaps_b = model.new_bool_var(\"a_overlaps_b\")\n", "\n", " # Option a: using only clauses\n", - " model.AddBoolOr(a_after_b, b_after_a, a_overlaps_b)\n", - " model.AddImplication(a_after_b, a_overlaps_b.Not())\n", - " model.AddImplication(b_after_a, a_overlaps_b.Not())\n", + " model.add_bool_or(a_after_b, b_after_a, a_overlaps_b)\n", + " model.add_implication(a_after_b, ~a_overlaps_b)\n", + " model.add_implication(b_after_a, ~a_overlaps_b)\n", "\n", " # Option b: using an exactly one constraint.\n", - " # model.AddExactlyOne(a_after_b, b_after_a, a_overlaps_b)\n", + " # model.add_exactly_one(a_after_b, b_after_a, a_overlaps_b)\n", "\n", " # Search for 
start values in increasing order for the two intervals.\n", - " model.AddDecisionStrategy(\n", - " [start_var_a, start_var_b], cp_model.CHOOSE_FIRST, cp_model.SELECT_MIN_VALUE\n", + " model.add_decision_strategy(\n", + " [start_var_a, start_var_b],\n", + " cp_model.CHOOSE_FIRST,\n", + " cp_model.SELECT_MIN_VALUE,\n", " )\n", "\n", " # Create a solver and solve with a fixed search.\n", @@ -162,10 +159,10 @@ "\n", " # Search and print out all solutions.\n", " solution_printer = VarArraySolutionPrinter([start_var_a, start_var_b, a_overlaps_b])\n", - " solver.Solve(model, solution_printer)\n", + " solver.solve(model, solution_printer)\n", "\n", "\n", - "OverlappingIntervals()\n", + "overlapping_interval_sample_sat()\n", "\n" ] } diff --git a/examples/notebook/sat/rabbits_and_pheasants_sat.ipynb b/examples/notebook/sat/rabbits_and_pheasants_sat.ipynb index 8350743cc86..63dc2a60b8c 100644 --- a/examples/notebook/sat/rabbits_and_pheasants_sat.ipynb +++ b/examples/notebook/sat/rabbits_and_pheasants_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -86,27 +86,27 @@ "from ortools.sat.python import cp_model\n", "\n", "\n", - "def RabbitsAndPheasantsSat():\n", + "def rabbits_and_pheasants_sat():\n", " \"\"\"Solves the rabbits + pheasants problem.\"\"\"\n", " model = cp_model.CpModel()\n", "\n", - " r = model.NewIntVar(0, 100, \"r\")\n", - " p = model.NewIntVar(0, 100, \"p\")\n", + " r = model.new_int_var(0, 100, \"r\")\n", + " p = model.new_int_var(0, 100, \"p\")\n", "\n", " # 20 heads.\n", - " model.Add(r + p == 20)\n", + " model.add(r + p == 20)\n", " # 56 legs.\n", - " model.Add(4 * r + 2 * p == 56)\n", + " model.add(4 * r + 2 * p == 56)\n", "\n", " # Solves and prints out the solution.\n", " solver = cp_model.CpSolver()\n", - " status = solver.Solve(model)\n", + " status = solver.solve(model)\n", "\n", " if status == cp_model.OPTIMAL:\n", - " print(f\"{solver.Value(r)} rabbits and {solver.Value(p)} pheasants\")\n", + " print(f\"{solver.value(r)} rabbits and {solver.value(p)} pheasants\")\n", "\n", "\n", - "RabbitsAndPheasantsSat()\n", + "rabbits_and_pheasants_sat()\n", "\n" ] } diff --git a/examples/notebook/sat/ranking_circuit_sample_sat.ipynb b/examples/notebook/sat/ranking_circuit_sample_sat.ipynb index f89f6966694..fb87e450640 100644 --- a/examples/notebook/sat/ranking_circuit_sample_sat.ipynb +++ b/examples/notebook/sat/ranking_circuit_sample_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
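rabbits_and_pheasants_sat.ipynb above has a unique solution worth pinning down by hand: subtracting twice the heads equation from the legs equation leaves 2r = 56 - 2 * 20, so the CP-SAT model should report 8 rabbits and 12 pheasants:

```python
# Hand elimination for the rabbits-and-pheasants model above:
# r + p == 20 (heads) and 4r + 2p == 56 (legs) => 2r == 56 - 2 * 20.
r = (56 - 2 * 20) // 2
p = 20 - r
assert (r, p) == (8, 12)
assert r + p == 20 and 4 * r + 2 * p == 56
```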
] }, { @@ -95,7 +95,7 @@ " durations: Sequence[int],\n", " presences: Sequence[cp_model.IntVar],\n", " ranks: Sequence[cp_model.IntVar],\n", - "):\n", + ") -> None:\n", " \"\"\"This method uses a circuit constraint to rank tasks.\n", "\n", " This method assumes that all starts are disjoint, meaning that all tasks have\n", @@ -105,15 +105,14 @@ " To implement this ranking, we will create a dense graph with num_tasks + 1\n", " nodes.\n", " The extra node (with id 0) will be used to decide which task is first with\n", - " its only outgoing arc, and whhich task is last with its only incoming arc.\n", + " its only outgoing arc, and which task is last with its only incoming arc.\n", " Each task i will be associated with id i + 1, and an arc between i + 1 and j +\n", " 1 indicates that j is the immediate successor of i.\n", "\n", - " The circuit constraint ensures there is at most 1 hamiltonian path of\n", + " The circuit constraint ensures there is at most 1 hamiltonian cycle of\n", " length > 1. If no such path exists, then no tasks are active.\n", - "\n", - " The multiple enforced linear constraints are meant to ensure the compatibility\n", - " between the order of starts and the order of ranks,\n", + " We also need to enforce that any hamiltonian cycle of size > 1 must contain\n", + " the node 0. And thus, there is a self loop on node 0 iff the circuit is empty.\n", "\n", " Args:\n", " model: The CpModel to add the constraints to.\n", @@ -129,26 +128,26 @@ " arcs: List[cp_model.ArcT] = []\n", " for i in all_tasks:\n", " # if node i is first.\n", - " start_lit = model.NewBoolVar(f\"start_{i}\")\n", + " start_lit = model.new_bool_var(f\"start_{i}\")\n", " arcs.append((0, i + 1, start_lit))\n", - " model.Add(ranks[i] == 0).OnlyEnforceIf(start_lit)\n", + " model.add(ranks[i] == 0).only_enforce_if(start_lit)\n", "\n", " # As there are no other constraints on the problem, we can add this\n", " # redundant constraint.\n", - " model.Add(starts[i] == 0).OnlyEnforceIf(start_lit)\n", + " model.add(starts[i] == 0).only_enforce_if(start_lit)\n", "\n", " # if node i is last.\n", - " end_lit = model.NewBoolVar(f\"end_{i}\")\n", + " end_lit = model.new_bool_var(f\"end_{i}\")\n", " arcs.append((i + 1, 0, end_lit))\n", "\n", " for j in all_tasks:\n", " if i == j:\n", - " arcs.append((i + 1, i + 1, presences[i].Not()))\n", - " model.Add(ranks[i] == -1).OnlyEnforceIf(presences[i].Not())\n", + " arcs.append((i + 1, i + 1, ~presences[i]))\n", + " model.add(ranks[i] == -1).only_enforce_if(~presences[i])\n", " else:\n", - " literal = model.NewBoolVar(f\"arc_{i}_to_{j}\")\n", + " literal = model.new_bool_var(f\"arc_{i}_to_{j}\")\n", " arcs.append((i + 1, j + 1, literal))\n", - " model.Add(ranks[j] == ranks[i] + 1).OnlyEnforceIf(literal)\n", + " model.add(ranks[j] == ranks[i] + 1).only_enforce_if(literal)\n", "\n", " # To perform the transitive reduction from precedences to successors,\n", " # we need to tie the starts of the tasks with 'literal'.\n", @@ -157,20 +156,22 @@ " #\n", " # Note that we could use this literal to penalize the transition, add an\n", " # extra delay to the precedence.\n", - " model.Add(starts[j] >= starts[i] + durations[i]).OnlyEnforceIf(literal)\n", + " model.add(starts[j] >= starts[i] + durations[i]).only_enforce_if(\n", + " literal\n", + " )\n", "\n", " # Manage the empty circuit\n", - " empty = model.NewBoolVar(\"empty\")\n", + " empty = model.new_bool_var(\"empty\")\n", " arcs.append((0, 0, empty))\n", "\n", " for i in all_tasks:\n", - " model.AddImplication(empty, presences[i].Not())\n", 
+ " model.add_implication(empty, ~presences[i])\n", "\n", " # Add the circuit constraint.\n", - " model.AddCircuit(arcs)\n", + " model.add_circuit(arcs)\n", "\n", "\n", - "def ranking_sample_sat():\n", + "def ranking_sample_sat() -> None:\n", " \"\"\"Ranks tasks in a NoOverlap constraint.\"\"\"\n", "\n", " model = cp_model.CpModel()\n", @@ -186,14 +187,14 @@ "\n", " # Creates intervals, half of them are optional.\n", " for t in all_tasks:\n", - " start = model.NewIntVar(0, horizon, f\"start[{t}]\")\n", + " start = model.new_int_var(0, horizon, f\"start[{t}]\")\n", " duration = t + 1\n", - " presence = model.NewBoolVar(f\"presence[{t}]\")\n", - " interval = model.NewOptionalFixedSizeIntervalVar(\n", + " presence = model.new_bool_var(f\"presence[{t}]\")\n", + " interval = model.new_optional_fixed_size_interval_var(\n", " start, duration, presence, f\"opt_interval[{t}]\"\n", " )\n", " if t < num_tasks // 2:\n", - " model.Add(presence == 1)\n", + " model.add(presence == 1)\n", "\n", " starts.append(start)\n", " durations.append(duration)\n", @@ -201,45 +202,44 @@ " presences.append(presence)\n", "\n", " # Ranks = -1 if and only if the tasks is not performed.\n", - " ranks.append(model.NewIntVar(-1, num_tasks - 1, f\"rank[{t}]\"))\n", + " ranks.append(model.new_int_var(-1, num_tasks - 1, f\"rank[{t}]\"))\n", "\n", " # Adds NoOverlap constraint.\n", - " model.AddNoOverlap(intervals)\n", + " model.add_no_overlap(intervals)\n", "\n", " # Adds ranking constraint.\n", " rank_tasks_with_circuit(model, starts, durations, presences, ranks)\n", "\n", " # Adds a constraint on ranks.\n", - " model.Add(ranks[0] < ranks[1])\n", + " model.add(ranks[0] < ranks[1])\n", "\n", " # Creates makespan variable.\n", - " makespan = model.NewIntVar(0, horizon, \"makespan\")\n", + " makespan = model.new_int_var(0, horizon, \"makespan\")\n", " for t in all_tasks:\n", - " model.Add(starts[t] + durations[t] <= makespan).OnlyEnforceIf(presences[t])\n", + " model.add(starts[t] + durations[t] <= makespan).only_enforce_if(presences[t])\n", "\n", " # Minimizes makespan - fixed gain per tasks performed.\n", " # As the fixed cost is less that the duration of the last interval,\n", " # the solver will not perform the last interval.\n", - " model.Minimize(2 * makespan - 7 * sum(presences[t] for t in all_tasks))\n", + " model.minimize(2 * makespan - 7 * sum(presences[t] for t in all_tasks))\n", "\n", " # Solves the model model.\n", " solver = cp_model.CpSolver()\n", - " status = solver.Solve(model)\n", + " status = solver.solve(model)\n", "\n", " if status == cp_model.OPTIMAL:\n", " # Prints out the makespan and the start times and ranks of all tasks.\n", - " print(f\"Optimal cost: {solver.ObjectiveValue()}\")\n", - " print(f\"Makespan: {solver.Value(makespan)}\")\n", + " print(f\"Optimal cost: {solver.objective_value}\")\n", + " print(f\"Makespan: {solver.value(makespan)}\")\n", " for t in all_tasks:\n", - " if solver.Value(presences[t]):\n", + " if solver.value(presences[t]):\n", " print(\n", - " f\"Task {t} starts at {solver.Value(starts[t])} \"\n", - " f\"with rank {solver.Value(ranks[t])}\"\n", + " f\"Task {t} starts at {solver.value(starts[t])} \"\n", + " f\"with rank {solver.value(ranks[t])}\"\n", " )\n", " else:\n", " print(\n", - " f\"Task {t} in not performed \"\n", - " f\"and ranked at {solver.Value(ranks[t])}\"\n", + " f\"Task {t} in not performed and ranked at {solver.value(ranks[t])}\"\n", " )\n", " else:\n", " print(f\"Solver exited with nonoptimal status: {status}\")\n", diff --git 
index bc57d367d83..8dc5a41d815 100644
--- a/examples/notebook/sat/ranking_sample_sat.ipynb
+++ b/examples/notebook/sat/ranking_sample_sat.ipynb
@@ -5,7 +5,7 @@
"id": "google",
"metadata": {},
"source": [
- "##### Copyright 2023 Google LLC."
+ "##### Copyright 2024 Google LLC."
]
},
{
@@ -86,7 +86,12 @@
"from ortools.sat.python import cp_model\n",
"\n",
"\n",
- "def RankTasks(model, starts, presences, ranks):\n",
+ "def rank_tasks(\n",
+ " model: cp_model.CpModel,\n",
+ " starts: list[cp_model.IntVar],\n",
+ " presences: list[cp_model.BoolVarT],\n",
+ " ranks: list[cp_model.IntVar],\n",
+ ") -> None:\n",
" \"\"\"This method adds constraints and variables to link tasks and ranks.\n",
"\n",
" This method assumes that all starts are disjoint, meaning that all tasks have\n",
@@ -96,7 +101,7 @@
" Args:\n",
" model: The CpModel to add the constraints to.\n",
" starts: The array of starts variables of all tasks.\n",
- " presences: The array of presence variables of all tasks.\n",
+ " presences: The array of presence variables or constants of all tasks.\n",
" ranks: The array of rank variables of all tasks.\n",
" \"\"\"\n",
"\n",
@@ -104,45 +109,48 @@
" all_tasks = range(num_tasks)\n",
"\n",
" # Creates precedence variables between pairs of intervals.\n",
- " precedences = {}\n",
+ " precedences: dict[tuple[int, int], cp_model.BoolVarT] = {}\n",
" for i in all_tasks:\n",
" for j in all_tasks:\n",
" if i == j:\n",
" precedences[(i, j)] = presences[i]\n",
" else:\n",
- " prec = model.NewBoolVar(f\"{i} before {j}\")\n",
+ " prec = model.new_bool_var(f\"{i} before {j}\")\n",
" precedences[(i, j)] = prec\n",
- " model.Add(starts[i] < starts[j]).OnlyEnforceIf(prec)\n",
+ " model.add(starts[i] < starts[j]).only_enforce_if(prec)\n",
"\n",
" # Treats optional intervals.\n",
" for i in range(num_tasks - 1):\n",
" for j in range(i + 1, num_tasks):\n",
- " tmp_array = [precedences[(i, j)], precedences[(j, i)]]\n",
- " if not cp_model.ObjectIsATrueLiteral(presences[i]):\n",
- " tmp_array.append(presences[i].Not())\n",
+ " tmp_array: list[cp_model.BoolVarT] = [\n",
+ " precedences[(i, j)],\n",
+ " precedences[(j, i)],\n",
+ " ]\n",
+ " if not cp_model.object_is_a_true_literal(presences[i]):\n",
+ " tmp_array.append(~presences[i])\n",
" # Makes sure that if i is not performed, all precedences are false.\n",
- " model.AddImplication(presences[i].Not(), precedences[(i, j)].Not())\n",
- " model.AddImplication(presences[i].Not(), precedences[(j, i)].Not())\n",
+ " model.add_implication(~presences[i], ~precedences[(i, j)])\n",
+ " model.add_implication(~presences[i], ~precedences[(j, i)])\n",
- " if not cp_model.ObjectIsATrueLiteral(presences[j]):\n",
- " tmp_array.append(presences[j].Not())\n",
+ " if not cp_model.object_is_a_true_literal(presences[j]):\n",
+ " tmp_array.append(~presences[j])\n",
" # Makes sure that if j is not performed, all precedences are false.\n",
- " model.AddImplication(presences[j].Not(), precedences[(i, j)].Not())\n",
- " model.AddImplication(presences[j].Not(), precedences[(j, i)].Not())\n",
+ " model.add_implication(~presences[j], ~precedences[(i, j)])\n",
+ " model.add_implication(~presences[j], ~precedences[(j, i)])\n",
" # The following bool_or will enforce that for any two intervals:\n",
" # i precedes j or j precedes i or at least one interval is not\n",
" # performed.\n",
- " model.AddBoolOr(tmp_array)\n",
+ " model.add_bool_or(tmp_array)\n",
" # Redundant constraint: it propagates early that at most one precedence\n",
" # is true.\n",
- " model.AddImplication(precedences[(i, j)], precedences[(j, i)].Not())\n",
- " model.AddImplication(precedences[(j, i)], precedences[(i, j)].Not())\n",
+ " model.add_implication(precedences[(i, j)], ~precedences[(j, i)])\n",
+ " model.add_implication(precedences[(j, i)], ~precedences[(i, j)])\n",
"\n",
" # Links precedences and ranks.\n",
" for i in all_tasks:\n",
- " model.Add(ranks[i] == sum(precedences[(j, i)] for j in all_tasks) - 1)\n",
+ " model.add(ranks[i] == sum(precedences[(j, i)] for j in all_tasks) - 1)\n",
"\n",
"\n",
- "def RankingSampleSat():\n",
+ "def ranking_sample_sat() -> None:\n",
" \"\"\"Ranks tasks in a NoOverlap constraint.\"\"\"\n",
"\n",
" model = cp_model.CpModel()\n",
@@ -153,20 +161,20 @@
" starts = []\n",
" ends = []\n",
" intervals = []\n",
- " presences = []\n",
+ " presences: list[cp_model.BoolVarT] = []\n",
" ranks = []\n",
"\n",
" # Creates intervals, half of them are optional.\n",
" for t in all_tasks:\n",
- " start = model.NewIntVar(0, horizon, f\"start[{t}]\")\n",
+ " start = model.new_int_var(0, horizon, f\"start[{t}]\")\n",
" duration = t + 1\n",
- " end = model.NewIntVar(0, horizon, f\"end[{t}]\")\n",
+ " end = model.new_int_var(0, horizon, f\"end[{t}]\")\n",
" if t < num_tasks // 2:\n",
- " interval = model.NewIntervalVar(start, duration, end, f\"interval[{t}]\")\n",
- " presence = True\n",
+ " interval = model.new_interval_var(start, duration, end, f\"interval[{t}]\")\n",
+ " presence = model.new_constant(1)\n",
" else:\n",
- " presence = model.NewBoolVar(f\"presence[{t}]\")\n",
- " interval = model.NewOptionalIntervalVar(\n",
+ " presence = model.new_bool_var(f\"presence[{t}]\")\n",
+ " interval = model.new_optional_interval_var(\n",
" start, duration, end, presence, f\"o_interval[{t}]\"\n",
" )\n",
" starts.append(start)\n",
@@ -175,51 +183,50 @@
" presences.append(presence)\n",
"\n",
" # Ranks = -1 if and only if the task is not performed.\n",
- " ranks.append(model.NewIntVar(-1, num_tasks - 1, f\"rank[{t}]\"))\n",
+ " ranks.append(model.new_int_var(-1, num_tasks - 1, f\"rank[{t}]\"))\n",
"\n",
" # Adds NoOverlap constraint.\n",
- " model.AddNoOverlap(intervals)\n",
+ " model.add_no_overlap(intervals)\n",
"\n",
" # Adds ranking constraint.\n",
- " RankTasks(model, starts, presences, ranks)\n",
+ " rank_tasks(model, starts, presences, ranks)\n",
"\n",
" # Adds a constraint on ranks.\n",
- " model.Add(ranks[0] < ranks[1])\n",
+ " model.add(ranks[0] < ranks[1])\n",
"\n",
" # Creates makespan variable.\n",
- " makespan = model.NewIntVar(0, horizon, \"makespan\")\n",
+ " makespan = model.new_int_var(0, horizon, \"makespan\")\n",
" for t in all_tasks:\n",
- " model.Add(ends[t] <= makespan).OnlyEnforceIf(presences[t])\n",
+ " model.add(ends[t] <= makespan).only_enforce_if(presences[t])\n",
"\n",
" # Minimizes makespan - fixed gain per task performed.\n",
" # As the fixed cost is less than the duration of the last interval,\n",
" # the solver will not perform the last interval.\n",
- " model.Minimize(2 * makespan - 7 * sum(presences[t] for t in all_tasks))\n",
+ " model.minimize(2 * makespan - 7 * sum(presences[t] for t in all_tasks))\n",
"\n",
" # Solves the model.\n",
" solver = cp_model.CpSolver()\n",
- " status = solver.Solve(model)\n",
+ " status = solver.solve(model)\n",
"\n",
" if status == cp_model.OPTIMAL:\n",
" # Prints out the makespan and the start times and ranks of all tasks.\n",
- " print(f\"Optimal cost: {solver.ObjectiveValue()}\")\n",
- " print(f\"Makespan: {solver.Value(makespan)}\")\n",
+ " print(f\"Optimal cost: {solver.objective_value}\")\n",
+ " print(f\"Makespan: {solver.value(makespan)}\")\n",
" for t in all_tasks:\n",
- " if solver.Value(presences[t]):\n",
+ " if solver.value(presences[t]):\n",
" print(\n",
- " f\"Task {t} starts at {solver.Value(starts[t])} \"\n",
- " f\"with rank {solver.Value(ranks[t])}\"\n",
+ " f\"Task {t} starts at {solver.value(starts[t])} \"\n",
+ " f\"with rank {solver.value(ranks[t])}\"\n",
" )\n",
" else:\n",
" print(\n",
- " f\"Task {t} in not performed \"\n",
- " f\"and ranked at {solver.Value(ranks[t])}\"\n",
+ " f\"Task {t} is not performed and ranked at {solver.value(ranks[t])}\"\n",
" )\n",
" else:\n",
" print(f\"Solver exited with nonoptimal status: {status}\")\n",
"\n",
"\n",
- "RankingSampleSat()\n",
+ "ranking_sample_sat()\n",
"\n"
]
}
diff --git a/examples/notebook/sat/reified_sample_sat.ipynb b/examples/notebook/sat/reified_sample_sat.ipynb
index a4473b647aa..0ad5d781379 100644
--- a/examples/notebook/sat/reified_sample_sat.ipynb
+++ b/examples/notebook/sat/reified_sample_sat.ipynb
@@ -5,7 +5,7 @@
"id": "google",
"metadata": {},
"source": [
- "##### Copyright 2023 Google LLC."
+ "##### Copyright 2024 Google LLC."
]
},
{
@@ -86,27 +86,27 @@
"from ortools.sat.python import cp_model\n",
"\n",
"\n",
- "def ReifiedSampleSat():\n",
+ "def reified_sample_sat():\n",
" \"\"\"Showcase creating a reified constraint.\"\"\"\n",
" model = cp_model.CpModel()\n",
"\n",
- " x = model.NewBoolVar(\"x\")\n",
- " y = model.NewBoolVar(\"y\")\n",
- " b = model.NewBoolVar(\"b\")\n",
+ " x = model.new_bool_var(\"x\")\n",
+ " y = model.new_bool_var(\"y\")\n",
+ " b = model.new_bool_var(\"b\")\n",
"\n",
" # First version using a half-reified bool and.\n",
- " model.AddBoolAnd(x, y.Not()).OnlyEnforceIf(b)\n",
+ " model.add_bool_and(x, ~y).only_enforce_if(b)\n",
"\n",
" # Second version using implications.\n",
- " model.AddImplication(b, x)\n",
- " model.AddImplication(b, y.Not())\n",
+ " model.add_implication(b, x)\n",
+ " model.add_implication(b, ~y)\n",
"\n",
" # Third version using bool or.\n",
- " model.AddBoolOr(b.Not(), x)\n",
- " model.AddBoolOr(b.Not(), y.Not())\n",
+ " model.add_bool_or(~b, x)\n",
+ " model.add_bool_or(~b, ~y)\n",
"\n",
"\n",
- "ReifiedSampleSat()\n",
+ "reified_sample_sat()\n",
"\n"
]
}
diff --git a/examples/notebook/sat/schedule_requests_sat.ipynb b/examples/notebook/sat/schedule_requests_sat.ipynb
index 645ea824f0d..42333d160a0 100644
--- a/examples/notebook/sat/schedule_requests_sat.ipynb
+++ b/examples/notebook/sat/schedule_requests_sat.ipynb
@@ -5,7 +5,7 @@
"id": "google",
"metadata": {},
"source": [
- "##### Copyright 2023 Google LLC."
+ "##### Copyright 2024 Google LLC."
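
The three encodings in the reified_sample_sat diff above state the same implication, b => (x and not y). The following minimal sketch shows them side by side in runnable form; it assumes only the snake_case cp_model API that this patch migrates to, and the final solve/print step is illustrative rather than part of the sample:

```python
from ortools.sat.python import cp_model

model = cp_model.CpModel()
x = model.new_bool_var("x")
y = model.new_bool_var("y")
b = model.new_bool_var("b")

# Half-reified form: the bool_and is enforced only when b is true.
model.add_bool_and(x, ~y).only_enforce_if(b)
# The same implication written as two implications.
model.add_implication(b, x)
model.add_implication(b, ~y)
# The same implication again in clausal (bool_or) form.
model.add_bool_or(~b, x)
model.add_bool_or(~b, ~y)

solver = cp_model.CpSolver()
status = solver.solve(model)
if status in (cp_model.OPTIMAL, cp_model.FEASIBLE):
    print(solver.value(x), solver.value(y), solver.value(b))
```
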
] }, { @@ -83,10 +83,12 @@ "metadata": {}, "outputs": [], "source": [ + "from typing import Union\n", + "\n", "from ortools.sat.python import cp_model\n", "\n", "\n", - "def main():\n", + "def main() -> None:\n", " # This program tries to find an optimal assignment of nurses to shifts\n", " # (3 shifts per day, for 7 days), subject to some constraints (see below).\n", " # Each nurse can request to be assigned to specific shifts.\n", @@ -114,17 +116,17 @@ " for n in all_nurses:\n", " for d in all_days:\n", " for s in all_shifts:\n", - " shifts[(n, d, s)] = model.NewBoolVar(f\"shift_n{n}_d{d}_s{s}\")\n", + " shifts[(n, d, s)] = model.new_bool_var(f\"shift_n{n}_d{d}_s{s}\")\n", "\n", " # Each shift is assigned to exactly one nurse in .\n", " for d in all_days:\n", " for s in all_shifts:\n", - " model.AddExactlyOne(shifts[(n, d, s)] for n in all_nurses)\n", + " model.add_exactly_one(shifts[(n, d, s)] for n in all_nurses)\n", "\n", " # Each nurse works at most one shift per day.\n", " for n in all_nurses:\n", " for d in all_days:\n", - " model.AddAtMostOne(shifts[(n, d, s)] for s in all_shifts)\n", + " model.add_at_most_one(shifts[(n, d, s)] for s in all_shifts)\n", "\n", " # Try to distribute the shifts evenly, so that each nurse works\n", " # min_shifts_per_nurse shifts. If this is not possible, because the total\n", @@ -136,15 +138,14 @@ " else:\n", " max_shifts_per_nurse = min_shifts_per_nurse + 1\n", " for n in all_nurses:\n", - " num_shifts_worked = 0\n", + " num_shifts_worked: Union[cp_model.LinearExpr, int] = 0\n", " for d in all_days:\n", " for s in all_shifts:\n", " num_shifts_worked += shifts[(n, d, s)]\n", - " model.Add(min_shifts_per_nurse <= num_shifts_worked)\n", - " model.Add(num_shifts_worked <= max_shifts_per_nurse)\n", + " model.add(min_shifts_per_nurse <= num_shifts_worked)\n", + " model.add(num_shifts_worked <= max_shifts_per_nurse)\n", "\n", - " # pylint: disable=g-complex-comprehension\n", - " model.Maximize(\n", + " model.maximize(\n", " sum(\n", " shift_requests[n][d][s] * shifts[(n, d, s)]\n", " for n in all_nurses\n", @@ -155,7 +156,7 @@ "\n", " # Creates the solver and solve.\n", " solver = cp_model.CpSolver()\n", - " status = solver.Solve(model)\n", + " status = solver.solve(model)\n", "\n", " if status == cp_model.OPTIMAL:\n", " print(\"Solution:\")\n", @@ -163,14 +164,14 @@ " print(\"Day\", d)\n", " for n in all_nurses:\n", " for s in all_shifts:\n", - " if solver.Value(shifts[(n, d, s)]) == 1:\n", + " if solver.value(shifts[(n, d, s)]) == 1:\n", " if shift_requests[n][d][s] == 1:\n", " print(\"Nurse\", n, \"works shift\", s, \"(requested).\")\n", " else:\n", " print(\"Nurse\", n, \"works shift\", s, \"(not requested).\")\n", " print()\n", " print(\n", - " f\"Number of shift requests met = {solver.ObjectiveValue()}\",\n", + " f\"Number of shift requests met = {solver.objective_value}\",\n", " f\"(out of {num_nurses * min_shifts_per_nurse})\",\n", " )\n", " else:\n", @@ -178,9 +179,9 @@ "\n", " # Statistics.\n", " print(\"\\nStatistics\")\n", - " print(f\" - conflicts: {solver.NumConflicts()}\")\n", - " print(f\" - branches : {solver.NumBranches()}\")\n", - " print(f\" - wall time: {solver.WallTime()}s\")\n", + " print(f\" - conflicts: {solver.num_conflicts}\")\n", + " print(f\" - branches : {solver.num_branches}\")\n", + " print(f\" - wall time: {solver.wall_time}s\")\n", "\n", "\n", "main()\n", diff --git a/examples/notebook/sat/scheduling_with_calendar_sample_sat.ipynb b/examples/notebook/sat/scheduling_with_calendar_sample_sat.ipynb index 
af80cc4859a..0ddd27e1e61 100644 --- a/examples/notebook/sat/scheduling_with_calendar_sample_sat.ipynb +++ b/examples/notebook/sat/scheduling_with_calendar_sample_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -89,22 +89,17 @@ "class VarArraySolutionPrinter(cp_model.CpSolverSolutionCallback):\n", " \"\"\"Print intermediate solutions.\"\"\"\n", "\n", - " def __init__(self, variables):\n", + " def __init__(self, variables: list[cp_model.IntVar]):\n", " cp_model.CpSolverSolutionCallback.__init__(self)\n", " self.__variables = variables\n", - " self.__solution_count = 0\n", "\n", - " def on_solution_callback(self):\n", - " self.__solution_count += 1\n", + " def on_solution_callback(self) -> None:\n", " for v in self.__variables:\n", - " print(f\"{v}={self.Value(v)}\", end=\" \")\n", + " print(f\"{v}={self.value(v)}\", end=\" \")\n", " print()\n", "\n", - " def solution_count(self):\n", - " return self.__solution_count\n", "\n", - "\n", - "def SchedulingWithCalendarSampleSat():\n", + "def scheduling_with_calendar_sample_sat():\n", " \"\"\"Interval spanning across a lunch break.\"\"\"\n", " model = cp_model.CpModel()\n", "\n", @@ -116,25 +111,27 @@ " # Because the duration is at least 3 hours, work cannot start after 15h.\n", " # Because of the break, work cannot start at 13h.\n", "\n", - " start = model.NewIntVarFromDomain(\n", - " cp_model.Domain.FromIntervals([(8, 12), (14, 15)]), \"start\"\n", + " start = model.new_int_var_from_domain(\n", + " cp_model.Domain.from_intervals([(8, 12), (14, 15)]), \"start\"\n", " )\n", - " duration = model.NewIntVar(3, 4, \"duration\")\n", - " end = model.NewIntVar(8, 18, \"end\")\n", - " unused_interval = model.NewIntervalVar(start, duration, end, \"interval\")\n", + " duration = model.new_int_var(3, 4, \"duration\")\n", + " end = model.new_int_var(8, 18, \"end\")\n", + " unused_interval = model.new_interval_var(start, duration, end, \"interval\")\n", "\n", " # We have 2 states (spanning across lunch or not)\n", - " across = model.NewBoolVar(\"across\")\n", - " non_spanning_hours = cp_model.Domain.FromValues([8, 9, 10, 14, 15])\n", - " model.AddLinearExpressionInDomain(start, non_spanning_hours).OnlyEnforceIf(\n", - " across.Not()\n", + " across = model.new_bool_var(\"across\")\n", + " non_spanning_hours = cp_model.Domain.from_values([8, 9, 10, 14, 15])\n", + " model.add_linear_expression_in_domain(start, non_spanning_hours).only_enforce_if(\n", + " ~across\n", " )\n", - " model.AddLinearConstraint(start, 11, 12).OnlyEnforceIf(across)\n", - " model.Add(duration == 3).OnlyEnforceIf(across.Not())\n", - " model.Add(duration == 4).OnlyEnforceIf(across)\n", + " model.add_linear_constraint(start, 11, 12).only_enforce_if(across)\n", + " model.add(duration == 3).only_enforce_if(~across)\n", + " model.add(duration == 4).only_enforce_if(across)\n", "\n", " # Search for x values in increasing order.\n", - " model.AddDecisionStrategy([start], cp_model.CHOOSE_FIRST, cp_model.SELECT_MIN_VALUE)\n", + " model.add_decision_strategy(\n", + " [start], cp_model.CHOOSE_FIRST, cp_model.SELECT_MIN_VALUE\n", + " )\n", "\n", " # Create a solver and solve with a fixed search.\n", " solver = cp_model.CpSolver()\n", @@ -146,10 +143,10 @@ "\n", " # Search and print all solutions.\n", " solution_printer = VarArraySolutionPrinter([start, duration, across])\n", - " solver.Solve(model, solution_printer)\n", + " solver.solve(model, solution_printer)\n", "\n", "\n", - 
"SchedulingWithCalendarSampleSat()\n", + "scheduling_with_calendar_sample_sat()\n", "\n" ] } diff --git a/examples/notebook/sat/search_for_all_solutions_sample_sat.ipynb b/examples/notebook/sat/search_for_all_solutions_sample_sat.ipynb index 7b4079a024b..90bb0d27f6d 100644 --- a/examples/notebook/sat/search_for_all_solutions_sample_sat.ipynb +++ b/examples/notebook/sat/search_for_all_solutions_sample_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -89,34 +89,35 @@ "class VarArraySolutionPrinter(cp_model.CpSolverSolutionCallback):\n", " \"\"\"Print intermediate solutions.\"\"\"\n", "\n", - " def __init__(self, variables):\n", + " def __init__(self, variables: list[cp_model.IntVar]):\n", " cp_model.CpSolverSolutionCallback.__init__(self)\n", " self.__variables = variables\n", " self.__solution_count = 0\n", "\n", - " def on_solution_callback(self):\n", + " def on_solution_callback(self) -> None:\n", " self.__solution_count += 1\n", " for v in self.__variables:\n", - " print(f\"{v}={self.Value(v)}\", end=\" \")\n", + " print(f\"{v}={self.value(v)}\", end=\" \")\n", " print()\n", "\n", - " def solution_count(self):\n", + " @property\n", + " def solution_count(self) -> int:\n", " return self.__solution_count\n", "\n", "\n", - "def SearchForAllSolutionsSampleSat():\n", + "def search_for_all_solutions_sample_sat():\n", " \"\"\"Showcases calling the solver to search for all solutions.\"\"\"\n", " # Creates the model.\n", " model = cp_model.CpModel()\n", "\n", " # Creates the variables.\n", " num_vals = 3\n", - " x = model.NewIntVar(0, num_vals - 1, \"x\")\n", - " y = model.NewIntVar(0, num_vals - 1, \"y\")\n", - " z = model.NewIntVar(0, num_vals - 1, \"z\")\n", + " x = model.new_int_var(0, num_vals - 1, \"x\")\n", + " y = model.new_int_var(0, num_vals - 1, \"y\")\n", + " z = model.new_int_var(0, num_vals - 1, \"z\")\n", "\n", " # Create the constraints.\n", - " model.Add(x != y)\n", + " model.add(x != y)\n", "\n", " # Create a solver and solve.\n", " solver = cp_model.CpSolver()\n", @@ -124,13 +125,13 @@ " # Enumerate all solutions.\n", " solver.parameters.enumerate_all_solutions = True\n", " # Solve.\n", - " status = solver.Solve(model, solution_printer)\n", + " status = solver.solve(model, solution_printer)\n", "\n", - " print(f\"Status = {solver.StatusName(status)}\")\n", - " print(f\"Number of solutions found: {solution_printer.solution_count()}\")\n", + " print(f\"Status = {solver.status_name(status)}\")\n", + " print(f\"Number of solutions found: {solution_printer.solution_count}\")\n", "\n", "\n", - "SearchForAllSolutionsSampleSat()\n", + "search_for_all_solutions_sample_sat()\n", "\n" ] } diff --git a/examples/notebook/sat/simple_sat_program.ipynb b/examples/notebook/sat/simple_sat_program.ipynb index a1d0e86ac2e..fcb5827ea93 100644 --- a/examples/notebook/sat/simple_sat_program.ipynb +++ b/examples/notebook/sat/simple_sat_program.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
] }, { @@ -86,33 +86,33 @@ "from ortools.sat.python import cp_model\n", "\n", "\n", - "def SimpleSatProgram():\n", + "def simple_sat_program():\n", " \"\"\"Minimal CP-SAT example to showcase calling the solver.\"\"\"\n", " # Creates the model.\n", " model = cp_model.CpModel()\n", "\n", " # Creates the variables.\n", " num_vals = 3\n", - " x = model.NewIntVar(0, num_vals - 1, \"x\")\n", - " y = model.NewIntVar(0, num_vals - 1, \"y\")\n", - " z = model.NewIntVar(0, num_vals - 1, \"z\")\n", + " x = model.new_int_var(0, num_vals - 1, \"x\")\n", + " y = model.new_int_var(0, num_vals - 1, \"y\")\n", + " z = model.new_int_var(0, num_vals - 1, \"z\")\n", "\n", " # Creates the constraints.\n", - " model.Add(x != y)\n", + " model.add(x != y)\n", "\n", " # Creates a solver and solves the model.\n", " solver = cp_model.CpSolver()\n", - " status = solver.Solve(model)\n", + " status = solver.solve(model)\n", "\n", " if status == cp_model.OPTIMAL or status == cp_model.FEASIBLE:\n", - " print(f\"x = {solver.Value(x)}\")\n", - " print(f\"y = {solver.Value(y)}\")\n", - " print(f\"z = {solver.Value(z)}\")\n", + " print(f\"x = {solver.value(x)}\")\n", + " print(f\"y = {solver.value(y)}\")\n", + " print(f\"z = {solver.value(z)}\")\n", " else:\n", " print(\"No solution found.\")\n", "\n", "\n", - "SimpleSatProgram()\n", + "simple_sat_program()\n", "\n" ] } diff --git a/examples/notebook/sat/solution_hinting_sample_sat.ipynb b/examples/notebook/sat/solution_hinting_sample_sat.ipynb index 7933b1a3da3..a3a5ff8b045 100644 --- a/examples/notebook/sat/solution_hinting_sample_sat.ipynb +++ b/examples/notebook/sat/solution_hinting_sample_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
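
The renaming this patch applies across all samples is mechanical: CamelCase methods become snake_case, `.Not()` becomes the `~` operator, and several solver accessors become properties. A hedged summary sketch, with pairs taken only from changes visible in this diff:

```python
from ortools.sat.python import cp_model

# Old name (before this patch)      -> new name (after this patch):
#   model.NewIntVar(...)            -> model.new_int_var(...)
#   model.NewBoolVar(...)           -> model.new_bool_var(...)
#   model.Add(...).OnlyEnforceIf(b) -> model.add(...).only_enforce_if(b)
#   model.AddCircuit(arcs)          -> model.add_circuit(arcs)
#   solver.Solve(model)             -> solver.solve(model)
#   literal.Not()                   -> ~literal
# Accessors that become properties (no parentheses):
#   solver.ObjectiveValue()         -> solver.objective_value
#   solver.NumConflicts()           -> solver.num_conflicts
#   solver.WallTime()               -> solver.wall_time

model = cp_model.CpModel()
x = model.new_int_var(0, 2, "x")
b = model.new_bool_var("b")
model.add(x == 1).only_enforce_if(~b)

solver = cp_model.CpSolver()
status = solver.solve(model)
print(solver.status_name(status), solver.wall_time)
```
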
] }, { @@ -86,36 +86,36 @@ "from ortools.sat.python import cp_model\n", "\n", "\n", - "def SolutionHintingSampleSat():\n", + "def solution_hinting_sample_sat():\n", " \"\"\"Showcases solution hinting.\"\"\"\n", " # Creates the model.\n", " model = cp_model.CpModel()\n", "\n", " # Creates the variables.\n", " num_vals = 3\n", - " x = model.NewIntVar(0, num_vals - 1, \"x\")\n", - " y = model.NewIntVar(0, num_vals - 1, \"y\")\n", - " z = model.NewIntVar(0, num_vals - 1, \"z\")\n", + " x = model.new_int_var(0, num_vals - 1, \"x\")\n", + " y = model.new_int_var(0, num_vals - 1, \"y\")\n", + " z = model.new_int_var(0, num_vals - 1, \"z\")\n", "\n", " # Creates the constraints.\n", - " model.Add(x != y)\n", + " model.add(x != y)\n", "\n", - " model.Maximize(x + 2 * y + 3 * z)\n", + " model.maximize(x + 2 * y + 3 * z)\n", "\n", " # Solution hinting: x <- 1, y <- 2\n", - " model.AddHint(x, 1)\n", - " model.AddHint(y, 2)\n", + " model.add_hint(x, 1)\n", + " model.add_hint(y, 2)\n", "\n", " # Creates a solver and solves.\n", " solver = cp_model.CpSolver()\n", " solution_printer = cp_model.VarArrayAndObjectiveSolutionPrinter([x, y, z])\n", - " status = solver.Solve(model, solution_printer)\n", + " status = solver.solve(model, solution_printer)\n", "\n", - " print(f\"Status = {solver.StatusName(status)}\")\n", - " print(f\"Number of solutions found: {solution_printer.solution_count()}\")\n", + " print(f\"Status = {solver.status_name(status)}\")\n", + " print(f\"Number of solutions found: {solution_printer.solution_count}\")\n", "\n", "\n", - "SolutionHintingSampleSat()\n", + "solution_hinting_sample_sat()\n", "\n" ] } diff --git a/examples/notebook/sat/solve_and_print_intermediate_solutions_sample_sat.ipynb b/examples/notebook/sat/solve_and_print_intermediate_solutions_sample_sat.ipynb index 9882e2b3cbe..f3b5e59e2e2 100644 --- a/examples/notebook/sat/solve_and_print_intermediate_solutions_sample_sat.ipynb +++ b/examples/notebook/sat/solve_and_print_intermediate_solutions_sample_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
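
The hints in the solution_hinting diff above only seed the search: they are not constraints, and the solver is free to move away from the hinted values while optimizing. A minimal runnable sketch of the mechanism (bounds and objective are illustrative):

```python
from ortools.sat.python import cp_model

model = cp_model.CpModel()
x = model.new_int_var(0, 2, "x")
y = model.new_int_var(0, 2, "y")
model.add(x != y)
model.maximize(x + 2 * y)

# Seed the search; the optimum may or may not match the hint.
model.add_hint(x, 1)
model.add_hint(y, 2)

solver = cp_model.CpSolver()
status = solver.solve(model)
if status == cp_model.OPTIMAL:
    print(solver.value(x), solver.value(y))
```
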
] }, { @@ -90,49 +90,50 @@ "class VarArrayAndObjectiveSolutionPrinter(cp_model.CpSolverSolutionCallback):\n", " \"\"\"Print intermediate solutions.\"\"\"\n", "\n", - " def __init__(self, variables):\n", + " def __init__(self, variables: list[cp_model.IntVar]):\n", " cp_model.CpSolverSolutionCallback.__init__(self)\n", " self.__variables = variables\n", " self.__solution_count = 0\n", "\n", - " def on_solution_callback(self):\n", + " def on_solution_callback(self) -> None:\n", " print(f\"Solution {self.__solution_count}\")\n", - " print(f\" objective value = {self.ObjectiveValue()}\")\n", + " print(f\" objective value = {self.objective_value}\")\n", " for v in self.__variables:\n", - " print(f\" {v}={self.Value(v)}\", end=\" \")\n", + " print(f\" {v}={self.value(v)}\", end=\" \")\n", " print()\n", " self.__solution_count += 1\n", "\n", - " def solution_count(self):\n", + " @property\n", + " def solution_count(self) -> int:\n", " return self.__solution_count\n", "\n", "\n", - "def SolveAndPrintIntermediateSolutionsSampleSat():\n", + "def solve_and_print_intermediate_solutions_sample_sat():\n", " \"\"\"Showcases printing intermediate solutions found during search.\"\"\"\n", " # Creates the model.\n", " model = cp_model.CpModel()\n", "\n", " # Creates the variables.\n", " num_vals = 3\n", - " x = model.NewIntVar(0, num_vals - 1, \"x\")\n", - " y = model.NewIntVar(0, num_vals - 1, \"y\")\n", - " z = model.NewIntVar(0, num_vals - 1, \"z\")\n", + " x = model.new_int_var(0, num_vals - 1, \"x\")\n", + " y = model.new_int_var(0, num_vals - 1, \"y\")\n", + " z = model.new_int_var(0, num_vals - 1, \"z\")\n", "\n", " # Creates the constraints.\n", - " model.Add(x != y)\n", + " model.add(x != y)\n", "\n", - " model.Maximize(x + 2 * y + 3 * z)\n", + " model.maximize(x + 2 * y + 3 * z)\n", "\n", " # Creates a solver and solves.\n", " solver = cp_model.CpSolver()\n", " solution_printer = VarArrayAndObjectiveSolutionPrinter([x, y, z])\n", - " status = solver.Solve(model, solution_printer)\n", + " status = solver.solve(model, solution_printer)\n", "\n", - " print(f\"Status = {solver.StatusName(status)}\")\n", - " print(f\"Number of solutions found: {solution_printer.solution_count()}\")\n", + " print(f\"Status = {solver.status_name(status)}\")\n", + " print(f\"Number of solutions found: {solution_printer.solution_count}\")\n", "\n", "\n", - "SolveAndPrintIntermediateSolutionsSampleSat()\n", + "solve_and_print_intermediate_solutions_sample_sat()\n", "\n" ] } diff --git a/examples/notebook/sat/solve_with_time_limit_sample_sat.ipynb b/examples/notebook/sat/solve_with_time_limit_sample_sat.ipynb index 186fccec296..1bd9182de26 100644 --- a/examples/notebook/sat/solve_with_time_limit_sample_sat.ipynb +++ b/examples/notebook/sat/solve_with_time_limit_sample_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
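
The callback in the diff above prints `objective_value` at each improving solution. Assuming the callback also exposes `best_objective_bound` in the same snake_case form (an assumption; it mirrors the solver-side accessor), the remaining optimality gap can be reported per solution:

```python
from ortools.sat.python import cp_model


class GapPrinter(cp_model.CpSolverSolutionCallback):
    """Prints the objective and the current best bound at each solution."""

    def __init__(self) -> None:
        cp_model.CpSolverSolutionCallback.__init__(self)

    def on_solution_callback(self) -> None:
        # best_objective_bound is assumed to exist on the callback.
        print(f"objective={self.objective_value} bound={self.best_objective_bound}")


model = cp_model.CpModel()
x = model.new_int_var(0, 2, "x")
y = model.new_int_var(0, 2, "y")
model.add(x != y)
model.maximize(x + 2 * y)

solver = cp_model.CpSolver()
solver.solve(model, GapPrinter())
```
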
] }, { @@ -86,17 +86,17 @@ "from ortools.sat.python import cp_model\n", "\n", "\n", - "def SolveWithTimeLimitSampleSat():\n", + "def solve_with_time_limit_sample_sat():\n", " \"\"\"Minimal CP-SAT example to showcase calling the solver.\"\"\"\n", " # Creates the model.\n", " model = cp_model.CpModel()\n", " # Creates the variables.\n", " num_vals = 3\n", - " x = model.NewIntVar(0, num_vals - 1, \"x\")\n", - " y = model.NewIntVar(0, num_vals - 1, \"y\")\n", - " z = model.NewIntVar(0, num_vals - 1, \"z\")\n", + " x = model.new_int_var(0, num_vals - 1, \"x\")\n", + " y = model.new_int_var(0, num_vals - 1, \"y\")\n", + " z = model.new_int_var(0, num_vals - 1, \"z\")\n", " # Adds an all-different constraint.\n", - " model.Add(x != y)\n", + " model.add(x != y)\n", "\n", " # Creates a solver and solves the model.\n", " solver = cp_model.CpSolver()\n", @@ -104,15 +104,15 @@ " # Sets a time limit of 10 seconds.\n", " solver.parameters.max_time_in_seconds = 10.0\n", "\n", - " status = solver.Solve(model)\n", + " status = solver.solve(model)\n", "\n", " if status == cp_model.OPTIMAL:\n", - " print(f\"x = {solver.Value(x)}\")\n", - " print(f\"y = {solver.Value(y)}\")\n", - " print(f\"z = {solver.Value(z)}\")\n", + " print(f\"x = {solver.value(x)}\")\n", + " print(f\"y = {solver.value(y)}\")\n", + " print(f\"z = {solver.value(z)}\")\n", "\n", "\n", - "SolveWithTimeLimitSampleSat()\n", + "solve_with_time_limit_sample_sat()\n", "\n" ] } diff --git a/examples/notebook/sat/step_function_sample_sat.ipynb b/examples/notebook/sat/step_function_sample_sat.ipynb index e6b01496f51..d0acd538eea 100644 --- a/examples/notebook/sat/step_function_sample_sat.ipynb +++ b/examples/notebook/sat/step_function_sample_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." 
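
In the time-limit sample above, `solver.parameters` is a SatParameters proto, so any other field is set the same way as `max_time_in_seconds`. A short sketch with fields that all appear elsewhere in this patch:

```python
from ortools.sat.python import cp_model

model = cp_model.CpModel()
x = model.new_int_var(0, 10, "x")
model.add(x >= 3)

solver = cp_model.CpSolver()
solver.parameters.max_time_in_seconds = 10.0       # wall-clock budget
solver.parameters.log_search_progress = True       # verbose search log
solver.parameters.enumerate_all_solutions = False  # default one-solution search
status = solver.solve(model)
print(solver.status_name(status))
```
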
] }, { @@ -89,20 +89,15 @@ "class VarArraySolutionPrinter(cp_model.CpSolverSolutionCallback):\n", " \"\"\"Print intermediate solutions.\"\"\"\n", "\n", - " def __init__(self, variables):\n", + " def __init__(self, variables: list[cp_model.IntVar]):\n", " cp_model.CpSolverSolutionCallback.__init__(self)\n", " self.__variables = variables\n", - " self.__solution_count = 0\n", "\n", - " def on_solution_callback(self):\n", - " self.__solution_count += 1\n", + " def on_solution_callback(self) -> None:\n", " for v in self.__variables:\n", - " print(f\"{v}={self.Value(v)}\", end=\" \")\n", + " print(f\"{v}={self.value(v)}\", end=\" \")\n", " print()\n", "\n", - " def solution_count(self):\n", - " return self.__solution_count\n", - "\n", "\n", "def step_function_sample_sat():\n", " \"\"\"Encode the step function.\"\"\"\n", @@ -111,7 +106,7 @@ " model = cp_model.CpModel()\n", "\n", " # Declare our primary variable.\n", - " x = model.NewIntVar(0, 20, \"x\")\n", + " x = model.new_int_var(0, 20, \"x\")\n", "\n", " # Create the expression variable and implement the step function\n", " # Note it is not defined for x == 2.\n", @@ -122,32 +117,32 @@ " # -- --- 0\n", " # 0 ================ 20\n", " #\n", - " expr = model.NewIntVar(0, 3, \"expr\")\n", + " expr = model.new_int_var(0, 3, \"expr\")\n", "\n", " # expr == 0 on [5, 6] U [8, 10]\n", - " b0 = model.NewBoolVar(\"b0\")\n", - " model.AddLinearExpressionInDomain(\n", - " x, cp_model.Domain.FromIntervals([(5, 6), (8, 10)])\n", - " ).OnlyEnforceIf(b0)\n", - " model.Add(expr == 0).OnlyEnforceIf(b0)\n", + " b0 = model.new_bool_var(\"b0\")\n", + " model.add_linear_expression_in_domain(\n", + " x, cp_model.Domain.from_intervals([(5, 6), (8, 10)])\n", + " ).only_enforce_if(b0)\n", + " model.add(expr == 0).only_enforce_if(b0)\n", "\n", " # expr == 2 on [0, 1] U [3, 4] U [11, 20]\n", - " b2 = model.NewBoolVar(\"b2\")\n", - " model.AddLinearExpressionInDomain(\n", - " x, cp_model.Domain.FromIntervals([(0, 1), (3, 4), (11, 20)])\n", - " ).OnlyEnforceIf(b2)\n", - " model.Add(expr == 2).OnlyEnforceIf(b2)\n", + " b2 = model.new_bool_var(\"b2\")\n", + " model.add_linear_expression_in_domain(\n", + " x, cp_model.Domain.from_intervals([(0, 1), (3, 4), (11, 20)])\n", + " ).only_enforce_if(b2)\n", + " model.add(expr == 2).only_enforce_if(b2)\n", "\n", " # expr == 3 when x == 7\n", - " b3 = model.NewBoolVar(\"b3\")\n", - " model.Add(x == 7).OnlyEnforceIf(b3)\n", - " model.Add(expr == 3).OnlyEnforceIf(b3)\n", + " b3 = model.new_bool_var(\"b3\")\n", + " model.add(x == 7).only_enforce_if(b3)\n", + " model.add(expr == 3).only_enforce_if(b3)\n", "\n", " # At least one bi is true. 
(we could use an exactly one constraint).\n", - " model.AddBoolOr(b0, b2, b3)\n", + " model.add_bool_or(b0, b2, b3)\n", "\n", " # Search for x values in increasing order.\n", - " model.AddDecisionStrategy([x], cp_model.CHOOSE_FIRST, cp_model.SELECT_MIN_VALUE)\n", + " model.add_decision_strategy([x], cp_model.CHOOSE_FIRST, cp_model.SELECT_MIN_VALUE)\n", "\n", " # Create a solver and solve with a fixed search.\n", " solver = cp_model.CpSolver()\n", @@ -159,7 +154,7 @@ "\n", " # Search and print out all solutions.\n", " solution_printer = VarArraySolutionPrinter([x, expr])\n", - " solver.Solve(model, solution_printer)\n", + " solver.solve(model, solution_printer)\n", "\n", "\n", "step_function_sample_sat()\n", diff --git a/examples/notebook/sat/stop_after_n_solutions_sample_sat.ipynb b/examples/notebook/sat/stop_after_n_solutions_sample_sat.ipynb index 572b6c6f5e7..091b49d23fa 100644 --- a/examples/notebook/sat/stop_after_n_solutions_sample_sat.ipynb +++ b/examples/notebook/sat/stop_after_n_solutions_sample_sat.ipynb @@ -5,7 +5,7 @@ "id": "google", "metadata": {}, "source": [ - "##### Copyright 2023 Google LLC." + "##### Copyright 2024 Google LLC." ] }, { @@ -89,34 +89,35 @@ "class VarArraySolutionPrinterWithLimit(cp_model.CpSolverSolutionCallback):\n", " \"\"\"Print intermediate solutions.\"\"\"\n", "\n", - " def __init__(self, variables, limit):\n", + " def __init__(self, variables: list[cp_model.IntVar], limit: int):\n", " cp_model.CpSolverSolutionCallback.__init__(self)\n", " self.__variables = variables\n", " self.__solution_count = 0\n", " self.__solution_limit = limit\n", "\n", - " def on_solution_callback(self):\n", + " def on_solution_callback(self) -> None:\n", " self.__solution_count += 1\n", " for v in self.__variables:\n", - " print(f\"{v}={self.Value(v)}\", end=\" \")\n", + " print(f\"{v}={self.value(v)}\", end=\" \")\n", " print()\n", " if self.__solution_count >= self.__solution_limit:\n", " print(f\"Stop search after {self.__solution_limit} solutions\")\n", - " self.StopSearch()\n", + " self.stop_search()\n", "\n", - " def solution_count(self):\n", + " @property\n", + " def solution_count(self) -> int:\n", " return self.__solution_count\n", "\n", "\n", - "def StopAfterNSolutionsSampleSat():\n", + "def stop_after_n_solutions_sample_sat():\n", " \"\"\"Showcases calling the solver to search for small number of solutions.\"\"\"\n", " # Creates the model.\n", " model = cp_model.CpModel()\n", " # Creates the variables.\n", " num_vals = 3\n", - " x = model.NewIntVar(0, num_vals - 1, \"x\")\n", - " y = model.NewIntVar(0, num_vals - 1, \"y\")\n", - " z = model.NewIntVar(0, num_vals - 1, \"z\")\n", + " x = model.new_int_var(0, num_vals - 1, \"x\")\n", + " y = model.new_int_var(0, num_vals - 1, \"y\")\n", + " z = model.new_int_var(0, num_vals - 1, \"z\")\n", "\n", " # Create a solver and solve.\n", " solver = cp_model.CpSolver()\n", @@ -124,13 +125,13 @@ " # Enumerate all solutions.\n", " solver.parameters.enumerate_all_solutions = True\n", " # Solve.\n", - " status = solver.Solve(model, solution_printer)\n", - " print(f\"Status = {solver.StatusName(status)}\")\n", - " print(f\"Number of solutions found: {solution_printer.solution_count()}\")\n", - " assert solution_printer.solution_count() == 5\n", + " status = solver.solve(model, solution_printer)\n", + " print(f\"Status = {solver.status_name(status)}\")\n", + " print(f\"Number of solutions found: {solution_printer.solution_count}\")\n", + " assert solution_printer.solution_count == 5\n", "\n", "\n", - 
"StopAfterNSolutionsSampleSat()\n", + "stop_after_n_solutions_sample_sat()\n", "\n" ] } diff --git a/examples/notebook/sat/transitions_in_no_overlap_sample_sat.ipynb b/examples/notebook/sat/transitions_in_no_overlap_sample_sat.ipynb new file mode 100644 index 00000000000..77b4f68c4ca --- /dev/null +++ b/examples/notebook/sat/transitions_in_no_overlap_sample_sat.ipynb @@ -0,0 +1,280 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "google", + "metadata": {}, + "source": [ + "##### Copyright 2024 Google LLC." + ] + }, + { + "cell_type": "markdown", + "id": "apache", + "metadata": {}, + "source": [ + "Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "you may not use this file except in compliance with the License.\n", + "You may obtain a copy of the License at\n", + "\n", + " http://www.apache.org/licenses/LICENSE-2.0\n", + "\n", + "Unless required by applicable law or agreed to in writing, software\n", + "distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "See the License for the specific language governing permissions and\n", + "limitations under the License.\n" + ] + }, + { + "cell_type": "markdown", + "id": "basename", + "metadata": {}, + "source": [ + "# transitions_in_no_overlap_sample_sat" + ] + }, + { + "cell_type": "markdown", + "id": "link", + "metadata": {}, + "source": [ + "\n", + "\n", + "\n", + "
\n", + "Run in Google Colab\n", + "\n", + "View source on GitHub\n", + "
" + ] + }, + { + "cell_type": "markdown", + "id": "doc", + "metadata": {}, + "source": [ + "First, you must install [ortools](https://pypi.org/project/ortools/) package in this colab." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "install", + "metadata": {}, + "outputs": [], + "source": [ + "%pip install ortools" + ] + }, + { + "cell_type": "markdown", + "id": "description", + "metadata": {}, + "source": [ + "\n", + "Implements transition times and costs in a no_overlap constraint.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "code", + "metadata": {}, + "outputs": [], + "source": [ + "from typing import Dict, List, Sequence, Tuple, Union\n", + "\n", + "from ortools.sat.python import cp_model\n", + "\n", + "\n", + "def transitive_reduction_with_circuit_delays_and_penalties(\n", + " model: cp_model.CpModel,\n", + " starts: Sequence[cp_model.IntVar],\n", + " durations: Sequence[int],\n", + " presences: Sequence[Union[cp_model.IntVar, bool]],\n", + " penalties: Dict[Tuple[int, int], int],\n", + " delays: Dict[Tuple[int, int], int],\n", + ") -> Sequence[Tuple[cp_model.IntVar, int]]:\n", + " \"\"\"This method uses a circuit constraint to rank tasks.\n", + "\n", + " This method assumes that all starts are disjoint, meaning that all tasks have\n", + " a strictly positive duration, and they appear in the same NoOverlap\n", + " constraint.\n", + "\n", + " The extra node (with id 0) will be used to decide which task is first with\n", + " its only outgoing arc, and which task is last with its only incoming arc.\n", + " Each task i will be associated with id i + 1, and an arc between i + 1 and j +\n", + " 1 indicates that j is the immediate successor of i.\n", + "\n", + " The circuit constraint ensures there is at most 1 hamiltonian cycle of\n", + " length > 1. If no such path exists, then no tasks are active.\n", + " We also need to enforce that any hamiltonian cycle of size > 1 must contain\n", + " the node 0. 
And thus, there is a self loop on node 0 iff the circuit is empty.\n",
+ "\n",
+ " Args:\n",
+ " model: The CpModel to add the constraints to.\n",
+ " starts: The array of starts variables of all tasks.\n",
+ " durations: the durations of all tasks.\n",
+ " presences: The array of presence variables of all tasks.\n",
+ " penalties: a dictionary mapping a (`tail_index`, `head_index`) pair to the\n",
+ " `penalty` added to the cost when task `head_index` is the immediate\n",
+ " successor of task `tail_index`.\n",
+ " delays: a dictionary mapping a (`tail_index`, `head_index`) pair to the\n",
+ " extra `delay` inserted between the end of task `tail_index` and the\n",
+ " start of its immediate successor `head_index`.\n",
+ "\n",
+ " Returns:\n",
+ " The list of pairs (Boolean variable, penalty) to be added to the objective.\n",
+ " \"\"\"\n",
+ "\n",
+ " num_tasks = len(starts)\n",
+ " all_tasks = range(num_tasks)\n",
+ "\n",
+ " arcs: List[cp_model.ArcT] = []\n",
+ " penalty_terms = []\n",
+ " for i in all_tasks:\n",
+ " # if node i is first.\n",
+ " start_lit = model.new_bool_var(f\"start_{i}\")\n",
+ " arcs.append((0, i + 1, start_lit))\n",
+ "\n",
+ " # As there are no other constraints on the problem, we can add this\n",
+ " # redundant constraint.\n",
+ " model.add(starts[i] == 0).only_enforce_if(start_lit)\n",
+ "\n",
+ " # if node i is last.\n",
+ " end_lit = model.new_bool_var(f\"end_{i}\")\n",
+ " arcs.append((i + 1, 0, end_lit))\n",
+ "\n",
+ " for j in all_tasks:\n",
+ " if i == j:\n",
+ " arcs.append((i + 1, i + 1, ~presences[i]))\n",
+ " else:\n",
+ " literal = model.new_bool_var(f\"arc_{i}_to_{j}\")\n",
+ " arcs.append((i + 1, j + 1, literal))\n",
+ "\n",
+ " # To perform the transitive reduction from precedences to successors,\n",
+ " # we need to tie the starts of the tasks with 'literal'.\n",
+ " # In a pure problem, the following inequality could be an equality.\n",
+ " # It is not true in general.\n",
+ " #\n",
+ " # Note that we could use this literal to penalize the transition, add an\n",
+ " # extra delay to the precedence.\n",
+ " min_delay = 0\n",
+ " key = (i, j)\n",
+ " if key in delays:\n",
+ " min_delay = delays[key]\n",
+ " model.add(\n",
+ " starts[j] >= starts[i] + durations[i] + min_delay\n",
+ " ).only_enforce_if(literal)\n",
+ "\n",
+ " # Create the penalties.\n",
+ " if key in penalties:\n",
+ " penalty_terms.append((literal, penalties[key]))\n",
+ "\n",
+ " # Manage the empty circuit.\n",
+ " empty = model.new_bool_var(\"empty\")\n",
+ " arcs.append((0, 0, empty))\n",
+ "\n",
+ " for i in all_tasks:\n",
+ " model.add_implication(empty, ~presences[i])\n",
+ "\n",
+ " # Add the circuit constraint.\n",
+ " model.add_circuit(arcs)\n",
+ "\n",
+ " return penalty_terms\n",
+ "\n",
+ "\n",
+ "def transitions_in_no_overlap_sample_sat():\n",
+ " \"\"\"Implement transitions in a NoOverlap constraint.\"\"\"\n",
+ "\n",
+ " model = cp_model.CpModel()\n",
+ " horizon = 40\n",
+ " num_tasks = 4\n",
+ "\n",
+ " # Breaking the natural sequence induces a fixed penalty.\n",
+ " penalties = {\n",
+ " (1, 0): 10,\n",
+ " (2, 0): 10,\n",
+ " (3, 0): 10,\n",
+ " (2, 1): 10,\n",
+ " (3, 1): 10,\n",
+ " (3, 2): 10,\n",
+ " }\n",
+ "\n",
+ " # Switching from an odd to even or even to odd task indices induces a delay.\n",
+ " delays = {\n",
+ " (1, 0): 10,\n",
+ " (0, 1): 10,\n",
+ " (3, 0): 10,\n",
+ " (0, 3): 10,\n",
+ " (1, 2): 10,\n",
+ " (2, 1): 10,\n",
+ " (3, 2): 10,\n",
+ " (2, 3): 10,\n",
+ " }\n",
+ "\n",
+ " all_tasks = range(num_tasks)\n",
+ "\n",
+ " starts = []\n",
+ " durations = []\n",
+ " intervals = []\n",
+ " presences = []\n",
+ "\n",
+ " # Creates intervals, all present. But the cost is robust w.r.t. optional\n",
+ " # intervals.\n",
+ " for t in all_tasks:\n",
+ " start = model.new_int_var(0, horizon, f\"start[{t}]\")\n",
+ " duration = 5\n",
+ " presence = True\n",
+ " interval = model.new_optional_fixed_size_interval_var(\n",
+ " start, duration, presence, f\"opt_interval[{t}]\"\n",
+ " )\n",
+ "\n",
+ " starts.append(start)\n",
+ " durations.append(duration)\n",
+ " intervals.append(interval)\n",
+ " presences.append(presence)\n",
+ "\n",
+ " # Adds NoOverlap constraint.\n",
+ " model.add_no_overlap(intervals)\n",
+ "\n",
+ " # Adds ranking constraint.\n",
+ " penalty_terms = transitive_reduction_with_circuit_delays_and_penalties(\n",
+ " model, starts, durations, presences, penalties, delays\n",
+ " )\n",
+ "\n",
+ " # Minimize the sum of penalties.\n",
+ " model.minimize(sum(var * penalty for var, penalty in penalty_terms))\n",
+ "\n",
+ " # In practice, only one penalty can happen. Thus the two even tasks are\n",
+ " # together, same for the two odd tasks.\n",
+ " # Because of the penalties, the optimal sequence is 0 -> 2 -> 1 -> 3\n",
+ " # which induces one penalty and one delay.\n",
+ "\n",
+ " # Solves the model.\n",
+ " solver = cp_model.CpSolver()\n",
+ " status = solver.solve(model)\n",
+ "\n",
+ " if status == cp_model.OPTIMAL:\n",
+ " # Prints out the start times of all performed tasks.\n",
+ " print(f\"Optimal cost: {solver.objective_value}\")\n",
+ " for t in all_tasks:\n",
+ " if solver.value(presences[t]):\n",
+ " print(f\"Task {t} starts at {solver.value(starts[t])} \")\n",
+ " else:\n",
+ " print(f\"Task {t} is not performed\")\n",
+ " else:\n",
+ " print(f\"Solver exited with nonoptimal status: {status}\")\n",
+ "\n",
+ "\n",
+ "transitions_in_no_overlap_sample_sat()\n",
+ "\n"
+ ]
+ }
+ ],
+ "metadata": {},
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/examples/python/BUILD.bazel b/examples/python/BUILD.bazel
index 721320311ad..452ad05a6a3 100644
--- a/examples/python/BUILD.bazel
+++ b/examples/python/BUILD.bazel
@@ -54,10 +54,16 @@ code_sample_test_arg_py(
suffix = "salbp_20_1",
)
+code_sample_py("maximize_combinations_sat")
+
code_sample_py("maze_escape_sat")
code_sample_py("no_wait_baking_scheduling_sat")
+code_sample_py("pell_equation_sat")
+
+code_sample_py("pentominoes_sat")
+
code_sample_py("prize_collecting_tsp_sat")
code_sample_py("prize_collecting_vrp_sat")
@@ -94,6 +100,8 @@ code_sample_py("task_allocation_sat")
code_sample_py("tasks_and_workers_assignment_sat")
+code_sample_py("test_scheduling_sat")
+
code_sample_py("tsp_sat")
code_sample_py("vendor_scheduling_sat")
diff --git a/examples/python/README.md b/examples/python/README.md
index 8acef41e29f..6973b819220 100644
--- a/examples/python/README.md
+++ b/examples/python/README.md
@@ -8,7 +8,7 @@
as this allows you to keep up-to-date with the latest Python frameworks.
Wherever you have `ortools` package installed, be sure to import it from your python file.
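
The arc bookkeeping in transitive_reduction_with_circuit_delays_and_penalties above is the part that is easy to get wrong: add_circuit requires every node to be either on the single cycle or closed off by a true self-loop, so each optional task carries a self-loop literal equal to its absence, and node 0 carries one for the all-absent case. A stripped-down sketch of just that construction (three tasks, no delays or penalties, illustrative names):

```python
from ortools.sat.python import cp_model

model = cp_model.CpModel()
num_tasks = 3
presences = [model.new_bool_var(f"p{i}") for i in range(num_tasks)]

arcs = []
for i in range(num_tasks):
    arcs.append((0, i + 1, model.new_bool_var(f"first_{i}")))  # task i is first
    arcs.append((i + 1, 0, model.new_bool_var(f"last_{i}")))   # task i is last
    arcs.append((i + 1, i + 1, ~presences[i]))  # absent task: true self-loop
    for j in range(num_tasks):
        if i != j:
            arcs.append((i + 1, j + 1, model.new_bool_var(f"{i}_then_{j}")))

# Node 0 loops on itself iff every task is absent (the empty circuit).
empty = model.new_bool_var("empty")
arcs.append((0, 0, empty))
for presence in presences:
    model.add_implication(empty, ~presence)

model.add_circuit(arcs)
solver = cp_model.CpSolver()
print(solver.status_name(solver.solve(model)))
```
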
# Execution -For running the examples you can use the the following command: +For running the examples you can use the following command: ```shell python3 -m pip install --upgrade --user ortools python3 .py diff --git a/examples/python/arc_flow_cutting_stock_sat.py b/examples/python/arc_flow_cutting_stock_sat.py index 60285ee810f..ee96dc47518 100644 --- a/examples/python/arc_flow_cutting_stock_sat.py +++ b/examples/python/arc_flow_cutting_stock_sat.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 # Copyright 2010-2024 Google LLC # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -10,43 +11,170 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -"""Cutting stock problem with the objective to minimize wasted space.""" +"""Cutting stock problem with the objective to minimize wasted space.""" import collections import time -import numpy as np from absl import app from absl import flags +import numpy as np + from google.protobuf import text_format from ortools.linear_solver.python import model_builder as mb from ortools.sat.python import cp_model -FLAGS = flags.FLAGS _OUTPUT_PROTO = flags.DEFINE_string( - 'output_proto', '', 'Output file to write the cp_model proto to.') + "output_proto", "", "Output file to write the cp_model proto to." +) _PARAMS = flags.DEFINE_string( - 'params', - 'num_search_workers:8,log_search_progress:true,max_time_in_seconds:10', - 'Sat solver parameters.') -_SOLVER = flags.DEFINE_string( - 'solver', 'sat', 'Method used to solve: sat, mip.') + "params", + "num_search_workers:8,log_search_progress:true,max_time_in_seconds:10", + "Sat solver parameters.", +) +_SOLVER = flags.DEFINE_string("solver", "sat", "Method used to solve: sat, mip.") DESIRED_LENGTHS = [ - 2490, 3980, 2490, 3980, 2391, 2391, 2391, 596, 596, 596, 2456, 2456, 3018, - 938, 3018, 938, 943, 3018, 943, 3018, 2490, 3980, 2490, 3980, 2391, 2391, - 2391, 596, 596, 596, 2456, 2456, 3018, 938, 3018, 938, 943, 3018, 943, - 3018, 2890, 3980, 2890, 3980, 2391, 2391, 2391, 596, 596, 596, 2856, 2856, - 3018, 938, 3018, 938, 943, 3018, 943, 3018, 3290, 3980, 3290, 3980, 2391, - 2391, 2391, 596, 596, 596, 3256, 3256, 3018, 938, 3018, 938, 943, 3018, - 943, 3018, 3690, 3980, 3690, 3980, 2391, 2391, 2391, 596, 596, 596, 3656, - 3656, 3018, 938, 3018, 938, 943, 3018, 943, 3018, 2790, 3980, 2790, 3980, - 2391, 2391, 2391, 596, 596, 596, 2756, 2756, 3018, 938, 3018, 938, 943, - 3018, 943, 3018, 2790, 3980, 2790, 3980, 2391, 2391, 2391, 596, 596, 596, - 2756, 2756, 3018, 938, 3018, 938, 943 + 2490, + 3980, + 2490, + 3980, + 2391, + 2391, + 2391, + 596, + 596, + 596, + 2456, + 2456, + 3018, + 938, + 3018, + 938, + 943, + 3018, + 943, + 3018, + 2490, + 3980, + 2490, + 3980, + 2391, + 2391, + 2391, + 596, + 596, + 596, + 2456, + 2456, + 3018, + 938, + 3018, + 938, + 943, + 3018, + 943, + 3018, + 2890, + 3980, + 2890, + 3980, + 2391, + 2391, + 2391, + 596, + 596, + 596, + 2856, + 2856, + 3018, + 938, + 3018, + 938, + 943, + 3018, + 943, + 3018, + 3290, + 3980, + 3290, + 3980, + 2391, + 2391, + 2391, + 596, + 596, + 596, + 3256, + 3256, + 3018, + 938, + 3018, + 938, + 943, + 3018, + 943, + 3018, + 3690, + 3980, + 3690, + 3980, + 2391, + 2391, + 2391, + 596, + 596, + 596, + 3656, + 3656, + 3018, + 938, + 3018, + 938, + 943, + 3018, + 943, + 3018, + 2790, + 3980, + 2790, + 3980, + 2391, + 2391, + 2391, + 596, + 596, + 596, + 
2756, + 2756, + 3018, + 938, + 3018, + 938, + 943, + 3018, + 943, + 3018, + 2790, + 3980, + 2790, + 3980, + 2391, + 2391, + 2391, + 596, + 596, + 596, + 2756, + 2756, + 3018, + 938, + 3018, + 938, + 943, ] POSSIBLE_CAPACITIES = [4000, 5000, 6000, 7000, 8000] @@ -96,7 +224,6 @@ def create_state_graph(items, max_capacity): new_state = current_state + size * (card + 1) if new_state > max_capacity: break - new_state_index = -1 if new_state in state_to_index: new_state_index = state_to_index[new_state] else: @@ -104,9 +231,9 @@ def create_state_graph(items, max_capacity): states.append(new_state) state_to_index[new_state] = new_state_index # Add the transition - transitions.append([ - current_state_index, new_state_index, item_index, card + 1 - ]) + transitions.append( + [current_state_index, new_state_index, item_index, card + 1] + ) return states, transitions @@ -114,14 +241,19 @@ def create_state_graph(items, max_capacity): def solve_cutting_stock_with_arc_flow_and_sat(output_proto_file: str, params: str): """Solve the cutting stock with arc-flow and the CP-SAT solver.""" items = regroup_and_count(DESIRED_LENGTHS) - print('Items:', items) + print("Items:", items) num_items = len(DESIRED_LENGTHS) max_capacity = max(POSSIBLE_CAPACITIES) states, transitions = create_state_graph(items, max_capacity) - print('Dynamic programming has generated', len(states), 'states and', - len(transitions), 'transitions') + print( + "Dynamic programming has generated", + len(states), + "states and", + len(transitions), + "transitions", + ) incoming_vars = collections.defaultdict(list) outgoing_vars = collections.defaultdict(list) @@ -139,8 +271,8 @@ def solve_cutting_stock_with_arc_flow_and_sat(output_proto_file: str, params: st count = items[item_index][1] max_count = count // card count_var = model.NewIntVar( - 0, max_count, - 'i%i_f%i_t%i_C%s' % (item_index, incoming, outgoing, card)) + 0, max_count, "i%i_f%i_t%i_C%s" % (item_index, incoming, outgoing, card) + ) incoming_vars[incoming].append(count_var) outgoing_vars[outgoing].append(count_var) item_vars[item_index].append(count_var) @@ -150,7 +282,7 @@ def solve_cutting_stock_with_arc_flow_and_sat(output_proto_file: str, params: st for state_index, state in enumerate(states): if state_index == 0: continue - exit_var = model.NewIntVar(0, num_items, 'e%i' % state_index) + exit_var = model.NewIntVar(0, num_items, "e%i" % state_index) outgoing_vars[state_index].append(exit_var) incoming_sink_vars.append(exit_var) price = price_usage(state, POSSIBLE_CAPACITIES) @@ -159,8 +291,7 @@ def solve_cutting_stock_with_arc_flow_and_sat(output_proto_file: str, params: st # Flow conservation for state_index in range(1, len(states)): - model.Add( - sum(incoming_vars[state_index]) == sum(outgoing_vars[state_index])) + model.Add(sum(incoming_vars[state_index]) == sum(outgoing_vars[state_index])) # Flow going out of the source must go in the sink model.Add(sum(outgoing_vars[0]) == sum(incoming_sink_vars)) @@ -169,13 +300,17 @@ def solve_cutting_stock_with_arc_flow_and_sat(output_proto_file: str, params: st for item_index, size_and_count in enumerate(items): num_arcs = len(item_vars[item_index]) model.Add( - sum(item_vars[item_index][i] * item_coeffs[item_index][i] - for i in range(num_arcs)) == size_and_count[1]) + sum( + item_vars[item_index][i] * item_coeffs[item_index][i] + for i in range(num_arcs) + ) + == size_and_count[1] + ) # Objective is the sum of waste model.Minimize( - sum(objective_vars[i] * objective_coeffs[i] - for i in range(len(objective_vars)))) + 
sum(objective_vars[i] * objective_coeffs[i] for i in range(len(objective_vars))) + ) # Output model proto to file. if output_proto_file: @@ -192,13 +327,18 @@ def solve_cutting_stock_with_arc_flow_and_sat(output_proto_file: str, params: st def solve_cutting_stock_with_arc_flow_and_mip(): """Solve the cutting stock with arc-flow and a MIP solver.""" items = regroup_and_count(DESIRED_LENGTHS) - print('Items:', items) + print("Items:", items) num_items = len(DESIRED_LENGTHS) max_capacity = max(POSSIBLE_CAPACITIES) states, transitions = create_state_graph(items, max_capacity) - print('Dynamic programming has generated', len(states), 'states and', - len(transitions), 'transitions') + print( + "Dynamic programming has generated", + len(states), + "states and", + len(transitions), + "transitions", + ) incoming_vars = collections.defaultdict(list) outgoing_vars = collections.defaultdict(list) @@ -216,8 +356,10 @@ def solve_cutting_stock_with_arc_flow_and_mip(): for outgoing, incoming, item_index, card in transitions: count = items[item_index][1] count_var = model.new_int_var( - 0, count, 'a%i_i%i_f%i_t%i_c%i' % (var_index, item_index, incoming, - outgoing, card)) + 0, + count, + "a%i_i%i_f%i_t%i_c%i" % (var_index, item_index, incoming, outgoing, card), + ) var_index += 1 incoming_vars[incoming].append(count_var) outgoing_vars[outgoing].append(count_var) @@ -227,7 +369,7 @@ def solve_cutting_stock_with_arc_flow_and_mip(): for state_index, state in enumerate(states): if state_index == 0: continue - exit_var = model.new_int_var(0, num_items, 'e%i' % state_index) + exit_var = model.new_int_var(0, num_items, "e%i" % state_index) outgoing_vars[state_index].append(exit_var) incoming_sink_vars.append(exit_var) price = price_usage(state, POSSIBLE_CAPACITIES) @@ -237,44 +379,52 @@ def solve_cutting_stock_with_arc_flow_and_mip(): # Flow conservation for state_index in range(1, len(states)): model.add( - mb.LinearExpr.sum(incoming_vars[state_index]) == mb.LinearExpr.sum( - outgoing_vars[state_index])) + mb.LinearExpr.sum(incoming_vars[state_index]) + == mb.LinearExpr.sum(outgoing_vars[state_index]) + ) # Flow going out of the source must go in the sink model.add( - mb.LinearExpr.sum(outgoing_vars[0]) == mb.LinearExpr.sum( - incoming_sink_vars)) + mb.LinearExpr.sum(outgoing_vars[0]) == mb.LinearExpr.sum(incoming_sink_vars) + ) # Items must be placed for item_index, size_and_count in enumerate(items): num_arcs = len(item_vars[item_index]) model.add( - mb.LinearExpr.sum([item_vars[item_index][i] * item_coeffs[item_index][i] - for i in range(num_arcs)]) == size_and_count[1]) + mb.LinearExpr.sum( + [ + item_vars[item_index][i] * item_coeffs[item_index][i] + for i in range(num_arcs) + ] + ) + == size_and_count[1] + ) # Objective is the sum of waste model.minimize(np.dot(objective_vars, objective_coeffs)) - solver = mb.ModelSolver('scip') + solver = mb.ModelSolver("scip") solver.enable_output(True) status = solver.solve(model) ### Output the solution. 
if status == mb.SolveStatus.OPTIMAL or status == mb.SolveStatus.FEASIBLE: - print('Objective value = %f found in %.2f s' % - (solver.objective_value, time.time() - start_time)) + print( + "Objective value = %f found in %.2f s" + % (solver.objective_value, time.time() - start_time) + ) else: - print('No solution') + print("No solution") def main(_): - """Main function""" - if _SOLVER.value == 'sat': - solve_cutting_stock_with_arc_flow_and_sat(_OUTPUT_PROTO.value, - _PARAMS.value) + """Main function.""" + if _SOLVER.value == "sat": + solve_cutting_stock_with_arc_flow_and_sat(_OUTPUT_PROTO.value, _PARAMS.value) else: # 'mip' solve_cutting_stock_with_arc_flow_and_mip() -if __name__ == '__main__': +if __name__ == "__main__": app.run(main) diff --git a/examples/python/assignment_with_constraints_sat.py b/examples/python/assignment_with_constraints_sat.py index e765aba5513..91c3a7f525d 100644 --- a/examples/python/assignment_with_constraints_sat.py +++ b/examples/python/assignment_with_constraints_sat.py @@ -42,24 +42,24 @@ def solve_assignment(): [0, 1, 0, 1], # Workers 1, 3 [0, 1, 1, 0], # Workers 1, 2 [1, 1, 0, 0], # Workers 0, 1 - [1, 0, 1, 0], - ] # Workers 0, 2 + [1, 0, 1, 0], # Workers 0, 2 + ] group2 = [ [0, 0, 1, 1], # Workers 6, 7 [0, 1, 0, 1], # Workers 5, 7 [0, 1, 1, 0], # Workers 5, 6 [1, 1, 0, 0], # Workers 4, 5 - [1, 0, 0, 1], - ] # Workers 4, 7 + [1, 0, 0, 1], # Workers 4, 7 + ] group3 = [ [0, 0, 1, 1], # Workers 10, 11 [0, 1, 0, 1], # Workers 9, 11 [0, 1, 1, 0], # Workers 9, 10 [1, 0, 1, 0], # Workers 8, 10 - [1, 0, 0, 1], - ] # Workers 8, 11 + [1, 0, 0, 1], # Workers 8, 11 + ] sizes = [10, 7, 3, 12, 15, 4, 11, 5] total_size_max = 15 @@ -73,10 +73,9 @@ def solve_assignment(): model = cp_model.CpModel() # Variables selected = [ - [model.new_bool_var("x[%i,%i]" % (i, j)) for j in all_tasks] - for i in all_workers + [model.new_bool_var(f"x[{i},{j}]") for j in all_tasks] for i in all_workers ] - works = [model.new_bool_var("works[%i]" % i) for i in all_workers] + works = [model.new_bool_var(f"works[{i}]") for i in all_workers] # Constraints @@ -107,21 +106,16 @@ def solve_assignment(): status = solver.solve(model) if status == cp_model.OPTIMAL: - print("Total cost = %i" % solver.objective_value) + print(f"Total cost = {solver.objective_value}") print() for i in all_workers: for j in all_tasks: if solver.boolean_value(selected[i][j]): - print( - "Worker ", i, " assigned to task ", j, " Cost = ", cost[i][j] - ) + print(f"Worker {i} assigned to task {j} with Cost = {cost[i][j]}") print() - print("Statistics") - print(" - conflicts : %i" % solver.num_conflicts) - print(" - branches : %i" % solver.num_branches) - print(" - wall time : %f s" % solver.wall_time) + print(solver.response_stats()) def main(argv: Sequence[str]) -> None: diff --git a/examples/python/balance_group_sat.py b/examples/python/balance_group_sat.py index 69b9ec1838c..fbd86ac5831 100644 --- a/examples/python/balance_group_sat.py +++ b/examples/python/balance_group_sat.py @@ -19,8 +19,10 @@ Furthermore, if one color is an a group, at least k items with this color must be in that group. 
""" -from typing import Sequence +from typing import Dict, Sequence + from absl import app + from ortools.sat.python import cp_model @@ -38,10 +40,10 @@ def __init__(self, values, colors, all_groups, all_items, item_in_group): self.__item_in_group = item_in_group def on_solution_callback(self): - print("Solution %i" % self.__solution_count) + print(f"Solution {self.__solution_count}") self.__solution_count += 1 - print(" objective value = %i" % self.objective_value) + print(f" objective value = {self.objective_value}") groups = {} sums = {} for g in self.__all_groups: @@ -54,11 +56,11 @@ def on_solution_callback(self): for g in self.__all_groups: group = groups[g] - print("group %i: sum = %0.2f [" % (g, sums[g]), end="") + print(f"group {g}: sum = {sums[g]:0.2f} [", end="") for item in group: value = self.__values[item] color = self.__colors[item] - print(" (%i, %i, %i)" % (item, value, color), end="") + print(f" ({item}, {value}, {color})", end="") print("]") @@ -88,18 +90,17 @@ def main(argv: Sequence[str]) -> None: num_items_per_group = num_items // num_groups # Collect all items in a given color. - items_per_color = {} - for c in all_colors: - items_per_color[c] = [] + items_per_color: Dict[int, list[int]] = {} + for color in all_colors: + items_per_color[color] = [] for i in all_items: - if colors[i] == c: - items_per_color[c].append(i) + if colors[i] == color: + items_per_color[color].append(i) print( - "Model has %i items, %i groups, and %i colors" - % (num_items, num_groups, num_colors) + f"Model has {num_items} items, {num_groups} groups, and" f" {num_colors} colors" ) - print(" average sum per group = %i" % average_sum_per_group) + print(f" average sum per group = {average_sum_per_group}") # Model. @@ -108,7 +109,7 @@ def main(argv: Sequence[str]) -> None: item_in_group = {} for i in all_items: for g in all_groups: - item_in_group[(i, g)] = model.new_bool_var("item %d in group %d" % (i, g)) + item_in_group[(i, g)] = model.new_bool_var(f"item {i} in group {g}") # Each group must have the same size. for g in all_groups: @@ -136,9 +137,7 @@ def main(argv: Sequence[str]) -> None: color_in_group = {} for g in all_groups: for c in all_colors: - color_in_group[(c, g)] = model.new_bool_var( - "color %d is in group %d" % (c, g) - ) + color_in_group[(c, g)] = model.new_bool_var(f"color {c} is in group {g}") # Item is in a group implies its color is in that group. 
for i in all_items: @@ -175,11 +174,8 @@ def main(argv: Sequence[str]) -> None: status = solver.solve(model, solution_printer) if status == cp_model.OPTIMAL: - print("Optimal epsilon: %i" % solver.objective_value) - print("Statistics") - print(" - conflicts : %i" % solver.num_conflicts) - print(" - branches : %i" % solver.num_branches) - print(" - wall time : %f s" % solver.wall_time) + print(f"Optimal epsilon: {solver.objective_value}") + print(solver.response_stats()) else: print("No solution found") diff --git a/examples/python/bus_driver_scheduling_flow_sat.py b/examples/python/bus_driver_scheduling_flow_sat.py index 1da2d35f872..d012bbb2034 100644 --- a/examples/python/bus_driver_scheduling_flow_sat.py +++ b/examples/python/bus_driver_scheduling_flow_sat.py @@ -29,14 +29,13 @@ from ortools.sat.python import cp_model PARSER = argparse.ArgumentParser() +PARSER.add_argument("--instance", default=1, type=int, help="Instance number (1..3).") PARSER.add_argument( - '--instance', default=1, type=int, help='Instance number (1..3).') -PARSER.add_argument( - '--output_proto_file', + "--output_proto_file", default="", - help='Output file to write the cp_model' - 'proto to.') -PARSER.add_argument('--params', default="", help='Sat solver parameters.') + help="Output file to write the cp_model proto to.", +) +PARSER.add_argument("--params", default="", help="Sat solver parameters.") SAMPLE_SHIFTS_SMALL = [ # @@ -48,1618 +47,1618 @@ # - shift end minute # - shift duration in minutes # - [0, '05:18', '06:00', 318, 360, 42], - [1, '05:26', '06:08', 326, 368, 42], - [2, '05:40', '05:56', 340, 356, 16], - [3, '06:06', '06:51', 366, 411, 45], - [4, '06:40', '07:52', 400, 472, 72], - [5, '06:42', '07:13', 402, 433, 31], - [6, '06:48', '08:15', 408, 495, 87], - [7, '06:59', '08:07', 419, 487, 68], - [8, '07:20', '07:36', 440, 456, 16], - [9, '07:35', '08:22', 455, 502, 47], - [10, '07:50', '08:55', 470, 535, 65], - [11, '08:00', '09:05', 480, 545, 65], - [12, '08:00', '08:35', 480, 515, 35], - [13, '08:11', '09:41', 491, 581, 90], - [14, '08:28', '08:50', 508, 530, 22], - [15, '08:35', '08:45', 515, 525, 10], - [16, '08:40', '08:50', 520, 530, 10], - [17, '09:03', '10:28', 543, 628, 85], - [18, '09:23', '09:49', 563, 589, 26], - [19, '09:30', '09:40', 570, 580, 10], - [20, '09:57', '10:20', 597, 620, 23], - [21, '10:09', '11:03', 609, 663, 54], - [22, '10:20', '10:30', 620, 630, 10], - [23, '11:00', '11:10', 660, 670, 10], - [24, '11:45', '12:24', 705, 744, 39], - [25, '12:18', '13:00', 738, 780, 42], - [26, '13:18', '14:44', 798, 884, 86], - [27, '13:53', '14:49', 833, 889, 56], - [28, '14:03', '14:50', 843, 890, 47], - [29, '14:28', '15:15', 868, 915, 47], - [30, '14:30', '15:41', 870, 941, 71], - [31, '14:48', '15:35', 888, 935, 47], - [32, '15:03', '15:50', 903, 950, 47], - [33, '15:28', '16:54', 928, 1014, 86], - [34, '15:38', '16:25', 938, 985, 47], - [35, '15:40', '15:56', 940, 956, 16], - [36, '15:58', '16:45', 958, 1005, 47], - [37, '16:04', '17:30', 964, 1050, 86], - [38, '16:28', '17:15', 988, 1035, 47], - [39, '16:36', '17:21', 996, 1041, 45], - [40, '16:50', '17:00', 1010, 1020, 10], - [41, '16:54', '18:20', 1014, 1100, 86], - [42, '17:01', '17:13', 1021, 1033, 12], - [43, '17:19', '18:31', 1039, 1111, 72], - [44, '17:23', '18:10', 1043, 1090, 47], - [45, '17:34', '18:15', 1054, 1095, 41], - [46, '18:04', '19:29', 1084, 1169, 85], - [47, '18:34', '19:58', 1114, 1198, 84], - [48, '19:56', '20:34', 1196, 1234, 38], - [49, '20:05', '20:48', 1205, 1248, 43] + [0, "05:18", "06:00", 318, 360, 42],
+ [1, "05:26", "06:08", 326, 368, 42], + [2, "05:40", "05:56", 340, 356, 16], + [3, "06:06", "06:51", 366, 411, 45], + [4, "06:40", "07:52", 400, 472, 72], + [5, "06:42", "07:13", 402, 433, 31], + [6, "06:48", "08:15", 408, 495, 87], + [7, "06:59", "08:07", 419, 487, 68], + [8, "07:20", "07:36", 440, 456, 16], + [9, "07:35", "08:22", 455, 502, 47], + [10, "07:50", "08:55", 470, 535, 65], + [11, "08:00", "09:05", 480, 545, 65], + [12, "08:00", "08:35", 480, 515, 35], + [13, "08:11", "09:41", 491, 581, 90], + [14, "08:28", "08:50", 508, 530, 22], + [15, "08:35", "08:45", 515, 525, 10], + [16, "08:40", "08:50", 520, 530, 10], + [17, "09:03", "10:28", 543, 628, 85], + [18, "09:23", "09:49", 563, 589, 26], + [19, "09:30", "09:40", 570, 580, 10], + [20, "09:57", "10:20", 597, 620, 23], + [21, "10:09", "11:03", 609, 663, 54], + [22, "10:20", "10:30", 620, 630, 10], + [23, "11:00", "11:10", 660, 670, 10], + [24, "11:45", "12:24", 705, 744, 39], + [25, "12:18", "13:00", 738, 780, 42], + [26, "13:18", "14:44", 798, 884, 86], + [27, "13:53", "14:49", 833, 889, 56], + [28, "14:03", "14:50", 843, 890, 47], + [29, "14:28", "15:15", 868, 915, 47], + [30, "14:30", "15:41", 870, 941, 71], + [31, "14:48", "15:35", 888, 935, 47], + [32, "15:03", "15:50", 903, 950, 47], + [33, "15:28", "16:54", 928, 1014, 86], + [34, "15:38", "16:25", 938, 985, 47], + [35, "15:40", "15:56", 940, 956, 16], + [36, "15:58", "16:45", 958, 1005, 47], + [37, "16:04", "17:30", 964, 1050, 86], + [38, "16:28", "17:15", 988, 1035, 47], + [39, "16:36", "17:21", 996, 1041, 45], + [40, "16:50", "17:00", 1010, 1020, 10], + [41, "16:54", "18:20", 1014, 1100, 86], + [42, "17:01", "17:13", 1021, 1033, 12], + [43, "17:19", "18:31", 1039, 1111, 72], + [44, "17:23", "18:10", 1043, 1090, 47], + [45, "17:34", "18:15", 1054, 1095, 41], + [46, "18:04", "19:29", 1084, 1169, 85], + [47, "18:34", "19:58", 1114, 1198, 84], + [48, "19:56", "20:34", 1196, 1234, 38], + [49, "20:05", "20:48", 1205, 1248, 43], ] # yapf:disable SAMPLE_SHIFTS_MEDIUM = [ - [0, '04:30', '04:53', 270, 293, 23], - [1, '04:46', '04:56', 286, 296, 10], - [2, '04:52', '05:56', 292, 356, 64], - [3, '04:53', '05:23', 293, 323, 30], - [4, '05:07', '05:44', 307, 344, 37], - [5, '05:10', '06:06', 310, 366, 56], - [6, '05:18', '06:03', 318, 363, 45], - [7, '05:30', '05:40', 330, 340, 10], - [8, '05:30', '05:40', 330, 340, 10], - [9, '05:33', '06:15', 333, 375, 42], - [10, '05:40', '05:50', 340, 350, 10], - [11, '05:43', '06:08', 343, 368, 25], - [12, '05:54', '07:20', 354, 440, 86], - [13, '06:04', '06:37', 364, 397, 33], - [14, '06:13', '06:58', 373, 418, 45], - [15, '06:14', '07:40', 374, 460, 86], - [16, '06:15', '07:15', 375, 435, 60], - [17, '06:16', '06:26', 376, 386, 10], - [18, '06:17', '06:34', 377, 394, 17], - [19, '06:20', '06:36', 380, 396, 16], - [20, '06:22', '07:06', 382, 426, 44], - [21, '06:24', '07:50', 384, 470, 86], - [22, '06:27', '06:44', 387, 404, 17], - [23, '06:30', '06:40', 390, 400, 10], - [24, '06:31', '06:43', 391, 403, 12], - [25, '06:33', '07:53', 393, 473, 80], - [26, '06:34', '07:09', 394, 429, 35], - [27, '06:40', '06:56', 400, 416, 16], - [28, '06:44', '07:17', 404, 437, 33], - [29, '06:46', '06:58', 406, 418, 12], - [30, '06:49', '07:43', 409, 463, 54], - [31, '06:50', '07:05', 410, 425, 15], - [32, '06:52', '07:36', 412, 456, 44], - [33, '06:54', '07:27', 414, 447, 33], - [34, '06:56', '08:23', 416, 503, 87], - [35, '07:04', '07:44', 424, 464, 40], - [36, '07:11', '08:36', 431, 516, 85], - [37, '07:17', '07:35', 437, 455, 18], - [38, '07:22', '08:06', 
442, 486, 44], - [39, '07:27', '08:15', 447, 495, 48], - [40, '07:35', '07:45', 455, 465, 10], - [41, '07:43', '08:08', 463, 488, 25], - [42, '07:50', '08:37', 470, 517, 47], - [43, '07:58', '08:45', 478, 525, 47], - [44, '08:00', '08:35', 480, 515, 35], - [45, '08:06', '08:51', 486, 531, 45], - [46, '08:10', '08:45', 490, 525, 35], - [47, '08:15', '08:30', 495, 510, 15], - [48, '08:16', '09:00', 496, 540, 44], - [49, '08:18', '09:16', 498, 556, 58], - [50, '08:20', '08:36', 500, 516, 16], - [51, '08:27', '09:07', 507, 547, 40], - [52, '08:30', '08:45', 510, 525, 15], - [53, '08:35', '09:15', 515, 555, 40], - [54, '08:46', '09:30', 526, 570, 44], - [55, '08:51', '09:17', 531, 557, 26], - [56, '08:55', '09:15', 535, 555, 20], - [57, '08:58', '09:38', 538, 578, 40], - [58, '09:00', '09:35', 540, 575, 35], - [59, '09:00', '09:16', 540, 556, 16], - [60, '09:20', '09:36', 560, 576, 16], - [61, '09:31', '09:43', 571, 583, 12], - [62, '09:33', '10:15', 573, 615, 42], - [63, '09:54', '10:05', 594, 605, 11], - [64, '10:11', '10:38', 611, 638, 27], - [65, '10:18', '11:00', 618, 660, 42], - [66, '10:21', '10:47', 621, 647, 26], - [67, '10:25', '11:04', 625, 664, 39], - [68, '10:26', '11:08', 626, 668, 42], - [69, '10:44', '12:11', 644, 731, 87], - [70, '11:00', '11:16', 660, 676, 16], - [71, '11:15', '11:54', 675, 714, 39], - [72, '11:16', '11:28', 676, 688, 12], - [73, '11:20', '11:30', 680, 690, 10], - [74, '11:21', '11:47', 681, 707, 26], - [75, '11:25', '12:04', 685, 724, 39], - [76, '11:34', '11:45', 694, 705, 11], - [77, '11:35', '12:14', 695, 734, 39], - [78, '11:41', '12:23', 701, 743, 42], - [79, '11:44', '12:35', 704, 755, 51], - [80, '11:46', '11:58', 706, 718, 12], - [81, '12:00', '12:10', 720, 730, 10], - [82, '12:04', '12:15', 724, 735, 11], - [83, '12:04', '13:04', 724, 784, 60], - [84, '12:11', '12:38', 731, 758, 27], - [85, '12:15', '12:54', 735, 774, 39], - [86, '12:25', '13:10', 745, 790, 45], - [87, '12:30', '12:40', 750, 760, 10], - [88, '12:34', '13:58', 754, 838, 84], - [89, '12:38', '13:25', 758, 805, 47], - [90, '12:48', '13:35', 768, 815, 47], - [91, '13:00', '13:16', 780, 796, 16], - [92, '13:05', '13:44', 785, 824, 39], - [93, '13:08', '13:55', 788, 835, 47], - [94, '13:14', '14:38', 794, 878, 84], - [95, '13:23', '13:49', 803, 829, 26], - [96, '13:25', '14:04', 805, 844, 39], - [97, '13:28', '14:54', 808, 894, 86], - [98, '13:31', '13:43', 811, 823, 12], - [99, '13:34', '14:58', 814, 898, 84], - [100, '13:38', '14:25', 818, 865, 47], - [101, '13:38', '15:04', 818, 904, 86], - [102, '13:39', '14:33', 819, 873, 54], - [103, '13:40', '13:50', 820, 830, 10], - [104, '13:43', '14:10', 823, 850, 27], - [105, '13:48', '14:35', 828, 875, 47], - [106, '13:48', '14:35', 828, 875, 47], - [107, '13:53', '14:40', 833, 880, 47], - [108, '13:58', '15:24', 838, 924, 86], - [109, '13:58', '14:25', 838, 865, 27], - [110, '14:00', '14:16', 840, 856, 16], - [111, '14:13', '15:00', 853, 900, 47], - [112, '14:20', '15:31', 860, 931, 71], - [113, '14:25', '15:02', 865, 902, 37], - [114, '14:34', '14:45', 874, 885, 11], - [115, '14:40', '15:51', 880, 951, 71], - [116, '14:40', '14:56', 880, 896, 16], - [117, '14:46', '14:58', 886, 898, 12], - [118, '14:49', '15:43', 889, 943, 54], - [119, '14:52', '15:21', 892, 921, 29], - [120, '14:58', '16:24', 898, 984, 86], - [121, '14:59', '15:53', 899, 953, 54], - [122, '15:00', '15:10', 900, 910, 10], - [123, '15:00', '15:35', 900, 935, 35], - [124, '15:08', '15:45', 908, 945, 37], - [125, '15:12', '15:36', 912, 936, 24], - [126, '15:18', '16:05', 918, 
965, 47], - [127, '15:24', '16:05', 924, 965, 41], - [128, '15:31', '15:43', 931, 943, 12], - [129, '15:35', '15:54', 935, 954, 19], - [130, '15:36', '16:21', 936, 981, 45], - [131, '15:39', '16:33', 939, 993, 54], - [132, '15:48', '16:35', 948, 995, 47], - [133, '15:50', '17:01', 950, 1021, 71], - [134, '16:03', '16:50', 963, 1010, 47], - [135, '16:18', '17:44', 978, 1064, 86], - [136, '16:24', '17:05', 984, 1025, 41], - [137, '16:28', '17:15', 988, 1035, 47], - [138, '16:34', '17:15', 994, 1035, 41], - [139, '16:38', '17:25', 998, 1045, 47], - [140, '16:40', '16:56', 1000, 1016, 16], - [141, '16:45', '17:04', 1005, 1024, 19], - [142, '16:52', '17:36', 1012, 1056, 44], - [143, '16:58', '17:45', 1018, 1065, 47], - [144, '17:04', '18:30', 1024, 1110, 86], - [145, '17:04', '17:45', 1024, 1065, 41], - [146, '17:09', '18:03', 1029, 1083, 54], - [147, '17:18', '18:44', 1038, 1124, 86], - [148, '17:28', '18:15', 1048, 1095, 47], - [149, '17:29', '18:41', 1049, 1121, 72], - [150, '17:36', '18:21', 1056, 1101, 45], - [151, '17:38', '18:25', 1058, 1105, 47], - [152, '17:40', '17:56', 1060, 1076, 16], - [153, '17:45', '18:04', 1065, 1084, 19], - [154, '17:46', '17:58', 1066, 1078, 12], - [155, '17:48', '18:35', 1068, 1115, 47], - [156, '17:49', '18:43', 1069, 1123, 54], - [157, '17:55', '18:14', 1075, 1094, 19], - [158, '17:58', '18:45', 1078, 1125, 47], - [159, '18:00', '19:11', 1080, 1151, 71], - [160, '18:04', '18:45', 1084, 1125, 41], - [161, '18:09', '19:03', 1089, 1143, 54], - [162, '18:13', '19:00', 1093, 1140, 47], - [163, '18:13', '18:40', 1093, 1120, 27], - [164, '18:19', '19:13', 1099, 1153, 54], - [165, '18:28', '19:25', 1108, 1165, 57], - [166, '18:48', '19:28', 1128, 1168, 40], - [167, '19:03', '19:45', 1143, 1185, 42], - [168, '19:20', '19:36', 1160, 1176, 16], - [169, '19:21', '19:31', 1161, 1171, 10], - [170, '19:25', '20:04', 1165, 1204, 39], - [171, '19:26', '20:08', 1166, 1208, 42], - [172, '19:30', '19:40', 1170, 1180, 10], - [173, '19:44', '20:33', 1184, 1233, 49], - [174, '19:48', '21:09', 1188, 1269, 81], - [175, '19:53', '21:02', 1193, 1262, 69], - [176, '20:04', '20:29', 1204, 1229, 25], - [177, '20:17', '21:03', 1217, 1263, 46], - [178, '20:20', '20:57', 1220, 1257, 37], - [179, '20:29', '21:18', 1229, 1278, 49], - [180, '20:35', '21:54', 1235, 1314, 79], - [181, '20:40', '20:50', 1240, 1250, 10], - [182, '20:47', '21:42', 1247, 1302, 55], - [183, '21:00', '21:10', 1260, 1270, 10], - [184, '21:07', '21:44', 1267, 1304, 37], - [185, '21:14', '22:03', 1274, 1323, 49], - [186, '21:39', '21:55', 1299, 1315, 16], - [187, '21:40', '22:17', 1300, 1337, 37], - [188, '21:40', '21:50', 1300, 1310, 10], - [189, '21:48', '22:03', 1308, 1323, 15], - [190, '22:17', '23:03', 1337, 1383, 46], - [191, '22:43', '23:08', 1363, 1388, 25], - [192, '23:35', '01:05', 1415, 1505, 90], - [193, '23:46', '00:01', 1426, 1441, 15], - [194, '23:47', '00:33', 1427, 1473, 46], - [195, '23:52', '00:24', 1432, 1464, 32], - [196, '23:58', '00:38', 1438, 1478, 40], - [197, '00:02', '00:12', 1442, 1452, 10], - [198, '00:07', '00:39', 1447, 1479, 32], - [199, '00:25', '01:12', 1465, 1512, 47] + [0, "04:30", "04:53", 270, 293, 23], + [1, "04:46", "04:56", 286, 296, 10], + [2, "04:52", "05:56", 292, 356, 64], + [3, "04:53", "05:23", 293, 323, 30], + [4, "05:07", "05:44", 307, 344, 37], + [5, "05:10", "06:06", 310, 366, 56], + [6, "05:18", "06:03", 318, 363, 45], + [7, "05:30", "05:40", 330, 340, 10], + [8, "05:30", "05:40", 330, 340, 10], + [9, "05:33", "06:15", 333, 375, 42], + [10, "05:40", "05:50", 340, 350, 
10], + [11, "05:43", "06:08", 343, 368, 25], + [12, "05:54", "07:20", 354, 440, 86], + [13, "06:04", "06:37", 364, 397, 33], + [14, "06:13", "06:58", 373, 418, 45], + [15, "06:14", "07:40", 374, 460, 86], + [16, "06:15", "07:15", 375, 435, 60], + [17, "06:16", "06:26", 376, 386, 10], + [18, "06:17", "06:34", 377, 394, 17], + [19, "06:20", "06:36", 380, 396, 16], + [20, "06:22", "07:06", 382, 426, 44], + [21, "06:24", "07:50", 384, 470, 86], + [22, "06:27", "06:44", 387, 404, 17], + [23, "06:30", "06:40", 390, 400, 10], + [24, "06:31", "06:43", 391, 403, 12], + [25, "06:33", "07:53", 393, 473, 80], + [26, "06:34", "07:09", 394, 429, 35], + [27, "06:40", "06:56", 400, 416, 16], + [28, "06:44", "07:17", 404, 437, 33], + [29, "06:46", "06:58", 406, 418, 12], + [30, "06:49", "07:43", 409, 463, 54], + [31, "06:50", "07:05", 410, 425, 15], + [32, "06:52", "07:36", 412, 456, 44], + [33, "06:54", "07:27", 414, 447, 33], + [34, "06:56", "08:23", 416, 503, 87], + [35, "07:04", "07:44", 424, 464, 40], + [36, "07:11", "08:36", 431, 516, 85], + [37, "07:17", "07:35", 437, 455, 18], + [38, "07:22", "08:06", 442, 486, 44], + [39, "07:27", "08:15", 447, 495, 48], + [40, "07:35", "07:45", 455, 465, 10], + [41, "07:43", "08:08", 463, 488, 25], + [42, "07:50", "08:37", 470, 517, 47], + [43, "07:58", "08:45", 478, 525, 47], + [44, "08:00", "08:35", 480, 515, 35], + [45, "08:06", "08:51", 486, 531, 45], + [46, "08:10", "08:45", 490, 525, 35], + [47, "08:15", "08:30", 495, 510, 15], + [48, "08:16", "09:00", 496, 540, 44], + [49, "08:18", "09:16", 498, 556, 58], + [50, "08:20", "08:36", 500, 516, 16], + [51, "08:27", "09:07", 507, 547, 40], + [52, "08:30", "08:45", 510, 525, 15], + [53, "08:35", "09:15", 515, 555, 40], + [54, "08:46", "09:30", 526, 570, 44], + [55, "08:51", "09:17", 531, 557, 26], + [56, "08:55", "09:15", 535, 555, 20], + [57, "08:58", "09:38", 538, 578, 40], + [58, "09:00", "09:35", 540, 575, 35], + [59, "09:00", "09:16", 540, 556, 16], + [60, "09:20", "09:36", 560, 576, 16], + [61, "09:31", "09:43", 571, 583, 12], + [62, "09:33", "10:15", 573, 615, 42], + [63, "09:54", "10:05", 594, 605, 11], + [64, "10:11", "10:38", 611, 638, 27], + [65, "10:18", "11:00", 618, 660, 42], + [66, "10:21", "10:47", 621, 647, 26], + [67, "10:25", "11:04", 625, 664, 39], + [68, "10:26", "11:08", 626, 668, 42], + [69, "10:44", "12:11", 644, 731, 87], + [70, "11:00", "11:16", 660, 676, 16], + [71, "11:15", "11:54", 675, 714, 39], + [72, "11:16", "11:28", 676, 688, 12], + [73, "11:20", "11:30", 680, 690, 10], + [74, "11:21", "11:47", 681, 707, 26], + [75, "11:25", "12:04", 685, 724, 39], + [76, "11:34", "11:45", 694, 705, 11], + [77, "11:35", "12:14", 695, 734, 39], + [78, "11:41", "12:23", 701, 743, 42], + [79, "11:44", "12:35", 704, 755, 51], + [80, "11:46", "11:58", 706, 718, 12], + [81, "12:00", "12:10", 720, 730, 10], + [82, "12:04", "12:15", 724, 735, 11], + [83, "12:04", "13:04", 724, 784, 60], + [84, "12:11", "12:38", 731, 758, 27], + [85, "12:15", "12:54", 735, 774, 39], + [86, "12:25", "13:10", 745, 790, 45], + [87, "12:30", "12:40", 750, 760, 10], + [88, "12:34", "13:58", 754, 838, 84], + [89, "12:38", "13:25", 758, 805, 47], + [90, "12:48", "13:35", 768, 815, 47], + [91, "13:00", "13:16", 780, 796, 16], + [92, "13:05", "13:44", 785, 824, 39], + [93, "13:08", "13:55", 788, 835, 47], + [94, "13:14", "14:38", 794, 878, 84], + [95, "13:23", "13:49", 803, 829, 26], + [96, "13:25", "14:04", 805, 844, 39], + [97, "13:28", "14:54", 808, 894, 86], + [98, "13:31", "13:43", 811, 823, 12], + [99, "13:34", "14:58", 814, 
898, 84], + [100, "13:38", "14:25", 818, 865, 47], + [101, "13:38", "15:04", 818, 904, 86], + [102, "13:39", "14:33", 819, 873, 54], + [103, "13:40", "13:50", 820, 830, 10], + [104, "13:43", "14:10", 823, 850, 27], + [105, "13:48", "14:35", 828, 875, 47], + [106, "13:48", "14:35", 828, 875, 47], + [107, "13:53", "14:40", 833, 880, 47], + [108, "13:58", "15:24", 838, 924, 86], + [109, "13:58", "14:25", 838, 865, 27], + [110, "14:00", "14:16", 840, 856, 16], + [111, "14:13", "15:00", 853, 900, 47], + [112, "14:20", "15:31", 860, 931, 71], + [113, "14:25", "15:02", 865, 902, 37], + [114, "14:34", "14:45", 874, 885, 11], + [115, "14:40", "15:51", 880, 951, 71], + [116, "14:40", "14:56", 880, 896, 16], + [117, "14:46", "14:58", 886, 898, 12], + [118, "14:49", "15:43", 889, 943, 54], + [119, "14:52", "15:21", 892, 921, 29], + [120, "14:58", "16:24", 898, 984, 86], + [121, "14:59", "15:53", 899, 953, 54], + [122, "15:00", "15:10", 900, 910, 10], + [123, "15:00", "15:35", 900, 935, 35], + [124, "15:08", "15:45", 908, 945, 37], + [125, "15:12", "15:36", 912, 936, 24], + [126, "15:18", "16:05", 918, 965, 47], + [127, "15:24", "16:05", 924, 965, 41], + [128, "15:31", "15:43", 931, 943, 12], + [129, "15:35", "15:54", 935, 954, 19], + [130, "15:36", "16:21", 936, 981, 45], + [131, "15:39", "16:33", 939, 993, 54], + [132, "15:48", "16:35", 948, 995, 47], + [133, "15:50", "17:01", 950, 1021, 71], + [134, "16:03", "16:50", 963, 1010, 47], + [135, "16:18", "17:44", 978, 1064, 86], + [136, "16:24", "17:05", 984, 1025, 41], + [137, "16:28", "17:15", 988, 1035, 47], + [138, "16:34", "17:15", 994, 1035, 41], + [139, "16:38", "17:25", 998, 1045, 47], + [140, "16:40", "16:56", 1000, 1016, 16], + [141, "16:45", "17:04", 1005, 1024, 19], + [142, "16:52", "17:36", 1012, 1056, 44], + [143, "16:58", "17:45", 1018, 1065, 47], + [144, "17:04", "18:30", 1024, 1110, 86], + [145, "17:04", "17:45", 1024, 1065, 41], + [146, "17:09", "18:03", 1029, 1083, 54], + [147, "17:18", "18:44", 1038, 1124, 86], + [148, "17:28", "18:15", 1048, 1095, 47], + [149, "17:29", "18:41", 1049, 1121, 72], + [150, "17:36", "18:21", 1056, 1101, 45], + [151, "17:38", "18:25", 1058, 1105, 47], + [152, "17:40", "17:56", 1060, 1076, 16], + [153, "17:45", "18:04", 1065, 1084, 19], + [154, "17:46", "17:58", 1066, 1078, 12], + [155, "17:48", "18:35", 1068, 1115, 47], + [156, "17:49", "18:43", 1069, 1123, 54], + [157, "17:55", "18:14", 1075, 1094, 19], + [158, "17:58", "18:45", 1078, 1125, 47], + [159, "18:00", "19:11", 1080, 1151, 71], + [160, "18:04", "18:45", 1084, 1125, 41], + [161, "18:09", "19:03", 1089, 1143, 54], + [162, "18:13", "19:00", 1093, 1140, 47], + [163, "18:13", "18:40", 1093, 1120, 27], + [164, "18:19", "19:13", 1099, 1153, 54], + [165, "18:28", "19:25", 1108, 1165, 57], + [166, "18:48", "19:28", 1128, 1168, 40], + [167, "19:03", "19:45", 1143, 1185, 42], + [168, "19:20", "19:36", 1160, 1176, 16], + [169, "19:21", "19:31", 1161, 1171, 10], + [170, "19:25", "20:04", 1165, 1204, 39], + [171, "19:26", "20:08", 1166, 1208, 42], + [172, "19:30", "19:40", 1170, 1180, 10], + [173, "19:44", "20:33", 1184, 1233, 49], + [174, "19:48", "21:09", 1188, 1269, 81], + [175, "19:53", "21:02", 1193, 1262, 69], + [176, "20:04", "20:29", 1204, 1229, 25], + [177, "20:17", "21:03", 1217, 1263, 46], + [178, "20:20", "20:57", 1220, 1257, 37], + [179, "20:29", "21:18", 1229, 1278, 49], + [180, "20:35", "21:54", 1235, 1314, 79], + [181, "20:40", "20:50", 1240, 1250, 10], + [182, "20:47", "21:42", 1247, 1302, 55], + [183, "21:00", "21:10", 1260, 1270, 10], + 
[184, "21:07", "21:44", 1267, 1304, 37], + [185, "21:14", "22:03", 1274, 1323, 49], + [186, "21:39", "21:55", 1299, 1315, 16], + [187, "21:40", "22:17", 1300, 1337, 37], + [188, "21:40", "21:50", 1300, 1310, 10], + [189, "21:48", "22:03", 1308, 1323, 15], + [190, "22:17", "23:03", 1337, 1383, 46], + [191, "22:43", "23:08", 1363, 1388, 25], + [192, "23:35", "01:05", 1415, 1505, 90], + [193, "23:46", "00:01", 1426, 1441, 15], + [194, "23:47", "00:33", 1427, 1473, 46], + [195, "23:52", "00:24", 1432, 1464, 32], + [196, "23:58", "00:38", 1438, 1478, 40], + [197, "00:02", "00:12", 1442, 1452, 10], + [198, "00:07", "00:39", 1447, 1479, 32], + [199, "00:25", "01:12", 1465, 1512, 47], ] # yapf:disable SAMPLE_SHIFTS_LARGE = [ - [0, '04:18', '05:00', 258, 300, 42], - [1, '04:27', '05:08', 267, 308, 41], - [2, '04:29', '05:26', 269, 326, 57], - [3, '04:29', '04:55', 269, 295, 26], - [4, '04:30', '04:53', 270, 293, 23], - [5, '04:30', '04:51', 270, 291, 21], - [6, '04:31', '04:53', 271, 293, 22], - [7, '04:33', '05:15', 273, 315, 42], - [8, '04:34', '04:44', 274, 284, 10], - [9, '04:34', '05:03', 274, 303, 29], - [10, '04:35', '04:50', 275, 290, 15], - [11, '04:36', '04:46', 276, 286, 10], - [12, '04:37', '05:18', 277, 318, 41], - [13, '04:41', '05:13', 281, 313, 32], - [14, '04:42', '05:23', 282, 323, 41], - [15, '04:43', '04:53', 283, 293, 10], - [16, '04:44', '05:45', 284, 345, 61], - [17, '04:45', '05:11', 285, 311, 26], - [18, '04:46', '05:01', 286, 301, 15], - [19, '04:46', '04:56', 286, 296, 10], - [20, '04:47', '05:14', 287, 314, 27], - [21, '04:48', '05:30', 288, 330, 42], - [22, '04:49', '05:41', 289, 341, 52], - [23, '04:49', '05:18', 289, 318, 29], - [24, '04:50', '05:33', 290, 333, 43], - [25, '04:52', '05:56', 292, 356, 64], - [26, '04:52', '05:07', 292, 307, 15], - [27, '04:53', '05:19', 293, 319, 26], - [28, '04:53', '05:23', 293, 323, 30], - [29, '04:55', '05:27', 295, 327, 32], - [30, '04:57', '05:38', 297, 338, 41], - [31, '05:00', '06:00', 300, 360, 60], - [32, '05:00', '05:54', 300, 354, 54], - [33, '05:01', '05:33', 301, 333, 32], - [34, '05:01', '05:26', 301, 326, 25], - [35, '05:02', '05:29', 302, 329, 27], - [36, '05:02', '05:12', 302, 312, 10], - [37, '05:03', '05:45', 303, 345, 42], - [38, '05:03', '05:18', 303, 318, 15], - [39, '05:03', '06:28', 303, 388, 85], - [40, '05:03', '05:13', 303, 313, 10], - [41, '05:04', '06:24', 304, 384, 80], - [42, '05:07', '05:44', 307, 344, 37], - [43, '05:08', '05:48', 308, 348, 40], - [44, '05:10', '06:06', 310, 366, 56], - [45, '05:11', '05:37', 311, 337, 26], - [46, '05:11', '05:53', 311, 353, 42], - [47, '05:13', '06:15', 313, 375, 62], - [48, '05:13', '05:38', 313, 338, 25], - [49, '05:16', '05:44', 316, 344, 28], - [50, '05:17', '05:27', 317, 327, 10], - [51, '05:18', '06:40', 318, 400, 82], - [52, '05:18', '06:03', 318, 363, 45], - [53, '05:18', '06:11', 318, 371, 53], - [54, '05:18', '06:00', 318, 360, 42], - [55, '05:19', '06:34', 319, 394, 75], - [56, '05:20', '06:17', 320, 377, 57], - [57, '05:22', '05:59', 322, 359, 37], - [58, '05:24', '05:48', 324, 348, 24], - [59, '05:25', '05:40', 325, 340, 15], - [60, '05:26', '06:08', 326, 368, 42], - [61, '05:27', '06:30', 327, 390, 63], - [62, '05:27', '05:54', 327, 354, 27], - [63, '05:28', '05:53', 328, 353, 25], - [64, '05:29', '05:44', 329, 344, 15], - [65, '05:30', '05:40', 330, 340, 10], - [66, '05:30', '05:40', 330, 340, 10], - [67, '05:30', '05:40', 330, 340, 10], - [68, '05:32', '06:53', 332, 413, 81], - [69, '05:33', '07:00', 333, 420, 87], - [70, '05:33', '06:15', 333, 375, 
42], - [71, '05:33', '05:47', 333, 347, 14], - [72, '05:37', '06:13', 337, 373, 36], - [73, '05:37', '06:05', 337, 365, 28], - [74, '05:38', '06:33', 338, 393, 55], - [75, '05:38', '06:04', 338, 364, 26], - [76, '05:38', '06:18', 338, 378, 40], - [77, '05:39', '05:54', 339, 354, 15], - [78, '05:40', '05:56', 340, 356, 16], - [79, '05:40', '06:41', 340, 401, 61], - [80, '05:40', '05:50', 340, 350, 10], - [81, '05:41', '06:23', 341, 383, 42], - [82, '05:41', '06:01', 341, 361, 20], - [83, '05:43', '06:08', 343, 368, 25], - [84, '05:44', '07:10', 344, 430, 86], - [85, '05:44', '05:55', 344, 355, 11], - [86, '05:45', '06:44', 345, 404, 59], - [87, '05:47', '06:17', 347, 377, 30], - [88, '05:48', '07:08', 348, 428, 80], - [89, '05:48', '06:30', 348, 390, 42], - [90, '05:50', '06:50', 350, 410, 60], - [91, '05:50', '06:00', 350, 360, 10], - [92, '05:50', '06:00', 350, 360, 10], - [93, '05:50', '06:51', 350, 411, 61], - [94, '05:52', '06:33', 352, 393, 41], - [95, '05:52', '06:36', 352, 396, 44], - [96, '05:52', '06:23', 352, 383, 31], - [97, '05:54', '06:14', 354, 374, 20], - [98, '05:54', '07:20', 354, 440, 86], - [99, '05:55', '06:40', 355, 400, 45], - [100, '05:55', '06:27', 355, 387, 32], - [101, '05:56', '06:35', 356, 395, 39], - [102, '05:56', '06:06', 356, 366, 10], - [103, '05:57', '06:21', 357, 381, 24], - [104, '05:58', '07:23', 358, 443, 85], - [105, '05:58', '06:23', 358, 383, 25], - [106, '05:58', '06:08', 358, 368, 10], - [107, '05:58', '06:43', 358, 403, 45], - [108, '06:00', '06:10', 360, 370, 10], - [109, '06:00', '06:16', 360, 376, 16], - [110, '06:00', '07:01', 360, 421, 61], - [111, '06:01', '07:00', 361, 420, 59], - [112, '06:01', '06:13', 361, 373, 12], - [113, '06:01', '06:45', 361, 405, 44], - [114, '06:03', '06:50', 363, 410, 47], - [115, '06:04', '06:37', 364, 397, 33], - [116, '06:04', '07:30', 364, 450, 86], - [117, '06:05', '06:24', 365, 384, 19], - [118, '06:06', '06:51', 366, 411, 45], - [119, '06:07', '06:43', 367, 403, 36], - [120, '06:08', '07:30', 368, 450, 82], - [121, '06:10', '06:20', 370, 380, 10], - [122, '06:10', '07:17', 370, 437, 67], - [123, '06:11', '06:54', 371, 414, 43], - [124, '06:11', '06:21', 371, 381, 10], - [125, '06:13', '06:38', 373, 398, 25], - [126, '06:13', '06:58', 373, 418, 45], - [127, '06:13', '06:53', 373, 413, 40], - [128, '06:14', '07:03', 374, 423, 49], - [129, '06:14', '06:47', 374, 407, 33], - [130, '06:14', '07:40', 374, 460, 86], - [131, '06:15', '07:15', 375, 435, 60], - [132, '06:16', '06:28', 376, 388, 12], - [133, '06:16', '06:26', 376, 386, 10], - [134, '06:17', '06:34', 377, 394, 17], - [135, '06:18', '07:06', 378, 426, 48], - [136, '06:18', '07:38', 378, 458, 80], - [137, '06:18', '07:02', 378, 422, 44], - [138, '06:19', '06:53', 379, 413, 34], - [139, '06:20', '07:25', 380, 445, 65], - [140, '06:20', '06:36', 380, 396, 16], - [141, '06:20', '06:30', 380, 390, 10], - [142, '06:20', '06:30', 380, 390, 10], - [143, '06:21', '06:49', 381, 409, 28], - [144, '06:22', '07:06', 382, 426, 44], - [145, '06:24', '07:50', 384, 470, 86], - [146, '06:24', '06:57', 384, 417, 33], - [147, '06:26', '07:45', 386, 465, 79], - [148, '06:26', '07:10', 386, 430, 44], - [149, '06:27', '06:44', 387, 404, 17], - [150, '06:28', '06:53', 388, 413, 25], - [151, '06:28', '07:14', 388, 434, 46], - [152, '06:29', '07:03', 389, 423, 34], - [153, '06:30', '06:40', 390, 400, 10], - [154, '06:30', '07:37', 390, 457, 67], - [155, '06:31', '06:43', 391, 403, 12], - [156, '06:33', '07:14', 393, 434, 41], - [157, '06:33', '07:53', 393, 473, 80], - [158, 
'06:34', '08:16', 394, 496, 102], - [159, '06:34', '07:09', 394, 429, 35], - [160, '06:34', '07:07', 394, 427, 33], - [161, '06:36', '07:21', 396, 441, 45], - [162, '06:37', '07:22', 397, 442, 45], - [163, '06:37', '06:54', 397, 414, 17], - [164, '06:38', '07:30', 398, 450, 52], - [165, '06:38', '07:18', 398, 438, 40], - [166, '06:39', '07:33', 399, 453, 54], - [167, '06:40', '07:52', 400, 472, 72], - [168, '06:40', '06:50', 400, 410, 10], - [169, '06:40', '07:22', 400, 442, 42], - [170, '06:40', '06:56', 400, 416, 16], - [171, '06:41', '08:00', 401, 480, 79], - [172, '06:42', '07:26', 402, 446, 44], - [173, '06:42', '07:13', 402, 433, 31], - [174, '06:43', '07:08', 403, 428, 25], - [175, '06:43', '07:30', 403, 450, 47], - [176, '06:43', '07:23', 403, 443, 40], - [177, '06:44', '07:17', 404, 437, 33], - [178, '06:44', '08:13', 404, 493, 89], - [179, '06:46', '07:01', 406, 421, 15], - [180, '06:46', '06:58', 406, 418, 12], - [181, '06:47', '07:04', 407, 424, 17], - [182, '06:48', '08:15', 408, 495, 87], - [183, '06:48', '07:34', 408, 454, 46], - [184, '06:48', '07:37', 408, 457, 49], - [185, '06:49', '07:43', 409, 463, 54], - [186, '06:50', '08:00', 410, 480, 70], - [187, '06:50', '07:00', 410, 420, 10], - [188, '06:50', '07:05', 410, 425, 15], - [189, '06:51', '07:18', 411, 438, 27], - [190, '06:52', '07:36', 412, 456, 44], - [191, '06:53', '07:37', 413, 457, 44], - [192, '06:54', '08:20', 414, 500, 86], - [193, '06:54', '07:27', 414, 447, 33], - [194, '06:54', '07:20', 414, 440, 26], - [195, '06:56', '08:23', 416, 503, 87], - [196, '06:57', '07:12', 417, 432, 15], - [197, '06:57', '07:58', 417, 478, 61], - [198, '06:57', '07:45', 417, 465, 48], - [199, '06:57', '07:40', 417, 460, 43], - [200, '06:58', '07:23', 418, 443, 25], - [201, '06:59', '07:53', 419, 473, 54], - [202, '06:59', '08:07', 419, 487, 68], - [203, '07:00', '07:10', 420, 430, 10], - [204, '07:00', '07:16', 420, 436, 16], - [205, '07:01', '08:30', 421, 510, 89], - [206, '07:01', '07:13', 421, 433, 12], - [207, '07:01', '07:43', 421, 463, 42], - [208, '07:03', '08:30', 423, 510, 87], - [209, '07:04', '07:37', 424, 457, 33], - [210, '07:04', '07:44', 424, 464, 40], - [211, '07:05', '07:52', 425, 472, 47], - [212, '07:05', '08:05', 425, 485, 60], - [213, '07:05', '07:46', 425, 466, 41], - [214, '07:06', '07:51', 426, 471, 45], - [215, '07:07', '08:08', 427, 488, 61], - [216, '07:07', '07:52', 427, 472, 45], - [217, '07:07', '08:16', 427, 496, 69], - [218, '07:07', '07:27', 427, 447, 20], - [219, '07:09', '07:50', 429, 470, 41], - [220, '07:09', '08:40', 429, 520, 91], - [221, '07:09', '08:03', 429, 483, 54], - [222, '07:10', '07:20', 430, 440, 10], - [223, '07:11', '08:36', 431, 516, 85], - [224, '07:12', '08:00', 432, 480, 48], - [225, '07:12', '07:47', 432, 467, 35], - [226, '07:13', '07:54', 433, 474, 41], - [227, '07:13', '07:38', 433, 458, 25], - [228, '07:14', '07:59', 434, 479, 45], - [229, '07:16', '08:50', 436, 530, 94], - [230, '07:16', '07:28', 436, 448, 12], - [231, '07:17', '07:35', 437, 455, 18], - [232, '07:17', '07:58', 437, 478, 41], - [233, '07:18', '08:06', 438, 486, 48], - [234, '07:18', '08:44', 438, 524, 86], - [235, '07:19', '08:13', 439, 493, 54], - [236, '07:20', '08:02', 440, 482, 42], - [237, '07:20', '08:07', 440, 487, 47], - [238, '07:20', '07:30', 440, 450, 10], - [239, '07:20', '07:57', 440, 477, 37], - [240, '07:20', '07:36', 440, 456, 16], - [241, '07:21', '07:48', 441, 468, 27], - [242, '07:22', '08:06', 442, 486, 44], - [243, '07:22', '08:25', 442, 505, 63], - [244, '07:24', '08:27', 444, 507, 
63], - [245, '07:24', '08:05', 444, 485, 41], - [246, '07:26', '08:23', 446, 503, 57], - [247, '07:26', '08:52', 446, 532, 86], - [248, '07:27', '08:07', 447, 487, 40], - [249, '07:27', '07:42', 447, 462, 15], - [250, '07:27', '08:15', 447, 495, 48], - [251, '07:28', '07:53', 448, 473, 25], - [252, '07:28', '08:09', 448, 489, 41], - [253, '07:28', '07:38', 448, 458, 10], - [254, '07:30', '08:35', 450, 515, 65], - [255, '07:31', '07:43', 451, 463, 12], - [256, '07:32', '08:13', 452, 493, 41], - [257, '07:34', '09:00', 454, 540, 86], - [258, '07:34', '08:33', 454, 513, 59], - [259, '07:34', '09:04', 454, 544, 90], - [260, '07:35', '08:22', 455, 502, 47], - [261, '07:35', '07:45', 455, 465, 10], - [262, '07:35', '08:16', 455, 496, 41], - [263, '07:36', '08:17', 456, 497, 41], - [264, '07:36', '08:36', 456, 516, 60], - [265, '07:37', '07:50', 457, 470, 13], - [266, '07:40', '07:56', 460, 476, 16], - [267, '07:40', '08:20', 460, 500, 40], - [268, '07:40', '08:45', 460, 525, 65], - [269, '07:41', '08:39', 461, 519, 58], - [270, '07:41', '07:51', 461, 471, 10], - [271, '07:42', '08:30', 462, 510, 48], - [272, '07:42', '08:21', 462, 501, 39], - [273, '07:43', '08:08', 463, 488, 25], - [274, '07:43', '08:24', 463, 504, 41], - [275, '07:44', '09:10', 464, 550, 86], - [276, '07:44', '08:43', 464, 523, 59], - [277, '07:46', '08:28', 466, 508, 42], - [278, '07:46', '07:58', 466, 478, 12], - [279, '07:47', '08:00', 467, 480, 13], - [280, '07:48', '09:14', 468, 554, 86], - [281, '07:49', '08:32', 469, 512, 43], - [282, '07:50', '08:55', 470, 535, 65], - [283, '07:50', '08:00', 470, 480, 10], - [284, '07:50', '08:37', 470, 517, 47], - [285, '07:50', '08:26', 470, 506, 36], - [286, '07:51', '08:18', 471, 498, 27], - [287, '07:52', '08:21', 472, 501, 29], - [288, '07:53', '08:35', 473, 515, 42], - [289, '07:54', '09:19', 474, 559, 85], - [290, '07:55', '08:53', 475, 533, 58], - [291, '07:56', '08:54', 476, 534, 58], - [292, '07:57', '08:39', 477, 519, 42], - [293, '07:57', '08:10', 477, 490, 13], - [294, '07:58', '08:45', 478, 525, 47], - [295, '07:58', '08:23', 478, 503, 25], - [296, '08:00', '08:10', 480, 490, 10], - [297, '08:00', '09:05', 480, 545, 65], - [298, '08:00', '08:16', 480, 496, 16], - [299, '08:00', '08:35', 480, 515, 35], - [300, '08:01', '08:13', 481, 493, 12], - [301, '08:01', '08:43', 481, 523, 42], - [302, '08:03', '09:26', 483, 566, 83], - [303, '08:04', '09:29', 484, 569, 85], - [304, '08:05', '08:21', 485, 501, 16], - [305, '08:05', '08:47', 485, 527, 42], - [306, '08:06', '08:51', 486, 531, 45], - [307, '08:06', '09:03', 486, 543, 57], - [308, '08:07', '08:20', 487, 500, 13], - [309, '08:08', '08:55', 488, 535, 47], - [310, '08:08', '08:50', 488, 530, 42], - [311, '08:10', '08:45', 490, 525, 35], - [312, '08:10', '09:15', 490, 555, 65], - [313, '08:10', '08:20', 490, 500, 10], - [314, '08:11', '09:41', 491, 581, 90], - [315, '08:12', '08:55', 492, 535, 43], - [316, '08:13', '08:38', 493, 518, 25], - [317, '08:14', '09:38', 494, 578, 84], - [318, '08:15', '08:30', 495, 510, 15], - [319, '08:16', '08:30', 496, 510, 14], - [320, '08:16', '08:28', 496, 508, 12], - [321, '08:16', '09:00', 496, 540, 44], - [322, '08:17', '09:13', 497, 553, 56], - [323, '08:18', '09:16', 498, 556, 58], - [324, '08:18', '09:05', 498, 545, 47], - [325, '08:20', '08:36', 500, 516, 16], - [326, '08:20', '08:55', 500, 535, 35], - [327, '08:20', '09:05', 500, 545, 45], - [328, '08:20', '08:30', 500, 510, 10], - [329, '08:20', '09:25', 500, 565, 65], - [330, '08:21', '08:38', 501, 518, 17], - [331, '08:21', 
'08:47', 501, 527, 26], - [332, '08:22', '08:45', 502, 525, 23], - [333, '08:23', '09:10', 503, 550, 47], - [334, '08:24', '09:48', 504, 588, 84], - [335, '08:26', '08:46', 506, 526, 20], - [336, '08:27', '09:07', 507, 547, 40], - [337, '08:28', '08:50', 508, 530, 22], - [338, '08:28', '09:56', 508, 596, 88], - [339, '08:28', '09:23', 508, 563, 55], - [340, '08:29', '09:20', 509, 560, 51], - [341, '08:30', '09:05', 510, 545, 35], - [342, '08:30', '08:45', 510, 525, 15], - [343, '08:30', '08:40', 510, 520, 10], - [344, '08:30', '09:35', 510, 575, 65], - [345, '08:31', '08:43', 511, 523, 12], - [346, '08:31', '09:13', 511, 553, 42], - [347, '08:34', '09:58', 514, 598, 84], - [348, '08:35', '08:55', 515, 535, 20], - [349, '08:35', '09:15', 515, 555, 40], - [350, '08:35', '08:45', 515, 525, 10], - [351, '08:36', '08:46', 516, 526, 10], - [352, '08:36', '09:00', 516, 540, 24], - [353, '08:38', '09:20', 518, 560, 42], - [354, '08:38', '09:35', 518, 575, 57], - [355, '08:38', '09:14', 518, 554, 36], - [356, '08:39', '09:33', 519, 573, 54], - [357, '08:40', '09:45', 520, 585, 65], - [358, '08:40', '08:50', 520, 530, 10], - [359, '08:40', '08:56', 520, 536, 16], - [360, '08:42', '09:25', 522, 565, 43], - [361, '08:43', '09:08', 523, 548, 25], - [362, '08:44', '09:35', 524, 575, 51], - [363, '08:45', '09:00', 525, 540, 15], - [364, '08:45', '09:05', 525, 545, 20], - [365, '08:46', '09:24', 526, 564, 38], - [366, '08:46', '08:58', 526, 538, 12], - [367, '08:46', '09:30', 526, 570, 44], - [368, '08:48', '10:11', 528, 611, 83], - [369, '08:48', '10:13', 528, 613, 85], - [370, '08:49', '09:43', 529, 583, 54], - [371, '08:50', '09:30', 530, 570, 40], - [372, '08:50', '10:00', 530, 600, 70], - [373, '08:50', '09:00', 530, 540, 10], - [374, '08:51', '09:17', 531, 557, 26], - [375, '08:53', '09:20', 533, 560, 27], - [376, '08:53', '09:35', 533, 575, 42], - [377, '08:55', '09:34', 535, 574, 39], - [378, '08:55', '09:15', 535, 555, 20], - [379, '08:58', '09:38', 538, 578, 40], - [380, '08:58', '10:26', 538, 626, 88], - [381, '08:59', '09:53', 539, 593, 54], - [382, '08:59', '09:50', 539, 590, 51], - [383, '09:00', '09:35', 540, 575, 35], - [384, '09:00', '09:16', 540, 556, 16], - [385, '09:00', '09:10', 540, 550, 10], - [386, '09:00', '09:16', 540, 556, 16], - [387, '09:01', '09:13', 541, 553, 12], - [388, '09:03', '09:45', 543, 585, 42], - [389, '09:03', '10:28', 543, 628, 85], - [390, '09:05', '09:44', 545, 584, 39], - [391, '09:05', '09:25', 545, 565, 20], - [392, '09:08', '09:53', 548, 593, 45], - [393, '09:08', '10:04', 548, 604, 56], - [394, '09:09', '10:03', 549, 603, 54], - [395, '09:10', '10:15', 550, 615, 65], - [396, '09:10', '09:20', 550, 560, 10], - [397, '09:11', '09:38', 551, 578, 27], - [398, '09:13', '10:00', 553, 600, 47], - [399, '09:14', '09:39', 554, 579, 25], - [400, '09:14', '10:05', 554, 605, 51], - [401, '09:15', '09:54', 555, 594, 39], - [402, '09:16', '09:28', 556, 568, 12], - [403, '09:18', '10:43', 558, 643, 85], - [404, '09:18', '10:41', 558, 641, 83], - [405, '09:18', '09:58', 558, 598, 40], - [406, '09:19', '10:13', 559, 613, 54], - [407, '09:20', '09:30', 560, 570, 10], - [408, '09:20', '09:36', 560, 576, 16], - [409, '09:21', '09:47', 561, 587, 26], - [410, '09:23', '10:30', 563, 630, 67], - [411, '09:23', '10:05', 563, 605, 42], - [412, '09:23', '09:49', 563, 589, 26], - [413, '09:24', '09:35', 564, 575, 11], - [414, '09:25', '09:35', 565, 575, 10], - [415, '09:25', '10:04', 565, 604, 39], - [416, '09:28', '10:08', 568, 608, 40], - [417, '09:29', '09:45', 569, 585, 16], - 
[418, '09:29', '10:20', 569, 620, 51], - [419, '09:29', '10:56', 569, 656, 87], - [420, '09:29', '10:23', 569, 623, 54], - [421, '09:30', '09:40', 570, 580, 10], - [422, '09:31', '09:43', 571, 583, 12], - [423, '09:33', '10:58', 573, 658, 85], - [424, '09:33', '10:15', 573, 615, 42], - [425, '09:34', '09:45', 574, 585, 11], - [426, '09:35', '10:14', 575, 614, 39], - [427, '09:38', '10:45', 578, 645, 67], - [428, '09:39', '10:33', 579, 633, 54], - [429, '09:40', '09:56', 580, 596, 16], - [430, '09:40', '09:50', 580, 590, 10], - [431, '09:41', '10:08', 581, 608, 27], - [432, '09:41', '10:23', 581, 623, 42], - [433, '09:44', '10:35', 584, 635, 51], - [434, '09:44', '11:11', 584, 671, 87], - [435, '09:44', '09:55', 584, 595, 11], - [436, '09:45', '10:24', 585, 624, 39], - [437, '09:46', '09:58', 586, 598, 12], - [438, '09:48', '10:30', 588, 630, 42], - [439, '09:48', '11:13', 588, 673, 85], - [440, '09:48', '10:04', 588, 604, 16], - [441, '09:49', '10:43', 589, 643, 54], - [442, '09:50', '10:00', 590, 600, 10], - [443, '09:51', '10:17', 591, 617, 26], - [444, '09:53', '10:49', 593, 649, 56], - [445, '09:53', '11:00', 593, 660, 67], - [446, '09:54', '10:05', 594, 605, 11], - [447, '09:55', '10:34', 595, 634, 39], - [448, '09:56', '10:38', 596, 638, 42], - [449, '09:57', '10:20', 597, 620, 23], - [450, '09:59', '11:26', 599, 686, 87], - [451, '09:59', '10:50', 599, 650, 51], - [452, '09:59', '10:53', 599, 653, 54], - [453, '10:00', '10:16', 600, 616, 16], - [454, '10:00', '10:10', 600, 610, 10], - [455, '10:01', '10:13', 601, 613, 12], - [456, '10:03', '11:28', 603, 688, 85], - [457, '10:03', '10:45', 603, 645, 42], - [458, '10:04', '10:15', 604, 615, 11], - [459, '10:05', '10:44', 605, 644, 39], - [460, '10:08', '11:15', 608, 675, 67], - [461, '10:09', '11:03', 609, 663, 54], - [462, '10:10', '10:20', 610, 620, 10], - [463, '10:11', '10:38', 611, 638, 27], - [464, '10:11', '10:53', 611, 653, 42], - [465, '10:14', '11:05', 614, 665, 51], - [466, '10:14', '11:41', 614, 701, 87], - [467, '10:14', '10:25', 614, 625, 11], - [468, '10:15', '10:54', 615, 654, 39], - [469, '10:16', '10:28', 616, 628, 12], - [470, '10:18', '11:43', 618, 703, 85], - [471, '10:18', '11:00', 618, 660, 42], - [472, '10:19', '11:13', 619, 673, 54], - [473, '10:20', '10:30', 620, 630, 10], - [474, '10:20', '10:36', 620, 636, 16], - [475, '10:21', '10:47', 621, 647, 26], - [476, '10:23', '11:30', 623, 690, 67], - [477, '10:23', '10:45', 623, 645, 22], - [478, '10:24', '10:35', 624, 635, 11], - [479, '10:25', '11:04', 625, 664, 39], - [480, '10:26', '11:08', 626, 668, 42], - [481, '10:29', '11:20', 629, 680, 51], - [482, '10:29', '11:23', 629, 683, 54], - [483, '10:29', '11:56', 629, 716, 87], - [484, '10:30', '10:40', 630, 640, 10], - [485, '10:31', '10:43', 631, 643, 12], - [486, '10:33', '11:15', 633, 675, 42], - [487, '10:33', '11:58', 633, 718, 85], - [488, '10:34', '10:45', 634, 645, 11], - [489, '10:35', '11:14', 635, 674, 39], - [490, '10:38', '11:45', 638, 705, 67], - [491, '10:39', '11:33', 639, 693, 54], - [492, '10:40', '10:50', 640, 650, 10], - [493, '10:40', '10:56', 640, 656, 16], - [494, '10:41', '11:23', 641, 683, 42], - [495, '10:41', '11:08', 641, 668, 27], - [496, '10:44', '12:11', 644, 731, 87], - [497, '10:44', '11:35', 644, 695, 51], - [498, '10:44', '10:55', 644, 655, 11], - [499, '10:45', '11:24', 645, 684, 39], - [500, '10:46', '10:58', 646, 658, 12], - [501, '10:48', '12:13', 648, 733, 85], - [502, '10:48', '11:30', 648, 690, 42], - [503, '10:49', '11:43', 649, 703, 54], - [504, '10:50', '11:00', 650, 
660, 10], - [505, '10:51', '11:17', 651, 677, 26], - [506, '10:53', '12:00', 653, 720, 67], - [507, '10:53', '11:20', 653, 680, 27], - [508, '10:54', '11:05', 654, 665, 11], - [509, '10:55', '11:34', 655, 694, 39], - [510, '10:56', '11:38', 656, 698, 42], - [511, '10:59', '11:14', 659, 674, 15], - [512, '10:59', '12:26', 659, 746, 87], - [513, '10:59', '11:53', 659, 713, 54], - [514, '10:59', '11:50', 659, 710, 51], - [515, '11:00', '11:16', 660, 676, 16], - [516, '11:00', '11:10', 660, 670, 10], - [517, '11:01', '11:13', 661, 673, 12], - [518, '11:03', '11:45', 663, 705, 42], - [519, '11:03', '12:28', 663, 748, 85], - [520, '11:04', '11:15', 664, 675, 11], - [521, '11:05', '11:44', 665, 704, 39], - [522, '11:08', '12:15', 668, 735, 67], - [523, '11:09', '12:03', 669, 723, 54], - [524, '11:10', '11:20', 670, 680, 10], - [525, '11:11', '11:38', 671, 698, 27], - [526, '11:11', '11:53', 671, 713, 42], - [527, '11:14', '11:25', 674, 685, 11], - [528, '11:14', '12:05', 674, 725, 51], - [529, '11:14', '12:38', 674, 758, 84], - [530, '11:14', '12:41', 674, 761, 87], - [531, '11:15', '11:54', 675, 714, 39], - [532, '11:16', '11:28', 676, 688, 12], - [533, '11:18', '12:00', 678, 720, 42], - [534, '11:19', '12:13', 679, 733, 54], - [535, '11:20', '11:30', 680, 690, 10], - [536, '11:20', '11:36', 680, 696, 16], - [537, '11:21', '11:47', 681, 707, 26], - [538, '11:23', '12:30', 683, 750, 67], - [539, '11:23', '11:49', 683, 709, 26], - [540, '11:24', '12:48', 684, 768, 84], - [541, '11:24', '11:35', 684, 695, 11], - [542, '11:25', '12:04', 685, 724, 39], - [543, '11:26', '12:08', 686, 728, 42], - [544, '11:29', '11:44', 689, 704, 15], - [545, '11:29', '12:23', 689, 743, 54], - [546, '11:29', '12:20', 689, 740, 51], - [547, '11:29', '12:54', 689, 774, 85], - [548, '11:30', '11:40', 690, 700, 10], - [549, '11:31', '11:43', 691, 703, 12], - [550, '11:33', '12:15', 693, 735, 42], - [551, '11:34', '12:58', 694, 778, 84], - [552, '11:34', '11:45', 694, 705, 11], - [553, '11:35', '12:14', 695, 734, 39], - [554, '11:38', '12:45', 698, 765, 67], - [555, '11:39', '12:33', 699, 753, 54], - [556, '11:40', '11:56', 700, 716, 16], - [557, '11:40', '11:50', 700, 710, 10], - [558, '11:41', '12:08', 701, 728, 27], - [559, '11:41', '12:23', 701, 743, 42], - [560, '11:44', '11:55', 704, 715, 11], - [561, '11:44', '13:14', 704, 794, 90], - [562, '11:44', '13:08', 704, 788, 84], - [563, '11:44', '12:35', 704, 755, 51], - [564, '11:45', '12:24', 705, 744, 39], - [565, '11:46', '11:58', 706, 718, 12], - [566, '11:48', '12:30', 708, 750, 42], - [567, '11:49', '12:43', 709, 763, 54], - [568, '11:50', '12:00', 710, 720, 10], - [569, '11:51', '12:17', 711, 737, 26], - [570, '11:53', '12:49', 713, 769, 56], - [571, '11:53', '13:00', 713, 780, 67], - [572, '11:54', '13:18', 714, 798, 84], - [573, '11:54', '12:05', 714, 725, 11], - [574, '11:55', '12:40', 715, 760, 45], - [575, '11:55', '12:34', 715, 754, 39], - [576, '11:56', '12:35', 716, 755, 39], - [577, '11:57', '12:20', 717, 740, 23], - [578, '11:58', '12:29', 718, 749, 31], - [579, '11:59', '12:50', 719, 770, 51], - [580, '11:59', '12:53', 719, 773, 54], - [581, '11:59', '13:24', 719, 804, 85], - [582, '11:59', '12:14', 719, 734, 15], - [583, '12:00', '12:16', 720, 736, 16], - [584, '12:00', '12:10', 720, 730, 10], - [585, '12:01', '12:45', 721, 765, 44], - [586, '12:01', '12:13', 721, 733, 12], - [587, '12:03', '12:50', 723, 770, 47], - [588, '12:04', '12:15', 724, 735, 11], - [589, '12:04', '13:04', 724, 784, 60], - [590, '12:04', '13:28', 724, 808, 84], - [591, '12:05', 
'12:44', 725, 764, 39], - [592, '12:08', '13:11', 728, 791, 63], - [593, '12:08', '12:39', 728, 759, 31], - [594, '12:09', '13:03', 729, 783, 54], - [595, '12:10', '12:20', 730, 740, 10], - [596, '12:11', '12:55', 731, 775, 44], - [597, '12:11', '12:38', 731, 758, 27], - [598, '12:14', '13:05', 734, 785, 51], - [599, '12:14', '12:25', 734, 745, 11], - [600, '12:14', '13:44', 734, 824, 90], - [601, '12:14', '13:38', 734, 818, 84], - [602, '12:15', '12:54', 735, 774, 39], - [603, '12:16', '12:28', 736, 748, 12], - [604, '12:18', '13:00', 738, 780, 42], - [605, '12:19', '13:13', 739, 793, 54], - [606, '12:20', '12:30', 740, 750, 10], - [607, '12:20', '13:31', 740, 811, 71], - [608, '12:20', '12:30', 740, 750, 10], - [609, '12:20', '12:36', 740, 756, 16], - [610, '12:21', '12:47', 741, 767, 26], - [611, '12:23', '12:45', 743, 765, 22], - [612, '12:24', '12:35', 744, 755, 11], - [613, '12:24', '13:48', 744, 828, 84], - [614, '12:25', '13:10', 745, 790, 45], - [615, '12:25', '13:04', 745, 784, 39], - [616, '12:26', '13:05', 746, 785, 39], - [617, '12:28', '13:54', 748, 834, 86], - [618, '12:28', '12:38', 748, 758, 10], - [619, '12:28', '13:15', 748, 795, 47], - [620, '12:29', '13:23', 749, 803, 54], - [621, '12:30', '13:41', 750, 821, 71], - [622, '12:30', '12:40', 750, 760, 10], - [623, '12:31', '13:15', 751, 795, 44], - [624, '12:31', '12:43', 751, 763, 12], - [625, '12:33', '12:48', 753, 768, 15], - [626, '12:33', '13:20', 753, 800, 47], - [627, '12:34', '13:58', 754, 838, 84], - [628, '12:34', '13:34', 754, 814, 60], - [629, '12:34', '12:45', 754, 765, 11], - [630, '12:35', '13:14', 755, 794, 39], - [631, '12:38', '13:25', 758, 805, 47], - [632, '12:38', '13:25', 758, 805, 47], - [633, '12:38', '14:04', 758, 844, 86], - [634, '12:39', '13:33', 759, 813, 54], - [635, '12:40', '13:51', 760, 831, 71], - [636, '12:40', '12:50', 760, 770, 10], - [637, '12:40', '12:56', 760, 776, 16], - [638, '12:41', '13:08', 761, 788, 27], - [639, '12:43', '13:30', 763, 810, 47], - [640, '12:44', '12:55', 764, 775, 11], - [641, '12:44', '14:08', 764, 848, 84], - [642, '12:45', '13:24', 765, 804, 39], - [643, '12:46', '12:58', 766, 778, 12], - [644, '12:46', '13:21', 766, 801, 35], - [645, '12:48', '14:14', 768, 854, 86], - [646, '12:48', '13:35', 768, 815, 47], - [647, '12:48', '12:58', 768, 778, 10], - [648, '12:48', '13:35', 768, 815, 47], - [649, '12:49', '13:43', 769, 823, 54], - [650, '12:50', '14:01', 770, 841, 71], - [651, '12:50', '13:00', 770, 780, 10], - [652, '12:50', '13:00', 770, 780, 10], - [653, '12:51', '13:17', 771, 797, 26], - [654, '12:53', '13:20', 773, 800, 27], - [655, '12:53', '13:24', 773, 804, 31], - [656, '12:53', '13:40', 773, 820, 47], - [657, '12:54', '14:18', 774, 858, 84], - [658, '12:54', '13:05', 774, 785, 11], - [659, '12:55', '13:34', 775, 814, 39], - [660, '12:58', '14:24', 778, 864, 86], - [661, '12:58', '13:25', 778, 805, 27], - [662, '12:58', '13:45', 778, 825, 47], - [663, '12:58', '13:45', 778, 825, 47], - [664, '12:59', '13:53', 779, 833, 54], - [665, '13:00', '13:10', 780, 790, 10], - [666, '13:00', '13:16', 780, 796, 16], - [667, '13:00', '14:11', 780, 851, 71], - [668, '13:01', '13:13', 781, 793, 12], - [669, '13:03', '13:34', 783, 814, 31], - [670, '13:03', '13:50', 783, 830, 47], - [671, '13:04', '13:15', 784, 795, 11], - [672, '13:04', '14:28', 784, 868, 84], - [673, '13:05', '13:44', 785, 824, 39], - [674, '13:08', '13:55', 788, 835, 47], - [675, '13:08', '14:34', 788, 874, 86], - [676, '13:08', '13:55', 788, 835, 47], - [677, '13:09', '14:03', 789, 843, 54], - 
-  [678, '13:10', '13:20', 790, 800, 10],
[... removed rows 679-1355 of the data array elided: the same six-value
 format, apparently [row index, start 'HH:MM', end 'HH:MM', start in
 minutes after midnight, end in minutes, duration in minutes], all with
 single-quoted time strings, ending at
 [1355, '00:57', '01:07', 1497, 1507, 10] ...]
+  [0, "04:18", "05:00", 258, 300, 42],
[... added rows 1 onward elided: the array is re-serialized from row 0
 with double-quoted time strings; every row visible on both sides of the
 hunk carries identical values, so the change is quote style only; the
 excerpt cuts off mid-row at [1102, "18:38", "19:56", 1118, ...]
1196, 78], + [1103, "18:39", "19:33", 1119, 1173, 54], + [1104, "18:40", "18:50", 1120, 1130, 10], + [1105, "18:40", "19:45", 1120, 1185, 65], + [1106, "18:40", "18:56", 1120, 1136, 16], + [1107, "18:43", "19:10", 1123, 1150, 27], + [1108, "18:43", "19:30", 1123, 1170, 47], + [1109, "18:44", "19:24", 1124, 1164, 40], + [1110, "18:45", "19:05", 1125, 1145, 20], + [1111, "18:46", "18:58", 1126, 1138, 12], + [1112, "18:48", "19:35", 1128, 1175, 47], + [1113, "18:48", "20:12", 1128, 1212, 84], + [1114, "18:48", "20:11", 1128, 1211, 83], + [1115, "18:48", "19:28", 1128, 1168, 40], + [1116, "18:49", "19:43", 1129, 1183, 54], + [1117, "18:50", "19:00", 1130, 1140, 10], + [1118, "18:51", "19:01", 1131, 1141, 10], + [1119, "18:53", "19:35", 1133, 1175, 42], + [1120, "18:53", "19:15", 1133, 1155, 22], + [1121, "18:53", "20:00", 1133, 1200, 67], + [1122, "18:55", "19:15", 1135, 1155, 20], + [1123, "18:55", "19:34", 1135, 1174, 39], + [1124, "18:58", "19:38", 1138, 1178, 40], + [1125, "18:59", "19:53", 1139, 1193, 54], + [1126, "18:59", "19:50", 1139, 1190, 51], + [1127, "18:59", "19:53", 1139, 1193, 54], + [1128, "19:00", "19:16", 1140, 1156, 16], + [1129, "19:00", "19:10", 1140, 1150, 10], + [1130, "19:00", "19:16", 1140, 1156, 16], + [1131, "19:01", "19:13", 1141, 1153, 12], + [1132, "19:03", "20:26", 1143, 1226, 83], + [1133, "19:03", "19:45", 1143, 1185, 42], + [1134, "19:05", "19:44", 1145, 1184, 39], + [1135, "19:05", "19:25", 1145, 1165, 20], + [1136, "19:08", "20:15", 1148, 1215, 67], + [1137, "19:08", "19:35", 1148, 1175, 27], + [1138, "19:09", "19:49", 1149, 1189, 40], + [1139, "19:09", "20:03", 1149, 1203, 54], + [1140, "19:10", "19:20", 1150, 1160, 10], + [1141, "19:10", "19:20", 1150, 1160, 10], + [1142, "19:11", "19:53", 1151, 1193, 42], + [1143, "19:14", "20:26", 1154, 1226, 72], + [1144, "19:14", "19:35", 1154, 1175, 21], + [1145, "19:14", "19:24", 1154, 1164, 10], + [1146, "19:14", "20:05", 1154, 1205, 51], + [1147, "19:15", "19:30", 1155, 1170, 15], + [1148, "19:15", "19:54", 1155, 1194, 39], + [1149, "19:18", "20:39", 1158, 1239, 81], + [1150, "19:18", "20:00", 1158, 1200, 42], + [1151, "19:19", "20:14", 1159, 1214, 55], + [1152, "19:20", "19:30", 1160, 1170, 10], + [1153, "19:20", "19:36", 1160, 1176, 16], + [1154, "19:21", "19:31", 1161, 1171, 10], + [1155, "19:23", "20:30", 1163, 1230, 67], + [1156, "19:23", "19:35", 1163, 1175, 12], + [1157, "19:24", "19:45", 1164, 1185, 21], + [1158, "19:24", "19:45", 1164, 1185, 21], + [1159, "19:25", "20:04", 1165, 1204, 39], + [1160, "19:26", "20:08", 1166, 1208, 42], + [1161, "19:29", "20:02", 1169, 1202, 33], + [1162, "19:29", "20:18", 1169, 1218, 49], + [1163, "19:29", "20:41", 1169, 1241, 72], + [1164, "19:30", "19:40", 1170, 1180, 10], + [1165, "19:33", "20:54", 1173, 1254, 81], + [1166, "19:33", "20:17", 1173, 1217, 44], + [1167, "19:34", "19:55", 1174, 1195, 21], + [1168, "19:35", "20:14", 1175, 1214, 39], + [1169, "19:38", "20:05", 1178, 1205, 27], + [1170, "19:38", "20:45", 1178, 1245, 67], + [1171, "19:39", "20:12", 1179, 1212, 33], + [1172, "19:40", "19:50", 1180, 1190, 10], + [1173, "19:40", "19:56", 1180, 1196, 16], + [1174, "19:41", "20:27", 1181, 1227, 46], + [1175, "19:43", "19:55", 1183, 1195, 12], + [1176, "19:44", "20:05", 1184, 1205, 21], + [1177, "19:44", "20:33", 1184, 1233, 49], + [1178, "19:44", "21:00", 1184, 1260, 76], + [1179, "19:45", "20:24", 1185, 1224, 39], + [1180, "19:48", "20:37", 1188, 1237, 49], + [1181, "19:48", "21:09", 1188, 1269, 81], + [1182, "19:50", "20:00", 1190, 1200, 10], + [1183, "19:52", 
"20:29", 1192, 1229, 37], + [1184, "19:53", "20:08", 1193, 1208, 15], + [1185, "19:53", "21:02", 1193, 1262, 69], + [1186, "19:53", "20:20", 1193, 1220, 27], + [1187, "19:54", "20:19", 1194, 1219, 25], + [1188, "19:55", "20:34", 1195, 1234, 39], + [1189, "19:56", "20:34", 1196, 1234, 38], + [1190, "19:59", "20:48", 1199, 1248, 49], + [1191, "19:59", "21:20", 1199, 1280, 81], + [1192, "20:00", "20:16", 1200, 1216, 16], + [1193, "20:00", "20:10", 1200, 1210, 10], + [1194, "20:03", "20:42", 1203, 1242, 39], + [1195, "20:03", "21:24", 1203, 1284, 81], + [1196, "20:04", "20:29", 1204, 1229, 25], + [1197, "20:05", "20:48", 1205, 1248, 43], + [1198, "20:07", "20:44", 1207, 1244, 37], + [1199, "20:08", "20:40", 1208, 1240, 32], + [1200, "20:08", "20:35", 1208, 1235, 27], + [1201, "20:10", "20:20", 1210, 1220, 10], + [1202, "20:10", "20:22", 1210, 1222, 12], + [1203, "20:11", "20:47", 1211, 1247, 36], + [1204, "20:14", "21:04", 1214, 1264, 50], + [1205, "20:14", "21:03", 1214, 1263, 49], + [1206, "20:17", "21:03", 1217, 1263, 46], + [1207, "20:18", "21:39", 1218, 1299, 81], + [1208, "20:20", "20:30", 1220, 1230, 10], + [1209, "20:20", "20:57", 1220, 1257, 37], + [1210, "20:20", "20:36", 1220, 1236, 16], + [1211, "20:22", "20:59", 1222, 1259, 37], + [1212, "20:22", "20:42", 1222, 1242, 20], + [1213, "20:24", "20:49", 1224, 1249, 25], + [1214, "20:27", "21:22", 1227, 1282, 55], + [1215, "20:29", "21:18", 1229, 1278, 49], + [1216, "20:30", "21:07", 1230, 1267, 37], + [1217, "20:30", "20:40", 1230, 1240, 10], + [1218, "20:30", "20:40", 1230, 1240, 10], + [1219, "20:30", "21:40", 1230, 1300, 70], + [1220, "20:32", "21:18", 1232, 1278, 46], + [1221, "20:35", "21:54", 1235, 1314, 79], + [1222, "20:37", "21:14", 1237, 1274, 37], + [1223, "20:38", "21:08", 1238, 1268, 30], + [1224, "20:40", "20:50", 1240, 1250, 10], + [1225, "20:40", "21:17", 1240, 1277, 37], + [1226, "20:40", "20:56", 1240, 1256, 16], + [1227, "20:44", "21:33", 1244, 1293, 49], + [1228, "20:47", "21:33", 1247, 1293, 46], + [1229, "20:47", "21:42", 1247, 1302, 55], + [1230, "20:50", "21:00", 1250, 1260, 10], + [1231, "20:50", "22:00", 1250, 1320, 70], + [1232, "20:50", "22:09", 1250, 1329, 79], + [1233, "20:50", "21:27", 1250, 1287, 37], + [1234, "20:52", "21:29", 1252, 1289, 37], + [1235, "20:53", "21:20", 1253, 1280, 27], + [1236, "20:56", "21:11", 1256, 1271, 15], + [1237, "20:59", "21:48", 1259, 1308, 49], + [1238, "21:00", "21:10", 1260, 1270, 10], + [1239, "21:00", "21:37", 1260, 1297, 37], + [1240, "21:02", "21:48", 1262, 1308, 46], + [1241, "21:05", "22:24", 1265, 1344, 79], + [1242, "21:07", "21:44", 1267, 1304, 37], + [1243, "21:07", "22:02", 1267, 1322, 55], + [1244, "21:08", "21:38", 1268, 1298, 30], + [1245, "21:10", "22:25", 1270, 1345, 75], + [1246, "21:10", "21:20", 1270, 1280, 10], + [1247, "21:10", "21:47", 1270, 1307, 37], + [1248, "21:14", "22:03", 1274, 1323, 49], + [1249, "21:17", "22:03", 1277, 1323, 46], + [1250, "21:20", "22:18", 1280, 1338, 58], + [1251, "21:20", "21:57", 1280, 1317, 37], + [1252, "21:20", "21:30", 1280, 1290, 10], + [1253, "21:22", "21:59", 1282, 1319, 37], + [1254, "21:24", "21:49", 1284, 1309, 25], + [1255, "21:27", "22:21", 1287, 1341, 54], + [1256, "21:30", "22:07", 1290, 1327, 37], + [1257, "21:30", "22:20", 1290, 1340, 50], + [1258, "21:30", "21:40", 1290, 1300, 10], + [1259, "21:32", "22:18", 1292, 1338, 46], + [1260, "21:32", "22:01", 1292, 1321, 29], + [1261, "21:35", "22:54", 1295, 1374, 79], + [1262, "21:37", "22:14", 1297, 1334, 37], + [1263, "21:39", "21:55", 1299, 1315, 16], + [1264, 
"21:40", "22:17", 1300, 1337, 37], + [1265, "21:40", "21:50", 1300, 1310, 10], + [1266, "21:41", "22:08", 1301, 1328, 27], + [1267, "21:47", "22:16", 1307, 1336, 29], + [1268, "21:47", "22:51", 1307, 1371, 64], + [1269, "21:47", "22:33", 1307, 1353, 46], + [1270, "21:48", "22:03", 1308, 1323, 15], + [1271, "21:50", "22:55", 1310, 1375, 65], + [1272, "21:50", "22:27", 1310, 1347, 37], + [1273, "21:50", "22:00", 1310, 1320, 10], + [1274, "21:52", "22:29", 1312, 1349, 37], + [1275, "21:53", "22:19", 1313, 1339, 26], + [1276, "22:00", "22:38", 1320, 1358, 38], + [1277, "22:00", "22:10", 1320, 1330, 10], + [1278, "22:02", "22:12", 1322, 1332, 10], + [1279, "22:02", "22:48", 1322, 1368, 46], + [1280, "22:04", "22:31", 1324, 1351, 27], + [1281, "22:05", "23:24", 1325, 1404, 79], + [1282, "22:07", "22:44", 1327, 1364, 37], + [1283, "22:07", "22:39", 1327, 1359, 32], + [1284, "22:09", "22:25", 1329, 1345, 16], + [1285, "22:10", "23:25", 1330, 1405, 75], + [1286, "22:13", "22:38", 1333, 1358, 25], + [1287, "22:13", "22:53", 1333, 1373, 40], + [1288, "22:17", "22:27", 1337, 1347, 10], + [1289, "22:17", "23:03", 1337, 1383, 46], + [1290, "22:19", "22:46", 1339, 1366, 27], + [1291, "22:22", "22:59", 1342, 1379, 37], + [1292, "22:24", "22:48", 1344, 1368, 24], + [1293, "22:27", "22:52", 1347, 1372, 25], + [1294, "22:27", "23:21", 1347, 1401, 54], + [1295, "22:28", "23:08", 1348, 1388, 40], + [1296, "22:30", "23:17", 1350, 1397, 47], + [1297, "22:32", "22:42", 1352, 1362, 10], + [1298, "22:32", "23:11", 1352, 1391, 39], + [1299, "22:34", "23:01", 1354, 1381, 27], + [1300, "22:35", "23:54", 1355, 1434, 79], + [1301, "22:37", "23:14", 1357, 1394, 37], + [1302, "22:43", "23:23", 1363, 1403, 40], + [1303, "22:43", "23:08", 1363, 1388, 25], + [1304, "22:47", "23:33", 1367, 1413, 46], + [1305, "22:47", "22:57", 1367, 1377, 10], + [1306, "22:49", "23:16", 1369, 1396, 27], + [1307, "22:52", "23:29", 1372, 1409, 37], + [1308, "22:53", "23:15", 1373, 1395, 22], + [1309, "22:55", "23:55", 1375, 1435, 60], + [1310, "22:57", "23:51", 1377, 1431, 54], + [1311, "22:58", "23:38", 1378, 1418, 40], + [1312, "23:02", "23:41", 1382, 1421, 39], + [1313, "23:02", "23:12", 1382, 1392, 10], + [1314, "23:04", "23:31", 1384, 1411, 27], + [1315, "23:05", "00:24", 1385, 1464, 79], + [1316, "23:07", "23:44", 1387, 1424, 37], + [1317, "23:13", "23:53", 1393, 1433, 40], + [1318, "23:13", "23:38", 1393, 1418, 25], + [1319, "23:17", "00:03", 1397, 1443, 46], + [1320, "23:17", "23:27", 1397, 1407, 10], + [1321, "23:19", "23:46", 1399, 1426, 27], + [1322, "23:22", "23:59", 1402, 1439, 37], + [1323, "23:25", "00:25", 1405, 1465, 60], + [1324, "23:27", "00:21", 1407, 1461, 54], + [1325, "23:28", "00:08", 1408, 1448, 40], + [1326, "23:32", "23:42", 1412, 1422, 10], + [1327, "23:34", "00:01", 1414, 1441, 27], + [1328, "23:35", "01:05", 1415, 1505, 90], + [1329, "23:37", "00:09", 1417, 1449, 32], + [1330, "23:43", "00:23", 1423, 1463, 40], + [1331, "23:43", "00:08", 1423, 1448, 25], + [1332, "23:46", "00:01", 1426, 1441, 15], + [1333, "23:47", "23:57", 1427, 1437, 10], + [1334, "23:47", "00:33", 1427, 1473, 46], + [1335, "23:52", "00:24", 1432, 1464, 32], + [1336, "23:55", "00:49", 1435, 1489, 54], + [1337, "23:57", "00:57", 1437, 1497, 60], + [1338, "23:58", "00:38", 1438, 1478, 40], + [1339, "00:02", "00:12", 1442, 1452, 10], + [1340, "00:07", "00:39", 1447, 1479, 32], + [1341, "00:13", "00:38", 1453, 1478, 25], + [1342, "00:13", "00:51", 1453, 1491, 38], + [1343, "00:15", "01:14", 1455, 1514, 59], + [1344, "00:17", "01:23", 1457, 1523, 66], 
+ [1345, "00:23", "00:33", 1463, 1473, 10], + [1346, "00:24", "00:40", 1464, 1480, 16], + [1347, "00:25", "01:12", 1465, 1512, 47], + [1348, "00:28", "01:07", 1468, 1507, 39], + [1349, "00:33", "01:05", 1473, 1505, 32], + [1350, "00:43", "01:21", 1483, 1521, 38], + [1351, "00:44", "00:54", 1484, 1494, 10], + [1352, "00:47", "01:09", 1487, 1509, 22], + [1353, "00:47", "01:26", 1487, 1526, 39], + [1354, "00:54", "01:04", 1494, 1504, 10], + [1355, "00:57", "01:07", 1497, 1507, 10], ] # yapf:disable @@ -1680,17 +1679,16 @@ def find_minimum_number_of_drivers(shifts, params): # Computed data. total_driving_time = sum(shift[5] for shift in shifts) - min_num_drivers = int( - math.ceil(total_driving_time * 1.0 / max_driving_time)) + min_num_drivers = int(math.ceil(total_driving_time * 1.0 / max_driving_time)) min_start_time = min(shift[3] for shift in shifts) max_end_time = max(shift[4] for shift in shifts) - print('Bus driver scheduling') - print(' num shifts =', num_shifts) - print(' total driving time =', total_driving_time, 'minutes') - print(' min num drivers =', min_num_drivers) - print(' min start time =', min_start_time) - print(' max end time =', max_end_time) + print("Bus driver scheduling") + print(" num shifts =", num_shifts) + print(" total driving time =", total_driving_time, "minutes") + print(" min num drivers =", min_num_drivers) + print(" min start time =", min_start_time) + print(" max end time =", max_end_time) # We are going to build a flow from the start of the day to the end # of the day. @@ -1715,11 +1713,11 @@ def find_minimum_number_of_drivers(shifts, params): # Create all the shift variables before iterating on the transitions # between these shifts. for shift in range(num_shifts): - driving_time[shift] = model.NewIntVar(0, max_driving_time, 'dt_%i' % shift) + driving_time[shift] = model.NewIntVar(0, max_driving_time, "dt_%i" % shift) no_break_driving_time[shift] = model.NewIntVar( - 0, max_driving_time_without_pauses, 'nbdt_%i' % shift) - working_time[shift] = model.NewIntVar( - 0, max_working_time, 'wt_%i' % shift) + 0, max_driving_time_without_pauses, "nbdt_%i" % shift + ) + working_time[shift] = model.NewIntVar(0, max_working_time, "wt_%i" % shift) for shift in range(num_shifts): duration = shifts[shift][5] @@ -1727,19 +1725,19 @@ def find_minimum_number_of_drivers(shifts, params): # Arc from source to shift. # - set the working time of the driver # - increase driving time and driving time since the last break - source_lit = model.NewBoolVar('from source to %i' % shift) + source_lit = model.NewBoolVar("from source to %i" % shift) all_literals.append(source_lit) outgoing_source_literals.append(source_lit) incoming_literals[shift].append(source_lit) model.Add(driving_time[shift] == duration).OnlyEnforceIf(source_lit) - model.Add(no_break_driving_time[shift] == duration).OnlyEnforceIf( - source_lit) + model.Add(no_break_driving_time[shift] == duration).OnlyEnforceIf(source_lit) model.Add(working_time[shift] == duration + extra_time).OnlyEnforceIf( - source_lit) + source_lit + ) # Arc from shift to sink # - checks that working time is greater than min_working_time - sink_lit = model.NewBoolVar('from %i to sink' % shift) + sink_lit = model.NewBoolVar("from %i to sink" % shift) all_literals.append(sink_lit) outgoing_literals[shift].append(sink_lit) incoming_sink_literals.append(sink_lit) @@ -1752,25 +1750,29 @@ def find_minimum_number_of_drivers(shifts, params): if delay > max_break: break # Assumes start times are sorted.
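The arcs being reflowed in these hunks all follow one CP-SAT pattern: a Boolean literal per transition, with linear constraints attached via OnlyEnforceIf so that accumulated driving time propagates only along selected arcs. A minimal self-contained sketch of that pattern, using made-up two-shift data and the newer snake_case API seen elsewhere in this diff:

from ortools.sat.python import cp_model

# Hypothetical data: two shifts of 30 and 40 minutes.
durations = [30, 40]
max_driving_time = 540

model = cp_model.CpModel()
driving_time = [
    model.new_int_var(0, max_driving_time, f"dt_{i}") for i in range(2)
]

# One literal per arc; here we force the single shift-to-shift arc on.
lit = model.new_bool_var("from 0 to 1")
model.add(lit == 1)

# The first shift starts the driver's day...
model.add(driving_time[0] == durations[0]).only_enforce_if(lit)
# ...and the accumulated time propagates along the selected arc.
model.add(driving_time[1] == driving_time[0] + durations[1]).only_enforce_if(lit)

solver = cp_model.CpSolver()
if solver.solve(model) == cp_model.OPTIMAL:
    print("driving time after shift 1:", solver.value(driving_time[1]))  # 70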
other_duration = shifts[other][5] - lit = model.NewBoolVar('from %i to %i' % (shift, other)) + lit = model.NewBoolVar("from %i to %i" % (shift, other)) all_literals.append(lit) # Increase driving time - model.Add(driving_time[other] == - driving_time[shift] + other_duration).OnlyEnforceIf(lit) + model.Add( + driving_time[other] == driving_time[shift] + other_duration + ).OnlyEnforceIf(lit) # Increase no_break_driving or reset it to 0 depending on the delay if delay >= min_pause_after_4h: - model.Add(no_break_driving_time[other] == - other_duration).OnlyEnforceIf(lit) + model.Add(no_break_driving_time[other] == other_duration).OnlyEnforceIf( + lit + ) else: model.Add( - no_break_driving_time[other] == - no_break_driving_time[shift] + other_duration).OnlyEnforceIf(lit) + no_break_driving_time[other] + == no_break_driving_time[shift] + other_duration + ).OnlyEnforceIf(lit) # Increase working time - model.Add(working_time[other] == working_time[shift] + delay + - other_duration).OnlyEnforceIf(lit) + model.Add( + working_time[other] == working_time[shift] + delay + other_duration + ).OnlyEnforceIf(lit) # Add arc outgoing_literals[shift].append(lit) @@ -1782,16 +1784,16 @@ def find_minimum_number_of_drivers(shifts, params): model.Add(sum(incoming_literals[shift]) == 1) # Num drivers - num_drivers = model.NewIntVar(min_num_drivers, min_num_drivers * 3, 'num_drivers') + num_drivers = model.NewIntVar(min_num_drivers, min_num_drivers * 3, "num_drivers") model.Add(sum(incoming_sink_literals) == num_drivers) model.Add(sum(outgoing_source_literals) == num_drivers) - model.Minimize(num_drivers) + model.Minimize(num_drivers) # Solve model. solver = cp_model.CpSolver() solver.parameters.log_search_progress = True - #solver.parameters.num_search_workers = 16 + # solver.parameters.num_search_workers = 16 # solver.parameters.boolean_encoding_level = 0 # solver.parameters.lns_focus_on_decision_variables = True status = solver.Solve(model) @@ -1801,13 +1803,13 @@ def find_minimum_number_of_drivers(shifts, params): # Display solution optimal_num_drivers = int(solver.ObjectiveValue()) - print('minimal number of drivers =', optimal_num_drivers) + print("minimal number of drivers =", optimal_num_drivers) return optimal_num_drivers def main(args): """Optimize the bus driver allocation in two passes.""" - print('----------- first pass: minimize the number of drivers') + print("----------- first pass: minimize the number of drivers") shifts = [] if args.instance == 1: shifts = SAMPLE_SHIFTS_SMALL @@ -1817,9 +1819,9 @@ def main(args): shifts = SAMPLE_SHIFTS_LARGE num_drivers = find_minimum_number_of_drivers(shifts, args.params) - print('----------- second pass: minimize the sum of working times') - #bus_driver_scheduling(False, num_drivers) + print("----------- second pass: minimize the sum of working times") + # bus_driver_scheduling(False, num_drivers) -if __name__ == '__main__': +if __name__ == "__main__": main(PARSER.parse_args()) diff --git a/examples/python/chemical_balance_sat.py b/examples/python/chemical_balance_sat.py index 5db31020fc4..fa58d9b3c7f 100644 --- a/examples/python/chemical_balance_sat.py +++ b/examples/python/chemical_balance_sat.py @@ -89,24 +89,24 @@ def chemical_balance(): # Creates a solver and solves. solver = cp_model.CpSolver() status = solver.solve(model) - print(f"Status = {solver.status_name(status)}") - # The objective value of the solution. 
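The chemical_balance_sat.py hunk here moves all reporting behind an OPTIMAL check: reading solver.value() or objective_value when no solution was found is meaningless. A minimal sketch of the guard pattern applied throughout this diff:

from ortools.sat.python import cp_model

model = cp_model.CpModel()
x = model.new_int_var(0, 10, "x")
model.maximize(x)

solver = cp_model.CpSolver()
status = solver.solve(model)

# Only read the objective and variable values when a solution exists.
if status in (cp_model.OPTIMAL, cp_model.FEASIBLE):
    print("objective =", solver.objective_value)
    print("x =", solver.value(x))
else:
    print("no solution:", solver.status_name(status))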
- print(f"Optimal objective value = {solver.objective_value / 10000.0}") - - for s in all_sets: - print( - f" {chemical_set[s][0]} = {solver.value(set_vars[s]) / 1000.0}", - end=" ", - ) - print() - for p in all_products: - name = max_quantities[p][0] - max_quantity = max_quantities[p][1] - quantity = sum( - solver.value(set_vars[s]) / 1000.0 * chemical_set[s][p + 1] - for s in all_sets - ) - print(f"{name}: {quantity} out of {max_quantity}") + if status == cp_model.OPTIMAL: + # The objective value of the solution. + print(f"Optimal objective value = {solver.objective_value / 10000.0}") + + for s in all_sets: + print( + f" {chemical_set[s][0]} = {solver.value(set_vars[s]) / 1000.0}", + end=" ", + ) + print() + for p in all_products: + name = max_quantities[p][0] + max_quantity = max_quantities[p][1] + quantity = sum( + solver.value(set_vars[s]) / 1000.0 * chemical_set[s][p + 1] + for s in all_sets + ) + print(f"{name}: {quantity:.3f} out of {max_quantity}") def main(argv: Sequence[str]) -> None: diff --git a/examples/python/clustering_sat.py b/examples/python/clustering_sat.py index 86754e5cd68..14701b38011 100644 --- a/examples/python/clustering_sat.py +++ b/examples/python/clustering_sat.py @@ -65,7 +65,7 @@ ] -def clustering_sat(): +def clustering_sat() -> None: """Entry point of the program.""" num_nodes = len(distance_matrix) print("Num nodes =", num_nodes) diff --git a/examples/python/code_samples.bzl b/examples/python/code_samples.bzl index 34617ba3733..2c465e361bf 100644 --- a/examples/python/code_samples.bzl +++ b/examples/python/code_samples.bzl @@ -14,6 +14,7 @@ """Helper macro to compile and test code samples.""" load("@pip_deps//:requirements.bzl", "requirement") +load("@rules_python//python:defs.bzl", "py_binary", "py_test") PYTHON_DEPS = [ "//ortools/init/python:init", @@ -32,7 +33,7 @@ PYTHON_DEPS = [ ] def code_sample_compile_py(name): - native.py_binary( + py_binary( name = name + "_py3", srcs = [name + ".py"], main = name + ".py", @@ -42,7 +43,7 @@ def code_sample_compile_py(name): ) def code_sample_test_py(name): - native.py_test( + py_test( name = name + "_py_test", size = "medium", srcs = [name + ".py"], @@ -53,7 +54,7 @@ def code_sample_test_py(name): ) def code_sample_test_arg_py(name, suffix, args, data): - native.py_test( + py_test( name = name + "_" + suffix + "_py_test", size = "medium", srcs = [name + ".py"], diff --git a/examples/python/cryptarithm_sat.py b/examples/python/cryptarithm_sat.py index 083e89e3d06..39e98e5e0f1 100644 --- a/examples/python/cryptarithm_sat.py +++ b/examples/python/cryptarithm_sat.py @@ -19,7 +19,7 @@ from ortools.sat.python import cp_model -def send_more_money(): +def send_more_money() -> None: """solve the cryptarithmic puzzle SEND+MORE=MONEY.""" model = cp_model.CpModel() @@ -74,7 +74,7 @@ def send_more_money(): print("y:", solver.value(y)) -def main(_): +def main(_) -> None: send_more_money() diff --git a/examples/python/flexible_job_shop_sat.py b/examples/python/flexible_job_shop_sat.py index b69a9fb0a46..afa8fbc86b4 100644 --- a/examples/python/flexible_job_shop_sat.py +++ b/examples/python/flexible_job_shop_sat.py @@ -33,20 +33,20 @@ class SolutionPrinter(cp_model.CpSolverSolutionCallback): """Print intermediate solutions.""" - def __init__(self): + def __init__(self) -> None: cp_model.CpSolverSolutionCallback.__init__(self) self.__solution_count = 0 - def on_solution_callback(self): + def on_solution_callback(self) -> None: """Called at each new solution.""" print( - "Solution %i, time = %f s, objective = %i" - % 
(self.__solution_count, self.wall_time, self.objective_value) + f"Solution {self.__solution_count}, time = {self.wall_time} s," + f" objective = {self.objective_value}" ) self.__solution_count += 1 -def flexible_jobshop(): +def flexible_jobshop() -> None: """solve a small flexible jobshop problem.""" # Data part. jobs = [ # task = (processing_time, machine_id) @@ -84,13 +84,13 @@ def flexible_jobshop(): max_task_duration = max(max_task_duration, alternative[0]) horizon += max_task_duration - print("Horizon = %i" % horizon) + print(f"Horizon = {horizon}") # Global storage of variables. intervals_per_resources = collections.defaultdict(list) starts = {} # indexed by (job_id, task_id). presences = {} # indexed by (job_id, task_id, alt_id). - job_ends = [] + job_ends: list[cp_model.IntVar] = [] # Scan the jobs and create the relevant variables and intervals. for job_id in all_jobs: @@ -112,7 +112,7 @@ def flexible_jobshop(): max_duration = max(max_duration, alt_duration) # Create main interval for the task. - suffix_name = "_j%i_t%i" % (job_id, task_id) + suffix_name = f"_j{job_id}_t{task_id}" start = model.new_int_var(0, horizon, "start" + suffix_name) duration = model.new_int_var( min_duration, max_duration, "duration" + suffix_name @@ -134,7 +134,7 @@ def flexible_jobshop(): if num_alternatives > 1: l_presences = [] for alt_id in all_alternatives: - alt_suffix = "_j%i_t%i_a%i" % (job_id, task_id, alt_id) + alt_suffix = f"_j{job_id}_t{task_id}_a{alt_id}" l_presence = model.new_bool_var("presence" + alt_suffix) l_start = model.new_int_var(0, horizon, "start" + alt_suffix) l_duration = task[alt_id][0] @@ -161,7 +161,8 @@ def flexible_jobshop(): intervals_per_resources[task[0][1]].append(interval) presences[(job_id, task_id, 0)] = model.new_constant(1) - job_ends.append(previous_end) + if previous_end is not None: + job_ends.append(previous_end) # Create machines constraints. for machine_id in all_machines: @@ -180,29 +181,25 @@ def flexible_jobshop(): status = solver.solve(model, solution_printer) # Print final solution. 
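flexible_job_shop_sat.py (above) selects one machine per task by pairing a master interval with optional per-machine copies, linked through presence literals and an exactly-one constraint. A stripped-down sketch of that encoding for one task with two hypothetical alternatives:

from ortools.sat.python import cp_model

model = cp_model.CpModel()
horizon = 20

# Master interval; its duration spans the min/max over the alternatives.
start = model.new_int_var(0, horizon, "start")
duration = model.new_int_var(3, 5, "duration")
end = model.new_int_var(0, horizon, "end")
model.new_interval_var(start, duration, end, "task")

# Hypothetical alternatives: machine 0 needs 3 time units, machine 1 needs 5.
presences = []
for alt_id, alt_duration in enumerate([3, 5]):
    presence = model.new_bool_var(f"presence_a{alt_id}")
    alt_start = model.new_int_var(0, horizon, f"start_a{alt_id}")
    model.new_optional_interval_var(
        alt_start, alt_duration, alt_start + alt_duration, presence, f"alt_{alt_id}"
    )
    # If this alternative is chosen, it pins down the master interval.
    model.add(start == alt_start).only_enforce_if(presence)
    model.add(duration == alt_duration).only_enforce_if(presence)
    presences.append(presence)

# Exactly one machine performs the task.
model.add_exactly_one(presences)

solver = cp_model.CpSolver()
if solver.solve(model) == cp_model.OPTIMAL:
    chosen = [i for i, p in enumerate(presences) if solver.boolean_value(p)]
    print("machine:", chosen[0], "start:", solver.value(start))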
- for job_id in all_jobs: - print("Job %i:" % job_id) - for task_id in range(len(jobs[job_id])): - start_value = solver.value(starts[(job_id, task_id)]) - machine = -1 - duration = -1 - selected = -1 - for alt_id in range(len(jobs[job_id][task_id])): - if solver.value(presences[(job_id, task_id, alt_id)]): - duration = jobs[job_id][task_id][alt_id][0] - machine = jobs[job_id][task_id][alt_id][1] - selected = alt_id - print( - " task_%i_%i starts at %i (alt %i, machine %i, duration %i)" - % (job_id, task_id, start_value, selected, machine, duration) - ) - - print("solve status: %s" % solver.status_name(status)) - print("Optimal objective value: %i" % solver.objective_value) - print("Statistics") - print(" - conflicts : %i" % solver.num_conflicts) - print(" - branches : %i" % solver.num_branches) - print(" - wall time : %f s" % solver.wall_time) + if status in (cp_model.OPTIMAL, cp_model.FEASIBLE): + print(f"Optimal objective value: {solver.objective_value}") + for job_id in all_jobs: + print(f"Job {job_id}") + for task_id, task in enumerate(jobs[job_id]): + start_value = solver.value(starts[(job_id, task_id)]) + machine: int = -1 + task_duration: int = -1 + selected: int = -1 + for alt_id, alt in enumerate(task): + if solver.boolean_value(presences[(job_id, task_id, alt_id)]): + task_duration, machine = alt + selected = alt_id + print( + f" task_{job_id}_{task_id} starts at {start_value} (alt" + f" {selected}, machine {machine}, duration {task_duration})" + ) + + print(solver.response_stats()) flexible_jobshop() diff --git a/examples/python/gate_scheduling_sat.py b/examples/python/gate_scheduling_sat.py index ecf9d0fc03f..ff5180f4f90 100644 --- a/examples/python/gate_scheduling_sat.py +++ b/examples/python/gate_scheduling_sat.py @@ -29,7 +29,7 @@ from ortools.sat.python import cp_model -def main(_): +def main(_) -> None: """Solves the gate scheduling problem.""" model = cp_model.CpModel() @@ -67,34 +67,34 @@ def main(_): for i in all_jobs: # Create main interval. - start = model.new_int_var(0, horizon, "start_%i" % i) + start = model.new_int_var(0, horizon, f"start_{i}") duration = jobs[i][0] - end = model.new_int_var(0, horizon, "end_%i" % i) - interval = model.new_interval_var(start, duration, end, "interval_%i" % i) + end = model.new_int_var(0, horizon, f"end_{i}") + interval = model.new_interval_var(start, duration, end, f"interval_{i}") starts.append(start) intervals.append(interval) ends.append(end) demands.append(jobs[i][1]) # Create an optional copy of interval to be executed on machine 0. - performed_on_m0 = model.new_bool_var("perform_%i_on_m0" % i) + performed_on_m0 = model.new_bool_var(f"perform_{i}_on_m0") performed.append(performed_on_m0) - start0 = model.new_int_var(0, horizon, "start_%i_on_m0" % i) - end0 = model.new_int_var(0, horizon, "end_%i_on_m0" % i) + start0 = model.new_int_var(0, horizon, f"start_{i}_on_m0") + end0 = model.new_int_var(0, horizon, f"end_{i}_on_m0") interval0 = model.new_optional_interval_var( - start0, duration, end0, performed_on_m0, "interval_%i_on_m0" % i + start0, duration, end0, performed_on_m0, f"interval_{i}_on_m0" ) intervals0.append(interval0) # Create an optional copy of interval to be executed on machine 1. 
- start1 = model.new_int_var(0, horizon, "start_%i_on_m1" % i) - end1 = model.new_int_var(0, horizon, "end_%i_on_m1" % i) + start1 = model.new_int_var(0, horizon, f"start_{i}_on_m1") + end1 = model.new_int_var(0, horizon, f"end_{i}_on_m1") interval1 = model.new_optional_interval_var( start1, duration, end1, ~performed_on_m0, - "interval_%i_on_m1" % i, + f"interval_{i}_on_m1", ) intervals1.append(interval1) @@ -124,18 +124,24 @@ def main(_): # Output solution. if visualization.RunFromIPython(): output = visualization.SvgWrapper(solver.objective_value, max_width, 40.0) - output.AddTitle("Makespan = %i" % solver.objective_value) + output.AddTitle(f"Makespan = {solver.objective_value}") color_manager = visualization.ColorManager() color_manager.SeedRandomColor(0) for i in all_jobs: performed_machine = 1 - solver.value(performed[i]) - start = solver.value(starts[i]) + start_of_task = solver.value(starts[i]) d_x = jobs[i][0] d_y = jobs[i][1] s_y = performed_machine * (max_width - d_y) output.AddRectangle( - start, s_y, d_x, d_y, color_manager.RandomColor(), "black", "j%i" % i + start_of_task, + s_y, + d_x, + d_y, + color_manager.RandomColor(), + "black", + f"j{i}", ) output.AddXScale() @@ -143,17 +149,15 @@ def main(_): output.Display() else: print("Solution") - print(" - makespan = %i" % solver.objective_value) + print(f" - makespan = {solver.objective_value}") for i in all_jobs: performed_machine = 1 - solver.value(performed[i]) - start = solver.value(starts[i]) + start_of_task = solver.value(starts[i]) print( - " - Job %i starts at %i on machine %i" % (i, start, performed_machine) + f" - Job {i} starts at {start_of_task} on machine" + f" {performed_machine}" ) - print("Statistics") - print(" - conflicts : %i" % solver.num_conflicts) - print(" - branches : %i" % solver.num_branches) - print(" - wall time : %f s" % solver.wall_time) + print(solver.response_stats()) if __name__ == "__main__": diff --git a/examples/python/golomb8.py b/examples/python/golomb8.py index 44a464f32f5..427f20f2939 100755 --- a/examples/python/golomb8.py +++ b/examples/python/golomb8.py @@ -23,17 +23,14 @@ """ from absl import app -from absl import flags from ortools.constraint_solver import pywrapcp -FLAGS = flags.FLAGS - # We disable the following warning because it is a false positive on constraints # like: solver.Add(x == 0) # pylint: disable=g-explicit-bool-comparison -def main(_): +def main(_) -> None: # Create the solver. solver = pywrapcp.Solver("golomb ruler") diff --git a/examples/python/golomb_sat.py b/examples/python/golomb_sat.py index 43ca39f03ca..18ad572ef6d 100644 --- a/examples/python/golomb_sat.py +++ b/examples/python/golomb_sat.py @@ -38,7 +38,7 @@ ) -def solve_golomb_ruler(order: int, params: str): +def solve_golomb_ruler(order: int, params: str) -> None: """Solve the Golomb ruler problem.""" # Create the model. model = cp_model.CpModel() @@ -76,18 +76,13 @@ def solve_golomb_ruler(order: int, params: str): status = solver.solve(model, solution_printer) # Print solution. 
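Several files in this diff (golomb_sat.py just below, hidato_sat.py, gate_scheduling_sat.py, jobshop_with_maintenance_sat.py) collapse hand-written statistics printing into a single solver.response_stats() call, which formats the same counters itself. A small sketch of the two styles:

from ortools.sat.python import cp_model

model = cp_model.CpModel()
x = model.new_int_var(0, 5, "x")
model.maximize(x)

solver = cp_model.CpSolver()
solver.solve(model)

# Old style, removed throughout this diff: pick counters one by one.
print(f"- conflicts: {solver.num_conflicts}")
print(f"- branches : {solver.num_branches}")
print(f"- wall time: {solver.wall_time}s")

# New style: one preformatted block with the same counters and more.
print(solver.response_stats())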
- print(f"status: {solver.status_name(status)}") if status in (cp_model.OPTIMAL, cp_model.FEASIBLE): for idx, var in enumerate(marks): print(f"mark[{idx}]: {solver.value(var)}") intervals = [solver.value(diff) for diff in diffs] intervals.sort() print(f"intervals: {intervals}") - - print("Statistics:") - print(f"- conflicts: {solver.num_conflicts}") - print(f"- branches : {solver.num_branches}") - print(f"- wall time: {solver.wall_time}s\n") + print(solver.response_stats()) def main(argv: Sequence[str]) -> None: diff --git a/examples/python/hidato_sat.py b/examples/python/hidato_sat.py index cd20c506509..335348fce29 100755 --- a/examples/python/hidato_sat.py +++ b/examples/python/hidato_sat.py @@ -74,7 +74,7 @@ def print_matrix(game: list[list[int]]) -> None: if game[i][j] == 0: line += " ." else: - line += "% 3s" % game[i][j] + line += f"{game[i][j]:3}" print(line) @@ -102,7 +102,7 @@ def build_puzzle(problem: int) -> Union[None, list[list[int]]]: elif problem == 3: # Problems from the book: - # Gyora Bededek: "Hidato: 2000 Pure Logic Puzzles" + # Gyora Bededek: 'Hidato: 2000 Pure Logic Puzzles' # Problem 1 (Practice) puzzle = [ [0, 0, 20, 0, 0], @@ -147,7 +147,7 @@ def build_puzzle(problem: int) -> Union[None, list[list[int]]]: return puzzle -def solve_hidato(puzzle: list[list[int]], index: int): +def solve_hidato(puzzle: list[list[int]], index: int) -> None: """solve the given hidato table.""" # Create the model. model = cp_model.CpModel() @@ -156,15 +156,15 @@ def solve_hidato(puzzle: list[list[int]], index: int): c = len(puzzle[0]) if not visualization.RunFromIPython(): print("") - print("----- Solving problem %i -----" % index) + print(f"----- Solving problem {index} -----") print("") - print(("Initial game (%i x %i)" % (r, c))) + print(f"Initial game ({r} x {c})") print_matrix(puzzle) # # Declare variables. # - positions = [model.new_int_var(0, r * c - 1, "p[%i]" % i) for i in range(r * c)] + positions = [model.new_int_var(0, r * c - 1, f"p[{i}]") for i in range(r * c)] # # Constraints. @@ -202,7 +202,7 @@ def solve_hidato(puzzle: list[list[int]], index: int): color = "white" if puzzle[y][x] == 0 else "lightgreen" output.AddRectangle(x, r - y - 1, 1, 1, color, "black", str(i + 1)) - output.AddTitle("Puzzle %i solved in %f s" % (index, solver.wall_time)) + output.AddTitle(f"Puzzle {index} solved in {solver.wall_time:.2f} s") output.Display() else: print_solution( @@ -210,10 +210,7 @@ def solve_hidato(puzzle: list[list[int]], index: int): r, c, ) - print("Statistics") - print(" - conflicts : %i" % solver.num_conflicts) - print(" - branches : %i" % solver.num_branches) - print(" - wall time : %f s" % solver.wall_time) + print(solver.response_stats()) def main(_): diff --git a/examples/python/jobshop_ft06_distance_sat.py b/examples/python/jobshop_ft06_distance_sat.py index 60ce5be6415..92fb43ddc7b 100755 --- a/examples/python/jobshop_ft06_distance_sat.py +++ b/examples/python/jobshop_ft06_distance_sat.py @@ -36,7 +36,7 @@ def distance_between_jobs(x: int, y: int) -> int: return abs(x - y) -def jobshop_ft06_distance(): +def jobshop_ft06_distance() -> None: """Solves the ft06 jobshop with distances between tasks.""" # Creates the model. 
model = cp_model.CpModel() @@ -73,11 +73,11 @@ def jobshop_ft06_distance(): all_tasks = {} for i in all_jobs: for j in all_machines: - start_var = model.new_int_var(0, horizon, "start_%i_%i" % (i, j)) + start_var = model.new_int_var(0, horizon, f"start_{i}_{j}") duration = durations[i][j] - end_var = model.new_int_var(0, horizon, "end_%i_%i" % (i, j)) + end_var = model.new_int_var(0, horizon, f"end_{i}_{j}") interval_var = model.new_interval_var( - start_var, duration, end_var, "interval_%i_%i" % (i, j) + start_var, duration, end_var, f"interval_{i}_{j}" ) all_tasks[(i, j)] = task_type( start=start_var, end=end_var, interval=interval_var @@ -101,16 +101,16 @@ def jobshop_ft06_distance(): arcs = [] for j1 in range(len(job_intervals)): # Initial arc from the dummy node (0) to a task. - start_lit = model.new_bool_var("%i is first job" % j1) + start_lit = model.new_bool_var(f"{j1} is first job") arcs.append((0, j1 + 1, start_lit)) # Final arc from an arc to the dummy node. - arcs.append((j1 + 1, 0, model.new_bool_var("%i is last job" % j1))) + arcs.append((j1 + 1, 0, model.new_bool_var(f"{j1} is last job"))) for j2 in range(len(job_intervals)): if j1 == j2: continue - lit = model.new_bool_var("%i follows %i" % (j2, j1)) + lit = model.new_bool_var(f"{j2} follows {j1}") arcs.append((j1 + 1, j2 + 1, lit)) # We add the reified precedence to link the literal with the @@ -140,7 +140,8 @@ def jobshop_ft06_distance(): # Output solution. if status == cp_model.OPTIMAL: - print("Optimal makespan: %i" % solver.objective_value) + print(f"Optimal makespan: {solver.objective_value}") + print(solver.response_stats()) jobshop_ft06_distance() diff --git a/examples/python/jobshop_ft06_sat.py b/examples/python/jobshop_ft06_sat.py index 1b8255728b5..b5ec35e2453 100755 --- a/examples/python/jobshop_ft06_sat.py +++ b/examples/python/jobshop_ft06_sat.py @@ -66,11 +66,11 @@ def jobshop_ft06() -> None: all_tasks = {} for i in all_jobs: for j in all_machines: - start_var = model.new_int_var(0, horizon, "start_%i_%i" % (i, j)) + start_var = model.new_int_var(0, horizon, f"start_{i}_{j}") duration = durations[i][j] - end_var = model.new_int_var(0, horizon, "end_%i_%i" % (i, j)) + end_var = model.new_int_var(0, horizon, f"end_{i}_{j}") interval_var = model.new_interval_var( - start_var, duration, end_var, "interval_%i_%i" % (i, j) + start_var, duration, end_var, f"interval_{i}_{j}" ) all_tasks[(i, j)] = task_type( start=start_var, end=end_var, interval=interval_var @@ -113,7 +113,7 @@ def jobshop_ft06() -> None: ] visualization.DisplayJobshop(starts, durations, machines, "FT06") else: - print("Optimal makespan: %i" % solver.objective_value) + print(f"Optimal makespan: {solver.objective_value}") jobshop_ft06() diff --git a/examples/python/jobshop_with_maintenance_sat.py b/examples/python/jobshop_with_maintenance_sat.py index 778e826194f..954b802d28e 100644 --- a/examples/python/jobshop_with_maintenance_sat.py +++ b/examples/python/jobshop_with_maintenance_sat.py @@ -23,20 +23,20 @@ class SolutionPrinter(cp_model.CpSolverSolutionCallback): """Print intermediate solutions.""" - def __init__(self): + def __init__(self) -> None: cp_model.CpSolverSolutionCallback.__init__(self) self.__solution_count = 0 - def on_solution_callback(self): + def on_solution_callback(self) -> None: """Called at each new solution.""" print( - "Solution %i, time = %f s, objective = %i" - % (self.__solution_count, self.wall_time, self.objective_value) + f"Solution {self.__solution_count}, time = {self.wall_time} s," + f" objective = 
{self.objective_value}" ) self.__solution_count += 1 -def jobshop_with_maintenance(): +def jobshop_with_maintenance() -> None: """Solves a jobshop with maintenance on one machine.""" # Create the model. model = cp_model.CpModel() @@ -54,7 +54,7 @@ def jobshop_with_maintenance(): horizon = sum(task[1] for job in jobs_data for task in job) # Named tuple to store information about created variables. - task_type = collections.namedtuple("Task", "start end interval") + task_type = collections.namedtuple("task_type", "start end interval") # Named tuple to manipulate solution information. assigned_task_type = collections.namedtuple( "assigned_task_type", "start job index duration" @@ -67,9 +67,8 @@ def jobshop_with_maintenance(): for job_id, job in enumerate(jobs_data): for entry in enumerate(job): task_id, task = entry - machine = task[0] - duration = task[1] - suffix = "_%i_%i" % (job_id, task_id) + machine, duration = task + suffix = f"_{job_id}_{task_id}" start_var = model.new_int_var(0, horizon, "start" + suffix) end_var = model.new_int_var(0, horizon, "end" + suffix) interval_var = model.new_interval_var( @@ -132,15 +131,15 @@ def jobshop_with_maintenance(): sol_line = " " for assigned_task in assigned_jobs[machine]: - name = "job_%i_%i" % (assigned_task.job, assigned_task.index) + name = f"job_{assigned_task.job}_{assigned_task.index}" # add spaces to output to align columns. - sol_line_tasks += "%-10s" % name + sol_line_tasks += f"{name:>10}" start = assigned_task.start duration = assigned_task.duration - sol_tmp = "[%i,%i]" % (start, start + duration) + sol_tmp = f"[{start}, {start + duration}]" # add spaces to output to align columns. - sol_line += "%-10s" % sol_tmp + sol_line += f"{sol_tmp:>10}" sol_line += "\n" sol_line_tasks += "\n" @@ -148,12 +147,9 @@ def jobshop_with_maintenance(): output += sol_line # Finally print the solution found. - print("Optimal Schedule Length: %i" % solver.objective_value) + print(f"Optimal Schedule Length: {solver.objective_value}") print(output) - print("Statistics") - print(" - conflicts : %i" % solver.num_conflicts) - print(" - branches : %i" % solver.num_branches) - print(" - wall time : %f s" % solver.wall_time) + print(solver.response_stats()) def main(argv: Sequence[str]) -> None: diff --git a/examples/python/knapsack_2d_sat.py b/examples/python/knapsack_2d_sat.py index b9b8521905a..21007efaa68 100644 --- a/examples/python/knapsack_2d_sat.py +++ b/examples/python/knapsack_2d_sat.py @@ -71,7 +71,9 @@ def build_data() -> tuple[pd.Series, int, int]: return (data, max_height, max_width) -def solve_with_duplicate_items(data: pd.Series, max_height: int, max_width: int): +def solve_with_duplicate_items( + data: pd.Series, max_height: int, max_width: int +) -> None: """solve the problem by building 2 items (rotated or not) for each item.""" # Derived data (expanded to individual items). data_widths = data["width"].to_numpy() @@ -162,7 +164,7 @@ def solve_with_duplicate_items(data: pd.Series, max_height: int, max_width: int) status = solver.solve(model) # Report solution. - if status == cp_model.OPTIMAL or status == cp_model.FEASIBLE: + if status in (cp_model.OPTIMAL, cp_model.FEASIBLE): used = {i for i in range(num_items) if solver.boolean_value(is_used[i])} data = pd.DataFrame( { @@ -264,7 +266,7 @@ def solve_with_duplicate_optional_items( status = solver.solve(model) # Report solution. 
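knapsack_2d_sat.py, whose status checks are tightened above, rests on a two-dimensional no-overlap over pairs of interval variables. A minimal sketch with two hypothetical rectangles (add_no_overlap_2d takes parallel lists of x- and y-intervals):

from ortools.sat.python import cp_model

model = cp_model.CpModel()
bin_width, bin_height = 10, 6
items = [(4, 3), (5, 2)]  # hypothetical (width, height) pairs

xs, ys, x_intervals, y_intervals = [], [], [], []
for i, (w, h) in enumerate(items):
    x = model.new_int_var(0, bin_width - w, f"x_{i}")
    y = model.new_int_var(0, bin_height - h, f"y_{i}")
    xs.append(x)
    ys.append(y)
    x_intervals.append(model.new_fixed_size_interval_var(x, w, f"xi_{i}"))
    y_intervals.append(model.new_fixed_size_interval_var(y, h, f"yi_{i}"))

# No two rectangles may overlap in the plane.
model.add_no_overlap_2d(x_intervals, y_intervals)

solver = cp_model.CpSolver()
if solver.solve(model) in (cp_model.OPTIMAL, cp_model.FEASIBLE):
    for i in range(len(items)):
        print(f"item {i} at ({solver.value(xs[i])}, {solver.value(ys[i])})")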
- if status == cp_model.OPTIMAL or status == cp_model.FEASIBLE: + if status in (cp_model.OPTIMAL, cp_model.FEASIBLE): used = {i for i in range(num_items) if solver.boolean_value(is_used[i])} data = pd.DataFrame( { @@ -385,7 +387,7 @@ def solve_with_rotations(data: pd.Series, max_height: int, max_width: int): status = solver.solve(model) # Report solution. - if status == cp_model.OPTIMAL or status == cp_model.FEASIBLE: + if status in (cp_model.OPTIMAL, cp_model.FEASIBLE): used = {i for i in range(num_items) if solver.boolean_value(is_used[i])} data = pd.DataFrame( { diff --git a/examples/python/line_balancing_sat.py b/examples/python/line_balancing_sat.py index a43cb0318e7..fc18b43606f 100644 --- a/examples/python/line_balancing_sat.py +++ b/examples/python/line_balancing_sat.py @@ -29,7 +29,7 @@ import collections import re -from typing import Sequence +from typing import Dict, Sequence from absl import app from absl import flags @@ -47,8 +47,8 @@ ) -class SectionInfo(object): - """Store model information for each section of the input file.""" +class SectionInfo: + """Store problem information for each section of the input file.""" def __init__(self): self.value = None @@ -66,44 +66,43 @@ def __str__(self): return "SectionInfo()" -def read_model(filename): - """Reads a .alb file and returns the model.""" +def read_problem(filename: str) -> Dict[str, SectionInfo]: + """Reads a .alb file and returns the problem.""" current_info = SectionInfo() - model = {} + problem: Dict[str, SectionInfo] = {} with open(filename, "r") as input_file: - print(f"Reading model from '{filename}'") - section_name = "" + print(f"Reading problem from '{filename}'") for line in input_file: stripped_line = line.strip() if not stripped_line: continue - match_section_def = re.match(r"<([\w\s]+)>", stripped_line) + match_section_def = re.fullmatch(r"<([\w\s]+)>", stripped_line) if match_section_def: section_name = match_section_def.group(1) if section_name == "end": continue current_info = SectionInfo() - model[section_name] = current_info + problem[section_name] = current_info continue - match_single_number = re.match(r"^([0-9]+)$", stripped_line) + match_single_number = re.fullmatch(r"^([0-9]+)$", stripped_line) if match_single_number: current_info.value = int(match_single_number.group(1)) continue - match_key_value = re.match(r"^([0-9]+)\s+([0-9]+)$", stripped_line) + match_key_value = re.fullmatch(r"^([0-9]+)\s+([0-9]+)$", stripped_line) if match_key_value: key = int(match_key_value.group(1)) value = int(match_key_value.group(2)) current_info.index_map[key] = value continue - match_pair = re.match(r"^([0-9]+),([0-9]+)$", stripped_line) + match_pair = re.fullmatch(r"^([0-9]+),([0-9]+)$", stripped_line) if match_pair: left = int(match_pair.group(1)) right = int(match_pair.group(2)) @@ -112,24 +111,26 @@ def read_model(filename): print(f"Unrecognized line '{stripped_line}'") - return model + return problem -def print_stats(model): - print("Model Statistics") - for key, value in model.items(): +def print_stats(problem: Dict[str, SectionInfo]) -> None: + print("Problem Statistics") + for key, value in problem.items(): print(f" - {key}: {value}") -def solve_model_greedily(model): +def solve_problem_greedily(problem: Dict[str, SectionInfo]) -> Dict[int, int]: """Compute a greedy solution.""" print("Solving using a Greedy heuristics") - num_tasks = model["number of tasks"].value + num_tasks = problem["number of tasks"].value + if num_tasks is None: + return {} all_tasks = range(1, num_tasks + 1) # Tasks are 1 based in the 
data. - precedences = model["precedence relations"].set_of_pairs - durations = model["task times"].index_map - cycle_time = model["cycle time"].value + precedences = problem["precedence relations"].set_of_pairs + durations = problem["task times"].index_map + cycle_time = problem["cycle time"].value weights = collections.defaultdict(int) successors = collections.defaultdict(list) @@ -142,7 +143,7 @@ def solve_model_greedily(model): if after in candidates: candidates.remove(after) - assignment = {} + assignment: Dict[int, int] = {} current_pod = 0 residual_capacity = cycle_time @@ -183,16 +184,20 @@ def solve_model_greedily(model): return assignment -def solve_boolean_model(model, hint): - """solve the given model.""" +def solve_problem_with_boolean_model( + problem: Dict[str, SectionInfo], hint: Dict[int, int] +) -> None: + """solve the given problem.""" print("Solving using the Boolean model") - # Model data - num_tasks = model["number of tasks"].value - all_tasks = range(1, num_tasks + 1) # Tasks are 1 based in the model. - durations = model["task times"].index_map - precedences = model["precedence relations"].set_of_pairs - cycle_time = model["cycle time"].value + # problem data + num_tasks = problem["number of tasks"].value + if num_tasks is None: + return + all_tasks = range(1, num_tasks + 1) # Tasks are 1 based in the problem. + durations = problem["task times"].index_map + precedences = problem["precedence relations"].set_of_pairs + cycle_time = problem["cycle time"].value num_pods = max(p for _, p in hint.items()) + 1 if hint else num_tasks - 1 all_pods = range(num_pods) @@ -272,16 +277,20 @@ def solve_boolean_model(model, hint): solver.solve(model) -def solve_scheduling_model(model, hint): - """solve the given model using a cumutive model.""" +def solve_problem_with_scheduling_model( + problem: Dict[str, SectionInfo], hint: Dict[int, int] +) -> None: + """solve the given problem using a cumulative model.""" print("Solving using the scheduling model") - # Model data - num_tasks = model["number of tasks"].value + # Problem data + num_tasks = problem["number of tasks"].value + if num_tasks is None: + return all_tasks = range(1, num_tasks + 1) # Tasks are 1 based in the data. 
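The Boolean model fed by the greedy hint (solve_problem_with_boolean_model above) is at heart an assignment of tasks to pods under the cycle-time capacity, with precedences layered on top. A tiny sketch of that core, with hypothetical durations and the precedence constraints omitted:

from ortools.sat.python import cp_model

durations = {1: 3, 2: 4, 3: 2}  # hypothetical task -> duration map
cycle_time = 5
num_pods = 2

model = cp_model.CpModel()
assign = {
    (t, p): model.new_bool_var(f"x_{t}_{p}")
    for t in durations
    for p in range(num_pods)
}

# Each task is assigned to exactly one pod.
for t in durations:
    model.add_exactly_one([assign[t, p] for p in range(num_pods)])

# The total duration in a pod must fit within the cycle time.
for p in range(num_pods):
    model.add(sum(durations[t] * assign[t, p] for t in durations) <= cycle_time)

solver = cp_model.CpSolver()
if solver.solve(model) in (cp_model.OPTIMAL, cp_model.FEASIBLE):
    for (t, p), lit in sorted(assign.items()):
        if solver.boolean_value(lit):
            print(f"task {t} -> pod {p}")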
- durations = model["task times"].index_map - precedences = model["precedence relations"].set_of_pairs - cycle_time = model["cycle time"].value + durations = problem["task times"].index_map + precedences = problem["precedence relations"].set_of_pairs + cycle_time = problem["cycle time"].value num_pods = max(p for _, p in hint.items()) + 1 if hint else num_tasks @@ -339,14 +348,14 @@ def main(argv: Sequence[str]) -> None: if len(argv) > 1: raise app.UsageError("Too many command-line arguments.") - model = read_model(_INPUT.value) - print_stats(model) - greedy_solution = solve_model_greedily(model) + problem = read_problem(_INPUT.value) + print_stats(problem) + greedy_solution = solve_problem_greedily(problem) if _MODEL.value == "boolean": - solve_boolean_model(model, greedy_solution) + solve_problem_with_boolean_model(problem, greedy_solution) elif _MODEL.value == "scheduling": - solve_scheduling_model(model, greedy_solution) + solve_problem_with_scheduling_model(problem, greedy_solution) if __name__ == "__main__": diff --git a/examples/python/maximize_combinations_sat.py b/examples/python/maximize_combinations_sat.py new file mode 100644 index 00000000000..2667b03b002 --- /dev/null +++ b/examples/python/maximize_combinations_sat.py @@ -0,0 +1,75 @@ +#!/usr/bin/env python3 +# Copyright 2010-2024 Google LLC +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Maximize the number of valid combinations of Boolean variables.""" + +from typing import Sequence +from absl import app + +from ortools.sat.python import cp_model + + +def maximize_combinations_sat() -> None: + """Maximize the number of valid combinations of Boolean variables.""" + model = cp_model.CpModel() + cards: list[cp_model.IntVar] = [ + model.new_bool_var("card1"), + model.new_bool_var("card2"), + model.new_bool_var("card3"), + model.new_bool_var("card4"), + ] + + combos: list[list[cp_model.IntVar]] = [ + [cards[0], cards[1]], + [cards[0], cards[2]], + [cards[1], cards[3]], + [cards[0], cards[2], cards[3]], + ] + + deck_size: int = 3 + model.add(sum(cards) == deck_size) + + valid_combos: list[cp_model.IntVar] = [] + for combination in combos: + is_valid = model.new_bool_var("") + + # All true implies is_valid. + model.add_bool_and(is_valid).only_enforce_if(combination) + + # is_valid implies all true. 
+ for literal in combination: + model.add_implication(is_valid, literal) + valid_combos.append(is_valid) + + model.maximize(sum(valid_combos)) + + solver = cp_model.CpSolver() + solver.parameters.log_search_progress = True + status = solver.solve(model) + + if status == cp_model.OPTIMAL: + print( + "chosen cards:", + [card.name for card in cards if solver.boolean_value(card)], + ) + + +def main(argv: Sequence[str]) -> None: + if len(argv) > 1: + raise app.UsageError("Too many command-line arguments.") + maximize_combinations_sat() + + +if __name__ == "__main__": + app.run(main) diff --git a/examples/python/maze_escape_sat.py b/examples/python/maze_escape_sat.py index 833cdfe926d..97887854999 100644 --- a/examples/python/maze_escape_sat.py +++ b/examples/python/maze_escape_sat.py @@ -20,10 +20,11 @@ Admissible moves are one step in one of the 6 directions: x+, x-, y+, y-, z+(up), z-(down) """ -from typing import Sequence +from typing import Dict, Sequence, Tuple from absl import app from absl import flags + from google.protobuf import text_format from ortools.sat.python import cp_model @@ -31,11 +32,25 @@ "output_proto", "", "Output file to write the cp_model proto to." ) _PARAMS = flags.DEFINE_string( - "params", "num_search_workers:8,log_search_progress:true", "Sat solver parameters." + "params", + "num_search_workers:8,log_search_progress:true", + "Sat solver parameters.", ) -def add_neighbor(size, x, y, z, dx, dy, dz, model, index_map, position_to_rank, arcs): +def add_neighbor( + size: int, + x: int, + y: int, + z: int, + dx: int, + dy: int, + dz: int, + model: cp_model.CpModel, + index_map: Dict[Tuple[int, int, int], int], + position_to_rank: Dict[Tuple[int, int, int], cp_model.IntVar], + arcs: list[Tuple[int, int, cp_model.LiteralT]], +) -> None: """Checks if the neighbor is valid, and adds it to the model.""" if ( x + dx < 0 @@ -55,7 +70,7 @@ def add_neighbor(size, x, y, z, dx, dy, dz, model, index_map, position_to_rank, arcs.append((before_index, after_index, move_literal)) -def escape_the_maze(params, output_proto): +def escape_the_maze(params: str, output_proto: str) -> None: """Escapes the maze.""" size = 4 boxes = [(0, 1, 0), (2, 0, 1), (1, 3, 1), (3, 1, 3)] @@ -89,7 +104,7 @@ def escape_the_maze(params, output_proto): # Circuit constraint: visit all blocks exactly once, and maintains the rank # of each block. 
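The circuit constraint referenced in the maze_escape_sat.py comment above takes the (tail, head, literal) triples accumulated in arcs and forces the true literals to form a single circuit visiting every node. A minimal sketch on three fully connected nodes:

from ortools.sat.python import cp_model

model = cp_model.CpModel()
num_nodes = 3
arcs = []
lits = {}
for tail in range(num_nodes):
    for head in range(num_nodes):
        if tail == head:
            continue
        lit = model.new_bool_var(f"{tail} -> {head}")
        lits[tail, head] = lit
        arcs.append((tail, head, lit))

# The true literals must form a single circuit through all nodes.
model.add_circuit(arcs)

solver = cp_model.CpSolver()
if solver.solve(model) == cp_model.OPTIMAL:
    chosen = [(t, h) for (t, h), lit in lits.items() if solver.boolean_value(lit)]
    print("circuit arcs:", chosen)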
- arcs = [] + arcs: list[Tuple[int, int, cp_model.LiteralT]] = [] for x in range(size): for y in range(size): for z in range(size): @@ -143,8 +158,8 @@ def escape_the_maze(params, output_proto): elif position == end: msg += " [end]" else: - for b in range(len(boxes)): - if position == boxes[b]: + for b, box in enumerate(boxes): + if position == box: msg += f" [boxes {b}]" path[rank] = msg print(path) diff --git a/examples/python/memory_layout_and_infeasibility_sat.py b/examples/python/memory_layout_and_infeasibility_sat.py index 0738d05147a..77f628a1eef 100644 --- a/examples/python/memory_layout_and_infeasibility_sat.py +++ b/examples/python/memory_layout_and_infeasibility_sat.py @@ -54,8 +54,10 @@ def solve_hard_model(output_proto: str, params: str) -> bool: y_starts: List[cp_model.IntVar] = [] y_intervals: List[cp_model.IntervalVar] = [] - for start, end, demand, unused_alignment in DEMANDS: - x_interval = model.new_fixed_size_interval_var(start, end - start + 1, "") + for start_time, end_time, demand, _ in DEMANDS: + x_interval = model.new_fixed_size_interval_var( + start_time, end_time - start_time + 1, "" + ) y_start = model.new_int_var(0, CAPACITY - demand, "") y_interval = model.new_fixed_size_interval_var(y_start, demand, "") @@ -74,9 +76,9 @@ def solve_hard_model(output_proto: str, params: str) -> bool: status = solver.solve(model) print(solver.response_stats()) - if status == cp_model.FEASIBLE or status == cp_model.OPTIMAL: - for index, start in enumerate(y_starts): - print(f"task {index} buffer starts at {solver.value(start)}") + if status in (cp_model.FEASIBLE, cp_model.OPTIMAL): + for index, start_var in enumerate(y_starts): + print(f"task {index} buffer starts at {solver.value(start_var)}") return status != cp_model.INFEASIBLE diff --git a/examples/python/no_wait_baking_scheduling_sat.py b/examples/python/no_wait_baking_scheduling_sat.py index b6f6177b3ec..8b7276dc185 100644 --- a/examples/python/no_wait_baking_scheduling_sat.py +++ b/examples/python/no_wait_baking_scheduling_sat.py @@ -21,7 +21,8 @@ """ import collections -from typing import Sequence +from typing import List, Sequence, Tuple + from absl import app from absl import flags @@ -29,10 +30,9 @@ from ortools.sat.python import cp_model _PARAMS = flags.DEFINE_string( - "params", "num_search_workers:16, max_time_in_seconds:30", "Sat solver parameters." -) -_PROTO_FILE = flags.DEFINE_string( - "proto_file", "", "If not empty, output the proto to this file." + "params", + "num_search_workers:16, max_time_in_seconds:30", + "Sat solver parameters.", ) # Recipes @@ -50,7 +50,7 @@ DISPLAY = "display" -class Task(object): +class Task: """A unit baking task. - Simple baking tasks have a fixed duration. They are performed by workers. 
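As an aside on the `arcs` annotation introduced in the maze_escape_sat.py hunk above: each `(tail, head, literal)` triple is the input format of CP-SAT's circuit constraint. A minimal, self-contained sketch of that encoding (not part of this diff; the node count and variable names are invented):

    from ortools.sat.python import cp_model

    model = cp_model.CpModel()
    num_nodes = 4  # Hypothetical graph size.
    arcs = []
    for tail in range(num_nodes):
        for head in range(num_nodes):
            if tail == head:
                continue
            # The literal is true iff the circuit uses the arc tail -> head.
            literal = model.new_bool_var(f"arc_{tail}_{head}")
            arcs.append((tail, head, literal))
    model.add_circuit(arcs)  # Visit every node exactly once.

    solver = cp_model.CpSolver()
    print(solver.status_name(solver.solve(model)))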
@@ -64,7 +64,7 @@ def __init__(self, name, min_duration, max_duration): self.max_duration = max_duration -class Skill(object): +class Skill: """The skill of a worker or the capability of a machine.""" def __init__(self, name, efficiency): @@ -73,19 +73,21 @@ def __init__(self, name, efficiency): self.efficiency = efficiency -class Recipe(object): +class Recipe: """A recipe is a sequence of cooking tasks.""" def __init__(self, name): self.name = name self.tasks = [] - def add_task(self, resource_name, min_duration, max_duration): + def add_task( + self, resource_name: str, min_duration: int, max_duration: int + ) -> "Recipe": self.tasks.append(Task(resource_name, min_duration, max_duration)) return self -class Resource(object): +class Resource: """A resource is a worker, a machine, or just some space for cakes to rest. - Workers have a capacity of 1 and can have variable efficiency. @@ -101,12 +103,12 @@ def __init__(self, name, capacity): self.capacity = capacity self.skills = [] - def add_skill(self, skill_name, efficiency): + def add_skill(self, skill_name: str, efficiency: float) -> "Resource": self.skills.append(Skill(skill_name, efficiency)) return self -class Order(object): +class Order: """An order is a recipe that should be delivered at a given due date.""" def __init__(self, unique_id, recipe_name, due_date, quantity): @@ -124,7 +126,7 @@ def __init__(self, unique_id, recipe_name, due_date, quantity): self.quantity = quantity -def set_up_data(): +def set_up_data() -> Tuple[List[Recipe], List[Resource], List[Order]]: """Set up the bakery problem data.""" # Recipes. @@ -193,7 +195,9 @@ def set_up_data(): return recipes, resources, orders -def solve_with_cp_sat(recipes, resources, orders): +def solve_with_cp_sat( + recipes: List[Recipe], resources: List[Resource], orders: List[Order] +) -> None: """Build the optimization model, and solve the problem.""" model = cp_model.CpModel() @@ -230,7 +234,6 @@ def solve_with_cp_sat(recipes, resources, orders): skill_name = task.name suffix = f"_{order.unique_id}_batch{batch}_{skill_name}" - start = None if previous_end is None: start = model.new_int_var(start_work, horizon, f"start{suffix}") orders_sequence_of_events[order_id].append( @@ -242,7 +245,6 @@ def solve_with_cp_sat(recipes, resources, orders): size = model.new_int_var( task.min_duration, task.max_duration, f"size{suffix}" ) - end = None if task == recipe.tasks[-1]: # The order must end after the due_date. Ideally, exactly at the # due_date. 
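Returning `self` from `Recipe.add_task` (and `Resource.add_skill`), as the new annotations above make explicit, is what lets the data setup read as a chain. A hedged sketch of the resulting style; the import path, recipe name, and task data are invented, not taken from the example:

    from no_wait_baking_scheduling_sat import Recipe  # hypothetical import path

    croissant = (
        Recipe("croissant")
        .add_task("baking", 15, 20)  # hypothetical resource name and bounds
        .add_task("display", 5, 10)
    )
    print([task.name for task in croissant.tasks])  # ['baking', 'display']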
diff --git a/examples/python/nqueens_sat.py b/examples/python/nqueens_sat.py
index afdf79e24a9..d084d3af7c1 100644
--- a/examples/python/nqueens_sat.py
+++ b/examples/python/nqueens_sat.py
@@ -28,26 +28,26 @@ class NQueenSolutionPrinter(cp_model.CpSolverSolutionCallback):

     def __init__(self, queens: list[cp_model.IntVar]):
         cp_model.CpSolverSolutionCallback.__init__(self)
-        self.__queens = queens
-        self.__solution_count = 0
-        self.__start_time = time.time()
+        self._queens = queens
+        self._solution_count = 0
+        self._start_time = time.time()

     @property
     def solution_count(self) -> int:
-        return self.__solution_count
+        return self._solution_count

     def on_solution_callback(self) -> None:
         current_time = time.time()
         print(
-            "Solution %i, time = %f s"
-            % (self.__solution_count, current_time - self.__start_time)
+            f"Solution {self._solution_count}, time ="
+            f" {current_time - self._start_time} s"
         )
-        self.__solution_count += 1
+        self._solution_count += 1

-        all_queens = range(len(self.__queens))
+        all_queens = range(len(self._queens))
         for i in all_queens:
             for j in all_queens:
-                if self.value(self.__queens[j]) == i:
+                if self.value(self._queens[j]) == i:
                     # There is a queen in column j, row i.
                     print("Q", end=" ")
                 else:
diff --git a/examples/python/pell_equation_sat.py b/examples/python/pell_equation_sat.py
new file mode 100644
index 00000000000..29f3df5467d
--- /dev/null
+++ b/examples/python/pell_equation_sat.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python3
+# Copyright 2010-2024 Google LLC
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
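The `__queens` to `_queens` rename in the nqueens_sat.py hunk above is more than cosmetic: double-underscore attributes are name-mangled per class, which gets in the way when callbacks are subclassed. A tiny standalone illustration (not from the diff):

    class Base:
        def __init__(self) -> None:
            self.__count = 0  # Actually stored as _Base__count.


    class Child(Base):
        def read(self) -> int:
            # A single-underscore name would be visible as-is; the mangled
            # attribute must be spelled with its defining class's prefix.
            return self._Base__count


    print(Child().read())  # Prints 0.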
+ +"""Solves Pell's equation x^2 - coeff * y^2 = 1.""" + +from collections.abc import Sequence + +from absl import app +from absl import flags +from ortools.sat.python import cp_model + + +_COEFF = flags.DEFINE_integer("coeff", 1, "The Pell equation coefficient.") +_MAX_VALUE = flags.DEFINE_integer("max_value", 5000_000, "The maximum value.") + + +def solve_pell(coeff: int, max_value: int) -> None: + """Solves Pell's equation x^2 - coeff * y^2 = 1.""" + model = cp_model.CpModel() + + x = model.new_int_var(1, max_value, "x") + y = model.new_int_var(1, max_value, "y") + + # Pell's equation: + x_square = model.new_int_var(1, max_value * max_value, "x_square") + y_square = model.new_int_var(1, max_value * max_value, "y_square") + model.add_multiplication_equality(x_square, x, x) + model.add_multiplication_equality(y_square, y, y) + model.add(x_square - coeff * y_square == 1) + + model.add_decision_strategy( + [x, y], cp_model.CHOOSE_MIN_DOMAIN_SIZE, cp_model.SELECT_MIN_VALUE + ) + + solver = cp_model.CpSolver() + solver.parameters.num_workers = 12 + solver.parameters.log_search_progress = True + solver.parameters.cp_model_presolve = True + solver.parameters.cp_model_probing_level = 0 + + result = solver.solve(model) + if result == cp_model.OPTIMAL: + print(f"x={solver.value(x)} y={solver.value(y)} coeff={coeff}") + if solver.value(x) ** 2 - coeff * (solver.value(y) ** 2) != 1: + raise ValueError("Pell equation not satisfied.") + + +def main(argv: Sequence[str]) -> None: + if len(argv) > 1: + raise app.UsageError("Too many command-line arguments.") + solve_pell(_COEFF.value, _MAX_VALUE.value) + + +if __name__ == "__main__": + app.run(main) diff --git a/examples/python/pentominoes_sat.py b/examples/python/pentominoes_sat.py new file mode 100644 index 00000000000..d4573e1f672 --- /dev/null +++ b/examples/python/pentominoes_sat.py @@ -0,0 +1,206 @@ +#!/usr/bin/env python3 +# Copyright 2010-2024 Google LLC +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Example to solves a pentomino paving problem. + +Given a subset of n different pentomino, the problem is to pave a square of +size 5 x n. The problem is reduced to an exact set cover problem and encoded +as a linear boolean problem. + +This problem comes from the game Katamino: +http://boardgamegeek.com/boardgame/6931/katamino + +This example also includes suggestions from +https://web.ma.utexas.edu/users/smmg/archive/1997/radin.html +""" + +from collections.abc import Sequence +from typing import Dict, List + +from absl import app +from absl import flags + +from google.protobuf import text_format +from ortools.sat.python import cp_model + + +_PARAMS = flags.DEFINE_string( + "params", + "num_search_workers:16,log_search_progress:false,max_time_in_seconds:45", + "Sat solver parameters.", +) + +_PIECES = flags.DEFINE_string( + "pieces", "FILNPTUVWXYZ", "The subset of pieces to consider." 
+) + +_HEIGHT = flags.DEFINE_integer("height", 5, "The height of the box.") + + +def is_one(mask: List[List[int]], x: int, y: int, orientation: int) -> bool: + """Returns true if the oriented piece is 1 at position [i][j]. + + The 3 bits in orientation respectively mean: transposition, symmetry by + x axis, symmetry by y axis. + + Args: + mask: The shape of the piece. + x: position. + y: position. + orientation: between 0 and 7. + """ + if orientation & 1: + tmp: int = x + x = y + y = tmp + if orientation & 2: + x = len(mask[0]) - 1 - x + if orientation & 4: + y = len(mask) - 1 - y + return mask[y][x] == 1 + + +def get_height(mask: List[List[int]], orientation: int) -> int: + if orientation & 1: + return len(mask[0]) + return len(mask) + + +def get_width(mask: List[List[int]], orientation: int) -> int: + if orientation & 1: + return len(mask) + return len(mask[0]) + + +def orientation_is_redundant(mask: List[List[int]], orientation: int) -> bool: + """Checks if the current rotated figure is the same as a previous rotation.""" + size_i: int = get_width(mask, orientation) + size_j: int = get_height(mask, orientation) + for o in range(orientation): + if size_i != get_width(mask, o): + continue + if size_j != get_height(mask, o): + continue + + is_the_same: bool = True + for k in range(size_i): + if not is_the_same: + break + for l in range(size_j): + if not is_the_same: + break + if is_one(mask, k, l, orientation) != is_one(mask, k, l, o): + is_the_same = False + if is_the_same: + return True + return False + + +def generate_and_solve_problem(pieces: Dict[str, List[List[int]]]) -> None: + """Solves the pentominoes problem.""" + box_height = _HEIGHT.value + box_width = 5 * len(pieces) // box_height + print(f"Box has dimension {box_height} * {box_width}") + + model = cp_model.CpModel() + position_to_variables: List[List[List[cp_model.IntVar]]] = [ + [[] for _ in range(box_width)] for _ in range(box_height) + ] + + for name, mask in pieces.items(): + all_position_variables = [] + for orientation in range(8): + if orientation_is_redundant(mask, orientation): + continue + piece_width = get_width(mask, orientation) + piece_height = get_height(mask, orientation) + for i in range(box_width - piece_width + 1): + for j in range(box_height - piece_height + 1): + v = model.new_bool_var(name) + all_position_variables.append(v) + for k in range(piece_width): + for l in range(piece_height): + if is_one(mask, k, l, orientation): + position_to_variables[j + l][i + k].append(v) + + # Only one combination is selected. + model.add_exactly_one(all_position_variables) + + for one_column in position_to_variables: + for all_pieces_in_one_position in one_column: + model.add_exactly_one(all_pieces_in_one_position) + + # Solve the model. + solver = cp_model.CpSolver() + if _PARAMS.value: + text_format.Parse(_PARAMS.value, solver.parameters) + status = solver.solve(model) + + print( + f"Problem {_PIECES.value} box {box_height}*{box_width} solved in" + f" {solver.wall_time}s with status {solver.status_name(status)}" + ) + + # Print the solution. + if status == cp_model.OPTIMAL: + for y in range(box_height): + line = "" + for x in range(box_width): + for v in position_to_variables[y][x]: + if solver.BooleanValue(v): + line += v.name + break + print(line) + + +def main(argv: Sequence[str]) -> None: + if len(argv) > 1: + raise app.UsageError("Too many command-line arguments.") + + # Pieces are stored in a matrix. 
mask[height][width] + pieces: Dict[str, List[List[int]]] = { + "F": [[0, 1, 1], [1, 1, 0], [0, 1, 0]], + "I": [[1, 1, 1, 1, 1]], + "L": [[1, 1, 1, 1], [1, 0, 0, 0]], + "N": [[1, 1, 1, 0], [0, 0, 1, 1]], + "P": [[1, 1, 1], [1, 1, 0]], + "T": [[1, 1, 1], [0, 1, 0], [0, 1, 0]], + "U": [[1, 0, 1], [1, 1, 1]], + "V": [[1, 0, 0], [1, 0, 0], [1, 1, 1]], + "W": [[1, 0, 0], [1, 1, 0], [0, 1, 1]], + "X": [[0, 1, 0], [1, 1, 1], [0, 1, 0]], + "Y": [[1, 1, 1, 1], [0, 1, 0, 0]], + "Z": [[1, 1, 0], [0, 1, 0], [0, 1, 1]], + } + selected_pieces: Dict[str, List[List[int]]] = {} + for p in _PIECES.value: + if p not in pieces: + print(f"Piece {p} not found in the list of pieces") + return + selected_pieces[p] = pieces[p] + if (len(selected_pieces) * 5) % _HEIGHT.value != 0: + print( + f"The height {_HEIGHT.value} does not divide the total area" + f" {5 * len(selected_pieces)}" + ) + return + if _HEIGHT.value < 3 or 5 * len(selected_pieces) // _HEIGHT.value < 3: + print(f"The height {_HEIGHT.value} is not compatible with the pieces.") + return + + generate_and_solve_problem(selected_pieces) + + +if __name__ == "__main__": + app.run(main) diff --git a/examples/python/rcpsp_sat.py b/examples/python/rcpsp_sat.py index cddb3ff0599..c4e66d62dfb 100644 --- a/examples/python/rcpsp_sat.py +++ b/examples/python/rcpsp_sat.py @@ -22,6 +22,7 @@ """ import collections +import time from typing import Optional from absl import app @@ -50,9 +51,9 @@ + " precedence graph.", ) _DELAY_TIME_LIMIT = flags.DEFINE_float( - "delay_time_limit", - 20.0, - "Time limit when computing min delay between tasks." + "pairwise_delay_total_time_limit", + 120.0, + "Total time limit when computing min delay between tasks." + " A non-positive time limit disable min delays computation.", ) _PREEMPTIVE_LB_TIME_LIMIT = flags.DEFINE_float( @@ -601,21 +602,30 @@ def compute_delays_between_nodes( ): return delays, None, False + time_limit = _DELAY_TIME_LIMIT.value complete_problem_assignment = None num_optimal_delays = 0 num_delays_not_found = 0 optimal_found = True for start_task, end_task, active_tasks in task_intervals: + if time_limit <= 0: + optimal_found = False + print(f" - #timeout ({_DELAY_TIME_LIMIT.value}s) reached", flush=True) + break + + start_time = time.time() min_delay, feasible_delay, assignment = solve_rcpsp( problem, "", - f"num_search_workers:16,max_time_in_seconds:{_DELAY_TIME_LIMIT.value}", + f"num_search_workers:16,max_time_in_seconds:{time_limit}", set(active_tasks), start_task, end_task, [], delays, ) + time_limit -= time.time() - start_time + if min_delay != -1: delays[(start_task, end_task)] = min_delay, feasible_delay if start_task == 0 and end_task == len(problem.tasks) - 1: diff --git a/examples/python/reallocate_sat.py b/examples/python/reallocate_sat.py index 2209a270bcb..a1bde7da20a 100644 --- a/examples/python/reallocate_sat.py +++ b/examples/python/reallocate_sat.py @@ -19,7 +19,6 @@ def main(): - # Data data_0 = [ [107, 107, 107, 0, 0], # pr1 @@ -35,7 +34,7 @@ def main(): [298836792, 0, 0, 0], [3713428, 4118530, 4107277, 3072018], [6477273, 7183884, 5358471, 0], - [1485371, 1647412, 1642911, 1228807] + [1485371, 1647412, 1642911, 1228807], ] data_2 = [ @@ -45,7 +44,7 @@ def main(): [2988367, 0, 0, 0], [37134, 41185, 41072, 30720], [64772, 71838, 53584, 0], - [14853, 16474, 16429, 12288] + [14853, 16474, 16429, 12288], ] pr = data_0 @@ -59,7 +58,7 @@ def main(): model = cp_model.CpModel() # Variables - delta = model.NewIntVar(0, total, 'delta') + delta = model.NewIntVar(0, total, "delta") contributions_per_years = 
collections.defaultdict(list) contributions_per_prs = collections.defaultdict(list) @@ -68,14 +67,12 @@ def main(): for p, inner_l in enumerate(pr): for y, item in enumerate(inner_l): if item != 0: - contrib = model.NewIntVar(0, total, 'r%d c%d' % (p, y)) + contrib = model.NewIntVar(0, total, "r%d c%d" % (p, y)) contributions_per_years[y].append(contrib) contributions_per_prs[p].append(contrib) all_contribs[p, y] = contrib - year_var = [ - model.NewIntVar(0, total, 'y[%i]' % i) for i in range(num_years) - ] + year_var = [model.NewIntVar(0, total, "y[%i]" % i) for i in range(num_years)] # Constraints @@ -103,34 +100,34 @@ def main(): # Output solution. if status == cp_model.OPTIMAL: - print('Data') - print(' - total = ', total) - print(' - year_average = ', avg) - print(' - number of projects = ', num_pr) - print(' - number of years = ', num_years) + print("Data") + print(" - total = ", total) + print(" - year_average = ", avg) + print(" - number of projects = ", num_pr) + print(" - number of years = ", num_years) - print(' - input production') + print(" - input production") for p in range(num_pr): for y in range(num_years): if pr[p][y] == 0: - print(' ', end='') + print(" ", end="") else: - print('%10i' % pr[p][y], end='') + print("%10i" % pr[p][y], end="") print() - print('Solution') + print("Solution") for p in range(num_pr): for y in range(num_years): if pr[p][y] == 0: - print(' ', end='') + print(" ", end="") else: - print('%10i' % solver.Value(all_contribs[p, y]), end='') + print("%10i" % solver.Value(all_contribs[p, y]), end="") print() for y in range(num_years): - print('%10i' % solver.Value(year_var[y]), end='') + print("%10i" % solver.Value(year_var[y]), end="") print() -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/examples/python/shift_scheduling_sat.py b/examples/python/shift_scheduling_sat.py index a5cdb921677..7b4a304ec6c 100644 --- a/examples/python/shift_scheduling_sat.py +++ b/examples/python/shift_scheduling_sat.py @@ -110,7 +110,7 @@ def add_soft_sequence_constraint( for length in range(hard_min, soft_min): for start in range(len(works) - length + 1): span = negated_bounded_span(works, start, length) - name = ": under_span(start=%i, length=%i)" % (start, length) + name = f": under_span(start={start}, length={length})" lit = model.new_bool_var(prefix + name) span.append(lit) model.add_bool_or(span) @@ -124,7 +124,7 @@ def add_soft_sequence_constraint( for length in range(soft_max + 1, hard_max + 1): for start in range(len(works) - length + 1): span = negated_bounded_span(works, start, length) - name = ": over_span(start=%i, length=%i)" % (start, length) + name = f": over_span(start={start}, length={length})" lit = model.new_bool_var(prefix + name) span.append(lit) model.add_bool_or(span) @@ -299,7 +299,7 @@ def solve_shift_scheduling(params: str, output_proto: str): for e in range(num_employees): for s in range(num_shifts): for d in range(num_days): - work[e, s, d] = model.new_bool_var("work%i_%i_%i" % (e, s, d)) + work[e, s, d] = model.new_bool_var(f"work{e}_{s}_{d}") # Linear terms of the objective in a minimization context. 
obj_int_vars: list[cp_model.IntVar] = [] @@ -335,7 +335,7 @@ def solve_shift_scheduling(params: str, output_proto: str): soft_max, hard_max, max_cost, - "shift_constraint(employee %i, shift %i)" % (e, shift), + f"shift_constraint(employee {e}, shift {shift})", ) obj_bool_vars.extend(variables) obj_bool_coeffs.extend(coeffs) @@ -355,8 +355,7 @@ def solve_shift_scheduling(params: str, output_proto: str): soft_max, hard_max, max_cost, - "weekly_sum_constraint(employee %i, shift %i, week %i)" - % (e, shift, w), + f"weekly_sum_constraint(employee {e}, shift {shift}, week {w})", ) obj_int_vars.extend(variables) obj_int_coeffs.extend(coeffs) @@ -373,7 +372,7 @@ def solve_shift_scheduling(params: str, output_proto: str): model.add_bool_or(transition) else: trans_var = model.new_bool_var( - "transition (employee=%i, day=%i)" % (e, d) + f"transition (employee={e}, day={d})" ) transition.append(trans_var) model.add_bool_or(transition) @@ -391,7 +390,7 @@ def solve_shift_scheduling(params: str, output_proto: str): model.add(worked == sum(works)) over_penalty = excess_cover_penalties[s - 1] if over_penalty > 0: - name = "excess_demand(shift=%i, week=%i, day=%i)" % (s, w, d) + name = f"excess_demand(shift={s}, week={w}, day={d})" excess = model.new_int_var(0, num_employees - min_demand, name) model.add(excess == worked - min_demand) obj_int_vars.append(excess) @@ -404,7 +403,7 @@ def solve_shift_scheduling(params: str, output_proto: str): ) if output_proto: - print("Writing proto to %s" % output_proto) + print(f"Writing proto to {output_proto}") with open(output_proto, "w") as text_file: text_file.write(str(model)) @@ -428,7 +427,7 @@ def solve_shift_scheduling(params: str, output_proto: str): for s in range(num_shifts): if solver.boolean_value(work[e, s, d]): schedule += shifts[s] + " " - print("worker %i: %s" % (e, schedule)) + print(f"worker {e}: {schedule}") print() print("Penalties:") for i, var in enumerate(obj_bool_vars): @@ -442,16 +441,12 @@ def solve_shift_scheduling(params: str, output_proto: str): for i, var in enumerate(obj_int_vars): if solver.value(var) > 0: print( - " %s violated by %i, linear penalty=%i" - % (var.name, solver.value(var), obj_int_coeffs[i]) + f" {var.name} violated by {solver.value(var)}, linear" + f" penalty={obj_int_coeffs[i]}" ) print() - print("Statistics") - print(" - status : %s" % solver.status_name(status)) - print(" - conflicts : %i" % solver.num_conflicts) - print(" - branches : %i" % solver.num_branches) - print(" - wall time : %f s" % solver.wall_time) + print(solver.response_stats()) def main(_): diff --git a/examples/python/single_machine_scheduling_with_setup_release_due_dates_sat.py b/examples/python/single_machine_scheduling_with_setup_release_due_dates_sat.py index 65b1602631b..249d47ed25e 100644 --- a/examples/python/single_machine_scheduling_with_setup_release_due_dates_sat.py +++ b/examples/python/single_machine_scheduling_with_setup_release_due_dates_sat.py @@ -40,17 +40,16 @@ class SolutionPrinter(cp_model.CpSolverSolutionCallback): """Print intermediate solutions.""" - def __init__(self): + def __init__(self) -> None: cp_model.CpSolverSolutionCallback.__init__(self) self.__solution_count = 0 - def on_solution_callback(self): - """Called after each new solution found.""" + def on_solution_callback(self) -> None: + """Called at each new solution.""" print( - "Solution %i, time = %f s, objective = %i" - % (self.__solution_count, self.wall_time, self.objective_value) + f"Solution {self.__solution_count}, time = {self.wall_time} s," + f" objective = 
{self.objective_value}"
         )
         self.__solution_count += 1


 def single_machine_scheduling():
@@ -393,9 +392,7 @@ def single_machine_scheduling():
         if min_incoming_setup == 0:
             continue

-        print(
-            "job %i has a min incoming setup of %i" % (job_id, min_incoming_setup)
-        )
+        print(f"job {job_id} has a min incoming setup of {min_incoming_setup}")
         # We can transfer some setup times to the duration of the job.
         job_durations[job_id] += min_incoming_setup
         # Decrease corresponding incoming setup times.
@@ -414,7 +411,7 @@ def single_machine_scheduling():
     horizon = sum(job_durations) + sum(
         max(setup_times[i][j] for i in range(num_jobs + 1)) for j in range(num_jobs)
     )
-    print("Greedy horizon =", horizon)
+    print(f"Greedy horizon = {horizon}")

     # ----------------------------------------------------------------------------
     # Global storage of variables.
@@ -429,10 +426,10 @@ def single_machine_scheduling():
         release_date = release_dates[job_id]
         due_date = due_dates[job_id] if due_dates[job_id] != -1 else horizon
         print(
-            "job %2i: start = %5i, duration = %4i, end = %6i"
-            % (job_id, release_date, duration, due_date)
+            f"job {job_id:2}: start = {release_date:5}, duration = {duration:4},"
+            f" end = {due_date:6}"
         )
-        name_suffix = "_%i" % job_id
+        name_suffix = f"_{job_id}"
         start = model.new_int_var(release_date, due_date, "s" + name_suffix)
         end = model.new_int_var(release_date, due_date, "e" + name_suffix)
         interval = model.new_interval_var(start, duration, end, "i" + name_suffix)
@@ -460,7 +457,7 @@ def single_machine_scheduling():
             if i == j:
                 continue

-            lit = model.new_bool_var("%i follows %i" % (j, i))
+            lit = model.new_bool_var(f"{j} follows {i}")
             arcs.append((i + 1, j + 1, lit))

             # We add the reified precedence to link the literal with the times of the
@@ -481,7 +478,7 @@ def single_machine_scheduling():
     # ----------------------------------------------------------------------------
     # Precedences.
     for before, after in precedences:
-        print("job %i is after job %i" % (after, before))
+        print(f"job {after} is after job {before}")
         model.add(ends[before] <= starts[after])

     # ----------------------------------------------------------------------------
@@ -493,7 +490,7 @@ def single_machine_scheduling():
     # ----------------------------------------------------------------------------
     # Write problem to file.
     if output_proto_file:
-        print("Writing proto to %s" % output_proto_file)
+        print(f"Writing proto to {output_proto_file}")
         with open(output_proto_file, "w") as text_file:
             text_file.write(str(model))

@@ -503,11 +500,12 @@ def single_machine_scheduling():
     if parameters:
         text_format.Parse(parameters, solver.parameters)
     solution_printer = SolutionPrinter()
+    solver.best_bound_callback = lambda a: print(f"New objective lower bound: {a}")
     solver.solve(model, solution_printer)

     for job_id in all_jobs:
         print(
-            "job %i starts at %i end ends at %i"
-            % (job_id, solver.value(starts[job_id]), solver.value(ends[job_id]))
+            f"job {job_id} starts at {solver.value(starts[job_id])} and ends at"
+            f" {solver.value(ends[job_id])}"
         )

diff --git a/examples/python/spread_robots_sat.py b/examples/python/spread_robots_sat.py
index 835e9c23f29..1d7f9d45133 100644
--- a/examples/python/spread_robots_sat.py
+++ b/examples/python/spread_robots_sat.py
@@ -33,7 +33,7 @@
 )


-def spread_robots(num_robots: int, room_size: int, params: str):
+def spread_robots(num_robots: int, room_size: int, params: str) -> None:
     """Optimize robots placement."""
     model = cp_model.CpModel()

diff --git a/examples/python/steel_mill_slab_sat.py b/examples/python/steel_mill_slab_sat.py
index a0f2b43973d..1a871709fe8 100644
--- a/examples/python/steel_mill_slab_sat.py
+++ b/examples/python/steel_mill_slab_sat.py
@@ -39,13 +39,10 @@
 )


-def build_problem(problem_id):
+def build_problem(
+    problem_id: int,
+) -> tuple[int, list[int], int, list[tuple[int, int]]]:
     """Build problem data."""
-    capacities = None
-    num_colors = None
-    num_slabs = None
-    orders = None
-
     if problem_id == 0:
         capacities = [
             # fmt:off
@@ -100,15 +97,22 @@ def build_problem(problem_id):
             # fmt:on
         ]

-    elif problem_id == 3:
+    else:  # problem_id == 3, default problem.
capacities = [0, 17, 44] num_colors = 8 num_slabs = 10 orders = [ # (size, color) - # fmt:off - (4, 1), (22, 2), (9, 3), (5, 4), (8, 5), (3, 6), (3, 4), (4, 7), - (7, 4), (7, 8), (3, 6), - # fmt:on + (4, 1), + (22, 2), + (9, 3), + (5, 4), + (8, 5), + (3, 6), + (3, 4), + (4, 7), + (7, 4), + (7, 8), + (3, 6), ] return (num_slabs, capacities, num_colors, orders) @@ -117,7 +121,7 @@ def build_problem(problem_id): class SteelMillSlabSolutionPrinter(cp_model.CpSolverSolutionCallback): """Print intermediate solutions.""" - def __init__(self, orders, assign, load, loss): + def __init__(self, orders, assign, load, loss) -> None: cp_model.CpSolverSolutionCallback.__init__(self) self.__orders = orders self.__assign = assign @@ -128,13 +132,13 @@ def __init__(self, orders, assign, load, loss): self.__all_slabs = range(len(assign[0])) self.__start_time = time.time() - def on_solution_callback(self): + def on_solution_callback(self) -> None: """Called on each new solution.""" current_time = time.time() objective = sum(self.value(l) for l in self.__loss) print( - "Solution %i, time = %f s, objective = %i" - % (self.__solution_count, current_time - self.__start_time, objective) + f"Solution {self.__solution_count}, time =" + f" {current_time - self.__start_time} s, objective = {objective}" ) self.__solution_count += 1 orders_in_slab = [ @@ -143,25 +147,20 @@ def on_solution_callback(self): ] for s in self.__all_slabs: if orders_in_slab[s]: - line = " - slab %i, load = %i, loss = %i, orders = [" % ( - s, - self.value(self.__load[s]), - self.value(self.__loss[s]), + line = ( + f" - slab {s}, load = {self.value(self.__load[s])}, loss =" + f" {self.value(self.__loss[s])}, orders = [" ) for o in orders_in_slab[s]: - line += "#%i(w%i, c%i) " % ( - o, - self.__orders[o][0], - self.__orders[o][1], - ) + line += f"#{o}(w{self.__orders[o][0]}, c{self.__orders[o][1]})" line += "]" print(line) -def steel_mill_slab(problem, break_symmetries): +def steel_mill_slab(problem_id: int, break_symmetries: bool) -> None: """Solves the Steel Mill Slab Problem.""" ### Load problem. - (num_slabs, capacities, num_colors, orders) = build_problem(problem) + num_slabs, capacities, num_colors, orders = build_problem(problem_id) num_orders = len(orders) num_capacities = len(capacities) @@ -169,8 +168,8 @@ def steel_mill_slab(problem, break_symmetries): all_colors = range(num_colors) all_orders = range(len(orders)) print( - "Solving steel mill with %i orders, %i slabs, and %i capacities" - % (num_orders, num_slabs, num_capacities - 1) + f"Solving steel mill with {num_orders} orders, {num_slabs} slabs, and" + f" {num_capacities - 1} capacities" ) # Compute auxiliary data. @@ -193,14 +192,12 @@ def steel_mill_slab(problem, break_symmetries): # Create the model and the decision variables. model = cp_model.CpModel() assign = [ - [model.new_bool_var("assign_%i_to_slab_%i" % (o, s)) for s in all_slabs] + [model.new_bool_var(f"assign_{o}_to_slab_{s}") for s in all_slabs] for o in all_orders ] - loads = [ - model.new_int_var(0, max_capacity, "load_of_slab_%i" % s) for s in all_slabs - ] + loads = [model.new_int_var(0, max_capacity, f"load_of_slab_{s}") for s in all_slabs] color_is_in_slab = [ - [model.new_bool_var("color_%i_in_slab_%i" % (c + 1, s)) for c in all_colors] + [model.new_bool_var(f"color_{c + 1}_in_slab_{s}") for c in all_colors] for s in all_slabs ] @@ -267,19 +264,19 @@ def steel_mill_slab(problem, break_symmetries): # Create position variables if there are symmetries to be broken. 
     if break_symmetries and ordered_equivalent_orders:
         print(
-            " - creating %i symmetry breaking constraints"
-            % len(ordered_equivalent_orders)
+            f" - creating {len(ordered_equivalent_orders)} symmetry breaking"
+            " constraints"
         )
         positions = {}
         for p in ordered_equivalent_orders:
             if p[0] not in positions:
                 positions[p[0]] = model.new_int_var(
-                    0, num_slabs - 1, "position_of_slab_%i" % p[0]
+                    0, num_slabs - 1, f"position_of_slab_{p[0]}"
                 )
                 model.add_map_domain(positions[p[0]], assign[p[0]])
             if p[1] not in positions:
                 positions[p[1]] = model.new_int_var(
-                    0, num_slabs - 1, "position_of_slab_%i" % p[1]
+                    0, num_slabs - 1, f"position_of_slab_{p[1]}"
                 )
                 model.add_map_domain(positions[p[1]], assign[p[1]])
             # Finally add the symmetry breaking constraint.
@@ -287,7 +284,7 @@ def steel_mill_slab(problem, break_symmetries):

     # Objective.
     obj = model.new_int_var(0, num_slabs * max_loss, "obj")
-    losses = [model.new_int_var(0, max_loss, "loss_%i" % s) for s in all_slabs]
+    losses = [model.new_int_var(0, max_loss, f"loss_{s}") for s in all_slabs]
     for s in all_slabs:
         model.add_element(loads[s], loss_array, losses[s])
     model.add(obj == sum(losses))
@@ -303,14 +300,19 @@ def steel_mill_slab(problem, break_symmetries):
     ### Output the solution.
     if status in (cp_model.OPTIMAL, cp_model.FEASIBLE):
         print(
-            "Loss = %i, time = %f s, %i conflicts"
-            % (solver.objective_value, solver.wall_time, solver.num_conflicts)
+            f"Loss = {solver.objective_value}, time = {solver.wall_time} s,"
+            f" {solver.num_conflicts} conflicts"
         )
     else:
         print("No solution")


-def collect_valid_slabs_dp(capacities, colors, widths, loss_array):
+def collect_valid_slabs_dp(
+    capacities: list[int],
+    colors: list[int],
+    widths: list[int],
+    loss_array: list[int],
+) -> list[list[int]]:
     """Collect valid columns (assign, loss) for one slab."""
     start_time = time.time()

@@ -339,8 +341,8 @@ def collect_valid_slabs_dp(capacities, colors, widths, loss_array):
         all_valid_assignments.extend(new_assignments)

     print(
-        "%i assignments created in %.2f s"
-        % (len(all_valid_assignments), time.time() - start_time)
+        f"{len(all_valid_assignments)} assignments created in"
+        f" {time.time() - start_time:.2f} s"
     )
     tuples = []
     for assignment in all_valid_assignments:
@@ -354,10 +356,10 @@ def collect_valid_slabs_dp(capacities, colors, widths, loss_array):
     return tuples


-def steel_mill_slab_with_valid_slabs(problem, break_symmetries):
+def steel_mill_slab_with_valid_slabs(problem_id: int, break_symmetries: bool) -> None:
     """Solves the Steel Mill Slab Problem."""
     ### Load problem.
-    (num_slabs, capacities, num_colors, orders) = build_problem(problem)
+    (num_slabs, capacities, num_colors, orders) = build_problem(problem_id)

     num_orders = len(orders)
     num_capacities = len(capacities)
@@ -365,8 +367,8 @@ def steel_mill_slab_with_valid_slabs(problem, break_symmetries):
     all_colors = range(num_colors)
     all_orders = range(len(orders))
     print(
-        "Solving steel mill with %i orders, %i slabs, and %i capacities"
-        % (num_orders, num_slabs, num_capacities - 1)
+        f"Solving steel mill with {num_orders} orders, {num_slabs} slabs, and"
+        f" {num_capacities - 1} capacities"
     )

     # Compute auxiliary data.
@@ -383,11 +385,11 @@ def steel_mill_slab_with_valid_slabs(problem, break_symmetries):
     # Create the model and the decision variables.
     model = cp_model.CpModel()
     assign = [
-        [model.new_bool_var("assign_%i_to_slab_%i" % (o, s)) for s in all_slabs]
+        [model.new_bool_var(f"assign_{o}_to_slab_{s}") for s in all_slabs]
         for o in all_orders
     ]
-    loads = [model.new_int_var(0, max_capacity, "load_%i" % s) for s in all_slabs]
-    losses = [model.new_int_var(0, max_loss, "loss_%i" % s) for s in all_slabs]
+    loads = [model.new_int_var(0, max_capacity, f"load_{s}") for s in all_slabs]
+    losses = [model.new_int_var(0, max_loss, f"loss_{s}") for s in all_slabs]

     unsorted_valid_slabs = collect_valid_slabs_dp(
         capacities, colors, widths, loss_array
     )
@@ -449,19 +451,19 @@ def steel_mill_slab_with_valid_slabs(problem, break_symmetries):
     # Create position variables if there are symmetries to be broken.
     if ordered_equivalent_orders:
         print(
-            " - creating %i symmetry breaking constraints"
-            % len(ordered_equivalent_orders)
+            f" - creating {len(ordered_equivalent_orders)} symmetry breaking"
+            " constraints"
         )
         positions = {}
         for p in ordered_equivalent_orders:
             if p[0] not in positions:
                 positions[p[0]] = model.new_int_var(
-                    0, num_slabs - 1, "position_of_slab_%i" % p[0]
+                    0, num_slabs - 1, f"position_of_slab_{p[0]}"
                 )
                 model.add_map_domain(positions[p[0]], assign[p[0]])
             if p[1] not in positions:
                 positions[p[1]] = model.new_int_var(
-                    0, num_slabs - 1, "position_of_slab_%i" % p[1]
+                    0, num_slabs - 1, f"position_of_slab_{p[1]}"
                 )
                 model.add_map_domain(positions[p[1]], assign[p[1]])
             # Finally add the symmetry breaking constraint.
@@ -483,24 +485,24 @@ def steel_mill_slab_with_valid_slabs(problem, break_symmetries):
     ### Output the solution.
     if status == cp_model.OPTIMAL:
         print(
-            "Loss = %i, time = %.2f s, %i conflicts"
-            % (solver.objective_value, solver.wall_time, solver.num_conflicts)
+            f"Loss = {solver.objective_value}, time = {solver.wall_time:.2f} s,"
+            f" {solver.num_conflicts} conflicts"
         )
     else:
         print("No solution")


-def steel_mill_slab_with_column_generation(problem):
+def steel_mill_slab_with_column_generation(problem_id: int) -> None:
     """Solves the Steel Mill Slab Problem."""
     ### Load problem.
-    (num_slabs, capacities, _, orders) = build_problem(problem)
+    (num_slabs, capacities, _, orders) = build_problem(problem_id)

     num_orders = len(orders)
     num_capacities = len(capacities)

     all_orders = range(len(orders))
     print(
-        "Solving steel mill with %i orders, %i slabs, and %i capacities"
-        % (num_orders, num_slabs, num_capacities - 1)
+        f"Solving steel mill with {num_orders} orders, {num_slabs} slabs, and"
+        f" {num_capacities - 1} capacities"
     )

     # Compute auxiliary data.
@@ -524,7 +526,7 @@ def steel_mill_slab_with_column_generation(problem):
     # create model and decision variables.
     model = cp_model.CpModel()
-    selected = [model.new_bool_var("selected_%i" % i) for i in all_valid_slabs]
+    selected = [model.new_bool_var(f"selected_{i}") for i in all_valid_slabs]

     for order_id in all_orders:
         model.add(
@@ -552,8 +554,8 @@ def steel_mill_slab_with_column_generation(problem):
     ### Output the solution.
     if status in (cp_model.OPTIMAL, cp_model.FEASIBLE):
         print(
-            "Loss = %i, time = %.2f s, %i conflicts"
-            % (solver.objective_value, solver.wall_time, solver.num_conflicts)
+            f"Loss = {solver.objective_value}, time = {solver.wall_time:.2f} s,"
+            f" {solver.num_conflicts} conflicts"
         )
     else:
         print("No solution")
diff --git a/examples/python/sudoku_sat.py b/examples/python/sudoku_sat.py
index a6d5fb7f12e..664b36bba34 100755
--- a/examples/python/sudoku_sat.py
+++ b/examples/python/sudoku_sat.py
@@ -17,7 +17,7 @@
 from ortools.sat.python import cp_model


-def solve_sudoku():
+def solve_sudoku() -> None:
     """Solves the sudoku problem with the CP-SAT solver."""
     # Create the model.
     model = cp_model.CpModel()
diff --git a/examples/python/task_allocation_sat.py b/examples/python/task_allocation_sat.py
index f75d1906745..5f5b331dbf5 100644
--- a/examples/python/task_allocation_sat.py
+++ b/examples/python/task_allocation_sat.py
@@ -23,7 +23,7 @@
 from ortools.sat.python import cp_model


-def task_allocation_sat():
+def task_allocation_sat() -> None:
     """Solves the task allocation problem."""
     # Availability matrix.
     available = [
@@ -246,9 +246,9 @@ def task_allocation_sat():
     assign = {}
     for task in all_tasks:
         for slot in all_slots:
-            assign[(task, slot)] = model.new_bool_var("x[%i][%i]" % (task, slot))
+            assign[(task, slot)] = model.new_bool_var(f"x[{task}][{slot}]")
     count = model.new_int_var(0, nslots, "count")
-    slot_used = [model.new_bool_var("slot_used[%i]" % s) for s in all_slots]
+    slot_used = [model.new_bool_var(f"slot_used[{s}]") for s in all_slots]

     for task in all_tasks:
         model.add(
@@ -285,12 +285,7 @@ def task_allocation_sat():
     # Uses the portfolio of heuristics.
     solver.parameters.log_search_progress = True
     solver.parameters.num_search_workers = 16
-    status = solver.solve(model)
-
-    print("Statistics")
-    print(" - status =", solver.status_name(status))
-    print(" - optimal solution =", solver.objective_value)
-    print(" - wall time : %f s" % solver.wall_time)
+    solver.solve(model)


 def main(argv: Sequence[str]) -> None:
diff --git a/examples/python/tasks_and_workers_assignment_sat.py b/examples/python/tasks_and_workers_assignment_sat.py
index 0a3631c470c..2675bb1a4fe 100644
--- a/examples/python/tasks_and_workers_assignment_sat.py
+++ b/examples/python/tasks_and_workers_assignment_sat.py
@@ -34,7 +34,7 @@ def on_solution_callback(self):
         self.__solution_count += 1


-def tasks_and_workers_assignment_sat():
+def tasks_and_workers_assignment_sat() -> None:
     """solve the assignment problem."""
     model = cp_model.CpModel()
diff --git a/examples/python/test_scheduling_sat.py b/examples/python/test_scheduling_sat.py
new file mode 100644
index 00000000000..ca39f0ca018
--- /dev/null
+++ b/examples/python/test_scheduling_sat.py
@@ -0,0 +1,176 @@
+#!/usr/bin/env python3
+# Copyright 2010-2024 Google LLC
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Solves a test scheduling problem.
+
+Tests must be run by an operator. Tests have a duration and a power consumption.
+
+Operators draw power from power supplies.
The mapping between operators and +power supplies is given. + +Power supplies have a maximum power they can deliver. + +Can we schedule the tests so that the power consumption of each power supply is +always below its maximum power, and the total makespan is minimized? +""" + +from collections.abc import Sequence +import io +from typing import Dict, Tuple + +from absl import app +from absl import flags +import pandas as pd + +from google.protobuf import text_format +from ortools.sat.python import cp_model + + +_PARAMS = flags.DEFINE_string( + "params", + "num_search_workers:16,log_search_progress:true,max_time_in_seconds:45", + "Sat solver parameters.", +) + + +def build_data() -> tuple[pd.DataFrame, pd.DataFrame, pd.DataFrame]: + """Build the data frame.""" + tests_str = """ + Name Operator TestTime AveragePower + T1 O1 300 200 + T2 O1 150 40 + T3 O2 100 65 + T4 O2 250 150 + T5 O3 210 140 + """ + + operators_str = """ + Operator Supply + O1 S1 + O2 S2 + O3 S2 + """ + + supplies_str = """ + Supply MaxAllowedPower + S1 230 + S2 210 + """ + + tests_data = pd.read_table(io.StringIO(tests_str), sep=r"\s+") + operators_data = pd.read_table(io.StringIO(operators_str), sep=r"\s+") + supplies_data = pd.read_table(io.StringIO(supplies_str), sep=r"\s+") + + return (tests_data, operators_data, supplies_data) + + +def solve( + tests_data: pd.DataFrame, + operator_data: pd.DataFrame, + supplies_data: pd.DataFrame, +) -> None: + """Solve the scheduling of tests problem.""" + + # Parses data. + operator_to_supply: Dict[str, str] = {} + for _, row in operator_data.iterrows(): + operator_to_supply[row["Operator"]] = row["Supply"] + + supply_to_max_power: Dict[str, int] = {} + for _, row in supplies_data.iterrows(): + supply_to_max_power[row["Supply"]] = row["MaxAllowedPower"] + + horizon = tests_data["TestTime"].sum() + + # OR-Tools model. + model = cp_model.CpModel() + + # Create containers. + tests_per_supply: Dict[str, Tuple[list[cp_model.IntervalVar], list[int]]] = {} + test_supply: Dict[str, str] = {} + test_starts: Dict[str, cp_model.IntVar] = {} + test_durations: Dict[str, int] = {} + test_powers: Dict[str, int] = {} + all_ends = [] + + # Creates intervals. + for _, row in tests_data.iterrows(): + name: str = row["Name"] + operator: str = row["Operator"] + test_time: int = row["TestTime"] + average_power: int = row["AveragePower"] + supply: str = operator_to_supply[operator] + + start = model.new_int_var(0, horizon - test_time, f"start_{name}") + interval = model.new_fixed_size_interval_var( + start, test_time, f"interval_{name}" + ) + + # Bookkeeping. + test_starts[name] = start + test_durations[name] = test_time + test_powers[name] = average_power + test_supply[name] = supply + if supply not in tests_per_supply.keys(): + tests_per_supply[supply] = ([], []) + tests_per_supply[supply][0].append(interval) + tests_per_supply[supply][1].append(average_power) + all_ends.append(start + test_time) + + # Create supply cumulative constraints. + for supply, (intervals, demands) in tests_per_supply.items(): + model.add_cumulative(intervals, demands, supply_to_max_power[supply]) + + # Objective. + makespan = model.new_int_var(0, horizon, "makespan") + for end in all_ends: + model.add(makespan >= end) + model.minimize(makespan) + + # Solve model. + solver = cp_model.CpSolver() + if _PARAMS.value: + text_format.Parse(_PARAMS.value, solver.parameters) + status = solver.solve(model) + + # Report solution. 
+ if status == cp_model.OPTIMAL or status == cp_model.FEASIBLE: + print(f"Makespan = {solver.value(makespan)}") + for name, start in test_starts.items(): + print( + f"{name}: start:{solver.value(start)} duration:{test_durations[name]}" + f" power:{test_powers[name]} on supply {test_supply[name]}" + ) + + +def main(argv: Sequence[str]) -> None: + """Builds the data and solve the scheduling problem.""" + if len(argv) > 1: + raise app.UsageError("Too many command-line arguments.") + + tests_data, operators_data, supplies_data = build_data() + print("Tests data") + print(tests_data) + print() + print("Operators data") + print(operators_data) + print() + print("Supplies data") + print(supplies_data) + + solve(tests_data, operators_data, supplies_data) + + +if __name__ == "__main__": + app.run(main) diff --git a/examples/python/testdata/BUILD.bazel b/examples/python/testdata/BUILD.bazel index 7d9df48d90e..210dd5ec304 100644 --- a/examples/python/testdata/BUILD.bazel +++ b/examples/python/testdata/BUILD.bazel @@ -18,4 +18,3 @@ exports_files( "salbp_20_1.alb", ], ) - diff --git a/examples/python/vendor_scheduling_sat.py b/examples/python/vendor_scheduling_sat.py index f8c9912373e..6f7c9b377a1 100644 --- a/examples/python/vendor_scheduling_sat.py +++ b/examples/python/vendor_scheduling_sat.py @@ -63,7 +63,7 @@ def solution_count(self): return self.__solution_count -def vendor_scheduling_sat(): +def vendor_scheduling_sat() -> None: """Create the shift scheduling model and solve it.""" # Create the model. model = cp_model.CpModel() diff --git a/examples/python/wedding_optimal_chart_sat.py b/examples/python/wedding_optimal_chart_sat.py index f86471a78bd..df7419154cc 100644 --- a/examples/python/wedding_optimal_chart_sat.py +++ b/examples/python/wedding_optimal_chart_sat.py @@ -130,7 +130,7 @@ def build_data(): return num_tables, table_capacity, min_known_neighbors, connections, names -def solve_with_discrete_model(): +def solve_with_discrete_model() -> None: """Discrete approach.""" num_tables, table_capacity, min_known_neighbors, connections, names = build_data() diff --git a/examples/python/weighted_latency_problem_sat.py b/examples/python/weighted_latency_problem_sat.py index 73758fb32e9..3e66b61b6e3 100644 --- a/examples/python/weighted_latency_problem_sat.py +++ b/examples/python/weighted_latency_problem_sat.py @@ -55,7 +55,7 @@ def build_model(): return x, y, profits -def solve_with_cp_sat(x, y, profits): +def solve_with_cp_sat(x, y, profits) -> None: """Solves the problem with the CP-SAT solver.""" model = cp_model.CpModel() diff --git a/examples/service/BUILD.bazel b/examples/service/BUILD.bazel new file mode 100644 index 00000000000..b85c0ca2f33 --- /dev/null +++ b/examples/service/BUILD.bazel @@ -0,0 +1,30 @@ +# Copyright 2010-2024 Google LLC +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
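Stepping back from the BUILD file for a moment: the heart of test_scheduling_sat.py above is one `add_cumulative` constraint per power supply, capping the summed power of all tests that overlap in time. A reduced, runnable sketch of that pattern (two tests on one supply; all numbers are invented, not taken from the example's tables):

    from ortools.sat.python import cp_model

    model = cp_model.CpModel()
    horizon = 450
    durations = [300, 150]  # Hypothetical test times.
    powers = [200, 40]  # Hypothetical average powers.
    intervals = []
    for i, duration in enumerate(durations):
        start = model.new_int_var(0, horizon - duration, f"start_{i}")
        intervals.append(
            model.new_fixed_size_interval_var(start, duration, f"interval_{i}")
        )
    # The summed power of overlapping tests may never exceed the supply's max.
    model.add_cumulative(intervals, powers, 230)

    solver = cp_model.CpSolver()
    print(solver.status_name(solver.solve(model)))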
+ +load("@pip_deps//:requirements.bzl", "requirement") +load("@rules_python//python:defs.bzl", "py_binary") + +package(default_visibility = [ + "//ortools/math_opt:__subpackages__", + "//ortools/service:__subpackages__", +]) + +py_binary( + name = "solve_math_opt_model_via_http", + srcs = ["solve_math_opt_model_via_http.py"], + deps = [ + requirement("absl-py"), + "//ortools/math_opt/python:mathopt", + "//ortools/math_opt/python/ipc:remote_http_solve", + ], +) diff --git a/examples/service/solve_math_opt_model_via_http.py b/examples/service/solve_math_opt_model_via_http.py new file mode 100644 index 00000000000..39a4a874fe8 --- /dev/null +++ b/examples/service/solve_math_opt_model_via_http.py @@ -0,0 +1,67 @@ +#!/usr/bin/env python3 +# Copyright 2010-2024 Google LLC +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Example of solving a MathOpt model through the OR API. + +The model is built using the Python API, and the corresponding proto is +serialize to JSON to make the HTTP request. +""" + +from collections.abc import Sequence + +from absl import app +from absl import flags + +from ortools.math_opt.python import mathopt +from ortools.math_opt.python.ipc import remote_http_solve + +_API_KEY = flags.DEFINE_string("api_key", None, "API key for the OR API") + + +def request_example() -> None: + """Endpoint for the Operations Research API.""" + + # Set up the API key and endpoint. + api_key = _API_KEY.value + if not api_key: + print( + "API key is required. See" + " https://developers.google.com/optimization/service/setup for" + " instructions." + ) + return + + # Build a MathOpt model + model = mathopt.Model(name="my_model") + x = model.add_binary_variable(name="x") + y = model.add_variable(lb=0.0, ub=2.5, name="y") + model.add_linear_constraint(x + y <= 1.5, name="c") + model.maximize(2 * x + y) + try: + result, logs = remote_http_solve.remote_http_solve( + model, mathopt.SolverType.GSCIP, api_key=api_key + ) + print(result) + print(logs) + except remote_http_solve.OptimizationServiceError as err: + print(err) + + +def main(argv: Sequence[str]) -> None: + del argv # Unused. 
+ request_example() + + +if __name__ == "__main__": + app.run(main) diff --git a/examples/tests/init_test.cc b/examples/tests/init_test.cc index eb04dfb329a..8e17bfd739e 100644 --- a/examples/tests/init_test.cc +++ b/examples/tests/init_test.cc @@ -28,7 +28,7 @@ void TestFlags() { cpp_flags.log_prefix = true; cpp_flags.cp_model_dump_prefix = "init"; cpp_flags.cp_model_dump_models = true; - cpp_flags.cp_model_dump_lns = true; + cpp_flags.cp_model_dump_submodels = true; cpp_flags.cp_model_dump_response = true; CppBridge::SetFlags(cpp_flags); } diff --git a/makefiles/Makefile.dotnet.mk b/makefiles/Makefile.dotnet.mk index 14211330482..f088fd9ba60 100644 --- a/makefiles/Makefile.dotnet.mk +++ b/makefiles/Makefile.dotnet.mk @@ -134,7 +134,9 @@ endif $(TEMP_DOTNET_DIR)$S$1$S$$*$S$$*.csproj $(SED) -i -e 's/@PROJECT_VERSION_PATCH@/$(OR_TOOLS_PATCH)/' \ $(TEMP_DOTNET_DIR)$S$1$S$$*$S$$*.csproj - $(SED) -i -e 's/@FILE_NAME@/$$*.cs/' \ + $(SED) -i -e 's/@COMPONENT_NAME@/$1/' \ + $(TEMP_DOTNET_DIR)$S$1$S$$*$S$$*.csproj + $(SED) -i -e 's/@SAMPLE_FILE_NAME@/$$*.cs/' \ $(TEMP_DOTNET_DIR)$S$1$S$$*$S$$*.csproj $(TEMP_DOTNET_DIR)/$1/%/%.cs: \ @@ -175,10 +177,10 @@ $(TEMP_DOTNET_DIR)/$1/%: \ $(TEMP_DOTNET_DIR)/$1/%/%.csproj: \ $(SRC_DIR)/examples/$1/%.cs \ - ${SRC_DIR}/ortools/dotnet/Sample.csproj.in \ + ${SRC_DIR}/ortools/dotnet/Example.csproj.in \ | $(TEMP_DOTNET_DIR)/$1/% $(SED) -e "s/@DOTNET_PACKAGES_DIR@/..\/..\/..\/$(BUILD_DIR)\/dotnet\/packages/" \ - ortools$Sdotnet$SSample.csproj.in \ + ortools$Sdotnet$SExample.csproj.in \ > $(TEMP_DOTNET_DIR)$S$1$S$$*$S$$*.csproj $(SED) -i -e 's/@DOTNET_SAMPLE_LANG@/9.0/' \ $(TEMP_DOTNET_DIR)$S$1$S$$*$S$$*.csproj @@ -196,7 +198,7 @@ else endif $(SED) -i -e 's/@DOTNET_PROJECT@/$(DOTNET_ORTOOLS_ASSEMBLY_NAME)/' \ $(TEMP_DOTNET_DIR)$S$1$S$$*$S$$*.csproj - $(SED) -i -e 's/@SAMPLE_NAME@/$$*/' \ + $(SED) -i -e 's/@EXAMPLE_NAME@/$$*/' \ $(TEMP_DOTNET_DIR)$S$1$S$$*$S$$*.csproj $(SED) -i -e 's/@PROJECT_VERSION@/$(OR_TOOLS_VERSION)/' \ $(TEMP_DOTNET_DIR)$S$1$S$$*$S$$*.csproj @@ -206,7 +208,9 @@ endif $(TEMP_DOTNET_DIR)$S$1$S$$*$S$$*.csproj $(SED) -i -e 's/@PROJECT_VERSION_PATCH@/$(OR_TOOLS_PATCH)/' \ $(TEMP_DOTNET_DIR)$S$1$S$$*$S$$*.csproj - $(SED) -i -e 's/@FILE_NAME@/$$*.cs/' \ + $(SED) -i -e 's/@COMPONENT_NAME@/$1/' \ + $(TEMP_DOTNET_DIR)$S$1$S$$*$S$$*.csproj + $(SED) -i -e 's/@EXAMPLE_FILE_NAME@/$$*.cs/' \ $(TEMP_DOTNET_DIR)$S$1$S$$*$S$$*.csproj $(TEMP_DOTNET_DIR)/$1/%/%.cs: \ @@ -658,10 +662,12 @@ $(TEMP_DOTNET_DIR)/ortools_examples/examples/dotnet/%.csproj: \ $(SED) -i -e 's/@DOTNET_TFM@/netcoreapp3.1;net6.0<\/TargetFrameworks>/' \ $(TEMP_DOTNET_DIR)$Sortools_examples$Sexamples$Sdotnet$S$$*.csproj $(SED) -i -e 's/@DOTNET_PROJECT@/$(DOTNET_ORTOOLS_ASSEMBLY_NAME)/' \ + $(TEMP_DOTNET_DIR)$Sortools_examples$Sexamples$Sdotnet$S$$*.csproj + $(SED) -i -e 's/@COMPONENT_NAME@/$1/' \ $(TEMP_DOTNET_DIR)$Sortools_examples$Sexamples$Sdotnet$S$$*.csproj $(SED) -i -e 's/@SAMPLE_NAME@/$$*/' \ $(TEMP_DOTNET_DIR)$Sortools_examples$Sexamples$Sdotnet$S$$*.csproj - $(SED) -i -e 's/@FILE_NAME@/$$*.cs/' \ + $(SED) -i -e 's/@SAMPLE_FILE_NAME@/$$*.cs/' \ $(TEMP_DOTNET_DIR)$Sortools_examples$Sexamples$Sdotnet$S$$*.csproj endef @@ -674,7 +680,7 @@ $(TEMP_DOTNET_DIR)/ortools_examples/examples/dotnet/%.csproj: \ | $(TEMP_DOTNET_DIR)/ortools_examples/examples/dotnet $(COPY) $(SRC_DIR)$Sexamples$S$1$S$$*.cs \ $(TEMP_DOTNET_DIR)$Sortools_examples$Sexamples$Sdotnet - $(COPY) ortools$Sdotnet$SSample.csproj.in \ + $(COPY) ortools$Sdotnet$SExample.csproj.in \ 
$(TEMP_DOTNET_DIR)$Sortools_examples$Sexamples$Sdotnet$S$$*.csproj $(SED) -i -e 's/@PROJECT_VERSION@/$(OR_TOOLS_VERSION)/' \ $(TEMP_DOTNET_DIR)$Sortools_examples$Sexamples$Sdotnet$S$$*.csproj @@ -692,9 +698,11 @@ $(TEMP_DOTNET_DIR)/ortools_examples/examples/dotnet/%.csproj: \ $(TEMP_DOTNET_DIR)$Sortools_examples$Sexamples$Sdotnet$S$$*.csproj $(SED) -i -e 's/@DOTNET_PROJECT@/$(DOTNET_ORTOOLS_ASSEMBLY_NAME)/' \ $(TEMP_DOTNET_DIR)$Sortools_examples$Sexamples$Sdotnet$S$$*.csproj - $(SED) -i -e 's/@SAMPLE_NAME@/$$*/' \ + $(SED) -i -e 's/@COMPONENT_NAME@/$1/' \ $(TEMP_DOTNET_DIR)$Sortools_examples$Sexamples$Sdotnet$S$$*.csproj - $(SED) -i -e 's/@FILE_NAME@/$$*.cs/' \ + $(SED) -i -e 's/@EXAMPLE_NAME@/$$*/' \ + $(TEMP_DOTNET_DIR)$Sortools_examples$Sexamples$Sdotnet$S$$*.csproj + $(SED) -i -e 's/@EXAMPLE_FILE_NAME@/$$*.cs/' \ $(TEMP_DOTNET_DIR)$Sortools_examples$Sexamples$Sdotnet$S$$*.csproj endef diff --git a/makefiles/Makefile.java.mk b/makefiles/Makefile.java.mk index 04a8df77eea..0184b12ae09 100644 --- a/makefiles/Makefile.java.mk +++ b/makefiles/Makefile.java.mk @@ -40,7 +40,7 @@ java: @echo JAR_BIN = $(JAR_BIN) @echo JAVA_BIN = $(JAVA_BIN) @echo MVN_BIN = $(MVN_BIN) - $(warning Either JAVA support was turned off, or the the makefile cannot\ + $(warning Either JAVA support was turned off, or the makefile cannot\ find 'java' or 'maven' command which is needed for build. \ Please make sure it is installed and in system path. \ Or turn java support ON.) diff --git a/makefiles/Makefile.port.mk b/makefiles/Makefile.port.mk index aee171cc510..97a12f49cd2 100644 --- a/makefiles/Makefile.port.mk +++ b/makefiles/Makefile.port.mk @@ -288,7 +288,7 @@ ifeq ($(OR_TOOLS_PATCH),) $(warning you are using a shallow copy) OR_TOOLS_PATCH:= 9999 else - OR_TOOLS_PATCH:= $(shell git rev-list --count v$(OR_TOOLS_MAJOR).0..HEAD) + OR_TOOLS_PATCH:= $(shell git rev-list --count v$(OR_TOOLS_MAJOR).0..HEAD || echo 0) endif else $(warning you are not using a .git archive) diff --git a/makefiles/docker/archlinux/python.Dockerfile b/makefiles/docker/archlinux/python.Dockerfile index 06afcd1214c..c53713d5832 100644 --- a/makefiles/docker/archlinux/python.Dockerfile +++ b/makefiles/docker/archlinux/python.Dockerfile @@ -1,6 +1,6 @@ FROM ortools/make:archlinux_swig AS env RUN pacman -Syu --noconfirm python python-pip \ - python-wheel python-virtualenv \ + python-wheel python-virtualenv python-setuptools \ python-numpy python-pandas RUN python -m pip install --break-system-package \ absl-py mypy mypy-protobuf diff --git a/makefiles/docker/centos/Dockerfile b/makefiles/docker/centos/Dockerfile index a48eb27631c..9c0497cea36 100644 --- a/makefiles/docker/centos/Dockerfile +++ b/makefiles/docker/centos/Dockerfile @@ -20,25 +20,25 @@ RUN dnf -y update \ RUN echo "source /opt/rh/gcc-toolset-11/enable" >> /etc/bashrc SHELL ["/bin/bash", "--login", "-c"] -# Install CMake 3.26.4 -RUN wget -q "https://cmake.org/files/v3.26/cmake-3.26.4-linux-x86_64.sh" \ -&& chmod a+x cmake-3.26.4-linux-x86_64.sh \ -&& ./cmake-3.26.4-linux-x86_64.sh --prefix=/usr/local/ --skip-license \ -&& rm cmake-3.26.4-linux-x86_64.sh +# Install CMake 3.28.3 +RUN wget -q "https://cmake.org/files/v3.28/cmake-3.28.3-linux-x86_64.sh" \ +&& chmod a+x cmake-3.28.3-linux-x86_64.sh \ +&& ./cmake-3.28.3-linux-x86_64.sh --prefix=/usr/local/ --skip-license \ +&& rm cmake-3.28.3-linux-x86_64.sh CMD [ "/usr/bin/bash" ] -# Install SWIG 4.1.1 +# Install SWIG 4.2.1 FROM base AS swig RUN dnf -y update \ && dnf -y install pcre2-devel \ && dnf clean all \ && rm -rf /var/cache/dnf \ 
-&& wget -q "https://downloads.sourceforge.net/project/swig/swig/swig-4.1.1/swig-4.1.1.tar.gz" \ -&& tar xvf swig-4.1.1.tar.gz \ -&& rm swig-4.1.1.tar.gz \ -&& cd swig-4.1.1 \ +&& wget -q "https://downloads.sourceforge.net/project/swig/swig/swig-4.2.1/swig-4.2.1.tar.gz" \ +&& tar xvf swig-4.2.1.tar.gz \ +&& rm swig-4.2.1.tar.gz \ +&& cd swig-4.2.1 \ && ./configure --prefix=/usr \ && make -j 4 \ && make install \ && cd .. \ -&& rm -rf swig-4.1.1 +&& rm -rf swig-4.2.1 diff --git a/makefiles/docker/ubuntu/Dockerfile b/makefiles/docker/ubuntu/Dockerfile index d97da04f5eb..22d71efe5ec 100644 --- a/makefiles/docker/ubuntu/Dockerfile +++ b/makefiles/docker/ubuntu/Dockerfile @@ -10,11 +10,11 @@ RUN apt-get update -qq \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* -# Install CMake 3.25.2 -RUN wget -q "https://cmake.org/files/v3.25/cmake-3.25.2-linux-x86_64.sh" \ -&& chmod a+x cmake-3.25.2-linux-x86_64.sh \ -&& ./cmake-3.25.2-linux-x86_64.sh --prefix=/usr/local/ --skip-license \ -&& rm cmake-3.25.2-linux-x86_64.sh +# Install CMake 3.28.3 +RUN wget -q "https://cmake.org/files/v3.28/cmake-3.28.3-linux-x86_64.sh" \ +&& chmod a+x cmake-3.28.3-linux-x86_64.sh \ +&& ./cmake-3.28.3-linux-x86_64.sh --prefix=/usr/local/ --skip-license \ +&& rm cmake-3.28.3-linux-x86_64.sh CMD [ "/usr/bin/bash" ] FROM base AS swig diff --git a/makefiles/docker/ubuntu/dotnet.Dockerfile b/makefiles/docker/ubuntu/dotnet.Dockerfile index 11f1745dc29..78fa9a7f85c 100644 --- a/makefiles/docker/ubuntu/dotnet.Dockerfile +++ b/makefiles/docker/ubuntu/dotnet.Dockerfile @@ -3,7 +3,7 @@ FROM ortools/make:ubuntu_swig AS env # Install .Net # see: https://docs.microsoft.com/en-us/dotnet/core/install/linux-ubuntu RUN apt-get update -qq \ -&& apt-get install -yq dotnet-sdk-6.0 \ +&& apt-get install -yq dotnet-sdk-8.0 \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* # Trigger first run experience by running arbitrary cmd diff --git a/ortools/algorithms/BUILD.bazel b/ortools/algorithms/BUILD.bazel index ef7b1f68741..be5f372620c 100644 --- a/ortools/algorithms/BUILD.bazel +++ b/ortools/algorithms/BUILD.bazel @@ -12,8 +12,9 @@ # limitations under the License. 
load("@bazel_skylib//rules:common_settings.bzl", "bool_flag") -load("@rules_proto//proto:defs.bzl", "proto_library") load("@rules_cc//cc:defs.bzl", "cc_library", "cc_proto_library") +load("@rules_proto//proto:defs.bzl", "proto_library") +load("@rules_python//python:proto.bzl", "py_proto_library") package(default_visibility = ["//visibility:public"]) @@ -66,10 +67,10 @@ cc_library( hdrs = ["binary_search.h"], deps = [ "//ortools/base", - "//ortools/base:dump_vars", "@com_google_absl//absl/functional:function_ref", "@com_google_absl//absl/log:check", "@com_google_absl//absl/numeric:int128", + "@com_google_absl//absl/types:span", ], ) @@ -127,6 +128,20 @@ cc_test( ], ) +cc_library( + name = "duplicate_remover", + srcs = ["duplicate_remover.cc"], + hdrs = ["duplicate_remover.h"], + deps = [ + "@com_google_absl//absl/log:check", + "@com_google_absl//absl/numeric:bits", + "@com_google_absl//absl/random", + "@com_google_absl//absl/random:distributions", + "@com_google_absl//absl/types:span", + "@com_google_protobuf//:protobuf", + ], +) + # Hungarian algorithm cc_library( name = "hungarian", @@ -152,6 +167,38 @@ cc_test( "//ortools/base:types", "@com_google_absl//absl/container:flat_hash_map", "@com_google_absl//absl/random:distributions", + "@com_google_absl//absl/types:span", + ], +) + +cc_library( + name = "adjustable_k_ary_heap", + hdrs = ["adjustable_k_ary_heap.h"], + deps = ["@com_google_absl//absl/log:check"], +) + +cc_test( + name = "adjustable_k_ary_heap_test", + size = "medium", + srcs = ["adjustable_k_ary_heap_test.cc"], + deps = [ + ":adjustable_k_ary_heap", + "//ortools/base:gmock_main", + ], +) + +cc_test( + name = "adjustable_k_ary_heap_stress_test", + size = "large", + timeout = "eternal", + srcs = ["adjustable_k_ary_heap_stress_test.cc"], + deps = [ + ":adjustable_k_ary_heap", + "//ortools/base:gmock_main", + "@com_google_absl//absl/flags:flag", + "@com_google_absl//absl/log", + "@com_google_absl//absl/log:check", + "@com_google_absl//absl/random", ], ) @@ -167,20 +214,45 @@ cc_library( "//conditions:default": [], }), deps = [ - "//ortools/base", + "@com_google_absl//absl/log:check", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/time", "//ortools/base:stl_util", + # We don't link any underlying solver to let the linear_solver_knapsack + # decide what solvers to include. "//ortools/linear_solver", "//ortools/sat:cp_model", + "//ortools/sat:cp_model_cc_proto", + "//ortools/sat:cp_model_solver", "//ortools/util:bitset", "//ortools/util:time_limit", ], ) -# Weighted set covering +cc_test( + name = "knapsack_solver_test", + size = "medium", + srcs = ["knapsack_solver_test.cc"], + deps = [ + ":knapsack_solver_lib", # buildcleaner: keep + "//ortools/base", + "//ortools/base:gmock_main", + "//ortools/util:time_limit", + ], +) + +# Partitioning and splitting of vector. + +# query matching library. + +# Weighted set covering library. 
+ proto_library( name = "set_cover_proto", srcs = ["set_cover.proto"], - deps = ["//ortools/util:int128_proto"], + deps = [ + "//ortools/util:int128_proto", + ], ) cc_proto_library( @@ -188,16 +260,36 @@ cc_proto_library( deps = [":set_cover_proto"], ) +py_proto_library( + name = "set_cover_py_pb2", + deps = [":set_cover_proto"], +) + +cc_library( + name = "set_cover_lagrangian", + srcs = ["set_cover_lagrangian.cc"], + hdrs = ["set_cover_lagrangian.h"], + deps = [ + ":adjustable_k_ary_heap", + ":set_cover_invariant", + ":set_cover_model", + "//ortools/base:threadpool", + "@com_google_absl//absl/log:check", + ], +) + cc_library( name = "set_cover_model", srcs = ["set_cover_model.cc"], hdrs = ["set_cover_model.h"], deps = [ ":set_cover_cc_proto", - "//ortools/lp_data:base", - "//ortools/util:strong_integers", + "//ortools/base:intops", + "//ortools/base:strong_vector", + "//ortools/util:aligned_memory", "@com_google_absl//absl/log", "@com_google_absl//absl/log:check", + "@com_google_absl//absl/strings", ], ) @@ -209,7 +301,6 @@ cc_library( ":set_cover_cc_proto", ":set_cover_model", "//ortools/base", - "@com_google_absl//absl/container:flat_hash_set", "@com_google_absl//absl/log", "@com_google_absl//absl/log:check", "@com_google_absl//absl/types:span", @@ -217,26 +308,15 @@ cc_library( ) cc_library( - name = "set_cover_utils", - srcs = ["set_cover_utils.cc"], - hdrs = ["set_cover_utils.h"], + name = "set_cover_heuristics", + srcs = ["set_cover_heuristics.cc"], + hdrs = ["set_cover_heuristics.h"], deps = [ + ":adjustable_k_ary_heap", ":set_cover_invariant", ":set_cover_model", - "//ortools/base:adjustable_priority_queue", - ], -) - -cc_library( - name = "set_cover", - srcs = ["set_cover.cc"], - hdrs = ["set_cover.h"], - deps = [ - ":set_cover_invariant", - ":set_cover_model", - ":set_cover_utils", "//ortools/base", - "@com_google_absl//absl/container:flat_hash_set", + "@com_google_absl//absl/base:nullability", "@com_google_absl//absl/log", "@com_google_absl//absl/log:check", "@com_google_absl//absl/random", @@ -254,18 +334,35 @@ cc_library( "//ortools/linear_solver", "//ortools/lp_data:base", "@com_google_absl//absl/log", + "@com_google_absl//absl/log:check", "@com_google_absl//absl/types:span", ], ) +cc_library( + name = "set_cover_reader", + srcs = ["set_cover_reader.cc"], + hdrs = ["set_cover_reader.h"], + deps = [ + ":set_cover_model", + "//ortools/base:file", + "//ortools/base:strong_vector", + "//ortools/util:filelineiter", + "@com_google_absl//absl/log", + "@com_google_absl//absl/log:check", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/strings:string_view", + ], +) + cc_test( name = "set_cover_test", size = "medium", timeout = "eternal", srcs = ["set_cover_test.cc"], deps = [ - ":set_cover", ":set_cover_cc_proto", + ":set_cover_heuristics", ":set_cover_invariant", ":set_cover_mip", ":set_cover_model", @@ -291,15 +388,25 @@ cc_library( srcs = ["dynamic_partition.cc"], hdrs = ["dynamic_partition.h"], deps = [ - "//ortools/base", "//ortools/base:murmur", "@com_google_absl//absl/log:check", "@com_google_absl//absl/strings", - "@com_google_absl//absl/strings:str_format", "@com_google_absl//absl/types:span", ], ) +cc_test( + name = "dynamic_partition_test", + srcs = ["dynamic_partition_test.cc"], + deps = [ + ":dynamic_partition", + "//ortools/base:gmock_main", + "//ortools/base:stl_util", + "@com_google_absl//absl/random", + "@com_google_absl//absl/random:bit_gen_ref", + ], +) + cc_library( name = "sparse_permutation", srcs = ["sparse_permutation.cc"], @@ -307,6 +414,18 @@ 
cc_library( deps = [ "//ortools/base", "@com_google_absl//absl/strings", + "@com_google_absl//absl/types:span", + ], +) + +cc_test( + name = "sparse_permutation_test", + srcs = ["sparse_permutation_test.cc"], + deps = [ + ":sparse_permutation", + "//ortools/base:gmock_main", + "@com_google_absl//absl/container:flat_hash_set", + "@com_google_absl//absl/random:distributions", ], ) @@ -339,7 +458,6 @@ cc_library( ":dynamic_partition", ":dynamic_permutation", ":sparse_permutation", - "//ortools/base", "//ortools/base:dump_vars", "//ortools/base:murmur", "//ortools/graph", @@ -368,13 +486,15 @@ cc_test( ":dynamic_permutation", ":find_graph_symmetries", ":sparse_permutation", - "//ortools/base", "//ortools/base:dump_vars", "//ortools/base:file", "//ortools/base:gmock_main", "//ortools/base:map_util", "//ortools/base:path", "//ortools/graph:io", + "//ortools/graph:random_graph", + "//ortools/graph:util", + "@com_google_absl//absl/numeric:bits", "@com_google_absl//absl/random", "@com_google_absl//absl/random:distributions", "@com_google_absl//absl/status:statusor", @@ -383,3 +503,53 @@ cc_test( "@com_google_absl//absl/types:span", ], ) + +cc_library( + name = "binary_indexed_tree", + hdrs = ["binary_indexed_tree.h"], + deps = [ + "@com_google_absl//absl/log:check", + ], +) + +cc_test( + name = "binary_indexed_tree_test", + srcs = ["binary_indexed_tree_test.cc"], + deps = [ + ":binary_indexed_tree", + "//ortools/base:gmock_main", + ], +) + +cc_library( + name = "n_choose_k", + srcs = ["n_choose_k.cc"], + hdrs = ["n_choose_k.h"], + deps = [ + ":binary_search", + "//ortools/base:mathutil", + "@com_google_absl//absl/log", + "@com_google_absl//absl/log:check", + "@com_google_absl//absl/numeric:int128", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings:str_format", + "@com_google_absl//absl/time", + ], +) + +cc_test( + name = "n_choose_k_test", + srcs = ["n_choose_k_test.cc"], + deps = [ + ":n_choose_k", + "//ortools/base:dump_vars", + "//ortools/base:gmock_main", + "//ortools/util:flat_matrix", + "@com_google_absl//absl/numeric:int128", + "@com_google_absl//absl/random", + "@com_google_absl//absl/random:distributions", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + ], +) diff --git a/ortools/algorithms/CMakeLists.txt b/ortools/algorithms/CMakeLists.txt index f0992151f09..8f23481d2fb 100644 --- a/ortools/algorithms/CMakeLists.txt +++ b/ortools/algorithms/CMakeLists.txt @@ -29,5 +29,5 @@ target_link_libraries(${NAME} PRIVATE absl::memory absl::str_format protobuf::libprotobuf - ${PROJECT_NAMESPACE}::${PROJECT_NAME}_proto) + ${PROJECT_NAMESPACE}::ortools_proto) #add_library(${PROJECT_NAMESPACE}::algorithms ALIAS ${NAME}) diff --git a/ortools/algorithms/README.md b/ortools/algorithms/README.md index 154073ab0d1..09fa1c9d35d 100644 --- a/ortools/algorithms/README.md +++ b/ortools/algorithms/README.md @@ -10,7 +10,7 @@ cheapest combination of sets that cover all the elements. [More information on Wikipedia](https://en.wikipedia.org/wiki/Set_cover_problem). -* Solver: [`set_cover.h`](set_cover.h). +* Solver: [`set_cover_heuristics.h`](set_cover_heuristics.h). * Instance representation: [`set_cover_model.h`](set_cover_model.h). * Instance parser: [`set_cover_reader.h`](set_cover_reader.h). 
diff --git a/ortools/algorithms/adjustable_k_ary_heap.h b/ortools/algorithms/adjustable_k_ary_heap.h
new file mode 100644
index 00000000000..48b01152f72
--- /dev/null
+++ b/ortools/algorithms/adjustable_k_ary_heap.h
@@ -0,0 +1,312 @@
+// Copyright 2010-2024 Google LLC
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef OR_TOOLS_ALGORITHMS_ADJUSTABLE_K_ARY_HEAP_H_
+#define OR_TOOLS_ALGORITHMS_ADJUSTABLE_K_ARY_HEAP_H_
+
+#include <algorithm>
+#include <limits>
+#include <utility>
+#include <vector>
+
+#include "absl/log/check.h"
+
+// Adjustable k-ary heap for std::pair<Priority, Index> classes containing a
+// priority and an index referring to an array where the relevant data is
+// stored.
+//
+// The comparator is the default comparator for pairs, i.e. the index is used
+// as a tie-breaker for the priority, thus making the code more repeatable.
+//
+// Because the class uses indices and vectors, it is much faster than
+// AdjustablePriorityQueue, even in the binary heap case.
+//
+// k-ary heaps are useful when SiftDown() (aka Decrease) is called more often
+// than Pop() (aka Extract).
+//
+// Namely, Pop() has a complexity in O(k * log_k(n)), while SiftDown() is in
+// O(log_k(n)), even when k = 2. This explains the small gain.
+//
+// In the implementation below, k is denoted as Arity.
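+//
+// For illustration only, a minimal usage sketch (this example is not part of
+// the original header; the template arguments and priorities below are
+// arbitrary choices made for this example):
+//
+//   std::vector<std::pair<float, int>> elements = {
+//       {3.0f, 0}, {1.0f, 1}, {2.0f, 2}};
+//   AdjustableKAryHeap<float, int, /*Arity=*/4, /*IsMaxHeap=*/false> heap(
+//       elements, /*universe_size=*/3);
+//   heap.Update({0.5f, 2});  // Element 2 now has the smallest priority.
+//   CHECK_EQ(heap.TopIndex(), 2);
+//   heap.Pop();  // Removes element 2; element 1 becomes the top.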
+
+template <typename Priority, typename Index, int Arity, bool IsMaxHeap>
+class AdjustableKAryHeap {
+ public:
+  using Aggregate = std::pair<Priority, Index>;
+  using HeapIndex = Index;
+  static_assert(Arity >= 2, "arity must be at least 2");
+  static_assert(std::numeric_limits<Index>::is_integer,
+                "Index must be an integer");
+  static_assert(std::numeric_limits<Priority>::is_specialized,
+                "Priority must be an integer or floating-point type");
+  AdjustableKAryHeap() { Clear(); }
+
+  // Constructs a k-ary heap from an existing vector, tracking original
+  // indices. `universe_size` must be strictly greater than every index
+  // appearing in `elements`.
+  explicit AdjustableKAryHeap(const std::vector<Aggregate>& elements,
+                              HeapIndex universe_size) {
+    Load(elements, universe_size);
+  }
+
+  explicit AdjustableKAryHeap(const std::vector<Index>& indices,
+                              const std::vector<Priority>& priorities,
+                              HeapIndex universe_size) {
+    Load(indices, priorities, universe_size);
+  }
+
+  void Clear() {
+    data_.clear();
+    heap_positions_.clear();
+    heap_size_ = 0;
+  }
+
+  void Load(const std::vector<Aggregate>& elements, HeapIndex universe_size) {
+    data_.resize(elements.size());
+    heap_size_ = elements.size();
+    std::copy(elements.begin(), elements.end(), data_.begin());
+    heap_positions_.resize(universe_size, kNonExistent);
+    for (HeapIndex i = 0; i < data_.size(); ++i) {
+      heap_positions_[index(i)] = i;
+    }
+    BuildHeap();
+  }
+
+  // This overload rebuilds the (priority, index) pairs in data_ from the two
+  // parallel vectors before heapifying them.
+  void Load(const std::vector<Index>& indices,
+            const std::vector<Priority>& priorities, HeapIndex universe_size) {
+    DCHECK_EQ(indices.size(), priorities.size());
+    data_.resize(indices.size());
+    heap_size_ = indices.size();
+    for (HeapIndex i = 0; i < data_.size(); ++i) {
+      data_[i] = {priorities[i], indices[i]};
+    }
+    heap_positions_.resize(universe_size, kNonExistent);
+    for (HeapIndex i = 0; i < data_.size(); ++i) {
+      heap_positions_[index(i)] = i;
+    }
+    BuildHeap();
+  }
+
+  // Removes the top element from the heap (smallest for min-heap, largest for
+  // max-heap), and rearranges the heap.
+  // This will CHECK-fail if the heap is empty.
+  void Pop() {
+    CHECK(!IsEmpty());
+    CHECK(RemoveAtHeapPosition(0));
+  }
+
+  // Returns the index of the top element, without modifying the heap.
+  // Note that this does not remove the element from the heap, Pop() must be
+  // called explicitly.
+  Index TopIndex() const {
+    CHECK(!IsEmpty());
+    return data_[0].second;
+  }
+
+  // Returns the priority of the top element, without modifying the heap.
+  // Note that this does not remove the element from the heap, Pop() must be
+  // called explicitly.
+  Priority TopPriority() const {
+    CHECK(!IsEmpty());
+    return data_[0].first;
+  }
+
+  // Returns the number of elements in the heap.
+  HeapIndex heap_size() const { return heap_size_; }
+
+  // True iff the heap is empty.
+  bool IsEmpty() const { return heap_size() == 0; }
+
+  // Inserts an element into the heap, or updates its priority if it is
+  // already present.
+  void Insert(Aggregate element) {
+    const Index index = element.second;
+    if (index >= heap_positions_.size()) {
+      heap_positions_.resize(index + 1, kNonExistent);
+    }
+    if (GetHeapPosition(index) == kNonExistent) {
+      heap_positions_[index] = heap_size_;
+      if (heap_size_ < data_.size()) {
+        data_[heap_size_] = element;
+      } else {
+        data_.push_back(element);
+      }
+      ++heap_size_;
+    }
+    Update(element);
+  }
+
+  // Removes the element at index. Returns false if the element does not
+  // appear in the heap.
+  bool Remove(Index index) {
+    if (IsEmpty()) return false;
+    const HeapIndex heap_position = GetHeapPosition(index);
+    return heap_position != kNonExistent ? RemoveAtHeapPosition(heap_position)
+                                         : false;
+  }
+
+  // Changes the priority of an element already in the heap.
+  void Update(Aggregate element) {
+    DCHECK(!IsEmpty());
+    const HeapIndex heap_position = GetHeapPosition(element.second);
+    DCHECK_GE(heap_position, 0);
+    DCHECK_LT(heap_position, heap_positions_.size());
+    data_[heap_position] = element;
+    if (HasPriority(heap_position, Parent(heap_position))) {
+      SiftUp(heap_position);
+    } else {
+      SiftDown(heap_position);
+    }
+  }
+
+  // Checks whether the element with the given index is in the heap.
+  bool Contains(Index index) const {
+    return GetHeapPosition(index) != kNonExistent;
+  }
+
+  // Checks that the heap is well-formed.
+  bool CheckHeapProperty() const {
+    for (HeapIndex i = heap_size() - 1; i > 0; --i) {
+      CHECK(HasPriority(Parent(i), i))
+          << "Parent " << Parent(i) << " with priority " << priority(Parent(i))
+          << " does not have priority over " << i << " with priority "
+          << priority(i) << ", heap_size = " << heap_size()
+          << ", priority difference = " << priority(i) - priority(Parent(i));
+    }
+    CHECK_LE(heap_size(), heap_positions_.size());
+    CHECK_LE(heap_size(), data_.size());
+    return true;
+  }
+
+ private:
+  // Gets the current position of element with index i in the heap.
+  HeapIndex GetHeapPosition(Index i) const {
+    DCHECK_GE(i, 0);
+    DCHECK_LT(i, heap_positions_.size());
+    return heap_positions_[i];
+  }
+
+  // Removes an element at a given heap position.
+  bool RemoveAtHeapPosition(HeapIndex heap_index) {
+    DCHECK(!IsEmpty());
+    DCHECK_GE(heap_index, 0);
+    if (heap_index >= heap_size()) return false;
+    PerformSwap(heap_index, heap_size() - 1);
+    --heap_size_;
+    if (HasPriority(heap_index, Parent(heap_index))) {
+      SiftUp(heap_index);
+    } else {
+      SiftDown(heap_index);
+    }
+    heap_positions_[index(heap_size_)] = kNonExistent;
+    return true;
+  }
+
+  // Maintains the heap property by sifting down, starting from the end.
+  void BuildHeap() {
+    for (HeapIndex i = Parent(heap_size()); i >= 0; --i) {
+      SiftDown(i);
+    }
+    DCHECK(CheckHeapProperty());
+  }
+
+  // Maintains the heap property by sifting up an element.
+  void SiftUp(HeapIndex index) {
+    while (index > 0 && HasPriority(index, Parent(index))) {
+      PerformSwap(index, Parent(index));
+      index = Parent(index);
+    }
+  }
+
+  // Maintains the heap property by sifting down an element.
+  void SiftDown(HeapIndex index) {
+    while (true) {
+      const HeapIndex highest_priority_child = GetHighestPriorityChild(index);
+      if (highest_priority_child == index) return;
+      PerformSwap(index, highest_priority_child);
+      index = highest_priority_child;
+    }
+  }
+
+  // Finds the child with the highest priority, i.e. the child with the
+  // smallest (resp. largest) key for a min- (resp. max-) heap.
+  // Returns `index` if there are no such children.
+  HeapIndex GetHighestPriorityChild(HeapIndex index) const {
+    const HeapIndex right_bound = std::min(RightChild(index) + 1, heap_size());
+    HeapIndex highest_priority_child = index;
+    for (HeapIndex i = LeftChild(index); i < right_bound; ++i) {
+      if (HasPriority(i, highest_priority_child)) {
+        highest_priority_child = i;
+      }
+    }
+    return highest_priority_child;
+  }
+
+  // Swaps two elements of data_, while also making sure heap_positions_ is
+  // properly maintained.
+  void PerformSwap(HeapIndex i, HeapIndex j) {
+    std::swap(data_[i], data_[j]);
+    std::swap(heap_positions_[index(i)], heap_positions_[index(j)]);
+  }
+
+  // Compares two elements based on whether we are dealing with a min- or a
+  // max-heap. Returns true if (the data indexed by) i has priority over j.
+  // Note that we only use operator<.
+  bool HasPriority(HeapIndex i, HeapIndex j) const {
+    return IsMaxHeap ? data_[j] < data_[i] : data_[i] < data_[j];
+  }
+
+  // Since Arity is a (small) constant, we expect compilers to avoid
+  // multiplication instructions and use LEA instructions or a combination
+  // of shifts and arithmetic operations.
+  // Powers of 2 are guaranteed to be quick thanks to simple shifts.
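+  //
+  // As a purely illustrative aside (not in the original header): with
+  // Arity == 4, node 2 has children 4 * 2 + 1 = 9 through 4 * (2 + 1) = 12,
+  // and each of them has parent (i - 1) / 4 == 2 for i in 9..12.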
+
+  // Gets the leftmost child index of a given node.
+  HeapIndex LeftChild(HeapIndex index) const { return Arity * index + 1; }
+
+  // Gets the rightmost child index of a given node.
+  HeapIndex RightChild(HeapIndex index) const { return Arity * (index + 1); }
+
+  // For division, the optimization is more uncertain, although a simple
+  // multiplication and a shift might be used by the compiler.
+  // Of course, powers of 2 are guaranteed to be quick thanks to simple
+  // shifts.
+
+  // Gets the parent index of a given index.
+  HeapIndex Parent(HeapIndex index) const { return (index - 1) / Arity; }
+
+  // Returns the index of the element at position i in the heap.
+  Index index(HeapIndex i) const { return data_[i].second; }
+
+  // Returns the priority of the element at position i in the heap.
+  Priority priority(HeapIndex i) const { return data_[i].first; }
+
+  // The heap is stored as a vector of (priority, index) pairs.
+  std::vector<Aggregate> data_;
+
+  // Maps original index to current heap position: at all times,
+  // heap_positions_[index(i)] == i for every heap position i, and
+  // heap_positions_[j] == kNonExistent for every index j not in the heap.
+  std::vector<HeapIndex> heap_positions_;
+
+  // The number of elements currently in the heap. This may be updated
+  // either when removing an element (which is not removed from data_), or
+  // adding a new one.
+  HeapIndex heap_size_ = 0;
+
+  // The heap-position marker for indices not in the heap.
+  const Index kNonExistent = -1;
+};
+
+#endif  // OR_TOOLS_ALGORITHMS_ADJUSTABLE_K_ARY_HEAP_H_
diff --git a/ortools/algorithms/adjustable_k_ary_heap_stress_test.cc b/ortools/algorithms/adjustable_k_ary_heap_stress_test.cc
new file mode 100644
index 00000000000..160c6698a29
--- /dev/null
+++ b/ortools/algorithms/adjustable_k_ary_heap_stress_test.cc
@@ -0,0 +1,157 @@
+// Copyright 2010-2024 Google LLC
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <cstdint>
+#include <limits>
+#include <random>
+#include <vector>
+
+#include "absl/flags/flag.h"
+#include "absl/log/check.h"
+#include "absl/random/random.h"
+#include "gtest/gtest.h"
+#include "ortools/algorithms/adjustable_k_ary_heap.h"
+#include "ortools/base/logging.h"
+
+// Stress test for AdjustableKAryHeap.
+// The test generates a random heap of size num_elements. Then, it randomly
+// changes the priority of a fraction of the elements (fraction_to_change),
+// removes a fraction of the elements (fraction_to_remove), and reinserts a
+// fraction of the elements (fraction_to_reinsert). After all of these
+// operations, the test verifies that the heap property is satisfied. Then, it
+// pops all of the elements from the heap and verifies that the elements are
+// popped in order.
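+//
+// As an illustrative aside (a hypothetical invocation, not from the original
+// file): a quicker local run can shrink the instance by overriding the flags
+// declared below, e.g.
+//
+//   bazel test //ortools/algorithms:adjustable_k_ary_heap_stress_test \
+//     --test_arg=--num_elements=1000000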
+
+ABSL_FLAG(int32_t, num_elements, 100000000,
+          "The number of elements for the stress test.");
+ABSL_FLAG(double, fraction_to_change_priority, 0.01,
+          "The fraction of elements that will get a changed priority after "
+          "initial population.");
+ABSL_FLAG(double, fraction_to_reinsert, 0.001,
+          "The fraction of elements that will get reinserted after "
+          "initial population.");
+ABSL_FLAG(double, fraction_to_remove, 0.001,
+          "The fraction of elements that will get removed after "
+          "initial population.");
+
+namespace operations_research {
+
+template <int Arity, bool IsMaxHeap>
+void StressTest() {
+  AdjustableKAryHeap<double, int32_t, Arity, IsMaxHeap> heap;
+  std::mt19937 rnd(/*seed=*/301);
+  const int32_t num_elements = absl::GetFlag(FLAGS_num_elements);
+  const double fraction_to_change =
+      absl::GetFlag(FLAGS_fraction_to_change_priority);
+  const double fraction_to_reinsert =
+      absl::GetFlag(FLAGS_fraction_to_reinsert);
+  const double fraction_to_remove = absl::GetFlag(FLAGS_fraction_to_remove);
+
+  LOG(INFO) << "Populating AdjustableKAryHeap with num_elements = "
+            << num_elements;
+
+  std::vector<int32_t> elts_to_change, elts_to_reinsert, elts_to_remove;
+  for (int32_t i = 0; i < num_elements; ++i) {
+    const double priority = absl::Uniform<double>(rnd, 0, 1000000000.0);
+
+    if (absl::Uniform<double>(rnd, 0, 1.0) < fraction_to_change) {
+      elts_to_change.push_back(i);
+    }
+    if (absl::Uniform<double>(rnd, 0, 1.0) < fraction_to_reinsert) {
+      elts_to_reinsert.push_back(i);
+    }
+    if (absl::Uniform<double>(rnd, 0, 1.0) < fraction_to_remove) {
+      elts_to_remove.push_back(i);
+    }
+    heap.Insert({priority, i});
+    LOG_EVERY_POW_2(INFO) << "heap.Insert, i = " << i;
+  }
+  LOG(INFO) << "AdjustableKAryHeap filled with heap_size = "
+            << heap.heap_size();
+  LOG(INFO) << "elts_to_change.size() = " << elts_to_change.size();
+  for (const auto elem : elts_to_change) {
+    const double updated_priority =
+        absl::Uniform<double>(rnd, 0, 1000000000.0);
+    heap.Update({updated_priority, elem});
+  }
+
+  LOG(INFO) << "AdjustableKAryHeap filled with heap_size = "
+            << heap.heap_size();
+  LOG(INFO) << "elts_to_remove.size() = " << elts_to_remove.size();
+  for (const auto elem : elts_to_remove) {
+    heap.Remove(elem);
+  }
+
+  LOG(INFO) << "AdjustableKAryHeap filled with heap_size = "
+            << heap.heap_size();
+  LOG(INFO) << "elts_to_reinsert.size() = " << elts_to_reinsert.size();
+  for (const auto elem : elts_to_reinsert) {
+    const double updated_priority =
+        absl::Uniform<double>(rnd, 0, 1000000000.0);
+    heap.Insert({updated_priority, elem});
+  }
+
+  LOG(INFO) << "Running AdjustableKAryHeap::CheckHeapProperty()";
+  CHECK(heap.CheckHeapProperty());
+  LOG(INFO) << "heap.CheckHeapProperty() complete";
+  if (IsMaxHeap) {
+    double largest = std::numeric_limits<double>::infinity();
+    while (!heap.IsEmpty()) {
+      const auto prio = heap.TopPriority();
+      const auto idx = heap.TopIndex();
+      heap.Pop();
+      CHECK_LE(prio, largest);
+      largest = prio;
+      heap.Remove(idx);
+      LOG_EVERY_POW_2(INFO)
+          << "heap.Remove, heap.heap_size() = " << heap.heap_size();
+    }
+  } else {
+    double smallest = -std::numeric_limits<double>::infinity();
+    while (!heap.IsEmpty()) {
+      const auto prio = heap.TopPriority();
+      const auto idx = heap.TopIndex();
+      heap.Pop();
+      CHECK_LE(smallest, prio);
+      smallest = prio;
+      heap.Remove(idx);
+      LOG_EVERY_POW_2(INFO)
+          << "heap.Remove, heap.heap_size() = " << heap.heap_size();
+    }
+  }
+  LOG(INFO) << "AdjustableKAryHeap is now empty. Stress test complete.";
+}
+
+#define ADJUSTABLE_KARY_HEAP_STRESS_TEST(arity)            \
+  TEST(AdjustableKAryHeapTest, Stress32Bit##arity##Max) {  \
+    StressTest<arity, /*IsMaxHeap=*/true>();               \
+  }                                                        \
+  TEST(AdjustableKAryHeapTest, Stress32Bit##arity##Min) {  \
+    StressTest<arity, /*IsMaxHeap=*/false>();              \
+  }
+
+ADJUSTABLE_KARY_HEAP_STRESS_TEST(2);
+ADJUSTABLE_KARY_HEAP_STRESS_TEST(3);
+ADJUSTABLE_KARY_HEAP_STRESS_TEST(4);
+ADJUSTABLE_KARY_HEAP_STRESS_TEST(5);
+ADJUSTABLE_KARY_HEAP_STRESS_TEST(6);
+ADJUSTABLE_KARY_HEAP_STRESS_TEST(7);
+ADJUSTABLE_KARY_HEAP_STRESS_TEST(8);
+ADJUSTABLE_KARY_HEAP_STRESS_TEST(9);
+ADJUSTABLE_KARY_HEAP_STRESS_TEST(10);
+ADJUSTABLE_KARY_HEAP_STRESS_TEST(11);
+ADJUSTABLE_KARY_HEAP_STRESS_TEST(12);
+ADJUSTABLE_KARY_HEAP_STRESS_TEST(13);
+ADJUSTABLE_KARY_HEAP_STRESS_TEST(14);
+ADJUSTABLE_KARY_HEAP_STRESS_TEST(15);
+ADJUSTABLE_KARY_HEAP_STRESS_TEST(16);
+
+#undef ADJUSTABLE_KARY_HEAP_STRESS_TEST
+
+}  // namespace operations_research
diff --git a/ortools/algorithms/adjustable_k_ary_heap_test.cc b/ortools/algorithms/adjustable_k_ary_heap_test.cc
new file mode 100644
index 00000000000..996a7b65d83
--- /dev/null
+++ b/ortools/algorithms/adjustable_k_ary_heap_test.cc
@@ -0,0 +1,174 @@
+// Copyright 2010-2024 Google LLC
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "ortools/algorithms/adjustable_k_ary_heap.h"
+
+#include <limits>
+#include <queue>
+#include <random>
+#include <utility>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+namespace operations_research {
+
+TEST(AdjustableKAryHeapTest, RandomDataStrongCheck) {
+  const int kSize = 10'000;
+  const double priority_range = kSize / 100;
+  std::random_device rd;
+  std::mt19937 generator(rd());  // Mersenne Twister generator
+  std::uniform_real_distribution<float> priority_dist(0, priority_range);
+  std::vector<std::pair<float, int>> subsets_and_values(kSize);
+  for (int i = 0; i < kSize; ++i) {
+    subsets_and_values[i] = {priority_dist(generator), i};
+  }
+
+  AdjustableKAryHeap<float, int, /*Arity=*/4, /*IsMaxHeap=*/true> heap(
+      subsets_and_values, kSize);
+  EXPECT_TRUE(heap.CheckHeapProperty());
+  float last = std::numeric_limits<float>::max();
+  while (!heap.IsEmpty()) {
+    const auto prio = heap.TopPriority();
+    heap.Pop();
+    EXPECT_LE(prio, last);
+    last = prio;
+  }
+  EXPECT_TRUE(heap.IsEmpty());
+  EXPECT_TRUE(heap.CheckHeapProperty());
+}
+
+TEST(AdjustableKAryHeapTest, RandomDataSpeed) {
+  const int kSize = 1'000'000;
+  const double priority_range = kSize / 100;
+  std::random_device rd;
+  std::mt19937 generator(rd());  // Mersenne Twister generator
+  std::uniform_real_distribution<float> priority_dist(0, priority_range);
+  std::vector<std::pair<float, int>> subsets_and_values(kSize);
+  for (int i = 0; i < kSize; ++i) {
+    subsets_and_values[i] = {priority_dist(generator), i};
+  }
+
+  AdjustableKAryHeap<float, int, /*Arity=*/4, /*IsMaxHeap=*/true> heap(
+      subsets_and_values, kSize);
+  EXPECT_TRUE(heap.CheckHeapProperty());
+  while (!heap.IsEmpty()) {
+    heap.Pop();
+  }
+  EXPECT_TRUE(heap.CheckHeapProperty());
+  EXPECT_TRUE(heap.IsEmpty());
+}
+
+TEST(AdjustableKAryHeapTest, UpdateStrongCheck) {
+  const int kSize = 10'000;
+  const int kNumUpdates = kSize / 100;
+  const double priority_range = kSize / 100;
+  std::random_device rd;
+  std::mt19937 generator(rd());  // Mersenne Twister generator
+  std::uniform_real_distribution<float> priority_dist(0, priority_range);
+  std::uniform_int_distribution<int> index_dist(0, kSize - 1);
+  std::vector<std::pair<float, int>> subsets_and_values(kSize);
+  for (int i = 0; i < kSize; ++i) {
+    subsets_and_values[i] = {priority_dist(generator), i};
+  }
+  AdjustableKAryHeap<float, int, /*Arity=*/4, /*IsMaxHeap=*/true> heap(
+      subsets_and_values, kSize);
+  EXPECT_TRUE(heap.CheckHeapProperty());
+  for (int iter = 0; iter < kNumUpdates; ++iter) {
+    heap.Update({priority_dist(generator), index_dist(generator)});
+    EXPECT_TRUE(heap.CheckHeapProperty());
+  }
+}
+
+TEST(AdjustableKAryHeapTest, RemoveStrongCheck) {
+  const int kSize = 10'000;
+  const int kNumRemovals = kSize;
+  const double priority_range = kSize / 10;
+  std::random_device rd;
+  std::mt19937 generator(rd());  // Mersenne Twister generator
+  std::uniform_real_distribution<float> priority_dist(0, priority_range);
+  std::uniform_int_distribution<int> index_dist(0, kSize - 1);
+  std::vector<std::pair<float, int>> subsets_and_values(kSize);
+  for (int i = 0; i < kSize; ++i) {
+    subsets_and_values[i] = {priority_dist(generator), i};
+  }
+  AdjustableKAryHeap<float, int, /*Arity=*/4, /*IsMaxHeap=*/true> heap(
+      subsets_and_values, kSize);
+  EXPECT_TRUE(heap.CheckHeapProperty());
+  for (int iter = 0; iter < kNumRemovals; ++iter) {
+    heap.Remove(iter);
+    EXPECT_TRUE(heap.CheckHeapProperty());
+  }
+}
+
+TEST(AdjustableKAryHeapTest, OneByOneStrongCheck) {
+  const int kSize = 10'000;
+  const int kNumInsertions = kSize;
+  const double priority_range = kSize / 10;
+  std::random_device rd;
+  std::mt19937 generator(rd());  // Mersenne Twister generator
+  std::uniform_real_distribution<float> priority_dist(0, priority_range);
+  std::uniform_int_distribution<int> index_dist(0, kSize - 1);
+  std::vector<std::pair<float, int>> subsets_and_values;
+  AdjustableKAryHeap<float, int, /*Arity=*/4, /*IsMaxHeap=*/true> heap;
+  EXPECT_TRUE(heap.CheckHeapProperty());
+  for (int iter = 0; iter < kNumInsertions; ++iter) {
+    heap.Insert({priority_dist(generator), index_dist(generator)});
+    EXPECT_TRUE(heap.CheckHeapProperty());
+  }
+}
+
+TEST(AdjustableKAryHeapTest, OneByOneStrongSpeed) {
+  const int kSize = 1'000'000;
+  const int kNumInsertions = kSize;
+  const double priority_range = kSize / 10;
+  std::random_device rd;
+  std::mt19937 generator(rd());  // Mersenne Twister generator
+  std::uniform_real_distribution<float> priority_dist(0, priority_range);
+  std::uniform_int_distribution<int> index_dist(0, kSize - 1);
+  std::vector<std::pair<float, int>> subsets_and_values;
+  AdjustableKAryHeap<float, int, /*Arity=*/4, /*IsMaxHeap=*/true> heap;
+  EXPECT_TRUE(heap.CheckHeapProperty());
+  for (int iter = 0; iter < kNumInsertions; ++iter) {
+    heap.Insert({priority_dist(generator), index_dist(generator)});
+  }
+  EXPECT_TRUE(heap.CheckHeapProperty());
+}
+
+TEST(StandardHeapTest, RandomDataSpeed) {
+  const int kSize = 1'000'000;
+  const double priority_range = kSize / 100;
+  std::random_device rd;
+  std::mt19937 generator(rd());  // Mersenne Twister generator
+  std::uniform_real_distribution<float> priority_dist(0, priority_range);
+
+  std::vector<float> values(kSize);
+  for (int i = 0; i < kSize; ++i) {
+    values[i] = priority_dist(generator);
+  }
+  std::priority_queue<float> heap(values.begin(), values.end());
+  while (!heap.empty()) {
+    heap.pop();
+  }
+}
+
+TEST(AdjustableKAryHeapTest, DoubleInsertionOneRemoval) {
+  const int kSize = 10'000;
+  AdjustableKAryHeap<float, int, /*Arity=*/4, /*IsMaxHeap=*/true> heap;
+
+  for (int i = 0; i < kSize; ++i) {
+    heap.Insert({static_cast<float>(i), i});
+    heap.Insert({static_cast<float>(i + 1), i});
+    heap.Remove(i);
+
+    EXPECT_FALSE(heap.Contains(i));
+  }
+  EXPECT_TRUE(heap.CheckHeapProperty());
+}
+}  // namespace operations_research
diff --git a/ortools/algorithms/binary_search.h b/ortools/algorithms/binary_search.h
index 05acc9381a8..56c11dc2ffb 100644
--- a/ortools/algorithms/binary_search.h
+++ b/ortools/algorithms/binary_search.h
@@ -14,7 +14,6 @@
 #ifndef OR_TOOLS_ALGORITHMS_BINARY_SEARCH_H_
 #define OR_TOOLS_ALGORITHMS_BINARY_SEARCH_H_
 
-#include
 #include
 #include
 #include
@@ -23,7 +22,7 @@
 #include "absl/functional/function_ref.h"
 #include "absl/log/check.h"
 #include "absl/numeric/int128.h"
-#include "ortools/base/dump_vars.h"
+#include "absl/types/span.h"
 #include "ortools/base/logging.h"
 
 namespace operations_research {
diff --git a/ortools/algorithms/binary_search_test.cc b/ortools/algorithms/binary_search_test.cc
index 6ffb2f425b5..be670d26462 100644
--- a/ortools/algorithms/binary_search_test.cc
+++ b/ortools/algorithms/binary_search_test.cc
@@ -19,6 +19,7 @@
 #include
 #include
 #include
+#include
 
 #include "absl/base/log_severity.h"
 #include "absl/numeric/int128.h"
diff --git a/ortools/algorithms/csharp/knapsack_solver.i b/ortools/algorithms/csharp/knapsack_solver.i
index 8df31a777a8..fef62c4b4ef 100644
--- a/ortools/algorithms/csharp/knapsack_solver.i
+++ b/ortools/algorithms/csharp/knapsack_solver.i
@@ -13,9 +13,8 @@
 
 // TODO(user): Refactor this file to adhere to the SWIG style guide.
 
-%include "enums.swg"
-
 %include "ortools/base/base.i"
+%include "enums.swg"
 
 %import "ortools/util/csharp/vector.i"
 
 // Include the file we want to wrap a first time.
@@ -26,12 +25,17 @@
 // by default vector<vector<int64_t>> is mapped to a jagged array i.e. .Net type long[][]
 // but here we want a regular matrix i.e. .Net type long[,]
 %template(Int64Vector) std::vector<int64_t>;
-%template(Int64VectorVector) std::vector<std::vector<int64_t> >;
+%template(Int64Matrix) std::vector<std::vector<int64_t> >;
 VECTOR_AS_CSHARP_ARRAY(int64_t, int64_t, long, Int64Vector);
-REGULAR_MATRIX_AS_CSHARP_ARRAY(int64_t, int64_t, long, Int64VectorVector);
+REGULAR_MATRIX_AS_CSHARP_ARRAY(int64_t, int64_t, long, Int64Matrix);
+
+namespace operations_research {
+
+%unignore KnapsackSolver;
+%rename (UseReduction) KnapsackSolver::use_reduction;
+%rename (SetUseReduction) KnapsackSolver::set_use_reduction;
 
-%rename (UseReduction) operations_research::KnapsackSolver::use_reduction;
-%rename (SetUseReduction) operations_research::KnapsackSolver::set_use_reduction;
+}  // namespace operations_research
 
 // TODO(user): Replace with %ignoreall/%unignoreall
 //swiglint: disable include-h-allglobals
diff --git a/ortools/algorithms/dynamic_partition.cc b/ortools/algorithms/dynamic_partition.cc
index b6190069420..d870d8a659a 100644
--- a/ortools/algorithms/dynamic_partition.cc
+++ b/ortools/algorithms/dynamic_partition.cc
@@ -19,7 +19,7 @@
 #include
 #include
 
-#include "absl/strings/str_format.h"
+#include "absl/log/check.h"
 #include "absl/strings/str_join.h"
 #include "absl/types/span.h"
 #include "ortools/base/murmur.h"
@@ -186,17 +186,15 @@ void DynamicPartition::UndoRefineUntilNumPartsEqual(int original_num_parts) {
   }
 }
 
-std::string DynamicPartition::DebugString(DebugStringSorting sorting) const {
-  if (sorting != SORT_LEXICOGRAPHICALLY && sorting != SORT_BY_PART) {
-    return absl::StrFormat("Unsupported sorting: %d", sorting);
-  }
+std::string DynamicPartition::DebugString(
+    bool sort_parts_lexicographically) const {
   std::vector<std::vector<int>> parts;
   for (int i = 0; i < NumParts(); ++i) {
     IterablePart iterable_part = ElementsInPart(i);
     parts.emplace_back(iterable_part.begin(), iterable_part.end());
     std::sort(parts.back().begin(), parts.back().end());
   }
-  if (sorting == SORT_LEXICOGRAPHICALLY) {
+  if (sort_parts_lexicographically) {
     std::sort(parts.begin(), parts.end());
   }
   std::string out;
@@ -283,8 +281,9 @@ std::string MergingPartition::DebugString() {
   for (int i = 0; i < NumNodes(); ++i) {
     sorted_parts[GetRootAndCompressPath(i)].push_back(i);
   }
-  for (std::vector<int>& part : sorted_parts)
+  for (std::vector<int>& part : sorted_parts) {
     std::sort(part.begin(), part.end());
+  }
   std::sort(sorted_parts.begin(), sorted_parts.end());
   // Note: typically, a lot of elements of "sorted_parts" will be empty,
   // but these won't be visible in the string that we construct below.
diff --git a/ortools/algorithms/dynamic_partition.h b/ortools/algorithms/dynamic_partition.h
index 1d64822b1cc..ed120f8c8a4 100644
--- a/ortools/algorithms/dynamic_partition.h
+++ b/ortools/algorithms/dynamic_partition.h
@@ -114,16 +114,10 @@ class DynamicPartition {
   // Prerequisite: NumParts() >= original_num_parts.
   void UndoRefineUntilNumPartsEqual(int original_num_parts);
 
-  // Dump the partition to a string. There might be different conventions for
-  // sorting the parts and the elements inside them.
-  enum DebugStringSorting {
-    // Elements are sorted within parts, and parts are then sorted
-    // lexicographically.
-    SORT_LEXICOGRAPHICALLY,
-    // Elements are sorted within parts, and parts are kept in order.
-    SORT_BY_PART,
-  };
-  std::string DebugString(DebugStringSorting sorting) const;
+  // Converts the current partition to a string, like "3 | 1 2 | 0 4 5". Within
+  // each part, elements are sorted. And if sort_parts_lexicographically=true,
+  // the parts are sorted lexicographically instead of by their natural order.
+  std::string DebugString(bool sort_parts_lexicographically) const;
 
   // ADVANCED USAGE:
   // All elements (0..n-1) of the partition, sorted in a way that's compatible
diff --git a/ortools/algorithms/dynamic_partition_test.cc b/ortools/algorithms/dynamic_partition_test.cc
index 233eb73666a..b1474628b23 100644
--- a/ortools/algorithms/dynamic_partition_test.cc
+++ b/ortools/algorithms/dynamic_partition_test.cc
@@ -19,10 +19,8 @@
 #include
 #include
 
-#include "absl/memory/memory.h"
 #include "absl/random/bit_gen_ref.h"
 #include "absl/random/random.h"
-#include "absl/strings/str_join.h"
 #include "gtest/gtest.h"
 #include "ortools/base/gmock.h"
 #include "ortools/base/stl_util.h"
@@ -32,7 +30,6 @@ namespace {
 
 using ::testing::ElementsAre;
 using ::testing::ElementsAreArray;
-using ::testing::HasSubstr;
 using ::testing::IsEmpty;
 using ::testing::StartsWith;
 using ::testing::UnorderedElementsAre;
@@ -142,13 +139,13 @@ TEST(DynamicPartitionTest, Accessors) {
                           UnorderedElementsAre(0, 1, 5),
                           UnorderedElementsAre(2)));
 
-  // Test DebugString(SORT_LEXICOGRAPHICALLY).
+  // Test DebugString(true).
   EXPECT_EQ("0 1 5 | 2 | 3 4 6",
-            partition.DebugString(DynamicPartition::SORT_LEXICOGRAPHICALLY));
+            partition.DebugString(/*sort_parts_lexicographically=*/true));
 
-  // Test DebugString(SORT_BY_PART).
+  // Test DebugString(false).
   EXPECT_EQ("3 4 6 | 0 1 5 | 2",
-            partition.DebugString(DynamicPartition::SORT_BY_PART));
+            partition.DebugString(/*sort_parts_lexicographically=*/false));
 
   // Test PartOf().
   EXPECT_EQ(1, partition.PartOf(0));
@@ -172,22 +169,15 @@ TEST(DynamicPartitionTest, Accessors) {
 
 TEST(DynamicPartitionTest, ConstructWithEmptyPartition) {
   DynamicPartition partition(std::vector<int>(0));
-  EXPECT_EQ("", partition.DebugString(DynamicPartition::SORT_BY_PART));
+  EXPECT_EQ("", partition.DebugString(/*sort_parts_lexicographically=*/false));
 }
 
 TEST(DynamicPartitionTest, ConstructWithPartition) {
   DynamicPartition partition({2, 1, 0, 1, 0, 3, 0});
   EXPECT_EQ("0 | 1 3 | 2 4 6 | 5",
-            partition.DebugString(DynamicPartition::SORT_LEXICOGRAPHICALLY));
+            partition.DebugString(/*sort_parts_lexicographically=*/true));
   EXPECT_EQ("2 4 6 | 1 3 | 0 | 5",
-            partition.DebugString(DynamicPartition::SORT_BY_PART));
-}
-
-TEST(DynamicPartitionTest, DebugStringWithUnknownSorting) {
-  DynamicPartition partition(4);
-  EXPECT_THAT(partition.DebugString(
-                  static_cast<DynamicPartition::DebugStringSorting>(987)),
-              HasSubstr("Unsupported sorting"));
+            partition.DebugString(/*sort_parts_lexicographically=*/false));
 }
 
 TEST(DynamicPartitionTest, FingerprintBasic) {
@@ -200,8 +190,8 @@ TEST(DynamicPartitionTest, FingerprintBasic) {
   // We have to rely on all the other methods working as expected: if any of
   // the other unit tests failed, then this one probably will, too.
   ASSERT_EQ("1 3 | 2 4 | 0 | 5",
-            p2.DebugString(DynamicPartition::SORT_BY_PART));
-  ASSERT_THAT(p1.DebugString(DynamicPartition::SORT_BY_PART),
+            p2.DebugString(/*sort_parts_lexicographically=*/false));
+  ASSERT_THAT(p1.DebugString(/*sort_parts_lexicographically=*/false),
              StartsWith("1 3 | 2 4 | 0 | 5 |"));
 
   for (int p = 0; p < 3; ++p) {
@@ -310,7 +300,7 @@ TEST(DynamicPartitionTest, ElementsInHierarchicalOrder) {
   partition.Refine({0});  // Now: (((2 | 0) | 1) | (3 | 4))
   // The parts are sorted differently than the natural order.
   ASSERT_EQ("2 | 3 | 1 | 4 | 0",
-            partition.DebugString(DynamicPartition::SORT_BY_PART));
+            partition.DebugString(/*sort_parts_lexicographically=*/false));
   EXPECT_THAT(partition.ElementsInHierarchicalOrder(),
               ElementsAre(2, 0, 1, 3, 4));
   partition.UndoRefineUntilNumPartsEqual(1);
diff --git a/ortools/algorithms/find_graph_symmetries.cc b/ortools/algorithms/find_graph_symmetries.cc
index d310937502c..74f70f832a9 100644
--- a/ortools/algorithms/find_graph_symmetries.cc
+++ b/ortools/algorithms/find_graph_symmetries.cc
@@ -471,7 +471,7 @@ absl::Status GraphSymmetryFinder::FindSymmetries(
                         "During the initial refinement.");
   }
   VLOG(4) << "Base partition: "
-          << base_partition.DebugString(DynamicPartition::SORT_BY_PART);
+          << base_partition.DebugString(/*sort_parts_lexicographically=*/false);
 
   MergingPartition node_equivalence_classes(NumNodes());
   std::vector<std::vector<int>> permutations_displacing_node(NumNodes());
@@ -521,7 +521,8 @@ absl::Status GraphSymmetryFinder::FindSymmetries(
     DistinguishNodeInPartition(invariant_node, &base_partition, nullptr);
     VLOG(4) << "Invariant dive: invariant node = " << invariant_node
             << "; partition after: "
-            << base_partition.DebugString(DynamicPartition::SORT_BY_PART);
+            << base_partition.DebugString(
+                   /*sort_parts_lexicographically=*/false);
     if (time_limit_->LimitReached()) {
       return absl::Status(absl::StatusCode::kDeadlineExceeded,
                           "During the invariant dive.");
@@ -547,7 +548,8 @@ absl::Status GraphSymmetryFinder::FindSymmetries(
     image_partition.UndoRefineUntilNumPartsEqual(base_num_parts);
     VLOG(4) << "Backtracking invariant dive: root node = " << root_node
             << "; partition: "
-            << base_partition.DebugString(DynamicPartition::SORT_BY_PART);
+            << base_partition.DebugString(
+                   /*sort_parts_lexicographically=*/false);
 
     // Now we'll try to map "root_node" to all image nodes that seem compatible
"root_node" to all image nodes that seem compatible // and that aren't "root_node" itself. @@ -691,14 +693,15 @@ std::unique_ptr GraphSymmetryFinder::FindOneSuitablePermutation( int root_node, int root_image_node, DynamicPartition* base_partition, DynamicPartition* image_partition, - const std::vector>& + absl::Span> generators_found_so_far, absl::Span> permutations_displacing_node) { // DCHECKs() and statistics. ScopedTimeDistributionUpdater search_time_updater(&stats_.search_time); DCHECK_EQ("", tmp_dynamic_permutation_.DebugString()); - DCHECK_EQ(base_partition->DebugString(DynamicPartition::SORT_BY_PART), - image_partition->DebugString(DynamicPartition::SORT_BY_PART)); + DCHECK_EQ( + base_partition->DebugString(/*sort_parts_lexicographically=*/false), + image_partition->DebugString(/*sort_parts_lexicographically=*/false)); DCHECK(search_states_.empty()); // These will be used during the search. See their usage. @@ -751,8 +754,10 @@ GraphSymmetryFinder::FindOneSuitablePermutation( &image_singletons); } VLOG(4) << ss.DebugString(); - VLOG(4) << base_partition->DebugString(DynamicPartition::SORT_BY_PART); - VLOG(4) << image_partition->DebugString(DynamicPartition::SORT_BY_PART); + VLOG(4) << base_partition->DebugString( + /*sort_parts_lexicographically=*/false); + VLOG(4) << image_partition->DebugString( + /*sort_parts_lexicographically=*/false); // Run some diagnoses on the two partitions. There are many outcomes, so // it's a bit complicated: diff --git a/ortools/algorithms/find_graph_symmetries.h b/ortools/algorithms/find_graph_symmetries.h index 797aff8f555..58a402fb2fd 100644 --- a/ortools/algorithms/find_graph_symmetries.h +++ b/ortools/algorithms/find_graph_symmetries.h @@ -24,6 +24,7 @@ #ifndef OR_TOOLS_ALGORITHMS_FIND_GRAPH_SYMMETRIES_H_ #define OR_TOOLS_ALGORITHMS_FIND_GRAPH_SYMMETRIES_H_ +#include #include #include #include @@ -172,7 +173,7 @@ class GraphSymmetryFinder { std::unique_ptr FindOneSuitablePermutation( int root_node, int root_image_node, DynamicPartition* base_partition, DynamicPartition* image_partition, - const std::vector>& + absl::Span> generators_found_so_far, absl::Span> permutations_displacing_node); diff --git a/ortools/algorithms/find_graph_symmetries_test.cc b/ortools/algorithms/find_graph_symmetries_test.cc index 3ac85fddb7c..5a5ce2cacaf 100644 --- a/ortools/algorithms/find_graph_symmetries_test.cc +++ b/ortools/algorithms/find_graph_symmetries_test.cc @@ -72,7 +72,7 @@ std::string FullyRefineGraph(const std::vector>& arcs) { GraphSymmetryFinder symmetry_finder(graph, GraphIsSymmetric(graph)); DynamicPartition partition(graph.num_nodes()); symmetry_finder.RecursivelyRefinePartitionByAdjacency(0, &partition); - return partition.DebugString(DynamicPartition::SORT_LEXICOGRAPHICALLY); + return partition.DebugString(/*sort_parts_lexicographically=*/true); } TEST(RecursivelyRefinePartitionByAdjacencyTest, DoublyLinkedChain) { @@ -262,8 +262,8 @@ class FindSymmetriesTest : public ::testing::Test { return dense_perm; } - std::vector ComposePermutations(const std::vector& p1, - const std::vector& p2) { + std::vector ComposePermutations(absl::Span p1, + absl::Span p2) { CHECK_EQ(p1.size(), p2.size()); std::vector composed(p1.size(), -1); for (int i = 0; i < p1.size(); ++i) composed[i] = p1[p2[i]]; @@ -274,7 +274,7 @@ class FindSymmetriesTest : public ::testing::Test { // with some basic, non-through EXPECT(..) that check that each generator // does make the group grow. 
   int ComputePermutationGroupSizeAndVerifyBasicIrreductibility(
-      const std::vector<std::unique_ptr<SparsePermutation>>& generators) {
+      absl::Span<const std::unique_ptr<SparsePermutation>> generators) {
     if (generators.empty()) return 1;  // The identity.
     const int num_nodes = generators[0]->Size();
     // The group only contains the identity at first.
@@ -342,7 +342,7 @@ class FindSymmetriesTest : public ::testing::Test {
     // Verify the equivalence classes.
     EXPECT_EQ(expected_node_equivalence_classes,
               DynamicPartition(node_equivalence_classes)
-                  .DebugString(DynamicPartition::SORT_LEXICOGRAPHICALLY));
+                  .DebugString(/*sort_parts_lexicographically=*/true));
 
     // Verify the automorphism group size.
     double log_of_permutation_group_size = 0.0;
@@ -374,7 +374,7 @@ TEST_F(FindSymmetriesTest, CyclesOfDifferentLength) {
 
 // This can be used to convert a list of M undirected edges into the list of
 // 2*M corresponding directed arcs.
 std::vector<std::pair<int, int>> AppendReversedPairs(
-    const std::vector<std::pair<int, int>>& pairs) {
+    absl::Span<const std::pair<int, int>> pairs) {
   std::vector<std::pair<int, int>> out;
   out.reserve(pairs.size() * 2);
   out.insert(out.begin(), pairs.begin(), pairs.end());
@@ -697,8 +697,7 @@ void AddReverseArcsAndFinalize(Graph* graph) {
   graph->Build();
 }
 
-void SetGraphEdges(const std::vector<std::pair<int, int>>& edges,
-                   Graph* graph) {
+void SetGraphEdges(absl::Span<const std::pair<int, int>> edges, Graph* graph) {
   DCHECK_EQ(graph->num_arcs(), 0);
   for (const auto [from, to] : edges) graph->AddArc(from, to);
   AddReverseArcsAndFinalize(graph);
diff --git a/ortools/algorithms/hungarian.cc b/ortools/algorithms/hungarian.cc
index cf45fb72372..13ce0120249 100644
--- a/ortools/algorithms/hungarian.cc
+++ b/ortools/algorithms/hungarian.cc
@@ -653,7 +653,7 @@ bool InputContainsNan(absl::Span<const std::vector<double>> input) {
 }
 
 void MinimizeLinearAssignment(
-    const std::vector<std::vector<double>>& cost,
+    absl::Span<const std::vector<double>> cost,
     absl::flat_hash_map<int, int>* direct_assignment,
     absl::flat_hash_map<int, int>* reverse_assignment) {
   if (InputContainsNan(cost)) {
@@ -671,7 +671,7 @@ void MinimizeLinearAssignment(
 }
 
 void MaximizeLinearAssignment(
-    const std::vector<std::vector<double>>& cost,
+    absl::Span<const std::vector<double>> cost,
     absl::flat_hash_map<int, int>* direct_assignment,
     absl::flat_hash_map<int, int>* reverse_assignment) {
   if (InputContainsNan(cost)) {
diff --git a/ortools/algorithms/hungarian.h b/ortools/algorithms/hungarian.h
index fb8905d85f0..7f24103a3fb 100644
--- a/ortools/algorithms/hungarian.h
+++ b/ortools/algorithms/hungarian.h
@@ -41,18 +41,19 @@
 #include
 
 #include "absl/container/flat_hash_map.h"
+#include "absl/types/span.h"
 
 namespace operations_research {
 
 // See IMPORTANT NOTE at the top of the file.
 void MinimizeLinearAssignment(
-    const std::vector<std::vector<double> >& cost,
+    absl::Span<const std::vector<double>> cost,
     absl::flat_hash_map<int, int>* direct_assignment,
     absl::flat_hash_map<int, int>* reverse_assignment);
 
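+// For illustration only (a hypothetical 2x2 instance; these values are not
+// part of the original header):
+//
+//   std::vector<std::vector<double>> cost = {{4, 1}, {2, 8}};
+//   absl::flat_hash_map<int, int> direct, reverse;
+//   MinimizeLinearAssignment(cost, &direct, &reverse);
+//   // The optimum assigns agent 0 to task 1 and agent 1 to task 0,
+//   // for a total cost of 1 + 2 = 3.
+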
 // See IMPORTANT NOTE at the top of the file.
 void MaximizeLinearAssignment(
-    const std::vector<std::vector<double> >& cost,
+    absl::Span<const std::vector<double>> cost,
     absl::flat_hash_map<int, int>* direct_assignment,
     absl::flat_hash_map<int, int>* reverse_assignment);
 
diff --git a/ortools/algorithms/hungarian_test.cc b/ortools/algorithms/hungarian_test.cc
index 78cb65729a8..bb2c1964544 100644
--- a/ortools/algorithms/hungarian_test.cc
+++ b/ortools/algorithms/hungarian_test.cc
@@ -22,6 +22,7 @@
 
 #include "absl/container/flat_hash_map.h"
 #include "absl/random/distributions.h"
+#include "absl/types/span.h"
 #include "gtest/gtest.h"
 #include "ortools/base/macros.h"
 #include "ortools/base/map_util.h"
@@ -51,7 +52,7 @@ void GenericCheck(const int expected_assignment_size,
   }
 }
 
-void TestMinimization(const std::vector<std::vector<double>>& cost,
+void TestMinimization(absl::Span<const std::vector<double>> cost,
                       const int expected_assignment_size,
                       const int expected_agents[], const int expected_tasks[]) {
   absl::flat_hash_map<int, int> direct_assignment;
@@ -62,7 +63,7 @@ void TestMinimization(const std::vector<std::vector<double>>& cost,
                expected_agents, expected_tasks);
 }
 
-void TestMaximization(const std::vector<std::vector<double>>& cost,
+void TestMaximization(absl::Span<const std::vector<double>> cost,
                       const int expected_assignment_size,
                       const int expected_agents[], const int expected_tasks[]) {
   absl::flat_hash_map<int, int> direct_assignment;
diff --git a/ortools/algorithms/knapsack_solver.cc b/ortools/algorithms/knapsack_solver.cc
index 06f062f07b7..92bfb6ccdfc 100644
--- a/ortools/algorithms/knapsack_solver.cc
+++ b/ortools/algorithms/knapsack_solver.cc
@@ -19,6 +19,7 @@
 #include
 #include
 #include
+#include
 #include
 
 #include "absl/log/check.h"
@@ -1282,7 +1283,7 @@ int64_t KnapsackMIPSolver::Solve(TimeLimit* /*time_limit*/,
 
 // ----- KnapsackCpSat -----
 class KnapsackCpSat : public BaseKnapsackSolver {
  public:
-  explicit KnapsackCpSat(const std::string& solver_name);
+  explicit KnapsackCpSat(absl::string_view solver_name);
 
   // Initializes the solver and enters the problem to be solved.
   void Init(const std::vector<int64_t>& profits,
@@ -1305,7 +1306,7 @@ class KnapsackCpSat : public BaseKnapsackSolver {
   std::vector<bool> best_solution_;
 };
 
-KnapsackCpSat::KnapsackCpSat(const std::string& solver_name)
+KnapsackCpSat::KnapsackCpSat(absl::string_view solver_name)
     : BaseKnapsackSolver(solver_name),
       profits_(),
       weights_(),
@@ -1623,7 +1624,7 @@ void KnapsackSolver::InitReducedProblem(
       one_dimension_reduced_weights.push_back(one_dimension_weights[item_id]);
     }
   }
-  reduced_weights.push_back(one_dimension_reduced_weights);
+  reduced_weights.push_back(std::move(one_dimension_reduced_weights));
   solver_->Init(reduced_profits, reduced_weights, reduced_capacities);
 }
diff --git a/ortools/algorithms/knapsack_solver_for_cuts.cc b/ortools/algorithms/knapsack_solver_for_cuts.cc
deleted file mode 100644
index b9d3cfdd2a8..00000000000
--- a/ortools/algorithms/knapsack_solver_for_cuts.cc
+++ /dev/null
@@ -1,473 +0,0 @@
-// Copyright 2010-2024 Google LLC
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
- -#include "ortools/algorithms/knapsack_solver_for_cuts.h" - -#include -#include -#include -#include -#include -#include -#include -#include - -#include "absl/types/span.h" -#include "ortools/base/logging.h" - -namespace operations_research { -namespace { - -const int kNoSelection(-1); -const double kInfinity = std::numeric_limits::infinity(); - -// Comparator used to sort item in decreasing efficiency order -// (see KnapsackCapacityPropagator). -struct CompareKnapsackItemsInDecreasingEfficiencyOrder { - explicit CompareKnapsackItemsInDecreasingEfficiencyOrder(double _profit_max) - : profit_max(_profit_max) {} - bool operator()(const KnapsackItemForCutsPtr& item1, - const KnapsackItemForCutsPtr& item2) const { - return item1->GetEfficiency(profit_max) > item2->GetEfficiency(profit_max); - } - const double profit_max; -}; - -// Comparator used to sort search nodes in the priority queue in order -// to pop first the node with the highest profit upper bound -// (see KnapsackSearchNodeForCuts). When two nodes have the same upper bound, we -// prefer the one with the highest current profit. This is usually the one -// closer to a leaf. In practice, the main advantage is to have smaller path. -struct CompareKnapsackSearchNodePtrInDecreasingUpperBoundOrder { - bool operator()(const KnapsackSearchNodeForCuts* node_1, - const KnapsackSearchNodeForCuts* node_2) const { - const double profit_upper_bound_1 = node_1->profit_upper_bound(); - const double profit_upper_bound_2 = node_2->profit_upper_bound(); - if (profit_upper_bound_1 == profit_upper_bound_2) { - return node_1->current_profit() < node_2->current_profit(); - } - return profit_upper_bound_1 < profit_upper_bound_2; - } -}; - -using SearchQueue = std::priority_queue< - KnapsackSearchNodeForCuts*, std::vector, - CompareKnapsackSearchNodePtrInDecreasingUpperBoundOrder>; - -} // namespace - -// ----- KnapsackSearchNodeForCuts ----- -KnapsackSearchNodeForCuts::KnapsackSearchNodeForCuts( - const KnapsackSearchNodeForCuts* const parent, - const KnapsackAssignmentForCuts& assignment) - : depth_(parent == nullptr ? 0 : parent->depth() + 1), - parent_(parent), - assignment_(assignment), - current_profit_(0), - profit_upper_bound_(kInfinity), - next_item_id_(kNoSelection) {} - -// ----- KnapsackSearchPathForCuts ----- -KnapsackSearchPathForCuts::KnapsackSearchPathForCuts( - const KnapsackSearchNodeForCuts* from, const KnapsackSearchNodeForCuts* to) - : from_(from), via_(nullptr), to_(to) {} - -void KnapsackSearchPathForCuts::Init() { - const KnapsackSearchNodeForCuts* node_from = - MoveUpToDepth(from_, to_->depth()); - const KnapsackSearchNodeForCuts* node_to = MoveUpToDepth(to_, from_->depth()); - DCHECK_EQ(node_from->depth(), node_to->depth()); - - // Find common parent. - while (node_from != node_to) { - node_from = node_from->parent(); - node_to = node_to->parent(); - } - via_ = node_from; -} - -const KnapsackSearchNodeForCuts* MoveUpToDepth( - const KnapsackSearchNodeForCuts* node, int depth) { - while (node->depth() > depth) { - node = node->parent(); - } - return node; -} - -// ----- KnapsackStateForCuts ----- -KnapsackStateForCuts::KnapsackStateForCuts() : is_bound_(), is_in_() {} - -void KnapsackStateForCuts::Init(int number_of_items) { - is_bound_.assign(number_of_items, false); - is_in_.assign(number_of_items, false); -} - -// Returns false when the state is invalid. 
-
-// Returns false when the state is invalid.
-bool KnapsackStateForCuts::UpdateState(
-    bool revert, const KnapsackAssignmentForCuts& assignment) {
-  if (revert) {
-    is_bound_[assignment.item_id] = false;
-  } else {
-    if (is_bound_[assignment.item_id] &&
-        is_in_[assignment.item_id] != assignment.is_in) {
-      return false;
-    }
-    is_bound_[assignment.item_id] = true;
-    is_in_[assignment.item_id] = assignment.is_in;
-  }
-  return true;
-}
-
-// ----- KnapsackPropagatorForCuts -----
-KnapsackPropagatorForCuts::KnapsackPropagatorForCuts(
-    const KnapsackStateForCuts* state)
-    : items_(),
-      current_profit_(0),
-      profit_lower_bound_(0),
-      profit_upper_bound_(kInfinity),
-      state_(state) {}
-
-KnapsackPropagatorForCuts::~KnapsackPropagatorForCuts() = default;
-
-void KnapsackPropagatorForCuts::Init(absl::Span<const double> profits,
-                                     absl::Span<const double> weights,
-                                     const double capacity) {
-  const int number_of_items = profits.size();
-  items_.clear();
-
-  for (int i = 0; i < number_of_items; ++i) {
-    items_.emplace_back(
-        std::make_unique<KnapsackItemForCuts>(i, weights[i], profits[i]));
-  }
-  capacity_ = capacity;
-  current_profit_ = 0;
-  profit_lower_bound_ = -kInfinity;
-  profit_upper_bound_ = kInfinity;
-  InitPropagator();
-}
-
-bool KnapsackPropagatorForCuts::Update(
-    bool revert, const KnapsackAssignmentForCuts& assignment) {
-  if (assignment.is_in) {
-    if (revert) {
-      current_profit_ -= items_[assignment.item_id]->profit;
-      consumed_capacity_ -= items()[assignment.item_id]->weight;
-    } else {
-      current_profit_ += items_[assignment.item_id]->profit;
-      consumed_capacity_ += items()[assignment.item_id]->weight;
-      if (consumed_capacity_ > capacity_) {
-        return false;
-      }
-    }
-  }
-  return true;
-}
-
-void KnapsackPropagatorForCuts::CopyCurrentStateToSolution(
-    std::vector<bool>* solution) const {
-  DCHECK(solution != nullptr);
-  for (int i(0); i < items_.size(); ++i) {
-    const int item_id = items_[i]->id;
-    (*solution)[item_id] = state_->is_bound(item_id) && state_->is_in(item_id);
-  }
-  double remaining_capacity = capacity_ - consumed_capacity_;
-  for (const KnapsackItemForCutsPtr& item : sorted_items_) {
-    if (!state().is_bound(item->id)) {
-      if (remaining_capacity >= item->weight) {
-        remaining_capacity -= item->weight;
-        (*solution)[item->id] = true;
-      } else {
-        return;
-      }
-    }
-  }
-}
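A reading aid, not part of the original file: ComputeProfitBounds, just below, maintains the standard relaxation bounds. Let $\hat p$ be the profit of the items bound to be in, plus the unbound items that still fit when scanned in decreasing efficiency order; let $b$ be the break item (the first unbound item that no longer fits) and $\bar c$ the capacity left at that point. With $p_i$, $w_i$ the profit and weight of item $i$, the function sets

$$L = \hat p, \qquad U = \hat p + \Delta, \qquad 0 \le \Delta \le \frac{\bar c \, p_b}{w_b},$$

where $\Delta$ is the tightened extra profit returned by GetAdditionalProfitUpperBound further down. If every unbound item fits, there is no break item, $\Delta = 0$, and $L = U$ proves optimality of the local problem.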
-
-void KnapsackPropagatorForCuts::ComputeProfitBounds() {
-  set_profit_lower_bound(current_profit());
-  break_item_id_ = kNoSelection;
-
-  double remaining_capacity = capacity_ - consumed_capacity_;
-  int break_sorted_item_id = kNoSelection;
-  for (int sorted_id(0); sorted_id < sorted_items_.size(); ++sorted_id) {
-    if (!state().is_bound(sorted_items_[sorted_id]->id)) {
-      const KnapsackItemForCutsPtr& item = sorted_items_[sorted_id];
-      break_item_id_ = item->id;
-      if (remaining_capacity >= item->weight) {
-        remaining_capacity -= item->weight;
-        set_profit_lower_bound(profit_lower_bound() + item->profit);
-      } else {
-        break_sorted_item_id = sorted_id;
-        break;
-      }
-    }
-  }
-
-  set_profit_upper_bound(profit_lower_bound());
-  // If break_sorted_item_id == kNoSelection, then all remaining items fit into
-  // the knapsack, and thus the lower bound on the profit equals the upper
-  // bound. Otherwise, we compute a tight upper bound by filling the remaining
-  // capacity of the knapsack with "fractional" items, in the decreasing order
-  // of their efficiency.
-  if (break_sorted_item_id != kNoSelection) {
-    const double additional_profit =
-        GetAdditionalProfitUpperBound(remaining_capacity, break_sorted_item_id);
-    set_profit_upper_bound(profit_upper_bound() + additional_profit);
-  }
-}
-
-void KnapsackPropagatorForCuts::InitPropagator() {
-  consumed_capacity_ = 0;
-  break_item_id_ = kNoSelection;
-  sorted_items_.clear();
-  sorted_items_.reserve(items().size());
-  for (int i(0); i < items().size(); ++i) {
-    sorted_items_.emplace_back(std::make_unique<KnapsackItemForCuts>(
-        i, items()[i]->weight, items()[i]->profit));
-  }
-  profit_max_ = 0;
-  for (const KnapsackItemForCutsPtr& item : sorted_items_) {
-    profit_max_ = std::max(profit_max_, item->profit);
-  }
-  profit_max_ += 1.0;
-  CompareKnapsackItemsInDecreasingEfficiencyOrder compare_object(profit_max_);
-  std::sort(sorted_items_.begin(), sorted_items_.end(), compare_object);
-}
-
-double KnapsackPropagatorForCuts::GetAdditionalProfitUpperBound(
-    double remaining_capacity, int break_item_id) const {
-  const int after_break_item_id = break_item_id + 1;
-  double additional_profit_when_no_break_item = 0;
-  if (after_break_item_id < sorted_items_.size()) {
-    // As items are sorted by decreasing profit / weight ratio, and the current
-    // weight is non-zero, the next_weight is non-zero too.
-    const double next_weight = sorted_items_[after_break_item_id]->weight;
-    const double next_profit = sorted_items_[after_break_item_id]->profit;
-    additional_profit_when_no_break_item =
-        std::max((remaining_capacity * next_profit) / next_weight, 0.0);
-  }
-
-  const int before_break_item_id = break_item_id - 1;
-  double additional_profit_when_break_item = 0;
-  if (before_break_item_id >= 0) {
-    const double previous_weight = sorted_items_[before_break_item_id]->weight;
-    // Having previous_weight == 0 means the total capacity is smaller than
-    // the weight of the current item. In such a case the item cannot be part
-    // of a solution of the local one dimension problem.
-    if (previous_weight != 0) {
-      const double previous_profit =
-          sorted_items_[before_break_item_id]->profit;
-      const double overused_capacity =
-          sorted_items_[break_item_id]->weight - remaining_capacity;
-      const double lost_profit_from_previous_item =
-          (overused_capacity * previous_profit) / previous_weight;
-      additional_profit_when_break_item = std::max(
-          sorted_items_[break_item_id]->profit - lost_profit_from_previous_item,
-          0.0);
-    }
-  }
-
-  const double additional_profit = std::max(
-      additional_profit_when_no_break_item, additional_profit_when_break_item);
-  return additional_profit;
-}
-
-// ----- KnapsackSolverForCuts -----
-KnapsackSolverForCuts::KnapsackSolverForCuts(std::string solver_name)
-    : propagator_(&state_),
-      best_solution_profit_(0),
-      solver_name_(std::move(solver_name)) {}
-
-void KnapsackSolverForCuts::Init(absl::Span<const double> profits,
-                                 absl::Span<const double> weights,
-                                 const double capacity) {
-  const int number_of_items(profits.size());
-  state_.Init(number_of_items);
-  best_solution_.assign(number_of_items, false);
-  CHECK_EQ(number_of_items, weights.size());
-
-  propagator_.Init(profits, weights, capacity);
-}
-
-void KnapsackSolverForCuts::GetLowerAndUpperBoundWhenItem(int item_id,
-                                                          bool is_item_in,
-                                                          double* lower_bound,
-                                                          double* upper_bound) {
-  DCHECK(lower_bound != nullptr);
-  DCHECK(upper_bound != nullptr);
-  KnapsackAssignmentForCuts assignment(item_id, is_item_in);
-  const bool fail = !IncrementalUpdate(false, assignment);
-  if (fail) {
-    *lower_bound = 0;
-    *upper_bound = 0;
-  } else {
-    *lower_bound = propagator_.profit_lower_bound();
-    *upper_bound = GetAggregatedProfitUpperBound();
-  }
-
-  const bool fail_revert = !IncrementalUpdate(true, assignment);
-  if (fail_revert) {
-    *lower_bound = 0;
-    *upper_bound = 0;
-  }
-}
-
-double KnapsackSolverForCuts::Solve(TimeLimit* time_limit,
-                                    bool* is_solution_optimal) {
-  DCHECK(time_limit != nullptr);
-  DCHECK(is_solution_optimal != nullptr);
-  best_solution_profit_ = 0;
-  *is_solution_optimal = true;
-
-  SearchQueue search_queue;
-  const KnapsackAssignmentForCuts assignment(kNoSelection, true);
-  auto root_node =
-      std::make_unique<KnapsackSearchNodeForCuts>(nullptr, assignment);
-  root_node->set_current_profit(GetCurrentProfit());
-  root_node->set_profit_upper_bound(GetAggregatedProfitUpperBound());
-  root_node->set_next_item_id(GetNextItemId());
-  search_nodes_.push_back(std::move(root_node));
-  const KnapsackSearchNodeForCuts* current_node =
-      search_nodes_.back().get();  // Start with the root node.
-
-  if (MakeNewNode(*current_node, false)) {
-    search_queue.push(search_nodes_.back().get());
-  }
-  if (MakeNewNode(*current_node, true)) {
-    search_queue.push(search_nodes_.back().get());
-  }
-
-  int64_t number_of_nodes_visited = 0;
-  while (!search_queue.empty() &&
-         search_queue.top()->profit_upper_bound() > best_solution_profit_) {
-    if (time_limit->LimitReached()) {
-      *is_solution_optimal = false;
-      break;
-    }
-    if (solution_upper_bound_threshold_ > -kInfinity &&
-        GetAggregatedProfitUpperBound() < solution_upper_bound_threshold_) {
-      *is_solution_optimal = false;
-      break;
-    }
-    if (best_solution_profit_ > solution_lower_bound_threshold_) {
-      *is_solution_optimal = false;
-      break;
-    }
-    if (number_of_nodes_visited >= node_limit_) {
-      *is_solution_optimal = false;
-      break;
-    }
-    KnapsackSearchNodeForCuts* const node = search_queue.top();
-    search_queue.pop();
-
-    if (node != current_node) {
-      KnapsackSearchPathForCuts path(current_node, node);
-      path.Init();
-      CHECK_EQ(UpdatePropagators(path), true);
-      current_node = node;
-    }
-    number_of_nodes_visited++;
-
-    if (MakeNewNode(*node, false)) {
-      search_queue.push(search_nodes_.back().get());
-    }
-    if (MakeNewNode(*node, true)) {
-      search_queue.push(search_nodes_.back().get());
-    }
-  }
-  return best_solution_profit_;
-}
-
-// Returns false when at least one propagator fails.
-bool KnapsackSolverForCuts::UpdatePropagators(
-    const KnapsackSearchPathForCuts& path) {
-  bool no_fail = true;
-  // Revert previous changes.
-  const KnapsackSearchNodeForCuts* node = &path.from();
-  const KnapsackSearchNodeForCuts* const via = &path.via();
-  while (node != via) {
-    no_fail = IncrementalUpdate(true, node->assignment()) && no_fail;
-    node = node->parent();
-  }
-  // Apply current changes.
-  node = &path.to();
-  while (node != via) {
-    no_fail = IncrementalUpdate(false, node->assignment()) && no_fail;
-    node = node->parent();
-  }
-  return no_fail;
-}
-
-double KnapsackSolverForCuts::GetAggregatedProfitUpperBound() {
-  propagator_.ComputeProfitBounds();
-  const double propagator_upper_bound = propagator_.profit_upper_bound();
-  return std::min(kInfinity, propagator_upper_bound);
-}
-
-bool KnapsackSolverForCuts::MakeNewNode(const KnapsackSearchNodeForCuts& node,
-                                        bool is_in) {
-  if (node.next_item_id() == kNoSelection) {
-    return false;
-  }
-  KnapsackAssignmentForCuts assignment(node.next_item_id(), is_in);
-  KnapsackSearchNodeForCuts new_node(&node, assignment);
-
-  KnapsackSearchPathForCuts path(&node, &new_node);
-  path.Init();
-  const bool no_fail = UpdatePropagators(path);
-  if (no_fail) {
-    new_node.set_current_profit(GetCurrentProfit());
-    new_node.set_profit_upper_bound(GetAggregatedProfitUpperBound());
-    new_node.set_next_item_id(GetNextItemId());
-    UpdateBestSolution();
-  }
-
-  // Revert to be able to create another node from parent.
-  KnapsackSearchPathForCuts revert_path(&new_node, &node);
-  revert_path.Init();
-  UpdatePropagators(revert_path);
-
-  if (!no_fail || new_node.profit_upper_bound() < best_solution_profit_) {
-    return false;
-  }
-
-  // The node is relevant.
-  auto relevant_node =
-      std::make_unique<KnapsackSearchNodeForCuts>(&node, assignment);
-  relevant_node->set_current_profit(new_node.current_profit());
-  relevant_node->set_profit_upper_bound(new_node.profit_upper_bound());
-  relevant_node->set_next_item_id(new_node.next_item_id());
-  search_nodes_.push_back(std::move(relevant_node));
-
-  return true;
-}
-
-bool KnapsackSolverForCuts::IncrementalUpdate(
-    bool revert, const KnapsackAssignmentForCuts& assignment) {
-  // Do not stop on a failure: To be able to be incremental on the update,
-  // partial solution (state) and propagators must all be in the same state.
-  bool no_fail = state_.UpdateState(revert, assignment);
-  no_fail = propagator_.Update(revert, assignment) && no_fail;
-  return no_fail;
-}
-
-void KnapsackSolverForCuts::UpdateBestSolution() {
-  const double profit_lower_bound = propagator_.profit_lower_bound();
-
-  if (best_solution_profit_ < profit_lower_bound) {
-    best_solution_profit_ = profit_lower_bound;
-    propagator_.CopyCurrentStateToSolution(&best_solution_);
-  }
-}
-
-}  // namespace operations_research
diff --git a/ortools/algorithms/knapsack_solver_for_cuts.h b/ortools/algorithms/knapsack_solver_for_cuts.h
deleted file mode 100644
index 358f3725ae6..00000000000
--- a/ortools/algorithms/knapsack_solver_for_cuts.h
+++ /dev/null
@@ -1,388 +0,0 @@
-// Copyright 2010-2024 Google LLC
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// This library solves 0-1 one-dimensional knapsack problems with fractional
-// profits and weights using the branch and bound algorithm. Note that
-// algorithms/knapsack_solver uses 'int64_t' for the profits and the weights.
-// TODO(user): Merge this code with algorithms/knapsack_solver.
-//
-// Given n items, each with a profit and a weight, and a knapsack of
-// capacity c, the goal is to find a subset of the items which fits inside c
-// and maximizes the total profit.
-// Without loss of generality, profits and weights are assumed to be positive.
-//
-// From a mathematical point of view, the one-dimensional knapsack problem
-// can be modeled by the linear constraint:
-//   Sum(i:1..n)(weight_i * item_i) <= c,
-// where item_i is a 0-1 integer variable.
-// The goal is to maximize: Sum(i:1..n)(profit_i * item_i).
-//
-// Example Usage:
-// std::vector<double> profits = {0, 0.5, 0.4, 1, 1, 1.1};
-// std::vector<double> weights = {9, 6, 2, 1.5, 1.5, 1.5};
-// KnapsackSolverForCuts solver("solver");
-// solver.Init(profits, weights, capacity);
-// bool is_solution_optimal = false;
-// std::unique_ptr<TimeLimit> time_limit =
-//     std::make_unique<TimeLimit>(time_limit_seconds);  // Set the time limit.
-// const double profit = solver.Solve(time_limit.get(), &is_solution_optimal);
-// const int number_of_items(profits.size());
-// for (int item_id(0); item_id < number_of_items; ++item_id) {
-//   solver.best_solution(item_id);  // Access the solution.
-// }
-
-#ifndef OR_TOOLS_ALGORITHMS_KNAPSACK_SOLVER_FOR_CUTS_H_
-#define OR_TOOLS_ALGORITHMS_KNAPSACK_SOLVER_FOR_CUTS_H_
-
-#include
-#include
-#include
-#include
-#include
-
-#include "absl/memory/memory.h"
-#include "absl/types/span.h"
-#include "ortools/base/int_type.h"
-#include "ortools/base/logging.h"
-#include "ortools/util/time_limit.h"
-
-namespace operations_research {
-
-// ----- KnapsackAssignmentForCuts -----
-// KnapsackAssignmentForCuts is a small struct used to pair an item with
-// its assignment. It is mainly used for search nodes and updates.
-struct KnapsackAssignmentForCuts {
-  KnapsackAssignmentForCuts(int item_id, bool is_in)
-      : item_id(item_id), is_in(is_in) {}
-
-  int item_id;
-  bool is_in;
-};
-
-// ----- KnapsackItemForCuts -----
-// KnapsackItemForCuts is a small struct to pair an item weight with its
-// corresponding profit.
-// The aim of the knapsack problem is to pack as many valuable items as
-// possible. A straightforward heuristic is to take those with the greatest
-// profit-per-unit-weight. This ratio is called efficiency in this
-// implementation. So items will be grouped in vectors, and sorted by
-// decreasing efficiency.
-struct KnapsackItemForCuts {
-  KnapsackItemForCuts(int id, double weight, double profit)
-      : id(id), weight(weight), profit(profit) {}
-
-  double GetEfficiency(double profit_max) const {
-    return (weight > 0) ? profit / weight : profit_max;
-  }
-
-  // The 'id' field is used to retrieve the initial item in order to
-  // communicate with other propagators and state.
-  const int id;
-  const double weight;
-  const double profit;
-};
-using KnapsackItemForCutsPtr = std::unique_ptr<KnapsackItemForCuts>;
-
-// ----- KnapsackSearchNodeForCuts -----
-// KnapsackSearchNodeForCuts is a class used to describe a decision in the
-// decision search tree.
-// The node is defined by a pointer to the parent search node and an
-// assignment (see KnapsackAssignmentForCuts).
-// As the current state is not explicitly stored in a search node, one should
-// go through the search tree to incrementally build a partial solution from
-// a previous search node.
-class KnapsackSearchNodeForCuts {
- public:
-  KnapsackSearchNodeForCuts(const KnapsackSearchNodeForCuts* parent,
-                            const KnapsackAssignmentForCuts& assignment);
-
-  KnapsackSearchNodeForCuts(const KnapsackSearchNodeForCuts&) = delete;
-  KnapsackSearchNodeForCuts& operator=(const KnapsackSearchNodeForCuts&) =
-      delete;
-
-  int depth() const { return depth_; }
-  const KnapsackSearchNodeForCuts* parent() const { return parent_; }
-  const KnapsackAssignmentForCuts& assignment() const { return assignment_; }
-
-  double current_profit() const { return current_profit_; }
-  void set_current_profit(double profit) { current_profit_ = profit; }
-
-  double profit_upper_bound() const { return profit_upper_bound_; }
-  void set_profit_upper_bound(double profit) { profit_upper_bound_ = profit; }
-
-  int next_item_id() const { return next_item_id_; }
-  void set_next_item_id(int id) { next_item_id_ = id; }
-
- private:
-  // 'depth_' is used to navigate efficiently through the search tree.
-  int depth_;
-  const KnapsackSearchNodeForCuts* const parent_;
-  KnapsackAssignmentForCuts assignment_;
-
-  // The 'current_profit_' and 'profit_upper_bound_' fields are used to sort
-  // search nodes using a priority queue. That allows us to pop the node with
-  // the best upper bound, and more importantly to stop the search when
-  // optimality is proved.
-  double current_profit_;
-  double profit_upper_bound_;
-
-  // The 'next_item_id_' field allows us to avoid an O(number_of_items) scan
-  // to find the next item to select. This is done for free by the upper bound
-  // computation.
-  int next_item_id_;
-};
-
-// ----- KnapsackSearchPathForCuts -----
-// KnapsackSearchPathForCuts is a small class used to represent the path from
-// one node to another node in the search tree.
-// As the solution state is not stored for each search node, the state should
-// be rebuilt at each node. One simple solution is to apply all decisions
-// between the node 'to' and the root. This can be computed in
-// O(number_of_items).
-//
-// However, it is possible to achieve better average complexity. Two
-// consecutively explored nodes are usually close enough (i.e., much less than
-// number_of_items) to benefit from an incremental update from the node
-// 'from' to the node 'to'.
-//
-// The 'via' field is the common parent of the 'from' and 'to' fields.
-// So the state can be built by reverting all decisions from 'from' to 'via'
-// and then applying all decisions from 'via' to 'to'.
-class KnapsackSearchPathForCuts {
- public:
-  KnapsackSearchPathForCuts(const KnapsackSearchNodeForCuts* from,
-                            const KnapsackSearchNodeForCuts* to);
-
-  KnapsackSearchPathForCuts(const KnapsackSearchPathForCuts&) = delete;
-  KnapsackSearchPathForCuts& operator=(const KnapsackSearchPathForCuts&) =
-      delete;
-
-  void Init();
-  const KnapsackSearchNodeForCuts& from() const { return *from_; }
-  const KnapsackSearchNodeForCuts& via() const { return *via_; }
-  const KnapsackSearchNodeForCuts& to() const { return *to_; }
-
- private:
-  const KnapsackSearchNodeForCuts* from_;
-  const KnapsackSearchNodeForCuts* via_;  // Computed in 'Init'.
-  const KnapsackSearchNodeForCuts* to_;
-};
-
-// From the given node, this method moves up the tree and returns the node at
-// the given depth.
-const KnapsackSearchNodeForCuts* MoveUpToDepth(
-    const KnapsackSearchNodeForCuts* node, int depth);
-
-// ----- KnapsackStateForCuts -----
-// KnapsackStateForCuts represents a partial solution to the knapsack problem.
-class KnapsackStateForCuts {
- public:
-  KnapsackStateForCuts();
-
-  KnapsackStateForCuts(const KnapsackStateForCuts&) = delete;
-  KnapsackStateForCuts& operator=(const KnapsackStateForCuts&) = delete;
-
-  // Initializes the vectors with number_of_items entries, all set to false
-  // (i.e. not bound yet).
-  void Init(int number_of_items);
-
-  // Updates the state by applying or reverting a decision.
-  // Returns false if it fails, i.e. when trying to apply an inconsistent
-  // decision to an already assigned item.
-  bool UpdateState(bool revert, const KnapsackAssignmentForCuts& assignment);
-
-  int GetNumberOfItems() const { return is_bound_.size(); }
-  bool is_bound(int id) const { return is_bound_.at(id); }
-  bool is_in(int id) const { return is_in_.at(id); }
-
- private:
-  // Vectors 'is_bound_' and 'is_in_' contain a boolean value for each item.
-  // 'is_bound_(item_i)' is false when there is no decision for item_i yet.
-  // When item_i is bound, 'is_in_(item_i)' represents the presence (true) or
-  // the absence (false) of item_i in the current solution.
-  std::vector<bool> is_bound_;
-  std::vector<bool> is_in_;
-};
-
-// ----- KnapsackPropagatorForCuts -----
-// KnapsackPropagatorForCuts is used to enforce a capacity constraint.
-// It computes profit lower and upper bounds and provides the next item to
-// select; it can be seen as a 0-1 knapsack solver.
-// The most efficient way to compute the upper bound is to iterate on items
-// in profit-per-unit-weight decreasing order. The break item is commonly
-// defined as the first item for which there is not enough remaining capacity.
-// Selecting this break item as the next-item-to-assign usually gives the best
-// results (see Greenberg & Hegerich).
-//
-// This is exactly what is implemented in this class.
-//
-// It is possible to compute a better profit lower bound almost for free.
-// During the scan to find the break element, all unbound items are added just
-// as if they were part of the current solution. This is used in both
-// ComputeProfitBounds() and CopyCurrentStateToSolution(). For incrementality
-// reasons, the ith item should be accessible in O(1). That is why the item
-// vector has to be duplicated into 'sorted_items_'.
-class KnapsackPropagatorForCuts {
- public:
-  explicit KnapsackPropagatorForCuts(const KnapsackStateForCuts* state);
-  ~KnapsackPropagatorForCuts();
-
-  KnapsackPropagatorForCuts(const KnapsackPropagatorForCuts&) = delete;
-  KnapsackPropagatorForCuts& operator=(const KnapsackPropagatorForCuts&) =
-      delete;
-
-  // Initializes the data structure and then calls InitPropagator.
-  void Init(absl::Span<const double> profits, absl::Span<const double> weights,
-            double capacity);
-
-  // Updates the data structure. Returns false on failure.
-  bool Update(bool revert, const KnapsackAssignmentForCuts& assignment);
-  // ComputeProfitBounds should set 'profit_lower_bound_' and
-  // 'profit_upper_bound_', which are constraint specific.
-  void ComputeProfitBounds();
-  // Returns the id of the next item to assign.
-  // Returns kNoSelection when all items are bound.
-  int GetNextItemId() const { return break_item_id_; }
-
-  double current_profit() const { return current_profit_; }
-  double profit_lower_bound() const { return profit_lower_bound_; }
-  double profit_upper_bound() const { return profit_upper_bound_; }
-
-  // Copies the current state into 'solution'.
-  // All unbound items are set to false (i.e. not in the knapsack).
-  void CopyCurrentStateToSolution(std::vector<bool>* solution) const;
-
-  // Initializes the propagator. This method is called by Init() after filling
-  // the fields defined in this class.
-  void InitPropagator();
-
-  const KnapsackStateForCuts& state() const { return *state_; }
-  const std::vector<KnapsackItemForCutsPtr>& items() const { return items_; }
-
-  void set_profit_lower_bound(double profit) { profit_lower_bound_ = profit; }
-  void set_profit_upper_bound(double profit) { profit_upper_bound_ = profit; }
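Stated as a formula (an editorial restatement; the prose explanation follows in the private section below): with $\bar c$ the remaining capacity at the break item $b$, and $p_i$, $w_i$ the profit and weight of the $i$-th item in efficiency order, GetAdditionalProfitUpperBound returns the Martello-Toth style bound

$$\Delta = \max\left( \max\left( \frac{\bar c \, p_{b+1}}{w_{b+1}},\, 0 \right),\ \max\left( p_b - \frac{(w_b - \bar c)\, p_{b-1}}{w_{b-1}},\, 0 \right) \right),$$

where a missing neighbor ($b+1$ past the end, or $b-1 < 0$) contributes $0$. The first term relaxes the case where item $b$ is excluded; the second relaxes the case where $b$ is included and part of item $b-1$ is pushed out.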
-
- private:
-  // An obvious additional profit upper bound corresponds to the linear
-  // relaxation: remaining_capacity * efficiency of the break item.
-  // It is possible to do better in O(1), using the Martello-Toth bound U2.
-  // The main idea is to enforce the integrality constraint on the break item,
-  // i.e. either the break item is part of the solution, or it is not.
-  // So basically the linear relaxation is done on the item before the break
-  // item, or on the one after the break item. This is what the
-  // GetAdditionalProfitUpperBound method implements.
-  double GetAdditionalProfitUpperBound(double remaining_capacity,
-                                       int break_item_id) const;
-
-  double capacity_;
-  double consumed_capacity_;
-  int break_item_id_;
-  std::vector<KnapsackItemForCutsPtr> sorted_items_;
-  double profit_max_;
-  std::vector<KnapsackItemForCutsPtr> items_;
-  double current_profit_;
-  double profit_lower_bound_;
-  double profit_upper_bound_;
-  const KnapsackStateForCuts* const state_;
-};
-
-// ----- KnapsackSolverForCuts -----
-// KnapsackSolverForCuts is the one-dimensional knapsack solver class.
-// In the current implementation, the next item to assign is given by the
-// primary propagator. Using SetPrimaryPropagator allows changing the default
-// (the propagator of the first dimension).
-class KnapsackSolverForCuts {
- public:
-  explicit KnapsackSolverForCuts(std::string solver_name);
-
-  KnapsackSolverForCuts(const KnapsackSolverForCuts&) = delete;
-  KnapsackSolverForCuts& operator=(const KnapsackSolverForCuts&) = delete;
-
-  // Initializes the solver and enters the problem to be solved.
-  void Init(absl::Span<const double> profits, absl::Span<const double> weights,
-            double capacity);
-  int GetNumberOfItems() const { return state_.GetNumberOfItems(); }
-
-  // Gets the lower and the upper bound when the item is in or out of the
-  // knapsack. To ensure objects are correctly initialized, this method should
-  // not be called before Init().
-  void GetLowerAndUpperBoundWhenItem(int item_id, bool is_item_in,
-                                     double* lower_bound, double* upper_bound);
-
-  // Gets the best upper bound found so far.
-  double GetUpperBound() { return GetAggregatedProfitUpperBound(); }
-
-  // The solver stops if a solution with profit better than
-  // 'solution_lower_bound_threshold' is found.
-  void set_solution_lower_bound_threshold(
-      const double solution_lower_bound_threshold) {
-    solution_lower_bound_threshold_ = solution_lower_bound_threshold;
-  }
-
-  // The solver stops if the upper bound on profit drops below
-  // 'solution_upper_bound_threshold'.
-  void set_solution_upper_bound_threshold(
-      const double solution_upper_bound_threshold) {
-    solution_upper_bound_threshold_ = solution_upper_bound_threshold;
-  }
-
-  // Stops the knapsack solver after processing 'node_limit' nodes.
-  void set_node_limit(const int64_t node_limit) { node_limit_ = node_limit; }
-
-  // Solves the problem and returns the profit of the best solution found.
-  double Solve(TimeLimit* time_limit, bool* is_solution_optimal);
-  // Returns true if the item 'item_id' is packed in the optimal knapsack.
-  bool best_solution(int item_id) const {
-    DCHECK(item_id < best_solution_.size());
-    return best_solution_[item_id];
-  }
-
-  const std::string& GetName() const { return solver_name_; }
-
- private:
-  // Updates the propagator, reverting/applying all decisions on the path.
-  // Returns false if the propagation fails. Note that even if it fails, the
-  // propagator should be updated to be in a stable state in order to stay
-  // incremental.
-  bool UpdatePropagators(const KnapsackSearchPathForCuts& path);
-  // Updates the propagator, reverting/applying one decision. Returns false if
-  // the propagation fails. Note that even if it fails, the propagator should
-  // be updated to be in a stable state in order to stay incremental.
-  bool IncrementalUpdate(bool revert,
-                         const KnapsackAssignmentForCuts& assignment);
-  // Updates the best solution if the current solution has a better profit.
-  void UpdateBestSolution();
-
-  // Returns true if a new relevant search node was added to the nodes array.
-  // That means this node should be added to the search queue too.
-  bool MakeNewNode(const KnapsackSearchNodeForCuts& node, bool is_in);
-
-  // Gets the aggregated (min) profit upper bound among all propagators.
-  double GetAggregatedProfitUpperBound();
-  double GetCurrentProfit() const { return propagator_.current_profit(); }
-  int GetNextItemId() const { return propagator_.GetNextItemId(); }
-
-  KnapsackPropagatorForCuts propagator_;
-  std::vector<std::unique_ptr<KnapsackSearchNodeForCuts>> search_nodes_;
-  KnapsackStateForCuts state_;
-  double best_solution_profit_;
-  std::vector<bool> best_solution_;
-  const std::string solver_name_;
-  double solution_lower_bound_threshold_ =
-      std::numeric_limits<double>::infinity();
-  double solution_upper_bound_threshold_ =
-      -std::numeric_limits<double>::infinity();
-  int64_t node_limit_ = std::numeric_limits<int64_t>::max();
-};
-// TODO(user) : Add reduction algorithm.
-
-}  // namespace operations_research
-
-#endif  // OR_TOOLS_ALGORITHMS_KNAPSACK_SOLVER_FOR_CUTS_H_
diff --git a/ortools/algorithms/knapsack_solver_for_cuts_test.cc b/ortools/algorithms/knapsack_solver_for_cuts_test.cc
deleted file mode 100644
index ecf23f57d2e..00000000000
--- a/ortools/algorithms/knapsack_solver_for_cuts_test.cc
+++ /dev/null
@@ -1,341 +0,0 @@
-// Copyright 2010-2024 Google LLC
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#include "ortools/algorithms/knapsack_solver_for_cuts.h"
-
-#include
-#include
-#include
-
-#include "gtest/gtest.h"
-
-namespace operations_research {
-namespace {
-
-const int kInvalidSolution = -1;
-bool IsSolutionValid(const std::vector<double>& profits,
-                     const std::vector<double>& weights, const double capacity,
-                     const std::vector<bool>& best_solution,
-                     double optimal_profit) {
-  double remaining_capacity = capacity;
-  double profit = 0;
-  const int number_of_items(profits.size());
-  for (int item_id(0); item_id < number_of_items; ++item_id) {
-    if (best_solution.at(item_id)) {
-      profit += profits[item_id];
-      remaining_capacity -= weights[item_id];
-    }
-  }
-
-  if (remaining_capacity < 0) {
-    return false;
-  }
-  return profit == optimal_profit;
-}
-
-double SolveKnapsackProblem(KnapsackSolverForCuts* solver) {
-  bool is_solution_optimal = false;
-  auto time_limit =
-      std::make_unique<TimeLimit>(std::numeric_limits<double>::infinity());
-  return solver->Solve(time_limit.get(), &is_solution_optimal);
-}
-
-TEST(KnapsackSearchNodeForCutsTest, Depth) {
-  KnapsackAssignmentForCuts assignment(0, false);
-  KnapsackSearchNodeForCuts root(nullptr, assignment);
-  EXPECT_EQ(0, root.depth());
-
-  KnapsackSearchNodeForCuts node_0(&root, assignment);
-  EXPECT_EQ(1, node_0.depth());
-
-  KnapsackSearchNodeForCuts node_00(&node_0, assignment);
-  EXPECT_EQ(2, node_00.depth());
-}
-
-TEST(KnapsackSearchPathTest, MoveUpToDepth) {
-  KnapsackAssignmentForCuts assignment(0, false);
-  KnapsackSearchNodeForCuts root(nullptr, assignment);
-  KnapsackSearchNodeForCuts node_0(&root, assignment);
-  KnapsackSearchPathForCuts from_root_to_0(&root, &node_0);
-  const KnapsackSearchNodeForCuts* root_ptr = MoveUpToDepth(&node_0, 0);
-  EXPECT_EQ(&root, root_ptr);
-}
-
-TEST(KnapsackSearchPathTest, InitAndMoveUpToDepth) {
-  KnapsackAssignmentForCuts assignment(0, false);
-  KnapsackSearchNodeForCuts root(nullptr, assignment);
-  KnapsackSearchNodeForCuts node_0(&root, assignment);
-  KnapsackSearchNodeForCuts node_00(&node_0, assignment);
-  KnapsackSearchNodeForCuts node_01(&node_0, assignment);
-  KnapsackSearchNodeForCuts node_001(&node_00, assignment);
-  KnapsackSearchNodeForCuts node_010(&node_01, assignment);
-  KnapsackSearchNodeForCuts node_0101(&node_010, assignment);
-  KnapsackSearchNodeForCuts node_01011(&node_0101, assignment);
-
-  KnapsackSearchPathForCuts from_01011_to_001(&node_01011, &node_001);
-  const KnapsackSearchNodeForCuts* node_01_ptr = MoveUpToDepth(&node_01011, 2);
-  EXPECT_EQ(&node_01, node_01_ptr);
-
-  from_01011_to_001.Init();
-  EXPECT_EQ(&node_0, &from_01011_to_001.via());
-
-  KnapsackSearchPathForCuts from_001_to_01011(&node_001, &node_01011);
-  from_001_to_01011.Init();
-  EXPECT_EQ(&from_01011_to_001.via(), &from_001_to_01011.via());
-}
-
-TEST(KnapsackItemForCutsTest, GetEfficiency) {
-  const int kId(7);
-  const double kWeight = 52;
-  const double kProfit = 130;
-  const double kEfficiency = 2.5;
-  const double kProfitMax = 1000;
-  const double kNullWeight = 0;
-
-  const KnapsackItemForCuts item(kId, kWeight, kProfit);
-  EXPECT_EQ(kId, item.id);
-  EXPECT_EQ(kWeight, item.weight);
-  EXPECT_EQ(kProfit, item.profit);
-  EXPECT_EQ(kEfficiency, item.GetEfficiency(kProfitMax));
-
-  const KnapsackItemForCuts item2(kId, kNullWeight, kProfit);
-  EXPECT_EQ(kProfitMax, item2.GetEfficiency(kProfitMax));
-}
-
-TEST(KnapsackStateForCutsTest, Init) {
-  const int kNumberOfItems(12);
-  KnapsackStateForCuts state;
-  state.Init(kNumberOfItems);
-  for (int i(0); i < kNumberOfItems; ++i) {
-    EXPECT_FALSE(state.is_bound(i));
-  }
-  EXPECT_EQ(kNumberOfItems, state.GetNumberOfItems());
-}
-
-TEST(KnapsackStateForCutsTest, UpdateState) {
-  const int kNumberOfItems(12);
-  KnapsackStateForCuts state;
-  state.Init(kNumberOfItems);
-
-  const int item_id(7);
-  bool is_in = true;
-  KnapsackAssignmentForCuts assignment1(item_id, is_in);
-  bool no_fail = state.UpdateState(false, assignment1);
-  for (int i(0); i < kNumberOfItems; ++i) {
-    EXPECT_EQ(i == item_id, state.is_bound(i));
-  }
-  EXPECT_EQ(is_in, state.is_in(item_id));
-  EXPECT_TRUE(no_fail);
-
-  is_in = false;
-  KnapsackAssignmentForCuts assignment2(item_id, is_in);
-  no_fail = state.UpdateState(false, assignment2);
-  EXPECT_TRUE(state.is_bound(item_id));
-  EXPECT_FALSE(no_fail);
-
-  no_fail = state.UpdateState(true, assignment2);
-  EXPECT_FALSE(state.is_bound(item_id));
-  EXPECT_TRUE(no_fail);
-}
-
-TEST(KnapsackPropagatorForCutsTest, InitAndUpdatePropagator) {
-  const std::vector<double> profits = {1, 2, 3, 4, 5, 6, 7, 8, 9};
-  const std::vector<double> weights = {1, 1, 1, 1, 1, 1, 1, 1, 1};
-  ASSERT_EQ(profits.size(), weights.size());
-  const int kNumItems(profits.size());
-  const int kNoSelection(-1);
-
-  KnapsackStateForCuts state;
-  state.Init(kNumItems);
-
-  KnapsackPropagatorForCuts capacity_propagator(&state);
-  capacity_propagator.Init(profits, weights, 2);
-  EXPECT_EQ(kNoSelection, capacity_propagator.GetNextItemId());
-
-  KnapsackAssignmentForCuts assignment1(3, true);
-  EXPECT_TRUE(state.UpdateState(false, assignment1));
-  EXPECT_TRUE(capacity_propagator.Update(false, assignment1));
-  EXPECT_EQ(4, capacity_propagator.current_profit());
-  capacity_propagator.ComputeProfitBounds();
-  EXPECT_EQ(7, capacity_propagator.GetNextItemId());
-  const double kProfit13 = profits[3] + profits[8];
-  EXPECT_EQ(kProfit13, capacity_propagator.profit_lower_bound());
-  EXPECT_EQ(kProfit13, capacity_propagator.profit_upper_bound());
-
-  KnapsackAssignmentForCuts assignment2(8, true);
-  EXPECT_TRUE(state.UpdateState(false, assignment2));
-  EXPECT_TRUE(capacity_propagator.Update(false, assignment2));
-  EXPECT_EQ(kProfit13, capacity_propagator.current_profit());
-  capacity_propagator.ComputeProfitBounds();
-  EXPECT_EQ(7, capacity_propagator.GetNextItemId());
-  EXPECT_EQ(kProfit13, capacity_propagator.profit_lower_bound());
-  EXPECT_EQ(kProfit13, capacity_propagator.profit_upper_bound());
-
-  KnapsackAssignmentForCuts assignment3(5, true);
-  EXPECT_TRUE(state.UpdateState(false, assignment3));
-  EXPECT_FALSE(capacity_propagator.Update(false, assignment3));
-  const double kProfit19 = profits[3] + profits[8] + profits[5];
-  EXPECT_EQ(kProfit19, capacity_propagator.current_profit());
-
-  EXPECT_TRUE(state.UpdateState(true, assignment2));
-  EXPECT_TRUE(capacity_propagator.Update(true, assignment2));
-  const double kProfit10 = profits[3] + profits[5];
-  EXPECT_EQ(kProfit10, capacity_propagator.current_profit());
-  capacity_propagator.ComputeProfitBounds();
-  EXPECT_EQ(8, capacity_propagator.GetNextItemId());
-  EXPECT_EQ(kProfit10, capacity_propagator.profit_lower_bound());
-  EXPECT_EQ(kProfit10, capacity_propagator.profit_upper_bound());
-}
-
-TEST(KnapsackSolverForCutsTest, SolveOneDimension) {
-  const std::vector<double> profits = {1, 2, 3, 4, 5, 6, 7, 8, 9};
-  const std::vector<double> weights = {1, 2, 3, 4, 5, 6, 7, 8, 9};
-  ASSERT_EQ(profits.size(), weights.size());
-  const double kCapacity = 34;
-  const double kOptimalProfit = 34;
-  KnapsackSolverForCuts solver("solver");
-  solver.Init(profits, weights, kCapacity);
-  const double profit = SolveKnapsackProblem(&solver);
-  EXPECT_EQ(kOptimalProfit, profit);
-}
-
-TEST(KnapsackSolverForCutsTest, SolveOneDimensionInfeasible) {
-  const std::vector<double> profits = {1, 2, 3, 4, 5, 6, 7, 8, 9};
-  const std::vector<double> weights = {1, 2, 3, 4, 5, 6, 7, 8, 9};
-  ASSERT_EQ(profits.size(), weights.size());
-  const double kCapacity = -1;
-  KnapsackSolverForCuts solver("solver");
-  solver.Init(profits, weights, kCapacity);
-  const double profit = SolveKnapsackProblem(&solver);
-  const int number_of_items(profits.size());
-  std::vector<bool> best_solution(number_of_items, false);
-  for (int item_id(0); item_id < number_of_items; ++item_id) {
-    best_solution.at(item_id) = solver.best_solution(item_id);
-  }
-  EXPECT_FALSE(
-      IsSolutionValid(profits, weights, kCapacity, best_solution, profit));
-}
-
-TEST(KnapsackSolverForCutsTest, MultipleSolves) {
-  KnapsackSolverForCuts solver("solver");
-  {
-    const std::vector<double> profits = {1, 2, 3};
-    const std::vector<double> weights = {4, 5, 6};
-    ASSERT_EQ(profits.size(), weights.size());
-    const double kCapacity = 10;
-    const double kOptimalProfit = 4;
-    solver.Init(profits, weights, kCapacity);
-    const double profit = SolveKnapsackProblem(&solver);
-    EXPECT_EQ(kOptimalProfit, profit);
-  }
-  {
-    const std::vector<double> profits = {1, 2, 3, 7};
-    const std::vector<double> weights = {4, 5, 6, 8};
-    ASSERT_EQ(profits.size(), weights.size());
-    const double kCapacity = 10;
-    const double kOptimalProfit = 7;
-    solver.Init(profits, weights, kCapacity);
-    const double profit = SolveKnapsackProblem(&solver);
-    EXPECT_EQ(kOptimalProfit, profit);
-  }
-  {
-    const std::vector<double> profits = {1, 2};
-    const std::vector<double> weights = {4, 5};
-    ASSERT_EQ(profits.size(), weights.size());
-    const double kCapacity = 10;
-    const double kOptimalProfit = 3;
-    solver.Init(profits, weights, kCapacity);
-    const double profit = SolveKnapsackProblem(&solver);
-    EXPECT_EQ(kOptimalProfit, profit);
-  }
-}
-
-TEST(KnapsackSolverForCutsTest, SolveBigOneDimension) {
-  const std::vector<double> profits = {
-      360, 83,  59,  130, 431, 67,  230, 52,  93,  125, 670, 892, 600,
-      38,  48,  147, 78,  256, 63,  17,  120, 164, 432, 35,  92,  110,
-      22,  42,  50,  323, 514, 28,  87,  73,  78,  15,  26,  78,  210,
-      36,  85,  189, 274, 43,  33,  10,  19,  389, 276, 312};
-  const std::vector<double> weights = {
-      7,  0,  30, 22, 80, 94, 11, 81, 70, 64, 59, 18, 0,  36, 3,  8,  15,
-      42, 9,  0,  42, 47, 52, 32, 26, 48, 55, 6,  29, 84, 2,  4,  18, 56,
-      7,  29, 93, 44, 71, 3,  86, 66, 31, 65, 0,  79, 20, 65, 52, 13};
-  ASSERT_EQ(profits.size(), weights.size());
-  const double kCapacity = 850;
-  const double kOptimalProfit = 7534;
-  KnapsackSolverForCuts solver("solver");
-  {
-    solver.Init(profits, weights, kCapacity);
-    const double profit = SolveKnapsackProblem(&solver);
-    EXPECT_EQ(kOptimalProfit, profit);
-  }
-  {
-    // Solve with lower bound threshold.
-    solver.Init(profits, weights, kCapacity);
-    solver.set_solution_lower_bound_threshold(100);
-    const double profit = SolveKnapsackProblem(&solver);
-    EXPECT_GT(kOptimalProfit, profit);
-  }
-  {
-    // Solve with upper bound threshold.
-    solver.Init(profits, weights, kCapacity);
-    solver.set_solution_upper_bound_threshold(10000);
-    const double profit = SolveKnapsackProblem(&solver);
-    EXPECT_GT(kOptimalProfit, profit);
-  }
-  {
-    solver.Init(profits, weights, kCapacity);
-    solver.set_node_limit(1);
-    const double profit = SolveKnapsackProblem(&solver);
-    EXPECT_GT(kOptimalProfit, profit);
-  }
-}
-
-TEST(KnapsackSolverForCutsTest, SolveOneDimensionFractionalProfits) {
-  const std::vector<double> profits = {0, 0.5, 0.4, 1, 1, 1.1};
-  const std::vector<double> weights = {9, 6, 2, 1, 1, 1};
-  ASSERT_EQ(profits.size(), weights.size());
-  const double kCapacity = 4;
-  const double kOptimalProfit = 3.1;
-  KnapsackSolverForCuts solver("solver");
-  solver.Init(profits, weights, kCapacity);
-  const double profit = SolveKnapsackProblem(&solver);
-  EXPECT_EQ(kOptimalProfit, profit);
-}
-
-TEST(KnapsackSolverForCutsTest, SolveOneDimensionFractionalWeights) {
-  const std::vector<double> profits = {0, 1, 1, 1, 1, 2};
-  const std::vector<double> weights = {9, 6, 2, 1.5, 1.5, 1.5};
-  ASSERT_EQ(profits.size(), weights.size());
-  const double kCapacity = 4;
-  const double kOptimalProfit = 3;
-  KnapsackSolverForCuts solver("solver");
-  solver.Init(profits, weights, kCapacity);
-  const double profit = SolveKnapsackProblem(&solver);
-  EXPECT_EQ(kOptimalProfit, profit);
-}
-
-TEST(KnapsackSolverForCutsTest, SolveOneDimensionFractional) {
-  const std::vector<double> profits = {0, 0.5, 0.4, 1, 1, 1.1};
-  const std::vector<double> weights = {9, 6, 2, 1.5, 1.5, 1.5};
-  ASSERT_EQ(profits.size(), weights.size());
-  const double kCapacity = 4;
-  const double kOptimalProfit = 2.1;
-  KnapsackSolverForCuts solver("solver");
-  solver.Init(profits, weights, kCapacity);
-  const double profit = SolveKnapsackProblem(&solver);
-  EXPECT_EQ(kOptimalProfit, profit);
-}
-
-}  // namespace
-}  // namespace operations_research
diff --git a/ortools/algorithms/n_choose_k.cc b/ortools/algorithms/n_choose_k.cc
new file mode 100644
index 00000000000..300a4645476
--- /dev/null
+++ b/ortools/algorithms/n_choose_k.cc
@@ -0,0 +1,145 @@
+// Copyright 2010-2024 Google LLC
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "ortools/algorithms/n_choose_k.h"
+
+#include
+#include
+#include
+#include
+
+#include "absl/log/check.h"
+#include "absl/numeric/int128.h"
+#include "absl/status/status.h"
+#include "absl/status/statusor.h"
+#include "absl/strings/str_format.h"
+#include "absl/time/clock.h"
+#include "absl/time/time.h"
+#include "ortools/algorithms/binary_search.h"
+#include "ortools/base/logging.h"
+#include "ortools/base/mathutil.h"
+
+namespace operations_research {
+namespace {
+// This is the actual computation. It's in O(k).
+template <typename Int>
+Int InternalChoose(Int n, Int k) {
+  DCHECK_LE(k, n - k);
+  // We compute n * (n-1) * ... * (n-k+1) / k! in the best possible order to
+  // guarantee exact results, while trying to avoid overflows. It's not
+  // perfect: we finish with a division by k, which means that we may overflow
+  // even if the result doesn't (by a factor of up to k).
+  Int result = 1;
+  Int i = 0;
+  while (i < k) {  // We always have k < n/2.
+    result *= n--;
+    result /= ++i;  // The product of i consecutive numbers is divisible by i.
+  }
+  return result;
+}
+
+constexpr int64_t kint64max = std::numeric_limits<int64_t>::max();
+
+// This function precomputes the maximum N such that (N choose K) doesn't
+// overflow, for all K.
+// When `overflows_intermediate_computation` is true, "overflow" means
+// "some overflow happens inside InternalChoose()", and when it's false
+// it simply means "the result doesn't fit in an int64_t".
+// This is only used in contexts where K ≤ N-K, which implies N ≥ 2K, thus we
+// can stop when (2K Choose K) overflows, because at and beyond such K,
+// (N Choose K) will always overflow. In practice that happens for K=31 or 34
+// depending on `overflows_intermediate_computation`.
+std::vector<int64_t> LastNThatDoesNotOverflowForAllK(
+    bool overflows_intermediate_computation) {
+  absl::Time start_time = absl::Now();
+  // Given the algorithm used in InternalChoose(), it's not hard to
+  // find out when (N choose K) overflows an int64_t during its internal
+  // computation: that's when (N choose K) > kint64max / k.
+
+  // For K ≤ 2, we hardcode the values of the maximum N.
+  std::vector<int64_t> result = {
+      kint64max,  // K=0
+      kint64max,  // K=1
+      // The binary search done below uses MathUtil::LogCombinations, which
+      // only works on int32_t, and that's problematic for the max N we get
+      // for K=2.
+      overflows_intermediate_computation
+          ?  // Max N such that N*(N-1) < 2^63. N*(N-1) ≈ (N-0.5)².
+          static_cast<int64_t>(0.5 + std::pow(2.0, 63.0 / 2))
+          : 1l << 32,  // Max N such that N*(N-1) < 2^64.
+  };
+  // We find the last N with binary search, for all K. We stop growing K
+  // when (2*K Choose K) overflows.
+  for (int64_t k = 3;; ++k) {
+    const double max_log_comb = overflows_intermediate_computation
+                                    ? 63 * std::log(2) - std::log(k)
+                                    : 63 * std::log(2);
+    result.push_back(BinarySearch<int64_t>(
+        /*x_true=*/k, /*x_false=*/(1l << 23) - 1,
+        [k, max_log_comb](int64_t n) {
+          return MathUtil::LogCombinations(n, k) <= max_log_comb;
+        }));
+    if (result.back() < 2 * k) {
+      result.pop_back();
+      break;
+    }
+  }
+  DCHECK_EQ(result.size(),
+            overflows_intermediate_computation
+                ? 31    // 60 Choose 30 < 2^63/30 but 62 Choose 31 > 2^63/31.
+                : 34);  // 66 Choose 33 < 2^63 but 68 Choose 34 > 2^63.
+  VLOG(1) << "LastNThatDoesNotOverflowForAllK(): " << absl::Now() - start_time;
+  return result;
+}
+
+bool NChooseKIntermediateComputationOverflowsInt64(int64_t n, int64_t k) {
+  DCHECK_LE(k, n - k);
+  static const auto* const result =
+      new std::vector<int64_t>(LastNThatDoesNotOverflowForAllK(
+          /*overflows_intermediate_computation=*/true));
+  return k < result->size() ? n > (*result)[k] : true;
+}
+
+bool NChooseKResultOverflowsInt64(int64_t n, int64_t k) {
+  DCHECK_LE(k, n - k);
+  static const auto* const result =
+      new std::vector<int64_t>(LastNThatDoesNotOverflowForAllK(
+          /*overflows_intermediate_computation=*/false));
+  return k < result->size() ? n > (*result)[k] : true;
+}
+}  // namespace
+
+absl::StatusOr<int64_t> NChooseK(int64_t n, int64_t k) {
+  if (n < 0) {
+    return absl::InvalidArgumentError(
+        absl::StrFormat("n is negative (%d)", n));
+  }
+  if (k < 0) {
+    return absl::InvalidArgumentError(
+        absl::StrFormat("k is negative (%d)", k));
+  }
+  if (k > n) {
+    return absl::InvalidArgumentError(
+        absl::StrFormat("k=%d is greater than n=%d", k, n));
+  }
+  // NOTE(user): If performance ever matters, we could simply precompute and
+  // store all (N choose K) that don't overflow, there aren't that many of
+  // them: only a few tens of thousands, after removing simple cases like
+  // k ≤ 5.
+  if (k > n / 2) k = n - k;
+  if (!NChooseKIntermediateComputationOverflowsInt64(n, k)) {
+    return InternalChoose<int64_t>(n, k);
+  }
+  if (NChooseKResultOverflowsInt64(n, k)) {
+    return absl::InvalidArgumentError(
+        absl::StrFormat("(%d choose %d) overflows int64", n, k));
+  }
+  return static_cast<int64_t>(InternalChoose<absl::uint128>(n, k));
+}
+
+}  // namespace operations_research
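A worked trace of InternalChoose, added as a reading aid: for $n = 7$, $k = 3$ the loop interleaves multiplications and divisions so that every intermediate value stays integral, because the product of $i$ consecutive integers is divisible by $i!$:

$$1 \xrightarrow{\times 7} 7 \xrightarrow{/1} 7 \xrightarrow{\times 6} 42 \xrightarrow{/2} 21 \xrightarrow{\times 5} 105 \xrightarrow{/3} 35 = \binom{7}{3}.$$

The largest intermediate, $105 = 3 \cdot 35$, can exceed the final result by a factor of up to $k$, which is what NChooseKIntermediateComputationOverflowsInt64 guards against before the code falls back to the wider absl::uint128 instantiation.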
diff --git a/ortools/algorithms/n_choose_k.h b/ortools/algorithms/n_choose_k.h
new file mode 100644
index 00000000000..019492c8f5e
--- /dev/null
+++ b/ortools/algorithms/n_choose_k.h
@@ -0,0 +1,30 @@
+// Copyright 2010-2024 Google LLC
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef OR_TOOLS_ALGORITHMS_N_CHOOSE_K_H_
+#define OR_TOOLS_ALGORITHMS_N_CHOOSE_K_H_
+
+#include
+
+#include "absl/status/statusor.h"
+
+namespace operations_research {
+// Returns the number of ways to choose k elements among n, ignoring the order,
+// i.e., the binomial coefficient (n, k).
+// This is like std::exp(MathUtil::LogCombinations(n, k)), but with perfect
+// accuracy, and returning an error iff the result would overflow an int64_t
+// or if an argument is invalid (i.e., n < 0, k < 0, or k > n).
+absl::StatusOr<int64_t> NChooseK(int64_t n, int64_t k);
+}  // namespace operations_research
+
+#endif  // OR_TOOLS_ALGORITHMS_N_CHOOSE_K_H_
diff --git a/ortools/algorithms/n_choose_k_test.cc b/ortools/algorithms/n_choose_k_test.cc
new file mode 100644
index 00000000000..601962f24bd
--- /dev/null
+++ b/ortools/algorithms/n_choose_k_test.cc
@@ -0,0 +1,249 @@
+// Copyright 2010-2024 Google LLC
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "ortools/algorithms/n_choose_k.h"
+
+#include
+#include
+#include
+
+#include "absl/numeric/int128.h"
+#include "absl/random/distributions.h"
+#include "absl/random/random.h"
+#include "absl/status/status.h"
+#include "absl/status/statusor.h"
+#include "gtest/gtest.h"
+#include "ortools/base/dump_vars.h"
+#include "ortools/base/gmock.h"
+#include "ortools/util/flat_matrix.h"
+
+namespace operations_research {
+namespace {
+using ::testing::HasSubstr;
+using ::testing::status::IsOkAndHolds;
+using ::testing::status::StatusIs;
+
+constexpr int64_t kint64max = std::numeric_limits<int64_t>::max();
+
+TEST(NChooseKTest, TrivialErrorCases) {
+  absl::BitGen random;
+  constexpr int kNumTests = 100'000;
+  for (int t = 0; t < kNumTests; ++t) {
+    const int64_t x = absl::LogUniform<int64_t>(random, 0, kint64max);
+    EXPECT_THAT(NChooseK(-1, x), StatusIs(absl::StatusCode::kInvalidArgument,
+                                          HasSubstr("n is negative")));
+    EXPECT_THAT(NChooseK(x, -1), StatusIs(absl::StatusCode::kInvalidArgument,
+                                          HasSubstr("k is negative")));
+    if (x != kint64max) {
+      EXPECT_THAT(NChooseK(x, x + 1),
+                  StatusIs(absl::StatusCode::kInvalidArgument,
+                           HasSubstr("greater than n")));
+    }
+    ASSERT_FALSE(HasFailure()) << DUMP_VARS(t, x);
+  }
+}
+
+TEST(NChooseKTest, Symmetry) {
+  absl::BitGen random;
+  constexpr int kNumTests = 1'000'000;
+  for (int t = 0; t < kNumTests; ++t) {
+    const int64_t n = absl::LogUniform<int64_t>(random, 0, kint64max);
+    const int64_t k = absl::LogUniform<int64_t>(random, 0, n);
+    const absl::StatusOr<int64_t> result1 = NChooseK(n, k);
+    const absl::StatusOr<int64_t> result2 = NChooseK(n, n - k);
+    if (result1.ok()) {
+      ASSERT_THAT(result2, IsOkAndHolds(result1.value()))
+          << DUMP_VARS(t, n, k);
+    } else {
+      ASSERT_EQ(result2.status().code(), result1.status().code())
+          << DUMP_VARS(t, n, k, result1, result2);
+    }
+  }
+}
+
+TEST(NChooseKTest, Invariant) {
+  absl::BitGen random;
+  constexpr int kNumTests = 1'000'000;
+  int num_tested_invariants = 0;
+  for (int t = 0; t < kNumTests; ++t) {
+    const int64_t n = absl::LogUniform<int64_t>(random, 2, 100);
+    const int64_t k = absl::LogUniform<int64_t>(random, 1, n - 1);
+    const absl::StatusOr<int64_t> n_k = NChooseK(n, k);
+    const absl::StatusOr<int64_t> nm1_k = NChooseK(n - 1, k);
+    const absl::StatusOr<int64_t> nm1_km1 = NChooseK(n - 1, k - 1);
+    if (n_k.ok()) {
+      ++num_tested_invariants;
+      ASSERT_OK(nm1_k);
+      ASSERT_OK(nm1_km1);
+      ASSERT_EQ(n_k.value(), nm1_k.value() + nm1_km1.value())
+          << DUMP_VARS(t, n, k, n_k, nm1_k, nm1_km1);
+    }
+  }
+  EXPECT_GE(num_tested_invariants, kNumTests / 10);
+}
+
+TEST(NChooseKTest, ComparisonAgainstClosedFormsForK0) {
+  for (int64_t n : {int64_t{0}, int64_t{1}, kint64max}) {
+    EXPECT_THAT(NChooseK(n, 0), IsOkAndHolds(1)) << n;
+  }
+  absl::BitGen random;
+  constexpr int kNumTests = 1'000'000;
+  for (int t = 0; t < kNumTests; ++t) {
+    const int64_t n = absl::LogUniform<int64_t>(random, 0, kint64max);
+    ASSERT_THAT(NChooseK(n, 0), IsOkAndHolds(1)) << DUMP_VARS(n, t);
+  }
+}
+
+TEST(NChooseKTest, ComparisonAgainstClosedFormsForK1) {
+  for (int64_t n : {int64_t{1}, kint64max}) {
+    EXPECT_THAT(NChooseK(n, 1), IsOkAndHolds(n));
+  }
+  absl::BitGen random;
+  constexpr int kNumTests = 1'000'000;
+  for (int t = 0; t < kNumTests; ++t) {
+    const int64_t n = absl::LogUniform<int64_t>(random, 1, kint64max);
+    ASSERT_THAT(NChooseK(n, 1), IsOkAndHolds(n)) << DUMP_VARS(t);
+  }
+}
+
+TEST(NChooseKTest, ComparisonAgainstClosedFormsForK2) {
+  // 2^32 Choose 2 = 2^32 × (2^32-1) / 2 = 2^63 - 2^31 < kint64max,
+  // but (2^32+1) Choose 2 = 2^63 + 2^31 overflows.
+  constexpr int64_t max_n = int64_t{1} << 32;
+  for (int64_t n : {int64_t{2}, max_n}) {
+    const int64_t n_choose_2 =
+        static_cast<int64_t>(absl::uint128(n) * (n - 1) / 2);
+    EXPECT_THAT(NChooseK(n, 2), IsOkAndHolds(n_choose_2)) << DUMP_VARS(n);
+  }
+  EXPECT_THAT(NChooseK(max_n + 1, 2),
+              StatusIs(absl::StatusCode::kInvalidArgument,
+                       HasSubstr("overflows int64")));
+
+  absl::BitGen random;
+  constexpr int kNumTests = 100'000;
+  // Random valid results.
+  for (int t = 0; t < kNumTests; ++t) {
+    const int64_t n = absl::LogUniform<int64_t>(random, 2, max_n);
+    const int64_t n_choose_2 =
+        static_cast<int64_t>(absl::uint128(n) * (n - 1) / 2);
+    ASSERT_THAT(NChooseK(n, 2), IsOkAndHolds(n_choose_2)) << DUMP_VARS(t, n);
+  }
+  // Random overflows.
+  for (int t = 0; t < kNumTests; ++t) {
+    const int64_t n = absl::LogUniform<int64_t>(random, max_n + 1, kint64max);
+    ASSERT_THAT(NChooseK(n, 2), StatusIs(absl::StatusCode::kInvalidArgument,
+                                         HasSubstr("overflows int64")))
+        << DUMP_VARS(t, n);
+  }
+}
+
+TEST(NChooseKTest, ComparisonAgainstClosedFormsForK3) {
+  // This is 1 + ∛6×2^21. Checked manually on Google's scientific calculator.
+  const int64_t max_n =
+      static_cast<int64_t>(1 + std::pow(6, 1.0 / 3) * std::pow(2, 21));
+  for (int64_t n : {int64_t{3}, max_n}) {
+    const int64_t n_choose_3 =
+        static_cast<int64_t>(absl::uint128(n) * (n - 1) * (n - 2) / 6);
+    EXPECT_THAT(NChooseK(n, 3), IsOkAndHolds(n_choose_3)) << DUMP_VARS(n);
+  }
+  EXPECT_THAT(NChooseK(max_n + 1, 3),
+              StatusIs(absl::StatusCode::kInvalidArgument,
+                       HasSubstr("overflows int64")));
+
+  absl::BitGen random;
+  constexpr int kNumTests = 100'000;
+  // Random valid results.
+  for (int t = 0; t < kNumTests; ++t) {
+    const int64_t n = absl::LogUniform<int64_t>(random, 3, max_n);
+    const int64_t n_choose_3 =
+        static_cast<int64_t>(absl::uint128(n) * (n - 1) * (n - 2) / 6);
+    ASSERT_THAT(NChooseK(n, 3), IsOkAndHolds(n_choose_3)) << DUMP_VARS(t, n);
+  }
+  // Random overflows.
+  for (int t = 0; t < kNumTests; ++t) {
+    const int64_t n = absl::LogUniform<int64_t>(random, max_n + 1, kint64max);
+    ASSERT_THAT(NChooseK(n, 3), StatusIs(absl::StatusCode::kInvalidArgument,
+                                         HasSubstr("overflows int64")))
+        << DUMP_VARS(t, n);
+  }
+}
+
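The closed-form thresholds in these K-specific tests all follow one pattern, derived here editorially: since $\binom{n}{k} = \frac{n(n-1)\cdots(n-k+1)}{k!} \approx \frac{\left(n - \frac{k-1}{2}\right)^k}{k!}$, the largest $n$ with $\binom{n}{k} \le 2^{63}$ is approximately

$$n_{\max} \approx \frac{k-1}{2} + \left(k! \cdot 2^{63}\right)^{1/k},$$

which gives $1 + 6^{1/3}\,2^{21}$ for $k = 3$ and $1.5 + 24^{1/4}\,2^{63/4}$ for $k = 4$, the constants used in the K3 test above and the K4 test below.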
+TEST(NChooseKTest, ComparisonAgainstClosedFormsForK4) {
+  // This is 1.5 + ∜24 × 2^(63/4).
+  // Checked manually on Google's scientific calculator.
+  const int64_t max_n = static_cast<int64_t>(
+      1.5 + std::pow(24, 1.0 / 4) * std::pow(2, 63.0 / 4));
+  for (int64_t n : {int64_t{4}, max_n}) {
+    const int64_t n_choose_4 = static_cast<int64_t>(
+        absl::uint128(n) * (n - 1) * (n - 2) * (n - 3) / 24);
+    EXPECT_THAT(NChooseK(n, 4), IsOkAndHolds(n_choose_4)) << DUMP_VARS(n);
+  }
+  EXPECT_THAT(NChooseK(max_n + 1, 4),
+              StatusIs(absl::StatusCode::kInvalidArgument,
+                       HasSubstr("overflows int64")));
+
+  absl::BitGen random;
+  constexpr int kNumTests = 100'000;
+  // Random valid results.
+  for (int t = 0; t < kNumTests; ++t) {
+    const int64_t n = absl::LogUniform<int64_t>(random, 4, max_n);
+    const int64_t n_choose_4 = static_cast<int64_t>(
+        absl::uint128(n) * (n - 1) * (n - 2) * (n - 3) / 24);
+    ASSERT_THAT(NChooseK(n, 4), IsOkAndHolds(n_choose_4)) << DUMP_VARS(t, n);
+  }
+  // Random overflows.
+  for (int t = 0; t < kNumTests; ++t) {
+    const int64_t n = absl::LogUniform<int64_t>(random, max_n + 1, kint64max);
+    ASSERT_THAT(NChooseK(n, 4), StatusIs(absl::StatusCode::kInvalidArgument,
+                                         HasSubstr("overflows int64")))
+        << DUMP_VARS(t, n);
+  }
+}
+
+TEST(NChooseKTest, ComparisonAgainstPascalTriangleForK5OrAbove) {
+  // Fill the Pascal triangle. Use -1 for int64_t overflows. We go up to n =
+  // 17000 because (17000 Choose 5) ≈ 1.2e19 which overflows an int64_t.
+  constexpr int max_n = 17000;
+  FlatMatrix<int64_t> triangle(max_n + 1, max_n + 1);
+  for (int n = 0; n <= max_n; ++n) {
+    triangle[n][0] = 1;
+    triangle[n][n] = 1;
+    for (int i = 1; i < n; ++i) {
+      const int64_t a = triangle[n - 1][i - 1];
+      const int64_t b = triangle[n - 1][i];
+      if (a < 0 || b < 0 || absl::int128(a) + b > kint64max) {
+        triangle[n][i] = -1;
+      } else {
+        triangle[n][i] = a + b;
+      }
+    }
+  }
+  // Checking all 17000²/2 slots would be too expensive, so we check each
+  // "column" downwards until the first 10 overflows, and stop.
+  for (int k = 5; k < max_n; ++k) {
+    int num_overflows = 0;
+    for (int n = k + 5; n < max_n; ++n) {
+      if (num_overflows > 0) EXPECT_EQ(triangle[n][k], -1);
+      if (triangle[n][k] < 0) {
+        ++num_overflows;
+        EXPECT_THAT(NChooseK(n, k),
+                    StatusIs(absl::StatusCode::kInvalidArgument,
+                             HasSubstr("overflows int64")));
+        if (num_overflows > 10) break;
+      } else {
+        EXPECT_THAT(NChooseK(n, k), IsOkAndHolds(triangle[n][k]));
+      }
+    }
+  }
+}
+
+}  // namespace
+}  // namespace operations_research
diff --git a/ortools/algorithms/python/BUILD.bazel b/ortools/algorithms/python/BUILD.bazel
index f18af250055..fe3de2c5f94 100644
--- a/ortools/algorithms/python/BUILD.bazel
+++ b/ortools/algorithms/python/BUILD.bazel
@@ -11,12 +11,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+load("@bazel_skylib//rules:common_settings.bzl", "bool_flag")
+
 # Python wrapper for ..
load("@pip_deps//:requirements.bzl", "requirement") load("@pybind11_bazel//:build_defs.bzl", "pybind_extension") -load("@rules_python//python:defs.bzl", "py_test") -load("@bazel_skylib//rules:common_settings.bzl", "bool_flag") load("@rules_cc//cc:defs.bzl", "cc_library") +load("@rules_python//python:defs.bzl", "py_test") # OSS solvers bool_flag( @@ -43,6 +44,7 @@ config_setting( }, ) +# knapsack_solver cc_library( name = "knapsack_solver_doc", hdrs = ["knapsack_solver_doc.h"], @@ -76,3 +78,30 @@ py_test( requirement("absl-py"), ], ) + +# set_cover +pybind_extension( + name = "set_cover", + srcs = ["set_cover.cc"], + visibility = ["//visibility:public"], + deps = [ + "//ortools/algorithms:set_cover_cc_proto", + "//ortools/algorithms:set_cover_heuristics", + "//ortools/algorithms:set_cover_invariant", + "//ortools/algorithms:set_cover_model", + "//ortools/algorithms:set_cover_reader", + "@com_google_absl//absl/strings", + "@pybind11_protobuf//pybind11_protobuf:native_proto_caster", + ], +) + +py_test( + name = "set_cover_test", + srcs = ["set_cover_test.py"], + python_version = "PY3", + deps = [ + ":set_cover", + "//ortools/algorithms:set_cover_py_pb2", + requirement("absl-py"), + ], +) diff --git a/ortools/algorithms/python/CMakeLists.txt b/ortools/algorithms/python/CMakeLists.txt index f60ee7c1d1e..332fec6d56e 100644 --- a/ortools/algorithms/python/CMakeLists.txt +++ b/ortools/algorithms/python/CMakeLists.txt @@ -11,6 +11,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +# knapsack_solver pybind11_add_module(knapsack_solver_pybind11 MODULE knapsack_solver.cc) set_target_properties(knapsack_solver_pybind11 PROPERTIES LIBRARY_OUTPUT_NAME "knapsack_solver") @@ -33,6 +34,32 @@ endif() target_link_libraries(knapsack_solver_pybind11 PRIVATE ${PROJECT_NAMESPACE}::ortools) add_library(${PROJECT_NAMESPACE}::knapsack_solver_pybind11 ALIAS knapsack_solver_pybind11) +# set_cover +pybind11_add_module(set_cover_pybind11 MODULE set_cover.cc) +set_target_properties(set_cover_pybind11 PROPERTIES + LIBRARY_OUTPUT_NAME "set_cover") + +# note: macOS is APPLE and also UNIX ! 
+if(APPLE)
+  set_target_properties(set_cover_pybind11 PROPERTIES
+    SUFFIX ".so"
+    INSTALL_RPATH "@loader_path;@loader_path/../../../${PYTHON_PROJECT}/.libs"
+  )
+  set_property(TARGET set_cover_pybind11 APPEND PROPERTY
+    LINK_FLAGS "-flat_namespace -undefined suppress"
+  )
+elseif(UNIX)
+  set_target_properties(set_cover_pybind11 PROPERTIES
+    INSTALL_RPATH "$ORIGIN:$ORIGIN/../../../${PYTHON_PROJECT}/.libs"
+  )
+endif()
+
+target_link_libraries(set_cover_pybind11 PRIVATE
+  ${PROJECT_NAMESPACE}::ortools
+  pybind11_native_proto_caster
+)
+add_library(${PROJECT_NAMESPACE}::set_cover_pybind11 ALIAS set_cover_pybind11)
+
 if(BUILD_TESTING)
   file(GLOB PYTHON_SRCS "*_test.py")
   foreach(FILE_NAME IN LISTS PYTHON_SRCS)
diff --git a/ortools/algorithms/python/knapsack_solver_test.py b/ortools/algorithms/python/knapsack_solver_test.py
index 39f153ec567..8809980ca3f 100755
--- a/ortools/algorithms/python/knapsack_solver_test.py
+++ b/ortools/algorithms/python/knapsack_solver_test.py
@@ -22,6 +22,7 @@
 
 
 class PyWrapAlgorithmsKnapsackSolverTest(absltest.TestCase):
+
     def RealSolve(self, profits, weights, capacities, solver_type, use_reduction):
         solver = knapsack_solver.KnapsackSolver(solver_type, "solver")
         solver.set_use_reduction(use_reduction)
diff --git a/ortools/algorithms/python/set_cover.cc b/ortools/algorithms/python/set_cover.cc
new file mode 100644
index 00000000000..2d107fdfc05
--- /dev/null
+++ b/ortools/algorithms/python/set_cover.cc
@@ -0,0 +1,239 @@
+// Copyright 2010-2024 Google LLC
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// A pybind11 wrapper for set_cover_*.
+
+#include <memory>
+
+#include "absl/base/nullability.h"
+#include "ortools/algorithms/set_cover_heuristics.h"
+#include "ortools/algorithms/set_cover_invariant.h"
+#include "ortools/algorithms/set_cover_model.h"
+#include "ortools/algorithms/set_cover_reader.h"
+#include "pybind11/pybind11.h"
+#include "pybind11/pytypes.h"
+#include "pybind11/stl.h"
+#include "pybind11_protobuf/native_proto_caster.h"
+
+using ::operations_research::ElementDegreeSolutionGenerator;
+using ::operations_research::GreedySolutionGenerator;
+using ::operations_research::GuidedLocalSearch;
+using ::operations_research::Preprocessor;
+using ::operations_research::RandomSolutionGenerator;
+using ::operations_research::ReadBeasleySetCoverProblem;
+using ::operations_research::ReadRailSetCoverProblem;
+using ::operations_research::SetCoverInvariant;
+using ::operations_research::SetCoverModel;
+using ::operations_research::SteepestSearch;
+using ::operations_research::SubsetIndex;
+using ::operations_research::TrivialSolutionGenerator;
+
+namespace py = pybind11;
+using ::py::arg;
+
+// General note about TODOs: the corresponding functions/classes/methods are
+// more complex to wrap, as they use nonstandard types, and are less important,
+// as they are not as useful to most users (mostly useful to write some custom
+// Python heuristics).
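+
+// Editor's sketch of the intended usage from Python, mirroring
+// set_cover_test.py below (for orientation only, not part of the original):
+//   from ortools.algorithms.python import set_cover
+//   model = set_cover.SetCoverModel()
+//   model.add_empty_subset(1.0)
+//   model.add_element_to_last_subset(0)
+//   inv = set_cover.SetCoverInvariant(model)
+//   greedy = set_cover.GreedySolutionGenerator(inv)
+//   assert greedy.next_solution()
+//   print(inv.cost())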
+
+PYBIND11_MODULE(set_cover, m) {
+  pybind11_protobuf::ImportNativeProtoCasters();
+
+  // set_cover_model.h
+  py::class_<SetCoverModel>(m, "SetCoverModel")
+      .def(py::init<>())
+      .def_property_readonly("num_elements", &SetCoverModel::num_elements)
+      .def_property_readonly("num_subsets", &SetCoverModel::num_subsets)
+      .def_property_readonly("num_nonzeros", &SetCoverModel::num_nonzeros)
+      .def_property_readonly("fill_rate", &SetCoverModel::FillRate)
+      .def("add_empty_subset", &SetCoverModel::AddEmptySubset, arg("cost"))
+      .def(
+          "add_element_to_last_subset",
+          [](SetCoverModel& model, int element) {
+            model.AddElementToLastSubset(element);
+          },
+          arg("element"))
+      .def(
+          "set_subset_cost",
+          [](SetCoverModel& model, int subset, double cost) {
+            model.SetSubsetCost(subset, cost);
+          },
+          arg("subset"), arg("cost"))
+      .def(
+          "add_element_to_subset",
+          [](SetCoverModel& model, int element, int subset) {
+            model.AddElementToSubset(element, subset);
+          },
+          arg("element"), arg("subset"))
+      .def("compute_feasibility", &SetCoverModel::ComputeFeasibility)
+      .def(
+          "reserve_num_subsets",
+          [](SetCoverModel& model, int num_subsets) {
+            model.ReserveNumSubsets(num_subsets);
+          },
+          arg("num_subsets"))
+      .def(
+          "reserve_num_elements_in_subset",
+          [](SetCoverModel& model, int num_elements, int subset) {
+            model.ReserveNumElementsInSubset(num_elements, subset);
+          },
+          arg("num_elements"), arg("subset"))
+      .def("export_model_as_proto", &SetCoverModel::ExportModelAsProto)
+      .def("import_model_from_proto", &SetCoverModel::ImportModelFromProto);
+  // TODO(user): add support for subset_costs, columns, rows,
+  // row_view_is_valid, SubsetRange, ElementRange, all_subsets,
+  // CreateSparseRowView, ComputeCostStats, ComputeRowStats,
+  // ComputeColumnStats, ComputeRowDeciles, ComputeColumnDeciles.
+
+  // TODO(user): wrap IntersectingSubsetsIterator.
+
+  // set_cover_invariant.h
+  py::class_<SetCoverInvariant>(m, "SetCoverInvariant")
+      .def(py::init<SetCoverModel*>())
+      .def("initialize", &SetCoverInvariant::Initialize)
+      .def("clear", &SetCoverInvariant::Clear)
+      .def("recompute_invariant", &SetCoverInvariant::RecomputeInvariant)
+      .def("model", &SetCoverInvariant::model)
+      .def_property(
+          "model",
+          // Expected semantics: give a pointer to Python **while
+          // keeping ownership** in C++.
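+          // Editor's caveat (not in the original): a std::shared_ptr built
+          // from a raw pointer takes ownership and will delete the model the
+          // invariant still owns. Keeping ownership in C++ would normally use
+          // a no-op deleter, e.g.
+          //   std::shared_ptr<SetCoverModel>(invariant.model(),
+          //                                  [](SetCoverModel*) {});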
+          [](SetCoverInvariant& invariant) -> std::shared_ptr<SetCoverModel> {
+            // https://pybind11.readthedocs.io/en/stable/advanced/smart_ptrs.html#std-shared-ptr
+            std::shared_ptr<SetCoverModel> ptr(invariant.model());
+            return ptr;
+          },
+          [](SetCoverInvariant& invariant, const SetCoverModel& model) {
+            *invariant.model() = model;
+          })
+      .def("cost", &SetCoverInvariant::cost)
+      .def("num_uncovered_elements", &SetCoverInvariant::num_uncovered_elements)
+      .def("clear_trace", &SetCoverInvariant::ClearTrace)
+      .def("clear_removability_information",
+           &SetCoverInvariant::ClearRemovabilityInformation)
+      .def("compress_trace", &SetCoverInvariant::CompressTrace)
+      .def("check_consistency", &SetCoverInvariant::CheckConsistency)
+      .def(
+          "flip",
+          [](SetCoverInvariant& invariant, int subset) {
+            invariant.Flip(SubsetIndex(subset));
+          },
+          arg("subset"))
+      .def(
+          "flip_and_fully_update",
+          [](SetCoverInvariant& invariant, int subset) {
+            invariant.FlipAndFullyUpdate(SubsetIndex(subset));
+          },
+          arg("subset"))
+      .def(
+          "select",
+          [](SetCoverInvariant& invariant, int subset) {
+            invariant.Select(SubsetIndex(subset));
+          },
+          arg("subset"))
+      .def(
+          "select_and_fully_update",
+          [](SetCoverInvariant& invariant, int subset) {
+            invariant.SelectAndFullyUpdate(SubsetIndex(subset));
+          },
+          arg("subset"))
+      .def(
+          "deselect",
+          [](SetCoverInvariant& invariant, int subset) {
+            invariant.Deselect(SubsetIndex(subset));
+          },
+          arg("subset"))
+      .def(
+          "deselect_and_fully_update",
+          [](SetCoverInvariant& invariant, int subset) {
+            invariant.DeselectAndFullyUpdate(SubsetIndex(subset));
+          },
+          arg("subset"))
+      .def("export_solution_as_proto",
+           &SetCoverInvariant::ExportSolutionAsProto)
+      .def("import_solution_from_proto",
+           &SetCoverInvariant::ImportSolutionFromProto);
+  // TODO(user): add support for is_selected, num_free_elements,
+  // num_coverage_le_1_elements, coverage, ComputeCoverageInFocus,
+  // is_redundant, trace, new_removable_subsets, new_non_removable_subsets,
+  // LoadSolution, ComputeIsRedundant.
+
+  // set_cover_heuristics.h
+  py::class_<Preprocessor>(m, "Preprocessor")
+      .def(py::init<absl::Nonnull<SetCoverInvariant*>>())
+      .def("next_solution",
+           [](Preprocessor& heuristic) -> bool {
+             return heuristic.NextSolution();
+           })
+      .def("num_columns_fixed_by_singleton_row",
+           &Preprocessor::num_columns_fixed_by_singleton_row);
+  // TODO(user): add support for focus argument.
+
+  py::class_<TrivialSolutionGenerator>(m, "TrivialSolutionGenerator")
+      .def(py::init<SetCoverInvariant*>())
+      .def("next_solution", [](TrivialSolutionGenerator& heuristic) -> bool {
+        return heuristic.NextSolution();
+      });
+  // TODO(user): add support for focus argument.
+
+  py::class_<RandomSolutionGenerator>(m, "RandomSolutionGenerator")
+      .def(py::init<SetCoverInvariant*>())
+      .def("next_solution", [](RandomSolutionGenerator& heuristic) -> bool {
+        return heuristic.NextSolution();
+      });
+  // TODO(user): add support for focus argument.
+
+  py::class_<GreedySolutionGenerator>(m, "GreedySolutionGenerator")
+      .def(py::init<SetCoverInvariant*>())
+      .def("next_solution", [](GreedySolutionGenerator& heuristic) -> bool {
+        return heuristic.NextSolution();
+      });
+  // TODO(user): add support for focus and cost arguments.
+
+  py::class_<ElementDegreeSolutionGenerator>(m,
+                                             "ElementDegreeSolutionGenerator")
+      .def(py::init<SetCoverInvariant*>())
+      .def("next_solution",
+           [](ElementDegreeSolutionGenerator& heuristic) -> bool {
+             return heuristic.NextSolution();
+           });
+  // TODO(user): add support for focus and cost arguments.
+
+  py::class_<SteepestSearch>(m, "SteepestSearch")
+      .def(py::init<SetCoverInvariant*>())
+      .def("next_solution",
+           [](SteepestSearch& heuristic, int num_iterations) -> bool {
+             return heuristic.NextSolution(num_iterations);
+           });
+  // TODO(user): add support for focus and cost arguments.
+
+  py::class_<GuidedLocalSearch>(m, "GuidedLocalSearch")
+      .def(py::init<SetCoverInvariant*>())
+      .def("initialize", &GuidedLocalSearch::Initialize)
+      .def("next_solution",
+           [](GuidedLocalSearch& heuristic, int num_iterations) -> bool {
+             return heuristic.NextSolution(num_iterations);
+           });
+  // TODO(user): add support for focus and cost arguments.
+
+  // TODO(user): add support for ClearRandomSubsets, ClearRandomSubsets,
+  // ClearMostCoveredElements, ClearMostCoveredElements, TabuList,
+  // GuidedTabuSearch.
+
+  // set_cover_reader.h
+  m.def("read_beasley_set_cover_problem", &ReadBeasleySetCoverProblem);
+  m.def("read_rail_set_cover_problem", &ReadRailSetCoverProblem);
+
+  // set_cover_lagrangian.h
+  // TODO(user): add support for SetCoverLagrangian.
+}
diff --git a/ortools/algorithms/python/set_cover_test.py b/ortools/algorithms/python/set_cover_test.py
new file mode 100644
index 00000000000..73624eaa8c3
--- /dev/null
+++ b/ortools/algorithms/python/set_cover_test.py
@@ -0,0 +1,207 @@
+#!/usr/bin/env python3
+# Copyright 2010-2024 Google LLC
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from absl import app
+from absl.testing import absltest
+
+from ortools.algorithms.python import set_cover
+
+
+def create_initial_cover_model():
+    model = set_cover.SetCoverModel()
+    model.add_empty_subset(1.0)
+    model.add_element_to_last_subset(0)
+    model.add_empty_subset(1.0)
+    model.add_element_to_last_subset(1)
+    model.add_element_to_last_subset(2)
+    model.add_empty_subset(1.0)
+    model.add_element_to_last_subset(1)
+    model.add_empty_subset(1.0)
+    model.add_element_to_last_subset(2)
+    return model
+
+
+def create_knights_cover_model(num_rows: int, num_cols: int) -> set_cover.SetCoverModel:
+    model = set_cover.SetCoverModel()
+    knight_row_move = [2, 1, -1, -2, -2, -1, 1, 2]
+    knight_col_move = [1, 2, 2, 1, -1, -2, -2, -1]
+
+    for row in range(num_rows):
+        for col in range(num_cols):
+            model.add_empty_subset(1.0)
+            model.add_element_to_last_subset(row * num_cols + col)
+
+            for i in range(8):
+                new_row = row + knight_row_move[i]
+                new_col = col + knight_col_move[i]
+                if 0 <= new_row < num_rows and 0 <= new_col < num_cols:
+                    model.add_element_to_last_subset(new_row * num_cols + new_col)
+
+    return model
+
+
+# This test case is mostly a Python port of set_cover_test.cc.
+class SetCoverTest(absltest.TestCase):
+
+    def test_save_reload(self):
+        model = create_knights_cover_model(10, 10)
+        proto = model.export_model_as_proto()
+        reloaded = set_cover.SetCoverModel()
+        reloaded.import_model_from_proto(proto)
+
+        self.assertEqual(model.num_subsets, reloaded.num_subsets)
+        self.assertEqual(model.num_elements, reloaded.num_elements)
+        # TODO(user): these methods are not yet wrapped.
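+        # Editor's note: until subset_costs/columns are wrapped, a possible
+        # alternative check is to compare the exported protos directly:
+        #   self.assertEqual(str(model.export_model_as_proto()),
+        #                    str(reloaded.export_model_as_proto()))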
+        # self.assertEqual(model.subset_costs, reloaded.subset_costs)
+        # self.assertEqual(model.columns, reloaded.columns)
+
+    def test_save_reload_twice(self):
+        model = create_knights_cover_model(3, 3)
+        inv = set_cover.SetCoverInvariant(model)
+
+        greedy = set_cover.GreedySolutionGenerator(inv)
+        self.assertTrue(greedy.next_solution())
+        self.assertTrue(inv.check_consistency())
+        greedy_proto = inv.export_solution_as_proto()
+
+        steepest = set_cover.SteepestSearch(inv)
+        self.assertTrue(steepest.next_solution(500))
+        self.assertTrue(inv.check_consistency())
+        steepest_proto = inv.export_solution_as_proto()
+
+        inv.import_solution_from_proto(greedy_proto)
+        self.assertTrue(steepest.next_solution(500))
+        self.assertTrue(inv.check_consistency())
+        reloaded_proto = inv.export_solution_as_proto()
+        self.assertEqual(str(steepest_proto), str(reloaded_proto))
+
+    def test_initial_values(self):
+        model = create_initial_cover_model()
+        self.assertTrue(model.compute_feasibility())
+
+        inv = set_cover.SetCoverInvariant(model)
+        trivial = set_cover.TrivialSolutionGenerator(inv)
+        self.assertTrue(trivial.next_solution())
+        self.assertTrue(inv.check_consistency())
+
+        greedy = set_cover.GreedySolutionGenerator(inv)
+        self.assertTrue(greedy.next_solution())
+        self.assertTrue(inv.check_consistency())
+
+        self.assertEqual(inv.num_uncovered_elements(), 0)
+        steepest = set_cover.SteepestSearch(inv)
+        self.assertTrue(steepest.next_solution(500))
+        self.assertTrue(inv.check_consistency())
+
+    def test_preprocessor(self):
+        model = create_initial_cover_model()
+        self.assertTrue(model.compute_feasibility())
+
+        inv = set_cover.SetCoverInvariant(model)
+        preprocessor = set_cover.Preprocessor(inv)
+        self.assertTrue(preprocessor.next_solution())
+        self.assertTrue(inv.check_consistency())
+
+        greedy = set_cover.GreedySolutionGenerator(inv)
+        self.assertTrue(greedy.next_solution())
+        self.assertTrue(inv.check_consistency())
+
+    def test_infeasible(self):
+        model = set_cover.SetCoverModel()
+        model.add_empty_subset(1.0)
+        model.add_element_to_last_subset(0)
+        model.add_empty_subset(1.0)
+        model.add_element_to_last_subset(3)
+        self.assertFalse(model.compute_feasibility())
+
+    def test_knights_cover_creation(self):
+        model = create_knights_cover_model(16, 16)
+        self.assertTrue(model.compute_feasibility())
+
+    def test_knights_cover_greedy(self):
+        model = create_knights_cover_model(16, 16)
+        self.assertTrue(model.compute_feasibility())
+        inv = set_cover.SetCoverInvariant(model)
+
+        greedy = set_cover.GreedySolutionGenerator(inv)
+        self.assertTrue(greedy.next_solution())
+        self.assertTrue(inv.check_consistency())
+
+        steepest = set_cover.SteepestSearch(inv)
+        self.assertTrue(steepest.next_solution(500))
+        self.assertTrue(inv.check_consistency())
+
+    def test_knights_cover_degree(self):
+        model = create_knights_cover_model(16, 16)
+        self.assertTrue(model.compute_feasibility())
+        inv = set_cover.SetCoverInvariant(model)
+
+        degree = set_cover.ElementDegreeSolutionGenerator(inv)
+        self.assertTrue(degree.next_solution())
+        self.assertTrue(inv.check_consistency())
+
+        steepest = set_cover.SteepestSearch(inv)
+        self.assertTrue(steepest.next_solution(500))
+        self.assertTrue(inv.check_consistency())
+
+    def test_knights_cover_gls(self):
+        model = create_knights_cover_model(16, 16)
+        self.assertTrue(model.compute_feasibility())
+        inv = set_cover.SetCoverInvariant(model)
+
+        greedy = set_cover.GreedySolutionGenerator(inv)
+        self.assertTrue(greedy.next_solution())
+        self.assertTrue(inv.check_consistency())
+
+        gls = set_cover.GuidedLocalSearch(inv)
+        self.assertTrue(gls.next_solution(500))
+        self.assertTrue(inv.check_consistency())
+
+    def test_knights_cover_random(self):
+        model = create_knights_cover_model(16, 16)
+        self.assertTrue(model.compute_feasibility())
+        inv = set_cover.SetCoverInvariant(model)
+
+        random = set_cover.RandomSolutionGenerator(inv)
+        self.assertTrue(random.next_solution())
+        self.assertTrue(inv.check_consistency())
+
+        steepest = set_cover.SteepestSearch(inv)
+        self.assertTrue(steepest.next_solution(500))
+        self.assertTrue(inv.check_consistency())
+
+    def test_knights_cover_trivial(self):
+        model = create_knights_cover_model(16, 16)
+        self.assertTrue(model.compute_feasibility())
+        inv = set_cover.SetCoverInvariant(model)
+
+        trivial = set_cover.TrivialSolutionGenerator(inv)
+        self.assertTrue(trivial.next_solution())
+        self.assertTrue(inv.check_consistency())
+
+        steepest = set_cover.SteepestSearch(inv)
+        self.assertTrue(steepest.next_solution(500))
+        self.assertTrue(inv.check_consistency())
+
+    # TODO(user): KnightsCoverGreedyAndTabu, KnightsCoverGreedyRandomClear,
+    # KnightsCoverElementDegreeRandomClear, KnightsCoverRandomClearMip,
+    # KnightsCoverMip
+
+
+def main(_):
+    absltest.main()
+
+
+if __name__ == "__main__":
+    app.run(main)
diff --git a/ortools/algorithms/samples/code_samples.bzl b/ortools/algorithms/samples/code_samples.bzl
index 8045be9e77f..5f8039a325b 100644
--- a/ortools/algorithms/samples/code_samples.bzl
+++ b/ortools/algorithms/samples/code_samples.bzl
@@ -14,6 +14,7 @@
 """Helper macro to compile and test code samples."""
 
 load("@pip_deps//:requirements.bzl", "requirement")
+load("@rules_python//python:defs.bzl", "py_binary", "py_test")
 
 def code_sample_cc(name):
     native.cc_binary(
@@ -35,27 +36,26 @@ def code_sample_cc(name):
     )
 
 def code_sample_py(name):
-    native.py_binary(
+    py_binary(
         name = name + "_py3",
         srcs = [name + ".py"],
         main = name + ".py",
         deps = [
-            requirement("absl-py"),
             "//ortools/algorithms/python:knapsack_solver",
+            requirement("absl-py"),
+            requirement("numpy"),
         ],
         python_version = "PY3",
        srcs_version = "PY3",
    )

-    native.py_test(
+    py_test(
         name = name + "_py_test",
         size = "small",
         srcs = [name + ".py"],
         main = name + ".py",
-        data = [
-            "//ortools/algorithms/python:knapsack_solver",
-        ],
         deps = [
+            "//ortools/algorithms/python:knapsack_solver",
             requirement("absl-py"),
             requirement("numpy"),
         ],
@@ -63,6 +63,10 @@ def code_sample_py(name):
         srcs_version = "PY3",
     )
 
+def code_sample_cc_py(name):
+    code_sample_cc(name = name)
+    code_sample_py(name = name)
+
 def code_sample_java(name):
     native.java_test(
         name = name + "_java_test",
@@ -75,7 +79,3 @@ def code_sample_java(name):
             "//ortools/java/com/google/ortools:Loader",
         ],
     )
-
-def code_sample_cc_py(name):
-    code_sample_cc(name = name)
-    code_sample_py(name = name)
diff --git a/ortools/algorithms/samples/simple_knapsack_program.cc b/ortools/algorithms/samples/simple_knapsack_program.cc
index f5be61072b8..64a80d3a46d 100644
--- a/ortools/algorithms/samples/simple_knapsack_program.cc
+++ b/ortools/algorithms/samples/simple_knapsack_program.cc
@@ -37,7 +37,7 @@ void SimpleKnapsackProgram() {
       230, 315, 393, 125, 670, 892, 600, 293, 712, 147, 421, 255}};
   std::vector<int64_t> capacities = {850};
 
-  std::vector<int64_t> values = weights[0];
+  const std::vector<int64_t>& values = weights[0];
   // [END data]
 
   // [START solve]
diff --git a/ortools/algorithms/set_cover.cc b/ortools/algorithms/set_cover.cc
deleted file mode 100644
index 52ece22fc00..00000000000
--- a/ortools/algorithms/set_cover.cc
+++ /dev/null
@@ -1,397 +0,0 @@
-// Copyright 2010-2024 Google LLC
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#include "ortools/algorithms/set_cover.h"
-
-#include <algorithm>
-#include <cstddef>
-#include <iterator>
-#include <limits>
-#include <numeric>
-#include <vector>
-
-#include "absl/container/flat_hash_set.h"
-#include "absl/log/check.h"
-#include "absl/random/random.h"
-#include "absl/types/span.h"
-#include "ortools/algorithms/set_cover_invariant.h"
-#include "ortools/algorithms/set_cover_model.h"
-#include "ortools/algorithms/set_cover_utils.h"
-#include "ortools/base/logging.h"
-
-namespace operations_research {
-
-constexpr SubsetIndex kNotFound(-1);
-
-// TrivialSolutionGenerator.
-
-bool TrivialSolutionGenerator::NextSolution() {
-  return NextSolution(inv_->model()->all_subsets());
-}
-
-bool TrivialSolutionGenerator::NextSolution(
-    absl::Span<const SubsetIndex> focus) {
-  const SubsetIndex num_subsets(inv_->model()->num_subsets());
-  SubsetBoolVector choices(num_subsets, false);
-  for (const SubsetIndex subset : focus) {
-    choices[subset] = true;
-  }
-  inv_->LoadSolution(choices);
-  return true;
-}
-
-// RandomSolutionGenerator.
-
-bool RandomSolutionGenerator::NextSolution() {
-  return NextSolution(inv_->model()->all_subsets());
-}
-
-bool RandomSolutionGenerator::NextSolution(
-    const std::vector<SubsetIndex>& focus) {
-  std::vector<SubsetIndex> shuffled = focus;
-  std::shuffle(shuffled.begin(), shuffled.end(), absl::BitGen());
-  for (const SubsetIndex subset : shuffled) {
-    if (inv_->is_selected()[subset]) continue;
-    if (inv_->marginal_impacts()[subset] != 0) {
-      inv_->Toggle(subset, true);
-    }
-  }
-  DCHECK(inv_->CheckConsistency());
-  return true;
-}
-
-// GreedySolutionGenerator.
-
-void GreedySolutionGenerator::UpdatePriorities(
-    const std::vector<SubsetIndex>& impacted_subsets,
-    const SubsetCostVector& costs) {
-  for (const SubsetIndex subset : impacted_subsets) {
-    const ElementIndex marginal_impact(inv_->marginal_impacts()[subset]);
-    if (marginal_impact != 0) {
-      const Cost marginal_cost_increase =
-          costs[subset] / marginal_impact.value();
-      pq_.ChangePriority(subset, -marginal_cost_increase);
-    } else {
-      pq_.Remove(subset);
-    }
-  }
-}
-
-bool GreedySolutionGenerator::NextSolution() {
-  return NextSolution(inv_->model()->all_subsets(),
-                      inv_->model()->subset_costs());
-}
-
-bool GreedySolutionGenerator::NextSolution(
-    const std::vector<SubsetIndex>& focus) {
-  return NextSolution(focus, inv_->model()->subset_costs());
-}
-
-bool GreedySolutionGenerator::NextSolution(
-    const std::vector<SubsetIndex>& focus, const SubsetCostVector& costs) {
-  inv_->MakeDataConsistent();
-
-  // The priority is the minimum marginal cost increase. Since the
-  // priority queue returns the smallest value, we use the opposite.
-  // TODO(user): build in O(N) instead of O(n lg(N)).
-  for (const SubsetIndex subset : focus) {
-    if (!inv_->is_selected()[subset] && inv_->marginal_impacts()[subset] != 0) {
-      const Cost marginal_cost_increase =
-          costs[subset] / inv_->marginal_impacts()[subset].value();
-      pq_.Add(subset, -marginal_cost_increase);
-    }
-  }
-  const ElementIndex num_elements(inv_->model()->num_elements());
-  ElementIndex num_elements_covered(inv_->num_elements_covered());
-  while (num_elements_covered < num_elements && !pq_.IsEmpty()) {
-    const SubsetIndex best_subset = pq_.TopSubset();
-    DVLOG(1) << "Best subset: " << best_subset.value()
-             << " Priority = " << pq_.Priority(best_subset)
-             << " queue size = " << pq_.Size();
-    const std::vector<SubsetIndex> impacted_subsets =
-        inv_->Toggle(best_subset, true);
-    UpdatePriorities(impacted_subsets, costs);
-    num_elements_covered = inv_->num_elements_covered();
-    DVLOG(1) << "Cost = " << inv_->cost() << " num_uncovered_elements = "
-             << num_elements - num_elements_covered;
-  }
-  DCHECK(pq_.IsEmpty());
-  DCHECK(inv_->CheckConsistency());
-  DCHECK(inv_->CheckSolution());
-  return true;
-}
-
-// SteepestSearch.
-
-void SteepestSearch::UpdatePriorities(
-    absl::Span<const SubsetIndex> impacted_subsets) {
-  // Update priority queue. Since best_subset is in impacted_subsets, it will
-  // be removed.
-  for (const SubsetIndex subset : impacted_subsets) {
-    pq_.Remove(subset);
-  }
-}
-
-bool SteepestSearch::NextSolution(int num_iterations) {
-  return NextSolution(inv_->model()->all_subsets(),
-                      inv_->model()->subset_costs(), num_iterations);
-}
-
-bool SteepestSearch::NextSolution(absl::Span<const SubsetIndex> focus,
-                                  int num_iterations) {
-  return NextSolution(focus, inv_->model()->subset_costs(), num_iterations);
-}
-
-bool SteepestSearch::NextSolution(absl::Span<const SubsetIndex> focus,
-                                  const SubsetCostVector& costs,
-                                  int num_iterations) {
-  // Return false if inv_ contains no solution.
-  if (!inv_->CheckSolution()) return false;
-  // Create priority queue with cost of using a subset, by decreasing order.
-  // Do it only for removable subsets.
-  for (const SubsetIndex subset : focus) {
-    // The priority is the gain from removing the subset from the solution.
-    if (inv_->is_selected()[subset] && inv_->is_removable()[subset]) {
-      pq_.Add(subset, costs[subset]);
-    }
-  }
-  for (int iteration = 0; iteration < num_iterations && !pq_.IsEmpty();
-       ++iteration) {
-    const SubsetIndex best_subset = pq_.TopSubset();
-    DCHECK_GT(costs[best_subset], 0.0);
-    DCHECK(inv_->is_removable()[best_subset]);
-    DCHECK(inv_->is_selected()[best_subset]);
-    const std::vector<SubsetIndex> impacted_subsets =
-        inv_->Toggle(best_subset, false);
-    UpdatePriorities(impacted_subsets);
-    DVLOG(1) << "Cost = " << inv_->cost();
-  }
-  DCHECK(inv_->CheckConsistency());
-  DCHECK(inv_->CheckSolution());
-  return true;
-}
-
-// Guided Tabu Search
-
-void GuidedTabuSearch::Initialize() {
-  const SparseColumnView& columns = inv_->model()->columns();
-  const SubsetCostVector& subset_costs = inv_->model()->subset_costs();
-  times_penalized_.AssignToZero(columns.size());
-  augmented_costs_ = subset_costs;
-  utilities_ = subset_costs;
-}
-
-namespace {
-bool FlipCoin() {
-  // TODO(user): use STL for repeatable testing.
-  return absl::Bernoulli(absl::BitGen(), 0.5);
-}
-}  // namespace
-
-void GuidedTabuSearch::UpdatePenalties(absl::Span<const SubsetIndex> focus) {
-  const SubsetCostVector& subset_costs = inv_->model()->subset_costs();
-  Cost max_utility = -1.0;
-  for (const SubsetIndex subset : focus) {
-    if (inv_->is_selected()[subset]) {
-      max_utility = std::max(max_utility, utilities_[subset]);
-    }
-  }
-  const double epsilon_utility = epsilon_ * max_utility;
-  for (const SubsetIndex subset : focus) {
-    if (inv_->is_selected()[subset]) {
-      const double utility = utilities_[subset];
-      if ((max_utility - utility <= epsilon_utility) && FlipCoin()) {
-        ++times_penalized_[subset];
-        const int times_penalized = times_penalized_[subset];
-        const Cost cost =
-            subset_costs[subset];  // / columns[subset].size().value();
-        utilities_[subset] = cost / (1 + times_penalized);
-        augmented_costs_[subset] =
-            cost * (1 + penalty_factor_ * times_penalized);
-      }
-    }
-  }
-}
-
-bool GuidedTabuSearch::NextSolution(int num_iterations) {
-  return NextSolution(inv_->model()->all_subsets(), num_iterations);
-}
-
-bool GuidedTabuSearch::NextSolution(const std::vector<SubsetIndex>& focus,
-                                    int num_iterations) {
-  const SubsetCostVector& subset_costs = inv_->model()->subset_costs();
-  constexpr Cost kMaxPossibleCost = std::numeric_limits<Cost>::max();
-  Cost best_cost = inv_->cost();
-  SubsetBoolVector best_choices = inv_->is_selected();
-  Cost augmented_cost =
-      std::accumulate(augmented_costs_.begin(), augmented_costs_.end(), 0.0);
-  for (int iteration = 0; iteration < num_iterations; ++iteration) {
-    Cost best_delta = kMaxPossibleCost;
-    SubsetIndex best_subset = kNotFound;
-    for (const SubsetIndex subset : focus) {
-      const Cost delta = augmented_costs_[subset];
-      DVLOG(1) << "Subset, " << subset.value() << ", at ,"
-               << inv_->is_selected()[subset] << ", is removable =, "
-               << inv_->is_removable()[subset] << ", delta =, " << delta
-               << ", best_delta =, " << best_delta;
-      if (inv_->is_selected()[subset]) {
-        // Try to remove subset from solution, if the gain from removing is
-        // worth it:
-        if (-delta < best_delta &&
-            // and it can be removed, and
-            inv_->is_removable()[subset] &&
-            // it is not Tabu OR decreases the actual cost (aspiration):
-            (!tabu_list_.Contains(subset) ||
-             inv_->cost() - subset_costs[subset] < best_cost)) {
-          best_delta = -delta;
-          best_subset = subset;
-        }
-      } else {
-        // Try to use subset in solution, if its penalized delta is good.
-        if (delta < best_delta) {
-          // The limit kMaxPossibleCost is ill-defined,
-          // there is always a best_subset. Is it intended?
-          if (!tabu_list_.Contains(subset)) {
-            best_delta = delta;
-            best_subset = subset;
-          }
-        }
-      }
-    }
-    if (best_subset == kNotFound) {  // Local minimum reached.
-      inv_->LoadSolution(best_choices);
-      return true;
-    }
-    DVLOG(1) << "Best subset, " << best_subset.value() << ", at ,"
-             << inv_->is_selected()[best_subset] << ", is removable = ,"
-             << inv_->is_removable()[best_subset] << ", best_delta = ,"
-             << best_delta;
-
-    UpdatePenalties(focus);
-    tabu_list_.Add(best_subset);
-    const std::vector<SubsetIndex> impacted_subsets =
-        inv_->UnsafeToggle(best_subset, !inv_->is_selected()[best_subset]);
-    // TODO(user): make the cost computation incremental.
-    augmented_cost =
-        std::accumulate(augmented_costs_.begin(), augmented_costs_.end(), 0.0);
-
-    DVLOG(1) << "Iteration, " << iteration << ", current cost = ,"
-             << inv_->cost() << ", best cost = ," << best_cost
-             << ", penalized cost = ," << augmented_cost;
-    if (inv_->cost() < best_cost) {
-      LOG(INFO) << "Updated best cost, " << "Iteration, " << iteration
-                << ", current cost = ," << inv_->cost() << ", best cost = ,"
-                << best_cost << ", penalized cost = ," << augmented_cost;
-      best_cost = inv_->cost();
-      best_choices = inv_->is_selected();
-    }
-  }
-  inv_->LoadSolution(best_choices);
-  DCHECK(inv_->CheckConsistency());
-  DCHECK(inv_->CheckSolution());
-  return true;
-}
-
-namespace {
-void SampleSubsets(std::vector<SubsetIndex>* list, std::size_t num_subsets) {
-  num_subsets = std::min(num_subsets, list->size());
-  CHECK_GE(num_subsets, 0);
-  std::shuffle(list->begin(), list->end(), absl::BitGen());
-  list->resize(num_subsets);
-}
-}  // namespace
-
-std::vector<SubsetIndex> ClearRandomSubsets(std::size_t num_subsets,
-                                            SetCoverInvariant* inv) {
-  return ClearRandomSubsets(inv->model()->all_subsets(), num_subsets, inv);
-}
-
-std::vector<SubsetIndex> ClearRandomSubsets(absl::Span<const SubsetIndex> focus,
-                                            std::size_t num_subsets,
-                                            SetCoverInvariant* inv) {
-  num_subsets = std::min(num_subsets, focus.size());
-  CHECK_GE(num_subsets, 0);
-  std::vector<SubsetIndex> chosen_indices;
-  for (const SubsetIndex subset : focus) {
-    if (inv->is_selected()[subset]) {
-      chosen_indices.push_back(subset);
-    }
-  }
-  SampleSubsets(&chosen_indices, num_subsets);
-  for (const SubsetIndex subset : chosen_indices) {
-    // Use UnsafeToggle because we allow non-solutions.
-    inv->UnsafeToggle(subset, false);
-  }
-  return chosen_indices;
-}
-
-std::vector<SubsetIndex> ClearMostCoveredElements(std::size_t num_subsets,
-                                                  SetCoverInvariant* inv) {
-  return ClearMostCoveredElements(inv->model()->all_subsets(), num_subsets,
-                                  inv);
-}
-
-std::vector<SubsetIndex> ClearMostCoveredElements(
-    absl::Span<const SubsetIndex> focus, std::size_t num_subsets,
-    SetCoverInvariant* inv) {
-  // This is the vector we will return.
-  std::vector<SubsetIndex> chosen_indices;
-
-  const ElementToSubsetVector& coverage = inv->coverage();
-
-  // Compute a permutation of the element indices by decreasing order of
-  // coverage by element.
-  std::vector<ElementIndex> permutation(coverage.size().value());
-  std::iota(permutation.begin(), permutation.end(), 0);
-  std::sort(permutation.begin(), permutation.end(),
-            [&coverage](ElementIndex i, ElementIndex j) {
-              return coverage[i] > coverage[j];
-            });
-
-  // Now, for the elements that are over-covered (coverage > 1), collect the
-  // sets that are used.
-  absl::flat_hash_set<SubsetIndex> used_subsets_collection;
-  for (ElementIndex element : permutation) {
-    if (coverage[element] <= 1) break;
-    for (SubsetIndex subset : inv->model()->rows()[element]) {
-      if (inv->is_selected()[subset]) {
-        used_subsets_collection.insert(subset);
-      }
-    }
-  }
-
-  // Now the impacted subset is a vector representation of the flat_hash_set
-  // collection.
-  std::vector<SubsetIndex> impacted_subsets(used_subsets_collection.begin(),
-                                            used_subsets_collection.end());
-  // Sort the impacted subsets to be able to intersect the vector later.
-  std::sort(impacted_subsets.begin(), impacted_subsets.end());
-
-  // chosen_indices = focus ⋂ impacted_subsets
-  std::set_intersection(focus.begin(), focus.end(), impacted_subsets.begin(),
-                        impacted_subsets.end(),
-                        std::back_inserter(chosen_indices));
-
-  std::shuffle(chosen_indices.begin(), chosen_indices.end(), absl::BitGen());
-  chosen_indices.resize(std::min(chosen_indices.size(), num_subsets));
-
-  // Sort before traversing indices (and memory) in order.
-  std::sort(chosen_indices.begin(), chosen_indices.end());
-  for (const SubsetIndex subset : chosen_indices) {
-    // Use UnsafeToggle because we allow non-solutions.
-    inv->UnsafeToggle(subset, false);
-  }
-  return chosen_indices;
-}
-
-}  // namespace operations_research
diff --git a/ortools/algorithms/set_cover_heuristics.cc b/ortools/algorithms/set_cover_heuristics.cc
new file mode 100644
index 00000000000..63c7fe30665
--- /dev/null
+++ b/ortools/algorithms/set_cover_heuristics.cc
@@ -0,0 +1,638 @@
+// Copyright 2010-2024 Google LLC
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "ortools/algorithms/set_cover_heuristics.h"
+
+#include <algorithm>
+#include <cstddef>
+#include <limits>
+#include <numeric>
+#include <utility>
+#include <vector>
+
+#include "absl/log/check.h"
+#include "absl/random/random.h"
+#include "absl/types/span.h"
+#include "ortools/algorithms/adjustable_k_ary_heap.h"
+#include "ortools/algorithms/set_cover_invariant.h"
+#include "ortools/algorithms/set_cover_model.h"
+#include "ortools/base/logging.h"
+
+namespace operations_research {
+
+constexpr SubsetIndex kNotFound(-1);
+static constexpr Cost kMaxPossibleCost = std::numeric_limits<Cost>::max();
+static constexpr double kInfinity = std::numeric_limits<double>::infinity();
+
+namespace {
+SubsetBoolVector MakeBoolVector(absl::Span<const SubsetIndex> focus,
+                                SubsetIndex size) {
+  SubsetBoolVector result(SubsetIndex(size), false);
+  for (const SubsetIndex subset : focus) {
+    result[subset] = true;
+  }
+  return result;
+}
+}  // anonymous namespace
+
+// Preprocessor.
+
+bool Preprocessor::NextSolution() {
+  return NextSolution(inv_->model()->all_subsets());
+}
+
+bool Preprocessor::NextSolution(absl::Span<const SubsetIndex> focus) {
+  DVLOG(1) << "Entering Preprocessor::NextSolution";
+  const SubsetIndex num_subsets(inv_->model()->num_subsets());
+  SubsetBoolVector choices(num_subsets, false);
+  const ElementIndex num_elements(inv_->model()->num_elements());
+  const SparseRowView& rows = inv_->model()->rows();
+  SubsetBoolVector in_focus = MakeBoolVector(focus, num_subsets);
+  for (const ElementIndex element : inv_->model()->ElementRange()) {
+    if (rows[element].size() == 1) {
+      const SubsetIndex subset = rows[element][RowEntryIndex(0)];
+      if (in_focus[subset] && !inv_->is_selected()[subset]) {
+        inv_->Select(subset);
+        ++num_columns_fixed_by_singleton_row_;
+      }
+    }
+  }
+  inv_->CompressTrace();
+  return true;
+}
+
+// TrivialSolutionGenerator.
+
+bool TrivialSolutionGenerator::NextSolution() {
+  return NextSolution(inv_->model()->all_subsets());
+}
+
+bool TrivialSolutionGenerator::NextSolution(
+    absl::Span<const SubsetIndex> focus) {
+  const SubsetIndex num_subsets(inv_->model()->num_subsets());
+  SubsetBoolVector choices(num_subsets, false);
+  for (const SubsetIndex subset : focus) {
+    choices[subset] = true;
+  }
+  inv_->LoadSolution(choices);
+  return true;
+}
+
+// RandomSolutionGenerator.
+
+bool RandomSolutionGenerator::NextSolution() {
+  return NextSolution(inv_->model()->all_subsets());
+}
+
+bool RandomSolutionGenerator::NextSolution(
+    const std::vector<SubsetIndex>& focus) {
+  inv_->ClearTrace();
+  std::vector<SubsetIndex> shuffled = focus;
+  std::shuffle(shuffled.begin(), shuffled.end(), absl::BitGen());
+  for (const SubsetIndex subset : shuffled) {
+    if (inv_->is_selected()[subset]) continue;
+    if (inv_->num_free_elements()[subset] != 0) {
+      inv_->Select(subset);
+    }
+  }
+  inv_->CompressTrace();
+  DCHECK(inv_->CheckConsistency());
+  return true;
+}
+
+// GreedySolutionGenerator.
+
+bool GreedySolutionGenerator::NextSolution() {
+  return NextSolution(inv_->model()->all_subsets(),
+                      inv_->model()->subset_costs());
+}
+
+bool GreedySolutionGenerator::NextSolution(
+    const std::vector<SubsetIndex>& focus) {
+  return NextSolution(focus, inv_->model()->subset_costs());
+}
+
+bool GreedySolutionGenerator::NextSolution(
+    const std::vector<SubsetIndex>& focus, const SubsetCostVector& costs) {
+  DCHECK(inv_->CheckConsistency());
+  inv_->ClearTrace();
+  SubsetCostVector elements_per_cost(costs.size(), 0.0);
+  for (const SubsetIndex subset : focus) {
+    elements_per_cost[subset] = 1.0 / costs[subset];
+  }
+  std::vector<std::pair<float, SubsetIndex::ValueType>> subset_priorities;
+  DVLOG(1) << "focus.size(): " << focus.size();
+  subset_priorities.reserve(focus.size());
+  for (const SubsetIndex subset : focus) {
+    if (!inv_->is_selected()[subset] &&
+        inv_->num_free_elements()[subset] != 0) {
+      // NOMUTANTS -- reason, for C++
+      const float priority =
+          elements_per_cost[subset] * inv_->num_free_elements()[subset];
+      subset_priorities.push_back({priority, subset.value()});
+    }
+  }
+  // The priority queue maintains the maximum number of elements covered per
+  // unit of cost. We chose 16 as the arity of the heap after some testing.
+  // TODO(user): research more about the best value for Arity.
+  AdjustableKAryHeap<float, SubsetIndex::ValueType, /*Arity=*/16,
+                     /*IsMaxHeap=*/true>
+      pq(subset_priorities, inv_->model()->num_subsets());
+  while (!pq.IsEmpty()) {
+    const SubsetIndex best_subset(pq.TopIndex());
+    pq.Pop();
+    inv_->Select(best_subset);
+    // NOMUTANTS -- reason, for C++
+    if (inv_->num_uncovered_elements() == 0) break;
+    for (IntersectingSubsetsIterator it(*inv_->model(), best_subset);
+         !it.at_end(); ++it) {
+      const SubsetIndex subset = *it;
+      const BaseInt marginal_impact(inv_->num_free_elements()[subset]);
+      if (marginal_impact > 0) {
+        const float priority = marginal_impact * elements_per_cost[subset];
+        pq.Update({priority, subset.value()});
+      } else {
+        pq.Remove(subset.value());
+      }
+    }
+    DVLOG(1) << "Cost = " << inv_->cost()
+             << " num_uncovered_elements = " << inv_->num_uncovered_elements();
+  }
+  inv_->CompressTrace();
+  // Don't expect the queue to be empty, because of the break in the while
+  // loop.
+  DCHECK(inv_->CheckConsistency());
+  return true;
+}
+
+// ElementDegreeSolutionGenerator.
+// There is no need to use a priority queue here, as the ratios are computed
+// on demand. Also, elements are sorted by degree once and for all, and
+// skipped once they become covered.
+bool ElementDegreeSolutionGenerator::NextSolution() {
+  const SubsetIndex num_subsets(inv_->model()->num_subsets());
+  const SubsetBoolVector in_focus(num_subsets, true);
+  return NextSolution(in_focus, inv_->model()->subset_costs());
+}
+
+bool ElementDegreeSolutionGenerator::NextSolution(
+    absl::Span<const SubsetIndex> focus) {
+  const SubsetIndex num_subsets(inv_->model()->num_subsets());
+  const SubsetBoolVector in_focus = MakeBoolVector(focus, num_subsets);
+  return NextSolution(in_focus, inv_->model()->subset_costs());
+}
+
+bool ElementDegreeSolutionGenerator::NextSolution(
+    absl::Span<const SubsetIndex> focus, const SubsetCostVector& costs) {
+  const SubsetIndex num_subsets(inv_->model()->num_subsets());
+  const SubsetBoolVector in_focus = MakeBoolVector(focus, num_subsets);
+  return NextSolution(in_focus, costs);
+}
+
+bool ElementDegreeSolutionGenerator::NextSolution(
+    const SubsetBoolVector& in_focus, const SubsetCostVector& costs) {
+  DVLOG(1) << "Entering ElementDegreeSolutionGenerator::NextSolution";
+  DCHECK(inv_->CheckConsistency());
+  // Create the list of all the indices in the problem.
+  const BaseInt num_elements = inv_->model()->num_elements();
+  std::vector<ElementIndex> degree_sorted_elements(num_elements);
+  std::iota(degree_sorted_elements.begin(), degree_sorted_elements.end(),
+            ElementIndex(0));
+  const SparseRowView& rows = inv_->model()->rows();
+  // Sort indices by degree i.e. the size of the row corresponding to an
+  // element.
+  std::sort(degree_sorted_elements.begin(), degree_sorted_elements.end(),
+            [&rows](const ElementIndex a, const ElementIndex b) {
+              if (rows[a].size() < rows[b].size()) return true;
+              if (rows[a].size() == rows[b].size()) return a < b;
+              return false;
+            });
+  for (const ElementIndex element : degree_sorted_elements) {
+    // No need to cover an element that is already covered.
+    if (inv_->coverage()[element] != 0) continue;
+    Cost min_ratio = std::numeric_limits<Cost>::max();
+    SubsetIndex best_subset(-1);
+    for (const SubsetIndex subset : rows[element]) {
+      if (!in_focus[subset]) continue;
+      const Cost ratio = costs[subset] / inv_->num_free_elements()[subset];
+      if (ratio < min_ratio) {
+        min_ratio = ratio;
+        best_subset = subset;
+      }
+    }
+    DCHECK_NE(best_subset, SubsetIndex(-1));
+    inv_->Select(best_subset);
+    DVLOG(1) << "Cost = " << inv_->cost()
+             << " num_uncovered_elements = " << inv_->num_uncovered_elements();
+  }
+  inv_->CompressTrace();
+  DCHECK(inv_->CheckConsistency());
+  return true;
+}
+
+// SteepestSearch.
+
+void SteepestSearch::UpdatePriorities(absl::Span<const SubsetIndex>) {}
+
+bool SteepestSearch::NextSolution(int num_iterations) {
+  const SubsetIndex num_subsets(inv_->model()->num_subsets());
+  const SubsetBoolVector in_focus(num_subsets, true);
+  return NextSolution(in_focus, inv_->model()->subset_costs(), num_iterations);
+}
+
+bool SteepestSearch::NextSolution(absl::Span<const SubsetIndex> focus,
+                                  int num_iterations) {
+  const SubsetIndex num_subsets(inv_->model()->num_subsets());
+  const SubsetBoolVector in_focus = MakeBoolVector(focus, num_subsets);
+  return NextSolution(in_focus, inv_->model()->subset_costs(), num_iterations);
+}
+
+bool SteepestSearch::NextSolution(absl::Span<const SubsetIndex> focus,
+                                  const SubsetCostVector& costs,
+                                  int num_iterations) {
+  const SubsetIndex num_subsets(inv_->model()->num_subsets());
+  const SubsetBoolVector in_focus = MakeBoolVector(focus, num_subsets);
+  return NextSolution(in_focus, costs, num_iterations);
+}
+
+bool SteepestSearch::NextSolution(const SubsetBoolVector& in_focus,
+                                  const SubsetCostVector& costs,
+                                  int num_iterations) {
+  DCHECK(inv_->CheckConsistency());
+  DVLOG(1) << "Entering SteepestSearch::NextSolution, num_iterations = "
+           << num_iterations;
+  // Return false if inv_ contains no solution.
+  // TODO(user): This should be relaxed for partial solutions.
+  if (inv_->num_uncovered_elements() != 0) {
+    return false;
+  }
+
+  // Create priority queue with cost of using a subset, by decreasing order.
+  // Do it only for selected AND removable subsets.
+  std::vector<std::pair<float, SubsetIndex::ValueType>> subset_priorities;
+  subset_priorities.reserve(in_focus.size());
+  for (const SetCoverDecision& decision : inv_->trace()) {
+    const SubsetIndex subset = decision.subset();
+    if (in_focus[subset] && inv_->is_selected()[subset] &&
+        inv_->ComputeIsRedundant(subset)) {
+      const float delta_per_element = costs[subset];
+      subset_priorities.push_back({delta_per_element, subset.value()});
+    }
+  }
+  DVLOG(1) << "subset_priorities.size(): " << subset_priorities.size();
+  AdjustableKAryHeap<float, SubsetIndex::ValueType, /*Arity=*/16,
+                     /*IsMaxHeap=*/true>
+      pq(subset_priorities, inv_->model()->num_subsets());
+  for (int iteration = 0; iteration < num_iterations && !pq.IsEmpty();
+       ++iteration) {
+    const SubsetIndex best_subset(pq.TopIndex());
+    pq.Pop();
+    DCHECK(inv_->is_selected()[best_subset]);
+    DCHECK(inv_->ComputeIsRedundant(best_subset));
+    DCHECK_GT(costs[best_subset], 0.0);
+    inv_->Deselect(best_subset);
+
+    for (IntersectingSubsetsIterator it(*inv_->model(), best_subset);
+         !it.at_end(); ++it) {
+      const SubsetIndex subset = *it;
+      if (!inv_->ComputeIsRedundant(subset)) {
+        pq.Remove(subset.value());
+      }
+    }
+    DVLOG(1) << "Cost = " << inv_->cost();
+  }
+  inv_->CompressTrace();
+  // TODO(user): change this to enable working on partial solutions.
+  DCHECK_EQ(inv_->num_uncovered_elements(), 0);
+  DCHECK(inv_->CheckConsistency());
+  return true;
+}
+
+// Guided Tabu Search
+
+void GuidedTabuSearch::Initialize() {
+  const SubsetIndex num_subsets(inv_->model()->num_subsets());
+  const SubsetCostVector& subset_costs = inv_->model()->subset_costs();
+  times_penalized_.assign(num_subsets.value(), 0);
+  augmented_costs_ = subset_costs;
+  utilities_ = subset_costs;
+}
+
+namespace {
+bool FlipCoin() {
+  // TODO(user): use STL for repeatable testing.
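+  // Editor's note on the TODO above: constructing a fresh absl::BitGen on
+  // every call reseeds from the OS entropy source, which is slow and makes
+  // runs non-reproducible; a single, seedable generator owned by the caller
+  // would allow repeatable tests.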
+  return absl::Bernoulli(absl::BitGen(), 0.5);
+}
+}  // namespace
+
+void GuidedTabuSearch::UpdatePenalties(absl::Span<const SubsetIndex> focus) {
+  const SubsetCostVector& subset_costs = inv_->model()->subset_costs();
+  Cost max_utility = -1.0;
+  for (const SubsetIndex subset : focus) {
+    if (inv_->is_selected()[subset]) {
+      max_utility = std::max(max_utility, utilities_[subset]);
+    }
+  }
+  const double epsilon_utility = epsilon_ * max_utility;
+  for (const SubsetIndex subset : focus) {
+    if (inv_->is_selected()[subset]) {
+      const double utility = utilities_[subset];
+      if ((max_utility - utility <= epsilon_utility) && FlipCoin()) {
+        ++times_penalized_[subset];
+        const int times_penalized = times_penalized_[subset];
+        const Cost cost =
+            subset_costs[subset];  // / columns[subset].size().value();
+        utilities_[subset] = cost / (1 + times_penalized);
+        augmented_costs_[subset] =
+            cost * (1 + penalty_factor_ * times_penalized);
+      }
+    }
+  }
+}
+
+bool GuidedTabuSearch::NextSolution(int num_iterations) {
+  return NextSolution(inv_->model()->all_subsets(), num_iterations);
+}
+
+bool GuidedTabuSearch::NextSolution(absl::Span<const SubsetIndex> focus,
+                                    int num_iterations) {
+  DCHECK(inv_->CheckConsistency());
+  DVLOG(1) << "Entering GuidedTabuSearch::NextSolution, num_iterations = "
+           << num_iterations;
+  const SubsetCostVector& subset_costs = inv_->model()->subset_costs();
+  Cost best_cost = inv_->cost();
+  SubsetBoolVector best_choices = inv_->is_selected();
+  Cost augmented_cost =
+      std::accumulate(augmented_costs_.begin(), augmented_costs_.end(), 0.0);
+  BaseInt trace_size = inv_->trace().size();
+  for (int iteration = 0; iteration < num_iterations; ++iteration) {
+    if (inv_->trace().size() > 2 * trace_size) {
+      inv_->CompressTrace();
+      trace_size = inv_->trace().size();
+    }
+    Cost best_delta = kMaxPossibleCost;
+    SubsetIndex best_subset = kNotFound;
+    for (const SubsetIndex subset : focus) {
+      const Cost delta = augmented_costs_[subset];
+      DVLOG(1) << "Subset, " << subset.value() << ", at ,"
+               << inv_->is_selected()[subset] << ", delta =, " << delta
+               << ", best_delta =, " << best_delta;
+      if (inv_->is_selected()[subset]) {
+        // Try to remove subset from solution, if the gain from removing is
+        // worth it:
+        if (-delta < best_delta &&
+            // and it can be removed, and
+            inv_->ComputeIsRedundant(subset) &&
+            // it is not Tabu OR decreases the actual cost (aspiration):
+            (!tabu_list_.Contains(subset) ||
+             inv_->cost() - subset_costs[subset] < best_cost)) {
+          best_delta = -delta;
+          best_subset = subset;
+        }
+      } else {
+        // Try to use subset in solution, if its penalized delta is good.
+        if (delta < best_delta) {
+          // The limit kMaxPossibleCost is ill-defined,
+          // there is always a best_subset. Is it intended?
+          if (!tabu_list_.Contains(subset)) {
+            best_delta = delta;
+            best_subset = subset;
+          }
+        }
+      }
+    }
+    if (best_subset == kNotFound) {  // Local minimum reached.
+      inv_->LoadSolution(best_choices);
+      return true;
+    }
+    DVLOG(1) << "Best subset, " << best_subset.value() << ", at ,"
+             << inv_->is_selected()[best_subset] << ", best_delta = ,"
+             << best_delta;
+
+    UpdatePenalties(focus);
+    tabu_list_.Add(best_subset);
+    inv_->Flip(best_subset);
+    // TODO(user): make the cost computation incremental.
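+    // Editor's sketch of the TODO above: UpdatePenalties could return the
+    // sum of the augmented-cost deltas it applied, so that augmented_cost is
+    // adjusted in O(1) per iteration instead of re-summing the whole vector.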
+    augmented_cost =
+        std::accumulate(augmented_costs_.begin(), augmented_costs_.end(), 0.0);
+
+    DVLOG(1) << "Iteration, " << iteration << ", current cost = ,"
+             << inv_->cost() << ", best cost = ," << best_cost
+             << ", penalized cost = ," << augmented_cost;
+    if (inv_->cost() < best_cost) {
+      LOG(INFO) << "Updated best cost, " << "Iteration, " << iteration
+                << ", current cost = ," << inv_->cost() << ", best cost = ,"
+                << best_cost << ", penalized cost = ," << augmented_cost;
+      best_cost = inv_->cost();
+      best_choices = inv_->is_selected();
+    }
+  }
+  inv_->LoadSolution(best_choices);
+  inv_->CompressTrace();
+  DCHECK(inv_->CheckConsistency());
+  return true;
+}
+
+// Guided Local Search
+void GuidedLocalSearch::Initialize() {
+  const SparseColumnView& columns = inv_->model()->columns();
+  penalties_.assign(columns.size(), 0);
+  penalization_factor_ = alpha_ * inv_->cost() * 1.0 / (columns.size());
+  for (const SetCoverDecision& decision : inv_->trace()) {
+    const SubsetIndex subset = decision.subset();
+    if (inv_->is_selected()[subset]) {
+      utility_heap_.Insert(
+          {static_cast<float>(inv_->model()->subset_costs()[subset] /
+                              (1 + penalties_[subset])),
+           subset.value()});
+    }
+  }
+}
+
+bool GuidedLocalSearch::NextSolution(int num_iterations) {
+  return NextSolution(inv_->model()->all_subsets(), num_iterations);
+}
+
+Cost GuidedLocalSearch::ComputeDelta(SubsetIndex subset) const {
+  float delta = (penalization_factor_ * penalties_[subset] +
+                 inv_->model()->subset_costs()[subset]);
+  if (inv_->is_selected()[subset] && inv_->ComputeIsRedundant(subset)) {
+    return delta;
+  } else if (!inv_->is_selected()[subset]) {
+    return -delta;
+  }
+  return kInfinity;
+}
+
+bool GuidedLocalSearch::NextSolution(absl::Span<const SubsetIndex> focus,
+                                     int num_iterations) {
+  inv_->MakeFullyUpdated();
+  Cost best_cost = inv_->cost();
+  SubsetBoolVector best_choices = inv_->is_selected();
+
+  for (const SubsetIndex& subset : focus) {
+    const float delta = ComputeDelta(subset);
+    if (delta < kInfinity) {
+      priority_heap_.Insert({delta, subset.value()});
+    }
+  }
+
+  for (int iteration = 0; iteration < num_iterations; ++iteration) {
+    // Improve current solution respective to the current penalties.
+    const SubsetIndex best_subset(priority_heap_.TopIndex());
+    if (inv_->is_selected()[best_subset]) {
+      utility_heap_.Insert({0, best_subset.value()});
+    } else {
+      utility_heap_.Insert(
+          {static_cast<float>(inv_->model()->subset_costs()[best_subset] /
+                              (1 + penalties_[best_subset])),
+           best_subset.value()});
+    }
+    inv_->FlipAndFullyUpdate(best_subset);  // Flip the best subset.
+
+    // Getting the subset with highest utility.
+    const SubsetIndex penalized_subset(utility_heap_.TopIndex());
+    utility_heap_.Pop();
+    ++penalties_[penalized_subset];
+    utility_heap_.Insert(
+        {static_cast<float>(inv_->model()->subset_costs()[penalized_subset] /
+                            (1 + penalties_[penalized_subset])),
+         penalized_subset.value()});
+
+    // Get removable subsets (add them to the heap).
+    for (const SubsetIndex subset : inv_->new_removable_subsets()) {
+      const float delta_selected = (penalization_factor_ * penalties_[subset] +
+                                    inv_->model()->subset_costs()[subset]);
+      priority_heap_.Insert({delta_selected, subset.value()});
+    }
+
+    for (const SubsetIndex subset : {penalized_subset, best_subset}) {
+      const float delta = ComputeDelta(subset);
+      if (delta < kInfinity) {
+        priority_heap_.Insert({delta, subset.value()});
+      }
+    }
+
+    // Get new non removable subsets.
+    // (Delete them from the heap.)
+    for (const SubsetIndex subset : inv_->new_non_removable_subsets()) {
+      priority_heap_.Remove(subset.value());
+    }
+
+    if (inv_->cost() < best_cost) {
+      best_cost = inv_->cost();
+      best_choices = inv_->is_selected();
+    }
+  }
+  inv_->LoadSolution(best_choices);
+
+  // Improve the solution by removing redundant subsets.
+  for (const SubsetIndex& subset : focus) {
+    if (inv_->is_selected()[subset] && inv_->ComputeIsRedundant(subset))
+      inv_->DeselectAndFullyUpdate(subset);
+  }
+  DCHECK_EQ(inv_->num_uncovered_elements(), 0);
+  return true;
+}
+
+namespace {
+void SampleSubsets(std::vector<SubsetIndex>* list, std::size_t num_subsets) {
+  num_subsets = std::min(num_subsets, list->size());
+  CHECK_GE(num_subsets, 0);
+  std::shuffle(list->begin(), list->end(), absl::BitGen());
+  list->resize(num_subsets);
+}
+}  // namespace
+
+std::vector<SubsetIndex> ClearRandomSubsets(std::size_t num_subsets,
+                                            SetCoverInvariant* inv) {
+  return ClearRandomSubsets(inv->model()->all_subsets(), num_subsets, inv);
+}
+
+std::vector<SubsetIndex> ClearRandomSubsets(absl::Span<const SubsetIndex> focus,
+                                            std::size_t num_subsets,
+                                            SetCoverInvariant* inv) {
+  num_subsets = std::min(num_subsets, focus.size());
+  CHECK_GE(num_subsets, 0);
+  std::vector<SubsetIndex> chosen_indices;
+  for (const SubsetIndex subset : focus) {
+    if (inv->is_selected()[subset]) {
+      chosen_indices.push_back(subset);
+    }
+  }
+  SampleSubsets(&chosen_indices, num_subsets);
+  std::size_t num_deselected = 0;
+  for (const SubsetIndex subset : chosen_indices) {
+    inv->Deselect(subset);
+    ++num_deselected;
+    for (IntersectingSubsetsIterator it(*inv->model(), subset); !it.at_end();
+         ++it) {
+      const SubsetIndex intersecting_subset = *it;
+      if (!inv->is_selected()[intersecting_subset]) continue;
+      inv->Deselect(intersecting_subset);
+      ++num_deselected;
+    }
+    // Note that num_deselected may exceed num_subsets by more than 1.
+    if (num_deselected > num_subsets) break;
+  }
+  return chosen_indices;
+}
+
+std::vector<SubsetIndex> ClearMostCoveredElements(std::size_t max_num_subsets,
+                                                  SetCoverInvariant* inv) {
+  return ClearMostCoveredElements(inv->model()->all_subsets(), max_num_subsets,
+                                  inv);
+}
+
+std::vector<SubsetIndex> ClearMostCoveredElements(
+    absl::Span<const SubsetIndex> focus, std::size_t max_num_subsets,
+    SetCoverInvariant* inv) {
+  // This is the vector we will return.
+  std::vector<SubsetIndex> sampled_subsets;
+
+  const ElementToIntVector& coverage = inv->coverage();
+  const BaseInt num_subsets = inv->model()->num_subsets();
+  const SparseRowView& rows = inv->model()->rows();
+
+  // Collect the sets which have at least one element whose coverage > 1,
+  // even if those sets are not removable.
+  SubsetBoolVector subset_is_collected(num_subsets, false);
+  for (const ElementIndex element : inv->model()->ElementRange()) {
+    if (coverage[element] <= 1) continue;
+    for (const SubsetIndex subset : rows[element]) {
+      if (inv->is_selected()[subset] && !subset_is_collected[subset]) {
+        subset_is_collected[subset] = true;
+      }
+    }
+  }
+
+  // Now intersect with focus: sampled_subsets = focus ⋂ impacted_subsets.
+  // NOTE(user): this might take too long. TODO(user): find another algorithm
+  // if necessary.
+  for (const SubsetIndex subset : focus) {
+    if (subset_is_collected[subset]) {
+      sampled_subsets.push_back(subset);
+    }
+  }
+
+  // Actually *sample* sampled_subset.
+  // TODO(user): find another algorithm if necessary.
+  std::shuffle(sampled_subsets.begin(), sampled_subsets.end(), absl::BitGen());
+  sampled_subsets.resize(std::min(sampled_subsets.size(), max_num_subsets));
+
+  // Testing has shown that sorting sampled_subsets is not necessary.
+  // Now, un-select the subsets in sampled_subsets.
diff --git a/ortools/algorithms/set_cover.h b/ortools/algorithms/set_cover_heuristics.h
similarity index 53%
rename from ortools/algorithms/set_cover.h
rename to ortools/algorithms/set_cover_heuristics.h
index 55cdb4cdead..04d3b31b9f8 100644
--- a/ortools/algorithms/set_cover.h
+++ b/ortools/algorithms/set_cover_heuristics.h
@@ -11,19 +11,42 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-#ifndef OR_TOOLS_ALGORITHMS_SET_COVER_H_
-#define OR_TOOLS_ALGORITHMS_SET_COVER_H_
+#ifndef OR_TOOLS_ALGORITHMS_SET_COVER_HEURISTICS_H_
+#define OR_TOOLS_ALGORITHMS_SET_COVER_HEURISTICS_H_
 
 #include <cstddef>
 #include <vector>
 
+#include "absl/base/nullability.h"
 #include "absl/types/span.h"
+#include "ortools/algorithms/adjustable_k_ary_heap.h"
 #include "ortools/algorithms/set_cover_invariant.h"
 #include "ortools/algorithms/set_cover_model.h"
-#include "ortools/algorithms/set_cover_utils.h"
 
 namespace operations_research {
 
+// Priority aggregate for the subset priority queue.
+class SubsetIndexWithPriority {
+ public:
+  using Index = SubsetIndex::ValueType;
+  using Priority = float;
+  SubsetIndexWithPriority() : index_(-1), priority_(0) {}
+  SubsetIndexWithPriority(Priority priority, Index index)
+      : index_(index), priority_(priority) {}
+  Priority priority() const { return priority_; }
+  Index index() const { return index_; }
+  inline bool operator<(const SubsetIndexWithPriority other) const {
+    if (other.priority() != priority()) {
+      return priority() < other.priority();
+    }
+    return index() < other.index();
+  }
+
+ private:
+  Index index_;
+  Priority priority_;
+};
+
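As a small illustration (not part of this diff) of the ordering that operator< induces: a standard max-heap container surfaces the highest priority first and breaks priority ties toward the larger index.

#include <queue>

void PriorityOrderingDemo() {
  std::priority_queue<SubsetIndexWithPriority> heap;
  heap.push({/*priority=*/3.0f, /*index=*/7});
  heap.push({/*priority=*/5.0f, /*index=*/2});
  heap.push({/*priority=*/5.0f, /*index=*/4});
  // heap.top() is (priority 5.0, index 4): priorities compare first, and
  // equal priorities are broken by the larger index.
  const SubsetIndexWithPriority top = heap.top();
  (void)top;
}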
 // Solver classes for the weighted set covering problem.
 //
 // The solution procedure is based on the general scheme known as local search.
@@ -32,19 +55,51 @@ namespace operations_research {
 // But first, we have to generate a first solution that is as good as possible.
 //
 // The first solution is then improved by using local search descent, which
-// eliminates the T_j's that have no interest in the solution.
+// eliminates the S_j's that have no interest in the solution.
 //
 // A mix of the guided local search (GLS) and Tabu Search (TS) metaheuristic
 // is also provided.
 //
-// The term 'focus' hereafter means a subset of the T_j's designated by their
+// The term 'focus' hereafter means a subset of the S_j's designated by their
 // indices. A focus makes it possible to run the algorithms on the
 // corresponding subproblems.
 //
 // TODO(user): make the different algorithms concurrent, solving independent
 // subproblems in different threads.
 //
-// An obvious idea is to take all the T_j's (or equivalently to set all the
+
+// The preprocessor finds the elements that can only be covered by one subset.
+// Such subsets, being the only ones that can cover the element in question,
+// are necessarily chosen.
+class Preprocessor {
+ public:
+  explicit Preprocessor(absl::Nonnull<SetCoverInvariant*> inv)
+      : inv_(inv), num_columns_fixed_by_singleton_row_(0) {}
+
+  // Returns true if a solution was found.
+  // TODO(user): Add time-outs and exit with a partial solution. This seems
+  // unlikely, though.
+  bool NextSolution();
+
+  // Computes the next partial solution considering only the subsets whose
+  // indices are in focus.
+  bool NextSolution(absl::Span<const SubsetIndex> focus);
+
+  // Returns the number of columns that are the only one covering a given row.
+  int num_columns_fixed_by_singleton_row() const {
+    return num_columns_fixed_by_singleton_row_;
+  }
+
+ private:
+  // The data structure that will maintain the invariant for the model.
+  SetCoverInvariant* inv_;
+
+  // The number of columns that are the only one for a given row, i.e.
+  // the subsets that are unique in covering a particular element.
+  BaseInt num_columns_fixed_by_singleton_row_;
+};
+
+// An obvious idea is to take all the S_j's (or equivalently to set all the
 // x_j's to 1). It's a bit silly but fast, and we can improve on it later using
 // local search.
 class TrivialSolutionGenerator {
@@ -108,8 +163,7 @@ class RandomSolutionGenerator {
 class GreedySolutionGenerator {
  public:
-  explicit GreedySolutionGenerator(SetCoverInvariant* inv)
-      : inv_(inv), pq_(inv_) {}
+  explicit GreedySolutionGenerator(SetCoverInvariant* inv) : inv_(inv) {}
 
   // Returns true if a solution was found.
   // TODO(user): Add time-outs and exit with a partial solution.
@@ -119,51 +173,127 @@ class GreedySolutionGenerator {
   // indices are in focus.
   bool NextSolution(const std::vector<SubsetIndex>& focus);
 
+  // Same with a different set of costs.
   bool NextSolution(const std::vector<SubsetIndex>& focus,
                     const SubsetCostVector& costs);
 
  private:
-  // Updates the priorities on the impacted_subsets.
-  void UpdatePriorities(const std::vector<SubsetIndex>& impacted_subsets,
-                        const SubsetCostVector& costs);
-
   // The data structure that will maintain the invariant for the model.
   SetCoverInvariant* inv_;
+};
+
+// Solution generator based on the degree of elements.
+// The degree of an element is the number of subsets covering it.
+// The generator iteratively chooses an uncovered element with the smallest
+// degree and selects the subset covering it with the least cost. The degrees
+// of the newly covered elements are also updated.
+class ElementDegreeSolutionGenerator {
+ public:
+  explicit ElementDegreeSolutionGenerator(SetCoverInvariant* inv) : inv_(inv) {}
+
+  // Returns true if a solution was found.
+  // TODO(user): Add time-outs and exit with a partial solution.
+  bool NextSolution();
+
+  // Computes the next partial solution considering only the subsets whose
+  // indices are in focus.
+  bool NextSolution(absl::Span<const SubsetIndex> focus);
+
+  // Same with a different set of costs.
+  bool NextSolution(absl::Span<const SubsetIndex> focus,
+                    const SubsetCostVector& costs);
 
-  // The priority queue used for maintaining the subset with the lower marginal
-  // cost.
-  SubsetPriorityQueue pq_;
+ private:
+  // Same with a different set of costs, and the focus defined as a vector of
+  // Booleans. This is the actual implementation of NextSolution.
+  bool NextSolution(const SubsetBoolVector& in_focus,
+                    const SubsetCostVector& costs);
+
+  // The data structure that will maintain the invariant for the model.
+  SetCoverInvariant* inv_;
 };
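The class comment above fully determines the heuristic; the standalone sketch below spells the loop out on plain vectors. It is a simplification of what the generator presumably does internally: it rescans instead of performing incremental degree updates, and none of the names below come from this diff.

#include <vector>

// Toy version of the element-degree heuristic: covers[j] lists the elements
// of subset j, cost[j] is its cost. Returns which subsets get selected.
std::vector<bool> ElementDegreeGreedy(
    const std::vector<std::vector<int>>& covers,
    const std::vector<double>& cost, int num_elements) {
  std::vector<int> degree(num_elements, 0);
  for (const auto& column : covers)
    for (int element : column) ++degree[element];
  std::vector<bool> selected(covers.size(), false);
  std::vector<bool> covered(num_elements, false);
  while (true) {
    // Pick an uncovered element of minimum degree.
    int best_element = -1;
    for (int e = 0; e < num_elements; ++e) {
      if (!covered[e] &&
          (best_element < 0 || degree[e] < degree[best_element])) {
        best_element = e;
      }
    }
    if (best_element < 0) break;  // All elements are covered.
    // Select the cheapest not-yet-selected subset covering best_element.
    int best_subset = -1;
    for (int j = 0; j < static_cast<int>(covers.size()); ++j) {
      if (selected[j]) continue;
      for (int element : covers[j]) {
        if (element == best_element &&
            (best_subset < 0 || cost[j] < cost[best_subset])) {
          best_subset = j;
        }
      }
    }
    if (best_subset < 0) break;  // Infeasible instance.
    selected[best_subset] = true;
    for (int element : covers[best_subset]) covered[element] = true;
  }
  return selected;
}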
 // Once we have a first solution to the problem, there may be (most often,
-// there are) elements in S that are covered several times. To decrease the
-// total cost, SteepestSearch tries to eliminate some redundant T_j's from
+// there are) elements in E that are covered several times. To decrease the
+// total cost, SteepestSearch tries to eliminate some redundant S_j's from
 // the solution or equivalently, to flip some x_j's from 1 to 0. The algorithm
 // gets its name because it goes in the steepest immediate direction, taking
-// the T_j with the largest total cost.
+// the S_j with the largest total cost.
 class SteepestSearch {
  public:
-  explicit SteepestSearch(SetCoverInvariant* inv) : inv_(inv), pq_(inv_) {}
+  explicit SteepestSearch(SetCoverInvariant* inv) : inv_(inv) {}
 
   // Returns true if a solution was found within num_iterations.
   // TODO(user): Add time-outs and exit with a partial solution.
   bool NextSolution(int num_iterations);
 
+  // Computes the next partial solution considering only the subsets whose
+  // indices are in focus.
   bool NextSolution(absl::Span<const SubsetIndex> focus, int num_iterations);
 
+  // Same as above, with a different set of costs.
   bool NextSolution(absl::Span<const SubsetIndex> focus,
                     const SubsetCostVector& costs, int num_iterations);
 
  private:
+  // Same with a different set of costs, and the focus defined as a vector of
+  // Booleans. This is the actual implementation of NextSolution.
+  bool NextSolution(const SubsetBoolVector& in_focus,
+                    const SubsetCostVector& costs, int num_iterations);
+
   // Updates the priorities on the impacted_subsets.
   void UpdatePriorities(absl::Span<const SubsetIndex> impacted_subsets);
 
   // The data structure that will maintain the invariant for the model.
   SetCoverInvariant* inv_;
+};
 
-  // The priority queue used for maintaining the subset with the largest total
-  // cost.
-  SubsetPriorityQueue pq_;
+// A Tabu list is a fixed-sized set with FIFO replacement. It is expected to
+// be of small size, usually a few dozen elements.
+template <typename T>
+class TabuList {
+ public:
+  explicit TabuList(T size) : array_(0), fill_(0), index_(0) {
+    array_.resize(size.value(), T(-1));
+  }
+
+  // Returns the size of the array.
+  int size() const { return array_.size(); }
+
+  // Initializes the array of the Tabu list.
+  void Init(int size) {
+    array_.resize(size, T(-1));
+    fill_ = 0;
+    index_ = 0;
+  }
+
+  // Adds t to the array. When the end of the array is reached, restart at 0.
+  void Add(T t) {
+    const int size = array_.size();
+    array_[index_] = t;
+    ++index_;
+    if (index_ >= size) {
+      index_ = 0;
+    }
+    if (fill_ < size) {
+      ++fill_;
+    }
+  }
+
+  // Returns true if t is in the array. This is O(size), but size is small.
+  bool Contains(T t) const {
+    for (int i = 0; i < fill_; ++i) {
+      if (t == array_[i]) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+ private:
+  std::vector<T> array_;
+  int fill_;
+  int index_;
 };
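A short usage sketch for TabuList (illustrative values; SubsetIndex is the strong index type used throughout these files, and its operator== is assumed from set_cover_model.h):

void TabuListDemo() {
  TabuList<SubsetIndex> tabu(SubsetIndex(3));  // Capacity 3, FIFO eviction.
  tabu.Add(SubsetIndex(10));
  tabu.Add(SubsetIndex(11));
  tabu.Add(SubsetIndex(12));
  tabu.Add(SubsetIndex(13));  // Wraps around and overwrites the slot of 10.
  const bool still_tabu = tabu.Contains(SubsetIndex(12));  // true.
  const bool evicted = tabu.Contains(SubsetIndex(10));     // false.
  (void)still_tabu;
  (void)evicted;
}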
 // As usual and well-known with local search, SteepestSearch reaches a local
@@ -192,7 +322,6 @@ class GuidedTabuSearch {
  public:
   explicit GuidedTabuSearch(SetCoverInvariant* inv)
       : inv_(inv),
-        pq_(inv_),
         lagrangian_factor_(kDefaultLagrangianFactor),
         penalty_factor_(kDefaultPenaltyFactor),
         epsilon_(kDefaultEpsilon),
@@ -211,7 +340,9 @@
 
   // Computes the next partial solution considering only the subsets whose
   // indices are in focus.
-  bool NextSolution(const std::vector<SubsetIndex>& focus, int num_iterations);
+  bool NextSolution(absl::Span<const SubsetIndex> focus, int num_iterations);
+
+  bool NextSolution(const SubsetBoolVector& in_focus, int num_iterations);
 
   // TODO(user): re-introduce this in the code. It was used to favor
   // subsets with the same marginal costs but that would cover more elements.
@@ -236,9 +367,6 @@
   // The data structure that will maintain the invariant for the model.
   SetCoverInvariant* inv_;
 
-  // The priority queue used ***
-  SubsetPriorityQueue pq_;
-
   // Search handling variables and default parameters.
   static constexpr double kDefaultLagrangianFactor = 100.0;
   double lagrangian_factor_;
@@ -251,14 +379,14 @@
   double penalty_factor_;
 
   // Tabu Search parameters.
-  static constexpr double kDefaultEpsilon = 1e-8;
+  static constexpr double kDefaultEpsilon = 1e-6;
   double epsilon_;
 
   // Penalized costs for each subset as used in Guided Tabu Search.
   SubsetCostVector augmented_costs_;
 
   // The number of times each subset was penalized during Guided Tabu Search.
-  SubsetCountVector times_penalized_;
+  SubsetToIntVector times_penalized_;
 
   // TODO(user): remove and use priority_queue.
   // Utilities for the different subsets. They are updated ("penalized") costs.
@@ -269,8 +397,86 @@
   TabuList<SubsetIndex> tabu_list_;
 };
 
+// Guided Local Search penalizes the parts of the solution that have been often
+// used. It behaves as a long-term memory which "learns" the most used
+// features and introduces some diversification in the search.
+// At each iteration, the algorithm selects the subset from the focus with the
+// maximum penalization utility and penalizes it.
+
+// It has been observed that good values for the penalization factor can be
+// found by dividing the value of the objective function of a local minimum
+// by the number of features present in it [1]. In our case, the penalization
+// factor is the sum of the costs of the subsets selected in the focus divided
+// by the number of subsets in the focus, times a tunable factor alpha_.
+// [1] C. Voudouris (1997) "Guided local search for combinatorial optimisation
+// problems", PhD Thesis, University of Essex, Colchester, UK, July, 1997.
+class GuidedLocalSearch {
+ public:
+  explicit GuidedLocalSearch(SetCoverInvariant* inv)
+      : inv_(inv), epsilon_(kDefaultEpsilon), alpha_(kDefaultAlpha) {
+    Initialize();
+  }
+
+  // Initializes the Guided Local Search algorithm.
+  void Initialize();
+
+  // Returns the next solution by running the Guided Local Search algorithm
+  // for at most num_iterations iterations.
+  bool NextSolution(int num_iterations);
+
+  // Computes the next partial solution considering only the subsets whose
+  // indices are in focus.
+  bool NextSolution(absl::Span<const SubsetIndex> focus, int num_iterations);
+
+  bool NextSolution(const SubsetBoolVector& in_focus, int num_iterations);
+
+ private:
+  // The data structure that will maintain the invariant for the model.
+  SetCoverInvariant* inv_;
+
+  // Setters and getters for the Guided Local Search algorithm parameters.
+  void SetEpsilon(double r) { epsilon_ = r; }
+
+  double GetEpsilon() const { return epsilon_; }
+
+  void SetAlpha(double r) { alpha_ = r; }
+
+  double GetAlpha() const { return alpha_; }
+
+  // The epsilon value for the Guided Local Search algorithm.
+  // Used to penalize the subsets within epsilon of the maximum utility.
+  static constexpr double kDefaultEpsilon = 1e-8;
+  double epsilon_;
+
+  // The alpha value for the Guided Local Search algorithm.
+  // Tunable factor used to penalize the subsets.
+  static constexpr double kDefaultAlpha = 0.5;
+  double alpha_;
+
+  // The penalization value for the Guided Local Search algorithm.
+  double penalization_factor_;
+
+  // The penalties of each feature during Guided Local Search.
+  SubsetToIntVector penalties_;
+
+  // Computes the delta in the cost of the solution if the state of subset
+  // were flipped.
+  Cost ComputeDelta(SubsetIndex subset) const;
+
+  // The priority heap used to select the subset with the maximum priority
+  // to be updated.
+  AdjustableKAryHeap<SubsetIndexWithPriority, /*Arity=*/2> priority_heap_;
+
+  // The utility heap used to select the subset with the maximum utility to
+  // be penalized.
+  AdjustableKAryHeap<SubsetIndexWithPriority, /*Arity=*/2> utility_heap_;
+};
+
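As a worked instance of the factor described in the class comment (toy numbers, not from this change): with alpha = 0.5, a local-minimum cost of 120.0, and 300 subsets in the focus, the penalization factor is 0.5 * 120.0 / 300 = 0.2. This mirrors the assignment in GuidedLocalSearch::Initialize() in the .cc file earlier in this diff.

// Hypothetical helper, shown only to make the arithmetic concrete.
double PenalizationFactor(double alpha, double local_minimum_cost,
                          int num_subsets_in_focus) {
  return alpha * local_minimum_cost / num_subsets_in_focus;
}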
 // Randomly clears a proportion num_subsets variables in the solution.
 // Returns a list of subset indices to be potentially reused as a focus.
+// Randomly clears at least num_subsets variables in the
+// solution. There can be more than num_subsets variables cleared because the
+// intersecting subsets are also removed from the solution. Returns a list of
+// subset indices that can be reused as a focus.
 std::vector<SubsetIndex> ClearRandomSubsets(std::size_t num_subsets,
                                             SetCoverInvariant* inv);
 
@@ -279,9 +485,11 @@ std::vector<SubsetIndex> ClearRandomSubsets(absl::Span<const SubsetIndex> focus,
                                             std::size_t num_subsets,
                                             SetCoverInvariant* inv);
 
-// Clears the variables that cover the most covered elements. This is capped
-// by num_subsets.
-// Return the list of chosen subset indices to be potentially reused as a focus.
+// Clears the variables (subsets) that cover the most covered elements. This is
+// capped by num_subsets. If the cap is reached, the subsets are chosen
+// randomly.
+// Returns the list of the chosen subset indices.
+// These indices can then be used as a focus.
 std::vector<SubsetIndex> ClearMostCoveredElements(std::size_t num_subsets,
                                                   SetCoverInvariant* inv);
 
@@ -289,7 +497,6 @@ std::vector<SubsetIndex> ClearMostCoveredElements(
     absl::Span<const SubsetIndex> focus, std::size_t num_subsets,
     SetCoverInvariant* inv);
-
 }  // namespace operations_research
 
-#endif  // OR_TOOLS_ALGORITHMS_SET_COVER_H_
+#endif  // OR_TOOLS_ALGORITHMS_SET_COVER_HEURISTICS_H_
diff --git a/ortools/algorithms/set_cover_invariant.cc b/ortools/algorithms/set_cover_invariant.cc
index 6b17c1092fa..128dff78a8f 100644
--- a/ortools/algorithms/set_cover_invariant.cc
+++ b/ortools/algorithms/set_cover_invariant.cc
@@ -15,6 +15,7 @@
 
 #include
 #include
+#include <tuple>
 #include
 
 #include "absl/log/check.h"
@@ -23,369 +24,346 @@
 #include "ortools/base/logging.h"
 
 namespace operations_research {
+
+namespace {
+bool SupportsAvx512() { return false; }
+}  // namespace
+
 // Note: in many of the member functions, variables have "crypterse" names
 // to avoid confusing them with member data. For example mrgnl_impcts is used
-// to avoid confusion with marginal_impacts_.
+// to avoid confusion with num_free_elements_.
void SetCoverInvariant::Initialize() { DCHECK(model_->ComputeFeasibility()); model_->CreateSparseRowView(); - const SubsetIndex num_subsets(model_->num_subsets()); + cost_ = 0.0; + + const BaseInt num_subsets = model_->num_subsets(); + const BaseInt num_elements = model_->num_elements(); + is_selected_.assign(num_subsets, false); - is_removable_.assign(num_subsets, false); - marginal_impacts_.assign(num_subsets, ElementIndex(0)); + num_free_elements_.assign(num_subsets, 0); + num_non_overcovered_elements_.assign(num_subsets, 0); + is_redundant_.assign(num_subsets, false); const SparseColumnView& columns = model_->columns(); - for (SubsetIndex subset(0); subset < num_subsets; ++subset) { - marginal_impacts_[subset] = columns[subset].size().value(); + for (const SubsetIndex subset : model_->SubsetRange()) { + num_free_elements_[subset] = columns[subset].size(); + num_non_overcovered_elements_[subset] = columns[subset].size(); } - const ElementIndex num_elements(model_->num_elements()); - coverage_.assign(num_elements, SubsetIndex(0)); - cost_ = 0.0; - num_elements_covered_ = ElementIndex(0); -} -bool SetCoverInvariant::CheckConsistency() const { - CHECK(CheckCoverageAndMarginalImpacts(is_selected_)); - CHECK(CheckIsRemovable()); - return true; -} + coverage_.assign(num_elements, 0); -void SetCoverInvariant::LoadSolution(const SubsetBoolVector& c) { - is_selected_ = c; - MakeDataConsistent(); -} + // No need to reserve for trace_ and other vectors as extending with + // push_back is fast enough. -bool SetCoverInvariant::CheckSolution() const { - bool is_ok = true; + num_uncovered_elements_ = num_elements; + supports_avx512_ = SupportsAvx512(); + is_fully_updated_ = true; +} - const ElementToSubsetVector cvrg = ComputeCoverage(is_selected_); - const ElementIndex num_elements(model_->num_elements()); - for (ElementIndex element(0); element < num_elements; ++element) { - if (cvrg[element] == 0) { - LOG(ERROR) << "Recomputed coverage_ for element " << element << " = 0"; - is_ok = false; - } +bool SetCoverInvariant::CheckConsistency() const { + auto [cst, cvrg] = ComputeCostAndCoverage(is_selected_); + CHECK_EQ(cost_, cst); + for (const ElementIndex element : model_->ElementRange()) { + CHECK_EQ(cvrg[element], coverage_[element]); } - - const Cost recomputed_cost = ComputeCost(is_selected_); - if (cost_ != recomputed_cost) { - LOG(ERROR) << "Cost = " << cost_ - << ", while recomputed cost_ = " << recomputed_cost; - is_ok = false; + auto [num_uncvrd_elts, num_free_elts] = + ComputeNumUncoveredAndFreeElements(cvrg); + auto [num_non_ovrcvrd_elts, is_rdndnt] = + ComputeNumNonOvercoveredElementsAndIsRedundant(cvrg); + for (const SubsetIndex subset : model_->SubsetRange()) { + CHECK_EQ(num_free_elts[subset], num_free_elements_[subset]); + if (is_fully_updated_) { + CHECK_EQ(is_rdndnt[subset], is_redundant_[subset]); + CHECK_EQ(is_rdndnt[subset], num_non_ovrcvrd_elts[subset] == 0); + } } - return is_ok; + return true; } -bool SetCoverInvariant::CheckCoverageAgainstSolution( - const SubsetBoolVector& choices) const { - const SubsetIndex num_subsets(model_->num_subsets()); - DCHECK_EQ(num_subsets, choices.size()); - const ElementToSubsetVector cvrg = ComputeCoverage(choices); - bool is_ok = true; - const ElementIndex num_elements(model_->num_elements()); - for (ElementIndex element(0); element < num_elements; ++element) { - if (coverage_[element] != cvrg[element]) { - LOG(ERROR) << "Recomputed coverage_ for element " << element << " = " - << cvrg[element] - << ", while updated coverage_ = " << 
coverage_[element]; - is_ok = false; - } - } - return is_ok; +void SetCoverInvariant::LoadSolution(const SubsetBoolVector& solution) { + is_selected_ = solution; + RecomputeInvariant(); } -bool SetCoverInvariant::CheckCoverageAndMarginalImpacts( - const SubsetBoolVector& choices) const { - const SubsetIndex num_subsets(model_->num_subsets()); - DCHECK_EQ(num_subsets, choices.size()); - const ElementToSubsetVector cvrg = ComputeCoverage(choices); - bool is_ok = CheckCoverageAgainstSolution(choices); - const SubsetToElementVector mrgnl_impcts = ComputeMarginalImpacts(cvrg); - for (SubsetIndex subset(0); subset < num_subsets; ++subset) { - if (marginal_impacts_[subset] != mrgnl_impcts[subset]) { - LOG(ERROR) << "Recomputed marginal impact for subset " << subset << " = " - << mrgnl_impcts[subset] << ", while updated marginal impact = " - << marginal_impacts_[subset]; - is_ok = false; - } - } - return is_ok; +void SetCoverInvariant::RecomputeInvariant() { + std::tie(cost_, coverage_) = ComputeCostAndCoverage(is_selected_); + std::tie(num_uncovered_elements_, num_free_elements_) = + ComputeNumUncoveredAndFreeElements(coverage_); + std::tie(num_non_overcovered_elements_, is_redundant_) = + ComputeNumNonOvercoveredElementsAndIsRedundant(coverage_); + is_fully_updated_ = true; } -// Used only once, for testing. TODO(user): Merge with -// CheckCoverageAndMarginalImpacts. -SubsetToElementVector SetCoverInvariant::ComputeMarginalImpacts( - const ElementToSubsetVector& cvrg) const { - const ElementIndex num_elements(model_->num_elements()); - DCHECK_EQ(num_elements, cvrg.size()); - const SparseColumnView& columns = model_->columns(); - const SubsetIndex num_subsets(model_->num_subsets()); - SubsetToElementVector mrgnl_impcts(num_subsets, ElementIndex(0)); - for (SubsetIndex subset(0); subset < num_subsets; ++subset) { - for (ElementIndex element : columns[subset]) { - if (cvrg[element] == 0) { - ++mrgnl_impcts[subset]; - } - } - DCHECK_LE(mrgnl_impcts[subset], columns[subset].size().value()); - DCHECK_GE(mrgnl_impcts[subset], 0); - } - return mrgnl_impcts; +void SetCoverInvariant::MakeFullyUpdated() { + std::tie(num_non_overcovered_elements_, is_redundant_) = + ComputeNumNonOvercoveredElementsAndIsRedundant(coverage_); + is_fully_updated_ = true; } -Cost SetCoverInvariant::ComputeCost(const SubsetBoolVector& c) const { - DCHECK_EQ(c.size(), model_->num_subsets()); - Cost recomputed_cost = 0; +std::tuple SetCoverInvariant::ComputeCostAndCoverage( + const SubsetBoolVector& choices) const { + Cost cst = 0.0; + ElementToIntVector cvrg(model_->num_elements(), 0); + const SparseColumnView& columns = model_->columns(); + // Initialize coverage, update cost, and compute the coverage for + // all the elements covered by the selected subsets. const SubsetCostVector& subset_costs = model_->subset_costs(); - for (SubsetIndex subset(0); bool b : c) { + for (SubsetIndex subset(0); bool b : choices) { if (b) { - recomputed_cost += subset_costs[subset]; + cst += subset_costs[subset]; + for (const ElementIndex element : columns[subset]) { + ++cvrg[element]; + } } ++subset; } - return recomputed_cost; + return {cst, cvrg}; } -ElementIndex SetCoverInvariant::ComputeNumElementsCovered( - const ElementToSubsetVector& cvrg) const { - // Use "crypterse" naming to avoid confusing with num_elements_. 
- int num_elmnts_cvrd = 0; - for (ElementIndex element(0); element < model_->num_elements(); ++element) { - if (cvrg[element] >= 1) { - ++num_elmnts_cvrd; - } - } - return ElementIndex(num_elmnts_cvrd); -} - -ElementToSubsetVector SetCoverInvariant::ComputeCoverage( - const SubsetBoolVector& choices) const { - const ElementIndex num_elements(model_->num_elements()); - const SparseRowView& rows = model_->rows(); - // Use "crypterse" naming to avoid confusing with coverage_. - ElementToSubsetVector cvrg(num_elements, SubsetIndex(0)); - for (ElementIndex element(0); element < num_elements; ++element) { - for (SubsetIndex subset : rows[element]) { - if (choices[subset]) { - ++cvrg[element]; +ElementToIntVector SetCoverInvariant::ComputeCoverageInFocus( + const absl::Span focus) const { + ElementToIntVector coverage(coverage_.size()); + for (const SubsetIndex subset : focus) { + if (is_selected_[subset]) { + for (const ElementIndex element : model_->columns()[subset]) { + ++coverage[element]; } } - DCHECK_LE(cvrg[element], rows[element].size().value()); - DCHECK_GE(cvrg[element], 0); } - return cvrg; + return coverage; } -bool SetCoverInvariant::CheckSingleSubsetCoverage(SubsetIndex subset) const { - ElementToSubsetVector cvrg = ComputeSingleSubsetCoverage(subset); +std::tuple +SetCoverInvariant::ComputeNumUncoveredAndFreeElements( + const ElementToIntVector& cvrg) const { + BaseInt num_uncvrd_elts = model_->num_elements(); + + const BaseInt num_subsets(model_->num_subsets()); + SubsetToIntVector num_free_elts(num_subsets, 0); + const SparseColumnView& columns = model_->columns(); - for (const ElementIndex element : columns[subset]) { - DCHECK_EQ(coverage_[element], cvrg[element]) << " Element = " << element; + // Initialize number of free elements and number of elements covered 0 or 1. + for (const SubsetIndex subset : model_->SubsetRange()) { + num_free_elts[subset] = columns[subset].size(); } - return true; -} -// Used only once, for testing. TODO(user): Merge with -// CheckSingleSubsetCoverage. -ElementToSubsetVector SetCoverInvariant::ComputeSingleSubsetCoverage( - SubsetIndex subset) const { - const SparseColumnView& columns = model_->columns(); - const ElementIndex num_elements(model_->num_elements()); - // Use "crypterse" naming to avoid confusing with cvrg. - ElementToSubsetVector cvrg(num_elements, SubsetIndex(0)); const SparseRowView& rows = model_->rows(); - for (const ElementIndex element : columns[subset]) { - for (SubsetIndex subset : rows[element]) { - if (is_selected_[subset]) { - ++cvrg[element]; + for (const ElementIndex element : model_->ElementRange()) { + if (cvrg[element] >= 1) { + --num_uncvrd_elts; + for (const SubsetIndex subset : rows[element]) { + --num_free_elts[subset]; } } - DCHECK_LE(cvrg[element], rows[element].size().value()); - DCHECK_GE(cvrg[element], 0); } - return cvrg; + return {num_uncvrd_elts, num_free_elts}; } -std::vector SetCoverInvariant::Toggle(SubsetIndex subset, - bool value) { - // Note: "if p then q" is also "not(p) or q", or p <= q (p LE q). - // If selected, then is_removable, to make sure we still have a solution. - DCHECK(is_selected_[subset] <= is_removable_[subset]); - // If value, then marginal_impact > 0, to not increase the cost. 
- DCHECK((value <= (marginal_impacts_[subset] > 0))); - return UnsafeToggle(subset, value); -} +std::tuple +SetCoverInvariant::ComputeNumNonOvercoveredElementsAndIsRedundant( + const ElementToIntVector& cvrg) const { + const BaseInt num_subsets(model_->num_subsets()); + SubsetToIntVector num_cvrg_le_1_elts(num_subsets, 0); + SubsetBoolVector is_rdndnt(num_subsets, false); -std::vector SetCoverInvariant::UnsafeToggle(SubsetIndex subset, - bool value) { - // We allow to deselect a non-removable subset, but at least the - // Toggle should be a real change. - DCHECK_NE(is_selected_[subset], value); - // If selected, then marginal_impact == 0. - DCHECK(is_selected_[subset] <= (marginal_impacts_[subset] == 0)); - DVLOG(1) << (value ? "S" : "Des") << "electing subset " << subset; - const SubsetCostVector& subset_costs = model_->subset_costs(); - cost_ += value ? subset_costs[subset] : -subset_costs[subset]; - is_selected_[subset] = value; - UpdateCoverage(subset, value); - const std::vector impacted_subsets = - ComputeImpactedSubsets(subset); - UpdateIsRemovable(impacted_subsets); - UpdateMarginalImpacts(impacted_subsets); - DCHECK((is_selected_[subset] <= (marginal_impacts_[subset] == 0))); - return impacted_subsets; -} - -void SetCoverInvariant::UpdateCoverage(SubsetIndex subset, bool value) { const SparseColumnView& columns = model_->columns(); + // Initialize number of free elements and number of elements covered 0 or 1. + for (const SubsetIndex subset : model_->SubsetRange()) { + num_cvrg_le_1_elts[subset] = columns[subset].size(); + } + const SparseRowView& rows = model_->rows(); - const int delta = value ? 1 : -1; - for (const ElementIndex element : columns[subset]) { - DVLOG(2) << "Coverage of element " << element << " changed from " - << coverage_[element] << " to " << coverage_[element] + delta; - coverage_[element] += delta; - DCHECK_GE(coverage_[element], 0); - DCHECK_LE(coverage_[element], rows[element].size().value()); - if (coverage_[element] == 1) { - ++num_elements_covered_; - } else if (coverage_[element] == 0) { - --num_elements_covered_; + for (const ElementIndex element : model_->ElementRange()) { + if (cvrg[element] >= 2) { + for (const SubsetIndex subset : rows[element]) { + --num_cvrg_le_1_elts[subset]; + if (num_cvrg_le_1_elts[subset] == 0) { + is_rdndnt[subset] = true; + } + } } } - DCHECK(CheckSingleSubsetCoverage(subset)); + return {num_cvrg_le_1_elts, is_rdndnt}; } -// Compute the impact of the change in the assignment for each subset -// containing element. Be careful to add the elements only once. -std::vector SetCoverInvariant::ComputeImpactedSubsets( - SubsetIndex subset) const { - const SparseColumnView& columns = model_->columns(); - const SparseRowView& rows = model_->rows(); - SubsetBoolVector subset_seen(columns.size(), false); - std::vector impacted_subsets; - impacted_subsets.reserve(columns.size().value()); - for (const ElementIndex element : columns[subset]) { - for (const SubsetIndex subset : rows[element]) { - if (!subset_seen[subset]) { - subset_seen[subset] = true; - impacted_subsets.push_back(subset); - } +void SetCoverInvariant::CompressTrace() { + // As of 2024-05-14, this is as fast as "smarter" alternatives that try to + // avoid some memory writes by keeping track of already visited subsets. + // We also tried to use is_selected_ as a helper, which slowed down + // everything. 
+ const SubsetIndex num_subsets(model_->num_subsets()); + SubsetBoolVector last_value_seen(num_subsets, false); + for (BaseInt i = 0; i < trace_.size(); ++i) { + const SubsetIndex subset(trace_[i].subset()); + last_value_seen[subset] = trace_[i].decision(); + } + BaseInt w = 0; // Write index. + for (BaseInt i = 0; i < trace_.size(); ++i) { + const SubsetIndex subset(trace_[i].subset()); + if (last_value_seen[subset]) { + last_value_seen[subset] = false; + trace_[w] = SetCoverDecision(subset, true); + ++w; } } - DCHECK_LE(impacted_subsets.size(), model_->num_subsets()); - // Testing has shown there is no gain in sorting impacted_subsets. - return impacted_subsets; + trace_.resize(w); } -bool SetCoverInvariant::ComputeIsRemovable(SubsetIndex subset) const { - DCHECK(CheckSingleSubsetCoverage(subset)); - const SparseColumnView& columns = model_->columns(); - for (const ElementIndex element : columns[subset]) { - if (coverage_[element] <= 1) { - return false; +bool SetCoverInvariant::ComputeIsRedundant(SubsetIndex subset) const { + if (is_fully_updated_) { + return is_redundant_[subset]; + } + if (is_selected_[subset]) { + for (const ElementIndex element : model_->columns()[subset]) { + if (coverage_[element] <= 1) { // If deselected, it will be <= 0... + return false; + } + } + } else { + for (const ElementIndex element : model_->columns()[subset]) { + if (coverage_[element] == 0) { // Cannot be removed from the problem. + return false; + } } } return true; } -void SetCoverInvariant::UpdateIsRemovable( - absl::Span impacted_subsets) { - for (const SubsetIndex subset : impacted_subsets) { - is_removable_[subset] = ComputeIsRemovable(subset); +void SetCoverInvariant::Flip(SubsetIndex subset, bool incremental_full_update) { + if (!is_selected_[subset]) { + Select(subset, incremental_full_update); + } else { + Deselect(subset, incremental_full_update); } } -SubsetBoolVector SetCoverInvariant::ComputeIsRemovable( - const ElementToSubsetVector& cvrg) const { - DCHECK(CheckCoverageAgainstSolution(is_selected_)); - const SubsetIndex num_subsets(model_->num_subsets()); - SubsetBoolVector is_rmvble(num_subsets, true); +void SetCoverInvariant::Select(SubsetIndex subset, + bool incremental_full_update) { + if (incremental_full_update) { + ClearRemovabilityInformation(); + } else { + is_fully_updated_ = false; + } + DVLOG(1) << "Selecting subset " << subset; + DCHECK(!is_selected_[subset]); + DCHECK(CheckConsistency()); + trace_.push_back(SetCoverDecision(subset, true)); + is_selected_[subset] = true; + const SubsetCostVector& subset_costs = model_->subset_costs(); + cost_ += subset_costs[subset]; + if (supports_avx512_) { + SelectAvx512(subset); + return; + } + const SparseColumnView& columns = model_->columns(); const SparseRowView& rows = model_->rows(); - for (ElementIndex element(0); element < rows.size(); ++element) { - if (cvrg[element] <= 1) { - for (const SubsetIndex subset : rows[element]) { - is_rmvble[subset] = false; + for (const ElementIndex element : columns[subset]) { + if (coverage_[element] == 0) { + // `element` will be newly covered. + --num_uncovered_elements_; + for (const SubsetIndex impacted_subset : rows[element]) { + --num_free_elements_[impacted_subset]; + } + } else if (incremental_full_update && coverage_[element] == 1) { + // `element` will be newly overcovered. 
+ for (const SubsetIndex impacted_subset : rows[element]) { + --num_non_overcovered_elements_[impacted_subset]; + if (num_non_overcovered_elements_[impacted_subset] == 0) { + // All the elements in impacted_subset are now overcovered, so it + // is removable. Note that this happens only when the last element + // of impacted_subset becomes overcovered. + DCHECK(!is_redundant_[impacted_subset]); + if (is_selected_[impacted_subset]) { + new_removable_subsets_.push_back(impacted_subset); + } + is_redundant_[impacted_subset] = true; + } } } + // Update coverage. Notice the asymmetry with Deselect where coverage is + // **decremented** before being tested. This allows to have more symmetrical + // code for conditions. + ++coverage_[element]; } - for (SubsetIndex subset(0); subset < num_subsets; ++subset) { - DCHECK_EQ(is_rmvble[subset], ComputeIsRemovable(subset)); + if (incremental_full_update) { + if (is_redundant_[subset]) { + new_removable_subsets_.push_back(subset); + } else { + new_non_removable_subsets_.push_back(subset); + } } - return is_rmvble; + DCHECK(CheckConsistency()); } -bool SetCoverInvariant::CheckIsRemovable() const { - const SubsetBoolVector is_rmvble = ComputeIsRemovable(coverage_); - const SubsetIndex num_subsets(model_->num_subsets()); - for (SubsetIndex subset(0); subset < num_subsets; ++subset) { - DCHECK_EQ(is_rmvble[subset], ComputeIsRemovable(subset)); +void SetCoverInvariant::Deselect(SubsetIndex subset, + bool incremental_full_update) { + if (incremental_full_update) { + ClearRemovabilityInformation(); + } else { + is_fully_updated_ = false; + } + DVLOG(1) << "Deselecting subset " << subset; + // If already selected, then num_free_elements == 0. + DCHECK(is_selected_[subset]); + DCHECK_EQ(num_free_elements_[subset], 0); + DCHECK(CheckConsistency()); + trace_.push_back(SetCoverDecision(subset, false)); + is_selected_[subset] = false; + const SubsetCostVector& subset_costs = model_->subset_costs(); + cost_ -= subset_costs[subset]; + if (supports_avx512_) { + DeselectAvx512(subset); + return; } - return true; -} - -void SetCoverInvariant::UpdateMarginalImpacts( - absl::Span impacted_subsets) { const SparseColumnView& columns = model_->columns(); - for (const SubsetIndex subset : impacted_subsets) { - ElementIndex impact(0); - for (const ElementIndex element : columns[subset]) { - if (coverage_[element] == 0) { - ++impact; + const SparseRowView& rows = model_->rows(); + for (const ElementIndex element : columns[subset]) { + // Update coverage. Notice the asymmetry with Select where coverage is + // incremented after being tested. + --coverage_[element]; + if (coverage_[element] == 0) { + // `element` is no longer covered. + ++num_uncovered_elements_; + for (const SubsetIndex impacted_subset : rows[element]) { + ++num_free_elements_[impacted_subset]; + } + } else if (incremental_full_update && coverage_[element] == 1) { + // `element` will be no longer overcovered. + for (const SubsetIndex impacted_subset : rows[element]) { + if (num_non_overcovered_elements_[impacted_subset] == 0) { + // There is one element of impacted_subset which is not overcovered. + // impacted_subset has just become non-removable. 
+ DCHECK(is_redundant_[impacted_subset]); + if (is_selected_[impacted_subset]) { + new_non_removable_subsets_.push_back(impacted_subset); + } + is_redundant_[impacted_subset] = false; + } + ++num_non_overcovered_elements_[impacted_subset]; } } - DVLOG(2) << "Changing impact of subset " << subset << " from " - << marginal_impacts_[subset] << " to " << impact; - marginal_impacts_[subset] = impact; - DCHECK_LE(marginal_impacts_[subset], columns[subset].size().value()); - DCHECK_GE(marginal_impacts_[subset], 0); } - DCHECK(CheckCoverageAndMarginalImpacts(is_selected_)); + // Since subset is now deselected, there is no need + // nor meaning in adding it a list of removable or non-removable subsets. + // This is a dissymmetry with Select. + DCHECK(CheckConsistency()); } -std::vector SetCoverInvariant::ComputeSettableSubsets() const { - SubsetBoolVector subset_seen(model_->num_subsets(), false); - std::vector focus; - focus.reserve(model_->num_subsets().value()); - const SparseRowView& rows = model_->rows(); - for (ElementIndex element(0); element < rows.size(); ++element) { - if (coverage_[element] >= 1) continue; - for (const SubsetIndex subset : rows[element]) { - if (!is_selected_[subset]) continue; - if (subset_seen[subset]) continue; - subset_seen[subset] = true; - focus.push_back(subset); - } - } - DCHECK_LE(focus.size(), model_->num_subsets()); - // Testing has shown there is no gain in sorting focus. - return focus; +void SetCoverInvariant::SelectAvx512(SubsetIndex) { + LOG(FATAL) << "SelectAvx512 is not implemented"; } -std::vector SetCoverInvariant::ComputeResettableSubsets() const { - SubsetBoolVector subset_seen(model_->num_subsets(), false); - std::vector focus; - focus.reserve(model_->num_subsets().value()); - const SparseRowView& rows = model_->rows(); - for (ElementIndex element(0); element < rows.size(); ++element) { - if (coverage_[element] < 1) continue; - for (const SubsetIndex subset : rows[element]) { - if (!is_selected_[subset]) continue; - if (subset_seen[subset]) continue; - subset_seen[subset] = true; - focus.push_back(subset); - } - } - DCHECK_LE(focus.size(), model_->num_subsets()); - // Testing has shown there is no gain in sorting focus. 
- return focus; +void SetCoverInvariant::DeselectAvx512(SubsetIndex) { + LOG(FATAL) << "DeselectAvx512 is not implemented"; } SetCoverSolutionResponse SetCoverInvariant::ExportSolutionAsProto() const { SetCoverSolutionResponse message; - message.set_num_subsets(is_selected_.size().value()); + message.set_num_subsets(is_selected_.size()); Cost lower_bound = std::numeric_limits::max(); - for (SubsetIndex subset(0); subset < model_->num_subsets(); ++subset) { + for (const SubsetIndex subset : model_->SubsetRange()) { if (is_selected_[subset]) { message.add_subset(subset.value()); } @@ -402,7 +380,7 @@ void SetCoverInvariant::ImportSolutionFromProto( for (auto s : message.subset()) { is_selected_[SubsetIndex(s)] = true; } - MakeDataConsistent(); + RecomputeInvariant(); Cost cost = message.cost(); CHECK_EQ(cost, cost_); } diff --git a/ortools/algorithms/set_cover_invariant.h b/ortools/algorithms/set_cover_invariant.h index 797da2c4949..15d90f9a6a5 100644 --- a/ortools/algorithms/set_cover_invariant.h +++ b/ortools/algorithms/set_cover_invariant.h @@ -14,18 +14,36 @@ #ifndef OR_TOOLS_ALGORITHMS_SET_COVER_INVARIANT_H_ #define OR_TOOLS_ALGORITHMS_SET_COVER_INVARIANT_H_ -#include - +#include #include -#include "absl/types/span.h" +#include "absl/log/check.h" #include "ortools/algorithms/set_cover.pb.h" #include "ortools/algorithms/set_cover_model.h" +#include "ortools/base/logging.h" namespace operations_research { -using SubsetCountVector = glop::StrictITIVector; -using SubsetBoolVector = glop::StrictITIVector; +// A helper class used to store the decisions made during a search. +class SetCoverDecision { + public: + SetCoverDecision() : decision_(0) {} + + SetCoverDecision(SubsetIndex subset, bool value) { + static_assert(sizeof(subset) == sizeof(decision_)); + DCHECK_GE(subset.value(), 0); + decision_ = value ? subset.value() : ~subset.value(); + } + + SubsetIndex subset() const { + return SubsetIndex(decision_ >= 0 ? decision_ : ~decision_); + } + + bool decision() const { return decision_ >= 0; } + + private: + BaseInt decision_; +}; // SetCoverInvariant does the bookkeeping for a solution to the // SetCoverModel passed as argument. @@ -37,101 +55,141 @@ using SubsetBoolVector = glop::StrictITIVector; // for an explanation of the terminology. // // A SetCoverInvariant is (relatively) small: -// is_selected_, a partial solution, vector of Booleans of size #subsets. +// is_selected_: a partial solution, vector of Booleans of size #subsets. // From this, the following can be computed: -// coverage_, the number of times an elememt is covered; -// marginal_impacts_, the number of elements of a subset still uncovered; -// is_removable_, whether a subset can be removed from the solution. -// Note that is_removable_[subset] implies is_selected_[subset], and thus -// (is_removable_[subset] <= is_selected_[subset]) == true. +// coverage_ : number of times an element is covered; +// num_free_elements_: number of elements in a subset that are uncovered. +// num_non_overcovered_elements_: the number of elements of a subset that +// are covered 1 time or less (not overcovered) in the current solution; +// is_redundant_, whether a subset can be removed from the solution. +// is_redundant_[subset] == (num_non_overcovered_elements_[subet] == 0). + class SetCoverInvariant { public: // Constructs an empty weighted set covering solver state. - // The model may not change after the ledger was built. + // The model may not change after the invariant was built. 
explicit SetCoverInvariant(SetCoverModel* m) : model_(m) { Initialize(); } // Initializes the solver once the data is set. The model cannot be changed // afterwards. void Initialize(); - // Recomputes all the invariants for the current solution. - void MakeDataConsistent() { - cost_ = ComputeCost(is_selected_); - coverage_ = ComputeCoverage(is_selected_); - is_removable_ = ComputeIsRemovable(coverage_); - marginal_impacts_ = ComputeMarginalImpacts(coverage_); - num_elements_covered_ = ComputeNumElementsCovered(coverage_); + void Clear() { + is_selected_.assign(model_->num_subsets(), false); + RecomputeInvariant(); } + // Recomputes all the invariants for the current solution. + void RecomputeInvariant(); + // Returns the weighted set covering model to which the state applies. SetCoverModel* model() const { return model_; } + const SetCoverModel* const_model() const { return model_; } + // Returns the cost of current solution. Cost cost() const { return cost_; } + // Returns the number of uncovered elements. + BaseInt num_uncovered_elements() const { return num_uncovered_elements_; } + // Returns the subset assignment vector. const SubsetBoolVector& is_selected() const { return is_selected_; } // Returns vector containing the number of elements in each subset that are // not covered in the current solution. - const SubsetToElementVector& marginal_impacts() const { - return marginal_impacts_; + const SubsetToIntVector& num_free_elements() const { + return num_free_elements_; } + // Returns the vector of numbers of free or exactly covered elements for + // each subset. + const SubsetToIntVector& num_coverage_le_1_elements() const { + return num_non_overcovered_elements_; + } // Returns vector containing number of subsets covering each element. - const ElementToSubsetVector& coverage() const { return coverage_; } + const ElementToIntVector& coverage() const { return coverage_; } + + // Returns a vector containing the number of subsets within `focus` covering + // each element. Subsets that are without `focus` are not considered. + ElementToIntVector ComputeCoverageInFocus( + absl::Span focus) const; // Returns vector of Booleans telling whether each subset can be removed from // the solution. - const SubsetBoolVector& is_removable() const { return is_removable_; } - - // Returns the number of elements covered. - ElementIndex num_elements_covered() const { return num_elements_covered_; } + const SubsetBoolVector& is_redundant() const { return is_redundant_; } - // Stores the solution and recomputes the data in the ledger. - void LoadSolution(const SubsetBoolVector& c); + // Returns the vector of the decisions which has led to the current solution. + const std::vector& trace() const { return trace_; } - // Returns true if the data stored in the ledger is consistent. - bool CheckConsistency() const; - - // Computes is_removable_ from scratch for every subset. - // TODO(user): reconsider exposing this. - void RecomputeIsRemovable() { is_removable_ = ComputeIsRemovable(coverage_); } - - // Returns the subsets that share at least one element with subset. - // TODO(user): is it worth to precompute this? - std::vector ComputeImpactedSubsets(SubsetIndex subset) const; + // Clears the trace. + void ClearTrace() { trace_.clear(); } - // Toggles is_selected_[subset] to value, and incrementally updates the - // ledger. - // Returns a vector of subsets impacted by the change, in case they need - // to be reconsidered in a solution geneator or a local search algorithm. 
- // Calls UnsafeToggle, with the added checks: - // If value is true, DCHECKs that subset is removable. - // If value is true, DCHECKs that marginal impact of subset is removable. - std::vector Toggle(SubsetIndex subset, bool value); + // Clear the removability information. + void ClearRemovabilityInformation() { + new_removable_subsets_.clear(); + new_non_removable_subsets_.clear(); + } - // Same as Toggle, with less DCHECKS. - // Useful for some meta-heuristics that allow to go through infeasible - // solutions. - // Only checks that value is different from is_selected_[subset]. - std::vector UnsafeToggle(SubsetIndex subset, bool value); + // Returns the subsets that become removable after the last update. + const std::vector& new_removable_subsets() const { + return new_removable_subsets_; + } - // Update coverage_ for subset when setting is_selected_[subset] to value. - void UpdateCoverage(SubsetIndex subset, bool value); + // Returns the subsets that become non removable after the last update. + const std::vector& new_non_removable_subsets() const { + return new_non_removable_subsets_; + } - // Returns true if the elements selected in the current solution cover all - // the elements of the set. - bool CheckSolution() const; + // Compresses the trace so that: + // - each subset appears only once, + // - there are only "positive" decisions. + // This trace is equivalent to the original trace in the sense that the cost + // and the covered elements are the same. + // This can be used to recover the solution by indices after local search. + void CompressTrace(); - // Checks that coverage_ and marginal_impacts_ are consistent with choices. - bool CheckCoverageAndMarginalImpacts(const SubsetBoolVector& choices) const; + // Loads the solution and recomputes the data in the invariant. + void LoadSolution(const SubsetBoolVector& solution); - // Returns the subsets that are unused that could be used to cover the still - // uncovered subsets. - std::vector ComputeSettableSubsets() const; + // Returns true if the data stored in the invariant is consistent. + // The body of the function will CHECK-fail the first time an inconsistency + // is encountered. + bool CheckConsistency() const; - std::vector ComputeResettableSubsets() const; + // Returns true if the subset is redundant within the current solution, i.e. + // when all its elements are already covered twice. Note that the set need + // not be selected for this to happen. + // TODO(user): Implement this using AVX-512? + bool ComputeIsRedundant(SubsetIndex subset) const; + + // Updates the invariant fully, so that is_redundant_ can be updated + // incrementally later with SelectAndFullyUpdate and + // DeselectSelectAndFullyUpdate. + void MakeFullyUpdated(); + + // Flips is_selected_[subset] to its negation, by calling Select or Deselect + // depending on value. Updates the invariant incrementally. + // FlipAndFullyUpdate performs a full incremental update of the invariant, + // including num_non_overcovered_elements_, is_redundant_, + // new_removable_subsets_, new_non_removable_subsets_. This is useful for some + // meta-heuristics. + void Flip(SubsetIndex subset) { Flip(subset, false); } + void FlipAndFullyUpdate(SubsetIndex subset) { Flip(subset, true); } + + // Includes subset in the solution by setting is_selected_[subset] to true + // and incrementally updating the invariant. + // SelectAndFullyUpdate also updates the invariant in a more thorough way as + // explained with FlipAndFullyUpdate. 
+ void Select(SubsetIndex subset) { Select(subset, false); } + void SelectAndFullyUpdate(SubsetIndex subset) { Select(subset, true); } + + // Excludes subset from the solution by setting is_selected_[subset] to false + // and incrementally updating the invariant. + // DeselectAndFullyUpdate also updates the invariant in a more thorough way as + // explained with FlipAndFullyUpdate. + void Deselect(SubsetIndex subset) { Deselect(subset, false); } + void DeselectAndFullyUpdate(SubsetIndex subset) { Deselect(subset, true); } // Returns the current solution as a proto. SetCoverSolutionResponse ExportSolutionAsProto() const; @@ -140,45 +198,49 @@ class SetCoverInvariant { void ImportSolutionFromProto(const SetCoverSolutionResponse& message); private: - // Recomputes the cost from scratch from c. - Cost ComputeCost(const SubsetBoolVector& c) const; - - // Computes is_removable based on a coverage cvrg. - SubsetBoolVector ComputeIsRemovable(const ElementToSubsetVector& cvrg) const; - - // Computes marginal impacts based on a coverage cvrg. - SubsetToElementVector ComputeMarginalImpacts( - const ElementToSubsetVector& cvrg) const; - - // Updates marginal_impacts_ for each subset in impacted_subsets. - void UpdateMarginalImpacts(absl::Span impacted_subsets); - - // Computes the number of elements covered based on coverage vector 'cvrg'. - ElementIndex ComputeNumElementsCovered( - const ElementToSubsetVector& cvrg) const; - - // Returns true if subset can be removed from the solution, i.e. it is - // redundant to cover all the elements. - // This function is used to check that is_removable[subset] is consistent. - bool ComputeIsRemovable(SubsetIndex subset) const; - - // Updates is_removable_ for each subset in impacted_subsets. - void UpdateIsRemovable(absl::Span impacted_subsets); - - // Returns the number of elements currently covered by subset. - ElementToSubsetVector ComputeSingleSubsetCoverage(SubsetIndex subset) const; - - // Returns a vector containing the number of subsets covering each element. - ElementToSubsetVector ComputeCoverage(const SubsetBoolVector& choices) const; - - // Checks that the value of coverage_ is correct by recomputing and comparing. - bool CheckSingleSubsetCoverage(SubsetIndex subset) const; - - // Checks that coverage_ is consistent with choices. - bool CheckCoverageAgainstSolution(const SubsetBoolVector& choices) const; - - // Returns true if is_removable_ is consistent. - bool CheckIsRemovable() const; + // Computes the cost and the coverage vector for the given choices. + // Temporarily uses |E| BaseInts. + std::tuple ComputeCostAndCoverage( + const SubsetBoolVector& choices) const; + + // Computes the global number of uncovered elements and the + // vector containing the number of free elements for each subset from + // a coverage vector. + // Temporarily uses |S| BaseInts. + std::tuple // Vector of number of free elements. + ComputeNumUncoveredAndFreeElements(const ElementToIntVector& cvrg) const; + + // Computes the vector containing the number of non-overcovered elements per + // subset and the Boolean vector telling whether a subset is redundant w.r.t. + // the current solution. + // Temporarily uses |S| BaseInts. + std::tuple // Redundancy for each of the subsets. + ComputeNumNonOvercoveredElementsAndIsRedundant( + const ElementToIntVector& cvrg) const; + + // Flips is_selected_[subset] to its negation, by calling Select or Deselect + // depending on value. Updates the invariant incrementally. 
+ // When incremental_full_update is true, the following fields are also + // updated: num_non_overcovered_elements_, is_redundant_, + // new_removable_subsets_, new_non_removable_subsets_. This is useful for some + // meta-heuristics. + void Flip(SubsetIndex, bool incremental_full_update); + + // Sets is_selected_[subset] to true and incrementally updates the invariant. + // Parameter incremental_full_update has the same meaning as with Flip. + void Select(SubsetIndex subset, bool incremental_full_update); + + // Sets is_selected_[subset] to false and incrementally updates the invariant. + // Parameter incremental_full_update has the same meaning as with Flip. + void Deselect(SubsetIndex subset, bool incremental_full_update); + + // Helper function for Select when AVX-512 is supported by the processor. + void SelectAvx512(SubsetIndex subset); + + // Helper function for Deselect when AVX-512 is supported by the processor. + void DeselectAvx512(SubsetIndex subset); // The weighted set covering model on which the solver is run. SetCoverModel* model_; @@ -186,23 +248,56 @@ class SetCoverInvariant { // Current cost. Cost cost_; - // The number of elements covered in the current solution. - ElementIndex num_elements_covered_; + // The number of uncovered (or free) elements in the current solution. + BaseInt num_uncovered_elements_; // Current assignment. + // Takes |S| bits. SubsetBoolVector is_selected_; - // The marginal impact of a subset is the number of elements in that subset - // that are not covered in the current solution. - SubsetToElementVector marginal_impacts_; + // A trace of the decisions, i.e. a list of decisions (subset, Boolean) that + // lead to the current solution. + // Takes at most |S| BaseInts. + std::vector trace_; // The coverage of an element is the number of used subsets which contains // the said element. - ElementToSubsetVector coverage_; - - // True if the subset can be removed from the solution without making it - // infeasible. - SubsetBoolVector is_removable_; + // Takes |E| BaseInts + ElementToIntVector coverage_; + + // A vector that for each subset gives the number of free elements, i.e. + // elements whose coverage is 0. + // problem. + // Takes |S| BaseInts. + SubsetToIntVector num_free_elements_; + + // Counts the number of free or exactly covered elements, i.e. whose coverage + // is 0 or 1. + // Takes at most |S| BaseInts. (More likely a few percent of that). + SubsetToIntVector num_non_overcovered_elements_; + + // True if the subset is redundant, i.e. can be removed from the solution + // without making it infeasible. + // Takes |S| bits. + SubsetBoolVector is_redundant_; + + // Subsets that become removable after the last update. + // Takes at most |S| BaseInts. (More likely a few percent of that). + std::vector new_removable_subsets_; + + // Subsets that become non removable after the last update. + // Takes at most |S| BaseInts. (More likely a few percent of that). + std::vector new_non_removable_subsets_; + + // Denotes whether is_redundant_ and num_non_overcovered_elements_ have been + // updated. Initially true, it becomes false as soon as Flip, + // Select and Deselect are called with incremental_full_update = false. The + // fully updated status can be achieved again with a call to FullUpdate(), + // which can be expensive, + bool is_fully_updated_; + + // True if the CPU supports the AVX-512 instruction set. 
+ bool supports_avx512_; }; } // namespace operations_research diff --git a/ortools/algorithms/set_cover_lagrangian.cc b/ortools/algorithms/set_cover_lagrangian.cc new file mode 100644 index 00000000000..18dea932dfd --- /dev/null +++ b/ortools/algorithms/set_cover_lagrangian.cc @@ -0,0 +1,520 @@ +// Copyright 2010-2024 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "ortools/algorithms/set_cover_lagrangian.h" + +#include +#include +#include +#include +#include + +#include "absl/log/check.h" +#include "ortools/algorithms/adjustable_k_ary_heap.h" +#include "ortools/algorithms/set_cover_invariant.h" +#include "ortools/algorithms/set_cover_model.h" +#include "ortools/base/threadpool.h" + +namespace operations_research { + +// Notes from a discussion with Luca Accorsi (accorsi@) and Francesco Cavaliere +// regarding [1]: +// - the 3-phase algorithm in the paper actually uses pricing (which would +// better be called "partial" pricing), +// - the columns that were used in the preceding solutions should be fixed, +// because otherwise it diversifies too much and degrades the best solution +// (under "queue" in the paper). +// - the median algorithm is already in the STL (nth_element). + +// Denoted as u in [1], it is a dual vector: a column vector of nonnegative +// (zero is included) multipliers for the different constraints. +// A deterministic way to compute a feasible (non-optimal) u: +// For all element indices i, u_i = min {j \in J_i} c_j / |I_j|, where +// |I_j| denotes the number of elements covered by subset j. +// +// Concerning the fundamental ideas behind this approach, one may refer to [2]. +ElementCostVector SetCoverLagrangian::InitializeLagrangeMultipliers() const { + ElementCostVector multipliers(model_.num_elements(), + std::numeric_limits::infinity()); + SubsetCostVector marginal_costs(model_.num_subsets()); + // TODO(user): Parallelize this. + for (const SubsetIndex subset : model_.SubsetRange()) { + marginal_costs[subset] = + model_.subset_costs()[subset] / model_.columns()[subset].size(); + } + // TODO(user): Parallelize this. + for (const ElementIndex element : model_.ElementRange()) { + // Minimum marginal cost to cover this element. + Cost min_marginal_cost = std::numeric_limits::infinity(); + const SparseRowView& rows = model_.rows(); + // TODO(user): use std::min_element on rows[element] with a custom + // comparator that gets marginal_costs[subset]. Check performance. + for (const SubsetIndex subset : rows[element]) { + min_marginal_cost = std::min(min_marginal_cost, marginal_costs[subset]); + } + multipliers[element] = min_marginal_cost; + } + return multipliers; +} + +namespace { +// Computes the scalar product between a column and a vector of duals. +// Profiling has shown that this is where most of the time is spent. +// TODO(user): make this visible to other algorithms. +// TODO(user): Investigate. 
+Cost ScalarProduct(const SparseColumn& column, const ElementCostVector& dual) {
+  Cost result = 0.0;
+  for (ColumnEntryIndex pos(0); pos.value() < column.size(); ++pos) {
+    result += dual[column[pos]];
+  }
+  return result;
+}
+
+// Computes the reduced costs for a subset of subsets.
+// This is a helper function for ParallelComputeReducedCosts().
+// It is called on a slice of subsets, defined by start and end.
+// The reduced costs are computed using the multipliers vector.
+// The columns of the subsets are given by the columns view.
+// The result is stored in reduced_costs.
+void FillReducedCostsSlice(SubsetIndex start, SubsetIndex end,
+                           const SubsetCostVector& costs,
+                           const ElementCostVector& multipliers,
+                           const SparseColumnView& columns,
+                           SubsetCostVector* reduced_costs) {
+  for (SubsetIndex subset = start; subset < end; ++subset) {
+    (*reduced_costs)[subset] =
+        costs[subset] - ScalarProduct(columns[subset], multipliers);
+  }
+}
+}  // namespace
+
+// Computes the reduced costs for all subsets in parallel using ThreadPool.
+SubsetCostVector SetCoverLagrangian::ParallelComputeReducedCosts(
+    const SubsetCostVector& costs,
+    const ElementCostVector& multipliers) const {
+  const SubsetIndex num_subsets(model_.num_subsets());
+  // TODO(user): compute a close-to-optimal k-subset partitioning.
+  const SubsetIndex block_size =
+      SubsetIndex(1) + num_subsets / num_threads_;  // [***] Arbitrary choice.
+  const SparseColumnView& columns = model_.columns();
+  SubsetCostVector reduced_costs(num_subsets);
+  ThreadPool thread_pool("ParallelComputeReducedCosts", num_threads_);
+  thread_pool.StartWorkers();
+  {
+    // TODO(user): check how costly it is to create a new ThreadPool.
+    // TODO(user): use a queue of subsets to process instead of a fixed range?
+
+    // This parallelization is not very efficient, because all the threads
+    // use the same costs vector. Maybe it should be local to the thread.
+    // It's unclear whether sharing columns and costs is better than having
+    // each thread use its own partial copy.
+    // Finally, it might be better to use a queue of subsets to process,
+    // instead of a fixed range.
+    for (SubsetIndex start(0); start < num_subsets; start += block_size) {
+      thread_pool.Schedule([start, block_size, num_subsets, &costs,
+                            &multipliers, &columns, &reduced_costs]() {
+        const SubsetIndex end = std::min(start + block_size, num_subsets);
+        FillReducedCostsSlice(start, end, costs, multipliers, columns,
+                              &reduced_costs);
+      });
+    }
+  }  // Synchronize all the threads. This is equivalent to a wait.
+  return reduced_costs;
+}
+
+// Reduced cost (row vector). Denoted as c_j(u) in [1], right after equation
+// (5). For a subset j, c_j(u) = c_j - sum_{i \in I_j} u_i. I_j is the set of
+// indices for elements in subset j. For a general Integer Program A.x <= b,
+// this would be:
+// c_j(u) = c_j - sum_{i \in I_j} a_{ij}.u_i
+SubsetCostVector SetCoverLagrangian::ComputeReducedCosts(
+    const SubsetCostVector& costs,
+    const ElementCostVector& multipliers) const {
+  const SparseColumnView& columns = model_.columns();
+  SubsetCostVector reduced_costs(costs.size());
+  FillReducedCostsSlice(SubsetIndex(0), SubsetIndex(reduced_costs.size()),
+                        costs, multipliers, columns, &reduced_costs);
+  return reduced_costs;
+}
+
+namespace {
+// Helper function to compute the subgradient.
+// It fills a slice of the subgradient vector, from indices start to end.
+// This is a helper function for ParallelComputeSubgradient().
+// The subgradient is computed using the reduced costs vector.
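+// For illustration (hypothetical numbers): if the reduced costs are
+// c(u) = (-1.0, 2.0, -3.0), then x_0(u) = x_2(u) = 1, and every element in
+// subsets 0 and 2 has its subgradient component decremented once per such
+// subset; an element contained in both ends up with s_i(u) = 1 - 2 = -1,
+// i.e. it would be overcovered by x(u).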
+void FillSubgradientSlice(SubsetIndex start, SubsetIndex end,
+                          const SparseColumnView& columns,
+                          const SubsetCostVector& reduced_costs,
+                          ElementCostVector* subgradient) {
+  for (SubsetIndex subset(start); subset < end; ++subset) {
+    if (reduced_costs[subset] < 0.0) {
+      for (const ElementIndex element : columns[subset]) {
+        (*subgradient)[element] -= 1.0;
+      }
+    }
+  }
+}
+}  // namespace
+
+// Vector of primal slack variables. Denoted as s_i(u) in [1], equation (6).
+// For all element indices i, s_i(u) = 1 - sum_{j \in J_i} x_j(u),
+// where J_i denotes the set of indices of subsets j covering element i.
+// For a general Integer Program A x <= b, the subgradient cost vector is
+// defined as A x - b. See [2].
+ElementCostVector SetCoverLagrangian::ComputeSubgradient(
+    const SubsetCostVector& reduced_costs) const {
+  // NOTE(user): Should the initialization be done with coverage[element]?
+  ElementCostVector subgradient(model_.num_elements(), 1.0);
+  FillSubgradientSlice(SubsetIndex(0), SubsetIndex(reduced_costs.size()),
+                       model_.columns(), reduced_costs, &subgradient);
+  return subgradient;
+}
+
+ElementCostVector SetCoverLagrangian::ParallelComputeSubgradient(
+    const SubsetCostVector& reduced_costs) const {
+  const SubsetIndex num_subsets(model_.num_subsets());
+  const SubsetIndex block_size =
+      SubsetIndex(1) + num_subsets / num_threads_;  // [***]
+  const SparseColumnView& columns = model_.columns();
+  ElementCostVector subgradient(model_.num_elements(), 1.0);
+  // The subgradient has one component per element, and each thread processes
+  // several subsets. Hence, have one vector per thread to avoid data races.
+  // TODO(user): it may be better to split the elements among the threads,
+  // although this might be less well-balanced.
+  std::vector<ElementCostVector> subgradients(
+      num_threads_, ElementCostVector(model_.num_elements()));
+  ThreadPool thread_pool("ParallelComputeSubgradient", num_threads_);
+  thread_pool.StartWorkers();
+  {
+    int thread_index = 0;
+    for (SubsetIndex start(0); start < num_subsets;
+         start += block_size, ++thread_index) {
+      thread_pool.Schedule([start, block_size, num_subsets, &reduced_costs,
+                            &columns, &subgradients, thread_index]() {
+        const SubsetIndex end = std::min(start + block_size, num_subsets);
+        FillSubgradientSlice(start, end, columns, reduced_costs,
+                             &subgradients[thread_index]);
+      });
+    }
+  }  // Synchronize all the threads.
+  for (int thread_index = 0; thread_index < num_threads_; ++thread_index) {
+    for (const ElementIndex element : model_.ElementRange()) {
+      subgradient[element] += subgradients[thread_index][element];
+    }
+  }
+  return subgradient;
+}
+
+namespace {
+// Helper function to compute the value of the Lagrangian.
+// This is a helper function for ParallelComputeLagrangianValue().
+// It is called on a slice of subsets, defined by start and end.
+// The value of the Lagrangian is computed using the reduced costs vector and
+// the multipliers vector.
+// The result is stored in lagrangian_value.
+void FillLagrangianValueSlice(SubsetIndex start, SubsetIndex end,
+                              const SubsetCostVector& reduced_costs,
+                              Cost* lagrangian_value) {
+  // This is min \sum_{j \in N} c_j(u) x_j. This captures the remark below
+  // (**), taking into account the possible values for x_j, and using them to
+  // minimize the terms.
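+  // For illustration (hypothetical numbers): with reduced costs
+  // (-2.0, 1.0, -0.5) in the slice, only the negative entries contribute,
+  // so *lagrangian_value decreases by 2.5 (x_0 = x_2 = 1, x_1 = 0).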
+  for (SubsetIndex subset(start); subset < end; ++subset) {
+    if (reduced_costs[subset] < 0.0) {
+      *lagrangian_value += reduced_costs[subset];
+    }
+  }
+}
+}  // namespace
+
+// Computes the (scalar) value of the Lagrangian vector by fixing the value of
+// x_j based on the sign of c_j(u). In [1] equation (4), it is:
+// L(u) = min \sum_{j \in N} c_j(u) x_j + \sum_{i \in M} u_i. This is obtained:
+// - if c_j(u) < 0: x_j(u) = 1,
+// - if c_j(u) > 0: x_j(u) = 0,  (**)
+// - if c_j(u) = 0: x_j(u) is unbound, in {0, 1}; we use 0.
+// For a general Integer Program A x <= b, the Lagrangian vector L(u) [2] is
+// L(u) = min \sum_{j \in N} c_j(u) x_j + \sum_{i \in M} b_i.u_i.
+Cost SetCoverLagrangian::ComputeLagrangianValue(
+    const SubsetCostVector& reduced_costs,
+    const ElementCostVector& multipliers) const {
+  Cost lagrangian_value = 0.0;
+  // This is \sum_{i \in M} u_i.
+  for (const Cost u : multipliers) {
+    lagrangian_value += u;
+  }
+  FillLagrangianValueSlice(SubsetIndex(0), SubsetIndex(reduced_costs.size()),
+                           reduced_costs, &lagrangian_value);
+  return lagrangian_value;
+}
+
+Cost SetCoverLagrangian::ParallelComputeLagrangianValue(
+    const SubsetCostVector& reduced_costs,
+    const ElementCostVector& multipliers) const {
+  const SubsetIndex num_subsets(model_.num_subsets());
+  const SubsetIndex block_size =
+      SubsetIndex(1) + num_subsets / num_threads_;  // [***] Arbitrary.
+  Cost lagrangian_value = 0.0;
+  // This is \sum_{i \in M} u_i.
+  for (const Cost u : multipliers) {
+    lagrangian_value += u;
+  }
+  std::vector<Cost> lagrangian_values(num_threads_, 0.0);
+  ThreadPool thread_pool("ParallelComputeLagrangianValue", num_threads_);
+  thread_pool.StartWorkers();
+  {
+    int thread_index = 0;
+    for (SubsetIndex start(0); start < num_subsets; start += block_size) {
+      thread_pool.Schedule([start, block_size, num_subsets, thread_index,
+                            &reduced_costs, &lagrangian_values]() {
+        const SubsetIndex end = std::min(start + block_size, num_subsets);
+        FillLagrangianValueSlice(start, end, reduced_costs,
+                                 &lagrangian_values[thread_index]);
+      });
+      ++thread_index;
+    }
+  }  // Synchronize all the threads.
+  for (const Cost l : lagrangian_values) {
+    lagrangian_value += l;
+  }
+  return lagrangian_value;
+}
+
+// Performs a subgradient step.
+// In the general case, for an Integer Program A.x <= b, the Lagrangian
+// multipliers vector at step k+1 is defined as: u^{k+1} = u^k + t_k (A x^k - b)
+// with the term t_k = lambda_k * (UB - L(u^k)) / |A x^k - b|^2, where
+// |.| is the 2-norm (i.e. Euclidean).
+// In our case, the problem A x <= b is in the form A x >= 1. We need to
+// replace A x - b by s_i(u) = 1 - sum_{j \in J_i} x_j(u).
+// |A x^k - b|^2 = |s(u)|^2, and t_k is of the form:
+// t_k = lambda_k * (UB - L(u^k)) / |s^k(u)|^2.
+// Now, the coordinates of the multiplier vectors u^k, u^k_i are nonnegative,
+// i.e. u^k_i >= 0. Negative values are simply cut off. Following [3], each of
+// the coordinates is defined as: u^{k+1}_i =
+// max(u^k_i + lambda_k * (UB - L(u^k)) / |s^k(u)|^2 * s^k_i(u), 0).
+// This is eq. (7) in [1].
+void SetCoverLagrangian::UpdateMultipliers(
+    double step_size, Cost lagrangian_value, Cost upper_bound,
+    const SubsetCostVector& reduced_costs,
+    ElementCostVector* multipliers) const {
+  // step_size is \lambda_k in [1].
+  DCHECK_GT(step_size, 0);
+  // Compute the square of the Euclidean norm of the subgradient vector.
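+  // That is, |s(u)|^2 = sum_i s_i(u)^2, the denominator of the step t_k in
+  // eq. (7) of [1].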
+  const ElementCostVector subgradient = ComputeSubgradient(reduced_costs);
+  Cost subgradient_square_norm = 0.0;
+  for (const Cost x : subgradient) {
+    subgradient_square_norm += x * x;
+  }
+  // First compute lambda_k * (UB - L(u^k)).
+  const Cost factor =
+      step_size * (upper_bound - lagrangian_value) / subgradient_square_norm;
+  for (const ElementIndex element : model_.ElementRange()) {
+    // Prevent the multipliers from going negative or through the roof.
+    // 1e6 chosen arbitrarily. [***]
+    (*multipliers)[element] = std::clamp(
+        (*multipliers)[element] + factor * subgradient[element], 0.0, 1e6);
+  }
+}
+
+void SetCoverLagrangian::ParallelUpdateMultipliers(
+    double step_size, Cost lagrangian_value, Cost upper_bound,
+    const SubsetCostVector& reduced_costs,
+    ElementCostVector* multipliers) const {
+  // step_size is \lambda_k in [1].
+  DCHECK_GT(step_size, 0);
+  // Compute the square of the Euclidean norm of the subgradient vector.
+  const ElementCostVector subgradient =
+      ParallelComputeSubgradient(reduced_costs);
+  Cost subgradient_square_norm = 0.0;
+  for (const Cost x : subgradient) {
+    subgradient_square_norm += x * x;
+  }
+  // First compute lambda_k * (UB - L(u^k)).
+  const Cost factor =
+      step_size * (upper_bound - lagrangian_value) / subgradient_square_norm;
+  for (const ElementIndex element : model_.ElementRange()) {
+    // Prevent the multipliers from going negative or through the roof.
+    // 1e6 chosen arbitrarily. [***]
+    (*multipliers)[element] = std::clamp(
+        (*multipliers)[element] + factor * subgradient[element], 0.0, 1e6);
+  }
+}
+
+Cost SetCoverLagrangian::ComputeGap(
+    const SubsetCostVector& reduced_costs, const SubsetBoolVector& solution,
+    const ElementCostVector& multipliers) const {
+  Cost gap = 0.0;
+  // TODO(user): Parallelize this, if need be.
+  for (const SubsetIndex subset : model_.SubsetRange()) {
+    if (solution[subset] && reduced_costs[subset] > 0.0) {
+      gap += reduced_costs[subset];
+    } else if (!solution[subset] && reduced_costs[subset] < 0.0) {
+      // gap += std::abs(reduced_costs[subset]); but we know the sign of the
+      // right-hand side, so:
+      gap -= reduced_costs[subset];
+    }
+  }
+  const ElementToIntVector& coverage = inv_->coverage();
+  for (const ElementIndex element : model_.ElementRange()) {
+    gap += (coverage[element] - 1) * multipliers[element];
+  }
+  return gap;
+}
+
+SubsetCostVector SetCoverLagrangian::ComputeDelta(
+    const SubsetCostVector& reduced_costs,
+    const ElementCostVector& multipliers) const {
+  SubsetCostVector delta(model_.num_subsets());
+  const ElementToIntVector& coverage = inv_->coverage();
+  // This is definition (9) in [1].
+  const SparseColumnView& columns = model_.columns();
+  // TODO(user): Parallelize this.
+  for (const SubsetIndex subset : model_.SubsetRange()) {
+    delta[subset] = std::max(reduced_costs[subset], 0.0);
+    for (const ElementIndex element : columns[subset]) {
+      const double size = coverage[element];
+      delta[subset] += multipliers[element] * (size - 1.0) / size;
+    }
+  }
+  return delta;
+}
+
+namespace {
+// Helper class to compute the step size for the multipliers.
+// The step size is updated every period iterations: it is multiplied by 0.5
+// when the relative change in the lower bound over the period is greater than
+// 0.01, and by 1.5 when that change is at most 0.001. The reference lower
+// bound is reset every period iterations.
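+// For example, with period = 20 and an initial step size of 0.1: if the lower
+// bound oscillates by more than 1% over the 20 iterations, the step size is
+// halved to 0.05; if it moves by at most 0.1%, it grows to 0.15. The step
+// size is always clamped to [1e-6, 10].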
+class StepSizer {
+ public:
+  StepSizer(int period, double step_size)
+      : period_(period), iter_to_check_(period), step_size_(step_size) {
+    ResetBounds();
+  }
+  double UpdateStepSize(int iter, Cost lower_bound) {
+    min_lb_ = std::min(min_lb_, lower_bound);
+    max_lb_ = std::max(max_lb_, lower_bound);
+    if (iter == iter_to_check_) {
+      iter_to_check_ += period_;
+      // Bounds can be negative, so we need to take the absolute value.
+      // We also need to avoid division by zero. We decide to return 0.0 in
+      // that case, which is the same as not updating the step size.
+      const Cost lb_gap =
+          max_lb_ == 0.0 ? 0.0 : (max_lb_ - min_lb_) / std::abs(max_lb_);
+      DCHECK_GE(lb_gap, 0.0);
+      if (lb_gap > 0.01) {
+        step_size_ *= 0.5;
+      } else if (lb_gap <= 0.001) {
+        step_size_ *= 1.5;
+      }
+      step_size_ = std::clamp(step_size_, 1e-6, 10.0);
+      ResetBounds();
+    }
+    return step_size_;
+  }
+
+ private:
+  void ResetBounds() {
+    min_lb_ = std::numeric_limits<Cost>::infinity();
+    max_lb_ = -std::numeric_limits<Cost>::infinity();
+  }
+  int period_;
+  int iter_to_check_;
+  double step_size_;
+  Cost min_lb_;
+  Cost max_lb_;
+};
+
+// Helper class to decide whether to stop the algorithm.
+// The algorithm stops when the lower bound is not updated for a certain
+// number of iterations.
+class Stopper {
+ public:
+  explicit Stopper(int period)
+      : period_(period),
+        iter_to_check_(period),
+        lower_bound_(std::numeric_limits<Cost>::min()) {}
+  bool DecideWhetherToStop(int iter, Cost lb) {
+    DCHECK_GE(lb, lower_bound_);
+    if (iter == iter_to_check_) {
+      iter_to_check_ += period_;
+      const Cost delta = lb - lower_bound_;
+      const Cost relative_delta = delta / lb;
+      DCHECK_GE(delta, 0.0);
+      DCHECK_GE(relative_delta, 0.0);
+      lower_bound_ = lb;
+      return relative_delta < 0.001 && delta < 1;
+    }
+    return false;
+  }
+
+ private:
+  int period_;
+  int iter_to_check_;
+  Cost lower_bound_;
+};
+
+}  // namespace
+
+namespace {
+// TODO(user): Add this to the file defining AdjustableKAryHeap.
+template <typename Priority, typename Index>
+class TopKHeap {
+ public:
+  explicit TopKHeap(int max_size) : heap_(), max_size_(max_size) {}
+  void Clear() { heap_.Clear(); }
+  void Add(Priority priority, Index index) {
+    if (heap_.Size() < max_size_) {
+      heap_.Add(priority, index);
+    } else if (heap_.HasPriority(priority, heap_.TopPriority())) {
+      heap_.ReplaceTop(priority, index);
+    }
+  }
+
+ private:
+  // NOTE: arity 2 and the max-heap orientation below are assumptions.
+  AdjustableKAryHeap<Priority, Index, /*Arity=*/2, /*IsMaxHeap=*/true> heap_;
+  int max_size_;
+};
+}  // namespace
+
+// Computes a lower bound on the optimal cost.
+std::tuple<Cost, SubsetCostVector, ElementCostVector>
+SetCoverLagrangian::ComputeLowerBound(const SubsetCostVector& costs,
+                                      Cost upper_bound) {
+  Cost lower_bound = 0.0;
+  ElementCostVector multipliers = InitializeLagrangeMultipliers();
+  double step_size = 0.1;               // [***] arbitrary, from [1].
+  StepSizer step_sizer(20, step_size);  // [***] arbitrary, from [1].
+  Stopper stopper(100);                 // [***] arbitrary, from [1].
+  SubsetCostVector reduced_costs(costs);
+  // For the time being, 4 threads seem to be the fastest.
+  // Running linux perf on the process shows that up to 60% of the cycles are
+  // lost as idle cycles in the CPU backend, probably because the algorithm is
+  // memory bound.
+  for (int iter = 0; iter < 1000; ++iter) {
+    reduced_costs = ParallelComputeReducedCosts(costs, multipliers);
+    const Cost lagrangian_value =
+        ComputeLagrangianValue(reduced_costs, multipliers);
+    UpdateMultipliers(step_size, lagrangian_value, upper_bound, reduced_costs,
+                      &multipliers);
+    lower_bound = std::max(lower_bound, lagrangian_value);
+    // step_size should be updated like this.
For the time being, we keep
+    // the step size constant, because the implementation of the rest is not
+    // adequate yet.
+    // step_size = step_sizer.UpdateStepSize(iter, lagrangian_value);
+    // if (stopper.DecideWhetherToStop(iter, lower_bound)) {
+    //   break;
+    // }
+  }
+  return std::make_tuple(lower_bound, reduced_costs, multipliers);
+}
+
+}  // namespace operations_research
diff --git a/ortools/algorithms/set_cover_lagrangian.h b/ortools/algorithms/set_cover_lagrangian.h
new file mode 100644
index 00000000000..aa63627ad15
--- /dev/null
+++ b/ortools/algorithms/set_cover_lagrangian.h
@@ -0,0 +1,154 @@
+// Copyright 2010-2024 Google LLC
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef OR_TOOLS_ALGORITHMS_SET_COVER_LAGRANGIAN_H_
+#define OR_TOOLS_ALGORITHMS_SET_COVER_LAGRANGIAN_H_
+
+#include <tuple>
+#include <vector>
+
+#include "ortools/algorithms/set_cover_invariant.h"
+#include "ortools/algorithms/set_cover_model.h"
+#include "ortools/base/threadpool.h"
+
+namespace operations_research {
+
+// The SetCoverLagrangian class implements the Lagrangian relaxation of the
+// set cover problem.
+
+// In the following, we refer to the following articles:
+// [1] Caprara, Alberto, Matteo Fischetti, and Paolo Toth. 1999. “A Heuristic
+// Method for the Set Covering Problem.” Operations Research 47 (5): 730–43.
+// https://www.jstor.org/stable/223097
+// [2] Fisher, Marshall L. 1981. “The Lagrangian Relaxation Method for Solving
+// Integer Programming Problems.” Management Science 27 (1): 1–18.
+// https://www.jstor.org/stable/2631139
+// [3] Held, M., Karp, R.M. The traveling-salesman problem and minimum spanning
+// trees: Part II. Mathematical Programming 1, 6–25 (1971).
+// https://link.springer.com/article/10.1007/BF01584070
+// [4] Williamson, David P. 2002. “The Primal-Dual Method for Approximation
+// Algorithms.” Mathematical Programming, 91 (3): 447–78.
+// https://link.springer.com/article/10.1007/s101070100262
+
+class SetCoverLagrangian {
+ public:
+  explicit SetCoverLagrangian(SetCoverInvariant* inv, int num_threads = 1)
+      : inv_(inv), model_(*inv->model()), num_threads_(num_threads) {}
+
+  // Returns true if a solution was found.
+  // TODO(user): Add time-outs and exit with a partial solution. This seems
+  // unlikely, though.
+  bool NextSolution();
+
+  // Computes the next partial solution considering only the subsets whose
+  // indices are in focus.
+  bool NextSolution(const std::vector<SubsetIndex>& focus);
+
+  // Initializes the multipliers vector (u) based on the cost per subset.
+  ElementCostVector InitializeLagrangeMultipliers() const;
+
+  // Computes the Lagrangian (row-)cost vector.
+  // For a subset j, c_j(u) = c_j - sum_{i \in I_j} u_i.
+  // I_j denotes the indices of elements present in subset j.
+  SubsetCostVector ComputeReducedCosts(
+      const SubsetCostVector& costs,
+      const ElementCostVector& multipliers) const;
+
+  // Same as above, but parallelized, using the number of threads specified in
+  // the constructor.
+  SubsetCostVector ParallelComputeReducedCosts(
+      const SubsetCostVector& costs,
+      const ElementCostVector& multipliers) const;
+
+  // Computes the subgradient (column-)cost vector.
+  // For all element indices i, s_i(u) = 1 - sum_{j \in J_i} x_j(u),
+  // where J_i denotes the set of indices of subsets j covering element i.
+  ElementCostVector ComputeSubgradient(
+      const SubsetCostVector& reduced_costs) const;
+
+  // Same as above, but parallelized, using the number of threads specified in
+  // the constructor.
+  ElementCostVector ParallelComputeSubgradient(
+      const SubsetCostVector& reduced_costs) const;
+
+  // Computes the value of the Lagrangian L(u).
+  // L(u) = min \sum_{j \in N} c_j(u) x_j + \sum_{i \in M} u_i.
+  // If c_j(u) < 0: x_j(u) = 1; if c_j(u) > 0: x_j(u) = 0;
+  // otherwise x_j(u) is unbound and can take any value in {0, 1}.
+  Cost ComputeLagrangianValue(const SubsetCostVector& reduced_costs,
+                              const ElementCostVector& multipliers) const;
+
+  // Same as above, but parallelized, using the number of threads specified in
+  // the constructor.
+  Cost ParallelComputeLagrangianValue(
+      const SubsetCostVector& reduced_costs,
+      const ElementCostVector& multipliers) const;
+
+  // Updates the multipliers vector (u) with the given step size.
+  // Following [3], each of the coordinates is defined as: u^{k+1}_i =
+  // max(u^k_i + lambda_k * (UB - L(u^k)) / |s^k(u)|^2 * s^k_i(u), 0).
+  // lambda_k is step_size in the function signature below. UB is upper_bound.
+  void UpdateMultipliers(double step_size, Cost lagrangian_value,
+                         Cost upper_bound,
+                         const SubsetCostVector& reduced_costs,
+                         ElementCostVector* multipliers) const;
+
+  // Same as above, but parallelized, using the number of threads specified in
+  // the constructor.
+  void ParallelUpdateMultipliers(double step_size, Cost lagrangian_value,
+                                 Cost upper_bound,
+                                 const SubsetCostVector& reduced_costs,
+                                 ElementCostVector* multipliers) const;
+
+  // Computes the gap between the current solution and the optimal solution.
+  // This is the sum of the multipliers for the elements that are not covered
+  // by the current solution.
+  Cost ComputeGap(const SubsetCostVector& reduced_costs,
+                  const SubsetBoolVector& solution,
+                  const ElementCostVector& multipliers) const;
+
+  // Performs the three-phase algorithm.
+  void ThreePhase(Cost upper_bound);
+
+  // Computes a lower bound on the optimal cost.
+  // The returned value is the lower bound, the reduced costs, and the
+  // multipliers.
+  std::tuple<Cost, SubsetCostVector, ElementCostVector> ComputeLowerBound(
+      const SubsetCostVector& costs, Cost upper_bound);
+
+ private:
+  // The invariant on which the algorithm will run.
+  SetCoverInvariant* inv_;
+
+  // The model on which the invariant is defined.
+  const SetCoverModel& model_;
+
+  // The number of threads to use for parallelization.
+  int num_threads_;
+
+  // Total (scalar) Lagrangian cost.
+  Cost lagrangian_;
+
+  // Lagrangian cost vector, per subset.
+  SubsetCostVector lagrangians_;
+
+  // Computes the delta vector.
+  // This is definition (9) in [1].
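+  // Spelled out from the implementation in the .cc file:
+  // delta_j = max(c_j(u), 0) + sum_{i \in S_j} u_i * (k_i - 1) / k_i,
+  // where k_i is the coverage of element i in the current solution.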
+  SubsetCostVector ComputeDelta(const SubsetCostVector& reduced_costs,
+                                const ElementCostVector& multipliers) const;
+};
+
+}  // namespace operations_research
+
+#endif  // OR_TOOLS_ALGORITHMS_SET_COVER_LAGRANGIAN_H_
diff --git a/ortools/algorithms/set_cover_mip.cc b/ortools/algorithms/set_cover_mip.cc
index f54a6f0c2cf..00dec965d26 100644
--- a/ortools/algorithms/set_cover_mip.cc
+++ b/ortools/algorithms/set_cover_mip.cc
@@ -14,7 +14,9 @@
 #include "ortools/algorithms/set_cover_mip.h"

 #include <cstdint>
+#include <limits>

+#include "absl/log/check.h"
 #include "absl/types/span.h"
 #include "ortools/algorithms/set_cover_invariant.h"
 #include "ortools/algorithms/set_cover_model.h"
@@ -24,14 +26,31 @@

 namespace operations_research {

+namespace {
+// Returns the vector a - b.
+ElementToIntVector Subtract(const ElementToIntVector& a,
+                            const ElementToIntVector& b) {
+  ElementToIntVector delta(a.size());
+  DCHECK_EQ(a.size(), b.size());
+  for (const ElementIndex i : a.index_range()) {
+    delta[i] = a[i] - b[i];
+  }
+  return delta;
+}
+}  // namespace
+
 template <typename IndexType, typename ValueType>
 using StrictVector = glop::StrictITIVector<IndexType, ValueType>;

-bool SetCoverMip::NextSolution() {
-  return NextSolution(inv_->model()->all_subsets());
+bool SetCoverMip::NextSolution(bool use_integers,
+                               double time_limit_in_seconds) {
+  return NextSolution(inv_->model()->all_subsets(), use_integers,
+                      time_limit_in_seconds);
 }

-bool SetCoverMip::NextSolution(absl::Span<const SubsetIndex> focus) {
+bool SetCoverMip::NextSolution(absl::Span<const SubsetIndex> focus,
+                               bool use_integers,
+                               double time_limit_in_seconds) {
   SetCoverModel* model = inv_->model();
   const SubsetIndex num_subsets(model->num_subsets());
   const ElementIndex num_elements(model->num_elements());
@@ -42,11 +61,31 @@ bool SetCoverMip::NextSolution(absl::Span<const SubsetIndex> focus) {
       problem_type = MPSolver::SCIP_MIXED_INTEGER_PROGRAMMING;
       break;
     case SetCoverMipSolver::GUROBI:
-      problem_type = MPSolver::GUROBI_MIXED_INTEGER_PROGRAMMING;
+      if (use_integers) {
+        problem_type = MPSolver::GUROBI_MIXED_INTEGER_PROGRAMMING;
+      } else {
+        problem_type = MPSolver::GUROBI_LINEAR_PROGRAMMING;
+      }
       break;
     case SetCoverMipSolver::SAT:
+      if (!use_integers) {
+        LOG(INFO) << "Defaulting to integer variables with SAT";
+        use_integers = true;
+      }
       problem_type = MPSolver::SAT_INTEGER_PROGRAMMING;
       break;
+    case SetCoverMipSolver::GLOP:
+      LOG(INFO) << "Defaulting to linear relaxation with GLOP";
+      use_integers = false;
+      problem_type = MPSolver::GLOP_LINEAR_PROGRAMMING;
+      break;
+    case SetCoverMipSolver::PDLP:
+      if (use_integers) {
+        LOG(INFO) << "Defaulting to linear relaxation with PDLP";
+        use_integers = false;
+      }
+      problem_type = MPSolver::PDLP_LINEAR_PROGRAMMING;
+      break;
     default:
       LOG(WARNING) << "Unknown solver value, defaulting to SCIP";
       problem_type = MPSolver::SCIP_MIXED_INTEGER_PROGRAMMING;
@@ -63,11 +102,16 @@ bool SetCoverMip::NextSolution(absl::Span<const SubsetIndex> focus) {
   StrictVector<ElementIndex, MPConstraint*> constraints(num_elements, nullptr);
   StrictVector<SubsetIndex, MPVariable*> vars(num_subsets, nullptr);
+  ElementToIntVector coverage_outside_focus =
+      Subtract(inv_->coverage(), inv_->ComputeCoverageInFocus(focus));
   for (const SubsetIndex subset : focus) {
-    vars[subset] = solver.MakeBoolVar("");
+    vars[subset] = solver.MakeVar(0, 1, use_integers, "");
     objective->SetCoefficient(vars[subset], model->subset_costs()[subset]);
-    for (ElementIndex element : model->columns()[subset]) {
-      if (inv_->coverage()[element] > 0) continue;
+    for (const ElementIndex element : model->columns()[subset]) {
+      // The model should only contain elements that are not forcibly covered
+      // by subsets outside the focus.
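+      // In other words: an element with nonzero coverage outside the focus is
+      // covered whatever the solver decides, so it needs no constraint; only
+      // elements with coverage_outside_focus[element] == 0 generate a row
+      // below.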
+      if (coverage_outside_focus[element] != 0) continue;
+
       if (constraints[element] == nullptr) {
         constexpr double kInfinity = std::numeric_limits<double>::infinity();
         constraints[element] = solver.MakeRowConstraint(1.0, kInfinity);
       }
       constraints[element]->SetCoefficient(vars[subset], 1);
     }
   }
   // set_time_limit takes milliseconds as a unit.
-  solver.set_time_limit(static_cast<int64_t>(time_limit_in_seconds_ * 1000));
+  solver.set_time_limit(static_cast<int64_t>(time_limit_in_seconds * 1000));

   // Call the solver.
   const MPSolver::ResultStatus solve_status = solver.Solve();
@@ -95,10 +139,14 @@
     LOG(ERROR) << "Solving resulted in an error.";
     return false;
   }
-  for (const SubsetIndex subset : focus) {
-    choices[subset] = (vars[subset]->solution_value() > 0.9);
+  if (use_integers) {
+    for (const SubsetIndex subset : focus) {
+      choices[subset] = (vars[subset]->solution_value() > 0.9);
+    }
+    inv_->LoadSolution(choices);
+  } else {
+    lower_bound_ = solver.Objective().Value();
   }
-  inv_->LoadSolution(choices);
   return true;
 }
diff --git a/ortools/algorithms/set_cover_mip.h b/ortools/algorithms/set_cover_mip.h
index 8925b9a1f81..f7a86857905 100644
--- a/ortools/algorithms/set_cover_mip.h
+++ b/ortools/algorithms/set_cover_mip.h
@@ -19,29 +19,40 @@
 #include "ortools/algorithms/set_cover_model.h"

 namespace operations_research {
-enum class SetCoverMipSolver : int { SCIP = 0, SAT = 1, GUROBI = 2 };
+enum class SetCoverMipSolver : int {
+  SCIP = 0,
+  SAT = 1,
+  GUROBI = 2,
+  GLOP = 3,
+  PDLP = 4
+};

 class SetCoverMip {
  public:
+  // Simpler constructor that uses SCIP by default.
   explicit SetCoverMip(SetCoverInvariant* inv)
-      : inv_(inv),
-        mip_solver_(SetCoverMipSolver::SCIP),
-        time_limit_in_seconds_(0.02) {}
+      : inv_(inv), mip_solver_(SetCoverMipSolver::SCIP) {}
+
+  // The constructor takes a SetCoverInvariant that will store the resulting
+  // variable choices, and a MIP solver.
+  SetCoverMip(SetCoverInvariant* inv, SetCoverMipSolver mip_solver)
+      : inv_(inv), mip_solver_(mip_solver) {}

   // Returns true if a solution was found.
+  // If use_integers is false, lower_bound_ is populated with a linear
+  // lower bound.
+  // time_limit_in_seconds is a (rather soft) time limit for the execution.
   // TODO(user): Add time-outs and exit with a partial solution. This seems
   // unlikely, though.
-  bool NextSolution();
+  bool NextSolution(bool use_integers, double time_limit_in_seconds);

   // Computes the next partial solution considering only the subsets whose
   // indices are in focus.
-  bool NextSolution(absl::Span<const SubsetIndex> focus);
-
-  void SetMipSolver(const SetCoverMipSolver mip_solver) {
-    mip_solver_ = mip_solver;
-  }
+  bool NextSolution(absl::Span<const SubsetIndex> focus, bool use_integers,
+                    double time_limit_in_seconds);

-  void SetTimeLimitInSeconds(double limit) { time_limit_in_seconds_ = limit; }
+  // Returns the lower bound of the linear relaxation of the problem.
+  double lower_bound() const { return lower_bound_; }

  private:
   // The invariant used to maintain the state of the problem.
@@ -50,7 +61,9 @@ class SetCoverMip {
   // The MIP solver flavor used by the instance.
   SetCoverMipSolver mip_solver_;

-  double time_limit_in_seconds_;
+  // The lower bound of the problem, when use_integers is false. The MIP with
+  // continuous variables becomes a computationally simpler linear program.
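+  // A minimal usage sketch (illustrative only):
+  //   SetCoverInvariant inv(&model);
+  //   SetCoverMip mip(&inv, SetCoverMipSolver::GLOP);
+  //   if (mip.NextSolution(/*use_integers=*/false,
+  //                        /*time_limit_in_seconds=*/10.0)) {
+  //     LOG(INFO) << "LP lower bound: " << mip.lower_bound();
+  //   }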
+ double lower_bound_; }; } // namespace operations_research diff --git a/ortools/algorithms/set_cover_model.cc b/ortools/algorithms/set_cover_model.cc index 81f26b78d16..f7b5faf24c4 100644 --- a/ortools/algorithms/set_cover_model.cc +++ b/ortools/algorithms/set_cover_model.cc @@ -15,84 +15,103 @@ #include #include +#include +#include +#include +#include +#include +#include #include "absl/log/check.h" #include "ortools/algorithms/set_cover.pb.h" #include "ortools/base/logging.h" -#include "ortools/lp_data/lp_types.h" // For StrictITIVector. namespace operations_research { void SetCoverModel::UpdateAllSubsetsList() { - const SubsetIndex new_size = columns_.size(); - const SubsetIndex old_size(all_subsets_.size()); - DCHECK_LE(old_size, new_size); - all_subsets_.resize(new_size.value()); - for (SubsetIndex subset(old_size); subset < new_size; ++subset) { - all_subsets_[subset.value()] = subset; + const BaseInt old_size = all_subsets_.size(); + DCHECK_LE(old_size, num_subsets()); + all_subsets_.resize(num_subsets()); + for (BaseInt subset(old_size); subset < num_subsets(); ++subset) { + all_subsets_[subset] = SubsetIndex(subset); } } void SetCoverModel::AddEmptySubset(Cost cost) { subset_costs_.push_back(cost); columns_.push_back(SparseColumn()); - const SubsetIndex num_subsets(all_subsets_.size()); - all_subsets_.push_back(num_subsets); - CHECK_EQ(all_subsets_.size(), columns_.size()); - CHECK_EQ(all_subsets_.size(), subset_costs_.size()); + all_subsets_.push_back(SubsetIndex(num_subsets_)); + ++num_subsets_; + CHECK_EQ(columns_.size(), num_subsets()); + CHECK_EQ(subset_costs_.size(), num_subsets()); + CHECK_EQ(all_subsets_.size(), num_subsets()); row_view_is_valid_ = false; } -void SetCoverModel::AddElementToLastSubset(const ElementIndex element) { - columns_.back().push_back(element); +void SetCoverModel::AddElementToLastSubset(BaseInt element) { + columns_.back().push_back(ElementIndex(element)); num_elements_ = std::max(num_elements_, element + 1); // No need to update the list all_subsets_. + ++num_nonzeros_; row_view_is_valid_ = false; } -void SetCoverModel::AddElementToLastSubset(int element) { - AddElementToLastSubset(ElementIndex(element)); +void SetCoverModel::AddElementToLastSubset(ElementIndex element) { + AddElementToLastSubset(element.value()); } -void SetCoverModel::SetSubsetCost(int subset, Cost cost) { +void SetCoverModel::SetSubsetCost(BaseInt subset, Cost cost) { CHECK(std::isfinite(cost)); DCHECK_GE(subset, 0); - const SubsetIndex subset_index(subset); - const SubsetIndex num_subsets = columns_.size(); - const SubsetIndex new_size = std::max(num_subsets, subset_index + 1); - columns_.resize(new_size, SparseColumn()); - subset_costs_.resize(new_size, 0.0); - subset_costs_[subset_index] = cost; + num_subsets_ = std::max(num_subsets_, subset + 1); + columns_.resize(num_subsets_, SparseColumn()); + subset_costs_.resize(num_subsets_, 0.0); + subset_costs_[SubsetIndex(subset)] = cost; UpdateAllSubsetsList(); row_view_is_valid_ = false; // Probably overkill, but better safe than sorry. 
 }

-void SetCoverModel::AddElementToSubset(int element, int subset) {
-  const SubsetIndex subset_index(subset);
-  const SubsetIndex new_size = std::max(columns_.size(), subset_index + 1);
-  subset_costs_.resize(new_size, 0.0);
-  columns_.resize(new_size, SparseColumn());
+void SetCoverModel::SetSubsetCost(SubsetIndex subset, Cost cost) {
+  SetSubsetCost(subset.value(), cost);
+}
+
+void SetCoverModel::AddElementToSubset(BaseInt element, BaseInt subset) {
+  num_subsets_ = std::max(num_subsets_, subset + 1);
+  subset_costs_.resize(num_subsets_, 0.0);
+  columns_.resize(num_subsets_, SparseColumn());
   UpdateAllSubsetsList();
-  const ElementIndex new_element(element);
-  columns_[subset_index].push_back(new_element);
-  num_elements_ = std::max(num_elements_, new_element + 1);
+  columns_[SubsetIndex(subset)].push_back(ElementIndex(element));
+  num_elements_ = std::max(num_elements_, element + 1);
+  ++num_nonzeros_;
   row_view_is_valid_ = false;
 }

+void SetCoverModel::AddElementToSubset(ElementIndex element,
+                                       SubsetIndex subset) {
+  AddElementToSubset(element.value(), subset.value());
+}
+
 // Reserves num_subsets columns in the model.
-void SetCoverModel::ReserveNumSubsets(int num_subsets) {
-  SubsetIndex size(num_subsets);
-  columns_.resize(size, SparseColumn());
-  subset_costs_.resize(size, 0.0);
+void SetCoverModel::ReserveNumSubsets(BaseInt number_of_subsets) {
+  num_subsets_ = std::max(num_subsets_, number_of_subsets);
+  columns_.resize(num_subsets_, SparseColumn());
+  subset_costs_.resize(num_subsets_, 0.0);
+}
+
+void SetCoverModel::ReserveNumSubsets(SubsetIndex num_subsets) {
+  ReserveNumSubsets(num_subsets.value());
 }

 // Reserves num_elements rows in the column indexed by subset.
-void SetCoverModel::ReserveNumElementsInSubset(int num_elements, int subset) {
-  const SubsetIndex size = std::max(columns_.size(), SubsetIndex(subset + 1));
-  subset_costs_.resize(size, 0.0);
-  columns_.resize(size, SparseColumn());
-  const EntryIndex num_entries(num_elements);
-  columns_[SubsetIndex(subset)].reserve(num_entries);
+void SetCoverModel::ReserveNumElementsInSubset(BaseInt num_elements,
+                                               BaseInt subset) {
+  // subset is an index: reserve subset + 1 columns so that the column for it
+  // exists.
+  ReserveNumSubsets(subset + 1);
+  columns_[SubsetIndex(subset)].reserve(ColumnEntryIndex(num_elements));
+}
+
+void SetCoverModel::ReserveNumElementsInSubset(ElementIndex num_elements,
+                                               SubsetIndex subset) {
+  ReserveNumElementsInSubset(num_elements.value(), subset.value());
 }

 void SetCoverModel::CreateSparseRowView() {
@@ -100,17 +119,19 @@
     return;
   }
   rows_.resize(num_elements_, SparseRow());
-  glop::StrictITIVector<ElementIndex, int> row_sizes(num_elements_, 0);
-  for (SubsetIndex subset(0); subset < columns_.size(); ++subset) {
+  ElementToIntVector row_sizes(num_elements_, 0);
+  for (const SubsetIndex subset : SubsetRange()) {
+    // Sort the columns. It's not super-critical to improve performance here
+    // as this needs to be done only once.
std::sort(columns_[subset].begin(), columns_[subset].end()); for (const ElementIndex element : columns_[subset]) { ++row_sizes[element]; } } - for (ElementIndex element(0); element < num_elements_; ++element) { - rows_[element].reserve(EntryIndex(row_sizes[element])); + for (const ElementIndex element : ElementRange()) { + rows_[element].reserve(RowEntryIndex(row_sizes[element])); } - for (SubsetIndex subset(0); subset < columns_.size(); ++subset) { + for (const SubsetIndex subset : SubsetRange()) { for (const ElementIndex element : columns_[subset]) { rows_[element].push_back(subset); } @@ -119,11 +140,12 @@ void SetCoverModel::CreateSparseRowView() { } bool SetCoverModel::ComputeFeasibility() const { - CHECK_GT(num_elements_, 0); - CHECK_GT(columns_.size(), 0); - CHECK_EQ(columns_.size(), subset_costs_.size()); - - ElementToSubsetVector coverage(num_elements_, SubsetIndex(0)); + CHECK_GT(num_elements(), 0); + CHECK_GT(num_subsets(), 0); + CHECK_EQ(columns_.size(), num_subsets()); + CHECK_EQ(subset_costs_.size(), num_subsets()); + CHECK_EQ(all_subsets_.size(), num_subsets()); + ElementToIntVector coverage(num_elements_, 0); for (const Cost cost : subset_costs_) { CHECK_GT(cost, 0.0); } @@ -133,7 +155,7 @@ bool SetCoverModel::ComputeFeasibility() const { ++coverage[element]; } } - for (ElementIndex element(0); element < num_elements_; ++element) { + for (const ElementIndex element : ElementRange()) { CHECK_GE(coverage[element], 0); if (coverage[element] == 0) { return false; @@ -141,7 +163,7 @@ bool SetCoverModel::ComputeFeasibility() const { } VLOG(1) << "Max possible coverage = " << *std::max_element(coverage.begin(), coverage.end()); - for (SubsetIndex subset(0); subset < columns_.size(); ++subset) { + for (const SubsetIndex subset : SubsetRange()) { CHECK_EQ(all_subsets_[subset.value()], subset) << "subset = " << subset; } return true; @@ -149,7 +171,7 @@ bool SetCoverModel::ComputeFeasibility() const { SetCoverProto SetCoverModel::ExportModelAsProto() { SetCoverProto message; - for (SubsetIndex subset(0); subset < columns_.size(); ++subset) { + for (const SubsetIndex subset : SubsetRange()) { SetCoverProto::Subset* subset_proto = message.add_subset(); subset_proto->set_cost(subset_costs_[subset]); std::sort(columns_[subset].begin(), columns_[subset].end()); @@ -166,13 +188,13 @@ void SetCoverModel::ImportModelFromProto(const SetCoverProto& message) { ReserveNumSubsets(message.subset_size()); SubsetIndex subset_index(0); for (const SetCoverProto::Subset& subset_proto : message.subset()) { - subset_costs_[SubsetIndex(subset_index)] = subset_proto.cost(); + subset_costs_[subset_index] = subset_proto.cost(); if (subset_proto.element_size() > 0) { - columns_[subset_index].reserve(EntryIndex(subset_proto.element_size())); - for (auto element : subset_proto.element()) { + columns_[subset_index].reserve( + ColumnEntryIndex(subset_proto.element_size())); + for (const BaseInt element : subset_proto.element()) { columns_[subset_index].push_back(ElementIndex(element)); - num_elements_ = - ElementIndex(std::max(num_elements_.value(), element + 1)); + num_elements_ = std::max(num_elements_, element + 1); } ++subset_index; } @@ -181,4 +203,92 @@ void SetCoverModel::ImportModelFromProto(const SetCoverProto& message) { CreateSparseRowView(); } +namespace { +// Returns the standard deviation of the vector v, excluding those values that +// are zero. 
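+// For example, for values = {2, 0, 4}: the zero is skipped, so n = 2,
+// sum = 6, sum_of_squares = 20, and the result is
+// sqrt((20 - 36 / 2) / 2) = sqrt(1) = 1.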
+template <typename T>
+double StandardDeviation(const std::vector<T>& values) {
+  const size_t size = values.size();
+  double n = 0.0;  // n is used in a calculation involving doubles.
+  double sum_of_squares = 0.0;
+  double sum = 0.0;
+  for (size_t i = 0; i < size; ++i) {
+    double sample = static_cast<double>(values[i]);
+    if (sample == 0.0) continue;
+    sum_of_squares += sample * sample;
+    sum += sample;
+    ++n;
+  }
+  return n == 0.0 ? 0.0 : sqrt((sum_of_squares - sum * sum / n) / n);
+}
+
+template <typename T>
+SetCoverModel::Stats ComputeStats(std::vector<T> sizes) {
+  SetCoverModel::Stats stats;
+  stats.min = *std::min_element(sizes.begin(), sizes.end());
+  stats.max = *std::max_element(sizes.begin(), sizes.end());
+  stats.mean = std::accumulate(sizes.begin(), sizes.end(), 0.0) / sizes.size();
+  std::nth_element(sizes.begin(), sizes.begin() + sizes.size() / 2,
+                   sizes.end());
+  stats.median = sizes[sizes.size() / 2];
+  stats.stddev = StandardDeviation(sizes);
+  return stats;
+}
+
+template <typename T>
+std::vector<T> ComputeDeciles(std::vector<T> values) {
+  const int kNumDeciles = 10;
+  std::vector<T> deciles;
+  deciles.reserve(kNumDeciles);
+  for (int i = 1; i <= kNumDeciles; ++i) {
+    const size_t point = values.size() * i / kNumDeciles - 1;
+    std::nth_element(values.begin(), values.begin() + point, values.end());
+    deciles.push_back(values[point]);
+  }
+  return deciles;
+}
+}  // namespace
+
+SetCoverModel::Stats SetCoverModel::ComputeCostStats() {
+  std::vector<Cost> subset_costs(num_subsets());
+  std::copy(subset_costs_.begin(), subset_costs_.end(), subset_costs.begin());
+  return ComputeStats(std::move(subset_costs));
+}
+
+SetCoverModel::Stats SetCoverModel::ComputeRowStats() {
+  std::vector<BaseInt> row_sizes(num_elements(), 0);
+  for (const SparseColumn& column : columns_) {
+    for (const ElementIndex element : column) {
+      ++row_sizes[element.value()];
+    }
+  }
+  return ComputeStats(std::move(row_sizes));
+}
+
+SetCoverModel::Stats SetCoverModel::ComputeColumnStats() {
+  std::vector<BaseInt> column_sizes(columns_.size());
+  for (const SubsetIndex subset : SubsetRange()) {
+    column_sizes[subset.value()] = columns_[subset].size();
+  }
+  return ComputeStats(std::move(column_sizes));
+}
+
+std::vector<BaseInt> SetCoverModel::ComputeRowDeciles() const {
+  std::vector<BaseInt> row_sizes(num_elements(), 0);
+  for (const SparseColumn& column : columns_) {
+    for (const ElementIndex element : column) {
+      ++row_sizes[element.value()];
+    }
+  }
+  return ComputeDeciles(std::move(row_sizes));
+}
+
+std::vector<BaseInt> SetCoverModel::ComputeColumnDeciles() const {
+  std::vector<BaseInt> column_sizes(columns_.size());
+  for (const SubsetIndex subset : SubsetRange()) {
+    column_sizes[subset.value()] = columns_[subset].size();
+  }
+  return ComputeDeciles(std::move(column_sizes));
+}
+
 }  // namespace operations_research
diff --git a/ortools/algorithms/set_cover_model.h b/ortools/algorithms/set_cover_model.h
index 0cbd8bbbfc8..fa3f55430b8 100644
--- a/ortools/algorithms/set_cover_model.h
+++ b/ortools/algorithms/set_cover_model.h
@@ -14,60 +14,109 @@
 #ifndef OR_TOOLS_ALGORITHMS_SET_COVER_MODEL_H_
 #define OR_TOOLS_ALGORITHMS_SET_COVER_MODEL_H_

+#if defined(_MSC_VER)
+#include <BaseTsd.h>
+typedef SSIZE_T ssize_t;
+#else
+#include <sys/types.h>
+#endif  // defined(_MSC_VER)
+
+#include <string>
 #include <vector>

 #include "absl/log/check.h"
+#include "absl/strings/str_cat.h"
 #include "ortools/algorithms/set_cover.pb.h"
-#include "ortools/lp_data/lp_types.h"  // For StrictITIVector.
-#include "ortools/util/strong_integers.h" +#include "ortools/base/strong_int.h" +#include "ortools/base/strong_vector.h" +#include "ortools/util/aligned_memory.h" // Representation class for the weighted set-covering problem. // -// Let S be a set, let (T_j) be a family (j in J) of subsets of S, and c_j costs -// associated to each T_j. +// Let E be a "universe" set, let (S_j) be a family (j in J) of subsets of E, +// and c_j costs associated to each S_j. Note that J = {j in 1..|S|}. // -// The minimum-cost set-covering problem consists in finding K, a subset of J -// such that the union of all the T_j for k in K is equal to S (the subsets -// indexed by K "cover" S), with the minimal total cost sum c_k (k in K). +// The minimum-cost set-covering problem consists in finding K (for covering), +// a subset of J such that the union of all the S_j for k in K is equal to E +// (the subsets indexed by K "cover" E), while minimizing total cost sum c_k (k +// in K). // // In Mixed-Integer Programming and matrix terms, the goal is to find values -// of binary variables x_j, where x_j is 1 when subset T_j is in K, 0 +// of binary variables x_j, where x_j is 1 when subset S_j is in K, 0 // otherwise, that minimize the sum of c_j * x_j subject to M.x >= 1. Each row -// corresponds to an element in S. +// corresponds to an element in E. // // The matrix M for linear constraints is defined as follows: -// - it has as many rows as there are elements in S. -// - its columns are such that M(i, j) = 1 iff the i-th element of S is present -// in T_j. +// - it has as many rows as there are elements in E. +// - its columns are such that M(i, j) = 1 iff the i-th element of E is present +// in S_j. +// +// We alse use m to denote |E|, the number of elements, and n to denote |S|, the +// number of subsets. +// Finally, nnz or #nz denotes the numbers of non-zeros, i.e. the sum of the +// cardinalities of all the subsets. namespace operations_research { -// Basic non-strict type for cost. -typedef double Cost; +// Basic non-strict type for cost. The speed penalty for using double is ~2%. +using Cost = double; + +// Base non-strict integer type for counting elements and subsets. +// Using ints makes it possible to represent problems with more than 2 billion +// (2e9) elements and subsets. If need arises one day, BaseInt can be split +// into SubsetBaseInt and ElementBaseInt. +// Quick testing has shown a slowdown of about 20-25% when using int64_t. +using BaseInt = int; // We make heavy use of strong typing to avoid obvious mistakes. // Subset index. -DEFINE_STRONG_INDEX_TYPE(SubsetIndex); +DEFINE_STRONG_INT_TYPE(SubsetIndex, BaseInt); // Element index. -DEFINE_STRONG_INDEX_TYPE(ElementIndex); +DEFINE_STRONG_INT_TYPE(ElementIndex, BaseInt); // Position in a vector. The vector may either represent a column, i.e. a // subset with all its elements, or a row, i,e. the list of subsets which // contain a given element. -DEFINE_STRONG_INDEX_TYPE(EntryIndex); - -// TODO(user): consider replacing with StrongVectors, which behave differently. -// The return type for size() is a simple size_t and not an Index as in -// StrictITIVector, which makes the code less elegant. 
-using SubsetCostVector = glop::StrictITIVector<SubsetIndex, Cost>;
-using ElementCostVector = glop::StrictITIVector<ElementIndex, Cost>;
-using SparseColumn = glop::StrictITIVector<EntryIndex, ElementIndex>;
-using SparseRow = glop::StrictITIVector<EntryIndex, SubsetIndex>;
-
-using SparseColumnView = glop::StrictITIVector<SubsetIndex, SparseColumn>;
-using SparseRowView = glop::StrictITIVector<ElementIndex, SparseRow>;
-using ElementToSubsetVector = glop::StrictITIVector<ElementIndex, SubsetIndex>;
-using SubsetToElementVector = glop::StrictITIVector<SubsetIndex, ElementIndex>;
+DEFINE_STRONG_INT_TYPE(ColumnEntryIndex, BaseInt);
+DEFINE_STRONG_INT_TYPE(RowEntryIndex, BaseInt);
+
+using SubsetRange = util_intops::StrongIntRange<SubsetIndex>;
+using ElementRange = util_intops::StrongIntRange<ElementIndex>;
+using ColumnEntryRange = util_intops::StrongIntRange<ColumnEntryIndex>;
+
+// SIMD operations require vectors to be aligned at 64-bytes on x86-64
+// processors as of 2024-05-03.
+// TODO(user): improve the code to make it possible to use unaligned memory.
+constexpr int kSetCoverAlignmentInBytes = 64;
+
+using CostAllocator = AlignedAllocator<Cost, kSetCoverAlignmentInBytes>;
+using ElementAllocator =
+    AlignedAllocator<ElementIndex, kSetCoverAlignmentInBytes>;
+using SubsetAllocator =
+    AlignedAllocator<SubsetIndex, kSetCoverAlignmentInBytes>;
+
+using SubsetCostVector =
+    util_intops::StrongVector<SubsetIndex, Cost, CostAllocator>;
+using ElementCostVector =
+    util_intops::StrongVector<ElementIndex, Cost, CostAllocator>;
+
+using SparseColumn =
+    util_intops::StrongVector<ColumnEntryIndex, ElementIndex, ElementAllocator>;
+using SparseRow =
+    util_intops::StrongVector<RowEntryIndex, SubsetIndex, SubsetAllocator>;
+
+using IntAllocator = AlignedAllocator<BaseInt, kSetCoverAlignmentInBytes>;
+using ElementToIntVector =
+    util_intops::StrongVector<ElementIndex, BaseInt, IntAllocator>;
+using SubsetToIntVector =
+    util_intops::StrongVector<SubsetIndex, BaseInt, IntAllocator>;
+
+// Views of the sparse vectors. These need not be aligned as it's their
+// contents that need to be aligned.
+using SparseColumnView = util_intops::StrongVector<SubsetIndex, SparseColumn>;
+using SparseRowView = util_intops::StrongVector<ElementIndex, SparseRow>;
+
+using SubsetBoolVector = util_intops::StrongVector<SubsetIndex, bool>;

 // Main class for describing a weighted set-covering problem.
 class SetCoverModel {
@@ -75,6 +124,8 @@ class SetCoverModel {
   // Constructs an empty weighted set-covering problem.
   SetCoverModel()
       : num_elements_(0),
+        num_subsets_(0),
+        num_nonzeros_(0),
         row_view_is_valid_(false),
         subset_costs_(),
         columns_(),
@@ -83,11 +134,18 @@ class SetCoverModel {

   // Current number of elements to be covered in the model, i.e. the number of
   // elements in E. In matrix terms, this is the number of rows.
-  ElementIndex num_elements() const { return num_elements_; }
+  BaseInt num_elements() const { return num_elements_; }

   // Current number of subsets in the model. In matrix terms, this is the
   // number of columns.
-  SubsetIndex num_subsets() const { return columns_.size(); }
+  BaseInt num_subsets() const { return num_subsets_; }
+
+  // Current number of nonzeros in the matrix.
+  ssize_t num_nonzeros() const { return num_nonzeros_; }
+
+  double FillRate() const {
+    return 1.0 * num_nonzeros() / (1.0 * num_elements() * num_subsets());
+  }

   // Vector of costs for each subset.
   const SubsetCostVector& subset_costs() const { return subset_costs_; }
@@ -104,6 +162,16 @@ class SetCoverModel {
   // Returns true if rows_ and columns_ represent the same problem.
   bool row_view_is_valid() const { return row_view_is_valid_; }

+  // Access to the ranges of subsets and elements.
+  util_intops::StrongIntRange<SubsetIndex> SubsetRange() const {
+    return util_intops::StrongIntRange<SubsetIndex>(SubsetIndex(num_subsets_));
+  }
+
+  util_intops::StrongIntRange<ElementIndex> ElementRange() const {
+    return util_intops::StrongIntRange<ElementIndex>(
+        ElementIndex(num_elements_));
+  }
+
   // Returns the list of indices for all the subsets in the model.
   std::vector<SubsetIndex> all_subsets() const { return all_subsets_; }

   // Adds an element to the last subset created.
 // In matrix terms, this adds a
   // 1 on row 'element' of the current last column of the matrix.
-  void AddElementToLastSubset(int element);
+  void AddElementToLastSubset(BaseInt element);
   void AddElementToLastSubset(ElementIndex element);

   // Sets 'cost' to an already existing 'subset'.
   // This will CHECK-fail if cost is infinite or a NaN.
-  void SetSubsetCost(int subset, Cost cost);
+  void SetSubsetCost(BaseInt subset, Cost cost);
+  void SetSubsetCost(SubsetIndex subset, Cost cost);

-  // Adds 'element' to an already existing 'subset'.
-  void AddElementToSubset(int element, int subset);
+  // Adds 'element' to an already existing 'subset'.
+  // No check is done if element is already in the subset.
+  void AddElementToSubset(BaseInt element, BaseInt subset);
+  void AddElementToSubset(ElementIndex element, SubsetIndex subset);

   // Creates the sparse ("dual") representation of the problem.
   void CreateSparseRowView();
@@ -131,10 +202,13 @@ class SetCoverModel {
   bool ComputeFeasibility() const;

   // Reserves num_subsets columns in the model.
-  void ReserveNumSubsets(int num_subsets);
+  void ReserveNumSubsets(BaseInt num_subsets);
+  void ReserveNumSubsets(SubsetIndex num_subsets);

   // Reserves num_elements rows in the column indexed by subset.
-  void ReserveNumElementsInSubset(int num_elements, int subset);
+  void ReserveNumElementsInSubset(BaseInt num_elements, BaseInt subset);
+  void ReserveNumElementsInSubset(ElementIndex num_elements,
+                                  SubsetIndex subset);

   // Returns the model as a SetCoverProto. The function is not const because
   // the element indices in the columns need to be sorted for the representation
@@ -144,35 +218,155 @@ class SetCoverModel {
   // Imports the model from a SetCoverProto.
   void ImportModelFromProto(const SetCoverProto& message);

+  // A struct to show basic statistics on rows and columns.
+  // The meaning of the fields is obvious.
+  struct Stats {
+    double min;
+    double max;
+    double median;
+    double mean;
+    double stddev;
+
+    std::string DebugString() const {
+      return absl::StrCat("min = ", min, ", max = ", max, ", mean = ", mean,
+                          ", median = ", median, ", stddev = ", stddev, ", ");
+    }
+  };
+
+  // Computes basic statistics on costs and returns a Stats structure.
+  Stats ComputeCostStats();
+
+  // Computes basic statistics on rows and returns a Stats structure.
+  Stats ComputeRowStats();
+
+  // Computes basic statistics on columns and returns a Stats structure.
+  Stats ComputeColumnStats();
+
+  // Computes deciles on rows and returns a vector of deciles.
+  std::vector<BaseInt> ComputeRowDeciles() const;
+
+  // Computes deciles on columns and returns a vector of deciles.
+  std::vector<BaseInt> ComputeColumnDeciles() const;
+
  private:
   // Updates the all_subsets_ vector so that it always contains 0 to
   // columns.size() - 1.
   void UpdateAllSubsetsList();

   // Number of elements.
-  ElementIndex num_elements_;
+  BaseInt num_elements_;
+
+  // Number of subsets. Maintained for ease of access.
+  BaseInt num_subsets_;
+
+  // Number of nonzeros in the matrix.
+  ssize_t num_nonzeros_;

   // True when the SparseRowView is up-to-date.
   bool row_view_is_valid_;

   // Costs for each subset.
   SubsetCostVector subset_costs_;

   // Vector of columns. Each column corresponds to a subset and contains the
   // elements of the given subset.
+  // This takes nnz (number of non-zeros) BaseInts, or |E| * |S| * fill_rate.
+  // On classical benchmarks, the fill rate is in the 2 to 5% range.
+  // Some synthetic benchmarks have fill rates of 20%, while benchmarks for
+  // rail rotations have a fill rate of 0.2 to 0.4%.
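+  // For example, with |E| = 1,000 elements, |S| = 10,000 subsets and a 2%
+  // fill rate, the matrix stores 0.02 * 1,000 * 10,000 = 200,000 BaseInts,
+  // instead of the 10,000,000 entries of a dense representation.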
+  // TODO(user): try using a compressed representation like Protocol Buffers,
+  // since the data is only iterated upon.
   SparseColumnView columns_;

   // Vector of rows. Each row corresponds to an element and contains the
   // subsets containing the element.
+  // The size is exactly the same as for columns_.
   SparseRowView rows_;

   // Vector of indices from 0 to columns.size() - 1. (Like std::iota, but built
   // incrementally.) Used to (un)focus optimization algorithms on the complete
   // problem.
+  // This takes |S| BaseInts.
   // TODO(user): use this to enable deletion and recycling of columns/subsets.
+  // TODO(user): replace this with an iterator?
   std::vector<SubsetIndex> all_subsets_;
 };

+// The IntersectingSubsetsIterator is a forward iterator that returns the next
+// intersecting subset for a fixed seed_subset.
+// The iterator is initialized with a model and a seed_subset, and
+// allows a speedup in getting the intersecting subsets
+// by not storing them in memory.
+// The iterator is at the end when the last intersecting subset has been
+// returned.
+// TODO(user): Add the possibility for range-for loops.
+class IntersectingSubsetsIterator {
+ public:
+  IntersectingSubsetsIterator(const SetCoverModel& model,
+                              SubsetIndex seed_subset)
+      : intersecting_subset_(-1),
+        element_entry_(0),
+        subset_entry_(0),
+        seed_subset_(seed_subset),
+        model_(model),
+        subset_seen_(model_.columns().size(), false) {
+    CHECK(model_.row_view_is_valid());
+    subset_seen_[seed_subset] = true;  // Avoid iterating on `seed_subset`.
+    ++(*this);  // Move to the first intersecting subset.
+  }
+
+  // Returns true if the iterator is at the end.
+  bool at_end() const {
+    return element_entry_.value() == model_.columns()[seed_subset_].size();
+  }
+
+  // Returns the intersecting subset.
+  SubsetIndex operator*() const { return intersecting_subset_; }
+
+  // Moves the iterator to the next intersecting subset.
+  IntersectingSubsetsIterator& operator++() {
+    DCHECK(model_.row_view_is_valid());
+    DCHECK(!at_end());
+    const SparseRowView& rows = model_.rows();
+    const SparseColumn& column = model_.columns()[seed_subset_];
+    for (; element_entry_ < ColumnEntryIndex(column.size());
+         ++element_entry_) {
+      const ElementIndex current_element = column[element_entry_];
+      const SparseRow& current_row = rows[current_element];
+      for (; subset_entry_ < RowEntryIndex(current_row.size());
+           ++subset_entry_) {
+        intersecting_subset_ = current_row[subset_entry_];
+        if (!subset_seen_[intersecting_subset_]) {
+          subset_seen_[intersecting_subset_] = true;
+          return *this;
+        }
+      }
+      subset_entry_ = RowEntryIndex(0);  // 'carriage-return'
+    }
+    return *this;
+  }
+
+ private:
+  // The intersecting subset.
+  SubsetIndex intersecting_subset_;
+
+  // The position of the entry in the column corresponding to `seed_subset_`.
+  ColumnEntryIndex element_entry_;
+
+  // The position of the entry in the row corresponding to `element_entry_`.
+  RowEntryIndex subset_entry_;
+
+  // The seed subset.
+  SubsetIndex seed_subset_;
+
+  // The model to which the iterator is applying.
+  const SetCoverModel& model_;
+
+  // A vector of Booleans indicating whether the current subset has been
+  // already seen by the iterator.
+ SubsetBoolVector subset_seen_; +}; + } // namespace operations_research #endif // OR_TOOLS_ALGORITHMS_SET_COVER_MODEL_H_ diff --git a/ortools/algorithms/set_cover_orlib_test.cc b/ortools/algorithms/set_cover_orlib_test.cc index 20f1b542ae1..8dd153e44bb 100644 --- a/ortools/algorithms/set_cover_orlib_test.cc +++ b/ortools/algorithms/set_cover_orlib_test.cc @@ -15,13 +15,14 @@ #include #include -#include "absl/flags/flag.h" #include "absl/log/check.h" #include "absl/strings/str_cat.h" +#include "absl/strings/str_join.h" #include "absl/time/time.h" #include "gtest/gtest.h" -#include "ortools/algorithms/set_cover.h" +#include "ortools/algorithms/set_cover_heuristics.h" #include "ortools/algorithms/set_cover_invariant.h" +#include "ortools/algorithms/set_cover_lagrangian.h" #include "ortools/algorithms/set_cover_mip.h" #include "ortools/algorithms/set_cover_model.h" #include "ortools/algorithms/set_cover_reader.h" @@ -31,51 +32,178 @@ namespace operations_research { -double RunSolver(std::string name, SetCoverModel* model) { +void LogStats(std::string name, SetCoverModel* model) { + LOG(INFO) << ", " << name << ", num_elements, " << model->num_elements() + << ", num_subsets, " << model->num_subsets(); + LOG(INFO) << ", " << name << ", num_nonzeros, " << model->num_nonzeros() + << ", fill rate, " << model->FillRate(); + LOG(INFO) << ", " << name << ", cost, " + << model->ComputeCostStats().DebugString(); + + LOG(INFO) << ", " << name << ", num_rows, " << model->num_elements() + << ", rows sizes, " << model->ComputeRowStats().DebugString(); + LOG(INFO) << ", " << name << ", row size deciles, " + << absl::StrJoin(model->ComputeRowDeciles(), ", "); + LOG(INFO) << ", " << name << ", num_columns, " << model->num_subsets() + << ", columns sizes, " << model->ComputeColumnStats().DebugString(); + LOG(INFO) << ", " << name << ", column size deciles, " + << absl::StrJoin(model->ComputeColumnDeciles(), ", "); SetCoverInvariant inv(model); + Preprocessor preprocessor(&inv); + preprocessor.NextSolution(); + LOG(INFO) << ", " << name << ", num_columns_fixed_by_singleton_row, " + << preprocessor.num_columns_fixed_by_singleton_row(); +} + +void LogCostAndTiming(std::string name, std::string algo, double cost, + absl::Duration duration) { + LOG(INFO) << ", " << name << ", " << algo << "_cost, " << cost << ", " + << absl::ToInt64Microseconds(duration) << "e-6, s"; +} +SetCoverInvariant RunChvatalAndSteepest(std::string name, + SetCoverModel* model) { + SetCoverInvariant inv(model); GreedySolutionGenerator greedy(&inv); - WallTimer global_timer; WallTimer timer; - global_timer.Start(); timer.Start(); CHECK(greedy.NextSolution()); - DCHECK(inv.CheckSolution()); - LOG(INFO) << name << "_GreedySolutionGenerator_cost, " << inv.cost() << ", " - << absl::ToInt64Microseconds(timer.GetDuration()) << ", us"; - timer.Stop(); - timer.Reset(); + DCHECK(inv.CheckConsistency()); + LogCostAndTiming(name, "GreedySolutionGenerator", inv.cost(), + timer.GetDuration()); + SteepestSearch steepest(&inv); + steepest.NextSolution(100000); + LogCostAndTiming(name, "GreedySteepestSearch", inv.cost(), + timer.GetDuration()); + DCHECK(inv.CheckConsistency()); + return inv; +} + +SetCoverInvariant RunChvatalAndGLS(std::string name, SetCoverModel* model) { + SetCoverInvariant inv(model); + GreedySolutionGenerator greedy(&inv); + WallTimer timer; + timer.Start(); + CHECK(greedy.NextSolution()); + DCHECK(inv.CheckConsistency()); + LogCostAndTiming(name, "GreedySolutionGenerator", inv.cost(), + timer.GetDuration()); + GuidedLocalSearch 
gls(&inv); + gls.NextSolution(100'000); + LogCostAndTiming(name, "GLS", inv.cost(), timer.GetDuration()); + DCHECK(inv.CheckConsistency()); + return inv; +} + +SetCoverInvariant RunElementDegreeGreedyAndSteepest(std::string name, + SetCoverModel* model) { + SetCoverInvariant inv(model); + ElementDegreeSolutionGenerator element_degree(&inv); + WallTimer timer; timer.Start(); - operations_research::SteepestSearch steepest(&inv); + CHECK(element_degree.NextSolution()); + DCHECK(inv.CheckConsistency()); + LogCostAndTiming(name, "ElementDegreeSolutionGenerator", inv.cost(), + timer.GetDuration()); + SteepestSearch steepest(&inv); steepest.NextSolution(100000); - LOG(INFO) << name << "_SteepestSearch_cost, " << inv.cost() << ", " - << absl::ToInt64Microseconds(timer.GetDuration()) << ", us"; - double best_cost = inv.cost(); - DCHECK(inv.CheckSolution()); - SubsetBoolVector best_choices = inv.is_selected(); - std::vector focus = model->all_subsets(); - timer.Stop(); - timer.Reset(); + LogCostAndTiming(name, "ElementDegreeSteepestSearch", inv.cost(), + timer.GetDuration()); + DCHECK(inv.CheckConsistency()); + return inv; +} + +void IterateClearAndMip(std::string name, SetCoverInvariant* inv) { + WallTimer timer; timer.Start(); + std::vector focus = inv->model()->all_subsets(); + double best_cost = inv->cost(); + SubsetBoolVector best_choices = inv->is_selected(); for (int i = 0; i < 10; ++i) { std::vector range = - ClearMostCoveredElements(std::min(100UL, focus.size()), &inv); - SetCoverMip mip(&inv); - mip.NextSolution(range); - if (inv.cost() < best_cost) { - best_cost = inv.cost(); - best_choices = inv.is_selected(); + ClearMostCoveredElements(std::min(100UL, focus.size()), inv); + SetCoverMip mip(inv); + mip.NextSolution(range, true, 0.02); + DCHECK(inv->CheckConsistency()); + if (inv->cost() < best_cost) { + best_cost = inv->cost(); + best_choices = inv->is_selected(); } } timer.Stop(); - LOG(INFO) << name << "_MIP_cost, " << best_cost << ", " - << absl::ToInt64Microseconds(timer.GetDuration()) << ", us"; - global_timer.Stop(); - LOG(INFO) << name << "_total_running_time, " << best_cost << ", " - << absl::ToInt64Microseconds(global_timer.GetDuration()) - << ", us, total_time"; - return best_cost; - // TODO(user): add guided local search. + LogCostAndTiming(name, "IterateClearAndMip", best_cost, timer.GetDuration()); +} + +SetCoverInvariant ComputeLPLowerBound(std::string name, SetCoverModel* model) { + SetCoverInvariant inv(model); + WallTimer timer; + timer.Start(); + SetCoverMip mip(&inv, SetCoverMipSolver::SCIP); // Use Gurobi for large pbs. + mip.NextSolution(false, .3); // Use 300s or more for large problems. + LogCostAndTiming(name, "LPLowerBound", mip.lower_bound(), + timer.GetDuration()); + return inv; +} + +void ComputeLagrangianLowerBound(std::string name, SetCoverInvariant* inv) { + const SetCoverModel* model = inv->model(); + WallTimer timer; + timer.Start(); + SetCoverLagrangian lagrangian(inv, /*num_threads=*/4); + const auto [lower_bound, reduced_costs, multipliers] = + lagrangian.ComputeLowerBound(model->subset_costs(), inv->cost()); + LogCostAndTiming(name, "LagrangianLowerBound", lower_bound, + timer.GetDuration()); +} + +SetCoverInvariant RunMip(std::string name, SetCoverModel* model) { + SetCoverInvariant inv(model); + WallTimer timer; + timer.Start(); + SetCoverMip mip(&inv, SetCoverMipSolver::SCIP); // Use Gurobi for large pbs. + mip.NextSolution(true, .5); // Use 300s or more for large problems. 
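+  // A note on the new NextSolution() arguments, inferred from the callers in
+  // this change: ComputeLPLowerBound() above passes `false` and reads a lower
+  // bound, so the bool plausibly selects integrality (MIP vs. LP relaxation),
+  // while the double is a time limit in seconds (the KnightsCoverMip test in
+  // set_cover_test.cc replaces SetTimeLimitInSeconds(10) with
+  // NextSolution(true, 10)).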
+ timer.Stop(); + LogCostAndTiming(name, "MIP", inv.cost(), timer.GetDuration()); + return inv; +} + +void IterateClearElementDegreeAndSteepest(std::string name, + SetCoverInvariant* inv) { + WallTimer timer; + timer.Start(); + double best_cost = inv->cost(); + SubsetBoolVector best_choices = inv->is_selected(); + ElementDegreeSolutionGenerator element_degree(inv); + SteepestSearch steepest(inv); + for (int i = 0; i < 1000; ++i) { + std::vector range = + ClearRandomSubsets(0.1 * inv->trace().size(), inv); + CHECK(element_degree.NextSolution()); + steepest.NextSolution(range, 100000); + DCHECK(inv->CheckConsistency()); + if (inv->cost() < best_cost) { + best_cost = inv->cost(); + best_choices = inv->is_selected(); + } + } + timer.Stop(); + LogCostAndTiming(name, "IterateClearElementDegreeAndSteepest", best_cost, + timer.GetDuration()); +} + +double RunSolver(std::string name, SetCoverModel* model) { + LogStats(name, model); + WallTimer global_timer; + global_timer.Start(); + RunChvatalAndSteepest(name, model); + // SetCoverInvariant inv = ComputeLPLowerBound(name, model); + RunMip(name, model); + RunChvatalAndGLS(name, model); + SetCoverInvariant inv = RunElementDegreeGreedyAndSteepest(name, model); + ComputeLagrangianLowerBound(name, &inv); + // IterateClearAndMip(name, inv); + IterateClearElementDegreeAndSteepest(name, &inv); + return inv.cost(); } // We break down the ORLIB set covering problems by their expected runtime with @@ -122,26 +250,26 @@ const char data_dir[] = #define ORLIB_TEST(name, best_objective, expected_objective, size, function) \ TEST(OrlibTest, APPEND_AND_EVAL(TestOnLine, __LINE__)) { \ - auto filespec = file::JoinPathRespectAbsolute( \ - absl::GetFlag(FLAGS_test_srcdir), data_dir, name); \ + auto filespec = \ + file::JoinPathRespectAbsolute(::testing::SrcDir(), data_dir, name); \ LOG(INFO) << "Reading " << name; \ operations_research::SetCoverModel model = function(filespec); \ double cost = RunSolver(name, &model); \ (void)cost; \ } -#define ORLIB_UNICOST_TEST(name, best_objective, expected_objective, size, \ - function) \ - TEST(OrlibUnicostTest, APPEND_AND_EVAL(TestOnLine, __LINE__)) { \ - auto filespec = file::JoinPathRespectAbsolute( \ - absl::GetFlag(FLAGS_test_srcdir), data_dir, name); \ - LOG(INFO) << "Reading " << name; \ - operations_research::SetCoverModel model = function(filespec); \ - for (int i = 0; i < model.num_subsets(); ++i) { \ - model.SetSubsetCost(i, 1.0); \ - } \ - double cost = RunSolver(absl::StrCat(name, "_unicost"), &model); \ - (void)cost; \ +#define ORLIB_UNICOST_TEST(name, best_objective, expected_objective, size, \ + function) \ + TEST(OrlibUnicostTest, APPEND_AND_EVAL(TestOnLine, __LINE__)) { \ + auto filespec = \ + file::JoinPathRespectAbsolute(::testing::SrcDir(), data_dir, name); \ + LOG(INFO) << "Reading " << name; \ + operations_research::SetCoverModel model = function(filespec); \ + for (SubsetIndex i : model.SubsetRange()) { \ + model.SetSubsetCost(i, 1.0); \ + } \ + double cost = RunSolver(absl::StrCat(name, "_unicost"), &model); \ + (void)cost; \ } #define SCP_TEST(name, best_objective, expected_objective, size) \ @@ -156,6 +284,11 @@ const char data_dir[] = ORLIB_UNICOST_TEST(name, best_objective, expected_objective, size, \ operations_research::ReadRailSetCoverProblem) +#define BASIC_SCP +#define EXTRA_SCP +#define RAIL + +#ifdef BASIC_SCP SCP_TEST("scp41.txt", 429, 442, FEWMILLIS); SCP_TEST("scp42.txt", 512, 555, FEWMILLIS); SCP_TEST("scp43.txt", 516, 557, FEWMILLIS); @@ -237,15 +370,9 @@ SCP_TEST("scpnrh2.txt", 63, 70, 
FEWTENTHS); SCP_TEST("scpnrh3.txt", 59, 65, FEWTENTHS); SCP_TEST("scpnrh4.txt", 58, 66, FEWTENTHS); SCP_TEST("scpnrh5.txt", 55, 62, FEWTENTHS); +#endif -RAIL_TEST("rail507.txt", 174, 218, FEWTENTHS); -RAIL_TEST("rail516.txt", 182, 204, FEWTENTHS); -RAIL_TEST("rail582.txt", 211, 250, FEWTENTHS); -RAIL_TEST("rail2536.txt", 691, 889, MANYSECONDS); -RAIL_TEST("rail2586.txt", 952, 1139, MANYSECONDS); -RAIL_TEST("rail4284.txt", 1065, 1362, MANYSECONDS); -RAIL_TEST("rail4872.txt", 1527, 1861, MANYSECONDS); // [2] - +#ifdef EXTRA_SCP SCP_TEST("scpclr10.txt", 0, 32, FEWMILLIS); SCP_TEST("scpclr11.txt", 0, 30, FEWMILLIS); SCP_TEST("scpclr12.txt", 0, 31, FEWMILLIS); @@ -257,6 +384,21 @@ SCP_TEST("scpcyc08.txt", 0, 360, FEWMILLIS); SCP_TEST("scpcyc09.txt", 0, 816, SUBHUNDREDTH); SCP_TEST("scpcyc10.txt", 0, 1920, FEWHUNDREDTHS); SCP_TEST("scpcyc11.txt", 0, 4284, SUBTENTH); +#endif + +#ifdef RAIL +RAIL_TEST("rail507.txt", 174, 218, FEWTENTHS); +RAIL_TEST("rail516.txt", 182, 204, FEWTENTHS); +RAIL_TEST("rail582.txt", 211, 250, FEWTENTHS); +RAIL_TEST("rail2536.txt", 691, 889, MANYSECONDS); +RAIL_TEST("rail2586.txt", 952, 1139, MANYSECONDS); +RAIL_TEST("rail4284.txt", 1065, 1362, MANYSECONDS); +RAIL_TEST("rail4872.txt", 1527, 1861, MANYSECONDS); // [2] +#endif + +#undef BASIC_SCP +#undef EXTRA_SCP +#undef RAIL #undef ORLIB_TEST #undef ORLIB_UNICOST_TEST diff --git a/ortools/algorithms/set_cover_reader.cc b/ortools/algorithms/set_cover_reader.cc index 72d49351887..d4c9283d429 100644 --- a/ortools/algorithms/set_cover_reader.cc +++ b/ortools/algorithms/set_cover_reader.cc @@ -95,17 +95,17 @@ SetCoverModel ReadBeasleySetCoverProblem(absl::string_view filename) { File* file(file::OpenOrDie(filename, "r", file::Defaults())); SetCoverReader reader(file); const ElementIndex num_rows(reader.ParseNextInteger()); - const int num_cols(reader.ParseNextInteger()); - model.ReserveNumSubsets(num_cols); - for (int i = 0; i < num_cols; ++i) { + const SubsetIndex num_cols(reader.ParseNextInteger()); + model.ReserveNumSubsets(num_cols.value()); + for (SubsetIndex subset : SubsetRange(num_cols)) { const double cost(reader.ParseNextDouble()); - model.SetSubsetCost(i, cost); + model.SetSubsetCost(subset.value(), cost); } - for (int element(0); element < num_rows; ++element) { - const EntryIndex row_size(reader.ParseNextInteger()); - for (EntryIndex entry(0); entry < row_size; ++entry) { + for (ElementIndex element : ElementRange(num_rows)) { + const RowEntryIndex row_size(reader.ParseNextInteger()); + for (RowEntryIndex entry(0); entry < row_size; ++entry) { const int subset(reader.ParseNextInteger() - 1); - model.AddElementToSubset(element, subset); + model.AddElementToSubset(element.value(), subset); } } file->Close(file::Defaults()).IgnoreError(); @@ -122,11 +122,11 @@ SetCoverModel ReadRailSetCoverProblem(absl::string_view filename) { for (int i(0); i < num_cols; ++i) { const double cost(reader.ParseNextDouble()); model.SetSubsetCost(i, cost); - const int column_size(reader.ParseNextInteger()); - model.ReserveNumElementsInSubset(i, column_size); - for (EntryIndex entry(0); entry < column_size; ++entry) { - const int element(reader.ParseNextInteger() - 1); - model.AddElementToSubset(element, i); + const ColumnEntryIndex column_size(reader.ParseNextInteger()); + model.ReserveNumElementsInSubset(i, column_size.value()); + for (const ColumnEntryIndex _ : ColumnEntryRange(column_size)) { + const ElementIndex element(reader.ParseNextInteger() - 1); + model.AddElementToSubset(element.value(), i); } } 
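  // By now each of the num_cols columns has supplied its cost, its size, and
  // the 1-based indices of the elements it covers (hence the `- 1` above).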
file->Close(file::Defaults()).IgnoreError(); diff --git a/ortools/algorithms/set_cover_test.cc b/ortools/algorithms/set_cover_test.cc index f4a388e4334..c1da8e0bdeb 100644 --- a/ortools/algorithms/set_cover_test.cc +++ b/ortools/algorithms/set_cover_test.cc @@ -11,8 +11,6 @@ // See the License for the specific language governing permissions and // limitations under the License. -#include "ortools/algorithms/set_cover.h" - #include #include @@ -21,6 +19,7 @@ #include "benchmark/benchmark.h" #include "gtest/gtest.h" #include "ortools/algorithms/set_cover.pb.h" +#include "ortools/algorithms/set_cover_heuristics.h" #include "ortools/algorithms/set_cover_invariant.h" #include "ortools/algorithms/set_cover_mip.h" #include "ortools/algorithms/set_cover_model.h" @@ -82,20 +81,19 @@ TEST(SetCoverProtoTest, SaveReload) { } TEST(SolutionProtoTest, SaveReloadTwice) { - SetCoverModel model = CreateKnightsCoverModel(10, 10); + SetCoverModel model = CreateKnightsCoverModel(3, 3); SetCoverInvariant inv(&model); GreedySolutionGenerator greedy(&inv); CHECK(greedy.NextSolution()); - EXPECT_TRUE(inv.CheckSolution()); + EXPECT_TRUE(inv.CheckConsistency()); SetCoverSolutionResponse greedy_proto = inv.ExportSolutionAsProto(); SteepestSearch steepest(&inv); CHECK(steepest.NextSolution(500)); - EXPECT_TRUE(inv.CheckSolution()); + EXPECT_TRUE(inv.CheckConsistency()); SetCoverSolutionResponse steepest_proto = inv.ExportSolutionAsProto(); inv.ImportSolutionFromProto(greedy_proto); CHECK(steepest.NextSolution(500)); - EXPECT_TRUE(inv.CheckSolution()); - SetCoverSolutionResponse reloaded_proto = inv.ExportSolutionAsProto(); + EXPECT_TRUE(inv.CheckConsistency()); } TEST(SetCoverTest, InitialValues) { @@ -115,17 +113,41 @@ TEST(SetCoverTest, InitialValues) { TrivialSolutionGenerator trivial(&inv); CHECK(trivial.NextSolution()); LOG(INFO) << "TrivialSolutionGenerator cost: " << inv.cost(); - EXPECT_TRUE(inv.CheckSolution()); + EXPECT_TRUE(inv.CheckConsistency()); GreedySolutionGenerator greedy(&inv); - CHECK(greedy.NextSolution()); + EXPECT_TRUE(greedy.NextSolution()); LOG(INFO) << "GreedySolutionGenerator cost: " << inv.cost(); - EXPECT_TRUE(inv.CheckSolution()); + EXPECT_TRUE(inv.CheckConsistency()); + EXPECT_EQ(inv.num_uncovered_elements(), 0); SteepestSearch steepest(&inv); CHECK(steepest.NextSolution(500)); LOG(INFO) << "SteepestSearch cost: " << inv.cost(); - EXPECT_TRUE(inv.CheckSolution()); + EXPECT_TRUE(inv.CheckConsistency()); +} + +TEST(SetCoverTest, Preprocessor) { + SetCoverModel model; + model.AddEmptySubset(1); + model.AddElementToLastSubset(0); + model.AddEmptySubset(1); + model.AddElementToLastSubset(1); + model.AddElementToLastSubset(2); + model.AddEmptySubset(1); + model.AddElementToLastSubset(1); + model.AddEmptySubset(1); + model.AddElementToLastSubset(2); + EXPECT_TRUE(model.ComputeFeasibility()); + + SetCoverInvariant inv(&model); + Preprocessor preprocessor(&inv); + preprocessor.NextSolution(); + EXPECT_TRUE(inv.CheckConsistency()); + GreedySolutionGenerator greedy(&inv); + EXPECT_TRUE(greedy.NextSolution()); + LOG(INFO) << "GreedySolutionGenerator cost: " << inv.cost(); + EXPECT_TRUE(inv.CheckConsistency()); } TEST(SetCoverTest, Infeasible) { @@ -156,19 +178,19 @@ TEST(SetCoverTest, KnightsCoverTrivalAndGreedy) { TrivialSolutionGenerator trivial(&inv); CHECK(trivial.NextSolution()); LOG(INFO) << "TrivialSolutionGenerator cost: " << inv.cost(); - EXPECT_TRUE(inv.CheckSolution()); + EXPECT_TRUE(inv.CheckConsistency()); // Reinitialize before using Greedy, to start from scratch. 
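  // (Greedy improves on whatever selection the invariant currently holds, so
  // without this reset it would presumably refine the trivial solution above
  // rather than build one from scratch.)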
inv.Initialize(); GreedySolutionGenerator greedy(&inv); CHECK(greedy.NextSolution()); LOG(INFO) << "GreedySolutionGenerator cost: " << inv.cost(); - EXPECT_TRUE(inv.CheckSolution()); + EXPECT_TRUE(inv.CheckConsistency()); SteepestSearch steepest(&inv); - CHECK(steepest.NextSolution(100000)); + CHECK(steepest.NextSolution(100'000)); LOG(INFO) << "SteepestSearch cost: " << inv.cost(); - EXPECT_TRUE(inv.CheckSolution()); + EXPECT_TRUE(inv.CheckConsistency()); } TEST(SetCoverTest, KnightsCoverGreedy) { @@ -180,10 +202,34 @@ TEST(SetCoverTest, KnightsCoverGreedy) { LOG(INFO) << "GreedySolutionGenerator cost: " << inv.cost(); SteepestSearch steepest(&inv); - CHECK(steepest.NextSolution(100000)); + CHECK(steepest.NextSolution(100'000)); + LOG(INFO) << "SteepestSearch cost: " << inv.cost(); +} + +TEST(SetCoverTest, KnightsCoverDegree) { + SetCoverModel model = CreateKnightsCoverModel(SIZE, SIZE); + SetCoverInvariant inv(&model); + + ElementDegreeSolutionGenerator degree(&inv); + CHECK(degree.NextSolution()); + LOG(INFO) << "ElementDegreeSolutionGenerator cost: " << inv.cost(); + + SteepestSearch steepest(&inv); + CHECK(steepest.NextSolution(100'000)); LOG(INFO) << "SteepestSearch cost: " << inv.cost(); } +TEST(SetCoverTest, KnightsCoverGLS) { + SetCoverModel model = CreateKnightsCoverModel(SIZE, SIZE); + SetCoverInvariant inv(&model); + GreedySolutionGenerator greedy(&inv); + CHECK(greedy.NextSolution()); + LOG(INFO) << "GreedySolutionGenerator cost: " << inv.cost(); + GuidedLocalSearch gls(&inv); + CHECK(gls.NextSolution(100000)); + LOG(INFO) << "GuidedLocalSearch cost: " << inv.cost(); +} + TEST(SetCoverTest, KnightsCoverRandom) { SetCoverModel model = CreateKnightsCoverModel(SIZE, SIZE); EXPECT_TRUE(model.ComputeFeasibility()); @@ -192,12 +238,12 @@ TEST(SetCoverTest, KnightsCoverRandom) { RandomSolutionGenerator random(&inv); CHECK(random.NextSolution()); LOG(INFO) << "RandomSolutionGenerator cost: " << inv.cost(); - EXPECT_TRUE(inv.CheckSolution()); + EXPECT_TRUE(inv.CheckConsistency()); SteepestSearch steepest(&inv); - CHECK(steepest.NextSolution(100000)); + CHECK(steepest.NextSolution(100'000)); LOG(INFO) << "SteepestSearch cost: " << inv.cost(); - EXPECT_TRUE(inv.CheckSolution()); + EXPECT_TRUE(inv.CheckConsistency()); } TEST(SetCoverTest, KnightsCoverTrivial) { @@ -208,12 +254,12 @@ TEST(SetCoverTest, KnightsCoverTrivial) { TrivialSolutionGenerator trivial(&inv); CHECK(trivial.NextSolution()); LOG(INFO) << "TrivialSolutionGenerator cost: " << inv.cost(); - EXPECT_TRUE(inv.CheckSolution()); + EXPECT_TRUE(inv.CheckConsistency()); SteepestSearch steepest(&inv); - CHECK(steepest.NextSolution(100000)); + CHECK(steepest.NextSolution(100'000)); LOG(INFO) << "SteepestSearch cost: " << inv.cost(); - EXPECT_TRUE(inv.CheckSolution()); + EXPECT_TRUE(inv.CheckConsistency()); } TEST(SetCoverTest, KnightsCoverGreedyAndTabu) { @@ -230,18 +276,18 @@ TEST(SetCoverTest, KnightsCoverGreedyAndTabu) { LOG(INFO) << "GreedySolutionGenerator cost: " << inv.cost(); SteepestSearch steepest(&inv); - CHECK(steepest.NextSolution(10000)); + CHECK(steepest.NextSolution(10'000)); LOG(INFO) << "SteepestSearch cost: " << inv.cost(); - EXPECT_TRUE(inv.CheckSolution()); + EXPECT_TRUE(inv.CheckConsistency()); GuidedTabuSearch gts(&inv); - CHECK(gts.NextSolution(10000)); + CHECK(gts.NextSolution(10'000)); LOG(INFO) << "GuidedTabuSearch cost: " << inv.cost(); - EXPECT_TRUE(inv.CheckSolution()); + EXPECT_TRUE(inv.CheckConsistency()); DisplayKnightsCoverSolution(inv.is_selected(), BoardSize, BoardSize); } -TEST(SetCoverTest, 
KnightsCoverRandomClear) { +TEST(SetCoverTest, KnightsCoverGreedyRandomClear) { #ifdef NDEBUG constexpr int BoardSize = 50; #else @@ -251,17 +297,16 @@ TEST(SetCoverTest, KnightsCoverRandomClear) { SetCoverInvariant inv(&model); Cost best_cost = std::numeric_limits::max(); SubsetBoolVector best_choices = inv.is_selected(); - for (int i = 0; i < 10000; ++i) { + for (int i = 0; i < 10'000; ++i) { inv.LoadSolution(best_choices); - ClearRandomSubsets(0.1 * model.num_subsets().value(), &inv); + ClearRandomSubsets(0.1 * inv.trace().size(), &inv); GreedySolutionGenerator greedy(&inv); CHECK(greedy.NextSolution()); SteepestSearch steepest(&inv); - CHECK(steepest.NextSolution(10000)); + CHECK(steepest.NextSolution(10'000)); - EXPECT_TRUE(inv.CheckSolution()); if (inv.cost() < best_cost) { best_cost = inv.cost(); best_choices = inv.is_selected(); @@ -278,7 +323,7 @@ TEST(SetCoverTest, KnightsCoverRandomClear) { } } -TEST(SetCoverTest, KnightsCoverRandomClearMip) { +TEST(SetCoverTest, KnightsCoverElementDegreeRandomClear) { #ifdef NDEBUG constexpr int BoardSize = 50; #else @@ -288,23 +333,56 @@ TEST(SetCoverTest, KnightsCoverRandomClearMip) { SetCoverInvariant inv(&model); Cost best_cost = std::numeric_limits::max(); SubsetBoolVector best_choices = inv.is_selected(); + for (int i = 0; i < 1'000; ++i) { + inv.LoadSolution(best_choices); + ClearRandomSubsets(0.1 * inv.trace().size(), &inv); + + ElementDegreeSolutionGenerator degree(&inv); + CHECK(degree.NextSolution()); + + SteepestSearch steepest(&inv); + CHECK(steepest.NextSolution(10'000)); + + if (inv.cost() < best_cost) { + best_cost = inv.cost(); + best_choices = inv.is_selected(); + LOG(INFO) << "Best cost: " << best_cost << " at iteration = " << i; + } + } + inv.LoadSolution(best_choices); + DisplayKnightsCoverSolution(best_choices, BoardSize, BoardSize); + LOG(INFO) << "RandomClear cost: " << best_cost; + // The best solution found until 2023-08 has a cost of 350. 
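+  // Any cost below 350 would therefore point to a cost-accounting bug rather
+  // than a genuinely better solution, hence the CHECK_GE below.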
+ // http://www.contestcen.com/kn50.htm + if (BoardSize == 50) { + CHECK_GE(inv.cost(), 350); + } +} + +TEST(SetCoverTest, KnightsCoverRandomClearMip) { +#ifdef NDEBUG + constexpr int BoardSize = 50; +#else + constexpr int BoardSize = 15; +#endif + SetCoverModel model = CreateKnightsCoverModel(BoardSize, BoardSize); + SetCoverInvariant inv(&model); GreedySolutionGenerator greedy(&inv); CHECK(greedy.NextSolution()); LOG(INFO) << "GreedySolutionGenerator cost: " << inv.cost(); SteepestSearch steepest(&inv); - CHECK(steepest.NextSolution(10000)); + CHECK(steepest.NextSolution(10'000)); LOG(INFO) << "SteepestSearch cost: " << inv.cost(); - best_cost = inv.cost(); - best_choices = inv.is_selected(); + Cost best_cost = inv.cost(); + SubsetBoolVector best_choices = inv.is_selected(); for (int i = 0; i < 100; ++i) { inv.LoadSolution(best_choices); - auto focus = ClearRandomSubsets(0.1 * model.num_subsets().value(), &inv); + auto focus = ClearRandomSubsets(0.1 * model.num_subsets(), &inv); SetCoverMip mip(&inv); - mip.SetTimeLimitInSeconds(1); - mip.NextSolution(focus); - EXPECT_TRUE(inv.CheckSolution()); + mip.NextSolution(focus, true, 1); + EXPECT_TRUE(inv.CheckConsistency()); if (inv.cost() < best_cost) { best_cost = inv.cost(); best_choices = inv.is_selected(); @@ -330,8 +408,7 @@ TEST(SetCoverTest, KnightsCoverMip) { SetCoverModel model = CreateKnightsCoverModel(BoardSize, BoardSize); SetCoverInvariant inv(&model); SetCoverMip mip(&inv); - mip.SetTimeLimitInSeconds(10); - mip.NextSolution(); + mip.NextSolution(true, 10); SubsetBoolVector best_choices = inv.is_selected(); DisplayKnightsCoverSolution(best_choices, BoardSize, BoardSize); LOG(INFO) << "Mip cost: " << inv.cost(); diff --git a/ortools/algorithms/set_cover_utils.cc b/ortools/algorithms/set_cover_utils.cc deleted file mode 100644 index e3eb949f0e9..00000000000 --- a/ortools/algorithms/set_cover_utils.cc +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright 2010-2024 Google LLC -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "ortools/algorithms/set_cover_utils.h" - -#include "ortools/base/adjustable_priority_queue-inl.h" // IWYU pragma: keep - -namespace operations_research { - -void SubsetPriorityQueue::Initialize() { - max_pq_.Clear(); - pq_elements_.assign(inv_->model()->num_subsets(), SubsetPriority()); -} - -void SubsetPriorityQueue::Add(SubsetIndex subset, Cost priority) { - pq_elements_[subset] = SubsetPriority(subset, priority); - max_pq_.Add(&pq_elements_[subset]); -} - -void SubsetPriorityQueue::ChangePriority(SubsetIndex subset, Cost priority) { - // TODO(user): see if the reference to inv_ can be removed. 
- if (inv_->marginal_impacts()[subset] != 0) { - pq_elements_[subset].SetPriority(priority); - max_pq_.NoteChangedPriority(&pq_elements_[subset]); - DVLOG(1) << "Priority of subset " << subset << " is now " - << pq_elements_[subset].GetPriority(); - } -} - -void SubsetPriorityQueue::Remove(SubsetIndex subset) { - if (max_pq_.Contains(&pq_elements_[subset])) { - DVLOG(1) << "Removing subset " << subset << " from priority queue"; - max_pq_.Remove(&pq_elements_[subset]); - } -} - -SubsetIndex SubsetPriorityQueue::TopSubset() const { - return max_pq_.Top()->GetSubset(); -} - -} // namespace operations_research diff --git a/ortools/algorithms/set_cover_utils.h b/ortools/algorithms/set_cover_utils.h deleted file mode 100644 index 635cdcfc51e..00000000000 --- a/ortools/algorithms/set_cover_utils.h +++ /dev/null @@ -1,162 +0,0 @@ -// Copyright 2010-2024 Google LLC -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef OR_TOOLS_ALGORITHMS_SET_COVER_UTILS_H_ -#define OR_TOOLS_ALGORITHMS_SET_COVER_UTILS_H_ - -#include -#include -#if defined(_MSC_VER) -#include -#define ssize_t SSIZE_T -#endif - -#include "ortools/algorithms/set_cover_invariant.h" -#include "ortools/algorithms/set_cover_model.h" -#include "ortools/base/adjustable_priority_queue.h" - -namespace operations_research { - -// Element used for AdjustablePriorityQueue. It's an implementation detail. -class SubsetPriority { - public: - SubsetPriority() - : heap_index_(-1), - subset_(0), - priority_(std::numeric_limits::infinity()) {} - - SubsetPriority(SubsetIndex s, Cost cost) - : heap_index_(s.value()), subset_(s), priority_(cost) {} - - void SetHeapIndex(int h) { heap_index_ = h; } - int GetHeapIndex() const { return heap_index_; } - - // The priority queue maintains the max element. This comparator breaks ties - // between subsets using their cardinalities. - bool operator<(const SubsetPriority& other) const { - return priority_ < other.priority_ || - (priority_ == other.priority_ && subset_ < other.subset_); - } - - SubsetIndex GetSubset() const { return subset_; } - void SetPriority(Cost priority) { priority_ = priority; } - Cost GetPriority() const { return priority_; } - - private: - int heap_index_; - SubsetIndex subset_; - Cost priority_; -}; - -using SubsetPriorityVector = glop::StrictITIVector; - -// Also an implementation detail. -class SubsetPriorityQueue { - public: - explicit SubsetPriorityQueue(SetCoverInvariant* inv) : inv_(inv) { - Initialize(); - } - - // Adds subset to the priority queue. - void Add(SubsetIndex subset, Cost priority); - - // Changes the priority of subset in the queue. - void ChangePriority(SubsetIndex subset, Cost priority); - - // Removes subset from the queue, if it is in the queue. - void Remove(SubsetIndex subset); - - // Returns true if the subset is in the queue. - bool Contains(SubsetIndex subset) { - return max_pq_.Contains(&pq_elements_[subset]); - } - - // Returns true if the queue is empty. - bool IsEmpty() const { return max_pq_.IsEmpty(); } - - // Returns the top subset in the queue. 
- SubsetIndex TopSubset() const; - - // Returns the priority of the subset in the queue. - Cost Priority(SubsetIndex subset) { - return pq_elements_[subset].GetPriority(); - } - - // Returns the size of the queue. - ssize_t Size() const { return max_pq_.Size(); } - - private: - // Initializes the priority queue. - void Initialize(); - - // The set-cover invariant to which the priority queue applies. - SetCoverInvariant* inv_; - - // The adjustable priority queue per se. - AdjustablePriorityQueue max_pq_; - - // The elements of the priority queue. - SubsetPriorityVector pq_elements_; -}; - -// A Tabu list is a fixed-sized circular array of small size, usually a few -// dozens of elements. -template -class TabuList { - public: - explicit TabuList(T size) : array_(0), fill_(0), index_(0) { - array_.resize(size.value(), T(-1)); - } - - // Returns the size of the array. - int size() const { return array_.size(); } - - // Initializes the array of the Tabu list. - void Init(int size) { - array_.resize(size, T(-1)); - fill_ = 0; - index_ = 0; - } - - // Adds t to the array. When the end of the array is reached, re-start at 0. - void Add(T t) { - const int size = array_.size(); - array_[index_] = t; - ++index_; - if (index_ >= size) { - index_ = 0; - } - if (fill_ < size) { - ++fill_; - } - } - - // Returns true if t is in the array. This is O(size), but small. - bool Contains(T t) const { - for (int i = 0; i < fill_; ++i) { - if (t == array_[i]) { - return true; - } - } - return false; - } - - private: - std::vector array_; - int fill_; - int index_; -}; - -} // namespace operations_research - -#endif // OR_TOOLS_ALGORITHMS_SET_COVER_UTILS_H_ diff --git a/ortools/algorithms/sparse_permutation.cc b/ortools/algorithms/sparse_permutation.cc index 1713a37b922..ab6801fef14 100644 --- a/ortools/algorithms/sparse_permutation.cc +++ b/ortools/algorithms/sparse_permutation.cc @@ -15,6 +15,7 @@ #include #include +#include #include #include "absl/strings/str_join.h" @@ -66,7 +67,7 @@ std::string SparsePermutation::DebugString() const { std::vector cycle; for (int i = min_pos; i < end; ++i) cycle.push_back(cycles_[i]); for (int i = start; i < min_pos; ++i) cycle.push_back(cycles_[i]); - cycles.push_back(cycle); + cycles.push_back(std::move(cycle)); start = end; } std::sort(cycles.begin(), cycles.end()); diff --git a/ortools/base/BUILD.bazel b/ortools/base/BUILD.bazel index 1a754e05f90..c57c0d22894 100644 --- a/ortools/base/BUILD.bazel +++ b/ortools/base/BUILD.bazel @@ -51,7 +51,7 @@ cc_library( ], copts = [ "-DOR_TOOLS_MAJOR=9", - "-DOR_TOOLS_MINOR=9", + "-DOR_TOOLS_MINOR=11", "-DOR_TOOLS_PATCH=9999", ], linkopts = select({ @@ -195,9 +195,13 @@ cc_library( cc_library( name = "file", - srcs = ["file.cc"], + srcs = [ + "file.cc", + "filesystem.cc", + ], hdrs = [ "file.h", + "filesystem.h", "helpers.h", "options.h", ], @@ -211,17 +215,6 @@ cc_library( ], ) -cc_library( - name = "filesystem", - srcs = ["filesystem.cc"], - hdrs = ["filesystem.h"], - deps = [ - ":file", - "@com_google_absl//absl/status", - "@com_google_absl//absl/strings", - ], -) - cc_library( name = "status_matchers", hdrs = ["status_matchers.h"], @@ -361,6 +354,7 @@ cc_library( cc_library( name = "mathutil", + srcs = ["mathutil.cc"], hdrs = ["mathutil.h"], deps = [ ":base", @@ -418,6 +412,20 @@ cc_library( ], ) +cc_library( + name = "temp_path", + srcs = ["temp_path.cc"], + hdrs = ["temp_path.h"], + deps = [ + ":base", + ":file", + "@com_google_absl//absl/log:check", + "@com_google_absl//absl/status", + "@com_google_absl//absl/strings", + 
"@com_google_absl//absl/time", + ], +) + cc_library( name = "protobuf_util", hdrs = ["protobuf_util.h"], diff --git a/ortools/base/CMakeLists.txt b/ortools/base/CMakeLists.txt index 81d2409e2dc..cffeca83ea8 100644 --- a/ortools/base/CMakeLists.txt +++ b/ortools/base/CMakeLists.txt @@ -38,5 +38,5 @@ target_link_libraries(${NAME} PRIVATE absl::strings absl::str_format protobuf::libprotobuf - ${PROJECT_NAMESPACE}::${PROJECT_NAME}_proto) + ${PROJECT_NAMESPACE}::ortools_proto) #add_library(${PROJECT_NAMESPACE}::base ALIAS ${NAME}) diff --git a/ortools/base/base.i b/ortools/base/base.i index 976adbd874e..5aa0599e122 100644 --- a/ortools/base/base.i +++ b/ortools/base/base.i @@ -89,6 +89,10 @@ PRIMITIVE_TYPEMAP(unsigned long int, unsigned long long); #undef PRIMITIVE_TYPEMAP #endif // defined(SWIGWORDSIZE64) +%include "exception.i" +%include "std_common.i" +%include "std_pair.i" + #endif // defined(SWIGCSHARP) // SWIG macros for explicit API declaration. diff --git a/ortools/base/file.cc b/ortools/base/file.cc index 6588bb67173..cd165938124 100644 --- a/ortools/base/file.cc +++ b/ortools/base/file.cc @@ -62,22 +62,36 @@ size_t File::Size() { bool File::Flush() { return fflush(f_) == 0; } +// Deletes "this" on closing. bool File::Close() { + bool ok = true; + if (f_ == nullptr) { + return ok; + } if (fclose(f_) == 0) { f_ = nullptr; - return true; } else { - return false; + ok = false; } + delete this; + return ok; } -absl::Status File::Close(int flags) { - if (flags != file::Defaults()) - return absl::Status(absl::StatusCode::kInvalidArgument, "Wrong flags"); - return Close() - ? absl::OkStatus() - : absl::Status(absl::StatusCode::kInvalidArgument, - absl::StrCat("Could not close file '", name_, "'")); +// Deletes "this" on closing. +absl::Status File::Close(int /*flags*/) { + absl::Status status; + if (f_ == nullptr) { + return status; + } + if (fclose(f_) == 0) { + f_ = nullptr; + } else { + status.Update( + absl::Status(absl::StatusCode::kInvalidArgument, + absl::StrCat("Could not close file '", name_, "'"))); + } + delete this; + return status; } void File::ReadOrDie(void* buf, size_t size) { @@ -154,8 +168,8 @@ void File::Init() {} namespace file { absl::Status Open(absl::string_view filename, absl::string_view mode, File** f, - int flags) { - if (flags == Defaults()) { + Options options) { + if (options == Defaults()) { *f = File::Open(filename, mode); if (*f != nullptr) { return absl::OkStatus(); @@ -165,50 +179,58 @@ absl::Status Open(absl::string_view filename, absl::string_view mode, File** f, absl::StrCat("Could not open '", filename, "'")); } -File* OpenOrDie(absl::string_view filename, absl::string_view mode, int flags) { +File* OpenOrDie(absl::string_view filename, absl::string_view mode, + Options options) { File* f; - CHECK_EQ(flags, Defaults()); + CHECK_EQ(options, Defaults()); f = File::Open(filename, mode); CHECK(f != nullptr) << absl::StrCat("Could not open '", filename, "'"); return f; } +absl::StatusOr GetContents(absl::string_view path, + Options options) { + std::string contents; + absl::Status status = GetContents(path, &contents, options); + if (!status.ok()) { + return status; + } + return contents; +} + absl::Status GetContents(absl::string_view filename, std::string* output, - int flags) { + Options options) { File* file; - auto status = file::Open(filename, "r", &file, flags); + auto status = file::Open(filename, "r", &file, options); if (!status.ok()) return status; const int64_t size = file->Size(); if (file->ReadToString(output, size) == size) { - 
status.Update(file->Close(flags)); - delete file; + status.Update(file->Close(options)); return status; } #if defined(_MSC_VER) // On windows, binary files needs to be opened with the "rb" flags. file->Close(); - delete file; // Retry in binary mode. - status = file::Open(filename, "rb", &file, flags); + status = file::Open(filename, "rb", &file, options); if (!status.ok()) return status; const int64_t b_size = file->Size(); if (file->ReadToString(output, b_size) == b_size) { - status.Update(file->Close(flags)); - delete file; + status.Update(file->Close(options)); return status; } #endif // _MSC_VER - file->Close(flags).IgnoreError(); // Even if ReadToString() fails! - delete file; + file->Close(options).IgnoreError(); // Even if ReadToString() fails! return absl::Status(absl::StatusCode::kInvalidArgument, absl::StrCat("Could not read from '", filename, "'.")); } -absl::Status WriteString(File* file, absl::string_view contents, int flags) { - if (flags == Defaults() && file != nullptr && +absl::Status WriteString(File* file, absl::string_view contents, + Options options) { + if (options == Defaults() && file != nullptr && file->Write(contents.data(), contents.size()) == contents.size()) { return absl::OkStatus(); } @@ -218,13 +240,12 @@ absl::Status WriteString(File* file, absl::string_view contents, int flags) { } absl::Status SetContents(absl::string_view filename, absl::string_view contents, - int flags) { + Options options) { File* file; - auto status = file::Open(filename, "w", &file, flags); + auto status = file::Open(filename, "w", &file, options); if (!status.ok()) return status; - status = file::WriteString(file, contents, flags); - status.Update(file->Close(flags)); // Even if WriteString() fails! - delete file; + status = file::WriteString(file, contents, options); + status.Update(file->Close(options)); // Even if WriteString() fails! 
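+  // Close() now deletes `file` (see File::Close() above), which is why the
+  // explicit `delete file;` calls were removed.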
return status; } @@ -240,7 +261,8 @@ namespace { class NoOpErrorCollector : public google::protobuf::io::ErrorCollector { public: ~NoOpErrorCollector() override = default; - void AddError(int line, int column, const std::string& message) override {} + void RecordError(int /*line*/, int /*column*/, + absl::string_view /*message*/) override {} }; } // namespace @@ -303,8 +325,8 @@ void WriteProtoToFileOrDie(const google::protobuf::Message& proto, } absl::Status GetTextProto(absl::string_view filename, - google::protobuf::Message* proto, int flags) { - if (flags == Defaults()) { + google::protobuf::Message* proto, Options options) { + if (options == Defaults()) { if (ReadFileToProto(filename, proto)) return absl::OkStatus(); } return absl::Status( @@ -313,8 +335,9 @@ absl::Status GetTextProto(absl::string_view filename, } absl::Status SetTextProto(absl::string_view filename, - const google::protobuf::Message& proto, int flags) { - if (flags == Defaults()) { + const google::protobuf::Message& proto, + Options options) { + if (options == Defaults()) { if (WriteProtoToASCIIFile(proto, filename)) return absl::OkStatus(); } return absl::Status( @@ -323,9 +346,9 @@ absl::Status SetTextProto(absl::string_view filename, } absl::Status GetBinaryProto(const absl::string_view filename, - google::protobuf::Message* proto, const int flags) { + google::protobuf::Message* proto, Options options) { std::string str; - if (flags == Defaults() && ReadFileToString(filename, &str) && + if (options == Defaults() && ReadFileToString(filename, &str) && proto->ParseFromString(str)) { return absl::OkStatus(); } @@ -335,8 +358,9 @@ absl::Status GetBinaryProto(const absl::string_view filename, } absl::Status SetBinaryProto(absl::string_view filename, - const google::protobuf::Message& proto, int flags) { - if (flags == Defaults()) { + const google::protobuf::Message& proto, + Options options) { + if (options == Defaults()) { if (WriteProtoToFile(proto, filename)) return absl::OkStatus(); } return absl::Status( @@ -344,8 +368,8 @@ absl::Status SetBinaryProto(absl::string_view filename, absl::StrCat("Could not write proto to '", filename, "'.")); } -absl::Status Delete(absl::string_view path, int flags) { - if (flags == Defaults()) { +absl::Status Delete(absl::string_view path, Options options) { + if (options == Defaults()) { std::string null_terminated_path = std::string(path); if (remove(null_terminated_path.c_str()) == 0) return absl::OkStatus(); } @@ -353,8 +377,8 @@ absl::Status Delete(absl::string_view path, int flags) { absl::StrCat("Could not delete '", path, "'.")); } -absl::Status Exists(absl::string_view path, int flags) { - if (flags == Defaults()) { +absl::Status Exists(absl::string_view path, Options options) { + if (options == Defaults()) { std::string null_terminated_path = std::string(path); if (access(null_terminated_path.c_str(), F_OK) == 0) { return absl::OkStatus(); diff --git a/ortools/base/file.h b/ortools/base/file.h index 0c408ed51b3..dcaa5269eff 100644 --- a/ortools/base/file.h +++ b/ortools/base/file.h @@ -112,35 +112,41 @@ inline Options Defaults() { return 0xBABA; } // The caller should free the File after closing it by passing *f to delete. absl::Status Open(absl::string_view filename, absl::string_view mode, File** f, - int flags); + Options options); // The caller should free the File after closing it by passing the returned // pointer to delete. 
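 // (With the File::Close() change in file.cc above, Close() now also deletes
 // the object itself, so the intended pattern is, as a sketch with a
 // placeholder path:
 //
 //   File* f = file::OpenOrDie("/path/to/data", "r", file::Defaults());
 //   ...
 //   CHECK_OK(f->Close(file::Defaults()));  // Close() also frees `f`.
 //
 // and no separate `delete f;` is needed.)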
-File* OpenOrDie(absl::string_view filename, absl::string_view mode, int flags);
+File* OpenOrDie(absl::string_view filename, absl::string_view mode,
+                Options options);

 absl::Status GetTextProto(absl::string_view filename,
-                          google::protobuf::Message* proto, int flags);
+                          google::protobuf::Message* proto, Options options);

 template <typename T>
-absl::StatusOr<T> GetTextProto(absl::string_view filename, int flags) {
+absl::StatusOr<T> GetTextProto(absl::string_view filename, Options options) {
   T proto;
-  RETURN_IF_ERROR(GetTextProto(filename, &proto, flags));
+  RETURN_IF_ERROR(GetTextProto(filename, &proto, options));
   return proto;
 }

 absl::Status SetTextProto(absl::string_view filename,
-                          const google::protobuf::Message& proto, int flags);
+                          const google::protobuf::Message& proto,
+                          Options options);

 absl::Status GetBinaryProto(absl::string_view filename,
-                            google::protobuf::Message* proto, int flags);
+                            google::protobuf::Message* proto, Options options);
 template <typename T>
-absl::StatusOr<T> GetBinaryProto(absl::string_view filename, int flags) {
+absl::StatusOr<T> GetBinaryProto(absl::string_view filename, Options options) {
   T proto;
-  RETURN_IF_ERROR(GetBinaryProto(filename, &proto, flags));
+  RETURN_IF_ERROR(GetBinaryProto(filename, &proto, options));
   return proto;
 }

 absl::Status SetBinaryProto(absl::string_view filename,
-                            const google::protobuf::Message& proto, int flags);
+                            const google::protobuf::Message& proto,
+                            Options options);

 absl::Status SetContents(absl::string_view filename, absl::string_view contents,
-                         int flags);
+                         Options options);
+absl::StatusOr<std::string> GetContents(absl::string_view path,
+                                        Options options);
 absl::Status GetContents(absl::string_view filename, std::string* output,
-                         int flags);
-absl::Status WriteString(File* file, absl::string_view contents, int flags);
+                         Options options);
+absl::Status WriteString(File* file, absl::string_view contents,
+                         Options options);

 bool ReadFileToString(absl::string_view file_name, std::string* output);
 bool WriteStringToFile(absl::string_view data, absl::string_view file_name);
@@ -157,8 +163,8 @@
 bool WriteProtoToFile(const google::protobuf::Message& proto,
                       absl::string_view file_name);
 void WriteProtoToFileOrDie(const google::protobuf::Message& proto,
                            absl::string_view file_name);

-absl::Status Delete(absl::string_view path, int flags);
-absl::Status Exists(absl::string_view path, int flags);
+absl::Status Delete(absl::string_view path, Options options);
+absl::Status Exists(absl::string_view path, Options options);

 }  // namespace file

diff --git a/ortools/base/filesystem.cc b/ortools/base/filesystem.cc
index 0d874e18599..a6d4e9164aa 100644
--- a/ortools/base/filesystem.cc
+++ b/ortools/base/filesystem.cc
@@ -14,14 +14,51 @@
 #include "ortools/base/filesystem.h"

 #include <filesystem>  // NOLINT(build/c++17)
+#include <regex>  // NOLINT

 #include "absl/status/status.h"
+#include "absl/strings/str_replace.h"
+
+namespace fs = std::filesystem;
+
+// Converts an absl::string_view into an object compatible with std::filesystem.
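+// (std::filesystem::path cannot be constructed from an absl::string_view
+// unless absl::string_view aliases std::string_view, hence this shim.)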
+#ifdef ABSL_USES_STD_STRING_VIEW +#define SV_ABSL_TO_STD(X) X +#else +#define SV_ABSL_TO_STD(X) std::string(X) +#endif namespace file { absl::Status Match(std::string_view pattern, std::vector* result, const file::Options& options) { - return absl::Status(); + try { + const auto search_dir = fs::path(SV_ABSL_TO_STD(pattern)).parent_path(); + const auto filename = fs::path(SV_ABSL_TO_STD(pattern)).filename().string(); + std::string regexp_filename = + absl::StrReplaceAll(filename, {{".", "\\."}, {"*", ".*"}, {"?", "."}}); + std::regex regexp_pattern(regexp_filename); + std::error_code error; + + const fs::directory_iterator path_end; + for (auto path = fs::directory_iterator(search_dir, error); + !error && path != path_end; path.increment(error)) { + if (!fs::is_regular_file(path->path())) { + continue; + } + if (std::regex_match(path->path().filename().string(), regexp_pattern)) { + result->push_back(path->path().string()); + } + } + if (error) { + return absl::InvalidArgumentError(error.message()); + } + + std::sort(result->begin(), result->end()); + return absl::OkStatus(); + } catch (const std::exception& e) { + return absl::InvalidArgumentError(e.what()); + } } absl::Status IsDirectory(std::string_view path, const file::Options& options) { @@ -34,4 +71,15 @@ absl::Status IsDirectory(std::string_view path, const file::Options& options) { } } +absl::Status RecursivelyCreateDir(std::string_view path, + const file::Options& options) { + (void)options; + try { + std::filesystem::create_directories(std::filesystem::path(path)); + return absl::OkStatus(); + } catch (const std::exception& e) { + return absl::InvalidArgumentError(e.what()); + } +} + } // namespace file diff --git a/ortools/base/filesystem.h b/ortools/base/filesystem.h index 5f0d57eae9b..466cb2ab0d1 100644 --- a/ortools/base/filesystem.h +++ b/ortools/base/filesystem.h @@ -28,6 +28,9 @@ absl::Status Match(std::string_view pattern, std::vector* result, absl::Status IsDirectory(std::string_view path, const file::Options& options); +absl::Status RecursivelyCreateDir(std::string_view path, + const file::Options& options); + } // namespace file #endif // OR_TOOLS_BASE_FILESYSTEM_H_ diff --git a/ortools/base/logging.cc b/ortools/base/logging.cc index d04eb6e0375..c88adac71c3 100644 --- a/ortools/base/logging.cc +++ b/ortools/base/logging.cc @@ -20,8 +20,6 @@ #include "absl/log/globals.h" #include "absl/log/initialize.h" -ABSL_FLAG(bool, logtostderr, false, "no op compatibility flag"); - namespace operations_research { namespace { diff --git a/ortools/base/logging.h b/ortools/base/logging.h index 4fdc7f8ab8c..79e745d2a8b 100644 --- a/ortools/base/logging.h +++ b/ortools/base/logging.h @@ -20,15 +20,13 @@ #include "absl/log/check.h" #include "absl/log/die_if_null.h" #include "absl/log/log.h" +#include "absl/log/vlog_is_on.h" #include "absl/memory/memory.h" #include "absl/status/status.h" #include "absl/strings/str_cat.h" #include "absl/strings/string_view.h" #include "ortools/base/macros.h" -// Compatibility layer for glog/previous logging code. -ABSL_DECLARE_FLAG(bool, logtostderr); - // Forward the new flag. 
ABSL_DECLARE_FLAG(int, stderrthreshold);
diff --git a/ortools/base/map_util.h b/ortools/base/map_util.h
index c44acfbe4bb..bcc20b55f15 100644
--- a/ortools/base/map_util.h
+++ b/ortools/base/map_util.h
@@ -19,17 +19,23 @@
 #include "ortools/base/logging.h"

 namespace gtl {
+template <typename M>
+using MapUtilValueT = typename M::value_type;
+template <typename M>
+using MapUtilKeyT = typename MapUtilValueT<M>::first_type;
+template <typename M>
+using MapUtilMappedT = typename MapUtilValueT<M>::second_type;
+
 // Perform a lookup in a std::map or std::unordered_map.
 // If the key is present in the map then the value associated with that
 // key is returned, otherwise the value passed as a default is returned.
 //
 // Prefer the two-argument form unless you need to specify a custom default
 // value (i.e., one that is not equal to a value-initialized instance).
-template <class Collection>
-const typename Collection::value_type::second_type& FindWithDefault(
-    const Collection& collection,
-    const typename Collection::value_type::first_type& key,
-    const typename Collection::value_type::second_type& value) {
+template <typename Collection, typename KeyType = MapUtilKeyT<Collection>>
+const MapUtilMappedT<Collection>& FindWithDefault(
+    const Collection& collection, const KeyType& key,
+    const MapUtilMappedT<Collection>& value) {
   typename Collection::const_iterator it = collection.find(key);
   if (it == collection.end()) {
     return value;
@@ -40,12 +46,11 @@
 // Returns a const reference to the value associated with the given key if it
 // exists, otherwise returns a const reference to a value-initialized object
 // that is never destroyed.
-template <class Collection>
-const typename Collection::value_type::second_type& FindWithDefault(
-    const Collection& collection,
-    const typename Collection::value_type::first_type& key) {
-  static const typename Collection::value_type::second_type* const
-      default_value = new typename Collection::value_type::second_type{};
+template <typename Collection, typename KeyType = MapUtilKeyT<Collection>>
+const MapUtilMappedT<Collection>& FindWithDefault(const Collection& collection,
+                                                  const KeyType& key) {
+  static const MapUtilMappedT<Collection>* const default_value =
+      new MapUtilMappedT<Collection>{};
   typename Collection::const_iterator it = collection.find(key);
   if (it == collection.end()) {
     return *default_value;
diff --git a/ortools/base/mathutil.cc b/ortools/base/mathutil.cc
new file mode 100644
index 00000000000..254bfccf650
--- /dev/null
+++ b/ortools/base/mathutil.cc
@@ -0,0 +1,55 @@
+// Copyright 2010-2024 Google LLC
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
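+
+// Implementation note: Stirling() below evaluates the series
+//   log(n!) ~= n*log(n) - n + 0.5*log(2*pi*n) + 1/(12n) - 1/(360n^3),
+// which is what the kLog2Pi/logN expression in the code computes, and
+// LogCombinations() falls back on it once k > 15.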
+ +#if defined(_MSC_VER) +#define _USE_MATH_DEFINES +#include +#endif + +#include "ortools/base/logging.h" +#include "ortools/base/mathutil.h" + +namespace operations_research { + +// The formula is extracted from the following page +// http://en.wikipedia.org/w/index.php?title=Stirling%27s_approximation +double MathUtil::Stirling(double n) { + static const double kLog2Pi = log(2 * M_PI); + const double logN = log(n); + return (n * logN - n + 0.5 * (kLog2Pi + logN) // 0.5 * log(2 * M_PI * n) + + 1 / (12 * n) - 1 / (360 * n * n * n)); +} + +double MathUtil::LogCombinations(int n, int k) { + CHECK_GE(n, k); + CHECK_GT(n, 0); + CHECK_GE(k, 0); + + // use symmetry to pick the shorter calculation + if (k > n / 2) { + k = n - k; + } + + // If we have more than 30 logarithms to calculate, we'll use + // Stirling's approximation for log(n!). + if (k > 15) { + return Stirling(n) - Stirling(k) - Stirling(n - k); + } else { + double result = 0; + for (int i = 1; i <= k; i++) { + result += log(n - k + i) - log(i); + } + return result; + } +} +} // namespace operations_research diff --git a/ortools/base/mathutil.h b/ortools/base/mathutil.h index fdb824d0acf..0d24888d323 100644 --- a/ortools/base/mathutil.h +++ b/ortools/base/mathutil.h @@ -17,7 +17,9 @@ #include #include +#include #include +#include #include #include "absl/base/casts.h" @@ -134,7 +136,148 @@ class MathUtil { return static_cast(x < 0 ? (x - 0.5) : (x + 0.5)); } + // Returns the minimum integer value which is a multiple of rounding_value, + // and greater than or equal to input_value. + // The input_value must be greater than or equal to zero, and the + // rounding_value must be greater than zero. + template + static IntType RoundUpTo(IntType input_value, IntType rounding_value) { + static_assert(std::numeric_limits::is_integer, + "RoundUpTo() operation type is not integer"); + DCHECK_GE(input_value, 0); + DCHECK_GT(rounding_value, 0); + const IntType remainder = input_value % rounding_value; + return (remainder == 0) ? input_value + : (input_value - remainder + rounding_value); + } + + // Convert a floating-point number to an integer. For all inputs x where + // static_cast(x) is legal according to the C++ standard, the result + // is identical to that cast (i.e. the result is x with its fractional part + // truncated whenever that is representable as IntOut). + // + // static_cast would cause undefined behavior for the following cases, which + // have well-defined behavior for this function: + // + // 1. If x is NaN, the result is zero. + // + // 2. If the truncated form of x is above the representable range of IntOut, + // the result is std::numeric_limits::max(). + // + // 3. If the truncated form of x is below the representable range of IntOut, + // the result is std::numeric_limits::lowest(). + // + // Note that cases #2 and #3 cover infinities as well as finite numbers. + // + // The range of FloatIn must include the range of IntOut, otherwise + // the results are undefined. + template + static IntOut SafeCast(FloatIn x) { + COMPILE_ASSERT(!std::numeric_limits::is_integer, + FloatIn_is_integer); + COMPILE_ASSERT(std::numeric_limits::is_integer, + IntOut_is_not_integer); + COMPILE_ASSERT(std::numeric_limits::radix == 2, IntOut_is_base_2); + + // Special case NaN, for which the logic below doesn't work. + if (std::isnan(x)) { + return 0; + } + + // Negative values all clip to zero for unsigned results. + if (!std::numeric_limits::is_signed && x < 0) { + return 0; + } + + // Handle infinities. + if (std::isinf(x)) { + return x < 0 ? 
std::numeric_limits::lowest() + : std::numeric_limits::max(); + } + + // Set exp such that x == f * 2^exp for some f with |f| in [0.5, 1.0), + // unless x is zero in which case exp == 0. Note that this implies that the + // magnitude of x is strictly less than 2^exp. + int exp = 0; + std::frexp(x, &exp); + + // Let N be the number of non-sign bits in the representation of IntOut. If + // the magnitude of x is strictly less than 2^N, the truncated version of x + // is representable as IntOut. The only representable integer for which this + // is not the case is kMin for signed types (i.e. -2^N), but that is covered + // by the fall-through below. + if (exp <= std::numeric_limits::digits) { + return x; + } + + // Handle numbers with magnitude >= 2^N. + return x < 0 ? std::numeric_limits::lowest() + : std::numeric_limits::max(); + } + + // -------------------------------------------------------------------- + // SafeRound + // These functions round a floating-point number to an integer. + // Results are identical to Round, except in cases where + // the argument is NaN, or when the rounded value would overflow the + // return type. In those cases, Round has undefined + // behavior. SafeRound returns 0 when the argument is + // NaN, and returns the closest possible integer value otherwise (i.e. + // std::numeric_limits::max() for large positive values, and + // std::numeric_limits::lowest() for large negative values). + // The range of FloatIn must include the range of IntOut, otherwise + // the results are undefined. + // -------------------------------------------------------------------- + template + static IntOut SafeRound(FloatIn x) { + COMPILE_ASSERT(!std::numeric_limits::is_integer, + FloatIn_is_integer); + COMPILE_ASSERT(std::numeric_limits::is_integer, + IntOut_is_not_integer); + + if (std::isnan(x)) { + return 0; + } else { + return SafeCast((x < 0.) ? (x - 0.5) : (x + 0.5)); + } + } + + // -------------------------------------------------------------------- + // FastInt64Round + // Fast routines for converting floating-point numbers to integers. + // + // These routines are approximately 6 times faster than the default + // implementation of Round on Intel processors (12 times faster on + // the Pentium 3). They are also more than 5 times faster than simply + // casting a "double" to an "int" using static_cast. This is + // because casts are defined to truncate towards zero, which on Intel + // processors requires changing the rounding mode and flushing the + // floating-point pipeline (unless programs are compiled specifically + // for the Pentium 4, which has a new instruction to avoid this). + // + // Numbers that are halfway between two integers may be rounded up or + // down. This is because the conversion is done using the default + // rounding mode, which rounds towards the closest even number in case + // of ties. So for example, FastIntRound(0.5) == 0, but + // FastIntRound(1.5) == 2. These functions should only be used with + // applications that don't care about which way such half-integers are + // rounded. + // + // There are template specializations of Round() which call these + // functions (for "int" and "int64" only), but it's safer to call them + // directly. static int64_t FastInt64Round(double x) { return Round(x); } + + // Returns Stirling's Approximation for log(n!) which has an error + // of at worst 1/(1260*n^5). + static double Stirling(double n); + + // Returns the log of the binomial coefficient C(n, k), known in the + // vernacular as "N choose K". 
Why log? Because the integer value
+  // would overflow for all but trivial N and K.
+  // Note that if k > 15, this uses Stirling's approximation of log(n!).
+  // The relative error is about 1/(1260*k^5) (which is 7.6e-10 when k=16).
+  static double LogCombinations(int n, int k);
 };
 }  // namespace operations_research
diff --git a/ortools/base/strong_int.h b/ortools/base/strong_int.h
index 3ec2e118e02..aed5f295c83 100644
--- a/ortools/base/strong_int.h
+++ b/ortools/base/strong_int.h
@@ -153,6 +153,7 @@
 #include

 #include "absl/base/port.h"
+#include "absl/strings/str_format.h"
 #include "absl/strings/string_view.h"
 #include "ortools/base/macros.h"
@@ -298,6 +299,23 @@
 std::ostream& operator<<(std::ostream& os,  // NOLINT
                          StrongInt<IntTypeName, ValueType> arg) {
   return os << arg.value();
 }

+// Define AbslStringify, for absl::StrAppend, absl::StrCat, and absl::StrFormat.
+//
+// When using StrongInt with absl::StrFormat, use the "%v" specifier.
+template <typename Sink, typename... T>
+void AbslStringify(Sink& sink, StrongInt<T...> arg) {
+  using ValueType = typename decltype(arg)::ValueType;
+  // int8_t/uint8_t are not supported by the "%v" specifier due to it being
+  // ambiguous whether an integer or character should be printed.
+  if constexpr (std::is_same_v<ValueType, int8_t>) {
+    absl::Format(&sink, "%d", arg.value());
+  } else if constexpr (std::is_same_v<ValueType, uint8_t>) {
+    absl::Format(&sink, "%u", arg.value());
+  } else {
+    absl::Format(&sink, "%v", arg.value());
+  }
+}
+
 // -- NON-MEMBER ARITHMETIC OPERATORS ------------------------------------------
 // We support only the +, -, *, and / operators with the same StrongInt and
 // ValueType types. The reason is to allow simple manipulation on these IDs
diff --git a/ortools/base/strong_vector.h b/ortools/base/strong_vector.h
index 38a7ba4f276..3441492ab7f 100644
--- a/ortools/base/strong_vector.h
+++ b/ortools/base/strong_vector.h
@@ -12,31 +12,30 @@
 // limitations under the License.

 // This file provides the StrongVector container that wraps around the STL
-// std::vector.
-// The wrapper restricts indexing to a pre-specified type-safe integer type or
-// IntType (see int_type.h). It prevents accidental indexing
-// by different "logical" integer-like types (e.g. another IntType) or native
-// integer types. The wrapper is useful as C++ and the standard template
-// library allows the user to mix "logical" integral indices that might have a
-// different role.
+// vector. The wrapper restricts indexing to a pre-specified type-safe integer
+// type or StrongInt (see util/intops/strong_int.h). It prevents accidental
+// indexing by different "logical" integer-like types (e.g. another StrongInt)
+// or native integer types. The wrapper is useful because C++ and the standard
+// template library otherwise allow mixing "logical" integral indices that
+// might have a different role.
 //
-// The container can only be indexed by an instance of an IntType class, which
+// The container can only be indexed by an instance of a StrongInt class, which
 // can be declared as:
 //
-// DEFINE_INT_TYPE(IntTypeName, IntTypeValueType);
+// DEFINE_STRONG_INT_TYPE(type_name, value_type);
 //
-// where IntTypeName is the desired name for the "logical" integer-like type
-// and the ValueType is a supported native integer type such as int or
-// uint64_t (see int_type.h for details).
+// where type_name is the desired name for the "logical" integer-like type
+// and the value_type is a supported native integer type such as int or
+// uint64_t (see util/intops/strong_int.h for details).
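+//
+// For instance (a sketch; `MyIndex` is a placeholder name):
+//
+//   DEFINE_STRONG_INT_TYPE(MyIndex, int32_t);
+//
+// after which `MyIndex` can serve as the IntType parameter below.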
// // The wrapper exposes all public methods of STL vector and behaves mostly as -// pass-through. The only method modified to ensure type-safety is the operator -// [] and the at() method. +// pass-through. The only methods modified to ensure type-safety are the +// operator [] and the at() methods. // // EXAMPLES -------------------------------------------------------------------- // -// DEFINE_INT_TYPE(PhysicalChildIndex, int32_t); -// absl::StrongVector vec; +// DEFINE_STRONG_INT_TYPE(PhysicalChildIndex, int32_t); +// StrongVector vec; // // PhysicalChildIndex physical_index; // vec[physical_index] = ...; <-- index type match: compiles properly. @@ -46,42 +45,39 @@ // vec[physical_index] = ...; <-- fails to compile. // vec.at(physical_index) = ...; <-- fails to compile. // -// DEFINE_INT_TYPE(LogicalChildIndex, int32_t); -// int32_t logical_index; +// DEFINE_STRONG_INT_TYPE(LogicalChildIndex, int32_t); +// LogicalChildIndex logical_index; // vec[logical_index] = ...; <-- fails to compile. // vec.at(logical_index) = ...; <-- fails to compile. // -// NB: Iterator arithmetic is not allowed as the iterators are not wrapped -// themselves. Therefore, the following caveat is possible: -// *(vec.begin() + 0) = ...; +// NB: Iterator arithmetic bypasses strong typing for the index. +// +// OVERFLOW BEHAVIOR +// +// This class ONLY guards against growing the size beyond the range +// indexable by the index type in debug mode. In optimized mode the +// user can CHECK IsValidSize() when deemed important. #ifndef OR_TOOLS_BASE_STRONG_VECTOR_H_ #define OR_TOOLS_BASE_STRONG_VECTOR_H_ -#include - -#include -#include +#include #include -#include #include -#include "ortools/base/int_type.h" -#include "ortools/base/macros.h" +#include "ortools/base/logging.h" +#include "ortools/base/strong_int.h" -namespace absl { +namespace util_intops { -// STL vector ------------------------------------------------------------------ -template > -class StrongVector { +template > +class StrongVector : protected std::vector { public: - typedef IntType IndexType; - typedef std::vector ParentType; - + typedef std::vector ParentType; typedef typename ParentType::size_type size_type; typedef typename ParentType::allocator_type allocator_type; typedef typename ParentType::value_type value_type; - typedef typename ParentType::difference_type difference_type; typedef typename ParentType::reference reference; typedef typename ParentType::const_reference const_reference; typedef typename ParentType::pointer pointer; @@ -93,115 +89,225 @@ class StrongVector { public: StrongVector() {} - - explicit StrongVector(const allocator_type& a) : v_(a) {} - explicit StrongVector(size_type n) : v_(n) {} + explicit StrongVector(const allocator_type& a) : ParentType(a) {} + explicit StrongVector(size_type n) : ParentType(n) { DCHECK(IsValidSize()); } explicit StrongVector(IntType n) : StrongVector(static_cast(n.value())) {} - - StrongVector(size_type n, const value_type& v, + explicit StrongVector(size_type n, const value_type& v, + const allocator_type& a = allocator_type()) + : ParentType(n, v, a) { + DCHECK(IsValidSize()); + } + explicit StrongVector(IntType n, const value_type& v, + const allocator_type& a = allocator_type()) + : StrongVector(static_cast(n.value()), v, a) {} + StrongVector(const StrongVector& x) : ParentType(x.get()) { + DCHECK(IsValidSize()); + } + StrongVector(StrongVector&& x) = default; + StrongVector(std::initializer_list l, const allocator_type& a = allocator_type()) - : v_(n, v, a) {} - - StrongVector( - 
std::initializer_list il) // NOLINT(runtime/explicit) - : v_(il) {} - + : ParentType(l, a) { + DCHECK(IsValidSize()); + } template StrongVector(InputIteratorType first, InputIteratorType last, const allocator_type& a = allocator_type()) - : v_(first, last, a) {} + : ParentType(first, last, a) { + DCHECK(IsValidSize()); + } + ~StrongVector() {} // -- Accessors -------------------------------------------------------------- // This const accessor is useful in defining the comparison operators below. - const ParentType& get() const { return v_; } - // The mutable accessor is useful when using auxiliar methods relying on - // vector parameters such as JoinUsing(), SplitStringUsing(), etc. Methods + const ParentType& get() const { return *this; } + // The mutable accessor is useful when using auxiliary methods relying on + // vector parameters such as JoinUsing(), SplitStringUsing(), etc. Methods // relying solely on iterators (e.g. STLDeleteElements) should work just fine - // without the need for mutable_get(). NB: It should be used only in this + // without the need for mutable_get(). NB: It should be used only in this // case and thus should not be abused to index the underlying vector without // the appropriate IntType. - ParentType* mutable_get() { return &v_; } + ParentType* mutable_get() { return this; } // -- Modified methods ------------------------------------------------------- - reference operator[](IndexType i) { return v_[Value(i)]; } - const_reference operator[](IndexType i) const { return v_[Value(i)]; } - reference at(IndexType i) { return v_.at(Value(i)); } - const_reference at(IndexType i) const { return v_.at(Value(i)); } + reference operator[](IntType i) { + return ParentType::operator[](static_cast(i.value())); + } + const_reference operator[](IntType i) const { + return ParentType::operator[](static_cast(i.value())); + } + reference at(IntType i) { + return ParentType::at(static_cast(i.value())); + } + const_reference at(IntType i) const { + return ParentType::at(static_cast(i.value())); + } + + // -- Extension methods ------------------------------------------------------ + + // Iteration related methods. Useful for parallel iteration and + // non-trivial access patterns. Typical loop will be: + // for (auto i = v.start_index(); i < v.end_index(); ++i) ... + IntType start_index() const { return IntType(0); } + // Index following the last valid index into the vector. In case + // size() has grown beyond values representable by IntType, this + // function will truncate the result. There is a debugging check for + // such behavior, but it is unlikely to be triggered in testing. + IntType end_index() const { + DCHECK(IsValidSize()); + return IntType(size()); + } + + // Returns true if the vector is fully addressable by the index type. + bool IsValidSize() const { return ValidSize(size()); } + + // Most methods from vector can be reused without any changes. + using ParentType::back; + using ParentType::begin; + using ParentType::capacity; + using ParentType::cbegin; + using ParentType::cend; + using ParentType::clear; + using ParentType::empty; + using ParentType::end; + using ParentType::erase; + using ParentType::front; + using ParentType::max_size; + using ParentType::pop_back; + using ParentType::rbegin; + using ParentType::rend; + using ParentType::shrink_to_fit; + + // Returns an iterator of valid indices into this vector. Goes from + // start_index() to end_index(). This is useful for cases of + // parallel iteration over several vectors indexed by the same type, e.g. 
+ // StrongVector v1; + // StrongVector v2; + // CHECK_EQ(v1.size(), v2.size()); + // for (const auto i : v1.index_range()) { + // do_stuff(v1[i], v2[i]); + // } + StrongIntRange index_range() const { + return StrongIntRange(start_index(), end_index()); + } // -- Pass-through methods to STL vector ------------------------------------- - void assign(size_type n, const value_type& val) { v_.assign(n, val); } + + // Note that vector::data() does not exist. By wrapping data() + // below, this allows StrongVector to still compile, as long as + // StrongVector::data() is never called. + value_type* data() { return ParentType::data(); } + const value_type* data() const { return ParentType::data(); } + + StrongVector& operator=(const StrongVector& x) { + ParentType::operator=(x.get()); + return *this; + } + StrongVector& operator=(StrongVector&& x) = default; + StrongVector& operator=(std::initializer_list l) { + ParentType::operator=(l); + DCHECK(IsValidSize()); + return *this; + } + + void swap(StrongVector& x) noexcept { ParentType::swap(*x.mutable_get()); } + + void assign(size_type n, const value_type& val) { + DCHECK(ValidSize(n)); + ParentType::assign(n, val); + } template void assign(InputIt f, InputIt l) { - v_.assign(f, l); + ParentType::assign(f, l); + DCHECK(IsValidSize()); + } + void assign(std::initializer_list l) { + ParentType::assign(l); + DCHECK(IsValidSize()); } - void assign(std::initializer_list ilist) { v_.assign(ilist); } - iterator begin() { return v_.begin(); } - const_iterator begin() const { return v_.begin(); } - iterator end() { return v_.end(); } - const_iterator end() const { return v_.end(); } - reverse_iterator rbegin() { return v_.rbegin(); } - const_reverse_iterator rbegin() const { return v_.rbegin(); } - reverse_iterator rend() { return v_.rend(); } - const_reverse_iterator rend() const { return v_.rend(); } + template + iterator emplace(const_iterator pos, Args&&... args) { + iterator result = ParentType::emplace(pos, std::forward(args)...); + DCHECK(IsValidSize()); + return result; + } - size_type size() const { return v_.size(); } - size_type max_size() const { return v_.max_size(); } + template + reference emplace_back(Args&&... 
args) { + reference value = ParentType::emplace_back(std::forward(args)...); + DCHECK(IsValidSize()); + return value; + } - void resize(size_type new_size) { v_.resize(new_size); } - void resize(size_type new_size, const value_type& x) { - v_.resize(new_size, x); + iterator insert(const_iterator pos, const value_type& x) { + iterator result = ParentType::insert(pos, x); + DCHECK(IsValidSize()); + return result; } - void resize(IntType new_size) { v_.resize(new_size.value()); } - void resize(IntType new_size, const value_type& x) { - v_.resize(new_size.value(), x); + iterator insert(const_iterator pos, value_type&& x) { + iterator result = ParentType::insert(pos, std::move(x)); + DCHECK(IsValidSize()); + return result; + } + void insert(const_iterator pos, size_type n, const value_type& x) { + ParentType::insert(pos, n, x); + DCHECK(IsValidSize()); + } + template + void insert(const_iterator pos, SIT first, SIT last) { + ParentType::insert(pos, first, last); + DCHECK(IsValidSize()); } - size_type capacity() const { return v_.capacity(); } - bool empty() const { return v_.empty(); } - void reserve(size_type n) { v_.reserve(n); } - void reserve(IntType n) { reserve(static_cast(n.value())); } - void push_back(const value_type& x) { v_.push_back(x); } - void push_back(value_type&& x) { v_.push_back(std::move(x)); } // NOLINT - template - void emplace_back(Args&&... args) { - v_.emplace_back(std::forward(args)...); + void push_back(const value_type& val) { + ParentType::push_back(val); + DCHECK(IsValidSize()); } - template - iterator emplace(const_iterator pos, Args&&... args) { - return v_.emplace(pos, std::forward(args)...); + void push_back(value_type&& val) { + ParentType::push_back(std::move(val)); + DCHECK(IsValidSize()); + } + + void reserve(size_type n) { + DCHECK(ValidSize(n)); + ParentType::reserve(n); } - void pop_back() { v_.pop_back(); } - void swap(StrongVector& x) { v_.swap(x.v_); } - void clear() { v_.clear(); } - reference front() { return v_.front(); } - const_reference front() const { return v_.front(); } - reference back() { return v_.back(); } - const_reference back() const { return v_.back(); } - pointer data() { return v_.data(); } - const_pointer data() const { return v_.data(); } + void reserve(IntType n) { reserve(static_cast(n.value())); } - iterator erase(const_iterator pos) { return v_.erase(pos); } - iterator erase(const_iterator first, const_iterator last) { - return v_.erase(first, last); + void resize(size_type new_size) { + DCHECK(ValidSize(new_size)); + ParentType::resize(new_size); } - iterator insert(const_iterator pos, const value_type& x) { - return v_.insert(pos, x); + + void resize(IntType new_size) { + resize(static_cast(new_size.value())); } - iterator insert(const_iterator pos, value_type&& x) { // NOLINT - return v_.insert(pos, std::move(x)); + + void resize(size_type new_size, const value_type& x) { + DCHECK(ValidSize(new_size)); + ParentType::resize(new_size, x); } - iterator insert(const_iterator pos, size_type n, const value_type& x) { - return v_.insert(pos, n, x); + + void resize(IntType new_size, const value_type& x) { + resize(static_cast(new_size.value()), x); } - template - iterator insert(const_iterator pos, IIt first, IIt last) { - return v_.insert(pos, first, last); + + using ParentType::size; + + static_assert(std::is_integral::value, + "int type indexed vector must have integral index"); + + template + friend H AbslHashValue(H h, const StrongVector& v) { + return H::combine(std::move(h), v.get()); } - iterator insert(const_iterator pos, 
std::initializer_list ilist) { - return v_.insert(pos, ilist); + + private: + // Checks that the given value n is in range of the index type. + static bool ValidSize(size_type n) { + return n <= std::numeric_limits::max(); } friend bool operator==(const StrongVector& x, const StrongVector& y) { @@ -222,22 +328,9 @@ class StrongVector { friend bool operator>=(const StrongVector& x, const StrongVector& y) { return x.get() >= y.get(); } - friend void swap(StrongVector& x, StrongVector& y) { x.swap(y); } - - template - friend H AbslHashValue(H h, const StrongVector& v) { - return H::combine(std::move(h), v.v_); - } - - private: - static size_type Value(IndexType i) { return i.template value(); } - - ParentType v_; - - COMPILE_ASSERT(std::is_integral::value, - int_type_indexed_vector_must_have_integral_index); + friend void swap(StrongVector& x, StrongVector& y) noexcept { x.swap(y); } }; -} // namespace absl +} // namespace util_intops #endif // OR_TOOLS_BASE_STRONG_VECTOR_H_ diff --git a/ortools/base/sysinfo.cc b/ortools/base/sysinfo.cc index 7231eb67c72..bb019f1a4cb 100644 --- a/ortools/base/sysinfo.cc +++ b/ortools/base/sysinfo.cc @@ -17,7 +17,7 @@ #if defined(__APPLE__) && defined(__GNUC__) // MacOS #include #include -#elif defined(__FreeBSD__) // FreeBSD +#elif (defined(__FreeBSD__) || defined(__OpenBSD__)) // FreeBSD or OpenBSD #include #include // Windows @@ -48,8 +48,8 @@ int64_t GetProcessMemoryUsage() { int64_t resident_memory = t_info.resident_size; return resident_memory; } -#elif defined(__GNUC__) && !defined(__FreeBSD__) && \ - !defined(__EMSCRIPTEN__) && !defined(_WIN32) // Linux +#elif defined(__GNUC__) && !defined(__FreeBSD__) && !defined(__OpenBSD__) && \ + !defined(__EMSCRIPTEN__) && !defined(_WIN32) // Linux int64_t GetProcessMemoryUsage() { unsigned size = 0; char buf[30]; @@ -61,15 +61,15 @@ int64_t GetProcessMemoryUsage() { fclose(pf); return int64_t{1024} * size; } -#elif defined(__FreeBSD__) // FreeBSD +#elif (defined(__FreeBSD__) || defined(__OpenBSD__)) // FreeBSD or OpenBSD int64_t GetProcessMemoryUsage() { int who = RUSAGE_SELF; struct rusage rusage; getrusage(who, &rusage); return (int64_t)(int64_t{1024} * rusage.ru_maxrss); } -// Windows -#elif defined(_MSC_VER) || defined(__MINGW32__) || defined(__MINGW64__) +#elif defined(_MSC_VER) || defined(__MINGW32__) || \ + defined(__MINGW64__) // Windows int64_t GetProcessMemoryUsage() { HANDLE hProcess; PROCESS_MEMORY_COUNTERS pmc; @@ -84,7 +84,7 @@ int64_t GetProcessMemoryUsage() { } return memory; } -#else // Unknown, returning 0. +#else // Unknown, returning 0. int64_t GetProcessMemoryUsage() { return 0; } #endif diff --git a/ortools/base/temp_path.cc b/ortools/base/temp_path.cc new file mode 100644 index 00000000000..1e34b6ec7f1 --- /dev/null +++ b/ortools/base/temp_path.cc @@ -0,0 +1,82 @@ +// Copyright 2010-2024 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
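To make the reworked StrongVector concrete, a small sketch of typed indexing and index_range(), assuming DEFINE_STRONG_INT_TYPE and StrongIntRange are available as referenced in the hunks above; NodeIndex is an illustrative type.

#include <cstdint>
#include <iostream>

#include "ortools/base/strong_int.h"
#include "ortools/base/strong_vector.h"

DEFINE_STRONG_INT_TYPE(NodeIndex, int32_t);

int main() {
  // Three elements, all 1.0, sized with the strong index type.
  util_intops::StrongVector<NodeIndex, double> weight(NodeIndex(3), 1.0);
  weight[NodeIndex(1)] = 2.5;  // Typed index: compiles.
  // weight[1] = 2.5;          // Raw int index: fails to compile.
  for (const NodeIndex i : weight.index_range()) {
    std::cout << i.value() << " -> " << weight[i] << "\n";
  }
}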
+ +#include "ortools/base/temp_path.h" + +#include + +#include "absl/log/check.h" +#include "absl/strings/str_cat.h" +#include "absl/time/time.h" +#include "ortools/base/filesystem.h" + +namespace file { + +std::string TempFile(absl::string_view prefix) { + std::string path; + if (prefix.empty()) { + path = absl::StrCat(absl::ToUnixMicros(absl::Now())); + } else { + path = absl::StrCat(prefix, "_", absl::ToUnixMicros(absl::Now())); + } + return path; +} + +} // namespace file + +TempPath::TempPath(absl::string_view prefix) : path_(file::TempFile(prefix)) { + CHECK_OK(Init(kDefaultMode)); +} + +TempPath::TempPath(absl::string_view prefix, absl::Status* status) + : path_(file::TempFile(prefix)) { + *status = Init(kDefaultMode); +} + + TempPath::TempPath(TempPath && rhs) : path_(std::move(rhs.path_)) {} + + TempPath& TempPath::operator=(TempPath&& rhs) { + TempPath tmp(std::move(*this)); + path_ = std::move(rhs.path_); + return *this; + } + + TempPath::~TempPath() {} + + TempPath* TempPath::Create(Location location) { + std::string dirname; + switch (location) { + case Local: + dirname = file::TempFile(""); + } + if (dirname.empty()) { + return nullptr; + } + absl::Status status; + TempPath* temp_path = new TempPath(dirname, &status); + if (!status.ok()) { + delete temp_path; + return nullptr; + } + return temp_path; + } + + TempPath::TempPath(const std::string& dirname, file::Options options, + absl::Status* status) + : path_(dirname) { + *status = Init(options); + } + + absl::Status TempPath::Init(file::Options options) { + return file::RecursivelyCreateDir(path(), options); + } diff --git a/ortools/base/temp_path.h b/ortools/base/temp_path.h new file mode 100644 index 00000000000..14f4388d5a2 --- /dev/null +++ b/ortools/base/temp_path.h @@ -0,0 +1,62 @@ +// Copyright 2010-2024 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef OR_TOOLS_BASE_TEMP_PATH_H_ +#define OR_TOOLS_BASE_TEMP_PATH_H_ + +#include + +#include "absl/status/status.h" +#include "absl/strings/string_view.h" +#include "ortools/base/file.h" + +class TempPath { + public: + // default mode to create directories (a+rwx): + static constexpr int kDefaultMode = 0777; + + explicit TempPath(absl::string_view prefix); + TempPath(absl::string_view prefix, absl::Status* status); + + // TempPath is moveable, but not copyable. + TempPath(TempPath&& rhs); + TempPath(const TempPath& rhs) = delete; + TempPath& operator=(TempPath&& rhs); + TempPath& operator=(const TempPath& rhs) = delete; + + ~TempPath(); + + // Returns the path which was created by this object. + std::string path() const { return path_; } + + enum Location { + Local, + }; + + static TempPath* Create(Location location); + + private: + // Internal constructor for Create* methods. + TempPath(const std::string& dirname, file::Options options, + absl::Status* status); + + // Shared initialization among constructors. + // Makes directory given by path() and `options`. 
+ absl::Status Init(file::Options options); + + std::string path_; +}; + +namespace file {} // namespace file + +#endif // OR_TOOLS_BASE_TEMP_PATH_H_ diff --git a/ortools/base/threadpool.cc b/ortools/base/threadpool.cc index 16f352d4855..9dc07fa315f 100644 --- a/ortools/base/threadpool.cc +++ b/ortools/base/threadpool.cc @@ -29,8 +29,10 @@ void RunWorker(void* data) { } } -ThreadPool::ThreadPool(absl::string_view prefix, int num_workers) - : num_workers_(num_workers) {} +ThreadPool::ThreadPool(int num_threads) : num_workers_(num_threads) {} + +ThreadPool::ThreadPool(absl::string_view /*prefix*/, int num_threads) + : num_workers_(num_threads) {} ThreadPool::~ThreadPool() { if (started_) { diff --git a/ortools/base/threadpool.h b/ortools/base/threadpool.h index 0f7e6a8df9c..7a31b0f2641 100644 --- a/ortools/base/threadpool.h +++ b/ortools/base/threadpool.h @@ -27,6 +27,7 @@ namespace operations_research { class ThreadPool { public: + explicit ThreadPool(int num_threads); ThreadPool(absl::string_view prefix, int num_threads); ~ThreadPool(); diff --git a/ortools/bop/CMakeLists.txt b/ortools/bop/CMakeLists.txt index 31441a13141..d608d5b24d7 100644 --- a/ortools/bop/CMakeLists.txt +++ b/ortools/bop/CMakeLists.txt @@ -28,5 +28,5 @@ target_link_libraries(${NAME} PRIVATE absl::synchronization absl::str_format protobuf::libprotobuf - ${PROJECT_NAMESPACE}::${PROJECT_NAME}_proto) + ${PROJECT_NAMESPACE}::ortools_proto) #add_library(${PROJECT_NAMESPACE}::bop ALIAS ${NAME}) diff --git a/ortools/bop/bop_base.h b/ortools/bop/bop_base.h index 43e6bab0da9..2ef5eb89367 100644 --- a/ortools/bop/bop_base.h +++ b/ortools/bop/bop_base.h @@ -182,7 +182,7 @@ class ProblemState { // Returns true when the variable var is fixed in the current problem state. // The value of the fixed variable is returned by GetVariableFixedValue(var). bool IsVariableFixed(VariableIndex var) const { return is_fixed_[var]; } - const absl::StrongVector& is_fixed() const { + const util_intops::StrongVector& is_fixed() const { return is_fixed_; } @@ -191,7 +191,7 @@ class ProblemState { bool GetVariableFixedValue(VariableIndex var) const { return fixed_values_[var]; } - const absl::StrongVector& fixed_values() const { + const util_intops::StrongVector& fixed_values() const { return fixed_values_; } @@ -235,8 +235,8 @@ class ProblemState { const sat::LinearBooleanProblem& original_problem_; BopParameters parameters_; int64_t update_stamp_; - absl::StrongVector is_fixed_; - absl::StrongVector fixed_values_; + util_intops::StrongVector is_fixed_; + util_intops::StrongVector fixed_values_; glop::DenseRow lp_values_; BopSolution solution_; std::vector assignment_preference_; diff --git a/ortools/bop/bop_lns.h b/ortools/bop/bop_lns.h index f825dc44ab6..73e49602a9f 100644 --- a/ortools/bop/bop_lns.h +++ b/ortools/bop/bop_lns.h @@ -34,7 +34,7 @@ namespace bop { // Uses SAT to solve the full problem under the constraint that the new solution // should be under a given Hamming distance of the current solution. -class BopCompleteLNSOptimizer : public BopOptimizerBase { +class BopCompleteLNSOptimizer final : public BopOptimizerBase { public: BopCompleteLNSOptimizer(absl::string_view name, const BopConstraintTerms& objective_terms); @@ -93,7 +93,7 @@ class NeighborhoodGenerator { // A generic LNS optimizer which generates neighborhoods according to the given // NeighborhoodGenerator and automatically adapt the neighborhood size depending // on how easy it is to solve the associated problem. 
-class BopAdaptiveLNSOptimizer : public BopOptimizerBase { +class BopAdaptiveLNSOptimizer final : public BopOptimizerBase { public: // Takes ownership of the given neighborhood_generator. // The sat_propagator is assumed to contains the current problem. @@ -119,7 +119,7 @@ class BopAdaptiveLNSOptimizer : public BopOptimizerBase { // Generates a neighborhood by randomly fixing a subset of the objective // variables that are currently at their lower cost. -class ObjectiveBasedNeighborhood : public NeighborhoodGenerator { +class ObjectiveBasedNeighborhood final : public NeighborhoodGenerator { public: ObjectiveBasedNeighborhood(const BopConstraintTerms* objective_terms, absl::BitGenRef random) @@ -137,7 +137,7 @@ class ObjectiveBasedNeighborhood : public NeighborhoodGenerator { // Generates a neighborhood by randomly selecting a subset of constraints and // fixing the objective variables that are currently at their lower cost and // not in the given subset of constraints. -class ConstraintBasedNeighborhood : public NeighborhoodGenerator { +class ConstraintBasedNeighborhood final : public NeighborhoodGenerator { public: ConstraintBasedNeighborhood(const BopConstraintTerms* objective_terms, absl::BitGenRef random) @@ -155,7 +155,7 @@ class ConstraintBasedNeighborhood : public NeighborhoodGenerator { // Generates a neighborhood by taking a random local neighborhood in an // undirected graph where the nodes are the variables and two nodes are linked // if they appear in the same constraint. -class RelationGraphBasedNeighborhood : public NeighborhoodGenerator { +class RelationGraphBasedNeighborhood final : public NeighborhoodGenerator { public: RelationGraphBasedNeighborhood(const sat::LinearBooleanProblem& problem, absl::BitGenRef random); @@ -168,7 +168,8 @@ class RelationGraphBasedNeighborhood : public NeighborhoodGenerator { // TODO(user): reuse by_variable_matrix_ from the LS? Note however than we // don't need the coefficients here. 
- absl::StrongVector> columns_; + util_intops::StrongVector> + columns_; absl::BitGenRef random_; }; diff --git a/ortools/bop/bop_ls.cc b/ortools/bop/bop_ls.cc index fc85ed9b91b..af4cd53e1d7 100644 --- a/ortools/bop/bop_ls.cc +++ b/ortools/bop/bop_ls.cc @@ -571,7 +571,7 @@ ConstraintIndex OneFlipConstraintRepairer::ConstraintToRepair() const { TermIndex OneFlipConstraintRepairer::NextRepairingTerm( ConstraintIndex ct_index, TermIndex init_term_index, TermIndex start_term_index) const { - const absl::StrongVector& terms = + const util_intops::StrongVector& terms = by_constraint_matrix_[ct_index]; const int64_t constraint_value = maintainer_.ConstraintValue(ct_index); const int64_t lb = maintainer_.ConstraintLowerBound(ct_index); @@ -623,13 +623,13 @@ sat::Literal OneFlipConstraintRepairer::GetFlip(ConstraintIndex ct_index, } void OneFlipConstraintRepairer::SortTermsOfEachConstraints(int num_variables) { - absl::StrongVector objective(num_variables, 0); + util_intops::StrongVector objective(num_variables, 0); for (const ConstraintTerm& term : by_constraint_matrix_[AssignmentAndConstraintFeasibilityMaintainer:: kObjectiveConstraint]) { objective[term.var] = std::abs(term.weight); } - for (absl::StrongVector& terms : + for (util_intops::StrongVector& terms : by_constraint_matrix_) { std::sort(terms.begin(), terms.end(), [&objective](const ConstraintTerm& a, const ConstraintTerm& b) { diff --git a/ortools/bop/bop_ls.h b/ortools/bop/bop_ls.h index 859ba325f2d..2a5ea078422 100644 --- a/ortools/bop/bop_ls.h +++ b/ortools/bop/bop_ls.h @@ -250,7 +250,7 @@ class NonOrderedSetHasher { private: absl::BitGenRef random_; - absl::StrongVector hashes_; + util_intops::StrongVector hashes_; }; // This class is used to incrementally maintain an assignment and the @@ -413,16 +413,16 @@ class AssignmentAndConstraintFeasibilityMaintainer { int64_t weight; }; - absl::StrongVector> + util_intops::StrongVector< + VariableIndex, util_intops::StrongVector> by_variable_matrix_; - absl::StrongVector constraint_lower_bounds_; - absl::StrongVector constraint_upper_bounds_; + util_intops::StrongVector constraint_lower_bounds_; + util_intops::StrongVector constraint_upper_bounds_; BopSolution assignment_; BopSolution reference_; - absl::StrongVector constraint_values_; + util_intops::StrongVector constraint_values_; BacktrackableIntegerSet infeasible_constraint_set_; // This contains the list of variable flipped in assignment_. @@ -514,8 +514,8 @@ class OneFlipConstraintRepairer { // on most promising variables first. void SortTermsOfEachConstraints(int num_variables); - absl::StrongVector> + util_intops::StrongVector< + ConstraintIndex, util_intops::StrongVector> by_constraint_matrix_; const AssignmentAndConstraintFeasibilityMaintainer& maintainer_; const sat::VariablesAssignment& sat_assignment_; @@ -632,7 +632,7 @@ class LocalSearchAssignmentIterator { SatWrapper* const sat_wrapper_; OneFlipConstraintRepairer repairer_; std::vector search_nodes_; - absl::StrongVector initial_term_index_; + util_intops::StrongVector initial_term_index_; // Temporary vector used by ApplyDecision(). 
std::vector tmp_propagated_literals_; diff --git a/ortools/bop/bop_portfolio.cc b/ortools/bop/bop_portfolio.cc index 5fce8591ea6..b3cbbb11f46 100644 --- a/ortools/bop/bop_portfolio.cc +++ b/ortools/bop/bop_portfolio.cc @@ -353,7 +353,8 @@ void PortfolioOptimizer::CreateOptimizers( // OptimizerSelector //------------------------------------------------------------------------------ OptimizerSelector::OptimizerSelector( - const absl::StrongVector& optimizers) + const util_intops::StrongVector& + optimizers) : run_infos_(), selected_index_(optimizers.size()) { for (OptimizerIndex i(0); i < optimizers.size(); ++i) { info_positions_.push_back(run_infos_.size()); diff --git a/ortools/bop/bop_portfolio.h b/ortools/bop/bop_portfolio.h index d40c259a346..aefc8d803e4 100644 --- a/ortools/bop/bop_portfolio.h +++ b/ortools/bop/bop_portfolio.h @@ -95,7 +95,7 @@ class PortfolioOptimizer : public BopOptimizerBase { int64_t state_update_stamp_; BopConstraintTerms objective_terms_; std::unique_ptr selector_; - absl::StrongVector optimizers_; + util_intops::StrongVector optimizers_; sat::SatSolver sat_propagator_; BopParameters parameters_; double lower_bound_; @@ -110,7 +110,8 @@ class OptimizerSelector { // Note that the list of optimizers is only used to get the names for // debug purposes, the ownership of the optimizers is not transferred. explicit OptimizerSelector( - const absl::StrongVector& optimizers); + const util_intops::StrongVector& + optimizers); // Selects the next optimizer to run based on the user defined order and // history of success. Returns kInvalidOptimizerIndex if no optimizer is @@ -202,7 +203,7 @@ class OptimizerSelector { }; std::vector run_infos_; - absl::StrongVector info_positions_; + util_intops::StrongVector info_positions_; int selected_index_; }; diff --git a/ortools/bop/bop_solution.h b/ortools/bop/bop_solution.h index 0d2d4dad91d..a940ee04ca3 100644 --- a/ortools/bop/bop_solution.h +++ b/ortools/bop/bop_solution.h @@ -81,10 +81,10 @@ class BopSolution { } // For range based iteration, i.e. for (const bool value : solution) {...}. - absl::StrongVector::const_iterator begin() const { + util_intops::StrongVector::const_iterator begin() const { return values_.begin(); } - absl::StrongVector::const_iterator end() const { + util_intops::StrongVector::const_iterator end() const { return values_.end(); } @@ -103,7 +103,7 @@ class BopSolution { const sat::LinearBooleanProblem* problem_; std::string name_; - absl::StrongVector values_; + util_intops::StrongVector values_; // Those are mutable because they behave as const values for a given solution // but for performance reasons we want to be lazy on their computation, diff --git a/ortools/bop/bop_types.h b/ortools/bop/bop_types.h index f08a82d3c1e..f9843afb8ca 100644 --- a/ortools/bop/bop_types.h +++ b/ortools/bop/bop_types.h @@ -85,7 +85,8 @@ struct BopConstraintTerm { return search_id < other.search_id; } }; -typedef absl::StrongVector BopConstraintTerms; +typedef util_intops::StrongVector + BopConstraintTerms; } // namespace bop } // namespace operations_research diff --git a/ortools/bop/integral_solver.cc b/ortools/bop/integral_solver.cc index bb0a6669b3d..00d6c25d5e1 100644 --- a/ortools/bop/integral_solver.cc +++ b/ortools/bop/integral_solver.cc @@ -408,7 +408,7 @@ class IntegralProblemConverter { // constraint. 
Fractional AddWeightedIntegralVariable( ColIndex col, Fractional weight, - absl::StrongVector* dense_weights); + util_intops::StrongVector* dense_weights); // Scales weights and adds all non-zero scaled weights and literals to t. // t is a constraint or the objective. @@ -418,25 +418,27 @@ class IntegralProblemConverter { template double ScaleAndSparsifyWeights( double scaling_factor, int64_t gcd, - const absl::StrongVector& dense_weights, T* t); + const util_intops::StrongVector& dense_weights, + T* t); // Returns true when at least one element is non-zero. bool HasNonZeroWeights( - const absl::StrongVector& dense_weights) const; + const util_intops::StrongVector& dense_weights) + const; bool problem_is_boolean_and_has_only_integral_constraints_; // global_to_boolean_[i] represents the Boolean variable index in Bop; when // negative -global_to_boolean_[i] - 1 represents the index of the // integral variable in integral_variables_. - absl::StrongVector + util_intops::StrongVector global_to_boolean_; std::vector integral_variables_; std::vector integral_indices_; int num_boolean_variables_; enum VariableType { BOOLEAN, INTEGRAL, INTEGRAL_EXPRESSED_AS_BOOLEAN }; - absl::StrongVector variable_types_; + util_intops::StrongVector variable_types_; }; IntegralProblemConverter::IntegralProblemConverter() @@ -610,7 +612,7 @@ void IntegralProblemConverter::ConvertAllConstraints( std::vector coefficients; for (RowIndex row(0); row < linear_problem.num_constraints(); ++row) { Fractional offset = 0.0; - absl::StrongVector dense_weights( + util_intops::StrongVector dense_weights( num_boolean_variables_, 0.0); for (const SparseColumn::Entry e : transpose.column(RowToColIndex(row))) { // Cast in ColIndex due to the transpose. @@ -686,7 +688,7 @@ void IntegralProblemConverter::ConvertObjective( LinearBooleanProblem* boolean_problem) { LinearObjective* objective = boolean_problem->mutable_objective(); Fractional offset = 0.0; - absl::StrongVector dense_weights( + util_intops::StrongVector dense_weights( num_boolean_variables_, 0.0); // Compute the objective weights for the binary variable model. 
for (ColIndex col(0); col < linear_problem.num_variables(); ++col) { @@ -821,7 +823,7 @@ bool IntegralProblemConverter::CreateVariableUsingConstraint( integral_var->Clear(); const SparseMatrix& transpose = linear_problem.GetTransposeSparseMatrix(); - absl::StrongVector dense_weights( + util_intops::StrongVector dense_weights( num_boolean_variables_, 0.0); Fractional scale = 1.0; int64_t variable_offset = 0; @@ -872,7 +874,7 @@ bool IntegralProblemConverter::CreateVariableUsingConstraint( Fractional IntegralProblemConverter::AddWeightedIntegralVariable( ColIndex col, Fractional weight, - absl::StrongVector* dense_weights) { + util_intops::StrongVector* dense_weights) { CHECK(nullptr != dense_weights); if (weight == 0.0) { @@ -899,7 +901,8 @@ Fractional IntegralProblemConverter::AddWeightedIntegralVariable( template double IntegralProblemConverter::ScaleAndSparsifyWeights( double scaling_factor, int64_t gcd, - const absl::StrongVector& dense_weights, T* t) { + const util_intops::StrongVector& dense_weights, + T* t) { CHECK(nullptr != t); double bound_error = 0.0; @@ -915,7 +918,8 @@ double IntegralProblemConverter::ScaleAndSparsifyWeights( return bound_error; } bool IntegralProblemConverter::HasNonZeroWeights( - const absl::StrongVector& dense_weights) const { + const util_intops::StrongVector& dense_weights) + const { for (const Fractional weight : dense_weights) { if (weight != 0.0) { return true; diff --git a/ortools/constraint_solver/constraint_solveri.h b/ortools/constraint_solver/constraint_solveri.h index 95459cfd8f8..323595486fa 100644 --- a/ortools/constraint_solver/constraint_solveri.h +++ b/ortools/constraint_solver/constraint_solveri.h @@ -1769,11 +1769,11 @@ class SubDagComputer { // Initialized by BuildGraph(), after which the outgoing arcs of node n are // the range from arcs_[arcs_of_node_[n]] included to // arcs_[arcs_of_node_[n+1]] excluded. - absl::StrongVector arcs_of_node_; + util_intops::StrongVector arcs_of_node_; // Must be false before BuildGraph() is called, true afterwards. bool graph_was_built_ = false; // Used by ComputeSortedSubDagArcs. - absl::StrongVector indegree_of_node_; + util_intops::StrongVector indegree_of_node_; // Used by ComputeSortedSubDagArcs. std::vector nodes_to_visit_; // Used as output, set up as a member to allow reuse. @@ -1839,14 +1839,14 @@ class LocalSearchState { const VariableDomain& d2) const { return d1.max < d2.min || d2.max < d1.min; } - absl::StrongVector relaxed_domains_; - absl::StrongVector current_domains_; + util_intops::StrongVector relaxed_domains_; + util_intops::StrongVector current_domains_; struct TrailedVariableDomain { VariableDomain committed_domain; VariableDomainId domain_id; }; std::vector trailed_domains_; - absl::StrongVector domain_is_trailed_; + util_intops::StrongVector domain_is_trailed_; // True iff all domains have their min <= max. bool state_domains_are_all_nonempty_ = true; bool state_has_relaxed_domains_ = false; @@ -1907,11 +1907,11 @@ class LocalSearchState { // Structure of the expression DAG, used to buffer propagation storage. SubDagComputer dag_; // Maps arcs of dag_ to domain/constraint dependencies. - absl::StrongVector dependency_of_dag_arc_; + util_intops::StrongVector dependency_of_dag_arc_; // Maps domain ids to dag_ nodes. - absl::StrongVector dag_node_of_domain_; + util_intops::StrongVector dag_node_of_domain_; // Maps constraint ids to dag_ nodes. 
- absl::StrongVector dag_node_of_constraint_; + util_intops::StrongVector dag_node_of_constraint_; // Number of nodes currently allocated in dag_. // Reserve node 0 as a default dummy node with no dependencies. int num_dag_nodes_ = 1; @@ -1931,7 +1931,7 @@ class LocalSearchState { // The triggers of domain i are stored from triggers_of_domain_[i] // to triggers_of_domain_[i+1] excluded. std::vector triggers_; - absl::StrongVector triggers_of_domain_; + util_intops::StrongVector triggers_of_domain_; // Constraints are used to form expressions that make up the objective. // Constraints are directed: they have inputs and an output, moreover the @@ -2001,7 +2001,7 @@ class LocalSearchState { bool constraint_is_trailed_ = false; }; // Used to identify constraints and hold ownership. - absl::StrongVector> constraints_; + util_intops::StrongVector> constraints_; }; // A LocalSearchState Variable can only be created by a LocalSearchState, diff --git a/ortools/constraint_solver/expressions.cc b/ortools/constraint_solver/expressions.cc index 76f391b13d4..edf8d3553af 100644 --- a/ortools/constraint_solver/expressions.cc +++ b/ortools/constraint_solver/expressions.cc @@ -5361,8 +5361,10 @@ class PosIntSquare : public IntSquare { if (m <= 0) { return; } - const int64_t root = - static_cast(ceil(sqrt(static_cast(m)))); + int64_t root = static_cast(ceil(sqrt(static_cast(m)))); + if (CapProd(root, root) < m) { + root++; + } expr_->SetMin(root); } int64_t Max() const override { @@ -5378,8 +5380,11 @@ class PosIntSquare : public IntSquare { if (m == std::numeric_limits::max()) { return; } - const int64_t root = - static_cast(floor(sqrt(static_cast(m)))); + int64_t root = static_cast(floor(sqrt(static_cast(m)))); + if (CapProd(root, root) > m) { + root--; + } + expr_->SetMax(root); } }; diff --git a/ortools/constraint_solver/local_search.cc b/ortools/constraint_solver/local_search.cc index 0fbaa42dcdb..d92402fa677 100644 --- a/ortools/constraint_solver/local_search.cc +++ b/ortools/constraint_solver/local_search.cc @@ -3785,8 +3785,8 @@ void SubDagComputer::BuildGraph(int num_nodes) { bool SubDagComputer::HasDirectedCycle() const { DCHECK(graph_was_built_); - absl::StrongVector node_is_open(num_nodes_, false); - absl::StrongVector node_was_visited(num_nodes_, false); + util_intops::StrongVector node_is_open(num_nodes_, false); + util_intops::StrongVector node_was_visited(num_nodes_, false); // Depth first search event: a node and a boolean indicating whether // to open or to close it. 
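Stepping back to the PosIntSquare fix above: it exists because std::sqrt on a large int64 can land one away from the true integer root after the cast. Here is a self-contained sketch of the corrected ceiling-root computation; CapProdSketch is a simplified stand-in for OR-Tools' saturating CapProd, and __int128 assumes GCC or Clang.

#include <cmath>
#include <cstdint>
#include <iostream>
#include <limits>

// Saturating multiply, simplified for the sketch (GCC/Clang __int128).
int64_t CapProdSketch(int64_t a, int64_t b) {
  const __int128 p = static_cast<__int128>(a) * b;
  if (p > std::numeric_limits<int64_t>::max()) return std::numeric_limits<int64_t>::max();
  if (p < std::numeric_limits<int64_t>::min()) return std::numeric_limits<int64_t>::min();
  return static_cast<int64_t>(p);
}

// ceil(sqrt(m)) as in the patched SetMin: nudge up when the double result
// undershoots. The SetMax side mirrors this with floor and a downward nudge.
int64_t CeilSqrt(int64_t m) {
  int64_t root = static_cast<int64_t>(ceil(sqrt(static_cast<double>(m))));
  if (CapProdSketch(root, root) < m) ++root;
  return root;
}

int main() {
  // 2^62 + 1 rounds to 2^62 as a double, so the raw sqrt gives 2^31 even
  // though the true ceiling root is 2^31 + 1; the guard repairs it.
  const int64_t m = (int64_t{1} << 62) + 1;
  std::cout << CeilSqrt(m) << "\n";  // 2147483649
}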
struct DFSEvent { diff --git a/ortools/constraint_solver/python/CMakeLists.txt b/ortools/constraint_solver/python/CMakeLists.txt index c1379863142..c527ba7e5ff 100644 --- a/ortools/constraint_solver/python/CMakeLists.txt +++ b/ortools/constraint_solver/python/CMakeLists.txt @@ -47,6 +47,7 @@ if(MSVC) target_link_libraries(pywrapcp PRIVATE ${Python3_LIBRARIES}) endif() +# Test if(BUILD_TESTING) file(GLOB PYTHON_SRCS "*_test.py") foreach(FILE_NAME IN LISTS PYTHON_SRCS) diff --git a/ortools/constraint_solver/python/pywrapcp_test.py b/ortools/constraint_solver/python/pywrapcp_test.py index 3a25cf98482..22c6b8b3c4b 100755 --- a/ortools/constraint_solver/python/pywrapcp_test.py +++ b/ortools/constraint_solver/python/pywrapcp_test.py @@ -15,6 +15,7 @@ """Test Constraint Solver API.""" import sys + from absl.testing import absltest from ortools.constraint_solver import search_limit_pb2 from ortools.constraint_solver import solver_parameters_pb2 @@ -26,6 +27,7 @@ def inc_callback(i): class ClassIncCallback: + def __init__(self, increment): self.__increment = increment @@ -34,6 +36,7 @@ def inc_method(self, i): class TestIntVarContainerAPI(absltest.TestCase): + def test_contains(self): self.assertTrue( hasattr(pywrapcp.IntVarContainer, "Contains"), @@ -65,6 +68,7 @@ def test_restore(self): class TestIntervalVarContainerAPI(absltest.TestCase): + def test_contains(self): self.assertTrue( hasattr(pywrapcp.IntervalVarContainer, "Contains"), @@ -97,6 +101,7 @@ def test_restore(self): class TestSequenceVarContainerAPI(absltest.TestCase): + def test_contains(self): self.assertTrue( hasattr(pywrapcp.SequenceVarContainer, "Contains"), @@ -129,6 +134,7 @@ def test_restore(self): class PyWrapCPTest(absltest.TestCase): + def testRabbitPheasant(self): # Create the solver. 
solver = pywrapcp.Solver("testRabbitPheasant") @@ -527,6 +533,7 @@ def test_search_alldiff(self): class CustomSearchMonitor(pywrapcp.SearchMonitor): + def __init__(self, solver, nexts): pywrapcp.SearchMonitor.__init__(self, solver) self._nexts = nexts @@ -539,6 +546,7 @@ def EndInitialPropagation(self): class SearchMonitorTest(absltest.TestCase): + def test_search_monitor(self): print("test_search_monitor") solver = pywrapcp.Solver("test search monitor") @@ -551,6 +559,7 @@ def test_search_monitor(self): class CustomDemon(pywrapcp.PyDemon): + def __init__(self, x): super().__init__() self._x = x @@ -561,6 +570,7 @@ def Run(self, solver): class DemonTest(absltest.TestCase): + def test_demon(self): print("test_demon") solver = pywrapcp.Solver("test export") @@ -570,6 +580,7 @@ def test_demon(self): class CustomConstraint(pywrapcp.PyConstraint): + def __init__(self, solver, x): super().__init__(solver) self._x = x @@ -592,6 +603,7 @@ def DebugString(self): class InitialPropagateDemon(pywrapcp.PyDemon): + def __init__(self, constraint): super().__init__() self._ct = constraint @@ -601,6 +613,7 @@ def Run(self, solver): class DumbGreaterOrEqualToFive(pywrapcp.PyConstraint): + def __init__(self, solver, x): super().__init__(solver) self._x = x @@ -619,6 +632,7 @@ def InitialPropagate(self): class WatchDomain(pywrapcp.PyDemon): + def __init__(self, x): super().__init__() self._x = x @@ -629,6 +643,7 @@ def Run(self, solver): class HoleConstraint(pywrapcp.PyConstraint): + def __init__(self, solver, x): super().__init__(solver) self._x = x @@ -642,6 +657,7 @@ def InitialPropagate(self): class BinarySum(pywrapcp.PyConstraint): + def __init__(self, solver, x, y, z): super().__init__(solver) self._x = x @@ -661,6 +677,7 @@ def InitialPropagate(self): class ConstraintTest(absltest.TestCase): + def test_member(self): print("test_member") solver = pywrapcp.Solver("test member") @@ -759,6 +776,7 @@ def test_sum_constraint(self): class CustomDecisionBuilder(pywrapcp.PyDecisionBuilder): + def __init__(self): super().__init__() self._counter = 0 @@ -773,6 +791,7 @@ def DebugString(self): class CustomDecision(pywrapcp.PyDecision): + def __init__(self): print("In CustomDecision ctor", file=sys.stderr) super().__init__() @@ -792,6 +811,7 @@ def DebugString(self): class CustomDecisionBuilderCustomDecision(pywrapcp.PyDecisionBuilder): + def __init__(self): super().__init__() self.__done = False @@ -811,6 +831,7 @@ def DebugString(self): class DecisionTest(absltest.TestCase): + def test_custom_decision_builder(self): solver = pywrapcp.Solver("test_custom_decision_builder") db = CustomDecisionBuilder() @@ -827,6 +848,7 @@ def test_custom_decision(self): class LocalSearchTest(absltest.TestCase): + class OneVarLNS(pywrapcp.BaseLns): """One Var LNS.""" @@ -959,6 +981,7 @@ def test_local_search_with_filter(self): class MyDecisionBuilder(pywrapcp.PyDecisionBuilder): + def __init__(self, var, value): super().__init__() self.__var = var @@ -971,6 +994,7 @@ def Next(self, solver): class MyLns(pywrapcp.BaseLns): + def __init__(self, int_vars): super().__init__(int_vars) self.__current = 0 @@ -989,6 +1013,7 @@ def NextFragment(self, fragment, values): class MyLnsNoValues(pywrapcp.BaseLns): + def __init__(self, int_vars): super().__init__(int_vars) self.__current = 0 @@ -1005,6 +1030,7 @@ def NextFragment(self, fragment): class MyDecisionBuilderWithRev(pywrapcp.PyDecisionBuilder): + def __init__(self, var, value, rev): super().__init__() self.__var = var @@ -1022,6 +1048,7 @@ def Next(self, solver): class 
MyDecisionBuilderThatFailsWithRev(pywrapcp.PyDecisionBuilder): + def Next(self, solver): solver.Fail() return None @@ -1260,6 +1287,7 @@ def testNoNewSearch(self): class SplitDomainDecisionBuilder(pywrapcp.PyDecisionBuilder): + def __init__(self, var, value, lower): super().__init__() self.__var = var @@ -1275,6 +1303,7 @@ def Next(self, solver): class PyWrapCPDecisionTest(absltest.TestCase): + def testSplitDomainLower(self): solver = pywrapcp.Solver("testSplitDomainLower") x = solver.IntVar(0, 10, "x") @@ -1331,6 +1360,7 @@ def testFalseConstraint(self): class IntVarLocalSearchOperatorTest(absltest.TestCase): + def test_ctor(self): solver = pywrapcp.Solver("Solve") int_vars = [solver.IntVar(0, 4) for _ in range(4)] diff --git a/ortools/constraint_solver/routing.cc b/ortools/constraint_solver/routing.cc index 9ef00d55b28..1bd2ad04827 100644 --- a/ortools/constraint_solver/routing.cc +++ b/ortools/constraint_solver/routing.cc @@ -1028,7 +1028,7 @@ namespace { struct ResourceClass { using DimensionIndex = RoutingModel::DimensionIndex; /// The attributes for each dimension. - absl::StrongVector + util_intops::StrongVector dimension_attributes; /// Assignability of vehicles. std::vector assignable_to_vehicle; @@ -1054,7 +1054,7 @@ void ResourceGroup::ComputeResourceClasses() { for (int r = 0; r < resources_.size(); ++r) { ResourceClass resource_class; - absl::StrongVector& dim_attributes = + util_intops::StrongVector& dim_attributes = resource_class.dimension_attributes; dim_attributes.resize(model_->dimensions_.size(), Attributes()); for (const auto& [dim_index, attributes] : @@ -1355,14 +1355,14 @@ struct VehicleClass { int end_equivalence_class; /// Bounds of cumul variables at start and end vehicle nodes. /// dimension_{start,end}_cumuls_{min,max}[d] is the bound for dimension d. - absl::StrongVector dimension_start_cumuls_min; - absl::StrongVector dimension_start_cumuls_max; - absl::StrongVector dimension_end_cumuls_min; - absl::StrongVector dimension_end_cumuls_max; - absl::StrongVector dimension_capacities; + util_intops::StrongVector dimension_start_cumuls_min; + util_intops::StrongVector dimension_start_cumuls_max; + util_intops::StrongVector dimension_end_cumuls_min; + util_intops::StrongVector dimension_end_cumuls_max; + util_intops::StrongVector dimension_capacities; /// dimension_evaluators[d]->Run(from, to) is the transit value of arc /// from->to for a dimension d. - absl::StrongVector dimension_evaluator_classes; + util_intops::StrongVector dimension_evaluator_classes; /// Hash of the visitability of (non-start/end) nodes. uint64_t visitable_nodes_hash; /// Hash of allowed resources for each resource group, or -1 if a given diff --git a/ortools/constraint_solver/routing.h b/ortools/constraint_solver/routing.h index 131aa6adad1..1d40fa6a365 100644 --- a/ortools/constraint_solver/routing.h +++ b/ortools/constraint_solver/routing.h @@ -540,7 +540,7 @@ class RoutingModel { return resource_indices_per_class_[resource_class]; } // clang-format off - const absl::StrongVector >& + const util_intops::StrongVector >& GetResourceIndicesPerClass() const { return resource_indices_per_class_; } @@ -569,7 +569,7 @@ class RoutingModel { // ComputeResourceClasses()). 
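The resource-class machinery above assigns each resource to an equivalence class of identical attribute bundles, so later assignment work scales with the number of classes rather than resources. A minimal sketch of that dedup idea follows; the names and data are illustrative, not RoutingModel's actual members.

#include <cstddef>
#include <cstdint>
#include <iostream>
#include <map>
#include <vector>

int main() {
  // Resources 0 and 2 carry identical attributes, so they share a class.
  const std::vector<std::vector<int64_t>> attributes = {{1, 5}, {2, 7}, {1, 5}};
  std::map<std::vector<int64_t>, int> class_of;  // attribute bundle -> class id
  std::vector<int> class_index(attributes.size());
  for (size_t r = 0; r < attributes.size(); ++r) {
    // emplace returns the existing entry when the bundle was seen before.
    class_index[r] =
        class_of.emplace(attributes[r], static_cast<int>(class_of.size()))
            .first->second;
  }
  for (size_t r = 0; r < attributes.size(); ++r) {
    std::cout << "resource " << r << " -> class " << class_index[r] << "\n";
  }
}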
std::vector resource_class_indices_; // clang-format off - absl::StrongVector > + util_intops::StrongVector > resource_indices_per_class_; // clang-format on @@ -2358,7 +2358,7 @@ class RoutingModel { mutable RevSwitch is_bound_to_end_ct_added_; /// Dimensions absl::flat_hash_map dimension_name_to_index_; - absl::StrongVector dimensions_; + util_intops::StrongVector dimensions_; /// Resource Groups. /// If resource_groups_ is not empty, then for each group of resources, each /// (used) vehicle must be assigned to exactly 1 resource, and each resource @@ -2366,7 +2366,7 @@ class RoutingModel { // clang-format off std::vector > resource_groups_; /// Stores the set of resource groups related to each dimension. - absl::StrongVector > + util_intops::StrongVector > dimension_resource_group_indices_; /// TODO(user): Define a new Dimension[Global|Local]OptimizerIndex type @@ -2374,10 +2374,10 @@ class RoutingModel { /// mappings below. std::vector > global_dimension_optimizers_; - absl::StrongVector global_optimizer_index_; + util_intops::StrongVector global_optimizer_index_; std::vector > local_dimension_optimizers_; - absl::StrongVector local_optimizer_index_; + util_intops::StrongVector local_optimizer_index_; // clang-format on std::string primary_constrained_dimension_; /// Costs @@ -2405,7 +2405,7 @@ class RoutingModel { absl::flat_hash_map, std::vector, absl::Hash>> force_distance_to_vehicle_unit_costs_; - absl::StrongVector cost_classes_; + util_intops::StrongVector cost_classes_; #endif // SWIG bool costs_are_homogeneous_across_vehicles_; bool cache_callbacks_; @@ -2416,7 +2416,7 @@ class RoutingModel { VehicleTypeContainer vehicle_type_container_; std::function vehicle_start_class_callback_; /// Disjunctions - absl::StrongVector disjunctions_; + util_intops::StrongVector disjunctions_; // clang-format off std::vector > index_to_disjunctions_; /// Same vehicle costs diff --git a/ortools/constraint_solver/routing_constraints.cc b/ortools/constraint_solver/routing_constraints.cc index c1b4c5cefc2..9ee9c2701af 100644 --- a/ortools/constraint_solver/routing_constraints.cc +++ b/ortools/constraint_solver/routing_constraints.cc @@ -179,7 +179,7 @@ class ResourceAssignmentConstraint : public Constraint { }; using RCIndex = RoutingModel::ResourceClassIndex; - const absl::StrongVector> + const util_intops::StrongVector> ignored_resources_per_class(resource_group_.GetResourceClassesCount()); std::vector> assignment_costs(model_.vehicles()); for (int v : resource_group_.GetVehiclesRequiringAResource()) { diff --git a/ortools/constraint_solver/routing_decision_builders.cc b/ortools/constraint_solver/routing_decision_builders.cc index 8adf36d8aed..6e0543e7746 100644 --- a/ortools/constraint_solver/routing_decision_builders.cc +++ b/ortools/constraint_solver/routing_decision_builders.cc @@ -239,7 +239,7 @@ class SetCumulsFromLocalDimensionCosts : public DecisionBuilder { std::vector vehicles_without_resource_assignment; std::vector vehicles_with_resource_assignment; - absl::StrongVector> + util_intops::StrongVector> used_resources_per_class; DetermineVehiclesRequiringResourceAssignment( &vehicles_without_resource_assignment, @@ -309,7 +309,7 @@ class SetCumulsFromLocalDimensionCosts : public DecisionBuilder { void DetermineVehiclesRequiringResourceAssignment( std::vector* vehicles_without_resource_assignment, std::vector* vehicles_with_resource_assignment, - absl::StrongVector>* + util_intops::StrongVector>* used_resources_per_class) const { vehicles_without_resource_assignment->clear(); 
vehicles_with_resource_assignment->clear(); @@ -408,7 +408,7 @@ class SetCumulsFromLocalDimensionCosts : public DecisionBuilder { bool ComputeVehicleResourceClassValuesAndIndices( const std::vector& vehicles_to_assign, - const absl::StrongVector>& + const util_intops::StrongVector>& used_resources_per_class, const std::function& next_accessor, std::vector* resource_indices) { diff --git a/ortools/constraint_solver/routing_filters.cc b/ortools/constraint_solver/routing_filters.cc index 3e8ea5829a0..52f2e363dfd 100644 --- a/ortools/constraint_solver/routing_filters.cc +++ b/ortools/constraint_solver/routing_filters.cc @@ -259,9 +259,9 @@ class NodeDisjunctionFilter : public IntVarLocalSearchFilter { const RoutingModel& routing_model_; - absl::StrongVector + util_intops::StrongVector active_per_disjunction_; - absl::StrongVector + util_intops::StrongVector inactive_per_disjunction_; int64_t synchronized_objective_value_; int64_t accepted_objective_value_; @@ -2820,7 +2820,7 @@ bool ResourceGroupAssignmentFilter::AcceptPath(int64_t path_start, // AcceptPath(), and delay calls to // ComputeVehicleToResourceClassAssignmentCosts() to FinalizeAcceptPath(). using RCIndex = RoutingModel::ResourceClassIndex; - const absl::StrongVector> + const util_intops::StrongVector> ignored_resources_per_class(resource_group_.GetResourceClassesCount()); return ComputeVehicleToResourceClassAssignmentCosts( vehicle, resource_group_, ignored_resources_per_class, @@ -2834,7 +2834,7 @@ bool ResourceGroupAssignmentFilter::AcceptPath(int64_t path_start, bool ResourceGroupAssignmentFilter::FinalizeAcceptPath( int64_t /*objective_min*/, int64_t objective_max) { using RCIndex = RoutingModel::ResourceClassIndex; - const absl::StrongVector> + const util_intops::StrongVector> ignored_resources_per_class(resource_group_.GetResourceClassesCount()); delta_cost_without_transit_ = ComputeBestVehicleToResourceAssignment( resource_group_.GetVehiclesRequiringAResource(), @@ -2871,7 +2871,7 @@ void ResourceGroupAssignmentFilter::OnSynchronizePathFromStart(int64_t start) { // OnSynchronizePathFromStart(), and delay calls to // ComputeVehicleToResourceClassAssignmentCosts() to OnAfterSynchronizePaths() using RCIndex = RoutingModel::ResourceClassIndex; - const absl::StrongVector> + const util_intops::StrongVector> ignored_resources_per_class(resource_group_.GetResourceClassesCount()); if (!ComputeVehicleToResourceClassAssignmentCosts( v, resource_group_, ignored_resources_per_class, next_accessor, @@ -2886,7 +2886,7 @@ void ResourceGroupAssignmentFilter::OnSynchronizePathFromStart(int64_t start) { void ResourceGroupAssignmentFilter::OnAfterSynchronizePaths() { using RCIndex = RoutingModel::ResourceClassIndex; - const absl::StrongVector> + const util_intops::StrongVector> ignored_resources_per_class(resource_group_.GetResourceClassesCount()); synchronized_cost_without_transit_ = (current_synch_failed_ || !filter_objective_cost_) diff --git a/ortools/constraint_solver/routing_index_manager.h b/ortools/constraint_solver/routing_index_manager.h index be84c60423b..ca97eca74c2 100644 --- a/ortools/constraint_solver/routing_index_manager.h +++ b/ortools/constraint_solver/routing_index_manager.h @@ -109,7 +109,7 @@ class RoutingIndexManager { const std::vector >& starts_ends); std::vector index_to_node_; - absl::StrongVector node_to_index_; + util_intops::StrongVector node_to_index_; std::vector vehicle_to_start_; std::vector vehicle_to_end_; int num_nodes_; diff --git a/ortools/constraint_solver/routing_lp_scheduling.cc 
b/ortools/constraint_solver/routing_lp_scheduling.cc index 5df8dc4c175..c6766127d5f 100644 --- a/ortools/constraint_solver/routing_lp_scheduling.cc +++ b/ortools/constraint_solver/routing_lp_scheduling.cc @@ -2645,7 +2645,7 @@ void MoveValuesToIndicesFrom(std::vector* out_values, bool ComputeVehicleToResourceClassAssignmentCosts( int v, const RoutingModel::ResourceGroup& resource_group, - const absl::StrongVector>& ignored_resources_per_class, const std::function& next_accessor, @@ -2791,9 +2791,9 @@ bool ComputeVehicleToResourceClassAssignmentCosts( int64_t ComputeBestVehicleToResourceAssignment( const std::vector& vehicles, - const absl::StrongVector>& resource_indices_per_class, - const absl::StrongVector>& ignored_resources_per_class, std::function*(int)> diff --git a/ortools/constraint_solver/routing_lp_scheduling.h b/ortools/constraint_solver/routing_lp_scheduling.h index d4308dc4545..49461f9e8f2 100644 --- a/ortools/constraint_solver/routing_lp_scheduling.h +++ b/ortools/constraint_solver/routing_lp_scheduling.h @@ -971,9 +971,9 @@ class GlobalDimensionCumulOptimizer { // O(num_resource_classes * vehicles.size() + resource_indices->size()). int64_t ComputeBestVehicleToResourceAssignment( const std::vector& vehicles, - const absl::StrongVector>& resource_indices_per_class, - const absl::StrongVector>& ignored_resources_per_class, std::function*(int)> @@ -991,7 +991,7 @@ int64_t ComputeBestVehicleToResourceAssignment( // are also set in cumul_values and break_values, if non-null. bool ComputeVehicleToResourceClassAssignmentCosts( int v, const RoutingModel::ResourceGroup& resource_group, - const absl::StrongVector>& ignored_resources_per_class, const std::function& next_accessor, diff --git a/ortools/dotnet/Google.OrTools-full.csproj.in b/ortools/dotnet/Google.OrTools-full.csproj.in index be355d7ce5f..94fe0d295a1 100644 --- a/ortools/dotnet/Google.OrTools-full.csproj.in +++ b/ortools/dotnet/Google.OrTools-full.csproj.in @@ -184,7 +184,7 @@ - + diff --git a/ortools/dotnet/Google.OrTools-local.csproj.in b/ortools/dotnet/Google.OrTools-local.csproj.in index ca71502cc9c..851b23a537e 100644 --- a/ortools/dotnet/Google.OrTools-local.csproj.in +++ b/ortools/dotnet/Google.OrTools-local.csproj.in @@ -172,7 +172,7 @@ - + diff --git a/ortools/flatzinc/challenge/Makefile b/ortools/flatzinc/challenge/Makefile new file mode 100644 index 00000000000..f696954955a --- /dev/null +++ b/ortools/flatzinc/challenge/Makefile @@ -0,0 +1,75 @@ +# Copyright 2010-2024 Google LLC +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +ifdef NOCACHE +DOCKER_BUILD_CMD := docker build --no-cache +else +DOCKER_BUILD_CMD := docker build +endif +DOCKER_RUN_CMD := docker run --rm --init + +MZN_SUFFIX=2024v4 +DOCKER_NAME=cp-sat-minizinc-challenge +MZN_TAG=${DOCKER_NAME}:${MZN_SUFFIX} +MZN_LS_TAG=${DOCKER_NAME}-ls:${MZN_SUFFIX} + +# Standard CP-SAT build. +.PHONY: minizinc-challenge +minizinc-challenge: minizinc-challenge.Dockerfile + ${DOCKER_BUILD_CMD} -f minizinc-challenge.Dockerfile -t ${MZN_TAG} . 
+ +.PHONY: minizinc-challenge-test +minizinc-challenge-test: minizinc-challenge + ${DOCKER_RUN_CMD} ${MZN_TAG} solver -v /minizinc/test.mzn /minizinc/2.dzn + ${DOCKER_RUN_CMD} ${MZN_TAG} solver -v --free-search /minizinc/test.mzn /minizinc/2.dzn + ${DOCKER_RUN_CMD} ${MZN_TAG} solver -v -p 2 /minizinc/test.mzn /minizinc/2.dzn + +.PHONY: minizinc-challenge-check +minizinc-challenge-check: minizinc-challenge + ${DOCKER_RUN_CMD} ${MZN_TAG} grep FZNEXEC /minizinc/mzn-exec-fd + +.PHONY: minizinc-challenge-bash +minizinc-challenge-bash: + ${DOCKER_RUN_CMD} -it --name minizinc_challenge ${MZN_TAG} + +.PHONY: minizinc-challenge-export +minizinc-challenge-export: minizinc-challenge + docker tag ${MZN_TAG} laurentperron/${MZN_TAG} + docker push laurentperron/${MZN_TAG} + + +# Local search build. +.PHONY: minizinc-challenge-ls +minizinc-challenge-ls: minizinc-challenge-ls.Dockerfile + ${DOCKER_BUILD_CMD} -f minizinc-challenge-ls.Dockerfile -t ${MZN_LS_TAG} . + +.PHONY: minizinc-challenge-ls-test +minizinc-challenge-ls-test: minizinc-challenge-ls + ${DOCKER_RUN_CMD} ${MZN_LS_TAG} solver -v /minizinc/test.mzn /minizinc/2.dzn + ${DOCKER_RUN_CMD} ${MZN_LS_TAG} solver -v --free-search /minizinc/test.mzn /minizinc/2.dzn + ${DOCKER_RUN_CMD} ${MZN_LS_TAG} solver -v -p 2 /minizinc/test.mzn /minizinc/2.dzn + +.PHONY: minizinc-challenge-ls-check +minizinc-challenge-ls-check: minizinc-challenge-ls + ${DOCKER_RUN_CMD} ${MZN_LS_TAG} grep FZNEXEC /minizinc/mzn-exec-fd + ${DOCKER_RUN_CMD} ${MZN_LS_TAG} grep FZNEXEC /minizinc/mzn-exec-free + ${DOCKER_RUN_CMD} ${MZN_LS_TAG} grep FZNEXEC /minizinc/mzn-exec-par + +.PHONY: minizinc-challenge-ls-bash +minizinc-challenge-ls-bash: + ${DOCKER_RUN_CMD} -it --name minizinc_challenge-ls ${MZN_LS_TAG} + +.PHONY: minizinc-challenge-ls-export +minizinc-challenge-ls-export: minizinc-challenge-ls + docker tag ${MZN_LS_TAG} laurentperron/${MZN_LS_TAG} + docker push laurentperron/${MZN_LS_TAG} diff --git a/ortools/flatzinc/challenge/cp-sat-description.txt b/ortools/flatzinc/challenge/cp-sat-description.txt new file mode 100644 index 00000000000..006d74c19d8 --- /dev/null +++ b/ortools/flatzinc/challenge/cp-sat-description.txt @@ -0,0 +1,27 @@ +CP-SAT is a discrete optimization solver built on top of a SAT engine. + +It is available within the OR-Tools open-source repository + - website: https://developers.google.com/optimization + - github repository: https://github.com/google/or-Tools +It has won multiple gold medals at the MiniZinc challenge + (https://www.minizinc.org/challenge.html) since its debut in 2017. + +The CP-SAT solver is architected around five components: + - The base layer is a clause learning SAT solver. + - Above the SAT layer sits a Constraint Programming (CP) module with Boolean, + integer and interval variables, and standard integer, scheduling and routing + constraints. + - Alongside the CP solver, a simplex provides a global linear relaxation. + Its integration with the CP and SAT layers enables the CP-SAT solver to solve + MIP problems with the same techniques as (commercial) MIP solvers: + relaxation, cuts, heuristics and duality based techniques. + - Both the CP and MIP modules rely on a unified protobuf representation of the + model that can serve as a file format, as well as an intermediate + representation of the model during all phases of the solve (input format, + presolved model, LNS fragment, Local Search).
+ - On top, the search layer implements a robust information-sharing portfolio + of specialized workers that offers both good and fast solutions, and + superior optimality proving capabilities. + +This work was presented at the CPAIOR 2020 masterclass. The recording is +available on YouTube (https://www.youtube.com/watch?v=lmy1ddn4cyw). diff --git a/ortools/flatzinc/challenge/cp-sat-ls-description.txt b/ortools/flatzinc/challenge/cp-sat-ls-description.txt new file mode 100644 index 00000000000..f4f624c4c94 --- /dev/null +++ b/ortools/flatzinc/challenge/cp-sat-ls-description.txt @@ -0,0 +1,24 @@ +CP-SAT-LS is a discrete optimization solver built on top of a SAT engine. + +It is available within the OR-Tools open-source repository + - website: https://developers.google.com/optimization + - github repository: https://github.com/google/or-Tools +It has won multiple gold medals at the MiniZinc challenge + (https://www.minizinc.org/challenge.html) since its debut in 2017. + +The CP-SAT-LS solver is a pure Local Search solver using the same interface as +the CP-SAT solver. + +It is built on top of the Feasibility Jump article: + Luteberget, B., Sartor, G. + Feasibility Jump: an LP-free Lagrangian MIP heuristic. + Math. Prog. Comp. 15, 365–388 (2023). + https://doi.org/10.1007/s12532-023-00234-8 + +It is complemented by the ViolationLS article: + Davies, T.O., Didier, F., Perron, L. (2024). + ViolationLS: Constraint-Based Local Search in CP-SAT. + Dilkina, B. (eds) Integration of Constraint Programming, Artificial + Intelligence, and Operations Research. CPAIOR 2024. + Lecture Notes in Computer Science, vol 14742. Springer, Cham. + https://doi.org/10.1007/978-3-031-60597-0_16 diff --git a/ortools/flatzinc/challenge/minizinc-challenge-ls.Dockerfile b/ortools/flatzinc/challenge/minizinc-challenge-ls.Dockerfile new file mode 100644 index 00000000000..81db76a0ed2 --- /dev/null +++ b/ortools/flatzinc/challenge/minizinc-challenge-ls.Dockerfile @@ -0,0 +1,32 @@ +FROM minizinc/mznc2024:latest AS env + +ENV SRC_GIT_BRANCH v99bugfix + +ENV TZ=America/Los_Angeles + +RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone + +RUN apt-get update -yq \ +&& apt-get -y install pkg-config git wget autoconf libtool zlib1g-dev
gawk g++ \ + curl make lsb-release + +# Bazelisk +RUN wget \ +https://github.com/bazelbuild/bazelisk/releases/download/v1.20.0/bazelisk-linux-amd64 \ +&& chmod +x bazelisk-linux-amd64 \ +&& mv bazelisk-linux-amd64 /root/bazel + +FROM env AS devel +WORKDIR /root +RUN git clone -b "$SRC_GIT_BRANCH" --single-branch https://github.com/google/or-tools + +FROM devel AS build +WORKDIR /root/or-tools +RUN /root/bazel build -c opt //ortools/flatzinc:fz + +RUN ln -s /root/or-tools/bazel-bin/ortools/flatzinc/fz /entry_data/fzn-exec + +RUN cp /root/or-tools/ortools/flatzinc/mznlib/*mzn /entry_data/mzn-lib + +# Patch the run scripts +RUN sed -i -e "s/-G/-p 1 -G/g" /minizinc/mzn-exec-fd diff --git a/ortools/flatzinc/cpsat.msc.in b/ortools/flatzinc/cp-sat.msc.in similarity index 83% rename from ortools/flatzinc/cpsat.msc.in rename to ortools/flatzinc/cp-sat.msc.in index ffc97d0d182..82bfbf842af 100644 --- a/ortools/flatzinc/cpsat.msc.in +++ b/ortools/flatzinc/cp-sat.msc.in @@ -1,11 +1,11 @@ { - "id": "com.google.ortools.sat", + "id": "cp-sat", "name": "OR Tools CP-SAT", "description": "Google's Operations Research CP-SAT-LP FlatZinc interface", "version": "@PROJECT_VERSION@", - "mznlib": "../cpsat", + "mznlib": "../cp-sat", "executable": "@FZ_REL_INSTALL_BINARY@", - "tags": ["cpsatlp", "cp", "lcg", "int"], + "tags": ["cp-sat", "cp", "lcg", "int"], "stdFlags": ["-a", "-f", "-p", "-r", "-s", "-v"], "extraFlags": [ ["--params", "Provide parameters interpreted as a text SatParameters proto", "string", ""] diff --git a/ortools/flatzinc/cp_model_fz_solver.cc b/ortools/flatzinc/cp_model_fz_solver.cc index 58924cf353f..d343d1b4b72 100644 --- a/ortools/flatzinc/cp_model_fz_solver.cc +++ b/ortools/flatzinc/cp_model_fz_solver.cc @@ -1204,8 +1204,9 @@ void CpModelProtoWithMapping::TranslateSearchAnnotations( // The format is fixed in the flatzinc specification. std::string SolutionString( const fz::SolutionOutputSpecs& output, - const std::function& value_func) { - if (output.variable != nullptr) { + const std::function& value_func, + double objective_value) { + if (output.variable != nullptr && !output.variable->domain.is_float) { const int64_t value = value_func(output.variable); if (output.display_as_boolean) { return absl::StrCat(output.name, " = ", value == 1 ? 
"true" : "false", @@ -1213,6 +1214,8 @@ std::string SolutionString( } else { return absl::StrCat(output.name, " = ", value, ";"); } + } else if (output.variable != nullptr && output.variable->domain.is_float) { + return absl::StrCat(output.name, " = ", objective_value, ";"); } else { const int bound_size = output.bounds.size(); std::string result = @@ -1245,10 +1248,12 @@ std::string SolutionString( std::string SolutionString( const fz::Model& model, - const std::function& value_func) { + const std::function& value_func, + double objective_value) { std::string solution_string; for (const auto& output_spec : model.output()) { - solution_string.append(SolutionString(output_spec, value_func)); + solution_string.append( + SolutionString(output_spec, value_func, objective_value)); solution_string.append("\n"); } return solution_string; @@ -1338,6 +1343,15 @@ void SolveFzWithCpModelProto(const fz::Model& fz_model, objective->add_coeffs(1); objective->add_vars(m.fz_var_to_index[fz_model.objective()]); } + } else if (!fz_model.float_objective_variables().empty()) { + FloatObjectiveProto* objective = m.proto.mutable_floating_point_objective(); + for (int i = 0; i < fz_model.float_objective_variables().size(); ++i) { + objective->add_vars( + m.fz_var_to_index[fz_model.float_objective_variables()[i]]); + objective->add_coeffs(fz_model.float_objective_coefficients()[i]); + } + objective->set_offset(fz_model.float_objective_offset()); + objective->set_maximize(fz_model.maximize()); } // Fill the search order. @@ -1427,10 +1441,12 @@ void SolveFzWithCpModelProto(const fz::Model& fz_model, if (p.display_all_solutions || p.search_all_solutions) { solution_observer = [&fz_model, &m, &p, solution_logger](const CpSolverResponse& r) { - const std::string solution_string = - SolutionString(fz_model, [&m, &r](fz::Variable* v) { + const std::string solution_string = SolutionString( + fz_model, + [&m, &r](fz::Variable* v) { return r.solution(m.fz_var_to_index.at(v)); - }); + }, + r.objective_value()); SOLVER_LOG(solution_logger, solution_string); if (p.display_statistics) { OutputFlatzincStats(r, solution_logger); @@ -1440,12 +1456,16 @@ void SolveFzWithCpModelProto(const fz::Model& fz_model, } Model sat_model; + + // Setup logging. + // Note that we need to do that before we start calling the sat functions + // below that might create a SolverLogger() themselves. + sat_model.Register(logger); + sat_model.Add(NewSatParameters(m.parameters)); if (solution_observer != nullptr) { sat_model.Add(NewFeasibleSolutionObserver(solution_observer)); } - // Setup logging. - sat_model.Register(logger); const CpSolverResponse response = SolveCpModel(m.proto, &sat_model); @@ -1464,11 +1484,14 @@ void SolveFzWithCpModelProto(const fz::Model& fz_model, if (p.ortools_mode) { if (response.status() == CpSolverStatus::FEASIBLE || response.status() == CpSolverStatus::OPTIMAL) { - if (!p.display_all_solutions) { // Already printed otherwise. - const std::string solution_string = - SolutionString(fz_model, [&response, &m](fz::Variable* v) { + if (!p.display_all_solutions && !p.search_all_solutions) { + // Already printed otherwise. 
+ const std::string solution_string = SolutionString( + fz_model, + [&response, &m](fz::Variable* v) { return response.solution(m.fz_var_to_index.at(v)); - }); + }, + response.objective_value()); SOLVER_LOG(solution_logger, solution_string); SOLVER_LOG(solution_logger, "----------"); } diff --git a/ortools/flatzinc/fz.cc b/ortools/flatzinc/fz.cc index 26b6158d3df..042102dae78 100644 --- a/ortools/flatzinc/fz.cc +++ b/ortools/flatzinc/fz.cc @@ -208,7 +208,7 @@ int main(int argc, char** argv) { operations_research::SolverLogger logger; if (absl::GetFlag(FLAGS_ortools_mode)) { - logger.EnableLogging(true); + logger.EnableLogging(absl::GetFlag(FLAGS_fz_logging)); // log_to_stdout is disabled later. logger.AddInfoLoggingCallback(operations_research::fz::LogInFlatzincFormat); } else { diff --git a/ortools/flatzinc/model.cc b/ortools/flatzinc/model.cc index 922ba9c1e02..cb984afc90a 100644 --- a/ortools/flatzinc/model.cc +++ b/ortools/flatzinc/model.cc @@ -759,6 +759,9 @@ int Argument::Size() const { case VOID_ARGUMENT: { return 0; } + case FLOAT_LIST: { + return floats.size(); + } default: { LOG(FATAL) << "Should not be here"; return 0; @@ -1091,6 +1094,14 @@ std::string Model::DebugString() const { absl::StrAppendFormat(&output, "%s %s\n %s\n", maximize_ ? "Maximize" : "Minimize", objective_->name, JoinDebugString(search_annotations_, ", ")); + } else if (!float_objective_variables_.empty()) { + absl::StrAppendFormat(&output, "%s [%s] * [%s] + %f\n %s\n", + maximize_ ? "Maximize" : "Minimize", + JoinDebugStringPtr(float_objective_variables_, ", "), + absl::StrJoin(float_objective_coefficients_, ", "), + float_objective_offset_, + JoinDebugString(search_annotations_, ", ")); + } else { absl::StrAppendFormat(&output, "Satisfy\n %s\n", JoinDebugString(search_annotations_, ", ")); diff --git a/ortools/flatzinc/model.h b/ortools/flatzinc/model.h index cb5775966b2..fd57a3fcd75 100644 --- a/ortools/flatzinc/model.h +++ b/ortools/flatzinc/model.h @@ -23,11 +23,8 @@ #include "absl/container/flat_hash_map.h" #include "absl/strings/string_view.h" #include "absl/types/span.h" -#include "ortools/base/logging.h" -#include "ortools/base/types.h" #include "ortools/graph/iterators.h" #include "ortools/util/logging.h" -#include "ortools/util/string_array.h" namespace operations_research { namespace fz { @@ -390,7 +387,22 @@ class Model { #endif bool maximize() const { return maximize_; } Variable* objective() const { return objective_; } + const std::vector& float_objective_variables() const { + return float_objective_variables_; + } + const std::vector& float_objective_coefficients() const { + return float_objective_coefficients_; + } + double float_objective_offset() const { return float_objective_offset_; } void SetObjective(Variable* obj) { objective_ = obj; } + void ClearObjective() { objective_ = nullptr; } + void AddFloatingPointObjectiveTerm(Variable* var, double coeff) { + float_objective_variables_.push_back(var); + float_objective_coefficients_.push_back(coeff); + } + void SetFloatingPointObjectiveOffset(double offset) { + float_objective_offset_ = offset; + } // Services. std::string DebugString() const; @@ -408,6 +420,9 @@ class Model { // The objective variable (it belongs to variables_). Variable* objective_; bool maximize_; + std::vector float_objective_variables_; + std::vector float_objective_coefficients_; + double float_objective_offset_ = 0.0; // All search annotations are stored as a vector of Annotation. 
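The floating-point objective added above ends up in the `FloatObjectiveProto` of the `CpModelProto`, filled exactly as `SolveFzWithCpModelProto()` does it. As a minimal hedged sketch of the proto shape (the variable indices and coefficients below are made up for illustration, and only the fields the translation above sets are shown):

    #include "ortools/sat/cp_model.pb.h"

    // Sketch: minimize 2.5*x0 + 1.5*x1 - 3.0. The indices refer to variables
    // that would already have been added to the CpModelProto (omitted here).
    operations_research::sat::CpModelProto MakeFloatObjectiveModel() {
      operations_research::sat::CpModelProto proto;
      auto* objective = proto.mutable_floating_point_objective();
      objective->add_vars(0);
      objective->add_coeffs(2.5);
      objective->add_vars(1);
      objective->add_coeffs(1.5);
      objective->set_offset(-3.0);
      objective->set_maximize(false);  // minimize
      return proto;
    }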
std::vector search_annotations_; std::vector output_; diff --git a/ortools/flatzinc/presolve.cc b/ortools/flatzinc/presolve.cc index 6ba1ec4d14c..fb985006458 100644 --- a/ortools/flatzinc/presolve.cc +++ b/ortools/flatzinc/presolve.cc @@ -108,6 +108,19 @@ void Presolver::PresolveBool2Int(Constraint* ct) { } } +// Propagates cast constraint. +// Rule 1: +// Input: int2float(x, y) +// Action: Replace all instances of y by x. +// Output: inactive constraint +void Presolver::PresolveInt2Float(Constraint* ct) { + DCHECK_EQ(ct->type, "int2float"); + // Rule 1. + UpdateRuleStats("int2float: merge integer and floating point variables."); + AddVariableSubstitution(ct->arguments[1].Var(), ct->arguments[0].Var()); + ct->MarkAsInactive(); +} + // Minizinc flattens 2d element constraints (x = A[y][z]) into 1d element // constraint with an affine mapping between y, z and the new index. // This rule stores the mapping to reconstruct the 2d element constraint. @@ -426,6 +439,8 @@ void Presolver::Run(Model* model) { for (Constraint* const ct : model->constraints()) { if (ct->active && ct->type == "bool2int") { PresolveBool2Int(ct); + } else if (ct->active && ct->type == "int2float") { + PresolveInt2Float(ct); } else if (ct->active && ct->type == "int_lin_eq" && ct->arguments[1].variables.size() == 2 && ct->strong_propagation) { @@ -454,6 +469,47 @@ void Presolver::Run(Model* model) { } } + // Third pass: process objective with floating point coefficients. + Variable* float_objective_var = nullptr; + for (Variable* var : model->variables()) { + if (!var->active) continue; + if (var->domain.is_float) { + CHECK(float_objective_var == nullptr); + float_objective_var = var; + } + } + + Constraint* float_objective_ct = nullptr; + if (float_objective_var != nullptr) { + for (Constraint* ct : model->constraints()) { + if (!ct->active) continue; + if (ct->type == "float_lin_eq") { + CHECK(float_objective_ct == nullptr); + float_objective_ct = ct; + break; + } + } + } + + if (float_objective_ct != nullptr || float_objective_var != nullptr) { + CHECK(float_objective_ct != nullptr); + CHECK(float_objective_var != nullptr); + const int arity = float_objective_ct->arguments[0].Size(); + CHECK_EQ(float_objective_ct->arguments[1].variables[arity - 1], + float_objective_var); + CHECK_EQ(float_objective_ct->arguments[0].floats[arity - 1], -1.0); + for (int i = 0; i + 1 < arity; ++i) { + model->AddFloatingPointObjectiveTerm( + float_objective_ct->arguments[1].variables[i], + float_objective_ct->arguments[0].floats[i]); + } + model->SetFloatingPointObjectiveOffset( + -float_objective_ct->arguments[2].floats[0]); + model->ClearObjective(); + float_objective_var->active = false; + float_objective_ct->active = false; + } + // Report presolve rules statistics. if (!successful_rules_.empty()) { for (const auto& rule : successful_rules_) { diff --git a/ortools/flatzinc/presolve.h b/ortools/flatzinc/presolve.h index 890a8bad752..ea3e6a7c87d 100644 --- a/ortools/flatzinc/presolve.h +++ b/ortools/flatzinc/presolve.h @@ -95,6 +95,7 @@ class Presolver { // Presolve rules. 
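The third presolve pass above recovers a floating-point objective from the `float_lin_eq` constraint defining the float objective variable. A small worked example of the arithmetic (values made up): with coefficients [2.5, 1.5, -1.0] on [x, y, obj] and right-hand side 3.0, the equality 2.5*x + 1.5*y - obj = 3.0 gives obj = 2.5*x + 1.5*y - 3.0, hence the terms (2.5, 1.5) and the negated offset:

    #include <iostream>
    #include <vector>

    // Illustrative only: mirrors the extraction done by the presolve pass.
    int main() {
      const std::vector<double> coeffs = {2.5, 1.5, -1.0};  // last entry: -obj
      const double rhs = 3.0;
      const double x = 1.0, y = 2.0;  // sample variable values
      const double obj = coeffs[0] * x + coeffs[1] * y - rhs;
      std::cout << "obj = " << obj << "\n";  // 2.5 + 3.0 - 3.0 = 2.5
      return 0;
    }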
void PresolveBool2Int(Constraint* ct); + void PresolveInt2Float(Constraint* ct); void PresolveStoreAffineMapping(Constraint* ct); void PresolveStoreFlatteningMapping(Constraint* ct); void PresolveSimplifyElement(Constraint* ct); diff --git a/ortools/glop/CMakeLists.txt b/ortools/glop/CMakeLists.txt index 6884bfe0014..3d1a0f9b073 100644 --- a/ortools/glop/CMakeLists.txt +++ b/ortools/glop/CMakeLists.txt @@ -28,5 +28,5 @@ target_link_libraries(${NAME} PRIVATE absl::strings absl::str_format protobuf::libprotobuf - ${PROJECT_NAMESPACE}::${PROJECT_NAME}_proto) + ${PROJECT_NAMESPACE}::ortools_proto) #add_library(${PROJECT_NAMESPACE}::glop ALIAS ${NAME}) diff --git a/ortools/glop/lp_solver.cc b/ortools/glop/lp_solver.cc index 8e08ff4f52e..072004dbeeb 100644 --- a/ortools/glop/lp_solver.cc +++ b/ortools/glop/lp_solver.cc @@ -36,6 +36,7 @@ #include "ortools/lp_data/lp_types.h" #include "ortools/lp_data/lp_utils.h" #include "ortools/lp_data/proto_utils.h" +#include "ortools/port/proto_utils.h" #include "ortools/util/fp_utils.h" #include "ortools/util/logging.h" @@ -177,7 +178,7 @@ ProblemStatus LPSolver::SolveWithTimeLimit(const LinearProgram& lp, SOLVER_LOG(&logger_, "Initial problem: ", lp.GetDimensionString()); SOLVER_LOG(&logger_, "Objective stats: ", lp.GetObjectiveStatsString()); SOLVER_LOG(&logger_, "Bounds stats: ", lp.GetBoundsStatsString()); - SOLVER_LOG(&logger_, "Parameters: ", parameters_.ShortDebugString()); + SOLVER_LOG(&logger_, "Parameters: ", ProtobufShortDebugString(parameters_)); } // Check some preconditions. diff --git a/ortools/glop/lu_factorization.cc b/ortools/glop/lu_factorization.cc index 28a0cfe5ee4..de30ea0f226 100644 --- a/ortools/glop/lu_factorization.cc +++ b/ortools/glop/lu_factorization.cc @@ -19,6 +19,7 @@ #include #include "absl/log/check.h" +#include "absl/types/span.h" #include "ortools/lp_data/lp_types.h" #include "ortools/lp_data/lp_utils.h" @@ -132,15 +133,14 @@ namespace { // norm of the given column, otherwise do the same with a sparse version. In // both cases column is cleared. Fractional ComputeSquaredNormAndResetToZero( - const std::vector& non_zeros, DenseColumn* column) { + const std::vector& non_zeros, absl::Span column) { Fractional sum = 0.0; if (non_zeros.empty()) { - sum = SquaredNorm(*column); - column->clear(); + sum = SquaredNormAndResetToZero(column); } else { for (const RowIndex row : non_zeros) { - sum += Square((*column)[row]); - (*column)[row] = 0.0; + sum += Square(column[row.value()]); + (column)[row.value()] = 0.0; } } return sum; @@ -152,7 +152,8 @@ Fractional LuFactorization::RightSolveSquaredNorm(const ColumnView& a) const { if (is_identity_factorization_) return SquaredNorm(a); non_zero_rows_.clear(); - dense_zero_scratchpad_.resize(lower_.num_rows(), 0.0); + const RowIndex num_rows = lower_.num_rows(); + dense_zero_scratchpad_.resize(num_rows, 0.0); DCHECK(IsAllZero(dense_zero_scratchpad_)); for (const SparseColumn::Entry e : a) { @@ -174,8 +175,9 @@ Fractional LuFactorization::RightSolveSquaredNorm(const ColumnView& a) const { upper_.HyperSparseSolveWithReversedNonZeros(&dense_zero_scratchpad_, &non_zero_rows_); } - return ComputeSquaredNormAndResetToZero(non_zero_rows_, - &dense_zero_scratchpad_); + return ComputeSquaredNormAndResetToZero( + non_zero_rows_, + absl::MakeSpan(dense_zero_scratchpad_.data(), num_rows.value())); } Fractional LuFactorization::DualEdgeSquaredNorm(RowIndex row) const { @@ -185,7 +187,8 @@ Fractional LuFactorization::DualEdgeSquaredNorm(RowIndex row) const { col_perm_.empty() ? 
row : ColToRowIndex(col_perm_[RowToColIndex(row)]); non_zero_rows_.clear(); - dense_zero_scratchpad_.resize(lower_.num_rows(), 0.0); + const RowIndex num_rows = lower_.num_rows(); + dense_zero_scratchpad_.resize(num_rows, 0.0); DCHECK(IsAllZero(dense_zero_scratchpad_)); dense_zero_scratchpad_[permuted_row] = 1.0; non_zero_rows_.push_back(permuted_row); @@ -204,8 +207,9 @@ Fractional LuFactorization::DualEdgeSquaredNorm(RowIndex row) const { transpose_lower_.HyperSparseSolveWithReversedNonZeros( &dense_zero_scratchpad_, &non_zero_rows_); } - return ComputeSquaredNormAndResetToZero(non_zero_rows_, - &dense_zero_scratchpad_); + return ComputeSquaredNormAndResetToZero( + non_zero_rows_, + absl::MakeSpan(dense_zero_scratchpad_.data(), num_rows.value())); } namespace { diff --git a/ortools/glop/markowitz.h b/ortools/glop/markowitz.h index 212c656c0f9..c4acfca66d0 100644 --- a/ortools/glop/markowitz.h +++ b/ortools/glop/markowitz.h @@ -194,7 +194,8 @@ class MatrixNonZeroPattern { // // TODO(user): We could be even more efficient since a size of int32_t is // enough for us and we could store in common the inlined/not-inlined size. - absl::StrongVector> row_non_zero_; + util_intops::StrongVector> + row_non_zero_; StrictITIVector row_degree_; StrictITIVector col_degree_; DenseBooleanRow deleted_columns_; @@ -274,7 +275,7 @@ class SparseMatrixWithReusableColumnMemory { // mutable_column(col) is stored in columns_[mapping_[col]]. // The columns_ that can be reused have their index stored in free_columns_. const SparseColumn empty_column_; - absl::StrongVector mapping_; + util_intops::StrongVector mapping_; std::vector free_columns_; std::vector columns_; }; diff --git a/ortools/glop/preprocessor.cc b/ortools/glop/preprocessor.cc index 1e73c5e9032..4f88318c8a4 100644 --- a/ortools/glop/preprocessor.cc +++ b/ortools/glop/preprocessor.cc @@ -1390,10 +1390,10 @@ bool ImpliedFreePreprocessor::Run(LinearProgram* lp) { const int size = num_rows.value(); // TODO(user) : Replace SumWithNegativeInfiniteAndOneMissing and // SumWithPositiveInfiniteAndOneMissing with IntervalSumWithOneMissing. - absl::StrongVector lb_sums( - size); - absl::StrongVector ub_sums( - size); + util_intops::StrongVector + lb_sums(size); + util_intops::StrongVector + ub_sums(size); // Initialize the sums by adding all the bounds of the variables. for (ColIndex col(0); col < num_cols; ++col) { @@ -3677,7 +3677,7 @@ bool ShiftVariableBoundsPreprocessor::Run(LinearProgram* lp) { int num_bound_shifts = 0; const RowIndex num_rows = lp->num_constraints(); KahanSum objective_offset; - absl::StrongVector row_offsets(num_rows.value()); + util_intops::StrongVector row_offsets(num_rows.value()); offsets_.assign(num_cols, 0.0); for (ColIndex col(0); col < num_cols; ++col) { if (0.0 < variable_initial_lbs_[col] || 0.0 > variable_initial_ubs_[col]) { diff --git a/ortools/glop/preprocessor.h b/ortools/glop/preprocessor.h index 1f792962723..37723dad6c8 100644 --- a/ortools/glop/preprocessor.h +++ b/ortools/glop/preprocessor.h @@ -268,7 +268,7 @@ class RowDeletionHelper { // EmptyColumnPreprocessor // -------------------------------------------------------- // Removes the empty columns from the problem. -class EmptyColumnPreprocessor : public Preprocessor { +class EmptyColumnPreprocessor final : public Preprocessor { public: explicit EmptyColumnPreprocessor(const GlopParameters* parameters) : Preprocessor(parameters) {} @@ -292,7 +292,7 @@ class EmptyColumnPreprocessor : public Preprocessor { // usually called duplicates. 
The notion is the same once the problem has been // scaled. However, during presolve the columns can't be assumed to be scaled, // so it makes sense to use the more general notion of proportional columns. -class ProportionalColumnPreprocessor : public Preprocessor { +class ProportionalColumnPreprocessor final : public Preprocessor { public: explicit ProportionalColumnPreprocessor(const GlopParameters* parameters) : Preprocessor(parameters) {} @@ -333,7 +333,7 @@ class ProportionalColumnPreprocessor : public Preprocessor { // Removes the proportional rows from the problem. // The linear programming literature also calls such rows duplicates, see the // same remark above for columns in ProportionalColumnPreprocessor. -class ProportionalRowPreprocessor : public Preprocessor { +class ProportionalRowPreprocessor final : public Preprocessor { public: explicit ProportionalRowPreprocessor(const GlopParameters* parameters) : Preprocessor(parameters) {} @@ -433,7 +433,7 @@ class SingletonUndo { // Deletes as many singleton rows or singleton columns as possible. Note that // each time we delete a row or a column, new singletons may be created. -class SingletonPreprocessor : public Preprocessor { +class SingletonPreprocessor final : public Preprocessor { public: explicit SingletonPreprocessor(const GlopParameters* parameters) : Preprocessor(parameters) {} @@ -504,10 +504,10 @@ class SingletonPreprocessor : public Preprocessor { // This is used as a "cache" by MakeConstraintAnEqualityIfPossible() to avoid // scanning more than once each row. See the code to see how this is used. - absl::StrongVector row_sum_is_cached_; - absl::StrongVector + util_intops::StrongVector row_sum_is_cached_; + util_intops::StrongVector row_lb_sum_; - absl::StrongVector + util_intops::StrongVector row_ub_sum_; // TODO(user): It is annoying that we need to store a part of the matrix that @@ -522,7 +522,7 @@ class SingletonPreprocessor : public Preprocessor { // FixedVariablePreprocessor // -------------------------------------------------------- // Removes the fixed variables from the problem. -class FixedVariablePreprocessor : public Preprocessor { +class FixedVariablePreprocessor final : public Preprocessor { public: explicit FixedVariablePreprocessor(const GlopParameters* parameters) : Preprocessor(parameters) {} @@ -557,7 +557,7 @@ class FixedVariablePreprocessor : public Preprocessor { // later by the FreeConstraintPreprocessor. // // * Otherwise, we do nothing. -class ForcingAndImpliedFreeConstraintPreprocessor : public Preprocessor { +class ForcingAndImpliedFreeConstraintPreprocessor final : public Preprocessor { public: explicit ForcingAndImpliedFreeConstraintPreprocessor( const GlopParameters* parameters) @@ -603,7 +603,7 @@ class ForcingAndImpliedFreeConstraintPreprocessor : public Preprocessor { // TODO(user): Only process doubleton columns so we have more chance in the // later passes to create more doubleton columns? Such columns lead to a smaller // problem thanks to the DoubletonFreeColumnPreprocessor. -class ImpliedFreePreprocessor : public Preprocessor { +class ImpliedFreePreprocessor final : public Preprocessor { public: explicit ImpliedFreePreprocessor(const GlopParameters* parameters) : Preprocessor(parameters) {} @@ -649,7 +649,7 @@ class ImpliedFreePreprocessor : public Preprocessor { // solver open source codes as of July 2013. All of them only process such // columns if one of the two rows is also an equality which is not actually // required. Most probably, commercial solvers do use it though.
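The `absl::StrongVector` to `util_intops::StrongVector` renames here and throughout the diff are a namespace migration of the same index-typed container. A self-contained sketch of the idea (a stand-in, not the OR-Tools `strong_vector.h`):

    #include <cstddef>
    #include <vector>

    // RowIndex and ColIndex are distinct types, so indexing a row container
    // with a column index fails to compile instead of silently reading the
    // wrong entry.
    template <typename Tag>
    struct StrongIndex {
      explicit StrongIndex(std::size_t v) : value(v) {}
      std::size_t value;
    };
    using RowIndex = StrongIndex<struct RowTag>;
    using ColIndex = StrongIndex<struct ColTag>;

    template <typename Index, typename T>
    class StrongVector {
     public:
      explicit StrongVector(std::size_t size) : data_(size) {}
      T& operator[](Index i) { return data_[i.value]; }

     private:
      std::vector<T> data_;
    };

    int main() {
      StrongVector<RowIndex, double> row_sums(10);
      row_sums[RowIndex(3)] = 1.5;     // OK
      // row_sums[ColIndex(3)] = 1.5;  // would not compile
      return 0;
    }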
-class DoubletonFreeColumnPreprocessor : public Preprocessor { +class DoubletonFreeColumnPreprocessor final : public Preprocessor { public: explicit DoubletonFreeColumnPreprocessor(const GlopParameters* parameters) : Preprocessor(parameters) {} @@ -700,7 +700,7 @@ class DoubletonFreeColumnPreprocessor : public Preprocessor { // translated into bounds on the reduced costs or the columns, which may force // variables to their bounds. This is called forcing and dominated columns in // the Andersen & Andersen paper. -class UnconstrainedVariablePreprocessor : public Preprocessor { +class UnconstrainedVariablePreprocessor final : public Preprocessor { public: explicit UnconstrainedVariablePreprocessor(const GlopParameters* parameters) : Preprocessor(parameters) {} @@ -752,7 +752,7 @@ class UnconstrainedVariablePreprocessor : public Preprocessor { // FreeConstraintPreprocessor // -------------------------------------------------------- // Removes the constraints with no bounds from the problem. -class FreeConstraintPreprocessor : public Preprocessor { +class FreeConstraintPreprocessor final : public Preprocessor { public: explicit FreeConstraintPreprocessor(const GlopParameters* parameters) : Preprocessor(parameters) {} @@ -771,7 +771,7 @@ class FreeConstraintPreprocessor : public Preprocessor { // EmptyConstraintPreprocessor // -------------------------------------------------------- // Removes the constraints with no coefficients from the problem. -class EmptyConstraintPreprocessor : public Preprocessor { +class EmptyConstraintPreprocessor final : public Preprocessor { public: explicit EmptyConstraintPreprocessor(const GlopParameters* parameters) : Preprocessor(parameters) {} @@ -793,7 +793,7 @@ class EmptyConstraintPreprocessor : public Preprocessor { // with only one entry) is positive. This is because this way the column will // be transformed in an identity column by the scaling. This will lead to more // efficient solve when this column is involved. -class SingletonColumnSignPreprocessor : public Preprocessor { +class SingletonColumnSignPreprocessor final : public Preprocessor { public: explicit SingletonColumnSignPreprocessor(const GlopParameters* parameters) : Preprocessor(parameters) {} @@ -815,7 +815,7 @@ class SingletonColumnSignPreprocessor : public Preprocessor { // Reduce equality constraints involving two variables (i.e. aX + bY = c), // by substitution (and thus removal) of one of the variables by the other // in all the constraints that it is involved in. -class DoubletonEqualityRowPreprocessor : public Preprocessor { +class DoubletonEqualityRowPreprocessor final : public Preprocessor { public: explicit DoubletonEqualityRowPreprocessor(const GlopParameters* parameters) : Preprocessor(parameters) {} @@ -904,7 +904,7 @@ void FixConstraintWithFixedStatuses(const DenseColumn& row_lower_bounds, // // IMPORTANT: FreeConstraintPreprocessor() must be called first since this // preprocessor does not deal correctly with free constraints. -class DualizerPreprocessor : public Preprocessor { +class DualizerPreprocessor final : public Preprocessor { public: explicit DualizerPreprocessor(const GlopParameters* parameters) : Preprocessor(parameters) {} @@ -962,7 +962,7 @@ class DualizerPreprocessor : public Preprocessor { // the ImpliedFreePreprocessor. However, shifting a variable with a domain like // [-1e10, 1e10] may introduce numerical issues. Relax the definition of // a free variable so that only having a domain containing 0.0 is enough? 
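Marking the concrete preprocessors `final`, as done above and continued below, is a standard devirtualization hint: when the static type of an object is known to have no subclasses, the compiler can bypass the vtable. A generic sketch of the effect (the class names here are hypothetical):

    struct Preprocessor {  // stand-in base class
      virtual ~Preprocessor() = default;
      virtual bool Run() = 0;
    };

    struct EmptyColumnLike final : Preprocessor {
      bool Run() override { return true; }
    };

    bool RunDirect(EmptyColumnLike& p) {
      // The static type is final, so the compiler may call (or inline)
      // EmptyColumnLike::Run directly instead of going through the vtable.
      return p.Run();
    }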
-class ShiftVariableBoundsPreprocessor : public Preprocessor { +class ShiftVariableBoundsPreprocessor final : public Preprocessor { public: explicit ShiftVariableBoundsPreprocessor(const GlopParameters* parameters) : Preprocessor(parameters) {} @@ -992,7 +992,7 @@ class ShiftVariableBoundsPreprocessor : public Preprocessor { // -------------------------------------------------------- // Scales the SparseMatrix of the linear program using a SparseMatrixScaler. // This is only applied if the parameter use_scaling is true. -class ScalingPreprocessor : public Preprocessor { +class ScalingPreprocessor final : public Preprocessor { public: explicit ScalingPreprocessor(const GlopParameters* parameters) : Preprocessor(parameters) {} @@ -1015,7 +1015,7 @@ class ScalingPreprocessor : public Preprocessor { // ToMinimizationPreprocessor // -------------------------------------------------------- // Changes the problem from maximization to minimization (if applicable). -class ToMinimizationPreprocessor : public Preprocessor { +class ToMinimizationPreprocessor final : public Preprocessor { public: explicit ToMinimizationPreprocessor(const GlopParameters* parameters) : Preprocessor(parameters) {} @@ -1046,7 +1046,7 @@ class ToMinimizationPreprocessor : public Preprocessor { // memory for no good reason. The internal matrix representation used in glop is // a lot more efficient, and there is no point keeping the slacks in // LinearProgram. It is also bad for incrementally modifying the LP. -class AddSlackVariablesPreprocessor : public Preprocessor { +class AddSlackVariablesPreprocessor final : public Preprocessor { public: explicit AddSlackVariablesPreprocessor(const GlopParameters* parameters) : Preprocessor(parameters) {} diff --git a/ortools/glop/revised_simplex.cc b/ortools/glop/revised_simplex.cc index ac4ead7c5cf..28ef73d4c97 100644 --- a/ortools/glop/revised_simplex.cc +++ b/ortools/glop/revised_simplex.cc @@ -28,6 +28,7 @@ #include "absl/random/bit_gen_ref.h" #include "absl/random/random.h" #include "absl/random/seed_sequences.h" +#include "absl/strings/match.h" #include "absl/strings/str_cat.h" #include "absl/strings/str_format.h" #include "absl/strings/string_view.h" @@ -93,6 +94,12 @@ class Cleanup { constexpr const uint64_t kDeterministicSeed = 42; +namespace { + +bool UseAbslRandom() { return false; } + +} // namespace + RevisedSimplex::RevisedSimplex() : problem_status_(ProblemStatus::INIT), objective_(), @@ -100,8 +107,8 @@ RevisedSimplex::RevisedSimplex() variable_name_(), direction_(), error_(), - deterministic_random_(kDeterministicSeed), - random_(deterministic_random_), + random_(UseAbslRandom() ? absl::BitGenRef(absl_random_) + : absl::BitGenRef(deterministic_random_)), basis_factorization_(&compact_matrix_, &basis_), variables_info_(compact_matrix_), primal_edge_norms_(compact_matrix_, variables_info_, @@ -132,7 +139,12 @@ void RevisedSimplex::ClearStateForNextSolve() { } void RevisedSimplex::LoadStateForNextSolve(const BasisState& state) { - SCOPED_TIME_STAT(&function_stats_); + // We avoid marking the state as set externally if it is the same as the + // current one. + // + // TODO(user): Add comparison operator.
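The `random_` member above becomes an `absl::BitGenRef` bound once, at construction, to either the deterministic engine or an `absl::BitGen`. A hedged sketch of that pattern outside OR-Tools (the class name and seed are made up):

    #include <random>

    #include "absl/random/bit_gen_ref.h"
    #include "absl/random/random.h"

    // absl::BitGenRef is a type-erased, non-owning reference to a uniform
    // random bit generator; the choice is fixed for the object's lifetime.
    class Sampler {
     public:
      explicit Sampler(bool use_absl_random)
          : random_(use_absl_random ? absl::BitGenRef(absl_random_)
                                    : absl::BitGenRef(deterministic_random_)) {}

      int RollDie() { return absl::Uniform<int>(random_, 1, 7); }  // [1, 7)

     private:
      std::mt19937 deterministic_random_{42};  // reproducible across runs
      absl::BitGen absl_random_;               // non-deterministic seeding
      absl::BitGenRef random_;                 // refers to one of the above
    };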
+ if (state.statuses == solution_state_.statuses) return; + solution_state_ = state; solution_state_has_been_set_externally_ = true; } @@ -3617,7 +3629,7 @@ Fractional RevisedSimplex::ComputeInitialProblemObjectiveValue() const { void RevisedSimplex::SetParameters(const GlopParameters& parameters) { SCOPED_TIME_STAT(&function_stats_); deterministic_random_.seed(parameters.random_seed()); - + absl_random_ = absl::BitGen(absl::SeedSeq({parameters.random_seed()})); initial_parameters_ = parameters; parameters_ = parameters; PropagateParameters(); @@ -3639,7 +3651,7 @@ void RevisedSimplex::DisplayIterationInfo(bool primal, const std::string first_word = primal ? "Primal " : "Dual "; // We display the info on each re-factorization, and it is nice to show what - // trigerred the issue. Note that we don't display normal refactorization when + // triggered the issue. Note that we don't display normal refactorization when // we decide that it is worth it for the solve time or we reach the fixed // refactorization period. std::string info; @@ -3813,9 +3825,9 @@ void RevisedSimplex::DisplayVariableBounds() { } } -absl::StrongVector RevisedSimplex::ComputeDictionary( - const DenseRow* column_scales) { - absl::StrongVector dictionary(num_rows_.value()); +util_intops::StrongVector +RevisedSimplex::ComputeDictionary(const DenseRow* column_scales) { + util_intops::StrongVector dictionary(num_rows_.value()); for (ColIndex col(0); col < num_cols_; ++col) { ComputeDirection(col); for (const auto e : direction_) { diff --git a/ortools/glop/revised_simplex.h b/ortools/glop/revised_simplex.h index f3cd68dba42..a1e05f82baa 100644 --- a/ortools/glop/revised_simplex.h +++ b/ortools/glop/revised_simplex.h @@ -729,9 +729,9 @@ class RevisedSimplex { // non-deterministic behavior and avoid clients depending on a golden optimal // solution, which prevents us from easily changing the solver. random_engine_t deterministic_random_; -#ifndef NDEBUG absl::BitGen absl_random_; -#endif + + // A reference to one of the above random generators. Fixed at construction. absl::BitGenRef random_; // Helpers for logging the solve progress. diff --git a/ortools/glop/variable_values.cc b/ortools/glop/variable_values.cc index 1700bcad936..0990ca2f409 100644 --- a/ortools/glop/variable_values.cc +++ b/ortools/glop/variable_values.cc @@ -201,7 +201,7 @@ void VariableValues::UpdateOnPivoting(const ScatteredColumn& direction, } void VariableValues::UpdateGivenNonBasicVariables( - const std::vector& cols_to_update, bool update_basic_variables) { + absl::Span cols_to_update, bool update_basic_variables) { SCOPED_TIME_STAT(&stats_); if (!update_basic_variables) { for (ColIndex col : cols_to_update) { diff --git a/ortools/glop/variable_values.h b/ortools/glop/variable_values.h index ffdc40e36f4..96021d5993c 100644 --- a/ortools/glop/variable_values.h +++ b/ortools/glop/variable_values.h @@ -111,7 +111,7 @@ class VariableValues { // update_basic_variables is true. The update is done in an incremental way // and is thus more efficient than calling afterwards // RecomputeBasicVariableValues() and RecomputeDualPrices(). - void UpdateGivenNonBasicVariables(const std::vector& cols_to_update, + void UpdateGivenNonBasicVariables(absl::Span cols_to_update, bool update_basic_variables); // Functions dealing with the primal-infeasible basic variables.
A basic diff --git a/ortools/graph/BUILD.bazel b/ortools/graph/BUILD.bazel index e1427d5e587..fe0f5883b5d 100644 --- a/ortools/graph/BUILD.bazel +++ b/ortools/graph/BUILD.bazel @@ -11,7 +11,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -load("@rules_cc//cc:defs.bzl", "cc_proto_library") +load("@rules_cc//cc:defs.bzl", "cc_library", "cc_proto_library") +load("@rules_proto//proto:defs.bzl", "proto_library") package(default_visibility = ["//visibility:public"]) @@ -65,10 +66,12 @@ cc_library( hdrs = ["bounded_dijkstra.h"], deps = [ ":graph", - "//ortools/base", "//ortools/base:iterator_adaptors", "//ortools/base:threadpool", + "@com_google_absl//absl/algorithm:container", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:check", + "@com_google_absl//absl/types:span", ], ) @@ -76,8 +79,9 @@ cc_library( name = "multi_dijkstra", hdrs = ["multi_dijkstra.h"], deps = [ - "//ortools/base", - "@com_google_absl//absl/container:flat_hash_set", + "//ortools/base:map_util", + "//ortools/base:types", + "@com_google_absl//absl/container:flat_hash_map", ], ) @@ -86,8 +90,10 @@ cc_library( hdrs = ["bidirectional_dijkstra.h"], deps = [ "//ortools/base", + "//ortools/base:iterator_adaptors", "//ortools/base:threadpool", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log", "@com_google_absl//absl/strings", "@com_google_absl//absl/synchronization", ], @@ -99,7 +105,7 @@ cc_library( hdrs = ["cliques.h"], deps = [ "//ortools/base", - "//ortools/base:intops", + "//ortools/base:int_type", "//ortools/base:strong_vector", "//ortools/util:time_limit", "@com_google_absl//absl/container:flat_hash_set", @@ -112,9 +118,11 @@ cc_library( hdrs = ["hamiltonian_path.h"], deps = [ "//ortools/base", + "//ortools/base:types", "//ortools/util:bitset", "//ortools/util:saturated_arithmetic", "//ortools/util:vector_or_function", + "@com_google_absl//absl/types:span", ], ) @@ -123,11 +131,16 @@ cc_library( hdrs = ["christofides.h"], deps = [ ":eulerian_path", + ":graph", ":minimum_spanning_tree", ":perfect_matching", "//ortools/base", + "//ortools/base:types", "//ortools/linear_solver", "//ortools/linear_solver:linear_solver_cc_proto", + "//ortools/util:saturated_arithmetic", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", ], ) @@ -144,10 +157,10 @@ cc_library( hdrs = ["minimum_spanning_tree.h"], deps = [ ":connected_components", - ":graph", - "//ortools/base", "//ortools/base:adjustable_priority_queue", + "//ortools/base:types", "//ortools/util:vector_or_function", + "@com_google_absl//absl/types:span", ], ) @@ -156,9 +169,10 @@ cc_library( hdrs = ["one_tree_lower_bound.h"], deps = [ ":christofides", + ":graph", ":minimum_spanning_tree", - "//ortools/base", - "@com_google_absl//absl/strings", + "@com_google_absl//absl/log", + "@com_google_absl//absl/types:span", ], ) @@ -167,8 +181,10 @@ cc_library( hdrs = ["ebert_graph.h"], deps = [ "//ortools/base", + "//ortools/base:types", "//ortools/util:permutation", "//ortools/util:zvector", + "@com_google_absl//absl/strings", ], ) @@ -181,12 +197,32 @@ cc_library( ":graph", "//ortools/base", "//ortools/base:adjustable_priority_queue", - "//ortools/base:file", "//ortools/base:map_util", "//ortools/base:stl_util", "//ortools/base:threadpool", "//ortools/base:timer", + "@com_google_absl//absl/container:flat_hash_map", "@com_google_absl//absl/functional:bind_front", + "@com_google_absl//absl/log", + "@com_google_absl//absl/log:check", + 
"@com_google_absl//absl/types:span", + ], +) + +cc_library( + name = "k_shortest_paths", + hdrs = ["k_shortest_paths.h"], + deps = [ + ":bounded_dijkstra", + ":ebert_graph", + ":shortest_paths", + "@com_google_absl//absl/algorithm:container", + "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/container:flat_hash_set", + "@com_google_absl//absl/log", + "@com_google_absl//absl/log:check", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/types:span", ], ) @@ -212,9 +248,12 @@ cc_library( ":graph", ":graphs", "//ortools/base", + "//ortools/base:types", "//ortools/util:stats", "//ortools/util:zvector", "@com_google_absl//absl/memory", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/strings:str_format", ], ) @@ -228,17 +267,15 @@ cc_test( ":graphs", ":max_flow", "//ortools/base", - "//ortools/base:gmock", - "//ortools/base:message_matchers", + "//ortools/base:gmock_main", "//ortools/base:path", - "//ortools/base:status_matchers", "//ortools/linear_solver", "//ortools/util:file_util", - "@com_google_absl//absl/flags:flag", "@com_google_absl//absl/random", "@com_google_absl//absl/strings:str_format", + "@com_google_absl//absl/types:span", "@com_google_benchmark//:benchmark", - "@com_google_googletest//:gtest_main", + "@com_google_protobuf//:protobuf", ], ) @@ -254,7 +291,6 @@ cc_library( "//conditions:default": [], }), deps = [ - ":connected_components", ":ebert_graph", ":graph", ":graphs", @@ -262,9 +298,13 @@ cc_library( "//ortools/base", "//ortools/base:dump_vars", "//ortools/base:mathutil", + "//ortools/base:types", "//ortools/util:saturated_arithmetic", "//ortools/util:stats", "//ortools/util:zvector", + "@com_google_absl//absl/flags:flag", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/strings:str_format", ], ) @@ -279,7 +319,8 @@ cc_binary( ":min_cost_flow", "//ortools/base", "//ortools/base:file", - "//ortools/base:filesystem", + "//ortools/base:status_macros", + "//ortools/base:timer", "//ortools/util:filelineiter", "//ortools/util:stats", "@com_google_absl//absl/flags:flag", @@ -298,7 +339,7 @@ cc_library( deps = [ ":ebert_graph", ":linear_assignment", - "//ortools/base", + "@com_google_absl//absl/flags:flag", ], ) @@ -314,50 +355,11 @@ cc_library( "//ortools/base:types", "//ortools/util:permutation", "//ortools/util:zvector", - "@com_google_absl//absl/strings", + "@com_google_absl//absl/flags:flag", "@com_google_absl//absl/strings:str_format", ], ) -# Biconnected -#cc_library( -# name = "biconnected", -# srcs = ["biconnected.cc"], -# hdrs = ["biconnected.h"], -# deps = [ -# ":ebert_graph", -# "//ortools/base", -# "//ortools/base:types", -# ], -#) - -# Hopcroft-Karp (Old) -#cc_library( -# name = "hopcroft_karp", -# srcs = ["hopcroft_karp.c"], -# hdrs = ["hopcroft_karp.h"], -#) - -# Hopcroft-Karp (New) -#cc_library( -# name = "bipartite_matching", -# srcs = ["bipartite_matching.cc"], -# hdrs = ["bipartite_matching.h"], -# deps = [ -# "//ortools/base", -# ], -#) - -#cc_library( -# name = "dag_connectivity", -# srcs = ["dag_connectivity.cc"], -# hdrs = ["dag_connectivity.h"], -# deps = [ -# ":topologicalsorter", -# "//ortools/base", -# ], -#) - cc_library( name = "perfect_matching", srcs = ["perfect_matching.cc"], @@ -365,12 +367,13 @@ cc_library( deps = [ "//ortools/base", "//ortools/base:adjustable_priority_queue", - "//ortools/base:intops", + "//ortools/base:int_type", "//ortools/base:strong_vector", - "//ortools/base:types", "//ortools/util:saturated_arithmetic", "@com_google_absl//absl/base:core_headers", - 
"@com_google_absl//absl/memory", + "@com_google_absl//absl/base:log_severity", + "@com_google_absl//absl/log", + "@com_google_absl//absl/log:check", "@com_google_absl//absl/strings", ], ) @@ -380,8 +383,10 @@ cc_library( srcs = ["dag_shortest_path.cc"], hdrs = ["dag_shortest_path.h"], deps = [ + ":ebert_graph", ":graph", ":topologicalsorter", + "@com_google_absl//absl/algorithm:container", "@com_google_absl//absl/log", "@com_google_absl//absl/log:check", "@com_google_absl//absl/status", @@ -398,12 +403,31 @@ cc_library( ":dag_shortest_path", ":graph", ":topologicalsorter", + "//ortools/base:threadpool", + "@com_google_absl//absl/algorithm:container", + "@com_google_absl//absl/base:log_severity", + "@com_google_absl//absl/log:check", "@com_google_absl//absl/status:statusor", "@com_google_absl//absl/strings:str_format", "@com_google_absl//absl/types:span", ], ) +cc_library( + name = "rooted_tree", + hdrs = ["rooted_tree.h"], + deps = [ + "//ortools/base:status_macros", + "@com_google_absl//absl/algorithm:container", + "@com_google_absl//absl/log", + "@com_google_absl//absl/log:check", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/types:span", + ], +) + # From util/graph cc_library( name = "connected_components", @@ -441,6 +465,21 @@ cc_library( hdrs = ["iterators.h"], ) +cc_library( + name = "random_graph", + srcs = ["random_graph.cc"], + hdrs = ["random_graph.h"], + deps = [ + ":graph", + "//ortools/base:logging", + "//ortools/base:types", + "@com_google_absl//absl/container:flat_hash_set", + "@com_google_absl//absl/memory", + "@com_google_absl//absl/random", + "@com_google_absl//absl/random:bit_gen_ref", + ], +) + cc_library( name = "strongly_connected_components", hdrs = [ diff --git a/ortools/graph/CMakeLists.txt b/ortools/graph/CMakeLists.txt index d9b638e9d77..109cf30b019 100644 --- a/ortools/graph/CMakeLists.txt +++ b/ortools/graph/CMakeLists.txt @@ -23,6 +23,7 @@ list(REMOVE_ITEM _SRCS ${CMAKE_CURRENT_SOURCE_DIR}/ebert_graph_test.cc ${CMAKE_CURRENT_SOURCE_DIR}/eulerian_path_test.cc ${CMAKE_CURRENT_SOURCE_DIR}/hamiltonian_path_test.cc + ${CMAKE_CURRENT_SOURCE_DIR}/k_shortest_paths_test.cc ${CMAKE_CURRENT_SOURCE_DIR}/linear_assignment_test.cc ${CMAKE_CURRENT_SOURCE_DIR}/max_flow_test.cc ${CMAKE_CURRENT_SOURCE_DIR}/min_cost_flow_test.cc @@ -30,6 +31,7 @@ list(REMOVE_ITEM _SRCS ${CMAKE_CURRENT_SOURCE_DIR}/multi_dijkstra_test.cc ${CMAKE_CURRENT_SOURCE_DIR}/one_tree_lower_bound_test.cc ${CMAKE_CURRENT_SOURCE_DIR}/perfect_matching_test.cc + ${CMAKE_CURRENT_SOURCE_DIR}/rooted_tree_test.cc ${CMAKE_CURRENT_SOURCE_DIR}/shortest_paths_benchmarks.cc ${CMAKE_CURRENT_SOURCE_DIR}/shortest_paths_test.cc ${CMAKE_CURRENT_SOURCE_DIR}/solve_flow_model.cc @@ -53,6 +55,6 @@ target_link_libraries(${NAME} PRIVATE absl::strings absl::str_format protobuf::libprotobuf - ${PROJECT_NAMESPACE}::${PROJECT_NAME}_proto + ${PROJECT_NAMESPACE}::ortools_proto $<$:Coin::Cbc>) #add_library(${PROJECT_NAMESPACE}::graph ALIAS ${NAME}) diff --git a/ortools/graph/README.md b/ortools/graph/README.md index 59d10b728c8..e8940279a7a 100644 --- a/ortools/graph/README.md +++ b/ortools/graph/README.md @@ -6,8 +6,8 @@ network flow problems. It contains in particular: * well-tuned algorithms (for example, shortest paths and - [Hamiltonian paths](https://en.wikipedia.org/wiki/Hamiltonian_path)). -* hard-to-find algorithms (Hamiltonian paths, push-relabel flow algorithms). + [Hamiltonian paths](https://en.wikipedia.org/wiki/Hamiltonian_path)). 
+* hard-to-find algorithms (Hamiltonian paths, push-relabel flow algorithms). * other, more common algorithms, that are useful to use with `EbertGraph`. Graph representations: @@ -69,11 +69,11 @@ Flow algorithms: * [`linear_assignment.h`][linear_assignment_h]: entry point for solving linear sum assignment problems (classical assignment problems where the total cost is the sum of the costs of each arc used) on directed graphs with arc costs, - based on the Goldberg-Kennedy push-relabel algorithm. + based on the Goldberg-Kennedy push-relabel algorithm. * [`max_flow.h`][max_flow_h]: entry point for computing maximum flows on - directed graphs with arc capacities, based on the Goldberg-Tarjan - push-relabel algorithm. + directed graphs with arc capacities, based on the Goldberg-Tarjan + push-relabel algorithm. * [`min_cost_flow.h`][min_cost_flow_h]: entry point for computing minimum-cost flows on directed graphs with arc capacities, arc costs, and diff --git a/ortools/graph/bidirectional_dijkstra_test.cc b/ortools/graph/bidirectional_dijkstra_test.cc index 9ce1d12ac12..5de9f168ab8 100644 --- a/ortools/graph/bidirectional_dijkstra_test.cc +++ b/ortools/graph/bidirectional_dijkstra_test.cc @@ -20,15 +20,15 @@ #include #include +#include "absl/base/log_severity.h" #include "absl/container/flat_hash_map.h" #include "absl/random/distributions.h" #include "absl/strings/str_cat.h" +#include "absl/types/span.h" #include "gtest/gtest.h" #include "ortools/base/gmock.h" -#include "ortools/base/map_util.h" #include "ortools/graph/bounded_dijkstra.h" #include "ortools/graph/graph.h" -#include "util/tuple/dump_vars.h" namespace operations_research { namespace { @@ -100,9 +100,6 @@ TEST(BidirectionalDijkstraTest, SmallTest) { TEST(BidirectionalDijkstraTest, RandomizedCorrectnessTest) { std::mt19937 random(12345); - // Performance on forge as of 2016-10-05 with these numbers, over 1000 runs: - // - fastbuild: max = 21.9s, avg = 10.7s. - // - opt: max = 23.2s, avg = 10.4s. const int kNumGraphs = DEBUG_MODE ? 100 : 300; const int kNumQueriesPerGraph = DEBUG_MODE ? 10 : 30; const int kNumNodes = 1000; @@ -145,7 +142,7 @@ TEST(BidirectionalDijkstraTest, RandomizedCorrectnessTest) { &forward_graph, &forward_lengths); // To print some debugging info in case the test fails. 
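Several of the changes in these test and library files replace `const std::vector<T>&` parameters with `absl::Span<const T>`, which accepts any contiguous sequence without copying. A small sketch of why the migration is caller-friendly:

    #include <vector>

    #include "absl/types/span.h"

    // A Span parameter binds to vectors and plain arrays alike, without
    // forcing callers to materialize a std::vector.
    double Sum(absl::Span<const double> values) {
      double total = 0.0;
      for (const double v : values) total += v;
      return total;
    }

    int main() {
      const std::vector<double> v = {1.0, 2.0, 3.0};
      const double a[] = {4.0, 5.0};
      return Sum(v) + Sum(a) > 0 ? 0 : 1;  // both bind to Span<const double>
    }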
- auto print_arc_path = [&](const std::vector& arc_path) -> std::string { + auto print_arc_path = [&](absl::Span arc_path) -> std::string { if (arc_path.empty()) return ""; std::string out = absl::StrCat(forward_graph.Tail(arc_path[0])); double total_length = 0.0; @@ -158,7 +155,7 @@ TEST(BidirectionalDijkstraTest, RandomizedCorrectnessTest) { return out; }; auto print_node_distances = - [&](const std::vector& nds) -> std::string { + [&](absl::Span nds) -> std::string { std::string out = "{"; for (const Dijkstra::NodeDistance& nd : nds) { absl::StrAppend(&out, " #", nd.node, " dist=", (nd.distance), ","); diff --git a/ortools/graph/christofides.h b/ortools/graph/christofides.h index 0b24e1088f3..38f0c907bf2 100644 --- a/ortools/graph/christofides.h +++ b/ortools/graph/christofides.h @@ -27,13 +27,14 @@ #define OR_TOOLS_GRAPH_CHRISTOFIDES_H_ #include +#include #include +#include #include #include "absl/status/status.h" #include "absl/status/statusor.h" #include "ortools/base/logging.h" -#include "ortools/base/types.h" #include "ortools/graph/eulerian_path.h" #include "ortools/graph/graph.h" #include "ortools/graph/minimum_spanning_tree.h" diff --git a/ortools/graph/christofides_test.cc b/ortools/graph/christofides_test.cc index 797cdd2d861..b08699ba3f2 100644 --- a/ortools/graph/christofides_test.cc +++ b/ortools/graph/christofides_test.cc @@ -22,6 +22,7 @@ #include "absl/strings/str_format.h" #include "absl/strings/string_view.h" +#include "absl/types/span.h" #include "benchmark/benchmark.h" #include "gtest/gtest.h" #include "ortools/base/logging.h" @@ -30,7 +31,7 @@ namespace operations_research { // Displays the path. -std::string PathToString(const std::vector& path) { +std::string PathToString(absl::Span path) { std::string path_string; const int size = path.size(); for (int i = 0; i < size; i++) { diff --git a/ortools/graph/cliques.cc b/ortools/graph/cliques.cc index 5f2506b6f58..ed3ccc1b937 100644 --- a/ortools/graph/cliques.cc +++ b/ortools/graph/cliques.cc @@ -188,7 +188,9 @@ class FindAndEliminate { public: FindAndEliminate(std::function graph, int node_count, std::function&)> callback) - : graph_(graph), node_count_(node_count), callback_(callback) {} + : graph_(std::move(graph)), + node_count_(node_count), + callback_(std::move(callback)) {} bool GraphCallback(int node1, int node2) { if (visited_.find( @@ -233,13 +235,13 @@ void FindCliques(std::function graph, int node_count, } bool stop = false; - Search(graph, callback, initial_candidates.get(), 0, node_count, &actual, - &stop); + Search(std::move(graph), std::move(callback), initial_candidates.get(), 0, + node_count, &actual, &stop); } void CoverArcsByCliques(std::function graph, int node_count, std::function&)> callback) { - FindAndEliminate cache(graph, node_count, callback); + FindAndEliminate cache(std::move(graph), node_count, std::move(callback)); std::unique_ptr initial_candidates(new int[node_count]); std::vector actual; @@ -256,8 +258,8 @@ void CoverArcsByCliques(std::function graph, int node_count, } bool stop = false; - Search(cached_graph, cached_callback, initial_candidates.get(), 0, node_count, - &actual, &stop); + Search(std::move(cached_graph), std::move(cached_callback), + initial_candidates.get(), 0, node_count, &actual, &stop); } } // namespace operations_research diff --git a/ortools/graph/cliques.h b/ortools/graph/cliques.h index 889fc54b29e..7901566bf28 100644 --- a/ortools/graph/cliques.h +++ b/ortools/graph/cliques.h @@ -24,6 +24,7 @@ #ifndef OR_TOOLS_GRAPH_CLIQUES_H_ #define OR_TOOLS_GRAPH_CLIQUES_H_ 
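The `cliques.cc` changes above take the `std::function` arguments by value and `std::move` them into place, avoiding an extra copy of whatever state the callable captured. A minimal sketch of the pattern (the class here is illustrative, not the OR-Tools type):

    #include <functional>
    #include <utility>

    // Sink-by-value plus std::move: the caller's argument is moved, not
    // copied, into the stored member.
    class EdgeChecker {
     public:
      explicit EdgeChecker(std::function<bool(int, int)> graph)
          : graph_(std::move(graph)) {}

      bool Connected(int a, int b) const { return graph_(a, b); }

     private:
      std::function<bool(int, int)> graph_;
    };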
+#include #include #include #include @@ -275,7 +276,7 @@ class BronKerboschAlgorithm { // clique. // NOTE(user): We could store the delta between the iterations; however, // we need to evaluate the impact this would have on the performance. - absl::StrongVector candidates; + util_intops::StrongVector candidates; // The index of the first actual candidate in 'candidates'. This number is // also the number of elements of the "not" set stored at the beginning of // 'candidates'. @@ -451,7 +452,7 @@ void BronKerboschAlgorithm::PushState(NodeIndex selected) { DCHECK(time_limit_ != nullptr); DVLOG(2) << "PushState: New depth = " << states_.size() + 1 << ", selected node = " << selected; - absl::StrongVector new_candidates; + util_intops::StrongVector new_candidates; State* const previous_state = &states_.back(); const double deterministic_time = diff --git a/ortools/graph/cliques_test.cc b/ortools/graph/cliques_test.cc index 7ec860b969c..d846be1d651 100644 --- a/ortools/graph/cliques_test.cc +++ b/ortools/graph/cliques_test.cc @@ -26,6 +26,7 @@ #include "absl/functional/bind_front.h" #include "absl/random/distributions.h" #include "absl/strings/str_cat.h" +#include "absl/types/span.h" #include "benchmark/benchmark.h" #include "gtest/gtest.h" #include "ortools/base/mathutil.h" @@ -76,7 +77,7 @@ class CliqueSizeVerifier { int64_t num_cliques() const { return num_cliques_; } - bool AppendClique(const std::vector& new_clique) { + bool AppendClique(absl::Span new_clique) { EXPECT_GE(expected_max_clique_size_, new_clique.size()); EXPECT_LE(expected_min_clique_size_, new_clique.size()); ++num_cliques_; @@ -84,7 +85,7 @@ class CliqueSizeVerifier { } std::function&)> MakeCliqueCallback() { - return [this](const std::vector& clique) { + return [this](absl::Span clique) { return AppendClique(clique) ? CliqueResponse::STOP : CliqueResponse::CONTINUE; }; diff --git a/ortools/graph/dag_constrained_shortest_path.cc b/ortools/graph/dag_constrained_shortest_path.cc index 6b2c57e1e12..8fe84503237 100644 --- a/ortools/graph/dag_constrained_shortest_path.cc +++ b/ortools/graph/dag_constrained_shortest_path.cc @@ -78,8 +78,8 @@ PathWithLength ConstrainedShortestPathsOnDag( std::vector destinations = {destination}; ConstrainedShortestPathsOnDagWrapper> constrained_shortest_path_on_dag(&graph, &arc_lengths, &arc_resources, - &(*topological_order), &sources, - &destinations, &max_resources); + *topological_order, sources, + destinations, &max_resources); PathWithLength path_with_length = constrained_shortest_path_on_dag.RunConstrainedShortestPathOnDag(); diff --git a/ortools/graph/dag_constrained_shortest_path.h b/ortools/graph/dag_constrained_shortest_path.h index b864d8d94fc..ddb22ada990 100644 --- a/ortools/graph/dag_constrained_shortest_path.h +++ b/ortools/graph/dag_constrained_shortest_path.h @@ -15,7 +15,6 @@ #define OR_TOOLS_GRAPH_DAG_CONSTRAINED_SHORTEST_PATH_H_ #include -#include #include #include @@ -24,14 +23,27 @@ #include "absl/log/check.h" #include "absl/strings/str_format.h" #include "absl/types/span.h" +#include "ortools/base/threadpool.h" #include "ortools/graph/dag_shortest_path.h" +#include "ortools/graph/graph.h" namespace operations_research { // This library provides APIs to compute the constrained shortest path (CSP) on // a given directed acyclic graph (DAG) with resources on each arc. A CSP is a // shortest path on a DAG which does not exceed a set of maximum resources -// consumption. The algorithm is exponential and has no guarantee to finish. +// consumption. 
+// consumption. The algorithm is exponential and has no guarantee to finish. It
+// is based on bidirectional search. First is a forward pass from the source to
+// nodes “somewhere in the middle” to generate forward labels, just as the
+// one-directional labeling algorithm we discussed; then a symmetric backward
+// pass from the destination generates backward labels; and finally, at each
+// node with both forward and backward labels, it joins any pair of labels to
+// form a feasible complete path. Intuitively, the number of labels grows
+// exponentially with the number of arcs in the path. The overall number of
+// labels is then expected to be smaller with shorter paths. For DAGs with a
+// topological ordering, we can pick any node (usually right in the middle) as
+// a *midpoint* to stop each pass at. Then labels can be joined at only one
+// half of the nodes by considering all edges between each half.
 //
 // In the DAG, multiple arcs between the same pair of nodes is allowed. However,
 // self-loop arcs are not allowed.
@@ -67,23 +79,6 @@ PathWithLength ConstrainedShortestPathsOnDag(
 // -----------------------------------------------------------------------------
 // Advanced API.
 // -----------------------------------------------------------------------------
-#if __cplusplus >= 202002L
-template <typename GraphType>
-concept DagGraphType = requires(GraphType graph) {
-  { typename GraphType::NodeIndex{} };
-  { typename GraphType::ArcIndex{} };
-  { graph.num_nodes() } -> std::same_as<typename GraphType::NodeIndex>;
-  { graph.num_arcs() } -> std::same_as<typename GraphType::ArcIndex>;
-  { graph.OutgoingArcs(typename GraphType::NodeIndex{}) };
-  {
-    graph.Tail(typename GraphType::ArcIndex{})
-  } -> std::same_as<typename GraphType::NodeIndex>;
-  {
-    graph.Head(typename GraphType::ArcIndex{})
-  } -> std::same_as<typename GraphType::NodeIndex>;
-};
-#endif
-
 // A wrapper that holds the memory needed to run many constrained shortest path
 // computations efficiently on the given DAG (on which resources do not change).
 // `GraphType` can use one of the interfaces defined in `util/graph/graph.h`.
@@ -114,70 +109,153 @@ class ConstrainedShortestPathsOnDagWrapper {
   //
   // Validity of arcs and topological order are DCHECKed.
   //
-  // If the number of labels in memory exceeds `max_num_created_labels` at any
-  // point in the algorithm, it returns the best path found so far, most
-  // particularly the empty path if none were found.
+  // If the number of labels in memory exceeds `max_num_created_labels / 2` at
+  // any point in each pass of the algorithm, new labels are no longer
+  // generated and it returns the best path found so far, in particular the
+  // empty path if none was found.
   //
-  // SUBTLE: You can modify the graph, the arc lengths and resources, the
-  // topological order, sources, destinations or the maximum resource between
-  // calls to the `RunConstrainedShortestPathOnDag()` function. That's fine.
-  // Doing so will obviously invalidate the result API of the last constrained
-  // shortest path run, which could return an upper bound, junk, or crash.
+  // IMPORTANT: You cannot modify anything except `arc_lengths` between calls
+  // to the `RunConstrainedShortestPathOnDag()` function.
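Before the constructor declaration below, here is a sketch of the label-joining step the comment above describes: a forward label (grown from a source) and a backward label (grown from a destination) meeting at the same midpoint node form a feasible path iff their combined resource consumption fits the budget. The `HalfLabel` struct and `CanJoin` helper are illustrative stand-ins, not the wrapper's actual internals:

#include <cstddef>
#include <vector>
#include "absl/types/span.h"

// Hypothetical label: cumulative length and per-resource consumption of a
// half-path ending (forward pass) or starting (backward pass) at the midpoint.
struct HalfLabel {
  double length = 0.0;
  std::vector<double> resources;
};

// A forward/backward pair joins into a feasible complete path iff, for every
// resource r, the summed consumption stays within the maximum.
bool CanJoin(const HalfLabel& fwd, const HalfLabel& bwd,
             absl::Span<const double> max_resources) {
  for (size_t r = 0; r < max_resources.size(); ++r) {
    if (fwd.resources[r] + bwd.resources[r] > max_resources[r]) return false;
  }
  return true;
}

The joined path has length fwd.length + bwd.length; the search keeps the minimum over all joinable pairs at all midpoint nodes.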
   ConstrainedShortestPathsOnDagWrapper(
       const GraphType* graph, const std::vector<double>* arc_lengths,
       const std::vector<std::vector<double>>* arc_resources,
-      const std::vector<NodeIndex>* topological_order,
-      const std::vector<NodeIndex>* sources,
-      const std::vector<NodeIndex>* destinations,
+      absl::Span<const NodeIndex> topological_order,
+      absl::Span<const NodeIndex> sources,
+      absl::Span<const NodeIndex> destinations,
       const std::vector<double>* max_resources,
       int max_num_created_labels = 1e9);

   // Returns {+inf, {}, {}} if there is no constrained path of finite length
-  // from one node in `sources` to one node in `destinations`.
+  // within resource constraints from one node in `sources` to one node in
+  // `destinations`.
   PathWithLength RunConstrainedShortestPathOnDag();

+  // For benchmarking and informational purposes, returns the number of labels
+  // generated in the call of `RunConstrainedShortestPathOnDag()`.
+  int label_count() const {
+    return lengths_from_sources_[FORWARD].size() +
+           lengths_from_sources_[BACKWARD].size();
+  }
+
  private:
-  // Returns the list of all the arcs of the shortest path from one node in
-  // `sources` ending by the arc from a given `label_index` if and only if
-  // `label_index` is between 0 and `labels_from_sources_.size() - 1`.
-  std::vector<ArcIndex> BestArcPathEndingWith(int label_index) const;
+  enum Direction {
+    FORWARD = 0,
+    BACKWARD = 1,
+  };
+
+  inline static Direction Reverse(Direction d) {
+    return d == FORWARD ? BACKWARD : FORWARD;
+  }
+
+  // A LabelPair includes the `length` of a path that can be constructed by
+  // merging the paths from two *linkable* labels corresponding to
+  // `label_index`.
+  struct LabelPair {
+    double length = 0.0;
+    int label_index[2];
+  };
+
+  void RunHalfConstrainedShortestPathOnDag(
+      const GraphType& reverse_graph, absl::Span<const double> arc_lengths,
+      absl::Span<const std::vector<double>> arc_resources,
+      absl::Span<const std::vector<double>> min_arc_resources,
+      absl::Span<const double> max_resources, int max_num_created_labels,
+      std::vector<double>& lengths_from_sources,
+      std::vector<std::vector<double>>& resources_from_sources,
+      std::vector<ArcIndex>& incoming_arc_indices_from_sources,
+      std::vector<int>& incoming_label_indices_from_sources,
+      std::vector<int>& first_label, std::vector<int>& num_labels);

+  // Returns the arc index linking two nodes from each pass forming the best
+  // path. Returns -1 if no better path than the one found from
+  // `best_label_pair` is found.
+  ArcIndex MergeHalfRuns(
+      const GraphType& graph, absl::Span<const double> arc_lengths,
+      absl::Span<const std::vector<double>> arc_resources,
+      absl::Span<const double> max_resources,
+      const std::vector<NodeIndex> sub_node_indices[2],
+      const std::vector<double> lengths_from_sources[2],
+      const std::vector<std::vector<double>> resources_from_sources[2],
+      const std::vector<int> first_label[2],
+      const std::vector<int> num_labels[2], LabelPair& best_label_pair);
+
+  // Returns the path as a list of arc indices that starts from a node in
+  // `sources` (if `direction` is FORWARD) or `destinations` (if `direction` is
+  // BACKWARD) and ends in the node represented by `best_label_index`.
+  std::vector<ArcIndex> ArcPathTo(
+      int best_label_index,
+      absl::Span<const ArcIndex> incoming_arc_indices_from_sources,
+      absl::Span<const int> incoming_label_indices_from_sources) const;
+
+  // Returns the list of all the nodes implied by a given `arc_path`.
-  std::vector<NodeIndex> NodePathImpliedBy(
-      const std::vector<ArcIndex>& arc_path) const;
+  std::vector<NodeIndex> NodePathImpliedBy(absl::Span<const ArcIndex> arc_path,
+                                           const GraphType& graph) const;
+
+  static constexpr double kTolerance = 1e-6;

   const GraphType* const graph_;
   const std::vector<double>* const arc_lengths_;
   const std::vector<std::vector<double>>* const arc_resources_;
-  const std::vector<NodeIndex>* const topological_order_;
-  const std::vector<NodeIndex>* const sources_;
-  const std::vector<NodeIndex>* const destinations_;
   const std::vector<double>* const max_resources_;
-  int max_num_created_labels_;
-
-  std::vector<bool> node_is_source_;
-  std::vector<bool> node_is_destination_;
-  // Using the fact that the graph is a DAG, we can disregard any node that
-  // comes after the last destination (based on the topological order).
-  std::vector<bool> node_is_after_last_destination_;
+  absl::Span<const NodeIndex> sources_;
+  absl::Span<const NodeIndex> destinations_;
+  const int num_resources_;

-  // Data for reverse graph.
-  GraphType reverse_graph_;
-  std::vector<ArcIndex> reverse_inverse_arc_permutation_;
+  // Data about *reachable* sub-graphs split in two for bidirectional search.
+  // Reachable nodes are nodes that can be reached given the resources
+  // constraints, i.e., for each resource, the sum of the minimum resource to
+  // get to a node from a node in `sources` and to get from a node to a node in
+  // `destinations` should be less than the maximum resource. Reachable arcs
+  // are arcs linking reachable nodes.
+  //
+  // `sub_reverse_graph_[dir]` is the reachable sub-graph split in *half* with
+  // an additional node linked to sources (resp. destinations) for the forward
+  // (resp. backward) direction. For the forward (resp. backward) direction,
+  // nodes are indexed using the original (resp. reverse) topological order.
+  GraphType sub_reverse_graph_[2];
+  std::vector<std::vector<double>> sub_arc_resources_[2];
+  // `sub_full_arc_indices_[dir]` has size `sub_reverse_graph_[dir].num_arcs()`
+  // such that `sub_full_arc_indices_[dir][sub_arc] = arc` where `sub_arc` is
+  // the arc in the reachable sub-graph for direction `dir` (i.e.
+  // `sub_reverse_graph[dir]`) and `arc` is the arc in the original graph (i.e.
+  // `graph`).
+  std::vector<ArcIndex> sub_full_arc_indices_[2];
+  // `sub_node_indices_[dir]` has size `graph->num_nodes()` such that
+  // `sub_node_indices[dir][node] = sub_node` where `node` is the node in the
+  // original graph (i.e. `graph`) and `sub_node` is the node in the reachable
+  // sub-graph for direction `dir` (i.e. `sub_reverse_graph[dir]`), and -1 if
+  // `node` is not present in the reachable sub-graph.
+  std::vector<NodeIndex> sub_node_indices_[2];
+  // `sub_is_source_[dir][sub_dir]` has size
+  // `sub_reverse_graph_[dir].num_nodes()` such that
+  // `sub_is_source_[dir][sub_dir][sub_node]` is true if `sub_node` is a node
+  // in the reachable sub-graph for direction `dir` (i.e.
+  // `sub_reverse_graph[dir]`) which is a source (resp. destination) if
+  // `sub_dir` is FORWARD (resp. BACKWARD).
+  std::vector<bool> sub_is_source_[2][2];
+  // `sub_min_arc_resources_[dir]` has size `max_resources->size()` and
+  // `sub_min_arc_resources_[dir][r]` has size
+  // `sub_reverse_graph_[dir].num_nodes()` such that
+  // `sub_min_arc_resources_[dir][r][sub_node]` is the minimum of resource r
+  // needed to get to a destination (resp. come from a source) if `dir` is
+  // FORWARD (resp. BACKWARD).
+  std::vector<std::vector<double>> sub_min_arc_resources_[2];
+  // Maximum number of labels created for each sub-graph.
+  int max_num_created_labels_[2];

   // Data about the last call of the RunConstrainedShortestPathOnDag()
-  // function. A Label includes the cumulative length, resources and the
-  // previous arc used in the path to get to this node.
-  struct Label {
-    double length;
-    // TODO(b/315786885): Optimize resources in Label struct.
-    std::vector<double> resources;
-    ArcIndex incoming_arc;
-  };
-  // A label is present in `labels_from_sources_` if and only if it is feasible
-  // with respect to all resources.
-  std::vector