From 6022e303c7ab1546a80f86399307e9319b251227 Mon Sep 17 00:00:00 2001
From: Henri Lefebvre
Date: Mon, 30 Oct 2023 18:24:51 +0100
Subject: [PATCH] add minknap wrapper

---
 examples/assignment.example.cpp             |   1 +
 lib/CMakeLists.txt                          |  12 ++
 lib/idolConfig.cmake.in                     |   8 +
 .../optimizers/wrappers/MinKnap/MinKnap.h   |  28 +++
 .../wrappers/MinKnap/Optimizers_MinKnap.h   |  57 ++++++
 .../optimizers/wrappers/MinKnap/MinKnap.cpp |  18 ++
 .../wrappers/MinKnap/Optimizers_MinKnap.cpp | 186 ++++++++++++++++++
 7 files changed, 310 insertions(+)
 create mode 100644 lib/include/idol/optimizers/wrappers/MinKnap/MinKnap.h
 create mode 100644 lib/include/idol/optimizers/wrappers/MinKnap/Optimizers_MinKnap.h
 create mode 100644 lib/src/optimizers/wrappers/MinKnap/MinKnap.cpp
 create mode 100644 lib/src/optimizers/wrappers/MinKnap/Optimizers_MinKnap.cpp

diff --git a/examples/assignment.example.cpp b/examples/assignment.example.cpp
index d6743dfa..d1aff2f1 100644
--- a/examples/assignment.example.cpp
+++ b/examples/assignment.example.cpp
@@ -11,6 +11,7 @@
 #include "idol/optimizers/callbacks/SimpleRounding.h"
 #include "idol/optimizers/branch-and-bound/branching-rules/factories/MostInfeasible.h"
 #include "idol/optimizers/wrappers/HiGHS/HiGHS.h"
+#include "idol/optimizers/wrappers/MinKnap/MinKnap.h"
 
 using namespace idol;
 
diff --git a/lib/CMakeLists.txt b/lib/CMakeLists.txt
index 0fa74256..63b4c9b7 100644
--- a/lib/CMakeLists.txt
+++ b/lib/CMakeLists.txt
@@ -175,6 +175,10 @@ add_library(idol STATIC
         include/idol/optimizers/branch-and-bound/branching-rules/factories/Diver.h
         include/idol/optimizers/branch-and-bound/watchers/ExportBranchAndBoundTreeToCSV.h
         src/optimizers/branch-and-bound/watchers/BranchAndBoundTree.cpp
+        include/idol/optimizers/wrappers/MinKnap/MinKnap.h
+        include/idol/optimizers/wrappers/MinKnap/Optimizers_MinKnap.h
+        src/optimizers/wrappers/MinKnap/MinKnap.cpp
+        src/optimizers/wrappers/MinKnap/Optimizers_MinKnap.cpp
 )
 
 find_package(OpenMP REQUIRED)
@@ -233,6 +237,14 @@ if (${USE_HIGHS})
 
 endif()
 
+if (${USE_MINKNAP})
+
+    find_package(minknap REQUIRED)
+    target_link_libraries(idol PUBLIC minknap)
+    target_compile_definitions(idol PUBLIC IDOL_USE_MINKNAP)
+
+endif()
+
 set(AVAILABLE_MILP_SOLVERS "${AVAILABLE_MILP_SOLVERS}" PARENT_SCOPE)
 
 if (${WITH_PROFILING})
diff --git a/lib/idolConfig.cmake.in b/lib/idolConfig.cmake.in
index f27bfea4..cb2e1b7e 100644
--- a/lib/idolConfig.cmake.in
+++ b/lib/idolConfig.cmake.in
@@ -58,4 +58,12 @@ if (@USE_ROBINHOOD@)
 
 endif()
 
+if (@USE_MINKNAP@)
+
+    set(MINKNAP_DIR "@MINKNAP_DIR@")
+    find_dependency(minknap REQUIRED)
+    add_compile_definitions(IDOL_USE_MINKNAP)
+
+endif()
+
 include("${CMAKE_CURRENT_LIST_DIR}/idolTargets.cmake")
\ No newline at end of file
diff --git a/lib/include/idol/optimizers/wrappers/MinKnap/MinKnap.h b/lib/include/idol/optimizers/wrappers/MinKnap/MinKnap.h
new file mode 100644
index 00000000..c24b3a66
--- /dev/null
+++ b/lib/include/idol/optimizers/wrappers/MinKnap/MinKnap.h
@@ -0,0 +1,28 @@
+//
+// Created by henri on 23/03/23.
+//
+
+#ifndef IDOL_MINKNAP_H
+#define IDOL_MINKNAP_H
+
+#include "idol/optimizers/OptimizerFactory.h"
+
+namespace idol {
+    class MinKnap;
+}
+
+class idol::MinKnap : public OptimizerFactoryWithDefaultParameters {
+    MinKnap(const MinKnap&) = default;
+public:
+    MinKnap() = default;
+    MinKnap(MinKnap&&) noexcept = default;
+
+    MinKnap& operator=(const MinKnap&) = delete;
+    MinKnap& operator=(MinKnap&&) noexcept = delete;
+
+    Optimizer *operator()(const Model &t_model) const override;
+
+    [[nodiscard]] MinKnap *clone() const override;
+};
+
+#endif //IDOL_MINKNAP_H
diff --git a/lib/include/idol/optimizers/wrappers/MinKnap/Optimizers_MinKnap.h b/lib/include/idol/optimizers/wrappers/MinKnap/Optimizers_MinKnap.h
new file mode 100644
index 00000000..8f0e95fc
--- /dev/null
+++ b/lib/include/idol/optimizers/wrappers/MinKnap/Optimizers_MinKnap.h
@@ -0,0 +1,57 @@
+//
+// Created by henri on 20/02/23.
+//
+
+#ifndef IDOL_OPTIMIZERS_MINKNAP_H
+#define IDOL_OPTIMIZERS_MINKNAP_H
+
+#ifdef IDOL_USE_MINKNAP
+
+#include "idol/optimizers/wrappers/OptimizerWithLazyUpdates.h"
+
+namespace idol::Optimizers {
+    class MinKnap;
+}
+
+class idol::Optimizers::MinKnap : public OptimizerWithLazyUpdates<std::optional<double>, bool> {
+    SolutionStatus m_solution_status = Loaded;
+    SolutionReason m_solution_reason = NotSpecified;
+    std::optional<double> m_objective_value;
+protected:
+    void hook_build() override;
+    void hook_optimize() override;
+    void hook_write(const std::string &t_name) override;
+    std::optional<double> hook_add(const Var &t_var, bool t_add_column) override;
+    bool hook_add(const Ctr &t_ctr) override;
+    void hook_update_objective_sense() override {}
+    void hook_update_matrix(const Ctr &t_ctr, const Var &t_var, const Constant &t_constant) override {}
+    void hook_update() override {}
+    void hook_update(const Var &t_var) override {}
+    void hook_update(const Ctr &t_ctr) override {}
+    void hook_update_objective() override {}
+    void hook_update_rhs() override {}
+    void hook_remove(const Var &t_var) override {}
+    void hook_remove(const Ctr &t_ctr) override {}
+
+    [[nodiscard]] SolutionStatus get_status() const override;
+    [[nodiscard]] SolutionReason get_reason() const override;
+    [[nodiscard]] double get_best_obj() const override;
+    [[nodiscard]] double get_best_bound() const override;
+    [[nodiscard]] double get_var_primal(const Var &t_var) const override;
+    [[nodiscard]] double get_var_ray(const Var &t_var) const override;
+    [[nodiscard]] double get_ctr_dual(const Ctr &t_ctr) const override;
+    [[nodiscard]] double get_ctr_farkas(const Ctr &t_ctr) const override;
+    [[nodiscard]] double get_relative_gap() const override;
+    [[nodiscard]] double get_absolute_gap() const override;
+    [[nodiscard]] unsigned int get_n_solutions() const override;
+    [[nodiscard]] unsigned int get_solution_index() const override { return 0; }
+    void set_solution_index(unsigned int t_index) override;
+public:
+    explicit MinKnap(const Model& t_model);
+
+    [[nodiscard]] std::string name() const override { return "minknap"; }
+};
+
+#endif
+
+#endif //IDOL_OPTIMIZERS_MINKNAP_H
diff --git a/lib/src/optimizers/wrappers/MinKnap/MinKnap.cpp b/lib/src/optimizers/wrappers/MinKnap/MinKnap.cpp
new file mode 100644
index 00000000..efd189f7
--- /dev/null
+++ b/lib/src/optimizers/wrappers/MinKnap/MinKnap.cpp
@@ -0,0 +1,18 @@
+//
+// Created by henri on 30.10.23.
+//
+#include "idol/optimizers/wrappers/MinKnap/MinKnap.h"
+#include "idol/optimizers/wrappers/MinKnap/Optimizers_MinKnap.h"
+
+idol::Optimizer *idol::MinKnap::operator()(const Model &t_model) const {
+#ifdef IDOL_USE_MINKNAP
+    return new Optimizers::MinKnap(t_model);
+#else
+    throw Exception("idol was not linked to minknap.");
+#endif
+}
+
+idol::MinKnap *idol::MinKnap::clone() const {
+    return new MinKnap(*this);
+}
+
diff --git a/lib/src/optimizers/wrappers/MinKnap/Optimizers_MinKnap.cpp b/lib/src/optimizers/wrappers/MinKnap/Optimizers_MinKnap.cpp
new file mode 100644
index 00000000..05ee9798
--- /dev/null
+++ b/lib/src/optimizers/wrappers/MinKnap/Optimizers_MinKnap.cpp
@@ -0,0 +1,186 @@
+//
+// Created by henri on 30.10.23.
+//
+#include <list>
+
+#include "idol/optimizers/wrappers/MinKnap/Optimizers_MinKnap.h"
+
+#ifdef IDOL_USE_MINKNAP
+
+extern "C" {
+#include "minknap.h"
+}
+
+idol::Optimizers::MinKnap::MinKnap(const idol::Model &t_model) : OptimizerWithLazyUpdates(t_model) {
+
+}
+
+void idol::Optimizers::MinKnap::hook_build() {}
+
+void idol::Optimizers::MinKnap::hook_write(const std::string &t_name) {
+    throw Exception("Writing to file using MinKnap is not available.");
+}
+
+std::optional<double> idol::Optimizers::MinKnap::hook_add(const idol::Var &t_var, bool t_add_column) {
+    return {};
+}
+
+bool idol::Optimizers::MinKnap::hook_add(const idol::Ctr &t_ctr) {
+
+    const auto& model = parent();
+
+    if (model.ctrs().size() > 1) {
+        throw Exception("Cannot add more than one constraint to MinKnap.");
+    }
+
+    if (model.get_ctr_type(t_ctr) != LessOrEqual) {
+        throw Exception("MinKnap only handles <= constraints.");
+    }
+
+    return true;
+}
+
+void idol::Optimizers::MinKnap::set_solution_index(unsigned int t_index) {
+    if (t_index > 0) {
+        throw Exception("Solution index out of range");
+    }
+}
+
+double idol::Optimizers::MinKnap::get_var_ray(const idol::Var &t_var) const {
+    throw Exception("Not available.");
+}
+
+double idol::Optimizers::MinKnap::get_ctr_dual(const idol::Ctr &t_ctr) const {
+    throw Exception("Not available.");
+}
+
+double idol::Optimizers::MinKnap::get_ctr_farkas(const idol::Ctr &t_ctr) const {
+    throw Exception("Not available.");
+}
+
+void idol::Optimizers::MinKnap::hook_optimize() {
+
+    const auto& model = parent();
+    const double sense_factor = model.get_obj_sense() == Minimize ? -1. : 1.;
+    const auto& ctr = model.get_ctr_by_index(0);
+    const auto& row = model.get_ctr_row(ctr);
+
+    struct Item {
+        const Var variable;
+        const double profit;
+        const double weight;
+        Item(Var t_var, double t_profit, double t_weight) : variable(std::move(t_var)), profit(t_profit), weight(t_weight) {}
+    };
+
+    double unscaled_capacity = as_numeric(row.rhs());
+    double fixed_profit = as_numeric(model.get_obj_expr().constant());
+    std::list<Item> free_variables;
+
+    // "presolve"
+    for (const auto& var : model.vars()) {
+
+        const auto type = model.get_var_type(var);
+
+        if (type == Continuous) {
+            throw Exception("Cannot handle continuous variables.");
+        }
+
+        const double lb = model.get_var_lb(var);
+        const double ub = model.get_var_ub(var);
+
+        if (lb < -Tolerance::Integer || ub > 1 + Tolerance::Integer) {
+            throw Exception("Cannot handle general integer variables.");
+        }
+
+        const auto& column = model.get_var_column(var);
+        const double weight = as_numeric(column.linear().get(ctr));
+
+        if (ub < 1 - Tolerance::Integer || weight < -Tolerance::Feasibility) {
+            lazy(var).impl() = 0.; // fixed to 0
+            continue;
+        }
+
+        const double profit = as_numeric(column.obj());
+
+        if (lb > Tolerance::Integer) {
+            lazy(var).impl() = 1.; // fixed to 1
+            unscaled_capacity -= weight;
+            fixed_profit += profit;
+            continue;
+        }
+
+        free_variables.emplace_back(var, sense_factor * profit, weight);
+
+    }
+
+    // prepare to solve
+    const unsigned int n = free_variables.size();
+    auto* profits = new int[n];
+    auto* weights = new int[n];
+    auto* values = new int[n];
+    auto scaling_factor = (double) Tolerance::Digits;
+    auto capacity = (int) (scaling_factor * unscaled_capacity);
+
+    unsigned int i = 0;
+    for (const auto& [var, profit, weight] : free_variables) {
+        profits[i] = (int) (scaling_factor * profit);
+        weights[i] = (int) (scaling_factor * weight);
+        ++i;
+    }
+
+    // solve
+    m_objective_value = (double) minknap((int) n, profits, weights, values, capacity) / scaling_factor;
+    m_objective_value.value() *= sense_factor;
+    m_objective_value.value() += fixed_profit;
+
+    // save solution
+    m_solution_status = Optimal;
+    m_solution_reason = Proved;
+    i = 0;
+    for (const auto& [var, profit, weight] : free_variables) {
+        lazy(var).impl() = values[i];
+        ++i;
+    }
+
+    // free memory
+    delete[] profits;
+    delete[] weights;
+    delete[] values;
+}
+
+idol::SolutionStatus idol::Optimizers::MinKnap::get_status() const {
+    return m_solution_status;
+}
+
+idol::SolutionReason idol::Optimizers::MinKnap::get_reason() const {
+    return m_solution_reason;
+}
+
+double idol::Optimizers::MinKnap::get_best_obj() const {
+    return m_objective_value.value();
+}
+
+double idol::Optimizers::MinKnap::get_best_bound() const {
+    return get_best_obj();
+}
+
+double idol::Optimizers::MinKnap::get_var_primal(const idol::Var &t_var) const {
+    if (!lazy(t_var).has_impl()) {
+        throw Exception("Not available.");
+    }
+    return lazy(t_var).impl().value();
+}
+
+double idol::Optimizers::MinKnap::get_relative_gap() const {
+    return m_objective_value.has_value() ? 0. : +Inf;
+}
+
+double idol::Optimizers::MinKnap::get_absolute_gap() const {
+    return get_relative_gap();
+}
+
+unsigned int idol::Optimizers::MinKnap::get_n_solutions() const {
+    return m_objective_value.has_value() ? 1 : 0;
+}
+
+#endif
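
Usage note (not part of the patch above): once idol is built with USE_MINKNAP, the new factory is meant to plug into the usual idol workflow, i.e. model.use(MinKnap()) followed by model.optimize(). The sketch below is illustrative only: the knapsack data is made up, and the surrounding calls (the idol/modeling.h include, add_vars, add_ctr, set_obj_expr, save_primal, idol_Sum, Range) are assumed to be idol's existing modeling API rather than anything introduced by this patch.

    #include <iostream>
    #include <vector>

    #include "idol/modeling.h"
    #include "idol/optimizers/wrappers/MinKnap/MinKnap.h"

    using namespace idol;

    int main() {

        // Hypothetical 0-1 knapsack instance (illustrative data only).
        const std::vector<double> profit = { 6., 5., 8., 9. };
        const std::vector<double> weight = { 2., 3., 6., 7. };
        const double capacity = 9.;
        const auto n_items = (unsigned int) profit.size();

        Env env;
        Model model(env);

        // Binary selection variables.
        const auto x = model.add_vars(Dim<1>(n_items), 0., 1., Binary, "x");

        // A single <= knapsack constraint, as required by Optimizers::MinKnap.
        model.add_ctr(idol_Sum(j, Range(n_items), weight[j] * x[j]) <= capacity);

        // idol minimizes by default, so maximizing profit means minimizing its negation;
        // the wrapper handles either sense through its sense_factor.
        model.set_obj_expr(idol_Sum(j, Range(n_items), -profit[j] * x[j]));

        // Solve with the minknap wrapper added by this patch.
        model.use(MinKnap());
        model.optimize();

        std::cout << "Status: " << model.get_status() << std::endl;
        std::cout << "Objective: " << model.get_best_obj() << std::endl;
        std::cout << save_primal(model) << std::endl;

        return 0;
    }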