diff --git a/doc/OnlineDocs/contributed_packages/alternative_solutions.rst b/doc/OnlineDocs/contributed_packages/alternative_solutions.rst
new file mode 100644
index 00000000000..cc5ab07c3cc
--- /dev/null
+++ b/doc/OnlineDocs/contributed_packages/alternative_solutions.rst
@@ -0,0 +1,107 @@
+###############################################
+Generating Alternative (Near-)Optimal Solutions
+###############################################
+
+Optimization solvers are generally designed to return a single feasible
+solution to the user. However, there are many applications where a user needs
+more context than this result. For example,
+
+* alternative solutions can support an assessment of trade-offs between competing objectives;
+
+* if the optimization formulation may be inaccurate or untrustworthy, then comparisons amongst alternative solutions provide additional insights into the reliability of these model predictions; or
+
+* the user may have unexpressed objectives or constraints, which are only realized in later stages of model analysis.
+
+The *alternative-solutions library* provides a variety of functions that
+can be used to generate optimal or near-optimal solutions for a Pyomo
+model. Conceptually, these functions are like Pyomo solvers. They can
+be configured with solver names and options, and they return a list of
+solutions for the Pyomo model. However, these functions are independent
+of Pyomo's solver interface because they return a custom solution object.
+
+The following functions are defined in the alternative-solutions library:
+
+* ``enumerate_binary_solutions``
+
+  * Finds alternative optimal solutions for a binary problem using no-good cuts.
+
+* ``enumerate_linear_solutions``
+
+  * Finds alternative optimal solutions for a (mixed-integer) linear program.
+
+* ``enumerate_linear_solutions_soln_pool``
+
+  * Finds alternative optimal solutions for a (mixed-binary) linear program using Gurobi's solution pool feature.
+
+* ``gurobi_generate_solutions``
+
+  * Finds alternative optimal solutions for discrete variables using Gurobi's built-in solution pool capability.
+
+* ``obbt_analysis_bounds_and_solutions``
+
+  * Calculates the bounds on each variable by solving a series of min and max optimization problems where each variable is used as the objective function. This can be applied to any class of problem supported by the selected solver.
+
+
+Usage Example
+-------------
+
+Many of the functions in the alternative-solutions library have similar options, so we simply illustrate the ``enumerate_binary_solutions`` function. We define a simple knapsack example whose alternative solutions have integer objective values ranging from 0 to 90.
+
+.. doctest::
+
+    >>> import pyomo.environ as pyo
+
+    >>> values = [10, 40, 30, 50]
+    >>> weights = [5, 4, 6, 3]
+    >>> capacity = 10
+
+    >>> m = pyo.ConcreteModel()
+    >>> m.x = pyo.Var(range(4), within=pyo.Binary)
+    >>> m.o = pyo.Objective(expr=sum(values[i] * m.x[i] for i in range(4)), sense=pyo.maximize)
+    >>> m.c = pyo.Constraint(expr=sum(weights[i] * m.x[i] for i in range(4)) <= capacity)
+
+We can execute the ``enumerate_binary_solutions`` function to generate a list of ``Solution`` objects that represent alternative optimal solutions:
+
+.. 
doctest:: + :skipif: not glpk_available + + >>> import pyomo.contrib.alternative_solutions as aos + >>> solns = aos.enumerate_binary_solutions(m, num_solutions=100, solver="glpk") + >>> assert len(solns) == 10 + +Each ``Solution`` object contains information about the objective and variables, and it includes various methods to access this information. For example: + +.. doctest:: + :skipif: not glpk_available + + >>> print(solns[0]) + { + "fixed_variables": [], + "objective": "o", + "objective_value": 90.0, + "solution": { + "x[0]": 0, + "x[1]": 1, + "x[2]": 0, + "x[3]": 1 + } + } + + +Interface Documentation +----------------------- + +.. currentmodule:: pyomo.contrib.alternative_solutions + +.. autofunction:: enumerate_binary_solutions + +.. autofunction:: enumerate_linear_solutions + +.. autofunction:: enumerate_linear_solutions_soln_pool + +.. autofunction:: gurobi_generate_solutions + +.. autofunction:: obbt_analysis_bounds_and_solutions + +.. autoclass:: Solution + diff --git a/doc/OnlineDocs/contributed_packages/index.rst b/doc/OnlineDocs/contributed_packages/index.rst index b1d9cbbad3b..65c14a721df 100644 --- a/doc/OnlineDocs/contributed_packages/index.rst +++ b/doc/OnlineDocs/contributed_packages/index.rst @@ -15,6 +15,7 @@ Contributed packages distributed with Pyomo: .. toctree:: :maxdepth: 1 + alternative_solutions.rst community.rst doe/doe.rst gdpopt.rst diff --git a/pyomo/common/fileutils.py b/pyomo/common/fileutils.py index 7b6520327a0..8c3c6dfecaa 100644 --- a/pyomo/common/fileutils.py +++ b/pyomo/common/fileutils.py @@ -286,10 +286,17 @@ def find_dir( ) -_exeExt = {'linux': None, 'windows': '.exe', 'cygwin': '.exe', 'darwin': None} +_exeExt = { + 'linux': None, + 'freebsd': None, + 'windows': '.exe', + 'cygwin': '.exe', + 'darwin': None, +} _libExt = { 'linux': ('.so', '.so.*'), + 'freebsd': ('.so', '.so.*'), 'windows': ('.dll', '.pyd'), 'cygwin': ('.dll', '.so', '.so.*'), 'darwin': ('.dylib', '.so', '.so.*'), diff --git a/pyomo/common/tests/test_download.py b/pyomo/common/tests/test_download.py index 8fee0ba7e31..4ee781d5738 100644 --- a/pyomo/common/tests/test_download.py +++ b/pyomo/common/tests/test_download.py @@ -22,7 +22,7 @@ import pyomo.common.envvar as envvar from pyomo.common import DeveloperError -from pyomo.common.fileutils import this_file +from pyomo.common.fileutils import this_file, Executable from pyomo.common.download import FileDownloader, distro_available from pyomo.common.log import LoggingIntercept from pyomo.common.tee import capture_output @@ -173,7 +173,8 @@ def test_get_os_version(self): self.assertTrue(v.replace('.', '').startswith(dist_ver)) if ( - subprocess.run( + Executable('lsb_release').available() + and subprocess.run( ['lsb_release'], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, @@ -206,7 +207,7 @@ def test_get_os_version(self): self.assertEqual(_os, 'win') self.assertEqual(_norm, _os + ''.join(_ver.split('.')[:2])) else: - self.assertEqual(ans, '') + self.assertEqual(_os, '') self.assertEqual((_os, _ver), FileDownloader._os_version) # Exercise the fetch from CACHE diff --git a/pyomo/common/unittest.py b/pyomo/common/unittest.py index c78e003a07d..996bb69ec78 100644 --- a/pyomo/common/unittest.py +++ b/pyomo/common/unittest.py @@ -858,7 +858,7 @@ def filter_file_contents(self, lines, abstol=None): return filtered - def compare_baseline(self, test_output, baseline, abstol=1e-6, reltol=None): + def compare_baseline(self, test_output, baseline, abstol=1e-6, reltol=1e-8): # Filter files independently and then compare filtered 
contents
         out_filtered = self.filter_file_contents(
             test_output.strip().split('\n'), abstol
         )
diff --git a/pyomo/contrib/alternative_solutions/README.md b/pyomo/contrib/alternative_solutions/README.md
new file mode 100644
index 00000000000..b6e387aceee
--- /dev/null
+++ b/pyomo/contrib/alternative_solutions/README.md
@@ -0,0 +1,8 @@
+# alternative_solutions
+
+pyomo.contrib.alternative_solutions is a collection of functions that
+generate a set of alternative (near-)optimal solutions (AOS). These
+functions rely on a Pyomo solver to search for solutions, and they
+iteratively adapt the search process to find a variety of alternative
+solutions.
+
diff --git a/pyomo/contrib/alternative_solutions/__init__.py b/pyomo/contrib/alternative_solutions/__init__.py
new file mode 100644
index 00000000000..0b01e359879
--- /dev/null
+++ b/pyomo/contrib/alternative_solutions/__init__.py
@@ -0,0 +1,20 @@
+# ___________________________________________________________________________
+#
+# Pyomo: Python Optimization Modeling Objects
+# Copyright (c) 2008-2024
+# National Technology and Engineering Solutions of Sandia, LLC
+# Under the terms of Contract DE-NA0003525 with National Technology and
+# Engineering Solutions of Sandia, LLC, the U.S. Government retains certain
+# rights in this software.
+# This software is distributed under the 3-clause BSD License.
+# ___________________________________________________________________________
+
+from pyomo.contrib.alternative_solutions.aos_utils import logcontext
+from pyomo.contrib.alternative_solutions.solution import Solution
+from pyomo.contrib.alternative_solutions.solnpool import gurobi_generate_solutions
+from pyomo.contrib.alternative_solutions.balas import enumerate_binary_solutions
+from pyomo.contrib.alternative_solutions.obbt import (
+    obbt_analysis,
+    obbt_analysis_bounds_and_solutions,
+)
+from pyomo.contrib.alternative_solutions.lp_enum import enumerate_linear_solutions
diff --git a/pyomo/contrib/alternative_solutions/aos_utils.py b/pyomo/contrib/alternative_solutions/aos_utils.py
new file mode 100644
index 00000000000..23fa8e3b4f7
--- /dev/null
+++ b/pyomo/contrib/alternative_solutions/aos_utils.py
@@ -0,0 +1,303 @@
+# ___________________________________________________________________________
+#
+# Pyomo: Python Optimization Modeling Objects
+# Copyright (c) 2008-2024
+# National Technology and Engineering Solutions of Sandia, LLC
+# Under the terms of Contract DE-NA0003525 with National Technology and
+# Engineering Solutions of Sandia, LLC, the U.S. Government retains certain
+# rights in this software.
+# This software is distributed under the 3-clause BSD License.
+# ___________________________________________________________________________
+
+import logging
+
+logger = logging.getLogger(__name__)
+
+from contextlib import contextmanager
+
+from pyomo.common.dependencies import numpy as numpy, numpy_available
+
+if numpy_available:
+    import numpy.random
+    from numpy.linalg import norm
+
+import pyomo.environ as pe
+from pyomo.common.modeling import unique_component_name
+from pyomo.common.collections import ComponentSet
+import pyomo.util.vars_from_expressions as vfe
+
+
+@contextmanager
+def logcontext(level):
+    """
+    This context manager is used to dynamically set the specified logging level
+    and then execute a block of code using that logging level. When the context
+    exits, the logging level is reset to the original value.
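+    Note that the level is set on the root logger, so it typically affects
+    logging output from all modules while the context is active.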
+ + Examples + -------- + >>> with logcontext(logging.INFO): + >>> logging.debug("This will not be printed") + >>> logging.info("This will be printed") + """ + logger = logging.getLogger() + current_level = logger.getEffectiveLevel() + logger.setLevel(level) + try: + yield + finally: + logger.setLevel(current_level) + + +def get_active_objective(model): + """ + Finds and returns the active objective function for a model. Currently + assume that there is exactly one active objective. + """ + + active_objs = list(model.component_data_objects(pe.Objective, active=True)) + assert ( + len(active_objs) == 1 + ), "Model has {} active objective functions, exactly one is required.".format( + len(active_objs) + ) + + return active_objs[0] + + +def _add_aos_block(model, name="_aos_block"): + """Adds an alternative optimal solution block with a unique name.""" + aos_block = pe.Block() + model.add_component(unique_component_name(model, name), aos_block) + return aos_block + + +def _add_objective_constraint( + aos_block, objective, objective_value, rel_opt_gap, abs_opt_gap +): + """ + Adds a relative and/or absolute objective function constraint to the + specified block. + """ + + assert ( + rel_opt_gap is None or rel_opt_gap >= 0.0 + ), "rel_opt_gap must be None or >= 0.0" + assert ( + abs_opt_gap is None or abs_opt_gap >= 0.0 + ), "abs_opt_gap must be None or >= 0.0" + + objective_constraints = [] + + objective_is_min = objective.is_minimizing() + objective_expr = objective.expr + + objective_sense = -1 + if objective_is_min: + objective_sense = 1 + + if rel_opt_gap is not None: + objective_cutoff = objective_value + objective_sense * rel_opt_gap * abs( + objective_value + ) + + if objective_is_min: + aos_block.optimality_tol_rel = pe.Constraint( + expr=objective_expr <= objective_cutoff + ) + else: + aos_block.optimality_tol_rel = pe.Constraint( + expr=objective_expr >= objective_cutoff + ) + objective_constraints.append(aos_block.optimality_tol_rel) + + if abs_opt_gap is not None: + objective_cutoff = objective_value + objective_sense * abs_opt_gap + + if objective_is_min: + aos_block.optimality_tol_abs = pe.Constraint( + expr=objective_expr <= objective_cutoff + ) + else: + aos_block.optimality_tol_abs = pe.Constraint( + expr=objective_expr >= objective_cutoff + ) + objective_constraints.append(aos_block.optimality_tol_abs) + + return objective_constraints + + +if numpy_available: + rng = numpy.random.default_rng(9283749387) +else: + rng = None + + +def _set_numpy_rng(seed): + global rng + rng = numpy.random.default_rng(seed) + + +def _get_random_direction(num_dimensions, iterations=1000, min_norm=1e-4): + """ + Get a unit vector of dimension num_dimensions by sampling from and + normalizing a standard multivariate Gaussian distribution. + """ + for idx in range(iterations): + samples = rng.normal(size=num_dimensions) + samples_norm = norm(samples) + if samples_norm > min_norm: + return samples / samples_norm + raise Exception( # pragma: no cover + ( + "Generated {} sequential Gaussian draws with a norm of " + "less than {}.".format(iterations, min_norm) + ) + ) + + +def _filter_model_variables( + variable_set, + var_generator, + include_continuous=True, + include_binary=True, + include_integer=True, + include_fixed=False, +): + """ + Filters variables from a variable generator and adds them to a set. 
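+    Fixed variables are skipped unless include_fixed is True, and variables
+    that are already in the set are not added again.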
+ """ + for var in var_generator: + if var in variable_set or (var.is_fixed() and not include_fixed): + continue + if ( + (var.is_continuous() and include_continuous) + or (var.is_binary() and include_binary) + or (var.is_integer() and include_integer) + ): + variable_set.add(var) + + +def get_model_variables( + model, + components=None, + include_continuous=True, + include_binary=True, + include_integer=True, + include_fixed=False, +): + """ + Gathers and returns all variables or a subset of variables from a Pyomo + model. + + Parameters + ---------- + model : ConcreteModel + A concrete Pyomo model. + components: None or a collection of Pyomo components + The components from which variables should be collected. None + indicates that all variables will be included. Alternatively, a + collection of Pyomo Blocks, Constraints, or Variables (indexed or + non-indexed) from which variables will be gathered can be provided. + If a Block is provided, all variables associated with constraints + in that that block and its sub-blocks will be returned. To exclude + sub-blocks, a tuple element with the format (Block, False) can be + used. + include_continuous : boolean + Boolean indicating that continuous variables should be included. + include_binary : boolean + Boolean indicating that binary variables should be included. + include_integer : boolean + Boolean indicating that integer variables should be included. + include_fixed : boolean + Boolean indicating that fixed variables should be included. + + Returns + ------- + variable_set + A Pyomo ComponentSet containing _GeneralVarData variables. + """ + + component_list = (pe.Objective, pe.Constraint) + variable_set = ComponentSet() + if components == None: + var_generator = vfe.get_vars_from_components( + model, component_list, include_fixed=include_fixed + ) + _filter_model_variables( + variable_set, + var_generator, + include_continuous, + include_binary, + include_integer, + include_fixed, + ) + else: + for comp in components: + if hasattr(comp, "ctype") and comp.ctype == pe.Block: + blocks = comp.values() if comp.is_indexed() else (comp,) + for item in blocks: + variables = vfe.get_vars_from_components( + item, component_list, include_fixed=include_fixed + ) + _filter_model_variables( + variable_set, + variables, + include_continuous, + include_binary, + include_integer, + include_fixed, + ) + elif ( + isinstance(comp, tuple) + and hasattr(comp[0], "ctype") + and comp[0].ctype == pe.Block + ): + block = comp[0] + descend_into = pe.Block if comp[1] else False + blocks = block.values() if block.is_indexed() else (block,) + for item in blocks: + variables = vfe.get_vars_from_components( + item, + component_list, + include_fixed=include_fixed, + descend_into=descend_into, + ) + _filter_model_variables( + variable_set, + variables, + include_continuous, + include_binary, + include_integer, + include_fixed, + ) + elif hasattr(comp, "ctype") and comp.ctype in component_list: + constraints = comp.values() if comp.is_indexed() else (comp,) + for item in constraints: + variables = pe.expr.identify_variables( + item.expr, include_fixed=include_fixed + ) + _filter_model_variables( + variable_set, + variables, + include_continuous, + include_binary, + include_integer, + include_fixed, + ) + elif hasattr(comp, "ctype") and comp.ctype == pe.Var: + variables = comp.values() if comp.is_indexed() else (comp,) + _filter_model_variables( + variable_set, + variables, + include_continuous, + include_binary, + include_integer, + include_fixed, + ) + else: # pragma: no 
cover
+            logger.info(
+                ("No variables added for unrecognized component {}.").format(comp)
+            )
+
+    return variable_set
diff --git a/pyomo/contrib/alternative_solutions/balas.py b/pyomo/contrib/alternative_solutions/balas.py
new file mode 100644
index 00000000000..489642a5b63
--- /dev/null
+++ b/pyomo/contrib/alternative_solutions/balas.py
@@ -0,0 +1,254 @@
+# ___________________________________________________________________________
+#
+# Pyomo: Python Optimization Modeling Objects
+# Copyright (c) 2008-2024
+# National Technology and Engineering Solutions of Sandia, LLC
+# Under the terms of Contract DE-NA0003525 with National Technology and
+# Engineering Solutions of Sandia, LLC, the U.S. Government retains certain
+# rights in this software.
+# This software is distributed under the 3-clause BSD License.
+# ___________________________________________________________________________
+
+import logging
+
+logger = logging.getLogger(__name__)
+
+import pyomo.environ as pe
+from pyomo.common.collections import ComponentSet
+from pyomo.contrib.alternative_solutions import Solution
+import pyomo.contrib.alternative_solutions.aos_utils as aos_utils
+
+
+def enumerate_binary_solutions(
+    model,
+    *,
+    num_solutions=10,
+    variables=None,
+    rel_opt_gap=None,
+    abs_opt_gap=None,
+    search_mode="optimal",
+    solver="gurobi",
+    solver_options={},
+    tee=False,
+    seed=None,
+):
+    """
+    Finds alternative optimal solutions for a binary problem using no-good
+    cuts.
+
+    Parameters
+    ----------
+    model : ConcreteModel
+        A concrete Pyomo model
+    num_solutions : int
+        The maximum number of solutions to generate.
+    variables: None or a collection of Pyomo _GeneralVarData variables
+        The binary variables for which alternative solutions will be
+        generated. None indicates that all binary variables will be
+        included. Alternatively, a collection of _GeneralVarData variables
+        can be provided.
+    rel_opt_gap : float or None
+        The relative optimality gap for the original objective for which
+        alternative solutions will be found. None indicates that a relative
+        gap constraint will not be added to the model.
+    abs_opt_gap : float or None
+        The absolute optimality gap for the original objective for which
+        alternative solutions will be found. None indicates that an absolute
+        gap constraint will not be added to the model.
+    search_mode : 'optimal', 'random', or 'hamming'
+        Indicates the mode that is used to generate alternative solutions.
+        The optimal mode finds the next best solution. The random mode
+        finds an alternative solution in the direction of a random ray. The
+        hamming mode iteratively finds solutions that maximize the Hamming
+        distance from previously discovered solutions.
+    solver : string
+        The solver to be used.
+    solver_options : dict
+        Solver option-value pairs to be passed to the solver.
+    tee : boolean
+        Boolean indicating that the solver output should be displayed.
+    seed : int
+        Optional integer seed for the numpy random number generator.
+
+    Returns
+    -------
+    solutions
+        A list of Solution objects.
+        [Solution]
+    """
+    logger.info("STARTING NO-GOOD CUT ANALYSIS")
+
+    assert search_mode in [
+        "optimal",
+        "random",
+        "hamming",
+    ], 'search mode must be "optimal", "random", or "hamming".'
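+
+    # Summary of the approach: solve the model once, then repeatedly add a
+    # no-good cut that excludes each previously seen binary assignment and
+    # re-solve.  The objective used for the re-solves depends on search_mode:
+    # "optimal" keeps the original objective, "random" maximizes a random
+    # direction over the binary variables, and "hamming" maximizes the Hamming
+    # distance from all previously discovered solutions.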
+ + if seed is not None: + aos_utils._set_numpy_rng(seed) + + all_variables = aos_utils.get_model_variables(model, include_fixed=True) + if variables == None: + binary_variables = [ + var for var in all_variables if var.is_binary() and not var.is_fixed() + ] + logger.debug( + "Analysis using %d binary variables: %s" + % (len(binary_variables), " ".join(var.name for var in binary_variables)) + ) + else: + binary_variables = ComponentSet() + non_binary_variables = [] + for var in variables: + if var.is_binary(): + binary_variables.add(var) + else: # pragma: no cover + non_binary_variables.append(var.name) + if len(non_binary_variables) > 0: + logger.warn( + ( + "Warning: The following non-binary variables were included" + "in the variable list and will be ignored:" + ) + ) + logger.warn(", ".join(non_binary_variables)) + + orig_objective = aos_utils.get_active_objective(model) + + if len(binary_variables) == 0: + logger.warn("No binary variables found!") + + # + # Setup solver + # + opt = pe.SolverFactory(solver) + opt.available() + for parameter, value in solver_options.items(): + opt.options[parameter] = value + # + # Appsi-specific configurations + # + use_appsi = False + if "appsi" in solver: + use_appsi = True + opt.update_config.update_constraints = False + opt.update_config.check_for_new_or_removed_constraints = True + opt.update_config.check_for_new_or_removed_vars = False + opt.update_config.check_for_new_or_removed_params = False + opt.update_config.update_vars = False + opt.update_config.update_params = False + opt.update_config.update_named_expressions = False + opt.update_config.treat_fixed_vars_as_params = False + + if search_mode == "hamming": + opt.update_config.check_for_new_objective = True + opt.update_config.update_objective = True + elif search_mode == "random": + opt.update_config.check_for_new_objective = True + opt.update_config.update_objective = False + else: + opt.update_config.check_for_new_objective = False + opt.update_config.update_objective = False + + # + # Initial solve of the model + # + logger.info("Performing initial solve of model.") + results = opt.solve(model, tee=tee, load_solutions=False) + status = results.solver.status + if not pe.check_optimal_termination(results): + condition = results.solver.termination_condition + raise Exception( + ( + "No-good cut analysis cannot be applied, " + "SolverStatus = {}, " + "TerminationCondition = {}" + ).format(status.value, condition.value) + ) + + model.solutions.load_from(results) + orig_objective_value = pe.value(orig_objective) + logger.info("Found optimal solution, value = {}.".format(orig_objective_value)) + solutions = [Solution(model, all_variables, objective=orig_objective)] + # + # Return just this solution if there are no binary variables + # + if len(binary_variables) == 0: + return solutions + + aos_block = aos_utils._add_aos_block(model, name="_balas") + logger.info("Added block {} to the model.".format(aos_block)) + aos_block.no_good_cuts = pe.ConstraintList() + aos_utils._add_objective_constraint( + aos_block, orig_objective, orig_objective_value, rel_opt_gap, abs_opt_gap + ) + + if search_mode in ["random", "hamming"]: + orig_objective.deactivate() + + solution_number = 2 + while solution_number <= num_solutions: + + expr = 0 + for var in binary_variables: + if var.value > 0.5: + expr += 1 - var + else: + expr += var + + aos_block.no_good_cuts.add(expr=expr >= 1) + + if search_mode == "hamming": + if hasattr(aos_block, "hamming_objective"): + aos_block.hamming_objective.expr += expr + if 
use_appsi and opt.update_config.check_for_new_objective: + opt.update_config.check_for_new_objective = False + else: + aos_block.hamming_objective = pe.Objective(expr=expr, sense=pe.maximize) + + elif search_mode == "random": + if hasattr(aos_block, "random_objective"): + aos_block.del_component("random_objective") + vector = aos_utils._get_random_direction(len(binary_variables)) + idx = 0 + expr = 0 + for var in binary_variables: + expr += vector[idx] * var + idx += 1 + aos_block.random_objective = pe.Objective(expr=expr, sense=pe.maximize) + + results = opt.solve(model, tee=tee, load_solutions=False) + status = results.solver.status + condition = results.solver.termination_condition + if pe.check_optimal_termination(results): + model.solutions.load_from(results) + orig_obj_value = pe.value(orig_objective) + logger.info( + "Iteration {}: objective = {}".format(solution_number, orig_obj_value) + ) + solutions.append(Solution(model, all_variables, objective=orig_objective)) + solution_number += 1 + elif ( + condition == pe.TerminationCondition.infeasibleOrUnbounded + or condition == pe.TerminationCondition.infeasible + ): + logger.info( + "Iteration {}: Infeasible, no additional binary solutions.".format( + solution_number + ) + ) + break + else: # pragma: no cover + logger.info( + ( + "Iteration {}: Unexpected condition, SolverStatus = {}, " + "TerminationCondition = {}" + ).format(solution_number, status.value, condition.value) + ) + break + + aos_block.deactivate() + orig_objective.activate() + + logger.info("COMPLETED NO-GOOD CUT ANALYSIS") + + return solutions diff --git a/pyomo/contrib/alternative_solutions/lp_enum.py b/pyomo/contrib/alternative_solutions/lp_enum.py new file mode 100644 index 00000000000..7cb7b5eaabe --- /dev/null +++ b/pyomo/contrib/alternative_solutions/lp_enum.py @@ -0,0 +1,330 @@ +# ___________________________________________________________________________ +# +# Pyomo: Python Optimization Modeling Objects +# Copyright (c) 2008-2024 +# National Technology and Engineering Solutions of Sandia, LLC +# Under the terms of Contract DE-NA0003525 with National Technology and +# Engineering Solutions of Sandia, LLC, the U.S. Government retains certain +# rights in this software. +# This software is distributed under the 3-clause BSD License. +# ___________________________________________________________________________ + +import logging + +logger = logging.getLogger(__name__) + +import pyomo.environ as pe +from pyomo.contrib.alternative_solutions import ( + aos_utils, + shifted_lp, + solution, + solnpool, +) +from pyomo.contrib import appsi + + +def enumerate_linear_solutions( + model, + *, + num_solutions=10, + rel_opt_gap=None, + abs_opt_gap=None, + zero_threshold=1e-5, + search_mode="optimal", + solver="gurobi", + solver_options={}, + tee=False, + seed=None, +): + """ + Finds alternative optimal solutions a (mixed-integer) linear program. + + This function implements the technique described here: + + S. Lee, C. Phalakornkule, M.M. Domach, and I.E. Grossmann, + "Recursive MILP model for finding all the alternative optima in LP + models for metabolic networks", Computers and Chemical Engineering, + 24 (2000) 711-716. + + Parameters + ---------- + model : ConcreteModel + A concrete Pyomo model + num_solutions : int + The maximum number of solutions to generate. + rel_opt_gap : float or None + The relative optimality gap for the original objective for which + variable bounds will be found. None indicates that a relative gap + constraint will not be added to the model. 
+ abs_opt_gap : float or None + The absolute optimality gap for the original objective for which + variable bounds will be found. None indicates that an absolute gap + constraint will not be added to the model. + zero_threshold: float + The threshold for which a continuous variables' value is considered + to be equal to zero. + search_mode : 'optimal', 'random', or 'norm' + Indicates the mode that is used to generate alternative solutions. + The optimal mode finds the next best solution. The random mode + finds an alternative solution in the direction of a random ray. The + norm mode iteratively finds solution that maximize the L2 distance + from previously discovered solutions. + solver : string + The solver to be used. + solver_options : dict + Solver option-value pairs to be passed to the solver. + tee : boolean + Boolean indicating that the solver output should be displayed. + seed : int + Optional integer seed for the numpy random number generator + + Returns + ------- + solutions + A list of Solution objects. + [Solution] + """ + logger.info("STARTING LP ENUMERATION ANALYSIS") + + assert search_mode in [ + "optimal", + "random", + "norm", + ], 'search mode must be "optimal", "random", or "norm".' + # TODO: Implement the random and norm objectives. I think it is sufficient + # to only consider the cb.var_lower variables in the objective for these two + # cases. The cb.var_upper variables are directly linked to these to diversity + # in one implies diversity in the other. Diversity in the cb.basic_slack + # variables doesn't really matter since we only really care about diversity + # in the original problem and not in the slack space (I think) + + all_variables = aos_utils.get_model_variables(model) + # else: + # binary_variables = ComponentSet() + # non_binary_variables = [] + # for var in variables: + # if var.is_binary(): + # binary_variables.append(var) + # else: + # non_binary_variables.append(var.name) + # if len(non_binary_variables) > 0: + # logger.warn(('Warning: The following non-binary variables were included' + # 'in the variable list and will be ignored:')) + # logger.warn(", ".join(non_binary_variables)) + # all_variables = aos_utils.get_model_variables(model, None, + # include_fixed=True) + + # TODO: Relax this if possible - Should allow for the mixed-binary case + for var in all_variables: + assert var.is_continuous(), "Model must be an LP" + + use_appsi = False + if "appsi" in solver: + use_appsi = True + opt = appsi.solvers.Gurobi() + opt.config.load_solution = False + opt.config.stream_solver = tee + opt.update_config.check_for_new_or_removed_constraints = True + opt.update_config.update_constraints = False + opt.update_config.check_for_new_or_removed_vars = True + opt.update_config.check_for_new_or_removed_params = False + opt.update_config.update_vars = False + opt.update_config.update_params = False + opt.update_config.update_named_expressions = False + opt.update_config.treat_fixed_vars_as_params = False + + if search_mode == "norm": + opt.update_config.check_for_new_objective = True + opt.update_config.update_objective = True + elif search_mode == "random": + opt.update_config.check_for_new_objective = True + opt.update_config.update_objective = False + else: + opt.update_config.check_for_new_objective = False + opt.update_config.update_objective = False + for parameter, value in solver_options.items(): + opt.gurobi_options[parameter] = value + else: + opt = pe.SolverFactory(solver) + opt.available() + for parameter, value in solver_options.items(): + 
opt.options[parameter] = value
+        if solver == "gurobi":
+            # Disable gurobi heuristics, which can return
+            # solutions not at a vertex
+            opt.options["Heuristics"] = 0.0
+
+    logger.info("Performing initial solve of model.")
+
+    if use_appsi:
+        results = opt.solve(model)
+        condition = results.termination_condition
+        optimal_tc = appsi.base.TerminationCondition.optimal
+    else:
+        results = opt.solve(model, tee=tee, load_solutions=False)
+        condition = results.solver.termination_condition
+        optimal_tc = pe.TerminationCondition.optimal
+    if condition != optimal_tc:
+        raise Exception(
+            (
+                "Model could not be solved. LP enumeration analysis "
+                "cannot be applied, "
+                "TerminationCondition = {}"
+            ).format(condition.value)
+        )
+    if use_appsi:
+        results.solution_loader.load_vars(solution_number=0)
+    else:
+        model.solutions.load_from(results)
+
+    orig_objective = aos_utils.get_active_objective(model)
+    orig_objective_value = pe.value(orig_objective)
+    logger.info("Found optimal solution, value = {}.".format(orig_objective_value))
+
+    aos_block = aos_utils._add_aos_block(model, name="_lp_enum")
+    aos_utils._add_objective_constraint(
+        aos_block, orig_objective, orig_objective_value, rel_opt_gap, abs_opt_gap
+    )
+    logger.info("Added block {} to the model.".format(aos_block))
+
+    canon_block = shifted_lp.get_shifted_linear_model(model)
+    cb = canon_block
+
+    # Set K
+    cb.iteration = pe.Set(pe.PositiveIntegers)
+
+    # w variables
+    cb.basic_lower = pe.Var(pe.Any, domain=pe.Binary, dense=False)
+    cb.basic_upper = pe.Var(pe.Any, domain=pe.Binary, dense=False)
+    cb.basic_slack = pe.Var(pe.Any, domain=pe.Binary, dense=False)
+
+    # w upper bounds constraints (Eqn (3))
+    cb.bound_lower = pe.Constraint(pe.Any)
+    cb.bound_upper = pe.Constraint(pe.Any)
+    cb.bound_slack = pe.Constraint(pe.Any)
+
+    # non-zero basic variable no-good cut set
+    cb.cut_set = pe.Constraint(pe.PositiveIntegers)
+
+    # [ (continuous, binary, constraint) ]
+    variable_groups = [
+        (cb.var_lower, cb.basic_lower, cb.bound_lower),
+        (cb.var_upper, cb.basic_upper, cb.bound_upper),
+        (cb.slack_vars, cb.basic_slack, cb.bound_slack),
+    ]
+
+    solution_number = 1
+    solutions = []
+    while solution_number <= num_solutions:
+        logger.info("Solving Iteration {}".format(solution_number))
+
+        if logger.isEnabledFor(logging.DEBUG):
+            model.pprint()
+        if use_appsi:
+            results = opt.solve(model)
+            condition = results.termination_condition
+        else:
+            results = opt.solve(cb, tee=tee, load_solutions=False)
+            condition = results.solver.termination_condition
+
+        if condition == optimal_tc:
+            if use_appsi:
+                results.solution_loader.load_vars(solution_number=0)
+            else:
+                model.solutions.load_from(results)
+
+            for var, index in cb.var_map.items():
+                var.set_value(var.lb + cb.var_lower[index].value)
+            sol = solution.Solution(model, all_variables, objective=orig_objective)
+            solutions.append(sol)
+            orig_objective_value = sol.objective[1]
+
+            if logger.isEnabledFor(logging.INFO):
+                logger.info("Solved, objective = {}".format(orig_objective_value))
+                for var, index in cb.var_map.items():
+                    logger.info(
+                        "{} = {}".format(var.name, var.lb + cb.var_lower[index].value)
+                    )
+            if logger.isEnabledFor(logging.DEBUG):
+                model.display()
+
+            if hasattr(cb, "force_out"):
+                cb.del_component("force_out")
+            if hasattr(cb, "link_in_out"):
+                cb.del_component("link_in_out")
+            if hasattr(cb, "basic_last_lower"):
+                cb.del_component("basic_last_lower")
+            if hasattr(cb, "basic_last_upper"):
+                cb.del_component("basic_last_upper")
+            if hasattr(cb, "basic_last_slack"):
+
cb.del_component("basic_last_slack") + + cb.link_in_out = pe.Constraint(pe.Any) + # y variables + cb.basic_last_lower = pe.Var(pe.Any, domain=pe.Binary, dense=False) + cb.basic_last_upper = pe.Var(pe.Any, domain=pe.Binary, dense=False) + cb.basic_last_slack = pe.Var(pe.Any, domain=pe.Binary, dense=False) + basic_last_list = [ + cb.basic_last_lower, + cb.basic_last_upper, + cb.basic_last_slack, + ] + + # Number of variables with non-zero values + num_non_zero = 0 + # This expression is used to ensure that at least one of the non-zero basic + # variables in the previous solution is selected. + force_out_expr = -1 + # This expression is used to ensure that at most (# non-zero basic variables)-1 + # binary choice variables can be selected. + non_zero_basic_expr = 1 + for idx in range(len(variable_groups)): + continuous_var, binary_var, constraint = variable_groups[idx] + for var in continuous_var: + if continuous_var[var].value > zero_threshold: + num_non_zero += 1 + + # Eqn (3): if binary choice variable is not selected, then + # continuous variable is zero. + constraint[var] = ( + continuous_var[var] + <= continuous_var[var].ub * binary_var[var] + ) + non_zero_basic_expr += binary_var[var] + basic_var = basic_last_list[idx][var] + force_out_expr += basic_var + # Eqn (4): if binary choice variable is selected, then + # basic variable is zero + cb.link_in_out[var] = basic_var + binary_var[var] <= 1 + # Eqn (1): at least one of the non-zero basic variables in the + # previous solution is selected + cb.force_out = pe.Constraint(expr=force_out_expr >= 0) + # Eqn (2): At most (# non-zero basic variables)-1 binary choice + # variables can be selected + cb.cut_set[solution_number] = non_zero_basic_expr <= num_non_zero + + solution_number += 1 + elif ( + condition == pe.TerminationCondition.infeasibleOrUnbounded + or condition == pe.TerminationCondition.infeasible + ): + logger.info("Infeasible, all alternative solutions have been found.") + break + else: + logger.info( + ( + "Unexpected solver condition. Stopping LP enumeration. " + "SolverStatus = {}, TerminationCondition = {}" + ).format(results.solver.status.value, condition.value) + ) + break + if logger.isEnabledFor(logging.DEBUG): + logging.debug("") + logging.debug("=" * 80) + logging.debug("") + + model.del_component("aos_block") + + logger.info("COMPLETED LP ENUMERATION ANALYSIS") + + return solutions diff --git a/pyomo/contrib/alternative_solutions/lp_enum_solnpool.py b/pyomo/contrib/alternative_solutions/lp_enum_solnpool.py new file mode 100644 index 00000000000..1806b96e0ec --- /dev/null +++ b/pyomo/contrib/alternative_solutions/lp_enum_solnpool.py @@ -0,0 +1,237 @@ +# ___________________________________________________________________________ +# +# Pyomo: Python Optimization Modeling Objects +# Copyright (c) 2008-2024 +# National Technology and Engineering Solutions of Sandia, LLC +# Under the terms of Contract DE-NA0003525 with National Technology and +# Engineering Solutions of Sandia, LLC, the U.S. Government retains certain +# rights in this software. +# This software is distributed under the 3-clause BSD License. 
+# ___________________________________________________________________________ + +import logging + +logger = logging.getLogger(__name__) + +from pyomo.common.dependencies import attempt_import + +gurobipy, gurobi_available = attempt_import("gurobipy") + +import pyomo.environ as pe +import pyomo.common.errors +from pyomo.contrib.alternative_solutions import aos_utils, shifted_lp, solution +from pyomo.contrib import appsi + + +class NoGoodCutGenerator: + def __init__( + self, + model, + variable_groups, + zero_threshold, + orig_model, + all_variables, + orig_objective, + num_solutions, + ): + self.model = model + self.zero_threshold = zero_threshold + self.variable_groups = variable_groups + self.variables = aos_utils.get_model_variables(model) + self.orig_model = orig_model + self.all_variables = all_variables + self.orig_objective = orig_objective + self.solutions = [] + self.num_solutions = num_solutions + + def cut_generator_callback(self, cb_m, cb_opt, cb_where): + from gurobipy import GRB + + if cb_where == GRB.Callback.MIPSOL: + cb_opt.cbGetSolution(vars=self.variables) + logger.info("***FOUND SOLUTION***") + + for var, index in self.model.var_map.items(): + var.set_value(var.lb + self.model.var_lower[index].value) + sol = solution.Solution( + self.orig_model, self.all_variables, objective=self.orig_objective + ) + self.solutions.append(sol) + + if len(self.solutions) >= self.num_solutions: + cb_opt._solver_model.terminate() + num_non_zero = 0 + non_zero_basic_expr = 1 + for idx in range(len(self.variable_groups)): + continuous_var, binary_var = self.variable_groups[idx] + for var in continuous_var: + if continuous_var[var].value > self.zero_threshold: + num_non_zero += 1 + non_zero_basic_expr += binary_var[var] + # TODO: JLG - If we want to add the mixed binary case, I think we + # need to do it here. Essentially we would want to continue to + # build up the num_non_zero as follows + # for binary in binary_vars: + # if binary.value > 0.5: + # num_non_zero += 1 - binary + # else: + # num_non_zero += binary + new_con = self.model.cl.add(non_zero_basic_expr <= num_non_zero) + cb_opt.cbLazy(new_con) + + +def enumerate_linear_solutions_soln_pool( + model, + num_solutions=10, + rel_opt_gap=None, + abs_opt_gap=None, + zero_threshold=1e-5, + solver_options={}, + tee=False, +): + """ + Finds alternative optimal solutions for a (mixed-binary) linear program + using Gurobi's solution pool feature. + + Parameters + ---------- + model : ConcreteModel + A concrete Pyomo model + num_solutions : int + The maximum number of solutions to generate. + variables: None or a collection of Pyomo _GeneralVarData variables + The variables for which bounds will be generated. None indicates + that all variables will be included. Alternatively, a collection of + _GenereralVarData variables can be provided. + rel_opt_gap : float or None + The relative optimality gap for the original objective for which + variable bounds will be found. None indicates that a relative gap + constraint will not be added to the model. + abs_opt_gap : float or None + The absolute optimality gap for the original objective for which + variable bounds will be found. None indicates that an absolute gap + constraint will not be added to the model. + zero_threshold: float + The threshold for which a continuous variables' value is considered + to be equal to zero. + solver_options : dict + Solver option-value pairs to be passed to the solver. + tee : boolean + Boolean indicating that the solver output should be displayed. 
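+
+    Notes
+    -----
+    This function requires gurobipy and uses the APPSI Gurobi interface with
+    lazy constraint callbacks; it raises an error if Gurobi is not available
+    or if the model contains discrete variables.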
+ + Returns + ------- + solutions + A list of Solution objects. + [Solution] + """ + logger.info("STARTING LP ENUMERATION ANALYSIS USING GUROBI SOLUTION POOL") + # + # Setup gurobi + # + if not gurobi_available: + raise pyomo.common.errors.ApplicationError(f"Solver (gurobi) not available") + + all_variables = aos_utils.get_model_variables(model) + for var in all_variables: + if var.is_integer(): + raise pyomo.common.errors.ApplicationError( + f"The enumerate_linear_solutions_soln_pool() function cannot be used with models that contain discrete variables" + ) + + opt = pe.SolverFactory("gurobi") + if not opt.available(exception_flag=False): + raise ValueError(solver + " is not available") + for parameter, value in solver_options.items(): + opt.options[parameter] = value + + logger.info("Performing initial solve of model.") + results = opt.solve(model, tee=tee) + status = results.solver.status + condition = results.solver.termination_condition + if condition != pe.TerminationCondition.optimal: + raise Exception( + ( + "Model could not be solve. LP enumeration analysis " + "cannot be applied, SolverStatus = {}, " + "TerminationCondition = {}" + ).format(status.value, condition.value) + ) + + orig_objective = aos_utils.get_active_objective(model) + orig_objective_value = pe.value(orig_objective) + logger.info("Found optimal solution, value = {}.".format(orig_objective_value)) + + aos_block = aos_utils._add_aos_block(model, name="_lp_enum") + logger.info("Added block {} to the model.".format(aos_block)) + aos_utils._add_objective_constraint( + aos_block, orig_objective, orig_objective_value, rel_opt_gap, abs_opt_gap + ) + + canonical_block = shifted_lp.get_shifted_linear_model(model) + cb = canonical_block + lower_index = list(cb.var_lower.keys()) + upper_index = list(cb.var_upper.keys()) + + # w variables + cb.basic_lower = pe.Var(lower_index, domain=pe.Binary) + cb.basic_upper = pe.Var(upper_index, domain=pe.Binary) + cb.basic_slack = pe.Var(cb.slack_index, domain=pe.Binary) + + # w upper bounds constraints + def bound_lower_rule(m, var_index): + return ( + m.var_lower[var_index] + <= m.var_lower[var_index].ub * m.basic_lower[var_index] + ) + + cb.bound_lower = pe.Constraint(lower_index, rule=bound_lower_rule) + + def bound_upper_rule(m, var_index): + return ( + m.var_upper[var_index] + <= m.var_upper[var_index].ub * m.basic_upper[var_index] + ) + + cb.bound_upper = pe.Constraint(upper_index, rule=bound_upper_rule) + + def bound_slack_rule(m, var_index): + return ( + m.slack_vars[var_index] + <= m.slack_vars[var_index].ub * m.basic_slack[var_index] + ) + + cb.bound_slack = pe.Constraint(cb.slack_index, rule=bound_slack_rule) + + cb.cl = pe.ConstraintList() + + # TODO: If we go the mixed binary route we also want to list the binary variables + variable_groups = [ + (cb.var_lower, cb.basic_lower), + (cb.var_upper, cb.basic_upper), + (cb.slack_vars, cb.basic_slack), + ] + cut_generator = NoGoodCutGenerator( + cb, + variable_groups, + zero_threshold, + model, + all_variables, + orig_objective, + num_solutions, + ) + + opt = appsi.solvers.Gurobi() + for parameter, value in solver_options.items(): + opt.gurobi_options[parameter] = value + opt.config.stream_solver = True + opt.config.load_solution = False + opt.gurobi_options["LazyConstraints"] = 1 + opt.set_instance(cb) + opt.set_callback(cut_generator.cut_generator_callback) + opt.solve(cb) + + aos_block.deactivate() + logger.info("COMPLETED LP ENUMERATION ANALYSIS") + + return cut_generator.solutions diff --git 
a/pyomo/contrib/alternative_solutions/obbt.py b/pyomo/contrib/alternative_solutions/obbt.py new file mode 100644 index 00000000000..3fc59bd3214 --- /dev/null +++ b/pyomo/contrib/alternative_solutions/obbt.py @@ -0,0 +1,355 @@ +# ___________________________________________________________________________ +# +# Pyomo: Python Optimization Modeling Objects +# Copyright (c) 2008-2024 +# National Technology and Engineering Solutions of Sandia, LLC +# Under the terms of Contract DE-NA0003525 with National Technology and +# Engineering Solutions of Sandia, LLC, the U.S. Government retains certain +# rights in this software. +# This software is distributed under the 3-clause BSD License. +# ___________________________________________________________________________ + +import logging + +logger = logging.getLogger(__name__) + +import pyomo.environ as pe +from pyomo.contrib.alternative_solutions import aos_utils +from pyomo.contrib.alternative_solutions import Solution +from pyomo.contrib import appsi + + +def obbt_analysis( + model, + *, + variables=None, + rel_opt_gap=None, + abs_opt_gap=None, + refine_discrete_bounds=False, + warmstart=True, + solver="gurobi", + solver_options={}, + tee=False, +): + """ + Calculates the bounds on each variable by solving a series of min and max + optimization problems where each variable is used as the objective function + This can be applied to any class of problem supported by the selected + solver. + + Parameters + ---------- + model : ConcreteModel + A concrete Pyomo model. + variables: None or a collection of Pyomo _GeneralVarData variables + The variables for which bounds will be generated. None indicates + that all variables will be included. Alternatively, a collection of + _GenereralVarData variables can be provided. + rel_opt_gap : float or None + The relative optimality gap for the original objective for which + variable bounds will be found. None indicates that a relative gap + constraint will not be added to the model. + abs_opt_gap : float or None + The absolute optimality gap for the original objective for which + variable bounds will be found. None indicates that an absolute gap + constraint will not be added to the model. + refine_discrete_bounds : boolean + Boolean indicating that new constraints should be added to the + model at each iteration to tighten the bounds for discrete + variables. + warmstart : boolean + Boolean indicating that the solver should be warmstarted from the + best previously discovered solution. + solver : string + The solver to be used. + solver_options : dict + Solver option-value pairs to be passed to the solver. + tee : boolean + Boolean indicating that the solver output should be displayed. + + Returns + ------- + variable_ranges + A Pyomo ComponentMap containing the bounds for each variable. + {variable: (lower_bound, upper_bound)}. An exception is raised when + the solver encountered an issue. 
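+
+    Examples
+    --------
+    A minimal usage sketch (assuming the ``glpk`` solver is installed and
+    ``model`` is an existing ConcreteModel)::
+
+        bounds = obbt_analysis(model, solver="glpk")
+        for var, (lb, ub) in bounds.items():
+            print(var.name, lb, ub)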
+ """ + bounds, solns = obbt_analysis_bounds_and_solutions( + model, + variables=variables, + rel_opt_gap=rel_opt_gap, + abs_opt_gap=abs_opt_gap, + refine_discrete_bounds=refine_discrete_bounds, + warmstart=warmstart, + solver=solver, + solver_options=solver_options, + tee=tee, + ) + return bounds + + +def obbt_analysis_bounds_and_solutions( + model, + *, + variables=None, + rel_opt_gap=None, + abs_opt_gap=None, + refine_discrete_bounds=False, + warmstart=True, + solver="gurobi", + solver_options={}, + tee=False, +): + """ + Calculates the bounds on each variable by solving a series of min and max + optimization problems where each variable is used as the objective function + This can be applied to any class of problem supported by the selected + solver. + + Parameters + ---------- + model : ConcreteModel + A concrete Pyomo model. + variables: None or a collection of Pyomo _GeneralVarData variables + The variables for which bounds will be generated. None indicates + that all variables will be included. Alternatively, a collection of + _GenereralVarData variables can be provided. + rel_opt_gap : float or None + The relative optimality gap for the original objective for which + variable bounds will be found. None indicates that a relative gap + constraint will not be added to the model. + abs_opt_gap : float or None + The absolute optimality gap for the original objective for which + variable bounds will be found. None indicates that an absolute gap + constraint will not be added to the model. + refine_discrete_bounds : boolean + Boolean indicating that new constraints should be added to the + model at each iteration to tighten the bounds for discrete + variables. + warmstart : boolean + Boolean indicating that the solver should be warmstarted from the + best previously discovered solution. + solver : string + The solver to be used. + solver_options : dict + Solver option-value pairs to be passed to the solver. + tee : boolean + Boolean indicating that the solver output should be displayed. + + Returns + ------- + variable_ranges + A Pyomo ComponentMap containing the bounds for each variable. + {variable: (lower_bound, upper_bound)}. An exception is raised when + the solver encountered an issue. 
+ solutions + [Solution] + """ + + # TODO - parallelization + + logger.info("STARTING OBBT ANALYSIS") + + if warmstart: + assert ( + variables == None + ), "Cannot restrict variable list when warmstart is specified" + all_variables = aos_utils.get_model_variables(model, include_fixed=False) + if variables == None: + variable_list = all_variables + else: + variable_list = list(variables) + if warmstart: + solutions = pe.ComponentMap() + for var in all_variables: + solutions[var] = [] + + num_vars = len(variable_list) + logger.info( + "Analyzing {} variables ({} total solves).".format(num_vars, 2 * num_vars) + ) + orig_objective = aos_utils.get_active_objective(model) + + use_appsi = False + if "appsi" in solver: + opt = appsi.solvers.Gurobi() + for parameter, value in solver_options.items(): + opt.gurobi_options[parameter] = value + opt.config.stream_solver = tee + opt.config.load_solution = False + results = opt.solve(model) + condition = results.termination_condition + optimal_tc = appsi.base.TerminationCondition.optimal + infeas_or_unbdd_tc = appsi.base.TerminationCondition.infeasibleOrUnbounded + unbdd_tc = appsi.base.TerminationCondition.unbounded + use_appsi = True + else: + opt = pe.SolverFactory(solver) + opt.available() + for parameter, value in solver_options.items(): + opt.options[parameter] = value + try: + results = opt.solve( + model, warmstart=warmstart, tee=tee, load_solutions=False + ) + except ValueError: + # An exception occurs if the solver does not recognize the warmstart option + results = opt.solve(model, tee=tee, load_solutions=False) + condition = results.solver.termination_condition + optimal_tc = pe.TerminationCondition.optimal + infeas_or_unbdd_tc = pe.TerminationCondition.infeasibleOrUnbounded + unbdd_tc = pe.TerminationCondition.unbounded + logger.info("Performing initial solve of model.") + + if condition != optimal_tc: + raise RuntimeError( + ("OBBT cannot be applied, " "TerminationCondition = {}").format( + condition.value + ) + ) + if use_appsi: + results.solution_loader.load_vars(solution_number=0) + else: + model.solutions.load_from(results) + if warmstart: + _add_solution(solutions) + orig_objective_value = pe.value(orig_objective) + logger.info("Found optimal solution, value = {}.".format(orig_objective_value)) + aos_block = aos_utils._add_aos_block(model, name="_obbt") + # placeholder for objective + aos_block.var_objective = pe.Objective(expr=0) + logger.info("Added block {} to the model.".format(aos_block)) + obj_constraints = aos_utils._add_objective_constraint( + aos_block, orig_objective, orig_objective_value, rel_opt_gap, abs_opt_gap + ) + if refine_discrete_bounds: + aos_block.bound_constraints = pe.ConstraintList() + new_constraint = False + if len(obj_constraints) > 0: + new_constraint = True + orig_objective.deactivate() + + if use_appsi: + opt.update_config.check_for_new_or_removed_constraints = new_constraint + opt.update_config.check_for_new_or_removed_vars = False + opt.update_config.check_for_new_or_removed_params = False + opt.update_config.check_for_new_objective = True + opt.update_config.update_constraints = False + opt.update_config.update_vars = False + opt.update_config.update_params = False + opt.update_config.update_named_expressions = False + opt.update_config.update_objective = True + opt.update_config.treat_fixed_vars_as_params = False + + variable_bounds = pe.ComponentMap() + solns = [Solution(model, all_variables, objective=orig_objective)] + + senses = [(pe.minimize, "LB"), (pe.maximize, "UB")] + + iteration = 1 + 
total_iterations = len(senses) * num_vars + for idx in range(len(senses)): + sense = senses[idx][0] + bound_dir = senses[idx][1] + + for var in variable_list: + if idx == 0: + variable_bounds[var] = [None, None] + + aos_block.var_objective.expr = var + aos_block.var_objective.sense = sense + + if warmstart: + _update_values(var, bound_dir, solutions) + + if use_appsi: + opt.update_config.check_for_new_or_removed_constraints = new_constraint + if use_appsi: + opt.config.stream_solver = tee + results = opt.solve(model) + condition = results.termination_condition + else: + try: + results = opt.solve( + model, warmstart=warmstart, tee=tee, load_solutions=False + ) + except ValueError: + # An exception occurs if the solver does not recognize the warmstart option + results = opt.solve(model, tee=tee, load_solutions=False) + condition = results.solver.termination_condition + new_constraint = False + + if condition == optimal_tc: + if use_appsi: + results.solution_loader.load_vars(solution_number=0) + else: + model.solutions.load_from(results) + solns.append(Solution(model, all_variables, objective=orig_objective)) + + if warmstart: + _add_solution(solutions) + obj_val = pe.value(var) + variable_bounds[var][idx] = obj_val + + if refine_discrete_bounds and not var.is_continuous(): + if sense == pe.minimize and var.lb < obj_val: + aos_block.bound_constraints.add(var >= obj_val) + new_constraint = True + + if sense == pe.maximize and var.ub > obj_val: + aos_block.bound_constraints.add(var <= obj_val) + new_constraint = True + + # An infeasibleOrUnbounded status code will imply the problem is + # unbounded since feasibility has been established previously + elif condition == infeas_or_unbdd_tc or condition == unbdd_tc: + if sense == pe.minimize: + variable_bounds[var][idx] = float("-inf") + else: + variable_bounds[var][idx] = float("inf") + else: # pragma: no cover + logger.warn( + ( + "Unexpected condition for the variable {} {} problem." + "TerminationCondition = {}" + ).format(var.name, bound_dir, condition.value) + ) + + var_value = variable_bounds[var][idx] + logger.info( + "Iteration {}/{}: {}_{} = {}".format( + iteration, total_iterations, var.name, bound_dir, var_value + ) + ) + + if idx == 1: + variable_bounds[var] = tuple(variable_bounds[var]) + + iteration += 1 + + aos_block.deactivate() + orig_objective.activate() + + logger.info("COMPLETED OBBT ANALYSIS") + + return variable_bounds, solns + + +def _add_solution(solutions): + """Add the current variable values to the solution list.""" + for var in solutions: + solutions[var].append(pe.value(var)) + + +def _update_values(var, bound_dir, solutions): + """ + Set the values of all variables to the best solution seen previously for + the current objective function. 
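+    The best solution is the one with the smallest value of var for lower
+    bound ("LB") problems and the largest value for upper bound ("UB")
+    problems; all variables are then set to their values from that solution.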
+ """ + if bound_dir == "LB": + value = min(solutions[var]) + else: + value = max(solutions[var]) + idx = solutions[var].index(value) + for variable in solutions: + variable.set_value(solutions[variable][idx]) diff --git a/pyomo/contrib/alternative_solutions/shifted_lp.py b/pyomo/contrib/alternative_solutions/shifted_lp.py new file mode 100644 index 00000000000..944651c96c5 --- /dev/null +++ b/pyomo/contrib/alternative_solutions/shifted_lp.py @@ -0,0 +1,225 @@ +# ___________________________________________________________________________ +# +# Pyomo: Python Optimization Modeling Objects +# Copyright (c) 2008-2024 +# National Technology and Engineering Solutions of Sandia, LLC +# Under the terms of Contract DE-NA0003525 with National Technology and +# Engineering Solutions of Sandia, LLC, the U.S. Government retains certain +# rights in this software. +# This software is distributed under the 3-clause BSD License. +# ___________________________________________________________________________ + +import pyomo.environ as pe +from pyomo.common.collections import ComponentMap +from pyomo.gdp.util import clone_without_expression_components +from pyomo.contrib.fbbt.fbbt import compute_bounds_on_expr +from pyomo.contrib.alternative_solutions import aos_utils + + +def _get_unique_name(collection, name): + """Create a unique name for an item that will be added to a collection.""" + if name not in collection: + return name + else: + i = 1 + while "{}_{}".format(name, i) not in collection: + i += 1 + return "{}_{}".format(name, i) + + +def _set_slack_ub(expression, slack_var): + """ + Use FBBT to compute an upper bound for a slack variable on an equality + expression.""" + slack_lb, slack_ub = compute_bounds_on_expr(expression) + assert slack_ub >= 0 + slack_var.setub(slack_ub) + + +def get_shifted_linear_model(model, block=None): + """ + Converts an (MI)LP with bounded (discrete and) continuous variables + (l <= x <= u) into a standard form where where all continuous variables + are non-negative reals and all constraints are equalities. For a pure LP of + the form, + + min/max cx + s.t. + A_1 * x = b_1 + A_2 * x <= b_2 + l <= x <= u + + a problem of the form, + + min/max c'z + s.t. + Bz = q + z >= 0 + + will be created and added to the returned block. z consists of var_lower + and var_upper variables that are substituted into the original x variables, + and slack_vars that are used to convert the original inequalities to + equalities. Bounds are provided on all variables in z. For MILPs, only the + continuous part of the problem is converted. + + See Lee, Sangbum., C. Phalakornkule, M. Domach, I. Grossmann, Recursive + MILP model for finding all the alternate optima in LP models for metabolic + networks, Computers & Chemical Engineering, Volume 24, Issues 2–7, 2000, + page 712 for additional details. + + Parameters + ---------- + model : ConcreteModel + A concrete Pyomo model + block : Block + The Pyomo block that the new model should be added to. + + Returns + ------- + block + The block that holds the reformulated model. + """ + + # Gather all variables and confirm the model is bounded + all_vars = aos_utils.get_model_variables(model) + new_vars = {} + all_vars_new = {} + var_map = ComponentMap() + var_range = {} + for var in all_vars: + assert var.lb is not None, ( + "Variable {} does not have a " + "lower bound. All variables must be " + "bounded.".format(var.name) + ) + assert var.ub is not None, ( + "Variable {} does not have an " + "upper bound. 
All variables must be " + "bounded.".format(var.name) + ) + if var.is_continuous(): + var_name = _get_unique_name(new_vars.keys(), var.name) + new_vars[var_name] = var + all_vars_new[var_name] = var + var_map[var] = var_name + var_range[var_name] = (0, var.ub - var.lb) + else: + all_vars_new[var.name] = var + + if block is None: + block = model + shifted_lp = aos_utils._add_aos_block(block, name="_shifted_lp") + + # Replace original variables with shifted lower and upper variables + shifted_lp.var_lower = pe.Var( + new_vars.keys(), domain=pe.NonNegativeReals, bounds=var_range + ) + shifted_lp.var_upper = pe.Var( + new_vars.keys(), domain=pe.NonNegativeReals, bounds=var_range + ) + + # Link the shifted lower and upper variables + def link_vars_rule(m, var_index): + return ( + m.var_lower[var_index] + m.var_upper[var_index] == m.var_upper[var_index].ub + ) + + shifted_lp.link_vars = pe.Constraint(new_vars.keys(), rule=link_vars_rule) + + # Map the lower and upper variables to the original variables and their + # lower bounds. This will be used to substitute x with var_lower + x.lb. + var_lower_map = {id(var): shifted_lp.var_lower[i] for i, var in new_vars.items()} + var_lower_bounds = {id(var): var.lb for var in new_vars.values()} + var_zeros = {id(var): 0 for var in all_vars_new.values()} + + # Substitute the new s variables into the objective function + # The c_fix_zeros calculation is used to find any constant terms that exist + # in the objective expression to avoid double counting + active_objective = aos_utils.get_active_objective(model) + c_var_lower = clone_without_expression_components( + active_objective.expr, substitute=var_lower_map + ) + c_fix_lower = clone_without_expression_components( + active_objective.expr, substitute=var_lower_bounds + ) + c_fix_zeros = clone_without_expression_components( + active_objective.expr, substitute=var_zeros + ) + shifted_lp.objective = pe.Objective( + expr=c_var_lower - c_fix_zeros + c_fix_lower, + name=active_objective.name + "_shifted", + sense=active_objective.sense, + ) + + # Identify all of the shifted constraints and associated slack variables + # that will need to be created + new_constraints = {} + constraint_map = ComponentMap() + constraint_type = {} + slacks = [] + for constraint in model.component_data_objects(pe.Constraint, active=True): + if constraint.parent_block() == shifted_lp: + continue + if constraint.equality: + constraint_name = constraint.name + "_equal" + constraint_name = _get_unique_name(new_constraints.keys(), constraint.name) + new_constraints[constraint_name] = constraint + constraint_map[constraint] = constraint_name + constraint_type[constraint_name] = 0 + else: + if constraint.lb is not None: + constraint_name = constraint.name + "_lower" + constraint_name = _get_unique_name( + new_constraints.keys(), constraint.name + ) + new_constraints[constraint_name] = constraint + constraint_map[constraint] = constraint_name + constraint_type[constraint_name] = -1 + slacks.append(constraint_name) + if constraint.ub is not None: + constraint_name = constraint.name + "_upper" + constraint_name = _get_unique_name( + new_constraints.keys(), constraint.name + ) + new_constraints[constraint_name] = constraint + constraint_map[constraint] = constraint_name + constraint_type[constraint_name] = 1 + slacks.append(constraint_name) + shifted_lp.constraint_index = pe.Set(initialize=new_constraints.keys()) + shifted_lp.slack_index = pe.Set(initialize=slacks) + shifted_lp.slack_vars = pe.Var(shifted_lp.slack_index, 
domain=pe.NonNegativeReals) + shifted_lp.constraints = pe.Constraint(shifted_lp.constraint_index) + + for constraint_name, constraint in new_constraints.items(): + # The c_fix_zeros calculation is used to find any constant terms that + # exist in the constraint expression to avoid double counting + a_sub_var_lower = clone_without_expression_components( + constraint.body, substitute=var_lower_map + ) + a_sub_fix_lower = clone_without_expression_components( + constraint.body, substitute=var_lower_bounds + ) + a_sub_fix_zeros = clone_without_expression_components( + constraint.body, substitute=var_zeros + ) + b_lower = constraint.lb + b_upper = constraint.ub + con_type = constraint_type[constraint_name] + if con_type == 0: + expr = a_sub_var_lower - a_sub_fix_zeros + a_sub_fix_lower - b_lower == 0 + elif con_type == -1: + expr_rhs = a_sub_var_lower - a_sub_fix_zeros + a_sub_fix_lower - b_lower + expr = shifted_lp.slack_vars[constraint_name] == expr_rhs + _set_slack_ub(expr_rhs, shifted_lp.slack_vars[constraint_name]) + elif con_type == 1: + expr_rhs = b_upper - a_sub_var_lower + a_sub_fix_zeros - a_sub_fix_lower + expr = shifted_lp.slack_vars[constraint_name] == expr_rhs + _set_slack_ub(expr_rhs, shifted_lp.slack_vars[constraint_name]) + shifted_lp.constraints[constraint_name] = expr + + shifted_lp.var_map = var_map + shifted_lp.new_vars = new_vars + shifted_lp.constraint_map = constraint_map + shifted_lp.new_constraints = new_constraints + + return shifted_lp diff --git a/pyomo/contrib/alternative_solutions/solnpool.py b/pyomo/contrib/alternative_solutions/solnpool.py new file mode 100644 index 00000000000..51acb57c8a5 --- /dev/null +++ b/pyomo/contrib/alternative_solutions/solnpool.py @@ -0,0 +1,113 @@ +# ___________________________________________________________________________ +# +# Pyomo: Python Optimization Modeling Objects +# Copyright (c) 2008-2024 +# National Technology and Engineering Solutions of Sandia, LLC +# Under the terms of Contract DE-NA0003525 with National Technology and +# Engineering Solutions of Sandia, LLC, the U.S. Government retains certain +# rights in this software. +# This software is distributed under the 3-clause BSD License. +# ___________________________________________________________________________ + +import logging + +logger = logging.getLogger(__name__) + +from pyomo.common.dependencies import attempt_import + +gurobipy, gurobipy_available = attempt_import("gurobipy") + +import pyomo.environ as pe +from pyomo.contrib import appsi +import pyomo.contrib.alternative_solutions.aos_utils as aos_utils +from pyomo.contrib.alternative_solutions import Solution + + +def gurobi_generate_solutions( + model, + *, + num_solutions=10, + rel_opt_gap=None, + abs_opt_gap=None, + solver_options={}, + tee=False, +): + """ + Finds alternative optimal solutions for discrete variables using Gurobi's + built-in Solution Pool capability. See the Gurobi Solution Pool + documentation for additional details. + + Parameters + ---------- + model : ConcreteModel + A concrete Pyomo model. + num_solutions : int + The maximum number of solutions to generate. This parameter maps to + the PoolSolutions parameter in Gurobi. + rel_opt_gap : non-negative float or None + The relative optimality gap for allowable alternative solutions. + None implies that there is no limit on the relative optimality gap + (i.e. that any feasible solution can be considered by Gurobi). + This parameter maps to the PoolGap parameter in Gurobi. 
+ abs_opt_gap : non-negative float or None + The absolute optimality gap for allowable alternative solutions. + None implies that there is no limit on the absolute optimality gap + (i.e. that any feasible solution can be considered by Gurobi). + This parameter maps to the PoolGapAbs parameter in Gurobi. + solver_options : dict + Solver option-value pairs to be passed to the Gurobi solver. + tee : boolean + Boolean indicating that the solver output should be displayed. + + Returns + ------- + solutions + A list of Solution objects. [Solution] + """ + # + # Setup gurobi + # + if not gurobipy_available: + raise pyomo.common.errors.ApplicationError("Solver (gurobi) not available") + opt = appsi.solvers.Gurobi() + + if not opt.available(): + raise pyomo.common.errors.ApplicationError("Solver (gurobi) not available") + + opt.config.stream_solver = tee + opt.config.load_solution = False + opt.gurobi_options["PoolSolutions"] = num_solutions + opt.gurobi_options["PoolSearchMode"] = 2 + if rel_opt_gap is not None: + opt.gurobi_options["PoolGap"] = rel_opt_gap + if abs_opt_gap is not None: + opt.gurobi_options["PoolGapAbs"] = abs_opt_gap + for parameter, value in solver_options.items(): + opt.gurobi_options[parameter] = value + # + # Run gurobi + # + results = opt.solve(model) + condition = results.termination_condition + if not (condition == appsi.base.TerminationCondition.optimal): + raise pyomo.common.errors.ApplicationError( + "Model cannot be solved, " "TerminationCondition = {}" + ).format(condition.value) + # + # Collect solutions + # + solution_count = opt.get_model_attr("SolCount") + variables = aos_utils.get_model_variables(model, include_fixed=True) + solutions = [] + for i in range(solution_count): + # + # Load the i-th solution into the model + # + results.solution_loader.load_vars(solution_number=i) + # + # Pull the solution from the model into a Solution object, + # and append to our list of solutions + # + solutions.append(Solution(model, variables)) + + return solutions diff --git a/pyomo/contrib/alternative_solutions/solution.py b/pyomo/contrib/alternative_solutions/solution.py new file mode 100644 index 00000000000..7b224e3089b --- /dev/null +++ b/pyomo/contrib/alternative_solutions/solution.py @@ -0,0 +1,158 @@ +# ___________________________________________________________________________ +# +# Pyomo: Python Optimization Modeling Objects +# Copyright (c) 2008-2024 +# National Technology and Engineering Solutions of Sandia, LLC +# Under the terms of Contract DE-NA0003525 with National Technology and +# Engineering Solutions of Sandia, LLC, the U.S. Government retains certain +# rights in this software. +# This software is distributed under the 3-clause BSD License. +# ___________________________________________________________________________ + +import json +import pyomo.environ as pe +from pyomo.common.collections import ComponentMap, ComponentSet +from pyomo.contrib.alternative_solutions import aos_utils + + +class Solution: + """ + A class to store solutions from a Pyomo model. + + Attributes + ---------- + variables : ComponentMap + A map between Pyomo variables and their values for a solution. + fixed_vars : ComponentSet + The set of Pyomo variables that are fixed in a solution. + objective : ComponentMap + A map between Pyomo objectives and their values for a solution. + + Methods + ------- + pprint(): + Prints a solution. + get_variable_name_values(self, ignore_fixed_vars=False): + Get a dictionary of variable name-variable value pairs. 
+ get_fixed_variable_names(self): + Get a list of fixed-variable names. + get_objective_name_values(self): + Get a dictionary of objective name-objective value pairs. + """ + + def __init__(self, model, variable_list, include_fixed=True, objective=None): + """ + Constructs a Pyomo Solution object. + + Parameters + ---------- + model : ConcreteModel + A concrete Pyomo model. + variable_list: A collection of Pyomo _GenereralVarData variables + The variables for which the solution will be stored. + include_fixed : boolean + Boolean indicating that fixed variables should be added to the + solution. + objective: None or Objective + The objective functions for which the value will be saved. None + indicates that the active objective should be used, but a + different objective can be stored as well. + """ + + self.variables = ComponentMap() + self.fixed_vars = ComponentSet() + for var in variable_list: + is_fixed = var.is_fixed() + if is_fixed: + self.fixed_vars.add(var) + if include_fixed or not is_fixed: + self.variables[var] = pe.value(var) + + if objective is None: + objective = aos_utils.get_active_objective(model) + self.objective = (objective, pe.value(objective)) + + @property + def objective_value(self): + """ + Returns + ------- + The value of the objective. + """ + return self.objective[1] + + def pprint(self, round_discrete=True, sort_keys=True, indent=4): + """ + Print the solution variables and objective values. + + Parameters + ---------- + rounded_discrete : boolean + If True, then round discrete variable values before printing. + """ + print( + self.to_string( + round_discrete=round_discrete, sort_keys=sort_keys, indent=indent + ) + ) # pragma: no cover + + def to_string(self, round_discrete=True, sort_keys=True, indent=4): + return json.dumps( + self.to_dict(round_discrete=round_discrete), + sort_keys=sort_keys, + indent=indent, + ) + + def to_dict(self, round_discrete=True): + ans = {} + ans["objective"] = str(self.objective[0]) + ans["objective_value"] = self.objective[1] + soln = {} + for variable, value in self.variables.items(): + val = self._round_variable_value(variable, value, round_discrete) + soln[variable.name] = val + ans["solution"] = soln + ans["fixed_variables"] = [str(v) for v in self.fixed_vars] + return ans + + def __str__(self): + return self.to_string() + + __repn__ = __str__ + + def get_variable_name_values(self, include_fixed=True, round_discrete=True): + """ + Get a dictionary of variable name-variable value pairs. + + Parameters + ---------- + include_fixed : boolean + If True, then include fixed variables in the dictionary. + round_discrete : boolean + If True, then round discrete variable values in the dictionary. + + Returns + ------- + Dictionary mapping variable names to variable values. + """ + return { + var.name: self._round_variable_value(var, val, round_discrete) + for var, val in self.variables.items() + if include_fixed or not var in self.fixed_vars + } + + def get_fixed_variable_names(self): + """ + Get a list of fixed-variable names. + + Returns + ------- + A list of the variable names that are fixed. + """ + return [var.name for var in self.fixed_vars] + + def _round_variable_value(self, variable, value, round_discrete=True): + """ + Returns a rounded value unless the variable is discrete or rounded_discrete is False. 
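+ Discrete (binary or integer) variable values are rounded only when round_discrete is True; continuous variable values are always returned unchanged. For example, a binary variable stored as 0.9999999 is returned as 1.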
+ """ + return value if not round_discrete or variable.is_continuous() else round(value) diff --git a/pyomo/contrib/alternative_solutions/tests/__init__.py b/pyomo/contrib/alternative_solutions/tests/__init__.py new file mode 100644 index 00000000000..a4a626013c4 --- /dev/null +++ b/pyomo/contrib/alternative_solutions/tests/__init__.py @@ -0,0 +1,10 @@ +# ___________________________________________________________________________ +# +# Pyomo: Python Optimization Modeling Objects +# Copyright (c) 2008-2024 +# National Technology and Engineering Solutions of Sandia, LLC +# Under the terms of Contract DE-NA0003525 with National Technology and +# Engineering Solutions of Sandia, LLC, the U.S. Government retains certain +# rights in this software. +# This software is distributed under the 3-clause BSD License. +# ___________________________________________________________________________ diff --git a/pyomo/contrib/alternative_solutions/tests/test_aos_utils.py b/pyomo/contrib/alternative_solutions/tests/test_aos_utils.py new file mode 100644 index 00000000000..625104fa56a --- /dev/null +++ b/pyomo/contrib/alternative_solutions/tests/test_aos_utils.py @@ -0,0 +1,300 @@ +# ___________________________________________________________________________ +# +# Pyomo: Python Optimization Modeling Objects +# Copyright (c) 2008-2024 +# National Technology and Engineering Solutions of Sandia, LLC +# Under the terms of Contract DE-NA0003525 with National Technology and +# Engineering Solutions of Sandia, LLC, the U.S. Government retains certain +# rights in this software. +# This software is distributed under the 3-clause BSD License. +# ___________________________________________________________________________ + +from pyomo.common import unittest + +from pyomo.common.dependencies import numpy as numpy, numpy_available + +import pyomo.environ as pe +import pyomo.common.unittest as unittest +from pyomo.common.collections import ComponentSet + +import pyomo.contrib.alternative_solutions.aos_utils as au + + +class TestAOSUtilsUnit(unittest.TestCase): + + def get_multiple_objective_model(self): + """Create a simple model with three objectives.""" + m = pe.ConcreteModel() + m.b1 = pe.Block() + m.b2 = pe.Block() + m.x = pe.Var() + m.y = pe.Var() + m.b1.o = pe.Objective(expr=m.x) + m.b2.o = pe.Objective([0, 1]) + m.b2.o[0] = pe.Objective(expr=m.y) + m.b2.o[1] = pe.Objective(expr=m.x + m.y) + return m + + def test_multiple_objectives(self): + """Check that an error is thrown with multiple objectives.""" + m = self.get_multiple_objective_model() + assert_text = ( + "Model has 3 active objective functions, exactly one " "is required." + ) + with self.assertRaisesRegex(AssertionError, assert_text): + au.get_active_objective(m) + + def test_no_objectives(self): + """Check that an error is thrown with no objectives.""" + m = self.get_multiple_objective_model() + m.b1.o.deactivate() + m.b2.o.deactivate() + assert_text = ( + "Model has 0 active objective functions, exactly one " "is required." + ) + with self.assertRaisesRegex(AssertionError, assert_text): + au.get_active_objective(m) + + def test_one_objective(self): + """ + Check that the active objective is returned, when there is just one + objective. 
+ """ + m = self.get_multiple_objective_model() + m.b1.o.deactivate() + m.b2.o[0].deactivate() + self.assertEqual(m.b2.o[1], au.get_active_objective(m)) + + def test_aos_block(self): + """Ensure that an alternative solution block is added.""" + m = self.get_multiple_objective_model() + block_name = "test_block" + b = au._add_aos_block(m, block_name) + self.assertEqual(b.name, block_name) + self.assertEqual(b.ctype, pe.Block) + + def get_simple_model(self, sense=pe.minimize): + """Create a simple 2d linear program with an objective.""" + m = pe.ConcreteModel() + m.x = pe.Var() + m.y = pe.Var() + m.o = pe.Objective(expr=m.x + m.y, sense=sense) + return m + + def test_no_obj_constraint(self): + """Ensure that no objective constraints are added.""" + m = self.get_simple_model() + cons = au._add_objective_constraint(m, m.o, 2, None, None) + self.assertEqual(cons, []) + self.assertEqual(m.find_component("optimality_tol_rel"), None) + self.assertEqual(m.find_component("optimality_tol_abs"), None) + + def test_min_rel_obj_constraint(self): + """Ensure that the correct relative objective constraint is added.""" + m = self.get_simple_model() + cons = au._add_objective_constraint(m, m.o, 2, 0.1, None) + self.assertEqual(len(cons), 1) + self.assertEqual(m.find_component("optimality_tol_rel"), cons[0]) + self.assertEqual(m.find_component("optimality_tol_abs"), None) + self.assertEqual(2.2, cons[0].upper) + self.assertEqual(None, cons[0].lower) + + def test_min_abs_obj_constraint(self): + """Ensure that the correct absolute objective constraint is added.""" + m = self.get_simple_model() + cons = au._add_objective_constraint(m, m.o, 2, None, 1) + self.assertEqual(len(cons), 1) + self.assertEqual(m.find_component("optimality_tol_rel"), None) + self.assertEqual(m.find_component("optimality_tol_abs"), cons[0]) + self.assertEqual(3, cons[0].upper) + self.assertEqual(None, cons[0].lower) + + def test_min_both_obj_constraint(self): + m = self.get_simple_model() + cons = au._add_objective_constraint(m, m.o, -10, 0.3, 5) + self.assertEqual(len(cons), 2) + self.assertEqual(m.find_component("optimality_tol_rel"), cons[0]) + self.assertEqual(m.find_component("optimality_tol_abs"), cons[1]) + self.assertEqual(-7, cons[0].upper) + self.assertEqual(None, cons[0].lower) + self.assertEqual(-5, cons[1].upper) + self.assertEqual(None, cons[1].lower) + + def test_max_both_obj_constraint(self): + """ + Ensure that the correct relative and absolute objective constraints are + added. + """ + m = self.get_simple_model(sense=pe.maximize) + cons = au._add_objective_constraint(m, m.o, -1, 0.3, 1) + self.assertEqual(len(cons), 2) + self.assertEqual(m.find_component("optimality_tol_rel"), cons[0]) + self.assertEqual(m.find_component("optimality_tol_abs"), cons[1]) + self.assertEqual(None, cons[0].upper) + self.assertEqual(-1.3, cons[0].lower) + self.assertEqual(None, cons[1].upper) + self.assertEqual(-2, cons[1].lower) + + def test_max_both_obj_constraint2(self): + """ + Ensure that the correct relative and absolute objective constraints are + added. 
+ """ + m = self.get_simple_model(sense=pe.maximize) + cons = au._add_objective_constraint(m, m.o, 20, 0.5, 11) + self.assertEqual(len(cons), 2) + self.assertEqual(m.find_component("optimality_tol_rel"), cons[0]) + self.assertEqual(m.find_component("optimality_tol_abs"), cons[1]) + self.assertEqual(None, cons[0].upper) + self.assertEqual(10, cons[0].lower) + self.assertEqual(None, cons[1].upper) + self.assertEqual(9, cons[1].lower) + + @unittest.skipIf(not numpy_available, "Numpy not installed") + def test_random_direction(self): + """ + Ensure that _get_random_direction returns a normal vector. + """ + from numpy.linalg import norm + + vector = au._get_random_direction(10) + self.assertAlmostEqual(1.0, norm(vector)) + + def get_var_model(self): + """ + Create a model with multiple variables that are nested over several + layers of blocks. + """ + + indices = [0, 1, 2, 3] + + m = pe.ConcreteModel() + + m.b1 = pe.Block() + m.b2 = pe.Block() + m.b1.sb1 = pe.Block() + m.b2.sb2 = pe.Block() + + m.x = pe.Var(domain=pe.Reals) + m.b1.y = pe.Var(domain=pe.Binary) + m.b2.z = pe.Var(domain=pe.Integers) + + m.x_f = pe.Var(domain=pe.Reals) + m.b1.y_f = pe.Var(domain=pe.Binary) + m.b2.z_f = pe.Var(domain=pe.Integers) + m.x_f.fix(0) + m.b1.y_f.fix(0) + m.b2.z_f.fix(0) + + m.b1.sb1.x_l = pe.Var(indices, domain=pe.Reals) + m.b1.sb1.y_l = pe.Var(indices, domain=pe.Binary) + m.b2.sb2.z_l = pe.Var(indices, domain=pe.Integers) + + m.b1.sb1.x_l[3].fix(0) + m.b1.sb1.y_l[3].fix(0) + m.b2.sb2.z_l[3].fix(0) + + vars_minus_x = ( + [m.b1.y, m.b2.z, m.x_f, m.b1.y_f, m.b2.z_f] + + [m.b1.sb1.x_l[i] for i in indices] + + [m.b1.sb1.y_l[i] for i in indices] + + [m.b2.sb2.z_l[i] for i in indices] + ) + + m.con = pe.Constraint(expr=sum(v for v in vars_minus_x) <= 1) + m.b1.con = pe.Constraint(expr=m.b1.y <= 1) + m.b1.sb1.con = pe.Constraint(expr=m.b1.sb1.y_l[0] <= 1) + m.obj = pe.Objective(expr=m.x) + + m.all_vars = ComponentSet([m.x] + vars_minus_x) + m.unfixed_vars = ComponentSet([var for var in m.all_vars if not var.is_fixed()]) + + return m + + def test_get_all_variables_unfixed(self): + """Check that all unfixed variables are gathered.""" + m = self.get_var_model() + var = au.get_model_variables(m) + self.assertEqual(var, m.unfixed_vars) + + def test_get_all_variables(self): + """Check that all fixed and unfixed variables are gathered.""" + m = self.get_var_model() + var = au.get_model_variables(m, include_fixed=True) + self.assertEqual(var, m.all_vars) + + def test_get_all_continuous(self): + """Check that all continuous variables are gathered.""" + m = self.get_var_model() + var = au.get_model_variables( + m, include_continuous=True, include_binary=False, include_integer=False + ) + continuous_vars = ComponentSet( + var for var in m.unfixed_vars if var.is_continuous() + ) + self.assertEqual(var, continuous_vars) + + def test_get_all_binary(self): + """Check that all binary variables are gathered.""" + m = self.get_var_model() + var = au.get_model_variables( + m, include_continuous=False, include_binary=True, include_integer=False + ) + binary_vars = ComponentSet(var for var in m.unfixed_vars if var.is_binary()) + self.assertEqual(var, binary_vars) + + def test_get_all_integer(self): + """Check that all integer variables are gathered.""" + m = self.get_var_model() + var = au.get_model_variables( + m, include_continuous=False, include_binary=False, include_integer=True + ) + continuous_vars = ComponentSet( + var for var in m.unfixed_vars if var.is_integer() + ) + self.assertEqual(var, continuous_vars) + + def 
test_get_specific_vars(self): + """Check that all variables from a list are gathered.""" + m = self.get_var_model() + components = [m.x, m.b1.sb1.y_l[0], m.b2.sb2.z_l] + var = au.get_model_variables(m, components=components) + specific_vars = ComponentSet( + [m.x, m.b1.sb1.y_l[0], m.b2.sb2.z_l[0], m.b2.sb2.z_l[1], m.b2.sb2.z_l[2]] + ) + self.assertEqual(var, specific_vars) + + def test_get_block_vars1(self): + """ + Check that all variables from block are gathered (without + descending into subblocks). + """ + m = self.get_var_model() + components = [m.b2.sb2.z_l, (m.b1, False)] + var = au.get_model_variables(m, components=components) + specific_vars = ComponentSet( + [m.b1.y, m.b2.sb2.z_l[0], m.b2.sb2.z_l[1], m.b2.sb2.z_l[2]] + ) + self.assertEqual(var, specific_vars) + + def test_get_block_vars2(self): + """ + Check that all variables from block are gathered (without + descending into subblocks). + """ + m = self.get_var_model() + components = [m.b1] + var = au.get_model_variables(m, components=components) + specific_vars = ComponentSet([m.b1.y, m.b1.sb1.y_l[0]]) + self.assertEqual(var, specific_vars) + + def test_get_constraint_vars(self): + """Check that all variables constraints and objectives are gathered.""" + m = self.get_var_model() + components = [m.con, m.obj] + var = au.get_model_variables(m, components=components) + self.assertEqual(var, m.unfixed_vars) + + +if __name__ == "__main__": + unittest.main() diff --git a/pyomo/contrib/alternative_solutions/tests/test_balas.py b/pyomo/contrib/alternative_solutions/tests/test_balas.py new file mode 100644 index 00000000000..27c3c7b014d --- /dev/null +++ b/pyomo/contrib/alternative_solutions/tests/test_balas.py @@ -0,0 +1,155 @@ +# ___________________________________________________________________________ +# +# Pyomo: Python Optimization Modeling Objects +# Copyright (c) 2008-2024 +# National Technology and Engineering Solutions of Sandia, LLC +# Under the terms of Contract DE-NA0003525 with National Technology and +# Engineering Solutions of Sandia, LLC, the U.S. Government retains certain +# rights in this software. +# This software is distributed under the 3-clause BSD License. +# ___________________________________________________________________________ + +from collections import Counter + +from pyomo.common.dependencies import numpy as numpy, numpy_available + +if numpy_available: + from numpy.testing import assert_array_almost_equal + +import pyomo.environ as pe +from pyomo.common import unittest +import pyomo.opt + +from pyomo.contrib.alternative_solutions import enumerate_binary_solutions +import pyomo.contrib.alternative_solutions.tests.test_cases as tc + +solvers = list(pyomo.opt.check_available_solvers("glpk", "gurobi", "appsi_gurobi")) +pytestmark = unittest.pytest.mark.parametrize("mip_solver", solvers) + + +@unittest.pytest.mark.default +class TestBalasUnit: + + def test_bad_solver(self, mip_solver): + """ + Confirm that an exception is thrown with a bad solver name. + """ + m = tc.get_triangle_ip() + try: + enumerate_binary_solutions(m, solver="unknown_solver") + except pyomo.common.errors.ApplicationError as e: + pass + + def test_ip_feasibility(self, mip_solver): + """ + Enumerate solutions for an ip: triangle_ip. + + Check that there is just one solution when the # of binary variables is 0. 
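+ The triangle_ip model contains only general integer variables, so enumerate_binary_solutions is expected to return a single (optimal) solution with objective value 5.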
+ """ + m = tc.get_triangle_ip() + results = enumerate_binary_solutions(m, num_solutions=100, solver=mip_solver) + assert len(results) == 1 + assert results[0].objective_value == unittest.pytest.approx(5) + + @unittest.skipIf(True, "Ignoring fragile test for solver timeout.") + def test_no_time(self, mip_solver): + """ + Enumerate solutions for an ip: triangle_ip. + + Check that something sensible happens when the solver times out. + """ + m = tc.get_triangle_ip() + with unittest.pytest.raises(Exception): + results = enumerate_binary_solutions( + m, num_solutions=100, solver=mip_solver, solver_options={"TimeLimit": 0} + ) + + @unittest.skipIf(not numpy_available, "Numpy not installed") + def test_knapsack_all(self, mip_solver): + """ + Enumerate solutions for a binary problem: knapsack + + """ + m = tc.get_aos_test_knapsack( + 1, weights=[3, 4, 6, 5], values=[2, 3, 1, 4], capacity=8 + ) + results = enumerate_binary_solutions(m, num_solutions=100, solver=mip_solver) + objectives = list( + sorted((round(result.objective[1], 2) for result in results), reverse=True) + ) + assert_array_almost_equal(objectives, m.ranked_solution_values) + unique_solns_by_obj = [val for val in Counter(objectives).values()] + assert_array_almost_equal(unique_solns_by_obj, m.num_ranked_solns) + + @unittest.skipIf(not numpy_available, "Numpy not installed") + def test_knapsack_x0_x1(self, mip_solver): + """ + Enumerate solutions for a binary problem: knapsack + + Check that we only see 4 solutions that enumerate alternatives of x[1] and x[1] + """ + m = tc.get_aos_test_knapsack( + 1, weights=[3, 4, 6, 5], values=[2, 3, 1, 4], capacity=8 + ) + results = enumerate_binary_solutions( + m, num_solutions=100, solver=mip_solver, variables=[m.x[0], m.x[1]] + ) + objectives = list( + sorted((round(result.objective[1], 2) for result in results), reverse=True) + ) + assert_array_almost_equal(objectives, [6, 5, 4, 3]) + unique_solns_by_obj = [val for val in Counter(objectives).values()] + assert_array_almost_equal(unique_solns_by_obj, [1, 1, 1, 1]) + + @unittest.skipIf(not numpy_available, "Numpy not installed") + def test_knapsack_optimal_3(self, mip_solver): + """ + Enumerate solutions for a binary problem: knapsack + + """ + m = tc.get_aos_test_knapsack( + 1, weights=[3, 4, 6, 5], values=[2, 3, 1, 4], capacity=8 + ) + results = enumerate_binary_solutions(m, num_solutions=3, solver=mip_solver) + objectives = list( + sorted((round(result.objective[1], 2) for result in results), reverse=True) + ) + assert_array_almost_equal(objectives, m.ranked_solution_values[:3]) + + @unittest.skipIf(not numpy_available, "Numpy not installed") + def test_knapsack_hamming_3(self, mip_solver): + """ + Enumerate solutions for a binary problem: knapsack + + """ + m = tc.get_aos_test_knapsack( + 1, weights=[3, 4, 6, 5], values=[2, 3, 1, 4], capacity=8 + ) + results = enumerate_binary_solutions( + m, num_solutions=3, solver=mip_solver, search_mode="hamming" + ) + objectives = list( + sorted((round(result.objective[1], 2) for result in results), reverse=True) + ) + assert_array_almost_equal(objectives, [6, 3, 1]) + + @unittest.skipIf(not numpy_available, "Numpy not installed") + def test_knapsack_random_3(self, mip_solver): + """ + Enumerate solutions for a binary problem: knapsack + + """ + m = tc.get_aos_test_knapsack( + 1, weights=[3, 4, 6, 5], values=[2, 3, 1, 4], capacity=8 + ) + results = enumerate_binary_solutions( + m, num_solutions=3, solver=mip_solver, search_mode="random", seed=1118798374 + ) + objectives = list( + 
sorted((round(result.objective[1], 2) for result in results), reverse=True) + ) + assert_array_almost_equal(objectives, [6, 5, 4]) + + +if __name__ == "__main__": + unittest.main() diff --git a/pyomo/contrib/alternative_solutions/tests/test_case.xlsx b/pyomo/contrib/alternative_solutions/tests/test_case.xlsx new file mode 100644 index 00000000000..4fa4ee1045a Binary files /dev/null and b/pyomo/contrib/alternative_solutions/tests/test_case.xlsx differ diff --git a/pyomo/contrib/alternative_solutions/tests/test_cases.py b/pyomo/contrib/alternative_solutions/tests/test_cases.py new file mode 100644 index 00000000000..2cac807ca7e --- /dev/null +++ b/pyomo/contrib/alternative_solutions/tests/test_cases.py @@ -0,0 +1,438 @@ +# ___________________________________________________________________________ +# +# Pyomo: Python Optimization Modeling Objects +# Copyright (c) 2008-2024 +# National Technology and Engineering Solutions of Sandia, LLC +# Under the terms of Contract DE-NA0003525 with National Technology and +# Engineering Solutions of Sandia, LLC, the U.S. Government retains certain +# rights in this software. +# This software is distributed under the 3-clause BSD License. +# ___________________________________________________________________________ + +from itertools import product +from math import ceil, floor +from collections import Counter + +from pyomo.common.dependencies import numpy as np + +import pyomo.environ as pe + +""" +This script has collection of test cases that can be used to enumerate solutions. +That is, simple problems where the alternative solutions can be found manually. +""" + + +def _is_satisfied(constraint, feasibility_tol=1e-6): + value = pe.value(constraint.body) + if constraint.has_lb() and value < constraint.lb - feasibility_tol: + return False + if constraint.has_ub() and value > constraint.ub + feasibility_tol: + return False + return True + + +def get_2d_diamond_problem(discrete_x=False, discrete_y=False): + """ + Simple 2d problem where the feasible is diamond-shaped. 
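+ In the continuous case the maximum of x + y is approximately 6.789, attained near (7.579, -0.789).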
+ """ + m = pe.ConcreteModel() + m.x = pe.Var(within=pe.Integers if discrete_x else pe.Reals, bounds=(-10, 10)) + m.y = pe.Var(within=pe.Integers if discrete_y else pe.Reals, bounds=(-10, 10)) + + m.o = pe.Objective(expr=m.x + m.y, sense=pe.maximize) + + m.c1 = pe.Constraint(expr=-4 / 5 * m.x - 4 <= m.y) + m.c2 = pe.Constraint(expr=5 / 9 * m.x - 5 <= m.y) + m.c3 = pe.Constraint(expr=2 / 9 * m.x + 2 >= m.y) + m.c4 = pe.Constraint(expr=-1 / 2 * m.x + 3 >= m.y) + + # Continuous exteme points and bounds + m.extreme_points = { + (0.737704918, -4.590163934), + (-5.869565217, 0.695652174), + (1.384615385, 2.307692308), + (7.578947368, -0.789473684), + } + + m.continuous_bounds = pe.ComponentMap() + m.continuous_bounds[m.x] = (-5.869565217, 7.578947368) + m.continuous_bounds[m.y] = (-4.590163934, 2.307692308) + + # Continuous exteme points and bounds for the case where an objective + # constraint is added within a 100% relative gap of optimality or an + # absolute gap of 6.789473684 + + m.extreme_points_cut = { + (45 / 14, -45 / 14), + (-18 / 11, 18 / 11), + (1.384615385, 2.307692308), + (7.578947368, -0.789473684), + } + + m.continuous_bounds_cut = pe.ComponentMap() + m.continuous_bounds_cut[m.x] = (-18 / 11, 7.578947368) + m.continuous_bounds_cut[m.y] = (-45 / 14, 2.307692308) + + # Discrete feasible solutions and bounds + feasible_sols = [] + x_lower_bound = None + x_upper_bound = None + y_lower_bound = None + y_upper_bound = None + + x_lower = ceil(m.continuous_bounds[m.x][0]) + x_upper = floor(m.continuous_bounds[m.x][1]) + y_lower = ceil(m.continuous_bounds[m.y][0]) + y_upper = floor(m.continuous_bounds[m.y][1]) + cons = [m.c1, m.c2, m.c3, m.c4] + for x_value in range(x_lower, x_upper + 1): + for y_value in range(y_lower, y_upper + 1): + m.x.set_value(x_value) + m.y.set_value(y_value) + is_feasible = True + for con in cons: + if not _is_satisfied(con): + is_feasible = False + break + if is_feasible: + if x_lower_bound is None or x_value < x_lower_bound: + x_lower_bound = x_value + if x_upper_bound is None or x_value > x_upper_bound: + x_upper_bound = x_value + if y_lower_bound is None or y_value < y_lower_bound: + y_lower_bound = y_value + if y_upper_bound is None or y_value > y_upper_bound: + y_upper_bound = y_value + feasible_sols.append(((x_value, y_value), x_value + y_value)) + m.discrete_feasible = sorted(feasible_sols, key=lambda sol: sol[1], reverse=True) + m.discrete_bounds = pe.ComponentMap() + m.discrete_bounds[m.x] = (x_lower_bound, x_upper_bound) + m.discrete_bounds[m.y] = (y_lower_bound, y_upper_bound) + + return m + + +def get_3d_polyhedron_problem(): + """ + Simple 3d polyhedron that is expressed using all types of linear constraints + """ + m = pe.ConcreteModel() + m.x = pe.Var([0, 1, 2], within=pe.Reals) + m.x[0].setlb(-1) + m.x[0].setub(1) + m.x[1].setlb(-2) + m.x[1].setub(2) + m.x[2].setlb(1) + m.x[2].setub(2) + + def _constraint_switch_rule(m, i): + if i == 0: + return m.x[0] + m.x[1] <= 2 + elif i == 1: + return -m.x[0] + m.x[1] <= 2 + elif i == 2: + return m.x[0] + m.x[1] >= -2 + elif i == 3: + return -m.x[0] + m.x[1] >= -2 + elif i == 4: + return m.x[0] + m.x[1] + m.x[2] == 4 + + m.c = pe.Constraint([i for i in range(5)], rule=_constraint_switch_rule) + + m.o = pe.Objective(expr=m.x[0] + m.x[2], sense=pe.maximize) + return m + + +def get_2d_unbounded_problem(): + """ + Simple 2d problem where the feasible region is unbounded, but the problem + has an optimal solution. 
+ """ + m = pe.ConcreteModel() + m.x = pe.Var(within=pe.Reals) + m.y = pe.Var(within=pe.Reals) + + m.o = pe.Objective(expr=m.y - m.x) + + m.c1 = pe.Constraint(expr=m.x <= 4) + m.c2 = pe.Constraint(expr=m.y >= 2) + + m.extreme_points = {(4, 2)} + + m.continuous_bounds = pe.ComponentMap() + m.continuous_bounds[m.x] = (float("-inf"), 4) + m.continuous_bounds[m.y] = (2, float("inf")) + return m + + +def get_2d_degenerate_lp(): + """ + Simple 2d problem that includes a redundant constraint such that three + constraints are active at optimality. + """ + m = pe.ConcreteModel() + + m.x = pe.Var(within=pe.Reals, bounds=(-1, 3)) + m.y = pe.Var(within=pe.Reals, bounds=(-3, 2)) + + m.obj = pe.Objective(expr=m.x + 2 * m.y, sense=pe.maximize) + + m.con1 = pe.Constraint(expr=m.x + m.y <= 3) + m.con2 = pe.Constraint(expr=m.x + 2 * m.y <= 5) + m.con3 = pe.Constraint(expr=m.x + m.y >= -1) + + return m + + +def get_triangle_ip(): + """ + Simple 2d discrete problem where the feasible region looks like a 90-45-45 + right triangle and the optimal solutions fall along the hypotenuse, where + x + y == 5. Alternative near-optimal have integer objective values from 0 to 4. + """ + var_max = 5 + m = pe.ConcreteModel() + m.x = pe.Var(within=pe.NonNegativeIntegers, bounds=(0, var_max)) + m.y = pe.Var(within=pe.NonNegativeIntegers, bounds=(0, var_max)) + + m.o = pe.Objective(expr=m.x + m.y, sense=pe.maximize) + m.c = pe.Constraint(expr=m.x + m.y <= var_max) + + # + # Enumerate all feasible solutions + # + feasible_sols = [] + for i in range(var_max + 1): + for j in range(var_max + 1): + if i + j <= var_max: + feasible_sols.append(((i, j), i + j)) + feasible_sols = sorted(feasible_sols, key=lambda sol: sol[1], reverse=True) + m.feasible_sols = feasible_sols + # + # Count of solutions from best to worst + # + m.num_ranked_solns = [6, 5, 4, 3, 2, 1] + + return m + + +def get_implied_bound_ip(): + """ + 2d discrete problem where the bounds of z are impled by x and y. This + facilitate testing cases where the impled bounds are tighter than the + given bounds for the variable. + """ + m = pe.ConcreteModel() + m.x = pe.Var(within=pe.NonNegativeIntegers, bounds=(0, 5)) + m.y = pe.Var(within=pe.NonNegativeIntegers, bounds=(0, 5)) + m.z = pe.Var(within=pe.NonNegativeIntegers, bounds=(0, 5)) + + m.o = pe.Objective(expr=m.x + m.z) + + m.c1 = pe.Constraint(expr=m.x + m.y == 3) + m.c2 = pe.Constraint(expr=m.x + m.y + m.z <= 5) + m.c3 = pe.Constraint(expr=m.x + m.y + m.z >= 4) + + m.var_bounds = pe.ComponentMap() + m.var_bounds[m.x] = (0, 3) + m.var_bounds[m.y] = (0, 3) + m.var_bounds[m.z] = (1, 2) + + return m + + +def get_aos_test_knapsack( + var_max, weights, values, capacity=None, capacity_fraction=1.0 +): + """ + Creates a knapsack problem, given arrays of weights and values, and + returns all feasible solutions. The capacity represents the percent of the + total max weight that can be selected (sum weights * var_max). The var_max + parameter sets the upper bound on all variables, the max number of times + they can be selected. + """ + assert len(weights) == len(values), "weights and values must be the same length." + assert ( + 0 <= capacity_fraction and capacity_fraction <= 1 + ), "capacity_fraction must be between 0 and 1." 
+ + num_vars = len(weights) + if capacity is None: + capacity = sum(weights) * var_max * capacity_fraction + + m = pe.ConcreteModel() + m.i = pe.RangeSet(0, num_vars - 1) + + if var_max == 1: + m.x = pe.Var(m.i, within=pe.Binary) + else: + m.x = pe.Var(m.i, within=pe.NonNegativeIntegers, bounds=(0, var_max)) + + m.o = pe.Objective(expr=sum(values[i] * m.x[i] for i in m.i), sense=pe.maximize) + + m.c = pe.Constraint(expr=sum(weights[i] * m.x[i] for i in m.i) <= capacity) + + var_domain = range(var_max + 1) + all_combos = product(var_domain, repeat=num_vars) + + feasible_sols = [] + for sol in all_combos: + if np.dot(sol, weights) <= capacity: + feasible_sols.append((sol, np.dot(sol, values))) + feasible_sols = sorted(feasible_sols, key=lambda sol: sol[1], reverse=True) + m.ranked_solution_values = list(sorted([v for x, v in feasible_sols], reverse=True)) + m.num_ranked_solns = list(Counter([v for x, v in feasible_sols]).values()) + return m + + +def get_pentagonal_lp(): + """ + Pentagonal LP + """ + var_max = 5 + m = pe.ConcreteModel() + m.x = pe.Var(within=pe.Reals, bounds=(0, 2 * var_max)) + m.y = pe.Var(within=pe.Reals, bounds=(0, 2 * var_max)) + m.z = pe.Var(within=pe.NonNegativeReals, bounds=(0, 2 * var_max)) + m.o = pe.Objective(expr=m.z, sense=pe.minimize) + + base_points = np.array( + [ + [var_max, 2 * var_max, 0], + [2 * var_max, var_max, 0], + [3.0 * var_max / 2.0, 0, 0], + [var_max / 2.0, 0, 0], + [0, var_max, 0], + ] + ) + apex_point = np.array([var_max, var_max, var_max]) + + m.c = pe.ConstraintList() + for i in range(5): + vec_1 = base_points[i] - apex_point + vec_2 = base_points[(i + 1) % var_max] - base_points[i] + n = np.cross(vec_1, vec_2) + m.c.add( + n[0] * (m.x - apex_point[0]) + + n[1] * (m.y - apex_point[1]) + + n[2] * (m.z - apex_point[2]) + >= 0 + ) + + return m + + +def get_pentagonal_pyramid_mip(): + """ + Pentagonal pyramid with integer coordinates in the first two dimensions and + a third continuous dimension. + """ + var_max = 5 + m = pe.ConcreteModel() + m.x = pe.Var(within=pe.Integers, bounds=(-var_max, var_max)) + m.y = pe.Var(within=pe.Integers, bounds=(-var_max, var_max)) + m.z = pe.Var(within=pe.NonNegativeReals, bounds=(0, var_max)) + m.o = pe.Objective(expr=m.z, sense=pe.maximize) + + base_points = np.array( + [ + [0, var_max, 0], + [var_max, 0, 0], + [var_max / 2.0, -var_max, 0], + [-var_max / 2.0, -var_max, 0], + [-var_max, 0, 0], + ] + ) + apex_point = np.array([0, 0, var_max]) + + m.c = pe.ConstraintList() + for i in range(5): + vec_1 = base_points[i] - apex_point + vec_2 = base_points[(i + 1) % var_max] - base_points[i] + n = np.cross(vec_1, vec_2) + m.c.add( + n[0] * (m.x - apex_point[0]) + + n[1] * (m.y - apex_point[1]) + + n[2] * (m.z - apex_point[2]) + >= 0 + ) + # + # Count of solutions from best to worst + # + m.num_ranked_solns = [1, 4, 2, 8, 2, 12, 4, 16, 4, 20] + return m + + +def get_indexed_pentagonal_pyramid_mip(): + """ + Pentagonal pyramid with integer coordinates in the first two dimensions and + a third continuous dimension. 
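+ This is the same geometry as get_pentagonal_pyramid_mip, but the two integer coordinates are stored in the indexed variable x[1], x[2] and the facet constraints are built with a constraint rule rather than a ConstraintList.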
+ """ + var_max = 5 + m = pe.ConcreteModel() + m.x = pe.Var([1, 2], within=pe.Integers, bounds=(-var_max, var_max)) + m.z = pe.Var(within=pe.NonNegativeReals, bounds=(0, var_max)) + m.o = pe.Objective(expr=m.z, sense=pe.maximize) + base_points = np.array( + [ + [0, var_max, 0], + [var_max, 0, 0], + [var_max / 2.0, -var_max, 0], + [-var_max / 2.0, -var_max, 0], + [-var_max, 0, 0], + ] + ) + apex_point = np.array([0, 0, var_max]) + + def _con_rule(m, i): + vec_1 = base_points[i] - apex_point + vec_2 = base_points[(i + 1) % var_max] - base_points[i] + n = np.cross(vec_1, vec_2) + expr = ( + n[0] * (m.x[1] - apex_point[0]) + + n[1] * (m.x[2] - apex_point[1]) + + n[2] * (m.z - apex_point[2]) + ) + return expr >= 0 + + m.c = pe.Constraint([i for i in range(5)], rule=_con_rule) + m.num_ranked_solns = [1, 4, 2, 8, 2, 12, 4, 16, 4, 20] + return m + + +def get_bloated_pentagonal_pyramid_mip(): + """ + Pentagonal pyramid with integer coordinates in the first two dimensions and + a third continuous dimension. Bounds are artificially widened for obbt testing purposes + """ + var_max = 5 + m = pe.ConcreteModel() + m.x = pe.Var(within=pe.Integers, bounds=(-2 * var_max, 2 * var_max)) + m.y = pe.Var(within=pe.Integers, bounds=(-2 * var_max, var_max)) + m.z = pe.Var(within=pe.NonNegativeReals, bounds=(0, 2 * var_max)) + m.var_bounds = pe.ComponentMap() + m.o = pe.Objective(expr=m.z, sense=pe.maximize) + base_points = np.array( + [ + [0, var_max, 0], + [var_max, 0, 0], + [var_max / 2.0, -var_max, 0], + [-var_max / 2.0, -var_max, 0], + [-var_max, 0, 0], + ] + ) + apex_point = np.array([0, 0, var_max]) + + m.c = pe.ConstraintList() + for i in range(5): + vec_1 = base_points[i] - apex_point + vec_2 = base_points[(i + 1) % var_max] - base_points[i] + n = np.cross(vec_1, vec_2) + m.c.add( + n[0] * (m.x - apex_point[0]) + + n[1] * (m.y - apex_point[1]) + + n[2] * (m.z - apex_point[2]) + >= 0 + ) + return m diff --git a/pyomo/contrib/alternative_solutions/tests/test_lp_enum.py b/pyomo/contrib/alternative_solutions/tests/test_lp_enum.py new file mode 100644 index 00000000000..d761522b019 --- /dev/null +++ b/pyomo/contrib/alternative_solutions/tests/test_lp_enum.py @@ -0,0 +1,107 @@ +# ___________________________________________________________________________ +# +# Pyomo: Python Optimization Modeling Objects +# Copyright (c) 2008-2024 +# National Technology and Engineering Solutions of Sandia, LLC +# Under the terms of Contract DE-NA0003525 with National Technology and +# Engineering Solutions of Sandia, LLC, the U.S. Government retains certain +# rights in this software. +# This software is distributed under the 3-clause BSD License. +# ___________________________________________________________________________ + +from pyomo.common.dependencies import numpy as numpy, numpy_available + +import pyomo.environ as pe +from pyomo.common import unittest +import pyomo.opt + +import pyomo.contrib.alternative_solutions.tests.test_cases as tc +from pyomo.contrib.alternative_solutions import lp_enum + +# +# Find available solvers. Just use GLPK if it's available. +# +solvers = list( + pyomo.opt.check_available_solvers("glpk", "gurobi") +) # , "appsi_gurobi")) +pytestmark = unittest.pytest.mark.parametrize("mip_solver", solvers) + +timelimit = {"gurobi": "TimeLimit", "appsi_gurobi": "TimeLimit", "glpk": "tmlim"} + + +@unittest.pytest.mark.default +class TestLPEnum: + + def test_bad_solver(self, mip_solver): + """ + Confirm that an exception is thrown with a bad solver name. 
+ """ + m = tc.get_3d_polyhedron_problem() + try: + lp_enum.enumerate_linear_solutions(m, solver="unknown_solver") + except pyomo.common.errors.ApplicationError as e: + pass + + @unittest.skipIf(True, "Ignoring fragile test for solver timeout.") + def test_no_time(self, mip_solver): + """ + Check that the correct bounds are found for a discrete problem where + more restrictive bounds are implied by the constraints. + """ + m = tc.get_3d_polyhedron_problem() + with unittest.pytest.raises(Exception): + lp_enum.enumerate_linear_solutions( + m, solver=mip_solver, solver_options={timelimit[mip_solver]: 0} + ) + + def test_3d_polyhedron(self, mip_solver): + m = tc.get_3d_polyhedron_problem() + m.o.deactivate() + m.obj = pe.Objective(expr=m.x[0] + m.x[1] + m.x[2]) + + sols = lp_enum.enumerate_linear_solutions(m, solver=mip_solver) + assert len(sols) == 2 + for s in sols: + assert s.objective_value == unittest.pytest.approx(4) + + def test_3d_polyhedron(self, mip_solver): + m = tc.get_3d_polyhedron_problem() + m.o.deactivate() + m.obj = pe.Objective(expr=m.x[0] + 2 * m.x[1] + 3 * m.x[2]) + + sols = lp_enum.enumerate_linear_solutions(m, solver=mip_solver) + assert len(sols) == 2 + for s in sols: + assert s.objective_value == unittest.pytest.approx( + 9 + ) or s.objective_value == unittest.pytest.approx(10) + + def test_2d_diamond_problem(self, mip_solver): + m = tc.get_2d_diamond_problem() + sols = lp_enum.enumerate_linear_solutions(m, solver=mip_solver, num_solutions=2) + assert len(sols) == 2 + for s in sols: + print(s) + assert sols[0].objective_value == unittest.pytest.approx(6.789473684210527) + assert sols[1].objective_value == unittest.pytest.approx(3.6923076923076916) + + @unittest.skipIf(not numpy_available, "Numpy not installed") + def test_pentagonal_pyramid(self, mip_solver): + n = tc.get_pentagonal_pyramid_mip() + n.o.sense = pe.minimize + n.x.domain = pe.Reals + n.y.domain = pe.Reals + + sols = lp_enum.enumerate_linear_solutions(n, solver=mip_solver, tee=False) + for s in sols: + print(s) + assert len(sols) == 6 + + @unittest.skipIf(not numpy_available, "Numpy not installed") + def test_pentagon(self, mip_solver): + n = tc.get_pentagonal_lp() + + sols = lp_enum.enumerate_linear_solutions(n, solver=mip_solver) + for s in sols: + print(s) + assert len(sols) == 6 diff --git a/pyomo/contrib/alternative_solutions/tests/test_lp_enum_solnpool.py b/pyomo/contrib/alternative_solutions/tests/test_lp_enum_solnpool.py new file mode 100644 index 00000000000..6d3b5211f9e --- /dev/null +++ b/pyomo/contrib/alternative_solutions/tests/test_lp_enum_solnpool.py @@ -0,0 +1,44 @@ +# ___________________________________________________________________________ +# +# Pyomo: Python Optimization Modeling Objects +# Copyright (c) 2008-2024 +# National Technology and Engineering Solutions of Sandia, LLC +# Under the terms of Contract DE-NA0003525 with National Technology and +# Engineering Solutions of Sandia, LLC, the U.S. Government retains certain +# rights in this software. +# This software is distributed under the 3-clause BSD License. 
+# ___________________________________________________________________________ + +import pyomo.environ as pe +import pyomo.opt + +import pyomo.contrib.alternative_solutions.tests.test_cases as tc +from pyomo.contrib.alternative_solutions import lp_enum +from pyomo.contrib.alternative_solutions import lp_enum_solnpool + +from pyomo.common.dependencies import attempt_import + +numpy, numpy_available = attempt_import("numpy") +gurobipy, gurobi_available = attempt_import("gurobipy") + +# +# TODO: Setup detailed tests here +# + + +def test_here(): + if numpy_available: + n = tc.get_pentagonal_pyramid_mip() + n.x.domain = pe.Reals + n.y.domain = pe.Reals + + try: + sols = lp_enum_solnpool.enumerate_linear_solutions_soln_pool(n, tee=True) + except pyomo.common.errors.ApplicationError as e: + sols = [] + + # TODO - Confirm how solnpools deal with duplicate solutions + if gurobi_available: + assert len(sols) == 7 + else: + assert len(sols) == 0 diff --git a/pyomo/contrib/alternative_solutions/tests/test_obbt.py b/pyomo/contrib/alternative_solutions/tests/test_obbt.py new file mode 100644 index 00000000000..d2b180c9e3d --- /dev/null +++ b/pyomo/contrib/alternative_solutions/tests/test_obbt.py @@ -0,0 +1,245 @@ +# ___________________________________________________________________________ +# +# Pyomo: Python Optimization Modeling Objects +# Copyright (c) 2008-2024 +# National Technology and Engineering Solutions of Sandia, LLC +# Under the terms of Contract DE-NA0003525 with National Technology and +# Engineering Solutions of Sandia, LLC, the U.S. Government retains certain +# rights in this software. +# This software is distributed under the 3-clause BSD License. +# ___________________________________________________________________________ + +import math + +from pyomo.common.dependencies import numpy as numpy, numpy_available + +if numpy_available: + from numpy.testing import assert_array_almost_equal + +import pyomo.environ as pe +from pyomo.common import unittest + +import pyomo.opt +from pyomo.contrib.alternative_solutions import ( + obbt_analysis_bounds_and_solutions, + obbt_analysis, +) +import pyomo.contrib.alternative_solutions.tests.test_cases as tc + +solvers = list(pyomo.opt.check_available_solvers("glpk", "gurobi", "appsi_gurobi")) +pytestmark = unittest.pytest.mark.parametrize("mip_solver", solvers) + +timelimit = {"gurobi": "TimeLimit", "appsi_gurobi": "TimeLimit", "glpk": "tmlim"} + + +@unittest.pytest.mark.default +class TestOBBTUnit: + + @unittest.skipIf(not numpy_available, "Numpy not installed") + def test_bad_solver(self, mip_solver): + """ + Confirm that an exception is thrown with a bad solver name. + """ + m = tc.get_2d_diamond_problem() + try: + obbt_analysis(m, solver="unknown_solver") + except pyomo.common.errors.ApplicationError as e: + pass + + @unittest.skipIf(not numpy_available, "Numpy not installed") + def test_obbt_analysis(self, mip_solver): + """ + Check that the correct bounds are found for a continuous problem. 
+ """ + m = tc.get_2d_diamond_problem() + all_bounds = obbt_analysis(m, solver=mip_solver) + assert all_bounds.keys() == m.continuous_bounds.keys() + for var, bounds in all_bounds.items(): + assert_array_almost_equal(bounds, m.continuous_bounds[var]) + + def test_obbt_error1(self, mip_solver): + """ + ERROR: Cannot restrict variable list when warmstart is specified + """ + m = tc.get_2d_diamond_problem() + with unittest.pytest.raises(AssertionError): + obbt_analysis_bounds_and_solutions(m, variables=[m.x], solver=mip_solver) + + @unittest.skipIf(not numpy_available, "Numpy not installed") + def test_obbt_some_vars(self, mip_solver): + """ + Check that the correct bounds are found for a continuous problem. + """ + m = tc.get_2d_diamond_problem() + all_bounds, solns = obbt_analysis_bounds_and_solutions( + m, variables=[m.x], warmstart=False, solver=mip_solver + ) + assert len(all_bounds) == 1 + assert len(solns) == 2 * len(all_bounds) + 1 + for var, bounds in all_bounds.items(): + assert_array_almost_equal(bounds, m.continuous_bounds[var]) + + @unittest.skipIf(not numpy_available, "Numpy not installed") + def test_obbt_continuous(self, mip_solver): + """ + Check that the correct bounds are found for a continuous problem. + """ + m = tc.get_2d_diamond_problem() + all_bounds, solns = obbt_analysis_bounds_and_solutions(m, solver=mip_solver) + assert len(solns) == 2 * len(all_bounds) + 1 + assert all_bounds.keys() == m.continuous_bounds.keys() + for var, bounds in all_bounds.items(): + assert_array_almost_equal(bounds, m.continuous_bounds[var]) + + @unittest.skipIf(not numpy_available, "Numpy not installed") + def test_mip_rel_objective(self, mip_solver): + """ + Check that relative mip gap constraints are added for a mip with indexed vars and constraints + """ + m = tc.get_indexed_pentagonal_pyramid_mip() + all_bounds, solns = obbt_analysis_bounds_and_solutions( + m, rel_opt_gap=0.5, solver=mip_solver + ) + assert len(solns) == 2 * len(all_bounds) + 1 + assert m._obbt.optimality_tol_rel.lb == unittest.pytest.approx(2.5) + + @unittest.skipIf(not numpy_available, "Numpy not installed") + def test_mip_abs_objective(self, mip_solver): + """ + Check that absolute mip gap constraints are added + """ + m = tc.get_pentagonal_pyramid_mip() + all_bounds, solns = obbt_analysis_bounds_and_solutions( + m, abs_opt_gap=1.99, solver=mip_solver + ) + assert len(solns) == 2 * len(all_bounds) + 1 + assert m._obbt.optimality_tol_abs.lb == unittest.pytest.approx(3.01) + + @unittest.skipIf(not numpy_available, "Numpy not installed") + def test_obbt_warmstart(self, mip_solver): + """ + Check that warmstarting works. + """ + m = tc.get_2d_diamond_problem() + m.x.value = 0 + m.y.value = 0 + all_bounds, solns = obbt_analysis_bounds_and_solutions( + m, solver=mip_solver, warmstart=True, tee=False + ) + assert len(solns) == 2 * len(all_bounds) + 1 + assert all_bounds.keys() == m.continuous_bounds.keys() + for var, bounds in all_bounds.items(): + assert_array_almost_equal(bounds, m.continuous_bounds[var]) + + @unittest.skipIf(not numpy_available, "Numpy not installed") + def test_obbt_mip(self, mip_solver): + """ + Check that bound tightening only occurs for continuous variables + that can be tightened. 
+ """ + m = tc.get_bloated_pentagonal_pyramid_mip() + all_bounds, solns = obbt_analysis_bounds_and_solutions( + m, solver=mip_solver, tee=False + ) + assert len(solns) == 2 * len(all_bounds) + 1 + bounds_tightened = False + bounds_not_tightned = False + for var, bounds in all_bounds.items(): + if bounds[0] > var.lb: + bounds_tightened = True + else: + bounds_not_tightened = True + if bounds[1] < var.ub: + bounds_tightened = True + else: + bounds_not_tightened = True + assert bounds_tightened + assert bounds_not_tightened + + @unittest.skipIf(not numpy_available, "Numpy not installed") + def test_obbt_unbounded(self, mip_solver): + """ + Check that the correct bounds are found for an unbounded problem. + """ + m = tc.get_2d_unbounded_problem() + all_bounds, solns = obbt_analysis_bounds_and_solutions(m, solver=mip_solver) + assert all_bounds.keys() == m.continuous_bounds.keys() + num = 1 + for var, bounds in all_bounds.items(): + if not math.isinf(bounds[0]): + num += 1 + if not math.isinf(bounds[1]): + num += 1 + assert_array_almost_equal(bounds, m.continuous_bounds[var]) + assert len(solns) == num + + @unittest.skipIf(not numpy_available, "Numpy not installed") + def test_bound_tightening(self, mip_solver): + """ + Check that the correct bounds are found for a discrete problem where + more restrictive bounds are implied by the constraints. + """ + m = tc.get_implied_bound_ip() + all_bounds, solns = obbt_analysis_bounds_and_solutions(m, solver=mip_solver) + assert len(solns) == 2 * len(all_bounds) + 1 + assert all_bounds.keys() == m.var_bounds.keys() + for var, bounds in all_bounds.items(): + assert_array_almost_equal(bounds, m.var_bounds[var]) + + @unittest.skipIf(True, "Ignoring fragile test for solver timeout.") + def test_no_time(self, mip_solver): + """ + Check that the correct bounds are found for a discrete problem where + more restrictive bounds are implied by the constraints. + """ + m = tc.get_implied_bound_ip() + with unittest.pytest.raises(RuntimeError): + obbt_analysis_bounds_and_solutions( + m, solver=mip_solver, solver_options={timelimit[mip_solver]: 0} + ) + + def test_bound_refinement(self, mip_solver): + """ + Check that the correct bounds are found for a discrete problem where + more restrictive bounds are implied by the constraints and constraints + are added. + """ + m = tc.get_implied_bound_ip() + all_bounds, solns = obbt_analysis_bounds_and_solutions( + m, solver=mip_solver, refine_discrete_bounds=True + ) + assert len(solns) == 2 * len(all_bounds) + 1 + for var, bounds in all_bounds.items(): + if m.var_bounds[var][0] > var.lb: + match = False + for idx in m._obbt.bound_constraints: + const = m._obbt.bound_constraints[idx] + if var is const.body and bounds[0] == const.lb: + match = True + break + assert match, "Constraint not found for {} lower bound {}".format( + var, bounds[0] + ) + if m.var_bounds[var][1] < var.ub: + match = False + for idx in m._obbt.bound_constraints: + const = m._obbt.bound_constraints[idx] + if var is const.body and bounds[1] == const.ub: + match = True + break + assert match, "Constraint not found for {} upper bound {}".format( + var, bounds[1] + ) + + def test_obbt_infeasible(self, mip_solver): + """ + Check that code catches cases where the problem is infeasible. 
+        """
+        m = tc.get_2d_diamond_problem()
+        m.infeasible_constraint = pe.Constraint(expr=m.x >= 10)
+        with unittest.pytest.raises(Exception):
+            obbt_analysis_bounds_and_solutions(m, solver=mip_solver)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/pyomo/contrib/alternative_solutions/tests/test_shifted_lp.py b/pyomo/contrib/alternative_solutions/tests/test_shifted_lp.py
new file mode 100644
index 00000000000..da17e537914
--- /dev/null
+++ b/pyomo/contrib/alternative_solutions/tests/test_shifted_lp.py
@@ -0,0 +1,68 @@
+#  ___________________________________________________________________________
+#
+#  Pyomo: Python Optimization Modeling Objects
+#  Copyright (c) 2008-2024
+#  National Technology and Engineering Solutions of Sandia, LLC
+#  Under the terms of Contract DE-NA0003525 with National Technology and
+#  Engineering Solutions of Sandia, LLC, the U.S. Government retains certain
+#  rights in this software.
+#  This software is distributed under the 3-clause BSD License.
+#  ___________________________________________________________________________
+
+from pyomo.common.dependencies import numpy as numpy, numpy_available
+
+if numpy_available:
+    from numpy.testing import assert_array_almost_equal
+
+import pyomo.environ as pe
+import pyomo.opt
+from pyomo.common import unittest
+
+import pyomo.contrib.alternative_solutions.tests.test_cases as tc
+from pyomo.contrib.alternative_solutions import shifted_lp
+
+# TODO: add checks that confirm the shifted constraints make sense
+
+#
+# Find available solvers. Just use GLPK if it's available.
+#
+solvers = list(pyomo.opt.check_available_solvers("glpk", "gurobi"))
+if "glpk" in solvers:
+    solvers = ["glpk"]
+pytestmark = unittest.pytest.mark.parametrize("lp_solver", solvers)
+
+
+@unittest.pytest.mark.default
+class TestShiftedIP:
+
+    @unittest.skipIf(not numpy_available, "Numpy not installed")
+    def test_mip_abs_objective(self, lp_solver):
+        m = tc.get_indexed_pentagonal_pyramid_mip()
+        m.x.domain = pe.Reals
+
+        opt = pe.SolverFactory(lp_solver)
+        old_results = opt.solve(m, tee=False)
+        old_obj = pe.value(m.o)
+
+        new_model = shifted_lp.get_shifted_linear_model(m)
+        new_results = opt.solve(new_model, tee=False)
+        new_obj = pe.value(new_model.objective)
+
+        assert old_obj == unittest.pytest.approx(new_obj)
+
+    def test_polyhedron(self, lp_solver):
+        m = tc.get_3d_polyhedron_problem()
+
+        opt = pe.SolverFactory(lp_solver)
+        old_results = opt.solve(m, tee=False)
+        old_obj = pe.value(m.o)
+
+        new_model = shifted_lp.get_shifted_linear_model(m)
+        new_results = opt.solve(new_model, tee=False)
+        new_obj = pe.value(new_model.objective)
+
+        assert old_obj == unittest.pytest.approx(new_obj)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/pyomo/contrib/alternative_solutions/tests/test_solnpool.py b/pyomo/contrib/alternative_solutions/tests/test_solnpool.py
new file mode 100644
index 00000000000..0b9914a86dd
--- /dev/null
+++ b/pyomo/contrib/alternative_solutions/tests/test_solnpool.py
@@ -0,0 +1,148 @@
+#  ___________________________________________________________________________
+#
+#  Pyomo: Python Optimization Modeling Objects
+#  Copyright (c) 2008-2024
+#  National Technology and Engineering Solutions of Sandia, LLC
+#  Under the terms of Contract DE-NA0003525 with National Technology and
+#  Engineering Solutions of Sandia, LLC, the U.S. Government retains certain
+#  rights in this software.
+#  This software is distributed under the 3-clause BSD License.
+#  ___________________________________________________________________________
+
+from pyomo.common.dependencies import numpy as numpy, numpy_available
+
+if numpy_available:
+    from numpy.testing import assert_array_almost_equal
+from pyomo.common.dependencies import attempt_import
+
+gurobipy, gurobipy_available = attempt_import("gurobipy")
+
+from collections import Counter
+
+import pyomo.environ as pe
+from pyomo.common import unittest
+
+from pyomo.contrib.alternative_solutions import gurobi_generate_solutions
+import pyomo.contrib.alternative_solutions.tests.test_cases as tc
+
+
+@unittest.skipIf(not gurobipy_available, "Gurobi MIP solver not available")
+class TestSolnPoolUnit(unittest.TestCase):
+    """
+    Cases to cover:
+
+        LP feasibility (for an LP just one solution should be returned since gurobi cannot enumerate over continuous vars)
+
+        Pass at least one solver option to make sure that works, e.g. a time limit
+
+        We need a utility to check that two sets of solutions are the same.
+        Maybe this should be an AOS utility since it may be a thing we will want to do often.
+    """
+
+    @unittest.skipIf(not numpy_available, "Numpy not installed")
+    def test_ip_feasibility(self):
+        """
+        Enumerate all solutions for an ip: triangle_ip.
+
+        Check that the correct number of alternate solutions are found.
+        """
+        m = tc.get_triangle_ip()
+        results = gurobi_generate_solutions(m, num_solutions=100)
+        objectives = [round(result.objective[1], 2) for result in results]
+        actual_solns_by_obj = m.num_ranked_solns
+        unique_solns_by_obj = [val for val in Counter(objectives).values()]
+        assert_array_almost_equal(unique_solns_by_obj, actual_solns_by_obj)
+
+    @unittest.skipIf(not numpy_available, "Numpy not installed")
+    def test_ip_num_solutions(self):
+        """
+        Enumerate 8 solutions for an ip: triangle_ip.
+
+        Check that the correct number of alternate solutions are found.
+        """
+        m = tc.get_triangle_ip()
+        results = gurobi_generate_solutions(m, num_solutions=8)
+        assert len(results) == 8
+        objectives = [round(result.objective[1], 2) for result in results]
+        actual_solns_by_obj = [6, 2]
+        unique_solns_by_obj = [val for val in Counter(objectives).values()]
+        assert_array_almost_equal(unique_solns_by_obj, actual_solns_by_obj)
+
+    @unittest.skipIf(not numpy_available, "Numpy not installed")
+    def test_mip_feasibility(self):
+        """
+        Enumerate all solutions for a mip: indexed_pentagonal_pyramid_mip.
+
+        Check that the correct number of alternate solutions are found.
+        """
+        m = tc.get_indexed_pentagonal_pyramid_mip()
+        results = gurobi_generate_solutions(m, num_solutions=100)
+        objectives = [round(result.objective[1], 2) for result in results]
+        actual_solns_by_obj = m.num_ranked_solns
+        unique_solns_by_obj = [val for val in Counter(objectives).values()]
+        assert_array_almost_equal(unique_solns_by_obj, actual_solns_by_obj)
+
+    @unittest.skipIf(not numpy_available, "Numpy not installed")
+    def test_mip_rel_feasibility(self):
+        """
+        Enumerate solutions for a mip: pentagonal_pyramid_mip.
+
+        Check that only solutions within a relative tolerance of 0.2 are
+        found.
+        """
+        m = tc.get_pentagonal_pyramid_mip()
+        results = gurobi_generate_solutions(m, num_solutions=100, rel_opt_gap=0.2)
+        objectives = [round(result.objective[1], 2) for result in results]
+        actual_solns_by_obj = m.num_ranked_solns[0:2]
+        unique_solns_by_obj = [val for val in Counter(objectives).values()]
+        assert_array_almost_equal(unique_solns_by_obj, actual_solns_by_obj)
+
+    @unittest.skipIf(not numpy_available, "Numpy not installed")
+    def test_mip_rel_feasibility_options(self):
+        """
+        Enumerate solutions for a mip: pentagonal_pyramid_mip.
+
+        Check that only solutions within a relative tolerance of 0.2 are
+        found.
+        """
+        m = tc.get_pentagonal_pyramid_mip()
+        results = gurobi_generate_solutions(
+            m, num_solutions=100, solver_options={"PoolGap": 0.2}
+        )
+        objectives = [round(result.objective[1], 2) for result in results]
+        actual_solns_by_obj = m.num_ranked_solns[0:2]
+        unique_solns_by_obj = [val for val in Counter(objectives).values()]
+        assert_array_almost_equal(unique_solns_by_obj, actual_solns_by_obj)
+
+    @unittest.skipIf(not numpy_available, "Numpy not installed")
+    def test_mip_abs_feasibility(self):
+        """
+        Enumerate solutions for a mip: pentagonal_pyramid_mip.
+
+        Check that only solutions within an absolute tolerance of 1.99 are
+        found.
+        """
+        m = tc.get_pentagonal_pyramid_mip()
+        results = gurobi_generate_solutions(m, num_solutions=100, abs_opt_gap=1.99)
+        objectives = [round(result.objective[1], 2) for result in results]
+        actual_solns_by_obj = m.num_ranked_solns[0:3]
+        unique_solns_by_obj = [val for val in Counter(objectives).values()]
+        assert_array_almost_equal(unique_solns_by_obj, actual_solns_by_obj)
+
+    @unittest.skipIf(True, "Ignoring fragile test for solver timeout.")
+    def test_mip_no_time(self):
+        """
+        Enumerate solutions for a mip: pentagonal_pyramid_mip.
+
+        Check that no solutions are returned with a timelimit of 0.
+        """
+        m = tc.get_pentagonal_pyramid_mip()
+        # Use quiet=False to test error message
+        results = gurobi_generate_solutions(
+            m, num_solutions=100, solver_options={"TimeLimit": 0.0}, quiet=False
+        )
+        assert len(results) == 0
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/pyomo/contrib/alternative_solutions/tests/test_solution.py b/pyomo/contrib/alternative_solutions/tests/test_solution.py
new file mode 100644
index 00000000000..9df9374daef
--- /dev/null
+++ b/pyomo/contrib/alternative_solutions/tests/test_solution.py
@@ -0,0 +1,96 @@
+#  ___________________________________________________________________________
+#
+#  Pyomo: Python Optimization Modeling Objects
+#  Copyright (c) 2008-2024
+#  National Technology and Engineering Solutions of Sandia, LLC
+#  Under the terms of Contract DE-NA0003525 with National Technology and
+#  Engineering Solutions of Sandia, LLC, the U.S. Government retains certain
+#  rights in this software.
+#  This software is distributed under the 3-clause BSD License.
+#  ___________________________________________________________________________
+
+import pyomo.opt
+import pyomo.environ as pe
+import pyomo.common.unittest as unittest
+import pyomo.contrib.alternative_solutions.aos_utils as au
+from pyomo.contrib.alternative_solutions import Solution
+
+pyomo.opt.check_available_solvers("gurobi")
+mip_solver = "gurobi"
+
+
+class TestSolutionUnit(unittest.TestCase):
+
+    def get_model(self):
+        """
+        Simple model with all variable types and fixed variables to test the
+        Solution code.
+ """ + m = pe.ConcreteModel() + m.x = pe.Var(domain=pe.NonNegativeReals) + m.y = pe.Var(domain=pe.Binary) + m.z = pe.Var(domain=pe.NonNegativeIntegers) + m.f = pe.Var(domain=pe.Reals) + + m.f.fix(1) + m.obj = pe.Objective(expr=m.x + m.y + m.z + m.f, sense=pe.maximize) + + m.con_x = pe.Constraint(expr=m.x <= 1.5) + m.con_y = pe.Constraint(expr=m.y <= 1) + m.con_z = pe.Constraint(expr=m.z <= 3) + return m + + @unittest.skipUnless( + pe.SolverFactory(mip_solver).available(exception_flag=False), + "MIP solver not available", + ) + def test_solution(self): + """ + Create a Solution Object, call its functions, and ensure the correct + data is returned. + """ + model = self.get_model() + opt = pe.SolverFactory(mip_solver) + opt.solve(model) + all_vars = au.get_model_variables(model, include_fixed=True) + + solution = Solution(model, all_vars, include_fixed=False) + sol_str = """{ + "fixed_variables": [ + "f" + ], + "objective": "obj", + "objective_value": 6.5, + "solution": { + "x": 1.5, + "y": 1, + "z": 3 + } +}""" + assert str(solution) == sol_str + + solution = Solution(model, all_vars) + sol_str = """{ + "fixed_variables": [ + "f" + ], + "objective": "obj", + "objective_value": 6.5, + "solution": { + "f": 1, + "x": 1.5, + "y": 1, + "z": 3 + } +}""" + assert solution.to_string(round_discrete=True) == sol_str + + sol_val = solution.get_variable_name_values( + include_fixed=True, round_discrete=True + ) + self.assertEqual(set(sol_val.keys()), {"x", "y", "z", "f"}) + self.assertEqual(set(solution.get_fixed_variable_names()), {"f"}) + + +if __name__ == "__main__": + unittest.main() diff --git a/pyomo/contrib/appsi/solvers/ipopt.py b/pyomo/contrib/appsi/solvers/ipopt.py index 76cd204e36d..af40d2e88d2 100644 --- a/pyomo/contrib/appsi/solvers/ipopt.py +++ b/pyomo/contrib/appsi/solvers/ipopt.py @@ -567,3 +567,24 @@ def get_reduced_costs( return ComponentMap((k, v) for k, v in self._reduced_costs.items()) else: return ComponentMap((v, self._reduced_costs[v]) for v in vars_to_load) + + def has_linear_solver(self, linear_solver): + import pyomo.core as AML + from pyomo.common.tee import capture_output + + m = AML.ConcreteModel() + m.x = AML.Var() + m.o = AML.Objective(expr=(m.x - 2) ** 2) + with capture_output() as OUT: + solver = self.__class__() + solver.config.stream_solver = True + solver.config.load_solution = False + solver.ipopt_options['linear_solver'] = linear_solver + try: + solver.solve(m) + except FileNotFoundError: + # The APPSI interface always tries to open the SOL file, + # and will generate a FileNotFoundError if ipopt didn't + # generate one + return False + return 'running with linear solver' in OUT.getvalue() diff --git a/pyomo/contrib/appsi/tests/test_ipopt.py b/pyomo/contrib/appsi/tests/test_ipopt.py new file mode 100644 index 00000000000..b3697b9b233 --- /dev/null +++ b/pyomo/contrib/appsi/tests/test_ipopt.py @@ -0,0 +1,42 @@ +# ___________________________________________________________________________ +# +# Pyomo: Python Optimization Modeling Objects +# Copyright (c) 2008-2024 +# National Technology and Engineering Solutions of Sandia, LLC +# Under the terms of Contract DE-NA0003525 with National Technology and +# Engineering Solutions of Sandia, LLC, the U.S. Government retains certain +# rights in this software. +# This software is distributed under the 3-clause BSD License. 
+# ___________________________________________________________________________ + +from pyomo.common import unittest +from pyomo.contrib.appsi.solvers import ipopt + + +ipopt_available = ipopt.Ipopt().available() + + +@unittest.skipIf(not ipopt_available, "The 'ipopt' command is not available") +class TestIpoptInterface(unittest.TestCase): + def test_has_linear_solver(self): + opt = ipopt.Ipopt() + self.assertTrue( + any( + map( + opt.has_linear_solver, + [ + 'mumps', + 'ma27', + 'ma57', + 'ma77', + 'ma86', + 'ma97', + 'pardiso', + 'pardisomkl', + 'spral', + 'wsmp', + ], + ) + ) + ) + self.assertFalse(opt.has_linear_solver('bogus_linear_solver')) diff --git a/pyomo/contrib/cp/interval_var.py b/pyomo/contrib/cp/interval_var.py index dec5af74d9f..013fa145b15 100644 --- a/pyomo/contrib/cp/interval_var.py +++ b/pyomo/contrib/cp/interval_var.py @@ -206,8 +206,6 @@ def _getitem_when_not_present(self, index): class ScalarIntervalVar(IntervalVarData, IntervalVar): def __init__(self, *args, **kwds): - self._suppress_ctypes = set() - IntervalVarData.__init__(self, self) IntervalVar.__init__(self, *args, **kwds) self._data[None] = self diff --git a/pyomo/contrib/cp/tests/test_logical_to_disjunctive.py b/pyomo/contrib/cp/tests/test_logical_to_disjunctive.py index 3f66aa57726..d940468900b 100755 --- a/pyomo/contrib/cp/tests/test_logical_to_disjunctive.py +++ b/pyomo/contrib/cp/tests/test_logical_to_disjunctive.py @@ -200,6 +200,41 @@ def test_equivalence(self): assertExpressionsEqual(self, m.cons[10].expr, m.z[5] >= 1) + def test_equivalent_to_True(self): + m = self.make_model() + e = m.a.equivalent_to(True) + + visitor = LogicalToDisjunctiveVisitor() + m.cons = visitor.constraints + m.z = visitor.z_vars + + visitor.walk_expression(e) + + self.assertIs(m.a.get_associated_binary(), m.z[1]) + self.assertEqual(len(m.z), 4) + self.assertEqual(len(m.cons), 10) + + # z[2] == !a v True + assertExpressionsEqual( + self, m.cons[1].expr, (1 - m.z[2]) + (1 - m.z[1]) + 1 >= 1 + ) + assertExpressionsEqual(self, m.cons[2].expr, 1 - (1 - m.z[1]) + m.z[2] >= 1) + assertExpressionsEqual(self, m.cons[3].expr, m.z[2] + (1 - 1) >= 1) + + # z[3] == a v ! 
c + assertExpressionsEqual(self, m.cons[4].expr, (1 - m.z[3]) + m.z[1] >= 1) + assertExpressionsEqual(self, m.cons[5].expr, m.z[3] + (1 - m.z[1]) >= 1) + assertExpressionsEqual(self, m.cons[6].expr, m.z[3] + 1 >= 1) + + # z[4] == z[2] ^ z[3] + assertExpressionsEqual(self, m.cons[7].expr, m.z[4] <= m.z[2]) + assertExpressionsEqual(self, m.cons[8].expr, m.z[4] <= m.z[3]) + assertExpressionsEqual( + self, m.cons[9].expr, 1 - m.z[4] <= 2 - (m.z[2] + m.z[3]) + ) + + assertExpressionsEqual(self, m.cons[10].expr, m.z[4] >= 1) + def test_xor(self): m = self.make_model() e = m.a.xor(m.b) @@ -263,8 +298,6 @@ def test_at_most(self): # z3 = a ^ b assertExpressionsEqual(self, m.cons[1].expr, m.z[3] <= a) assertExpressionsEqual(self, m.cons[2].expr, m.z[3] <= b) - m.cons.pprint() - print(m.cons[3].expr) assertExpressionsEqual(self, m.cons[3].expr, 1 - m.z[3] <= 2 - sum([a, b])) # atmost in disjunctive form diff --git a/pyomo/contrib/cp/transform/logical_to_disjunctive_walker.py b/pyomo/contrib/cp/transform/logical_to_disjunctive_walker.py index 09894b47090..95cbaf57fa5 100644 --- a/pyomo/contrib/cp/transform/logical_to_disjunctive_walker.py +++ b/pyomo/contrib/cp/transform/logical_to_disjunctive_walker.py @@ -51,14 +51,7 @@ def _dispatch_var(visitor, node): def _dispatch_param(visitor, node): - if int(value(node)) == value(node): - return False, node - else: - raise ValueError( - "Found non-integer valued Param '%s' in a logical " - "expression. This cannot be written to a disjunctive " - "form." % node.name - ) + return False, node def _dispatch_expression(visitor, node): @@ -244,6 +237,12 @@ def initializeWalker(self, expr): def beforeChild(self, node, child, child_idx): if child.__class__ in EXPR.native_types: + if child.__class__ is bool: + # If we encounter a bool, we are going to need to treat it as + # binary explicitly because we are finally pedantic enough in the + # expression system to not allow some of the mixing we will need + # (like summing a LinearExpression with a bool) + return False, int(child) return False, child if child.is_numeric_type(): diff --git a/pyomo/contrib/doe/doe.py b/pyomo/contrib/doe/doe.py index a120add4200..90818ddf622 100644 --- a/pyomo/contrib/doe/doe.py +++ b/pyomo/contrib/doe/doe.py @@ -97,7 +97,7 @@ def __init__( A Python ``function`` that returns a Concrete Pyomo model, similar to the interface for ``parmest`` solver: A ``solver`` object that User specified, default=None. - If not specified, default solver is IPOPT MA57. + If not specified, default solver is IPOPT (with MA57, if available). prior_FIM: A 2D numpy array containing Fisher information matrix (FIM) for prior experiments. The default None means there is no prior information. 
@@ -995,7 +995,7 @@ def run_grid_search( ) count += 1 failed_count += 1 - self.logger.warning("failed count:", failed_count) + self.logger.warning("failed count: %s", failed_count) result_combine[tuple(design_set_iter)] = None # For user's access @@ -1387,7 +1387,10 @@ def _fix_design(self, m, design_val, fix_opt=True, optimize_option=None): def _get_default_ipopt_solver(self): """Default solver""" solver = SolverFactory("ipopt") - solver.options["linear_solver"] = "ma57" + for linear_solver in ('ma57', 'ma27', 'ma97'): + if solver.has_linear_solver(linear_solver): + solver.options["linear_solver"] = linear_solver + break solver.options["halt_on_ampl_error"] = "yes" solver.options["max_iter"] = 3000 return solver diff --git a/pyomo/contrib/doe/tests/test_example.py b/pyomo/contrib/doe/tests/test_example.py index e4ffbe89142..47ce39d596a 100644 --- a/pyomo/contrib/doe/tests/test_example.py +++ b/pyomo/contrib/doe/tests/test_example.py @@ -39,6 +39,7 @@ from pyomo.opt import SolverFactory ipopt_available = SolverFactory("ipopt").available() +k_aug_available = SolverFactory("k_aug").available(exception_flag=False) class TestReactorExamples(unittest.TestCase): @@ -57,6 +58,7 @@ def test_reactor_optimize_doe(self): reactor_optimize_doe.main() + @unittest.skipIf(not k_aug_available, "The 'k_aug' command is not available") @unittest.skipIf(not ipopt_available, "The 'ipopt' command is not available") @unittest.skipIf(not pandas_available, "pandas is not available") @unittest.skipIf(not numpy_available, "Numpy is not available") diff --git a/pyomo/contrib/doe/tests/test_fim_doe.py b/pyomo/contrib/doe/tests/test_fim_doe.py index d9a8d60fdb4..9cae2fe6278 100644 --- a/pyomo/contrib/doe/tests/test_fim_doe.py +++ b/pyomo/contrib/doe/tests/test_fim_doe.py @@ -35,6 +35,9 @@ VariablesWithIndices, ) from pyomo.contrib.doe.examples.reactor_kinetics import create_model, disc_for_measure +from pyomo.environ import SolverFactory + +ipopt_available = SolverFactory("ipopt").available() class TestMeasurementError(unittest.TestCase): @@ -196,6 +199,7 @@ def test(self): @unittest.skipIf(not numpy_available, "Numpy is not available") +@unittest.skipIf(not ipopt_available, "ipopt is not available") class TestPriorFIMError(unittest.TestCase): def test(self): # Control time set [h] diff --git a/pyomo/contrib/doe/tests/test_reactor_example.py b/pyomo/contrib/doe/tests/test_reactor_example.py index 19fb4e61820..f88ae48db1a 100644 --- a/pyomo/contrib/doe/tests/test_reactor_example.py +++ b/pyomo/contrib/doe/tests/test_reactor_example.py @@ -35,6 +35,7 @@ from pyomo.opt import SolverFactory ipopt_available = SolverFactory("ipopt").available() +k_aug_available = SolverFactory("k_aug").available(exception_flag=False) class Test_Reaction_Kinetics_Example(unittest.TestCase): @@ -133,6 +134,7 @@ def test_kinetics_example_sequential_finite_then_optimize(self): # self.assertAlmostEqual(value(optimize_result.model.T[0.5]), 300, places=2) self.assertAlmostEqual(np.log10(optimize_result.trace), 3.340, places=2) + @unittest.skipIf(not k_aug_available, "The 'k_aug' solver is not available") @unittest.skipIf(not ipopt_available, "The 'ipopt' solver is not available") @unittest.skipIf(not numpy_available, "Numpy is not available") @unittest.skipIf(not pandas_available, "Pandas is not available") diff --git a/pyomo/contrib/parmest/examples/reactor_design/parameter_estimation_example.py b/pyomo/contrib/parmest/examples/reactor_design/parameter_estimation_example.py index a84a3fde5e7..d29cbfd4d49 100644 --- 
a/pyomo/contrib/parmest/examples/reactor_design/parameter_estimation_example.py +++ b/pyomo/contrib/parmest/examples/reactor_design/parameter_estimation_example.py @@ -39,3 +39,7 @@ def main(): obj, theta, cov = pest.theta_est(calc_cov=True, cov_n=17) print(obj) print(theta) + + +if __name__ == "__main__": + main() diff --git a/pyomo/contrib/parmest/tests/test_examples.py b/pyomo/contrib/parmest/tests/test_examples.py index dca05026e80..3b0c869affa 100644 --- a/pyomo/contrib/parmest/tests/test_examples.py +++ b/pyomo/contrib/parmest/tests/test_examples.py @@ -12,9 +12,11 @@ import pyomo.common.unittest as unittest import pyomo.contrib.parmest.parmest as parmest from pyomo.contrib.parmest.graphics import matplotlib_available, seaborn_available +from pyomo.contrib.pynumero.asl import AmplInterface from pyomo.opt import SolverFactory ipopt_available = SolverFactory("ipopt").available() +pynumero_ASL_available = AmplInterface.available() @unittest.skipIf( @@ -43,6 +45,7 @@ def test_model_with_constraint(self): rooney_biegler_with_constraint.main() + @unittest.skipUnless(pynumero_ASL_available, "test requires libpynumero_ASL") @unittest.skipUnless(seaborn_available, "test requires seaborn") def test_parameter_estimation_example(self): from pyomo.contrib.parmest.examples.rooney_biegler import ( @@ -66,11 +69,11 @@ def test_likelihood_ratio_example(self): likelihood_ratio_example.main() -@unittest.skipIf( - not parmest.parmest_available, - "Cannot test parmest: required dependencies are missing", +@unittest.skipUnless(pynumero_ASL_available, "test requires libpynumero_ASL") +@unittest.skipUnless(ipopt_available, "The 'ipopt' solver is not available") +@unittest.skipUnless( + parmest.parmest_available, "Cannot test parmest: required dependencies are missing" ) -@unittest.skipIf(not ipopt_available, "The 'ipopt' solver is not available") class TestReactionKineticsExamples(unittest.TestCase): @classmethod def setUpClass(self): @@ -140,6 +143,7 @@ def test_model(self): reactor_design.main() + @unittest.skipUnless(pynumero_ASL_available, "test requires libpynumero_ASL") def test_parameter_estimation_example(self): from pyomo.contrib.parmest.examples.reactor_design import ( parameter_estimation_example, diff --git a/pyomo/contrib/parmest/tests/test_parmest.py b/pyomo/contrib/parmest/tests/test_parmest.py index 65e2e4a3b06..52b7cd390e8 100644 --- a/pyomo/contrib/parmest/tests/test_parmest.py +++ b/pyomo/contrib/parmest/tests/test_parmest.py @@ -9,43 +9,29 @@ # This software is distributed under the 3-clause BSD License. 
# ___________________________________________________________________________ -from pyomo.common.dependencies import ( - numpy as np, - numpy_available, - pandas as pd, - pandas_available, - scipy, - scipy_available, - matplotlib, - matplotlib_available, -) - import platform - -is_osx = platform.mac_ver()[0] != "" - -import pyomo.common.unittest as unittest import sys import os import subprocess from itertools import product +import pyomo.common.unittest as unittest import pyomo.contrib.parmest.parmest as parmest import pyomo.contrib.parmest.graphics as graphics import pyomo.contrib.parmest as parmestbase -from pyomo.contrib.parmest.experiment import Experiment import pyomo.environ as pyo import pyomo.dae as dae +from pyomo.common.dependencies import numpy as np, pandas as pd, scipy, matplotlib +from pyomo.common.fileutils import this_file_dir +from pyomo.contrib.parmest.experiment import Experiment +from pyomo.contrib.pynumero.asl import AmplInterface from pyomo.opt import SolverFactory +is_osx = platform.mac_ver()[0] != "" ipopt_available = SolverFactory("ipopt").available() - -from pyomo.common.fileutils import find_library - -pynumero_ASL_available = False if find_library("pynumero_ASL") is None else True - -testdir = os.path.dirname(os.path.abspath(__file__)) +pynumero_ASL_available = AmplInterface.available() +testdir = this_file_dir() @unittest.skipIf( @@ -208,17 +194,7 @@ def test_parallel_parmest(self): retcode = subprocess.call(rlist) self.assertEqual(retcode, 0) - @unittest.skip("Most folks don't have k_aug installed") - def test_theta_k_aug_for_Hessian(self): - # this will fail if k_aug is not installed - objval, thetavals, Hessian = self.pest.theta_est(solver="k_aug") - self.assertAlmostEqual(objval, 4.4675, places=2) - - @unittest.skipIf(not pynumero_ASL_available, "pynumero ASL is not available") - @unittest.skipIf( - not parmest.inverse_reduced_hessian_available, - "Cannot test covariance matrix: required ASL dependency is missing", - ) + @unittest.skipIf(not pynumero_ASL_available, "pynumero_ASL is not available") def test_theta_est_cov(self): objval, thetavals, cov = self.pest.theta_est(calc_cov=True, cov_n=6) @@ -568,11 +544,7 @@ def SSE(model): }, } - @unittest.skipIf(not pynumero_ASL_available, "pynumero ASL is not available") - @unittest.skipIf( - not parmest.inverse_reduced_hessian_available, - "Cannot test covariance matrix: required ASL dependency is missing", - ) + @unittest.skipIf(not pynumero_ASL_available, "pynumero_ASL is not available") def check_rooney_biegler_results(self, objval, cov): # get indices in covariance matrix @@ -596,6 +568,7 @@ def check_rooney_biegler_results(self, objval, cov): cov.iloc[rate_constant_index, rate_constant_index], 0.04193591, places=2 ) # 0.04124 from paper + @unittest.skipUnless(pynumero_ASL_available, 'pynumero_ASL is not available') def test_parmest_basics(self): for model_type, parmest_input in self.input.items(): @@ -609,6 +582,7 @@ def test_parmest_basics(self): obj_at_theta = pest.objective_at_theta(parmest_input["theta_vals"]) self.assertAlmostEqual(obj_at_theta["obj"][0], 16.531953, places=2) + @unittest.skipUnless(pynumero_ASL_available, 'pynumero_ASL is not available') def test_parmest_basics_with_initialize_parmest_model_option(self): for model_type, parmest_input in self.input.items(): @@ -625,6 +599,7 @@ def test_parmest_basics_with_initialize_parmest_model_option(self): self.assertAlmostEqual(obj_at_theta["obj"][0], 16.531953, places=2) + @unittest.skipUnless(pynumero_ASL_available, 'pynumero_ASL is not 
available') def test_parmest_basics_with_square_problem_solve(self): for model_type, parmest_input in self.input.items(): @@ -641,6 +616,7 @@ def test_parmest_basics_with_square_problem_solve(self): self.assertAlmostEqual(obj_at_theta["obj"][0], 16.531953, places=2) + @unittest.skipUnless(pynumero_ASL_available, 'pynumero_ASL is not available') def test_parmest_basics_with_square_problem_solve_no_theta_vals(self): for model_type, parmest_input in self.input.items(): @@ -923,6 +899,7 @@ def test_return_continuous_set_multiple_datasets(self): self.assertAlmostEqual(return_vals1["time"].loc[1][18], 2.368, places=3) self.assertAlmostEqual(return_vals2["time"].loc[1][18], 2.368, places=3) + @unittest.skipUnless(pynumero_ASL_available, 'pynumero_ASL is not available') def test_covariance(self): from pyomo.contrib.interior_point.inverse_reduced_hessian import ( inv_reduced_hessian_barrier, @@ -1217,17 +1194,7 @@ def test_parallel_parmest(self): retcode = subprocess.call(rlist) assert retcode == 0 - @unittest.skip("Most folks don't have k_aug installed") - def test_theta_k_aug_for_Hessian(self): - # this will fail if k_aug is not installed - objval, thetavals, Hessian = self.pest.theta_est(solver="k_aug") - self.assertAlmostEqual(objval, 4.4675, places=2) - - @unittest.skipIf(not pynumero_ASL_available, "pynumero ASL is not available") - @unittest.skipIf( - not parmest.inverse_reduced_hessian_available, - "Cannot test covariance matrix: required ASL dependency is missing", - ) + @unittest.skipIf(not pynumero_ASL_available, "pynumero_ASL is not available") def test_theta_est_cov(self): objval, thetavals, cov = self.pest.theta_est(calc_cov=True, cov_n=6) @@ -1485,11 +1452,7 @@ def SSE(model, data): }, } - @unittest.skipIf(not pynumero_ASL_available, "pynumero ASL is not available") - @unittest.skipIf( - not parmest.inverse_reduced_hessian_available, - "Cannot test covariance matrix: required ASL dependency is missing", - ) + @unittest.skipIf(not pynumero_ASL_available, "pynumero_ASL is not available") def test_parmest_basics(self): for model_type, parmest_input in self.input.items(): pest = parmest.Estimator( @@ -1518,6 +1481,7 @@ def test_parmest_basics(self): obj_at_theta = pest.objective_at_theta(parmest_input["theta_vals"]) self.assertAlmostEqual(obj_at_theta["obj"][0], 16.531953, places=2) + @unittest.skipUnless(pynumero_ASL_available, 'pynumero_ASL is not available') def test_parmest_basics_with_initialize_parmest_model_option(self): for model_type, parmest_input in self.input.items(): pest = parmest.Estimator( @@ -1549,6 +1513,7 @@ def test_parmest_basics_with_initialize_parmest_model_option(self): self.assertAlmostEqual(obj_at_theta["obj"][0], 16.531953, places=2) + @unittest.skipUnless(pynumero_ASL_available, 'pynumero_ASL is not available') def test_parmest_basics_with_square_problem_solve(self): for model_type, parmest_input in self.input.items(): pest = parmest.Estimator( @@ -1580,6 +1545,7 @@ def test_parmest_basics_with_square_problem_solve(self): self.assertAlmostEqual(obj_at_theta["obj"][0], 16.531953, places=2) + @unittest.skipUnless(pynumero_ASL_available, 'pynumero_ASL is not available') def test_parmest_basics_with_square_problem_solve_no_theta_vals(self): for model_type, parmest_input in self.input.items(): pest = parmest.Estimator( @@ -1923,6 +1889,7 @@ def test_return_continuous_set_multiple_datasets(self): self.assertAlmostEqual(return_vals1["time"].loc[1][18], 2.368, places=3) self.assertAlmostEqual(return_vals2["time"].loc[1][18], 2.368, places=3) + 
@unittest.skipUnless(pynumero_ASL_available, 'pynumero_ASL is not available') def test_covariance(self): from pyomo.contrib.interior_point.inverse_reduced_hessian import ( inv_reduced_hessian_barrier, diff --git a/pyomo/contrib/piecewise/piecewise_linear_function.py b/pyomo/contrib/piecewise/piecewise_linear_function.py index e92edacc756..742eb52d5bb 100644 --- a/pyomo/contrib/piecewise/piecewise_linear_function.py +++ b/pyomo/contrib/piecewise/piecewise_linear_function.py @@ -506,8 +506,6 @@ class ScalarPiecewiseLinearFunction( PiecewiseLinearFunctionData, PiecewiseLinearFunction ): def __init__(self, *args, **kwds): - self._suppress_ctypes = set() - PiecewiseLinearFunctionData.__init__(self, self) PiecewiseLinearFunction.__init__(self, *args, **kwds) self._data[None] = self diff --git a/pyomo/contrib/solver/base.py b/pyomo/contrib/solver/base.py index 98bf3836004..4fe8bee4e53 100644 --- a/pyomo/contrib/solver/base.py +++ b/pyomo/contrib/solver/base.py @@ -353,15 +353,20 @@ def __init__(self, **kwargs): raise NotImplementedError('Still working on this') # There is no reason for a user to be trying to mix both old # and new options. That is silly. So we will yell at them. - self.options = kwargs.pop('options', None) + _options = kwargs.pop('options', None) if 'solver_options' in kwargs: - if self.options is not None: + if _options is not None: raise ValueError( "Both 'options' and 'solver_options' were requested. " "Please use one or the other, not both." ) - self.options = kwargs.pop('solver_options') + _options = kwargs.pop('solver_options') + if _options is not None: + kwargs['solver_options'] = _options super().__init__(**kwargs) + # Make the legacy 'options' attribute an alias of the new + # config.solver_options + self.options = self.config.solver_options # # Support "with" statements @@ -372,6 +377,14 @@ def __enter__(self): def __exit__(self, t, v, traceback): """Exit statement - enables `with` statements.""" + def __setattr__(self, attr, value): + # 'options' and 'config' are really singleton attributes. Map + # any assignment to set_value() + if attr in ('options', 'config') and attr in self.__dict__: + getattr(self, attr).set_value(value) + else: + super().__setattr__(attr, value) + def _map_config( self, tee=NOTSET, @@ -390,7 +403,6 @@ def _map_config( writer_config=NOTSET, ): """Map between legacy and new interface configuration options""" - self.config = self.config() if 'report_timing' not in self.config: self.config.declare( 'report_timing', ConfigValue(domain=bool, default=False) @@ -405,8 +417,6 @@ def _map_config( self.config.time_limit = timelimit if report_timing is not NOTSET: self.config.report_timing = report_timing - if self.options is not None: - self.config.solver_options.set_value(self.options) if (options is not NOTSET) and (solver_options is not NOTSET): # There is no reason for a user to be trying to mix both old # and new options. That is silly. So we will yell at them. 
diff --git a/pyomo/contrib/solver/ipopt.py b/pyomo/contrib/solver/ipopt.py index c88696f531b..c467d283d9b 100644 --- a/pyomo/contrib/solver/ipopt.py +++ b/pyomo/contrib/solver/ipopt.py @@ -238,6 +238,21 @@ def version(self, config=None): self._version_cache = (pth, version) return self._version_cache[1] + def has_linear_solver(self, linear_solver): + import pyomo.core as AML + + m = AML.ConcreteModel() + m.x = AML.Var() + m.o = AML.Objective(expr=(m.x - 2) ** 2) + results = self.solve( + m, + tee=False, + raise_exception_on_nonoptimal_result=False, + load_solutions=False, + solver_options={'linear_solver': linear_solver}, + ) + return 'running with linear solver' in results.solver_log + def _write_options_file(self, filename: str, options: Mapping): # First we need to determine if we even need to create a file. # If options is empty, then we return False diff --git a/pyomo/contrib/solver/tests/unit/test_base.py b/pyomo/contrib/solver/tests/unit/test_base.py index b52f96ba903..e9ea717593f 100644 --- a/pyomo/contrib/solver/tests/unit/test_base.py +++ b/pyomo/contrib/solver/tests/unit/test_base.py @@ -16,6 +16,10 @@ from pyomo.contrib.solver import base +class _LegacyWrappedSolverBase(base.LegacySolverWrapper, base.SolverBase): + pass + + class TestSolverBase(unittest.TestCase): def test_abstract_member_list(self): expected_list = ['solve', 'available', 'version'] @@ -192,11 +196,13 @@ def test_class_method_list(self): ] self.assertEqual(sorted(expected_list), sorted(method_list)) + @unittest.mock.patch.multiple(_LegacyWrappedSolverBase, __abstractmethods__=set()) def test_context_manager(self): - with base.LegacySolverWrapper() as instance: - with self.assertRaises(AttributeError): - instance.available() + with _LegacyWrappedSolverBase() as instance: + self.assertIsInstance(instance, _LegacyWrappedSolverBase) + self.assertFalse(instance.available(False)) + @unittest.mock.patch.multiple(_LegacyWrappedSolverBase, __abstractmethods__=set()) def test_map_config(self): # Create a fake/empty config structure that can be added to an empty # instance of LegacySolverWrapper @@ -205,7 +211,7 @@ def test_map_config(self): 'solver_options', ConfigDict(implicit=True, description="Options to pass to the solver."), ) - instance = base.LegacySolverWrapper() + instance = _LegacyWrappedSolverBase() instance.config = self.config instance._map_config( True, False, False, 20, True, False, None, None, None, False, None, None @@ -272,98 +278,81 @@ def test_map_config(self): with self.assertRaises(AttributeError): print(instance.config.keepfiles) + @unittest.mock.patch.multiple(_LegacyWrappedSolverBase, __abstractmethods__=set()) def test_solver_options_behavior(self): # options can work in multiple ways (set from instantiation, set # after instantiation, set during solve). 
# Test case 1: Set at instantiation - solver = base.LegacySolverWrapper(options={'max_iter': 6}) + solver = _LegacyWrappedSolverBase(options={'max_iter': 6}) self.assertEqual(solver.options, {'max_iter': 6}) + self.assertEqual(solver.config.solver_options, {'max_iter': 6}) # Test case 2: Set later - solver = base.LegacySolverWrapper() + solver = _LegacyWrappedSolverBase() solver.options = {'max_iter': 4, 'foo': 'bar'} self.assertEqual(solver.options, {'max_iter': 4, 'foo': 'bar'}) + self.assertEqual(solver.config.solver_options, {'max_iter': 4, 'foo': 'bar'}) # Test case 3: pass some options to the mapping (aka, 'solve' command) - solver = base.LegacySolverWrapper() - config = ConfigDict(implicit=True) - config.declare( - 'solver_options', - ConfigDict(implicit=True, description="Options to pass to the solver."), - ) - solver.config = config + solver = _LegacyWrappedSolverBase() solver._map_config(options={'max_iter': 4}) + self.assertEqual(solver.options, {'max_iter': 4}) self.assertEqual(solver.config.solver_options, {'max_iter': 4}) # Test case 4: Set at instantiation and override during 'solve' call - solver = base.LegacySolverWrapper(options={'max_iter': 6}) - config = ConfigDict(implicit=True) - config.declare( - 'solver_options', - ConfigDict(implicit=True, description="Options to pass to the solver."), - ) - solver.config = config + solver = _LegacyWrappedSolverBase(options={'max_iter': 6}) solver._map_config(options={'max_iter': 4}) + self.assertEqual(solver.options, {'max_iter': 4}) self.assertEqual(solver.config.solver_options, {'max_iter': 4}) - self.assertEqual(solver.options, {'max_iter': 6}) # solver_options are also supported # Test case 1: set at instantiation - solver = base.LegacySolverWrapper(solver_options={'max_iter': 6}) + solver = _LegacyWrappedSolverBase(solver_options={'max_iter': 6}) self.assertEqual(solver.options, {'max_iter': 6}) + self.assertEqual(solver.config.solver_options, {'max_iter': 6}) # Test case 2: pass some solver_options to the mapping (aka, 'solve' command) - solver = base.LegacySolverWrapper() - config = ConfigDict(implicit=True) - config.declare( - 'solver_options', - ConfigDict(implicit=True, description="Options to pass to the solver."), - ) - solver.config = config + solver = _LegacyWrappedSolverBase() solver._map_config(solver_options={'max_iter': 4}) + self.assertEqual(solver.options, {'max_iter': 4}) self.assertEqual(solver.config.solver_options, {'max_iter': 4}) # Test case 3: Set at instantiation and override during 'solve' call - solver = base.LegacySolverWrapper(solver_options={'max_iter': 6}) - config = ConfigDict(implicit=True) - config.declare( - 'solver_options', - ConfigDict(implicit=True, description="Options to pass to the solver."), - ) - solver.config = config + solver = _LegacyWrappedSolverBase(solver_options={'max_iter': 6}) solver._map_config(solver_options={'max_iter': 4}) + self.assertEqual(solver.options, {'max_iter': 4}) self.assertEqual(solver.config.solver_options, {'max_iter': 4}) - self.assertEqual(solver.options, {'max_iter': 6}) # users can mix... 
sort of # Test case 1: Initialize with options, solve with solver_options - solver = base.LegacySolverWrapper(options={'max_iter': 6}) - config = ConfigDict(implicit=True) - config.declare( - 'solver_options', - ConfigDict(implicit=True, description="Options to pass to the solver."), - ) - solver.config = config + solver = _LegacyWrappedSolverBase(options={'max_iter': 6}) solver._map_config(solver_options={'max_iter': 4}) + self.assertEqual(solver.options, {'max_iter': 4}) self.assertEqual(solver.config.solver_options, {'max_iter': 4}) # users CANNOT initialize both values at the same time, because how # do we know what to do with it then? # Test case 1: Class instance with self.assertRaises(ValueError): - solver = base.LegacySolverWrapper( + solver = _LegacyWrappedSolverBase( options={'max_iter': 6}, solver_options={'max_iter': 4} ) # Test case 2: Passing to `solve` - solver = base.LegacySolverWrapper() + solver = _LegacyWrappedSolverBase() + with self.assertRaises(ValueError): + solver._map_config(solver_options={'max_iter': 4}, options={'max_iter': 6}) + + # Test that assignment to maps to set_value: + solver = _LegacyWrappedSolverBase() config = ConfigDict(implicit=True) config.declare( 'solver_options', ConfigDict(implicit=True, description="Options to pass to the solver."), ) solver.config = config - with self.assertRaises(ValueError): - solver._map_config(solver_options={'max_iter': 4}, options={'max_iter': 6}) + solver.config.solver_options.max_iter = 6 + self.assertEqual(solver.options, {'max_iter': 6}) + self.assertEqual(solver.config.solver_options, {'max_iter': 6}) def test_map_results(self): # Unclear how to test this diff --git a/pyomo/contrib/solver/tests/unit/test_ipopt.py b/pyomo/contrib/solver/tests/unit/test_ipopt.py index cc459245506..27a80feede0 100644 --- a/pyomo/contrib/solver/tests/unit/test_ipopt.py +++ b/pyomo/contrib/solver/tests/unit/test_ipopt.py @@ -84,6 +84,7 @@ def test_class_member_list(self): 'CONFIG', 'config', 'available', + 'has_linear_solver', 'is_persistent', 'solve', 'version', @@ -167,6 +168,29 @@ def test_write_options_file(self): data = f.readlines() self.assertEqual(len(data), len(list(opt.config.solver_options.keys()))) + def test_has_linear_solver(self): + opt = ipopt.Ipopt() + self.assertTrue( + any( + map( + opt.has_linear_solver, + [ + 'mumps', + 'ma27', + 'ma57', + 'ma77', + 'ma86', + 'ma97', + 'pardiso', + 'pardisomkl', + 'spral', + 'wsmp', + ], + ) + ) + ) + self.assertFalse(opt.has_linear_solver('bogus_linear_solver')) + def test_create_command_line(self): opt = ipopt.Ipopt() # No custom options, no file created. Plain and simple. 
diff --git a/pyomo/contrib/trustregion/tests/test_interface.py b/pyomo/contrib/trustregion/tests/test_interface.py index 0922ccf950b..d241576f3ba 100644 --- a/pyomo/contrib/trustregion/tests/test_interface.py +++ b/pyomo/contrib/trustregion/tests/test_interface.py @@ -234,7 +234,7 @@ def test_updateSurrogateModel(self): for key, val in self.interface.data.grad_basis_model_output.items(): self.assertEqual(value(val), 0) for key, val in self.interface.data.truth_model_output.items(): - self.assertEqual(value(val), 0.8414709848078965) + self.assertAlmostEqual(value(val), 0.8414709848078965) # The truth gradients should equal the output of [cos(2-1), -cos(2-1)] truth_grads = [] for key, val in self.interface.data.grad_truth_model_output.items(): @@ -332,7 +332,7 @@ def test_calculateFeasibility(self): # Check after a solve is completed self.interface.data.basis_constraint.activate() objective, step_norm, feasibility = self.interface.solveModel() - self.assertEqual(feasibility, 0.09569982275514467) + self.assertAlmostEqual(feasibility, 0.09569982275514467) self.interface.data.basis_constraint.deactivate() @unittest.skipIf( @@ -361,7 +361,7 @@ def test_calculateStepSizeInfNorm(self): # Check after a solve is completed self.interface.data.basis_constraint.activate() objective, step_norm, feasibility = self.interface.solveModel() - self.assertEqual(step_norm, 3.393437471478297) + self.assertAlmostEqual(step_norm, 3.393437471478297) self.interface.data.basis_constraint.deactivate() @unittest.skipIf( diff --git a/pyomo/core/base/block.py b/pyomo/core/base/block.py index 653809e0419..526c4c8bb41 100644 --- a/pyomo/core/base/block.py +++ b/pyomo/core/base/block.py @@ -960,13 +960,8 @@ def add_component(self, name, val): % (name, type(val), self.name, type(getattr(self, name))) ) # - # Skip the add_component() logic if this is a - # component type that is suppressed. - # _component = self.parent_component() _type = val.ctype - if _type in _component._suppress_ctypes: - return # # Raise an exception if the component already has a parent. # @@ -1048,12 +1043,6 @@ def add_component(self, name, val): else: self._ctypes[_type] = [_new_idx, _new_idx, 1] # - # Propagate properties to sub-blocks: - # suppressed ctypes - # - if _type is Block: - val._suppress_ctypes |= _component._suppress_ctypes - # # Error, for disabled support implicit rule names # if '_rule' in val.__dict__ and val._rule is None: @@ -2029,7 +2018,6 @@ def __init__( def __init__(self, *args, **kwargs): """Constructor""" - self._suppress_ctypes = set() _rule = kwargs.pop('rule', None) _options = kwargs.pop('options', None) # As concrete applies to the Block at declaration time, we will diff --git a/pyomo/core/base/constraint.py b/pyomo/core/base/constraint.py index bc9a32f5404..f0e020bcfd0 100644 --- a/pyomo/core/base/constraint.py +++ b/pyomo/core/base/constraint.py @@ -179,7 +179,7 @@ def __call__(self, exception=True): body = value(self.body, exception=exception) return body - def to_bounded_expression(self): + def to_bounded_expression(self, evaluate_bounds=False): """Convert this constraint to a tuple of 3 expressions (lb, body, ub) This method "standardizes" the expression into a 3-tuple of @@ -195,6 +195,13 @@ def to_bounded_expression(self): extension, the result) can change after fixing / unfixing :py:class:`Var` objects. + Parameters + ---------- + evaluate_bounds: bool + + If True, then the lower and upper bounds will be evaluated + to a finite numeric constant or None. 
+ Raises ------ @@ -226,15 +233,36 @@ def to_bounded_expression(self): "variable upper bound. Cannot normalize the " "constraint or send it to a solver." ) - return ans - elif expr is not None: + elif expr is None: + ans = None, None, None + else: lhs, rhs = expr.args if rhs.__class__ in native_types or not rhs.is_potentially_variable(): - return rhs if expr.__class__ is EqualityExpression else None, lhs, rhs - if lhs.__class__ in native_types or not lhs.is_potentially_variable(): - return lhs, rhs, lhs if expr.__class__ is EqualityExpression else None - return 0 if expr.__class__ is EqualityExpression else None, lhs - rhs, 0 - return None, None, None + ans = rhs if expr.__class__ is EqualityExpression else None, lhs, rhs + elif lhs.__class__ in native_types or not lhs.is_potentially_variable(): + ans = lhs, rhs, lhs if expr.__class__ is EqualityExpression else None + else: + ans = 0 if expr.__class__ is EqualityExpression else None, lhs - rhs, 0 + + if evaluate_bounds: + lb, body, ub = ans + return self._evaluate_bound(lb, True), body, self._evaluate_bound(ub, False) + return ans + + def _evaluate_bound(self, bound, is_lb): + if bound is None: + return None + if bound.__class__ not in native_numeric_types: + bound = float(value(bound)) + # Note that "bound != bound" catches float('nan') + if bound in _nonfinite_values or bound != bound: + if bound == (-_inf if is_lb else _inf): + return None + raise ValueError( + f"Constraint '{self.name}' created with an invalid non-finite " + f"{'lower' if is_lb else 'upper'} bound ({bound})." + ) + return bound @property def body(self): @@ -291,38 +319,12 @@ def upper(self): @property def lb(self): """Access the value of the lower bound of a constraint expression.""" - bound = self.to_bounded_expression()[0] - if bound is None: - return None - if bound.__class__ not in native_numeric_types: - bound = float(value(bound)) - # Note that "bound != bound" catches float('nan') - if bound in _nonfinite_values or bound != bound: - if bound == -_inf: - return None - raise ValueError( - f"Constraint '{self.name}' created with an invalid non-finite " - f"lower bound ({bound})." - ) - return bound + return self._evaluate_bound(self.to_bounded_expression()[0], True) @property def ub(self): """Access the value of the upper bound of a constraint expression.""" - bound = self.to_bounded_expression()[2] - if bound is None: - return None - if bound.__class__ not in native_numeric_types: - bound = float(value(bound)) - # Note that "bound != bound" catches float('nan') - if bound in _nonfinite_values or bound != bound: - if bound == _inf: - return None - raise ValueError( - f"Constraint '{self.name}' created with an invalid non-finite " - f"upper bound ({bound})." 
- ) - return bound + return self._evaluate_bound(self.to_bounded_expression()[2], False) @property def equality(self): diff --git a/pyomo/core/base/set.py b/pyomo/core/base/set.py index 049775fd9dd..e4a6d13e96e 100644 --- a/pyomo/core/base/set.py +++ b/pyomo/core/base/set.py @@ -16,15 +16,18 @@ import math import sys import weakref -from pyomo.common.pyomo_typing import overload -from typing import Union, Type, Any as typingAny + from collections.abc import Iterator +from functools import partial +from typing import Union, Type, Any as typingAny +from pyomo.common.autoslots import AutoSlots from pyomo.common.collections import ComponentSet from pyomo.common.deprecation import deprecated, deprecation_warning, RenamedClass from pyomo.common.errors import DeveloperError, PyomoException from pyomo.common.log import is_debug_set from pyomo.common.modeling import NOTSET +from pyomo.common.pyomo_typing import overload from pyomo.common.sorting import sorted_robust from pyomo.common.timing import ConstructionTimer @@ -478,9 +481,7 @@ def __call__(self, parent, index): if not isinstance(_val, Sequence): _val = tuple(_val) - if len(_val) == 0: - return _val - if isinstance(_val[0], tuple): + if not _val or isinstance(_val[0], tuple): return _val return self._tuplize(_val, parent, index) @@ -501,7 +502,7 @@ def _tuplize(self, _val, parent, index): "length %s is not a multiple of dimen=%s" % (len(_val), d) ) - return list(tuple(_val[d * i : d * (i + 1)]) for i in range(len(_val) // d)) + return (tuple(_val[i : i + d]) for i in range(0, len(_val), d)) class _NotFound(object): @@ -1364,87 +1365,12 @@ def filter(self): return self._filter def add(self, *values): - count = 0 - _block = self.parent_block() - for value in values: - if normalize_index.flatten: - _value = normalize_index(value) - if _value.__class__ is tuple: - _d = len(_value) - else: - _d = 1 - else: - # If we are not normalizing indices, then we cannot reliably - # infer the set dimen - _d = 1 - if isinstance(value, Sequence) and self.dimen != 1: - _d = len(value) - _value = value - if _value not in self._domain: - raise ValueError( - "Cannot add value %s to Set %s.\n" - "\tThe value is not in the domain %s" - % (value, self.name, self._domain) - ) - - # We wrap this check in a try-except because some values - # (like lists) are not hashable and can raise exceptions. - try: - if _value in self: - logger.warning( - "Element %s already exists in Set %s; no action taken" - % (value, self.name) - ) - continue - except: - exc = sys.exc_info() - raise TypeError( - "Unable to insert '%s' into Set %s:\n\t%s: %s" - % (value, self.name, exc[0].__name__, exc[1]) - ) + N = len(self) + self.update(values) + return len(self) - N - if self._filter is not None: - if not self._filter(_block, _value): - continue - - if self._validate is not None: - try: - flag = self._validate(_block, _value) - except: - logger.error( - "Exception raised while validating element '%s' " - "for Set %s" % (value, self.name) - ) - raise - if not flag: - raise ValueError( - "The value=%s violates the validation rule of Set %s" - % (value, self.name) - ) - - # If the Set has a fixed dimension, check that this element is - # compatible. 
- if self._dimen is not None: - if _d != self._dimen: - if self._dimen is UnknownSetDimen: - # The first thing added to a Set with unknown - # dimension sets its dimension - self._dimen = _d - else: - raise ValueError( - "The value=%s has dimension %s and is not " - "valid for Set %s which has dimen=%s" - % (value, _d, self.name, self._dimen) - ) - - # Add the value to this object (this last redirection allows - # derived classes to implement a different storage mechanism) - self._add_impl(_value) - count += 1 - return count - - def _add_impl(self, value): - self._values.add(value) + def _update_impl(self, values): + self._values.update(values) def remove(self, val): self._values.remove(val) @@ -1457,17 +1383,147 @@ def clear(self): def set_value(self, val): self.clear() - for x in val: - self.add(x) + self.update(val) + + def _initialize(self, val): + try: + # We want to explicitly call the update() on *this class* to + # bypass potential double logging of the use of unordered + # data with ordered Sets + FiniteSetData.update(self, val) + except TypeError as e: + if 'not iterable' in str(e): + logger.error( + "Initializer for Set %s returned non-iterable object " + "of type %s." + % ( + self.name, + (val if val.__class__ is type else type(val).__name__), + ) + ) + raise def update(self, values): - for v in values: - if v not in self: - self.add(v) + # Special case: set operations that are not first attached + # to the model must be constructed. + if isinstance(values, SetOperator): + values.construct() + # It is important that val_iter is an actual iterator + val_iter = iter(values) + if self._dimen is not None: + if normalize_index.flatten: + val_iter = self._cb_normalized_dimen_verifier(self._dimen, val_iter) + else: + val_iter = self._cb_raw_dimen_verifier(self._dimen, val_iter) + elif normalize_index.flatten: + val_iter = map(normalize_index, val_iter) + else: + val_iter = self._cb_check_set_end(val_iter) + + if self._domain is not Any: + val_iter = self._cb_domain_verifier(self._domain, val_iter) + + if self._filter is not None: + val_iter = filter(partial(self._filter, self.parent_block()), val_iter) + + if self._validate is not None: + val_iter = self._cb_validate(self._validate, self.parent_block(), val_iter) + + # We wrap this check in a try-except because some values + # (like lists) are not hashable and can raise exceptions. 
+ try: + self._update_impl(val_iter) + except Set._SetEndException: + pass def pop(self): return self._values.pop() + def _cb_domain_verifier(self, domain, val_iter): + for value in val_iter: + if value not in domain: + raise ValueError( + "Cannot add value %s to Set %s.\n" + "\tThe value is not in the domain %s" + % (value, self.name, self._domain) + ) + yield value + + def _cb_check_set_end(self, val_iter): + for value in val_iter: + if value is Set.End: + return + yield value + + def _cb_validate(self, validate, block, val_iter): + for value in val_iter: + try: + flag = validate(block, value) + except: + logger.error( + "Exception raised while validating element '%s' " + "for Set %s" % (value, self.name) + ) + raise + if not flag: + raise ValueError( + "The value=%s violates the validation rule of Set %s" + % (value, self.name) + ) + yield value + + def _cb_normalized_dimen_verifier(self, dimen, val_iter): + for value in val_iter: + if value.__class__ in native_types: + if dimen == 1: + yield value + continue + normalized_value = value + else: + normalized_value = normalize_index(value) + # Note: normalize_index() will never return a 1-tuple + if normalized_value.__class__ is tuple: + if dimen == len(normalized_value): + yield normalized_value[0] if dimen == 1 else normalized_value + continue + + _d = len(normalized_value) if normalized_value.__class__ is tuple else 1 + if _d == dimen: + yield normalized_value + elif dimen is UnknownSetDimen: + # The first thing added to a Set with unknown dimension + # sets its dimension + self._dimen = dimen = _d + yield normalized_value + else: + raise ValueError( + "The value=%s has dimension %s and is not " + "valid for Set %s which has dimen=%s" + % (value, _d, self.name, self._dimen) + ) + + def _cb_raw_dimen_verifier(self, dimen, val_iter): + for value in val_iter: + if isinstance(value, Sequence): + if dimen == len(value): + yield value + continue + elif dimen == 1: + yield value + continue + _d = len(value) if isinstance(value, Sequence) else 1 + if dimen is UnknownSetDimen: + # The first thing added to a Set with unknown dimension + # sets its dimension + self._dimen = dimen = _d + yield value + else: + raise ValueError( + "The value=%s has dimension %s and is not " + "valid for Set %s which has dimen=%s" + % (value, _d, self.name, self._dimen) + ) + class _FiniteSetData(metaclass=RenamedClass): __renamed__new_class__ = FiniteSetData @@ -1545,10 +1601,16 @@ def ordered_iter(self): return iter(self) def first(self): - return self.at(1) + try: + return next(iter(self)) + except StopIteration: + raise IndexError(f"{self.name} index out of range") from None def last(self): - return self.at(len(self)) + try: + return next(reversed(self)) + except StopIteration: + raise IndexError(f"{self.name} index out of range") from None def next(self, item, step=1): """ @@ -1655,27 +1717,30 @@ class OrderedSetData(_OrderedSetMixin, FiniteSetData): def __init__(self, component): self._values = {} - self._ordered_values = [] + self._ordered_values = None FiniteSetData.__init__(self, component=component) def _iter_impl(self): """ Return an iterator for the set. 
""" - return iter(self._ordered_values) + return iter(self._values) def __reversed__(self): - return reversed(self._ordered_values) + return reversed(self._values) - def _add_impl(self, value): - self._values[value] = len(self._values) - self._ordered_values.append(value) + def _update_impl(self, values): + for val in values: + # Note that we reset _ordered_values within the loop because + # of an old example where the initializer rule makes + # reference to values previously inserted into the Set + # (which triggered the creation of the _ordered_values) + self._ordered_values = None + self._values[val] = None def remove(self, val): - idx = self._values.pop(val) - self._ordered_values.pop(idx) - for i in range(idx, len(self._ordered_values)): - self._values[self._ordered_values[i]] -= 1 + self._values.pop(val) + self._ordered_values = None def discard(self, val): try: @@ -1685,15 +1750,15 @@ def discard(self, val): def clear(self): self._values.clear() - self._ordered_values = [] + self._ordered_values = None def pop(self): try: ans = self.last() except IndexError: - # Map the index error to a KeyError for consistency with - # set().pop() - raise KeyError('pop from an empty set') + # Map the exception for iterating over an empty dict to a + # KeyError for consistency with set().pop() + raise KeyError('pop from an empty set') from None self.discard(ans) return ans @@ -1704,6 +1769,8 @@ def at(self, index): The public Set API is 1-based, even though the internal _lookup and _values are (pythonically) 0-based. """ + if self._ordered_values is None: + self._rebuild_ordered_values() i = self._to_0_based_index(index) try: return self._ordered_values[i] @@ -1723,6 +1790,8 @@ def ord(self, item): # when they are actually put as Set members. So, we will look # for the exact thing that the user sent us and then fall back # on the scalar. + if self._ordered_values is None: + self._rebuild_ordered_values() try: return self._values[item] + 1 except KeyError: @@ -1733,6 +1802,12 @@ def ord(self, item): except KeyError: raise ValueError("%s.ord(x): x not in %s" % (self.name, self.name)) + def _rebuild_ordered_values(self): + _set = self._values + self._ordered_values = list(_set) + for i, v in enumerate(self._ordered_values): + _set[v] = i + class _OrderedSetData(metaclass=RenamedClass): __renamed__new_class__ = OrderedSetData @@ -1752,6 +1827,16 @@ class InsertionOrderSetData(OrderedSetData): __slots__ = () + def _initialize(self, val): + if type(val) in Set._UnorderedInitializers: + logger.warning( + "Initializing ordered Set %s with " + "a fundamentally unordered data source (type: %s). 
" + "This WILL potentially lead to nondeterministic behavior " + "in Pyomo" % (self.name, type(val).__name__) + ) + super()._initialize(val) + def set_value(self, val): if type(val) in Set._UnorderedInitializers: logger.warning( @@ -1760,7 +1845,8 @@ def set_value(self, val): "This WILL potentially lead to nondeterministic behavior " "in Pyomo" % (type(val).__name__,) ) - super(InsertionOrderSetData, self).set_value(val) + self.clear() + super().update(val) def update(self, values): if type(values) in Set._UnorderedInitializers: @@ -1770,7 +1856,7 @@ def update(self, values): "This WILL potentially lead to nondeterministic behavior " "in Pyomo" % (type(values).__name__,) ) - super(InsertionOrderSetData, self).update(values) + super().update(values) class _InsertionOrderSetData(metaclass=RenamedClass): @@ -1800,73 +1886,42 @@ class SortedSetData(_SortedSetMixin, OrderedSetData): Public Class Attributes: """ - __slots__ = ('_is_sorted',) - - def __init__(self, component): - # An empty set is sorted... - self._is_sorted = True - OrderedSetData.__init__(self, component=component) + __slots__ = () def _iter_impl(self): """ Return an iterator for the set. """ - if not self._is_sorted: - self._sort() - return super(SortedSetData, self)._iter_impl() + if self._ordered_values is None: + self._rebuild_ordered_values() + return iter(self._ordered_values) def __reversed__(self): - if not self._is_sorted: - self._sort() - return super(SortedSetData, self).__reversed__() + if self._ordered_values is None: + self._rebuild_ordered_values() + return reversed(self._ordered_values) - def _add_impl(self, value): - # Note that the sorted status has no bearing on insertion, - # so there is no reason to check if the data is correctly sorted - self._values[value] = len(self._values) - self._ordered_values.append(value) - self._is_sorted = False + def _update_impl(self, values): + for val in values: + # Note that we reset _ordered_values within the loop because + # of an old example where the initializer rule makes + # reference to values previously inserted into the Set + # (which triggered the creation of the _ordered_values) + self._ordered_values = None + self._values[val] = None # Note: removing data does not affect the sorted flag # def remove(self, val): # def discard(self, val): - def clear(self): - super(SortedSetData, self).clear() - self._is_sorted = True - - def at(self, index): - """ - Return the specified member of the set. - - The public Set API is 1-based, even though the - internal _lookup and _values are (pythonically) 0-based. - """ - if not self._is_sorted: - self._sort() - return super(SortedSetData, self).at(index) - - def ord(self, item): - """ - Return the position index of the input value. - - Note that Pyomo Set objects have positions starting at 1 (not 0). - - If the search item is not in the Set, then an IndexError is raised. 
- """ - if not self._is_sorted: - self._sort() - return super(SortedSetData, self).ord(item) - def sorted_data(self): return self.data() - def _sort(self): - self._ordered_values = list( - self.parent_component()._sort_fcn(self._ordered_values) - ) - self._values = {j: i for i, j in enumerate(self._ordered_values)} - self._is_sorted = True + def _rebuild_ordered_values(self): + _set = self._values + self._ordered_values = list(self.parent_component()._sort_fcn(_set)) + for i, v in enumerate(self._ordered_values): + _set[v] = i class _SortedSetData(metaclass=RenamedClass): @@ -1975,10 +2030,14 @@ class Set(IndexedComponent): """ - class End(object): + class _SetEndException(Exception): pass - class Skip(object): + class _SetEndType(type): + def __hash__(self): + raise Set._SetEndException() + + class End(metaclass=_SetEndType): pass class InsertionOrder(object): @@ -2223,21 +2282,6 @@ def _getitem_when_not_present(self, index): if _d is UnknownSetDimen and domain is not None and domain.dimen is not None: _d = domain.dimen - if self._init_values is not None: - self._init_values._dimen = _d - try: - _values = self._init_values(_block, index) - except TuplizeError as e: - raise ValueError( - str(e) % (self._name, "[%s]" % index if self.is_indexed() else "") - ) - - if _values is Set.Skip: - return - elif _values is None: - raise ValueError( - "Set rule or initializer returned None instead of Set.Skip" - ) if index is None and not self.is_indexed(): obj = self._data[index] = self else: @@ -2259,55 +2303,35 @@ def _getitem_when_not_present(self, index): obj._validate = self._init_validate if self._init_filter is not None: try: - _filter = Initializer(self._init_filter(_block, index)) - if _filter.constant(): + obj._filter = Initializer(self._init_filter(_block, index)) + if obj._filter.constant(): # _init_filter was the actual filter function; use it. - _filter = self._init_filter + obj._filter = self._init_filter except: # We will assume any exceptions raised when getting the # filter for this index indicate that the function # should have been passed directly to the underlying sets. - _filter = self._init_filter + obj._filter = self._init_filter else: - _filter = None + obj._filter = None if self._init_values is not None: - # _values was initialized above... - if obj.isordered() and type(_values) in Set._UnorderedInitializers: - logger.warning( - "Initializing ordered Set %s with a fundamentally " - "unordered data source (type: %s). This WILL potentially " - "lead to nondeterministic behavior in Pyomo" - % (self.name, type(_values).__name__) - ) - # Special case: set operations that are not first attached - # to the model must be constructed. - if isinstance(_values, SetOperator): - _values.construct() + # record the user-provided dimen in the initializer + self._init_values._dimen = _d try: - val_iter = iter(_values) - except TypeError: - logger.error( - "Initializer for Set %s%s returned non-iterable object " - "of type %s." - % ( - self.name, - ("[%s]" % (index,) if self.is_indexed() else ""), - ( - _values - if _values.__class__ is type - else type(_values).__name__ - ), - ) + _values = self._init_values(_block, index) + except TuplizeError as e: + raise ValueError( + str(e) % (self._name, "[%s]" % index if self.is_indexed() else "") ) - raise - for val in val_iter: - if val is Set.End: - break - if _filter is None or _filter(_block, val): - obj.add(val) - # We defer adding the filter until now so that add() doesn't - # call it a second time. 
- obj._filter = _filter + if _values is Set.Skip: + del self._data[index] + return + elif _values is None: + raise ValueError( + "Set rule or initializer returned None instead of Set.Skip" + ) + + obj._initialize(_values) return obj @staticmethod diff --git a/pyomo/core/base/suffix.py b/pyomo/core/base/suffix.py index be2f732650d..19dbe48f650 100644 --- a/pyomo/core/base/suffix.py +++ b/pyomo/core/base/suffix.py @@ -19,6 +19,7 @@ from pyomo.common.modeling import NOTSET from pyomo.common.pyomo_typing import overload from pyomo.common.timing import ConstructionTimer +from pyomo.core.base.block import BlockData from pyomo.core.base.component import ActiveComponent, ModelComponentFactory from pyomo.core.base.disable_methods import disable_methods from pyomo.core.base.initializer import Initializer @@ -409,7 +410,7 @@ class AbstractSuffix(Suffix): class SuffixFinder(object): - def __init__(self, name, default=None): + def __init__(self, name, default=None, context=None): """This provides an efficient utility for finding suffix values on a (hierarchical) Pyomo model. @@ -424,11 +425,26 @@ def __init__(self, name, default=None): Default value to return from `.find()` if no matching Suffix is found. + context: BlockData + + The root of the Block hierarchy to use when searching for + Suffix components. Suffixes outside this hierarchy will not + be interrogated and components that are queried (with + :py:meth:`find(component_data)` will return the default + value. + """ self.name = name self.default = default self.all_suffixes = [] - self._suffixes_by_block = {None: []} + self._context = context + self._suffixes_by_block = ComponentMap() + self._suffixes_by_block[self._context] = [] + if context is not None: + s = context.component(name) + if s is not None and s.ctype is Suffix and s.active: + self._suffixes_by_block[context].append(s) + self.all_suffixes.append(s) def find(self, component_data): """Find suffix value for a given component data object in model tree @@ -458,7 +474,17 @@ def find(self, component_data): """ # Walk parent tree and search for suffixes - suffixes = self._get_suffix_list(component_data.parent_block()) + if isinstance(component_data, BlockData): + _block = component_data + else: + _block = component_data.parent_block() + try: + suffixes = self._get_suffix_list(_block) + except AttributeError: + # Component was outside the context (eventually parent + # becomes None and parent.parent_block() raises an + # AttributeError): we will return the default value + return self.default # Pass 1: look for the component_data, working root to leaf for s in suffixes: if component_data in s: diff --git a/pyomo/core/kernel/constraint.py b/pyomo/core/kernel/constraint.py index fe8eb8b2c1f..ed877e8af92 100644 --- a/pyomo/core/kernel/constraint.py +++ b/pyomo/core/kernel/constraint.py @@ -160,6 +160,17 @@ def has_ub(self): ub = self.ub return (ub is not None) and (value(ub) != float('inf')) + def to_bounded_expression(self, evaluate_bounds=False): + if evaluate_bounds: + lb = self.lb + if lb == -float('inf'): + lb = None + ub = self.ub + if ub == float('inf'): + ub = None + return lb, self.body, ub + return self.lower, self.body, self.upper + class _MutableBoundsConstraintMixin(object): """ @@ -177,9 +188,6 @@ class _MutableBoundsConstraintMixin(object): # Define some of the IConstraint abstract methods # - def to_bounded_expression(self): - return self.lower, self.body, self.upper - @property def lower(self): """The expression for the lower bound of the constraint""" diff --git 
a/pyomo/core/plugins/transform/scaling.py b/pyomo/core/plugins/transform/scaling.py index 11d4ac8c493..0835e5fd060 100644 --- a/pyomo/core/plugins/transform/scaling.py +++ b/pyomo/core/plugins/transform/scaling.py @@ -82,15 +82,10 @@ def _create_using(self, original_model, **kwds): self._apply_to(scaled_model, **kwds) return scaled_model - def _get_float_scaling_factor(self, component): - if self._suffix_finder is None: - self._suffix_finder = SuffixFinder('scaling_factor', 1.0) - return self._suffix_finder.find(component) - def _apply_to(self, model, rename=True): # create a map of component to scaling factor component_scaling_factor_map = ComponentMap() - self._suffix_finder = SuffixFinder('scaling_factor', 1.0) + self._suffix_finder = SuffixFinder('scaling_factor', 1.0, model) # if the scaling_method is 'user', get the scaling parameters from the suffixes if self._scaling_method == 'user': diff --git a/pyomo/core/tests/transform/test_scaling.py b/pyomo/core/tests/transform/test_scaling.py index d0fbfab61bd..7168f6bb707 100644 --- a/pyomo/core/tests/transform/test_scaling.py +++ b/pyomo/core/tests/transform/test_scaling.py @@ -13,7 +13,7 @@ import pyomo.common.unittest as unittest import pyomo.environ as pyo from pyomo.opt.base.solvers import UnknownSolver -from pyomo.core.plugins.transform.scaling import ScaleModel +from pyomo.core.plugins.transform.scaling import ScaleModel, SuffixFinder class TestScaleModelTransformation(unittest.TestCase): @@ -600,6 +600,13 @@ def con_rule(m, i): self.assertAlmostEqual(pyo.value(model.zcon), -8, 4) def test_get_float_scaling_factor_top_level(self): + # Note: the transformation used to have a private method for + # finding suffix values (which this method tested). The + # transformation now leverages the SuffixFinder. To ensure that + # the SuffixFinder behaves in the same way as the original local + # method, we preserve these tests, but directly test the + # SuffixFinder + m = pyo.ConcreteModel() m.scaling_factor = pyo.Suffix(direction=pyo.Suffix.EXPORT) @@ -616,17 +623,23 @@ def test_get_float_scaling_factor_top_level(self): m.scaling_factor[m.v1] = 0.1 m.scaling_factor[m.b1.v2] = 0.2 + _finder = SuffixFinder('scaling_factor', 1.0, m) + # SF should be 0.1 from top level - sf = ScaleModel()._get_float_scaling_factor(m.v1) - assert sf == float(0.1) + self.assertEqual(_finder.find(m.v1), 0.1) # SF should be 0.1 from top level, lower level ignored - sf = ScaleModel()._get_float_scaling_factor(m.b1.v2) - assert sf == float(0.2) + self.assertEqual(_finder.find(m.b1.v2), 0.2) # No SF, should return 1 - sf = ScaleModel()._get_float_scaling_factor(m.b1.b2.v3) - assert sf == 1.0 + self.assertEqual(_finder.find(m.b1.b2.v3), 1.0) def test_get_float_scaling_factor_local_level(self): + # Note: the transformation used to have a private method for + # finding suffix values (which this method tested). The + # transformation now leverages the SuffixFinder. 
To ensure that + # the SuffixFinder behaves in the same way as the original local + # method, we preserve these tests, but directly test the + # SuffixFinder + m = pyo.ConcreteModel() m.scaling_factor = pyo.Suffix(direction=pyo.Suffix.EXPORT) @@ -647,15 +660,21 @@ def test_get_float_scaling_factor_local_level(self): # Add an intermediate scaling factor - this should take priority m.b1.scaling_factor[m.b1.b2.v3] = 0.4 + _finder = SuffixFinder('scaling_factor', 1.0, m) + # Should get SF from local levels - sf = ScaleModel()._get_float_scaling_factor(m.v1) - assert sf == float(0.1) - sf = ScaleModel()._get_float_scaling_factor(m.b1.v2) - assert sf == float(0.2) - sf = ScaleModel()._get_float_scaling_factor(m.b1.b2.v3) - assert sf == float(0.4) + self.assertEqual(_finder.find(m.v1), 0.1) + self.assertEqual(_finder.find(m.b1.v2), 0.2) + self.assertEqual(_finder.find(m.b1.b2.v3), 0.4) def test_get_float_scaling_factor_intermediate_level(self): + # Note: the transformation used to have a private method for + # finding suffix values (which this method tested). The + # transformation now leverages the SuffixFinder. To ensure that + # the SuffixFinder behaves in the same way as the original local + # method, we preserve these tests, but directly test the + # SuffixFinder + m = pyo.ConcreteModel() m.scaling_factor = pyo.Suffix(direction=pyo.Suffix.EXPORT) @@ -680,15 +699,14 @@ def test_get_float_scaling_factor_intermediate_level(self): m.b1.b2.b3.scaling_factor[m.b1.b2.b3.v3] = 0.4 + _finder = SuffixFinder('scaling_factor', 1.0, m) + # v1 should be unscaled as SF set below variable level - sf = ScaleModel()._get_float_scaling_factor(m.v1) - assert sf == 1.0 + self.assertEqual(_finder.find(m.v1), 1.0) # v2 should get SF from b1 level - sf = ScaleModel()._get_float_scaling_factor(m.b1.b2.b3.v2) - assert sf == float(0.2) + self.assertEqual(_finder.find(m.b1.b2.b3.v2), 0.2) # v2 should get SF from highest level, ignoring b3 level - sf = ScaleModel()._get_float_scaling_factor(m.b1.b2.b3.v3) - assert sf == float(0.3) + self.assertEqual(_finder.find(m.b1.b2.b3.v3), 0.3) if __name__ == "__main__": diff --git a/pyomo/core/tests/unit/test_set.py b/pyomo/core/tests/unit/test_set.py index 2b0da8b861d..8f3fab23bbb 100644 --- a/pyomo/core/tests/unit/test_set.py +++ b/pyomo/core/tests/unit/test_set.py @@ -3518,21 +3518,25 @@ def test_iteration(self): def test_declare(self): NS = {} DeclareGlobalSet(RangeSet(name='TrinarySet', ranges=(NR(0, 2, 1),)), NS) - self.assertEqual(list(NS['TrinarySet']), [0, 1, 2]) - a = pickle.loads(pickle.dumps(NS['TrinarySet'])) - self.assertIs(a, NS['TrinarySet']) - with self.assertRaisesRegex(NameError, "name 'TrinarySet' is not defined"): - TrinarySet - del SetModule.GlobalSets['TrinarySet'] - del NS['TrinarySet'] + try: + self.assertEqual(list(NS['TrinarySet']), [0, 1, 2]) + a = pickle.loads(pickle.dumps(NS['TrinarySet'])) + self.assertIs(a, NS['TrinarySet']) + with self.assertRaisesRegex(NameError, "name 'TrinarySet' is not defined"): + TrinarySet + finally: + del SetModule.GlobalSets['TrinarySet'] + del NS['TrinarySet'] # Now test the automatic identification of the globals() scope DeclareGlobalSet(RangeSet(name='TrinarySet', ranges=(NR(0, 2, 1),))) - self.assertEqual(list(TrinarySet), [0, 1, 2]) - a = pickle.loads(pickle.dumps(TrinarySet)) - self.assertIs(a, TrinarySet) - del SetModule.GlobalSets['TrinarySet'] - del globals()['TrinarySet'] + try: + self.assertEqual(list(TrinarySet), [0, 1, 2]) + a = pickle.loads(pickle.dumps(TrinarySet)) + self.assertIs(a, TrinarySet) + finally: + 
del SetModule.GlobalSets['TrinarySet'] + del globals()['TrinarySet'] with self.assertRaisesRegex(NameError, "name 'TrinarySet' is not defined"): TrinarySet @@ -3551,18 +3555,22 @@ def test_exceptions(self): NS = {} ts = DeclareGlobalSet(RangeSet(name='TrinarySet', ranges=(NR(0, 2, 1),)), NS) - self.assertIs(NS['TrinarySet'], ts) + try: + self.assertIs(NS['TrinarySet'], ts) - # Repeat declaration is OK - DeclareGlobalSet(ts, NS) - self.assertIs(NS['TrinarySet'], ts) + # Repeat declaration is OK + DeclareGlobalSet(ts, NS) + self.assertIs(NS['TrinarySet'], ts) - # but conflicting one raises exception - NS['foo'] = None - with self.assertRaisesRegex( - RuntimeError, "Refusing to overwrite global object, foo" - ): - DeclareGlobalSet(RangeSet(name='foo', ranges=(NR(0, 2, 1),)), NS) + # but conflicting one raises exception + NS['foo'] = None + with self.assertRaisesRegex( + RuntimeError, "Refusing to overwrite global object, foo" + ): + DeclareGlobalSet(RangeSet(name='foo', ranges=(NR(0, 2, 1),)), NS) + finally: + del SetModule.GlobalSets['TrinarySet'] + del NS['TrinarySet'] def test_RealSet_IntegerSet(self): output = StringIO() @@ -3763,8 +3771,8 @@ def I_init(m): m = ConcreteModel() m.I = Set(initialize={1, 3, 2, 4}) ref = ( - "Initializing ordered Set I with a " - "fundamentally unordered data source (type: set)." + 'Initializing ordered Set I with a fundamentally ' + 'unordered data source (type: set).' ) self.assertIn(ref, output.getvalue()) self.assertEqual(m.I.sorted_data(), (1, 2, 3, 4)) @@ -3811,6 +3819,7 @@ def I_init(m): self.assertEqual(m.I.data(), (4, 3, 2, 1)) self.assertEqual(m.I.dimen, 1) + def test_initialize_with_noniterable(self): output = StringIO() with LoggingIntercept(output, 'pyomo.core'): with self.assertRaisesRegex(TypeError, "'int' object is not iterable"): @@ -3819,6 +3828,14 @@ def I_init(m): ref = "Initializer for Set I returned non-iterable object of type int." self.assertIn(ref, output.getvalue()) + output = StringIO() + with LoggingIntercept(output, 'pyomo.core'): + with self.assertRaisesRegex(TypeError, "'int' object is not iterable"): + m = ConcreteModel() + m.I = Set([1, 2], initialize=5) + ref = "Initializer for Set I[1] returned non-iterable object of type int." + self.assertIn(ref, output.getvalue()) + def test_scalar_indexed_api(self): m = ConcreteModel() m.I = Set(initialize=range(3)) @@ -3877,12 +3894,13 @@ def _verify(_s, _l): m.I.add(4) _verify(m.I, [1, 3, 2, 4]) + N = len(m.I) output = StringIO() with LoggingIntercept(output, 'pyomo.core'): m.I.add(3) - self.assertEqual( - output.getvalue(), "Element 3 already exists in Set I; no action taken\n" - ) + # In Pyomo <= 6.7.3 duplicate values logged a warning. + self.assertEqual(output.getvalue(), "") + self.assertEqual(N, len(m.I)) _verify(m.I, [1, 3, 2, 4]) m.I.remove(3) @@ -3959,12 +3977,13 @@ def _verify(_s, _l): m.I.add(4) _verify(m.I, [1, 2, 3, 4]) + N = len(m.I) output = StringIO() with LoggingIntercept(output, 'pyomo.core'): m.I.add(3) - self.assertEqual( - output.getvalue(), "Element 3 already exists in Set I; no action taken\n" - ) + # In Pyomo <= 6.7.3 duplicate values logged a warning. 
+ self.assertEqual(output.getvalue(), "") + self.assertEqual(N, len(m.I)) _verify(m.I, [1, 2, 3, 4]) m.I.remove(3) @@ -4052,12 +4071,13 @@ def _verify(_s, _l): m.I.add(4) _verify(m.I, [1, 2, 3, 4]) + N = len(m.I) output = StringIO() with LoggingIntercept(output, 'pyomo.core'): m.I.add(3) - self.assertEqual( - output.getvalue(), "Element 3 already exists in Set I; no action taken\n" - ) + # In Pyomo <= 6.7.3 duplicate values logged a warning. + self.assertEqual(output.getvalue(), "") + self.assertEqual(N, len(m.I)) _verify(m.I, [1, 2, 3, 4]) m.I.remove(3) @@ -4248,26 +4268,23 @@ def test_add_filter_validate(self): self.assertIn(1, m.I) self.assertIn(1.0, m.I) + N = len(m.I) output = StringIO() with LoggingIntercept(output, 'pyomo.core'): self.assertFalse(m.I.add(1)) - self.assertEqual( - output.getvalue(), "Element 1 already exists in Set I; no action taken\n" - ) + # In Pyomo <= 6.7.3 duplicate values logged a warning. + self.assertEqual(output.getvalue(), "") + self.assertEqual(N, len(m.I)) output = StringIO() with LoggingIntercept(output, 'pyomo.core'): self.assertFalse(m.I.add((1,))) - self.assertEqual( - output.getvalue(), "Element (1,) already exists in Set I; no action taken\n" - ) + # In Pyomo <= 6.7.3 duplicate values logged a warning. + self.assertEqual(output.getvalue(), "") m.J = Set() # Note that pypy raises a different exception from cpython - err = ( - r"Unable to insert '{}' into Set J:\n\tTypeError: " - r"((unhashable type: 'dict')|('dict' objects are unhashable))" - ) + err = r"((unhashable type: 'dict')|('dict' objects are unhashable))" with self.assertRaisesRegex(TypeError, err): m.J.add({}) @@ -4275,9 +4292,9 @@ def test_add_filter_validate(self): output = StringIO() with LoggingIntercept(output, 'pyomo.core'): self.assertFalse(m.J.add(1)) - self.assertEqual( - output.getvalue(), "Element 1 already exists in Set J; no action taken\n" - ) + # In Pyomo <= 6.7.3 duplicate values logged a warning. 
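# [Illustrative sketch; not part of the patch]
# As the updated assertions here reflect, adding a value that is already in
# a Set is now a silent no-op: add() returns a falsy value, nothing is
# logged, and the Set is unchanged (Pyomo <= 6.7.3 logged "Element ...
# already exists").  A minimal sketch:

import pyomo.environ as pyo

m = pyo.ConcreteModel()
m.I = pyo.Set(initialize=[1, 3, 2, 4])
assert not m.I.add(3)        # duplicate: no warning, no change
assert len(m.I) == 4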
+ self.assertEqual(output.getvalue(), "") + self.assertEqual(N, len(m.I)) def _l_tri(model, i, j): self.assertIs(model, m) @@ -5254,6 +5271,21 @@ def Bindex(m): self.assertIs(m.K.index_set()._domain, Integers) self.assertEqual(m.K.index_set(), [0, 1, 2, 3, 4]) + def test_normalize_index(self): + try: + _oldFlatten = normalize_index.flatten + normalize_index.flatten = True + + m = ConcreteModel() + with self.assertRaisesRegex( + ValueError, + r"The value=\(\(2, 3\),\) has dimension 2 and is not " + "valid for Set I which has dimen=1", + ): + m.I = Set(initialize=[1, ((2, 3),)]) + finally: + normalize_index.flatten = _oldFlatten + def test_no_normalize_index(self): try: _oldFlatten = normalize_index.flatten diff --git a/pyomo/core/tests/unit/test_suffix.py b/pyomo/core/tests/unit/test_suffix.py index d2e861cceb5..f56f84fc129 100644 --- a/pyomo/core/tests/unit/test_suffix.py +++ b/pyomo/core/tests/unit/test_suffix.py @@ -1795,47 +1795,77 @@ def test_suffix_finder(self): m.b1.b2 = Block() m.b1.b2.v3 = Var([0]) - _suffix_finder = SuffixFinder('suffix') - # Add Suffixes m.suffix = Suffix(direction=Suffix.EXPORT) # No suffix on b1 - make sure we can handle missing suffixes m.b1.b2.suffix = Suffix(direction=Suffix.EXPORT) + _suffix_finder = SuffixFinder('suffix') + _suffix_b1_finder = SuffixFinder('suffix', context=m.b1) + _suffix_b2_finder = SuffixFinder('suffix', context=m.b1.b2) + # Check for no suffix value - assert _suffix_finder.find(m.b1.b2.v3[0]) == None + self.assertEqual(_suffix_finder.find(m.b1.b2.v3[0]), None) + self.assertEqual(_suffix_b1_finder.find(m.b1.b2.v3[0]), None) + self.assertEqual(_suffix_b2_finder.find(m.b1.b2.v3[0]), None) # Check finding default values # Add a default at the top level m.suffix[None] = 1 - assert _suffix_finder.find(m.b1.b2.v3[0]) == 1 + self.assertEqual(_suffix_finder.find(m.b1.b2.v3[0]), 1) + self.assertEqual(_suffix_b1_finder.find(m.b1.b2.v3[0]), None) + self.assertEqual(_suffix_b2_finder.find(m.b1.b2.v3[0]), None) # Add a default suffix at a lower level m.b1.b2.suffix[None] = 2 - assert _suffix_finder.find(m.b1.b2.v3[0]) == 2 + self.assertEqual(_suffix_finder.find(m.b1.b2.v3[0]), 2) + self.assertEqual(_suffix_b1_finder.find(m.b1.b2.v3[0]), 2) + self.assertEqual(_suffix_b2_finder.find(m.b1.b2.v3[0]), 2) # Check for container at lowest level m.b1.b2.suffix[m.b1.b2.v3] = 3 - assert _suffix_finder.find(m.b1.b2.v3[0]) == 3 + self.assertEqual(_suffix_finder.find(m.b1.b2.v3[0]), 3) + self.assertEqual(_suffix_b1_finder.find(m.b1.b2.v3[0]), 3) + self.assertEqual(_suffix_b2_finder.find(m.b1.b2.v3[0]), 3) # Check for container at top level m.suffix[m.b1.b2.v3] = 4 - assert _suffix_finder.find(m.b1.b2.v3[0]) == 4 + self.assertEqual(_suffix_finder.find(m.b1.b2.v3[0]), 4) + self.assertEqual(_suffix_b1_finder.find(m.b1.b2.v3[0]), 3) + self.assertEqual(_suffix_b2_finder.find(m.b1.b2.v3[0]), 3) # Check for specific values at lowest level m.b1.b2.suffix[m.b1.b2.v3[0]] = 5 - assert _suffix_finder.find(m.b1.b2.v3[0]) == 5 + self.assertEqual(_suffix_finder.find(m.b1.b2.v3[0]), 5) + self.assertEqual(_suffix_b1_finder.find(m.b1.b2.v3[0]), 5) + self.assertEqual(_suffix_b2_finder.find(m.b1.b2.v3[0]), 5) # Check for specific values at top level m.suffix[m.b1.b2.v3[0]] = 6 - assert _suffix_finder.find(m.b1.b2.v3[0]) == 6 + self.assertEqual(_suffix_finder.find(m.b1.b2.v3[0]), 6) + self.assertEqual(_suffix_b1_finder.find(m.b1.b2.v3[0]), 5) + self.assertEqual(_suffix_b2_finder.find(m.b1.b2.v3[0]), 5) # Make sure we don't find default suffixes at lower levels - assert 
_suffix_finder.find(m.b1.v2) == 1 + self.assertEqual(_suffix_finder.find(m.b1.v2), 1) + self.assertEqual(_suffix_b1_finder.find(m.b1.v2), None) + self.assertEqual(_suffix_b2_finder.find(m.b1.v2), None) # Make sure we don't find specific suffixes at lower levels m.b1.b2.suffix[m.v1] = 5 - assert _suffix_finder.find(m.v1) == 1 + self.assertEqual(_suffix_finder.find(m.v1), 1) + self.assertEqual(_suffix_b1_finder.find(m.v1), None) + self.assertEqual(_suffix_b2_finder.find(m.v1), None) + + # Make sure we can look up Blocks and that they will match + # suffixes that they hold + self.assertEqual(_suffix_finder.find(m.b1.b2), 2) + self.assertEqual(_suffix_b1_finder.find(m.b1.b2), 2) + self.assertEqual(_suffix_b2_finder.find(m.b1.b2), 2) + + self.assertEqual(_suffix_finder.find(m.b1), 1) + self.assertEqual(_suffix_b1_finder.find(m.b1), None) + self.assertEqual(_suffix_b2_finder.find(m.b1), None) if __name__ == "__main__": diff --git a/pyomo/gdp/disjunct.py b/pyomo/gdp/disjunct.py index 637f55cbed1..bfaada8f3de 100644 --- a/pyomo/gdp/disjunct.py +++ b/pyomo/gdp/disjunct.py @@ -502,12 +502,6 @@ def _activate_without_unfixing_indicator(self): class ScalarDisjunct(DisjunctData, Disjunct): def __init__(self, *args, **kwds): - ## FIXME: This is a HACK to get around a chicken-and-egg issue - ## where BlockData creates the indicator_var *before* - ## Block.__init__ declares the _defer_construction flag. - self._defer_construction = True - self._suppress_ctypes = set() - DisjunctData.__init__(self, self) Disjunct.__init__(self, *args, **kwds) self._data[None] = self diff --git a/pyomo/opt/base/solvers.py b/pyomo/opt/base/solvers.py index c0698165603..4ffef7e7cac 100644 --- a/pyomo/opt/base/solvers.py +++ b/pyomo/opt/base/solvers.py @@ -470,8 +470,8 @@ def set_results_format(self, format): Set the current results format (if it's valid for the current problem format). """ - if (self._problem_format in self._valid_results_formats) and ( - format in self._valid_results_formats[self._problem_format] + if (self._problem_format in self._valid_result_formats) and ( + format in self._valid_result_formats[self._problem_format] ): self._results_format = format else: diff --git a/pyomo/opt/tests/base/test_solver.py b/pyomo/opt/tests/base/test_solver.py index 8ffc647804d..919e9375f60 100644 --- a/pyomo/opt/tests/base/test_solver.py +++ b/pyomo/opt/tests/base/test_solver.py @@ -109,7 +109,7 @@ def test_set_problem_format(self): def test_set_results_format(self): opt = pyomo.opt.SolverFactory("stest1") opt._valid_problem_formats = ['a'] - opt._valid_results_formats = {'a': 'b'} + opt._valid_result_formats = {'a': 'b'} self.assertEqual(opt.problem_format(), None) try: opt.set_results_format('b') diff --git a/pyomo/repn/beta/matrix.py b/pyomo/repn/beta/matrix.py index 0201c46eb18..992e1810fec 100644 --- a/pyomo/repn/beta/matrix.py +++ b/pyomo/repn/beta/matrix.py @@ -587,6 +587,11 @@ def constant(self): # Abstract Interface (ConstraintData) # + def to_bounded_expression(self, evaluate_bounds=False): + """Access this constraint as a single expression.""" + # Note that the bounds are always going to be floats... 
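# [Illustrative sketch; not part of the patch]
# to_bounded_expression() hands writers the (lower, body, upper) triple in a
# single call; passing True (evaluate_bounds) returns the bounds as finite
# numbers or None.  A hedged sketch against an ordinary AML constraint:

import pyomo.environ as pyo

m = pyo.ConcreteModel()
m.x = pyo.Var()
m.c = pyo.Constraint(expr=(0, m.x, 10))
lb, body, ub = m.c.to_bounded_expression(True)
assert lb == 0 and ub == 10  # body is the constraint's expression (m.x here)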
+ return self.lower, self.body, self.upper + @property def body(self): """Access the body of a constraint expression.""" diff --git a/pyomo/repn/plugins/__init__.py b/pyomo/repn/plugins/__init__.py index d3804c55106..4029f44a03d 100644 --- a/pyomo/repn/plugins/__init__.py +++ b/pyomo/repn/plugins/__init__.py @@ -37,6 +37,23 @@ def load(): def activate_writer_version(name, ver): """DEBUGGING TOOL to switch the "default" writer implementation""" + from pyomo.opt import WriterFactory + doc = WriterFactory.doc(name) WriterFactory.unregister(name) WriterFactory.register(name, doc)(WriterFactory.get_class(f'{name}_v{ver}')) + + +def active_writer_version(name): + """DEBUGGING TOOL to switch the "default" writer implementation""" + from pyomo.opt import WriterFactory + + ref = WriterFactory.get_class(name) + ver = 1 + try: + while 1: + if WriterFactory.get_class(f'{name}_v{ver}') is ref: + return ver + ver += 1 + except KeyError: + return None diff --git a/pyomo/repn/plugins/baron_writer.py b/pyomo/repn/plugins/baron_writer.py index ab673b0c1c3..861735dc973 100644 --- a/pyomo/repn/plugins/baron_writer.py +++ b/pyomo/repn/plugins/baron_writer.py @@ -256,9 +256,9 @@ def _skip_trivial(constraint_data): suffix_gen = ( lambda b: pyomo.core.base.suffix.active_export_suffix_generator(b) ) - r_o_eqns = [] - c_eqns = [] - l_eqns = [] + r_o_eqns = {} + c_eqns = {} + l_eqns = {} branching_priorities_suffixes = [] for block in all_blocks_list: for name, suffix in suffix_gen(block): @@ -266,13 +266,14 @@ def _skip_trivial(constraint_data): branching_priorities_suffixes.append(suffix) elif name == 'constraint_types': for constraint_data, constraint_type in suffix.items(): + info = constraint_data.to_bounded_expression(True) if not _skip_trivial(constraint_data): if constraint_type.lower() == 'relaxationonly': - r_o_eqns.append(constraint_data) + r_o_eqns[constraint_data] = info elif constraint_type.lower() == 'convex': - c_eqns.append(constraint_data) + c_eqns[constraint_data] = info elif constraint_type.lower() == 'local': - l_eqns.append(constraint_data) + l_eqns[constraint_data] = info else: raise ValueError( "A suffix '%s' contained an invalid value: %s\n" @@ -294,7 +295,10 @@ def _skip_trivial(constraint_data): % (name, _location) ) - non_standard_eqns = r_o_eqns + c_eqns + l_eqns + non_standard_eqns = set() + non_standard_eqns.update(r_o_eqns) + non_standard_eqns.update(c_eqns) + non_standard_eqns.update(l_eqns) # # EQUATIONS @@ -304,7 +308,7 @@ def _skip_trivial(constraint_data): n_roeqns = len(r_o_eqns) n_ceqns = len(c_eqns) n_leqns = len(l_eqns) - eqns = [] + eqns = {} # Alias the constraints by declaration order since Baron does not # include the constraint names in the solution file. 
It is important @@ -321,14 +325,15 @@ def _skip_trivial(constraint_data): for constraint_data in block.component_data_objects( Constraint, active=True, sort=sorter, descend_into=False ): - if (not constraint_data.has_lb()) and (not constraint_data.has_ub()): + lb, body, ub = constraint_data.to_bounded_expression(True) + if lb is None and ub is None: assert not constraint_data.equality continue # non-binding, so skip if (not _skip_trivial(constraint_data)) and ( constraint_data not in non_standard_eqns ): - eqns.append(constraint_data) + eqns[constraint_data] = lb, body, ub con_symbol = symbol_map.createSymbol(constraint_data, c_labeler) assert not con_symbol.startswith('.') @@ -407,12 +412,12 @@ def mutable_param_gen(b): # Equation Definition output_file.write('c_e_FIX_ONE_VAR_CONST__: ONE_VAR_CONST__ == 1;\n') - for constraint_data in itertools.chain(eqns, r_o_eqns, c_eqns, l_eqns): + for constraint_data, (lb, body, ub) in itertools.chain( + eqns.items(), r_o_eqns.items(), c_eqns.items(), l_eqns.items() + ): variables = OrderedSet() # print(symbol_map.byObject.keys()) - eqn_body = expression_to_string( - constraint_data.body, variables, smap=symbol_map - ) + eqn_body = expression_to_string(body, variables, smap=symbol_map) # print(symbol_map.byObject.keys()) referenced_variable_ids.update(variables) @@ -439,22 +444,22 @@ def mutable_param_gen(b): # Equality constraint if constraint_data.equality: eqn_lhs = '' - eqn_rhs = ' == ' + ftoa(constraint_data.upper) + eqn_rhs = ' == ' + ftoa(ub) # Greater than constraint - elif not constraint_data.has_ub(): - eqn_rhs = ' >= ' + ftoa(constraint_data.lower) + elif ub is None: + eqn_rhs = ' >= ' + ftoa(lb) eqn_lhs = '' # Less than constraint - elif not constraint_data.has_lb(): - eqn_rhs = ' <= ' + ftoa(constraint_data.upper) + elif lb is None: + eqn_rhs = ' <= ' + ftoa(ub) eqn_lhs = '' # Double-sided constraint - elif constraint_data.has_lb() and constraint_data.has_ub(): - eqn_lhs = ftoa(constraint_data.lower) + ' <= ' - eqn_rhs = ' <= ' + ftoa(constraint_data.upper) + elif lb is not None and ub is not None: + eqn_lhs = ftoa(lb) + ' <= ' + eqn_rhs = ' <= ' + ftoa(ub) eqn_string = eqn_lhs + eqn_body + eqn_rhs + ';\n' output_file.write(eqn_string) diff --git a/pyomo/repn/plugins/gams_writer.py b/pyomo/repn/plugins/gams_writer.py index a0f407d7952..f0a9eb7afef 100644 --- a/pyomo/repn/plugins/gams_writer.py +++ b/pyomo/repn/plugins/gams_writer.py @@ -619,11 +619,12 @@ def _write_model( # encountered will be added to the var_list due to the labeler # defined above. for con in model.component_data_objects(Constraint, active=True, sort=sort): - if not con.has_lb() and not con.has_ub(): + lb, body, ub = con.to_bounded_expression(True) + if lb is None and ub is None: assert not con.equality continue # non-binding, so skip - con_body = as_numeric(con.body) + con_body = as_numeric(body) if skip_trivial_constraints and con_body.is_fixed(): continue if linear: @@ -642,20 +643,20 @@ def _write_model( constraint_names.append('%s' % cName) ConstraintIO.write( '%s.. %s =e= %s ;\n' - % (constraint_names[-1], con_body_str, ftoa(con.upper, False)) + % (constraint_names[-1], con_body_str, ftoa(ub, False)) ) else: - if con.has_lb(): + if lb is not None: constraint_names.append('%s_lo' % cName) ConstraintIO.write( '%s.. %s =l= %s ;\n' - % (constraint_names[-1], ftoa(con.lower, False), con_body_str) + % (constraint_names[-1], ftoa(lb, False), con_body_str) ) - if con.has_ub(): + if ub is not None: constraint_names.append('%s_hi' % cName) ConstraintIO.write( '%s.. 
%s =l= %s ;\n' - % (constraint_names[-1], con_body_str, ftoa(con.upper, False)) + % (constraint_names[-1], con_body_str, ftoa(ub, False)) ) obj = list(model.component_data_objects(Objective, active=True, sort=sort)) diff --git a/pyomo/repn/plugins/lp_writer.py b/pyomo/repn/plugins/lp_writer.py index 814f79a4eb9..2fbdae3571d 100644 --- a/pyomo/repn/plugins/lp_writer.py +++ b/pyomo/repn/plugins/lp_writer.py @@ -408,10 +408,10 @@ def write(self, model): if with_debug_timing and con.parent_component() is not last_parent: timer.toc('Constraint %s', last_parent, level=logging.DEBUG) last_parent = con.parent_component() - # Note: Constraint.lb/ub guarantee a return value that is - # either a (finite) native_numeric_type, or None - lb = con.lb - ub = con.ub + # Note: Constraint.to_bounded_expression(evaluate_bounds=True) + # guarantee a return value that is either a (finite) + # native_numeric_type, or None + lb, body, ub = con.to_bounded_expression(True) if lb is None and ub is None: # Note: you *cannot* output trivial (unbounded) @@ -419,7 +419,7 @@ def write(self, model): # slack variable if skip_trivial_constraints is False, # but that seems rather silly. continue - repn = constraint_visitor.walk_expression(con.body) + repn = constraint_visitor.walk_expression(body) if repn.nonlinear is not None: raise ValueError( f"Model constraint ({con.name}) contains nonlinear terms that " diff --git a/pyomo/repn/plugins/nl_writer.py b/pyomo/repn/plugins/nl_writer.py index 8fc82d21d30..8f51f0b0fba 100644 --- a/pyomo/repn/plugins/nl_writer.py +++ b/pyomo/repn/plugins/nl_writer.py @@ -510,8 +510,8 @@ def compile(self, column_order, row_order, obj_order, model_id): class CachingNumericSuffixFinder(SuffixFinder): scale = True - def __init__(self, name, default=None): - super().__init__(name, default) + def __init__(self, name, default=None, context=None): + super().__init__(name, default, context) self.suffix_cache = {} def __call__(self, obj): @@ -646,7 +646,7 @@ def write(self, model): # Data structures to support variable/constraint scaling # if self.config.scale_model and 'scaling_factor' in suffix_data: - scaling_factor = CachingNumericSuffixFinder('scaling_factor', 1) + scaling_factor = CachingNumericSuffixFinder('scaling_factor', 1, model) scaling_cache = scaling_factor.suffix_cache del suffix_data['scaling_factor'] else: @@ -723,14 +723,14 @@ def write(self, model): timer.toc('Constraint %s', last_parent, level=logging.DEBUG) last_parent = con.parent_component() scale = scaling_factor(con) - expr_info = visitor.walk_expression((con.body, con, 0, scale)) + # Note: Constraint.to_bounded_expression(evaluate_bounds=True) + # guarantee a return value that is either a (finite) + # native_numeric_type, or None + lb, body, ub = con.to_bounded_expression(True) + expr_info = visitor.walk_expression((body, con, 0, scale)) if expr_info.named_exprs: self._record_named_expression_usage(expr_info.named_exprs, con, 0) - # Note: Constraint.lb/ub guarantee a return value that is - # either a (finite) native_numeric_type, or None - lb = con.lb - ub = con.ub if lb is None and ub is None: # and self.config.skip_trivial_constraints: continue if scale != 1: diff --git a/pyomo/repn/tests/test_plugins.py b/pyomo/repn/tests/test_plugins.py new file mode 100644 index 00000000000..1152131f6b6 --- /dev/null +++ b/pyomo/repn/tests/test_plugins.py @@ -0,0 +1,50 @@ +# ___________________________________________________________________________ +# +# Pyomo: Python Optimization Modeling Objects +# Copyright (c) 2008-2024 +# National 
Technology and Engineering Solutions of Sandia, LLC +# Under the terms of Contract DE-NA0003525 with National Technology and +# Engineering Solutions of Sandia, LLC, the U.S. Government retains certain +# rights in this software. +# This software is distributed under the 3-clause BSD License. +# ___________________________________________________________________________ + +from pyomo.common import unittest + +from pyomo.opt import WriterFactory +from pyomo.repn.plugins import activate_writer_version, active_writer_version + +import pyomo.environ + + +class TestPlugins(unittest.TestCase): + def test_active(self): + with self.assertRaises(KeyError): + active_writer_version('nonexistent_writer') + ver = active_writer_version('lp') + self.assertIs( + WriterFactory.get_class('lp'), WriterFactory.get_class(f'lp_v{ver}') + ) + + class TMP(object): + pass + + WriterFactory.register('test_writer')(TMP) + try: + self.assertIsNone(active_writer_version('test_writer')) + finally: + WriterFactory.unregister('test_writer') + + def test_activate(self): + ver = active_writer_version('lp') + try: + activate_writer_version('lp', 2) + self.assertIs( + WriterFactory.get_class('lp'), WriterFactory.get_class(f'lp_v2') + ) + activate_writer_version('lp', 1) + self.assertIs( + WriterFactory.get_class('lp'), WriterFactory.get_class(f'lp_v1') + ) + finally: + activate_writer_version('lp', ver) diff --git a/pyomo/scripting/driver_help.py b/pyomo/scripting/driver_help.py index 38d1a4c16bf..45fdc711137 100644 --- a/pyomo/scripting/driver_help.py +++ b/pyomo/scripting/driver_help.py @@ -20,6 +20,7 @@ import pyomo.common from pyomo.common.collections import Bunch +from pyomo.common.tee import capture_output import pyomo.scripting.pyomo_parser logger = logging.getLogger('pyomo.solvers') @@ -235,33 +236,44 @@ def help_solvers(): try: # Disable warnings logging.disable(logging.WARNING) - for s in solver_list: - # Create a solver, and see if it is available - with pyomo.opt.SolverFactory(s) as opt: - ver = '' - if opt.available(False): - avail = '-' - if opt.license_is_valid(): - avail = '+' - try: - ver = opt.version() - if ver: - while len(ver) > 2 and ver[-1] == 0: - ver = ver[:-1] - ver = '.'.join(str(v) for v in ver) - else: - ver = '' - except (AttributeError, NameError): - pass - elif s == 'py' or (hasattr(opt, "_metasolver") and opt._metasolver): - # py is a metasolver, but since we don't specify a subsolver - # for this test, opt is actually an UnknownSolver, so we - # can't try to get the _metasolver attribute from it. - # Also, default to False if the attribute isn't implemented - avail = '*' - else: - avail = '' - _data.append((avail, s, ver, pyomo.opt.SolverFactory.doc(s))) + # suppress ALL output + with capture_output(capture_fd=True): + for s in solver_list: + # Create a solver, and see if it is available + with pyomo.opt.SolverFactory(s) as opt: + ver = '' + if opt.available(False): + avail = '-' + if opt.license_is_valid(): + avail = '+' + try: + ver = opt.version() + if isinstance(ver, str): + pass + elif ver: + while len(ver) > 2 and ver[-1] == 0: + ver = ver[:-1] + ver = '.'.join(str(v) for v in ver) + else: + ver = '' + except (AttributeError, NameError): + pass + elif s == 'py': + # py is a metasolver, but since we don't specify a subsolver + # for this test, opt is actually an UnknownSolver, so we + # can't try to get the _metasolver attribute from it. 
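# [Illustrative sketch; not part of the patch]
# The active_writer_version()/activate_writer_version() helpers added in
# pyomo/repn/plugins/__init__.py (and exercised in test_plugins.py above)
# report and switch the "default" writer registration while debugging:

from pyomo.repn.plugins import activate_writer_version, active_writer_version
import pyomo.environ  # noqa: F401 -- ensures the writers are registered

ver = active_writer_version('lp')       # e.g. 2
try:
    activate_writer_version('lp', 1)    # temporarily select lp_v1
finally:
    activate_writer_version('lp', ver)  # restore the original default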
+ avail = '*' + elif isinstance(s, pyomo.opt.solvers.UnknownSolver): + # We can get here if creating a registered + # solver failed (i.e., an exception was raised + # in __init__) + avail = '' + elif getattr(opt, "_metasolver", False): + # Note: default to False if the attribute isn't implemented + avail = '*' + else: + avail = '' + _data.append((avail, s, ver, pyomo.opt.SolverFactory.doc(s))) finally: # Reset logging level logging.disable(logging.NOTSET) diff --git a/pyomo/solvers/plugins/solvers/ASL.py b/pyomo/solvers/plugins/solvers/ASL.py index 7acd59936b1..c912f2a30ee 100644 --- a/pyomo/solvers/plugins/solvers/ASL.py +++ b/pyomo/solvers/plugins/solvers/ASL.py @@ -108,7 +108,7 @@ def _get_version(self): if ver is None: # Some ASL solvers do not export a version number if results.stdout.strip().split()[-1].startswith('ASL('): - return '0.0.0' + return (0, 0, 0) return ver except OSError: pass diff --git a/pyomo/solvers/plugins/solvers/IPOPT.py b/pyomo/solvers/plugins/solvers/IPOPT.py index 21045cb7b4f..82dcfdb75a0 100644 --- a/pyomo/solvers/plugins/solvers/IPOPT.py +++ b/pyomo/solvers/plugins/solvers/IPOPT.py @@ -14,6 +14,8 @@ from pyomo.common import Executable from pyomo.common.collections import Bunch +from pyomo.common.errors import ApplicationError +from pyomo.common.tee import capture_output from pyomo.common.tempfiles import TempfileManager from pyomo.opt.base import ProblemFormat, ResultsFormat @@ -207,3 +209,16 @@ def process_output(self, rc): res.solver.message = line.split(':')[2].strip() assert "degrees of freedom" in res.solver.message return res + + def has_linear_solver(self, linear_solver): + import pyomo.core as AML + + m = AML.ConcreteModel() + m.x = AML.Var() + m.o = AML.Objective(expr=(m.x - 2) ** 2) + try: + with capture_output() as OUT: + self.solve(m, tee=True, options={'linear_solver': linear_solver}) + except ApplicationError: + return False + return 'running with linear solver' in OUT.getvalue() diff --git a/pyomo/solvers/tests/checks/test_ipopt.py b/pyomo/solvers/tests/checks/test_ipopt.py new file mode 100644 index 00000000000..b7d00c35a6f --- /dev/null +++ b/pyomo/solvers/tests/checks/test_ipopt.py @@ -0,0 +1,42 @@ +# ___________________________________________________________________________ +# +# Pyomo: Python Optimization Modeling Objects +# Copyright (c) 2008-2024 +# National Technology and Engineering Solutions of Sandia, LLC +# Under the terms of Contract DE-NA0003525 with National Technology and +# Engineering Solutions of Sandia, LLC, the U.S. Government retains certain +# rights in this software. +# This software is distributed under the 3-clause BSD License. 
+# ___________________________________________________________________________ + +from pyomo.common import unittest +from pyomo.solvers.plugins.solvers import IPOPT +import pyomo.environ + +ipopt_available = IPOPT.IPOPT().available() + + +@unittest.skipIf(not ipopt_available, "The 'ipopt' command is not available") +class TestIpoptInterface(unittest.TestCase): + def test_has_linear_solver(self): + opt = IPOPT.IPOPT() + self.assertTrue( + any( + map( + opt.has_linear_solver, + [ + 'mumps', + 'ma27', + 'ma57', + 'ma77', + 'ma86', + 'ma97', + 'pardiso', + 'pardisomkl', + 'spral', + 'wsmp', + ], + ) + ) + ) + self.assertFalse(opt.has_linear_solver('bogus_linear_solver')) diff --git a/pyomo/solvers/tests/mip/test_factory.py b/pyomo/solvers/tests/mip/test_factory.py index 31d47486aa4..f69fd198009 100644 --- a/pyomo/solvers/tests/mip/test_factory.py +++ b/pyomo/solvers/tests/mip/test_factory.py @@ -53,8 +53,8 @@ def setUpClass(cls): def tearDown(self): ReaderFactory.unregister('rtest3') - ReaderFactory.unregister('stest3') - ReaderFactory.unregister('wtest3') + SolverFactory.unregister('stest3') + WriterFactory.unregister('wtest3') def test_solver_factory(self): """ @@ -119,6 +119,9 @@ def test_writer_instance(self): ans = WriterFactory("none") self.assertEqual(ans, None) ans = WriterFactory("wtest3") + self.assertEqual(ans, None) + WriterFactory.register('wtest3')(MockWriter) + ans = WriterFactory("wtest3") self.assertNotEqual(ans, None) def test_writer_registration(self):
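# [Illustrative sketch; not part of the patch]
# IPOPT.has_linear_solver() probes a linear-solver option by solving a tiny
# model with tee=True and scanning the log, as test_ipopt.py above does.  A
# hedged usage sketch (requires a working ipopt executable on the PATH):

import pyomo.environ  # noqa: F401
from pyomo.solvers.plugins.solvers.IPOPT import IPOPT

opt = IPOPT()
if opt.available(False):
    print(opt.has_linear_solver('mumps'))                # True if built with MUMPS
    print(opt.has_linear_solver('bogus_linear_solver'))  # False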