Skip to content

Commit

Permalink
fix for #189 + v0.6.3 (#190)
Browse files Browse the repository at this point in the history
  • Loading branch information
Wikunia authored Apr 16, 2020
1 parent d365aa1 commit e2a7299
Show file tree
Hide file tree
Showing 6 changed files with 29 additions and 15 deletions.
3 changes: 3 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
# Juniper.jl Changelog

### 0.6.3
- Bugfix: `registered_functions` failed in feasibility pump [Issue #189](https://github.com/lanl-ansi/Juniper.jl/pull/189)

### 0.6.2
- Bugfix: `@NLexpression` failed in feasibility pump [Issue #184](https://github.com/lanl-ansi/Juniper.jl/pull/184)

Expand Down
2 changes: 1 addition & 1 deletion Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ name = "Juniper"
uuid = "2ddba703-00a4-53a7-87a5-e8b9971dde84"
authors = ["Ole Kröger <[email protected]>", "Kaarthik Sundar <[email protected]>"]
repo = "https://github.com/lanl-ansi/Juniper.jl.git"
version = "0.6.2"
version = "0.6.3"

[deps]
Distributed = "8ba89e20-285c-5b6f-9357-94700520ee1b"
Expand Down
4 changes: 4 additions & 0 deletions src/fpump.jl
Original file line number Diff line number Diff line change
Expand Up @@ -125,6 +125,8 @@ function generate_nlp(optimizer, m, mip_sol, start_fpump; random_start=false)
JuMP.set_start_value.(nx[1:m.num_var],mip_sol)
end

register_functions!(nlp_model, m.options.registered_functions)

# add all constraints
backend = JuMP.backend(nlp_model);
llc = optimizer.linear_le_constraints
Expand Down Expand Up @@ -202,6 +204,8 @@ function generate_real_nlp(optimizer, m, sol; random_start=false)
JuMP.fix(rx[vi], sol[vi]; force=true)
end

register_functions!(rmodel, m.options.registered_functions)

# define the objective function
# TODO check whether it is supported
if optimizer.nlp_data.has_objective
Expand Down
18 changes: 5 additions & 13 deletions src/model.jl
Original file line number Diff line number Diff line change
Expand Up @@ -14,17 +14,7 @@ function create_root_model!(optimizer::MOI.AbstractOptimizer, jp::JuniperProblem
JuMP.set_start_value(x[i], jp.primal_start[i])
end

if jp.options.registered_functions !== nothing
for reg_f in jp.options.registered_functions
if reg_f.gradf === nothing
JuMP.register(jp.model, reg_f.s, reg_f.dimension, reg_f.f; autodiff=reg_f.autodiff)
elseif reg_f.grad2f === nothing
JuMP.register(jp.model, reg_f.s, reg_f.dimension, reg_f.f, reg_f.gradf)
else
JuMP.register(jp.model, reg_f.s, reg_f.dimension, reg_f.f, reg_f.gradf, reg_f.grad2f)
end
end
end
register_functions!(jp.model, jp.options.registered_functions)

# TODO check whether it is supported
if optimizer.nlp_data.has_objective
Expand All @@ -33,7 +23,8 @@ function create_root_model!(optimizer::MOI.AbstractOptimizer, jp::JuniperProblem
try
JuMP.set_NL_objective(jp.model, optimizer.sense, obj_expr)
catch
error("Have you registered a function? Then please register the function also for Juniper see: https://lanl-ansi.github.io/Juniper.jl/stable/options/#registered_functions::Union{Nothing,Vector{RegisteredFunction}}-[nothing]-1")
error("Have you registered a function? Then please register the function also for Juniper see: \n
https://lanl-ansi.github.io/Juniper.jl/stable/options/#registered_functions%3A%3AUnion%7BNothing%2CVector%7BRegisteredFunction%7D%7D-%5Bnothing%5D-1")
end
elseif optimizer.objective !== nothing
MOI.set(jp.model, MOI.ObjectiveFunction{typeof(optimizer.objective)}(), optimizer.objective)
Expand All @@ -58,7 +49,8 @@ function create_root_model!(optimizer::MOI.AbstractOptimizer, jp::JuniperProblem
try
JuMP.add_NL_constraint(jp.model, constr_expr)
catch
error("Have you registered a function? Then please register the function also for Juniper see: https://lanl-ansi.github.io/Juniper.jl/stable/options/#registered_functions::Union{Nothing,Vector{RegisteredFunction}}-[nothing]-1")
error("Have you registered a function? Then please register the function also for Juniper see: \n
https://lanl-ansi.github.io/Juniper.jl/stable/options/#registered_functions%3A%3AUnion%7BNothing%2CVector%7BRegisteredFunction%7D%7D-%5Bnothing%5D-1")
end
end

Expand Down
14 changes: 14 additions & 0 deletions src/util.jl
Original file line number Diff line number Diff line change
Expand Up @@ -289,4 +289,18 @@ function optimize_get_status_backend(model::JuMP.Model; solver=nothing)
backend = JuMP.backend(model)
status = MOI.get(backend, MOI.TerminationStatus())
return status, backend
end

"""
    register_functions!(model, registered_functions)

Register every user-supplied nonlinear function with `model` via `JuMP.register`.

`registered_functions` is either `nothing` (no-op) or a vector of registered-function
entries, each carrying a symbol `s`, a `dimension`, the function `f`, and optional
derivative callbacks `gradf`/`grad2f`. Registration uses autodiff when no gradient
is provided, otherwise the supplied gradient (and second-derivative callback, if any).
"""
function register_functions!(model, registered_functions)
    # Nothing to do when the user registered no functions.
    registered_functions === nothing && return
    for rf in registered_functions
        if rf.gradf === nothing
            # No explicit derivatives: let JuMP autodiff the function.
            JuMP.register(model, rf.s, rf.dimension, rf.f; autodiff=rf.autodiff)
        else
            # Pass gradient alone, or gradient plus second derivative when available.
            derivs = rf.grad2f === nothing ? (rf.gradf,) : (rf.gradf, rf.grad2f)
            JuMP.register(model, rf.s, rf.dimension, rf.f, derivs...)
        end
    end
end
3 changes: 2 additions & 1 deletion test/fpump.jl
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,8 @@ end
register_args = [:special_constr_fct, 2, special_constr_fct]
JuMP.register(m, register_args...; autodiff=true)

@NLconstraint(m, special_constr_fct(x[1],x[2]) == 0)
# == 1 such that start value of 0 is not optimal (test for issue 189)
@NLconstraint(m, special_constr_fct(x[1],x[2]) == 1)
@objective(m, Max, sum(x))

optimizer = optimizer_with_attributes(
Expand Down

0 comments on commit e2a7299

Please sign in to comment.