diff --git a/ext/OmeletteLuxExt.jl b/ext/OmeletteLuxExt.jl
index fc10655..6f544f7 100644
--- a/ext/OmeletteLuxExt.jl
+++ b/ext/OmeletteLuxExt.jl
@@ -13,7 +13,7 @@ function _add_predictor(predictor::Omelette.Pipeline, layer::Lux.Dense, p)
     if layer.activation === identity
         # Do nothing
     elseif layer.activation === Lux.NNlib.relu
-        push!(predictor.layers, Omelette.ReLUBigM(1e6))
+        push!(predictor.layers, Omelette.ReLUBigM(1e4))
     else
         error("Unsupported activation function: $x")
     end
diff --git a/test/test_ReLU.jl b/test/test_ReLU.jl
index 1622e67..0b667d7 100644
--- a/test/test_ReLU.jl
+++ b/test/test_ReLU.jl
@@ -35,7 +35,7 @@ function test_ReLU_BigM()
     fix.(x, [-1, 2])
     optimize!(model)
     @assert is_solved_and_feasible(model)
-    @test value.(y) ≈ [0.0, 2.]
+    @test value.(y) ≈ [0.0, 2.0]
     return
 end
@@ -64,7 +64,7 @@ function test_ReLU_Quadratic()
     fix.(x, [-1, 2])
     optimize!(model)
    @assert is_solved_and_feasible(model)
-    @test value.(y) ≈ [0.0, 2.]
+    @test value.(y) ≈ [0.0, 2.0]
     return
 end
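
Note on the change: lowering the default big-M constant from 1e6 to 1e4 tightens the LP relaxation of the ReLU encoding and avoids the numerical issues that very large coefficients can cause, provided 1e4 still dominates the magnitude of the layer's pre-activations. For context, below is a minimal JuMP sketch of a generic big-M ReLU encoding parameterized by such a constant; the exact constraints emitted by Omelette.ReLUBigM are not shown in this diff, and the HiGHS solver is an assumed choice for illustration only.

    # Minimal sketch of a big-M encoding of y = max(0, x); not Omelette's
    # actual implementation, just the standard formulation such a constant
    # parameterizes.
    using JuMP, HiGHS            # HiGHS is an assumed solver, not from this diff

    M = 1e4                      # the big-M constant changed in this PR
    model = Model(HiGHS.Optimizer)
    set_silent(model)
    @variable(model, x)
    @variable(model, y >= 0)     # y models max(0, x)
    @variable(model, z, Bin)     # indicator: z = 1 on the active (x > 0) branch
    @constraint(model, y >= x)                # y is at least x
    @constraint(model, y <= M * z)            # z = 0 forces y to 0
    @constraint(model, y <= x + M * (1 - z))  # z = 1 forces y down to x
    fix(x, 2.0)
    optimize!(model)
    value(y)  # ≈ 2.0, matching the updated test expectation

If M is chosen too small the encoding cuts off valid inputs (any pre-activation larger than M makes the model infeasible or wrong), so shrinking it is safe only when the inputs are known to stay well inside ±1e4.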