diff --git a/test/neural_net_solution.jl b/test/neural_net_solution.jl
index e46cbcd6..fb54c851 100644
--- a/test/neural_net_solution.jl
+++ b/test/neural_net_solution.jl
@@ -65,7 +65,7 @@ for i in 1:10
 end
 Flux.adjust!(optim, ParameterSchedulers.next!(s))
 Flux.update!(optim, neural_net, grads[1])
- push!(losses, loss) # logging, outside gradient context
+ push!(losses, lss) # logging, outside gradient context
 if epoch % 10 == 0
 println("Epoch: $epoch; Loss: $lss; Opt state: $(optim.layers[1].weight.rule)")
 end
 end
 end