Merge pull request #151 from dolfin-adjoint/dolci/add_derivative_options
Dolci/add derivative options
dham authored Jun 19, 2024
2 parents 42cc411 + b317169 commit 34511f9
Showing 3 changed files with 17 additions and 9 deletions.
14 changes: 7 additions & 7 deletions pyadjoint/optimization/optimization.py
@@ -33,7 +33,7 @@ def serialise_bounds(rf_np, bounds):
     return np.array(bounds_arr).T
 
 
-def minimize_scipy_generic(rf_np, method, bounds=None, **kwargs):
+def minimize_scipy_generic(rf_np, method, bounds=None, derivative_options=None, **kwargs):
     """Interface to the generic minimize method in scipy
     """
@@ -61,8 +61,7 @@ def minimize_scipy_generic(rf_np, method, bounds=None, **kwargs):
     m = [p.tape_value() for p in rf_np.controls]
     m_global = rf_np.obj_to_array(m)
     J = rf_np.__call__
-
-    dJ = lambda m: rf_np.derivative(m, forget=forget, project=project)
+    dJ = lambda m: rf_np.derivative(m, forget=forget, project=project, options=derivative_options)
     H = rf_np.hessian
 
     if "options" not in kwargs:
@@ -137,7 +136,7 @@ def jac(x):
     return m
 
 
-def minimize_custom(rf_np, bounds=None, **kwargs):
+def minimize_custom(rf_np, bounds=None, derivative_options=None, **kwargs):
     """ Interface to the user-provided minimisation method """
 
     try:
@@ -153,7 +152,7 @@ def minimize_custom(rf_np, bounds=None, **kwargs):
     m_global = rf_np.obj_to_array(m)
     J = rf_np.__call__
 
-    dJ = lambda m: rf_np.derivative(m, forget=None)
+    dJ = lambda m: rf_np.derivative(m, forget=None, options=derivative_options)
     H = rf_np.hessian
 
     if bounds is not None:
@@ -256,7 +255,7 @@ def minimize(rf, method='L-BFGS-B', scale=1.0, **kwargs):
     return opt
 
 
-def maximize(rf, method='L-BFGS-B', scale=1.0, **kwargs):
+def maximize(rf, method='L-BFGS-B', scale=1.0, derivative_options=None, **kwargs):
     """ Solves the maximisation problem with PDE constraint:
 
         max_m func(u, m)
@@ -275,6 +274,7 @@ def maximize(rf, method='L-BFGS-B', scale=1.0, **kwargs):
     * 'method' specifies the optimization method to be used to solve the problem.
       The available methods can be listed with the print_optimization_methods function.
     * 'scale' is a factor to scale to problem (default: 1.0).
+    * 'derivative_options' is a dictionary of options that will be passed to the `rf.derivative`.
     * 'bounds' is an optional keyword parameter to support control constraints: bounds = (lb, ub).
       lb and ub must be of the same type than the parameters m.
@@ -283,7 +283,7 @@ def maximize(rf, method='L-BFGS-B', scale=1.0, **kwargs):
     For detailed information about which arguments are supported for each optimization method,
     please refer to the documentaton of the optimization algorithm.
     """
-    return minimize(rf, method, scale=-scale, **kwargs)
+    return minimize(rf, method, scale=-scale, derivative_options=derivative_options, **kwargs)
 
 
 minimise = minimize
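For orientation, here is a minimal usage sketch of the new keyword (illustrative only, not part of the commit; it assumes `rf` is an annotated pyadjoint ReducedFunctional, as in the test updated below):

    from firedrake.adjoint import *

    # Pass Riesz-map options through to rf.derivative at every gradient
    # evaluation inside the scipy optimisation loop.
    x = minimize(rf, method="L-BFGS-B",
                 derivative_options={"riesz_representation": "l2"})

    # maximize accepts the same keyword and forwards it to minimize
    # with the scale negated.
    y = maximize(rf, derivative_options={"riesz_representation": "l2"})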
4 changes: 2 additions & 2 deletions pyadjoint/reduced_functional_numpy.py
@@ -55,7 +55,7 @@ def get_global(self, m):
         return numpy.array(m_global, dtype="d")
 
     @no_annotations
-    def derivative(self, m_array=None, forget=True, project=False):
+    def derivative(self, m_array=None, forget=True, project=False, options=None):
         """ An implementation of the reduced functional derivative evaluation
         that accepts the controls as an array of scalars. If no control values are given,
         the result is derivative at the lastest forward run.
@@ -67,7 +67,7 @@ def derivative(self, m_array=None, forget=True, project=False):
         # TODO: No good way to check. Is it ok to always assume `m_array` is the same as used last in __call__?
         # if m_array is not None:
         #     self.__call__(m_array)
-        dJdm = self.rf.derivative()
+        dJdm = self.rf.derivative(options=options)
         dJdm = Enlist(dJdm)
 
         m_global = []
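The same option can also be exercised directly at the NumPy-wrapper level (again a sketch under the same assumption about `rf`; the control-extraction idiom is copied from minimize_scipy_generic above):

    from pyadjoint.reduced_functional_numpy import ReducedFunctionalNumPy

    rf_np = ReducedFunctionalNumPy(rf)
    m = [p.tape_value() for p in rf_np.controls]
    m_array = rf_np.obj_to_array(m)
    # `options` is forwarded verbatim to the underlying rf.derivative call
    dJdm = rf_np.derivative(m_array, options={"riesz_representation": "l2"})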
8 changes: 8 additions & 0 deletions tests/firedrake_adjoint/test_optimisation.py
@@ -2,6 +2,7 @@
 pytest.importorskip("firedrake")
 
 from numpy.testing import assert_allclose
+import numpy as np
 from firedrake import *
 from firedrake.adjoint import *
 
@@ -56,3 +57,10 @@ def test_simple_inversion():
 
     x = minimize(rf)
     assert_allclose(x.dat.data, source_ref.dat.data, rtol=1e-2)
+    rf(source)
+    x = minimize(rf, derivative_options={"riesz_representation": "l2"})
+    assert_allclose(x.dat.data, source_ref.dat.data, rtol=1e-2)
+    rf(source)
+    x = minimize(rf, derivative_options={"riesz_representation": "H1"})
+    # Assert that the optimisation does not converge for H1 representation
+    assert not np.allclose(x.dat.data, source_ref.dat.data, rtol=1e-2)
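Why the H1 case is expected to fail (an explanatory sketch, not part of the commit): scipy's optimizers work on flat arrays under the Euclidean (l2) inner product, so they expect the gradient as the raw array of partial derivatives, i.e. the l2 Riesz representative. With {"riesz_representation": "H1"} the optimizer instead receives the H1 Riesz representative of the derivative, whose coefficient array is not the gradient scipy assumes, so the inversion should no longer recover source_ref:

    # Same assumption about rf as in the sketches above.
    g_l2 = rf.derivative(options={"riesz_representation": "l2"})  # raw coefficient gradient
    g_H1 = rf.derivative(options={"riesz_representation": "H1"})  # smoothed H1 representative
    # scipy consumes the coefficient array as if it were the l2 gradient,
    # so feeding it g_H1 yields inconsistent search directions.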
