From 2cda5e02655e303295ea2d7b3244d5cebaa480fe Mon Sep 17 00:00:00 2001 From: Ronny Bergmann Date: Wed, 21 Aug 2024 12:01:05 +0200 Subject: [PATCH] Replace further arguments. --- src/documentation_glossary.jl | 31 ++++++++++++------- src/solvers/FrankWolfe.jl | 2 +- .../adaptive_regularization_with_cubics.jl | 4 +-- src/solvers/augmented_Lagrangian_method.jl | 2 +- src/solvers/conjugate_gradient_descent.jl | 2 +- src/solvers/exact_penalty_method.jl | 2 +- src/solvers/gradient_descent.jl | 2 +- src/solvers/interior_point_Newton.jl | 10 +++--- src/solvers/quasi_Newton.jl | 2 +- .../truncated_conjugate_gradient_descent.jl | 4 +-- src/solvers/trust_regions.jl | 4 +-- 11 files changed, 37 insertions(+), 28 deletions(-) diff --git a/src/documentation_glossary.jl b/src/documentation_glossary.jl index 90e4d6d203..d7ea13fa50 100644 --- a/src/documentation_glossary.jl +++ b/src/documentation_glossary.jl @@ -143,9 +143,8 @@ define!( # for each variable as a symbol, we store # The variable name should be the symbol # :default – in positional or keyword arguments -# :description – a text description of the variable (always functions) +# :description – a text description of the variable # :type a type -# _var(args...; kwargs...) = glossary(:Variable, args...; kwargs...) 
#Meta: How to format an argument, a field of a struct, and a keyword @@ -184,7 +183,25 @@ define!( (; M="M", p="p") -> "a cost function ``f: $(_tex(:Cal, M))→ ℝ`` implemented as `($M, $p) -> v`", ) -define!(:Variable, :f, :type, "Any") +define!(:Variable, :f, :type, "Function") + +define!( + :Variable, + :grad_f, + :description, + (; M="M", p="p") -> + "the (Riemannian) gradient ``$(_tex(:grad))f: $(_math(:M; M=M)) → T$(_tex(:Cal, M))`` of f as a function `(M, p) -> X` or a function `(M, X, p) -> X` computing `X` in-place", +) +define!(:Variable, :grad_f, :type, "Function") + +define!( + :Variable, + :Hess_f, + :description, + (; M="M", p="p") -> + "the (Riemannian) Hessian ``$(_tex(:Hess))f: $(_math(:TpM; M=M, p=p)) → $(_math(:TpM; M=M, p=p))`` of f as a function `(M, p, X) -> Y` or a function `(M, Y, p, X) -> Y` computing `Y` in-place", +) +define!(:Variable, :Hess_f, :type, "Function") define!( :Variable, :M, :description, (; M="M") -> "a Riemannian manifold ``$(_tex(:Cal, M))``" @@ -297,14 +314,6 @@ _sc(args...; kwargs...) = glossary(:StoppingCriterion, args...; kwargs...) # Old strings # Arguments -_arg_grad_f = raw""" -* `grad_f`: the gradient ``\operatorname{grad}f: \mathcal M → T\mathcal M`` of f - as a function `(M, p) -> X` or a function `(M, X, p) -> X` computing `X` in-place -""" -_arg_Hess_f = """ -* `Hess_f`: the Hessian ``$(_tex(:Hess))_long`` of f - as a function `(M, p, X) -> Y` or a function `(M, Y, p, X) -> Y` computing `Y` in-place -""" _arg_sub_problem = "* `sub_problem` a [`AbstractManoptProblem`](@ref) to specify a problem for a solver or a closed form solution function." _arg_sub_state = "* `sub_state` a [`AbstractManoptSolverState`](@ref) for the `sub_problem`." _arg_subgrad_f = raw""" diff --git a/src/solvers/FrankWolfe.jl b/src/solvers/FrankWolfe.jl index 55d0c9e4db..504713de64 100644 --- a/src/solvers/FrankWolfe.jl +++ b/src/solvers/FrankWolfe.jl @@ -185,7 +185,7 @@ use a retraction and its inverse.
$(_var(:Argument, :M; type=true)) $(_var(:Argument, :f)) -$_arg_grad_f +$(_var(:Argument, :grad_f)) $(_var(:Argument, :p)) $(_note(:GradientObjective)) diff --git a/src/solvers/adaptive_regularization_with_cubics.jl b/src/solvers/adaptive_regularization_with_cubics.jl index 73e8f42143..fa72482281 100644 --- a/src/solvers/adaptive_regularization_with_cubics.jl +++ b/src/solvers/adaptive_regularization_with_cubics.jl @@ -222,8 +222,8 @@ For more details see [AgarwalBoumalBullinsCartis:2020](@cite). $(_var(:Argument, :M; type=true)) $(_var(:Argument, :f)) -$_arg_grad_f -$_arg_Hess_f +$(_var(:Argument, :grad_f)) +$(_var(:Argument, :Hess_f)) $(_var(:Argument, :p)) the cost `f` and its gradient and Hessian might also be provided as a [`ManifoldHessianObjective`](@ref) diff --git a/src/solvers/augmented_Lagrangian_method.jl b/src/solvers/augmented_Lagrangian_method.jl index 8b5d9922dd..7cff293abb 100644 --- a/src/solvers/augmented_Lagrangian_method.jl +++ b/src/solvers/augmented_Lagrangian_method.jl @@ -269,7 +269,7 @@ where ``θ_ρ ∈ (0,1)`` is a constant scaling factor. 
$(_var(:Argument, :M; type=true)) $(_var(:Argument, :f)) -$_arg_grad_f +$(_var(:Argument, :grad_f)) # Optional (if not called with the [`ConstrainedManifoldObjective`](@ref) `cmo`) diff --git a/src/solvers/conjugate_gradient_descent.jl b/src/solvers/conjugate_gradient_descent.jl index 07a9cb741e..fbb4df500d 100644 --- a/src/solvers/conjugate_gradient_descent.jl +++ b/src/solvers/conjugate_gradient_descent.jl @@ -71,7 +71,7 @@ $(_doc_update_delta_k) $(_var(:Argument, :M; type=true)) $(_var(:Argument, :f)) -$(_arg_grad_f) +$(_var(:Argument, :grad_f)) $(_var(:Argument, :p)) # Keyword arguments diff --git a/src/solvers/exact_penalty_method.jl b/src/solvers/exact_penalty_method.jl index fec584522f..5cb160131b 100644 --- a/src/solvers/exact_penalty_method.jl +++ b/src/solvers/exact_penalty_method.jl @@ -214,7 +214,7 @@ $(_doc_EMP_ρ_update) $(_var(:Argument, :M; type=true)) $(_var(:Argument, :f)) -$(_arg_grad_f) +$(_var(:Argument, :grad_f)) $(_var(:Argument, :p)) # Keyword arguments diff --git a/src/solvers/gradient_descent.jl b/src/solvers/gradient_descent.jl index 06c35690f2..2d240740c8 100644 --- a/src/solvers/gradient_descent.jl +++ b/src/solvers/gradient_descent.jl @@ -131,7 +131,7 @@ The algorithm can be performed in-place of `p`. $(_var(:Argument, :M; type=true)) $(_var(:Argument, :f)) -$_arg_grad_f +$(_var(:Argument, :grad_f)) $(_var(:Argument, :p)) $(_note(:GradientObjective)) diff --git a/src/solvers/interior_point_Newton.jl b/src/solvers/interior_point_Newton.jl index b44c372cd2..c1d947a587 100644 --- a/src/solvers/interior_point_Newton.jl +++ b/src/solvers/interior_point_Newton.jl @@ -37,11 +37,11 @@ the constraints are further fulfilled. 
# Input -* `M`: a manifold ``$(_math(:M))`` -* `f`: a cost function ``f : $(_math(:M)) → ℝ`` to minimize -* `grad_f`: the gradient ``$(_tex(:grad)) f : $(_math(:M)) → T $(_math(:M))`` of ``f`` -* `Hess_f`: the Hessian ``$(_tex(:Hess))f(p): T_p$(_math(:M)) → T_p$(_math(:M))``, ``X ↦ $(_tex(:Hess))f(p)[X] = ∇_X$(_tex(:grad))f(p)`` -$(_var(:Field, :p)) +$(_var(:Argument, :M)) +$(_var(:Argument, :f)) +$(_var(:Argument, :grad_f)) +$(_var(:Argument, :Hess_f)) +$(_var(:Argument, :p)) or a [`ConstrainedManifoldObjective`](@ref) `cmo` containing `f`, `grad_f`, `Hess_f`, and the constraints diff --git a/src/solvers/quasi_Newton.jl b/src/solvers/quasi_Newton.jl index 78027981ee..05714b18a6 100644 --- a/src/solvers/quasi_Newton.jl +++ b/src/solvers/quasi_Newton.jl @@ -196,7 +196,7 @@ The ``k``th iteration consists of $(_var(:Argument, :M; type=true)) $(_var(:Argument, :f)) -$(_arg_grad_f) +$(_var(:Argument, :grad_f)) $(_var(:Argument, :p)) # Keyword arguments diff --git a/src/solvers/truncated_conjugate_gradient_descent.jl b/src/solvers/truncated_conjugate_gradient_descent.jl index daf0243971..ffedca6560 100644 --- a/src/solvers/truncated_conjugate_gradient_descent.jl +++ b/src/solvers/truncated_conjugate_gradient_descent.jl @@ -431,8 +431,8 @@ see [AbsilBakerGallivan:2006, ConnGouldToint:2000](@cite). $(_var(:Argument, :M; type=true)) $(_var(:Argument, :f)) -$(_arg_grad_f) -$(_arg_Hess_f) +$(_var(:Argument, :grad_f)) +$(_var(:Argument, :Hess_f)) $(_var(:Argument, :p)) $(_var(:Argument, :X)) diff --git a/src/solvers/trust_regions.jl b/src/solvers/trust_regions.jl index e38667e153..4022c46606 100644 --- a/src/solvers/trust_regions.jl +++ b/src/solvers/trust_regions.jl @@ -271,8 +271,8 @@ by default the [`truncated_conjugate_gradient_descent`](@ref) is used. $(_var(:Argument, :M; type=true)) $(_var(:Argument, :f)) -$(_arg_grad_f) -$(_arg_Hess_f) +$(_var(:Argument, :grad_f)) +$(_var(:Argument, :Hess_f)) $(_var(:Argument, :p)) # Keyword arguments