Commit

Unify signatures, fix a few tex typos and improve English on the factory info/note.
kellertuer committed Aug 27, 2024
1 parent 9f48ba7 commit 318bca3
Showing 9 changed files with 28 additions and 29 deletions.
src/documentation_glossary.jl (6 changes: 2 additions & 4 deletions)
@@ -176,10 +176,8 @@ define!(
(type::String) -> """
!!! info
This function generates a [`ManifoldDefaultsFactory`](@ref) for [`$(type)`](@ref).
- If you do not provide a manifold, the manifold `M` later provided to (usually) generate
- the corresponding [`AbstractManoptSolverState`](@ref) will be used.
- This affects all arguments and keyword arguments with defaults that depend on the manifold,
- unless provided with a value here.
+ For default values that depend on the manifold, this factory postpones the construction
+ until the manifold is available, for example from a corresponding [`AbstractManoptSolverState`](@ref).
""",
)
define!(
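The reworded note above is easiest to check against a concrete call. Below is a minimal sketch of the deferred construction it describes, assuming `Manifolds.jl` for the sphere and the `ArmijoLinesearch` factory this commit also touches; the toy cost and gradient are illustrative, not part of the commit.

```julia
using Manopt, Manifolds

M = Sphere(2)
f(M, p) = p[3]                                # toy cost: minimize the height
grad_f(M, p) = project(M, p, [0.0, 0.0, 1.0]) # Riemannian gradient by tangent projection

# Called without a manifold, `ArmijoLinesearch()` returns a
# `ManifoldDefaultsFactory`; `gradient_descent` passes its manifold `M`
# on when it builds the solver state, so manifold-dependent defaults
# are only resolved at that point.
q = gradient_descent(M, f, grad_f, [1.0, 0.0, 0.0]; stepsize=ArmijoLinesearch())
```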
src/plans/conjugate_gradient_plan.jl (2 changes: 1 addition & 1 deletion)
@@ -39,7 +39,7 @@ $(_var(:Field, :vector_transport_method))
# Constructor
- ConjugateGradientState(M; kwargs...)
+ ConjugateGradientState(M::AbstractManifold; kwargs...)
where the last five fields can be set by their names as keywords, and the
`X` can be set to a tangent vector type using the keyword `initial_gradient` which defaults to `zero_vector(M,p)`,
src/plans/gradient_plan.jl (4 changes: 2 additions & 2 deletions)
@@ -448,8 +448,8 @@ function (a::AverageGradientRule)(
end

"""
- AverageGradient(M; kwargs...)
  AverageGradient(; kwargs...)
+ AverageGradient(M::AbstractManifold; kwargs...)
Add an average of gradients to a gradient processor. A set of previous directions (from the
inner processor) and the last iterate are stored; the average is taken after vector transporting
@@ -548,7 +548,7 @@ end

@doc """
Nesterov(; kwargs...)
- Nesterov(M; kwargs...)
+ Nesterov(M::AbstractManifold; kwargs...)
Assume ``f`` is ``L``-Lipschitz and ``μ``-strongly convex. Given
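Both rules in this file are factories for direction updates rather than step sizes; a hedged usage sketch, reusing `M`, `f`, and `grad_f` from the sphere example above and assuming `gradient_descent`'s `direction=` keyword:

```julia
# The manifold-free form defers construction, exactly as with the stepsize factories:
q1 = gradient_descent(M, f, grad_f, rand(M); direction=AverageGradient())
q2 = gradient_descent(M, f, grad_f, rand(M); direction=Nesterov())
```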
src/plans/stepsize.jl (35 changes: 18 additions & 17 deletions)
@@ -95,7 +95,7 @@ end
ConstantLength(s; kwargs...)
ConstantLength(M::AbstractManifold, s; kwargs...)
- Specify a [`Stepsize`] that is constant.
+ Specify a [`Stepsize`](@ref) that is constant.
# Input
@@ -191,8 +191,8 @@ function show(io::IO, s::DecreasingStepsize)
)
end
"""
- DecreasingLength(M; kwargs...)
  DecreasingLength(; kwargs...)
+ DecreasingLength(M::AbstractManifold; kwargs...)
Specify a [`Stepsize`](@ref) that is decreasing as ``s_k = $(_tex(:frac, "(l - ak)f^i", "(k+s)^e"))``
with the following
@@ -208,7 +208,7 @@ with the following
* `:relative` – scale the gradient tangent vector ``X`` to ``s_k*X``
* `:absolute` – scale the gradient to an absolute step length ``s_k``, that is ``$(_tex(:frac, "s_k", _tex(:norm, "X")))X``
- $(_note(:ManifoldDefaultFactory, "NesterovRule"))
+ $(_note(:ManifoldDefaultFactory, "DecreasingStepsize"))
"""
function DecreasingLength(args...; kwargs...)
return ManifoldDefaultsFactory(Manopt.DecreasingStepsize, args...; kwargs...)
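For intuition, the decreasing rule can be evaluated on its own; a sketch that reads the exponent `i` in the formula above as the iteration number `k` (the defaults here are illustrative, not Manopt's):

```julia
# s_k = (l - a*k) * f^k / (k + s)^e
decreasing_length(k; l=1.0, a=0.0, f=1.0, s=0.0, e=1.0) = (l - a * k) * f^k / (k + s)^e

[decreasing_length(k; f=0.9) for k in 1:5]  # 0.9, 0.405, 0.243, ... monotonically decreasing
```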
@@ -404,11 +404,11 @@ function set_parameter!(a::ArmijoLinesearchStepsize, ::Val{:IncreaseCondition},
end
"""
ArmijoLinesearch(; kwargs...)
- ArmijoLinesearch(M; kwargs...)
+ ArmijoLinesearch(M::AbstractManifold; kwargs...)

- Specify a step size that performs an Armijo line search. Given a Function `f:$(_math(:M))→ℝ`
- and its Riemannian Gradient ``$(_tex(:grad))f:$(_math(:M))$(_math(:TM))`,
- the curent point ``p∈$(_math(:M))`` and a search direction `X∈$(_math(:TpM))``.
+ Specify a step size that performs an Armijo line search. Given a function ``f: $(_math(:M)) → ℝ``
+ and its Riemannian gradient ``$(_tex(:grad))f: $(_math(:M)) → $(_math(:TM))``,
+ the current point ``p∈$(_math(:M))`` and a search direction ``X∈$(_math(:TpM))``.
Then the step size ``s`` is found by reducing the initial step size ``s`` until
@@ -450,6 +450,7 @@ function ArmijoLinesearch(args...; kwargs...)
return ManifoldDefaultsFactory(Manopt.ArmijoLinesearchStepsize, args...; kwargs...)
end
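The reduction loop described above is standard backtracking; a self-contained Euclidean stand-in (the names and the `p .+ s .* X` update are illustrative; Manopt retracts along the manifold instead):

```julia
# Shrink s until f(p + s*X) ≤ f(p) + c * s * ⟨grad f(p), X⟩, the sufficient decrease condition.
function armijo_backtrack(f, grad, p, X; s=1.0, c=1e-4, contraction=0.5, s_min=1e-12)
    slope = sum(grad(p) .* X)  # directional derivative, negative for a descent direction
    while f(p .+ s .* X) > f(p) + c * s * slope && s > s_min
        s *= contraction
    end
    return s
end

g(x) = sum(abs2, x)  # g(x) = ‖x‖²
armijo_backtrack(g, x -> 2 .* x, [1.0, 2.0], [-2.0, -4.0])  # returns 0.5
```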


@doc """
AdaptiveWNGradientStepsize{I<:Integer,R<:Real,F<:Function} <: Stepsize
@@ -569,7 +570,7 @@ function show(io::IO, awng::AdaptiveWNGradientStepsize)
end
"""
AdaptiveWNGradient(; kwargs...)
- AdaptiveWNGradient(M; kwargs...)
+ AdaptiveWNGradient(M::AbstractManifold; kwargs...)
A stepsize based on the adaptive gradient method introduced by [GrapigliaStella:2023](@cite).
@@ -1001,7 +1002,7 @@ get_message(a::NonmonotoneLinesearchStepsize) = a.message

@doc """
NonmonotoneLinesearch(; kwargs...)
- NonmonotoneLinesearch(M; kwargs...)
+ NonmonotoneLinesearch(M::AbstractManifold; kwargs...)
A functor representing a nonmonotone line search using the Barzilai-Borwein step size [IannazzoPorcelli:2017](@cite).
@@ -1021,10 +1022,10 @@ where ``α_{k-1}`` is the step size computed in the last iteration and ``$(_math
Then the Barzilai-Borwein step size is
```math
- α_k^{$(_tex(:text, "BB"))} = \begin{cases}
- $(_tex(:min))(α_{$(_tex(:text, "max"))}, $(_tex(:max))(α_{$(_tex(:text, "min"))}, τ_{k})), & $(_tex(:text, "if")) ⟨s_{k}, y_{k}⟩_{p_k} > 0,\\
- α_{$(_tex(:text, "max"))}, & \text{else,}
- \end{cases}
+ α_k^{$(_tex(:text, "BB"))} = $(_tex(:cases,
+ "$(_tex(:min))(α_{$(_tex(:text, "max"))}, $(_tex(:max))(α_{$(_tex(:text, "min"))}, τ_{k})), & $(_tex(:text, "if")) ⟨s_{k}, y_{k}⟩_{p_k} > 0,",
+ "α_{$(_tex(:text, "max"))}, & $(_tex(:text, "else,"))"
+ ))
```
where
Expand Down Expand Up @@ -1065,7 +1066,7 @@ $(_var(:Keyword, :p; add="to store an interim result"))
* `bb_max_stepsize=1e3`: upper bound for the Barzilai-Borwein step size greater than min_stepsize
$(_var(:Keyword, :retraction_method))
* `strategy=direct`: defines if the new step size is computed using the `:direct`, `:indirect` or `:alternating` strategy
- * `storage=[`StoreStateAction`](@ref)`(M; store_fields=[:Iterate, :Gradient])``: increase efficiency by using a [`StoreStateAction`](@ref) for `:Iterate` and `:Gradient`
+ * `storage=`[`StoreStateAction`](@ref)`(M; store_fields=[:Iterate, :Gradient])`: increase efficiency by using a [`StoreStateAction`](@ref) for `:Iterate` and `:Gradient`.
* `stepsize_reduction=0.5`: step size reduction factor contained in the interval ``(0,1)``
* `sufficient_decrease=1e-4`: sufficient decrease parameter contained in the interval ``(0,1)``
* `stop_when_stepsize_less=0.0`: smallest stepsize when to stop (the last one before is taken)
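The rebuilt `cases` expression above amounts to a clamp with a safeguard; a hedged scalar sketch, taking ``τ_k`` and the inner product ``⟨s_k, y_k⟩`` as given (the bounds mirror the `bb_min_stepsize`/`bb_max_stepsize` keywords, with illustrative values):

```julia
# α_BB = min(α_max, max(α_min, τ)) when ⟨s, y⟩ > 0, and α_max otherwise
bb_stepsize(τ, s_y; α_min=1e-3, α_max=1e3) =
    s_y > 0 ? min(α_max, max(α_min, τ)) : α_max

bb_stepsize(2.5, 0.1)   # 2.5: τ lies inside the bounds
bb_stepsize(2.5, -0.1)  # 1000.0: fallback to α_max
```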
@@ -1129,7 +1130,7 @@ function show(io::IO, ps::PolyakStepsize)
end
"""
Polyak(; kwargs...)
- Polyak(M; kwargs...)
+ Polyak(M::AbstractManifold; kwargs...)
Compute a step size according to a method proposed by Polyak, cf. the Dynamic step size
discussed in Section 3.2 of [Bertsekas:2015](@cite).
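For orientation, the classical Polyak rule divides the current optimality gap by the squared (sub)gradient norm; a hedged scalar sketch (Manopt's exact variant, including how it estimates the gap, sits in the collapsed part of this docstring):

```julia
# classical Polyak step: (f(p_k) - f_best) / ‖∂f(p_k)‖²
polyak_step(f_k, f_best, subgrad_norm) = (f_k - f_best) / subgrad_norm^2

polyak_step(1.5, 1.0, 2.0)  # 0.125
```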
@@ -1326,7 +1327,7 @@ function status_summary(a::WolfePowellLinesearchStepsize)
end
"""
WolfePowellLinesearch(; kwargs...)
- WolfePowellLinesearch(M; kwargs...)
+ WolfePowellLinesearch(M::AbstractManifold; kwargs...)
Perform a linesearch to fulfill both of the Armijo-Goldstein conditions
```math
@@ -1501,7 +1502,7 @@ Then the following Algorithm is performed similar to Algorithm 7 from [Huang:201

"""
WolfePowellBinaryLinesearch(; kwargs...)
- WolfePowellBinaryLinesearch(M; kwargs...)
+ WolfePowellBinaryLinesearch(M::AbstractManifold; kwargs...)
Perform a linesearch to fulfill both of the Armijo-Goldstein conditions
for some given sufficient decrease coefficient ``c_1`` and some sufficient curvature condition coefficient ``c_2``.
src/solvers/DouglasRachford.jl (2 changes: 1 addition & 1 deletion)
@@ -21,7 +21,7 @@ $(_var(:Field, :stopping_criterion, "stop"))
# Constructor
- DouglasRachfordState(M; kwargs...)
+ DouglasRachfordState(M::AbstractManifold; kwargs...)
# Input
src/solvers/NelderMead.jl (2 changes: 1 addition & 1 deletion)
@@ -73,7 +73,7 @@ $(_var(:Field, :retraction_method))
# Constructors
- NelderMeadState(M; kwargs...)
+ NelderMeadState(M::AbstractManifold; kwargs...)
Construct a Nelder-Mead state with a default population (if not provided) of a set of
`dimension(M)+1` random points stored in [`NelderMeadSimplex`](@ref).
src/solvers/alternating_gradient_descent.jl (2 changes: 1 addition & 1 deletion)
@@ -20,7 +20,7 @@ $(_var(:Field, :X; add=[:as_Gradient]))
# Constructors
- AlternatingGradientDescentState(M; kwargs...)
+ AlternatingGradientDescentState(M::AbstractManifold; kwargs...)
# Keyword arguments
* `inner_iterations=5`
src/solvers/gradient_descent.jl (2 changes: 1 addition & 1 deletion)
@@ -16,7 +16,7 @@ $(_var(:Field, :retraction_method))
# Constructor
- GradientDescentState(M; kwargs...)
+ GradientDescentState(M::AbstractManifold; kwargs...)
Initialize the gradient descent solver state, where
src/solvers/stochastic_gradient_descent.jl (2 changes: 1 addition & 1 deletion)
@@ -17,7 +17,7 @@ $(_var(:Field, :retraction_method))
# Constructor
- StochasticGradientDescentState(M; kwargs...)
+ StochasticGradientDescentState(M::AbstractManifold; kwargs...)
Create a `StochasticGradientDescentState` with start point `p`.
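Taken together, every state constructor touched in this commit now advertises the same `SomeSolverState(M::AbstractManifold; kwargs...)` shape; a hedged construction sketch (`p=` is the documented start-point keyword, other keywords vary per state):

```julia
using Manopt, Manifolds

M = Sphere(2)
gds = GradientDescentState(M; p=rand(M))  # same calling pattern across the solvers unified here
nms = NelderMeadState(M)                  # default random simplex of dimension(M)+1 points
```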