Skip to content

Commit

Permalink
Codecov updates.
Browse files Browse the repository at this point in the history
  • Loading branch information
kellertuer committed Aug 21, 2024
1 parent bfe4d37 commit cb8c9a4
Show file tree
Hide file tree
Showing 4 changed files with 5 additions and 8 deletions.
3 changes: 1 addition & 2 deletions .codecov.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
ignore:
- "src/examples/.*"
- "src/tutorials/.*"
- "src/helpers/exports/.*" # exclude exports, since there are no CI tests for Asy-exports
- "src/documentation_glossary.jl" # exclude this since it is just used to create the docs, and code cov goes bogus on this.
4 changes: 2 additions & 2 deletions src/Manopt.jl
Original file line number Diff line number Diff line change
Expand Up @@ -253,7 +253,7 @@ function __init__()
#
# Error Hints
#
@static if isdefined(Base.Experimental, :register_error_hint)
@static if isdefined(Base.Experimental, :register_error_hint) # COV_EXCL_LINE
Base.Experimental.register_error_hint(MethodError) do io, exc, argtypes, kwargs
if exc.f === convex_bundle_method_subsolver
print(
Expand All @@ -274,7 +274,7 @@ function __init__()
#
# Requires fallback for Julia < 1.9
#
@static if !isdefined(Base, :get_extension)
@static if !isdefined(Base, :get_extension) # COV_EXCL_LINE
@require JuMP = "4076af6c-e467-56ae-b986-b466b2749572" begin
include("../ext/ManoptJuMPExt.jl")
end
Expand Down
2 changes: 0 additions & 2 deletions src/plans/augmented_lagrangian_plan.jl
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
#_doc_al_Cost() = "$(_tex(:Cal, "L"))_\\rho(p, μ, λ)"
_doc_al_Cost(iter) = "$(_tex(:Cal, "L"))_{ρ^{($iter)}}(p, μ^{($iter)}, λ^{($iter)})"
_doc_AL_Cost_long = raw"""
```math
\mathcal L_\rho(p, μ, λ)
Expand Down
4 changes: 2 additions & 2 deletions test/solvers/test_adaptive_regularization_with_cubics.jl
Original file line number Diff line number Diff line change
Expand Up @@ -243,10 +243,10 @@ include("../utils/example_tasks.jl")
@testset "Start at a point with _exactly_ gradient zero - In Tutorial mode" begin
p0 = zeros(2)
M = Euclidean(2)
@test_logs (:info,) Manopt.set_parameter!(:Mode, "")
@test_logs (:info,) Manopt.set_parameter!(:Mode, "Tutorial")
f2(M, p) = 0
grad_f2(M, p) = [0.0, 0.0]
@test adaptive_regularization_with_cubics(M, f2, grad_f2, p0) == p0
@test_logs (:info,) Manopt.set_parameter!(:Mode, "Tutorial")
@test_logs (:info,) Manopt.set_parameter!(:Mode, "")
end
end

0 comments on commit cb8c9a4

Please sign in to comment.