From bf85177ad28557dfc737935ea1103bd7976ef8d1 Mon Sep 17 00:00:00 2001
From: Dominique Makowski
Date: Wed, 4 Sep 2024 08:06:52 +0100
Subject: [PATCH] add own docstring to softplus

---
 activate.jl      | 31 +++++++++++++++++++++++++++++++
 src/basicfuns.jl | 16 ++++++++++++----
 2 files changed, 43 insertions(+), 4 deletions(-)
 create mode 100644 activate.jl

diff --git a/activate.jl b/activate.jl
new file mode 100644
index 0000000..49da998
--- /dev/null
+++ b/activate.jl
@@ -0,0 +1,31 @@
+using Revise
+using Pkg
+
+# Activate the local package environment
+Pkg.activate("C:/Users/domma/Dropbox/Software/LogExpFunctions.jl/")
+
+using LogExpFunctions
+using CairoMakie
+
+
+xrange = range(-1.5, 1.5, length=100)
+yexp = exp.(xrange)
+ysoftplus1 = softplus.(xrange)
+ysoftplus2 = softplus.(xrange, 2)
+ysoftplus3 = softplus.(xrange, 3)
+
+ysoftplus5 = softplus.(xrange, 5)
+ysoftplus10 = softplus.(xrange, 10)
+
+
+# f = lines(xrange, yexp, color=:black)
+f = lines(xrange, ysoftplus1, color=:red)
+lines!(xrange, ysoftplus2, color=:orange)
+lines!(xrange, ysoftplus3, color=:darkorange)
+lines!(xrange, ysoftplus5, color=:green)
+lines!(xrange, ysoftplus10, color=:blue)
+
+ablines!(0, 1, color=:grey, linestyle=:dash)
+f
+
+softplus(0, 3)
\ No newline at end of file

diff --git a/src/basicfuns.jl b/src/basicfuns.jl
index d2bc90a..1e85c3c 100644
--- a/src/basicfuns.jl
+++ b/src/basicfuns.jl
@@ -165,13 +165,11 @@ Return `log(1+exp(x))` evaluated carefully for largish `x`.
 This is also called the ["softplus"](https://en.wikipedia.org/wiki/Rectifier_(neural_networks))
 transformation, being a smooth approximation to `max(0,x)`. Its inverse is [`logexpm1`](@ref).
 
-The generalized `softplus` function (Wiemann et al., 2024) takes an additional optional parameter `a` that control
-the approximation error with respect to the linear spline. It defaults to `a=1.0`, in which case the softplus is
-equivalent to `log1pexp`.
+For a generalized version of this transformation with an additional sharpness parameter `a`,
+see [`softplus`](@ref); with the default `a = 1`, `softplus` is identical to `log1pexp`.
 
 See:
  * Martin Maechler (2012) [“Accurately Computing log(1 − exp(− |a|))”](http://cran.r-project.org/web/packages/Rmpfr/vignettes/log1mexp-note.pdf)
- * Wiemann, P. F., Kneib, T., & Hambuckers, J. (2024). Using the softplus function to construct alternative link functions in generalized linear models and beyond. Statistical Papers, 65(5), 3155-3180.
 """
 log1pexp(x::Real) = _log1pexp(float(x)) # ensures that BigInt/BigFloat, Int/Float64 etc. dispatch to the same algorithm
 
@@ -262,6 +260,16 @@ Return `log(exp(x) - 1)` or the “invsoftplus” function. It is the inverse o
 logexpm1(x::Real) = x <= 18.0 ? log(_expm1(x)) : x <= 33.3 ? x - exp(-x) : oftype(exp(-x), x)
 logexpm1(x::Float32) = x <= 9f0 ? log(expm1(x)) : x <= 16f0 ? x - exp(-x) : oftype(exp(-x), x)
 
+"""
+$(SIGNATURES)
+
+The generalized `softplus` function (Wiemann et al., 2024) takes an additional optional parameter `a` that controls
+the approximation error with respect to the linear spline `max(0, x)`. It defaults to `a=1.0`, in which case the
+softplus is equivalent to [`log1pexp`](@ref).
+
+See:
+ * Wiemann, P. F., Kneib, T., & Hambuckers, J. (2024). Using the softplus function to construct alternative link functions in generalized linear models and beyond. Statistical Papers, 65(5), 3155-3180.
+"""
 softplus(x::Real) = log1pexp(x)
 softplus(x::Real, a::Real) = log1pexp(a * x) / a
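
For context, a minimal sketch of how the new generalized `softplus` behaves. It assumes this
branch of LogExpFunctions.jl is the active package; the `a` values below are only illustrative:

    using LogExpFunctions

    # One-argument form: exactly log1pexp.
    @assert softplus(0.3) == log1pexp(0.3)

    # Two-argument form: softplus(x, a) = log1pexp(a * x) / a.
    # Its gap to the linear spline max(0, x) peaks at the kink x = 0,
    # where it equals log(2) / a, so larger `a` gives a tighter fit.
    for a in (1, 2, 5, 10)
        println("a = $a: softplus(0, a) = ", softplus(0, a))  # = log(2) / a
    end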