From 4a16e45ac8b0a6bb99cc5a7096c9bf10a2079710 Mon Sep 17 00:00:00 2001
From: Oliver Schulz
Date: Tue, 5 Dec 2023 20:57:45 +0100
Subject: [PATCH 1/4] Update to Documenter v1

---
 docs/Project.toml | 2 +-
 docs/make.jl      | 2 +-
 test/Project.toml | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/docs/Project.toml b/docs/Project.toml
index f471c80..79af498 100644
--- a/docs/Project.toml
+++ b/docs/Project.toml
@@ -3,4 +3,4 @@ Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
 Markdown = "d6f4376e-aef5-505a-96c1-9c027394607a"
 
 [compat]
-Documenter = "~0.27"
+Documenter = "1"
diff --git a/docs/make.jl b/docs/make.jl
index 2cb24ca..a7a8924 100644
--- a/docs/make.jl
+++ b/docs/make.jl
@@ -29,7 +29,7 @@ makedocs(
     ],
     doctest = ("fixdoctests" in ARGS) ? :fix : true,
     linkcheck = !("nonstrict" in ARGS),
-    strict = !("nonstrict" in ARGS),
+    warnonly = ("nonstrict" in ARGS),
 )
 
 deploydocs(
diff --git a/test/Project.toml b/test/Project.toml
index 4a944da..d937f34 100644
--- a/test/Project.toml
+++ b/test/Project.toml
@@ -12,4 +12,4 @@ Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
 Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
 
 [compat]
-Documenter = "~0.27"
+Documenter = "1"

From 56e7518d896db5968405bc5df2d2b3d9f1160f0b Mon Sep 17 00:00:00 2001
From: Oliver Schulz
Date: Tue, 5 Dec 2023 20:54:27 +0100
Subject: [PATCH 2/4] Add only_gradient

---
 src/gradient.jl   | 15 ++++++++++++++-
 test/testutils.jl |  2 ++
 2 files changed, 16 insertions(+), 1 deletion(-)

diff --git a/src/gradient.jl b/src/gradient.jl
index 4a31688..e9021cd 100644
--- a/src/gradient.jl
+++ b/src/gradient.jl
@@ -43,6 +43,19 @@ export with_gradient!!
 
 with_gradient!!(f, @nospecialize(δx), x, ad::ADSelector) = with_gradient(f, x, ad::ADSelector)
 
 
+"""
+    only_gradient(f, x, ad::ADSelector)
+
+Returns the gradient ∇f(x) of `f` at `x`.
+
+See also [`with_gradient(f, x, ad)`](@ref).
+"""
+function only_gradient end
+export only_gradient
+
+only_gradient(f, x, ad::ADSelector) = with_gradient(f, x, ad)[2]
+
+
 struct _ValGradFunc{F,AD} <: Function
     f::F
@@ -72,7 +85,7 @@ struct _GenericGradientFunc{F,AD} <: Function
 end
 _GenericGradientFunc(::Type{FT}, ad::AD) where {FT,AD<:ADSelector} = _GenericGradientFunc{Type{FT},AD}(FT, ad)
 
-(f::_GenericGradientFunc)(x) = with_gradient(f.f, x, f.ad)[2]
+(f::_GenericGradientFunc)(x) = only_gradient(f.f, x, f.ad)
 
 """
     gradient_func(f, ad::ADSelector)
diff --git a/test/testutils.jl b/test/testutils.jl
index 815708a..961c0d6 100644
--- a/test/testutils.jl
+++ b/test/testutils.jl
@@ -39,6 +39,7 @@ function test_adsel_functionality(ad::ADSelector)
     @test_deprecated jacobian_matrix(f, x, ad) ≈ J_f_ref
     @test with_gradient(g, x, ad)[1] ≈ y_g_ref
     @test with_gradient(g, x, ad)[2] ≈ grad_g_x_ref
+    @test only_gradient(g, x, ad) ≈ grad_g_x_ref
 
     let δx = similar(x)
         fill!(δx, NaN)
@@ -57,6 +58,7 @@ function test_adsel_functionality(ad::ADSelector)
 
     if AutoDiffOperators.supports_structargs(reverse_ad_selector(ad))
         @test approx_cmp(with_gradient(f_nv, x_nv, ad), (y_nv_ref, grad_nv_ref))
+        @test approx_cmp(only_gradient(f_nv, x_nv, ad), grad_nv_ref)
         @test approx_cmp(valgrad_func(f_nv, ad)(x_nv), (y_nv_ref, grad_nv_ref))
         @test approx_cmp(gradient_func(f_nv, ad)(x_nv), grad_nv_ref)
     end

From 7a96dc611cf538a409c25b272b5aaf8cc690cc27 Mon Sep 17 00:00:00 2001
From: Oliver Schulz
Date: Tue, 5 Dec 2023 20:54:42 +0100
Subject: [PATCH 3/4] Improve ForwardDiff backend

---
 ext/AutoDiffOperatorsForwardDiffExt.jl | 31 +++++++++++++++++++++++++-
 1 file changed, 30 insertions(+), 1 deletion(-)

diff --git a/ext/AutoDiffOperatorsForwardDiffExt.jl b/ext/AutoDiffOperatorsForwardDiffExt.jl
index 509cbcc..5856d63 100644
--- a/ext/AutoDiffOperatorsForwardDiffExt.jl
+++ b/ext/AutoDiffOperatorsForwardDiffExt.jl
@@ -52,7 +52,36 @@ end
 
 
 # ToDo: Use AD parameters
-AutoDiffOperators.with_gradient(f, x::AbstractVector{<:Real}, ad::ForwardDiffAD) = f(x), ForwardDiff.gradient(f, x)
+function AutoDiffOperators.with_gradient(f, x::AbstractVector{<:Real}, ad::ForwardDiffAD)
+    T = typeof(x)
+    U = Core.Compiler.return_type(f, Tuple{typeof(x)})
+    y = f(x)
+    R = promote_type(eltype(x), eltype(y))
+    n_y, n_x = length(y), length(x)
+    dy = similar(x, R)
+    dy .= ForwardDiff.gradient(f, x)
+    return y, dy
+end
+
+
+function AutoDiffOperators.only_gradient(f, x, ad::ForwardDiffAD)
+    T = eltype(x)
+    U = Core.Compiler.return_type(f, Tuple{typeof(x)})
+    R = promote_type(T, U)
+    _only_gradient_impl(f, x, ad, R)
+end
+
+function _only_gradient_impl(f, x, ad::ForwardDiffAD, ::Type{R}) where {R <: Real}
+    dy = similar(x, R)
+    dy .= ForwardDiff.gradient(f, x)
+    return dy
+end
+
+function _only_gradient_impl(f, x, ad::ForwardDiffAD, ::Type)
+    return ForwardDiff.gradient(f, x)
+end
+
+
 
 
 # ToDo: Specialize `AutoDiffOperators.with_gradient!!(f, δx, x, ad::ForwardDiffAD)`

From f328df4f927cffbb95af6bd83da5fc83c99ec375 Mon Sep 17 00:00:00 2001
From: Oliver Schulz
Date: Wed, 6 Dec 2023 06:03:17 +0100
Subject: [PATCH 4/4] Increase package version to v0.1.7

---
 Project.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Project.toml b/Project.toml
index f81bae3..6da61a4 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,6 +1,6 @@
 name = "AutoDiffOperators"
 uuid = "6e1301d5-4f4d-4fb5-9679-7191e22f0e0e"
-version = "0.1.6"
+version = "0.1.7"
 
 [deps]
 ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"