Commit 1674ee9

Merge pull request #712 from JuliaDiff/dw/backport
Backport bug and test fixes to release-0.10 to prepare for new 0.10 release
2 parents 2ff6808 + c7f62ca commit 1674ee9

22 files changed (+203 -99 lines)

.github/workflows/ci.yml

Lines changed: 1 addition & 1 deletion
@@ -21,7 +21,7 @@ jobs:
           - '1.0'
           - '1.6'
           - '1'
-          - 'nightly'
+          # - 'nightly'
         os:
           - ubuntu-latest
         arch:

.gitignore

Lines changed: 1 addition & 0 deletions
@@ -4,5 +4,6 @@
 *.DS_Store
 /docs/build/
 /docs/site/
+/docs/Manifest.toml
 /benchmark_data/
 /Manifest.toml

Project.toml

Lines changed: 4 additions & 2 deletions
@@ -21,6 +21,7 @@ CommonSubexpressions = "0.3"
 DiffResults = "0.0.1, 0.0.2, 0.0.3, 0.0.4, 1.0.1"
 DiffRules = "1.4.0"
 DiffTests = "0.0.1, 0.1"
+IrrationalConstants = "0.1, 0.2"
 LogExpFunctions = "0.3"
 NaNMath = "0.2.2, 0.3, 1"
 Preferences = "1"
@@ -35,12 +36,13 @@ ForwardDiffStaticArraysExt = "StaticArrays"
 Calculus = "49dc2e85-a5d0-5ad3-a950-438e2897f1b9"
 DiffTests = "de460e47-3fe3-5279-bb4a-814414816d5d"
 InteractiveUtils = "b77e0a4c-d291-57a0-90e8-8db25a27a240"
+IrrationalConstants = "92d709cd-6900-40b7-9082-c6be49f344b6"
 SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
 StaticArrays = "90137ffa-7385-5640-81b9-e52037218182"
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

 [targets]
-test = ["Calculus", "DiffTests", "SparseArrays", "Test", "InteractiveUtils", "StaticArrays"]
+test = ["Calculus", "DiffTests", "IrrationalConstants", "SparseArrays", "Test", "InteractiveUtils", "StaticArrays"]

 [weakdeps]
-StaticArrays = "90137ffa-7385-5640-81b9-e52037218182"
+StaticArrays = "90137ffa-7385-5640-81b9-e52037218182"
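The new IrrationalConstants entries presumably pair with the promotion-rule change in src/dual.jl below, where `Irrational` is widened to `AbstractIrrational`: the test suite can now exercise constants such as `logtwo` that are `AbstractIrrational` but not `Irrational`. A minimal sketch of the kind of call this covers (illustrative, not taken from the test suite):

    using ForwardDiff
    using IrrationalConstants: logtwo

    # d/dx (logtwo * x) = log(2) ≈ 0.6931; relies on promotion between
    # Dual and an AbstractIrrational constant.
    ForwardDiff.derivative(x -> logtwo * x, 1.0)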

docs/Project.toml

Lines changed: 3 additions & 0 deletions
@@ -1,3 +1,6 @@
 [deps]
 Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
 ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
+
+[compat]
+Documenter = "1"

docs/make.jl

Lines changed: 2 additions & 1 deletion
@@ -11,7 +11,8 @@ makedocs(modules=[ForwardDiff],
                      "Upgrading from Older Versions" => "user/upgrade.md"],
                  "Developer Documentation" => [
                      "How ForwardDiff Works" => "dev/how_it_works.md",
-                     "How to Contribute" => "dev/contributing.md"]])
+                     "How to Contribute" => "dev/contributing.md"]],
+         checkdocs=:exports)

 deploydocs(
     repo = "github.com/JuliaDiff/ForwardDiff.jl.git"

ext/ForwardDiffStaticArraysExt.jl

Lines changed: 27 additions & 29 deletions
@@ -7,7 +7,7 @@ using ForwardDiff: Dual, partials, GradientConfig, JacobianConfig, HessianConfig
     gradient, hessian, jacobian, gradient!, hessian!, jacobian!,
     extract_gradient!, extract_jacobian!, extract_value!,
     vector_mode_gradient, vector_mode_gradient!,
-    vector_mode_jacobian, vector_mode_jacobian!, valtype, value, _lyap_div!
+    vector_mode_jacobian, vector_mode_jacobian!, valtype, value
 using DiffResults: DiffResult, ImmutableDiffResult, MutableDiffResult

 @generated function dualize(::Type{T}, x::StaticArray) where T
@@ -23,27 +23,25 @@ end

 @inline static_dual_eval(::Type{T}, f, x::StaticArray) where T = f(dualize(T, x))

+# To fix method ambiguity issues:
 function LinearAlgebra.eigvals(A::Symmetric{<:Dual{Tg,T,N}, <:StaticArrays.StaticMatrix}) where {Tg,T<:Real,N}
-    λ,Q = eigen(Symmetric(value.(parent(A))))
-    parts = ntuple(j -> diag(Q' * getindex.(partials.(A), j) * Q), N)
-    Dual{Tg}.(λ, tuple.(parts...))
+    return ForwardDiff._eigvals(A)
 end
-
 function LinearAlgebra.eigen(A::Symmetric{<:Dual{Tg,T,N}, <:StaticArrays.StaticMatrix}) where {Tg,T<:Real,N}
-    λ = eigvals(A)
-    _,Q = eigen(Symmetric(value.(parent(A))))
-    parts = ntuple(j -> Q*ForwardDiff._lyap_div!(Q' * getindex.(partials.(A), j) * Q - Diagonal(getindex.(partials.(λ), j)), value.(λ)), N)
-    Eigen(λ,Dual{Tg}.(Q, tuple.(parts...)))
+    return ForwardDiff._eigen(A)
 end

+# For `MMatrix` we can use the in-place method
+ForwardDiff._lyap_div!!(A::StaticArrays.MMatrix, λ::AbstractVector) = ForwardDiff._lyap_div!(A, λ)
+
 # Gradient
-@inline ForwardDiff.gradient(f, x::StaticArray) = vector_mode_gradient(f, x)
-@inline ForwardDiff.gradient(f, x::StaticArray, cfg::GradientConfig) = gradient(f, x)
-@inline ForwardDiff.gradient(f, x::StaticArray, cfg::GradientConfig, ::Val) = gradient(f, x)
+@inline ForwardDiff.gradient(f::F, x::StaticArray) where F = vector_mode_gradient(f, x)
+@inline ForwardDiff.gradient(f::F, x::StaticArray, cfg::GradientConfig) where F = gradient(f, x)
+@inline ForwardDiff.gradient(f::F, x::StaticArray, cfg::GradientConfig, ::Val) where F = gradient(f, x)

-@inline ForwardDiff.gradient!(result::Union{AbstractArray,DiffResult}, f, x::StaticArray) = vector_mode_gradient!(result, f, x)
-@inline ForwardDiff.gradient!(result::Union{AbstractArray,DiffResult}, f, x::StaticArray, cfg::GradientConfig) = gradient!(result, f, x)
-@inline ForwardDiff.gradient!(result::Union{AbstractArray,DiffResult}, f, x::StaticArray, cfg::GradientConfig, ::Val) = gradient!(result, f, x)
+@inline ForwardDiff.gradient!(result::Union{AbstractArray,DiffResult}, f::F, x::StaticArray) where F = vector_mode_gradient!(result, f, x)
+@inline ForwardDiff.gradient!(result::Union{AbstractArray,DiffResult}, f::F, x::StaticArray, cfg::GradientConfig) where F = gradient!(result, f, x)
+@inline ForwardDiff.gradient!(result::Union{AbstractArray,DiffResult}, f::F, x::StaticArray, cfg::GradientConfig, ::Val) where F = gradient!(result, f, x)

 @generated function extract_gradient(::Type{T}, y::Real, x::S) where {T,S<:StaticArray}
     result = Expr(:tuple, [:(partials(T, y, $i)) for i in 1:length(x)]...)
@@ -65,13 +63,13 @@ end
 end

 # Jacobian
-@inline ForwardDiff.jacobian(f, x::StaticArray) = vector_mode_jacobian(f, x)
-@inline ForwardDiff.jacobian(f, x::StaticArray, cfg::JacobianConfig) = jacobian(f, x)
-@inline ForwardDiff.jacobian(f, x::StaticArray, cfg::JacobianConfig, ::Val) = jacobian(f, x)
+@inline ForwardDiff.jacobian(f::F, x::StaticArray) where F = vector_mode_jacobian(f, x)
+@inline ForwardDiff.jacobian(f::F, x::StaticArray, cfg::JacobianConfig) where F = jacobian(f, x)
+@inline ForwardDiff.jacobian(f::F, x::StaticArray, cfg::JacobianConfig, ::Val) where F = jacobian(f, x)

-@inline ForwardDiff.jacobian!(result::Union{AbstractArray,DiffResult}, f, x::StaticArray) = vector_mode_jacobian!(result, f, x)
-@inline ForwardDiff.jacobian!(result::Union{AbstractArray,DiffResult}, f, x::StaticArray, cfg::JacobianConfig) = jacobian!(result, f, x)
-@inline ForwardDiff.jacobian!(result::Union{AbstractArray,DiffResult}, f, x::StaticArray, cfg::JacobianConfig, ::Val) = jacobian!(result, f, x)
+@inline ForwardDiff.jacobian!(result::Union{AbstractArray,DiffResult}, f::F, x::StaticArray) where F = vector_mode_jacobian!(result, f, x)
+@inline ForwardDiff.jacobian!(result::Union{AbstractArray,DiffResult}, f::F, x::StaticArray, cfg::JacobianConfig) where F = jacobian!(result, f, x)
+@inline ForwardDiff.jacobian!(result::Union{AbstractArray,DiffResult}, f::F, x::StaticArray, cfg::JacobianConfig, ::Val) where F = jacobian!(result, f, x)

 @generated function extract_jacobian(::Type{T}, ydual::StaticArray, x::S) where {T,S<:StaticArray}
     M, N = length(ydual), length(x)
@@ -110,18 +108,18 @@ end
 end

 # Hessian
-ForwardDiff.hessian(f, x::StaticArray) = jacobian(y -> gradient(f, y), x)
-ForwardDiff.hessian(f, x::StaticArray, cfg::HessianConfig) = hessian(f, x)
-ForwardDiff.hessian(f, x::StaticArray, cfg::HessianConfig, ::Val) = hessian(f, x)
+ForwardDiff.hessian(f::F, x::StaticArray) where F = jacobian(y -> gradient(f, y), x)
+ForwardDiff.hessian(f::F, x::StaticArray, cfg::HessianConfig) where F = hessian(f, x)
+ForwardDiff.hessian(f::F, x::StaticArray, cfg::HessianConfig, ::Val) where F = hessian(f, x)

-ForwardDiff.hessian!(result::AbstractArray, f, x::StaticArray) = jacobian!(result, y -> gradient(f, y), x)
+ForwardDiff.hessian!(result::AbstractArray, f::F, x::StaticArray) where F = jacobian!(result, y -> gradient(f, y), x)

-ForwardDiff.hessian!(result::MutableDiffResult, f, x::StaticArray) = hessian!(result, f, x, HessianConfig(f, result, x))
+ForwardDiff.hessian!(result::MutableDiffResult, f::F, x::StaticArray) where F = hessian!(result, f, x, HessianConfig(f, result, x))

-ForwardDiff.hessian!(result::ImmutableDiffResult, f, x::StaticArray, cfg::HessianConfig) = hessian!(result, f, x)
-ForwardDiff.hessian!(result::ImmutableDiffResult, f, x::StaticArray, cfg::HessianConfig, ::Val) = hessian!(result, f, x)
+ForwardDiff.hessian!(result::ImmutableDiffResult, f::F, x::StaticArray, cfg::HessianConfig) where F = hessian!(result, f, x)
+ForwardDiff.hessian!(result::ImmutableDiffResult, f::F, x::StaticArray, cfg::HessianConfig, ::Val) where F = hessian!(result, f, x)

-function ForwardDiff.hessian!(result::ImmutableDiffResult, f, x::StaticArray)
+function ForwardDiff.hessian!(result::ImmutableDiffResult, f::F, x::StaticArray) where F
     T = typeof(Tag(f, eltype(x)))
     d1 = dualize(T, x)
     d2 = dualize(T, d1)
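Two changes run through this file: the `eigvals`/`eigen` methods now forward to shared `ForwardDiff._eigvals`/`_eigen` helpers defined in src/dual.jl below, and the API methods gained `f::F ... where F` annotations, which force Julia to specialize on the function argument (Julia may otherwise skip specialization for function-typed arguments that are only passed through, not called). A small usage sketch of the StaticArrays code path these methods serve; outputs in the comments are illustrative:

    using ForwardDiff, StaticArrays

    f(x) = sum(abs2, x)
    x = SVector(1.0, 2.0, 3.0)

    g = ForwardDiff.gradient(f, x)   # SVector(2.0, 4.0, 6.0)
    H = ForwardDiff.hessian(f, x)    # 3×3 SMatrix with 2.0 on the diagonal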

src/ForwardDiff.jl

Lines changed: 6 additions & 1 deletion
@@ -7,7 +7,12 @@ if VERSION >= v"1.6"
 end
 using Random
 using LinearAlgebra
-
+if VERSION < v"1.2.0-DEV.125" # 1da48c2e4028c1514ed45688be727efbef1db884
+    require_one_based_indexing(A...) = !Base.has_offset_axes(A...) || throw(ArgumentError(
+        "offset arrays are not supported but got an array with index other than 1"))
+else
+    using Base: require_one_based_indexing
+end
 import Printf
 import NaNMath
 import SpecialFunctions
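The backported `require_one_based_indexing` (part of Base from Julia 1.2 onwards, re-implemented here for older releases) is what the derivative/gradient/hessian entry points below use to reject arrays with offset axes up front. A sketch of the expected behaviour, using OffsetArrays purely for illustration (it is not a ForwardDiff dependency):

    using ForwardDiff, OffsetArrays

    f(x) = sum(abs2, x)
    x = OffsetVector([1.0, 2.0, 3.0], -1)   # axes are 0:2 instead of 1:3

    # With the guard in place this throws an ArgumentError instead of
    # computing against mismatched indices.
    ForwardDiff.gradient(f, x)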

src/derivative.jl

Lines changed: 7 additions & 4 deletions
@@ -22,8 +22,9 @@ stored in `y`.

 Set `check` to `Val{false}()` to disable tag checking. This can lead to perturbation confusion, so should be used with care.
 """
-@inline function derivative(f!, y::AbstractArray, x::Real,
-                            cfg::DerivativeConfig{T} = DerivativeConfig(f!, y, x), ::Val{CHK}=Val{true}()) where {T, CHK}
+@inline function derivative(f!::F, y::AbstractArray, x::Real,
+                            cfg::DerivativeConfig{T} = DerivativeConfig(f!, y, x), ::Val{CHK}=Val{true}()) where {F, T, CHK}
+    require_one_based_indexing(y)
     CHK && checktag(T, f!, x)
     ydual = cfg.duals
     seed!(ydual, y)
@@ -42,6 +43,7 @@ This method assumes that `isa(f(x), Union{Real,AbstractArray})`.
 """
 @inline function derivative!(result::Union{AbstractArray,DiffResult},
                              f::F, x::R) where {F,R<:Real}
+    result isa DiffResult || require_one_based_indexing(result)
     T = typeof(Tag(f, R))
     ydual = f(Dual{T}(x, one(x)))
     result = extract_value!(T, result, ydual)
@@ -58,8 +60,9 @@ called as `f!(y, x)` where the result is stored in `y`.
 Set `check` to `Val{false}()` to disable tag checking. This can lead to perturbation confusion, so should be used with care.
 """
 @inline function derivative!(result::Union{AbstractArray,DiffResult},
-                             f!, y::AbstractArray, x::Real,
-                             cfg::DerivativeConfig{T} = DerivativeConfig(f!, y, x), ::Val{CHK}=Val{true}()) where {T, CHK}
+                             f!::F, y::AbstractArray, x::Real,
+                             cfg::DerivativeConfig{T} = DerivativeConfig(f!, y, x), ::Val{CHK}=Val{true}()) where {F, T, CHK}
+    result isa DiffResult ? require_one_based_indexing(y) : require_one_based_indexing(result, y)
     CHK && checktag(T, f!, x)
     ydual = cfg.duals
     seed!(ydual, y)
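For reference, the method that gained the check is the one where an in-place `f!` writes its output into `y`; a minimal usage sketch (not part of the diff):

    using ForwardDiff

    f!(y, x) = (y[1] = sin(x); y[2] = cos(x); y)

    y = zeros(2)
    # Derivative of each output component with respect to x at x = 1.0,
    # approximately [cos(1.0), -sin(1.0)]. With the new check, an offset-indexed
    # `y` would raise an ArgumentError instead.
    ForwardDiff.derivative(f!, y, 1.0)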

src/dual.jl

Lines changed: 30 additions & 8 deletions
@@ -78,6 +78,10 @@ end
 @inline Dual{T,V,N}(x::Number) where {T,V,N} = convert(Dual{T,V,N}, x)
 @inline Dual{T,V}(x) where {T,V} = convert(Dual{T,V}, x)

+# Fix method ambiguity issue by adapting the definition in Base to `Dual`s
+Dual{T,V,N}(x::Base.TwicePrecision) where {T,V,N} =
+    (Dual{T,V,N}(x.hi) + Dual{T,V,N}(x.lo))::Dual{T,V,N}
+
 ##############################
 # Utility/Accessor Functions #
 ##############################
@@ -340,7 +344,6 @@ else
     Base.div(x::Dual, y::Dual) = div(value(x), value(y))
 end

-Base.hash(d::Dual) = hash(value(d))
 Base.hash(d::Dual, hsh::UInt) = hash(value(d), hsh)

 function Base.read(io::IO, ::Type{Dual{T,V,N}}) where {T,V,N}
@@ -416,7 +419,7 @@ function Base.promote_rule(::Type{Dual{T,A,N}},
     return Dual{T,promote_type(A, B),N}
 end

-for R in (Irrational, Real, BigFloat, Bool)
+for R in (AbstractIrrational, Real, BigFloat, Bool)
     if isconcretetype(R) # issue #322
         @eval begin
             Base.promote_rule(::Type{$R}, ::Type{Dual{T,V,N}}) where {T,V,N} = Dual{T,promote_type($R, V),N}
@@ -703,7 +706,11 @@ end
 # Symmetric eigvals #
 #-------------------#

-function LinearAlgebra.eigvals(A::Symmetric{<:Dual{Tg,T,N}}) where {Tg,T<:Real,N}
+# To be able to reuse this default definition in the StaticArrays extension
+# (has to be re-defined to avoid method ambiguity issues)
+# we forward the call to an internal method that can be shared and reused
+LinearAlgebra.eigvals(A::Symmetric{<:Dual{Tg,T,N}}) where {Tg,T<:Real,N} = _eigvals(A)
+function _eigvals(A::Symmetric{<:Dual{Tg,T,N}}) where {Tg,T<:Real,N}
     λ,Q = eigen(Symmetric(value.(parent(A))))
     parts = ntuple(j -> diag(Q' * getindex.(partials.(A), j) * Q), N)
     Dual{Tg}.(λ, tuple.(parts...))
@@ -721,8 +728,19 @@ function LinearAlgebra.eigvals(A::SymTridiagonal{<:Dual{Tg,T,N}}) where {Tg,T<:R
     Dual{Tg}.(λ, tuple.(parts...))
 end

-# A ./ (λ - λ') but with diag special cased
-function _lyap_div!(A, λ)
+# A ./ (λ' .- λ) but with diag special cased
+# Default out-of-place method
+function _lyap_div!!(A::AbstractMatrix, λ::AbstractVector)
+    return map(
+        (a, b, idx) -> a / (idx[1] == idx[2] ? oneunit(b) : b),
+        A,
+        λ' .- λ,
+        CartesianIndices(A),
+    )
+end
+# For `Matrix` (and e.g. `StaticArrays.MMatrix`) we can use an in-place method
+_lyap_div!!(A::Matrix, λ::AbstractVector) = _lyap_div!(A, λ)
+function _lyap_div!(A::AbstractMatrix, λ::AbstractVector)
     for (j,μ) in enumerate(λ), (k,λ) in enumerate(λ)
         if k ≠ j
             A[k,j] /= μ - λ
@@ -731,17 +749,21 @@ function _lyap_div!(A, λ)
     A
 end

-function LinearAlgebra.eigen(A::Symmetric{<:Dual{Tg,T,N}}) where {Tg,T<:Real,N}
+# To be able to reuse this default definition in the StaticArrays extension
+# (has to be re-defined to avoid method ambiguity issues)
+# we forward the call to an internal method that can be shared and reused
+LinearAlgebra.eigen(A::Symmetric{<:Dual{Tg,T,N}}) where {Tg,T<:Real,N} = _eigen(A)
+function _eigen(A::Symmetric{<:Dual{Tg,T,N}}) where {Tg,T<:Real,N}
     λ = eigvals(A)
     _,Q = eigen(Symmetric(value.(parent(A))))
-    parts = ntuple(j -> Q*_lyap_div!(Q' * getindex.(partials.(A), j) * Q - Diagonal(getindex.(partials.(λ), j)), value.(λ)), N)
+    parts = ntuple(j -> Q*_lyap_div!!(Q' * getindex.(partials.(A), j) * Q - Diagonal(getindex.(partials.(λ), j)), value.(λ)), N)
     Eigen(λ,Dual{Tg}.(Q, tuple.(parts...)))
 end

 function LinearAlgebra.eigen(A::SymTridiagonal{<:Dual{Tg,T,N}}) where {Tg,T<:Real,N}
     λ = eigvals(A)
     _,Q = eigen(SymTridiagonal(value.(parent(A))))
-    parts = ntuple(j -> Q*_lyap_div!(Q' * getindex.(partials.(A), j) * Q - Diagonal(getindex.(partials.(λ), j)), value.(λ)), N)
+    parts = ntuple(j -> Q*_lyap_div!!(Q' * getindex.(partials.(A), j) * Q - Diagonal(getindex.(partials.(λ), j)), value.(λ)), N)
     Eigen(λ,Dual{Tg}.(Q, tuple.(parts...)))
 end
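The `eigvals`/`eigen` bodies themselves are unchanged; they are wrapped in `_eigvals`/`_eigen` so the StaticArrays extension can forward to them, and the Lyapunov-style division gains an out-of-place `_lyap_div!!` fallback for matrices that cannot be mutated (e.g. `SMatrix`). A small end-to-end sketch of the code path, with an illustrative objective:

    using ForwardDiff, LinearAlgebra

    # Largest eigenvalue of a 2×2 symmetric matrix parameterised by x;
    # differentiating this goes through eigvals(::Symmetric{<:Dual}) above.
    f(x) = maximum(eigvals(Symmetric([x[1] x[2]; x[2] x[3]])))

    ForwardDiff.gradient(f, [2.0, 1.0, 3.0])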
src/gradient.jl

Lines changed: 3 additions & 1 deletion
@@ -13,7 +13,8 @@ This method assumes that `isa(f(x), Real)`.

 Set `check` to `Val{false}()` to disable tag checking. This can lead to perturbation confusion, so should be used with care.
 """
-function gradient(f, x::AbstractArray, cfg::GradientConfig{T} = GradientConfig(f, x), ::Val{CHK}=Val{true}()) where {T, CHK}
+function gradient(f::F, x::AbstractArray, cfg::GradientConfig{T} = GradientConfig(f, x), ::Val{CHK}=Val{true}()) where {F, T, CHK}
+    require_one_based_indexing(x)
     CHK && checktag(T, f, x)
     if chunksize(cfg) == length(x)
         return vector_mode_gradient(f, x, cfg)
@@ -32,6 +33,7 @@ This method assumes that `isa(f(x), Real)`.

 """
 function gradient!(result::Union{AbstractArray,DiffResult}, f::F, x::AbstractArray, cfg::GradientConfig{T} = GradientConfig(f, x), ::Val{CHK}=Val{true}()) where {T, CHK, F}
+    result isa DiffResult ? require_one_based_indexing(x) : require_one_based_indexing(result, x)
     CHK && checktag(T, f, x)
     if chunksize(cfg) == length(x)
         vector_mode_gradient!(result, f, x, cfg)
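`gradient!` also accepts a `DiffResult`, in which case only `x` (not the result object) is checked for one-based indexing. A usage sketch with DiffResults; the values in the comments are illustrative:

    using ForwardDiff, DiffResults

    f(x) = sum(abs2, x)
    x = [1.0, 2.0, 3.0]

    res = DiffResults.GradientResult(x)
    ForwardDiff.gradient!(res, f, x)
    DiffResults.value(res)      # f(x) = 14.0
    DiffResults.gradient(res)   # 2x = [2.0, 4.0, 6.0]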

src/hessian.jl

Lines changed: 5 additions & 3 deletions
@@ -11,7 +11,8 @@ This method assumes that `isa(f(x), Real)`.

 Set `check` to `Val{false}()` to disable tag checking. This can lead to perturbation confusion, so should be used with care.
 """
-function hessian(f, x::AbstractArray, cfg::HessianConfig{T} = HessianConfig(f, x), ::Val{CHK}=Val{true}()) where {T,CHK}
+function hessian(f::F, x::AbstractArray, cfg::HessianConfig{T} = HessianConfig(f, x), ::Val{CHK}=Val{true}()) where {F, T,CHK}
+    require_one_based_indexing(x)
     CHK && checktag(T, f, x)
     ∇f = y -> gradient(f, y, cfg.gradient_config, Val{false}())
     return jacobian(∇f, x, cfg.jacobian_config, Val{false}())
@@ -27,7 +28,8 @@ This method assumes that `isa(f(x), Real)`.

 Set `check` to `Val{false}()` to disable tag checking. This can lead to perturbation confusion, so should be used with care.
 """
-function hessian!(result::AbstractArray, f, x::AbstractArray, cfg::HessianConfig{T} = HessianConfig(f, x), ::Val{CHK}=Val{true}()) where {T,CHK}
+function hessian!(result::AbstractArray, f::F, x::AbstractArray, cfg::HessianConfig{T} = HessianConfig(f, x), ::Val{CHK}=Val{true}()) where {F,T,CHK}
+    require_one_based_indexing(result, x)
     CHK && checktag(T, f, x)
     ∇f = y -> gradient(f, y, cfg.gradient_config, Val{false}())
     jacobian!(result, ∇f, x, cfg.jacobian_config, Val{false}())
@@ -61,7 +63,7 @@ because `isa(result, DiffResult)`, `cfg` is constructed as `HessianConfig(f, res

 Set `check` to `Val{false}()` to disable tag checking. This can lead to perturbation confusion, so should be used with care.
 """
-function hessian!(result::DiffResult, f, x::AbstractArray, cfg::HessianConfig{T} = HessianConfig(f, result, x), ::Val{CHK}=Val{true}()) where {T,CHK}
+function hessian!(result::DiffResult, f::F, x::AbstractArray, cfg::HessianConfig{T} = HessianConfig(f, result, x), ::Val{CHK}=Val{true}()) where {F,T,CHK}
     CHK && checktag(T, f, x)
     ∇f! = InnerGradientForHess(result, cfg, f)
     jacobian!(DiffResults.hessian(result), ∇f!, DiffResults.gradient(result), x, cfg.jacobian_config, Val{false}())
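The `DiffResult` variant of `hessian!` builds its default config from the result object (`HessianConfig(f, result, x)`), so value, gradient, and Hessian are all filled in one call. A usage sketch that exercises that method; the values in the comments are illustrative:

    using ForwardDiff, DiffResults

    f(x) = x[1]^2 * x[2]
    x = [3.0, 2.0]

    res = DiffResults.HessianResult(x)
    ForwardDiff.hessian!(res, f, x)
    DiffResults.value(res)      # 18.0
    DiffResults.gradient(res)   # [12.0, 9.0]
    DiffResults.hessian(res)    # [4.0 6.0; 6.0 0.0]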
