
Commit 5b1bf9e

get all tests passing except for deprecation layer + SIMD tests
1 parent c3ee414 commit 5b1bf9e

15 files changed: +262 −433 lines


src/ForwardDiff.jl

Lines changed: 1 addition & 1 deletion
@@ -52,7 +52,7 @@ include("derivative.jl")
 include("gradient.jl")
 include("jacobian.jl")
 include("hessian.jl")
-include("deprecated.jl")
+# include("deprecated.jl")
 
 export DiffBase

src/config.jl

Lines changed: 14 additions & 14 deletions
@@ -31,8 +31,8 @@ end
 
 @compat immutable Tag{F,M} end
 
-Base.@pure order{V}(::Type{V}) = 0
-Base.@pure order{T,V,N}(::Type{Dual{T,V,N}}) = 1 + order(V)
+@inline order{V}(::Type{V}) = 0
+@inline order{T,V,N}(::Type{Dual{T,V,N}}) = 1 + order(V)
 
 ##################
 # AbstractConfig #
@@ -47,16 +47,16 @@ end
 
 function Base.showerror{F,G}(io::IO, e::ConfigMismatchError{F,G})
     print(io, "The provided configuration (of type $(typeof(e.cfg))) was constructed for a",
-              " function ($G), not the current target function ($F). ForwardDiff cannot safely",
+              " function other than the current target function. ForwardDiff cannot safely",
               " perform differentiation in this context; see the following issue for details:",
               " https://github.com/JuliaDiff/ForwardDiff.jl/issues/83. You can resolve this",
               " problem by constructing and using a configuration with the appropriate target",
-              " function, e.g. `ForwardDiff.GradientConfig($f, x)`")
+              " function, e.g. `ForwardDiff.GradientConfig($(e.f), x)`")
 end
 
 Base.copy(cfg::AbstractConfig) = deepcopy(cfg)
 
-@inline chunksize(::AbstractConfig{T,N}) = N
+@inline chunksize{T,N}(::AbstractConfig{T,N}) = N
 
 ##################
 # GradientConfig #
@@ -110,26 +110,26 @@ end
 # HessianConfig #
 #################
 
-@compat immutable HessianConfig{T,V,N,D,TJ,DJ} <: AbstractConfig{T,N}
-    jacobian_config::JacobianConfig{TJ,V,N,DJ}
-    gradient_config::GradientConfig{T,Dual{T,V,N},D}
+@compat immutable HessianConfig{T,V,N,D,MJ,DJ} <: AbstractConfig{T,N}
+    jacobian_config::JacobianConfig{Tag{Void,MJ},V,N,DJ}
+    gradient_config::GradientConfig{T,Dual{Tag{Void,MJ},V,N},D}
 end
 
 function HessianConfig{F,V}(f::F,
                             x::AbstractArray{V},
                             chunk::Chunk = Chunk(x),
-                            tag::Tag = Tag{F,order(V)}())
-    jacobian_config = JacobianConfig(f, x, chunk, tag)
-    gradient_config = GradientConfig(f, jacobian_config.duals, chunk)
+                            tag::Tag = Tag{F,order(Dual{Void,V,0})}())
+    jacobian_config = JacobianConfig(nothing, x, chunk)
+    gradient_config = GradientConfig(f, jacobian_config.duals, chunk, tag)
     return HessianConfig(jacobian_config, gradient_config)
 end
 
 function HessianConfig{F,V}(result::DiffResult,
                             f::F,
                             x::AbstractArray{V},
                             chunk::Chunk = Chunk(x),
-                            tag::Tag = Tag{F,order(V)}())
-    jacobian_config = JacobianConfig(f, DiffBase.gradient(result), x, chunk, tag)
-    gradient_config = GradientConfig(f, jacobian_config.duals[2], chunk)
+                            tag::Tag = Tag{F,order(Dual{Void,V,0})}())
+    jacobian_config = JacobianConfig(nothing, DiffBase.gradient(result), x, chunk)
+    gradient_config = GradientConfig(f, jacobian_config.duals[2], chunk, tag)
    return HessianConfig(jacobian_config, gradient_config)
 end
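
Note (not part of the commit): a rough standalone sketch of how the `order` recursion above counts Dual nesting depth. The `Dual` defined here is a toy stand-in so the sketch is self-contained, not ForwardDiff's own type.

    # Toy stand-in for Dual, only so the sketch runs on its own.
    immutable Dual{T,V,N} <: Real
        value::V
        partials::NTuple{N,V}
    end

    order{V}(::Type{V}) = 0                           # any non-Dual value type has order 0
    order{T,V,N}(::Type{Dual{T,V,N}}) = 1 + order(V)  # each Dual wrapper adds one level

    order(Float64)                             # 0
    order(Dual{Void,Float64,2})                # 1
    order(Dual{Void,Dual{Void,Float64,2},2})   # 2: dual-of-dual, the nesting used for Hessians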

src/derivative.jl

Lines changed: 27 additions & 9 deletions
@@ -2,26 +2,44 @@
 # API methods #
 ###############
 
-derivative{F}(f::F, x::Real) = extract_derivative(f(Dual(x, one(x))))
+@generated function derivative{F,R<:Real}(f::F, x::R)
+    T = Tag{F,order(R)}
+    return quote
+        $(Expr(:meta, :inline))
+        return extract_derivative(f(Dual{$T}(x, one(x))))
+    end
+end
 
 @generated function derivative{F,N}(f::F, x::NTuple{N,Real})
-    args = [:(Dual(x[$i], Val{N}, Val{$i})) for i in 1:N]
-    return :(extract_derivative(f($(args...))))
+    T = Tag{F,maximum(order(R) for R in x.parameters)}
+    args = [:(Dual{$T}(x[$i], Val{N}, Val{$i})) for i in 1:N]
+    return quote
+        $(Expr(:meta, :inline))
+        extract_derivative(f($(args...)))
+    end
 end
 
-function derivative!{F}(out, f::F, x::Real)
-    y = f(Dual(x, one(x)))
-    extract_derivative!(out, y)
-    return out
+@generated function derivative!{F,R<:Real}(out, f::F, x::R)
+    T = Tag{F,order(R)}
+    return quote
+        $(Expr(:meta, :inline))
+        extract_derivative!(out, f(Dual{$T}(x, one(x))))
+        return out
+    end
 end
 
 #####################
 # result extraction #
 #####################
 
-@generated extract_derivative{N}(y::Dual{N}) = Expr(:tuple, [:(partials(y, $i)) for i in 1:N]...)
+@generated function extract_derivative{T,V,N}(y::Dual{T,V,N})
+    return quote
+        $(Expr(:meta, :inline))
+        $(Expr(:tuple, [:(partials(y, $i)) for i in 1:N]...))
+    end
+end
 
-@inline extract_derivative(y::Dual{1}) = partials(y, 1)
+@inline extract_derivative{T,V}(y::Dual{T,V,1}) = partials(y, 1)
 @inline extract_derivative(y::Real) = zero(y)
 @inline extract_derivative(y::AbstractArray) = extract_derivative!(similar(y, valtype(eltype(y))), y)
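
Note: the public behaviour of `derivative` is unchanged by the rewrite above; the generated functions only bake the `Tag{F,order(R)}` type into the body at compile time. A quick usage sketch, assuming this commit's ForwardDiff is loaded:

    using ForwardDiff

    ForwardDiff.derivative(sin, 1.0)            # ≈ cos(1.0)
    ForwardDiff.derivative(x -> x^3 + 2x, 2.0)  # 3*2.0^2 + 2 == 14.0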

src/dual.jl

Lines changed: 12 additions & 38 deletions
@@ -18,9 +18,9 @@ end
 
 function Base.showerror{X,Y}(io::IO, e::TagMismatchError{X,Y})
     print(io, "potential perturbation confusion detected when computing binary operation ",
-              "on $(e.x) and $(e.y) (tag $X != tag $Y). ForwardDiff cannot safely perform ",
-              "differentiation in this context; see the following issue for details: ",
-              "https://github.com/JuliaDiff/ForwardDiff.jl/issues/83")
+              "on $(e.x) and $(e.y) (tag mismatch: $X != $Y). ForwardDiff cannot safely ",
+              "perform differentiation in this context; see the following issue for ",
+              "details: https://github.com/JuliaDiff/ForwardDiff.jl/issues/83")
 end
 
 ################
@@ -200,7 +200,7 @@ Base.AbstractFloat{T,V,N}(d::Dual{T,V,N}) = Dual{T,promote_type(V, Float16),N}(d
     Dual{T}(x - value(y), -partials(y))
 )
 
-@inline Base.:-(d::Dual) = Dual(-value(d), -partials(d))
+@inline Base.:-{T}(d::Dual{T}) = Dual{T}(-value(d), -partials(d))
 
 # Multiplication #
 #----------------#
@@ -338,6 +338,8 @@ end
 # Other Functions #
 #-----------------#
 
+# hypot
+
 @inline function calc_hypot{T}(x, y, ::Type{T})
     vx = value(x)
     vy = value(y)
@@ -352,12 +354,7 @@ end
     calc_hypot(x, y, T)
 )
 
-@inline sincos(x) = (sin(x), cos(x))
-
-@inline function sincos{T}(d::Dual{T})
-    sd, cd = sincos(value(d))
-    return (Dual{T}(sd, cd * partials(d)), Dual{T}(cd, -sd * partials(d)))
-end
+# atan2
 
 @inline function calc_atan2{T}(y, x, ::Type{T})
     z = y / x
@@ -374,36 +371,13 @@ end
     calc_atan2(x, y, T)
 )
 
-@inline function Base.fma(x::Dual, y::Dual, z::Dual)
-    vx, vy = value(x), value(y)
-    result = fma(vx, vy, value(z))
-    return Dual(result,
-                _mul_partials(partials(x), partials(y), vx, vy) + partials(z))
-end
+# sincos
 
-@inline function Base.fma(x::Dual, y::Dual, z::Real)
-    vx, vy = value(x), value(y)
-    result = fma(vx, vy, z)
-    return Dual(result, _mul_partials(partials(x), partials(y), vx, vy))
-end
-
-@inline function Base.fma(x::Dual, y::Real, z::Dual)
-    vx = value(x)
-    result = fma(vx, y, value(z))
-    return Dual(result, partials(x) * y + partials(z))
-end
-
-@inline Base.fma(x::Real, y::Dual, z::Dual) = fma(y, x, z)
-
-@inline function Base.fma(x::Dual, y::Real, z::Real)
-    vx = value(x)
-    return Dual(fma(vx, y, value(z)), partials(x) * y)
-end
-
-@inline Base.fma(x::Real, y::Dual, z::Real) = fma(y, x, z)
+@inline sincos(x) = (sin(x), cos(x))
 
-@inline function Base.fma(x::Real, y::Real, z::Dual)
-    Dual(fma(x, y, value(z)), partials(z))
+@inline function sincos{T}(d::Dual{T})
+    sd, cd = sincos(value(d))
+    return (Dual{T}(sd, cd * partials(d)), Dual{T}(cd, -sd * partials(d)))
 end
 
 ###################
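
Note: the reworded TagMismatchError message above guards against the perturbation-confusion pattern from issue #83. A hedged illustration of that pattern (an illustration only, not a claim about this commit's output): the mathematically correct result is 1.0, mixing the inner and outer perturbations yields 2.0, and the tag check exists so the mix-up cannot happen silently.

    using ForwardDiff

    # d/dx [ x * d/dy (x + y) ] = d/dx [ x * 1 ] = 1
    f(x) = x * ForwardDiff.derivative(y -> x + y, 1.0)
    ForwardDiff.derivative(f, 3.0)   # should be 1.0, not 2.0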

src/gradient.jl

Lines changed: 9 additions & 77 deletions
@@ -2,15 +2,20 @@
 # API methods #
 ###############
 
-function gradient{F}(f::F, x, cfg::AbstractConfig = GradientConfig(x))
+@compat const AllowedGradientConfig{F,M} = Union{GradientConfig{Tag{F,M}}, GradientConfig{Tag{Void,M}}}
+
+gradient(f, x, cfg::GradientConfig) = throw(ConfigMismatchError(f, cfg))
+gradient!(out, f, x, cfg::GradientConfig) = throw(ConfigMismatchError(f, cfg))
+
+function gradient{F,M}(f::F, x, cfg::AllowedGradientConfig{F,M} = GradientConfig(f, x))
     if chunksize(cfg) == length(x)
         return vector_mode_gradient(f, x, cfg)
     else
         return chunk_mode_gradient(f, x, cfg)
     end
 end
 
-function gradient!{F}(out, f::F, x, cfg::AbstractConfig = GradientConfig(x))
+function gradient!{F,M}(out, f::F, x, cfg::AllowedGradientConfig{F,M} = GradientConfig(f, x))
     if chunksize(cfg) == length(x)
         vector_mode_gradient!(out, f, x, cfg)
     else
@@ -72,9 +77,6 @@ end
 # chunk mode #
 ##############
 
-# single threaded #
-#-----------------#
-
 function chunk_mode_gradient_expr(out_definition::Expr)
     return quote
         @assert length(x) >= N "chunk size cannot be greater than length(x) ($(N) > $(length(x)))"
@@ -119,80 +121,10 @@ function chunk_mode_gradient_expr(out_definition::Expr)
     end
 end
 
-@eval function chunk_mode_gradient{F,N}(f::F, x, cfg::GradientConfig{N})
+@eval function chunk_mode_gradient{F,T,V,N}(f::F, x, cfg::GradientConfig{T,V,N})
     $(chunk_mode_gradient_expr(:(out = similar(x, valtype(ydual)))))
 end
 
-@eval function chunk_mode_gradient!{F,N}(out, f::F, x, cfg::GradientConfig{N})
+@eval function chunk_mode_gradient!{F,T,V,N}(out, f::F, x, cfg::GradientConfig{T,V,N})
     $(chunk_mode_gradient_expr(:()))
 end
-
-# multithreaded #
-#---------------#
-
-if IS_MULTITHREADED_JULIA
-    function multithread_chunk_mode_expr(out_definition::Expr)
-        return quote
-            cfg = gradient_config(multi_cfg)
-            N = chunksize(cfg)
-            @assert length(x) >= N "chunk size cannot be greater than length(x) ($(N) > $(length(x)))"
-
-            # precalculate loop bounds
-            xlen = length(x)
-            remainder = xlen % N
-            lastchunksize = ifelse(remainder == 0, N, remainder)
-            lastchunkindex = xlen - lastchunksize + 1
-            middlechunks = 2:div(xlen - lastchunksize, N)
-
-            # fetch and seed work vectors
-            current_cfg = cfg[compat_threadid()]
-            current_xdual = current_cfg.duals
-            current_seeds = current_cfg.seeds
-
-            Base.Threads.@threads for t in 1:length(cfg)
-                seed!(cfg[t].duals, x)
-            end
-
-            # do first chunk manually to calculate output type
-            seed!(current_xdual, x, 1, current_seeds)
-            current_ydual = f(current_xdual)
-            $(out_definition)
-            extract_gradient_chunk!(out, current_ydual, 1, N)
-            seed!(current_xdual, x, 1)
-
-            # do middle chunks
-            Base.Threads.@threads for c in middlechunks
-                # see https://github.com/JuliaLang/julia/issues/14948
-                local chunk_cfg = cfg[compat_threadid()]
-                local chunk_xdual = chunk_cfg.duals
-                local chunk_seeds = chunk_cfg.seeds
-                local chunk_index = ((c - 1) * N + 1)
-                seed!(chunk_xdual, x, chunk_index, chunk_seeds)
-                local chunk_dual = f(chunk_xdual)
-                extract_gradient_chunk!(out, chunk_dual, chunk_index, N)
-                seed!(chunk_xdual, x, chunk_index)
-            end
-
-            # do final chunk
-            seed!(current_xdual, x, lastchunkindex, current_seeds, lastchunksize)
-            current_ydual = f(current_xdual)
-            extract_gradient_chunk!(out, current_ydual, lastchunkindex, lastchunksize)
-
-            # load value, this is a no-op unless `out` is a DiffResult
-            extract_value!(out, current_ydual)
-
-            return out
-        end
-    end
-
-    @eval function chunk_mode_gradient{F}(f::F, x, multi_cfg::MultithreadConfig)
-        $(multithread_chunk_mode_expr(:(out = similar(x, valtype(current_ydual)))))
-    end
-
-    @eval function chunk_mode_gradient!{F}(out, f::F, x, multi_cfg::MultithreadConfig)
-        $(multithread_chunk_mode_expr(:()))
-    end
-else
-    chunk_mode_gradient(f, x, cfg::Tuple) = error("Multithreading is not enabled for this Julia installation.")
-    chunk_mode_gradient!(out, f, x, cfg::Tuple) = chunk_mode_gradient!(f, x, cfg)
-end
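
Note: with the AllowedGradientConfig union above, a config now carries the tag of the function it was built for, and a config built for a different function is rejected up front. A usage sketch; the `GradientConfig(nothing, x)` form for an untagged, reusable config is an assumption based on the Void-tagged branch of the union and the `JacobianConfig(nothing, ...)` calls in config.jl:

    using ForwardDiff

    f(x) = sum(abs2, x)
    g(x) = prod(x)
    x = rand(4)

    cfg_f = ForwardDiff.GradientConfig(f, x)          # tagged with f
    ForwardDiff.gradient(f, x, cfg_f)                 # ok, tags match
    # ForwardDiff.gradient(g, x, cfg_f)               # would throw ConfigMismatchError

    cfg_any = ForwardDiff.GradientConfig(nothing, x)  # Void tag, accepted for any function
    ForwardDiff.gradient(g, x, cfg_any)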

src/hessian.jl

Lines changed: 15 additions & 9 deletions
@@ -2,24 +2,30 @@
 # API methods #
 ###############
 
-function hessian{F}(f::F, x, cfg::AbstractConfig = HessianConfig(x))
-    ∇f = y -> gradient(f, y, gradient_config(cfg))
-    return jacobian(∇f, x, jacobian_config(cfg))
+@compat const AllowedHessianConfig{F,M} = Union{HessianConfig{Tag{F,M}}, HessianConfig{Tag{Void,M}}}
+
+hessian(f, x, cfg::HessianConfig) = throw(ConfigMismatchError(f, cfg))
+hessian!(out, f, x, cfg::HessianConfig) = throw(ConfigMismatchError(f, cfg))
+hessian!(out::DiffResult, f, x, cfg::HessianConfig) = throw(ConfigMismatchError(f, cfg))
+
+function hessian{F,M}(f::F, x, cfg::AllowedHessianConfig{F,M} = HessianConfig(f, x))
+    ∇f = y -> gradient(f, y, cfg.gradient_config)
+    return jacobian(∇f, x, cfg.jacobian_config)
 end
 
-function hessian!{F}(out, f::F, x, cfg::AbstractConfig = HessianConfig(x))
-    ∇f = y -> gradient(f, y, gradient_config(cfg))
-    jacobian!(out, ∇f, x, jacobian_config(cfg))
+function hessian!{F,M}(out, f::F, x, cfg::AllowedHessianConfig{F,M} = HessianConfig(f, x))
+    ∇f = y -> gradient(f, y, cfg.gradient_config)
+    jacobian!(out, ∇f, x, cfg.jacobian_config)
     return out
 end
 
-function hessian!{F}(out::DiffResult, f::F, x, cfg::AbstractConfig = HessianConfig(out, x))
+function hessian!{F,M}(out::DiffResult, f::F, x, cfg::AllowedHessianConfig{F,M} = HessianConfig(out, f, x))
     ∇f! = (y, z) -> begin
         result = DiffResult(zero(eltype(y)), y)
-        gradient!(result, f, z, gradient_config(cfg))
+        gradient!(result, f, z, cfg.gradient_config)
         DiffBase.value!(out, value(DiffBase.value(result)))
         return y
     end
-    jacobian!(DiffBase.hessian(out), ∇f!, DiffBase.gradient(out), x, jacobian_config(cfg))
+    jacobian!(DiffBase.hessian(out), ∇f!, DiffBase.gradient(out), x, cfg.jacobian_config)
     return out
 end
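
Note: as in the bodies above, the Hessian is still computed as the Jacobian of the gradient (∇f); only the config plumbing changed to the function-tagged form. A short usage sketch with the new constructor signature shown in this diff:

    using ForwardDiff

    rosenbrock(x) = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2
    x = [0.5, 1.5]

    cfg = ForwardDiff.HessianConfig(rosenbrock, x)
    H = ForwardDiff.hessian(rosenbrock, x, cfg)   # 2x2 symmetric matrix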
