
Commit 9db36fe

Commit message: format

1 parent 2cc6724

This commit is a formatter run: continuation lines are re-indented (whitespace-only hunks), long one-liner test sets are re-wrapped into begin/end blocks, and stray blank lines in docstrings are removed. There are no functional changes.

8 files changed (+78 / -62 lines)


docs/make.jl
Lines changed: 7 additions & 7 deletions

@@ -6,12 +6,12 @@ cp("./docs/Project.toml", "./docs/src/assets/Project.toml", force = true)
 include("pages.jl")

 makedocs(sitename = "PreallocationTools.jl",
-authors = "Chris Rackauckas",
-modules = [PreallocationTools],
-clean = true,
-doctest = false,
-format = Documenter.HTML(assets = ["assets/favicon.ico"],
-canonical = "https://docs.sciml.ai/PreallocationTools/stable/"),
-pages = pages)
+authors = "Chris Rackauckas",
+modules = [PreallocationTools],
+clean = true,
+doctest = false,
+format = Documenter.HTML(assets = ["assets/favicon.ico"],
+canonical = "https://docs.sciml.ai/PreallocationTools/stable/"),
+pages = pages)

 deploydocs(repo = "github.com/SciML/PreallocationTools.jl.git"; push_preview = true)

src/PreallocationTools.jl
Lines changed: 11 additions & 14 deletions

@@ -9,26 +9,25 @@ struct FixedSizeDiffCache{T <: AbstractArray, S <: AbstractArray}
 end

 function FixedSizeDiffCache(u::AbstractArray{T}, siz,
-::Type{Val{chunk_size}}) where {T, chunk_size}
+::Type{Val{chunk_size}}) where {T, chunk_size}
 x = ArrayInterface.restructure(u,
-zeros(ForwardDiff.Dual{nothing, T, chunk_size},
-siz...))
+zeros(ForwardDiff.Dual{nothing, T, chunk_size},
+siz...))
 xany = Any[]
 FixedSizeDiffCache(deepcopy(u), x, xany)
 end

 """
-
 `FixedSizeDiffCache(u::AbstractArray, N = Val{default_cache_size(length(u))})`

 Builds a `FixedSizeDiffCache` object that stores both a version of the cache for `u`
 and for the `Dual` version of `u`, allowing use of pre-cached vectors with
 forward-mode automatic differentiation.
 """
 function FixedSizeDiffCache(u::AbstractArray,
-::Type{Val{N}} = Val{ForwardDiff.pickchunksize(length(u))}) where {
-N
-}
+::Type{Val{N}} = Val{ForwardDiff.pickchunksize(length(u))}) where {
+N,
+}
 FixedSizeDiffCache(u, size(u), Val{N})
 end

@@ -77,7 +76,7 @@ end

 function DiffCache(u::AbstractArray{T}, siz, chunk_sizes) where {T}
 x = adapt(ArrayInterface.parameterless_type(u),
-zeros(T, prod(chunk_sizes .+ 1) * prod(siz)))
+zeros(T, prod(chunk_sizes .+ 1) * prod(siz)))
 xany = Any[]
 DiffCache(u, x, xany)
 end
@@ -90,10 +89,9 @@ Builds a `DiffCache` object that stores both a version of the cache for `u`
 and for the `Dual` version of `u`, allowing use of pre-cached vectors with
 forward-mode automatic differentiation. Supports nested AD via keyword `levels`
 or specifying an array of chunk_sizes.
-
 """
 function DiffCache(u::AbstractArray, N::Int = ForwardDiff.pickchunksize(length(u));
-levels::Int = 1)
+levels::Int = 1)
 DiffCache(u, size(u), N * ones(Int, levels))
 end
 DiffCache(u::AbstractArray, N::AbstractArray{<:Int}) = DiffCache(u, size(u), N)
@@ -106,11 +104,9 @@ DiffCache(u::AbstractArray, ::Val{N}; levels::Int = 1) where {N} = DiffCache(u,
 const dualcache = DiffCache

 """
-
 `get_tmp(dc::DiffCache, u)`

 Returns the `Dual` or normal cache array stored in `dc` based on the type of `u`.
-
 """
 function get_tmp(dc::DiffCache, u::T) where {T <: ForwardDiff.Dual}
 nelem = div(sizeof(T), sizeof(eltype(dc.dual_du))) * length(dc.du)
@@ -166,7 +162,6 @@ end
 A lazily allocated buffer object. Given an array `u`, `b[u]` returns an array of the
 same type and size `f(size(u))` (defaulting to the same size), which is allocated as
 needed and then cached within `b` for subsequent usage.
-
 """
 struct LazyBufferCache{F <: Function}
 bufs::Dict # a dictionary mapping types to buffers
@@ -217,7 +212,9 @@ export get_tmp
 import Requires
 @static if !isdefined(Base, :get_extension)
 function __init__()
-Requires.@require ReverseDiff="37e2e3b7-166d-5795-8a7a-e32c996b4267" begin include("../ext/PreallocationToolsReverseDiffExt.jl") end
+Requires.@require ReverseDiff="37e2e3b7-166d-5795-8a7a-e32c996b4267" begin
+include("../ext/PreallocationToolsReverseDiffExt.jl")
+end
 end
 end
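
The docstrings touched above describe the package's core API: `DiffCache` with `get_tmp`, plus the lazily allocated `LazyBufferCache`. For orientation, a minimal usage sketch follows; it is not part of this commit, and the function `f` and the array sizes are made up for illustration.

    using ForwardDiff, PreallocationTools

    cache = DiffCache(zeros(5))              # cache sized like the real-valued buffer

    function f(x, cache)
        tmp = get_tmp(cache, x)              # Dual buffer when x is Dual-valued, plain buffer otherwise
        tmp .= 2 .* x
        return sum(abs2, tmp)
    end

    x = rand(5)
    f(x, cache)                                # hits the Float64 cache
    ForwardDiff.gradient(y -> f(y, cache), x)  # hits the preallocated Dual cache

    b = LazyBufferCache()
    u = rand(10)
    buf = b[u]                                 # allocated on first use, then cached and reused

The same `get_tmp` call sits inside code that ForwardDiff later differentiates, which is why the cache has to carry both a plain and a Dual-typed buffer.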

test/core_dispatch.jl
Lines changed: 12 additions & 11 deletions

@@ -1,5 +1,6 @@
-using LinearAlgebra, Test, PreallocationTools, ForwardDiff, LabelledArrays,
-RecursiveArrayTools
+using LinearAlgebra,
+Test, PreallocationTools, ForwardDiff, LabelledArrays,
+RecursiveArrayTools

 function test(u0, dual, chunk_size)
 cache = PreallocationTools.DiffCache(u0, chunk_size)
@@ -12,8 +13,8 @@ function test(u0, dual, chunk_size)
 result_dual1 = get_tmp(cache, dual)
 result_dual2 = get_tmp(cache, first(dual))
 return allocs_normal1, allocs_normal2, allocs_dual1, allocs_dual2, result_normal1,
-result_normal2, result_dual1,
-result_dual2
+result_normal2, result_dual1,
+result_dual2
 end

 function structequal(struct1, struct2)
@@ -29,7 +30,7 @@ end
 chunk_size = 5
 u0 = ones(5, 5)
 dual = zeros(ForwardDiff.Dual{ForwardDiff.Tag{nothing, Float64}, Float64,
-chunk_size}, 5, 5)
+chunk_size}, 5, 5)
 results = test(u0, dual, chunk_size)
 #allocation tests
 @test results[1] == 0
@@ -53,7 +54,7 @@ results = test(u0, dual, chunk_size)
 chunk_size = 5
 u0_B = ones(5, 5)
 dual_B = zeros(ForwardDiff.Dual{ForwardDiff.Tag{typeof(something), Float64}, Float64,
-chunk_size}, 2, 2)
+chunk_size}, 2, 2)
 cache_B = FixedSizeDiffCache(u0_B, chunk_size)
 tmp_du_BA = get_tmp(cache_B, u0_B)
 tmp_dual_du_BA = get_tmp(cache_B, dual_B)
@@ -76,7 +77,7 @@ tmp_dual_du_BN = get_tmp(cache_B, dual_B[1])
 chunk_size = 4
 u0 = LArray((2, 2); a = 1.0, b = 1.0, c = 1.0, d = 1.0)
 zerodual = zero(ForwardDiff.Dual{ForwardDiff.Tag{nothing, Float64}, Float64,
-chunk_size})
+chunk_size})
 dual = LArray((2, 2); a = zerodual, b = zerodual, c = zerodual, d = zerodual)
 results = test(u0, dual, chunk_size)
 #allocation tests
@@ -102,9 +103,9 @@ results = test(u0, dual, chunk_size)
 chunk_size = 2
 u0 = ArrayPartition(ones(2, 2), ones(3, 3))
 dual_a = zeros(ForwardDiff.Dual{ForwardDiff.Tag{nothing, Float64}, Float64,
-chunk_size}, 2, 2)
+chunk_size}, 2, 2)
 dual_b = zeros(ForwardDiff.Dual{ForwardDiff.Tag{nothing, Float64}, Float64,
-chunk_size}, 3, 3)
+chunk_size}, 3, 3)
 dual = ArrayPartition(dual_a, dual_b)
 results = test(u0, dual, chunk_size)
 #allocation tests
@@ -128,9 +129,9 @@ results = test(u0, dual, chunk_size)

 u0_AP = ArrayPartition(ones(2, 2), ones(3, 3))
 dual_a = zeros(ForwardDiff.Dual{ForwardDiff.Tag{typeof(something), Float64}, Float64,
-chunk_size}, 2, 2)
+chunk_size}, 2, 2)
 dual_b = zeros(ForwardDiff.Dual{ForwardDiff.Tag{typeof(something), Float64}, Float64,
-chunk_size}, 3, 3)
+chunk_size}, 3, 3)
 dual_AP = ArrayPartition(dual_a, dual_b)
 cache_AP = FixedSizeDiffCache(u0_AP, chunk_size)
 tmp_du_APA = get_tmp(cache_AP, u0_AP)

test/core_nesteddual.jl
Lines changed: 8 additions & 7 deletions

@@ -1,5 +1,6 @@
-using LinearAlgebra, OrdinaryDiffEq, Test, PreallocationTools, ForwardDiff, Optimization,
-OptimizationOptimJL
+using LinearAlgebra,
+OrdinaryDiffEq, Test, PreallocationTools, ForwardDiff, Optimization,
+OptimizationOptimJL

 randmat = rand(5, 3)
 sto = similar(randmat)
@@ -23,7 +24,7 @@ In setting up the DiffCache, we are setting chunk_size to [1, 1], because we dif
 only with respect to τ. This initializes the cache with the minimum memory needed. =#
 stod = DiffCache(sto, [1, 1])
 df3 = ForwardDiff.derivative(τ -> ForwardDiff.derivative(ξ -> claytonsample!(stod, ξ, 0.0),
-τ), 0.3)
+τ), 0.3)

 #= taking the second derivative of claytonsample! with respect to τ with auto-detected chunk-size.
 For the given size of sto, ForwardDiff's heuristic chooses chunk_size = 8. Since this is greater
@@ -32,7 +33,7 @@ if we don't specify the keyword argument levels = 2. This should in general not
 especially if more levels of nesting occur (see optimization example below). =#
 stod = DiffCache(sto)
 df4 = ForwardDiff.derivative(τ -> ForwardDiff.derivative(ξ -> claytonsample!(stod, ξ, 0.0),
-τ), 0.3)
+τ), 0.3)

 @test df3 ≈ df4

@@ -41,7 +42,7 @@ For the given size of sto, ForwardDiff's heuristic chooses chunk_size = 8 and wi
 the created cache size is larger than what's needed (even more so than the last example). =#
 stod = DiffCache(sto, levels = 2)
 df5 = ForwardDiff.derivative(τ -> ForwardDiff.derivative(ξ -> claytonsample!(stod, ξ, 0.0),
-τ), 0.3)
+τ), 0.3)

 @test df3 ≈ df5

@@ -60,7 +61,7 @@ ps = 2 #use to specify problem size; don't go crazy on this, because of the comp
 coeffs = -collect(0.1:0.1:(ps^2 / 10))
 cache = DiffCache(zeros(ps, ps), levels = 3)
 prob = ODEProblem{true, SciMLBase.FullSpecialize}(foo, ones(ps, ps), (0.0, 1.0),
-(coeffs, cache))
+(coeffs, cache))
 realsol = solve(prob, TRBDF2(), saveat = 0.0:0.1:10.0, reltol = 1e-8)

 function objfun(x, prob, realsol, cache)
@@ -85,7 +86,7 @@ newtonsol = solve(optprob, Newton())
 #an example where chunk_sizes are not the same on all differentiation levels:
 cache = DiffCache(zeros(ps, ps), [4, 4, 2])
 prob = ODEProblem{true, SciMLBase.FullSpecialize}(foo, ones(ps, ps), (0.0, 1.0),
-(coeffs, cache))
+(coeffs, cache))
 realsol = solve(prob, TRBDF2(chunk_size = 2), saveat = 0.0:0.1:10.0, reltol = 1e-8)

 function objfun(x, prob, realsol, cache)
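
The comments in this file explain how the cache has to be sized when dual numbers are nested: one chunk size per differentiation level, or simply the `levels` keyword. Condensed into a standalone sketch with a hypothetical function `g` (the test itself uses `claytonsample!`):

    using ForwardDiff, PreallocationTools

    buf = DiffCache(zeros(5, 3), levels = 2)   # room for Dual-of-Dual entries

    function g(cache, τ)
        tmp = get_tmp(cache, τ)                # eltype follows τ, even when τ is a nested Dual
        tmp .= τ
        return sum(tmp)
    end

    # Second derivative w.r.t. τ: two nested derivative calls, i.e. two dual levels.
    d2 = ForwardDiff.derivative(τ -> ForwardDiff.derivative(ξ -> g(buf, ξ), τ), 0.3)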

test/core_odes.jl
Lines changed: 6 additions & 5 deletions

@@ -1,5 +1,6 @@
-using LinearAlgebra, OrdinaryDiffEq, Test, PreallocationTools, LabelledArrays,
-RecursiveArrayTools
+using LinearAlgebra,
+OrdinaryDiffEq, Test, PreallocationTools, LabelledArrays,
+RecursiveArrayTools

 # upstream
 OrdinaryDiffEq.DiffEqBase.anyeltypedual(x::FixedSizeDiffCache, counter = 0) = Any
@@ -32,7 +33,7 @@ sol = solve(prob, TRBDF2())
 @test sol.retcode == ReturnCode.Success

 prob = ODEProblem(foo, ones(5, 5), (0.0, 1.0),
-(ones(5, 5), FixedSizeDiffCache(zeros(5, 5))))
+(ones(5, 5), FixedSizeDiffCache(zeros(5, 5))))
 sol = solve(prob, TRBDF2())
 @test sol.retcode == ReturnCode.Success

@@ -44,7 +45,7 @@ function foo(du, u, (A, lbc), t)
 nothing
 end
 prob = ODEProblem{true, SciMLBase.FullSpecialize}(foo, ones(5, 5), (0.0, 1.0),
-(ones(5, 5), LazyBufferCache()))
+(ones(5, 5), LazyBufferCache()))
 sol = solve(prob, TRBDF2())
 @test sol.retcode == ReturnCode.Success

@@ -61,7 +62,7 @@ end
 #with specified chunk_size
 chunk_size = 4
 prob = ODEProblem{true, SciMLBase.FullSpecialize}(foo, u0, (0.0, 1.0),
-(A, DiffCache(c, chunk_size)))
+(A, DiffCache(c, chunk_size)))
 sol = solve(prob, TRBDF2(chunk_size = chunk_size))
 @test sol.retcode == ReturnCode.Success
 #with auto-detected chunk_size
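
These ODE tests all follow the same pattern: the cache is passed as part of the parameter tuple and used inside the right-hand side, so the implicit solver's ForwardDiff duals get a correctly typed scratch array. A hedged sketch of that pattern with a made-up `rhs!` and matrix `A` (not the `foo` defined in the test file):

    using LinearAlgebra, OrdinaryDiffEq, PreallocationTools

    function rhs!(du, u, (A, cache), t)
        tmp = get_tmp(cache, u)    # eltype matches u: Float64 normally, Dual inside the Jacobian computation
        mul!(tmp, A, u)
        @. du = -0.5 * tmp
        nothing
    end

    A = rand(3, 3)
    prob = ODEProblem(rhs!, ones(3), (0.0, 1.0), (A, DiffCache(zeros(3))))
    sol = solve(prob, TRBDF2())    # implicit method, so duals flow through rhs!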

test/general_lbc.jl
Lines changed: 8 additions & 7 deletions

@@ -1,11 +1,12 @@
-using Random, OrdinaryDiffEq, LinearAlgebra, Optimization, OptimizationOptimJL,
-PreallocationTools
+using Random,
+OrdinaryDiffEq, LinearAlgebra, Optimization, OptimizationOptimJL,
+PreallocationTools

 lbc = GeneralLazyBufferCache(function (p)
-init(ODEProblem(ode_fnc, y₀,
-(0.0, T), p),
-Tsit5(); saveat = t)
-end)
+init(ODEProblem(ode_fnc, y₀,
+(0.0, T), p),
+Tsit5(); saveat = t)
+end)

 Random.seed!(2992999)
 λ, y₀, σ = -0.5, 15.0, 0.1
@@ -29,5 +30,5 @@ negloglik = (θ, p) -> -loglik(θ, p, lbc[θ[1]])
 fnc = OptimizationFunction(negloglik, Optimization.AutoForwardDiff())
 ε = zeros(n)
 prob = OptimizationProblem(fnc, θ₀, (yᵒ, n, ε), lb = [-10.0, 1e-6, 0.5],
-ub = [10.0, 10.0, 25.0])
+ub = [10.0, 10.0, 25.0])
 solve(prob, LBFGS())
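
`GeneralLazyBufferCache` wraps an arbitrary generator function and builds its result lazily, which is what lets `lbc[θ[1]]` above hand back a cached ODE integrator matching whatever parameter type ForwardDiff is currently using. A hedged sketch with a made-up generator (assuming, as the test relies on, that one object is built and reused per input type):

    using PreallocationTools

    lbc2 = GeneralLazyBufferCache(p -> Vector{typeof(p)}(undef, 16))

    buf1 = lbc2[1.0]     # generator runs once for Float64 input
    buf2 = lbc2[1.0]     # cached object is reused
    buf3 = lbc2[1.0f0]   # a new input type builds (and caches) a separate buffer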

test/gpu_all.jl
Lines changed: 5 additions & 4 deletions

@@ -1,5 +1,6 @@
-using LinearAlgebra, OrdinaryDiffEq, Test, PreallocationTools, CUDA, ForwardDiff,
-ArrayInterfaceCore
+using LinearAlgebra,
+OrdinaryDiffEq, Test, PreallocationTools, CUDA, ForwardDiff,
+ArrayInterfaceCore

 # upstream
 OrdinaryDiffEq.DiffEqBase.anyeltypedual(x::FixedSizeDiffCache, counter = 0) = Any
@@ -8,7 +9,7 @@ OrdinaryDiffEq.DiffEqBase.anyeltypedual(x::FixedSizeDiffCache, counter = 0) = An
 chunk_size = 5
 u0_CU = cu(ones(5, 5))
 dual_CU = cu(zeros(ForwardDiff.Dual{ForwardDiff.Tag{typeof(something), Float32}, Float32,
-chunk_size}, 2, 2))
+chunk_size}, 2, 2))
 dual_N = ForwardDiff.Dual{ForwardDiff.Tag{typeof(something), Float32}, Float32, 5}(0)
 cache_CU = DiffCache(u0_CU, chunk_size)
 tmp_du_CUA = get_tmp(cache_CU, u0_CU)
@@ -33,7 +34,7 @@ tmp_dual_du_CUN = get_tmp(cache_CU, dual_N)
 chunk_size = 5
 u0_B = cu(ones(5, 5))
 dual_B = cu(zeros(ForwardDiff.Dual{ForwardDiff.Tag{typeof(something), Float32}, Float32,
-chunk_size}, 2, 2))
+chunk_size}, 2, 2))
 cache_B = FixedSizeDiffCache(u0_B, chunk_size)
 tmp_du_BA = get_tmp(cache_B, u0_B)
 tmp_dual_du_BA = get_tmp(cache_B, dual_B)

test/runtests.jl
Lines changed: 21 additions & 7 deletions

@@ -11,15 +11,29 @@ function activate_downstream_env()
 end

 if GROUP == "All" || GROUP == "Core"
-@safetestset "DiffCache Dispatch" begin include("core_dispatch.jl") end
-@safetestset "DiffCache ODE tests" begin include("core_odes.jl") end
-@safetestset "DiffCache Resizing" begin include("core_resizing.jl") end
-@safetestset "DiffCache Nested Duals" begin include("core_nesteddual.jl") end
-@safetestset "DiffCache Sparsity Support" begin include("sparsity_support.jl") end
-@safetestset "GeneralLazyBufferCache" begin include("general_lbc.jl") end
+@safetestset "DiffCache Dispatch" begin
+include("core_dispatch.jl")
+end
+@safetestset "DiffCache ODE tests" begin
+include("core_odes.jl")
+end
+@safetestset "DiffCache Resizing" begin
+include("core_resizing.jl")
+end
+@safetestset "DiffCache Nested Duals" begin
+include("core_nesteddual.jl")
+end
+@safetestset "DiffCache Sparsity Support" begin
+include("sparsity_support.jl")
+end
+@safetestset "GeneralLazyBufferCache" begin
+include("general_lbc.jl")
+end
 end

 if !is_APPVEYOR && GROUP == "GPU"
 activate_downstream_env()
-@safetestset "GPU tests" begin include("gpu_all.jl") end
+@safetestset "GPU tests" begin
+include("gpu_all.jl")
+end
 end
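
Test groups are selected through the `GROUP` variable checked above. A hedged sketch of running a single group locally, assuming `GROUP` is read from the environment (the usual SciML convention; the definition itself is outside this diff):

    using Pkg

    # Run only the Core test group; the GROUP variable is inherited by the test process.
    withenv("GROUP" => "Core") do
        Pkg.test("PreallocationTools")
    end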
