Skip to content

Commit 83250da

Browse files
Merge pull request #70 from SciML/ChrisRackauckas-patch-2
Fix GPU Tests
2 parents e22c0c6 + 6af0692 commit 83250da

File tree

1 file changed

+3
-4
lines changed

1 file changed

+3
-4
lines changed

test/gpu_all.jl

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,5 @@
 using LinearAlgebra,
-      OrdinaryDiffEq, Test, PreallocationTools, CUDA, ForwardDiff,
-      ArrayInterfaceCore
+      OrdinaryDiffEq, Test, PreallocationTools, CUDA, ForwardDiff

 # upstream
 OrdinaryDiffEq.DiffEqBase.anyeltypedual(x::FixedSizeDiffCache, counter = 0) = Any
@@ -16,8 +15,8 @@ tmp_du_CUA = get_tmp(cache_CU, u0_CU)
 tmp_dual_du_CUA = get_tmp(cache_CU, dual_CU)
 tmp_du_CUN = get_tmp(cache_CU, 0.0f0)
 tmp_dual_du_CUN = get_tmp(cache_CU, dual_N)
-@test ArrayInterfaceCore.parameterless_type(typeof(cache_CU.dual_du)) ==
-      ArrayInterfaceCore.parameterless_type(typeof(u0_CU)) #check that dual cache array is a GPU array for performance reasons.
+@test SciMLBase.parameterless_type(typeof(cache_CU.dual_du)) ==
+      SciMLBase.parameterless_type(typeof(u0_CU)) #check that dual cache array is a GPU array for performance reasons.
 @test size(tmp_du_CUA) == size(u0_CU)
 @test typeof(tmp_du_CUA) == typeof(u0_CU)
 @test eltype(tmp_du_CUA) == eltype(u0_CU)

0 commit comments

Comments
 (0)