Skip to content

Commit 3a35293

Browse files
committed
Reorganize the tests with CUDA, Enzyme and Zygote
1 parent e6eb171 commit 3a35293

File tree

9 files changed

+231
-227
lines changed

9 files changed

+231
-227
lines changed

.buildkite/pipeline.yml

Lines changed: 28 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,32 @@ steps:
77
queue: "juliagpu"
88
cuda: "*"
99
command: |
10-
julia --color=yes --project -e 'using Pkg; Pkg.add("CUDA"); Pkg.add("NLPModels"); Pkg.add("NLPModelsTest"); Pkg.instantiate()'
11-
julia --color=yes --project -e 'include("test/gpu.jl")'
10+
julia --color=yes --project=test -e 'using Pkg; Pkg.add("CUDA"); Pkg.develop(path="."); Pkg.instantiate()'
11+
julia --color=yes --project=test -e 'include("test/gpu.jl")'
12+
timeout_in_minutes: 30
13+
14+
- label: "CPUs -- Enzyme.jl"
15+
plugins:
16+
- JuliaCI/julia#v1:
17+
version: "1.10"
18+
agents:
19+
queue: "juliaecosystem"
20+
os: "linux"
21+
arch: "x86_64"
22+
command: |
23+
julia --color=yes --project=test -e 'using Pkg; Pkg.add("Enzyme"); Pkg.develop(path="."); Pkg.instantiate()'
24+
julia --color=yes --project=test -e 'include("test/enzyme.jl")'
25+
timeout_in_minutes: 30
26+
27+
- label: "CPUs -- Zygote.jl"
28+
plugins:
29+
- JuliaCI/julia#v1:
30+
version: "1.10"
31+
agents:
32+
queue: "juliaecosystem"
33+
os: "linux"
34+
arch: "x86_64"
35+
command: |
36+
julia --color=yes --project=test -e 'using Pkg; Pkg.add("Zygote"); Pkg.develop(path="."); Pkg.instantiate()'
37+
julia --color=yes --project=test -e 'include("test/zygote.jl")'
1238
timeout_in_minutes: 30

test/Project.toml

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,4 @@
11
[deps]
2-
CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba"
3-
Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9"
42
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
53
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
64
ManualNLPModels = "30dfa513-9b2f-4fb3-9796-781eabac1617"
@@ -11,16 +9,12 @@ ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267"
119
SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
1210
SparseMatrixColorings = "0a514795-09f3-496d-8182-132a7b665d35"
1311
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
14-
Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
1512

1613
[compat]
17-
CUDA = "4, 5"
18-
Enzyme = "0.10, 0.11, 0.12"
1914
ForwardDiff = "0.10"
2015
ManualNLPModels = "0.1"
2116
NLPModels = "0.21"
2217
NLPModelsModifiers = "0.7"
2318
NLPModelsTest = "0.10"
2419
ReverseDiff = "1"
2520
SparseMatrixColorings = "0.4.0"
26-
Zygote = "0.6"

test/enzyme.jl

Lines changed: 83 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,83 @@
1+
using LinearAlgebra, SparseArrays, Test
2+
using ADNLPModels, ManualNLPModels, NLPModels, NLPModelsModifiers, NLPModelsTest
3+
using ADNLPModels:
4+
gradient, gradient!, jacobian, hessian, Jprod!, Jtprod!, directional_second_derivative, Hvprod!
5+
6+
# Automatically loads the code for Enzyme with Requires
7+
import Enzyme
8+
9+
#=
10+
ADNLPModels.EmptyADbackend(args...; kwargs...) = ADNLPModels.EmptyADbackend()
11+
12+
names = OptimizationProblems.meta[!, :name]
13+
list_excluded_enzyme = [
14+
"brybnd",
15+
"clplatea",
16+
"clplateb",
17+
"clplatec",
18+
"curly",
19+
"curly10",
20+
"curly20",
21+
"curly30",
22+
"elec",
23+
"fminsrf2",
24+
"hs101",
25+
"hs117",
26+
"hs119",
27+
"hs86",
28+
"integreq",
29+
"ncb20",
30+
"ncb20b",
31+
"palmer1c",
32+
"palmer1d",
33+
"palmer2c",
34+
"palmer3c",
35+
"palmer4c",
36+
"palmer5c",
37+
"palmer5d",
38+
"palmer6c",
39+
"palmer7c",
40+
"palmer8c",
41+
"sbrybnd",
42+
"tetra",
43+
"tetra_duct12",
44+
"tetra_duct15",
45+
"tetra_duct20",
46+
"tetra_foam5",
47+
"tetra_gear",
48+
"tetra_hook",
49+
"threepk",
50+
"triangle",
51+
"triangle_deer",
52+
"triangle_pacman",
53+
"triangle_turtle",
54+
"watson",
55+
]
56+
for pb in names
57+
@info pb
58+
(pb in list_excluded_enzyme) && continue
59+
nlp = eval(Meta.parse(pb))(
60+
gradient_backend = ADNLPModels.EnzymeADGradient,
61+
jacobian_backend = ADNLPModels.EmptyADbackend,
62+
hessian_backend = ADNLPModels.EmptyADbackend,
63+
)
64+
grad(nlp, get_x0(nlp))
65+
end
66+
=#
67+
68+
#=
69+
ERROR: Duplicated Returns not yet handled
70+
Stacktrace:
71+
[1] autodiff
72+
@.julia\packages\Enzyme\DIkTv\src\Enzyme.jl:209 [inlined]
73+
[2] autodiff(mode::EnzymeCore.ReverseMode, f::OptimizationProblems.ADNLPProblems.var"#f#254"{OptimizationProblems.ADNLPProblems.var"#f#250#255"}, args::Duplicated{Vector{Float64}})
74+
@ Enzyme.julia\packages\Enzyme\DIkTv\src\Enzyme.jl:248
75+
[3] gradient!(#unused#::ADNLPModels.EnzymeADGradient, g::Vector{Float64}, f::Function, x::Vector{Float64})
76+
@ ADNLPModelsDocuments\cvs\ADNLPModels.jl\src\enzyme.jl:17
77+
[4] grad!(nlp::ADNLPModel{Float64, Vector{Float64}, Vector{Int64}}, x::Vector{Float64}, g::Vector{Float64})
78+
@ ADNLPModelsDocuments\cvs\ADNLPModels.jl\src\nlp.jl:542
79+
[5] grad(nlp::ADNLPModel{Float64, Vector{Float64}, Vector{Int64}}, x::Vector{Float64})
80+
@ NLPModels.julia\packages\NLPModels\XBcWL\src\nlp\api.jl:31
81+
[6] top-level scope
82+
@ .\REPL[7]:5
83+
=#

test/nlp/nlpmodelstest.jl

Lines changed: 0 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -18,23 +18,6 @@
1818
@testset "Check multiple precision" begin
1919
multiple_precision_nlp(nlp_from_T, exclude = [], linear_api = true)
2020
end
21-
@testset "Check multiple precision GPU" begin
22-
if CUDA.functional()
23-
CUDA.allowscalar() do
24-
# sparse Jacobian/Hessian doesn't work here
25-
multiple_precision_nlp_array(
26-
T -> nlp_from_T(
27-
T;
28-
jacobian_backend = ADNLPModels.ForwardDiffADJacobian,
29-
hessian_backend = ADNLPModels.ForwardDiffADHessian,
30-
),
31-
CuArray,
32-
exclude = [jth_hprod, hprod, jprod],
33-
linear_api = true,
34-
)
35-
end
36-
end
37-
end
3821
@testset "Check view subarray" begin
3922
view_subarray_nlp(nlp_ad, exclude = [])
4023
end

test/nls/nlpmodelstest.jl

Lines changed: 0 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -35,25 +35,6 @@
3535
@testset "Check multiple precision" begin
3636
multiple_precision_nls(nls_from_T, exclude = exclude, linear_api = true)
3737
end
38-
@testset "Check multiple precision GPU" begin
39-
if CUDA.functional()
40-
CUDA.allowscalar() do
41-
# sparse Jacobian/Hessian doesn't work here
42-
multiple_precision_nls_array(
43-
T -> nls_from_T(
44-
T;
45-
jacobian_backend = ADNLPModels.ForwardDiffADJacobian,
46-
hessian_backend = ADNLPModels.ForwardDiffADHessian,
47-
jacobian_residual_backend = ADNLPModels.ForwardDiffADJacobian,
48-
hessian_residual_backend = ADNLPModels.ForwardDiffADHessian,
49-
),
50-
CuArray,
51-
exclude = [jprod, jprod_residual, hprod_residual],
52-
linear_api = true,
53-
)
54-
end
55-
end
56-
end
5738
@testset "Check view subarray" begin
5839
view_subarray_nls.(nlss, exclude = exclude)
5940
end

test/runtests.jl

Lines changed: 2 additions & 104 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
using CUDA, LinearAlgebra, SparseArrays, Test
1+
using LinearAlgebra, SparseArrays, Test
22
using SparseMatrixColorings
33
using ADNLPModels, ManualNLPModels, NLPModels, NLPModelsModifiers, NLPModelsTest
44
using ADNLPModels:
@@ -40,109 +40,7 @@ for problem in NLPModelsTest.nls_problems
4040
include("nls/problems/$(lowercase(problem)).jl")
4141
end
4242

43-
# Additional backends used for tests
44-
push!(
45-
ADNLPModels.predefined_backend,
46-
:zygote_backend => Dict(
47-
:gradient_backend => ADNLPModels.ZygoteADGradient,
48-
:jprod_backend => ADNLPModels.ZygoteADJprod,
49-
:jtprod_backend => ADNLPModels.ZygoteADJtprod,
50-
:hprod_backend => ADNLPModels.ForwardDiffADHvprod,
51-
:jacobian_backend => ADNLPModels.ZygoteADJacobian,
52-
:hessian_backend => ADNLPModels.ZygoteADHessian,
53-
:ghjvprod_backend => ADNLPModels.ForwardDiffADGHjvprod,
54-
:jprod_residual_backend => ADNLPModels.ZygoteADJprod,
55-
:jtprod_residual_backend => ADNLPModels.ZygoteADJtprod,
56-
:hprod_residual_backend => ADNLPModels.ForwardDiffADHvprod,
57-
:jacobian_residual_backend => ADNLPModels.ZygoteADJacobian,
58-
:hessian_residual_backend => ADNLPModels.ZygoteADHessian,
59-
),
60-
)
61-
62-
ReverseDiffAD(nvar, f) = ADNLPModels.ADModelBackend(
63-
nvar,
64-
f,
65-
gradient_backend = ADNLPModels.ReverseDiffADGradient,
66-
hprod_backend = ADNLPModels.ReverseDiffADHvprod,
67-
jprod_backend = ADNLPModels.ReverseDiffADJprod,
68-
jtprod_backend = ADNLPModels.ReverseDiffADJtprod,
69-
jacobian_backend = ADNLPModels.ReverseDiffADJacobian,
70-
hessian_backend = ADNLPModels.ReverseDiffADHessian,
71-
)
72-
73-
function test_getter_setter(nlp)
74-
@test get_adbackend(nlp) == nlp.adbackend
75-
if typeof(nlp) <: ADNLPModel
76-
set_adbackend!(nlp, ReverseDiffAD(nlp.meta.nvar, nlp.f))
77-
elseif typeof(nlp) <: ADNLSModel
78-
function F(x; nequ = nlp.nls_meta.nequ)
79-
Fx = similar(x, nequ)
80-
nlp.F!(Fx, x)
81-
return Fx
82-
end
83-
set_adbackend!(nlp, ReverseDiffAD(nlp.meta.nvar, x -> sum(F(x) .^ 2)))
84-
end
85-
@test typeof(get_adbackend(nlp).gradient_backend) <: ADNLPModels.ReverseDiffADGradient
86-
@test typeof(get_adbackend(nlp).hprod_backend) <: ADNLPModels.ReverseDiffADHvprod
87-
@test typeof(get_adbackend(nlp).hessian_backend) <: ADNLPModels.ReverseDiffADHessian
88-
set_adbackend!(
89-
nlp,
90-
gradient_backend = ADNLPModels.ForwardDiffADGradient,
91-
jtprod_backend = ADNLPModels.GenericForwardDiffADJtprod(),
92-
)
93-
@test typeof(get_adbackend(nlp).gradient_backend) <: ADNLPModels.ForwardDiffADGradient
94-
@test typeof(get_adbackend(nlp).hprod_backend) <: ADNLPModels.ReverseDiffADHvprod
95-
@test typeof(get_adbackend(nlp).jtprod_backend) <: ADNLPModels.GenericForwardDiffADJtprod
96-
@test typeof(get_adbackend(nlp).hessian_backend) <: ADNLPModels.ReverseDiffADHessian
97-
end
98-
99-
ZygoteAD() = ADNLPModels.ADModelBackend(
100-
ADNLPModels.ZygoteADGradient(),
101-
ADNLPModels.GenericForwardDiffADHvprod(),
102-
ADNLPModels.ZygoteADJprod(),
103-
ADNLPModels.ZygoteADJtprod(),
104-
ADNLPModels.ZygoteADJacobian(0),
105-
ADNLPModels.ZygoteADHessian(0),
106-
ADNLPModels.ForwardDiffADGHjvprod(),
107-
ADNLPModels.EmptyADbackend(),
108-
ADNLPModels.EmptyADbackend(),
109-
ADNLPModels.EmptyADbackend(),
110-
ADNLPModels.EmptyADbackend(),
111-
ADNLPModels.EmptyADbackend(),
112-
)
113-
114-
function test_autodiff_backend_error()
115-
@testset "Error without loading package - $backend" for backend in [:ZygoteAD]
116-
adbackend = eval(backend)()
117-
@test_throws ArgumentError gradient(adbackend.gradient_backend, sum, [1.0])
118-
@test_throws ArgumentError gradient!(adbackend.gradient_backend, [1.0], sum, [1.0])
119-
@test_throws ArgumentError jacobian(adbackend.jacobian_backend, identity, [1.0])
120-
@test_throws ArgumentError hessian(adbackend.hessian_backend, sum, [1.0])
121-
@test_throws ArgumentError Jprod!(
122-
adbackend.jprod_backend,
123-
[1.0],
124-
[1.0],
125-
identity,
126-
[1.0],
127-
Val(:c),
128-
)
129-
@test_throws ArgumentError Jtprod!(
130-
adbackend.jtprod_backend,
131-
[1.0],
132-
[1.0],
133-
identity,
134-
[1.0],
135-
Val(:c),
136-
)
137-
end
138-
end
139-
140-
# Test the argument error without loading the packages
141-
test_autodiff_backend_error()
142-
143-
# Automatically loads the code for Zygote with Requires
144-
import Zygote
145-
43+
include("utils.jl")
14644
include("nlp/basic.jl")
14745
include("nls/basic.jl")
14846
include("nlp/nlpmodelstest.jl")

test/script_OP.jl

Lines changed: 0 additions & 79 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,5 @@
11
# script that tests ADNLPModels over OptimizationProblems.jl problems
22

3-
# optional deps
4-
# using Enzyme
5-
63
# AD deps
74
using ForwardDiff, ReverseDiff
85

@@ -55,79 +52,3 @@ for pb in names
5552
continue
5653
end
5754
end
58-
59-
#=
60-
ADNLPModels.EmptyADbackend(args...; kwargs...) = ADNLPModels.EmptyADbackend()
61-
62-
names = OptimizationProblems.meta[!, :name]
63-
list_excluded_enzyme = [
64-
"brybnd",
65-
"clplatea",
66-
"clplateb",
67-
"clplatec",
68-
"curly",
69-
"curly10",
70-
"curly20",
71-
"curly30",
72-
"elec",
73-
"fminsrf2",
74-
"hs101",
75-
"hs117",
76-
"hs119",
77-
"hs86",
78-
"integreq",
79-
"ncb20",
80-
"ncb20b",
81-
"palmer1c",
82-
"palmer1d",
83-
"palmer2c",
84-
"palmer3c",
85-
"palmer4c",
86-
"palmer5c",
87-
"palmer5d",
88-
"palmer6c",
89-
"palmer7c",
90-
"palmer8c",
91-
"sbrybnd",
92-
"tetra",
93-
"tetra_duct12",
94-
"tetra_duct15",
95-
"tetra_duct20",
96-
"tetra_foam5",
97-
"tetra_gear",
98-
"tetra_hook",
99-
"threepk",
100-
"triangle",
101-
"triangle_deer",
102-
"triangle_pacman",
103-
"triangle_turtle",
104-
"watson",
105-
]
106-
for pb in names
107-
@info pb
108-
(pb in list_excluded_enzyme) && continue
109-
nlp = eval(Meta.parse(pb))(
110-
gradient_backend = ADNLPModels.EnzymeADGradient,
111-
jacobian_backend = ADNLPModels.EmptyADbackend,
112-
hessian_backend = ADNLPModels.EmptyADbackend,
113-
)
114-
grad(nlp, get_x0(nlp))
115-
end
116-
=#
117-
118-
#=
119-
ERROR: Duplicated Returns not yet handled
120-
Stacktrace:
121-
[1] autodiff
122-
@.julia\packages\Enzyme\DIkTv\src\Enzyme.jl:209 [inlined]
123-
[2] autodiff(mode::EnzymeCore.ReverseMode, f::OptimizationProblems.ADNLPProblems.var"#f#254"{OptimizationProblems.ADNLPProblems.var"#f#250#255"}, args::Duplicated{Vector{Float64}})
124-
@ Enzyme.julia\packages\Enzyme\DIkTv\src\Enzyme.jl:248
125-
[3] gradient!(#unused#::ADNLPModels.EnzymeADGradient, g::Vector{Float64}, f::Function, x::Vector{Float64})
126-
@ ADNLPModelsDocuments\cvs\ADNLPModels.jl\src\enzyme.jl:17
127-
[4] grad!(nlp::ADNLPModel{Float64, Vector{Float64}, Vector{Int64}}, x::Vector{Float64}, g::Vector{Float64})
128-
@ ADNLPModelsDocuments\cvs\ADNLPModels.jl\src\nlp.jl:542
129-
[5] grad(nlp::ADNLPModel{Float64, Vector{Float64}, Vector{Int64}}, x::Vector{Float64})
130-
@ NLPModels.julia\packages\NLPModels\XBcWL\src\nlp\api.jl:31
131-
[6] top-level scope
132-
@ .\REPL[7]:5
133-
=#

0 commit comments

Comments (0)