21 changes: 11 additions & 10 deletions docs/src/backend.md
@@ -96,26 +96,27 @@ nlp = ADNLPModel(f, x0)
get_adbackend(nlp) # returns the `ADModelBackend` structure that regroup all the various backends.
```

There are currently two ways to modify instantiated backends. The first one is to instantiate a new `ADModelBackend` and use `set_adbackend!` to modify `nlp`.
One way to obtain a model with different backends is to instantiate a new `ADModelBackend` and pass it to `set_adbackend`, which returns a copy of `nlp` equipped with the new backends.

```@example adnlp2
adback = ADNLPModels.ADModelBackend(nlp.meta.nvar, nlp.f, gradient_backend = ADNLPModels.ForwardDiffADGradient)
set_adbackend!(nlp, adback)
get_adbackend(nlp)
new_nlp = set_adbackend(nlp, adback)
get_adbackend(new_nlp)
```

The alternative is to use `set_adbackend!` and pass the new backends via `kwargs`. In the second approach, it is possible to pass either the type of the desired backend or an instance as shown below.
An alternative way to use `set_adbackend` is to pass the new backends as keyword arguments.
In this approach, you can pass either the type of the desired backend or an existing instance, as shown below.

```@example adnlp2
set_adbackend!(
new_nlp = set_adbackend(
nlp,
gradient_backend = ADNLPModels.ForwardDiffADGradient,
jtprod_backend = ADNLPModels.GenericForwardDiffADJtprod(),
)
get_adbackend(nlp)
get_adbackend(new_nlp)
```
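
As a hypothetical follow-up (not part of the documentation page), one can verify that only the backends named in the call were replaced, since the keyword variant reuses the others:

```julia
# Illustrative check of the returned ADModelBackend fields.
get_adbackend(new_nlp).gradient_backend  # ForwardDiffADGradient, replaced
get_adbackend(new_nlp).hessian_backend   # same instance reused from `nlp`
```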

### Support multiple precision without having to recreate the model
### Multi-precision model creation with backend reuse

One of the strengths of `ADNLPModels.jl` is its type flexibility. Let's assume we first instantiate an `ADNLPModel` with a `Float64` initial guess.

@@ -133,11 +134,11 @@ x64 = rand(2)
grad(nlp, x64)
```

It is now possible to move to a different type, for instance `Float32`, while keeping the instance `nlp`.
It is now possible to move to a different type for the gradient, for instance `Float32`, while keeping the other backends from the original model `nlp`.

```@example adnlp3
x0_32 = ones(Float32, 2)
set_adbackend!(nlp, gradient_backend = ADNLPModels.ForwardDiffADGradient, x0 = x0_32)
new_nlp = set_adbackend(nlp, gradient_backend = ADNLPModels.ForwardDiffADGradient, x0 = x0_32)
x32 = rand(Float32, 2)
grad(nlp, x32)
grad(new_nlp, x32)
```
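
Because `set_adbackend` returns a copy instead of mutating, both models stay usable side by side; a minimal sketch (not part of the diffed docs):

```julia
# Illustrative: the original model keeps its Float64 setup.
grad(new_nlp, x32)  # uses the Float32-configured gradient backend
grad(nlp, x64)      # `nlp` is untouched by set_adbackend
```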
26 changes: 14 additions & 12 deletions src/ADNLPModels.jl
@@ -160,7 +160,7 @@ function ADNLSModel!(model::AbstractNLSModel; kwargs...)
end
end

export get_adbackend, set_adbackend!
export get_adbackend, set_adbackend

"""
get_c(nlp)
@@ -244,22 +244,24 @@ Returns the value `adbackend` from nlp.
get_adbackend(nlp::ADModel) = nlp.adbackend

"""
set_adbackend!(nlp, new_adbackend)
set_adbackend!(nlp; kwargs...)
new_nlp = set_adbackend(nlp, new_adbackend)
new_nlp = set_adbackend(nlp; kwargs...)

Replace the current `adbackend` value of nlp by `new_adbackend` or instantiate a new one with `kwargs`, see `ADModelBackend`.
By default, the setter with kwargs will reuse existing backends.
Return a copy of `nlp` whose `adbackend` is replaced by `new_adbackend`, or by a new `ADModelBackend` instantiated from `kwargs`; see `ADModelBackend`.
By default, the keyword-argument variant reuses the existing backends for any backend that is not specified.
"""
function set_adbackend!(nlp::ADModel, new_adbackend::ADModelBackend)
nlp.adbackend = new_adbackend
return nlp
function _set_adbackend(nlp::ADM, new_adbackend::ADModelBackend) where {ADM}
values = [f == :adbackend ? new_adbackend : getfield(nlp, f) for f in fieldnames(ADM)]
base_type = Base.typename(ADM).wrapper
return base_type(values...)
end
function set_adbackend!(nlp::ADModel; kwargs...)

function _set_adbackend(nlp::ADModel; kwargs...)
args = []
for field in fieldnames(ADNLPModels.ADModelBackend)
push!(args, if field in keys(kwargs) && typeof(kwargs[field]) <: ADBackend
kwargs[field]
elseif field in keys(kwargs) && typeof(kwargs[field]) <: DataType
elseif field in keys(kwargs) && kwargs[field] <: ADBackend
if typeof(nlp) <: ADNLPModel
kwargs[field](nlp.meta.nvar, nlp.f, nlp.meta.ncon; kwargs...)
elseif typeof(nlp) <: ADNLSModel
@@ -269,8 +271,8 @@ function set_adbackend!(nlp::ADModel; kwargs...)
getfield(nlp.adbackend, field)
end)
end
nlp.adbackend = ADModelBackend(args...)
return nlp
new_nlp = set_adbackend(nlp, ADModelBackend(args...))
return new_nlp
end

end # module
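
The copy in `_set_adbackend` works by rebuilding the struct from its field values via `Base.typename(ADM).wrapper`, which strips the type parameters so they can be re-inferred from the new backend. A minimal standalone sketch of that pattern, on a hypothetical two-field struct:

```julia
# Illustrative only: `Point` and `replace_field` are not part of ADNLPModels.
struct Point{T}
    x::T
    label::String
end

function replace_field(p::P, name::Symbol, value) where {P}
    vals = [f == name ? value : getfield(p, f) for f in fieldnames(P)]
    base = Base.typename(P).wrapper  # Point{Int} -> Point (UnionAll)
    return base(vals...)             # type parameters re-inferred from vals
end

p = Point(1, "a")
q = replace_field(p, :x, 2.5)  # Point{Float64}(2.5, "a"); p is unchanged
```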
6 changes: 3 additions & 3 deletions src/forward.jl
@@ -4,8 +4,8 @@ function gradient!(::GenericForwardDiffADGradient, g, f, x)
return ForwardDiff.gradient!(g, f, x)
end

struct ForwardDiffADGradient <: ADBackend
cfg
struct ForwardDiffADGradient{GC} <: ADBackend
cfg::GC
end
function ForwardDiffADGradient(
nvar::Integer,
@@ -109,7 +109,7 @@ function GenericForwardDiffADJtprod(
return GenericForwardDiffADJtprod()
end
function Jtprod!(::GenericForwardDiffADJtprod, Jtv, f, x, v, ::Val)
Jtv .= ForwardDiff.gradient(x -> dot(f(x), v), x)
ForwardDiff.gradient!(Jtv, x -> dot(f(x), v), x)
return Jtv
end

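The `Jtprod!` change swaps the out-of-place `ForwardDiff.gradient` followed by a broadcast copy for the in-place `ForwardDiff.gradient!`, which writes directly into the preallocated buffer. A minimal sketch of the difference, separate from the diff:

```julia
using ForwardDiff

f(x) = sum(abs2, x)
x = rand(3)
g = similar(x)

g .= ForwardDiff.gradient(f, x)  # allocates a temporary result vector
ForwardDiff.gradient!(g, f, x)   # writes into g directly, no temporary
```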
18 changes: 8 additions & 10 deletions src/nlp.jl
@@ -1,18 +1,18 @@
export ADNLPModel, ADNLPModel!

mutable struct ADNLPModel{T, S, Si} <: AbstractADNLPModel{T, S}
mutable struct ADNLPModel{T, S, Si, F1, F2, ADMB <: ADModelBackend} <: AbstractADNLPModel{T, S}
meta::NLPModelMeta{T, S}
counters::Counters
adbackend::ADModelBackend
adbackend::ADMB

# Functions
f
f::F1

clinrows::Si
clincols::Si
clinvals::S

c!
c!::F2
end

ADNLPModel(
@@ -127,7 +127,7 @@ function ADNLPModel(f, x0::S; name::String = "Generic", minimize::Bool = true, k
meta =
NLPModelMeta{T, S}(nvar, x0 = x0, nnzh = nnzh, minimize = minimize, islp = false, name = name)

return ADNLPModel(meta, Counters(), adbackend, f, x -> T[])
return ADNLPModel(meta, Counters(), adbackend, f, (c, x) -> similar(S, 0))
end

function ADNLPModel(
@@ -157,7 +157,7 @@ function ADNLPModel(
name = name,
)

return ADNLPModel(meta, Counters(), adbackend, f, x -> T[])
return ADNLPModel(meta, Counters(), adbackend, f, x -> similar(S, 0))
end

function ADNLPModel(f, x0::S, c, lcon::S, ucon::S; kwargs...) where {S}
@@ -222,8 +222,7 @@ function ADNLPModel(
ucon::S;
kwargs...,
) where {S}
T = eltype(S)
return ADNLPModel(f, x0, clinrows, clincols, clinvals, x -> T[], lcon, ucon; kwargs...)
return ADNLPModel(f, x0, clinrows, clincols, clinvals, x -> similar(S, 0), lcon, ucon; kwargs...)
end

function ADNLPModel(
@@ -339,7 +338,6 @@ function ADNLPModel(
ucon::S;
kwargs...,
) where {S}
T = eltype(S)
return ADNLPModel(
f,
x0,
@@ -348,7 +346,7 @@ function ADNLPModel(
clinrows,
clincols,
clinvals,
x -> T[],
x -> similar(S, 0),
lcon,
ucon;
kwargs...,
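The repeated replacement of `x -> T[]` by `x -> similar(S, 0)` keeps the empty constraint output in the same storage type `S` as the initial guess, instead of forcing a plain `Vector{T}`. A minimal sketch of the idiom, with an illustrative choice of `S`:

```julia
S = Vector{Float32}  # stands in for the storage type of the user's x0
c = similar(S, 0)    # 0-element Vector{Float32}
eltype(c)            # Float32; a GPU array type S would be preserved the same way
```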
8 changes: 4 additions & 4 deletions src/nls.jl
@@ -1,19 +1,19 @@
export ADNLSModel, ADNLSModel!

mutable struct ADNLSModel{T, S, Si} <: AbstractADNLSModel{T, S}
mutable struct ADNLSModel{T, S, Si, F1, F2, ADMB <: ADModelBackend} <: AbstractADNLSModel{T, S}
meta::NLPModelMeta{T, S}
nls_meta::NLSMeta{T, S}
counters::NLSCounters
adbackend::ADModelBackend
adbackend::ADMB

# Function
F!
F!::F1

clinrows::Si
clincols::Si
clinvals::S

c!
c!::F2
end

ADNLSModel(
4 changes: 2 additions & 2 deletions src/reverse.jl
@@ -7,8 +7,8 @@ end
struct GenericReverseDiffADJprod <: ADBackend end
struct GenericReverseDiffADJtprod <: ADBackend end

struct ReverseDiffADGradient <: ADBackend
cfg
struct ReverseDiffADGradient{GC} <: ADBackend
cfg::GC
end

function ReverseDiffADGradient(
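The new type parameters on `ADNLPModel` and `ADNLSModel` (`f::F1`, `c!::F2`, `adbackend::ADMB`) and on the gradient backends (`cfg::GC`) all serve the same purpose: a concretely typed field lets the compiler infer accesses such as `nlp.f(x)` or `b.cfg`, whereas an untyped field is `Any` and forces runtime dispatch. A standalone sketch with illustrative names:

```julia
mutable struct Loose
    f          # field type defaults to Any
end
mutable struct Tight{F}
    f::F       # concrete closure type recorded in the struct's type
end

evalf(m, x) = m.f(x)

# @code_warntype evalf(Loose(sin), 1.0)  # m.f::Any         -> runtime dispatch
# @code_warntype evalf(Tight(sin), 1.0)  # m.f::typeof(sin) -> fully inferred
```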
2 changes: 2 additions & 0 deletions test/Project.toml
@@ -1,5 +1,6 @@
[deps]
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
JET = "c3a54625-cd67-489e-a8e7-0a5a0ff4e31b"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
ManualNLPModels = "30dfa513-9b2f-4fb3-9796-781eabac1617"
NLPModels = "a4795742-8479-5a88-8948-cc11e1c8c1a6"
@@ -12,6 +13,7 @@ Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

[compat]
ForwardDiff = "0.10"
JET = "0.9, 0.10"
ManualNLPModels = "0.1"
NLPModels = "0.21"
NLPModelsModifiers = "0.7"
1 change: 1 addition & 0 deletions test/enzyme.jl
@@ -1,4 +1,5 @@
using LinearAlgebra, SparseArrays, Test
using JET
using SparseMatrixColorings
using ADNLPModels, ManualNLPModels, NLPModels, NLPModelsModifiers, NLPModelsTest
using ADNLPModels:
2 changes: 1 addition & 1 deletion test/nlp/basic.jl
@@ -25,7 +25,7 @@ function test_autodiff_model(name; kwargs...)
@test abs(obj(nlp, β) - norm(y .- β[1] - β[2] * x)^2 / 2) < 1e-12
@test norm(grad(nlp, β)) < 1e-12

test_getter_setter(nlp)
test_allocations(nlp)

@testset "Constructors for ADNLPModel with $name" begin
lvar, uvar, lcon, ucon, y0 = -ones(2), ones(2), -ones(1), ones(1), zeros(1)
2 changes: 1 addition & 1 deletion test/nls/basic.jl
@@ -5,7 +5,7 @@ function autodiff_nls_test(name; kwargs...)

@test isapprox(residual(nls, ones(2)), zeros(2), rtol = 1e-8)

test_getter_setter(nls)
test_allocations(nls)
end

@testset "Constructors for ADNLSModel" begin
1 change: 1 addition & 0 deletions test/runtests.jl
@@ -1,5 +1,6 @@
using LinearAlgebra, SparseArrays, Test
using SparseMatrixColorings
using ForwardDiff, JET
using ADNLPModels, ManualNLPModels, NLPModels, NLPModelsModifiers, NLPModelsTest
using ADNLPModels:
gradient, gradient!, jacobian, hessian, Jprod!, Jtprod!, directional_second_derivative, Hvprod!
51 changes: 17 additions & 34 deletions test/utils.jl
@@ -1,36 +1,19 @@
ReverseDiffAD(nvar, f) = ADNLPModels.ADModelBackend(
nvar,
f,
gradient_backend = ADNLPModels.ReverseDiffADGradient,
hprod_backend = ADNLPModels.ReverseDiffADHvprod,
jprod_backend = ADNLPModels.ReverseDiffADJprod,
jtprod_backend = ADNLPModels.ReverseDiffADJtprod,
jacobian_backend = ADNLPModels.ReverseDiffADJacobian,
hessian_backend = ADNLPModels.ReverseDiffADHessian,
)
function test_allocations(nlp::ADNLPModel)
x = nlp.meta.x0
y = zeros(eltype(nlp.meta.x0), nlp.meta.ncon)
g = zeros(eltype(nlp.meta.x0), nlp.meta.nvar)
@test_opt target_modules=(ADNLPModels,) obj(nlp, x)
@test_opt target_modules=(ADNLPModels,) cons!(nlp, x, y)
@test_opt target_modules=(ADNLPModels,) grad!(nlp, x, g)
end

function test_getter_setter(nlp)
@test get_adbackend(nlp) == nlp.adbackend
if typeof(nlp) <: ADNLPModel
set_adbackend!(nlp, ReverseDiffAD(nlp.meta.nvar, nlp.f))
elseif typeof(nlp) <: ADNLSModel
function F(x; nequ = nlp.nls_meta.nequ)
Fx = similar(x, nequ)
nlp.F!(Fx, x)
return Fx
end
set_adbackend!(nlp, ReverseDiffAD(nlp.meta.nvar, x -> sum(F(x) .^ 2)))
end
@test typeof(get_adbackend(nlp).gradient_backend) <: ADNLPModels.ReverseDiffADGradient
@test typeof(get_adbackend(nlp).hprod_backend) <: ADNLPModels.ReverseDiffADHvprod
@test typeof(get_adbackend(nlp).hessian_backend) <: ADNLPModels.ReverseDiffADHessian
set_adbackend!(
nlp,
gradient_backend = ADNLPModels.ForwardDiffADGradient,
jtprod_backend = ADNLPModels.GenericForwardDiffADJtprod(),
)
@test typeof(get_adbackend(nlp).gradient_backend) <: ADNLPModels.ForwardDiffADGradient
@test typeof(get_adbackend(nlp).hprod_backend) <: ADNLPModels.ReverseDiffADHvprod
@test typeof(get_adbackend(nlp).jtprod_backend) <: ADNLPModels.GenericForwardDiffADJtprod
@test typeof(get_adbackend(nlp).hessian_backend) <: ADNLPModels.ReverseDiffADHessian
function test_allocations(nlp::ADNLSModel)
x = nlp.meta.x0
y = zeros(eltype(nlp.meta.x0), nlp.meta.ncon)
g = zeros(eltype(nlp.meta.x0), nlp.meta.nvar)
Fx = zeros(eltype(nlp.meta.x0), nlp.nls_meta.nequ)
@test_opt target_modules=(ADNLPModels,) function_filter=(@nospecialize(f) -> f != ForwardDiff.gradient!) obj(nlp, x)
@test_opt target_modules=(ADNLPModels,) function_filter=(@nospecialize(f) -> f != ForwardDiff.gradient!) cons!(nlp, x, y)
@test_opt target_modules=(ADNLPModels,) function_filter=(@nospecialize(f) -> f != ForwardDiff.gradient!) grad!(nlp, x, g, Fx)
@test_opt target_modules=(ADNLPModels,) function_filter=(@nospecialize(f) -> f != ForwardDiff.gradient!) residual!(nlp, x, Fx)
end
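
The new `test_allocations` helpers rely on JET's `@test_opt`, which fails if JET's optimization analysis finds runtime dispatch in the call; `target_modules` limits reporting to code defined in the listed modules, and `function_filter` excludes specific callees. A minimal standalone sketch with illustrative functions, separate from the test suite:

```julia
using JET, Test

const threshold = 0.5
clip(x) = x > threshold ? threshold : x
@test_opt clip(2.0)      # passes: the call is fully inferred

loose = 0.5                        # untyped global
clip2(x) = x > loose ? loose : x
# @test_opt clip2(2.0)             # would fail: reads of `loose` dispatch at runtime
```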
1 change: 1 addition & 0 deletions test/zygote.jl
@@ -1,4 +1,5 @@
using LinearAlgebra, SparseArrays, Test
using JET
using ADNLPModels, ManualNLPModels, NLPModels, NLPModelsModifiers, NLPModelsTest
using ADNLPModels:
gradient, gradient!, jacobian, hessian, Jprod!, Jtprod!, directional_second_derivative, Hvprod!