1- """
2- AutoModelingToolkit <: AbstractADType
3-
4- An AbstractADType choice for use in OptimizationFunction for automatically
5- generating the unspecified derivative functions. Usage:
6-
7- ```julia
8- OptimizationFunction(f,AutoModelingToolkit();kwargs...)
9- ```
10-
11- This uses the [ModelingToolkit.jl](https://github.com/SciML/ModelingToolkit.jl)
12- symbolic system for automatically converting the `f` function into
13- a symbolic equation and uses symbolic differentiation in order to generate
14- a fast derivative code. Note that this will also compile a new version
15- of your `f` function that is automatically optimized. Because of the
required symbolic analysis, the state and parameters are required in
the function definition, i.e. `OptimizationFunction(f, AutoModelingToolkit(), x0, p)`.
19- Summary:
20-
21- - Not compatible with GPUs
22- - Compatible with Hessian-based optimization
23- - Not compatible with Hv-based optimization
24- - Not compatible with constraint functions
25-
26- ## Constructor
27-
28- ```julia
29- OptimizationFunction(f,AutoModelingToolkit(),x0,p,
30- grad = false, hess = false, sparse = false,
31- checkbounds = false,
32- linenumbers = true,
33- parallel=SerialForm(),
34- kwargs...)
35- ```
36-
37- The special keyword arguments are as follows:
38-
39- - `grad`: whether to symbolically generate the gradient function.
40- - `hess`: whether to symbolically generate the Hessian function.
41- - `sparse`: whether to use sparsity detection in the Hessian.
42- - `checkbounds`: whether to perform bounds checks in the generated code.
43- - `linenumbers`: whether to include line numbers in the generated code.
44- - `parallel`: whether to automatically parallelize the calculations.
45-
46- For more information, see the [ModelingToolkit.jl `OptimizationSystem` documentation](https://mtk.sciml.ai/dev/systems/OptimizationSystem/)
47- """
481struct AutoModelingToolkit <: AbstractADType
492 obj_sparse:: Bool
503 cons_sparse:: Bool
@@ -56,6 +9,8 @@ function instantiate_function(f, x, adtype::AutoModelingToolkit, p, num_cons=0)
569 p = isnothing (p) ? SciMLBase. NullParameters () : p
5710 sys = ModelingToolkit. modelingtoolkitize (OptimizationProblem (f, x, p))
5811
12+ hess_prototype, cons_jac_prototype, cons_hess_prototype = nothing , nothing , nothing
13+
5914 if f. grad === nothing
6015 grad_oop, grad_iip = ModelingToolkit. generate_gradient (sys, expression= Val{false })
6116 grad (J, u) = (grad_iip (J, u, p); J)
@@ -72,7 +27,7 @@ function instantiate_function(f, x, adtype::AutoModelingToolkit, p, num_cons=0)
7227
7328 if f. hv === nothing
7429 hv = function (H, θ, v, args... )
75- res = ArrayInterfaceCore. zeromatrix (θ)
30+ res = adtype . obj_sparse ? hess_prototype : ArrayInterfaceCore. zeromatrix (θ)
7631 hess (res, θ, args... )
7732 H .= res * v
7833 end
@@ -105,7 +60,19 @@ function instantiate_function(f, x, adtype::AutoModelingToolkit, p, num_cons=0)
10560 cons_h = f. cons_h
10661 end
10762
108- return OptimizationFunction {true} (f. f, adtype; grad= grad, hess= hess, hv= hv,
63+ if adtype. obj_sparse
64+ _hess_prototype = ModelingToolkit. hessian_sparsity (sys)
65+ hess_prototype = convert .(eltype (x), _hess_prototype)
66+ end
67+
68+ if adtype. cons_sparse
69+ _cons_jac_prototype = ModelingToolkit. jacobian_sparsity (cons_sys)
70+ cons_jac_prototype = convert .(eltype (x), _cons_jac_prototype)
71+ _cons_hess_prototype = ModelingToolkit. hessian_sparsity (cons_sys)
72+ cons_hess_prototype = [convert .(eltype (x), _cons_hess_prototype[i]) for i in 1 : num_cons]
73+ end
74+
75+ return OptimizationFunction {true} (f. f, adtype; grad= grad, hess= hess, hv= hv,
10976 cons= cons, cons_j= cons_j, cons_h= cons_h,
110- hess_prototype= nothing , cons_jac_prototype= nothing , cons_hess_prototype= nothing )
77+ hess_prototype= hess_prototype , cons_jac_prototype= cons_jac_prototype , cons_hess_prototype= cons_hess_prototype )
11178end