
Commit 90c3d63

Merge pull request #50 from SciML/optim1
update to optim 1
2 parents ff0cc8d + 6ece7cd · commit 90c3d63

4 files changed: +61 −57 lines changed


Project.toml

Lines changed: 3 additions & 2 deletions
@@ -29,7 +29,7 @@ FiniteDiff = "2.5"
 Flux = "0.11"
 ForwardDiff = "0.10"
 LoggingExtras = "0.4"
-Optim = "0.22"
+Optim = "0.22, 1"
 ProgressLogging = "0.1"
 Requires = "1.0"
 ReverseDiff = "1.4"
@@ -43,7 +43,8 @@ BlackBoxOptim = "a134a8b2-14d6-55f6-9291-3336d3ab0209"
 CMAEvolutionStrategy = "8d3b24bd-414e-49e0-94fb-163cc3a3e411"
 Evolutionary = "86b6b26d-c046-49b6-aa0b-5f0f74682bd6"
 NLopt = "76087f3c-5699-56af-9a33-bf431cd00edd"
+SafeTestsets = "1bc83da4-3b8d-516f-aca4-4fe02f6d838f"
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

 [targets]
-test = ["BlackBoxOptim", "Evolutionary", "NLopt", "CMAEvolutionStrategy", "Test"]
+test = ["BlackBoxOptim", "Evolutionary", "NLopt", "CMAEvolutionStrategy", "SafeTestsets", "Test"]

src/solve.jl

Lines changed: 54 additions & 48 deletions
@@ -65,19 +65,19 @@ function __solve(prob::OptimizationProblem, opt;cb = (args...) -> (false), maxit
     # this is a Flux optimizer
     θ = copy(prob.x)
     ps = Flux.params(θ)
-
+
     t0 = time()
-
+
     local x, min_err, _loss
     min_err = typemax(eltype(prob.x)) #dummy variables
     min_opt = 1
-
-
-    if prob.f isa OptimizationFunction
+
+
+    if prob.f isa OptimizationFunction
         _loss = function(θ)
             x = prob.f.f(θ, prob.p)
         end
-    else
+    else
         _loss = function(θ)
             x = prob.f(θ, prob.p)
         end
@@ -98,7 +98,7 @@ function __solve(prob::OptimizationProblem, opt;cb = (args...) -> (false), maxit
             msg = @sprintf("loss: %.3g", x[1])
             progress && ProgressLogging.@logprogress msg i/maxiters
             update!(opt, ps, gs)
-
+
             if save_best
                 if first(x) < first(min_err) #found a better solution
                     min_opt = opt
@@ -111,9 +111,9 @@ function __solve(prob::OptimizationProblem, opt;cb = (args...) -> (false), maxit
            end
        end
    end
-
+
    _time = time()
-
+
    Optim.MultivariateOptimizationResults(opt,
        prob.x,# initial_x,
        θ, #pick_best_x(f_incr_pick, state),
@@ -140,9 +140,10 @@ function __solve(prob::OptimizationProblem, opt;cb = (args...) -> (false), maxit
        0,
        true,
        NaN,
-        _time-t0)
+        _time-t0,
+        NamedTuple())
 end
-
+

 decompose_trace(trace::Optim.OptimizationTrace) = last(trace)
 decompose_trace(trace) = trace
@@ -157,7 +158,7 @@ function __solve(prob::OptimizationProblem, opt::Optim.AbstractOptimizer;cb = (a
        end
        cb_call
    end
-
+
    if prob.f isa OptimizationFunction
        _loss = function(θ)
            x = prob.f.f(θ, prob.p)
@@ -197,11 +198,11 @@ function __solve(prob::OptimizationProblem, opt::Union{Optim.Fminbox,Optim.SAMIN
        end
        cb_call
    end
-
+
    if prob.f isa OptimizationFunction && !(opt isa Optim.SAMIN)
        _loss = function(θ)
            x = prob.f.f(θ, prob.p)
-            return x[1]
+            return x[1]
        end
        fg! = function (G,θ)
            if G !== nothing
@@ -212,14 +213,14 @@ function __solve(prob::OptimizationProblem, opt::Union{Optim.Fminbox,Optim.SAMIN
        end
        optim_f = OnceDifferentiable(_loss, prob.f.grad, fg!, prob.x)
    else
-        !(opt isa Optim.ZerothOrderOptimizer) && error("Use OptimizationFunction to pass the derivatives or automatically generate them with one of the autodiff backends")
+        !(opt isa Optim.ZerothOrderOptimizer || opt isa Optim.SAMIN) && error("Use OptimizationFunction to pass the derivatives or automatically generate them with one of the autodiff backends")
        _loss = function(θ)
            x = prob.f isa OptimizationFunction ? prob.f.f(θ, prob.p) : prob.f(θ, prob.p)
-            return x[1]
+            return x[1]
        end
        optim_f = _loss
    end
-
+
    Optim.optimize(optim_f, prob.lb, prob.ub, prob.x, opt, Optim.Options(;extended_trace = true, callback = _cb, iterations = maxiters, kwargs...))
 end

@@ -228,14 +229,14 @@ function __init__()
    decompose_trace(opt::BlackBoxOptim.OptRunController) = BlackBoxOptim.best_candidate(opt)

    struct BBO
-        method::Symbol
+        method::Symbol
    end

    BBO() = BBO(:adaptive_de_rand_1_bin)

    function __solve(prob::OptimizationProblem, opt::BBO; cb = (args...) -> (false), maxiters = 1000, kwargs...)
        local x, _loss
-
+
        function _cb(trace)
            cb_call = cb(decompose_trace(trace),x...)
            if !(typeof(cb_call) <: Bool)
@@ -247,20 +248,20 @@ function __init__()
            cb_call
        end

-        if prob.f isa OptimizationFunction
+        if prob.f isa OptimizationFunction
            _loss = function(θ)
                x = prob.f.f(θ, prob.p)
                return x[1]
            end
-        else
+        else
            _loss = function(θ)
                x = prob.f(θ, prob.p)
                return x[1]
            end
        end

        bboptre = BlackBoxOptim.bboptimize(_loss;Method = opt.method, SearchRange = [(prob.lb[i], prob.ub[i]) for i in 1:length(prob.lb)], MaxSteps = maxiters, CallbackFunction = _cb, CallbackInterval = 0.0, kwargs...)
-
+
        Optim.MultivariateOptimizationResults(opt.method,
            [NaN],# initial_x,
            BlackBoxOptim.best_candidate(bboptre), #pick_best_x(f_incr_pick, state),
@@ -287,15 +288,16 @@ function __init__()
            0,
            true,
            NaN,
-            bboptre.elapsed_time)
+            bboptre.elapsed_time,
+            NamedTuple())
    end
 end

 @require NLopt="76087f3c-5699-56af-9a33-bf431cd00edd" begin
-    function __solve(prob::OptimizationProblem, opt::NLopt.Opt; maxiters = 1000, nstart = 1, local_method = nothing, kwargs...)
+    function __solve(prob::OptimizationProblem, opt::NLopt.Opt; maxiters = 1000, nstart = 1, local_method = nothing, kwargs...)
        local x

-        if prob.f isa OptimizationFunction
+        if prob.f isa OptimizationFunction
            _loss = function(θ)
                x = prob.f.f(θ, prob.p)
                return x[1]
@@ -304,11 +306,11 @@ function __init__()
                if length(G) > 0
                    prob.f.grad(G, θ)
                end
-
+
                return _loss(θ)
            end
            NLopt.min_objective!(opt, fg!)
-        else
+        else
            _loss = function(θ,G)
                x = prob.f(θ, prob.p)
                return x[1]
@@ -317,7 +319,7 @@ function __init__()
        end

        if prob.ub !== nothing
-            NLopt.upper_bounds!(opt, prob.ub)
+            NLopt.upper_bounds!(opt, prob.ub)
        end
        if prob.lb !== nothing
            NLopt.lower_bounds!(opt, prob.lb)
@@ -360,20 +362,21 @@ function __init__()
            0,
            ret,
            NaN,
-            _time-t0,)
-    end
+            _time-t0,
+            NamedTuple())
+    end
 end

 @require MultistartOptimization = "3933049c-43be-478e-a8bb-6e0f7fd53575" begin
    function __solve(prob::OptimizationProblem, opt::MultistartOptimization.TikTak; local_method, local_maxiters = 1000, kwargs...)
        local x, _loss
-
-        if prob.f isa OptimizationFunction
+
+        if prob.f isa OptimizationFunction
            _loss = function(θ)
                x = prob.f.f(θ, prob.p)
                return x[1]
            end
-        else
+        else
            _loss = function(θ)
                x = prob.f(θ, prob.p)
                return x[1]
@@ -386,9 +389,9 @@ function __init__()
        multistart_method = opt
        local_method = MultistartOptimization.NLoptLocalMethod(local_method, maxeval = local_maxiters)
        p = MultistartOptimization.multistart_minimization(multistart_method, local_method, P)
-
+
        t1 = time()
-
+
        Optim.MultivariateOptimizationResults(opt,
            [NaN],# initial_x,
            p.location, #pick_best_x(f_incr_pick, state),
@@ -415,25 +418,26 @@ function __init__()
            0,
            true,
            NaN,
-            t1 - t0)
+            t1 - t0,
+            NamedTuple())
    end
 end

 @require QuadDIRECT = "dae52e8d-d666-5120-a592-9e15c33b8d7a" begin
    export QuadDirect
-
+
    struct QuadDirect
    end

    function __solve(prob::OptimizationProblem, opt::QuadDirect; splits, maxiters = 1000, kwargs...)
        local x, _loss
-
-        if prob.f isa OptimizationFunction
+
+        if prob.f isa OptimizationFunction
            _loss = function(θ)
                x = prob.f.f(θ, prob.p)
                return x[1]
            end
-        else
+        else
            _loss = function(θ)
                x = prob.f(θ, prob.p)
                return x[1]
@@ -472,7 +476,8 @@ function __init__()
            0,
            true,
            NaN,
-            t1 - t0)
+            t1 - t0,
+            NamedTuple())
    end
 end

@@ -485,7 +490,7 @@ function __init__()

    function __solve(prob::OptimizationProblem, opt::Evolutionary.AbstractOptimizer; cb = (args...) -> (false), maxiters = 1000, kwargs...)
        local x, _loss
-
+
        function _cb(trace)
            cb_call = cb(decompose_trace(trace).metadata["x"],trace.value...)
            if !(typeof(cb_call) <: Bool)
@@ -494,12 +499,12 @@ function __init__()
            cb_call
        end

-        if prob.f isa OptimizationFunction
+        if prob.f isa OptimizationFunction
            _loss = function(θ)
                x = prob.f.f(θ, prob.p)
                return x[1]
            end
-        else
+        else
            _loss = function(θ)
                x = prob.f(θ, prob.p)
                return x[1]
@@ -515,7 +520,7 @@ function __init__()

    function __solve(prob::OptimizationProblem, opt::CMAEvolutionStrategyOpt; cb = (args...) -> (false), maxiters = 1000, kwargs...)
        local x, _loss
-
+
        function _cb(trace)
            cb_call = cb(decompose_trace(trace).metadata["x"],trace.value...)
            if !(typeof(cb_call) <: Bool)
@@ -524,12 +529,12 @@ function __init__()
            cb_call
        end

-        if prob.f isa OptimizationFunction
+        if prob.f isa OptimizationFunction
            _loss = function(θ)
                x = prob.f.f(θ, prob.p)
                return x[1]
            end
-        else
+        else
            _loss = function(θ)
                x = prob.f(θ, prob.p)
                return x[1]
@@ -565,7 +570,8 @@ function __init__()
            0,
            true,
            NaN,
-            result.logger.times[end] - result.logger.times[1])
+            result.logger.times[end] - result.logger.times[1],
+            NamedTuple())
    end
 end
 end
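
Most of this diff is whitespace cleanup around two substantive changes. First, every place that hand-builds an Optim.MultivariateOptimizationResults now passes one extra trailing argument, an empty NamedTuple(), matching the additional field Optim 1.0 expects in that constructor. Second, the box-constrained path relaxes its derivative check so that Optim.SAMIN, like the zeroth-order methods, no longer requires an OptimizationFunction with gradients. A minimal sketch of the relaxed condition (needs_gradient is a hypothetical name used only for illustration, not an API of GalacticOptim or Optim):

    using Optim

    # Mirrors the condition changed in this commit: only these two cases may
    # proceed without user-supplied derivatives.
    needs_gradient(opt) = !(opt isa Optim.ZerothOrderOptimizer || opt isa Optim.SAMIN)

    needs_gradient(NelderMead())      # false — zeroth order, function values only
    needs_gradient(SAMIN())           # false — now also exempt from the error
    needs_gradient(Fminbox(LBFGS()))  # true  — the inner optimizer needs gradients

Assuming this commit, a bounded problem should therefore run with SAMIN through the high-level interface even when only the objective is supplied; the lb/ub keywords and kwarg forwarding below are assumptions about this version of GalacticOptim rather than something shown in the diff:

    using GalacticOptim, Optim

    rosen(x, p) = (1 - x[1])^2 + 100 * (x[2] - x[1]^2)^2
    prob = OptimizationProblem(rosen, [0.0, 0.0]; lb = [-1.0, -1.0], ub = [2.0, 2.0])
    sol  = solve(prob, SAMIN(), maxiters = 10_000)   # no gradient required after this change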

test/rosenbrock.jl

Lines changed: 1 addition & 1 deletion
@@ -21,7 +21,7 @@ rosenbrock(x, p=nothing) = (1 - x[1])^2 + 100 * (x[2] - x[1]^2)^2

 l1 = rosenbrock(x0)
 prob = OptimizationProblem(rosenbrock, x0)
-sol = solve(prob, NelderMead())
+sol = solve(prob, NelderMead())
 @test 10*sol.minimum < l1

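The test change is whitespace only, but the hunk shows the high-level API the suite exercises. A runnable version of the surrounding context, with the pieces not visible in the hunk (the using line and x0) filled in as assumptions:

    using GalacticOptim, Optim, Test

    rosenbrock(x, p = nothing) = (1 - x[1])^2 + 100 * (x[2] - x[1]^2)^2
    x0 = zeros(2)                              # assumed starting point, not shown in the hunk

    l1 = rosenbrock(x0)
    prob = OptimizationProblem(rosenbrock, x0)
    sol = solve(prob, NelderMead())            # the line reindented by this commit
    @test 10*sol.minimum < l1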

test/runtests.jl

Lines changed: 3 additions & 6 deletions
@@ -1,7 +1,4 @@
-using GalacticOptim
-using Test
+using SafeTestsets

-@testset "GalacticOptim.jl" begin
-    include("rosenbrock.jl")
-    include("ADtests.jl")
-end
+@safetestset "Rosenbrock" begin include("rosenbrock.jl") end
+@safetestset "AD Tests" begin include("ADtests.jl") end
