NLopt: Reuse constraint evaluations #832

Merged · 4 commits · Sep 26, 2024
4 changes: 2 additions & 2 deletions NEWS.md
@@ -1,5 +1,5 @@
# v4 Breaking changes

The main change in this breaking release is the way mini-batching is handled. The data argument in the solve call, and the implicit iteration over it in the callback, have been removed; the stochastic solvers (Optimisers.jl and Sophia) now handle mini-batching explicitly. If you want to do mini-batching, you now pass a DataLoader to OptimizationProblem as the second argument to the objective etc. (p); otherwise, for full batch, just pass in the full data.

GitHub Actions / Spell Check with Typos (warning on line 4 in NEWS.md): "Optimziation" should be "Optimization".
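To make the new interface concrete, here is a minimal sketch of the DataLoader flow described above, assuming the v4 behavior that the solver iterates the loader and passes each batch to the objective as p. The linear model, learning rate, and iteration budget are illustrative assumptions, not code from this release:

    using Optimization, OptimizationOptimisers, MLUtils, Zygote

    # Toy regression data; each DataLoader batch plays the role of p.
    x = rand(1000)
    y = 2.0 .* x .+ 1.0
    data = MLUtils.DataLoader((x, y), batchsize = 100)

    # The objective receives the current (x, y) batch through p.
    function loss(θ, p)
        xb, yb = p
        sum(abs2, θ[1] .* xb .+ θ[2] .- yb) / length(yb)
    end

    optf = OptimizationFunction(loss, Optimization.AutoZygote())
    prob = OptimizationProblem(optf, zeros(2), data)  # DataLoader as second argument
    sol = solve(prob, Optimisers.Adam(0.05), maxiters = 1000)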
45 changes: 33 additions & 12 deletions lib/OptimizationNLopt/src/OptimizationNLopt.jl
@@ -232,29 +232,50 @@ function SciMLBase.__solve(cache::OptimizationCache{
     if cache.f.cons !== nothing
         eqinds = map((y) -> y[1] == y[2], zip(cache.lcons, cache.ucons))
         ineqinds = map((y) -> y[1] != y[2], zip(cache.lcons, cache.ucons))
+        cons_cache = zeros(eltype(cache.u0), sum(eqinds) + sum(ineqinds))
+        thetacache = rand(size(cache.u0))
+        Jthetacache = rand(size(cache.u0))
+        Jcache = zeros(eltype(cache.u0), sum(ineqinds) + sum(eqinds), length(cache.u0))
+        evalcons = function (θ, ineqoreq)
+            if thetacache != θ
+                cache.f.cons(cons_cache, θ)
+                thetacache = copy(θ)
+            end
+            if ineqoreq == :eq
+                return @view(cons_cache[eqinds])
+            else
+                return @view(cons_cache[ineqinds])
+            end
+        end
+
+        evalconj = function (θ, ineqoreq)
+            if Jthetacache != θ
+                cache.f.cons_j(Jcache, θ)
+                Jthetacache = copy(θ)
+            end
+
+            if ineqoreq == :eq
+                return @view(Jcache[eqinds, :])'
+            else
+                return @view(Jcache[ineqinds, :])'
+            end
+        end
+
         if sum(ineqinds) > 0
             ineqcons = function (res, θ, J)
-                cons_cache = zeros(eltype(res), sum(eqinds) + sum(ineqinds))
-                cache.f.cons(cons_cache, θ)
-                res .= @view(cons_cache[ineqinds])
+                res .= copy(evalcons(θ, :ineq))
                 if length(J) > 0
-                    Jcache = zeros(eltype(J), sum(ineqinds) + sum(eqinds), length(θ))
-                    cache.f.cons_j(Jcache, θ)
-                    J .= @view(Jcache[ineqinds, :])'
+                    J .= copy(evalconj(θ, :ineq))
                 end
             end
             NLopt.inequality_constraint!(
                 opt_setup, ineqcons, [cache.solver_args.cons_tol for i in 1:sum(ineqinds)])
         end
         if sum(eqinds) > 0
             eqcons = function (res, θ, J)
-                cons_cache = zeros(eltype(res), sum(eqinds) + sum(ineqinds))
-                cache.f.cons(cons_cache, θ)
-                res .= @view(cons_cache[eqinds])
+                res .= copy(evalcons(θ, :eq))
                 if length(J) > 0
-                    Jcache = zeros(eltype(res), sum(eqinds) + sum(ineqinds), length(θ))
-                    cache.f.cons_j(Jcache, θ)
-                    J .= @view(Jcache[eqinds, :])'
+                    J .= copy(evalconj(θ, :eq))
                 end
             end
             NLopt.equality_constraint!(
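The heart of the patch is the pair of closures above: NLopt registers separate callbacks for inequality and equality constraints, while cons and cons_j each compute every constraint row at once, so each closure evaluates once per new θ and serves both callbacks views into a shared cache. The rand(size(cache.u0)) sentinel simply guarantees that the first comparison against a real θ fails, forcing an initial evaluation, and the vector of tolerances passed to NLopt.inequality_constraint! registers all inequality rows through NLopt's vector-constraint form in one call. Below is a standalone sketch of the same evaluate-once, slice-twice pattern; the struct and toy constraints are illustrative stand-ins, not code from this PR:

    # Minimal sketch of the caching pattern, with a nothing sentinel
    # in place of the PR's rand-initialized one.
    mutable struct ConsCache{T}
        θlast::Union{Nothing, Vector{T}}  # last θ at which cons! was run
        vals::Vector{T}                   # all constraint values at θlast
    end

    function cached_cons!(c::ConsCache, cons!, θ)
        if c.θlast === nothing || c.θlast != θ
            cons!(c.vals, θ)              # one evaluation covers every row
            c.θlast = copy(θ)             # copy, since the optimizer mutates θ
        end
        return c.vals
    end

    # Row 1 plays the inequality constraint, row 2 the equality constraint.
    cons!(out, θ) = (out[1] = θ[1]^2 + θ[2]^2 - 1.0; out[2] = θ[1] - θ[2])
    c = ConsCache(nothing, zeros(2))
    θ = [0.3, 0.4]
    ineq = cached_cons!(c, cons!, θ)[1:1]  # triggers the single evaluation
    eq = cached_cons!(c, cons!, θ)[2:2]    # served from the cache, no re-run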
1 change: 1 addition & 0 deletions lib/OptimizationNLopt/test/runtests.jl
@@ -117,6 +117,7 @@ using Test, Random
     # @test sol.retcode == ReturnCode.Success
     @test 10 * sol.objective < l1

+    Random.seed!(1)
     prob = OptimizationProblem(optprob, [0.5, 0.5], _p, lcons = [-Inf, -Inf],
         ucons = [0.0, 0.0], lb = [-1.0, -1.0], ub = [1.0, 1.0])
     sol = solve(prob, NLopt.GN_ISRES(), maxiters = 1000)
3 changes: 2 additions & 1 deletion lib/OptimizationOptimisers/src/OptimizationOptimisers.jl
@@ -93,7 +93,8 @@ function SciMLBase.__solve(cache::OptimizationCache{
             cache.f.grad(G, θ)
             x = cache.f(θ)
         end
-        opt_state = Optimization.OptimizationState(iter = i + (epoch-1)*length(data),
+        opt_state = Optimization.OptimizationState(
+            iter = i + (epoch - 1) * length(data),
             u = θ,
             objective = x[1],
             grad = G,
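The reformatting also surfaces the iteration bookkeeping: i counts batches within the current epoch, so iter = i + (epoch - 1) * length(data) yields a single monotone counter across the whole run. A quick self-contained check of that arithmetic, with an arbitrary batch and epoch count:

    nbatches = 5                      # stands in for length(data)
    iters = [i + (epoch - 1) * nbatches for epoch in 1:3 for i in 1:nbatches]
    @assert iters == collect(1:15)    # epochs 1..3 give global steps 1..15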
3 changes: 2 additions & 1 deletion lib/OptimizationOptimisers/test/runtests.jl
@@ -70,7 +70,8 @@ using Zygote
 end

 @testset "Minibatching" begin
-    using Optimization, OptimizationOptimisers, Lux, Zygote, MLUtils, Random, ComponentArrays
+    using Optimization, OptimizationOptimisers, Lux, Zygote, MLUtils, Random,
+        ComponentArrays

     x = rand(10000)
     y = sin.(x)
2 changes: 1 addition & 1 deletion test/minibatch.jl
@@ -60,7 +60,7 @@ optprob = OptimizationProblem(optfun, pp, train_loader)

 res1 = Optimization.solve(optprob,
     Optimization.Sophia(), callback = callback,
-    maxiters = 1000)
+    maxiters = 2000)
 @test 10res1.objective < l1

 optfun = OptimizationFunction(loss_adjoint,