From 0c715cbc7a93d7747ce62b854065c6a5143c0c03 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Sebastian=20Miclu=C8=9Ba-C=C3=A2mpeanu?=
Date: Sat, 17 May 2025 04:08:07 +0300
Subject: [PATCH] feat: add initial version of OptimizationIpopt

This package directly uses the C interface in Ipopt.jl. The
implementation is based on OptimizationMOI, but it also adds
Ipopt-specific elements, such as the callback handling.

Co-authored-by: Vaibhav Dixit
Co-authored-by: Valentin Kaisermayer <50108075+ValentinKaisermayer@users.noreply.github.com>
Co-authored-by: Fredrik Bagge Carlson
Co-authored-by: Oscar Dowson
---
 lib/OptimizationIpopt/LICENSE          |  21 +
 lib/OptimizationIpopt/Project.toml     |  21 +
 .../src/OptimizationIpopt.jl           | 209 ++++++++++
 lib/OptimizationIpopt/src/cache.jl     | 367 ++++++++++++++++++
 lib/OptimizationIpopt/src/callback.jl  |  82 ++++
 lib/OptimizationIpopt/test/runtests.jl | 107 +++++
 6 files changed, 807 insertions(+)
 create mode 100644 lib/OptimizationIpopt/LICENSE
 create mode 100644 lib/OptimizationIpopt/Project.toml
 create mode 100644 lib/OptimizationIpopt/src/OptimizationIpopt.jl
 create mode 100644 lib/OptimizationIpopt/src/cache.jl
 create mode 100644 lib/OptimizationIpopt/src/callback.jl
 create mode 100644 lib/OptimizationIpopt/test/runtests.jl

diff --git a/lib/OptimizationIpopt/LICENSE b/lib/OptimizationIpopt/LICENSE
new file mode 100644
index 000000000..ac2363b14
--- /dev/null
+++ b/lib/OptimizationIpopt/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2025 Sebastian Micluța-Câmpeanu and contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
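For reviewers, a minimal usage sketch distilled from the tests added in this
patch (the options shown — `callback`, `hessian_approximation`, `maxiters` —
are the ones wired up in `__map_optimizer_args`):

    using Optimization, OptimizationIpopt
    using Zygote

    rosenbrock(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2
    prob = OptimizationProblem(
        OptimizationFunction(rosenbrock, Optimization.AutoZygote()),
        zeros(2), [1.0, 100.0])

    # Optimization.jl convention: return `true` from the callback to halt
    # the solve early; returning `false` lets it continue.
    callback = (state, loss) -> false

    sol = solve(prob, IpoptOptimizer(); callback,
        hessian_approximation = "limited-memory", maxiters = 1000)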
diff --git a/lib/OptimizationIpopt/Project.toml b/lib/OptimizationIpopt/Project.toml
new file mode 100644
index 000000000..e6353d651
--- /dev/null
+++ b/lib/OptimizationIpopt/Project.toml
@@ -0,0 +1,21 @@
+name = "OptimizationIpopt"
+uuid = "43fad042-7963-4b32-ab19-e2a4f9a67124"
+authors = ["Sebastian Micluța-Câmpeanu and contributors"]
+version = "0.1.0"
+
+[deps]
+Ipopt = "b6b21f68-93f8-5de0-b562-5493be1d77c9"
+LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
+Optimization = "7f7a1694-90dd-40f0-9382-eb1efda571ba"
+SciMLBase = "0bca4576-84f4-4d90-8ffe-ffa030f20462"
+SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
+SymbolicIndexingInterface = "2efcf032-c050-4f8e-a9bb-153293bab1f5"
+
+[compat]
+Ipopt = "1.10.3"
+LinearAlgebra = "1.11.0"
+Optimization = "4.3.0"
+SciMLBase = "2.90.0"
+SparseArrays = "1.11.0"
+SymbolicIndexingInterface = "0.3.40"
+julia = "1.10"
diff --git a/lib/OptimizationIpopt/src/OptimizationIpopt.jl b/lib/OptimizationIpopt/src/OptimizationIpopt.jl
new file mode 100644
index 000000000..4509bd55b
--- /dev/null
+++ b/lib/OptimizationIpopt/src/OptimizationIpopt.jl
@@ -0,0 +1,209 @@
+module OptimizationIpopt
+
+using Optimization
+using Ipopt
+using LinearAlgebra
+using SparseArrays
+using SciMLBase
+using SymbolicIndexingInterface
+
+export IpoptOptimizer
+
+const DenseOrSparse{T} = Union{Matrix{T}, SparseMatrixCSC{T}}
+
+struct IpoptOptimizer end
+
+function SciMLBase.supports_opt_cache_interface(alg::IpoptOptimizer)
+    true
+end
+
+function SciMLBase.requiresgradient(opt::IpoptOptimizer)
+    true
+end
+function SciMLBase.requireshessian(opt::IpoptOptimizer)
+    true
+end
+function SciMLBase.requiresconsjac(opt::IpoptOptimizer)
+    true
+end
+function SciMLBase.requiresconshess(opt::IpoptOptimizer)
+    true
+end
+
+function SciMLBase.allowsbounds(opt::IpoptOptimizer)
+    true
+end
+function SciMLBase.allowsconstraints(opt::IpoptOptimizer)
+    true
+end
+
+include("cache.jl")
+include("callback.jl")
+
+function __map_optimizer_args(cache,
+        opt::IpoptOptimizer;
+        maxiters::Union{Number, Nothing} = nothing,
+        maxtime::Union{Number, Nothing} = nothing,
+        abstol::Union{Number, Nothing} = nothing,
+        reltol::Union{Number, Nothing} = nothing,
+        hessian_approximation = "exact",
+        verbose = false,
+        progress = false,
+        callback = nothing,
+        kwargs...)
+    jacobian_sparsity = jacobian_structure(cache)
+    hessian_sparsity = hessian_lagrangian_structure(cache)
+
+    eval_f(x) = eval_objective(cache, x)
+    eval_grad_f(x, grad_f) = eval_objective_gradient(cache, grad_f, x)
+    eval_g(x, g) = eval_constraint(cache, g, x)
+    function eval_jac_g(x, rows, cols, values)
+        if values === nothing
+            for i in 1:length(jacobian_sparsity)
+                rows[i], cols[i] = jacobian_sparsity[i]
+            end
+        else
+            eval_constraint_jacobian(cache, values, x)
+        end
+        return
+    end
+    function eval_h(x, rows, cols, obj_factor, lambda, values)
+        if values === nothing
+            for i in 1:length(hessian_sparsity)
+                rows[i], cols[i] = hessian_sparsity[i]
+            end
+        else
+            eval_hessian_lagrangian(cache, values, x, obj_factor, lambda)
+        end
+        return
+    end
+
+    lb = isnothing(cache.lb) ? fill(-Inf, cache.n) : cache.lb
+    ub = isnothing(cache.ub) ? fill(Inf, cache.n) : cache.ub
+
+    prob = Ipopt.CreateIpoptProblem(
+        cache.n,
+        lb,
+        ub,
+        cache.num_cons,
+        cache.lcons,
+        cache.ucons,
+        length(jacobian_structure(cache)),
+        length(hessian_lagrangian_structure(cache)),
+        eval_f,
+        eval_g,
+        eval_grad_f,
+        eval_jac_g,
+        eval_h
+    )
+    progress_callback = IpoptProgressLogger(cache.progress, cache, prob)
+    intermediate = (args...) -> progress_callback(args...)
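+    # Ipopt invokes the intermediate callback once per iteration; returning
+    # `false` from it asks Ipopt to stop, which Ipopt reports as
+    # User_Requested_Stop (mapped to ReturnCode.Success below).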
+    Ipopt.SetIntermediateCallback(prob, intermediate)
+
+    if !isnothing(maxiters)
+        Ipopt.AddIpoptIntOption(prob, "max_iter", maxiters)
+    end
+    if !isnothing(maxtime)
+        Ipopt.AddIpoptNumOption(prob, "max_cpu_time", maxtime)
+    end
+    if !isnothing(abstol)
+        Ipopt.AddIpoptNumOption(prob, "tol", abstol)
+    end
+    if verbose isa Bool
+        Ipopt.AddIpoptIntOption(prob, "print_level", verbose * 5)
+    else
+        Ipopt.AddIpoptIntOption(prob, "print_level", verbose)
+    end
+    Ipopt.AddIpoptStrOption(prob, "hessian_approximation", hessian_approximation)
+
+    return prob
+end
+
+function map_retcode(solvestat)
+    status = Ipopt.ApplicationReturnStatus(solvestat)
+    if status in [
+        Ipopt.Solve_Succeeded,
+        Ipopt.Solved_To_Acceptable_Level,
+        Ipopt.User_Requested_Stop,
+        Ipopt.Feasible_Point_Found
+    ]
+        return ReturnCode.Success
+    elseif status in [
+        Ipopt.Infeasible_Problem_Detected,
+        Ipopt.Search_Direction_Becomes_Too_Small,
+        Ipopt.Diverging_Iterates
+    ]
+        return ReturnCode.Infeasible
+    elseif status == Ipopt.Maximum_Iterations_Exceeded
+        return ReturnCode.MaxIters
+    elseif status in [Ipopt.Maximum_CpuTime_Exceeded,
+        Ipopt.Maximum_WallTime_Exceeded]
+        return ReturnCode.MaxTime
+    else
+        return ReturnCode.Failure
+    end
+end
+
+function SciMLBase.__solve(cache::IpoptCache)
+    maxiters = Optimization._check_and_convert_maxiters(cache.solver_args.maxiters)
+    maxtime = Optimization._check_and_convert_maxtime(cache.solver_args.maxtime)
+
+    # Splat solver_args first so that the converted maxiters/maxtime below
+    # take precedence (with duplicate keyword arguments, the last one wins).
+    opt_setup = __map_optimizer_args(cache,
+        cache.opt;
+        cache.solver_args...,
+        abstol = cache.solver_args.abstol,
+        reltol = cache.solver_args.reltol,
+        maxiters = maxiters,
+        maxtime = maxtime)
+
+    opt_setup.x .= cache.reinit_cache.u0
+
+    start_time = time()
+    status = Ipopt.IpoptSolve(opt_setup)
+
+    opt_ret = map_retcode(status)
+
+    if cache.progress
+        # Set the progress bar to 1 to finish it
+        Base.@logmsg(Base.LogLevel(-1), "", progress=1, _id=:OptimizationIpopt)
+    end
+
+    # Undo the sign flip applied in eval_objective for maximization problems
+    minimum = cache.sense === Optimization.MaxSense ? -opt_setup.obj_val :
+              opt_setup.obj_val
+    minimizer = opt_setup.x
+
+    stats = Optimization.OptimizationStats(; time = time() - start_time,
+        iterations = cache.iterations, fevals = cache.f_calls, gevals = cache.f_grad_calls)
+
+    finalize(opt_setup)
+
+    return SciMLBase.build_solution(cache,
+        cache.opt,
+        minimizer,
+        minimum;
+        original = opt_setup,
+        retcode = opt_ret,
+        stats = stats)
+end
+
+function SciMLBase.__init(prob::OptimizationProblem,
+        opt::IpoptOptimizer;
+        maxiters::Union{Number, Nothing} = nothing,
+        maxtime::Union{Number, Nothing} = nothing,
+        abstol::Union{Number, Nothing} = nothing,
+        reltol::Union{Number, Nothing} = nothing,
+        mtkize = false,
+        kwargs...)
+    cache = IpoptCache(prob, opt;
+        maxiters,
+        maxtime,
+        abstol,
+        reltol,
+        mtkize,
+        kwargs...
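+        # remaining kwargs (e.g. `hessian_approximation`) end up in the
+        # cache's `solver_args` and are forwarded to `__map_optimizer_args`
+        # at solve time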
+    )
+    cache.reinit_cache.u0 .= prob.u0
+
+    return cache
+end
+
+end # OptimizationIpopt
diff --git a/lib/OptimizationIpopt/src/cache.jl b/lib/OptimizationIpopt/src/cache.jl
new file mode 100644
index 000000000..595278a0d
--- /dev/null
+++ b/lib/OptimizationIpopt/src/cache.jl
@@ -0,0 +1,367 @@
+mutable struct IpoptCache{T, F <: OptimizationFunction, RC, LB, UB, I, S,
+    JT <: DenseOrSparse{T}, HT <: DenseOrSparse{T},
+    CHT <: DenseOrSparse{T}, CB, O} <: SciMLBase.AbstractOptimizationCache
+    const f::F
+    const n::Int
+    const num_cons::Int
+    const reinit_cache::RC
+    const lb::LB
+    const ub::UB
+    const int::I
+    const lcons::Vector{T}
+    const ucons::Vector{T}
+    const sense::S
+    J::JT
+    H::HT
+    cons_H::Vector{CHT}
+    const callback::CB
+    const progress::Bool
+    f_calls::Int
+    f_grad_calls::Int
+    iterations::Cint
+    obj_expr::Union{Expr, Nothing}
+    cons_expr::Union{Vector{Expr}, Nothing}
+    const opt::O
+    const solver_args::NamedTuple
+end
+
+function Base.getproperty(cache::IpoptCache, name::Symbol)
+    if name in fieldnames(OptimizationBase.ReInitCache)
+        return getfield(cache.reinit_cache, name)
+    end
+    return getfield(cache, name)
+end
+function Base.setproperty!(cache::IpoptCache, name::Symbol, x)
+    if name in fieldnames(OptimizationBase.ReInitCache)
+        return setfield!(cache.reinit_cache, name, x)
+    end
+    return setfield!(cache, name, x)
+end
+
+function SciMLBase.get_p(sol::SciMLBase.OptimizationSolution{
+        T,
+        N,
+        uType,
+        C
+}) where {T, N, uType, C <: IpoptCache}
+    sol.cache.p
+end
+function SciMLBase.get_observed(sol::SciMLBase.OptimizationSolution{
+        T,
+        N,
+        uType,
+        C
+}) where {T, N, uType, C <: IpoptCache}
+    sol.cache.f.observed
+end
+function SciMLBase.get_syms(sol::SciMLBase.OptimizationSolution{
+        T,
+        N,
+        uType,
+        C
+}) where {T, N, uType, C <: IpoptCache}
+    variable_symbols(sol.cache.f)
+end
+function SciMLBase.get_paramsyms(sol::SciMLBase.OptimizationSolution{
+        T,
+        N,
+        uType,
+        C
+}) where {T, N, uType, C <: IpoptCache}
+    parameter_symbols(sol.cache.f)
+end
+
+function IpoptCache(prob, opt;
+        mtkize,
+        callback = nothing,
+        progress = false,
+        kwargs...)
+    reinit_cache = OptimizationBase.ReInitCache(prob.u0, prob.p) # everything that can be changed via `reinit`
+
+    num_cons = prob.ucons === nothing ? 0 : length(prob.ucons)
+    if prob.f.adtype isa ADTypes.AutoSymbolics || (prob.f.adtype isa ADTypes.AutoSparse &&
+        prob.f.adtype.dense_ad isa ADTypes.AutoSymbolics)
+        f = Optimization.instantiate_function(
+            prob.f, reinit_cache, prob.f.adtype, num_cons;
+            g = true, h = true, cons_j = true, cons_h = true)
+    else
+        f = Optimization.instantiate_function(
+            prob.f, reinit_cache, prob.f.adtype, num_cons;
+            g = true, h = true, cons_j = true, cons_vjp = true, lag_h = true)
+    end
+    T = eltype(prob.u0)
+    n = length(prob.u0)
+
+    J = if isnothing(f.cons_jac_prototype)
+        zeros(T, num_cons, n)
+    else
+        convert.(T, f.cons_jac_prototype)
+    end
+    lagh = !isnothing(f.lag_hess_prototype)
+    H = if lagh # the Lagrangian Hessian takes precedence
+        convert.(T, f.lag_hess_prototype)
+    elseif !isnothing(f.hess_prototype)
+        convert.(T, f.hess_prototype)
+    else
+        zeros(T, n, n)
+    end
+    cons_H = if lagh
+        Matrix{T}[zeros(T, 0, 0) for i in 1:num_cons] # no need to allocate these when using the Lagrangian Hessian
+    elseif isnothing(f.cons_hess_prototype)
+        Matrix{T}[zeros(T, n, n) for i in 1:num_cons]
+    else
+        [convert.(T, f.cons_hess_prototype[i]) for i in 1:num_cons]
+    end
+    lcons = prob.lcons === nothing ? fill(T(-Inf), num_cons) : prob.lcons
+    ucons = prob.ucons === nothing ? fill(T(Inf), num_cons) : prob.ucons
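+
+    # If no symbolic system is attached and `mtkize` is set, attempt to build
+    # symbolic objective/constraint expressions via ModelingToolkit (assumed
+    # available as `MTK`); they are stored as `obj_expr`/`cons_expr` below.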
+    if f.sys isa SymbolicIndexingInterface.SymbolCache{Nothing, Nothing, Nothing} && mtkize
+        try
+            sys = MTK.modelingtoolkitize(prob)
+        catch err
+            throw(ArgumentError("Automatic symbolic expression generation with ModelingToolkit failed with error: $err.
+                Try setting `mtkize = false` instead if the solver doesn't require symbolic expressions."))
+        end
+        if !isnothing(prob.p) && !(prob.p isa SciMLBase.NullParameters)
+            unames = variable_symbols(sys)
+            pnames = parameter_symbols(sys)
+            us = [unames[i] => prob.u0[i] for i in 1:length(prob.u0)]
+            ps = [pnames[i] => prob.p[i] for i in 1:length(prob.p)]
+            sysprob = OptimizationProblem(sys, us, ps)
+        else
+            unames = variable_symbols(sys)
+            us = [unames[i] => prob.u0[i] for i in 1:length(prob.u0)]
+            sysprob = OptimizationProblem(sys, us)
+        end
+
+        obj_expr = sysprob.f.expr
+        cons_expr = sysprob.f.cons_expr
+    else
+        sys = f.sys isa SymbolicIndexingInterface.SymbolCache{Nothing, Nothing, Nothing} ?
+              nothing : f.sys
+        obj_expr = f.expr
+        cons_expr = f.cons_expr
+    end
+
+    if sys === nothing
+        expr = obj_expr
+        _cons_expr = cons_expr
+    else
+        expr_map = get_expr_map(sys)
+        expr = convert_to_expr(obj_expr, expr_map; expand_expr = false)
+        expr = repl_getindex!(expr)
+        cons = MTK.constraints(sys)
+        _cons_expr = Vector{Expr}(undef, length(cons))
+        for i in eachindex(cons)
+            _cons_expr[i] = repl_getindex!(convert_to_expr(cons_expr[i],
+                expr_map;
+                expand_expr = false))
+        end
+    end
+    solver_args = NamedTuple(kwargs)
+
+    return IpoptCache(
+        f,
+        n,
+        num_cons,
+        reinit_cache,
+        prob.lb,
+        prob.ub,
+        prob.int,
+        lcons,
+        ucons,
+        prob.sense,
+        J,
+        H,
+        cons_H,
+        callback,
+        progress,
+        0,
+        0,
+        Cint(0),
+        expr,
+        _cons_expr,
+        opt,
+        solver_args
+    )
+end
+
+function eval_objective(cache::IpoptCache, x)
+    l = cache.f(x, cache.p)
+    cache.f_calls += 1
+    return cache.sense === Optimization.MaxSense ? -l : l
+end
+
+function eval_constraint(cache::IpoptCache, g, x)
+    cache.f.cons(g, x)
+    return
+end
+
+function eval_objective_gradient(cache::IpoptCache, G, x)
+    if cache.f.grad === nothing
+        error("Use OptimizationFunction to pass the objective gradient or " *
+              "automatically generate it with one of the autodiff backends. " *
+              "If you are using the ModelingToolkit symbolic interface, pass the `grad` kwarg set to `true` in `OptimizationProblem`.")
+    end
+    cache.f.grad(G, x)
+    cache.f_grad_calls += 1
+
+    if cache.sense === Optimization.MaxSense
+        G .*= -one(eltype(G))
+    end
+
+    return
+end
+
+function jacobian_structure(cache::IpoptCache)
+    if cache.J isa SparseMatrixCSC
+        rows, cols, _ = findnz(cache.J)
+        inds = Tuple{Int, Int}[(i, j) for (i, j) in zip(rows, cols)]
+    else
+        rows, cols = size(cache.J)
+        inds = Tuple{Int, Int}[(i, j) for j in 1:cols for i in 1:rows]
+    end
+    return inds
+end
+
+function eval_constraint_jacobian(cache::IpoptCache, j, x)
+    if isempty(j)
+        return
+    elseif cache.f.cons_j === nothing
+        error("Use OptimizationFunction to pass the constraints' Jacobian or " *
+              "automatically generate it with one of the autodiff backends. " *
+              "If you are using the ModelingToolkit symbolic interface, pass the `cons_j` kwarg set to `true` in `OptimizationProblem`.")
+    end
+    # Get and cache the Jacobian object here once. `cache.J` calls
+    # `getproperty`, which is expensive because it calls `fieldnames`.
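+    # The values written into `j` must follow the ordering reported by
+    # `jacobian_structure`: `findnz` (CSC) order for sparse Jacobians and
+    # column-major (`vec`) order for dense ones.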
+    J = cache.J
+    cache.f.cons_j(J, x)
+    if J isa SparseMatrixCSC
+        nnz = nonzeros(J)
+        @assert length(j) == length(nnz)
+        for (i, Ji) in zip(eachindex(j), nnz)
+            j[i] = Ji
+        end
+    else
+        j .= vec(J)
+    end
+    return
+end
+
+function hessian_lagrangian_structure(cache::IpoptCache)
+    lagh = cache.f.lag_h !== nothing
+    if cache.f.lag_hess_prototype isa SparseMatrixCSC
+        rows, cols, _ = findnz(cache.f.lag_hess_prototype)
+        return Tuple{Int, Int}[(i, j) for (i, j) in zip(rows, cols) if i <= j]
+    end
+    sparse_obj = cache.H isa SparseMatrixCSC
+    sparse_constraints = all(H -> H isa SparseMatrixCSC, cache.cons_H)
+    if !lagh && !sparse_constraints && any(H -> H isa SparseMatrixCSC, cache.cons_H)
+        # Some constraint Hessians are dense and some are sparse! :(
+        error("A mix of sparse and dense constraint Hessians is not supported")
+    end
+    N = length(cache.u0)
+    inds = if sparse_obj
+        rows, cols, _ = findnz(cache.H)
+        Tuple{Int, Int}[(i, j) for (i, j) in zip(rows, cols) if i <= j]
+    else
+        Tuple{Int, Int}[(row, col) for col in 1:N for row in 1:col]
+    end
+    lagh && return inds
+    if sparse_constraints
+        for Hi in cache.cons_H
+            r, c, _ = findnz(Hi)
+            for (i, j) in zip(r, c)
+                if i <= j
+                    push!(inds, (i, j))
+                end
+            end
+        end
+    elseif !sparse_obj
+        # Performance optimization. If the objective and the constraints are
+        # both dense, they share the same triangular index set, so there is
+        # no need to repeat it.
+    else
+        for col in 1:N, row in 1:col
+            push!(inds, (row, col))
+        end
+    end
+    return inds
+end
+
+function eval_hessian_lagrangian(cache::IpoptCache{T},
+        h,
+        x,
+        σ,
+        μ) where {T}
+    if cache.f.lag_h !== nothing
+        cache.f.lag_h(h, x, σ, Vector(μ))
+
+        if cache.sense === Optimization.MaxSense
+            h .*= -one(eltype(h))
+        end
+
+        return
+    end
+    if cache.f.hess === nothing
+        error("Use OptimizationFunction to pass the objective Hessian or " *
+              "automatically generate it with one of the autodiff backends. " *
+              "If you are using the ModelingToolkit symbolic interface, pass the `hess` kwarg set to `true` in `OptimizationProblem`.")
+    end
+    # Get and cache the Hessian object here once. `cache.H` calls
+    # `getproperty`, which is expensive because it calls `fieldnames`.
+    H = cache.H
+    fill!(h, zero(T))
+    k = 0
+    cache.f.hess(H, x)
+    sparse_objective = H isa SparseMatrixCSC
+    if sparse_objective
+        rows, cols, _ = findnz(H)
+        for (i, j) in zip(rows, cols)
+            if i <= j
+                k += 1
+                h[k] = σ * H[i, j]
+            end
+        end
+    else
+        for i in 1:size(H, 1), j in 1:i
+            k += 1
+            h[k] = σ * H[i, j]
+        end
+    end
+    # A count of the number of non-zeros in the objective Hessian is needed if
+    # the constraints are dense.
+    nnz_objective = k
+    if !isempty(μ) && !all(iszero, μ)
+        if cache.f.cons_h === nothing
+            error("Use OptimizationFunction to pass the constraints' Hessian or " *
+                  "automatically generate it with one of the autodiff backends. " *
+                  "If you are using the ModelingToolkit symbolic interface, pass the `cons_h` kwarg set to `true` in `OptimizationProblem`.")
+        end
+        cache.f.cons_h(cache.cons_H, x)
+        for (μi, Hi) in zip(μ, cache.cons_H)
+            if Hi isa SparseMatrixCSC
+                rows, cols, _ = findnz(Hi)
+                for (i, j) in zip(rows, cols)
+                    if i <= j
+                        k += 1
+                        h[k] += μi * Hi[i, j]
+                    end
+                end
+            else
+                # The constraints are dense. We only store one copy of the
+                # Hessian, so reset `k` to where it starts. That will be
+                # `nnz_objective` if the objective is sparse, and `0` otherwise.
+                k = sparse_objective ? nnz_objective : 0
+                for i in 1:size(Hi, 1), j in 1:i
+                    k += 1
+                    h[k] += μi * Hi[i, j]
+                end
+            end
+        end
+    end
+
+    if cache.sense === Optimization.MaxSense
+        h .*= -one(eltype(h))
+    end
+
+    return
+end
diff --git a/lib/OptimizationIpopt/src/callback.jl b/lib/OptimizationIpopt/src/callback.jl
new file mode 100644
index 000000000..ab1f28b0e
--- /dev/null
+++ b/lib/OptimizationIpopt/src/callback.jl
@@ -0,0 +1,82 @@
+struct IpoptState
+    alg_mod::Cint
+    iter_count::Cint
+    obj_value::Float64
+    inf_pr::Float64
+    inf_du::Float64
+    mu::Float64
+    d_norm::Float64
+    regularization_size::Float64
+    alpha_du::Float64
+    alpha_pr::Float64
+    ls_trials::Cint
+    z_L::Vector{Float64}
+    z_U::Vector{Float64}
+    lambda::Vector{Float64}
+end
+
+struct IpoptProgressLogger{C <: IpoptCache, P}
+    progress::Bool
+    cache::C
+    prob::P
+end
+
+function (cb::IpoptProgressLogger)(
+        alg_mod::Cint,
+        iter_count::Cint,
+        obj_value::Float64,
+        inf_pr::Float64,
+        inf_du::Float64,
+        mu::Float64,
+        d_norm::Float64,
+        regularization_size::Float64,
+        alpha_du::Float64,
+        alpha_pr::Float64,
+        ls_trials::Cint
+)
+    n = cb.cache.n
+    m = cb.cache.num_cons
+    u, z_L, z_U = zeros(n), zeros(n), zeros(n)
+    g, lambda = zeros(m), zeros(m)
+    scaled = false
+    Ipopt.GetIpoptCurrentIterate(cb.prob, scaled, n, u, z_L, z_U, m, g, lambda)
+
+    original = IpoptState(
+        alg_mod,
+        iter_count,
+        obj_value,
+        inf_pr,
+        inf_du,
+        mu,
+        d_norm,
+        regularization_size,
+        alpha_du,
+        alpha_pr,
+        ls_trials,
+        z_L,
+        z_U,
+        lambda
+    )
+
+    opt_state = Optimization.OptimizationState(;
+        iter = Int(iter_count), u, objective = obj_value, original)
+    cb.cache.iterations = iter_count
+
+    if cb.cache.progress
+        maxiters = cb.cache.solver_args.maxiters
+        msg = "objective: " *
+              sprint(show, obj_value, context = :compact => true)
+        if !isnothing(maxiters)
+            # we stop at either convergence or maxiters
+            Base.@logmsg(Base.LogLevel(-1), msg, progress=iter_count / maxiters,
+                _id=:OptimizationIpopt)
+        end
+    end
+    if !isnothing(cb.cache.callback)
+        # Return `true` to keep going, or `false` to terminate the optimization.
+        # This is the other way around compared to Optimization.jl callbacks.
+        !cb.cache.callback(opt_state, obj_value)
+    else
+        true
+    end
+end
diff --git a/lib/OptimizationIpopt/test/runtests.jl b/lib/OptimizationIpopt/test/runtests.jl
new file mode 100644
index 000000000..8d550ac69
--- /dev/null
+++ b/lib/OptimizationIpopt/test/runtests.jl
@@ -0,0 +1,107 @@
+using Optimization, OptimizationIpopt
+using Zygote
+using Symbolics
+using Test
+using SparseArrays
+using ModelingToolkit
+
+rosenbrock(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2
+x0 = zeros(2)
+_p = [1.0, 100.0]
+l1 = rosenbrock(x0, _p)
+
+optfunc = OptimizationFunction((x, p) -> -rosenbrock(x, p), Optimization.AutoZygote())
+prob = OptimizationProblem(optfunc, x0, _p; sense = Optimization.MaxSense)
+
+callback = function (state, l)
+    display(l)
+    return false
+end
+
+sol = solve(prob, IpoptOptimizer(); callback, hessian_approximation = "exact")
+@test SciMLBase.successful_retcode(sol)
+@test sol ≈ [1, 1]
+
+sol = solve(prob, IpoptOptimizer(); callback, hessian_approximation = "limited-memory")
+@test SciMLBase.successful_retcode(sol)
+@test sol ≈ [1, 1]
+
+function _test_sparse_derivatives_hs071(backend, optimizer)
+    function objective(x, ::Any)
+        return x[1] * x[4] * (x[1] + x[2] + x[3]) + x[3]
+    end
+    function constraints(res, x, ::Any)
+        res .= [
+            x[1] * x[2] * x[3] * x[4],
+            x[1]^2 + x[2]^2 + x[3]^2 + x[4]^2
+        ]
+    end
+    prob = OptimizationProblem(
+        OptimizationFunction(objective, backend; cons = constraints),
+        [1.0, 5.0, 5.0, 1.0];
+        sense = Optimization.MinSense,
+        lb = [1.0, 1.0, 1.0, 1.0],
+        ub = [5.0, 5.0, 5.0, 5.0],
+        lcons = [25.0, 40.0],
+        ucons = [Inf, 40.0])
+    sol = solve(prob, optimizer)
+    @test isapprox(sol.objective, 17.014017145179164; atol = 1e-6)
+    x = [1.0, 4.7429996418092970, 3.8211499817883077, 1.3794082897556983]
+    @test isapprox(sol.u, x; atol = 1e-6)
+    @test prod(sol.u) >= 25.0 - 1e-6
+    @test isapprox(sum(sol.u .^ 2), 40.0; atol = 1e-6)
+    return
+end
+
+@testset "backends" begin
+    backends = (Optimization.AutoModelingToolkit(false, false),
+        Optimization.AutoModelingToolkit(true, false),
+        Optimization.AutoModelingToolkit(false, true),
+        Optimization.AutoModelingToolkit(true, true))
+    for backend in backends
+        @testset "$backend" begin
+            _test_sparse_derivatives_hs071(backend, IpoptOptimizer())
+        end
+    end
+end
+
+@testset "cache" begin
+    @variables x
+    @parameters a = 1.0
+    @named sys = OptimizationSystem((x - a)^2, [x], [a];)
+    sys = complete(sys)
+    prob = OptimizationProblem(sys, [x => 0.0], []; grad = true, hess = true)
+    cache = init(prob, IpoptOptimizer(); print_level = 0)
+    @test cache isa OptimizationIpopt.IpoptCache
+    sol = solve!(cache)
+    @test sol.u ≈ [1.0]
+
+    @test_broken begin # needs reinit/remake fixes
+        cache = Optimization.reinit!(cache; p = [2.0])
+        sol = solve!(cache)
+        @test sol.u ≈ [2.0]
+    end
+end
+
+@testset "tutorial" begin
+    rosenbrock(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2
+    x0 = zeros(2)
+    _p = [1.0, 1.0]
+
+    cons(res, x, p) = (res .= [x[1]^2 + x[2]^2, x[1] * x[2]])
+
+    function lagh(res, x, sigma, mu, p)
+        lH = sigma * [2 + 8(x[1]^2) * p[2]-4(x[2] - (x[1]^2)) * p[2] -4p[2]*x[1]
+                      -4p[2]*x[1] 2p[2]] .+ [2mu[1] mu[2]
+                      mu[2] 2mu[1]]
+        res .= lH[[1, 3, 4]]
+    end
+    lag_hess_prototype = sparse([1 1; 0 1])
+
+    optprob = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff();
+        cons = cons, lag_h = lagh, lag_hess_prototype)
+    prob = OptimizationProblem(optprob, x0, _p, lcons = [1.0, 0.5], ucons = [1.0, 0.5])
+    sol = solve(prob, IpoptOptimizer())
+
+    @test SciMLBase.successful_retcode(sol)
+end
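A note on the `lag_h` convention used in the tutorial test above:
`hessian_lagrangian_structure` reports the upper triangle of the Hessian in
column-major order, so for the 2x2 prototype `sparse([1 1; 0 1])` the entries
are (1,1), (1,2), (2,2) — linear indices 1, 3, 4 of the dense matrix, hence
`res .= lH[[1, 3, 4]]`. A minimal sketch of the same packing for a generic
dense symmetric Hessian (the helper `pack_upper` is illustrative, not part of
the package):

    # Pack the upper triangle of a dense symmetric matrix in column-major
    # order, matching the `(row, col) for col in 1:N for row in 1:col`
    # ordering used by hessian_lagrangian_structure.
    function pack_upper(lH::AbstractMatrix)
        N = size(lH, 1)
        return [lH[row, col] for col in 1:N for row in 1:col]
    end

    pack_upper([1.0 2.0; 2.0 3.0])  # == [1.0, 2.0, 3.0], i.e. entries (1,1), (1,2), (2,2)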