diff --git a/src/R2_alg.jl b/src/R2_alg.jl
index e7098f8b..4613270e 100644
--- a/src/R2_alg.jl
+++ b/src/R2_alg.jl
@@ -132,7 +132,7 @@ For advanced usage, first define a solver "R2Solver" to preallocate the memory u
     solver = R2Solver(reg_nlp)
     solve!(solver, reg_nlp)
 
-    stats = GenericExecutionStats(reg_nlp.model)
+    stats = RegularizedExecutionStats(reg_nlp)
     solver = R2Solver(reg_nlp)
     solve!(solver, reg_nlp, stats)
 
@@ -292,7 +292,7 @@ function R2(reg_nlp::AbstractRegularizedNLPModel; kwargs...)
   kwargs_dict = Dict(kwargs...)
   max_iter = pop!(kwargs_dict, :max_iter, 10000)
   solver = R2Solver(reg_nlp, max_iter = max_iter)
-  stats = GenericExecutionStats(reg_nlp.model)
+  stats = GenericExecutionStats(reg_nlp.model) # TODO: change this to `stats = RegularizedExecutionStats(reg_nlp)` when FHist etc. is ruled out.
   cb = pop!(
     kwargs_dict,
     :callback,
diff --git a/src/utils.jl b/src/utils.jl
index b250dcb6..cb8d3a1c 100644
--- a/src/utils.jl
+++ b/src/utils.jl
@@ -1,3 +1,7 @@
+export RegularizedExecutionStats
+
+import SolverCore.GenericExecutionStats
+
 # use Arpack to obtain largest eigenvalue in magnitude with a minimum of robustness
 function LinearAlgebra.opnorm(B; kwargs...)
   _, s, _ = tsvd(B)
@@ -20,3 +24,19 @@ ShiftedProximalOperators.iprox!(
 
 LinearAlgebra.diag(op::AbstractDiagonalQuasiNewtonOperator) = copy(op.d)
 LinearAlgebra.diag(op::SpectralGradient{T}) where {T} = zeros(T, op.nrow) .* op.d[1]
+
+"""
+    RegularizedExecutionStats(reg_nlp :: AbstractRegularizedNLPModel{T, V})
+
+Construct a GenericExecutionStats object from an AbstractRegularizedNLPModel.
+More specifically, construct a GenericExecutionStats on the NLPModel of reg_nlp and add three solver_specific entries, namely :smooth_obj, :nonsmooth_obj and :xi.
+This is useful for reducing the number of allocations when calling solve!(..., reg_nlp, stats) and should be used by default.
+Warning: This should *not* be used when adding other solver_specific entries whose type differs from the scalar type T.
+"""
+function RegularizedExecutionStats(reg_nlp :: AbstractRegularizedNLPModel{T, V}) where {T, V}
+  stats = GenericExecutionStats(reg_nlp.model, solver_specific = Dict{Symbol, T}())
+  set_solver_specific!(stats, :smooth_obj, T(Inf))
+  set_solver_specific!(stats, :nonsmooth_obj, T(Inf))
+  set_solver_specific!(stats, :xi, T(Inf))
+  return stats
+end
\ No newline at end of file
diff --git a/test/test_allocs.jl b/test/test_allocs.jl
index 62fbd64e..3eb218eb 100644
--- a/test/test_allocs.jl
+++ b/test/test_allocs.jl
@@ -41,7 +41,7 @@ end
   for solver ∈ (:R2Solver,)
     reg_nlp = RegularizedNLPModel(bpdn, h)
     solver = eval(solver)(reg_nlp)
-    stats = GenericExecutionStats(bpdn, solver_specific = Dict{Symbol, Float64}())
+    stats = RegularizedExecutionStats(reg_nlp)
     @test @wrappedallocs(solve!(solver, reg_nlp, stats)) == 0
   end
 end
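
For context, a minimal usage sketch of the new constructor (not part of the patch), following the pattern in test/test_allocs.jl; `bpdn` and `h` stand for the smooth model and the regularizer set up elsewhere in the test suite and are assumed here:

# Assumes `bpdn` (an NLPModel) and `h` (a regularizer) are already defined, as in the tests.
reg_nlp = RegularizedNLPModel(bpdn, h)
solver = R2Solver(reg_nlp)
stats = RegularizedExecutionStats(reg_nlp)  # preallocates :smooth_obj, :nonsmooth_obj and :xi
solve!(solver, reg_nlp, stats)              # in-place solve, reusing the preallocated stats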