From cf8f0f441ededf348d26af20ad6724fdc2af2af1 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Beno=C3=AEt=20Legat?=
Date: Mon, 17 Jul 2023 09:18:32 +0200
Subject: [PATCH] Parse NL at MOI level

---
 src/moi_nlp_model.jl | 17 ++++++++++-------
 src/moi_nls_model.jl |  8 +++++---
 src/utils.jl         | 36 ++++++++++++++++--------------------
 3 files changed, 31 insertions(+), 30 deletions(-)

diff --git a/src/moi_nlp_model.jl b/src/moi_nlp_model.jl
index 3587956..780bf3d 100644
--- a/src/moi_nlp_model.jl
+++ b/src/moi_nlp_model.jl
@@ -16,16 +16,19 @@ Construct a `MathOptNLPModel` from a `JuMP` model.
 
 `hessian` should be set to `false` for multivariate user-defined functions registered without hessian.
 """
-function MathOptNLPModel(jmodel::JuMP.Model; hessian::Bool = true, name::String = "Generic")
-  moimodel = backend(jmodel)
+function MathOptNLPModel(jmodel::JuMP.Model; kws...)
+  _nlp_sync!(jmodel)
+  return MathOptNLPModel(backend(jmodel); kws...)
+end
 
+function MathOptNLPModel(moimodel::MOI.ModelLike; hessian::Bool = true, name::String = "Generic")
   nvar, lvar, uvar, x0 = parser_variables(moimodel)
   nlin, lincon, lin_lcon, lin_ucon = parser_MOI(moimodel)
 
-  eval = NLPEvaluator(jmodel)
-  nnln, nlcon, nl_lcon, nl_ucon = parser_NL(jmodel, eval, hessian = hessian)
+  nlp_data = MOI.get(moimodel, MOI.NLPBlock())
+  nnln, nlcon, nl_lcon, nl_ucon = parser_NL(nlp_data, hessian = hessian)
 
-  if jmodel.nlp_model.objective !== nothing
+  if nlp_data.has_objective
     obj = Objective("NONLINEAR", 0.0, spzeros(Float64, nvar), COO(), 0)
   else
     obj = parser_objective_MOI(moimodel, nvar)
@@ -51,12 +54,12 @@
     lin = collect(1:nlin),
     lin_nnzj = lincon.nnzj,
     nln_nnzj = nlcon.nnzj,
-    minimize = objective_sense(jmodel) == MOI.MIN_SENSE,
+    minimize = MOI.get(moimodel, MOI.ObjectiveSense()) == MOI.MIN_SENSE,
     islp = (obj.type == "LINEAR") && (nnln == 0),
     name = name,
   )
 
-  return MathOptNLPModel(meta, eval, lincon, nlcon, obj, Counters())
+  return MathOptNLPModel(meta, nlp_data.evaluator, lincon, nlcon, obj, Counters())
 end
 
 function NLPModels.obj(nlp::MathOptNLPModel, x::AbstractVector)
diff --git a/src/moi_nls_model.jl b/src/moi_nls_model.jl
index 6e4c904..0720e40 100644
--- a/src/moi_nls_model.jl
+++ b/src/moi_nls_model.jl
@@ -28,10 +28,12 @@ function MathOptNLSModel(cmodel::JuMP.Model, F; hessian::Bool = true, name::Stri
 
   lls, linequ, nlinequ = parser_linear_expression(cmodel, nvar, F)
   Feval, nlequ, nnlnequ = parser_nonlinear_expression(cmodel, nvar, F, hessian = hessian)
 
+  _nlp_sync!(cmodel)
+  moimodel = backend(cmodel)
   nlin, lincon, lin_lcon, lin_ucon = parser_MOI(moimodel)
-  ceval = NLPEvaluator(cmodel)
-  nnln, nlcon, nl_lcon, nl_ucon = parser_NL(cmodel, ceval, hessian = hessian)
+  nlp_data = MOI.get(moimodel, MOI.NLPBlock())
+  nnln, nlcon, nl_lcon, nl_ucon = parser_NL(nlp_data, hessian = hessian)
 
   nequ = nlinequ + nnlnequ
   Fnnzj = linequ.nnzj + nlequ.nnzj
@@ -66,7 +68,7 @@ function MathOptNLSModel(cmodel::JuMP.Model, F; hessian::Bool = true, name::Stri
     meta,
     NLSMeta(nequ, nvar, nnzj = Fnnzj, nnzh = Fnnzh, lin = collect(1:nlinequ)),
     Feval,
-    ceval,
+    nlp_data.evaluator,
     lls,
     linequ,
     nlequ,
diff --git a/src/utils.jl b/src/utils.jl
index 5c026a4..79f7ea3 100644
--- a/src/utils.jl
+++ b/src/utils.jl
@@ -244,27 +244,12 @@ end
 
 Parse nonlinear constraints of a `MOI.Nonlinear.Evaluator`.
 """
-function parser_NL(jmodel, eval; hessian::Bool = true)
-  nnln = num_nonlinear_constraints(jmodel)
-  nl_lcon = fill(-Inf, nnln)
-  nl_ucon = fill(Inf, nnln)
-  for (i, (_, nl_constraint)) in enumerate(jmodel.nlp_model.constraints)
-    rhs = nl_constraint.set
-    if rhs isa MOI.EqualTo
-      nl_lcon[i] = rhs.value
-      nl_ucon[i] = rhs.value
-    elseif rhs isa MOI.GreaterThan
-      nl_lcon[i] = rhs.lower
-    elseif rhs isa MOI.LessThan
-      nl_ucon[i] = rhs.upper
-    elseif rhs isa MOI.Interval
-      nl_lcon[i] = rhs.lower
-      nl_ucon[i] = rhs.upper
-    else
-      error("Unexpected constraint type: $(typeof(rhs))")
-    end
-  end
+function parser_NL(nlp_data; hessian::Bool = true)
+  nnln = length(nlp_data.constraint_bounds)
+  nl_lcon = Float64[bounds.lower for bounds in nlp_data.constraint_bounds]
+  nl_ucon = Float64[bounds.upper for bounds in nlp_data.constraint_bounds]
+  eval = nlp_data.evaluator
 
   MOI.initialize(eval, hessian ? [:Grad, :Jac, :JacVec, :Hess, :HessVec] : [:Grad, :Jac, :JacVec])
 
   jac = MOI.jacobian_structure(eval)
@@ -505,3 +490,14 @@ function parser_nonlinear_expression(cmodel, nvar, F; hessian::Bool = true)
 
   return Feval, nlequ, nnlnequ
 end
+
+function _nlp_sync!(model::JuMP.Model)
+  # The nlp model of the backend is not kept in sync, so re-set it here as in `JuMP.optimize!`
+  evaluator = MOI.Nonlinear.Evaluator(
+    # `force = true` is needed if there is no NL objective or constraint
+    JuMP.nonlinear_model(model, force = true),
+    MOI.Nonlinear.SparseReverseMode(),
+    JuMP.index.(JuMP.all_variables(model)),
+  )
+  MOI.set(model, MOI.NLPBlock(), MOI.NLPBlockData(evaluator))
+end