diff --git a/Project.toml b/Project.toml
index a9fbf620..a8fb99ce 100644
--- a/Project.toml
+++ b/Project.toml
@@ -36,12 +36,12 @@ IterativeSolvers = "~0.9.1"
 JuMP = "~0.21"
 LightGraphs = "~1.3"
 MKL_jll = "~2020"
-MUMPS_seq_jll = "~5.2.1, ~5.3.5"
+MUMPS_seq_jll = "~5.2.1, 5.3.5"
 MathOptInterface = "~0.9"
 Metis = "~1.0"
 NLPModels = "~0.14"
 OpenBLAS32_jll = "~0.3"
-Plasmo = "~0.3"
+Plasmo = "~0.3, 0.4"
 SolverCore = "~0.1"
 StaticArrays = "~0.12, 1.0"
 julia = "1.6"
diff --git a/src/Interfaces/Plasmo_interface.jl b/src/Interfaces/Plasmo_interface.jl
index fe81529e..ee2f90b3 100644
--- a/src/Interfaces/Plasmo_interface.jl
+++ b/src/Interfaces/Plasmo_interface.jl
@@ -4,7 +4,15 @@ const dummy_function = ()->nothing
 num_linkconstraints(modeledge::OptiEdge) = length(modeledge.linkconstraints)
 
-moi_optimizer(modelnode::OptiNode) = modelnode.model.moi_backend.optimizer.model
+function _caching_optimizer(modelnode::OptiNode)
+    if isa(modelnode.model.moi_backend,MOIU.CachingOptimizer)
+        return modelnode.model.moi_backend
+    else
+        return modelnode.model.moi_backend.optimizer
+    end
+end
+moi_optimizer(modelnode::OptiNode) = _caching_optimizer(modelnode).optimizer.model
+_caching_optimizer(model::Any) = model.moi_backend
 
 function set_g_link!(linkedge::OptiEdge,l,gl,gu)
     cnt = 1
     for (ind,linkcon) in linkedge.linkconstraints
@@ -165,9 +173,9 @@ function NonlinearProgram(graph::OptiGraph)
     for modelnode in modelnodes
         num_variables(modelnode) == 0 && error("Empty node exist! 
 Delete the empty nodes.")
     end
-    
+
     @blas_safe_threads for k=1:length(modelnodes)
-        set_optimizer(modelnodes[k].model,Optimizer)
+        set_optimizer(modelnodes[k],Optimizer)
         if modelnodes[k].model.nlp_data !== nothing
             MOI.set(modelnodes[k].model, MOI.NLPBlock(), _create_nlp_block_data(modelnodes[k].model))
@@ -234,7 +242,8 @@ function NonlinearProgram(graph::OptiGraph)
     modelmap=Dict(modelnodes[k].model=> k for k=1:K)
 
     x_index_map = Dict(
-        var=>ninds[modelmap[var.model]][backend(var.model).model_to_optimizer_map[var.index].value]
+        # var=>ninds[modelmap[var.model]][backend(var.model).optimizer.model_to_optimizer_map[var.index].value]
+        var=>ninds[modelmap[var.model]][_caching_optimizer(getnode(var)).model_to_optimizer_map[var.index].value]
         for modelnode in modelnodes for var in all_variables(modelnode))
     cnt = 0
     g_index_map = Dict(con=> m + (cnt+=1) for linkedge in linkedges for (ind,con) in linkedge.linkconstraints)
diff --git a/src/MadNLP.jl b/src/MadNLP.jl
index 0391e0f1..b6ccc267 100644
--- a/src/MadNLP.jl
+++ b/src/MadNLP.jl
@@ -18,7 +18,7 @@ import Base: string, show, print, size, getindex, copyto!, @kwdef
 import StaticArrays: SVector, setindex
 import SuiteSparse: UMFPACK
 import LightGraphs: Graph, Edge, add_edge!, edges, src, dst, neighbors, nv
-import Plasmo: OptiGraph, OptiNode, OptiEdge, all_nodes, all_edges, all_variables, num_all_nodes, num_variables, getlinkconstraints
+import Plasmo: OptiGraph, OptiNode, OptiEdge, all_nodes, all_edges, all_variables, num_all_nodes, num_variables, getlinkconstraints, getnode
 import JuMP: _create_nlp_block_data, set_optimizer, GenericAffExpr, backend, termination_status
 import NLPModels: finalize, AbstractNLPModel, obj, grad!, cons!, jac_coord!, hess_coord!, hess_structure!, jac_structure!
 import SolverCore: GenericExecutionStats