Skip to content

Commit

Permalink
[MadNLPGPU] compat for CUDA updated (#49)
Browse files Browse the repository at this point in the history
  • Loading branch information
sshin23 committed Jul 7, 2021
1 parent cfe94d1 commit 78cb7f5
Show file tree
Hide file tree
Showing 4 changed files with 22 additions and 19 deletions.
2 changes: 2 additions & 0 deletions .ci/ci.jl
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
rm("Manifest.toml";force=true)

using Pkg

if ARGS[1] == "full"
Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
![Logo](logo-full.svg)
![Logo](logo-full.svg)
---

[![build](https://github.com/sshin23/MadNLP.jl/workflows/build/badge.svg?branch=dev%2Fgithub_actions)](https://github.com/sshin23/MadNLP.jl/actions?query=workflow%3Abuild) [![codecov](https://codecov.io/gh/sshin23/MadNLP.jl/branch/master/graph/badge.svg)](https://codecov.io/gh/sshin23/MadNLP.jl)
Expand Down
2 changes: 1 addition & 1 deletion lib/MadNLPGPU/Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba"
MadNLP = "2621e9c9-9eb4-46b1-8089-e8c72242dfb6"

[compat]
CUDA = "~2,~3.1,~3.2"
CUDA = "~2,~3"
MadNLP = "~0.2"
julia = "1.5"

Expand Down
35 changes: 18 additions & 17 deletions lib/MadNLPGraph/test/runtests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -38,23 +38,24 @@ using Test, Plasmo, MadNLP, MadNLPTests, MadNLPGraph
end
end

optimizer_constructors = [
("default",()->MadNLP.Optimizer(print_level=MadNLP.ERROR)),
("schur",()->MadNLP.Optimizer(linear_solver=MadNLPSchur,schur_num_parts=2,print_level=MadNLP.ERROR)),
("schwarz-single",()->MadNLP.Optimizer(linear_solver=MadNLPSchwarz,schwarz_num_parts=2,print_level=MadNLP.ERROR)),
("schwarz-double",()->MadNLP.Optimizer(linear_solver=MadNLPSchwarz,schwarz_num_parts_upper=2,schwarz_num_parts=10,
print_level=MadNLP.ERROR))
]
# maybe not necessary for MadNLPGraph
# optimizer_constructors = [
# ("default",()->MadNLP.Optimizer(print_level=MadNLP.ERROR)),
# ("schur",()->MadNLP.Optimizer(linear_solver=MadNLPSchur,schur_num_parts=2,print_level=MadNLP.ERROR)),
# ("schwarz-single",()->MadNLP.Optimizer(linear_solver=MadNLPSchwarz,schwarz_num_parts=2,print_level=MadNLP.ERROR)),
# ("schwarz-double",()->MadNLP.Optimizer(linear_solver=MadNLPSchwarz,schwarz_num_parts_upper=2,schwarz_num_parts=10,
# print_level=MadNLP.ERROR))
# ]

for (name,optimizer_constructor) in optimizer_constructors
@testset "jump-$name" begin
node,~=combine(graph)
m = node.model
set_optimizer(m,optimizer_constructor)
optimize!(m);
@test solcmp([0.0,0.03137979101284875,0.0627286139604959,0.09401553133948139,0.12520966673966746,0.15628023531552773,0.18719657416707308,0.21792817260043182,0.24844470223822107,0.2787160469499936], value.(getnode(graph,1)[:x][1:10]))
@test termination_status(m) == MOI.LOCALLY_SOLVED
end
end
# for (name,optimizer_constructor) in optimizer_constructors
# @testset "jump-$name" begin
# node,~=combine(graph)
# m = node.model
# set_optimizer(m,optimizer_constructor)
# optimize!(m);
# @test solcmp([0.0,0.03137979101284875,0.0627286139604959,0.09401553133948139,0.12520966673966746,0.15628023531552773,0.18719657416707308,0.21792817260043182,0.24844470223822107,0.2787160469499936], value.(getnode(graph,1)[:x][1:10]))
# @test termination_status(m) == MOI.LOCALLY_SOLVED
# end
# end

end

0 comments on commit 78cb7f5

Please sign in to comment.