From 7ca342a5dd9fbdd1ed386d8b012016cd5e6480ef Mon Sep 17 00:00:00 2001
From: Boris Kaus
Date: Tue, 8 Oct 2024 12:05:28 +0200
Subject: [PATCH] change how we call MPI

---
 src/Run.jl                | 19 ++++++++++++++++++-
 src/run_lamem.jl          | 13 ++++++++++++-
 test/test_julia_setups.jl |  4 ++--
 3 files changed, 32 insertions(+), 4 deletions(-)

diff --git a/src/Run.jl b/src/Run.jl
index 6176240..bec3adf 100644
--- a/src/Run.jl
+++ b/src/Run.jl
@@ -9,7 +9,24 @@
 include("run_lamem.jl")
 include("run_lamem_save_grid.jl")
 include("utils_Run.jl")
 
-const mpiexec = MPI.mpiexec()
+#setup MPI
+if isdefined(LaMEM_jll,:MPICH_jll)
+    const mpiexec = LaMEM_jll.MPICH_jll.mpiexec()
+    const MPI_LIBPATH = LaMEM_jll.MPICH_jll.LIBPATH
+elseif isdefined(LaMEM_jll,:MicrosoftMPI_jll)
+    const mpiexec = LaMEM_jll.MicrosoftMPI_jll.mpiexec()
+    const MPI_LIBPATH = LaMEM_jll.MicrosoftMPI_jll.LIBPATH
+elseif isdefined(LaMEM_jll,:OpenMPI_jll)
+    const mpiexec = LaMEM_jll.OpenMPI_jll.mpiexec()
+    const MPI_LIBPATH = LaMEM_jll.OpenMPI_jll.LIBPATH
+elseif isdefined(LaMEM_jll,:MPItrampoline_jll)
+    const mpiexec = LaMEM_jll.MPItrampoline_jll.mpiexec()
+    const MPI_LIBPATH = LaMEM_jll.MPItrampoline_jll.LIBPATH
+else
+    println("Be careful! No MPI library detected; parallel runs won't work")
+    const mpiexec = nothing
+    const MPI_LIBPATH = Ref{String}("")
+end
 
 end
\ No newline at end of file
diff --git a/src/run_lamem.jl b/src/run_lamem.jl
index be7b3af..ab8bbd4 100644
--- a/src/run_lamem.jl
+++ b/src/run_lamem.jl
@@ -18,6 +18,14 @@ function deactivate_multithreading(cmd::Cmd)
     return cmd
 end
 
+# Shamelessly stolen from the tests of LBT
+if Sys.iswindows()
+    pathsep = ';'
+elseif Sys.isapple()
+    pathsep = ':'
+else
+    pathsep = ':'
+end
 """
     run_lamem(ParamFile::String, cores::Int64=1, args:String=""; wait=true, deactivate_multithreads=true)
 
@@ -56,7 +64,10 @@
         run(cmd, wait=wait);
     else
         # set correct environment
-        mpirun = setenv(mpiexec, LaMEM_jll.LaMEM().env);
+        #mpirun = setenv(mpiexec, LaMEM_jll.LaMEM().env);
+        key = LaMEM_jll.JLLWrappers.JLLWrappers.LIBPATH_env
+        mpirun = addenv(mpiexec, key=>join((LaMEM_jll.LIBPATH[], MPI_LIBPATH[]), pathsep));
+
 
         # create command-line object
         cmd = `$(mpirun) -n $cores_compute $(LaMEM_jll.LaMEM().exec) -ParamFile $(ParamFile) $args`
diff --git a/test/test_julia_setups.jl b/test/test_julia_setups.jl
index ccde72c..4d3d2a1 100644
--- a/test/test_julia_setups.jl
+++ b/test/test_julia_setups.jl
@@ -12,7 +12,7 @@ using GeophysicalModelGenerator
     model = Model(Grid(nel=(16,16,16), x=[-2,2], coord_y=[-1,1], coord_z=[-1,1]),
                   Time(nstep_max=2, dt=1, dt_max=10),
                   Solver(SolverType="multigrid", MGLevels=2),
-                  Output(out_dir="example_1"))
+                  Output(out_dir="example_1", out_dev_stress=1))
 
     # Specify material properties
     matrix = Phase(ID=0,Name="matrix",eta=1e20,rho=3000)
@@ -27,7 +27,7 @@ using GeophysicalModelGenerator
 
     # read last timestep
     data,time = read_LaMEM_timestep(model,last=true);
-
+    @test sum(data.fields.velocity[3][:,:,:]) ≈ 0.10747005f0 rtol=1e-1 # check Vz
 
     # @test sum(data.fields.velocity[3][:,:,:]) ≈ 0.10866211f0 # check Vz