MPI.jl
provides wrappers for the system MPI libraries, and the MPIClusterManagers.jl
package lets you control MPI workflows from within Julia.
# Import MPIManager from MPIClusterManagers.
using MPIClusterManagers
# Distributed is needed for addprocs().
using Distributed
# Create a cluster manager that will launch 4 MPI worker processes.
manager=MPIManager(np=4)
# Start the MPI workers and register them as Julia (Distributed) workers too;
# returns the new worker ids (2:5 in the transcript below).
addprocs(manager)
4-element Vector{Int64}:
 2
 3
 4
 5
# Run this block on every MPI rank (MPIManager has already initialized MPI
# on the workers). Each rank reports its 0-based rank and the total rank count.
@mpi_do manager begin
using MPI: MPI, Comm, Win, free
comm = MPI.COMM_WORLD
rank = MPI.Comm_rank(comm)
# Renamed from `size`: binding `size` in the worker's Main module would
# shadow Base.size there, breaking later calls to size() on the workers.
nranks = MPI.Comm_size(comm)
println("Hello world, I am $(rank) of $(nranks)")
end
From worker 3: Hello world, I am 1 of 4
From worker 5: Hello world, I am 3 of 4
From worker 2: Hello world, I am 0 of 4
From worker 4: Hello world, I am 2 of 4
@mpi_do manager begin
# Every rank executes this block, so each rank constructs its own
# zero-initialized 4-element SharedArray bound to `a` in its Main module.
# NOTE(review): SharedArrays share memory between local Julia processes,
# not across MPI ranks — the transcript output below shows each rank
# observing only its own copy of `a`.
using SharedArrays
a = SharedArray{Int64}(4)
end
@mpi_do manager begin
# `rank` and `a` persist in each worker's Main module from the earlier
# @mpi_do blocks. Each rank writes its rank into slot rank+1 (Julia arrays
# are 1-based) of its copy of `a`, then prints the whole array.
a[rank + 1] = rank
println("$(rank): $(a)")
end
From worker 2: 0: [0, 0, 0, 0]
From worker 4: 2: [0, 0, 2, 0]
From worker 5: 3: [0, 0, 0, 3]
From worker 3: 1: [0, 1, 0, 0]
@mpi_do manager begin
# Read the slot written by a DIFFERENT rank: (rank+2)%4 + 1 maps each of
# the 4 ranks to an index other than the one it wrote. Every read returns 0
# (see the output below), confirming the arrays are per-rank copies rather
# than memory genuinely shared across the MPI ranks.
index = (rank+2)%4 + 1
println("$(rank): $(a[index])")
end
From worker 4: 2: 0
From worker 2: 0: 0
From worker 3: 1: 0
From worker 5: 3: 0