Commit
Improve distributed tests
luraess committed Nov 20, 2023
1 parent 92e97ae commit 9a90435
Showing 4 changed files with 120 additions and 88 deletions.
20 changes: 18 additions & 2 deletions test/runtests.jl
@@ -3,7 +3,15 @@ using FastIce

using Pkg

excludedfiles = ["test_excluded.jl"];
excludedfiles = ["test_excluded.jl"]

# distributed
test_distributed = ["test_distributed_2D.jl", "test_distributed_3D.jl"]
nprocs_2D = 4
nprocs_3D = 8
ENV["DIMX"] = 2
ENV["DIMY"] = 2
ENV["DIMZ"] = 2

function parse_flags!(args, flag; default=nothing, typ=typeof(default))
for f in args
@@ -42,7 +50,15 @@ function runtests()
continue
end
try
run(`$exename --startup-file=no $(joinpath(testdir, f))`)
            if basename(f) ∈ test_distributed
nprocs = contains(f, "2D") ? nprocs_2D : nprocs_3D
cmd(n=nprocs) = `mpiexecjl -n $n $exename --startup-file=no --color=yes $(joinpath(testdir, f))`
withenv("JULIA_NUM_THREADS" => "4") do
run(cmd())
end
else
run(`$exename --startup-file=no $(joinpath(testdir, f))`)
end
catch ex
nfail += 1
end
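For reference, a small standalone sketch of how the settings above fit together: the DIM* environment variables describe the Cartesian process grid, so they must multiply to the MPI rank counts, and the distributed files are launched through mpiexecjl rather than a plain julia call. The values and paths below are illustrative only, not part of the commit; mpiexecjl is assumed to be on PATH.

    # The DIM* variables describe the process grid each test expects,
    # so they must multiply to the number of MPI ranks launched per test.
    nprocs_2D, nprocs_3D = 4, 8
    ENV["DIMX"] = 2; ENV["DIMY"] = 2; ENV["DIMZ"] = 2

    @assert prod(parse.(Int, (ENV["DIMX"], ENV["DIMY"]))) == nprocs_2D              # 2*2   == 4
    @assert prod(parse.(Int, (ENV["DIMX"], ENV["DIMY"], ENV["DIMZ"]))) == nprocs_3D # 2*2*2 == 8

    # Illustrative expansion of the launch command for the 2D file
    # (inside runtests(), exename and testdir are resolved from the running session).
    exename = joinpath(Sys.BINDIR, Base.julia_exename())
    testdir = @__DIR__
    cmd = `mpiexecjl -n $nprocs_2D $exename --startup-file=no --color=yes $(joinpath(testdir, "test_distributed_2D.jl"))`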
86 changes: 0 additions & 86 deletions test/test_distributed.jl

This file was deleted.

47 changes: 47 additions & 0 deletions test/test_distributed_2D.jl
@@ -0,0 +1,47 @@
include("common.jl")

using MPI
using FastIce.Distributed

MPI.Init()

nprocs = MPI.Comm_size(MPI.COMM_WORLD)

backend = CPU() # until we have a testing environment set up for GPU-aware MPI, run only on CPU

@testset "Distributed 2D" begin
mpi_dims = parse.(Int, (get(ENV, "DIMX", "1"),
get(ENV, "DIMY", "1")))
dims = (0, 0)
topo = CartesianTopology(dims)
local_size = (4, 5)
@testset "Topology" begin
@test dimensions(topo) == mpi_dims
@test length(neighbors(topo)) == 2
@test node_size(topo) == nprocs
if global_rank(topo) == 0
@test neighbor(topo, 1, 1) == MPI.PROC_NULL
@test neighbor(topo, 2, 1) == MPI.PROC_NULL
@test has_neighbor(topo, 1, 1) == false
@test has_neighbor(topo, 2, 1) == false
if mpi_dims[2] > 1
@test neighbor(topo, 1, 2) == mpi_dims[2]
@test has_neighbor(topo, 1, 2) == true
else
@test neighbor(topo, 1, 2) == MPI.PROC_NULL
@test has_neighbor(topo, 1, 2) == false
end
end
@test global_grid_size(topo, local_size) == mpi_dims .* local_size
end
@testset "gather!" begin
src = fill(global_rank(topo) + 1, local_size)
dst = (global_rank(topo) == 0) ? zeros(Int, mpi_dims .* local_size) : nothing
gather!(dst, src, cartesian_communicator(topo))
if global_rank(topo) == 0
@test dst == repeat(reshape(1:global_size(topo), dimensions(topo))'; inner=size(src))
end
end
end

MPI.Finalize()
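For reference, the expected-result construction in the gather! testset above can be reproduced with Base Julia alone. Below is a minimal sketch assuming a 2×2 process grid and 4×5 local arrays (illustrative values, not part of the commit):

    # Each rank fills a 4x5 local array with `rank + 1`; the gathered global
    # array should therefore consist of constant blocks laid out like the grid.
    mpi_dims   = (2, 2)
    local_size = (4, 5)
    nranks     = prod(mpi_dims)

    # The transpose presumably reconciles MPI's row-major Cartesian rank
    # ordering with Julia's column-major reshape.
    expected = repeat(reshape(1:nranks, mpi_dims)'; inner=local_size)

    @assert size(expected) == mpi_dims .* local_size    # (8, 10)
    @assert expected[1:4, 1:5]  == fill(1, local_size)  # block written by rank 0
    @assert expected[5:8, 6:10] == fill(4, local_size)  # block written by rank 3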
55 changes: 55 additions & 0 deletions test/test_distributed_3D.jl
@@ -0,0 +1,55 @@
include("common.jl")

using MPI
using FastIce.Distributed

MPI.Init()

nprocs = MPI.Comm_size(MPI.COMM_WORLD)

backend = CPU() # until we have a testing environment set up for GPU-aware MPI, run only on CPU

@testset "Distributed 3D" begin
mpi_dims = parse.(Int, (get(ENV, "DIMX", "1"),
get(ENV, "DIMY", "1"),
get(ENV, "DIMZ", "1")))
dims = (0, 0, 0)
topo = CartesianTopology(dims)
local_size = (4, 5, 6)
@testset "Topology" begin
@test dimensions(topo) == mpi_dims
@test length(neighbors(topo)) == 3
@test node_size(topo) == nprocs
if global_rank(topo) == 0
@test neighbor(topo, 1, 1) == MPI.PROC_NULL
@test neighbor(topo, 2, 1) == MPI.PROC_NULL
@test neighbor(topo, 3, 1) == MPI.PROC_NULL
@test has_neighbor(topo, 1, 1) == false
@test has_neighbor(topo, 2, 1) == false
@test has_neighbor(topo, 3, 1) == false
if mpi_dims[2] > 1 && mpi_dims[3] > 1
@test neighbor(topo, 1, 2) == mpi_dims[2] * mpi_dims[3]
@test neighbor(topo, 2, 2) == mpi_dims[3]
@test has_neighbor(topo, 1, 2) == true
@test has_neighbor(topo, 2, 2) == true
else
@test neighbor(topo, 1, 2) == MPI.PROC_NULL
@test neighbor(topo, 2, 2) == MPI.PROC_NULL
@test has_neighbor(topo, 1, 2) == false
@test has_neighbor(topo, 2, 2) == false
end
end
@test global_grid_size(topo, local_size) == mpi_dims .* local_size
end
@testset "gather!" begin
src = fill(global_rank(topo) + 1, local_size)
dst = (global_rank(topo) == 0) ? zeros(Int, mpi_dims .* local_size) : nothing
gather!(dst, src, cartesian_communicator(topo))
ranks_mat = permutedims(reshape(1:global_size(topo), dimensions(topo)), reverse(1:3))
if global_rank(topo) == 0
@test dst == repeat(ranks_mat; inner=size(src))
end
end
end

MPI.Finalize()
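The 3D expected-result construction follows the same pattern, with permutedims(..., reverse(1:3)) playing the role of the transpose in the 2D test. A minimal Base-Julia sketch assuming a 2×2×2 process grid and 4×5×6 local arrays (illustrative values, not part of the commit):

    mpi_dims   = (2, 2, 2)
    local_size = (4, 5, 6)
    nranks     = prod(mpi_dims)

    # Reversing the axis order presumably reconciles MPI's row-major Cartesian
    # rank ordering with Julia's column-major reshape, as in the 2D case.
    ranks_mat = permutedims(reshape(1:nranks, mpi_dims), reverse(1:3))
    expected  = repeat(ranks_mat; inner=local_size)

    @assert size(expected) == mpi_dims .* local_size        # (8, 10, 12)
    @assert expected[1:4, 1:5, 1:6] == fill(1, local_size)  # block written by rank 0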
