From b0df437c28295e7028185b5174bed5e778bad854 Mon Sep 17 00:00:00 2001 From: Jeremy E Kozdon Date: Mon, 19 Jul 2021 15:41:37 -0700 Subject: [PATCH] Disable MPI test on Windows --- test/dmda.jl | 12 +++++++++++- test/runtests.jl | 11 +++++++---- 2 files changed, 18 insertions(+), 5 deletions(-) diff --git a/test/dmda.jl b/test/dmda.jl index ec610218..abdc4899 100644 --- a/test/dmda.jl +++ b/test/dmda.jl @@ -1,13 +1,13 @@ using Test using PETSc, MPI MPI.Initialized() || MPI.Init() -PETSc.initialize() @testset "DMDACreate1D" begin comm = MPI.COMM_WORLD mpirank = MPI.Comm_rank(comm) mpisize = MPI.Comm_size(comm) for petsclib in PETSc.petsclibs + PETSc.initialize(petsclib) PetscScalar = PETSc.scalartype(petsclib) PetscInt = PETSc.inttype(petsclib) # Loop over all boundary types and try to use them @@ -108,8 +108,10 @@ PETSc.initialize() # TODO: Need a better test? ksp = PETSc.KSP(da) @test PETSc.gettype(ksp) == "gmres" + end end + PETSc.finalize(petsclib) end end @@ -122,6 +124,7 @@ end for petsclib in PETSc.petsclibs PetscScalar = PETSc.scalartype(petsclib) PetscInt = PETSc.inttype(petsclib) + PETSc.initialize(petsclib) # Loop over all boundary types and stencil types for stencil_type in instances(PETSc.DMDAStencilType), boundary_type_y in instances(PETSc.DMBoundaryType), @@ -216,6 +219,7 @@ end @test PETSc.gettype(ksp) == "gmres" end end + PETSc.finalize(petsclib) end end @@ -229,6 +233,7 @@ end for petsclib in PETSc.petsclibs PetscScalar = PETSc.scalartype(petsclib) PetscInt = PETSc.inttype(petsclib) + PETSc.initialize(petsclib) # Loop over all boundary types and stencil types for stencil_type in instances(PETSc.DMDAStencilType), boundary_type_z in instances(PETSc.DMBoundaryType), @@ -337,6 +342,7 @@ end @test PETSc.gettype(ksp) == "gmres" end end + PETSc.finalize(petsclib) end end @@ -345,6 +351,7 @@ end mpirank = MPI.Comm_rank(comm) mpisize = MPI.Comm_size(comm) for petsclib in PETSc.petsclibs + PETSc.initialize(petsclib) PetscScalar = PETSc.scalartype(petsclib) PetscInt = PETSc.inttype(petsclib) boundary_type = PETSc.DM_BOUNDARY_NONE @@ -393,6 +400,7 @@ end @test mat[i, (i - 1):(i + 1)] == [1, -2, 1] end end + PETSc.finalize(petsclib) end end @@ -401,6 +409,7 @@ end mpirank = MPI.Comm_rank(comm) mpisize = MPI.Comm_size(comm) for petsclib in PETSc.petsclibs + PETSc.initialize(petsclib) PetscScalar = PETSc.scalartype(petsclib) PetscInt = PETSc.inttype(petsclib) boundary_type = PETSc.DM_BOUNDARY_NONE @@ -472,6 +481,7 @@ end for (loc, glo) in enumerate(ghost_lower:ghost_upper) @test coord_vec[loc] ≈ (glo - 1) * Δx end + PETSc.finalize(petsclib) end end diff --git a/test/runtests.jl b/test/runtests.jl index dff90f93..91932b7b 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -2,10 +2,13 @@ using Test using MPI # Do the MPI tests first so we do not have mpi running inside MPI -@testset "mpi tests" begin - @test mpiexec() do mpi_cmd - cmd = `$mpi_cmd -n 4 $(Base.julia_cmd()) --project dmda.jl` - success(pipeline(cmd, stderr = stderr)) +# XXX: Currently not working on windows, not sure why +if !Sys.iswindows() + @testset "mpi tests" begin + @test mpiexec() do mpi_cmd + cmd = `$mpi_cmd -n 4 $(Base.julia_cmd()) --project dmda.jl` + success(pipeline(cmd, stderr = stderr)) + end end end