Actual source code: ex7.c

static char help[] = "Test VecScatter of different block sizes across processes\n\n";

#include <petscvec.h>
int main(int argc, char **argv)
{
  PetscInt           i, bs, n, low, high;
  PetscMPIInt        nproc, rank;
  Vec                x, y, z;
  IS                 ix, iy;
  VecScatter         vscat;
  const PetscScalar *yv;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &nproc));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCheck(nproc == 2, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "This test can only run on two MPI ranks");
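  /* The local sizes and index sets used below are hard-coded for this two-rank layout */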

  /* Create an MPI vector x of size 12 on two processes, and set x = {0, 1, 2, .., 11} */
  PetscCall(VecCreateMPI(PETSC_COMM_WORLD, 6, PETSC_DECIDE, &x));
  PetscCall(VecGetOwnershipRange(x, &low, &high));
  for (i = low; i < high; i++) PetscCall(VecSetValue(x, i, (PetscScalar)i, INSERT_VALUES));
  PetscCall(VecAssemblyBegin(x));
  PetscCall(VecAssemblyEnd(x));
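  /* Each rank only sets its own locally owned entries, but the assembly calls are still required after VecSetValue() before the vector can be used */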

  /* Create a seq vector y, and a parallel to sequential (PtoS) vecscatter to scatter x to y */
  if (rank == 0) {
    /* On rank 0, seq y is of size 6. We will scatter x[0,1,2,6,7,8] to y[0,1,2,3,4,5] using IS with bs=3 */
    PetscInt idx[2] = {0, 2};
    PetscInt idy[2] = {0, 1};
    n               = 6;
    bs              = 3;
    PetscCall(VecCreateSeq(PETSC_COMM_SELF, n, &y));
    PetscCall(ISCreateBlock(PETSC_COMM_SELF, bs, 2, idx, PETSC_COPY_VALUES, &ix));
    PetscCall(ISCreateBlock(PETSC_COMM_SELF, bs, 2, idy, PETSC_COPY_VALUES, &iy));
  } else {
    /* On rank 1, seq y is of size 4. We will scatter x[4,5,10,11] to y[0,1,2,3] using IS with bs=2 */
    PetscInt idx[2] = {2, 5};
    PetscInt idy[2] = {0, 1};
    n               = 4;
    bs              = 2;
    PetscCall(VecCreateSeq(PETSC_COMM_SELF, n, &y));
    PetscCall(ISCreateBlock(PETSC_COMM_SELF, bs, 2, idx, PETSC_COPY_VALUES, &ix));
    PetscCall(ISCreateBlock(PETSC_COMM_SELF, bs, 2, idy, PETSC_COPY_VALUES, &iy));
  }
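  /* ix and iy were built with different block sizes on the two ranks (bs=3 on rank 0, bs=2 on rank 1);
     the point of this test is that VecScatterCreate() handles such mixed block sizes correctly */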
  PetscCall(VecScatterCreate(x, ix, y, iy, &vscat));

  /* Do the vecscatter */
  PetscCall(VecScatterBegin(vscat, x, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(vscat, x, y, INSERT_VALUES, SCATTER_FORWARD));
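  /* With INSERT_VALUES and SCATTER_FORWARD, the entries of x selected by ix overwrite the entries of y selected by iy */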

  /* Print y. Since y is sequential, wrap its array in a parallel vector z so the values from both ranks appear in one view */
  PetscCall(VecGetArrayRead(y, &yv));
  PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, n, PETSC_DECIDE, yv, &z));
  PetscCall(VecView(z, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecRestoreArrayRead(y, &yv));
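  /* z borrows y's array (VecCreateMPIWithArray() does not copy), so z is viewed while the read access to y is still held */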

  PetscCall(ISDestroy(&ix));
  PetscCall(ISDestroy(&iy));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&y));
  PetscCall(VecDestroy(&z));
  PetscCall(VecScatterDestroy(&vscat));

  PetscCall(PetscFinalize());
  return 0;
}
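/*
   With the index sets above, rank 0's y should receive x[0,1,2,6,7,8] and rank 1's y should receive
   x[4,5,10,11], so the view of z is expected to print 0 1 2 6 7 8 4 5 10 11.
*/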

/*TEST

   test:
      nsize: 2
      args:
      requires:
TEST*/
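Assuming a standard PETSc build, the example can also be run by hand with something like "mpiexec -n 2 ./ex7"; the TEST block above is what the PETSc test harness uses to run it with the required two ranks.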