Actual source code: ex6.c

static char help[] = "  Test VecScatter with x, y on different communicators\n\n";

#include <petscvec.h>

int main(int argc, char **argv)
{
  PetscInt           i, n = 5, rstart;
  PetscScalar       *val;
  const PetscScalar *dat;
  Vec                x, y1, y2;
  MPI_Comm           newcomm;
  PetscMPIInt        nproc, rank;
  IS                 ix;
  VecScatter         vscat1, vscat2;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &nproc));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));

  PetscCheck(nproc == 2, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "This test must run with exactly two MPI ranks");
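  /* The hard-coded expectations below assume exactly two ranks with n = 5 local entries each, i.e., 10 global entries */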

  /* Create MPI vectors x and y1 on the same communicator, PETSC_COMM_WORLD (i.e., MPI_IDENT) */
  PetscCall(VecCreateMPI(PETSC_COMM_WORLD, n, PETSC_DECIDE, &x));
  PetscCall(VecDuplicate(x, &y1));
  PetscCall(VecGetOwnershipRange(x, &rstart, NULL));
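  /* With the default contiguous layout, rstart is 0 on rank 0 and 5 on rank 1 */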

  /* Set x's values locally; globally, x = {0., 1., 2., ..., 9.} */
  PetscCall(VecGetArray(x, &val));
  for (i = 0; i < n; i++) val[i] = rstart + i;
  PetscCall(VecRestoreArray(x, &val));
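  /* VecGetArray exposes only the locally owned block of x; the matching VecRestoreArray must be called before the vector is used elsewhere */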

  /* Create index set ix = {0, 1, 2, ..., 9} */
  PetscCall(ISCreateStride(PETSC_COMM_WORLD, n, rstart, 1, &ix));
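  /* Each rank contributes its locally owned indices {rstart, ..., rstart + 4}; used as both the "from" and "to"
     index set below, ix requests the identity map y[i] = x[i] in global numbering */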

  /* Create newcomm, which reverses the process order of x's communicator, and then create y2 on it */
  PetscCallMPI(MPI_Comm_split(PETSC_COMM_WORLD, 0 /*color*/, -rank /*key*/, &newcomm));
  PetscCall(VecCreateMPI(newcomm, n, PETSC_DECIDE, &y2));
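  /* MPI_Comm_split orders ranks by ascending key, so world rank 1 (key -1) becomes rank 0 of newcomm and world
     rank 0 (key 0) becomes rank 1; hence y2[0..4] lives on world rank 1 and y2[5..9] on world rank 0 */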

  /* vscat1 and vscat2 look the same, but they are not: y2 lives on a different communicator than x */
  PetscCall(VecScatterCreate(x, ix, y1, ix, &vscat1));
  PetscCall(VecScatterCreate(x, ix, y2, ix, &vscat2));
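  /* This is exactly what the test exercises (see the help string): a VecScatter can move data between
     vectors that do not share a communicator */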

  PetscCall(VecScatterBegin(vscat1, x, y1, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterBegin(vscat2, x, y2, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(vscat1, x, y1, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(vscat2, x, y2, INSERT_VALUES, SCATTER_FORWARD));
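  /* Both Begin calls are posted before either End so the two scatters' communication can overlap; each End
     is called with the same vectors and mode as its matching Begin, as required */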

  /* View on rank 0 of x's comm, which is PETSC_COMM_WORLD */
  if (rank == 0) {
    /* Print the part of x on rank 0, which is 0 1 2 3 4 */
    PetscCall(PetscPrintf(PETSC_COMM_SELF, "\nOn rank 0 of PETSC_COMM_WORLD, x  = "));
    PetscCall(VecGetArrayRead(x, &dat));
    for (i = 0; i < n; i++) PetscCall(PetscPrintf(PETSC_COMM_SELF, " %.0g", (double)PetscRealPart(dat[i])));
    PetscCall(VecRestoreArrayRead(x, &dat));

    /* Print the part of y1 on rank 0, which is 0 1 2 3 4 */
    PetscCall(PetscPrintf(PETSC_COMM_SELF, "\nOn rank 0 of PETSC_COMM_WORLD, y1 = "));
    PetscCall(VecGetArrayRead(y1, &dat));
    for (i = 0; i < n; i++) PetscCall(PetscPrintf(PETSC_COMM_SELF, " %.0g", (double)PetscRealPart(dat[i])));
    PetscCall(VecRestoreArrayRead(y1, &dat));

    /* Print the part of y2 on rank 0, which is 5 6 7 8 9 since y2 reverses the process order of PETSC_COMM_WORLD */
    PetscCall(PetscPrintf(PETSC_COMM_SELF, "\nOn rank 0 of PETSC_COMM_WORLD, y2 = "));
    PetscCall(VecGetArrayRead(y2, &dat));
    for (i = 0; i < n; i++) PetscCall(PetscPrintf(PETSC_COMM_SELF, " %.0g", (double)PetscRealPart(dat[i])));
    PetscCall(VecRestoreArrayRead(y2, &dat));
    PetscCall(PetscPrintf(PETSC_COMM_SELF, "\n"));
  }

  PetscCall(ISDestroy(&ix));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&y1));
  PetscCall(VecDestroy(&y2));
  PetscCall(VecScatterDestroy(&vscat1));
  PetscCall(VecScatterDestroy(&vscat2));
  PetscCallMPI(MPI_Comm_free(&newcomm));
  PetscCall(PetscFinalize());
  return 0;
}
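
/* Expected output with two ranks (derived from the in-code comments above, not from the reference output file):
   On rank 0 of PETSC_COMM_WORLD, x  =  0 1 2 3 4
   On rank 0 of PETSC_COMM_WORLD, y1 =  0 1 2 3 4
   On rank 0 of PETSC_COMM_WORLD, y2 =  5 6 7 8 9
*/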

/*TEST

   test:
      nsize: 2
      requires: double
TEST*/