Actual source code: ex23.c

static char help[] = "Scatters from a parallel vector to a sequential vector.\n\
  Using a blocked send and a strided receive.\n\n";

/*
        0 1 2 3 | 4 5 6 7 ||  8 9 10 11

     The 12 entries of the parallel vector form three blocks of size 4
     (block boundaries marked by |); rank 0 owns the first 8 entries and
     rank 1 the last 4 (ownership boundary marked by ||).

     Scatter the first and third blocks to the first process and the
     second and third blocks to the second process.
*/

#include <petscvec.h>

int main(int argc, char **argv)
{
  PetscInt    i, blocks[2], nlocal;
  PetscMPIInt size, rank;
  PetscScalar value;
  Vec         x, y;
  IS          is1, is2;
  VecScatter  ctx = NULL;
  PetscViewer subviewer;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));

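  /* this example is hard-wired for exactly two MPI ranks */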
  PetscCheck(size == 2, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "Must run with 2 processors");

  /* create the parallel source vector x (12 global entries: 8 on rank 0, 4 on rank 1)
     and a sequential destination vector y of length 8 on each rank */
  if (rank == 0) nlocal = 8;
  else nlocal = 4;
  PetscCall(VecCreate(PETSC_COMM_WORLD, &x));
  PetscCall(VecSetSizes(x, nlocal, 12));
  PetscCall(VecSetFromOptions(x));
  PetscCall(VecCreate(PETSC_COMM_SELF, &y));
  PetscCall(VecSetSizes(y, 8, PETSC_DECIDE));
  PetscCall(VecSetFromOptions(y));

  /* create the two index sets: is1 selects two blocks of 4 entries from x
     (blocks 0 and 2 on rank 0, blocks 1 and 2 on rank 1); is2 is the
     stride 0,1,...,7 addressing all of y */
  if (rank == 0) {
    blocks[0] = 0;
    blocks[1] = 2;
  } else {
    blocks[0] = 1;
    blocks[1] = 2;
  }
  PetscCall(ISCreateBlock(PETSC_COMM_SELF, 4, 2, blocks, PETSC_COPY_VALUES, &is1));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, 8, 0, 1, &is2));

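  /* fill x so that each global entry holds the value of its index */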
  for (i = 0; i < 12; i++) {
    value = i;
    PetscCall(VecSetValues(x, 1, &i, &value, INSERT_VALUES));
  }
  PetscCall(VecAssemblyBegin(x));
  PetscCall(VecAssemblyEnd(x));

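  /* scatter the selected blocks of x into y: rank 0 receives global
     entries 0-3 and 8-11, rank 1 receives 4-7 and 8-11 */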
  PetscCall(VecScatterCreate(x, is1, y, is2, &ctx));
  PetscCall(VecScatterBegin(ctx, x, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ctx, x, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterDestroy(&ctx));

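  /* view each rank's sequential y through a subviewer of the parallel stdout viewer */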
  PetscCall(PetscViewerGetSubViewer(PETSC_VIEWER_STDOUT_WORLD, PETSC_COMM_SELF, &subviewer));
  PetscCall(VecView(y, subviewer));
  PetscCall(PetscViewerRestoreSubViewer(PETSC_VIEWER_STDOUT_WORLD, PETSC_COMM_SELF, &subviewer));

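  /* free all objects */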
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&y));
  PetscCall(ISDestroy(&is1));
  PetscCall(ISDestroy(&is2));

  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   testset:
      nsize: 2
      output_file: output/ex23_1.out
      filter: grep -v "  type:"
      diff_args: -j
      test:
        suffix: standard
        args: -vec_type standard
      test:
        requires: cuda
        suffix: cuda
        args: -vec_type cuda
      test:
        requires: viennacl
        suffix: viennacl
        args: -vec_type viennacl
      test:
        requires: kokkos_kernels
        suffix: kokkos
        args: -vec_type kokkos
      test:
        requires: hip
        suffix: hip
        args: -vec_type hip

TEST*/