Actual source code: ex14.c
static char help[] = "Test event log of VecScatter with various block sizes\n\n";

#include <petscvec.h>

int main(int argc, char **argv)
{
  PetscInt           i, j, low, high, n = 256, N, errors, tot_errors;
  PetscInt           bs = 1, ix[2], iy[2];
  PetscMPIInt        nproc, rank;
  PetscScalar       *xval;
  const PetscScalar *yval;
  Vec                x, y;
  IS                 isx, isy;
  VecScatter         ctx;
  const PetscInt     niter = 10;
  PetscLogStage      stage1, stage2;
  PetscLogEvent      event1, event2;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(PetscLogDefaultBegin());
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &nproc));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));

  PetscCall(PetscLogStageRegister("Scatter(bs=1)", &stage1));
  PetscCall(PetscLogEventRegister("VecScatter(bs=1)", PETSC_OBJECT_CLASSID, &event1));
  PetscCall(PetscLogStageRegister("Scatter(bs=4)", &stage2));
  PetscCall(PetscLogEventRegister("VecScatter(bs=4)", PETSC_OBJECT_CLASSID, &event2));
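
  /* A separate stage/event pair is registered for each block size so that
     PetscLogEventGetPerfInfo() below can report the message counts of the
     two scatters independently. */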

  /* Create a parallel vector x and a sequential vector y */
  PetscCall(VecCreate(PETSC_COMM_WORLD, &x));
  PetscCall(VecSetSizes(x, n, PETSC_DECIDE));
  PetscCall(VecSetFromOptions(x));
  PetscCall(VecGetOwnershipRange(x, &low, &high));
  PetscCall(VecGetSize(x, &N));
  PetscCall(VecCreateSeq(PETSC_COMM_SELF, n, &y));

  /*=======================================
     test VecScatter with bs = 1
    ======================================*/

  /* The code works as if we were doing 3-point stencil computations on a 1D domain x
     with periodic boundary conditions; the two ghosts are scattered to the beginning of y.
  */
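  /* For illustration, with 2 ranks and n = 4 (the test itself uses n = 256):
       x (global): rank 0 owns entries 0..3, rank 1 owns entries 4..7
       rank 0: ix = {7, 4}  -- left neighbor wraps around to the last entry
       rank 1: ix = {3, 0}  -- right neighbor wraps around to the first entry
     Each rank receives its two ghosts into y[0] and y[1]. */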
  bs    = 1;
  ix[0] = rank ? low - 1 : N - 1; /* ix[] contains global indices of the two ghost points */
  ix[1] = (rank != nproc - 1) ? high : 0;
  iy[0] = 0;
  iy[1] = 1;

  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 2, ix, PETSC_COPY_VALUES, &isx));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 2, iy, PETSC_COPY_VALUES, &isy));
  PetscCall(VecScatterCreate(x, isx, y, isy, &ctx));
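  /* Call SetUp explicitly, otherwise messages in implicit SetUp will be counted in events below */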
  PetscCall(VecScatterSetUp(ctx));

  PetscCall(PetscLogStagePush(stage1));
  PetscCall(PetscLogEventBegin(event1, 0, 0, 0, 0));
  errors = 0;
  for (i = 0; i < niter; i++) {
    /* set x = 0+i, 1+i, 2+i, ..., N-1+i */
    PetscCall(VecGetArray(x, &xval));
    for (j = 0; j < n; j++) xval[j] = (PetscScalar)(low + j + i);
    PetscCall(VecRestoreArray(x, &xval));
    /* scatter the ghosts to y */
    PetscCall(VecScatterBegin(ctx, x, y, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ctx, x, y, INSERT_VALUES, SCATTER_FORWARD));
    /* check if y has correct values */
    PetscCall(VecGetArrayRead(y, &yval));
    if ((PetscInt)PetscRealPart(yval[0]) != ix[0] + i) errors++;
    if ((PetscInt)PetscRealPart(yval[1]) != ix[1] + i) errors++;
    PetscCall(VecRestoreArrayRead(y, &yval));
  }
  PetscCall(PetscLogEventEnd(event1, 0, 0, 0, 0));
  PetscCall(PetscLogStagePop());

  /* check if we found wrong values on any process */
  PetscCallMPI(MPIU_Allreduce(&errors, &tot_errors, 1, MPIU_INT, MPI_SUM, PETSC_COMM_WORLD));
  if (tot_errors) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Error: wrong values were scattered in vecscatter with bs = %" PetscInt_FMT "\n", bs));

  /* print out event log of VecScatter(bs=1) */
  if (PetscDefined(USE_LOG)) {
    PetscLogDouble     numMessages, messageLength;
    PetscEventPerfInfo eventInfo;
    PetscInt           tot_msg, tot_len, avg_len;

    PetscCall(PetscLogEventGetPerfInfo(stage1, event1, &eventInfo));
    PetscCallMPI(MPIU_Allreduce(&eventInfo.numMessages, &numMessages, 1, MPIU_PETSCLOGDOUBLE, MPI_SUM, PETSC_COMM_WORLD));
    PetscCallMPI(MPIU_Allreduce(&eventInfo.messageLength, &messageLength, 1, MPIU_PETSCLOGDOUBLE, MPI_SUM, PETSC_COMM_WORLD));
    tot_msg = (PetscInt)(numMessages * 0.5); /* two MPI calls (Send & Recv) per message */
    tot_len = (PetscInt)(messageLength * 0.5);
    avg_len = tot_msg ? (PetscInt)(messageLength / numMessages) : 0;
    /* when nproc > 2, tot_msg = 2*nproc*niter and tot_len = tot_msg*sizeof(PetscScalar)*bs */
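    /* e.g., in the 4-rank configuration of the TEST block below with niter = 10, tot_msg = 2*4*10 = 80;
       assuming 8-byte real PetscScalar, tot_len = 80*8*1 = 640 bytes for bs = 1 */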
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "VecScatter(bs=%" PetscInt_FMT ") has sent out %" PetscInt_FMT " messages, total %" PetscInt_FMT " bytes, with average length %" PetscInt_FMT " bytes\n", bs, tot_msg, tot_len, avg_len));
  }

  PetscCall(ISDestroy(&isx));
  PetscCall(ISDestroy(&isy));
  PetscCall(VecScatterDestroy(&ctx));

  /*=======================================
     test VecScatter with bs = 4
    ======================================*/

  /* similar to the 3-point stencil above, except that this time each ghost is a block */
  bs    = 4; /* n needs to be a multiple of bs to make the following code work */
  ix[0] = rank ? low / bs - 1 : N / bs - 1; /* ix[] contains global indices of the two ghost blocks */
  ix[1] = (rank != nproc - 1) ? high / bs : 0;
  iy[0] = 0;
  iy[1] = 1;
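
  /* With ISCreateBlock, ix[] and iy[] hold block indices: block b addresses
     entries b*bs .. b*bs + bs - 1 of the corresponding vector. */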
  PetscCall(ISCreateBlock(PETSC_COMM_SELF, bs, 2, ix, PETSC_COPY_VALUES, &isx));
  PetscCall(ISCreateBlock(PETSC_COMM_SELF, bs, 2, iy, PETSC_COPY_VALUES, &isy));

  PetscCall(VecScatterCreate(x, isx, y, isy, &ctx));
  /* Call SetUp explicitly, otherwise messages in implicit SetUp will be counted in events below */
  PetscCall(VecScatterSetUp(ctx));

  PetscCall(PetscLogStagePush(stage2));
  PetscCall(PetscLogEventBegin(event2, 0, 0, 0, 0));
  errors = 0;
  for (i = 0; i < niter; i++) {
    /* set x = 0+i, 1+i, 2+i, ..., N-1+i */
    PetscCall(VecGetArray(x, &xval));
    for (j = 0; j < n; j++) xval[j] = (PetscScalar)(low + j + i);
    PetscCall(VecRestoreArray(x, &xval));
    /* scatter the ghost blocks to y */
    PetscCall(VecScatterBegin(ctx, x, y, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ctx, x, y, INSERT_VALUES, SCATTER_FORWARD));
    /* check if y has correct values */
    PetscCall(VecGetArrayRead(y, &yval));
    if ((PetscInt)PetscRealPart(yval[0]) != ix[0] * bs + i) errors++;
    if ((PetscInt)PetscRealPart(yval[bs]) != ix[1] * bs + i) errors++;
    PetscCall(VecRestoreArrayRead(y, &yval));
  }
  PetscCall(PetscLogEventEnd(event2, 0, 0, 0, 0));
  PetscCall(PetscLogStagePop());

  /* check if we found wrong values on any process */
  PetscCallMPI(MPIU_Allreduce(&errors, &tot_errors, 1, MPIU_INT, MPI_SUM, PETSC_COMM_WORLD));
  if (tot_errors) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Error: wrong values were scattered in vecscatter with bs = %" PetscInt_FMT "\n", bs));

  /* print out event log of VecScatter(bs=4) */
  if (PetscDefined(USE_LOG)) {
    PetscLogDouble     numMessages, messageLength;
    PetscEventPerfInfo eventInfo;
    PetscInt           tot_msg, tot_len, avg_len;

    PetscCall(PetscLogEventGetPerfInfo(stage2, event2, &eventInfo));
    PetscCallMPI(MPIU_Allreduce(&eventInfo.numMessages, &numMessages, 1, MPIU_PETSCLOGDOUBLE, MPI_SUM, PETSC_COMM_WORLD));
    PetscCallMPI(MPIU_Allreduce(&eventInfo.messageLength, &messageLength, 1, MPIU_PETSCLOGDOUBLE, MPI_SUM, PETSC_COMM_WORLD));
    tot_msg = (PetscInt)(numMessages * 0.5); /* two MPI calls (Send & Recv) per message */
    tot_len = (PetscInt)(messageLength * 0.5);
    avg_len = tot_msg ? (PetscInt)(messageLength / numMessages) : 0;
    /* when nproc > 2, tot_msg = 2*nproc*niter and tot_len = tot_msg*sizeof(PetscScalar)*bs */
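    /* e.g., in the 4-rank configuration below, tot_msg is again 80, but each message now carries a
       block of 4 scalars, so tot_len = 80*8*4 = 2560 bytes (assuming 8-byte real PetscScalar) */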
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "VecScatter(bs=%" PetscInt_FMT ") has sent out %" PetscInt_FMT " messages, total %" PetscInt_FMT " bytes, with average length %" PetscInt_FMT " bytes\n", bs, tot_msg, tot_len, avg_len));
  }

  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Program finished\n"));
  PetscCall(ISDestroy(&isx));
  PetscCall(ISDestroy(&isy));
  PetscCall(VecScatterDestroy(&ctx));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&y));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
     nsize: 4
     args:
     requires: double defined(PETSC_USE_LOG)

TEST*/
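
/* A sketch of the expected output under the 4-rank test above (byte counts assume
   PETSc was built with 8-byte real scalars; counts follow the nproc > 2 formula):

     VecScatter(bs=1) has sent out 80 messages, total 640 bytes, with average length 8 bytes
     VecScatter(bs=4) has sent out 80 messages, total 2560 bytes, with average length 32 bytes
     Program finished
*/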