Actual source code: ex15.c
static char help[] = "Test VecScatterRemap() on various vecscatters. \n\
We may do optimization based on index patterns. After index remapping by VecScatterRemap(), we need to \n\
make sure the vecscatter works as expected with the optimization. \n\
VecScatterRemap() does not support all kinds of vecscatters. In addition, it only supports remapping \n\
entries where we read the data (i.e., todata in parallel scatter, fromdata in sequential scatter). This test \n\
tests VecScatterRemap on parallel to parallel (PtoP) vecscatter, sequential general to sequential \n\
general (SGToSG) vecscatter and sequential general to sequential stride 1 (SGToSS_Stride1) vecscatter.\n\n";
#include <petscvec.h>

int main(int argc, char **argv)
{
  PetscInt        i, n, *ix, *iy, *tomap, start;
  Vec             x, y;
  PetscMPIInt     nproc, rank;
  IS              isx, isy;
  const PetscInt *ranges;
  VecScatter      vscat;
  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &nproc));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));

  PetscCheck(nproc == 2, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "This test must run with exactly two MPI ranks");
  /* ====================================================================
     (1) test VecScatterRemap on a parallel to parallel (PtoP) vecscatter
     ====================================================================
   */

  n = 64; /* long enough to trigger memcpy optimizations both in local scatter and remote scatter */

  /* create two MPI vectors x, y of length n=64, N=128 */
  PetscCall(VecCreateFromOptions(PETSC_COMM_WORLD, NULL, 1, n, PETSC_DECIDE, &x));
  PetscCall(VecDuplicate(x, &y));

  /* Initialize x as {0~127} */
  PetscCall(VecGetOwnershipRanges(x, &ranges));
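  /* (with two ranks and a local size of n = 64 on each, ranges = {0, 64, 128}) */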
  for (i = ranges[rank]; i < ranges[rank + 1]; i++) PetscCall(VecSetValue(x, i, (PetscScalar)i, INSERT_VALUES));
  PetscCall(VecAssemblyBegin(x));
  PetscCall(VecAssemblyEnd(x));

  /* create two general index sets isx = {0~127} and isy = {32~63,64~95,96~127,0~31}. isx is sequential, but we use
     it as general and let PETSc detect the pattern and optimize it. Indices in isy are set to make the vecscatter
     have both local scatter and remote scatter (i.e., MPI communication)
   */
  PetscCall(PetscMalloc2(n, &ix, n, &iy));
  start = ranges[rank];
  for (i = ranges[rank]; i < ranges[rank + 1]; i++) ix[i - start] = i;
  PetscCall(ISCreateGeneral(PETSC_COMM_WORLD, n, ix, PETSC_COPY_VALUES, &isx));
  if (rank == 0) {
    for (i = 0; i < n; i++) iy[i] = i + 32;
  } else {
    for (i = 0; i < n / 2; i++) {
      iy[i]         = i + 96;
      iy[i + n / 2] = i;
    }
  }

  PetscCall(ISCreateGeneral(PETSC_COMM_WORLD, n, iy, PETSC_COPY_VALUES, &isy));
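  /* For reference, the per-rank contents produced by the loops above are:
       rank 0: ix = {0~63},   iy = {32~95}
       rank 1: ix = {64~127}, iy = {96~127,0~31} */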
  /* create a vecscatter that cyclically shifts x toward its tail by a quarter of the global length and puts the result in y */
  PetscCall(VecScatterCreate(x, isx, y, isy, &vscat));
  PetscCall(VecScatterBegin(vscat, x, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(vscat, x, y, INSERT_VALUES, SCATTER_FORWARD));
  /* view y to check the result. y should be {Q3,Q0,Q1,Q2} of x, that is {96~127,0~31,32~63,64~95} */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Before VecScatterRemap on PtoP, MPI vector y is:\n"));
  PetscCall(VecView(y, PETSC_VIEWER_STDOUT_WORLD));
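  /* A minimal optional sanity check (a sketch, not part of the original test): each rank verifies its
     owned entries of y against the expected quarter-shifted values, instead of relying only on
     eyeballing the viewer output. */
  {
    const PetscScalar *yv;
    PetscCall(VecGetArrayRead(y, &yv));
    for (i = 0; i < n; i++) {
      PetscInt expected = (ranges[rank] + i + 3 * n / 2) % (2 * n); /* y = {Q3,Q0,Q1,Q2} of x */
      PetscCheck((PetscInt)PetscRealPart(yv[i]) == expected, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected value in y");
    }
    PetscCall(VecRestoreArrayRead(y, &yv));
  }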
  /* now call the weird subroutine VecScatterRemap to slightly change the vecscatter. It changes where we read vector
     x entries to send out, but does not change the communication pattern (i.e., send/recv pairs and msg lengths).

     We create tomap as {32~63,0~31}. Originally, we read from indices {0~63} of the local x to send out. The remap
     does indices[i] = tomap[indices[i]]. Therefore, after the remap, we read from indices {32~63,0~31} of the local x.
     isy is unchanged. So, we will shift x to {Q2,Q1,Q0,Q3}, that is {64~95,32~63,0~31,96~127}
   */
  PetscCall(PetscMalloc1(n, &tomap));
  for (i = 0; i < n / 2; i++) {
    tomap[i]         = i + n / 2;
    tomap[i + n / 2] = i;
  }
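  /* For example, a read that used local index 5 now uses tomap[5] = 37, and one that used
     local index 40 now uses tomap[40] = 8 */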
  PetscCall(VecScatterRemap(vscat, tomap, NULL));
  PetscCall(VecScatterBegin(vscat, x, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(vscat, x, y, INSERT_VALUES, SCATTER_FORWARD));

  /* view y to check the result. y should be {64~95,32~63,0~31,96~127} */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "After VecScatterRemap on PtoP, MPI vector y is:\n"));
  PetscCall(VecView(y, PETSC_VIEWER_STDOUT_WORLD));

  /* destroy everything before we recreate them in different types */
  PetscCall(PetscFree2(ix, iy));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&y));
  PetscCall(ISDestroy(&isx));
  PetscCall(ISDestroy(&isy));
  PetscCall(PetscFree(tomap));
  PetscCall(VecScatterDestroy(&vscat));
  /* ==========================================================================================
     (2) test VecScatterRemap on a sequential general to sequential general (SGToSG) vecscatter
     ==========================================================================================
   */
  n = 64; /* long enough to trigger memcpy optimizations in local scatter */

  /* create two seq vectors x, y of length n */
  PetscCall(VecCreateFromOptions(PETSC_COMM_SELF, NULL, 1, n, n, &x));
  PetscCall(VecDuplicate(x, &y));

  /* Initialize x as {0~63} */
  for (i = 0; i < n; i++) PetscCall(VecSetValue(x, i, (PetscScalar)i, INSERT_VALUES));
  PetscCall(VecAssemblyBegin(x));
  PetscCall(VecAssemblyEnd(x));

  /* create two general index sets isx = isy = {0~63}, which are sequential, but we use them as
     general and let PETSc detect the pattern and optimize it */
  PetscCall(PetscMalloc2(n, &ix, n, &iy));
  for (i = 0; i < n; i++) ix[i] = i;
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, n, ix, PETSC_COPY_VALUES, &isx));
  PetscCall(ISDuplicate(isx, &isy));

  /* create a vecscatter that just copies x to y */
  PetscCall(VecScatterCreate(x, isx, y, isy, &vscat));
  PetscCall(VecScatterBegin(vscat, x, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(vscat, x, y, INSERT_VALUES, SCATTER_FORWARD));

  /* view y to check the result. y should be {0~63} */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\nBefore VecScatterRemap on SGToSG, SEQ vector y is:\n"));
  PetscCall(VecView(y, PETSC_VIEWER_STDOUT_WORLD));
  /* now call the weird subroutine VecScatterRemap to slightly change the vecscatter.

     Create tomap as {32~63,0~31}. Originally, we read from indices {0~63} of seq x to write to y. The remap
     does indices[i] = tomap[indices[i]]. Therefore, after the remap, we read from indices {32~63,0~31} of seq x.
   */
  PetscCall(PetscMalloc1(n, &tomap));
  for (i = 0; i < n / 2; i++) {
    tomap[i]         = i + n / 2;
    tomap[i + n / 2] = i;
  }
  PetscCall(VecScatterRemap(vscat, tomap, NULL));
  PetscCall(VecScatterBegin(vscat, x, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(vscat, x, y, INSERT_VALUES, SCATTER_FORWARD));

  /* view y to check the result. y should be {32~63,0~31} */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "After VecScatterRemap on SGToSG, SEQ vector y is:\n"));
  PetscCall(VecView(y, PETSC_VIEWER_STDOUT_WORLD));
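  /* Another optional programmatic check (a sketch, not in the original test): after the remap,
     y[i] should equal (i + n/2) % n, i.e., {32~63} followed by {0~31}. */
  {
    const PetscScalar *yv;
    PetscCall(VecGetArrayRead(y, &yv));
    for (i = 0; i < n; i++) PetscCheck((PetscInt)PetscRealPart(yv[i]) == (i + n / 2) % n, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected value in y");
    PetscCall(VecRestoreArrayRead(y, &yv));
  }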
  /* destroy everything before we recreate them in different types */
  PetscCall(PetscFree2(ix, iy));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&y));
  PetscCall(ISDestroy(&isx));
  PetscCall(ISDestroy(&isy));
  PetscCall(PetscFree(tomap));
  PetscCall(VecScatterDestroy(&vscat));

  /* ===================================================================================================
     (3) test VecScatterRemap on a sequential general to sequential stride 1 (SGToSS_Stride1) vecscatter
     ===================================================================================================
   */
  n = 64; /* long enough to trigger memcpy optimizations in local scatter */

  /* create two seq vectors x of length n, and y of length n/2 */
  PetscCall(VecCreateFromOptions(PETSC_COMM_SELF, NULL, 1, n, n, &x));
  PetscCall(VecCreateFromOptions(PETSC_COMM_SELF, NULL, 1, n / 2, n / 2, &y));

  /* Initialize x as {0~63} */
  for (i = 0; i < n; i++) PetscCall(VecSetValue(x, i, (PetscScalar)i, INSERT_VALUES));
  PetscCall(VecAssemblyBegin(x));
  PetscCall(VecAssemblyEnd(x));

  /* create a general index set isx = {0:63:2}, which actually is a stride IS with first=0, n=32, step=2,
     but we use it as general and let PETSc detect the pattern and optimize it. */
  PetscCall(PetscMalloc2(n / 2, &ix, n / 2, &iy));
  for (i = 0; i < n / 2; i++) ix[i] = i * 2;
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, n / 2, ix, PETSC_COPY_VALUES, &isx));
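  /* (equivalently, isx could have been created directly as a stride IS with
     ISCreateStride(PETSC_COMM_SELF, n / 2, 0, 2, &isx); using a general IS instead
     exercises PETSc's pattern detection) */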
  /* create a stride-1 index set isy = {0~31}. We intentionally set the step to 1 to trigger optimizations */
  PetscCall(ISCreateStride(PETSC_COMM_SELF, 32, 0, 1, &isy));

  /* create a vecscatter that just copies even entries of x to y */
  PetscCall(VecScatterCreate(x, isx, y, isy, &vscat));
  PetscCall(VecScatterBegin(vscat, x, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(vscat, x, y, INSERT_VALUES, SCATTER_FORWARD));

  /* view y to check the result. y should be {0:63:2} */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\nBefore VecScatterRemap on SGToSS_Stride1, SEQ vector y is:\n"));
  PetscCall(VecView(y, PETSC_VIEWER_STDOUT_WORLD));
  /* now call the weird subroutine VecScatterRemap to slightly change the vecscatter.

     Create tomap as {32~63,0~31}. Originally, we read from indices {0:63:2} of seq x to write to y. The remap
     does indices[i] = tomap[indices[i]]. Therefore, after the remap, we read from indices {32:63:2,0:31:2} of seq x.
   */
  PetscCall(PetscMalloc1(n, &tomap));
  for (i = 0; i < n / 2; i++) {
    tomap[i]         = i + n / 2;
    tomap[i + n / 2] = i;
  }
  PetscCall(VecScatterRemap(vscat, tomap, NULL));
  PetscCall(VecScatterBegin(vscat, x, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(vscat, x, y, INSERT_VALUES, SCATTER_FORWARD));

  /* view y to check the result. y should be {32:63:2,0:31:2} */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "After VecScatterRemap on SGToSS_Stride1, SEQ vector y is:\n"));
  PetscCall(VecView(y, PETSC_VIEWER_STDOUT_WORLD));
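  /* A last optional sanity sketch (not in the original test): after the remap, y[i] should equal
     (2*i + n/2) % n, i.e., {32:63:2} followed by {0:31:2}. */
  {
    const PetscScalar *yv;
    PetscCall(VecGetArrayRead(y, &yv));
    for (i = 0; i < n / 2; i++) PetscCheck((PetscInt)PetscRealPart(yv[i]) == (2 * i + n / 2) % n, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected value in y");
    PetscCall(VecRestoreArrayRead(y, &yv));
  }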
  /* destroy everything before PetscFinalize */
  PetscCall(PetscFree2(ix, iy));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&y));
  PetscCall(ISDestroy(&isx));
  PetscCall(ISDestroy(&isy));
  PetscCall(PetscFree(tomap));
  PetscCall(VecScatterDestroy(&vscat));

  PetscCall(PetscFinalize());
  return 0;
}
/*TEST

   test:
      suffix: 1
      nsize: 2
      diff_args: -j
      requires: double

TEST*/