Actual source code: ex4.c
static char help[] = "Tests various 2-dimensional DMDA routines.\n\n";

#include <petscdm.h>
#include <petscdmda.h>

int main(int argc, char **argv)
{
  PetscMPIInt     rank;
  PetscInt        M = 10, N = 8, m = PETSC_DECIDE;
  PetscInt        s = 2, w = 2, n = PETSC_DECIDE, nloc, l, i, j, kk;
  PetscInt        Xs, Xm, Ys, Ym, iloc, *iglobal;
  const PetscInt *ltog;
  PetscInt       *lx = NULL, *ly = NULL;
  PetscBool       testorder = PETSC_FALSE, flg;
  DMBoundaryType  bx = DM_BOUNDARY_NONE, by = DM_BOUNDARY_NONE;
  DM              da;
  PetscViewer     viewer;
  Vec             local, global;
  PetscScalar     value;
  DMDAStencilType st = DMDA_STENCIL_BOX;
  AO              ao;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(PetscViewerDrawOpen(PETSC_COMM_WORLD, 0, "", 300, 0, 400, 400, &viewer));

  /* Read options */
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-NX", &M, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-NY", &N, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-m", &m, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-s", &s, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-w", &w, NULL));

  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-xperiodic", &flg, NULL));
  if (flg) bx = DM_BOUNDARY_PERIODIC;
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-yperiodic", &flg, NULL));
  if (flg) by = DM_BOUNDARY_PERIODIC;
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-xghosted", &flg, NULL));
  if (flg) bx = DM_BOUNDARY_GHOSTED;
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-yghosted", &flg, NULL));
  if (flg) by = DM_BOUNDARY_GHOSTED;
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-star", &flg, NULL));
  if (flg) st = DMDA_STENCIL_STAR;
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-box", &flg, NULL));
  if (flg) st = DMDA_STENCIL_BOX;
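
  /* A star stencil communicates only the ghost values along coordinate
     directions (enough for 5-point stencils); a box stencil also includes
     the corner ghost points needed for 9-point stencils. */
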
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-testorder", &testorder, NULL));
  /*
     Test putting four nodes per processor in x and two per processor in y,
     except that the last processor in each direction gets the remainder.
  */
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-distribute", &flg, NULL));
  if (flg) {
    PetscCheck(m != PETSC_DECIDE, PETSC_COMM_WORLD, PETSC_ERR_USER_INPUT, "Must set -m option with -distribute option");
    PetscCall(PetscMalloc1(m, &lx));
    for (i = 0; i < m - 1; i++) lx[i] = 4;
    lx[m - 1] = M - 4 * (m - 1);
    PetscCheck(n != PETSC_DECIDE, PETSC_COMM_WORLD, PETSC_ERR_USER_INPUT, "Must set -n option with -distribute option");
    PetscCall(PetscMalloc1(n, &ly));
    for (i = 0; i < n - 1; i++) ly[i] = 2;
    ly[n - 1] = N - 2 * (n - 1);
  }
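
  /* For example, with the default M = 10, N = 8 and -m 3 -n 2 -distribute,
     this yields lx = {4, 4, 2} and ly = {2, 6}: each sums to the global
     grid dimension, as DMDACreate2d requires. */
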
  /* Create distributed array and get vectors */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, bx, by, st, M, N, m, n, w, s, lx, ly, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(PetscFree(lx));
  PetscCall(PetscFree(ly));

  PetscCall(DMView(da, viewer));
  PetscCall(DMCreateGlobalVector(da, &global));
  PetscCall(DMCreateLocalVector(da, &local));
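
  /* The global vector has one entry per dof of the owned subgrid (w dof per
     grid node); the local vector additionally has room for the ghost nodes
     within stencil width s around the owned patch. */
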
  /* Set global vector; send ghost points to local vectors */
  value = 1;
  PetscCall(VecSet(global, value));
  PetscCall(DMGlobalToLocalBegin(da, global, INSERT_VALUES, local));
  PetscCall(DMGlobalToLocalEnd(da, global, INSERT_VALUES, local));
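
  /* With the default boundary types, every owned and ghost entry of the
     local vector is now 1.0 (the extra boundary slots from
     DM_BOUNDARY_GHOSTED are user-managed and not filled by this scatter). */
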
  /* Scale local vectors according to processor rank; pass to global vector */
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  value = rank;
  PetscCall(VecScale(local, value));
  PetscCall(DMLocalToGlobalBegin(da, local, INSERT_VALUES, global));
  PetscCall(DMLocalToGlobalEnd(da, local, INSERT_VALUES, global));
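
  /* With INSERT_VALUES the local-to-global transfer copies only the owned
     entries, so each global entry now equals the rank of its owner; ghost
     contributions are ignored (ADD_VALUES would sum them instead). */
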
  if (!testorder) { /* turn off printing when testing ordering mappings */
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\nGlobal Vectors:\n"));
    PetscCall(VecView(global, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\n\n"));
  }

  /* Send ghost points to local vectors */
  PetscCall(DMGlobalToLocalBegin(da, global, INSERT_VALUES, local));
  PetscCall(DMGlobalToLocalEnd(da, global, INSERT_VALUES, local));
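
  /* The optional -local_print block below uses a synchronized sub-viewer so
     that each rank's local vector (including its ghost values) is printed in
     rank order instead of interleaving. */
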
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-local_print", &flg, NULL));
  if (flg) {
    PetscViewer sviewer;

    PetscCall(PetscViewerASCIIPushSynchronized(PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "\nLocal Vector: processor %d\n", rank));
    PetscCall(PetscViewerGetSubViewer(PETSC_VIEWER_STDOUT_WORLD, PETSC_COMM_SELF, &sviewer));
    PetscCall(VecView(local, sviewer));
    PetscCall(PetscViewerRestoreSubViewer(PETSC_VIEWER_STDOUT_WORLD, PETSC_COMM_SELF, &sviewer));
    PetscCall(PetscViewerASCIIPopSynchronized(PETSC_VIEWER_STDOUT_WORLD));
  }

  /* Tests mappings between application/PETSc orderings */
  if (testorder) {
    ISLocalToGlobalMapping ltogm;

    PetscCall(DMGetLocalToGlobalMapping(da, &ltogm));
    PetscCall(ISLocalToGlobalMappingGetSize(ltogm, &nloc));
    PetscCall(ISLocalToGlobalMappingGetIndices(ltogm, &ltog));
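    /* nloc counts one index per dof on the ghosted local patch (w per grid
       node), so it matches the w * Xm * Ym entries filled in below. */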
    PetscCall(DMDAGetGhostCorners(da, &Xs, &Ys, NULL, &Xm, &Ym, NULL));
    PetscCall(DMDAGetAO(da, &ao));
    PetscCall(PetscMalloc1(nloc, &iglobal));

    /* Set iglobal to be global indices for each processor's local and ghost nodes,
       using the DMDA ordering of grid points */
    kk = 0;
    for (j = Ys; j < Ys + Ym; j++) {
      for (i = Xs; i < Xs + Xm; i++) {
        iloc = w * ((j - Ys) * Xm + i - Xs);
        for (l = 0; l < w; l++) iglobal[kk++] = ltog[iloc + l];
      }
    }
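
    /* For example, with w = 2 the node at grid point (i, j) occupies local
       slots iloc and iloc + 1, so iglobal now holds the global PETSc index
       of every dof on the ghosted patch, in local ordering. */
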
    /* Map this to the application ordering (which for DMDAs is just the natural ordering
       that would be used for 1 processor, numbering most rapidly by x, then y) */
    PetscCall(AOPetscToApplication(ao, nloc, iglobal));

    /* Then map the application ordering back to the PETSc DMDA ordering */
    PetscCall(AOApplicationToPetsc(ao, nloc, iglobal));
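
    /* The two maps are inverses, so the round trip should leave iglobal
       unchanged; the loop below checks this entry by entry. */
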
    /* Verify the mappings */
    kk = 0;
    for (j = Ys; j < Ys + Ym; j++) {
      for (i = Xs; i < Xs + Xm; i++) {
        iloc = w * ((j - Ys) * Xm + i - Xs);
        for (l = 0; l < w; l++) {
          if (iglobal[kk] != ltog[iloc + l]) {
            PetscCall(PetscFPrintf(PETSC_COMM_SELF, stdout, "[%d] Problem with mapping: j=%" PetscInt_FMT ", i=%" PetscInt_FMT ", l=%" PetscInt_FMT ", petsc1=%" PetscInt_FMT ", petsc2=%" PetscInt_FMT "\n", rank, j, i, l, ltog[iloc + l], iglobal[kk]));
          }
          kk++;
        }
      }
    }
    PetscCall(PetscFree(iglobal));
    PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogm, &ltog));
  }

  /* Free memory */
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(VecDestroy(&local));
  PetscCall(VecDestroy(&global));
  PetscCall(DMDestroy(&da));

  PetscCall(PetscFinalize());
  return 0;
}
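
/*
   Typical usage (a sketch; the exact launcher and build command depend on
   the local PETSc installation):

     make ex4
     mpiexec -n 4 ./ex4 -testorder
     mpiexec -n 6 ./ex4 -m 3 -n 2 -distribute -local_print
*/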

/*TEST

   test:
      nsize: 4
      args: -nox
      filter: grep -v -i Object
      requires: x

   test:
      suffix: 2
      args: -testorder -nox
      requires: x

TEST*/