Actual source code: ex18.c

  1: static char help[] = "Tests for parallel mesh loading and parallel topological interpolation\n\n";

  3: #include <petsc/private/dmpleximpl.h>
  4: /* List of test meshes

  6: Network
  7: -------
  8: Test 0 (2 ranks):

 10: network=0:
 11: ---------
 12:   cell 0   cell 1   cell 2          nCells-1       (edge)
 13: 0 ------ 1 ------ 2 ------ 3 -- -- v --  -- nCells (vertex)

 15:   vertex distribution:
 16:     rank 0: 0 1
 17:     rank 1: 2 3 ... nCells
 18:   cell(edge) distribution:
 19:     rank 0: 0 1
 20:     rank 1: 2 ... nCells-1

 22: network=1:
 23: ---------
 24:                v2
 25:                 ^
 26:                 |
 27:                cell 2
 28:                 |
 29:  v0 --cell 0--> v3--cell 1--> v1

 31:   vertex distribution:
 32:     rank 0: 0 1 3
 33:     rank 1: 2
 34:   cell(edge) distribution:
 35:     rank 0: 0 1
 36:     rank 1: 2

 38:   example:
 39:     mpiexec -n 2 ./ex18 -distribute 1 -dim 1 -orig_dm_view -dist_dm_view -petscpartitioner_type parmetis -ncells 50

 41: Triangle
 42: --------
 43: Test 0 (2 ranks):
 44: Two triangles sharing a face

 46:         2
 47:       / | \
 48:      /  |  \
 49:     /   |   \
 50:    0  0 | 1  3
 51:     \   |   /
 52:      \  |  /
 53:       \ | /
 54:         1

 56:   vertex distribution:
 57:     rank 0: 0 1
 58:     rank 1: 2 3
 59:   cell distribution:
 60:     rank 0: 0
 61:     rank 1: 1

 63: Test 1 (3 ranks):
 64: Four triangles partitioned across 3 ranks

 66:    0 _______ 3
 67:    | \     / |
 68:    |  \ 1 /  |
 69:    |   \ /   |
 70:    | 0  2  2 |
 71:    |   / \   |
 72:    |  / 3 \  |
 73:    | /     \ |
 74:    1 ------- 4

 76:   vertex distribution:
 77:     rank 0: 0 1
 78:     rank 1: 2 3
 79:     rank 2: 4
 80:   cell distribution:
 81:     rank 0: 0
 82:     rank 1: 1
 83:     rank 2: 2 3

 85: Test 2 (3 ranks):
 86: Four triangles partitioned across 3 ranks

 88:    1 _______ 3
 89:    | \     / |
 90:    |  \ 1 /  |
 91:    |   \ /   |
 92:    | 0  0  2 |
 93:    |   / \   |
 94:    |  / 3 \  |
 95:    | /     \ |
 96:    2 ------- 4

 98:   vertex distribution:
 99:     rank 0: 0 1
100:     rank 1: 2 3
101:     rank 2: 4
102:   cell distribution:
103:     rank 0: 0
104:     rank 1: 1
105:     rank 2: 2 3

107: Tetrahedron
108: -----------
109: Test 0 (2 ranks):
110: Two tets sharing a face

112:  cell   3 _______    cell
113:  0    / | \      \   1
114:      /  |  \      \
115:     /   |   \      \
116:    0----|----4-----2
117:     \   |   /      /
118:      \  |  /      /
119:       \ | /      /
120:         1-------
121:    y
122:    | x
123:    |/
124:    *----z

126:   vertex distribution:
127:     rank 0: 0 1
128:     rank 1: 2 3 4
129:   cell distribution:
130:     rank 0: 0
131:     rank 1: 1

133: Quadrilateral
134: -------------
135: Test 0 (2 ranks):
136: Two quads sharing a face

138:    3-------2-------5
139:    |       |       |
140:    |   0   |   1   |
141:    |       |       |
142:    0-------1-------4

144:   vertex distribution:
145:     rank 0: 0 1 2
146:     rank 1: 3 4 5
147:   cell distribution:
148:     rank 0: 0
149:     rank 1: 1

151: TODO Test 1:
152: A quad and a triangle sharing a face

154:    5-------4
155:    |       | \
156:    |   0   |  \
157:    |       | 1 \
158:    2-------3----6

160: Hexahedron
161: ----------
162: Test 0 (2 ranks):
163: Two hexes sharing a face

165: cell   7-------------6-------------11 cell
166: 0     /|            /|            /|     1
167:      / |   F1      / |   F7      / |
168:     /  |          /  |          /  |
169:    4-------------5-------------10  |
170:    |   |     F4  |   |     F10 |   |
171:    |   |         |   |         |   |
172:    |F5 |         |F3 |         |F9 |
173:    |   |  F2     |   |   F8    |   |
174:    |   3---------|---2---------|---9
175:    |  /          |  /          |  /
176:    | /   F0      | /    F6     | /
177:    |/            |/            |/
178:    0-------------1-------------8

180:   vertex distribution:
181:     rank 0: 0 1 2 3 4 5
182:     rank 1: 6 7 8 9 10 11
183:   cell distribution:
184:     rank 0: 0
185:     rank 1: 1

187: */

189: typedef enum {
190:   NONE,
191:   CREATE,
192:   AFTER_CREATE,
193:   AFTER_DISTRIBUTE
194: } InterpType;

196: typedef struct {
197:   PetscInt    debug;        /* The debugging level */
198:   PetscInt    testNum;      /* Indicates the mesh to create */
199:   PetscInt    dim;          /* The topological mesh dimension */
200:   PetscBool   cellSimplex;  /* Use simplices or hexes */
201:   PetscBool   distribute;   /* Distribute the mesh */
202:   InterpType  interpolate;  /* Interpolate the mesh before or after DMPlexDistribute() */
203:   PetscBool   useGenerator; /* Construct mesh with a mesh generator */
204:   PetscBool   testOrientIF; /* Test for different original interface orientations */
205:   PetscBool   testHeavy;    /* Run the heavy PointSF test */
206:   PetscBool   customView;   /* Show results of DMPlexIsInterpolated() etc. */
207:   PetscInt    ornt[2];      /* Orientation of interface on rank 0 and rank 1 */
208:   PetscInt    faces[3];     /* Number of faces per dimension for generator */
209:   PetscScalar coords[128];
210:   PetscReal   coordsTol;
211:   PetscInt    ncoords;
212:   PetscInt    pointsToExpand[128];
213:   PetscInt    nPointsToExpand;
214:   PetscBool   testExpandPointsEmpty;
215:   char        filename[PETSC_MAX_PATH_LEN]; /* Import mesh from file */
216: } AppCtx;

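/* Distribution-independent description of the domain boundary: the coordinates of the expanded
   boundary vertices (replicated on every rank) plus the cone sections produced by
   DMPlexGetConeRecursive(), which allow the vertices to be contracted back into faces. */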
218: struct _n_PortableBoundary {
219:   Vec           coordinates;
220:   PetscInt      depth;
221:   PetscSection *sections;
222: };
223: typedef struct _n_PortableBoundary *PortableBoundary;

225: static PetscLogStage stage[3];

227: static PetscErrorCode DMPlexCheckPointSFHeavy(DM, PortableBoundary);
228: static PetscErrorCode DMPlexSetOrientInterface_Private(DM, PetscBool);
229: static PetscErrorCode DMPlexGetExpandedBoundary_Private(DM, PortableBoundary *);
230: static PetscErrorCode DMPlexExpandedConesToFaces_Private(DM, IS, PetscSection, IS *);

232: static PetscErrorCode PortableBoundaryDestroy(PortableBoundary *bnd)
233: {
234:   PetscInt d;

236:   PetscFunctionBegin;
237:   if (!*bnd) PetscFunctionReturn(PETSC_SUCCESS);
238:   PetscCall(VecDestroy(&(*bnd)->coordinates));
239:   for (d = 0; d < (*bnd)->depth; d++) PetscCall(PetscSectionDestroy(&(*bnd)->sections[d]));
240:   PetscCall(PetscFree((*bnd)->sections));
241:   PetscCall(PetscFree(*bnd));
242:   PetscFunctionReturn(PETSC_SUCCESS);
243: }

245: static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
246: {
247:   const char *interpTypes[4] = {"none", "create", "after_create", "after_distribute"};
248:   PetscInt    interp         = NONE, dim;
249:   PetscBool   flg1, flg2;

251:   PetscFunctionBegin;
252:   options->debug                 = 0;
253:   options->testNum               = 0;
254:   options->dim                   = 2;
255:   options->cellSimplex           = PETSC_TRUE;
256:   options->distribute            = PETSC_FALSE;
257:   options->interpolate           = NONE;
258:   options->useGenerator          = PETSC_FALSE;
259:   options->testOrientIF          = PETSC_FALSE;
260:   options->testHeavy             = PETSC_TRUE;
261:   options->customView            = PETSC_FALSE;
262:   options->testExpandPointsEmpty = PETSC_FALSE;
263:   options->ornt[0]               = 0;
264:   options->ornt[1]               = 0;
265:   options->faces[0]              = 2;
266:   options->faces[1]              = 2;
267:   options->faces[2]              = 2;
268:   options->filename[0]           = '\0';
269:   options->coordsTol             = PETSC_DEFAULT;

271:   PetscOptionsBegin(comm, "", "Meshing Interpolation Test Options", "DMPLEX");
272:   PetscCall(PetscOptionsBoundedInt("-debug", "The debugging level", "ex18.c", options->debug, &options->debug, NULL, 0));
273:   PetscCall(PetscOptionsBoundedInt("-testnum", "The mesh to create", "ex18.c", options->testNum, &options->testNum, NULL, 0));
274:   PetscCall(PetscOptionsBool("-cell_simplex", "Generate simplices if true, otherwise hexes", "ex18.c", options->cellSimplex, &options->cellSimplex, NULL));
275:   PetscCall(PetscOptionsBool("-distribute", "Distribute the mesh", "ex18.c", options->distribute, &options->distribute, NULL));
276:   PetscCall(PetscOptionsEList("-interpolate", "Type of mesh interpolation (none, create, after_create, after_distribute)", "ex18.c", interpTypes, 4, interpTypes[options->interpolate], &interp, NULL));
277:   options->interpolate = (InterpType)interp;
 278:   PetscCheck(options->distribute || options->interpolate != AFTER_DISTRIBUTE, comm, PETSC_ERR_SUP, "-interpolate after_distribute needs -distribute 1");
279:   PetscCall(PetscOptionsBool("-use_generator", "Use a mesh generator to build the mesh", "ex18.c", options->useGenerator, &options->useGenerator, NULL));
280:   options->ncoords = 128;
281:   PetscCall(PetscOptionsScalarArray("-view_vertices_from_coords", "Print DAG points corresponding to vertices with given coordinates", "ex18.c", options->coords, &options->ncoords, NULL));
282:   PetscCall(PetscOptionsReal("-view_vertices_from_coords_tol", "Tolerance for -view_vertices_from_coords", "ex18.c", options->coordsTol, &options->coordsTol, NULL));
283:   options->nPointsToExpand = 128;
284:   PetscCall(PetscOptionsIntArray("-test_expand_points", "Expand given array of DAG point using DMPlexGetConeRecursive() and print results", "ex18.c", options->pointsToExpand, &options->nPointsToExpand, NULL));
285:   if (options->nPointsToExpand) PetscCall(PetscOptionsBool("-test_expand_points_empty", "For -test_expand_points, rank 0 will have empty input array", "ex18.c", options->testExpandPointsEmpty, &options->testExpandPointsEmpty, NULL));
286:   PetscCall(PetscOptionsBool("-test_heavy", "Run the heavy PointSF test", "ex18.c", options->testHeavy, &options->testHeavy, NULL));
287:   PetscCall(PetscOptionsBool("-custom_view", "Custom DMPlex view", "ex18.c", options->customView, &options->customView, NULL));
288:   PetscCall(PetscOptionsRangeInt("-dim", "The topological mesh dimension", "ex18.c", options->dim, &options->dim, &flg1, 1, 3));
289:   dim = 3;
290:   PetscCall(PetscOptionsIntArray("-faces", "Number of faces per dimension", "ex18.c", options->faces, &dim, &flg2));
291:   if (flg2) {
292:     PetscCheck(!flg1 || dim == options->dim, comm, PETSC_ERR_ARG_OUTOFRANGE, "specified -dim %" PetscInt_FMT " is not equal to length %" PetscInt_FMT " of -faces (note that -dim can be omitted)", options->dim, dim);
293:     options->dim = dim;
294:   }
295:   PetscCall(PetscOptionsString("-filename", "The mesh file", "ex18.c", options->filename, options->filename, sizeof(options->filename), NULL));
296:   PetscCall(PetscOptionsBoundedInt("-rotate_interface_0", "Rotation (relative orientation) of interface on rank 0; implies -interpolate create -distribute 0", "ex18.c", options->ornt[0], &options->ornt[0], &options->testOrientIF, 0));
297:   PetscCall(PetscOptionsBoundedInt("-rotate_interface_1", "Rotation (relative orientation) of interface on rank 1; implies -interpolate create -distribute 0", "ex18.c", options->ornt[1], &options->ornt[1], &flg2, 0));
 298:   PetscCheck(flg2 == options->testOrientIF, comm, PETSC_ERR_ARG_OUTOFRANGE, "either both or neither of -rotate_interface_0 and -rotate_interface_1 must be set");
299:   if (options->testOrientIF) {
300:     PetscInt i;
301:     for (i = 0; i < 2; i++) {
302:       if (options->ornt[i] >= 10) options->ornt[i] = -(options->ornt[i] - 10); /* 11 12 13 become -1 -2 -3 */
303:     }
304:     options->filename[0]  = 0;
305:     options->useGenerator = PETSC_FALSE;
306:     options->dim          = 3;
307:     options->cellSimplex  = PETSC_TRUE;
308:     options->interpolate  = CREATE;
309:     options->distribute   = PETSC_FALSE;
310:   }
311:   PetscOptionsEnd();
312:   PetscFunctionReturn(PETSC_SUCCESS);
313: }
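/* Illustrative command lines combining the options above (hypothetical, for orientation only):
     mpiexec -n 2 ./ex18 -interpolate after_create -distribute -dm_view
     mpiexec -n 2 ./ex18 -dim 3 -cell_simplex 0 -distribute -interpolate after_distribute -custom_view
*/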

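/* Builds the 1D meshes drawn in the header comment: with -network_case 0, a chain of -ncells edges
   split between 2 ranks; with -network_case 1, the 3-edge star. A serial run builds the whole chain
   on one rank. */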
315: static PetscErrorCode CreateMesh_1D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
316: {
317:   PetscInt    testNum = user->testNum;
318:   PetscMPIInt rank, size;
319:   PetscInt    numCorners = 2, i;
320:   PetscInt    numCells, numVertices, network;
321:   PetscInt   *cells;
322:   PetscReal  *coords;

324:   PetscFunctionBegin;
325:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
326:   PetscCallMPI(MPI_Comm_size(comm, &size));
327:   PetscCheck(size <= 2, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for <=2 processes", testNum);

329:   numCells = 3;
330:   PetscCall(PetscOptionsGetInt(NULL, NULL, "-ncells", &numCells, NULL));
 331:   PetscCheck(numCells >= 3, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test ncells %" PetscInt_FMT " must be >= 3", numCells);

333:   if (size == 1) {
334:     numVertices = numCells + 1;
335:     PetscCall(PetscMalloc2(2 * numCells, &cells, 2 * numVertices, &coords));
336:     for (i = 0; i < numCells; i++) {
337:       cells[2 * i]      = i;
338:       cells[2 * i + 1]  = i + 1;
339:       coords[2 * i]     = i;
340:       coords[2 * i + 1] = i + 1;
341:     }

343:     PetscCall(DMPlexCreateFromCellListPetsc(comm, user->dim, numCells, numVertices, numCorners, PETSC_FALSE, cells, user->dim, coords, dm));
344:     PetscCall(PetscFree2(cells, coords));
345:     PetscFunctionReturn(PETSC_SUCCESS);
346:   }

348:   network = 0;
349:   PetscCall(PetscOptionsGetInt(NULL, NULL, "-network_case", &network, NULL));
350:   if (network == 0) {
351:     switch (rank) {
352:     case 0: {
353:       numCells    = 2;
354:       numVertices = numCells;
355:       PetscCall(PetscMalloc2(2 * numCells, &cells, 2 * numCells, &coords));
356:       cells[0]  = 0;
357:       cells[1]  = 1;
358:       cells[2]  = 1;
359:       cells[3]  = 2;
360:       coords[0] = 0.;
361:       coords[1] = 1.;
362:       coords[2] = 1.;
363:       coords[3] = 2.;
364:     } break;
365:     case 1: {
366:       numCells -= 2;
367:       numVertices = numCells + 1;
368:       PetscCall(PetscMalloc2(2 * numCells, &cells, 2 * numCells, &coords));
369:       for (i = 0; i < numCells; i++) {
370:         cells[2 * i]      = 2 + i;
371:         cells[2 * i + 1]  = 2 + i + 1;
372:         coords[2 * i]     = 2 + i;
373:         coords[2 * i + 1] = 2 + i + 1;
374:       }
375:     } break;
376:     default:
377:       SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
378:     }
379:   } else { /* network_case = 1 */
380:     /* ----------------------- */
381:     switch (rank) {
382:     case 0: {
383:       numCells    = 2;
384:       numVertices = 3;
385:       PetscCall(PetscMalloc2(2 * numCells, &cells, 2 * numCells, &coords));
386:       cells[0] = 0;
387:       cells[1] = 3;
388:       cells[2] = 3;
389:       cells[3] = 1;
390:     } break;
391:     case 1: {
392:       numCells    = 1;
393:       numVertices = 1;
394:       PetscCall(PetscMalloc2(2 * numCells, &cells, 2 * numCells, &coords));
395:       cells[0] = 3;
396:       cells[1] = 2;
397:     } break;
398:     default:
399:       SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
400:     }
401:   }
402:   PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, PETSC_FALSE, cells, user->dim, coords, NULL, NULL, dm));
403:   PetscCall(PetscFree2(cells, coords));
404:   PetscFunctionReturn(PETSC_SUCCESS);
405: }

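/* Hand-distributed triangle meshes matching the Triangle diagrams in the header comment:
   -testnum 0 requires 2 ranks, -testnum 1 and 2 require 3 ranks. */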
407: static PetscErrorCode CreateSimplex_2D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
408: {
409:   PetscInt    testNum = user->testNum, p;
410:   PetscMPIInt rank, size;

412:   PetscFunctionBegin;
413:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
414:   PetscCallMPI(MPI_Comm_size(comm, &size));
415:   switch (testNum) {
416:   case 0:
417:     PetscCheck(size == 2, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
418:     switch (rank) {
419:     case 0: {
420:       const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
421:       const PetscInt cells[3]        = {0, 1, 2};
422:       PetscReal      coords[4]       = {-0.5, 0.5, 0.0, 0.0};
423:       PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

425:       PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
426:       for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
427:     } break;
428:     case 1: {
429:       const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
430:       const PetscInt cells[3]        = {1, 3, 2};
431:       PetscReal      coords[4]       = {0.0, 1.0, 0.5, 0.5};
432:       PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

434:       PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
435:       for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
436:     } break;
437:     default:
438:       SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
439:     }
440:     break;
441:   case 1:
442:     PetscCheck(size == 3, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 3 processes", testNum);
443:     switch (rank) {
444:     case 0: {
445:       const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
446:       const PetscInt cells[3]        = {0, 1, 2};
447:       PetscReal      coords[4]       = {0.0, 1.0, 0.0, 0.0};
448:       PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

450:       PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
451:       for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
452:     } break;
453:     case 1: {
454:       const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
455:       const PetscInt cells[3]        = {0, 2, 3};
456:       PetscReal      coords[4]       = {0.5, 0.5, 1.0, 1.0};
457:       PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

459:       PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
460:       for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
461:     } break;
462:     case 2: {
463:       const PetscInt numCells = 2, numVertices = 1, numCorners = 3;
464:       const PetscInt cells[6]         = {2, 4, 3, 2, 1, 4};
465:       PetscReal      coords[2]        = {1.0, 0.0};
466:       PetscInt       markerPoints[10] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1};

468:       PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
469:       for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
470:     } break;
471:     default:
472:       SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
473:     }
474:     break;
475:   case 2:
476:     PetscCheck(size == 3, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 3 processes", testNum);
477:     switch (rank) {
478:     case 0: {
479:       const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
480:       const PetscInt cells[3]        = {1, 2, 0};
481:       PetscReal      coords[4]       = {0.5, 0.5, 0.0, 1.0};
482:       PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

484:       PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
485:       for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
486:     } break;
487:     case 1: {
488:       const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
489:       const PetscInt cells[3]        = {1, 0, 3};
490:       PetscReal      coords[4]       = {0.0, 0.0, 1.0, 1.0};
491:       PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

493:       PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
494:       for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
495:     } break;
496:     case 2: {
497:       const PetscInt numCells = 2, numVertices = 1, numCorners = 3;
498:       const PetscInt cells[6]         = {0, 4, 3, 0, 2, 4};
499:       PetscReal      coords[2]        = {1.0, 0.0};
500:       PetscInt       markerPoints[10] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1};

502:       PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
503:       for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
504:     } break;
505:     default:
506:       SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
507:     }
508:     break;
509:   default:
510:     SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
511:   }
512:   PetscFunctionReturn(PETSC_SUCCESS);
513: }

515: static PetscErrorCode CreateSimplex_3D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
516: {
517:   PetscInt    testNum = user->testNum, p;
518:   PetscMPIInt rank, size;

520:   PetscFunctionBegin;
521:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
522:   PetscCallMPI(MPI_Comm_size(comm, &size));
523:   switch (testNum) {
524:   case 0:
525:     PetscCheck(size == 2, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
526:     switch (rank) {
527:     case 0: {
528:       const PetscInt numCells = 1, numVertices = 2, numCorners = 4;
529:       const PetscInt cells[4]        = {0, 2, 1, 3};
530:       PetscReal      coords[6]       = {0.0, 0.0, -0.5, 0.0, -0.5, 0.0};
531:       PetscInt       markerPoints[8] = {1, 1, 2, 1, 3, 1, 4, 1};

533:       PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
534:       for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
535:     } break;
536:     case 1: {
537:       const PetscInt numCells = 1, numVertices = 3, numCorners = 4;
538:       const PetscInt cells[4]        = {1, 2, 4, 3};
539:       PetscReal      coords[9]       = {1.0, 0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.0, 0.5};
540:       PetscInt       markerPoints[8] = {1, 1, 2, 1, 3, 1, 4, 1};

542:       PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
543:       for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
544:     } break;
545:     default:
546:       SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
547:     }
548:     break;
549:   default:
550:     SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
551:   }
552:   if (user->testOrientIF) {
553:     PetscInt ifp[] = {8, 6};

555:     PetscCall(PetscObjectSetName((PetscObject)*dm, "Mesh before orientation"));
556:     PetscCall(DMViewFromOptions(*dm, NULL, "-before_orientation_dm_view"));
557:     /* rotate interface face ifp[rank] by given orientation ornt[rank] */
558:     PetscCall(DMPlexOrientPoint(*dm, ifp[rank], user->ornt[rank]));
559:     PetscCall(DMViewFromOptions(*dm, NULL, "-before_orientation_dm_view"));
560:     PetscCall(DMPlexCheckFaces(*dm, 0));
561:     PetscCall(DMPlexOrientInterface_Internal(*dm));
562:     PetscCall(PetscPrintf(comm, "Orientation test PASSED\n"));
563:   }
564:   PetscFunctionReturn(PETSC_SUCCESS);
565: }

567: static PetscErrorCode CreateQuad_2D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
568: {
569:   PetscInt    testNum = user->testNum, p;
570:   PetscMPIInt rank, size;

572:   PetscFunctionBegin;
573:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
574:   PetscCallMPI(MPI_Comm_size(comm, &size));
575:   switch (testNum) {
576:   case 0:
577:     PetscCheck(size == 2, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
578:     switch (rank) {
579:     case 0: {
580:       const PetscInt numCells = 1, numVertices = 3, numCorners = 4;
581:       const PetscInt cells[4]            = {0, 1, 2, 3};
582:       PetscReal      coords[6]           = {-0.5, 0.0, 0.0, 0.0, 0.0, 1.0};
583:       PetscInt       markerPoints[4 * 2] = {1, 1, 2, 1, 3, 1, 4, 1};

585:       PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
586:       for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
587:     } break;
588:     case 1: {
589:       const PetscInt numCells = 1, numVertices = 3, numCorners = 4;
590:       const PetscInt cells[4]            = {1, 4, 5, 2};
591:       PetscReal      coords[6]           = {-0.5, 1.0, 0.5, 0.0, 0.5, 1.0};
592:       PetscInt       markerPoints[4 * 2] = {1, 1, 2, 1, 3, 1, 4, 1};

594:       PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
595:       for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
596:     } break;
597:     default:
598:       SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
599:     }
600:     break;
601:   default:
602:     SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
603:   }
604:   PetscFunctionReturn(PETSC_SUCCESS);
605: }

607: static PetscErrorCode CreateHex_3D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
608: {
609:   PetscInt    testNum = user->testNum, p;
610:   PetscMPIInt rank, size;

612:   PetscFunctionBegin;
613:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
614:   PetscCallMPI(MPI_Comm_size(comm, &size));
615:   switch (testNum) {
616:   case 0:
617:     PetscCheck(size == 2, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
618:     switch (rank) {
619:     case 0: {
620:       const PetscInt numCells = 1, numVertices = 6, numCorners = 8;
621:       const PetscInt cells[8]            = {0, 3, 2, 1, 4, 5, 6, 7};
622:       PetscReal      coords[6 * 3]       = {-0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, -0.5, 1.0, 0.0, -0.5, 0.0, 1.0, 0.0, 0.0, 1.0};
623:       PetscInt       markerPoints[8 * 2] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1, 7, 1, 8, 1, 9, 1};

625:       PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
626:       for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
627:     } break;
628:     case 1: {
629:       const PetscInt numCells = 1, numVertices = 6, numCorners = 8;
630:       const PetscInt cells[8]            = {1, 2, 9, 8, 5, 10, 11, 6};
631:       PetscReal      coords[6 * 3]       = {0.0, 1.0, 1.0, -0.5, 1.0, 1.0, 0.5, 0.0, 0.0, 0.5, 1.0, 0.0, 0.5, 0.0, 1.0, 0.5, 1.0, 1.0};
632:       PetscInt       markerPoints[8 * 2] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1, 7, 1, 8, 1, 9, 1};

634:       PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
635:       for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
636:     } break;
637:     default:
638:       SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
639:     }
640:     break;
641:   default:
642:     SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
643:   }
644:   PetscFunctionReturn(PETSC_SUCCESS);
645: }

647: static PetscErrorCode CustomView(DM dm, PetscViewer v)
648: {
649:   DMPlexInterpolatedFlag interpolated;
650:   PetscBool              distributed;

652:   PetscFunctionBegin;
653:   PetscCall(DMPlexIsDistributed(dm, &distributed));
654:   PetscCall(DMPlexIsInterpolatedCollective(dm, &interpolated));
655:   PetscCall(PetscViewerASCIIPrintf(v, "DMPlexIsDistributed: %s\n", PetscBools[distributed]));
656:   PetscCall(PetscViewerASCIIPrintf(v, "DMPlexIsInterpolatedCollective: %s\n", DMPlexInterpolatedFlags[interpolated]));
657:   PetscFunctionReturn(PETSC_SUCCESS);
658: }

660: static PetscErrorCode CreateMeshFromFile(MPI_Comm comm, AppCtx *user, DM *dm, DM *serialDM)
661: {
662:   const char *filename     = user->filename;
663:   PetscBool   testHeavy    = user->testHeavy;
664:   PetscBool   interpCreate = user->interpolate == CREATE ? PETSC_TRUE : PETSC_FALSE;
665:   PetscBool   distributed  = PETSC_FALSE;

667:   PetscFunctionBegin;
668:   *serialDM = NULL;
669:   if (testHeavy && interpCreate) PetscCall(DMPlexSetOrientInterface_Private(NULL, PETSC_FALSE));
670:   PetscCall(PetscLogStagePush(stage[0]));
671:   PetscCall(DMPlexCreateFromFile(comm, filename, "ex18_plex", interpCreate, dm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
672:   PetscCall(PetscLogStagePop());
673:   if (testHeavy && interpCreate) PetscCall(DMPlexSetOrientInterface_Private(NULL, PETSC_TRUE));
674:   PetscCall(DMPlexIsDistributed(*dm, &distributed));
675:   PetscCall(PetscPrintf(comm, "DMPlexCreateFromFile produced %s mesh.\n", distributed ? "distributed" : "serial"));
676:   if (testHeavy && distributed) {
677:     PetscCall(PetscOptionsSetValue(NULL, "-dm_plex_hdf5_force_sequential", NULL));
678:     PetscCall(DMPlexCreateFromFile(comm, filename, "ex18_plex", interpCreate, serialDM));
679:     PetscCall(DMPlexIsDistributed(*serialDM, &distributed));
680:     PetscCheck(!distributed, comm, PETSC_ERR_PLIB, "unable to create a serial DM from file");
681:   }
682:   PetscCall(DMGetDimension(*dm, &user->dim));
683:   PetscFunctionReturn(PETSC_SUCCESS);
684: }

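/* Driver: build the mesh (from -filename, the box generator, or one of the hand-coded meshes above),
   interpolate it at the stage selected by -interpolate, optionally redistribute it, and, with
   -test_heavy, validate the PointSF with DMPlexCheckPointSFHeavy() against the expanded boundary of
   an equivalent serial mesh. */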
686: static PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
687: {
688:   PetscPartitioner part;
689:   PortableBoundary boundary       = NULL;
690:   DM               serialDM       = NULL;
691:   PetscBool        cellSimplex    = user->cellSimplex;
692:   PetscBool        useGenerator   = user->useGenerator;
693:   PetscBool        interpCreate   = user->interpolate == CREATE ? PETSC_TRUE : PETSC_FALSE;
694:   PetscBool        interpSerial   = user->interpolate == AFTER_CREATE ? PETSC_TRUE : PETSC_FALSE;
695:   PetscBool        interpParallel = user->interpolate == AFTER_DISTRIBUTE ? PETSC_TRUE : PETSC_FALSE;
696:   PetscBool        testHeavy      = user->testHeavy;
697:   PetscMPIInt      rank;

699:   PetscFunctionBegin;
700:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
701:   if (user->filename[0]) {
702:     PetscCall(CreateMeshFromFile(comm, user, dm, &serialDM));
703:   } else if (useGenerator) {
704:     PetscCall(PetscLogStagePush(stage[0]));
705:     PetscCall(DMPlexCreateBoxMesh(comm, user->dim, cellSimplex, user->faces, NULL, NULL, NULL, interpCreate, 0, PETSC_TRUE, dm));
706:     PetscCall(PetscLogStagePop());
707:   } else {
708:     PetscCall(PetscLogStagePush(stage[0]));
709:     switch (user->dim) {
710:     case 1:
711:       PetscCall(CreateMesh_1D(comm, interpCreate, user, dm));
712:       break;
713:     case 2:
714:       if (cellSimplex) {
715:         PetscCall(CreateSimplex_2D(comm, interpCreate, user, dm));
716:       } else {
717:         PetscCall(CreateQuad_2D(comm, interpCreate, user, dm));
718:       }
719:       break;
720:     case 3:
721:       if (cellSimplex) {
722:         PetscCall(CreateSimplex_3D(comm, interpCreate, user, dm));
723:       } else {
724:         PetscCall(CreateHex_3D(comm, interpCreate, user, dm));
725:       }
726:       break;
727:     default:
728:       SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot make meshes for dimension %" PetscInt_FMT, user->dim);
729:     }
730:     PetscCall(PetscLogStagePop());
731:   }
732:   PetscCheck(user->ncoords % user->dim == 0, comm, PETSC_ERR_ARG_OUTOFRANGE, "length of coordinates array %" PetscInt_FMT " must be divisible by spatial dimension %" PetscInt_FMT, user->ncoords, user->dim);
733:   PetscCall(PetscObjectSetName((PetscObject)*dm, "Original Mesh"));
734:   PetscCall(DMViewFromOptions(*dm, NULL, "-orig_dm_view"));

736:   if (interpSerial) {
737:     DM idm;

739:     if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_FALSE));
740:     PetscCall(PetscLogStagePush(stage[2]));
741:     PetscCall(DMPlexInterpolate(*dm, &idm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
742:     PetscCall(PetscLogStagePop());
743:     if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_TRUE));
744:     PetscCall(DMDestroy(dm));
745:     *dm = idm;
746:     PetscCall(PetscObjectSetName((PetscObject)*dm, "Interpolated Mesh"));
747:     PetscCall(DMViewFromOptions(*dm, NULL, "-intp_dm_view"));
748:   }

750:   /* Set partitioner options */
751:   PetscCall(DMPlexGetPartitioner(*dm, &part));
752:   if (part) {
753:     PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE));
754:     PetscCall(PetscPartitionerSetFromOptions(part));
755:   }

757:   if (user->customView) PetscCall(CustomView(*dm, PETSC_VIEWER_STDOUT_(comm)));
758:   if (testHeavy) {
759:     PetscBool distributed;

761:     PetscCall(DMPlexIsDistributed(*dm, &distributed));
762:     if (!serialDM && !distributed) {
763:       serialDM = *dm;
764:       PetscCall(PetscObjectReference((PetscObject)*dm));
765:     }
766:     if (serialDM) PetscCall(DMPlexGetExpandedBoundary_Private(serialDM, &boundary));
767:     if (boundary) {
768:       /* check DM which has been created in parallel and already interpolated */
769:       PetscCall(DMPlexCheckPointSFHeavy(*dm, boundary));
770:     }
771:     /* Orient interface because it could be deliberately skipped above. It is idempotent. */
772:     PetscCall(DMPlexOrientInterface_Internal(*dm));
773:   }
774:   if (user->distribute) {
775:     DM pdm = NULL;

777:     /* Redistribute mesh over processes using that partitioner */
778:     PetscCall(PetscLogStagePush(stage[1]));
779:     PetscCall(DMPlexDistribute(*dm, 0, NULL, &pdm));
780:     PetscCall(PetscLogStagePop());
781:     if (pdm) {
782:       PetscCall(DMDestroy(dm));
783:       *dm = pdm;
784:       PetscCall(PetscObjectSetName((PetscObject)*dm, "Redistributed Mesh"));
785:       PetscCall(DMViewFromOptions(*dm, NULL, "-dist_dm_view"));
786:     }

788:     if (interpParallel) {
789:       DM idm;

791:       if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_FALSE));
792:       PetscCall(PetscLogStagePush(stage[2]));
793:       PetscCall(DMPlexInterpolate(*dm, &idm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
794:       PetscCall(PetscLogStagePop());
795:       if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_TRUE));
796:       PetscCall(DMDestroy(dm));
797:       *dm = idm;
798:       PetscCall(PetscObjectSetName((PetscObject)*dm, "Interpolated Redistributed Mesh"));
799:       PetscCall(DMViewFromOptions(*dm, NULL, "-intp_dm_view"));
800:     }
801:   }
802:   if (testHeavy) {
803:     if (boundary) PetscCall(DMPlexCheckPointSFHeavy(*dm, boundary));
804:     /* Orient interface because it could be deliberately skipped above. It is idempotent. */
805:     PetscCall(DMPlexOrientInterface_Internal(*dm));
806:   }

808:   PetscCall(PetscObjectSetName((PetscObject)*dm, "Parallel Mesh"));
809:   PetscCall(DMPlexDistributeSetDefault(*dm, PETSC_FALSE));
810:   PetscCall(DMSetFromOptions(*dm));
811:   PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));

813:   if (user->customView) PetscCall(CustomView(*dm, PETSC_VIEWER_STDOUT_(comm)));
814:   PetscCall(DMDestroy(&serialDM));
815:   PetscCall(PortableBoundaryDestroy(&boundary));
816:   PetscFunctionReturn(PETSC_SUCCESS);
817: }

819: #define ps2d(number) ((double)PetscRealPart(number))
820: static inline PetscErrorCode coord2str(char buf[], size_t len, PetscInt dim, const PetscScalar coords[], PetscReal tol)
821: {
822:   PetscFunctionBegin;
 823:   PetscCheck(dim <= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "dim must be less than or equal to 3");
824:   if (tol >= 1e-3) {
825:     switch (dim) {
826:     case 1:
827:       PetscCall(PetscSNPrintf(buf, len, "(%12.3f)", ps2d(coords[0])));
828:     case 2:
829:       PetscCall(PetscSNPrintf(buf, len, "(%12.3f, %12.3f)", ps2d(coords[0]), ps2d(coords[1])));
830:     case 3:
831:       PetscCall(PetscSNPrintf(buf, len, "(%12.3f, %12.3f, %12.3f)", ps2d(coords[0]), ps2d(coords[1]), ps2d(coords[2])));
832:     }
833:   } else {
834:     switch (dim) {
835:     case 1:
836:       PetscCall(PetscSNPrintf(buf, len, "(%12.6f)", ps2d(coords[0])));
837:     case 2:
838:       PetscCall(PetscSNPrintf(buf, len, "(%12.6f, %12.6f)", ps2d(coords[0]), ps2d(coords[1])));
839:     case 3:
840:       PetscCall(PetscSNPrintf(buf, len, "(%12.6f, %12.6f, %12.6f)", ps2d(coords[0]), ps2d(coords[1]), ps2d(coords[2])));
841:     }
842:   }
843:   PetscFunctionReturn(PETSC_SUCCESS);
844: }

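/* For each coordinate tuple in coordsVec, locate the matching vertex with DMPlexFindVertices() and
   print its DAG point, rank by rank (see the -view_vertices_from_coords option). */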
846: static PetscErrorCode ViewVerticesFromCoords(DM dm, Vec coordsVec, PetscReal tol, PetscViewer viewer)
847: {
848:   PetscInt           dim, i, npoints;
849:   IS                 pointsIS;
850:   const PetscInt    *points;
851:   const PetscScalar *coords;
852:   char               coordstr[128];
853:   MPI_Comm           comm;
854:   PetscMPIInt        rank;

856:   PetscFunctionBegin;
857:   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
858:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
859:   PetscCall(DMGetDimension(dm, &dim));
860:   PetscCall(PetscViewerASCIIPushSynchronized(viewer));
861:   PetscCall(DMPlexFindVertices(dm, coordsVec, tol, &pointsIS));
862:   PetscCall(ISGetIndices(pointsIS, &points));
863:   PetscCall(ISGetLocalSize(pointsIS, &npoints));
864:   PetscCall(VecGetArrayRead(coordsVec, &coords));
865:   for (i = 0; i < npoints; i++) {
866:     PetscCall(coord2str(coordstr, sizeof(coordstr), dim, &coords[i * dim], tol));
867:     if (rank == 0 && i) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "-----\n"));
868:     PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %s --> points[%" PetscInt_FMT "] = %" PetscInt_FMT "\n", rank, coordstr, i, points[i]));
869:     PetscCall(PetscViewerFlush(viewer));
870:   }
871:   PetscCall(PetscViewerASCIIPopSynchronized(viewer));
872:   PetscCall(VecRestoreArrayRead(coordsVec, &coords));
873:   PetscCall(ISRestoreIndices(pointsIS, &points));
874:   PetscCall(ISDestroy(&pointsIS));
875:   PetscFunctionReturn(PETSC_SUCCESS);
876: }

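/* Expand the DAG points given by -test_expand_points with DMPlexGetConeRecursive(), view the IS and
   PetscSection of every depth level, and check that DMPlexExpandedConesToFaces_Private() inverts each
   expansion step. */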
878: static PetscErrorCode TestExpandPoints(DM dm, AppCtx *user)
879: {
880:   IS            is;
881:   PetscSection *sects;
882:   IS           *iss;
883:   PetscInt      d, depth;
884:   PetscMPIInt   rank;
885:   PetscViewer   viewer = PETSC_VIEWER_STDOUT_WORLD, sviewer;

887:   PetscFunctionBegin;
888:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
889:   if (user->testExpandPointsEmpty && rank == 0) {
890:     PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_USE_POINTER, &is));
891:   } else {
892:     PetscCall(ISCreateGeneral(PETSC_COMM_SELF, user->nPointsToExpand, user->pointsToExpand, PETSC_USE_POINTER, &is));
893:   }
894:   PetscCall(DMPlexGetConeRecursive(dm, is, &depth, &iss, &sects));
895:   PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
896:   PetscCall(PetscViewerASCIIPrintf(sviewer, "[%d] ==========================\n", rank));
897:   for (d = depth - 1; d >= 0; d--) {
898:     IS        checkIS;
899:     PetscBool flg;

901:     PetscCall(PetscViewerASCIIPrintf(sviewer, "depth %" PetscInt_FMT " ---------------\n", d));
902:     PetscCall(PetscSectionView(sects[d], sviewer));
903:     PetscCall(ISView(iss[d], sviewer));
904:     /* check reverse operation */
905:     if (d < depth - 1) {
906:       PetscCall(DMPlexExpandedConesToFaces_Private(dm, iss[d], sects[d], &checkIS));
907:       PetscCall(ISEqualUnsorted(checkIS, iss[d + 1], &flg));
908:       PetscCheck(flg, PetscObjectComm((PetscObject)checkIS), PETSC_ERR_PLIB, "DMPlexExpandedConesToFaces_Private produced wrong IS");
909:       PetscCall(ISDestroy(&checkIS));
910:     }
911:   }
912:   PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
913:   PetscCall(DMPlexRestoreConeRecursive(dm, is, &depth, &iss, &sects));
914:   PetscCall(ISDestroy(&is));
915:   PetscFunctionReturn(PETSC_SUCCESS);
916: }

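/* Inverse of one DMPlexGetConeRecursive() expansion step: the join of each expanded cone recovers the
   covering face (single-entry cones pass through unchanged); a point maps to -1 if its cone contains a
   negative entry and to -2 if the join is empty. */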
918: static PetscErrorCode DMPlexExpandedConesToFaces_Private(DM dm, IS is, PetscSection section, IS *newis)
919: {
920:   PetscInt        n, n1, ncone, numCoveredPoints, o, p, q, start, end;
921:   const PetscInt *coveredPoints;
922:   const PetscInt *arr, *cone;
923:   PetscInt       *newarr;

925:   PetscFunctionBegin;
926:   PetscCall(ISGetLocalSize(is, &n));
927:   PetscCall(PetscSectionGetStorageSize(section, &n1));
928:   PetscCall(PetscSectionGetChart(section, &start, &end));
929:   PetscCheck(n == n1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "IS size = %" PetscInt_FMT " != %" PetscInt_FMT " = section storage size", n, n1);
930:   PetscCall(ISGetIndices(is, &arr));
931:   PetscCall(PetscMalloc1(end - start, &newarr));
932:   for (q = start; q < end; q++) {
933:     PetscCall(PetscSectionGetDof(section, q, &ncone));
934:     PetscCall(PetscSectionGetOffset(section, q, &o));
935:     cone = &arr[o];
936:     if (ncone == 1) {
937:       numCoveredPoints = 1;
938:       p                = cone[0];
939:     } else {
940:       PetscInt i;
941:       p = PETSC_INT_MAX;
942:       for (i = 0; i < ncone; i++)
943:         if (cone[i] < 0) {
944:           p = -1;
945:           break;
946:         }
947:       if (p >= 0) {
948:         PetscCall(DMPlexGetJoin(dm, ncone, cone, &numCoveredPoints, &coveredPoints));
 949:         PetscCheck(numCoveredPoints <= 1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "more than one covered point for section point %" PetscInt_FMT, q);
950:         if (numCoveredPoints) p = coveredPoints[0];
951:         else p = -2;
952:         PetscCall(DMPlexRestoreJoin(dm, ncone, cone, &numCoveredPoints, &coveredPoints));
953:       }
954:     }
955:     newarr[q - start] = p;
956:   }
957:   PetscCall(ISRestoreIndices(is, &arr));
958:   PetscCall(ISCreateGeneral(PETSC_COMM_SELF, end - start, newarr, PETSC_OWN_POINTER, newis));
959:   PetscFunctionReturn(PETSC_SUCCESS);
960: }

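/* Contract an IS of expanded vertices back to the original faces by applying
   DMPlexExpandedConesToFaces_Private() once per depth level. */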
962: static PetscErrorCode DMPlexExpandedVerticesToFaces_Private(DM dm, IS boundary_expanded_is, PetscInt depth, PetscSection sections[], IS *boundary_is)
963: {
964:   PetscInt d;
965:   IS       is, newis;

967:   PetscFunctionBegin;
968:   is = boundary_expanded_is;
969:   PetscCall(PetscObjectReference((PetscObject)is));
970:   for (d = 0; d < depth - 1; ++d) {
971:     PetscCall(DMPlexExpandedConesToFaces_Private(dm, is, sections[d], &newis));
972:     PetscCall(ISDestroy(&is));
973:     is = newis;
974:   }
975:   *boundary_is = is;
976:   PetscFunctionReturn(PETSC_SUCCESS);
977: }

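/* On error, just clear the recursion guard instead of returning, so a failing viewer inside
   DMLabelViewFromOptionsOnComm_Private() is not fatal. */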
979: #define CHKERRQI(incall, ierr) \
980:   if (ierr) incall = PETSC_FALSE;

982: static PetscErrorCode DMLabelViewFromOptionsOnComm_Private(DMLabel label, const char optionname[], MPI_Comm comm)
983: {
984:   PetscViewer       viewer;
985:   PetscBool         flg;
986:   static PetscBool  incall = PETSC_FALSE;
987:   PetscViewerFormat format;

989:   PetscFunctionBegin;
990:   if (incall) PetscFunctionReturn(PETSC_SUCCESS);
991:   incall = PETSC_TRUE;
992:   CHKERRQI(incall, PetscOptionsCreateViewer(comm, ((PetscObject)label)->options, ((PetscObject)label)->prefix, optionname, &viewer, &format, &flg));
993:   if (flg) {
994:     CHKERRQI(incall, PetscViewerPushFormat(viewer, format));
995:     CHKERRQI(incall, DMLabelView(label, viewer));
996:     CHKERRQI(incall, PetscViewerPopFormat(viewer));
997:     CHKERRQI(incall, PetscViewerDestroy(&viewer));
998:   }
999:   incall = PETSC_FALSE;
1000:   PetscFunctionReturn(PETSC_SUCCESS);
1001: }

1003: /* TODO: this is hotfixing DMLabelGetStratumIS() - it should be fixed systematically instead */
1004: static inline PetscErrorCode DMLabelGetStratumISOnComm_Private(DMLabel label, PetscInt value, MPI_Comm comm, IS *is)
1005: {
1006:   IS tmpis;

1008:   PetscFunctionBegin;
1009:   PetscCall(DMLabelGetStratumIS(label, value, &tmpis));
1010:   if (!tmpis) PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_USE_POINTER, &tmpis));
1011:   PetscCall(ISOnComm(tmpis, comm, PETSC_COPY_VALUES, is));
1012:   PetscCall(ISDestroy(&tmpis));
1013:   PetscFunctionReturn(PETSC_SUCCESS);
1014: }

1016: /* currently only for simple PetscSection without fields or constraints */
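/* the root rank's chart and per-point dof counts are broadcast so that every rank sets up an identical section */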
1017: static PetscErrorCode PetscSectionReplicate_Private(MPI_Comm comm, PetscMPIInt rootrank, PetscSection sec0, PetscSection *secout)
1018: {
1019:   PetscSection sec;
1020:   PetscInt     chart[2], p;
1021:   PetscInt    *dofarr;
1022:   PetscMPIInt  rank;

1024:   PetscFunctionBegin;
1025:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
1026:   if (rank == rootrank) PetscCall(PetscSectionGetChart(sec0, &chart[0], &chart[1]));
1027:   PetscCallMPI(MPI_Bcast(chart, 2, MPIU_INT, rootrank, comm));
1028:   PetscCall(PetscMalloc1(chart[1] - chart[0], &dofarr));
1029:   if (rank == rootrank) {
1030:     for (p = chart[0]; p < chart[1]; p++) PetscCall(PetscSectionGetDof(sec0, p, &dofarr[p - chart[0]]));
1031:   }
1032:   PetscCallMPI(MPI_Bcast(dofarr, (PetscMPIInt)(chart[1] - chart[0]), MPIU_INT, rootrank, comm));
1033:   PetscCall(PetscSectionCreate(comm, &sec));
1034:   PetscCall(PetscSectionSetChart(sec, chart[0], chart[1]));
1035:   for (p = chart[0]; p < chart[1]; p++) PetscCall(PetscSectionSetDof(sec, p, dofarr[p - chart[0]]));
1036:   PetscCall(PetscSectionSetUp(sec));
1037:   PetscCall(PetscFree(dofarr));
1038:   *secout = sec;
1039:   PetscFunctionReturn(PETSC_SUCCESS);
1040: }

1042: static PetscErrorCode DMPlexExpandedVerticesCoordinatesToFaces_Private(DM ipdm, PortableBoundary bnd, IS *face_is)
1043: {
1044:   IS faces_expanded_is;

1046:   PetscFunctionBegin;
1047:   PetscCall(DMPlexFindVertices(ipdm, bnd->coordinates, 0.0, &faces_expanded_is));
1048:   PetscCall(DMPlexExpandedVerticesToFaces_Private(ipdm, faces_expanded_is, bnd->depth, bnd->sections, face_is));
1049:   PetscCall(ISDestroy(&faces_expanded_is));
1050:   PetscFunctionReturn(PETSC_SUCCESS);
1051: }

1053: /* hack disabling DMPlexOrientInterface() call in DMPlexInterpolate() via -dm_plex_interpolate_orient_interfaces option */
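/* An explicitly set true value is remembered in wasSetTrue so that a later enabling call restores the
   user's setting instead of merely clearing the option. */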
1054: static PetscErrorCode DMPlexSetOrientInterface_Private(DM dm, PetscBool enable)
1055: {
1056:   PetscOptions     options = NULL;
1057:   const char      *prefix  = NULL;
1058:   const char       opt[]   = "-dm_plex_interpolate_orient_interfaces";
1059:   char             prefix_opt[512];
1060:   PetscBool        flg, set;
1061:   static PetscBool wasSetTrue = PETSC_FALSE;

1063:   PetscFunctionBegin;
1064:   if (dm) {
1065:     PetscCall(PetscObjectGetOptionsPrefix((PetscObject)dm, &prefix));
1066:     options = ((PetscObject)dm)->options;
1067:   }
1068:   PetscCall(PetscStrncpy(prefix_opt, "-", sizeof(prefix_opt)));
1069:   PetscCall(PetscStrlcat(prefix_opt, prefix, sizeof(prefix_opt)));
1070:   PetscCall(PetscStrlcat(prefix_opt, &opt[1], sizeof(prefix_opt)));
1071:   PetscCall(PetscOptionsGetBool(options, prefix, opt, &flg, &set));
1072:   if (!enable) {
1073:     if (set && flg) wasSetTrue = PETSC_TRUE;
1074:     PetscCall(PetscOptionsSetValue(options, prefix_opt, "0"));
1075:   } else if (set && !flg) {
1076:     if (wasSetTrue) {
1077:       PetscCall(PetscOptionsSetValue(options, prefix_opt, "1"));
1078:     } else {
1079:       /* default is PETSC_TRUE */
1080:       PetscCall(PetscOptionsClearValue(options, prefix_opt));
1081:     }
1082:     wasSetTrue = PETSC_FALSE;
1083:   }
1084:   if (PetscDefined(USE_DEBUG)) {
1085:     PetscCall(PetscOptionsGetBool(options, prefix, opt, &flg, &set));
1086:     PetscCheck(!set || flg == enable, PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "PetscOptionsSetValue did not have the desired effect");
1087:   }
1088:   PetscFunctionReturn(PETSC_SUCCESS);
1089: }

1091: /* get coordinate description of the whole-domain boundary */
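/* Steps: interpolate the serial DM if necessary, mark its whole-domain boundary faces, expand them to
   vertices with DMPlexGetConeRecursive() on the root rank, record the vertex coordinates, and replicate
   the coordinates and cone sections onto every rank. */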
1092: static PetscErrorCode DMPlexGetExpandedBoundary_Private(DM dm, PortableBoundary *boundary)
1093: {
1094:   PortableBoundary       bnd0, bnd;
1095:   MPI_Comm               comm;
1096:   DM                     idm;
1097:   DMLabel                label;
1098:   PetscInt               d;
1099:   const char             boundaryName[] = "DMPlexDistributeInterpolateMarkInterface_boundary";
1100:   IS                     boundary_is;
1101:   IS                    *boundary_expanded_iss;
1102:   PetscMPIInt            rootrank = 0;
1103:   PetscMPIInt            rank, size;
1104:   PetscInt               value = 1;
1105:   DMPlexInterpolatedFlag intp;
1106:   PetscBool              flg;

1108:   PetscFunctionBegin;
1109:   PetscCall(PetscNew(&bnd));
1110:   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1111:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
1112:   PetscCallMPI(MPI_Comm_size(comm, &size));
1113:   PetscCall(DMPlexIsDistributed(dm, &flg));
1114:   PetscCheck(!flg, comm, PETSC_ERR_ARG_WRONG, "serial DM (all points on one rank) needed");

1116:   /* interpolate serial DM if not yet interpolated */
1117:   PetscCall(DMPlexIsInterpolatedCollective(dm, &intp));
1118:   if (intp == DMPLEX_INTERPOLATED_FULL) {
1119:     idm = dm;
1120:     PetscCall(PetscObjectReference((PetscObject)dm));
1121:   } else {
1122:     PetscCall(DMPlexInterpolate(dm, &idm));
1123:     PetscCall(DMViewFromOptions(idm, NULL, "-idm_view"));
1124:   }

1126:   /* mark whole-domain boundary of the serial DM */
1127:   PetscCall(DMLabelCreate(PETSC_COMM_SELF, boundaryName, &label));
1128:   PetscCall(DMAddLabel(idm, label));
1129:   PetscCall(DMPlexMarkBoundaryFaces(idm, value, label));
1130:   PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-idm_boundary_view", comm));
1131:   PetscCall(DMLabelGetStratumIS(label, value, &boundary_is));

1133:   /* translate to coordinates */
1134:   PetscCall(PetscNew(&bnd0));
1135:   PetscCall(DMGetCoordinatesLocalSetUp(idm));
1136:   if (rank == rootrank) {
1137:     PetscCall(DMPlexGetConeRecursive(idm, boundary_is, &bnd0->depth, &boundary_expanded_iss, &bnd0->sections));
1138:     PetscCall(DMGetCoordinatesLocalTuple(dm, boundary_expanded_iss[0], NULL, &bnd0->coordinates));
1139:     /* self-check */
1140:     {
1141:       IS is0;
1142:       PetscCall(DMPlexExpandedVerticesCoordinatesToFaces_Private(idm, bnd0, &is0));
1143:       PetscCall(ISEqual(is0, boundary_is, &flg));
1144:       PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_PLIB, "DMPlexExpandedVerticesCoordinatesToFaces_Private produced a wrong IS");
1145:       PetscCall(ISDestroy(&is0));
1146:     }
1147:   } else {
1148:     PetscCall(VecCreateFromOptions(PETSC_COMM_SELF, NULL, 1, 0, 0, &bnd0->coordinates));
1149:   }

1151:   {
1152:     Vec        tmp;
1153:     VecScatter sc;
1154:     IS         xis;
1155:     PetscInt   n;

1157:     /* just convert seq vectors to mpi vector */
1158:     PetscCall(VecGetLocalSize(bnd0->coordinates, &n));
1159:     PetscCallMPI(MPI_Bcast(&n, 1, MPIU_INT, rootrank, comm));
1160:     if (rank == rootrank) {
1161:       PetscCall(VecCreateFromOptions(comm, NULL, 1, n, n, &tmp));
1162:     } else {
1163:       PetscCall(VecCreateFromOptions(comm, NULL, 1, 0, n, &tmp));
1164:     }
1165:     PetscCall(VecCopy(bnd0->coordinates, tmp));
1166:     PetscCall(VecDestroy(&bnd0->coordinates));
1167:     bnd0->coordinates = tmp;

1169:     /* replicate coordinates from root rank to all ranks */
1170:     PetscCall(VecCreateFromOptions(comm, NULL, 1, n, n * size, &bnd->coordinates));
1171:     PetscCall(ISCreateStride(comm, n, 0, 1, &xis));
1172:     PetscCall(VecScatterCreate(bnd0->coordinates, xis, bnd->coordinates, NULL, &sc));
1173:     PetscCall(VecScatterBegin(sc, bnd0->coordinates, bnd->coordinates, INSERT_VALUES, SCATTER_FORWARD));
1174:     PetscCall(VecScatterEnd(sc, bnd0->coordinates, bnd->coordinates, INSERT_VALUES, SCATTER_FORWARD));
1175:     PetscCall(VecScatterDestroy(&sc));
1176:     PetscCall(ISDestroy(&xis));
1177:   }
1178:   bnd->depth = bnd0->depth;
1179:   PetscCallMPI(MPI_Bcast(&bnd->depth, 1, MPIU_INT, rootrank, comm));
1180:   PetscCall(PetscMalloc1(bnd->depth, &bnd->sections));
1181:   for (d = 0; d < bnd->depth; d++) PetscCall(PetscSectionReplicate_Private(comm, rootrank, (rank == rootrank) ? bnd0->sections[d] : NULL, &bnd->sections[d]));

1183:   if (rank == rootrank) PetscCall(DMPlexRestoreConeRecursive(idm, boundary_is, &bnd0->depth, &boundary_expanded_iss, &bnd0->sections));
1184:   PetscCall(PortableBoundaryDestroy(&bnd0));
1185:   PetscCall(DMRemoveLabelBySelf(idm, &label, PETSC_TRUE));
1186:   PetscCall(DMLabelDestroy(&label));
1187:   PetscCall(ISDestroy(&boundary_is));
1188:   PetscCall(DMDestroy(&idm));
1189:   *boundary = bnd;
1190:   PetscFunctionReturn(PETSC_SUCCESS);
1191: }

1193: /* get faces of inter-partition interface */
1194: static PetscErrorCode DMPlexGetInterfaceFaces_Private(DM ipdm, IS boundary_faces_is, IS *interface_faces_is)
1195: {
1196:   MPI_Comm               comm;
1197:   DMLabel                label;
1198:   IS                     part_boundary_faces_is;
1199:   const char             partBoundaryName[] = "DMPlexDistributeInterpolateMarkInterface_partBoundary";
1200:   PetscInt               value              = 1;
1201:   DMPlexInterpolatedFlag intp;

1203:   PetscFunctionBegin;
1204:   PetscCall(PetscObjectGetComm((PetscObject)ipdm, &comm));
1205:   PetscCall(DMPlexIsInterpolatedCollective(ipdm, &intp));
1206:   PetscCheck(intp == DMPLEX_INTERPOLATED_FULL, comm, PETSC_ERR_ARG_WRONG, "only for fully interpolated DMPlex");

1208:   /* get ipdm partition boundary (partBoundary) */
1209:   {
1210:     PetscSF sf;

1212:     PetscCall(DMLabelCreate(PETSC_COMM_SELF, partBoundaryName, &label));
1213:     PetscCall(DMAddLabel(ipdm, label));
1214:     PetscCall(DMGetPointSF(ipdm, &sf));
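    /* temporarily unset the PointSF so that DMPlexMarkBoundaryFaces() treats each local partition as a
       standalone mesh and therefore marks the partition boundary */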
1215:     PetscCall(DMSetPointSF(ipdm, NULL));
1216:     PetscCall(DMPlexMarkBoundaryFaces(ipdm, value, label));
1217:     PetscCall(DMSetPointSF(ipdm, sf));
1218:     PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-ipdm_part_boundary_view", comm));
1219:     PetscCall(DMLabelGetStratumISOnComm_Private(label, value, comm, &part_boundary_faces_is));
1220:     PetscCall(DMRemoveLabelBySelf(ipdm, &label, PETSC_TRUE));
1221:     PetscCall(DMLabelDestroy(&label));
1222:   }

1224:   /* remove ipdm whole-domain boundary (boundary_faces_is) from ipdm partition boundary (part_boundary_faces_is), resulting just in inter-partition interface */
1225:   PetscCall(ISDifference(part_boundary_faces_is, boundary_faces_is, interface_faces_is));
1226:   PetscCall(ISDestroy(&part_boundary_faces_is));
1227:   PetscFunctionReturn(PETSC_SUCCESS);
1228: }

1230: /* compute inter-partition interface including edges and vertices */
1231: static PetscErrorCode DMPlexComputeCompleteInterface_Private(DM ipdm, IS interface_faces_is, IS *interface_is)
1232: {
1233:   DMLabel                label;
1234:   PetscInt               value           = 1;
1235:   const char             interfaceName[] = "DMPlexDistributeInterpolateMarkInterface_interface";
1236:   DMPlexInterpolatedFlag intp;
1237:   MPI_Comm               comm;

1239:   PetscFunctionBegin;
1240:   PetscCall(PetscObjectGetComm((PetscObject)ipdm, &comm));
1241:   PetscCall(DMPlexIsInterpolatedCollective(ipdm, &intp));
1242:   PetscCheck(intp == DMPLEX_INTERPOLATED_FULL, comm, PETSC_ERR_ARG_WRONG, "only for fully interpolated DMPlex");

1244:   PetscCall(DMLabelCreate(PETSC_COMM_SELF, interfaceName, &label));
1245:   PetscCall(DMAddLabel(ipdm, label));
1246:   PetscCall(DMLabelSetStratumIS(label, value, interface_faces_is));
1247:   PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-interface_faces_view", comm));
1248:   PetscCall(DMPlexLabelComplete(ipdm, label));
1249:   PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-interface_view", comm));
1250:   PetscCall(DMLabelGetStratumISOnComm_Private(label, value, comm, interface_is));
1251:   PetscCall(PetscObjectSetName((PetscObject)*interface_is, "interface_is"));
1252:   PetscCall(ISViewFromOptions(*interface_is, NULL, "-interface_is_view"));
1253:   PetscCall(DMRemoveLabelBySelf(ipdm, &label, PETSC_TRUE));
1254:   PetscCall(DMLabelDestroy(&label));
1255:   PetscFunctionReturn(PETSC_SUCCESS);
1256: }

1258: static PetscErrorCode PointSFGetOutwardInterfacePoints(PetscSF sf, IS *is)
1259: {
1260:   PetscInt        n;
1261:   const PetscInt *arr;

1263:   PetscFunctionBegin;
1264:   PetscCall(PetscSFGetGraph(sf, NULL, &n, &arr, NULL));
1265:   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)sf), n, arr, PETSC_USE_POINTER, is));
1266:   PetscFunctionReturn(PETSC_SUCCESS);
1267: }

1269: static PetscErrorCode PointSFGetInwardInterfacePoints(PetscSF sf, IS *is)
1270: {
1271:   PetscInt        n;
1272:   const PetscInt *rootdegree;
1273:   PetscInt       *arr;

1275:   PetscFunctionBegin;
1276:   PetscCall(PetscSFSetUp(sf));
1277:   PetscCall(PetscSFComputeDegreeBegin(sf, &rootdegree));
1278:   PetscCall(PetscSFComputeDegreeEnd(sf, &rootdegree));
1279:   PetscCall(PetscSFComputeMultiRootOriginalNumbering(sf, rootdegree, &n, &arr));
1280:   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)sf), n, arr, PETSC_OWN_POINTER, is));
1281:   PetscFunctionReturn(PETSC_SUCCESS);
1282: }
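
/* Interface points of the PointSF are taken as the union of its outward points (the SF leaves, i.e. local
   points owned by another rank) and its inward points (local roots that appear as leaves on other ranks,
   recovered from the root degrees via PetscSFComputeMultiRootOriginalNumbering()). */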

1284: static PetscErrorCode PointSFGetInterfacePoints_Private(PetscSF pointSF, IS *is)
1285: {
1286:   IS pointSF_out_is, pointSF_in_is;

1288:   PetscFunctionBegin;
1289:   PetscCall(PointSFGetOutwardInterfacePoints(pointSF, &pointSF_out_is));
1290:   PetscCall(PointSFGetInwardInterfacePoints(pointSF, &pointSF_in_is));
1291:   PetscCall(ISExpand(pointSF_out_is, pointSF_in_is, is));
1292:   PetscCall(ISDestroy(&pointSF_out_is));
1293:   PetscCall(ISDestroy(&pointSF_in_is));
1294:   PetscFunctionReturn(PETSC_SUCCESS);
1295: }
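
/* CHKERRMY() wraps calls made while reporting a wrong PointSF: if such a diagnostic call itself fails,
   abort with a generic message instead of attempting to print further details. */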

1297: #define CHKERRMY(ierr) PetscCheck(!ierr, PETSC_COMM_SELF, PETSC_ERR_PLIB, "PointSF is wrong. Unable to show details!")
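
/* Recursively print the given points with their type: vertices are printed with their coordinates, while
   edges/faces/cells are printed with their type and followed by their cone, indented one level deeper. */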

1299: static PetscErrorCode ViewPointsWithType_Internal(DM dm, IS pointsIS, PetscViewer v)
1300: {
1301:   DMLabel         label;
1302:   PetscSection    coordsSection;
1303:   Vec             coordsVec;
1304:   PetscScalar    *coordsScalar;
1305:   PetscInt        coneSize, depth, dim, i, p, npoints;
1306:   const PetscInt *points;

1308:   PetscFunctionBegin;
1309:   PetscCall(DMGetDimension(dm, &dim));
1310:   PetscCall(DMGetCoordinateSection(dm, &coordsSection));
1311:   PetscCall(DMGetCoordinatesLocal(dm, &coordsVec));
1312:   PetscCall(VecGetArray(coordsVec, &coordsScalar));
1313:   PetscCall(ISGetLocalSize(pointsIS, &npoints));
1314:   PetscCall(ISGetIndices(pointsIS, &points));
1315:   PetscCall(DMPlexGetDepthLabel(dm, &label));
1316:   PetscCall(PetscViewerASCIIPushTab(v));
1317:   for (i = 0; i < npoints; i++) {
1318:     p = points[i];
1319:     PetscCall(DMLabelGetValue(label, p, &depth));
1320:     if (!depth) {
1321:       PetscInt n, o;
1322:       char     coordstr[128];

1324:       PetscCall(PetscSectionGetDof(coordsSection, p, &n));
1325:       PetscCall(PetscSectionGetOffset(coordsSection, p, &o));
1326:       PetscCall(coord2str(coordstr, sizeof(coordstr), n, &coordsScalar[o], 1.0));
1327:       PetscCall(PetscViewerASCIISynchronizedPrintf(v, "vertex %" PetscInt_FMT " w/ coordinates %s\n", p, coordstr));
1328:     } else {
1329:       char entityType[16];

1331:       switch (depth) {
1332:       case 1:
1333:         PetscCall(PetscStrncpy(entityType, "edge", sizeof(entityType)));
1334:         break;
1335:       case 2:
1336:         PetscCall(PetscStrncpy(entityType, "face", sizeof(entityType)));
1337:         break;
1338:       case 3:
1339:         PetscCall(PetscStrncpy(entityType, "cell", sizeof(entityType)));
1340:         break;
1341:       default:
1342:         SETERRQ(PetscObjectComm((PetscObject)v), PETSC_ERR_SUP, "Only for depth <= 3");
1343:       }
1344:       if (depth == dim && dim < 3) PetscCall(PetscStrlcat(entityType, " (cell)", sizeof(entityType)));
1345:       PetscCall(PetscViewerASCIISynchronizedPrintf(v, "%s %" PetscInt_FMT "\n", entityType, p));
1346:     }
1347:     PetscCall(DMPlexGetConeSize(dm, p, &coneSize));
1348:     if (coneSize) {
1349:       const PetscInt *cone;
1350:       IS              coneIS;

1352:       PetscCall(DMPlexGetCone(dm, p, &cone));
1353:       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, coneSize, cone, PETSC_USE_POINTER, &coneIS));
1354:       PetscCall(ViewPointsWithType_Internal(dm, coneIS, v));
1355:       PetscCall(ISDestroy(&coneIS));
1356:     }
1357:   }
1358:   PetscCall(PetscViewerASCIIPopTab(v));
1359:   PetscCall(VecRestoreArray(coordsVec, &coordsScalar));
1360:   PetscCall(ISRestoreIndices(pointsIS, &points));
1361:   PetscFunctionReturn(PETSC_SUCCESS);
1362: }

1364: static PetscErrorCode ViewPointsWithType(DM dm, IS points, PetscViewer v)
1365: {
1366:   PetscBool   flg;
1367:   PetscInt    npoints;
1368:   PetscMPIInt rank;

1370:   PetscFunctionBegin;
1371:   PetscCall(PetscObjectTypeCompare((PetscObject)v, PETSCVIEWERASCII, &flg));
1372:   PetscCheck(flg, PetscObjectComm((PetscObject)v), PETSC_ERR_SUP, "Only for ASCII viewer");
1373:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)v), &rank));
1374:   PetscCall(PetscViewerASCIIPushSynchronized(v));
1375:   PetscCall(ISGetLocalSize(points, &npoints));
1376:   if (npoints) {
1377:     PetscCall(PetscViewerASCIISynchronizedPrintf(v, "[%d] --------\n", rank));
1378:     PetscCall(ViewPointsWithType_Internal(dm, points, v));
1379:   }
1380:   PetscCall(PetscViewerFlush(v));
1381:   PetscCall(PetscViewerASCIIPopSynchronized(v));
1382:   PetscFunctionReturn(PETSC_SUCCESS);
1383: }

1385: static PetscErrorCode DMPlexComparePointSFWithInterface_Private(DM ipdm, IS interface_is)
1386: {
1387:   PetscSF     pointsf;
1388:   IS          pointsf_is;
1389:   PetscBool   flg;
1390:   MPI_Comm    comm;
1391:   PetscMPIInt size;

1393:   PetscFunctionBegin;
1394:   PetscCall(PetscObjectGetComm((PetscObject)ipdm, &comm));
1395:   PetscCallMPI(MPI_Comm_size(comm, &size));
1396:   PetscCall(DMGetPointSF(ipdm, &pointsf));
1397:   if (pointsf) {
1398:     PetscInt nroots;
1399:     PetscCall(PetscSFGetGraph(pointsf, &nroots, NULL, NULL, NULL));
1400:     if (nroots < 0) pointsf = NULL; /* uninitialized SF */
1401:   }
1402:   if (!pointsf) {
1403:     PetscInt N = 0;
1404:     if (interface_is) PetscCall(ISGetSize(interface_is, &N));
1405:     PetscCheck(!N, comm, PETSC_ERR_PLIB, "interface_is should be NULL or empty when PointSF is NULL");
1406:     PetscFunctionReturn(PETSC_SUCCESS);
1407:   }

1409:   /* get PointSF points as IS pointsf_is */
1410:   PetscCall(PointSFGetInterfacePoints_Private(pointsf, &pointsf_is));

1412:   /* compare pointsf_is with interface_is */
1413:   PetscCall(ISEqual(interface_is, pointsf_is, &flg));
1414:   PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &flg, 1, MPIU_BOOL, MPI_LAND, comm));
1415:   if (!flg) {
1416:     IS          pointsf_extra_is, pointsf_missing_is;
1417:     PetscViewer errv = PETSC_VIEWER_STDERR_(comm);
1418:     CHKERRMY(ISDifference(interface_is, pointsf_is, &pointsf_missing_is));
1419:     CHKERRMY(ISDifference(pointsf_is, interface_is, &pointsf_extra_is));
1420:     CHKERRMY(PetscViewerASCIIPrintf(errv, "Points missing in PointSF:\n"));
1421:     CHKERRMY(ViewPointsWithType(ipdm, pointsf_missing_is, errv));
1422:     CHKERRMY(PetscViewerASCIIPrintf(errv, "Extra points in PointSF:\n"));
1423:     CHKERRMY(ViewPointsWithType(ipdm, pointsf_extra_is, errv));
1424:     CHKERRMY(ISDestroy(&pointsf_extra_is));
1425:     CHKERRMY(ISDestroy(&pointsf_missing_is));
1426:     SETERRQ(comm, PETSC_ERR_PLIB, "PointSF is wrong! See details above.");
1427:   }
1428:   PetscCall(ISDestroy(&pointsf_is));
1429:   PetscFunctionReturn(PETSC_SUCCESS);
1430: }

1432: /* remove faces & edges from the index set, leaving just vertices */
1433: static PetscErrorCode DMPlexISFilterVertices_Private(DM dm, IS points)
1434: {
1435:   PetscInt vStart, vEnd;
1436:   MPI_Comm comm;

1438:   PetscFunctionBegin;
1439:   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1440:   PetscCall(DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd));
1441:   PetscCall(ISGeneralFilter(points, vStart, vEnd));
1442:   PetscFunctionReturn(PETSC_SUCCESS);
1443: }

1445: /*
1446:   DMPlexCheckPointSFHeavy - Thoroughly test that the PointSF after parallel DMPlexInterpolate() includes exactly all interface points.

1448:   Collective

1450:   Input Parameters:
1451: + dm  - The DMPlex object
- bnd - The PortableBoundary describing the whole-domain boundary of the serial mesh (see DMPlexGetExpandedBoundary_Private())

1453:   Notes:
1454:   The input DMPlex must be serial (one partition has all points, the other partitions have no points).
1455:   This is a heavy test: it involves DMPlexInterpolate() if the input DM is not yet interpolated, and it requires a representation of the whole-domain boundary (PortableBoundary), which can only be obtained by DMPlexGetExpandedBoundary_Private() (itself involving DMPlexInterpolate() of a sequential DM).
1456:   This is mainly intended for debugging/testing purposes.

1458:   Algorithm:
1459:   1. boundary faces of the serial version of the whole mesh are found using DMPlexMarkBoundaryFaces()
1460:   2. boundary faces are translated into vertices using DMPlexGetConeRecursive() and these are translated into coordinates - this description (aka PortableBoundary) is completely independent of partitioning and point numbering
1461:   3. the mesh is distributed or loaded in parallel
1462:   4. boundary faces of the distributed mesh are reconstructed from PortableBoundary using DMPlexFindVertices()
1463:   5. partition boundary faces of the parallel mesh are found using DMPlexMarkBoundaryFaces()
1464:   6. partition interfaces are computed as set difference of partition boundary faces minus the reconstructed boundary
1465:   7. check that the interface covered by the PointSF (the union of its inward and outward points) equals the partition interface on each rank; otherwise print the difference and raise an error

1467:   Level: developer

1469: .seealso: DMGetPointSF(), DMPlexCheckSymmetry(), DMPlexCheckSkeleton(), DMPlexCheckFaces()
1470: */
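
/* A minimal usage sketch (illustrative only; serialdm/dm are hypothetical variables standing for the mesh
   before and after distribution):

     PortableBoundary bnd;
     PetscCall(DMPlexGetExpandedBoundary_Private(serialdm, &bnd)); // whole-domain boundary of the serial mesh
     // ... distribute (and optionally interpolate) serialdm into dm ...
     PetscCall(DMPlexCheckPointSFHeavy(dm, bnd));                  // verify the PointSF against the interface
     PetscCall(PortableBoundaryDestroy(&bnd));
*/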
1471: static PetscErrorCode DMPlexCheckPointSFHeavy(DM dm, PortableBoundary bnd)
1472: {
1473:   DM                     ipdm = NULL;
1474:   IS                     boundary_faces_is, interface_faces_is, interface_is;
1475:   DMPlexInterpolatedFlag intp;
1476:   MPI_Comm               comm;

1478:   PetscFunctionBegin;
1479:   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));

1481:   PetscCall(DMPlexIsInterpolatedCollective(dm, &intp));
1482:   if (intp == DMPLEX_INTERPOLATED_FULL) {
1483:     ipdm = dm;
1484:   } else {
1485:     /* create temporary interpolated DM if input DM is not interpolated */
1486:     PetscCall(DMPlexSetOrientInterface_Private(dm, PETSC_FALSE));
1487:     PetscCall(DMPlexInterpolate(dm, &ipdm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexComparePointSFWithInterface_Private() below */
1488:     PetscCall(DMPlexSetOrientInterface_Private(dm, PETSC_TRUE));
1489:   }
1490:   PetscCall(DMViewFromOptions(ipdm, NULL, "-ipdm_view"));

1492:   /* recover ipdm whole-domain boundary faces from the expanded vertices coordinates */
1493:   PetscCall(DMPlexExpandedVerticesCoordinatesToFaces_Private(ipdm, bnd, &boundary_faces_is));
1494:   /* get inter-partition interface faces (interface_faces_is) */
1495:   PetscCall(DMPlexGetInterfaceFaces_Private(ipdm, boundary_faces_is, &interface_faces_is));
1496:   /* compute inter-partition interface including edges and vertices (interface_is) */
1497:   PetscCall(DMPlexComputeCompleteInterface_Private(ipdm, interface_faces_is, &interface_is));
1498:   /* destroy intermediate ISs */
1499:   PetscCall(ISDestroy(&boundary_faces_is));
1500:   PetscCall(ISDestroy(&interface_faces_is));

1502:   /* for the uninterpolated case, keep just the vertices in the interface */
1503:   if (!intp) {
1504:     PetscCall(DMPlexISFilterVertices_Private(ipdm, interface_is));
1505:     PetscCall(DMDestroy(&ipdm));
1506:   }

1508:   /* compare the PointSF with the inter-partition interface derived from the reconstructed boundary */
1509:   PetscCall(DMPlexComparePointSFWithInterface_Private(dm, interface_is));
1510:   PetscCall(PetscPrintf(comm, "DMPlexCheckPointSFHeavy PASSED\n"));
1511:   PetscCall(ISDestroy(&interface_is));
1512:   PetscFunctionReturn(PETSC_SUCCESS);
1513: }

1515: int main(int argc, char **argv)
1516: {
1517:   DM     dm;
1518:   AppCtx user;

1520:   PetscFunctionBeginUser;
1521:   PetscCall(PetscInitialize(&argc, &argv, NULL, help));
1522:   PetscCall(PetscLogStageRegister("create", &stage[0]));
1523:   PetscCall(PetscLogStageRegister("distribute", &stage[1]));
1524:   PetscCall(PetscLogStageRegister("interpolate", &stage[2]));
1525:   PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
1526:   PetscCall(CreateMesh(PETSC_COMM_WORLD, &user, &dm));
1527:   if (user.nPointsToExpand) PetscCall(TestExpandPoints(dm, &user));
1528:   if (user.ncoords) {
1529:     Vec coords;

1531:     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, user.ncoords, user.ncoords, user.coords, &coords));
1532:     PetscCall(ViewVerticesFromCoords(dm, coords, user.coordsTol, PETSC_VIEWER_STDOUT_WORLD));
1533:     PetscCall(VecDestroy(&coords));
1534:   }
1535:   PetscCall(DMDestroy(&dm));
1536:   PetscCall(PetscFinalize());
1537:   return 0;
1538: }
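
/* A hypothetical command line assembled from options exercised in the TEST block below (illustrative only):
     mpiexec -n 2 ./ex18 -dim 3 -distribute 1 -interpolate after_distribute -dm_plex_check_all -dm_view ascii::ascii_info_detail
*/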

1540: /*TEST

1542:   testset:
1543:     nsize: 2
1544:     args: -dm_view ascii::ascii_info_detail
1545:     args: -dm_plex_check_all
1546:     test:
1547:       suffix: 1_tri_dist0
1548:       args: -distribute 0 -interpolate {{none create}separate output}
1549:     test:
1550:       suffix: 1_tri_dist1
1551:       args: -distribute 1 -interpolate {{none create after_distribute}separate output}
1552:     test:
1553:       suffix: 1_quad_dist0
1554:       args: -cell_simplex 0 -distribute 0 -interpolate {{none create}separate output}
1555:     test:
1556:       suffix: 1_quad_dist1
1557:       args: -cell_simplex 0 -distribute 1 -interpolate {{none create after_distribute}separate output}
1558:     test:
1559:       suffix: 1_1d_dist1
1560:       args: -dim 1 -distribute 1

1562:   testset:
1563:     nsize: 3
1564:     args: -testnum 1 -interpolate create
1565:     args: -dm_plex_check_all
1566:     test:
1567:       suffix: 2
1568:       args: -dm_view ascii::ascii_info_detail
1569:     test:
1570:       suffix: 2a
1571:       args: -dm_plex_check_cones_conform_on_interfaces_verbose
1572:     test:
1573:       suffix: 2b
1574:       args: -test_expand_points 0,1,2,5,6
1575:     test:
1576:       suffix: 2c
1577:       args: -test_expand_points 0,1,2,5,6 -test_expand_points_empty

1579:   testset:
1580:     # the same as the 1_* testset above, but for 3D
1581:     nsize: 2
1582:     args: -dim 3 -dm_view ascii::ascii_info_detail
1583:     args: -dm_plex_check_all
1584:     test:
1585:       suffix: 4_tet_dist0
1586:       args: -distribute 0 -interpolate {{none create}separate output}
1587:     test:
1588:       suffix: 4_tet_dist1
1589:       args: -distribute 1 -interpolate {{none create after_distribute}separate output}
1590:     test:
1591:       suffix: 4_hex_dist0
1592:       args: -cell_simplex 0 -distribute 0 -interpolate {{none create}separate output}
1593:     test:
1594:       suffix: 4_hex_dist1
1595:       args: -cell_simplex 0 -distribute 1 -interpolate {{none create after_distribute}separate output}

1597:   test:
1598:     # the same as 4_tet_dist0, but testing different initial orientations
1599:     suffix: 4_tet_test_orient
1600:     nsize: 2
1601:     args: -dim 3 -distribute 0
1602:     args: -dm_plex_check_all
1603:     args: -rotate_interface_0 {{0 1 2 11 12 13}}
1604:     args: -rotate_interface_1 {{0 1 2 11 12 13}}

1606:   testset:
1607:     requires: exodusii
1608:     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/TwoQuads.exo
1609:     args: -dm_view ascii::ascii_info_detail
1610:     args: -dm_plex_check_all
1611:     args: -custom_view
1612:     test:
1613:       suffix: 5_seq
1614:       nsize: 1
1615:       args: -distribute 0 -interpolate {{none create}separate output}
1616:     test:
1617:       # Detail viewing in a non-distributed mesh is broken because the DMLabelView() is collective, but the label is not shared
1618:       suffix: 5_dist0
1619:       nsize: 2
1620:       args: -distribute 0 -interpolate {{none create}separate output} -dm_view
1621:     test:
1622:       suffix: 5_dist1
1623:       nsize: 2
1624:       args: -distribute 1 -interpolate {{none create after_distribute}separate output}

1626:   testset:
1627:     nsize: {{1 2 4}}
1628:     args: -use_generator
1629:     args: -dm_plex_check_all
1630:     args: -distribute -interpolate none
1631:     test:
1632:       suffix: 6_tri
1633:       requires: triangle
1634:       args: -faces {{2,2 1,3 7,4}} -cell_simplex 1 -dm_generator triangle
1635:     test:
1636:       suffix: 6_quad
1637:       args: -faces {{2,2 1,3 7,4}} -cell_simplex 0
1638:     test:
1639:       suffix: 6_tet
1640:       requires: ctetgen
1641:       args: -faces {{2,2,2 1,3,5 3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1642:     test:
1643:       suffix: 6_hex
1644:       args: -faces {{2,2,2 1,3,5 3,4,7}} -cell_simplex 0
1645:   testset:
1646:     nsize: {{1 2 4}}
1647:     args: -use_generator
1648:     args: -dm_plex_check_all
1649:     args: -distribute -interpolate create
1650:     test:
1651:       suffix: 6_int_tri
1652:       requires: triangle
1653:       args: -faces {{2,2 1,3 7,4}} -cell_simplex 1 -dm_generator triangle
1654:     test:
1655:       suffix: 6_int_quad
1656:       args: -faces {{2,2 1,3 7,4}} -cell_simplex 0
1657:     test:
1658:       suffix: 6_int_tet
1659:       requires: ctetgen
1660:       args: -faces {{2,2,2 1,3,5 3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1661:     test:
1662:       suffix: 6_int_hex
1663:       args: -faces {{2,2,2 1,3,5 3,4,7}} -cell_simplex 0
1664:   testset:
1665:     nsize: {{2 4}}
1666:     args: -use_generator
1667:     args: -dm_plex_check_all
1668:     args: -distribute -interpolate after_distribute
1669:     test:
1670:       suffix: 6_parint_tri
1671:       requires: triangle
1672:       args: -faces {{2,2 1,3 7,4}} -cell_simplex 1 -dm_generator triangle
1673:     test:
1674:       suffix: 6_parint_quad
1675:       args: -faces {{2,2 1,3 7,4}} -cell_simplex 0
1676:     test:
1677:       suffix: 6_parint_tet
1678:       requires: ctetgen
1679:       args: -faces {{2,2,2 1,3,5 3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1680:     test:
1681:       suffix: 6_parint_hex
1682:       args: -faces {{2,2,2 1,3,5 3,4,7}} -cell_simplex 0

1684:   testset: # 7 EXODUS
1685:     requires: exodusii
1686:     args: -dm_plex_check_all
1687:     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
1688:     args: -distribute
1689:     test: # seq load, simple partitioner
1690:       suffix: 7_exo
1691:       nsize: {{1 2 4 5}}
1692:       args: -interpolate none
1693:     test: # seq load, seq interpolation, simple partitioner
1694:       suffix: 7_exo_int_simple
1695:       nsize: {{1 2 4 5}}
1696:       args: -interpolate create
1697:     test: # seq load, seq interpolation, metis partitioner
1698:       suffix: 7_exo_int_metis
1699:       requires: parmetis
1700:       nsize: {{2 4 5}}
1701:       args: -interpolate create
1702:       args: -petscpartitioner_type parmetis
1703:     test: # seq load, simple partitioner, par interpolation
1704:       suffix: 7_exo_simple_int
1705:       nsize: {{2 4 5}}
1706:       args: -interpolate after_distribute
1707:     test: # seq load, metis partitioner, par interpolation
1708:       suffix: 7_exo_metis_int
1709:       requires: parmetis
1710:       nsize: {{2 4 5}}
1711:       args: -interpolate after_distribute
1712:       args: -petscpartitioner_type parmetis

1714:   testset: # 7 HDF5 SEQUENTIAL LOAD
1715:     requires: hdf5 !complex
1716:     args: -dm_plex_check_all
1717:     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1718:     args: -dm_plex_hdf5_force_sequential
1719:     args: -distribute
1720:     test: # seq load, simple partitioner
1721:       suffix: 7_seq_hdf5_simple
1722:       nsize: {{1 2 4 5}}
1723:       args: -interpolate none
1724:     test: # seq load, seq interpolation, simple partitioner
1725:       suffix: 7_seq_hdf5_int_simple
1726:       nsize: {{1 2 4 5}}
1727:       args: -interpolate after_create
1728:     test: # seq load, seq interpolation, metis partitioner
1729:       nsize: {{2 4 5}}
1730:       suffix: 7_seq_hdf5_int_metis
1731:       requires: parmetis
1732:       args: -interpolate after_create
1733:       args: -petscpartitioner_type parmetis
1734:     test: # seq load, simple partitioner, par interpolation
1735:       suffix: 7_seq_hdf5_simple_int
1736:       nsize: {{2 4 5}}
1737:       args: -interpolate after_distribute
1738:     test: # seq load, metis partitioner, par interpolation
1739:       nsize: {{2 4 5}}
1740:       suffix: 7_seq_hdf5_metis_int
1741:       requires: parmetis
1742:       args: -interpolate after_distribute
1743:       args: -petscpartitioner_type parmetis

1745:   testset: # 7 HDF5 PARALLEL LOAD
1746:     requires: hdf5 !complex
1747:     nsize: {{2 4 5}}
1748:     args: -dm_plex_check_all
1749:     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1750:     test: # par load
1751:       suffix: 7_par_hdf5
1752:       args: -interpolate none
1753:     test: # par load, par interpolation
1754:       suffix: 7_par_hdf5_int
1755:       args: -interpolate after_create
1756:     test: # par load, parmetis repartitioner
1757:       TODO: Parallel partitioning of uninterpolated meshes not supported
1758:       suffix: 7_par_hdf5_parmetis
1759:       requires: parmetis
1760:       args: -distribute -petscpartitioner_type parmetis
1761:       args: -interpolate none
1762:     test: # par load, par interpolation, parmetis repartitioner
1763:       suffix: 7_par_hdf5_int_parmetis
1764:       requires: parmetis
1765:       args: -distribute -petscpartitioner_type parmetis
1766:       args: -interpolate after_create
1767:     test: # par load, parmetis partitioner, par interpolation
1768:       TODO: Parallel partitioning of uninterpolated meshes not supported
1769:       suffix: 7_par_hdf5_parmetis_int
1770:       requires: parmetis
1771:       args: -distribute -petscpartitioner_type parmetis
1772:       args: -interpolate after_distribute

1774:     test:
1775:       suffix: 7_hdf5_hierarch
1776:       requires: hdf5 ptscotch !complex
1777:       nsize: {{2 3 4}separate output}
1778:       args: -distribute
1779:       args: -interpolate after_create
1780:       args: -petscpartitioner_type matpartitioning -petscpartitioner_view ::ascii_info
1781:       args: -mat_partitioning_type hierarch -mat_partitioning_hierarchical_nfineparts 2
1782:       args: -mat_partitioning_hierarchical_coarseparttype ptscotch -mat_partitioning_hierarchical_fineparttype ptscotch

1784:   test:
1785:     suffix: 8
1786:     requires: hdf5 !complex
1787:     nsize: 4
1788:     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1789:     args: -distribute 0 -interpolate after_create
1790:     args: -view_vertices_from_coords 0.,1.,0.,-0.5,1.,0.,0.583,-0.644,0.,-2.,-2.,-2. -view_vertices_from_coords_tol 1e-3
1791:     args: -dm_plex_check_all
1792:     args: -custom_view

1794:   testset: # 9 HDF5 SEQUENTIAL LOAD
1795:     requires: hdf5 !complex datafilespath
1796:     args: -dm_plex_check_all
1797:     args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
1798:     args: -dm_plex_hdf5_force_sequential
1799:     args: -distribute
1800:     test: # seq load, simple partitioner
1801:       suffix: 9_seq_hdf5_simple
1802:       nsize: {{1 2 4 5}}
1803:       args: -interpolate none
1804:     test: # seq load, seq interpolation, simple partitioner
1805:       suffix: 9_seq_hdf5_int_simple
1806:       nsize: {{1 2 4 5}}
1807:       args: -interpolate after_create
1808:     test: # seq load, seq interpolation, metis partitioner
1809:       nsize: {{2 4 5}}
1810:       suffix: 9_seq_hdf5_int_metis
1811:       requires: parmetis
1812:       args: -interpolate after_create
1813:       args: -petscpartitioner_type parmetis
1814:     test: # seq load, simple partitioner, par interpolation
1815:       suffix: 9_seq_hdf5_simple_int
1816:       nsize: {{2 4 5}}
1817:       args: -interpolate after_distribute
1818:     test: # seq load, simple partitioner, par interpolation
1819:       # This is like 9_seq_hdf5_simple_int but testing error output of DMPlexCheckPointSFHeavy().
1820:       # Once 9_seq_hdf5_simple_int gets fixed, this one gets broken.
1821:       # We can then provide an intentionally broken mesh instead.
1822:       TODO: This test is broken because PointSF is fixed.
1823:       suffix: 9_seq_hdf5_simple_int_err
1824:       nsize: 4
1825:       args: -interpolate after_distribute
1826:       filter: sed -e "/PETSC ERROR/,$$d"
1827:     test: # seq load, metis partitioner, par interpolation
1828:       nsize: {{2 4 5}}
1829:       suffix: 9_seq_hdf5_metis_int
1830:       requires: parmetis
1831:       args: -interpolate after_distribute
1832:       args: -petscpartitioner_type parmetis

1834:   testset: # 9 HDF5 PARALLEL LOAD
1835:     requires: hdf5 !complex datafilespath
1836:     nsize: {{2 4 5}}
1837:     args: -dm_plex_check_all
1838:     args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
1839:     test: # par load
1840:       suffix: 9_par_hdf5
1841:       args: -interpolate none
1842:     test: # par load, par interpolation
1843:       suffix: 9_par_hdf5_int
1844:       args: -interpolate after_create
1845:     test: # par load, parmetis repartitioner
1846:       TODO: Parallel partitioning of uninterpolated meshes not supported
1847:       suffix: 9_par_hdf5_parmetis
1848:       requires: parmetis
1849:       args: -distribute -petscpartitioner_type parmetis
1850:       args: -interpolate none
1851:     test: # par load, par interpolation, parmetis repartitioner
1852:       suffix: 9_par_hdf5_int_parmetis
1853:       requires: parmetis
1854:       args: -distribute -petscpartitioner_type parmetis
1855:       args: -interpolate after_create
1856:     test: # par load, parmetis partitioner, par interpolation
1857:       TODO: Parallel partitioning of uninterpolated meshes not supported
1858:       suffix: 9_par_hdf5_parmetis_int
1859:       requires: parmetis
1860:       args: -distribute -petscpartitioner_type parmetis
1861:       args: -interpolate after_distribute

1863:   testset: # 10 HDF5 PARALLEL LOAD
1864:     requires: hdf5 !complex datafilespath
1865:     nsize: {{2 4 7}}
1866:     args: -dm_plex_check_all
1867:     args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined2.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /topo -dm_plex_hdf5_geometry_path /geom
1868:     test: # par load, par interpolation
1869:       suffix: 10_par_hdf5_int
1870:       args: -interpolate after_create
1871: TEST*/