Actual source code: ex56.c
#include <petscdmplex.h>
#include <petscviewerhdf5.h>
#include <petscsf.h>

static const char help[] = "Test DMLabel I/O with PETSc native HDF5 mesh format\n\n";
static const char EX[] = "ex56.c";
typedef struct {
  MPI_Comm    comm;
  const char *meshname;                   /* Mesh name */
  PetscInt    num_labels;                 /* Asserted number of labels in loaded mesh */
  PetscBool   compare;                    /* Compare the meshes using DMPlexEqual() and DMCompareLabels() */
  PetscBool   compare_labels;             /* Compare labels in the meshes using DMCompareLabels() */
  PetscBool   compare_boundary;           /* Check label I/O via boundary vertex coordinates */
  PetscBool   compare_pre_post;           /* Compare labels loaded before distribution with those loaded after distribution */
  char        outfile[PETSC_MAX_PATH_LEN]; /* Output file */
  PetscBool   use_low_level_functions;    /* Use low-level functions for viewing and loading */
  //TODO This is meant as a temporary option; it can be removed once full parallel loading is in place
  PetscBool   distribute_after_topo_load; /* Distribute topology right after DMPlexTopologyLoad(), if use_low_level_functions=true */
  PetscInt    verbose;
} AppCtx;

static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscFunctionBeginUser;
  options->comm                       = comm;
  options->num_labels                 = -1;
  options->compare                    = PETSC_FALSE;
  options->compare_labels             = PETSC_FALSE;
  options->compare_boundary           = PETSC_FALSE;
  options->compare_pre_post           = PETSC_FALSE;
  options->outfile[0]                 = '\0';
  options->use_low_level_functions    = PETSC_FALSE;
  options->distribute_after_topo_load = PETSC_FALSE;
  options->verbose                    = 0;

  PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");
  PetscCall(PetscOptionsInt("-num_labels", "Asserted number of labels in meshfile; don't count depth and celltype; -1 to deactivate", EX, options->num_labels, &options->num_labels, NULL));
  PetscCall(PetscOptionsBool("-compare", "Compare the meshes using DMPlexEqual() and DMCompareLabels()", EX, options->compare, &options->compare, NULL));
  PetscCall(PetscOptionsBool("-compare_labels", "Compare labels in the meshes using DMCompareLabels()", EX, options->compare_labels, &options->compare_labels, NULL));
  PetscCall(PetscOptionsBool("-compare_boundary", "Check label I/O via boundary vertex coordinates", EX, options->compare_boundary, &options->compare_boundary, NULL));
  PetscCall(PetscOptionsBool("-compare_pre_post", "Compare labels loaded before distribution with those loaded after distribution", EX, options->compare_pre_post, &options->compare_pre_post, NULL));
  PetscCall(PetscOptionsString("-outfile", "Output mesh file", EX, options->outfile, options->outfile, sizeof(options->outfile), NULL));
  PetscCall(PetscOptionsBool("-use_low_level_functions", "Use low-level functions for viewing and loading", EX, options->use_low_level_functions, &options->use_low_level_functions, NULL));
  PetscCall(PetscOptionsBool("-distribute_after_topo_load", "Distribute topology right after DMPlexTopologyLoad(), if use_low_level_functions=true", EX, options->distribute_after_topo_load, &options->distribute_after_topo_load, NULL));
  PetscCall(PetscOptionsInt("-verbose", "Verbosity level", EX, options->verbose, &options->verbose, NULL));
  PetscOptionsEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode CreateMesh(AppCtx *options, DM *newdm)
{
  DM dm;

  PetscFunctionBeginUser;
  PetscCall(DMCreate(options->comm, &dm));
  PetscCall(DMSetType(dm, DMPLEX));
  PetscCall(DMSetFromOptions(dm));
  PetscCall(PetscObjectGetName((PetscObject)dm, &options->meshname));
  PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));
  *newdm = dm;
  PetscFunctionReturn(PETSC_SUCCESS);
}
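
/* With -use_low_level_functions, the mesh is written as separate HDF5 sections for
   topology, coordinates, and labels via DMPlexTopologyView(), DMPlexCoordinatesView(),
   and DMPlexLabelsView(); otherwise a single DMView() call writes the equivalent data. */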

static PetscErrorCode SaveMesh(AppCtx *options, DM dm)
{
  PetscViewer v;

  PetscFunctionBeginUser;
  PetscCall(PetscViewerHDF5Open(PetscObjectComm((PetscObject)dm), options->outfile, FILE_MODE_WRITE, &v));
  if (options->use_low_level_functions) {
    PetscCall(DMPlexTopologyView(dm, v));
    PetscCall(DMPlexCoordinatesView(dm, v));
    PetscCall(DMPlexLabelsView(dm, v));
  } else {
    PetscCall(DMView(dm, v));
  }
  PetscCall(PetscViewerDestroy(&v));
  PetscFunctionReturn(PETSC_SUCCESS);
}

typedef enum {
  NONE      = 0,
  PRE_DIST  = 1,
  POST_DIST = 2
} AuxObjLoadMode;
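
/* PRE_DIST loads coordinates and labels right after the topology, i.e. before any
   redistribution; POST_DIST loads them only after DMPlexDistribute(). In the latter
   case the SF returned by DMPlexTopologyLoad(), which maps the file's point numbering
   onto the in-memory points, is composed with the migration SF so the auxiliary data
   still lands on the right points of the distributed mesh. */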

static PetscErrorCode LoadMeshLowLevel(AppCtx *options, PetscViewer v, PetscBool explicitDistribute, AuxObjLoadMode mode, DM *newdm)
{
  DM      dm;
  PetscSF sfXC;

  PetscFunctionBeginUser;
  PetscCall(DMCreate(options->comm, &dm));
  PetscCall(DMSetType(dm, DMPLEX));
  PetscCall(PetscObjectSetName((PetscObject)dm, options->meshname));
  PetscCall(DMPlexTopologyLoad(dm, v, &sfXC));
  if (mode == PRE_DIST) {
    PetscCall(DMPlexCoordinatesLoad(dm, v, sfXC));
    PetscCall(DMPlexLabelsLoad(dm, v, sfXC));
  }
  if (explicitDistribute) {
    DM      dmdist;
    PetscSF sfXB = sfXC, sfBC;

    PetscCall(DMPlexDistribute(dm, 0, &sfBC, &dmdist));
    if (dmdist) {
      const char *name;

      PetscCall(PetscObjectGetName((PetscObject)dm, &name));
      PetscCall(PetscObjectSetName((PetscObject)dmdist, name));
      PetscCall(PetscSFCompose(sfXB, sfBC, &sfXC));
      PetscCall(PetscSFDestroy(&sfXB));
      PetscCall(PetscSFDestroy(&sfBC));
      PetscCall(DMDestroy(&dm));
      dm = dmdist;
    }
  }
  if (mode == POST_DIST) {
    PetscCall(DMPlexLabelsLoad(dm, v, sfXC));
    PetscCall(DMPlexCoordinatesLoad(dm, v, sfXC));
  }
  PetscCall(PetscSFDestroy(&sfXC));
  *newdm = dm;
  PetscFunctionReturn(PETSC_SUCCESS);
}
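
/* With -compare_pre_post, the mesh is loaded twice: once with labels loaded before
   distribution (PRE_DIST) and once after (POST_DIST). DMCompareLabels() then checks
   that both orderings yield identical labels. */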

static PetscErrorCode LoadMesh(AppCtx *options, DM *dmnew)
{
  DM          dm;
  PetscViewer v;

  PetscFunctionBeginUser;
  PetscCall(PetscViewerHDF5Open(options->comm, options->outfile, FILE_MODE_READ, &v));
  if (options->use_low_level_functions) {
    if (options->compare_pre_post) {
      DM dm0;

      PetscCall(LoadMeshLowLevel(options, v, PETSC_TRUE, PRE_DIST, &dm0));
      PetscCall(LoadMeshLowLevel(options, v, PETSC_TRUE, POST_DIST, &dm));
      PetscCall(DMCompareLabels(dm0, dm, NULL, NULL));
      PetscCall(DMDestroy(&dm0));
    } else {
      PetscCall(LoadMeshLowLevel(options, v, options->distribute_after_topo_load, POST_DIST, &dm));
    }
  } else {
    PetscCall(DMCreate(options->comm, &dm));
    PetscCall(DMSetType(dm, DMPLEX));
    PetscCall(PetscObjectSetName((PetscObject)dm, options->meshname));
    PetscCall(DMLoad(dm, v));
  }
  PetscCall(PetscViewerDestroy(&v));
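
  /* Query the celltype label so it is certainly created at this point; the label itself
     is not used further here (this comment is an assumption about the intent of the call). */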
  DMLabel celltypes;
  PetscCall(DMPlexGetCellTypeLabel(dm, &celltypes));

  PetscCall(DMSetOptionsPrefix(dm, "load_"));
  PetscCall(DMSetFromOptions(dm));
  PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));
  *dmnew = dm;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode CompareMeshes(AppCtx *options, DM dm0, DM dm1)
{
  PetscBool flg;

  PetscFunctionBeginUser;
  if (options->compare) {
    PetscCall(DMPlexEqual(dm0, dm1, &flg));
    PetscCheck(flg, options->comm, PETSC_ERR_ARG_INCOMP, "DMs are not equal");
    PetscCall(PetscPrintf(options->comm, "DMs equal\n"));
  }
  if (options->compare_labels) {
    PetscCall(DMCompareLabels(dm0, dm1, NULL, NULL));
    PetscCall(PetscPrintf(options->comm, "DMLabels equal\n"));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
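
/* Mark the whole boundary with the given value: DMPlexMarkBoundaryFaces() labels the
   boundary faces, and DMPlexLabelComplete() extends the label to the closure of those
   faces (their edges and vertices). With verticesOnly, every labeled point of depth > 0
   is cleared again, leaving only the boundary vertices. */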

static PetscErrorCode MarkBoundary(DM dm, const char name[], PetscInt value, PetscBool verticesOnly, DMLabel *label)
{
  DMLabel l;

  PetscFunctionBeginUser;
  PetscCall(DMLabelCreate(PetscObjectComm((PetscObject)dm), name, &l));
  PetscCall(DMAddLabel(dm, l));
  PetscCall(DMPlexMarkBoundaryFaces(dm, value, l));
  PetscCall(DMPlexLabelComplete(dm, l));
  if (verticesOnly) {
    IS              points;
    const PetscInt *idx;
    PetscInt        i, n = 0;

    PetscCall(DMLabelGetStratumIS(l, value, &points));
    if (points) {
      PetscCall(ISGetLocalSize(points, &n));
      PetscCall(ISGetIndices(points, &idx));
    }
    for (i = 0; i < n; i++) {
      const PetscInt p = idx[i];
      PetscInt       d;

      PetscCall(DMPlexGetPointDepth(dm, p, &d));
      if (d != 0) PetscCall(DMLabelClearValue(l, p, value));
    }
    if (points) PetscCall(ISRestoreIndices(points, &idx));
    PetscCall(ISDestroy(&points));
  }
  if (label) *label = l;
  else PetscCall(DMLabelDestroy(&l));
  PetscFunctionReturn(PETSC_SUCCESS);
}
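
/* Gather the coordinates of the given vertices onto every rank. The local coordinate
   tuples are first copied into a parallel vector, then VecScatterCreateToAll() replicates
   the whole vector on each process, so the result is independent of the mesh distribution. */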

static PetscErrorCode VertexCoordinatesToAll(DM dm, IS vertices, Vec *allCoords)
{
  Vec        coords, allCoords_;
  VecScatter sc;
  MPI_Comm   comm;

  PetscFunctionBeginUser;
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCall(DMGetCoordinatesLocalSetUp(dm));
  if (vertices) {
    PetscCall(DMGetCoordinatesLocalTuple(dm, vertices, NULL, &coords));
  } else {
    PetscCall(VecCreateFromOptions(PETSC_COMM_SELF, NULL, 1, 0, 0, &coords));
  }
  {
    PetscInt n;
    Vec      mpivec;

    PetscCall(VecGetLocalSize(coords, &n));
    PetscCall(VecCreateFromOptions(comm, NULL, 1, n, PETSC_DECIDE, &mpivec));
    PetscCall(VecCopy(coords, mpivec));
    PetscCall(VecDestroy(&coords));
    coords = mpivec;
  }

  PetscCall(VecScatterCreateToAll(coords, &sc, &allCoords_));
  PetscCall(VecScatterBegin(sc, coords, allCoords_, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(sc, coords, allCoords_, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterDestroy(&sc));
  PetscCall(VecDestroy(&coords));
  *allCoords = allCoords_;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMLabelGetCoordinateRepresentation(DM dm, DMLabel label, PetscInt value, Vec *allCoords)
{
  IS vertices;

  PetscFunctionBeginUser;
  PetscCall(DMLabelGetStratumIS(label, value, &vertices));
  PetscCall(VertexCoordinatesToAll(dm, vertices, allCoords));
  PetscCall(ISDestroy(&vertices));
  PetscFunctionReturn(PETSC_SUCCESS);
}
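
/* Check a loaded label against its distribution-independent coordinate representation.
   DMPlexFindVertices() translates the gathered coordinates back into local point numbers
   (negative entries for vertices not found on this rank, hence the p < 0 skip below);
   every vertex found locally must carry the expected label value. */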

static PetscErrorCode DMLabelCompareWithCoordinateRepresentation(DM dm, DMLabel label, PetscInt value, Vec allCoords, PetscInt verbose)
{
  const char     *labelName;
  IS              pointsIS;
  const PetscInt *points;
  PetscInt        i, n;
  PetscBool       fail = PETSC_FALSE;
  MPI_Comm        comm;
  PetscMPIInt     rank;

  PetscFunctionBeginUser;
  PetscCheck(label, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Label not loaded");
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCall(PetscObjectGetName((PetscObject)label, &labelName));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  {
    PetscInt pStart, pEnd;

    PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
    PetscCall(DMLabelCreateIndex(label, pStart, pEnd));
  }
  PetscCall(DMPlexFindVertices(dm, allCoords, 0.0, &pointsIS));
  PetscCall(ISGetIndices(pointsIS, &points));
  PetscCall(ISGetLocalSize(pointsIS, &n));
  if (verbose > 1) PetscCall(DMLabelView(label, PETSC_VIEWER_STDOUT_(comm)));
  for (i = 0; i < n; i++) {
    const PetscInt p = points[i];
    PetscBool      has;
    PetscInt       v;

    if (p < 0) continue;
    PetscCall(DMLabelHasPoint(label, p, &has));
    if (!has) {
      if (verbose) PetscCall(PetscSynchronizedFPrintf(comm, PETSC_STDERR, "[%d] Label \"%s\" does not have point %" PetscInt_FMT "\n", rank, labelName, p));
      fail = PETSC_TRUE;
      continue;
    }
    PetscCall(DMLabelGetValue(label, p, &v));
    if (v != value) {
      if (verbose) PetscCall(PetscSynchronizedFPrintf(comm, PETSC_STDERR, "[%d] Point %" PetscInt_FMT " has bad value %" PetscInt_FMT " in label \"%s\"\n", rank, p, v, labelName));
      fail = PETSC_TRUE;
      continue;
    }
    if (verbose > 1) PetscCall(PetscSynchronizedPrintf(comm, "[%d] OK point %" PetscInt_FMT "\n", rank, p));
  }
  PetscCall(PetscSynchronizedFlush(comm, PETSC_STDOUT));
  PetscCall(PetscSynchronizedFlush(comm, PETSC_STDERR));
  PetscCall(ISRestoreIndices(pointsIS, &points));
  PetscCall(ISDestroy(&pointsIS));
  PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &fail, 1, MPIU_BOOL, MPI_LOR, comm));
  PetscCheck(!fail, comm, PETSC_ERR_PLIB, "Label \"%s\" was not loaded correctly%s", labelName, verbose ? " - see details above" : "");
  PetscFunctionReturn(PETSC_SUCCESS);
}
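
/* Compare the actual number of labels with the asserted count. DMGetNumLabels() counts
   all labels, including the implicitly created "depth" and "celltype" labels, which is
   why main() increments the expected count for them after loading. */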

static PetscErrorCode CheckNumLabels(DM dm, AppCtx *ctx)
{
  PetscInt    actualNum;
  PetscBool   fail = PETSC_FALSE;
  MPI_Comm    comm;
  PetscMPIInt rank;

  PetscFunctionBeginUser;
  if (ctx->num_labels < 0) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(DMGetNumLabels(dm, &actualNum));
  if (ctx->num_labels != actualNum) {
    fail = PETSC_TRUE;
    if (ctx->verbose) {
      PetscInt i;

      PetscCall(PetscSynchronizedFPrintf(comm, PETSC_STDERR, "[%d] Asserted number of labels: %" PetscInt_FMT ", actual: %" PetscInt_FMT "\n", rank, ctx->num_labels, actualNum));
      for (i = 0; i < actualNum; i++) {
        DMLabel     label;
        const char *name;

        PetscCall(DMGetLabelByNum(dm, i, &label));
        PetscCall(PetscObjectGetName((PetscObject)label, &name));
        PetscCall(PetscSynchronizedFPrintf(comm, PETSC_STDERR, "[%d] Label %" PetscInt_FMT " \"%s\"\n", rank, i, name));
      }
      PetscCall(PetscSynchronizedFlush(comm, PETSC_STDERR));
    }
  }
  PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &fail, 1, MPIU_BOOL, MPI_LOR, comm));
  PetscCheck(!fail, comm, PETSC_ERR_PLIB, "Wrong number of labels%s", ctx->verbose ? " - see details above" : "");
  PetscFunctionReturn(PETSC_SUCCESS);
}

static inline PetscErrorCode IncrementNumLabels(AppCtx *ctx)
{
  PetscFunctionBeginUser;
  if (ctx->num_labels >= 0) ctx->num_labels++;
  PetscFunctionReturn(PETSC_SUCCESS);
}
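
/* Overall flow: create a mesh, mark its boundary, optionally record the boundary
   vertices by their coordinates, save the mesh to HDF5, load it back, and verify the
   label count, mesh/label equality, and the boundary-vertex label. */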

int main(int argc, char **argv)
{
  const char     BOUNDARY_NAME[]          = "Boundary";
  const char     BOUNDARY_VERTICES_NAME[] = "BoundaryVertices";
  const PetscInt BOUNDARY_VALUE           = 12345;
  const PetscInt BOUNDARY_VERTICES_VALUE  = 6789;
  DM             dm, dmnew;
  AppCtx         user;
  Vec            allCoords = NULL;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
  PetscCall(CreateMesh(&user, &dm));
  PetscCall(MarkBoundary(dm, BOUNDARY_NAME, BOUNDARY_VALUE, PETSC_FALSE, NULL));
  PetscCall(IncrementNumLabels(&user));
  if (user.compare_boundary) {
    DMLabel label;

    PetscCall(MarkBoundary(dm, BOUNDARY_VERTICES_NAME, BOUNDARY_VERTICES_VALUE, PETSC_TRUE, &label));
    PetscCall(IncrementNumLabels(&user));
    PetscCall(DMLabelGetCoordinateRepresentation(dm, label, BOUNDARY_VERTICES_VALUE, &allCoords));
    PetscCall(DMLabelDestroy(&label));
  }
  PetscCall(SaveMesh(&user, dm));

  PetscCall(LoadMesh(&user, &dmnew));
  PetscCall(IncrementNumLabels(&user)); /* account for depth label */
  PetscCall(IncrementNumLabels(&user)); /* account for celltype label */
  PetscCall(CheckNumLabels(dmnew, &user));
  PetscCall(CompareMeshes(&user, dm, dmnew));
  if (user.compare_boundary) {
    DMLabel label;

    PetscCall(DMGetLabel(dmnew, BOUNDARY_VERTICES_NAME, &label));
    PetscCall(DMLabelCompareWithCoordinateRepresentation(dmnew, label, BOUNDARY_VERTICES_VALUE, allCoords, user.verbose));
  }
  PetscCall(DMDestroy(&dm));
  PetscCall(DMDestroy(&dmnew));
  PetscCall(VecDestroy(&allCoords));
  PetscCall(PetscFinalize());
  return 0;
}

//TODO We can enable -compare once the new parallel topology format is in place
/*TEST
  build:
    requires: hdf5

  # load old format, save in new format, reload, distribute
  testset:
    suffix: 1
    requires: !complex datafilespath
    args: -dm_plex_name plex
    args: -dm_plex_check_all -dm_plex_view_hdf5_storage_version 2.0.0
    args: -dm_plex_interpolate -petscpartitioner_type simple
    args: -load_dm_plex_check_all
    args: -use_low_level_functions {{0 1}} -compare_boundary
    args: -num_labels 1
    args: -outfile ex56_1.h5
    nsize: {{1 3}}
    test:
      suffix: a
      args: -dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/annulus-20.h5
    test:
      suffix: b
      TODO: broken
      args: -dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/barycentricallyrefinedcube.h5
    test:
      suffix: c
      args: -dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/blockcylinder-50.h5
    test:
      suffix: d
      args: -dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/cube-hexahedra-refined.h5 -num_labels 0
    test:
      suffix: e
      args: -dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/hybrid_hexwedge.h5
    test:
      suffix: f
      args: -dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/square.h5

  # load old format, save in new format, reload topology, distribute, load geometry and labels
  testset:
    suffix: 2
    requires: !complex datafilespath
    args: -dm_plex_name plex
    args: -dm_plex_check_all -dm_plex_view_hdf5_storage_version 2.0.0
    args: -dm_plex_interpolate -petscpartitioner_type simple
    args: -load_dm_plex_check_all
    args: -use_low_level_functions -load_dm_distribute 0 -distribute_after_topo_load -compare_boundary
    args: -num_labels 1
    args: -outfile ex56_2.h5
    nsize: 3
    test:
      suffix: a
      args: -dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/annulus-20.h5
    test:
      suffix: b
      TODO: broken
      args: -dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/barycentricallyrefinedcube.h5
    test:
      suffix: c
      args: -dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/blockcylinder-50.h5
    test:
      suffix: d
      args: -dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/cube-hexahedra-refined.h5 -num_labels 0
    test:
      suffix: e
      args: -dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/hybrid_hexwedge.h5
    test:
      suffix: f
      args: -dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/square.h5

  # load old format, save in new format, reload labels both before and after distribution, compare them
  testset:
    suffix: 3
    requires: !complex datafilespath
    args: -dm_plex_name plex
    args: -dm_plex_view_hdf5_storage_version 2.0.0
    args: -dm_plex_interpolate -load_dm_distribute 0 -petscpartitioner_type simple
    args: -use_low_level_functions -compare_pre_post
    args: -num_labels 1
    args: -outfile ex56_3.h5
    nsize: 3
    test:
      suffix: a
      args: -dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/annulus-20.h5
    test:
      suffix: b
      TODO: broken
      args: -dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/barycentricallyrefinedcube.h5
    test:
      suffix: c
      args: -dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/blockcylinder-50.h5
    test:
      suffix: d
      args: -dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/cube-hexahedra-refined.h5 -num_labels 0
    test:
      suffix: e
      args: -dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/hybrid_hexwedge.h5
    test:
      suffix: f
      args: -dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/square.h5
TEST*/