Actual source code: ex55.c
static char help[] = "Load and save the mesh and fields to HDF5 and ExodusII\n\n";

#include <petsc/private/dmpleximpl.h>
#include <petscviewerhdf5.h>
#include <petscsf.h>
typedef struct {
  PetscBool         compare;                    /* Compare the meshes using DMPlexEqual() */
  PetscBool         compare_labels;             /* Compare labels in the meshes using DMCompareLabels() */
  PetscBool         distribute;                 /* Distribute the mesh */
  PetscBool         field;                      /* Lay out a field over the mesh */
  PetscBool         reorder;                    /* Reorder the points in the section */
  char              ofname[PETSC_MAX_PATH_LEN]; /* Output mesh filename */
  PetscViewerFormat format;                     /* Format to write and read */
  PetscBool         second_write_read;          /* Write and read for the 2nd time */
  PetscBool         use_low_level_functions;    /* Use low level functions for viewing and loading */
} AppCtx;
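/* Set the default option values and register the matching command-line options
   (-compare, -compare_labels, -distribute, -field, -reorder, -ofilename, -format,
   -second_write_read, -use_low_level_functions) that fill in AppCtx. */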
static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscFunctionBeginUser;
  options->compare                 = PETSC_FALSE;
  options->compare_labels          = PETSC_FALSE;
  options->distribute              = PETSC_TRUE;
  options->field                   = PETSC_FALSE;
  options->reorder                 = PETSC_FALSE;
  options->format                  = PETSC_VIEWER_DEFAULT;
  options->second_write_read       = PETSC_FALSE;
  options->use_low_level_functions = PETSC_FALSE;
  PetscCall(PetscStrncpy(options->ofname, "ex55.h5", sizeof(options->ofname)));

  PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");
  PetscCall(PetscOptionsBool("-compare", "Compare the meshes using DMPlexEqual()", "ex55.c", options->compare, &options->compare, NULL));
  PetscCall(PetscOptionsBool("-compare_labels", "Compare labels in the meshes using DMCompareLabels()", "ex55.c", options->compare_labels, &options->compare_labels, NULL));
  PetscCall(PetscOptionsBool("-distribute", "Distribute the mesh", "ex55.c", options->distribute, &options->distribute, NULL));
  PetscCall(PetscOptionsBool("-field", "Lay out a field over the mesh", "ex55.c", options->field, &options->field, NULL));
  PetscCall(PetscOptionsBool("-reorder", "Reorder the points in the section", "ex55.c", options->reorder, &options->reorder, NULL));
  PetscCall(PetscOptionsString("-ofilename", "The output mesh file", "ex55.c", options->ofname, options->ofname, sizeof(options->ofname), NULL));
  PetscCall(PetscOptionsEnum("-format", "Format to write and read", "ex55.c", PetscViewerFormats, (PetscEnum)options->format, (PetscEnum *)&options->format, NULL));
  PetscCall(PetscOptionsBool("-second_write_read", "Write and read for the 2nd time", "ex55.c", options->second_write_read, &options->second_write_read, NULL));
  PetscCall(PetscOptionsBool("-use_low_level_functions", "Use low level functions for viewing and loading", "ex55.c", options->use_low_level_functions, &options->use_low_level_functions, NULL));
  PetscOptionsEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
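/* Create the original DMPlex mesh entirely from options.  The "orig_" prefix namespaces its
   creation options, e.g. -orig_dm_plex_filename, -orig_dm_plex_interpolate, -orig_dm_distribute,
   as used in the TEST block at the end of this file. */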
static PetscErrorCode CreateMesh(MPI_Comm comm, DM *dm)
{
  PetscFunctionBeginUser;
  PetscCall(DMCreate(comm, dm));
  PetscCall(DMSetType(*dm, DMPLEX));
  PetscCall(DMSetOptionsPrefix(*dm, "orig_"));
  PetscCall(DMSetFromOptions(*dm));
  PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
  PetscFunctionReturn(PETSC_SUCCESS);
}
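/* Attach a single scalar P1 Lagrange finite-element field, using the cell type of the first
   local cell.  The same discretization is created on the reloaded mesh so that the saved and
   loaded global vectors can be compared. */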
static PetscErrorCode CreateDiscretization(DM dm)
{
  PetscFE        fe;
  DMPolytopeType ct;
  PetscInt       dim, cStart;

  PetscFunctionBeginUser;
  PetscCall(DMGetDimension(dm, &dim));
  PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, NULL));
  PetscCall(DMPlexGetCellType(dm, cStart, &ct));
  PetscCall(PetscFECreateLagrangeByCell(PETSC_COMM_SELF, dim, 1, ct, 1, PETSC_DETERMINE, &fe));
  PetscCall(DMSetField(dm, 0, NULL, (PetscObject)fe));
  PetscCall(PetscFEDestroy(&fe));
  PetscCall(DMCreateDS(dm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
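/* On runs with more than one rank, verify that DMPlexIsDistributed() reports the expected state;
   on a single rank the check is skipped. */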
static PetscErrorCode CheckDistributed(DM dm, PetscBool expectedDistributed)
{
  PetscMPIInt size;
  PetscBool   distributed;
  const char  YES[] = "DISTRIBUTED";
  const char  NO[]  = "NOT DISTRIBUTED";

  PetscFunctionBeginUser;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dm), &size));
  if (size < 2) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(DMPlexIsDistributed(dm, &distributed));
  PetscCheck(distributed == expectedDistributed, PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Expected DM to be %s but it is actually %s", expectedDistributed ? YES : NO, distributed ? YES : NO);
  PetscFunctionReturn(PETSC_SUCCESS);
}
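/* Verify (collectively) that the mesh is fully interpolated, i.e. that all intermediate entities
   such as faces and edges are present, exactly when that is expected. */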
static PetscErrorCode CheckInterpolated(DM dm, PetscBool expectedInterpolated)
{
  DMPlexInterpolatedFlag iflg;
  PetscBool              interpolated;
  const char             YES[] = "INTERPOLATED";
  const char             NO[]  = "NOT INTERPOLATED";

  PetscFunctionBeginUser;
  PetscCall(DMPlexIsInterpolatedCollective(dm, &iflg));
  interpolated = (PetscBool)(iflg == DMPLEX_INTERPOLATED_FULL);
  PetscCheck(interpolated == expectedInterpolated, PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Expected DM to be %s but it is actually %s", expectedInterpolated ? YES : NO, interpolated ? YES : NO);
  PetscFunctionReturn(PETSC_SUCCESS);
}
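/* The state expected after loading depends on the viewer format: HDF5 XDMF/VIZ loads are expected
   to produce a distributed but uninterpolated mesh, while for the native PETSc HDF5 formats the
   expected interpolation is left unchanged and the mesh is expected to load distributed only for
   storage version 3.0.0 and later. */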
static PetscErrorCode CheckDistributedInterpolated(DM dm, PetscBool expectedInterpolated, PetscViewer v, AppCtx *user)
{
  PetscViewerFormat format;
  PetscBool         distributed, interpolated = expectedInterpolated;

  PetscFunctionBeginUser;
  PetscCall(PetscViewerGetFormat(v, &format));
  switch (format) {
  case PETSC_VIEWER_HDF5_XDMF:
  case PETSC_VIEWER_HDF5_VIZ: {
    distributed  = PETSC_TRUE;
    interpolated = PETSC_FALSE;
  } break;
  case PETSC_VIEWER_HDF5_PETSC:
  case PETSC_VIEWER_DEFAULT:
  case PETSC_VIEWER_NATIVE: {
    DMPlexStorageVersion version;

    PetscCall(PetscViewerHDF5GetDMPlexStorageVersionReading(v, &version));
    distributed = (PetscBool)(version->major >= 3);
  } break;
  default:
    distributed = PETSC_FALSE;
  }
  PetscCall(CheckDistributed(dm, distributed));
  PetscCall(CheckInterpolated(dm, interpolated));
  PetscFunctionReturn(PETSC_SUCCESS);
}
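/* Write dm (and, if given, vec) into an HDF5 file, then switch the same viewer to read mode and
   load the data back into a fresh DM (and Vec).  With -use_low_level_functions the round trip goes
   through DMPlexTopologyView()/Load(), DMPlexCoordinatesView()/Load(), and DMPlexLabelsView()/Load()
   instead of DMView()/DMLoad().  Finally, the distributed/interpolated state of the loaded mesh is
   checked against what the chosen format should produce. */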
static PetscErrorCode DMPlexWriteAndReadHDF5(DM dm, Vec vec, const char filename[], const char prefix[], PetscBool expectedInterpolated, AppCtx *user, DM *dm_new, Vec *v_new)
{
  DM          dmnew;
  Vec         vnew = NULL;
  const char  savedName[]  = "Mesh";
  const char  loadedName[] = "Mesh_new";
  PetscViewer v;

  PetscFunctionBeginUser;
  PetscCall(PetscViewerHDF5Open(PetscObjectComm((PetscObject)dm), filename, FILE_MODE_WRITE, &v));
  PetscCall(PetscViewerPushFormat(v, user->format));
  PetscCall(PetscObjectSetName((PetscObject)dm, savedName));
  if (user->use_low_level_functions) {
    PetscCall(DMPlexTopologyView(dm, v));
    PetscCall(DMPlexCoordinatesView(dm, v));
    PetscCall(DMPlexLabelsView(dm, v));
  } else {
    PetscCall(DMView(dm, v));
    if (vec) PetscCall(VecView(vec, v));
  }
  PetscCall(PetscViewerFileSetMode(v, FILE_MODE_READ));
  PetscCall(DMCreate(PETSC_COMM_WORLD, &dmnew));
  PetscCall(DMSetType(dmnew, DMPLEX));
  PetscCall(DMPlexDistributeSetDefault(dmnew, PETSC_FALSE));
  PetscCall(PetscObjectSetName((PetscObject)dmnew, savedName));
  PetscCall(DMSetOptionsPrefix(dmnew, prefix));
  if (user->use_low_level_functions) {
    PetscSF sfXC;

    PetscCall(DMPlexTopologyLoad(dmnew, v, &sfXC));
    PetscCall(DMPlexCoordinatesLoad(dmnew, v, sfXC));
    PetscCall(DMPlexLabelsLoad(dmnew, v, sfXC));
    PetscCall(PetscSFDestroy(&sfXC));
  } else {
    PetscCall(DMLoad(dmnew, v));
    if (vec) {
      PetscCall(CreateDiscretization(dmnew));
      PetscCall(DMCreateGlobalVector(dmnew, &vnew));
      PetscCall(PetscObjectSetName((PetscObject)vnew, "solution"));
      PetscCall(VecLoad(vnew, v));
    }
  }
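  /* Querying the cell type label here appears intended to make sure it exists on the loaded mesh
     before the consistency checks below; the label itself is not used further. */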
  DMLabel celltypes;
  PetscCall(DMPlexGetCellTypeLabel(dmnew, &celltypes));
  PetscCall(CheckDistributedInterpolated(dmnew, expectedInterpolated, v, user));
  PetscCall(PetscObjectSetName((PetscObject)dmnew, loadedName));
  PetscCall(PetscViewerPopFormat(v));
  PetscCall(PetscViewerDestroy(&v));
  *dm_new = dmnew;
  *v_new  = vnew;
  PetscFunctionReturn(PETSC_SUCCESS);
}
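/* Create the mesh, optionally distribute it and lay out a field over it, write everything to
   HDF5, read it back, and optionally compare the original and reloaded DM, labels, and Vec. */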
int main(int argc, char **argv)
{
  DM               dm, dmnew;
  Vec              v = NULL, vnew = NULL;
  PetscPartitioner part;
  AppCtx           user;
  PetscBool        interpolated = PETSC_TRUE, flg;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
  PetscCall(CreateMesh(PETSC_COMM_WORLD, &dm));
  PetscCall(PetscOptionsGetBool(NULL, "orig_", "-dm_plex_interpolate", &interpolated, NULL));
  PetscCall(CheckInterpolated(dm, interpolated));
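  /* Optionally distribute the mesh, using the simple partitioner by default or the one selected
     with -petscpartitioner_type (e.g. parmetis in the tests below). */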
  if (user.distribute) {
    DM dmdist;

    PetscCall(DMPlexGetPartitioner(dm, &part));
    PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE));
    PetscCall(PetscPartitionerSetFromOptions(part));
    PetscCall(DMPlexDistribute(dm, 0, NULL, &dmdist));
    if (dmdist) {
      PetscCall(DMDestroy(&dm));
      dm = dmdist;
      PetscCall(CheckDistributed(dm, PETSC_TRUE));
      PetscCall(CheckInterpolated(dm, interpolated));
    }
  }
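  /* With -field, lay out the P1 field and fill every dof of each mesh point with that point's
     number, so the values are easy to verify after the vector is reloaded. */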
  if (user.field) {
    PetscSection gs;
    PetscScalar *a;
    PetscInt     pStart, pEnd, rStart;

    PetscCall(CreateDiscretization(dm));

    PetscCall(DMCreateGlobalVector(dm, &v));
    PetscCall(PetscObjectSetName((PetscObject)v, "solution"));
    PetscCall(DMGetGlobalSection(dm, &gs));
    PetscCall(PetscSectionGetChart(gs, &pStart, &pEnd));
    PetscCall(VecGetOwnershipRange(v, &rStart, NULL));
    PetscCall(VecGetArrayWrite(v, &a));
    for (PetscInt p = pStart; p < pEnd; ++p) {
      PetscInt dof, off;

      PetscCall(PetscSectionGetDof(gs, p, &dof));
      PetscCall(PetscSectionGetOffset(gs, p, &off));
      if (off < 0) continue;
      for (PetscInt d = 0; d < dof; ++d) a[off + d] = p;
    }
    PetscCall(VecRestoreArrayWrite(v, &a));
  }
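  /* Drop the "orig_" prefix and disable default distribution, so that plain -dm_* options apply to
     this DM (with no implicit redistribution) before the write/read round trip. */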
  PetscCall(DMSetOptionsPrefix(dm, NULL));
  PetscCall(DMPlexDistributeSetDefault(dm, PETSC_FALSE));
  PetscCall(DMSetFromOptions(dm));
  PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));

  PetscCall(DMPlexWriteAndReadHDF5(dm, v, user.ofname, "new_", interpolated, &user, &dmnew, &vnew));
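  /* With -second_write_read, push the mesh just loaded through the write/read cycle once more,
     so that idempotence of saving/loading can be checked with -compare. */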
  if (user.second_write_read) {
    PetscCall(DMDestroy(&dm));
    dm = dmnew;
    PetscCall(DMPlexWriteAndReadHDF5(dm, v, user.ofname, "new_", interpolated, &user, &dmnew, &vnew));
  }

  PetscCall(DMViewFromOptions(dmnew, NULL, "-dm_view"));

  /* This currently makes sense only for sequential meshes. */
  if (user.compare) {
    PetscCall(DMPlexEqual(dmnew, dm, &flg));
    PetscCheck(flg, PETSC_COMM_WORLD, PETSC_ERR_ARG_INCOMP, "DMs are not equal");
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "DMs equal\n"));
    if (v) {
      PetscCall(VecEqual(vnew, v, &flg));
      PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Vecs %s\n", flg ? "equal" : "are not equal"));
      PetscCall(VecViewFromOptions(v, NULL, "-old_vec_view"));
      PetscCall(VecViewFromOptions(vnew, NULL, "-new_vec_view"));
    }
  }
  if (user.compare_labels) {
    PetscCall(DMCompareLabels(dmnew, dm, NULL, NULL));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "DMLabels equal\n"));
  }

  PetscCall(VecDestroy(&v));
  PetscCall(DMDestroy(&dm));
  PetscCall(VecDestroy(&vnew));
  PetscCall(DMDestroy(&dmnew));
  PetscCall(PetscFinalize());
  return 0;
}
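/*
   The block below is the PETSc test harness specification for this example; the harness expands
   each {{...}} list into separate runs.  As an illustration only (the authoritative options are
   the harness entries themselves), test 3 roughly corresponds to a run such as

     ./ex55 -orig_dm_plex_filename $PETSC_DIR/share/petsc/datafiles/meshes/blockcylinder-50.exo \
            -compare -compare_labels -dm_plex_view_hdf5_storage_version 2.0.0

   with the storage version being one of the values the harness loops over.
*/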
/*TEST
  build:
    requires: hdf5
  # Idempotence of saving/loading
  # Have to replace Exodus file, which is creating uninterpolated edges
  test:
    suffix: 0
    TODO: broken
    requires: exodusii
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/Rect-tri3.exo -dm_view ascii::ascii_info_detail
    args: -format hdf5_petsc -compare
  test:
    suffix: 1
    TODO: broken
    requires: exodusii parmetis !defined(PETSC_USE_64BIT_INDICES)
    nsize: 2
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/Rect-tri3.exo -dm_view ascii::ascii_info_detail
    args: -petscpartitioner_type parmetis
    args: -format hdf5_petsc -new_dm_view ascii::ascii_info_detail
  testset:
    requires: exodusii
    args: -orig_dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
    test:
      suffix: 2
      nsize: {{1 2 4 8}separate output}
      args: -format {{default hdf5_petsc}separate output}
      args: -dm_plex_view_hdf5_storage_version {{1.0.0 2.0.0 3.0.0}}
      args: -orig_dm_plex_interpolate {{0 1}separate output}
    test:
      suffix: 2a
      nsize: {{1 2 4 8}separate output}
      args: -format {{hdf5_xdmf hdf5_viz}separate output}

  test:
    suffix: 3
    requires: exodusii
    args: -orig_dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo -compare -compare_labels
    args: -dm_plex_view_hdf5_storage_version {{1.0.0 2.0.0 3.0.0}}
  # Load HDF5 file in XDMF format in parallel, write, read dm1, write, read dm2, and compare dm1 and dm2
  testset:
    suffix: 4
    requires: !complex
    args: -orig_dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
    args: -distribute 0 -second_write_read -compare
    test:
      suffix: hdf5_petsc
      nsize: {{1 2}}
      args: -format hdf5_petsc -compare_labels
      args: -dm_plex_view_hdf5_storage_version {{1.0.0 2.0.0 3.0.0}}
    test:
      suffix: hdf5_xdmf
      nsize: {{1 3 8}}
      args: -format hdf5_xdmf
  # Use low level functions, DMPlexTopologyView()/Load(), DMPlexCoordinatesView()/Load(), and DMPlexLabelsView()/Load()
  # TODO: The output is very long so keeping just 1.0.0 version. This test should be redesigned or removed.
  test:
    suffix: 5
    requires: exodusii
    nsize: 2
    args: -orig_dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
    args: -orig_dm_plex_interpolate 0 -orig_dm_distribute 0
    args: -dm_view ascii::ascii_info_detail
    args: -new_dm_view ascii::ascii_info_detail
    args: -format hdf5_petsc -use_low_level_functions {{0 1}}
    args: -dm_plex_view_hdf5_storage_version 1.0.0
  testset:
    suffix: 6
    requires: hdf5 !complex datafilespath
    nsize: {{1 3}}
    args: -dm_plex_check_symmetry -dm_plex_check_skeleton -dm_plex_check_geometry
    args: -orig_dm_plex_filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
    args: -orig_dm_distribute 0
    args: -format hdf5_petsc -second_write_read -compare -compare_labels
    args: -orig_dm_plex_interpolate {{0 1}} -distribute {{0 1}}
    args: -dm_plex_view_hdf5_storage_version {{1.0.0 2.0.0 3.0.0}}
  testset:
    # the same data and settings as dm_impls_plex_tests-ex18_9%
    suffix: 9
    requires: hdf5 !complex datafilespath
    nsize: {{1 2 4}}
    args: -dm_plex_check_symmetry -dm_plex_check_skeleton -dm_plex_check_geometry
    args: -orig_dm_plex_filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
    args: -orig_dm_distribute 0
    args: -format {{hdf5_petsc hdf5_xdmf}} -second_write_read -compare
    args: -dm_plex_view_hdf5_storage_version 3.0.0
    test:
      suffix: hdf5_seqload
      args: -distribute
      args: -orig_dm_plex_interpolate {{0 1}}
      args: -dm_plex_hdf5_force_sequential
    test:
      suffix: hdf5_seqload_metis
      requires: parmetis
      args: -distribute -petscpartitioner_type parmetis
      args: -orig_dm_plex_interpolate 1
      args: -dm_plex_hdf5_force_sequential
    test:
      suffix: hdf5
      args: -orig_dm_plex_interpolate 1
    test:
      suffix: hdf5_repart
      requires: parmetis
      args: -distribute -petscpartitioner_type parmetis
      args: -orig_dm_plex_interpolate 1
    test:
      TODO: Parallel partitioning of uninterpolated meshes not supported
      suffix: hdf5_repart_ppu
      requires: parmetis
      args: -distribute -petscpartitioner_type parmetis
      args: -orig_dm_plex_interpolate 0
  # reproduce PetscSFView() crash - fixed, left as regression test
  test:
    suffix: new_dm_view
    requires: exodusii
    nsize: 2
    args: -orig_dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/TwoQuads.exo -new_dm_view ascii:ex5_new_dm_view.log:ascii_info_detail
    args: -dm_plex_view_hdf5_storage_version {{1.0.0 2.0.0 3.0.0}}
  # test backward compatibility with petsc_hdf5 format version 1.0.0, serial idempotence
  testset:
    suffix: 10-v3.16.0-v1.0.0
    requires: hdf5 !complex datafilespath
    args: -dm_plex_check_all -compare -compare_labels
    args: -dm_plex_view_hdf5_storage_version {{1.0.0 2.0.0 3.0.0}} -use_low_level_functions {{0 1}}
    test:
      suffix: a
      args: -orig_dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/annulus-20.h5
    test:
      suffix: b
      TODO: broken
      args: -orig_dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/barycentricallyrefinedcube.h5
    test:
      suffix: c
      args: -orig_dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/blockcylinder-50.h5
    test:
      suffix: d
      args: -orig_dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/cube-hexahedra-refined.h5
    test:
      suffix: e
      args: -orig_dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/hybrid_hexwedge.h5
    test:
      suffix: f
      args: -orig_dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/square.h5
  # test permuted sections with petsc_hdf5 format version 1.0.0
  testset:
    suffix: 11
    requires: hdf5 triangle
    args: -field
    args: -dm_plex_check_all -compare -compare_labels
    args: -orig_dm_plex_box_faces 3,3 -dm_plex_view_hdf5_storage_version 1.0.0
    args: -orig_dm_reorder_section -orig_dm_reorder_section_type reverse

    test:
      suffix: serial
    test:
      suffix: serial_no_perm
      args: -orig_dm_ignore_perm_output

TEST*/