Actual source code: ex26.c

static char help[] = "Test FEM layout with DM and ExodusII storage\n\n";

/*
  To see the vectors being tested, use

     -ua_vec_view -ua2_vec_view -s_vec_view
*/

#include <petsc.h>
#include <exodusII.h>

#include <petsc/private/dmpleximpl.h>

int main(int argc, char **argv)
{
  DM              dm, pdm, dmU, dmA, dmS, dmUA, dmUA2, *dmList;
  Vec             X, U, A, S, UA, UA2;
  IS              isU, isA, isS, isUA;
  PetscSection    section;
  const PetscInt  fieldU     = 0;
  const PetscInt  fieldA     = 2;
  const PetscInt  fieldS     = 1;
  const PetscInt  fieldUA[2] = {0, 2};
  char            ifilename[PETSC_MAX_PATH_LEN], ofilename[PETSC_MAX_PATH_LEN];
  int             exoid = -1;
  IS              csIS;
  const PetscInt *csID;
  PetscInt       *pStartDepth, *pEndDepth;
  PetscInt        order = 1;
  PetscInt        sdim, d, pStart, pEnd, p, numCS, set;
  PetscMPIInt     rank, size;
  PetscViewer     viewer;
  const char     *nodalVarName2D[4] = {"U_x", "U_y", "Alpha", "Beta"};
  const char     *zonalVarName2D[3] = {"Sigma_11", "Sigma_22", "Sigma_12"};
  const char     *nodalVarName3D[5] = {"U_x", "U_y", "U_z", "Alpha", "Beta"};
  const char     *zonalVarName3D[6] = {"Sigma_11", "Sigma_22", "Sigma_33", "Sigma_23", "Sigma_13", "Sigma_12"};

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscOptionsBegin(PETSC_COMM_WORLD, NULL, "FEM Layout Options", "ex26");
  PetscCall(PetscOptionsString("-i", "Filename to read", "ex26", ifilename, ifilename, sizeof(ifilename), NULL));
  PetscCall(PetscOptionsString("-o", "Filename to write", "ex26", ofilename, ofilename, sizeof(ofilename), NULL));
  PetscCall(PetscOptionsBoundedInt("-order", "FEM polynomial order", "ex26", order, &order, NULL, 1));
  PetscOptionsEnd();
  PetscCheck((order >= 1) && (order <= 2), PETSC_COMM_WORLD, PETSC_ERR_ARG_OUTOFRANGE, "Unsupported polynomial order %" PetscInt_FMT " not in [1, 2]", order);

  /* Read the mesh from a file in any supported format */
  PetscCall(DMPlexCreateFromFile(PETSC_COMM_WORLD, ifilename, NULL, PETSC_TRUE, &dm));
  PetscCall(DMPlexDistributeSetDefault(dm, PETSC_FALSE));
  PetscCall(DMSetFromOptions(dm));
  PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));
  PetscCall(DMGetDimension(dm, &sdim));

  /* Create the exodus result file */
  {
    PetscInt numstep = 3, step;
    int     *truthtable;
    int      numNodalVar, numZonalVar, i;

    /* enable exodus debugging information */
    ex_opts(EX_VERBOSE | EX_DEBUG);
    /* Create the exodus file */
    PetscCall(PetscViewerExodusIIOpen(PETSC_COMM_WORLD, ofilename, FILE_MODE_WRITE, &viewer));
    /* The long way would be */
    /*
      PetscCall(PetscViewerCreate(PETSC_COMM_WORLD,&viewer));
      PetscCall(PetscViewerSetType(viewer,PETSCVIEWEREXODUSII));
      PetscCall(PetscViewerFileSetMode(viewer,FILE_MODE_APPEND));
      PetscCall(PetscViewerFileSetName(viewer,ofilename));
    */
    /* set the mesh order */
    PetscCall(PetscViewerExodusIISetOrder(viewer, order));
    PetscCall(PetscViewerView(viewer, PETSC_VIEWER_STDOUT_WORLD));
    /*
      Notice how the exodus file is actually NOT open at this point (exoid is -1).
      Since we are overwriting the file (the mode is FILE_MODE_WRITE), we will have to
      write the geometry (the DM), which can only be done on a brand-new file.
    */

    /* Save the geometry to the file, erasing all previous content */
    PetscCall(DMView(dm, viewer));
    PetscCall(PetscViewerView(viewer, PETSC_VIEWER_STDOUT_WORLD));
    /*
      Note how the exodus file is now open
    */
    PetscCall(PetscViewerExodusIIGetId(viewer, &exoid));

    /* "Format" the exodus result file, i.e. allocate space for nodal and zonal variables */
    switch (sdim) {
    case 2:
      numNodalVar = 4;
      numZonalVar = 3;
      PetscCall(PetscViewerExodusIISetZonalVariable(viewer, numZonalVar));
      PetscCall(PetscViewerExodusIISetZonalVariableNames(viewer, zonalVarName2D));
      PetscCall(PetscViewerExodusIISetNodalVariable(viewer, numNodalVar));
      PetscCall(PetscViewerExodusIISetNodalVariableNames(viewer, nodalVarName2D));
      break;
    case 3:
      numNodalVar = 5;
      numZonalVar = 6;
      PetscCall(PetscViewerExodusIISetZonalVariable(viewer, numZonalVar));
      PetscCall(PetscViewerExodusIISetZonalVariableNames(viewer, zonalVarName3D));
      PetscCall(PetscViewerExodusIISetNodalVariable(viewer, numNodalVar));
      PetscCall(PetscViewerExodusIISetNodalVariableNames(viewer, nodalVarName3D));
      break;
    default:
      SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "No layout for dimension %" PetscInt_FMT, sdim);
    }
    PetscCall(PetscViewerView(viewer, PETSC_VIEWER_STDOUT_WORLD));

    /*
      An exodusII truth table specifies which fields are saved at which time step.
      It speeds up I/O by reserving space for the fields in the file ahead of time.
    */
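    /*
      The table built below is a numCS x numZonalVar array in row-major order, one
      row per element block: entry (b, v) == 1 means zonal variable v is stored on
      block b. Setting every entry to 1 reserves all variables on all blocks; e.g.
      for 2 blocks and 3 variables the table is {1, 1, 1, 1, 1, 1}.
    */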
    numCS = ex_inquire_int(exoid, EX_INQ_ELEM_BLK);
    PetscCall(PetscMalloc1(numZonalVar * numCS, &truthtable));
    for (i = 0; i < numZonalVar * numCS; ++i) truthtable[i] = 1;
    PetscCallExternal(ex_put_truth_table, exoid, EX_ELEM_BLOCK, numCS, numZonalVar, truthtable);
    PetscCall(PetscFree(truthtable));

    /* Writing time step information to the file. Note that this is currently broken in the exodus library for netcdf4 (HDF5-based) files */
    for (step = 0; step < numstep; ++step) {
      PetscReal time = step;
      PetscCallExternal(ex_put_time, exoid, step + 1, &time);
    }
  }

  /* Create the main section containing all fields */
  PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)dm), &section));
  PetscCall(PetscSectionSetNumFields(section, 3));
  PetscCall(PetscSectionSetFieldName(section, fieldU, "U"));
  PetscCall(PetscSectionSetFieldName(section, fieldA, "Alpha"));
  PetscCall(PetscSectionSetFieldName(section, fieldS, "Sigma"));
  PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
  PetscCall(PetscSectionSetChart(section, pStart, pEnd));
  PetscCall(PetscMalloc2(sdim + 1, &pStartDepth, sdim + 1, &pEndDepth));
  for (d = 0; d <= sdim; ++d) PetscCall(DMPlexGetDepthStratum(dm, d, &pStartDepth[d], &pEndDepth[d]));
  /* Vector field U, Scalar field Alpha, Tensor field Sigma */
  PetscCall(PetscSectionSetFieldComponents(section, fieldU, sdim));
  PetscCall(PetscSectionSetFieldComponents(section, fieldA, 1));
  PetscCall(PetscSectionSetFieldComponents(section, fieldS, sdim * (sdim + 1) / 2));
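  /* sdim * (sdim + 1) / 2 is the number of independent components of the symmetric
     stress tensor: 3 in 2D (Sigma_11, Sigma_22, Sigma_12) and 6 in 3D, matching the
     zonal variable names declared above. */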

  /* Going through cell sets, then cells, and setting up storage in the section */
  PetscCall(DMGetLabelSize(dm, "Cell Sets", &numCS));
  PetscCall(DMGetLabelIdIS(dm, "Cell Sets", &csIS));
  if (csIS) PetscCall(ISGetIndices(csIS, &csID));
  for (set = 0; set < numCS; set++) {
    IS              cellIS;
    const PetscInt *cellID;
    PetscInt        numCells, cell, closureSize, *closureA = NULL;

    PetscCall(DMGetStratumSize(dm, "Cell Sets", csID[set], &numCells));
    PetscCall(DMGetStratumIS(dm, "Cell Sets", csID[set], &cellIS));
    if (numCells > 0) {
      /* dof layout ordered by increasing depth in the DAG: vertex, edge, face, cell */
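      /* For example, dofUP2Quad = {2, 2, 2} places the 2 components of U on every
         vertex (depth 0), edge (depth 1), and cell (depth 2), i.e. a Q2 vector
         field, while dofS2D = {0, 0, 3} puts the 3 stress components on cells only. */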
      PetscInt  dofUP1Tri[]  = {2, 0, 0};
      PetscInt  dofAP1Tri[]  = {1, 0, 0};
      PetscInt  dofUP2Tri[]  = {2, 2, 0};
      PetscInt  dofAP2Tri[]  = {1, 1, 0};
      PetscInt  dofUP1Quad[] = {2, 0, 0};
      PetscInt  dofAP1Quad[] = {1, 0, 0};
      PetscInt  dofUP2Quad[] = {2, 2, 2};
      PetscInt  dofAP2Quad[] = {1, 1, 1};
      PetscInt  dofS2D[]     = {0, 0, 3};
      PetscInt  dofUP1Tet[]  = {3, 0, 0, 0};
      PetscInt  dofAP1Tet[]  = {1, 0, 0, 0};
      PetscInt  dofUP2Tet[]  = {3, 3, 0, 0};
      PetscInt  dofAP2Tet[]  = {1, 1, 0, 0};
      PetscInt  dofUP1Hex[]  = {3, 0, 0, 0};
      PetscInt  dofAP1Hex[]  = {1, 0, 0, 0};
      PetscInt  dofUP2Hex[]  = {3, 3, 3, 3};
      PetscInt  dofAP2Hex[]  = {1, 1, 1, 1};
      PetscInt  dofS3D[]     = {0, 0, 0, 6};
      PetscInt *dofU, *dofA, *dofS;

      switch (sdim) {
      case 2:
        dofS = dofS2D;
        break;
      case 3:
        dofS = dofS3D;
        break;
      default:
        SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "No layout for dimension %" PetscInt_FMT, sdim);
      }

      /* Identify the cell type based on closure size only. This works for Tri/Tet/Quad/Hex meshes.
         It will not be enough to identify more exotic elements like pyramids or prisms... */
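      /* Closure point counts: Tri = 1 cell + 3 edges + 3 vertices = 7,
         Quad = 1 + 4 + 4 = 9, Tet = 1 cell + 4 faces + 6 edges + 4 vertices = 15,
         Hex = 1 + 6 + 12 + 8 = 27. */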
      PetscCall(ISGetIndices(cellIS, &cellID));
      PetscCall(DMPlexGetTransitiveClosure(dm, cellID[0], PETSC_TRUE, &closureSize, &closureA));
      switch (closureSize) {
      case 7: /* Tri */
        if (order == 1) {
          dofU = dofUP1Tri;
          dofA = dofAP1Tri;
        } else {
          dofU = dofUP2Tri;
          dofA = dofAP2Tri;
        }
        break;
      case 9: /* Quad */
        if (order == 1) {
          dofU = dofUP1Quad;
          dofA = dofAP1Quad;
        } else {
          dofU = dofUP2Quad;
          dofA = dofAP2Quad;
        }
        break;
      case 15: /* Tet */
        if (order == 1) {
          dofU = dofUP1Tet;
          dofA = dofAP1Tet;
        } else {
          dofU = dofUP2Tet;
          dofA = dofAP2Tet;
        }
        break;
      case 27: /* Hex */
        if (order == 1) {
          dofU = dofUP1Hex;
          dofA = dofAP1Hex;
        } else {
          dofU = dofUP2Hex;
          dofA = dofAP2Hex;
        }
        break;
      default:
        SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Unknown element with closure size %" PetscInt_FMT, closureSize);
      }
      PetscCall(DMPlexRestoreTransitiveClosure(dm, cellID[0], PETSC_TRUE, &closureSize, &closureA));

      for (cell = 0; cell < numCells; cell++) {
        PetscInt *closure = NULL;

        PetscCall(DMPlexGetTransitiveClosure(dm, cellID[cell], PETSC_TRUE, &closureSize, &closure));
        for (p = 0; p < closureSize; ++p) {
          /* Find depth of p */
          for (d = 0; d <= sdim; ++d) {
            if ((closure[2 * p] >= pStartDepth[d]) && (closure[2 * p] < pEndDepth[d])) {
              PetscCall(PetscSectionSetDof(section, closure[2 * p], dofU[d] + dofA[d] + dofS[d]));
              PetscCall(PetscSectionSetFieldDof(section, closure[2 * p], fieldU, dofU[d]));
              PetscCall(PetscSectionSetFieldDof(section, closure[2 * p], fieldA, dofA[d]));
              PetscCall(PetscSectionSetFieldDof(section, closure[2 * p], fieldS, dofS[d]));
            }
          }
        }
        PetscCall(DMPlexRestoreTransitiveClosure(dm, cellID[cell], PETSC_TRUE, &closureSize, &closure));
      }
      PetscCall(ISRestoreIndices(cellIS, &cellID));
      PetscCall(ISDestroy(&cellIS));
    }
  }
  if (csIS) PetscCall(ISRestoreIndices(csIS, &csID));
  PetscCall(ISDestroy(&csIS));
  PetscCall(PetscSectionSetUp(section));
  PetscCall(DMSetLocalSection(dm, section));
  PetscCall(PetscObjectViewFromOptions((PetscObject)section, NULL, "-dm_section_view"));
  PetscCall(PetscSectionDestroy(&section));

  {
    PetscSF          migrationSF;
    PetscInt         ovlp = 0;
    PetscPartitioner part;

    PetscCall(DMSetUseNatural(dm, PETSC_TRUE));
    PetscCall(DMPlexGetPartitioner(dm, &part));
    PetscCall(PetscPartitionerSetFromOptions(part));
    PetscCall(DMPlexDistribute(dm, ovlp, &migrationSF, &pdm));
    if (!pdm) pdm = dm;
    /* Setting the migrationSF is mandatory since useNatural = PETSC_TRUE */
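    /* The migration SF records how mesh points moved during distribution; with
       useNatural set, the DM uses it to build its global-to-natural map so that
       vectors can be written in the original (natural) ordering of the input file. */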
    if (migrationSF) {
      PetscCall(DMPlexSetMigrationSF(pdm, migrationSF));
      PetscCall(PetscSFDestroy(&migrationSF));
    }
    PetscCall(DMViewFromOptions(pdm, NULL, "-dm_view"));
  }

  /* Get DM and IS for each field of dm */
  PetscCall(DMCreateSubDM(pdm, 1, &fieldU, &isU, &dmU));
  PetscCall(DMCreateSubDM(pdm, 1, &fieldA, &isA, &dmA));
  PetscCall(DMCreateSubDM(pdm, 1, &fieldS, &isS, &dmS));
  PetscCall(DMCreateSubDM(pdm, 2, fieldUA, &isUA, &dmUA));
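  /* Each sub-DM carries the section restricted to the selected field(s); the
     accompanying IS maps the sub-DM's dofs into a global vector of pdm, which is
     what VecISCopy relies on below. */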

  PetscCall(PetscMalloc1(2, &dmList));
  dmList[0] = dmU;
  dmList[1] = dmA;

  PetscCall(DMCreateSuperDM(dmList, 2, NULL, &dmUA2));
  PetscCall(PetscFree(dmList));

  PetscCall(DMGetGlobalVector(pdm, &X));
  PetscCall(DMGetGlobalVector(dmU, &U));
  PetscCall(DMGetGlobalVector(dmA, &A));
  PetscCall(DMGetGlobalVector(dmS, &S));
  PetscCall(DMGetGlobalVector(dmUA, &UA));
  PetscCall(DMGetGlobalVector(dmUA2, &UA2));

  PetscCall(PetscObjectSetName((PetscObject)U, "U"));
  PetscCall(PetscObjectSetName((PetscObject)A, "Alpha"));
  PetscCall(PetscObjectSetName((PetscObject)S, "Sigma"));
  PetscCall(PetscObjectSetName((PetscObject)UA, "UAlpha"));
  PetscCall(PetscObjectSetName((PetscObject)UA2, "UAlpha2"));
  PetscCall(VecSet(X, -111.));

  /* Setting u to [x,y,z] and alpha to x^2+y^2+z^2 by writing into UAlpha, then restricting to U and Alpha */
  {
    PetscSection sectionUA;
    Vec          UALoc;
    PetscSection coordSection;
    Vec          coord;
    PetscScalar *cval, *xyz;
    PetscInt     clSize, i, j;

    PetscCall(DMGetLocalSection(dmUA, &sectionUA));
    PetscCall(DMGetLocalVector(dmUA, &UALoc));
    PetscCall(VecGetArray(UALoc, &cval));
    PetscCall(DMGetCoordinateSection(dmUA, &coordSection));
    PetscCall(DMGetCoordinatesLocal(dmUA, &coord));
    PetscCall(DMPlexGetChart(dmUA, &pStart, &pEnd));

    for (p = pStart; p < pEnd; ++p) {
      PetscInt dofUA, offUA;

      PetscCall(PetscSectionGetDof(sectionUA, p, &dofUA));
      if (dofUA > 0) {
        xyz = NULL;
        PetscCall(PetscSectionGetOffset(sectionUA, p, &offUA));
        PetscCall(DMPlexVecGetClosure(dmUA, coordSection, coord, p, &clSize, &xyz));
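        /* xyz now holds the coordinates of the closure of p as clSize/sdim points
           of sdim components each; averaging them below makes U at p the center of
           mass of its closure and Alpha its squared Euclidean norm. */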
        cval[offUA + sdim] = 0.;
        for (i = 0; i < sdim; ++i) {
          cval[offUA + i] = 0;
          for (j = 0; j < clSize / sdim; ++j) cval[offUA + i] += xyz[j * sdim + i];
          cval[offUA + i] = cval[offUA + i] * sdim / clSize;
          cval[offUA + sdim] += PetscSqr(cval[offUA + i]);
        }
        PetscCall(DMPlexVecRestoreClosure(dmUA, coordSection, coord, p, &clSize, &xyz));
      }
    }
    PetscCall(VecRestoreArray(UALoc, &cval));
    PetscCall(DMLocalToGlobalBegin(dmUA, UALoc, INSERT_VALUES, UA));
    PetscCall(DMLocalToGlobalEnd(dmUA, UALoc, INSERT_VALUES, UA));
    PetscCall(DMRestoreLocalVector(dmUA, &UALoc));

    /* Update X */
    PetscCall(VecISCopy(X, isUA, SCATTER_FORWARD, UA));
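    /* With SCATTER_FORWARD, VecISCopy copies the sub-vector into the entries of X
       selected by the IS; with SCATTER_REVERSE below, it extracts them back out. */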
    PetscCall(VecViewFromOptions(UA, NULL, "-ua_vec_view"));

    /* Restrict to U and Alpha */
    PetscCall(VecISCopy(X, isU, SCATTER_REVERSE, U));
    PetscCall(VecISCopy(X, isA, SCATTER_REVERSE, A));

    /* Restrict to UA2 */
    PetscCall(VecISCopy(X, isUA, SCATTER_REVERSE, UA2));
    PetscCall(VecViewFromOptions(UA2, NULL, "-ua2_vec_view"));
  }

  {
    Vec          tmpVec;
    PetscSection coordSection;
    Vec          coord;
    PetscReal    norm;
    PetscReal    time = 1.234;

    /* Writing nodal variables to the ExodusII file */
    PetscCall(DMSetOutputSequenceNumber(dmU, 0, time));
    PetscCall(DMSetOutputSequenceNumber(dmA, 0, time));
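    /* The output sequence number selects the time step slot that VecView / VecLoad
       address in the exodus file; here sequence number n corresponds to exodus
       step n + 1, matching the times written with ex_put_time above. */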
    PetscCall(VecView(U, viewer));
    PetscCall(VecView(A, viewer));

    /* Saving U and Alpha in one shot.
       For this, we need to cheat and change the Vec's name.
       Note that in the end we write variables one component at a time,
       so there is no real value in doing this.
    */

    PetscCall(DMSetOutputSequenceNumber(dmUA, 1, time));
    PetscCall(DMGetGlobalVector(dmUA, &tmpVec));
    PetscCall(VecCopy(UA, tmpVec));
    PetscCall(PetscObjectSetName((PetscObject)tmpVec, "U"));
    PetscCall(VecView(tmpVec, viewer));
    /* Reading nodal variables from the ExodusII file */
    PetscCall(VecSet(tmpVec, -1000.0));
    PetscCall(VecLoad(tmpVec, viewer));
    PetscCall(VecAXPY(UA, -1.0, tmpVec));
    PetscCall(VecNorm(UA, NORM_INFINITY, &norm));
    PetscCheck(norm <= PETSC_SQRT_MACHINE_EPSILON, PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "UAlpha ||Vin - Vout|| = %g", (double)norm);
    PetscCall(DMRestoreGlobalVector(dmUA, &tmpVec));

    /* Same thing with the UA2 Vec obtained from the superDM */
    PetscCall(DMGetGlobalVector(dmUA2, &tmpVec));
    PetscCall(VecCopy(UA2, tmpVec));
    PetscCall(PetscObjectSetName((PetscObject)tmpVec, "U"));
    PetscCall(DMSetOutputSequenceNumber(dmUA2, 2, time));
    PetscCall(VecView(tmpVec, viewer));
    /* Reading nodal variables from the ExodusII file */
    PetscCall(VecSet(tmpVec, -1000.0));
    PetscCall(VecLoad(tmpVec, viewer));
    PetscCall(VecAXPY(UA2, -1.0, tmpVec));
    PetscCall(VecNorm(UA2, NORM_INFINITY, &norm));
    PetscCheck(norm <= PETSC_SQRT_MACHINE_EPSILON, PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "UAlpha2 ||Vin - Vout|| = %g", (double)norm);
    PetscCall(DMRestoreGlobalVector(dmUA2, &tmpVec));

    /* Building and saving Sigma
       We set sigma_0 = rank (to see partitioning)
              sigma_1 = cell set ID
              sigma_2 = x_coordinate of the cell center of mass
    */
    PetscCall(DMGetCoordinateSection(dmS, &coordSection));
    PetscCall(DMGetCoordinatesLocal(dmS, &coord));
    PetscCall(DMGetLabelIdIS(dmS, "Cell Sets", &csIS));
    PetscCall(DMGetLabelSize(dmS, "Cell Sets", &numCS));
    PetscCall(ISGetIndices(csIS, &csID));
    for (set = 0; set < numCS; ++set) {
      /* We know that all cells in a cell set have the same type, so we can dimension cval and xyz once for each cell set */
      IS              cellIS;
      const PetscInt *cellID;
      PetscInt        numCells, cell;
      PetscScalar    *cval = NULL, *xyz = NULL;
      PetscInt        clSize, cdimCoord, c;

      PetscCall(DMGetStratumIS(dmS, "Cell Sets", csID[set], &cellIS));
      PetscCall(ISGetIndices(cellIS, &cellID));
      PetscCall(ISGetSize(cellIS, &numCells));
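      /* DMPlexVecGetClosure allocates cval and xyz on the first call (they start
         as NULL) and reuses the same buffers afterwards, since every cell in the
         set has the same closure size; they are restored once, after the loop. */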
      for (cell = 0; cell < numCells; cell++) {
        PetscCall(DMPlexVecGetClosure(dmS, NULL, S, cellID[cell], &clSize, &cval));
        PetscCall(DMPlexVecGetClosure(dmS, coordSection, coord, cellID[cell], &cdimCoord, &xyz));
        cval[0] = rank;
        cval[1] = csID[set];
        cval[2] = 0.;
        for (c = 0; c < cdimCoord / sdim; c++) cval[2] += xyz[c * sdim];
        cval[2] = cval[2] * sdim / cdimCoord;
        PetscCall(DMPlexVecSetClosure(dmS, NULL, S, cellID[cell], cval, INSERT_ALL_VALUES));
      }
      PetscCall(DMPlexVecRestoreClosure(dmS, NULL, S, cellID[0], &clSize, &cval));
      PetscCall(DMPlexVecRestoreClosure(dmS, coordSection, coord, cellID[0], NULL, &xyz));
      PetscCall(ISRestoreIndices(cellIS, &cellID));
      PetscCall(ISDestroy(&cellIS));
    }
    PetscCall(ISRestoreIndices(csIS, &csID));
    PetscCall(ISDestroy(&csIS));
    PetscCall(VecViewFromOptions(S, NULL, "-s_vec_view"));

    /* Writing zonal variables to the ExodusII file */
    PetscCall(DMSetOutputSequenceNumber(dmS, 0, time));
    PetscCall(VecView(S, viewer));

    /* Reading zonal variables from the ExodusII file */
    PetscCall(DMGetGlobalVector(dmS, &tmpVec));
    PetscCall(VecSet(tmpVec, -1000.0));
    PetscCall(PetscObjectSetName((PetscObject)tmpVec, "Sigma"));
    PetscCall(VecLoad(tmpVec, viewer));
    PetscCall(VecAXPY(S, -1.0, tmpVec));
    PetscCall(VecNorm(S, NORM_INFINITY, &norm));
    PetscCheck(norm <= PETSC_SQRT_MACHINE_EPSILON, PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Sigma ||Vin - Vout|| = %g", (double)norm);
    PetscCall(DMRestoreGlobalVector(dmS, &tmpVec));
  }
  PetscCall(PetscViewerDestroy(&viewer));

  PetscCall(DMRestoreGlobalVector(dmUA2, &UA2));
  PetscCall(DMRestoreGlobalVector(dmUA, &UA));
  PetscCall(DMRestoreGlobalVector(dmS, &S));
  PetscCall(DMRestoreGlobalVector(dmA, &A));
  PetscCall(DMRestoreGlobalVector(dmU, &U));
  PetscCall(DMRestoreGlobalVector(pdm, &X));
  PetscCall(DMDestroy(&dmU));
  PetscCall(ISDestroy(&isU));
  PetscCall(DMDestroy(&dmA));
  PetscCall(ISDestroy(&isA));
  PetscCall(DMDestroy(&dmS));
  PetscCall(ISDestroy(&isS));
  PetscCall(DMDestroy(&dmUA));
  PetscCall(ISDestroy(&isUA));
  PetscCall(DMDestroy(&dmUA2));
  PetscCall(DMDestroy(&pdm));
  if (size > 1) PetscCall(DMDestroy(&dm));
  PetscCall(PetscFree2(pStartDepth, pEndDepth));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

  build:
    requires: exodusii pnetcdf !complex
  # 2D seq
  test:
    suffix: 0
    args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareT-large.exo -o FourSquareT-large_out.exo -dm_view -petscpartitioner_type simple -order 1
    #TODO: bug in call to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
  test:
    suffix: 1
    args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareQ-large.exo -o FourSquareQ-large_out.exo -dm_view -petscpartitioner_type simple -order 1
  test:
    suffix: 2
    args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareH-large.exo -o FourSquareH-large_out.exo -dm_view -petscpartitioner_type simple -order 1
    #TODO: bug in call to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
  test:
    suffix: 3
    args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareT-large.exo -o FourSquareT-large_out.exo -dm_view -petscpartitioner_type simple -order 2
  test:
    suffix: 4
    args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareQ-large.exo -o FourSquareQ-large_out.exo -dm_view -petscpartitioner_type simple -order 2
  test:
    suffix: 5
    args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareH-large.exo -o FourSquareH-large_out.exo -dm_view -petscpartitioner_type simple -order 2

  # 2D par
  test:
    suffix: 6
    nsize: 2
    args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareT-large.exo -o FourSquareT-large_out.exo -dm_view -petscpartitioner_type simple -order 1
    #TODO: bug in call to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
  test:
    suffix: 7
    nsize: 2
    args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareQ-large.exo -o FourSquareQ-large_out.exo -dm_view -petscpartitioner_type simple -order 1
  test:
    suffix: 8
    nsize: 2
    args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareH-large.exo -o FourSquareH-large_out.exo -dm_view -petscpartitioner_type simple -order 1
    #TODO: bug in call to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: invalid dimension ID or name
  test:
    suffix: 9
    nsize: 2
    args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareT-large.exo -o FourSquareT-large_out.exo -dm_view -petscpartitioner_type simple -order 2
  test:
    suffix: 10
    nsize: 2
    args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareQ-large.exo -o FourSquareQ-large_out.exo -dm_view -petscpartitioner_type simple -order 2
  test:
    # Something is now broken with parallel read/write for whatever shape H is
    TODO: broken
    suffix: 11
    nsize: 2
    args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareH-large.exo -o FourSquareH-large_out.exo -dm_view -petscpartitioner_type simple -order 2

  # 3D seq
  test:
    suffix: 12
    args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickHex-large.exo -o FourBrickHex-large_out.exo -dm_view -petscpartitioner_type simple -order 1
  test:
    suffix: 13
    args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickTet-large.exo -o FourBrickTet-large_out.exo -dm_view -petscpartitioner_type simple -order 1
    #TODO: bug in call to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
  test:
    suffix: 14
    args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickHex-large.exo -o FourBrickHex-large_out.exo -dm_view -petscpartitioner_type simple -order 2
  test:
    suffix: 15
    args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickTet-large.exo -o FourBrickTet-large_out.exo -dm_view -petscpartitioner_type simple -order 2
    #TODO: bug in call to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
  # 3D par
  test:
    suffix: 16
    nsize: 2
    args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickHex-large.exo -o FourBrickHex-large_out.exo -dm_view -petscpartitioner_type simple -order 1
  test:
    suffix: 17
    nsize: 2
    args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickTet-large.exo -o FourBrickTet-large_out.exo -dm_view -petscpartitioner_type simple -order 1
    #TODO: bug in call to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
  test:
    suffix: 18
    nsize: 2
    args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickHex-large.exo -o FourBrickHex-large_out.exo -dm_view -petscpartitioner_type simple -order 2
  test:
    suffix: 19
    nsize: 2
    args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickTet-large.exo -o FourBrickTet-large_out.exo -dm_view -petscpartitioner_type simple -order 2
    #TODO: bug in call to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints

TEST*/