Actual source code: pdvec.c

  1: /*
  2:      Code for some of the parallel vector primitives.
  3: */
  4: #include <../src/vec/vec/impls/mpi/pvecimpl.h>
  5: #include <petsc/private/viewerhdf5impl.h>
  6: #include <petsc/private/glvisviewerimpl.h>
  7: #include <petsc/private/glvisvecimpl.h>
  8: #include <petscsf.h>

 10: static PetscErrorCode VecResetPreallocationCOO_MPI(Vec v)
 11: {
 12:   Vec_MPI *vmpi = (Vec_MPI *)v->data;

 14:   PetscFunctionBegin;
 15:   if (vmpi) {
 16:     PetscCall(PetscFree(vmpi->jmap1));
 17:     PetscCall(PetscFree(vmpi->perm1));
 18:     PetscCall(PetscFree(vmpi->Cperm));
 19:     PetscCall(PetscFree4(vmpi->imap2, vmpi->jmap2, vmpi->sendbuf, vmpi->recvbuf));
 20:     PetscCall(PetscFree(vmpi->perm2));
 21:     PetscCall(PetscSFDestroy(&vmpi->coo_sf));
 22:   }
 23:   PetscFunctionReturn(PETSC_SUCCESS);
 24: }

 26: PetscErrorCode VecDestroy_MPI(Vec v)
 27: {
 28:   Vec_MPI *x = (Vec_MPI *)v->data;

 30:   PetscFunctionBegin;
 31:   PetscCall(PetscLogObjectState((PetscObject)v, "Length=%" PetscInt_FMT, v->map->N));
 32:   if (!x) PetscFunctionReturn(PETSC_SUCCESS);
 33:   PetscCall(PetscFree(x->array_allocated));

 35:   /* Destroy local representation of vector if it exists */
 36:   if (x->localrep) {
 37:     PetscCall(VecDestroy(&x->localrep));
 38:     PetscCall(VecScatterDestroy(&x->localupdate));
 39:     PetscCall(ISDestroy(&x->ghost));
 40:   }
 41:   PetscCall(VecAssemblyReset_MPI(v));

 43:   /* Destroy the stashes: note the order - so that the tags are freed properly */
 44:   PetscCall(VecStashDestroy_Private(&v->bstash));
 45:   PetscCall(VecStashDestroy_Private(&v->stash));

 47:   PetscCall(VecResetPreallocationCOO_MPI(v));
 48:   PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscMatlabEnginePut_C", NULL));
 49:   PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscMatlabEngineGet_C", NULL));
 50:   PetscCall(PetscFree(v->data));
 51:   PetscFunctionReturn(PETSC_SUCCESS);
 52: }

 54: static PetscErrorCode VecView_MPI_ASCII(Vec xin, PetscViewer viewer)
 55: {
 56:   PetscInt           i, work = xin->map->n, cnt, len, nLen;
 57:   PetscMPIInt        j, n = 0, size, rank, tag = ((PetscObject)viewer)->tag;
 58:   MPI_Status         status;
 59:   PetscScalar       *values;
 60:   const PetscScalar *xarray;
 61:   const char        *name;
 62:   PetscViewerFormat  format;

 64:   PetscFunctionBegin;
 65:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)xin), &size));
 66:   PetscCall(PetscViewerGetFormat(viewer, &format));
 67:   if (format == PETSC_VIEWER_LOAD_BALANCE) {
 68:     PetscInt nmax = 0, nmin = xin->map->n, navg;
 69:     for (i = 0; i < (PetscInt)size; i++) {
 70:       nmax = PetscMax(nmax, xin->map->range[i + 1] - xin->map->range[i]);
 71:       nmin = PetscMin(nmin, xin->map->range[i + 1] - xin->map->range[i]);
 72:     }
 73:     navg = xin->map->N / size;
 74:     PetscCall(PetscViewerASCIIPrintf(viewer, "  Load Balance - Local vector size Min %" PetscInt_FMT "  avg %" PetscInt_FMT "  max %" PetscInt_FMT "\n", nmin, navg, nmax));
 75:     PetscFunctionReturn(PETSC_SUCCESS);
 76:   }

 78:   PetscCall(VecGetArrayRead(xin, &xarray));
 79:   /* determine maximum message to arrive */
 80:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)xin), &rank));
 81:   PetscCallMPI(MPI_Reduce(&work, &len, 1, MPIU_INT, MPI_MAX, 0, PetscObjectComm((PetscObject)xin)));
 82:   if (format == PETSC_VIEWER_ASCII_GLVIS) rank = 0, len = 0; /* no parallel distributed write support from GLVis */
 83:   if (rank == 0) {
 84:     PetscCall(PetscMalloc1(len, &values));
 85:     /*
 86:         MATLAB format and ASCII format are very similar except
 87:         MATLAB uses %18.16e format while ASCII uses %g
 88:     */
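    /*
        Illustration (hypothetical value, real scalars assumed): 1.25 prints as
        "1.2500000000000000e+00" in the MATLAB branch below, whereas the default
        ASCII branch near the end of this routine prints it with %g as "1.25".
    */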
 89:     if (format == PETSC_VIEWER_ASCII_MATLAB) {
 90:       PetscCall(PetscObjectGetName((PetscObject)xin, &name));
 91:       PetscCall(PetscViewerASCIIPrintf(viewer, "%s = [\n", name));
 92:       for (i = 0; i < xin->map->n; i++) {
 93: #if defined(PETSC_USE_COMPLEX)
 94:         if (PetscImaginaryPart(xarray[i]) > 0.0) {
 95:           PetscCall(PetscViewerASCIIPrintf(viewer, "%18.16e + %18.16ei\n", (double)PetscRealPart(xarray[i]), (double)PetscImaginaryPart(xarray[i])));
 96:         } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
 97:           PetscCall(PetscViewerASCIIPrintf(viewer, "%18.16e - %18.16ei\n", (double)PetscRealPart(xarray[i]), -(double)PetscImaginaryPart(xarray[i])));
 98:         } else {
 99:           PetscCall(PetscViewerASCIIPrintf(viewer, "%18.16e\n", (double)PetscRealPart(xarray[i])));
100:         }
101: #else
102:         PetscCall(PetscViewerASCIIPrintf(viewer, "%18.16e\n", (double)xarray[i]));
103: #endif
104:       }
105:       /* receive and print messages */
106:       for (j = 1; j < size; j++) {
107:         PetscCallMPI(MPI_Recv(values, (PetscMPIInt)len, MPIU_SCALAR, j, tag, PetscObjectComm((PetscObject)xin), &status));
108:         PetscCallMPI(MPI_Get_count(&status, MPIU_SCALAR, &n));
109:         for (i = 0; i < n; i++) {
110: #if defined(PETSC_USE_COMPLEX)
111:           if (PetscImaginaryPart(values[i]) > 0.0) {
112:             PetscCall(PetscViewerASCIIPrintf(viewer, "%18.16e + %18.16ei\n", (double)PetscRealPart(values[i]), (double)PetscImaginaryPart(values[i])));
113:           } else if (PetscImaginaryPart(values[i]) < 0.0) {
114:             PetscCall(PetscViewerASCIIPrintf(viewer, "%18.16e - %18.16ei\n", (double)PetscRealPart(values[i]), -(double)PetscImaginaryPart(values[i])));
115:           } else {
116:             PetscCall(PetscViewerASCIIPrintf(viewer, "%18.16e\n", (double)PetscRealPart(values[i])));
117:           }
118: #else
119:           PetscCall(PetscViewerASCIIPrintf(viewer, "%18.16e\n", (double)values[i]));
120: #endif
121:         }
122:       }
123:       PetscCall(PetscViewerASCIIPrintf(viewer, "];\n"));

125:     } else if (format == PETSC_VIEWER_ASCII_SYMMODU) {
126:       for (i = 0; i < xin->map->n; i++) {
127: #if defined(PETSC_USE_COMPLEX)
128:         PetscCall(PetscViewerASCIIPrintf(viewer, "%18.16e %18.16e\n", (double)PetscRealPart(xarray[i]), (double)PetscImaginaryPart(xarray[i])));
129: #else
130:         PetscCall(PetscViewerASCIIPrintf(viewer, "%18.16e\n", (double)xarray[i]));
131: #endif
132:       }
133:       /* receive and print messages */
134:       for (j = 1; j < size; j++) {
135:         PetscCallMPI(MPI_Recv(values, (PetscMPIInt)len, MPIU_SCALAR, j, tag, PetscObjectComm((PetscObject)xin), &status));
136:         PetscCallMPI(MPI_Get_count(&status, MPIU_SCALAR, &n));
137:         for (i = 0; i < n; i++) {
138: #if defined(PETSC_USE_COMPLEX)
139:           PetscCall(PetscViewerASCIIPrintf(viewer, "%18.16e %18.16e\n", (double)PetscRealPart(values[i]), (double)PetscImaginaryPart(values[i])));
140: #else
141:           PetscCall(PetscViewerASCIIPrintf(viewer, "%18.16e\n", (double)values[i]));
142: #endif
143:         }
144:       }
145:     } else if (format == PETSC_VIEWER_ASCII_VTK_DEPRECATED || format == PETSC_VIEWER_ASCII_VTK_CELL_DEPRECATED) {
146:       /*
147:         state 0: No header has been output
148:         state 1: Only POINT_DATA has been output
149:         state 2: Only CELL_DATA has been output
150:         state 3: Output both, POINT_DATA last
151:         state 4: Output both, CELL_DATA last
152:       */
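      /*
        Worked example of the state machine implemented below (illustrative): viewing two
        vectors with POINT_DATA and then one with CELL_DATA drives the state 0 -> 1 -> 1 -> 4;
        a further POINT_DATA view at state 4 then fails the PetscCheck below.
      */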
153:       static PetscInt stateId     = -1;
154:       PetscInt        outputState = 0;
155:       int             doOutput    = 0;
156:       PetscBool       hasState;
157:       PetscInt        bs, b;

159:       if (stateId < 0) PetscCall(PetscObjectComposedDataRegister(&stateId));
160:       PetscCall(PetscObjectComposedDataGetInt((PetscObject)viewer, stateId, outputState, hasState));
161:       if (!hasState) outputState = 0;

163:       PetscCall(PetscObjectGetName((PetscObject)xin, &name));
164:       PetscCall(VecGetLocalSize(xin, &nLen));
165:       PetscCall(PetscMPIIntCast(nLen, &n));
166:       PetscCall(VecGetBlockSize(xin, &bs));
167:       if (format == PETSC_VIEWER_ASCII_VTK_DEPRECATED) {
168:         if (outputState == 0) {
169:           outputState = 1;
170:           doOutput    = 1;
171:         } else if (outputState == 1) doOutput = 0;
172:         else if (outputState == 2) {
173:           outputState = 3;
174:           doOutput    = 1;
175:         } else if (outputState == 3) doOutput = 0;
176:         else PetscCheck(outputState != 4, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Tried to output POINT_DATA again after intervening CELL_DATA");

178:         if (doOutput) PetscCall(PetscViewerASCIIPrintf(viewer, "POINT_DATA %" PetscInt_FMT "\n", xin->map->N / bs));
179:       } else {
180:         if (outputState == 0) {
181:           outputState = 2;
182:           doOutput    = 1;
183:         } else if (outputState == 1) {
184:           outputState = 4;
185:           doOutput    = 1;
186:         } else if (outputState == 2) {
187:           doOutput = 0;
188:         } else {
189:           PetscCheck(outputState != 3, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Tried to output CELL_DATA again after intervening POINT_DATA");
190:           if (outputState == 4) doOutput = 0;
191:         }

193:         if (doOutput) PetscCall(PetscViewerASCIIPrintf(viewer, "CELL_DATA %" PetscInt_FMT "\n", xin->map->N / bs));
194:       }
195:       PetscCall(PetscObjectComposedDataSetInt((PetscObject)viewer, stateId, outputState));
196:       if (name) {
197:         if (bs == 3) {
198:           PetscCall(PetscViewerASCIIPrintf(viewer, "VECTORS %s double\n", name));
199:         } else {
200:           PetscCall(PetscViewerASCIIPrintf(viewer, "SCALARS %s double %" PetscInt_FMT "\n", name, bs));
201:         }
202:       } else {
203:         PetscCall(PetscViewerASCIIPrintf(viewer, "SCALARS scalars double %" PetscInt_FMT "\n", bs));
204:       }
205:       if (bs != 3) PetscCall(PetscViewerASCIIPrintf(viewer, "LOOKUP_TABLE default\n"));
206:       for (i = 0; i < n / bs; i++) {
207:         for (b = 0; b < bs; b++) {
208:           if (b > 0) PetscCall(PetscViewerASCIIPrintf(viewer, " "));
209:           PetscCall(PetscViewerASCIIPrintf(viewer, "%g", (double)PetscRealPart(xarray[i * bs + b])));
210:         }
211:         PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
212:       }
213:       for (j = 1; j < size; j++) {
214:         PetscCallMPI(MPI_Recv(values, (PetscMPIInt)len, MPIU_SCALAR, j, tag, PetscObjectComm((PetscObject)xin), &status));
215:         PetscCallMPI(MPI_Get_count(&status, MPIU_SCALAR, &n));
216:         for (i = 0; i < n / bs; i++) {
217:           for (b = 0; b < bs; b++) {
218:             if (b > 0) PetscCall(PetscViewerASCIIPrintf(viewer, " "));
219:             PetscCall(PetscViewerASCIIPrintf(viewer, "%g", (double)PetscRealPart(values[i * bs + b])));
220:           }
221:           PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
222:         }
223:       }
224:     } else if (format == PETSC_VIEWER_ASCII_VTK_COORDS_DEPRECATED) {
225:       PetscInt bs, b;

227:       PetscCall(VecGetLocalSize(xin, &nLen));
228:       PetscCall(PetscMPIIntCast(nLen, &n));
229:       PetscCall(VecGetBlockSize(xin, &bs));
230:       PetscCheck(bs >= 1 && bs <= 3, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "VTK can only handle 3D objects, but vector dimension is %" PetscInt_FMT, bs);

232:       for (i = 0; i < n / bs; i++) {
233:         for (b = 0; b < bs; b++) {
234:           if (b > 0) PetscCall(PetscViewerASCIIPrintf(viewer, " "));
235:           PetscCall(PetscViewerASCIIPrintf(viewer, "%g", (double)PetscRealPart(xarray[i * bs + b])));
236:         }
237:         for (b = bs; b < 3; b++) PetscCall(PetscViewerASCIIPrintf(viewer, " 0.0"));
238:         PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
239:       }
240:       for (j = 1; j < size; j++) {
241:         PetscCallMPI(MPI_Recv(values, (PetscMPIInt)len, MPIU_SCALAR, j, tag, PetscObjectComm((PetscObject)xin), &status));
242:         PetscCallMPI(MPI_Get_count(&status, MPIU_SCALAR, &n));
243:         for (i = 0; i < n / bs; i++) {
244:           for (b = 0; b < bs; b++) {
245:             if (b > 0) PetscCall(PetscViewerASCIIPrintf(viewer, " "));
246:             PetscCall(PetscViewerASCIIPrintf(viewer, "%g", (double)PetscRealPart(values[i * bs + b])));
247:           }
248:           for (b = bs; b < 3; b++) PetscCall(PetscViewerASCIIPrintf(viewer, " 0.0"));
249:           PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
250:         }
251:       }
252:     } else if (format == PETSC_VIEWER_ASCII_PCICE) {
253:       PetscInt bs, b, vertexCount = 1;

255:       PetscCall(VecGetLocalSize(xin, &nLen));
256:       PetscCall(PetscMPIIntCast(nLen, &n));
257:       PetscCall(VecGetBlockSize(xin, &bs));
258:       PetscCheck(bs >= 1 && bs <= 3, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "PCICE can only handle up to 3D objects, but vector dimension is %" PetscInt_FMT, bs);

260:       PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT "\n", xin->map->N / bs));
261:       for (i = 0; i < n / bs; i++) {
262:         PetscCall(PetscViewerASCIIPrintf(viewer, "%7" PetscInt_FMT "   ", vertexCount++));
263:         for (b = 0; b < bs; b++) {
264:           if (b > 0) PetscCall(PetscViewerASCIIPrintf(viewer, " "));
265: #if !defined(PETSC_USE_COMPLEX)
266:           PetscCall(PetscViewerASCIIPrintf(viewer, "% 12.5E", (double)xarray[i * bs + b]));
267: #endif
268:         }
269:         PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
270:       }
271:       for (j = 1; j < size; j++) {
272:         PetscCallMPI(MPI_Recv(values, (PetscMPIInt)len, MPIU_SCALAR, j, tag, PetscObjectComm((PetscObject)xin), &status));
273:         PetscCallMPI(MPI_Get_count(&status, MPIU_SCALAR, &n));
274:         for (i = 0; i < n / bs; i++) {
275:           PetscCall(PetscViewerASCIIPrintf(viewer, "%7" PetscInt_FMT "   ", vertexCount++));
276:           for (b = 0; b < bs; b++) {
277:             if (b > 0) PetscCall(PetscViewerASCIIPrintf(viewer, " "));
278: #if !defined(PETSC_USE_COMPLEX)
279:             PetscCall(PetscViewerASCIIPrintf(viewer, "% 12.5E", (double)values[i * bs + b]));
280: #endif
281:           }
282:           PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
283:         }
284:       }
285:     } else if (format == PETSC_VIEWER_ASCII_GLVIS) {
286:       /* GLVis ASCII visualization/dump: this function mimics mfem::GridFunction::Save() */
287:       const PetscScalar      *array;
288:       PetscInt                i, n, vdim, ordering = 1; /* mfem::FiniteElementSpace::Ordering::byVDIM */
289:       PetscContainer          glvis_container;
290:       PetscViewerGLVisVecInfo glvis_vec_info;
291:       PetscViewerGLVisInfo    glvis_info;

293:       /* mfem::FiniteElementSpace::Save() */
294:       PetscCall(VecGetBlockSize(xin, &vdim));
295:       PetscCall(PetscViewerASCIIPrintf(viewer, "FiniteElementSpace\n"));
296:       PetscCall(PetscObjectQuery((PetscObject)xin, "_glvis_info_container", (PetscObject *)&glvis_container));
297:       PetscCheck(glvis_container, PetscObjectComm((PetscObject)xin), PETSC_ERR_PLIB, "Missing GLVis container");
298:       PetscCall(PetscContainerGetPointer(glvis_container, (void **)&glvis_vec_info));
299:       PetscCall(PetscViewerASCIIPrintf(viewer, "%s\n", glvis_vec_info->fec_type));
300:       PetscCall(PetscViewerASCIIPrintf(viewer, "VDim: %" PetscInt_FMT "\n", vdim));
301:       PetscCall(PetscViewerASCIIPrintf(viewer, "Ordering: %" PetscInt_FMT "\n", ordering));
302:       PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
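      /*
         Illustration: assuming a hypothetical fec_type string "FiniteElementCollection: H1_3D_P1"
         and block size (vdim) 1, the header written above reads

           FiniteElementSpace
           FiniteElementCollection: H1_3D_P1
           VDim: 1
           Ordering: 1

         followed by one value per line from the loop below.
      */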
303:       /* mfem::Vector::Print() */
304:       PetscCall(PetscObjectQuery((PetscObject)viewer, "_glvis_info_container", (PetscObject *)&glvis_container));
305:       PetscCheck(glvis_container, PetscObjectComm((PetscObject)viewer), PETSC_ERR_PLIB, "Missing GLVis container");
306:       PetscCall(PetscContainerGetPointer(glvis_container, (void **)&glvis_info));
307:       if (glvis_info->enabled) {
308:         PetscCall(VecGetLocalSize(xin, &n));
309:         PetscCall(VecGetArrayRead(xin, &array));
310:         for (i = 0; i < n; i++) {
311:           PetscCall(PetscViewerASCIIPrintf(viewer, glvis_info->fmt, (double)PetscRealPart(array[i])));
312:           PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
313:         }
314:         PetscCall(VecRestoreArrayRead(xin, &array));
315:       }
316:     } else if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
317:       /* No info */
318:     } else {
319:       if (format != PETSC_VIEWER_ASCII_COMMON) PetscCall(PetscViewerASCIIPrintf(viewer, "Process [%d]\n", rank));
320:       cnt = 0;
321:       for (i = 0; i < xin->map->n; i++) {
322:         if (format == PETSC_VIEWER_ASCII_INDEX) PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT ": ", cnt++));
323: #if defined(PETSC_USE_COMPLEX)
324:         if (PetscImaginaryPart(xarray[i]) > 0.0) {
325:           PetscCall(PetscViewerASCIIPrintf(viewer, "%g + %g i\n", (double)PetscRealPart(xarray[i]), (double)PetscImaginaryPart(xarray[i])));
326:         } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
327:           PetscCall(PetscViewerASCIIPrintf(viewer, "%g - %g i\n", (double)PetscRealPart(xarray[i]), -(double)PetscImaginaryPart(xarray[i])));
328:         } else {
329:           PetscCall(PetscViewerASCIIPrintf(viewer, "%g\n", (double)PetscRealPart(xarray[i])));
330:         }
331: #else
332:         PetscCall(PetscViewerASCIIPrintf(viewer, "%g\n", (double)xarray[i]));
333: #endif
334:       }
335:       /* receive and print messages */
336:       for (j = 1; j < size; j++) {
337:         PetscCallMPI(MPI_Recv(values, (PetscMPIInt)len, MPIU_SCALAR, j, tag, PetscObjectComm((PetscObject)xin), &status));
338:         PetscCallMPI(MPI_Get_count(&status, MPIU_SCALAR, &n));
339:         if (format != PETSC_VIEWER_ASCII_COMMON) PetscCall(PetscViewerASCIIPrintf(viewer, "Process [%d]\n", j));
340:         for (i = 0; i < n; i++) {
341:           if (format == PETSC_VIEWER_ASCII_INDEX) PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT ": ", cnt++));
342: #if defined(PETSC_USE_COMPLEX)
343:           if (PetscImaginaryPart(values[i]) > 0.0) {
344:             PetscCall(PetscViewerASCIIPrintf(viewer, "%g + %g i\n", (double)PetscRealPart(values[i]), (double)PetscImaginaryPart(values[i])));
345:           } else if (PetscImaginaryPart(values[i]) < 0.0) {
346:             PetscCall(PetscViewerASCIIPrintf(viewer, "%g - %g i\n", (double)PetscRealPart(values[i]), -(double)PetscImaginaryPart(values[i])));
347:           } else {
348:             PetscCall(PetscViewerASCIIPrintf(viewer, "%g\n", (double)PetscRealPart(values[i])));
349:           }
350: #else
351:           PetscCall(PetscViewerASCIIPrintf(viewer, "%g\n", (double)values[i]));
352: #endif
353:         }
354:       }
355:     }
356:     PetscCall(PetscFree(values));
357:   } else {
358:     if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
359:       /* Rank 0 is not trying to receive anything, so don't send anything */
360:     } else {
361:       if (format == PETSC_VIEWER_ASCII_MATLAB || format == PETSC_VIEWER_ASCII_VTK_DEPRECATED || format == PETSC_VIEWER_ASCII_VTK_CELL_DEPRECATED) {
362:         /* this may be a collective operation so make sure everyone calls it */
363:         PetscCall(PetscObjectGetName((PetscObject)xin, &name));
364:       }
365:       PetscCallMPI(MPIU_Send((void *)xarray, xin->map->n, MPIU_SCALAR, 0, tag, PetscObjectComm((PetscObject)xin)));
366:     }
367:   }
368:   PetscCall(PetscViewerFlush(viewer));
369:   PetscCall(VecRestoreArrayRead(xin, &xarray));
370:   PetscFunctionReturn(PETSC_SUCCESS);
371: }

373: PetscErrorCode VecView_MPI_Binary(Vec xin, PetscViewer viewer)
374: {
375:   return VecView_Binary(xin, viewer);
376: }

378: #include <petscdraw.h>
379: PetscErrorCode VecView_MPI_Draw_LG(Vec xin, PetscViewer viewer)
380: {
381:   PetscDraw          draw;
382:   PetscBool          isnull;
383:   PetscDrawLG        lg;
384:   PetscMPIInt        i, size, rank, n, N, *lens = NULL, *disp = NULL;
385:   PetscReal         *values, *xx = NULL, *yy = NULL;
386:   const PetscScalar *xarray;
387:   int                colors[] = {PETSC_DRAW_RED};

389:   PetscFunctionBegin;
390:   PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
391:   PetscCall(PetscDrawIsNull(draw, &isnull));
392:   if (isnull) PetscFunctionReturn(PETSC_SUCCESS);
393:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)xin), &rank));
394:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)xin), &size));
395:   PetscCall(PetscMPIIntCast(xin->map->n, &n));
396:   PetscCall(PetscMPIIntCast(xin->map->N, &N));

398:   PetscCall(VecGetArrayRead(xin, &xarray));
399: #if defined(PETSC_USE_COMPLEX)
400:   PetscCall(PetscMalloc1(n + 1, &values));
401:   for (i = 0; i < n; i++) values[i] = PetscRealPart(xarray[i]);
402: #else
403:   values = (PetscReal *)xarray;
404: #endif
405:   if (rank == 0) {
406:     PetscCall(PetscMalloc2(N, &xx, N, &yy));
407:     for (i = 0; i < N; i++) xx[i] = (PetscReal)i;
408:     PetscCall(PetscMalloc2(size, &lens, size, &disp));
409:     for (i = 0; i < size; i++) lens[i] = (PetscMPIInt)xin->map->range[i + 1] - (PetscMPIInt)xin->map->range[i];
410:     for (i = 0; i < size; i++) disp[i] = (PetscMPIInt)xin->map->range[i];
411:   }
412:   PetscCallMPI(MPI_Gatherv(values, n, MPIU_REAL, yy, lens, disp, MPIU_REAL, 0, PetscObjectComm((PetscObject)xin)));
413:   PetscCall(PetscFree2(lens, disp));
414: #if defined(PETSC_USE_COMPLEX)
415:   PetscCall(PetscFree(values));
416: #endif
417:   PetscCall(VecRestoreArrayRead(xin, &xarray));

419:   PetscCall(PetscViewerDrawGetDrawLG(viewer, 0, &lg));
420:   PetscCall(PetscDrawLGReset(lg));
421:   PetscCall(PetscDrawLGSetDimension(lg, 1));
422:   PetscCall(PetscDrawLGSetColors(lg, colors));
423:   if (rank == 0) {
424:     PetscCall(PetscDrawLGAddPoints(lg, N, &xx, &yy));
425:     PetscCall(PetscFree2(xx, yy));
426:   }
427:   PetscCall(PetscDrawLGDraw(lg));
428:   PetscCall(PetscDrawLGSave(lg));
429:   PetscFunctionReturn(PETSC_SUCCESS);
430: }

432: PETSC_INTERN PetscErrorCode VecView_MPI_Draw(Vec xin, PetscViewer viewer)
433: {
434:   PetscMPIInt        rank, size, tag = ((PetscObject)viewer)->tag;
435:   PetscInt           i, start, end;
436:   MPI_Status         status;
437:   PetscReal          min, max, tmp = 0.0;
438:   PetscDraw          draw;
439:   PetscBool          isnull;
440:   PetscDrawAxis      axis;
441:   const PetscScalar *xarray;

443:   PetscFunctionBegin;
444:   PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
445:   PetscCall(PetscDrawIsNull(draw, &isnull));
446:   if (isnull) PetscFunctionReturn(PETSC_SUCCESS);
447:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)xin), &size));
448:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)xin), &rank));

450:   PetscCall(VecMin(xin, NULL, &min));
451:   PetscCall(VecMax(xin, NULL, &max));
452:   if (min == max) {
453:     min -= 1.e-5;
454:     max += 1.e-5;
455:   }

457:   PetscCall(PetscDrawCheckResizedWindow(draw));
458:   PetscCall(PetscDrawClear(draw));

460:   PetscCall(PetscDrawAxisCreate(draw, &axis));
461:   PetscCall(PetscDrawAxisSetLimits(axis, 0.0, (PetscReal)xin->map->N, min, max));
462:   PetscCall(PetscDrawAxisDraw(axis));
463:   PetscCall(PetscDrawAxisDestroy(&axis));

465:   /* draw local part of vector */
466:   PetscCall(VecGetArrayRead(xin, &xarray));
467:   PetscCall(VecGetOwnershipRange(xin, &start, &end));
468:   if (rank < size - 1) { /* send value to right */
469:     PetscCallMPI(MPI_Send((void *)&xarray[xin->map->n - 1], 1, MPIU_REAL, rank + 1, tag, PetscObjectComm((PetscObject)xin)));
470:   }
471:   if (rank) { /* receive value from left */
472:     PetscCallMPI(MPI_Recv(&tmp, 1, MPIU_REAL, rank - 1, tag, PetscObjectComm((PetscObject)xin), &status));
473:   }
474:   PetscDrawCollectiveBegin(draw);
475:   if (rank) PetscCall(PetscDrawLine(draw, (PetscReal)start - 1, tmp, (PetscReal)start, PetscRealPart(xarray[0]), PETSC_DRAW_RED));
476:   for (i = 1; i < xin->map->n; i++) PetscCall(PetscDrawLine(draw, (PetscReal)(i - 1 + start), PetscRealPart(xarray[i - 1]), (PetscReal)(i + start), PetscRealPart(xarray[i]), PETSC_DRAW_RED));
477:   PetscDrawCollectiveEnd(draw);
478:   PetscCall(VecRestoreArrayRead(xin, &xarray));

480:   PetscCall(PetscDrawFlush(draw));
481:   PetscCall(PetscDrawPause(draw));
482:   PetscCall(PetscDrawSave(draw));
483:   PetscFunctionReturn(PETSC_SUCCESS);
484: }

486: #if defined(PETSC_HAVE_MATLAB)
487: PetscErrorCode VecView_MPI_Matlab(Vec xin, PetscViewer viewer)
488: {
489:   PetscMPIInt        rank, size, *lens;
490:   PetscInt           i, N = xin->map->N;
491:   const PetscScalar *xarray;
492:   PetscScalar       *xx;

494:   PetscFunctionBegin;
495:   PetscCall(VecGetArrayRead(xin, &xarray));
496:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)xin), &rank));
497:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)xin), &size));
498:   if (rank == 0) {
499:     PetscCall(PetscMalloc1(N, &xx));
500:     PetscCall(PetscMalloc1(size, &lens));
501:     for (i = 0; i < size; i++) lens[i] = xin->map->range[i + 1] - xin->map->range[i];

503:     PetscCallMPI(MPI_Gatherv((void *)xarray, xin->map->n, MPIU_SCALAR, xx, lens, xin->map->range, MPIU_SCALAR, 0, PetscObjectComm((PetscObject)xin)));
504:     PetscCall(PetscFree(lens));

506:     PetscCall(PetscObjectName((PetscObject)xin));
507:     PetscCall(PetscViewerMatlabPutArray(viewer, N, 1, xx, ((PetscObject)xin)->name));

509:     PetscCall(PetscFree(xx));
510:   } else {
511:     PetscCallMPI(MPI_Gatherv((void *)xarray, xin->map->n, MPIU_SCALAR, 0, 0, 0, MPIU_SCALAR, 0, PetscObjectComm((PetscObject)xin)));
512:   }
513:   PetscCall(VecRestoreArrayRead(xin, &xarray));
514:   PetscFunctionReturn(PETSC_SUCCESS);
515: }
516: #endif

518: #if defined(PETSC_HAVE_ADIOS)
519:   #include <adios.h>
520:   #include <adios_read.h>
521: #include <petsc/private/vieweradiosimpl.h>
522: #include <petsc/private/viewerimpl.h>

524: PetscErrorCode VecView_MPI_ADIOS(Vec xin, PetscViewer viewer)
525: {
526:   PetscViewer_ADIOS *adios = (PetscViewer_ADIOS *)viewer->data;
527:   const char        *vecname;
528:   int64_t            id;
529:   PetscInt           n, N, rstart;
530:   const PetscScalar *array;
531:   char               nglobalname[16], nlocalname[16], coffset[16];

533:   PetscFunctionBegin;
534:   PetscCall(PetscObjectGetName((PetscObject)xin, &vecname));

536:   PetscCall(VecGetLocalSize(xin, &n));
537:   PetscCall(VecGetSize(xin, &N));
538:   PetscCall(VecGetOwnershipRange(xin, &rstart, NULL));

540:   PetscCall(PetscSNPrintf(nlocalname, PETSC_STATIC_ARRAY_LENGTH(nlocalname), "%" PetscInt_FMT, n));
541:   PetscCall(PetscSNPrintf(nglobalname, PETSC_STATIC_ARRAY_LENGTH(nglobalname), "%" PetscInt_FMT, N));
542:   PetscCall(PetscSNPrintf(coffset, PETSC_STATIC_ARRAY_LENGTH(coffset), "%" PetscInt_FMT, rstart));
543:   id = adios_define_var(Petsc_adios_group, vecname, "", adios_double, nlocalname, nglobalname, coffset);
544:   PetscCall(VecGetArrayRead(xin, &array));
545:   PetscCallExternal(adios_write_byid, adios->adios_handle, id, array);
546:   PetscCall(VecRestoreArrayRead(xin, &array));
547:   PetscFunctionReturn(PETSC_SUCCESS);
548: }
549: #endif

551: #if defined(PETSC_HAVE_HDF5)
552: PetscErrorCode VecView_MPI_HDF5(Vec xin, PetscViewer viewer)
553: {
554:   PetscViewer_HDF5 *hdf5 = (PetscViewer_HDF5 *)viewer->data;
555:   /* TODO: It looks like we can remove the H5Sclose(filespace) and H5Dget_space(dset_id). Why do we do this? */
556:   hid_t              filespace;  /* file dataspace identifier */
557:   hid_t              chunkspace; /* chunk dataset property identifier */
558:   hid_t              dset_id;    /* dataset identifier */
559:   hid_t              memspace;   /* memory dataspace identifier */
560:   hid_t              file_id;
561:   hid_t              group;
562:   hid_t              memscalartype;  /* scalar type for mem (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
563:   hid_t              filescalartype; /* scalar type for file (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
564:   PetscInt           bs = PetscAbs(xin->map->bs);
565:   hsize_t            dim;
566:   hsize_t            maxDims[4], dims[4], chunkDims[4], count[4], offset[4];
567:   PetscBool          timestepping, dim2, spoutput;
568:   PetscInt           timestep = PETSC_INT_MIN, low;
569:   hsize_t            chunksize;
570:   const PetscScalar *x;
571:   const char        *vecname;
572:   size_t             len;

574:   PetscFunctionBegin;
575:   PetscCall(PetscViewerHDF5OpenGroup(viewer, NULL, &file_id, &group));
576:   PetscCall(PetscViewerHDF5IsTimestepping(viewer, &timestepping));
577:   if (timestepping) PetscCall(PetscViewerHDF5GetTimestep(viewer, &timestep));
578:   PetscCall(PetscViewerHDF5GetBaseDimension2(viewer, &dim2));
579:   PetscCall(PetscViewerHDF5GetSPOutput(viewer, &spoutput));

581:   /* Create the dataspace for the dataset.
582:    *
583:    * dims - holds the current dimensions of the dataset
584:    *
585:    * maxDims - holds the maximum dimensions of the dataset (unlimited
586:    * for the number of time steps with the current dimensions for the
587:    * other dimensions; so only additional time steps can be added).
588:    *
589:    * chunkDims - holds the size of a single time step (required to
590:    * permit extending dataset).
591:    */
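  /*
     Worked example (illustrative, real scalars assumed): for a vector with global size
     N = 100, block size bs = 2 and timestep = 3, the code below produces
       dims      = {4, 50, 2}            (timestep + 1, N / bs, bs)
       maxDims   = {H5S_UNLIMITED, 50, 2}
       chunkDims = {1, 50, 2}
     so the dataset can only grow in the time dimension.
  */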
592:   dim       = 0;
593:   chunksize = 1;
594:   if (timestep >= 0) {
595:     dims[dim]      = timestep + 1;
596:     maxDims[dim]   = H5S_UNLIMITED;
597:     chunkDims[dim] = 1;
598:     ++dim;
599:   }
600:   PetscCall(PetscHDF5IntCast(xin->map->N / bs, dims + dim));

602:   maxDims[dim]   = dims[dim];
603:   chunkDims[dim] = PetscMax(1, dims[dim]);
604:   chunksize *= chunkDims[dim];
605:   ++dim;
606:   if (bs > 1 || dim2) {
607:     dims[dim]      = bs;
608:     maxDims[dim]   = dims[dim];
609:     chunkDims[dim] = PetscMax(1, dims[dim]);
610:     chunksize *= chunkDims[dim];
611:     ++dim;
612:   }
613:   #if defined(PETSC_USE_COMPLEX)
614:   dims[dim]      = 2;
615:   maxDims[dim]   = dims[dim];
616:   chunkDims[dim] = PetscMax(1, dims[dim]);
617:   chunksize *= chunkDims[dim];
618:   /* hdf5 chunks must be less than 4GB */
619:   if (chunksize > PETSC_HDF5_MAX_CHUNKSIZE / 64) {
620:     if (bs > 1 || dim2) {
621:       if (chunkDims[dim - 2] > (hsize_t)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE / 128))) chunkDims[dim - 2] = (hsize_t)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE / 128));
622:       if (chunkDims[dim - 1] > (hsize_t)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE / 128))) chunkDims[dim - 1] = (hsize_t)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE / 128));
623:     } else {
624:       chunkDims[dim - 1] = PETSC_HDF5_MAX_CHUNKSIZE / 128;
625:     }
626:   }
627:   ++dim;
628:   #else
629:   /* hdf5 chunks must be less than 4GB */
630:   if (chunksize > PETSC_HDF5_MAX_CHUNKSIZE / 64) {
631:     if (bs > 1 || dim2) {
632:       if (chunkDims[dim - 2] > (hsize_t)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE / 64))) chunkDims[dim - 2] = (hsize_t)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE / 64));
633:       if (chunkDims[dim - 1] > (hsize_t)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE / 64))) chunkDims[dim - 1] = (hsize_t)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE / 64));
634:     } else {
635:       chunkDims[dim - 1] = PETSC_HDF5_MAX_CHUNKSIZE / 64;
636:     }
637:   }
638:   #endif

640:   PetscCallHDF5Return(filespace, H5Screate_simple, ((int)dim, dims, maxDims));

642:   #if defined(PETSC_USE_REAL_SINGLE)
643:   memscalartype  = H5T_NATIVE_FLOAT;
644:   filescalartype = H5T_NATIVE_FLOAT;
645:   #elif defined(PETSC_USE_REAL___FLOAT128)
646:     #error "HDF5 output with 128 bit floats not supported."
647:   #elif defined(PETSC_USE_REAL___FP16)
648:     #error "HDF5 output with 16 bit floats not supported."
649:   #else
650:   memscalartype = H5T_NATIVE_DOUBLE;
651:   if (spoutput == PETSC_TRUE) filescalartype = H5T_NATIVE_FLOAT;
652:   else filescalartype = H5T_NATIVE_DOUBLE;
653:   #endif

655:   /* Create the dataset with default properties and close filespace */
656:   PetscCall(PetscObjectGetName((PetscObject)xin, &vecname));
657:   PetscCall(PetscStrlen(vecname, &len));
658:   PetscCheck(len, PetscObjectComm((PetscObject)viewer), PETSC_ERR_ARG_WRONG, "Object must be named");
659:   if (H5Lexists(group, vecname, H5P_DEFAULT) < 1) {
660:     /* Create chunk */
661:     PetscCallHDF5Return(chunkspace, H5Pcreate, (H5P_DATASET_CREATE));
662:     PetscCallHDF5(H5Pset_chunk, (chunkspace, (int)dim, chunkDims));

664:     PetscCallHDF5Return(dset_id, H5Dcreate2, (group, vecname, filescalartype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT));
665:     PetscCallHDF5(H5Pclose, (chunkspace));
666:   } else {
667:     PetscCallHDF5Return(dset_id, H5Dopen2, (group, vecname, H5P_DEFAULT));
668:     PetscCallHDF5(H5Dset_extent, (dset_id, dims));
669:   }
670:   PetscCallHDF5(H5Sclose, (filespace));

672:   /* Each process defines a dataset and writes it to the hyperslab in the file */
673:   dim = 0;
674:   if (timestep >= 0) {
675:     count[dim] = 1;
676:     ++dim;
677:   }
678:   PetscCall(PetscHDF5IntCast(xin->map->n / bs, count + dim));
679:   ++dim;
680:   if (bs > 1 || dim2) {
681:     count[dim] = bs;
682:     ++dim;
683:   }
684:   #if defined(PETSC_USE_COMPLEX)
685:   count[dim] = 2;
686:   ++dim;
687:   #endif
688:   if (xin->map->n > 0 || H5_VERSION_GE(1, 10, 0)) {
689:     PetscCallHDF5Return(memspace, H5Screate_simple, ((int)dim, count, NULL));
690:   } else {
691:     /* Can't create dataspace with zero for any dimension, so create null dataspace. */
692:     PetscCallHDF5Return(memspace, H5Screate, (H5S_NULL));
693:   }

695:   /* Select hyperslab in the file */
696:   PetscCall(VecGetOwnershipRange(xin, &low, NULL));
697:   dim = 0;
698:   if (timestep >= 0) {
699:     offset[dim] = timestep;
700:     ++dim;
701:   }
702:   PetscCall(PetscHDF5IntCast(low / bs, offset + dim));
703:   ++dim;
704:   if (bs > 1 || dim2) {
705:     offset[dim] = 0;
706:     ++dim;
707:   }
708:   #if defined(PETSC_USE_COMPLEX)
709:   offset[dim] = 0;
710:   ++dim;
711:   #endif
712:   if (xin->map->n > 0 || H5_VERSION_GE(1, 10, 0)) {
713:     PetscCallHDF5Return(filespace, H5Dget_space, (dset_id));
714:     PetscCallHDF5(H5Sselect_hyperslab, (filespace, H5S_SELECT_SET, offset, NULL, count, NULL));
715:   } else {
716:     /* Create null filespace to match null memspace. */
717:     PetscCallHDF5Return(filespace, H5Screate, (H5S_NULL));
718:   }

720:   PetscCall(VecGetArrayRead(xin, &x));
721:   PetscCallHDF5(H5Dwrite, (dset_id, memscalartype, memspace, filespace, hdf5->dxpl_id, x));
722:   PetscCallHDF5(H5Fflush, (file_id, H5F_SCOPE_GLOBAL));
723:   PetscCall(VecRestoreArrayRead(xin, &x));

725:   /* Close/release resources */
726:   PetscCallHDF5(H5Gclose, (group));
727:   PetscCallHDF5(H5Sclose, (filespace));
728:   PetscCallHDF5(H5Sclose, (memspace));
729:   PetscCallHDF5(H5Dclose, (dset_id));

731:   #if defined(PETSC_USE_COMPLEX)
732:   {
733:     PetscBool tru = PETSC_TRUE;
734:     PetscCall(PetscViewerHDF5WriteObjectAttribute(viewer, (PetscObject)xin, "complex", PETSC_BOOL, &tru));
735:   }
736:   #endif
737:   if (timestepping) PetscCall(PetscViewerHDF5WriteObjectAttribute(viewer, (PetscObject)xin, "timestepping", PETSC_BOOL, &timestepping));
738:   PetscCall(PetscInfo(xin, "Wrote Vec object with name %s\n", vecname));
739:   PetscFunctionReturn(PETSC_SUCCESS);
740: }
741: #endif

743: PETSC_EXTERN PetscErrorCode VecView_MPI(Vec xin, PetscViewer viewer)
744: {
745:   PetscBool iascii, isbinary, isdraw;
746: #if defined(PETSC_HAVE_MATHEMATICA)
747:   PetscBool ismathematica;
748: #endif
749: #if defined(PETSC_HAVE_HDF5)
750:   PetscBool ishdf5;
751: #endif
752: #if defined(PETSC_HAVE_MATLAB)
753:   PetscBool ismatlab;
754: #endif
755: #if defined(PETSC_HAVE_ADIOS)
756:   PetscBool isadios;
757: #endif
758:   PetscBool isglvis;

760:   PetscFunctionBegin;
761:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
762:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
763:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
764: #if defined(PETSC_HAVE_MATHEMATICA)
765:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERMATHEMATICA, &ismathematica));
766: #endif
767: #if defined(PETSC_HAVE_HDF5)
768:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERHDF5, &ishdf5));
769: #endif
770: #if defined(PETSC_HAVE_MATLAB)
771:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERMATLAB, &ismatlab));
772: #endif
773:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERGLVIS, &isglvis));
774: #if defined(PETSC_HAVE_ADIOS)
775:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERADIOS, &isadios));
776: #endif
777:   if (iascii) {
778:     PetscCall(VecView_MPI_ASCII(xin, viewer));
779:   } else if (isbinary) {
780:     PetscCall(VecView_MPI_Binary(xin, viewer));
781:   } else if (isdraw) {
782:     PetscViewerFormat format;
783:     PetscCall(PetscViewerGetFormat(viewer, &format));
784:     if (format == PETSC_VIEWER_DRAW_LG) {
785:       PetscCall(VecView_MPI_Draw_LG(xin, viewer));
786:     } else {
787:       PetscCall(VecView_MPI_Draw(xin, viewer));
788:     }
789: #if defined(PETSC_HAVE_MATHEMATICA)
790:   } else if (ismathematica) {
791:     PetscCall(PetscViewerMathematicaPutVector(viewer, xin));
792: #endif
793: #if defined(PETSC_HAVE_HDF5)
794:   } else if (ishdf5) {
795:     PetscCall(VecView_MPI_HDF5(xin, viewer));
796: #endif
797: #if defined(PETSC_HAVE_ADIOS)
798:   } else if (isadios) {
799:     PetscCall(VecView_MPI_ADIOS(xin, viewer));
800: #endif
801: #if defined(PETSC_HAVE_MATLAB)
802:   } else if (ismatlab) {
803:     PetscCall(VecView_MPI_Matlab(xin, viewer));
804: #endif
805:   } else if (isglvis) PetscCall(VecView_GLVis(xin, viewer));
806:   PetscFunctionReturn(PETSC_SUCCESS);
807: }

809: PetscErrorCode VecGetSize_MPI(Vec xin, PetscInt *N)
810: {
811:   PetscFunctionBegin;
812:   *N = xin->map->N;
813:   PetscFunctionReturn(PETSC_SUCCESS);
814: }

816: PetscErrorCode VecGetValues_MPI(Vec xin, PetscInt ni, const PetscInt ix[], PetscScalar y[])
817: {
818:   const PetscScalar *xx;
819:   const PetscInt     start = xin->map->range[xin->stash.rank];

821:   PetscFunctionBegin;
822:   PetscCall(VecGetArrayRead(xin, &xx));
823:   for (PetscInt i = 0; i < ni; i++) {
824:     if (xin->stash.ignorenegidx && ix[i] < 0) continue;
825:     const PetscInt tmp = ix[i] - start;

827:     PetscCheck(tmp >= 0 && tmp < xin->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Can only get local values, trying %" PetscInt_FMT, ix[i]);
828:     y[i] = xx[tmp];
829:   }
830:   PetscCall(VecRestoreArrayRead(xin, &xx));
831:   PetscFunctionReturn(PETSC_SUCCESS);
832: }

834: PetscErrorCode VecSetValues_MPI(Vec xin, PetscInt ni, const PetscInt ix[], const PetscScalar y[], InsertMode addv)
835: {
836:   const PetscBool   ignorenegidx = xin->stash.ignorenegidx;
837:   const PetscBool   donotstash   = xin->stash.donotstash;
838:   const PetscMPIInt rank         = xin->stash.rank;
839:   const PetscInt   *owners       = xin->map->range;
840:   const PetscInt    start = owners[rank], end = owners[rank + 1];
841:   PetscScalar      *xx;

843:   PetscFunctionBegin;
844:   if (PetscDefined(USE_DEBUG)) {
845:     PetscCheck(xin->stash.insertmode != INSERT_VALUES || addv != ADD_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "You have already inserted values; you cannot now add");
846:     PetscCheck(xin->stash.insertmode != ADD_VALUES || addv != INSERT_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "You have already added values; you cannot now insert");
847:   }
848:   PetscCall(VecGetArray(xin, &xx));
849:   xin->stash.insertmode = addv;
850:   for (PetscInt i = 0; i < ni; ++i) {
851:     PetscInt row;

853:     if (ignorenegidx && ix[i] < 0) continue;
854:     PetscCheck(ix[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Out of range index value %" PetscInt_FMT " cannot be negative", ix[i]);
855:     if ((row = ix[i]) >= start && row < end) {
856:       if (addv == INSERT_VALUES) {
857:         xx[row - start] = y[i];
858:       } else {
859:         xx[row - start] += y[i];
860:       }
861:     } else if (!donotstash) {
862:       PetscCheck(ix[i] < xin->map->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Out of range index value %" PetscInt_FMT ", should be less than %" PetscInt_FMT, ix[i], xin->map->N);
863:       PetscCall(VecStashValue_Private(&xin->stash, row, y[i]));
864:     }
865:   }
866:   PetscCall(VecRestoreArray(xin, &xx));
867:   PetscFunctionReturn(PETSC_SUCCESS);
868: }
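/*
   Minimal usage sketch (illustrative; assumes x was created with VecCreateMPI() or similar,
   error checking of the setup omitted) for the stash-based assembly implemented by
   VecSetValues_MPI() above and VecAssemblyBegin/End_MPI() further below; off-process
   entries are stashed and only become visible after assembly completes:

     PetscInt    idx[]  = {0, 57};
     PetscScalar vals[] = {1.0, 2.0};
     PetscCall(VecSetValues(x, 2, idx, vals, ADD_VALUES));
     PetscCall(VecAssemblyBegin(x));  // sends stashed entries to their owning ranks
     PetscCall(VecAssemblyEnd(x));    // receives and applies them
*/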

870: PetscErrorCode VecSetValuesBlocked_MPI(Vec xin, PetscInt ni, const PetscInt ix[], const PetscScalar yin[], InsertMode addv)
871: {
872:   PetscMPIInt  rank   = xin->stash.rank;
873:   PetscInt    *owners = xin->map->range, start = owners[rank];
874:   PetscInt     end = owners[rank + 1], i, row, bs = PetscAbs(xin->map->bs), j;
875:   PetscScalar *xx, *y = (PetscScalar *)yin;

877:   PetscFunctionBegin;
878:   PetscCall(VecGetArray(xin, &xx));
879:   if (PetscDefined(USE_DEBUG)) {
880:     PetscCheck(xin->stash.insertmode != INSERT_VALUES || addv != ADD_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "You have already inserted values; you cannot now add");
881:     PetscCheck(xin->stash.insertmode != ADD_VALUES || addv != INSERT_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "You have already added values; you cannot now insert");
882:   }
883:   xin->stash.insertmode = addv;

885:   if (addv == INSERT_VALUES) {
886:     for (i = 0; i < ni; i++) {
887:       if ((row = bs * ix[i]) >= start && row < end) {
888:         for (j = 0; j < bs; j++) xx[row - start + j] = y[j];
889:       } else if (!xin->stash.donotstash) {
890:         if (ix[i] < 0) {
891:           y += bs;
892:           continue;
893:         }
894:         PetscCheck(ix[i] < xin->map->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Out of range index value %" PetscInt_FMT ", should be less than %" PetscInt_FMT, ix[i], xin->map->N);
895:         PetscCall(VecStashValuesBlocked_Private(&xin->bstash, ix[i], y));
896:       }
897:       y += bs;
898:     }
899:   } else {
900:     for (i = 0; i < ni; i++) {
901:       if ((row = bs * ix[i]) >= start && row < end) {
902:         for (j = 0; j < bs; j++) xx[row - start + j] += y[j];
903:       } else if (!xin->stash.donotstash) {
904:         if (ix[i] < 0) {
905:           y += bs;
906:           continue;
907:         }
908:         PetscCheck(ix[i] < xin->map->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Out of range index value %" PetscInt_FMT ", should be less than %" PetscInt_FMT, ix[i], xin->map->N);
909:         PetscCall(VecStashValuesBlocked_Private(&xin->bstash, ix[i], y));
910:       }
911:       y += bs;
912:     }
913:   }
914:   PetscCall(VecRestoreArray(xin, &xx));
915:   PetscFunctionReturn(PETSC_SUCCESS);
916: }

918: /*
919:    Since nsends or nreceives may be zero we add 1 in certain mallocs
920:    to make sure we never malloc an empty one.
921: */
922: PetscErrorCode VecAssemblyBegin_MPI(Vec xin)
923: {
924:   PetscInt   *owners = xin->map->range, *bowners, i, bs, nstash, reallocs;
925:   PetscMPIInt size;
926:   InsertMode  addv;
927:   MPI_Comm    comm;

929:   PetscFunctionBegin;
930:   PetscCall(PetscObjectGetComm((PetscObject)xin, &comm));
931:   if (xin->stash.donotstash) PetscFunctionReturn(PETSC_SUCCESS);

933:   PetscCallMPI(MPIU_Allreduce((PetscEnum *)&xin->stash.insertmode, (PetscEnum *)&addv, 1, MPIU_ENUM, MPI_BOR, comm));
934:   PetscCheck(addv != (ADD_VALUES | INSERT_VALUES), comm, PETSC_ERR_ARG_NOTSAMETYPE, "Some processors inserted values while others added");
935:   xin->stash.insertmode  = addv; /* in case this processor had no cache */
936:   xin->bstash.insertmode = addv; /* Block stash implicitly tracks InsertMode of scalar stash */

938:   PetscCall(VecGetBlockSize(xin, &bs));
939:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)xin), &size));
940:   if (!xin->bstash.bowners && xin->map->bs != -1) {
941:     PetscCall(PetscMalloc1(size + 1, &bowners));
942:     for (i = 0; i < size + 1; i++) bowners[i] = owners[i] / bs;
943:     xin->bstash.bowners = bowners;
944:   } else bowners = xin->bstash.bowners;

946:   PetscCall(VecStashScatterBegin_Private(&xin->stash, owners));
947:   PetscCall(VecStashScatterBegin_Private(&xin->bstash, bowners));
948:   PetscCall(VecStashGetInfo_Private(&xin->stash, &nstash, &reallocs));
949:   PetscCall(PetscInfo(xin, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
950:   PetscCall(VecStashGetInfo_Private(&xin->bstash, &nstash, &reallocs));
951:   PetscCall(PetscInfo(xin, "Block-Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
952:   PetscFunctionReturn(PETSC_SUCCESS);
953: }

955: PetscErrorCode VecAssemblyEnd_MPI(Vec vec)
956: {
957:   PetscInt     base, i, j, *row, flg, bs;
958:   PetscMPIInt  n;
959:   PetscScalar *val, *vv, *array, *xarray;

961:   PetscFunctionBegin;
962:   if (!vec->stash.donotstash) {
963:     PetscCall(VecGetArray(vec, &xarray));
964:     PetscCall(VecGetBlockSize(vec, &bs));
965:     base = vec->map->range[vec->stash.rank];

967:     /* Process the stash */
968:     while (1) {
969:       PetscCall(VecStashScatterGetMesg_Private(&vec->stash, &n, &row, &val, &flg));
970:       if (!flg) break;
971:       if (vec->stash.insertmode == ADD_VALUES) {
972:         for (i = 0; i < n; i++) xarray[row[i] - base] += val[i];
973:       } else if (vec->stash.insertmode == INSERT_VALUES) {
974:         for (i = 0; i < n; i++) xarray[row[i] - base] = val[i];
975:       } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_CORRUPT, "Insert mode is not set correctly; corrupted vector");
976:     }
977:     PetscCall(VecStashScatterEnd_Private(&vec->stash));

979:     /* now process the block-stash */
980:     while (1) {
981:       PetscCall(VecStashScatterGetMesg_Private(&vec->bstash, &n, &row, &val, &flg));
982:       if (!flg) break;
983:       for (i = 0; i < n; i++) {
984:         array = xarray + row[i] * bs - base;
985:         vv    = val + i * bs;
986:         if (vec->stash.insertmode == ADD_VALUES) {
987:           for (j = 0; j < bs; j++) array[j] += vv[j];
988:         } else if (vec->stash.insertmode == INSERT_VALUES) {
989:           for (j = 0; j < bs; j++) array[j] = vv[j];
990:         } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_CORRUPT, "Insert mode is not set correctly; corrupted vector");
991:       }
992:     }
993:     PetscCall(VecStashScatterEnd_Private(&vec->bstash));
994:     PetscCall(VecRestoreArray(vec, &xarray));
995:   }
996:   vec->stash.insertmode = NOT_SET_VALUES;
997:   PetscFunctionReturn(PETSC_SUCCESS);
998: }

1000: PetscErrorCode VecSetPreallocationCOO_MPI(Vec x, PetscCount coo_n, const PetscInt coo_i[])
1001: {
1002:   PetscInt    m, M, rstart, rend;
1003:   Vec_MPI    *vmpi = (Vec_MPI *)x->data;
1004:   PetscCount  k, p, q, rem; /* Loop variables over coo arrays */
1005:   PetscMPIInt size;
1006:   MPI_Comm    comm;

1008:   PetscFunctionBegin;
1009:   PetscCall(PetscObjectGetComm((PetscObject)x, &comm));
1010:   PetscCallMPI(MPI_Comm_size(comm, &size));
1011:   PetscCall(VecResetPreallocationCOO_MPI(x));

1013:   PetscCall(PetscLayoutSetUp(x->map));
1014:   PetscCall(VecGetOwnershipRange(x, &rstart, &rend));
1015:   PetscCall(VecGetLocalSize(x, &m));
1016:   PetscCall(VecGetSize(x, &M));

1018:   /* ---------------------------------------------------------------------------*/
1019:   /* Sort COOs along with a permutation array, so that negative indices come    */
1020:   /* first, then local ones, then remote ones.                                  */
1021:   /* ---------------------------------------------------------------------------*/
1022:   PetscCount n1 = coo_n, nneg, *perm;
1023:   PetscInt  *i1; /* Copy of input COOs along with a permutation array */
1024:   PetscCall(PetscMalloc1(n1, &i1));
1025:   PetscCall(PetscMalloc1(n1, &perm));
1026:   PetscCall(PetscArraycpy(i1, coo_i, n1)); /* Make a copy since we'll modify it */
1027:   for (k = 0; k < n1; k++) perm[k] = k;

1029:   /* Manipulate i1[] so that entries with negative indices will have the smallest
1030:      index, local entries will have greater but negative indices, and remote entries
1031:      will have positive indices.
1032:   */
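  /*
     Worked example (hypothetical values): with rstart = 10, rend = 20, M = 40 and
     i1 = {-1, 12, 35}, the loop below maps
       -1 -> PETSC_INT_MIN        (ignored, assuming VEC_IGNORE_NEGATIVE_INDICES is set)
       12 -> 12 - PETSC_INT_MAX   (local, shifted negative)
       35 -> 35                   (remote, left positive)
     so that after sorting the ignored, local and remote entries appear in that order.
  */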
1033:   for (k = 0; k < n1; k++) {
1034:     if (i1[k] < 0) {
1035:       if (x->stash.ignorenegidx) i1[k] = PETSC_INT_MIN; /* e.g., -2^31, minimal to move them ahead */
1036:       else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Found a negative index in VecSetPreallocationCOO() but VEC_IGNORE_NEGATIVE_INDICES was not set");
1037:     } else if (i1[k] >= rstart && i1[k] < rend) {
1038:       i1[k] -= PETSC_INT_MAX; /* e.g., minus 2^31-1 to shift local rows to range of [-PETSC_INT_MAX, -1] */
1039:     } else {
1040:       PetscCheck(i1[k] < M, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Found index %" PetscInt_FMT " in VecSetPreallocationCOO() larger than the global size %" PetscInt_FMT, i1[k], M);
1041:       if (x->stash.donotstash) i1[k] = PETSC_INT_MIN; /* Ignore off-proc indices as if they were negative */
1042:     }
1043:   }

1045:   /* Sort the indices, after that, [0,nneg) have ignored entries, [nneg,rem) have local entries and [rem,n1) have remote entries */
1046:   PetscCall(PetscSortIntWithCountArray(n1, i1, perm));
1047:   for (k = 0; k < n1; k++) {
1048:     if (i1[k] > PETSC_INT_MIN) break;
1049:   } /* Advance k to the first entry we need to take care of */
1050:   nneg = k;
1051:   PetscCall(PetscSortedIntUpperBound(i1, nneg, n1, rend - 1 - PETSC_INT_MAX, &rem)); /* rem is upper bound of the last local row */
1052:   for (k = nneg; k < rem; k++) i1[k] += PETSC_INT_MAX;                               /* Revert indices of local entries */

1054:   /* ---------------------------------------------------------------------------*/
1055:   /*           Build stuff for local entries                                    */
1056:   /* ---------------------------------------------------------------------------*/
1057:   PetscCount tot1, *jmap1, *perm1;
1058:   PetscCall(PetscCalloc1(m + 1, &jmap1));
1059:   for (k = nneg; k < rem; k++) jmap1[i1[k] - rstart + 1]++; /* Count repeats of each local entry */
1060:   for (k = 0; k < m; k++) jmap1[k + 1] += jmap1[k];         /* Transform jmap1[] to CSR-like data structure */
1061:   tot1 = jmap1[m];
1062:   PetscAssert(tot1 == rem - nneg, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected errors in VecSetPreallocationCOO_MPI");
1063:   PetscCall(PetscMalloc1(tot1, &perm1));
1064:   PetscCall(PetscArraycpy(perm1, perm + nneg, tot1));
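  /*
     Illustration (hypothetical values): with rstart = 0, m = 3 and local entries
     i1[nneg..rem) = {0, 0, 2}, the loops above give jmap1 = {0, 2, 2, 3}: local index 0
     is repeated twice, index 1 never appears and index 2 appears once; perm1[] records
     where each of those tot1 = 3 repeats sits in the caller's coo_i[]/v[] arrays.
  */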

1066:   /* ---------------------------------------------------------------------------*/
1067:   /*        Record the permutation array for filling the send buffer            */
1068:   /* ---------------------------------------------------------------------------*/
1069:   PetscCount *Cperm;
1070:   PetscCall(PetscMalloc1(n1 - rem, &Cperm));
1071:   PetscCall(PetscArraycpy(Cperm, perm + rem, n1 - rem));
1072:   PetscCall(PetscFree(perm));

1074:   /* ---------------------------------------------------------------------------*/
1075:   /*           Send remote entries to their owner                                  */
1076:   /* ---------------------------------------------------------------------------*/
1077:   /* Find which entries should be sent to which remote ranks*/
1078:   PetscInt        nsend = 0; /* Number of MPI ranks to send data to */
1079:   PetscMPIInt    *sendto;    /* [nsend], storing remote ranks */
1080:   PetscInt       *nentries;  /* [nsend], storing number of entries sent to remote ranks; Assume PetscInt is big enough for this count, and error if not */
1081:   const PetscInt *ranges;
1082:   PetscInt        maxNsend = size >= 128 ? 128 : size; /* Assume max 128 neighbors; realloc when needed */

1084:   PetscCall(PetscLayoutGetRanges(x->map, &ranges));
1085:   PetscCall(PetscMalloc2(maxNsend, &sendto, maxNsend, &nentries));
1086:   for (k = rem; k < n1;) {
1087:     PetscMPIInt owner;
1088:     PetscInt    firstRow, lastRow;

1090:     /* Locate a row range */
1091:     firstRow = i1[k]; /* first row of this owner */
1092:     PetscCall(PetscLayoutFindOwner(x->map, firstRow, &owner));
1093:     lastRow = ranges[owner + 1] - 1; /* last row of this owner */

1095:     /* Find the first index 'p' in [k,n1) with i1[p] belonging to the next owner */
1096:     PetscCall(PetscSortedIntUpperBound(i1, k, n1, lastRow, &p));

1098:     /* All entries in [k,p) belong to this remote owner */
1099:     if (nsend >= maxNsend) { /* Double the remote ranks arrays if not long enough */
1100:       PetscMPIInt *sendto2;
1101:       PetscInt    *nentries2;
1102:       PetscInt     maxNsend2 = (maxNsend <= size / 2) ? maxNsend * 2 : size;

1104:       PetscCall(PetscMalloc2(maxNsend2, &sendto2, maxNsend2, &nentries2));
1105:       PetscCall(PetscArraycpy(sendto2, sendto, maxNsend));
1106:       PetscCall(PetscArraycpy(nentries2, nentries, maxNsend));
1107:       PetscCall(PetscFree2(sendto, nentries));
1108:       sendto   = sendto2;
1109:       nentries = nentries2;
1110:       maxNsend = maxNsend2;
1111:     }
1112:     sendto[nsend] = owner;
1113:     PetscCall(PetscIntCast(p - k, &nentries[nsend]));
1114:     nsend++;
1115:     k = p;
1116:   }

1118:   /* Build 1st SF to know offsets on remote to send data */
1119:   PetscSF      sf1;
1120:   PetscInt     nroots = 1, nroots2 = 0;
1121:   PetscInt     nleaves = nsend, nleaves2 = 0;
1122:   PetscInt    *offsets;
1123:   PetscSFNode *iremote;

1125:   PetscCall(PetscSFCreate(comm, &sf1));
1126:   PetscCall(PetscMalloc1(nsend, &iremote));
1127:   PetscCall(PetscMalloc1(nsend, &offsets));
1128:   for (k = 0; k < nsend; k++) {
1129:     iremote[k].rank  = sendto[k];
1130:     iremote[k].index = 0;
1131:     nleaves2 += nentries[k];
1132:     PetscCheck(nleaves2 >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of SF leaves is too large for PetscInt");
1133:   }
1134:   PetscCall(PetscSFSetGraph(sf1, nroots, nleaves, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
1135:   PetscCall(PetscSFFetchAndOpWithMemTypeBegin(sf1, MPIU_INT, PETSC_MEMTYPE_HOST, &nroots2 /*rootdata*/, PETSC_MEMTYPE_HOST, nentries /*leafdata*/, PETSC_MEMTYPE_HOST, offsets /*leafupdate*/, MPI_SUM));
1136:   PetscCall(PetscSFFetchAndOpEnd(sf1, MPIU_INT, &nroots2, nentries, offsets, MPI_SUM)); /* Would nroots2 overflow, we check offsets[] below */
1137:   PetscCall(PetscSFDestroy(&sf1));
1138:   PetscAssert(nleaves2 == n1 - rem, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nleaves2 %" PetscInt_FMT " != number of remote entries %" PetscCount_FMT, nleaves2, n1 - rem);
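  /*
     Illustration (hypothetical 3-rank run): if ranks 1 and 2 both have remote entries owned
     by rank 0 (3 and 5 of them respectively), the fetch-and-op above leaves nroots2 = 8 on
     rank 0, while offsets[] on ranks 1 and 2 receive disjoint starting positions in rank 0's
     receive space (0 and 3, or 5 and 0, depending on arrival order).
  */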

1140:   /* Build 2nd SF to send remote COOs to their owner */
1141:   PetscSF sf2;
1142:   nroots  = nroots2;
1143:   nleaves = nleaves2;
1144:   PetscCall(PetscSFCreate(comm, &sf2));
1145:   PetscCall(PetscSFSetFromOptions(sf2));
1146:   PetscCall(PetscMalloc1(nleaves, &iremote));
1147:   p = 0;
1148:   for (k = 0; k < nsend; k++) {
1149:     PetscCheck(offsets[k] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of SF roots is too large for PetscInt");
1150:     for (q = 0; q < nentries[k]; q++, p++) {
1151:       iremote[p].rank = sendto[k];
1152:       PetscCall(PetscIntCast(offsets[k] + q, &iremote[p].index));
1153:     }
1154:   }
1155:   PetscCall(PetscSFSetGraph(sf2, nroots, nleaves, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));

1157:   /* Send the remote COOs to their owner */
1158:   PetscInt    n2 = nroots, *i2; /* Buffers for received COOs from other ranks, along with a permutation array */
1159:   PetscCount *perm2;
1160:   PetscCall(PetscMalloc1(n2, &i2));
1161:   PetscCall(PetscMalloc1(n2, &perm2));
1162:   PetscCall(PetscSFReduceWithMemTypeBegin(sf2, MPIU_INT, PETSC_MEMTYPE_HOST, i1 + rem, PETSC_MEMTYPE_HOST, i2, MPI_REPLACE));
1163:   PetscCall(PetscSFReduceEnd(sf2, MPIU_INT, i1 + rem, i2, MPI_REPLACE));

1165:   PetscCall(PetscFree(i1));
1166:   PetscCall(PetscFree(offsets));
1167:   PetscCall(PetscFree2(sendto, nentries));

1169:   /* ---------------------------------------------------------------*/
1170:   /* Sort received COOs along with a permutation array            */
1171:   /* ---------------------------------------------------------------*/
1172:   PetscCount  *imap2;
1173:   PetscCount  *jmap2, nnz2;
1174:   PetscScalar *sendbuf, *recvbuf;
1175:   PetscInt     old;
1176:   PetscCount   sendlen = n1 - rem, recvlen = n2;

1178:   for (k = 0; k < n2; k++) perm2[k] = k;
1179:   PetscCall(PetscSortIntWithCountArray(n2, i2, perm2));

1181:   /* nnz2 will be # of unique entries in the recvbuf */
1182:   nnz2 = n2;
1183:   for (k = 1; k < n2; k++) {
1184:     if (i2[k] == i2[k - 1]) nnz2--;
1185:   }

1187:   /* Build imap2[] and jmap2[] for each unique entry */
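  /*
     Illustration (hypothetical values): with rstart = 0 and a sorted receive buffer
     i2 = {3, 3, 5, 7, 7, 7}, there are nnz2 = 3 unique local indices; the loop below fills
     imap2 = {3, 5, 7} and, after the prefix sum, jmap2 = {0, 2, 3, 6}, so the recvbuf
     entries contributing to unique index p are recvbuf[perm2[k]] for k in [jmap2[p], jmap2[p+1]).
  */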
1188:   PetscCall(PetscMalloc4(nnz2, &imap2, nnz2 + 1, &jmap2, sendlen, &sendbuf, recvlen, &recvbuf));
1189:   p        = -1;
1190:   old      = -1;
1191:   jmap2[0] = 0;
1192:   jmap2++;
1193:   for (k = 0; k < n2; k++) {
1194:     if (i2[k] != old) { /* Meet a new entry */
1195:       p++;
1196:       imap2[p] = i2[k] - rstart;
1197:       jmap2[p] = 1;
1198:       old      = i2[k];
1199:     } else {
1200:       jmap2[p]++;
1201:     }
1202:   }
1203:   jmap2--;
1204:   for (k = 0; k < nnz2; k++) jmap2[k + 1] += jmap2[k];

1206:   PetscCall(PetscFree(i2));

1208:   vmpi->coo_n = coo_n;
1209:   vmpi->tot1  = tot1;
1210:   vmpi->jmap1 = jmap1;
1211:   vmpi->perm1 = perm1;
1212:   vmpi->nnz2  = nnz2;
1213:   vmpi->imap2 = imap2;
1214:   vmpi->jmap2 = jmap2;
1215:   vmpi->perm2 = perm2;

1217:   vmpi->Cperm   = Cperm;
1218:   vmpi->sendbuf = sendbuf;
1219:   vmpi->recvbuf = recvbuf;
1220:   vmpi->sendlen = sendlen;
1221:   vmpi->recvlen = recvlen;
1222:   vmpi->coo_sf  = sf2;
1223:   PetscFunctionReturn(PETSC_SUCCESS);
1224: }
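/*
   Minimal usage sketch of the COO assembly path set up above (illustrative; assumes x has
   already been created and sized, error checking omitted). Indices may repeat and may be
   owned by other ranks:

     PetscInt    coo_i[] = {0, 0, 7};
     PetscScalar coo_v[] = {1.0, 2.0, 3.0};
     PetscCall(VecSetPreallocationCOO(x, 3, coo_i));    // builds the maps and SF above
     PetscCall(VecSetValuesCOO(x, coo_v, ADD_VALUES));  // handled by VecSetValuesCOO_MPI() below
*/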

1226: PetscErrorCode VecSetValuesCOO_MPI(Vec x, const PetscScalar v[], InsertMode imode)
1227: {
1228:   Vec_MPI          *vmpi = (Vec_MPI *)x->data;
1229:   PetscInt          m;
1230:   PetscScalar      *a, *sendbuf = vmpi->sendbuf, *recvbuf = vmpi->recvbuf;
1231:   const PetscCount *jmap1 = vmpi->jmap1;
1232:   const PetscCount *perm1 = vmpi->perm1;
1233:   const PetscCount *imap2 = vmpi->imap2;
1234:   const PetscCount *jmap2 = vmpi->jmap2;
1235:   const PetscCount *perm2 = vmpi->perm2;
1236:   const PetscCount *Cperm = vmpi->Cperm;
1237:   const PetscCount  nnz2  = vmpi->nnz2;

1239:   PetscFunctionBegin;
1240:   PetscCall(VecGetLocalSize(x, &m));
1241:   PetscCall(VecGetArray(x, &a));

1243:   /* Pack entries to be sent to remote */
1244:   for (PetscInt i = 0; i < vmpi->sendlen; i++) sendbuf[i] = v[Cperm[i]];

1246:   /* Send remote entries to their owner and overlap the communication with local computation */
1247:   PetscCall(PetscSFReduceWithMemTypeBegin(vmpi->coo_sf, MPIU_SCALAR, PETSC_MEMTYPE_HOST, sendbuf, PETSC_MEMTYPE_HOST, recvbuf, MPI_REPLACE));
1248:   /* Add local entries to the vector */
1249:   for (PetscInt i = 0; i < m; i++) { /* All entries in a[] are either zero'ed or added with a value (i.e., initialized) */
1250:     PetscScalar sum = 0.0;           /* Do partial summation first to improve numerical stability */
1251:     for (PetscCount k = jmap1[i]; k < jmap1[i + 1]; k++) sum += v[perm1[k]];
1252:     a[i] = (imode == INSERT_VALUES ? 0.0 : a[i]) + sum;
1253:   }
1254:   PetscCall(PetscSFReduceEnd(vmpi->coo_sf, MPIU_SCALAR, sendbuf, recvbuf, MPI_REPLACE));

1256:   /* Add received remote entries to the vector */
1257:   for (PetscInt i = 0; i < nnz2; i++) {
1258:     for (PetscCount k = jmap2[i]; k < jmap2[i + 1]; k++) a[imap2[i]] += recvbuf[perm2[k]];
1259:   }

1261:   PetscCall(VecRestoreArray(x, &a));
1262:   PetscFunctionReturn(PETSC_SUCCESS);
1263: }