Actual source code: ex14.c

static char help[] = "Solves a nonlinear system in parallel with a user-defined Newton method.\n\
Uses KSP to solve the linearized Newton systems.  This solver\n\
is a very simplistic inexact Newton method.  The intent of this code is to\n\
demonstrate the repeated solution of linear systems with the same nonzero pattern.\n\
\n\
This is NOT the recommended approach for solving nonlinear problems with PETSc!\n\
We urge users to employ the SNES component for solving nonlinear problems whenever\n\
possible, as it offers many advantages over coding nonlinear solvers independently.\n\
\n\
We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular\n\
domain, using distributed arrays (DMDAs) to partition the parallel grid.\n\
The command line options include:\n\
  -par <parameter>, where <parameter> indicates the problem's nonlinearity\n\
     problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)\n\
  -mx <xg>, where <xg> = number of grid points in the x-direction\n\
  -my <yg>, where <yg> = number of grid points in the y-direction\n\
  -Nx <npx>, where <npx> = number of processors in the x-direction\n\
  -Ny <npy>, where <npy> = number of processors in the y-direction\n\n";

/* ------------------------------------------------------------------------

    Solid Fuel Ignition (SFI) problem.  This problem is modeled by
    the partial differential equation

            -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,

    with boundary conditions

             u = 0  for  x = 0, x = 1, y = 0, y = 1.

    A finite difference approximation with the usual 5-point stencil
    is used to discretize the boundary value problem to obtain a nonlinear
    system of equations.
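
    Written out at an interior grid point (i,j), the discretization used in
    ComputeFunction() below reads, schematically,

       (hy/hx)*(2*u(i,j) - u(i-1,j) - u(i+1,j))
         + (hx/hy)*(2*u(i,j) - u(i,j-1) - u(i,j+1)) - hx*hy*lambda*exp(u(i,j)) = 0,

    with u = 0 imposed directly at the boundary grid points.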

    The SNES version of this problem is:  snes/tutorials/ex5.c
    We urge users to employ the SNES component for solving nonlinear
    problems whenever possible, as it offers many advantages over coding
    nonlinear solvers independently.

  ------------------------------------------------------------------------- */

/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscksp.h" so that we can use KSP solvers.  Note that this
   file automatically includes:
     petscsys.h       - base PETSc routines   petscvec.h - vectors
     petscmat.h - matrices
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners
*/
#include <petscdm.h>
#include <petscdmda.h>
#include <petscksp.h>

/*
   User-defined application context - contains data needed by the
   application-provided call-back routines, ComputeJacobian() and
   ComputeFunction().
*/
typedef struct {
  PetscReal param;  /* test problem parameter */
  PetscInt  mx, my; /* discretization in x,y directions */
  Vec       localX; /* ghosted local vector */
  DM        da;     /* distributed array data structure */
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode ComputeFunction(AppCtx *, Vec, Vec), FormInitialGuess(AppCtx *, Vec);
extern PetscErrorCode ComputeJacobian(AppCtx *, Vec, Mat);

int main(int argc, char **argv)
{
  /* -------------- Data to define application problem ---------------- */
  MPI_Comm    comm;    /* communicator */
  KSP         ksp;     /* linear solver */
  Vec         X, Y, F; /* solution, update, residual vectors */
  Mat         J;       /* Jacobian matrix */
  AppCtx      user;    /* user-defined work context */
  PetscInt    Nx, Ny;  /* number of processors in x- and y- directions */
  PetscMPIInt size;    /* number of processors */
  PetscReal   bratu_lambda_max = 6.81, bratu_lambda_min = 0.;
  PetscInt    m, N;

  /* --------------- Data to define nonlinear solver -------------- */
  PetscReal rtol = 1.e-8;            /* relative convergence tolerance */
  PetscReal xtol = 1.e-8;            /* step convergence tolerance */
  PetscReal ttol;                    /* convergence tolerance */
  PetscReal fnorm, ynorm, xnorm;     /* various vector norms */
  PetscInt  max_nonlin_its = 3;      /* maximum number of iterations for nonlinear solver */
  PetscInt  max_functions  = 50;     /* maximum number of function evaluations */
  PetscInt  lin_its;                 /* number of linear solver iterations for each step */
  PetscInt  i;                       /* nonlinear solve iteration number */
  PetscBool no_output = PETSC_FALSE; /* flag indicating whether to suppress output */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  comm = PETSC_COMM_WORLD;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-no_output", &no_output, NULL));

  /*
     Initialize problem parameters
  */
  user.mx    = 4;
  user.my    = 4;
  user.param = 6.0;

  PetscCall(PetscOptionsGetInt(NULL, NULL, "-mx", &user.mx, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-my", &user.my, NULL));
  PetscCall(PetscOptionsGetReal(NULL, NULL, "-par", &user.param, NULL));
  PetscCheck(user.param < bratu_lambda_max && user.param > bratu_lambda_min, PETSC_COMM_WORLD, PETSC_ERR_ARG_OUTOFRANGE, "Lambda is out of range");
  N = user.mx * user.my;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create linear solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(KSPCreate(comm, &ksp));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
  */
  PetscCallMPI(MPI_Comm_size(comm, &size));
  Nx = PETSC_DECIDE;
  Ny = PETSC_DECIDE;
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-Nx", &Nx, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-Ny", &Ny, NULL));
  PetscCheck(Nx * Ny == size || (Nx == PETSC_DECIDE && Ny == PETSC_DECIDE), PETSC_COMM_WORLD, PETSC_ERR_ARG_INCOMP, "Incompatible number of processors:  Nx * Ny != size");
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, user.mx, user.my, Nx, Ny, 1, 1, NULL, NULL, &user.da));
  PetscCall(DMSetFromOptions(user.da));
  PetscCall(DMSetUp(user.da));

  /*
     Extract global and local vectors from DMDA; then duplicate for remaining
     vectors that are the same types
  */
  PetscCall(DMCreateGlobalVector(user.da, &X));
  PetscCall(DMCreateLocalVector(user.da, &user.localX));
  PetscCall(VecDuplicate(X, &F));
  PetscCall(VecDuplicate(X, &Y));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure for Jacobian
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Note:  For the parallel case, vectors and matrices MUST be partitioned
     accordingly.  When using distributed arrays (DMDAs) to create vectors,
     the DMDAs determine the problem partitioning.  We must explicitly
     specify the local matrix dimensions upon its creation for compatibility
     with the vector distribution.  Thus, the generic MatCreate() routine
     is NOT sufficient when working with distributed arrays.

     Note: Here we only approximately preallocate storage space for the
     Jacobian.  See the users manual for a discussion of better techniques
     for preallocating matrix memory.
  */
  PetscCall(VecGetLocalSize(X, &m));
  PetscCall(MatCreateFromOptions(comm, NULL, 1, m, m, N, N, &J));
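
  /*
     Note: as an alternative (a sketch, not used in this example), a DMDA can
     build a matrix with the correct parallel layout and preallocation for its
     stencil directly:

        PetscCall(DMCreateMatrix(user.da, &J));
  */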

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize linear solver; set runtime options
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set runtime options (e.g., -ksp_monitor -ksp_rtol <rtol> -ksp_type <type>)
  */
  PetscCall(KSPSetFromOptions(ksp));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Evaluate initial guess
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(FormInitialGuess(&user, X));
  PetscCall(ComputeFunction(&user, X, F)); /* Compute F(X)    */
  PetscCall(VecNorm(F, NORM_2, &fnorm));   /* fnorm = || F || */
  ttol = fnorm * rtol;
  if (!no_output) PetscCall(PetscPrintf(comm, "Initial function norm = %g\n", (double)fnorm));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve nonlinear system with a user-defined method
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
      This solver is a very simplistic inexact Newton method, with no
      damping strategies or bells and whistles. The intent of this code
      is merely to demonstrate the repeated solution with KSP of linear
      systems with the same nonzero structure.

      This is NOT the recommended approach for solving nonlinear problems
      with PETSc!  We urge users to employ the SNES component for solving
      nonlinear problems whenever possible with application codes, as it
      offers many advantages over coding nonlinear solvers independently.
   */
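
  /*
      Sketch of the iteration implemented below: at each step k we
        1) assemble the Jacobian J(X_k) and solve J(X_k) Y = F(X_k) with KSP,
        2) update X_{k+1} = X_k - Y,
        3) stop when ||F(X_{k+1})|| <= rtol*||F(X_0)||, when the update is small
           (||Y|| < xtol*||X||), or after max_nonlin_its iterations.
   */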

  for (i = 0; i < max_nonlin_its; i++) {
    /*
        Compute the Jacobian matrix.
     */
    PetscCall(ComputeJacobian(&user, X, J));

    /*
        Solve J Y = F, where J is the Jacobian matrix.
          - First, set the KSP linear operators.  Here the matrix that
            defines the linear system also serves as the preconditioning
            matrix.
          - Then solve the Newton system.
     */
    PetscCall(KSPSetOperators(ksp, J, J));
    PetscCall(KSPSolve(ksp, F, Y));
    PetscCall(KSPGetIterationNumber(ksp, &lin_its));

    /*
       Compute updated iterate
     */
    PetscCall(VecNorm(Y, NORM_2, &ynorm)); /* ynorm = || Y || */
    PetscCall(VecAYPX(Y, -1.0, X));        /* Y <- X - Y      */
    PetscCall(VecCopy(Y, X));              /* X <- Y          */
    PetscCall(VecNorm(X, NORM_2, &xnorm)); /* xnorm = || X || */
    if (!no_output) PetscCall(PetscPrintf(comm, "   linear solve iterations = %" PetscInt_FMT ", xnorm=%g, ynorm=%g\n", lin_its, (double)xnorm, (double)ynorm));

    /*
       Evaluate new nonlinear function
     */
    PetscCall(ComputeFunction(&user, X, F)); /* Compute F(X)    */
    PetscCall(VecNorm(F, NORM_2, &fnorm));   /* fnorm = || F || */
    if (!no_output) PetscCall(PetscPrintf(comm, "Iteration %" PetscInt_FMT ", function norm = %g\n", i + 1, (double)fnorm));

    /*
       Test for convergence
     */
    if (fnorm <= ttol) {
      if (!no_output) PetscCall(PetscPrintf(comm, "Converged due to function norm %g < %g (relative tolerance)\n", (double)fnorm, (double)ttol));
      break;
    }
    if (ynorm < xtol * (xnorm)) {
      if (!no_output) PetscCall(PetscPrintf(comm, "Converged due to small update length: %g < %g * %g\n", (double)ynorm, (double)xtol, (double)xnorm));
      break;
    }
    if (i > max_functions) {
      if (!no_output) PetscCall(PetscPrintf(comm, "Exceeded maximum number of function evaluations: %" PetscInt_FMT " > %" PetscInt_FMT "\n", i, max_functions));
      break;
    }
  }
  PetscCall(PetscPrintf(comm, "Number of nonlinear iterations = %" PetscInt_FMT "\n", i));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(MatDestroy(&J));
  PetscCall(VecDestroy(&Y));
  PetscCall(VecDestroy(&user.localX));
  PetscCall(VecDestroy(&X));
  PetscCall(VecDestroy(&F));
  PetscCall(KSPDestroy(&ksp));
  PetscCall(DMDestroy(&user.da));
  PetscCall(PetscFinalize());
  return 0;
}
/* ------------------------------------------------------------------- */
/*
   FormInitialGuess - Forms initial approximation.

   Input Parameters:
   user - user-defined application context
   X - vector

   Output Parameter:
   X - vector
 */
PetscErrorCode FormInitialGuess(AppCtx *user, Vec X)
{
  PetscInt     i, j, row, mx, my, xs, ys, xm, ym, gxm, gym, gxs, gys;
  PetscReal    one = 1.0, lambda, temp1, temp, hx, hy;
  PetscScalar *x;

  mx     = user->mx;
  my     = user->my;
  lambda = user->param;
  hx     = one / (PetscReal)(mx - 1);
  hy     = one / (PetscReal)(my - 1);
  temp1  = lambda / (lambda + one);

  /*
     Get a pointer to vector data.
       - For default PETSc vectors, VecGetArray() returns a pointer to
         the data array.  Otherwise, the routine is implementation dependent.
       - You MUST call VecRestoreArray() when you no longer need access to
         the array.
  */
  PetscCall(VecGetArray(X, &x));

  /*
     Get local grid boundaries (for 2-dimensional DMDA):
       xs, ys   - starting grid indices (no ghost points)
       xm, ym   - widths of local grid (no ghost points)
       gxs, gys - starting grid indices (including ghost points)
       gxm, gym - widths of local grid (including ghost points)
  */
  PetscCall(DMDAGetCorners(user->da, &xs, &ys, NULL, &xm, &ym, NULL));
  PetscCall(DMDAGetGhostCorners(user->da, &gxs, &gys, NULL, &gxm, &gym, NULL));

  /*
     Compute initial guess over the locally owned part of the grid
  */
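  /*
     The guess below is zero on the boundary and, in the interior, grows like
     the square root of the distance to the nearest edge:
        x(i,j) = (lambda/(lambda+1)) * sqrt(min(min(i,mx-i-1)*hx, min(j,my-j-1)*hy))
  */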
  for (j = ys; j < ys + ym; j++) {
    temp = (PetscReal)(PetscMin(j, my - j - 1)) * hy;
    for (i = xs; i < xs + xm; i++) {
      row = i - gxs + (j - gys) * gxm;
      if (i == 0 || j == 0 || i == mx - 1 || j == my - 1) {
        x[row] = 0.0;
        continue;
      }
      x[row] = temp1 * PetscSqrtReal(PetscMin((PetscReal)(PetscMin(i, mx - i - 1)) * hx, temp));
    }
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(X, &x));
  return PETSC_SUCCESS;
}
/* ------------------------------------------------------------------- */
/*
   ComputeFunction - Evaluates nonlinear function, F(x).

   Input Parameters:
.  X - input vector
.  user - user-defined application context

   Output Parameter:
.  F - function vector
 */
PetscErrorCode ComputeFunction(AppCtx *user, Vec X, Vec F)
{
  PetscInt    i, j, row, mx, my, xs, ys, xm, ym, gxs, gys, gxm, gym;
  PetscReal   two = 2.0, one = 1.0, lambda, hx, hy, hxdhy, hydhx, sc;
  PetscScalar u, uxx, uyy, *x, *f;
  Vec         localX = user->localX;

  mx     = user->mx;
  my     = user->my;
  lambda = user->param;
  hx     = one / (PetscReal)(mx - 1);
  hy     = one / (PetscReal)(my - 1);
  sc     = hx * hy * lambda;
  hxdhy  = hx / hy;
  hydhx  = hy / hx;

  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(user->da, X, INSERT_VALUES, localX));
  PetscCall(DMGlobalToLocalEnd(user->da, X, INSERT_VALUES, localX));

  /*
     Get pointers to vector data
  */
  PetscCall(VecGetArray(localX, &x));
  PetscCall(VecGetArray(F, &f));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(user->da, &xs, &ys, NULL, &xm, &ym, NULL));
  PetscCall(DMDAGetGhostCorners(user->da, &gxs, &gys, NULL, &gxm, &gym, NULL));

  /*
     Compute function over the locally owned part of the grid
  */
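  /*
     Note that "row" indexes the ghosted local array: for grid point (i,j),
     row = (i - gxs) + (j - gys)*gxm.  Boundary points simply get f = x
     (enforcing u = 0); interior points get the 5-point stencil residual
        f = hydhx*(2u - u_W - u_E) + hxdhy*(2u - u_S - u_N) - sc*exp(u)
  */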
  for (j = ys; j < ys + ym; j++) {
    row = (j - gys) * gxm + xs - gxs - 1;
    for (i = xs; i < xs + xm; i++) {
      row++;
      if (i == 0 || j == 0 || i == mx - 1 || j == my - 1) {
        f[row] = x[row];
        continue;
      }
      u      = x[row];
      uxx    = (two * u - x[row - 1] - x[row + 1]) * hydhx;
      uyy    = (two * u - x[row - gxm] - x[row + gxm]) * hxdhy;
      f[row] = uxx + uyy - sc * PetscExpScalar(u);
    }
  }

  /*
     Restore vectors
  */
  PetscCall(VecRestoreArray(localX, &x));
  PetscCall(VecRestoreArray(F, &f));
  PetscCall(PetscLogFlops(11.0 * ym * xm));
  return PETSC_SUCCESS;
}

/*
   ComputeJacobian - Evaluates Jacobian matrix.

   Input Parameters:
.  X - input vector
.  user - user-defined application context

   Output Parameter:
.  jac - Jacobian matrix

   Notes:
   Due to grid point reordering with DMDAs, we must always work
   with the local grid points, and then transform them to the new
   global numbering with the "ltog" mapping.
   We cannot work directly with the global numbers for the original
   uniprocessor grid!
*/
PetscErrorCode ComputeJacobian(AppCtx *user, Vec X, Mat jac)
{
  Vec                    localX = user->localX; /* local vector */
  const PetscInt        *ltog;                  /* local-to-global mapping */
  PetscInt               i, j, row, mx, my, col[5];
  PetscInt               xs, ys, xm, ym, gxs, gys, gxm, gym, grow;
  PetscScalar            two = 2.0, one = 1.0, lambda, v[5], hx, hy, hxdhy, hydhx, sc, *x;
  ISLocalToGlobalMapping ltogm;

  mx     = user->mx;
  my     = user->my;
  lambda = user->param;
  hx     = one / (PetscReal)(mx - 1);
  hy     = one / (PetscReal)(my - 1);
  sc     = hx * hy;
  hxdhy  = hx / hy;
  hydhx  = hy / hx;

  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(user->da, X, INSERT_VALUES, localX));
  PetscCall(DMGlobalToLocalEnd(user->da, X, INSERT_VALUES, localX));

  /*
     Get pointer to vector data
  */
  PetscCall(VecGetArray(localX, &x));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(user->da, &xs, &ys, NULL, &xm, &ym, NULL));
  PetscCall(DMDAGetGhostCorners(user->da, &gxs, &gys, NULL, &gxm, &gym, NULL));

  /*
     Get the global node numbers for all local nodes, including ghost points
  */
  PetscCall(DMGetLocalToGlobalMapping(user->da, &ltogm));
  PetscCall(ISLocalToGlobalMappingGetIndices(ltogm, &ltog));

  /*
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors. The "grow"
        parameter computed below specifies the global row number
        corresponding to each local grid point.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global rows and columns of matrix entries.
      - Here, we set all entries for a particular row at once.
  */
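  /*
     For each interior grid point the Jacobian row holds the usual 5-point
     stencil coefficients: -hxdhy for the south/north neighbors, -hydhx for
     the west/east neighbors, and 2*(hydhx + hxdhy) - hx*hy*lambda*exp(u) on
     the diagonal; boundary rows are simply rows of the identity.
  */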
  for (j = ys; j < ys + ym; j++) {
    row = (j - gys) * gxm + xs - gxs - 1;
    for (i = xs; i < xs + xm; i++) {
      row++;
      grow = ltog[row];
      /* boundary points */
      if (i == 0 || j == 0 || i == mx - 1 || j == my - 1) {
        PetscCall(MatSetValues(jac, 1, &grow, 1, &grow, &one, INSERT_VALUES));
        continue;
      }
      /* interior grid points */
      v[0]   = -hxdhy;
      col[0] = ltog[row - gxm];
      v[1]   = -hydhx;
      col[1] = ltog[row - 1];
      v[2]   = two * (hydhx + hxdhy) - sc * lambda * PetscExpScalar(x[row]);
      col[2] = grow;
      v[3]   = -hydhx;
      col[3] = ltog[row + 1];
      v[4]   = -hxdhy;
      col[4] = ltog[row + gxm];
      PetscCall(MatSetValues(jac, 1, &grow, 5, col, v, INSERT_VALUES));
    }
  }
  PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogm, &ltog));

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY));
  PetscCall(VecRestoreArray(localX, &x));
  PetscCall(MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY));

  return PETSC_SUCCESS;
}
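
/*
   Example usage (an illustrative sketch, not part of the test below):

      mpiexec -n 4 ./ex14 -mx 16 -my 16 -par 6.0 -ksp_monitor

   runs on 4 MPI processes with a 16x16 grid and monitors the residual of
   each linear solve.
*/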

/*TEST

    test:

TEST*/