Actual source code: ex19.c
static char help[] = "Solves the Laplacian with multigrid, bad way.\n\
  -Mx <xg>, where <xg> = number of coarse grid points in the x-direction\n\
  -My <yg>, where <yg> = number of coarse grid points in the y-direction\n\
  -Nx <npx>, where <npx> = number of processors in the x-direction\n\
  -Ny <npy>, where <npy> = number of processors in the y-direction\n\n";
/*
    This problem is modeled by the partial differential equation

            -Laplacian u = g,  0 < x,y < 1,

    with boundary conditions

             u = 0  for  x = 0, x = 1, y = 0, y = 1.

    A finite difference approximation with the usual 5-point stencil
    is used to discretize the boundary value problem to obtain a linear
    system of equations.
*/
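
/*
    At an interior node (i,j), the 5-point discretization assembled in
    FormJacobian_Grid() below is scaled by hx*hy, giving the matrix row

        2*(hy/hx + hx/hy)*u(i,j) - (hy/hx)*(u(i-1,j) + u(i+1,j))
                                 - (hx/hy)*(u(i,j-1) + u(i,j+1))

    with hx = 1/(mx-1) and hy = 1/(my-1).
*/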

#include <petscksp.h>
#include <petscdm.h>
#include <petscdmda.h>

/* User-defined application contexts */

typedef struct {
  PetscInt mx, my;         /* number of grid points in the x and y directions */
  Vec      localX, localF; /* local vectors with ghost region */
  DM       da;
  Vec      x, b, r; /* global vectors */
  Mat      J;       /* Jacobian on grid */
} GridCtx;

typedef struct {
  GridCtx  fine;
  GridCtx  coarse;
  KSP      ksp_coarse;
  PetscInt ratio;
  Mat      Ii; /* interpolation from coarse to fine */
} AppCtx;

#define COARSE_LEVEL 0
#define FINE_LEVEL   1

extern PetscErrorCode FormJacobian_Grid(AppCtx *, GridCtx *, Mat *);

/*
      -ratio - the ratio of grid lines between the fine and coarse grids.
*/
int main(int argc, char **argv)
{
  AppCtx      user;
  PetscInt    its, N, n, Nx = PETSC_DECIDE, Ny = PETSC_DECIDE, nlocal, Nlocal;
  PetscMPIInt size;
  KSP         ksp, ksp_fine;
  PC          pc;
  PetscScalar one = 1.0;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  user.ratio     = 2;
  user.coarse.mx = 5;
  user.coarse.my = 5;

  PetscCall(PetscOptionsGetInt(NULL, NULL, "-Mx", &user.coarse.mx, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-My", &user.coarse.my, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-ratio", &user.ratio, NULL));

  user.fine.mx = user.ratio * (user.coarse.mx - 1) + 1;
  user.fine.my = user.ratio * (user.coarse.my - 1) + 1;
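
  /* The fine grid contains every grid line of the coarse grid plus
     (ratio - 1) lines in between; e.g., the default 5x5 coarse grid with
     ratio 2 gives a 9x9 fine grid. */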

  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Coarse grid size %" PetscInt_FMT " by %" PetscInt_FMT "\n", user.coarse.mx, user.coarse.my));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Fine grid size %" PetscInt_FMT " by %" PetscInt_FMT "\n", user.fine.mx, user.fine.my));

  n = user.fine.mx * user.fine.my;
  N = user.coarse.mx * user.coarse.my;

  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-Nx", &Nx, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-Ny", &Ny, NULL));

  /* Set up distributed array for fine grid */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, user.fine.mx, user.fine.my, Nx, Ny, 1, 1, NULL, NULL, &user.fine.da));
  PetscCall(DMSetFromOptions(user.fine.da));
  PetscCall(DMSetUp(user.fine.da));
  PetscCall(DMCreateGlobalVector(user.fine.da, &user.fine.x));
  PetscCall(VecDuplicate(user.fine.x, &user.fine.r));
  PetscCall(VecDuplicate(user.fine.x, &user.fine.b));
  PetscCall(VecGetLocalSize(user.fine.x, &nlocal));
  PetscCall(DMCreateLocalVector(user.fine.da, &user.fine.localX));
  PetscCall(VecDuplicate(user.fine.localX, &user.fine.localF));
  PetscCall(MatCreateFromOptions(PETSC_COMM_WORLD, NULL, 1, nlocal, nlocal, n, n, &user.fine.J));
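
  /* The Jacobian takes its row and column layout from the local size of the
     DMDA vectors (block size 1); its type can be chosen at runtime, e.g.
     with -mat_type. */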

  /* Set up distributed array for coarse grid */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, user.coarse.mx, user.coarse.my, Nx, Ny, 1, 1, NULL, NULL, &user.coarse.da));
  PetscCall(DMSetFromOptions(user.coarse.da));
  PetscCall(DMSetUp(user.coarse.da));
  PetscCall(DMCreateGlobalVector(user.coarse.da, &user.coarse.x));
  PetscCall(VecDuplicate(user.coarse.x, &user.coarse.b));
  PetscCall(VecGetLocalSize(user.coarse.x, &Nlocal));
  PetscCall(DMCreateLocalVector(user.coarse.da, &user.coarse.localX));
  PetscCall(VecDuplicate(user.coarse.localX, &user.coarse.localF));
  PetscCall(MatCreateFromOptions(PETSC_COMM_WORLD, NULL, 1, Nlocal, Nlocal, N, N, &user.coarse.J));

  /* Create linear solver */
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));

  /* set two-level additive multigrid preconditioner */
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCMG));
  PetscCall(PCMGSetLevels(pc, 2, NULL));
  PetscCall(PCMGSetType(pc, PC_MG_ADDITIVE));
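
  /* PC_MG_ADDITIVE applies the coarse-grid solve and the fine-grid smoother
     independently and sums the results, rather than applying them in
     sequence as multiplicative multigrid does. */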

  PetscCall(FormJacobian_Grid(&user, &user.coarse, &user.coarse.J));
  PetscCall(FormJacobian_Grid(&user, &user.fine, &user.fine.J));

  /* Create coarse level */
  PetscCall(PCMGGetCoarseSolve(pc, &user.ksp_coarse));
  PetscCall(KSPSetOptionsPrefix(user.ksp_coarse, "coarse_"));
  PetscCall(KSPSetFromOptions(user.ksp_coarse));
  PetscCall(KSPSetOperators(user.ksp_coarse, user.coarse.J, user.coarse.J));
  PetscCall(PCMGSetX(pc, COARSE_LEVEL, user.coarse.x));
  PetscCall(PCMGSetRhs(pc, COARSE_LEVEL, user.coarse.b));
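
  /* The "coarse_" prefix lets the coarse solver be controlled separately on
     the command line (e.g. -coarse_ksp_type). Supplying the coarse-level
     work vectors is optional; PCMG creates them if they are not set. */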

  /* Create fine level */
  PetscCall(PCMGGetSmoother(pc, FINE_LEVEL, &ksp_fine));
  PetscCall(KSPSetOptionsPrefix(ksp_fine, "fine_"));
  PetscCall(KSPSetFromOptions(ksp_fine));
  PetscCall(KSPSetOperators(ksp_fine, user.fine.J, user.fine.J));
  PetscCall(PCMGSetR(pc, FINE_LEVEL, user.fine.r));

  /* Create interpolation between the levels */
  PetscCall(DMCreateInterpolation(user.coarse.da, user.fine.da, &user.Ii, NULL));
  PetscCall(PCMGSetInterpolation(pc, FINE_LEVEL, user.Ii));
  PetscCall(PCMGSetRestriction(pc, FINE_LEVEL, user.Ii));
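
  /* The same matrix serves as interpolation and restriction; PETSc applies
     its transpose when restricting, chosen by the sizes of the vectors
     involved. */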

  PetscCall(KSPSetOperators(ksp, user.fine.J, user.fine.J));

  PetscCall(VecSet(user.fine.b, one));

  /* Set options, then solve linear system */
  PetscCall(KSPSetFromOptions(ksp));

  PetscCall(KSPSolve(ksp, user.fine.b, user.fine.x));
  PetscCall(KSPGetIterationNumber(ksp, &its));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Number of iterations = %" PetscInt_FMT "\n", its));
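
  /* An illustrative run, using the options parsed above:
         mpiexec -n 4 ./ex19 -Mx 9 -My 9 -ratio 2 -ksp_monitor_short */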

  /* Free data structures */
  PetscCall(MatDestroy(&user.fine.J));
  PetscCall(VecDestroy(&user.fine.x));
  PetscCall(VecDestroy(&user.fine.r));
  PetscCall(VecDestroy(&user.fine.b));
  PetscCall(DMDestroy(&user.fine.da));
  PetscCall(VecDestroy(&user.fine.localX));
  PetscCall(VecDestroy(&user.fine.localF));

  PetscCall(MatDestroy(&user.coarse.J));
  PetscCall(VecDestroy(&user.coarse.x));
  PetscCall(VecDestroy(&user.coarse.b));
  PetscCall(DMDestroy(&user.coarse.da));
  PetscCall(VecDestroy(&user.coarse.localX));
  PetscCall(VecDestroy(&user.coarse.localF));

  PetscCall(KSPDestroy(&ksp));
  PetscCall(MatDestroy(&user.Ii));
  PetscCall(PetscFinalize());
  return 0;
}

PetscErrorCode FormJacobian_Grid(AppCtx *user, GridCtx *grid, Mat *J)
{
  Mat                    jac = *J;
  PetscInt               i, j, row, mx, my, xs, ys, xm, ym, Xs, Ys, Xm, Ym, col[5];
  PetscInt               grow;
  const PetscInt        *ltog;
  PetscScalar            two = 2.0, one = 1.0, v[5], hx, hy, hxdhy, hydhx, value;
  ISLocalToGlobalMapping ltogm;

  mx    = grid->mx;
  my    = grid->my;
  hx    = one / (PetscReal)(mx - 1);
  hy    = one / (PetscReal)(my - 1);
  hxdhy = hx / hy;
  hydhx = hy / hx;
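
  /* The stencil entries are scaled by hx*hy, so the off-diagonal weights are
     hy/hx and hx/hy rather than 1/hx^2 and 1/hy^2. */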

  /* Get ghost points */
  PetscCall(DMDAGetCorners(grid->da, &xs, &ys, 0, &xm, &ym, 0));
  PetscCall(DMDAGetGhostCorners(grid->da, &Xs, &Ys, 0, &Xm, &Ym, 0));
  PetscCall(DMGetLocalToGlobalMapping(grid->da, &ltogm));
  PetscCall(ISLocalToGlobalMappingGetIndices(ltogm, &ltog));

  /* Evaluate Jacobian of function */
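  /* Rows are visited in the DMDA's local (ghosted) ordering, with Xm the
     ghosted width of the local patch; ltog converts each local index to its
     global row number. */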
  for (j = ys; j < ys + ym; j++) {
    row = (j - Ys) * Xm + xs - Xs - 1;
    for (i = xs; i < xs + xm; i++) {
      row++;
      grow = ltog[row];
      if (i > 0 && i < mx - 1 && j > 0 && j < my - 1) {
        /* interior node: full 5-point stencil */
        v[0]   = -hxdhy;
        col[0] = ltog[row - Xm];
        v[1]   = -hydhx;
        col[1] = ltog[row - 1];
        v[2]   = two * (hydhx + hxdhy);
        col[2] = grow;
        v[3]   = -hydhx;
        col[3] = ltog[row + 1];
        v[4]   = -hxdhy;
        col[4] = ltog[row + Xm];
        PetscCall(MatSetValues(jac, 1, &grow, 5, col, v, INSERT_VALUES));
      } else if ((i > 0 && i < mx - 1) || (j > 0 && j < my - 1)) {
        /* boundary edge node: scaled diagonal entry only (Dirichlet condition) */
        value = .5 * two * (hydhx + hxdhy);
        PetscCall(MatSetValues(jac, 1, &grow, 1, &grow, &value, INSERT_VALUES));
      } else {
        /* corner node: scaled diagonal entry only */
        value = .25 * two * (hydhx + hxdhy);
        PetscCall(MatSetValues(jac, 1, &grow, 1, &grow, &value, INSERT_VALUES));
      }
    }
  }
  PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogm, &ltog));
  PetscCall(MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY));

  return PETSC_SUCCESS;
}

/*TEST

   test:
      args: -ksp_gmres_cgs_refinement_type refine_always -pc_type jacobi -ksp_monitor_short -ksp_type gmres

   test:
      suffix: 2
      nsize: 3
      args: -ksp_monitor_short

TEST*/