Actual source code: ex40.c
static char help[] = "Solves a linear system in parallel with KSP.\n\
Input parameters include:\n\
  -random_exact_sol : use a random exact solution vector\n\
  -view_exact_sol   : write exact solution vector to stdout\n\
  -m <mesh_x>       : number of mesh points in x-direction\n\
  -n <mesh_y>       : number of mesh points in y-direction\n\n";

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscksp.h    - Krylov subspace methods
     petscviewer.h - viewers
     petscpc.h     - preconditioners
*/
#include <petscksp.h>

int main(int argc, char **args)
{
  Vec         x, b, u; /* approx solution, RHS, exact solution */
  Mat         A;       /* linear system matrix */
  KSP         ksp;     /* linear solver context */
  PetscRandom rctx;    /* random number generator context */
  PetscReal   norm;    /* norm of solution error */
  PetscInt    i, j, Ii, J, m = 8, n = 7, its;
  PetscBool   flg = PETSC_FALSE;
  PetscScalar v;
  PetscMPIInt rank;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, NULL, help));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-m", &m, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime.  Also, the parallel partitioning of the matrix is
     determined by PETSc at runtime.

     Performance tuning note: For problems of substantial size,
     preallocation of matrix memory is crucial for attaining good
     performance.  See the matrix chapter of the users manual for details.
  */
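  /*
     A minimal sketch of such preallocation for the default AIJ format (not
     exercised here, since this example selects MATELEMENTAL below): each
     row of the five-point stencil used in this example has at most 5
     nonzeros, so, after MatSetFromOptions(), one could call

       PetscCall(MatMPIAIJSetPreallocation(A, 5, NULL, 5, NULL));
       PetscCall(MatSeqAIJSetPreallocation(A, 5, NULL));

     Each call is ignored when the matrix is not of the matching type.
  */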
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m * n, m * n));
  PetscCall(MatSetType(A, MATELEMENTAL));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
  if (rank == 0) {
    PetscInt M, N;
    PetscCall(MatGetSize(A, &M, &N));
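    /*
       Fill the matrix with the standard five-point stencil of the 2D
       Laplacian: global row Ii corresponds to grid point (i,j) through
       Ii = i*n + j, with -1 for each of the (up to) four neighbors and
       4 on the diagonal.
    */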
    for (Ii = 0; Ii < M; Ii++) {
      v = -1.0;
      i = Ii / n;
      j = Ii - i * n;
      if (i > 0) {
        J = Ii - n;
        PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, ADD_VALUES));
      }
      if (i < m - 1) {
        J = Ii + n;
        PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, ADD_VALUES));
      }
      if (j > 0) {
        J = Ii - 1;
        PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, ADD_VALUES));
      }
      if (j < n - 1) {
        J = Ii + 1;
        PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, ADD_VALUES));
      }
      v = 4.0;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &Ii, &v, ADD_VALUES));
    }
  }
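
  /*
     Assemble the matrix.  The ADD_VALUES contributions made on rank 0
     above are communicated to their owning processes during this
     two-phase assembly.
  */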
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));

  /* PetscCall(MatSetOption(A,MAT_SYMMETRIC,PETSC_TRUE)); */

  /*
     Create parallel vectors.
      - We form 1 vector from scratch and then duplicate as needed.
      - When using VecCreate(), VecSetSizes() and VecSetFromOptions()
        in this example, we specify only the vector's global dimension;
        the parallel partitioning is determined at runtime.
      - When solving a linear system, the vectors and matrices MUST
        be partitioned accordingly.  PETSc automatically generates
        appropriately partitioned matrices and vectors when MatCreate()
        and VecCreate() are used with the same communicator.
      - The user can alternatively specify the local vector and matrix
        dimensions when more sophisticated partitioning is needed
        (replacing the PETSC_DECIDE argument in the VecSetSizes() statement
        below).
  */
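  /*
     A minimal sketch of specifying the local size explicitly instead,
     assuming a PetscInt nlocal (hypothetical, not defined in this example)
     that holds this process's share of the m*n global entries:

       PetscCall(VecSetSizes(u, nlocal, m * n));
  */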
  PetscCall(VecCreate(PETSC_COMM_WORLD, &u));
  PetscCall(VecSetSizes(u, PETSC_DECIDE, m * n));
  PetscCall(VecSetFromOptions(u));
  PetscCall(VecDuplicate(u, &b));
  PetscCall(VecDuplicate(b, &x));

  /*
     Set exact solution; then compute right-hand-side vector.
     By default we use an exact solution with all elements equal to 1.0;
     alternatively, the runtime option -random_exact_sol forms a solution
     vector with random components.
  */
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-random_exact_sol", &flg, NULL));
  if (flg) {
    PetscCall(PetscRandomCreate(PETSC_COMM_WORLD, &rctx));
    PetscCall(PetscRandomSetFromOptions(rctx));
    PetscCall(VecSetRandom(u, rctx));
    PetscCall(PetscRandomDestroy(&rctx));
  } else {
    PetscCall(VecSet(u, 1.0));
  }
  PetscCall(MatMult(A, u, b));

  /*
     View the exact solution vector if desired
  */
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-view_exact_sol", &flg, NULL));
  if (flg) PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create linear solver context
  */
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));

  /*
     Set operators.  Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  PetscCall(KSPSetOperators(ksp, A, A));

  /*
     Set linear solver defaults for this problem (optional).
      - By extracting the KSP and PC contexts from the KSP context,
        we can then directly call any KSP and PC routines to set
        various options.
      - The following statement is optional; this parameter could
        alternatively be specified at runtime via KSPSetFromOptions(),
        and this default can be overridden at runtime, as indicated below.
  */
  PetscCall(KSPSetTolerances(ksp, 1.e-2 / ((m + 1) * (n + 1)), PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT));
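
  /*
     A minimal sketch of extracting the PC context to call PC routines
     directly, e.g. selecting a Jacobi preconditioner (not done in this
     example):

       PC pc;
       PetscCall(KSPGetPC(ksp, &pc));
       PetscCall(PCSetType(pc, PCJACOBI));
  */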

  /*
     Set runtime options, e.g.,
         -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
     These options will override those specified above as long as
     KSPSetFromOptions() is called _after_ any other customization
     routines.
  */
  PetscCall(KSPSetFromOptions(ksp));
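
  /*
     For example, a run of this example might look like the following
     (the executable name and process count are illustrative only):

       mpiexec -n 6 ./ex40 -ksp_type gmres -ksp_monitor -pc_type none
  */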

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(KSPSolve(ksp, b, x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Check the error
  */
  PetscCall(VecAXPY(x, -1.0, u));
  PetscCall(VecNorm(x, NORM_2, &norm));
  PetscCall(KSPGetIterationNumber(ksp, &its));

  /*
     Print convergence information.  PetscPrintf() produces a single
     print statement from all processes that share a communicator.
     An alternative is PetscFPrintf(), which prints to a file.
  */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Norm of error %g iterations %" PetscInt_FMT "\n", (double)norm, its));
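
  /*
     A minimal sketch of the PetscFPrintf() alternative mentioned above,
     here simply directed to stdout rather than a file:

       PetscCall(PetscFPrintf(PETSC_COMM_WORLD, stdout, "Norm of error %g\n", (double)norm));
  */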

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  PetscCall(KSPDestroy(&ksp));
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&b));
  PetscCall(MatDestroy(&A));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      nsize: 6
      args: -pc_type none
      requires: elemental

   test:
      suffix: 2
      nsize: 6
      args: -pc_type lu -pc_factor_mat_solver_type elemental
      requires: elemental

TEST*/