Actual source code: ex2f.F90
1: !
2: ! Description: Solves a linear system in parallel with KSP (Fortran code).
3: ! Also shows how to set a user-defined monitoring routine.
4: !
5: ! -----------------------------------------------------------------------
7: program main
8: #include <petsc/finclude/petscksp.h>
9: use petscksp
10: implicit none
11: !
12: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
13: ! Variable declarations
14: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
15: !
16: ! Variables:
17: ! ksp - linear solver (Krylov subspace method) context
19: ! pc - preconditioner context
20: ! x, b, u - approx solution, right-hand side, exact solution vectors
21: ! A - matrix that defines linear system
22: ! its - iterations for convergence
23: ! norm - norm of error in solution
24: ! rctx - random number generator context
25: !
26: ! Note that vectors are declared as PETSc "Vec" objects. These vectors
27: ! are mathematical objects that contain more than just an array of
28: ! double precision numbers. I.e., vectors in PETSc are not just
29: ! double precision x(*).
30: ! However, local vector data can be easily accessed via VecGetArray().
31: ! See the Fortran section of the PETSc users manual for details.
32: !
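!
! For illustration only (not used in this example): a minimal sketch of
! accessing the local vector data from Fortran. Recent PETSc releases
! accept a Fortran pointer in VecGetArray()/VecRestoreArray(); older
! releases provide the same functionality as VecGetArrayF90().
!
!      PetscScalar, pointer :: xx(:)
!      PetscCallA(VecGetArray(x,xx,ierr))
!      ... read or modify the locally owned entries of xx ...
!      PetscCallA(VecRestoreArray(x,xx,ierr))
!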
33: PetscReal norm
34: PetscInt i,j,II,JJ,m,n,its
35: PetscInt Istart,Iend,ione
36: PetscErrorCode ierr
37: PetscMPIInt rank,size
38: PetscBool flg
39: PetscScalar v,one,neg_one
40: Vec x,b,u
41: Mat A
42: KSP ksp
43: PetscRandom rctx
44: PetscViewerAndFormat vf,vzero
46: ! These variables are not currently used.
47: ! PC pc
48: ! PCType ptype
49: ! PetscReal tol
51: ! Note: Any user-defined Fortran routines (such as MyKSPMonitor)
52: ! MUST be declared as external.
54: external MyKSPMonitor,MyKSPConverged
56: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
57: ! Beginning of program
58: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
60: PetscCallA(PetscInitialize(PETSC_NULL_CHARACTER,ierr))
61: m = 3
62: n = 3
63: one = 1.0
64: neg_one = -1.0
65: ione = 1
66: PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-m',m,flg,ierr))
67: PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-n',n,flg,ierr))
68: PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr))
69: PetscCallMPIA(MPI_Comm_size(PETSC_COMM_WORLD,size,ierr))
71: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
72: ! Compute the matrix and right-hand-side vector that define
73: ! the linear system, Ax = b.
74: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
76: ! Create parallel matrix, specifying only its global dimensions.
77: ! When using MatCreate(), the matrix format can be specified at
78: ! runtime. Also, the parallel partitioning of the matrix is
79: ! determined by PETSc at runtime.
81: PetscCallA(MatCreate(PETSC_COMM_WORLD,A,ierr))
82: PetscCallA(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr))
83: PetscCallA(MatSetFromOptions(A,ierr))
84: PetscCallA(MatSetUp(A,ierr))
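! For example, the storage format could be selected at runtime with an
! option such as -mat_type mpiaij, which the MatSetFromOptions() call
! above will pick up.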
86: ! Currently, all PETSc parallel matrix formats are partitioned by
87: ! contiguous chunks of rows across the processors. Determine which
88: ! rows of the matrix are locally owned.
90: PetscCallA(MatGetOwnershipRange(A,Istart,Iend,ierr))
92: ! Set matrix elements for the 2-D, five-point stencil in parallel.
93: ! - Each processor needs to insert only elements that it owns
94: ! locally (but any non-local elements will be sent to the
95: ! appropriate processor during matrix assembly).
96: ! - Always specify global rows and columns of matrix entries.
97: ! - Note that MatSetValues() uses 0-based row and column numbers
98: ! in Fortran as well as in C.
100: ! Note: this uses the less common natural ordering that orders first
101: ! all the unknowns for x = h, then for x = 2h, etc.; hence you see JJ = II +- n
102: ! instead of JJ = II +- m as you might expect. The more standard ordering
103: ! would first do all variables for y = h, then y = 2h etc.
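!
! For example, with the default m = n = 3 the interior unknown II = 4
! (i = 1, j = 1) receives the off-diagonal value -1 in columns II-n = 1,
! II-1 = 3, II+1 = 5 and II+n = 7, and the value 4 on the diagonal.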
105: do 10, II=Istart,Iend-1
106: v = -1.0
107: i = II/n
108: j = II - i*n
109: if (i.gt.0) then
110: JJ = II - n
111: PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],INSERT_VALUES,ierr))
112: endif
113: if (i.lt.m-1) then
114: JJ = II + n
115: PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],INSERT_VALUES,ierr))
116: endif
117: if (j.gt.0) then
118: JJ = II - 1
119: PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],INSERT_VALUES,ierr))
120: endif
121: if (j.lt.n-1) then
122: JJ = II + 1
123: PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],INSERT_VALUES,ierr))
124: endif
125: v = 4.0
126: PetscCallA( MatSetValues(A,ione,[II],ione,[II],[v],INSERT_VALUES,ierr))
127: 10 continue
129: ! Assemble matrix, using the 2-step process:
130: ! MatAssemblyBegin(), MatAssemblyEnd()
131: ! Computations can be done while messages are in transit,
132: ! by placing code between these two statements.
134: PetscCallA(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr))
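!      (purely local computations that do not touch A could be placed here,
!       overlapping with the communication started by MatAssemblyBegin())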
135: PetscCallA(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr))
137: ! Create parallel vectors.
138: ! - Here, the parallel partitioning of the vector is determined by
139: ! PETSc at runtime. We could also specify the local dimensions
140: ! if desired -- or use the more general routine VecCreate().
141: ! - When solving a linear system, the vectors and matrices MUST
142: ! be partitioned accordingly. PETSc automatically generates
143: ! appropriately partitioned matrices and vectors when MatCreate()
144: ! and VecCreate() are used with the same communicator.
145: ! - Note: We form 1 vector from scratch and then duplicate as needed.
147: PetscCallA(VecCreateFromOptions(PETSC_COMM_WORLD,PETSC_NULL_CHARACTER,ione,PETSC_DECIDE,m*n,u,ierr))
148: PetscCallA(VecSetFromOptions(u,ierr))
149: PetscCallA(VecDuplicate(u,b,ierr))
150: PetscCallA(VecDuplicate(b,x,ierr))
152: ! Set exact solution; then compute right-hand-side vector.
153: ! By default the exact solution is a vector with all elements
154: ! equal to 1.0; alternatively, the runtime option
155: ! -random_exact_sol forms an exact solution vector with random components.
157: PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-random_exact_sol',flg,ierr))
158: if (flg) then
159: PetscCallA(PetscRandomCreate(PETSC_COMM_WORLD,rctx,ierr))
160: PetscCallA(PetscRandomSetFromOptions(rctx,ierr))
161: PetscCallA(VecSetRandom(u,rctx,ierr))
162: PetscCallA(PetscRandomDestroy(rctx,ierr))
163: else
164: PetscCallA(VecSet(u,one,ierr))
165: endif
166: PetscCallA(MatMult(A,u,b,ierr))
168: ! View the exact solution vector if desired
170: PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-view_exact_sol',flg,ierr))
171: if (flg) then
172: PetscCallA(VecView(u,PETSC_VIEWER_STDOUT_WORLD,ierr))
173: endif
175: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
176: ! Create the linear solver and set various options
177: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
179: ! Create linear solver context
181: PetscCallA(KSPCreate(PETSC_COMM_WORLD,ksp,ierr))
183: ! Set operators. Here the matrix that defines the linear system
184: ! also serves as the preconditioning matrix.
186: PetscCallA(KSPSetOperators(ksp,A,A,ierr))
188: ! Set linear solver defaults for this problem (optional).
189: ! - By extracting the KSP and PC contexts from the KSP context,
190: ! we can then directly call any KSP and PC routines
191: ! to set various options.
192: ! - The following four statements are optional; all of these
193: ! parameters could alternatively be specified at runtime via
194: ! KSPSetFromOptions(). All of these defaults can be
195: ! overridden at runtime, as indicated below.
197: ! We comment out this section of code since the Jacobi
198: ! preconditioner is not a good general default.
200: ! PetscCallA(KSPGetPC(ksp,pc,ierr))
201: ! ptype = PCJACOBI
202: ! PetscCallA(PCSetType(pc,ptype,ierr))
203: ! tol = 1.e-7
204: ! PetscCallA(KSPSetTolerances(ksp,tol,PETSC_CURRENT_REAL,PETSC_CURRENT_REAL,PETSC_CURRENT_INTEGER,ierr))
206: ! Set user-defined monitoring routine if desired
208: PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-my_ksp_monitor',flg,ierr))
209: if (flg) then
210: vzero = 0
211: PetscCallA(KSPMonitorSet(ksp,MyKSPMonitor,vzero,PETSC_NULL_FUNCTION,ierr))
212: !
213: ! Also set the default KSP residual monitor, showing how it may be used from Fortran
214: !
215: PetscCallA(PetscViewerAndFormatCreate(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_DEFAULT,vf,ierr))
216: PetscCallA(KSPMonitorSet(ksp,KSPMonitorResidual,vf,PetscViewerAndFormatDestroy,ierr))
217: endif
219: ! Set runtime options, e.g.,
220: ! -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
221: ! These options will override those specified above as long as
222: ! KSPSetFromOptions() is called _after_ any other customization
223: ! routines.
225: PetscCallA(KSPSetFromOptions(ksp,ierr))
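! For example (executable and launcher names assumed), the solver could be
! selected entirely at runtime with a command such as
!     mpiexec -n 2 ./ex2f -ksp_type gmres -pc_type jacobi -ksp_monitor_short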
227: ! Set convergence test routine if desired
229: PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-my_ksp_convergence',flg,ierr))
230: if (flg) then
231: PetscCallA(KSPSetConvergenceTest(ksp,MyKSPConverged,0,PETSC_NULL_FUNCTION,ierr))
232: endif
233: !
234: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
235: ! Solve the linear system
236: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
238: PetscCallA(KSPSolve(ksp,b,x,ierr))
240: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
241: ! Check solution and clean up
242: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
244: ! Check the error
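! (VecAXPY() overwrites x with x - u, so the 2-norm computed below is the
!  norm of the solution error; x is not needed again after this point.)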
245: PetscCallA(VecAXPY(x,neg_one,u,ierr))
246: PetscCallA(VecNorm(x,NORM_2,norm,ierr))
247: PetscCallA(KSPGetIterationNumber(ksp,its,ierr))
248: if (rank .eq. 0) then
249: if (norm .gt. 1.e-12) then
250: write(6,100) norm,its
251: else
252: write(6,110) its
253: endif
254: endif
255: 100 format('Norm of error ',e11.4,' iterations ',i5)
256: 110 format('Norm of error < 1.e-12 iterations ',i5)
258: ! Free work space. All PETSc objects should be destroyed when they
259: ! are no longer needed.
261: PetscCallA(KSPDestroy(ksp,ierr))
262: PetscCallA(VecDestroy(u,ierr))
263: PetscCallA(VecDestroy(x,ierr))
264: PetscCallA(VecDestroy(b,ierr))
265: PetscCallA(MatDestroy(A,ierr))
267: ! Always call PetscFinalize() before exiting a program. This routine
268: ! - finalizes the PETSc libraries as well as MPI
269: ! - provides summary and diagnostic information if certain runtime
270: ! options are chosen (e.g., -log_view). See PetscFinalize()
271: ! manpage for more information.
273: PetscCallA(PetscFinalize(ierr))
274: end
276: ! --------------------------------------------------------------
277: !
278: ! MyKSPMonitor - This is a user-defined routine for monitoring
279: ! the KSP iterative solvers.
280: !
281: ! Input Parameters:
282: ! ksp - iterative context
283: ! n - iteration number
284: ! rnorm - 2-norm (preconditioned) residual value (may be estimated)
285: ! dummy - optional user-defined monitor context (unused here)
286: !
287: subroutine MyKSPMonitor(ksp,n,rnorm,dummy,ierr)
288: use petscksp
289: implicit none
291: KSP ksp
292: Vec x
293: PetscErrorCode ierr
294: PetscInt n,dummy
295: PetscMPIInt rank
296: PetscReal rnorm
298: ! Build the solution vector
299: PetscCallA(KSPBuildSolution(ksp,PETSC_NULL_VEC,x,ierr))
301: ! Write the solution vector and residual norm to stdout
302: ! - Note that the parallel viewer PETSC_VIEWER_STDOUT_WORLD
303: ! handles data from multiple processors so that the
304: ! output is not jumbled.
306: PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr))
307: if (rank .eq. 0) write(6,100) n
308: PetscCallA(VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr))
309: if (rank .eq. 0) write(6,200) n,rnorm
311: 100 format('iteration ',i5,' solution vector:')
312: 200 format('iteration ',i5,' residual norm ',e11.4)
313: ierr = 0
314: end
316: ! --------------------------------------------------------------
317: !
318: ! MyKSPConverged - This is a user-defined routine for testing
319: ! convergence of the KSP iterative solvers.
320: !
321: ! Input Parameters:
322: ! ksp - iterative context
323: ! n - iteration number
324: ! rnorm - 2-norm (preconditioned) residual value (may be estimated)
325: ! dummy - optional user-defined convergence context (unused here)
326: !
327: subroutine MyKSPConverged(ksp,n,rnorm,flag,dummy,ierr)
328: use petscksp
329: implicit none
331: KSP ksp
332: PetscErrorCode ierr
333: PetscInt n,dummy
334: KSPConvergedReason flag
335: PetscReal rnorm
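! Returning a positive flag tells KSP the solve has converged, a negative
! value that it has diverged, and 0 that iteration should continue. This
! simple test declares convergence once the (preconditioned) residual norm
! drops below 0.05.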
337: if (rnorm .le. .05) then
338: flag = 1
339: else
340: flag = 0
341: endif
342: ierr = 0
344: end
346: !/*TEST
347: !
348: ! test:
349: ! nsize: 2
350: ! args: -pc_type jacobi -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always
351: !
352: ! test:
353: ! suffix: 2
354: ! nsize: 2
355: ! args: -pc_type jacobi -my_ksp_monitor -ksp_gmres_cgs_refinement_type refine_always
356: !
357: !TEST*/