Actual source code: ex21f.F
petsc-3.7.5 2017-01-01
!
!   Solves a linear system in parallel with KSP.  Also indicates
!   use of a user-provided preconditioner.  Input parameters include:
!      -m <rows>, -n <cols> : number of grid points in each direction
!
!/*T
!   Concepts: KSP^basic parallel example
!   Concepts: PC^setting a user-defined shell preconditioner
!   Processors: n
!T*/
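!
!   Example of a run (the -m and -n options set the grid dimensions;
!   any KSP/PC option can also be given at runtime):
!      mpiexec -n 2 ex21f -m 16 -n 16 -ksp_monitor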
!
!  -------------------------------------------------------------------------

      program main
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                    Include files
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  petscsys.h - base PETSc routines       petscvec.h - vectors
!  petscmat.h - matrices
!  petscksp.h - Krylov subspace methods   petscpc.h  - preconditioners

#include <petsc/finclude/petscsys.h>
#include <petsc/finclude/petscvec.h>
#include <petsc/finclude/petscmat.h>
#include <petsc/finclude/petscpc.h>
#include <petsc/finclude/petscksp.h>

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp     - Krylov subspace method (linear solver) context
!     pc      - preconditioner context
!     x, b, u - approx solution, right-hand-side, exact solution vectors
!     A       - matrix that defines linear system
!     its     - iterations for convergence
!     norm    - norm of solution error

      Vec              x,b,u
      Mat              A
      PC               pc
      KSP              ksp
      PetscScalar      v,one,neg_one
      PetscReal        norm,tol
      PetscInt         i,j,II,JJ,Istart
      PetscInt         Iend,m,n,its,ione
      PetscMPIInt      rank
      PetscBool        flg
      PetscErrorCode   ierr

!  Note: Any user-defined Fortran routines MUST be declared as external.

      external SampleShellPCSetUp,SampleShellPCApply

!  Common block to store data for user-provided preconditioner
      common /mypcs/ jacobi,sor,work
      PC               jacobi,sor
      Vec              work
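
!  (The same common block is repeated in SampleShellPCSetUp() and
!   SampleShellPCApply() below so that all three routines see the same
!   sub-preconditioner contexts and work vector.)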

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      one     = 1.0
      neg_one = -1.0
      m       = 8
      n       = 7
      ione    = 1
      call PetscOptionsGetInt(PETSC_NULL_OBJECT,PETSC_NULL_CHARACTER,  &
     &                        '-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_OBJECT,PETSC_NULL_CHARACTER,  &
     &                        '-n',n,flg,ierr)
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
      call MatSetFromOptions(A,ierr)
      call MatSetUp(A,ierr)

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)

!  Set matrix elements for the 2-D, five-point stencil in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.
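!   - The global row number of grid point (i,j) on the m x n grid is
!     II = i*n + j; the loop below recovers i and j from II.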

      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,ione,II,ione,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,ione,II,ione,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,ione,II,ione,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,ione,II,ione,JJ,v,ADD_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,ione,II,ione,II,v,ADD_VALUES,ierr)
 10   continue

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transit,
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired -- or use the more general routine VecCreate().
!   - When solving a linear system, the vectors and matrices MUST
!     be partitioned accordingly.  PETSc automatically generates
!     appropriately partitioned matrices and vectors when MatCreate()
!     and VecCreate() are used with the same communicator.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)

!  Set exact solution; then compute right-hand-side vector.

      call VecSet(u,one,ierr)
      call MatMult(A,u,b,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!          Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set operators.  Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,ierr)

!  Set linear solver defaults for this problem (optional).
!   - By extracting the KSP and PC contexts from the KSP context,
!     we can then directly call any KSP and PC routines
!     to set various options.

      call KSPGetPC(ksp,pc,ierr)
      tol = 1.e-7
      call KSPSetTolerances(ksp,tol,PETSC_DEFAULT_REAL,               &
     &     PETSC_DEFAULT_REAL,PETSC_DEFAULT_INTEGER,ierr)

!
!  Set a user-defined shell preconditioner
!

!  (Required) Indicate to PETSc that we are using a shell preconditioner
      call PCSetType(pc,PCSHELL,ierr)

!  (Required) Set the user-defined routine for applying the preconditioner
      call PCShellSetApply(pc,SampleShellPCApply,ierr)

!  (Optional) Do any setup required for the preconditioner
!     Note: if you use PCShellSetSetUp(), this will be done for you
      call SampleShellPCSetUp(pc,x,ierr)

!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.

      call KSPSetFromOptions(ksp,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call KSPSolve(ksp,b,x,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error

      call VecAXPY(x,neg_one,u,ierr)
      call VecNorm(x,NORM_2,norm,ierr)
      call KSPGetIterationNumber(ksp,its,ierr)

      if (rank .eq. 0) then
        if (norm .gt. 1.e-12) then
          write(6,100) norm,its
        else
          write(6,110) its
        endif
      endif
 100  format('Norm of error ',1pe11.4,' iterations ',i5)
 110  format('Norm of error < 1.e-12, iterations ',i5)

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call KSPDestroy(ksp,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)

!  Free up PCShell data
      call PCDestroy(sor,ierr)
      call PCDestroy(jacobi,ierr)
      call VecDestroy(work,ierr)

!  Always call PetscFinalize() before exiting a program.

      call PetscFinalize(ierr)
      end

!/***********************************************************************/
!/*          Routines for a user-defined shell preconditioner           */
!/***********************************************************************/

!
!   SampleShellPCSetUp - This routine sets up a user-defined
!   preconditioner context.
!
!   Input Parameters:
!   pc - preconditioner object
!   x  - vector
!
!   Output Parameter:
!   ierr - error code (nonzero if error has been detected)
!
!   Notes:
!   In this example the shell preconditioner combines a Jacobi method
!   and an SOR method.  Here we create the two inner preconditioner
!   contexts and a work vector; these are then used within the routine
!   SampleShellPCApply().
!
      subroutine SampleShellPCSetUp(pc,x,ierr)

      implicit none

#include <petsc/finclude/petscsys.h>
#include <petsc/finclude/petscvec.h>
#include <petsc/finclude/petscmat.h>
#include <petsc/finclude/petscpc.h>

      PC               pc
      Vec              x
      Mat              pmat
      PetscErrorCode   ierr

!  Common block to store data for user-provided preconditioner
      common /mypcs/ jacobi,sor,work
      PC               jacobi,sor
      Vec              work

      call PCGetOperators(pc,PETSC_NULL_OBJECT,pmat,ierr)
      call PCCreate(PETSC_COMM_WORLD,jacobi,ierr)
      call PCSetType(jacobi,PCJACOBI,ierr)
      call PCSetOperators(jacobi,pmat,pmat,ierr)
      call PCSetUp(jacobi,ierr)

      call PCCreate(PETSC_COMM_WORLD,sor,ierr)
      call PCSetType(sor,PCSOR,ierr)
      call PCSetOperators(sor,pmat,pmat,ierr)
!      call PCSORSetSymmetric(sor,SOR_LOCAL_SYMMETRIC_SWEEP,ierr)
      call PCSetUp(sor,ierr)

      call VecDuplicate(x,work,ierr)
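!  (The work vector just created is used by SampleShellPCApply() to
!   hold the SOR contribution before it is added to the Jacobi result.)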

      end

! -------------------------------------------------------------------
!
!   SampleShellPCApply - This routine demonstrates the use of a
!   user-provided preconditioner.
!
!   Input Parameters:
!   pc - preconditioner object
!   x  - input vector
!
!   Output Parameters:
!   y    - preconditioned vector
!   ierr - error code (nonzero if error has been detected)
!
!   Notes:
!   This code applies the Jacobi and SOR preconditioners additively.
!
!   You can get the exact same effect with the PCCOMPOSITE
!   preconditioner using
!      mpiexec -n 1 ex21f -ksp_monitor -pc_type composite -pc_composite_pcs jacobi,sor -pc_composite_type additive
!
      subroutine SampleShellPCApply(pc,x,y,ierr)

      implicit none

#include <petsc/finclude/petscsys.h>
#include <petsc/finclude/petscvec.h>
#include <petsc/finclude/petscpc.h>

      PC               pc
      Vec              x,y
      PetscErrorCode   ierr
      PetscScalar      one

!  Common block to store data for user-provided preconditioner
      common /mypcs/ jacobi,sor,work
      PC               jacobi,sor
      Vec              work

      one = 1.0
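
!  Apply the two sub-preconditioners additively: y = jacobi(x) + sor(x)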
      call PCApply(jacobi,x,y,ierr)
      call PCApply(sor,x,work,ierr)
      call VecAXPY(y,one,work,ierr)

      end