Actual source code: ex21f.F
1: !
2: ! "$Id: ex21f.F,v 1.9 2001/08/07 03:04:00 balay Exp $";
3: !
4: ! Solves a linear system in parallel with SLES. Also demonstrates the
5: ! use of a user-provided preconditioner. Input parameters include:
6: !    -m <mesh_x>, -n <mesh_y> : number of mesh points in each direction
7: ! Program usage: mpirun ex21f [-help] [all PETSc options]
8: !
9: !/*T
10: ! Concepts: SLES^basic parallel example
11: ! Concepts: PC^setting a user-defined shell preconditioner
12: ! Processors: n
13: !T*/
14: !
15: ! -------------------------------------------------------------------------
17: program main
18: implicit none
20: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
21: ! Include files
22: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
23: !
24: !    petsc.h     - base PETSc routines       petscvec.h - vectors
25: !    petscsys.h  - system routines           petscmat.h - matrices
26: !    petscksp.h  - Krylov subspace methods   petscpc.h  - preconditioners
27: !    petscsles.h - SLES interface
29: #include "include/finclude/petsc.h"
30: #include "include/finclude/petscvec.h"
31: #include "include/finclude/petscmat.h"
32: #include "include/finclude/petscpc.h"
33: #include "include/finclude/petscksp.h"
34: #include "include/finclude/petscsles.h"
36: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
37: ! Variable declarations
38: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
39: !
40: ! Variables:
41: ! sles - linear solver context
42: ! ksp - Krylov subspace method context
43: ! pc - preconditioner context
44: ! x, b, u - approx solution, right-hand-side, exact solution vectors
45: ! A - matrix that defines linear system
46: ! its - iterations for convergence
47: ! norm - norm of solution error
49: Vec x,b,u
50: Mat A
51: SLES sles
52: PC pc
53: KSP ksp
54: PetscScalar v,one,neg_one
55: double precision norm,tol
56: integer i,j,II,JJ,Istart,Iend,ierr,m,n
57: integer its,flg,rank
59: ! Note: Any user-defined Fortran routines MUST be declared as external.
61: external SampleShellPCSetUp,SampleShellPCApply
63: ! Common block to store data for user-provided preconditioner
64: common /mypcs/ jacobi,sor,work
65: PC jacobi,sor
66: Vec work
68: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
69: ! Beginning of program
70: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
72: call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
73: one = 1.0
74: neg_one = -1.0
75: m = 8
76: n = 7
77: call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
78: call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
79: call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
81: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
82: ! Compute the matrix and right-hand-side vector that define
83: ! the linear system, Ax = b.
84: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
86: ! Create parallel matrix, specifying only its global dimensions.
87: ! When using MatCreate(), the matrix format can be specified at
88: ! runtime. Also, the parallel partitioning of the matrix is
89: ! determined by PETSc at runtime.
91: call MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,m*n, &
92: & m*n,A,ierr)
94: call MatSetFromOptions(A,ierr)
96: ! Currently, all PETSc parallel matrix formats are partitioned by
97: ! contiguous chunks of rows across the processors. Determine which
98: ! rows of the matrix are locally owned.
100: call MatGetOwnershipRange(A,Istart,Iend,ierr)
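!  Note: Istart is the first locally owned row and Iend is one past the
!  last, so this process owns rows Istart, ..., Iend-1 (the range used
!  in the assembly loop below).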
102: ! Set matrix elements for the 2-D, five-point stencil in parallel.
103: ! - Each processor needs to insert only elements that it owns
104: ! locally (but any non-local elements will be sent to the
105: ! appropriate processor during matrix assembly).
106: ! - Always specify global rows and columns of matrix entries.
107: ! - Note that MatSetValues() uses 0-based row and column numbers
108: ! in Fortran as well as in C.
110: do 10, II=Istart,Iend-1
111: v = -1.0
112: i = II/n
113: j = II - i*n
114: if (i.gt.0) then
115: JJ = II - n
116: call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
117: endif
118: if (i.lt.m-1) then
119: JJ = II + n
120: call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
121: endif
122: if (j.gt.0) then
123: JJ = II - 1
124: call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
125: endif
126: if (j.lt.n-1) then
127: JJ = II + 1
128: call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
129: endif
130: v = 4.0
131: call MatSetValues(A,1,II,1,II,v,ADD_VALUES,ierr)
132: 10 continue
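!  For each interior grid point the loop above assembles the standard
!  5-point finite-difference stencil:
!
!              -1
!          -1   4  -1
!              -1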
134: ! Assemble matrix, using the 2-step process:
135: ! MatAssemblyBegin(), MatAssemblyEnd()
136: ! Computations can be done while messages are in transit
137: ! by placing code between these two statements.
139: call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
140: call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)
142: ! Create parallel vectors.
143: ! - Here, the parallel partitioning of the vector is determined by
144: ! PETSc at runtime. We could also specify the local dimensions
145: ! if desired -- or use the more general routine VecCreate().
146: ! - When solving a linear system, the vectors and matrices MUST
147: ! be partitioned accordingly. PETSc automatically generates
148: ! appropriately partitioned matrices and vectors when MatCreate()
149: ! and VecCreate() are used with the same communicator.
150: ! - Note: We form one vector from scratch and then duplicate it as needed.
152: call VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,u,ierr)
153: call VecDuplicate(u,b,ierr)
154: call VecDuplicate(b,x,ierr)
156: ! Set exact solution; then compute right-hand-side vector.
158: call VecSet(one,u,ierr)
159: call MatMult(A,u,b,ierr)
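!  Because the exact solution u is known (all ones), b = A*u provides a
!  right-hand side for which the error in the computed solution can be
!  checked after the solve.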
161: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
162: ! Create the linear solver and set various options
163: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
165: ! Create linear solver context
167: call SLESCreate(PETSC_COMM_WORLD,sles,ierr)
169: ! Set operators. Here the matrix that defines the linear system
170: ! also serves as the preconditioning matrix.
172: call SLESSetOperators(sles,A,A,DIFFERENT_NONZERO_PATTERN,ierr)
174: ! Set linear solver defaults for this problem (optional).
175: ! - By extracting the KSP and PC contexts from the SLES context,
176: ! we can then directly call any KSP and PC routines
177: ! to set various options.
179: call SLESGetKSP(sles,ksp,ierr)
180: call SLESGetPC(sles,pc,ierr)
181: tol = 1.e-7
182: call KSPSetTolerances(ksp,tol,PETSC_DEFAULT_DOUBLE_PRECISION, &
183: & PETSC_DEFAULT_DOUBLE_PRECISION,PETSC_DEFAULT_INTEGER,ierr)
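!  (Illustrative, commented out) The Krylov method could also be selected
!  here in the code rather than at runtime with -ksp_type, e.g.:
!      call KSPSetType(ksp,KSPGMRES,ierr)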
185: !
186: ! Set a user-defined shell preconditioner
187: !
189: ! (Required) Indicate to PETSc that we are using a shell preconditioner
190: call PCSetType(pc,PCSHELL,ierr)
192: ! (Required) Set the user-defined routine for applying the preconditioner
193: call PCShellSetApply(pc,SampleShellPCApply,PETSC_NULL_OBJECT, &
194: & ierr)
196: ! (Optional) Do any setup required for the preconditioner
197: call SampleShellPCSetUp(A,x,ierr)
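!  (The setup routine above creates the inner Jacobi and SOR preconditioner
!  contexts and a work vector; they are shared with SampleShellPCApply()
!  through the common block /mypcs/ and destroyed near the end of main.)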
200: ! Set runtime options, e.g.,
201: ! -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
202: ! These options will override those specified above as long as
203: ! SLESSetFromOptions() is called _after_ any other customization
204: ! routines.
206: call SLESSetFromOptions(sles,ierr)
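!  A typical run (the options shown are illustrative) might be:
!      mpirun -np 2 ex21f -m 16 -n 16 -ksp_monitor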
208: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
209: ! Solve the linear system
210: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
212: call SLESSolve(sles,b,x,its,ierr)
214: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
215: ! Check solution and clean up
216: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
218: ! Check the error
220: call VecAXPY(neg_one,u,x,ierr)
221: call VecNorm(x,NORM_2,norm,ierr)
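!  With the calling sequence used here, VecAXPY(neg_one,u,x) computes
!  x <- x - u, so the 2-norm of x is now the norm of the solution error.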
222: if (rank .eq. 0) then
223: if (norm .gt. 1.e-12) then
224: write(6,100) norm,its
225: else
226: write(6,110) its
227: endif
228: endif
229: 100 format('Norm of error ',1pe10.4,' iterations ',i5)
230: 110 format('Norm of error < 1.e-12, iterations ',i5)
233: ! Free work space. All PETSc objects should be destroyed when they
234: ! are no longer needed.
236: call SLESDestroy(sles,ierr)
237: call VecDestroy(u,ierr)
238: call VecDestroy(x,ierr)
239: call VecDestroy(b,ierr)
240: call MatDestroy(A,ierr)
242: ! Free up PCShell data
243: call PCDestroy(sor,ierr)
244: call PCDestroy(jacobi,ierr)
245: call VecDestroy(work,ierr)
248: ! Always call PetscFinalize() before exiting a program.
250: call PetscFinalize(ierr)
251: end
253: !/***********************************************************************/
254: !/* Routines for a user-defined shell preconditioner */
255: !/***********************************************************************/
257: !
258: ! SampleShellPCSetUp - This routine sets up a user-defined
259: ! preconditioner context.
260: !
261: ! Input Parameters:
262: ! pmat - preconditioner matrix
263: ! x - vector
264: !
265: ! Output Parameter:
266: ! ierr - error code (nonzero if error has been detected)
267: !
268: ! Notes:
269: ! In this example, the shell preconditioner combines the Jacobi and SOR
270: ! preconditioners. Here we create the two inner PC contexts and a work
271: ! vector; these objects are then used within the routine
272: ! SampleShellPCApply().
273: !
274: subroutine SampleShellPCSetUp(pmat,x,ierr)
276: implicit none
278: #include "include/finclude/petsc.h"
279: #include "include/finclude/petscvec.h"
280: #include "include/finclude/petscmat.h"
282: Vec x
283: Mat pmat
284: integer ierr
286: ! Common block to store data for user-provided preconditioner
287: common /mypcs/ jacobi,sor,work
288: PC jacobi,sor
289: Vec work
291: call PCCreate(PETSC_COMM_WORLD,jacobi,ierr)
292: call PCSetType(jacobi,PCJACOBI,ierr)
293: call PCSetVector(jacobi,x,ierr)
294: call PCSetOperators(jacobi,pmat,pmat,DIFFERENT_NONZERO_PATTERN, &
295: & ierr)
296: call PCSetUp(jacobi,ierr)
298: call PCCreate(PETSC_COMM_WORLD,sor,ierr)
299: call PCSetType(sor,PCSOR,ierr)
300: call PCSetVector(sor,x,ierr)
301: call PCSetOperators(sor,pmat,pmat,DIFFERENT_NONZERO_PATTERN, &
302: & ierr)
303: ! call PCSORSetSymmetric(sor,SOR_LOCAL_SYMMETRIC_SWEEP,ierr)
304: call PCSetUp(sor,ierr)
306: call VecDuplicate(x,work,ierr)
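!  Note: jacobi, sor, and work are kept in the common block /mypcs/ and
!  are destroyed by the main program once the solve is complete.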
308: end
310: ! -------------------------------------------------------------------
311: !
312: ! SampleShellPCApply - This routine demonstrates the use of a
313: ! user-provided preconditioner.
314: !
315: ! Input Parameters:
316: ! dummy - optional user-defined context, not used here
317: ! x - input vector
318: !
319: ! Output Parameters:
320: ! y - preconditioned vector
321: ! ierr - error code (nonzero if error has been detected)
322: !
323: ! Notes:
324: ! This code implements the Jacobi preconditioner plus the
325: ! SOR preconditioner
326: !
327: ! Note: You can get exactly the same effect with the PCCOMPOSITE preconditioner by running
328: ! mpirun -np 1 ex21f -ksp_monitor -pc_type composite -pc_composite_pcs jacobi,sor -pc_composite_type additive
329: !
330: subroutine SampleShellPCApply(dummy,x,y,ierr)
332: implicit none
334: #include "include/finclude/petsc.h"
335: #include "include/finclude/petscvec.h"
337: Vec x,y
338: integer dummy,ierr
339: PetscScalar one
340:
341: ! Common block to store data for user-provided preconditioner
342: common /mypcs/ jacobi,sor,work
343: PC jacobi,sor
344: Vec work
346: one = 1.0
347: call PCApply(jacobi,x,y,ierr)
348: call PCApply(sor,x,work,ierr)
349: call VecAXPY(one,work,y,ierr)
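!  The result is y = (Jacobi + SOR) applied to x, i.e., the additive
!  combination of the two inner preconditioners, matching the
!  -pc_composite_type additive command noted above.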
351: end