Actual source code: ex15f.F
!
!  "$Id: ex15f.F,v 1.17 2001/08/07 03:04:00 balay Exp $";
!
!  Solves a linear system in parallel with SLES.  Also demonstrates the
!  use of a user-provided preconditioner.  Input parameters include:
!     -user_defined_pc : activate a user-defined preconditioner
!
!  Program usage: mpirun ex15f [-help] [all PETSc options]
!
!/*T
!  Concepts: SLES^basic parallel example
!  Concepts: PC^setting a user-defined shell preconditioner
!  Processors: n
!T*/
!
!  -------------------------------------------------------------------------
      program main
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                          Include files
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  petsc.h     - base PETSc routines       petscvec.h - vectors
!  petscsys.h  - system routines           petscmat.h - matrices
!  petscksp.h  - Krylov subspace methods   petscpc.h  - preconditioners
!  petscsles.h - SLES interface
#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscmat.h"
#include "include/finclude/petscpc.h"
#include "include/finclude/petscksp.h"
#include "include/finclude/petscsles.h"
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                        Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     sles    - linear solver context
!     ksp     - Krylov subspace method context
!     pc      - preconditioner context
!     x, b, u - approximate solution, right-hand-side, and exact
!               solution vectors
!     A       - matrix that defines the linear system
!     its     - number of iterations required for convergence
!     norm    - norm of the solution error

      Vec              x,b,u
      Mat              A
      SLES             sles
      PC               pc
      KSP              ksp
      PetscScalar      v,one,neg_one
      double precision norm,tol
      integer          i,j,II,JJ,Istart,Iend,ierr,m,n
      integer          user_defined_pc,its,flg,rank
!  Note: Any user-defined Fortran routines MUST be declared as external.

      external SampleShellPCSetUp, SampleShellPCApply

!  Common block to store data for the user-provided preconditioner
      common /myshellpc/ diag
      Vec    diag
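!  Note: in this example the common block takes the place of a
!  user-defined preconditioner context: the diagonal work vector
!  created in SampleShellPCSetUp() is shared with SampleShellPCApply()
!  through /myshellpc/ rather than through a context pointer.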
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                        Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      one     = 1.0
      neg_one = -1.0
      m       = 8
      n       = 7
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!       Compute the matrix and right-hand-side vector that define
!       the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,m*n,   &
     &               m*n,A,ierr)
      call MatSetFromOptions(A,ierr)

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)
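!  Note: the ownership range is half-open; this process owns global
!  rows Istart through Iend-1, so the assembly loop below visits each
!  locally owned row exactly once.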
!  Set matrix elements for the 2-D, five-point stencil in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.
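!  For reference: grid point (i,j) of the m x n grid corresponds to
!  global row II = i*n + j, and the values inserted below form the
!  standard five-point Laplacian stencil
!
!                    -1
!                -1   4  -1
!                    -1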
      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,1,II,1,II,v,ADD_VALUES,ierr)
 10   continue
!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transit
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)
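!  Note: because the values above were contributed with ADD_VALUES,
!  any contributions to the same matrix entry (including ones arriving
!  from other processes) are summed during assembly.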
!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired -- or use the more general routine VecCreate().
!   - When solving a linear system, the vectors and matrices MUST
!     be partitioned accordingly.  PETSc automatically generates
!     appropriately partitioned matrices and vectors when MatCreate()
!     and VecCreate() are used with the same communicator.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)

!  Set exact solution; then compute right-hand-side vector.

      call VecSet(one,u,ierr)
      call MatMult(A,u,b,ierr)
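!  Since u is the vector of all ones, b = A*u is a right-hand side
!  whose exact solution is known; the computed solution x can then be
!  compared against u below to measure the error.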
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!          Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

      call SLESCreate(PETSC_COMM_WORLD,sles,ierr)

!  Set operators.  Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call SLESSetOperators(sles,A,A,DIFFERENT_NONZERO_PATTERN,ierr)

!  Set linear solver defaults for this problem (optional).
!   - By extracting the KSP and PC contexts from the SLES context,
!     we can then directly call any KSP and PC routines
!     to set various options.

      call SLESGetKSP(sles,ksp,ierr)
      call SLESGetPC(sles,pc,ierr)
      tol = 1.e-7
      call KSPSetTolerances(ksp,tol,PETSC_DEFAULT_DOUBLE_PRECISION,    &
     &     PETSC_DEFAULT_DOUBLE_PRECISION,PETSC_DEFAULT_INTEGER,ierr)
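!  Note: only the relative convergence tolerance is set explicitly
!  here; the absolute tolerance, divergence tolerance, and maximum
!  iteration count keep their PETSc default values.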
!
!  Set a user-defined shell preconditioner if desired
!
      call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-user_defined_pc',&
     &                         user_defined_pc,ierr)

      if (user_defined_pc .eq. 1) then

!  (Required) Indicate to PETSc that we are using a shell preconditioner
        call PCSetType(pc,PCSHELL,ierr)

!  (Required) Set the user-defined routine for applying the preconditioner
        call PCShellSetApply(pc,SampleShellPCApply,PETSC_NULL_OBJECT,  &
     &                       ierr)

!  (Optional) Do any setup required for the preconditioner
        call SampleShellPCSetUp(A,x,ierr)

      else
        call PCSetType(pc,PCJACOBI,ierr)
      endif
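!  Note: once the type is PCSHELL, each time the Krylov method needs
!  a preconditioner application y = M^(-1) x, PETSc invokes the
!  routine registered above, i.e. SampleShellPCApply(dummy,x,y,ierr).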
!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  SLESSetFromOptions() is called _after_ any other customization
!  routines.

      call SLESSetFromOptions(sles,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call SLESSolve(sles,b,x,its,ierr)
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error
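!  Note: in this PETSc release VecAXPY(alpha,x,y) computes
!  y = y + alpha*x, so the call below overwrites x with x - u;
!  its 2-norm is the norm of the solution error.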
      call VecAXPY(neg_one,u,x,ierr)
      call VecNorm(x,NORM_2,norm,ierr)
      if (rank .eq. 0) then
        if (norm .gt. 1.e-12) then
          write(6,100) norm,its
        else
          write(6,110) its
        endif
      endif
 100  format('Norm of error ',1pe10.4,' iterations ',i5)
 110  format('Norm of error < 1.e-12, iterations ',i5)
!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call SLESDestroy(sles,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)
      if (user_defined_pc .eq. 1) then
        call VecDestroy(diag,ierr)
      endif

!  Always call PetscFinalize() before exiting a program.

      call PetscFinalize(ierr)
      end
!/***********************************************************************/
!/*          Routines for a user-defined shell preconditioner           */
!/***********************************************************************/

!
!   SampleShellPCSetUp - This routine sets up a user-defined
!   preconditioner context.
!
!   Input Parameters:
!   pmat  - preconditioner matrix
!   x     - vector
!
!   Output Parameter:
!   ierr  - error code (nonzero if an error has been detected)
!
!   Notes:
!   In this example, we define the shell preconditioner to be the
!   Jacobi method.  Thus, here we create a work vector for storing the
!   reciprocal of the diagonal of the preconditioner matrix; this
!   vector is then used within the routine SampleShellPCApply().
!
      subroutine SampleShellPCSetUp(pmat,x,ierr)

      implicit none

#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscmat.h"

      Vec     x
      Mat     pmat
      integer ierr
!  Common block to store data for the user-provided preconditioner
      common /myshellpc/ diag
      Vec    diag

      call VecDuplicate(x,diag,ierr)
      call MatGetDiagonal(pmat,diag,ierr)
      call VecReciprocal(diag,ierr)
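!  diag now holds 1/A(i,i) for each row; SampleShellPCApply() picks it
!  up through the /myshellpc/ common block.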
      end
! -------------------------------------------------------------------
!
!   SampleShellPCApply - This routine demonstrates the use of a
!   user-provided preconditioner.
!
!   Input Parameters:
!   dummy - optional user-defined context, not used here
!   x     - input vector
!
!   Output Parameters:
!   y     - preconditioned vector
!   ierr  - error code (nonzero if an error has been detected)
!
!   Notes:
!   This code implements the Jacobi preconditioner, merely as an
!   example of working with a PCSHELL.  Note that the Jacobi method
!   is already provided within PETSc.
!
      subroutine SampleShellPCApply(dummy,x,y,ierr)

      implicit none

#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"

      Vec     x,y
      integer dummy,ierr
!  Common block to store data for the user-provided preconditioner
      common /myshellpc/ diag
      Vec    diag
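!  Apply the Jacobi preconditioner: VecPointwiseMult(x,diag,y) sets
!  y(i) = x(i)*diag(i) = x(i)/A(i,i) componentwise.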
      call VecPointwiseMult(x,diag,y,ierr)

      end