Actual source code: ex6f.F
!
!  "$Id: ex6f.F,v 1.36 2001/08/07 03:04:00 balay Exp $";
!
!  Description: This example demonstrates repeated linear solves as
!  well as the use of different preconditioner and linear system
!  matrices.  This example also illustrates how to save PETSc objects
!  in common blocks.
!
!/*T
!  Concepts: SLES^repeatedly solving linear systems;
!  Concepts: SLES^different matrices for linear system and preconditioner;
!  Processors: n
!T*/
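!
!  Sample run (the launch line below is illustrative; the exact command
!  depends on your MPI installation):
!     mpirun -np 2 ex6f -m 4 -n 4 -nsteps 3 -ksp_monitor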
!
!  The following include statements are required for SLES Fortran programs:
!     petsc.h       - base PETSc routines
!     petscvec.h    - vectors
!     petscmat.h    - matrices
!     petscpc.h     - preconditioners
!     petscksp.h    - Krylov subspace methods
!     petscsles.h   - SLES interface
!  Other include statements may be needed if using additional PETSc
!  routines in a Fortran program, e.g.,
!     petscviewer.h - viewers
!     petscis.h     - index sets
!
      program main
#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscmat.h"
#include "include/finclude/petscsles.h"
#include "include/finclude/petscpc.h"
#include "include/finclude/petscksp.h"

!  Variables:
!
!     A       - matrix that defines linear system
!     sles    - SLES context
!     ksp     - KSP context
!     x, b, u - approx solution, RHS, exact solution vectors
!
      Vec         x,u,b
      Mat         A
      SLES        sles
      integer     i,j,II,JJ,ierr,m,n
      integer     Istart,Iend,flg,nsteps
      PetscScalar v

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      m      = 3
      n      = 3
      nsteps = 2
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-nsteps',nsteps,   &
     &                        flg,ierr)
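!  Note: the defaults above (m=3, n=3, nsteps=2) are overridden only if
!  the corresponding option (-m, -n, -nsteps) is given on the command
!  line; flg reports whether the option was found.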

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,m*n,   &
     &               m*n,A,ierr)
      call MatSetFromOptions(A,ierr)

!  The matrix is partitioned by contiguous chunks of rows across the
!  processors.  Determine which rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)

!  Set matrix elements.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
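!   - The loop below builds the standard 5-point finite-difference
!     stencil on an m x n grid: global row II corresponds to grid
!     point (i,j) with II = i*n + j, so the vertical neighbors are
!     II-n and II+n and the horizontal neighbors are II-1 and II+1.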

      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,1,II,1,II,v,ADD_VALUES,ierr)
 10   continue

!  Assemble matrix, using the 2-step process:
!    MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transit by placing
!  code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Create parallel vectors.
!   - When using VecCreate(), the parallel partitioning of the vector
!     is determined by PETSc at runtime.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreate(PETSC_COMM_WORLD,u,ierr)
      call VecSetSizes(u,PETSC_DECIDE,m*n,ierr)
      call VecSetFromOptions(u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)

!  Create linear solver context

      call SLESCreate(PETSC_COMM_WORLD,sles,ierr)

!  Set runtime options (e.g., -ksp_type <type> -pc_type <type>)

      call SLESSetFromOptions(sles,ierr)
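!  For example, running with -ksp_type bcgs -pc_type jacobi selects
!  BiCGStab with Jacobi (diagonal) preconditioning without recompiling.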

!  Solve several linear systems in succession

      do 100 i=1,nsteps
        call solve1(sles,A,x,b,u,i,nsteps,ierr)
 100  continue

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)
      call SLESDestroy(sles,ierr)

      call PetscFinalize(ierr)
      end

! -----------------------------------------------------------------------
!
      subroutine solve1(sles,A,x,b,u,count,nsteps,ierr)

#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscmat.h"
#include "include/finclude/petscsles.h"
#include "include/finclude/petscpc.h"
#include "include/finclude/petscksp.h"

!
!  solve1 - This routine is used for repeated linear system solves.
!  We update the linear system matrix each time, but retain the same
!  preconditioning matrix for all linear solves.
!
!      A  - linear system matrix
!      A2 - preconditioning matrix
!
      PetscScalar v,val
      integer     II,ierr,Istart,Iend,count,nsteps,its
      Mat         A
      SLES        sles
      KSP         ksp
      Vec         x,b,u

!  Use common block to retain matrix between successive subroutine calls
      Mat     A2
      integer rank,pflag
      common /my_data/ A2,pflag,rank
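!  Note: in the Fortran interface PETSc objects are opaque handles, so
!  storing A2 in a common block is a legitimate way to keep it alive
!  between calls.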

!  First time through: Create new matrix to define the linear system
      if (count .eq. 1) then
        call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
        pflag = 0
        call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-mat_view',     &
     &       pflag,ierr)
        if (pflag .ne. 0) then
          if (rank .eq. 0) write(6,100)
        endif
        call MatConvert(A,MATSAME,A2,ierr)
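!  MatConvert() with MATSAME duplicates A, so A2 starts out as an
!  exact copy of the original matrix and is kept as the preconditioning
!  matrix for every later solve.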
!  All other times: Set previous solution as initial guess for next solve.
      else
        call SLESGetKSP(sles,ksp,ierr)
        call KSPSetInitialGuessNonzero(ksp,PETSC_TRUE,ierr)
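!  (By default KSP zeros x before solving; with a nonzero initial
!  guess the iteration starts from the previous solution left in x.)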
      endif

!  Alter the matrix A a bit
      call MatGetOwnershipRange(A,Istart,Iend,ierr)
      do 20, II=Istart,Iend-1
        v = 2.0
        call MatSetValues(A,1,II,1,II,v,ADD_VALUES,ierr)
 20   continue
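!  Adding 2.0 to every diagonal entry at each step makes the linear
!  system matrix drift away from A2, which still holds the original A.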
      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      if (pflag .ne. 0) then
        if (rank .eq. 0) write(6,110)
      endif
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Set the exact solution; compute the right-hand-side vector
      val = 1.0*count
      call VecSet(val,u,ierr)
      call MatMult(A,u,b,ierr)
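!  Since b = A*u, the vector u is the exact solution of the system
!  solved below, which makes it possible to check the error if desired.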

!  Set operators, keeping the identical preconditioner matrix for
!  all linear solves.  This approach is often effective when the
!  linear systems do not change very much between successive steps.
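!  SAME_PRECONDITIONER tells SLES that the preconditioner matrix is
!  unchanged, so the setup work (e.g., a factorization) done for the
!  first solve is reused rather than recomputed.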
      call SLESSetOperators(sles,A,A2,SAME_PRECONDITIONER,ierr)

!  Solve linear system
      call SLESSolve(sles,b,x,its,ierr)
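!  On return, its holds the number of iterations the solver used.
!  A sketch of an optional error check against the exact solution u
!  (not part of this example; neg_one = -1.0 and norm would need to be
!  declared, and this overwrites x, perturbing the next initial guess):
!     call VecAXPY(neg_one,u,x,ierr)
!     call VecNorm(x,NORM_2,norm,ierr)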

!  Destroy the preconditioner matrix on the last time through
      if (count .eq. nsteps) call MatDestroy(A2,ierr)

 100  format('previous matrix: preconditioning')
 110  format('next matrix: defines linear system')

      end