Actual source code: ex2f.F

petsc-3.7.5 2017-01-01
!
!  Description: Solves a linear system in parallel with KSP (Fortran code).
!               Also shows how to set a user-defined monitoring routine.
!
!
!/*T
!  Concepts: KSP^basic parallel example
!  Concepts: KSP^setting a user-defined monitoring routine
!  Processors: n
!T*/
!
! -----------------------------------------------------------------------

      program main
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                    Include files
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  This program uses CPP for preprocessing, as indicated by the use of
!  PETSc include files in the directory petsc/include/petsc/finclude.  This
!  convention enables use of the CPP preprocessor, which allows the use
!  of the #include statements that define PETSc objects and variables.
!
!  Use of the conventional Fortran include statements is also supported.
!  In this case, the PETSc include files are located in the directory
!  petsc/include/foldinclude.
!
!  Since one must be very careful to include each file no more than once
!  in a Fortran routine, application programmers must explicitly list
!  each file needed for the various PETSc components within their
!  program (unlike the C/C++ interface).
!
!  See the Fortran section of the PETSc users manual for details.
!
!  The following include statements are required for KSP Fortran programs:
!     petscsys.h    - base PETSc routines
!     petscvec.h    - vectors
!     petscmat.h    - matrices
!     petscpc.h     - preconditioners
!     petscksp.h    - Krylov subspace methods
!  Additional include statements may be needed if using additional
!  PETSc routines in a Fortran program, e.g.,
!     petscviewer.h - viewers
!     petscis.h     - index sets
!
#include <petsc/finclude/petscsys.h>
#include <petsc/finclude/petscvec.h>
#include <petsc/finclude/petscmat.h>
#include <petsc/finclude/petscpc.h>
#include <petsc/finclude/petscksp.h>
#include <petsc/finclude/petscviewer.h>
!
 54: !
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp      - Krylov subspace method (linear solver) context
!     pc       - preconditioner context
!     x, b, u  - approx solution, right-hand-side, exact solution vectors
!     A        - matrix that defines linear system
!     its      - iterations for convergence
!     norm     - norm of error in solution
!     rctx     - random number generator context
!
!  Note that vectors are declared as PETSc "Vec" objects.  These vectors
!  are mathematical objects that contain more than just an array of
!  double precision numbers. I.e., vectors in PETSc are not just
!        double precision x(*).
!  However, local vector data can be easily accessed via VecGetArray().
!  See the Fortran section of the PETSc users manual for details.
!
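!  For illustration, local entries could be read with the classic
!  Fortran VecGetArray() calling convention (a sketch, not used in this
!  program; xx_v, xx_i, and nlocal are hypothetical names):
!      PetscScalar xx_v(1)
!      PetscOffset xx_i
!      call VecGetArray(x,xx_v,xx_i,ierr)
!      ... work with xx_v(xx_i+1) through xx_v(xx_i+nlocal) ...
!      call VecRestoreArray(x,xx_v,xx_i,ierr)
!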
      PetscReal  norm
      PetscInt  i,j,II,JJ,m,n,its
      PetscInt  Istart,Iend,ione
      PetscErrorCode ierr
      PetscMPIInt     rank,size
      PetscBool   flg
      PetscScalar v,one,neg_one
      Vec         x,b,u
      Mat         A
      KSP         ksp
      PetscRandom rctx
      PetscViewerAndFormat vf

!  These variables are not currently used.
!      PC          pc
!      PCType      ptype
!      PetscReal tol


!  Note: Any user-defined Fortran routines (such as MyKSPMonitor)
!  MUST be declared as external.

      external MyKSPMonitor,MyKSPConverged

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      m = 3
      n = 3
      one     = 1.0
      neg_one = -1.0
      ione    = 1
      call PetscOptionsGetInt(PETSC_NULL_OBJECT,PETSC_NULL_CHARACTER,   &
     &                        '-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_OBJECT,PETSC_NULL_CHARACTER,   &
     &                        '-n',n,flg,ierr)
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
      call MPI_Comm_size(PETSC_COMM_WORLD,size,ierr)
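
!  For example, the problem size can be set at runtime (a hypothetical
!  invocation; the executable name depends on how this file is built):
!      mpiexec -n 4 ./ex2f -m 10 -n 10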

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime. Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
      call MatSetFromOptions(A,ierr)
      call MatSetUp(A,ierr)

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)
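
!  For example, with m = n = 3 (9 global rows) on two processes, the
!  default PETSC_DECIDE layout gives process 0 rows 0..4 (Istart=0,
!  Iend=5) and process 1 rows 5..8 (Istart=5, Iend=9); Iend is one
!  past the last locally owned row.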

!  Set matrix elements for the 2-D, five-point stencil in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.

!     Note: this uses the less common natural ordering that orders first
!     all the unknowns for x = h, then for x = 2h, etc.; hence you see
!     JJ = II +- n instead of JJ = II +- m as you might expect. The more
!     standard ordering would first do all variables for y = h, then
!     y = 2h, etc.

      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,ione,II,ione,II,v,INSERT_VALUES,ierr)
 10   continue
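
!  As a concrete check of the indexing above: for m = n = 3, row II = 4
!  is the interior grid point (i,j) = (1,1); the loop inserts -1.0 in
!  columns 1, 7, 3, and 5, and 4.0 on the diagonal (column 4).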

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transit,
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired -- or use the more general routine VecCreate(); see
!     the sketch after these calls.
!   - When solving a linear system, the vectors and matrices MUST
!     be partitioned accordingly.  PETSc automatically generates
!     appropriately partitioned matrices and vectors when MatCreate()
!     and VecCreate() are used with the same communicator.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,u,ierr)
      call VecSetFromOptions(u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)
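
!  The more general creation sequence mentioned above would look like
!  this (a sketch using the petsc-3.7 Fortran API; not executed here):
!      call VecCreate(PETSC_COMM_WORLD,u,ierr)
!      call VecSetSizes(u,PETSC_DECIDE,m*n,ierr)
!      call VecSetFromOptions(u,ierr)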

!  Set exact solution; then compute right-hand-side vector.
!  By default we use an exact solution of a vector with all
!  elements of 1.0;  Alternatively, using the runtime option
!  -random_exact_sol forms a solution vector with random components.

      call PetscOptionsHasName(PETSC_NULL_OBJECT,PETSC_NULL_CHARACTER,  &
     &             '-random_exact_sol',flg,ierr)
      if (flg) then
         call PetscRandomCreate(PETSC_COMM_WORLD,rctx,ierr)
         call PetscRandomSetFromOptions(rctx,ierr)
         call VecSetRandom(u,rctx,ierr)
         call PetscRandomDestroy(rctx,ierr)
      else
         call VecSet(u,one,ierr)
      endif
      call MatMult(A,u,b,ierr)

!  View the exact solution vector if desired

      call PetscOptionsHasName(PETSC_NULL_OBJECT,PETSC_NULL_CHARACTER,  &
     &             '-view_exact_sol',flg,ierr)
      if (flg) then
         call VecView(u,PETSC_VIEWER_STDOUT_WORLD,ierr)
      endif

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!         Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set operators. Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,ierr)
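
!  If a separate preconditioning matrix were available, it would be
!  passed as the third argument instead (a sketch; B is a hypothetical
!  matrix, not created in this program):
!      call KSPSetOperators(ksp,A,B,ierr)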

!  Set linear solver defaults for this problem (optional).
!   - By extracting the KSP and PC contexts from the KSP context,
!     we can then directly call any KSP and PC routines
!     to set various options.
!   - The following four statements are optional; all of these
!     parameters could alternatively be specified at runtime via
!     KSPSetFromOptions(). All of these defaults can be
!     overridden at runtime, as indicated below.

!     We comment out this section of code since the Jacobi
!     preconditioner is not a good general default.

!      call KSPGetPC(ksp,pc,ierr)
!      ptype = PCJACOBI
!      call PCSetType(pc,ptype,ierr)
!      tol = 1.e-7
!      call KSPSetTolerances(ksp,tol,PETSC_DEFAULT_REAL,
!     &     PETSC_DEFAULT_REAL,PETSC_DEFAULT_INTEGER,ierr)
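
!     The equivalent choice at runtime would be the options:
!        -pc_type jacobi -ksp_rtol 1.e-7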

!  Set user-defined monitoring routine if desired

      call PetscOptionsHasName(PETSC_NULL_OBJECT,PETSC_NULL_CHARACTER,  &
     &                         '-my_ksp_monitor',flg,ierr)
      if (flg) then
        call KSPMonitorSet(ksp,MyKSPMonitor,PETSC_NULL_OBJECT,          &
     &        PETSC_NULL_FUNCTION,ierr)
!
!     Also use the default KSP monitor routine showing how it may be used from Fortran
!
        call PetscViewerAndFormatCreate(PETSC_VIEWER_STDOUT_WORLD,      &
     &          PETSC_VIEWER_DEFAULT,vf,ierr)
        call KSPMonitorSet(ksp,KSPMonitorDefault,vf,                    &
     &                     PetscViewerAndFormatDestroy,ierr)
      endif


!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.

      call KSPSetFromOptions(ksp,ierr)
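
!  For instance, a run exercising such options might look like
!  (a hypothetical invocation):
!      mpiexec -n 2 ./ex2f -ksp_type bcgs -pc_type asm -ksp_monitor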

!  Set convergence test routine if desired

      call PetscOptionsHasName(PETSC_NULL_OBJECT,PETSC_NULL_CHARACTER,  &
     &                         '-my_ksp_convergence',flg,ierr)
      if (flg) then
        call KSPSetConvergenceTest(ksp,MyKSPConverged,                  &
     &          PETSC_NULL_OBJECT,PETSC_NULL_FUNCTION,ierr)
      endif
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call KSPSolve(ksp,b,x,ierr)
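
!  One could also query why the iteration stopped (a sketch; 'reason'
!  would need to be declared as a KSPConvergedReason above):
!      call KSPGetConvergedReason(ksp,reason,ierr)
!      if (reason .lt. 0) then
!         ... the solve diverged ...
!      endif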

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error
      call VecAXPY(x,neg_one,u,ierr)
      call VecNorm(x,NORM_2,norm,ierr)
      call KSPGetIterationNumber(ksp,its,ierr)
      if (rank .eq. 0) then
        if (norm .gt. 1.e-12) then
           write(6,100) norm,its
        else
           write(6,110) its
        endif
      endif
  100 format('Norm of error ',e11.4,' iterations ',i5)
  110 format('Norm of error < 1.e-12 iterations ',i5)

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call KSPDestroy(ksp,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)

!  Always call PetscFinalize() before exiting a program.  This routine
!    - finalizes the PETSc libraries as well as MPI
!    - provides summary and diagnostic information if certain runtime
!      options are chosen (e.g., -log_summary).  See the PetscFinalize()
!      manpage for more information.

      call PetscFinalize(ierr)
      end

! --------------------------------------------------------------
!
!  MyKSPMonitor - This is a user-defined routine for monitoring
!  the KSP iterative solvers.
!
!  Input Parameters:
!    ksp   - iterative context
!    n     - iteration number
!    rnorm - 2-norm (preconditioned) residual value (may be estimated)
!    dummy - optional user-defined monitor context (unused here)
!
      subroutine MyKSPMonitor(ksp,n,rnorm,dummy,ierr)

      implicit none

#include <petsc/finclude/petscsys.h>
#include <petsc/finclude/petscvec.h>
#include <petsc/finclude/petscksp.h>

      KSP              ksp
      Vec              x
      PetscErrorCode ierr
      PetscInt n,dummy
      PetscMPIInt rank
      PetscReal rnorm

!  Build the solution vector

      call KSPBuildSolution(ksp,PETSC_NULL_OBJECT,x,ierr)

!  Write the solution vector and residual norm to stdout
!   - Note that the parallel viewer PETSC_VIEWER_STDOUT_WORLD
!     handles data from multiple processors so that the
!     output is not jumbled.

      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
      if (rank .eq. 0) write(6,100) n
      call VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr)
      if (rank .eq. 0) write(6,200) n,rnorm

 100  format('iteration ',i5,' solution vector:')
 200  format('iteration ',i5,' residual norm ',e11.4)
      ierr = 0
      end

! --------------------------------------------------------------
!
!  MyKSPConverged - This is a user-defined routine for testing
!  convergence of the KSP iterative solvers.
!
!  Input Parameters:
!    ksp   - iterative context
!    n     - iteration number
!    rnorm - 2-norm (preconditioned) residual value (may be estimated)
!    dummy - optional user-defined convergence context (unused here)
!
!  Output Parameter:
!    flag  - set nonzero to declare convergence
!
      subroutine MyKSPConverged(ksp,n,rnorm,flag,dummy,ierr)

      implicit none

#include <petsc/finclude/petscsys.h>
#include <petsc/finclude/petscvec.h>
#include <petsc/finclude/petscksp.h>

      KSP              ksp
      PetscErrorCode ierr
      PetscInt n,dummy
      KSPConvergedReason flag
      PetscReal rnorm

!  Declare convergence once the residual norm drops below 0.05
      if (rnorm .le. .05) then
        flag = 1
      else
        flag = 0
      endif
      ierr = 0

      end