Actual source code: ex2f.F

!
!    "$Id: ex2f.F,v 1.15 2001/08/07 03:02:34 balay Exp $"
!
!  Description: Builds a parallel vector with 1 component on the first
!               processor, 2 on the second, etc.  Then each processor
!               adds one to all elements except the last 'rank' entries
!               of the vector.
!
!/*T
!   Concepts: vectors^assembling
!   Processors: n
!T*/
! -----------------------------------------------------------------------

      program main
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                    Include files
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  The following include statements are required for Fortran programs
!  that use PETSc vectors:
!     petsc.h       - base PETSc routines
!     petscvec.h    - vectors
!  Additional include statements may be needed if using additional
!  PETSc routines in a Fortran program, e.g.,
!     petscviewer.h - viewers
!     petscis.h     - index sets
!  (a commented sketch of these follows the includes below)
!
#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
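
!  A commented sketch of the additional includes mentioned above,
!  for a hypothetical program that also used viewers and index sets
!  (not needed by this example):
! #include "include/finclude/petscviewer.h"
! #include "include/finclude/petscis.h"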

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      Vec     x
      integer N,ierr,rank,i
      PetscScalar  one

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      one   = 1.0
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)

!  Create a parallel vector.
!   - In this case, we specify the size of the local portion on
!     each processor, and PETSc computes the global size.  Alternatively,
!     if we pass the global size and use PETSC_DECIDE for the
!     local size, PETSc will choose a reasonable partition, trying
!     to put nearly an equal number of elements on each processor
!     (see the commented sketch after the call below).


      call VecCreateMPI(PETSC_COMM_WORLD,rank+1,PETSC_DECIDE,x,ierr)
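
!  A minimal sketch of the alternative mentioned above: assuming the
!  global size N were known in advance, one could instead let PETSc
!  pick the local sizes (hypothetical variant, not used here):
!        call VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,N,x,ierr)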

!  Determine the global size of the vector, then set every entry
!  to one.  (Note: in this PETSc version, VecSet() takes the scalar
!  argument first.)

      call VecGetSize(x,N,ierr)
      call VecSet(one,x,ierr)

!  Set the vector elements.
!   - Note that VecSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.
!   - Always specify global locations of vector entries.
!   - Each processor can contribute any vector entries,
!     regardless of which processor "owns" them; any nonlocal
!     contributions will be transferred to the appropriate processor
!     during the assembly process.
!   - In this example, the flag ADD_VALUES indicates that all
!     contributions will be added together.

      do 100 i=0,N-rank-1
         call VecSetValues(x,1,i,one,ADD_VALUES,ierr)
 100  continue
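
!  A minimal sketch of a batched variant: VecSetValues() can set
!  several entries in one call.  Assuming hypothetical arrays ix(3)
!  and vals(3) were declared and filled with global indices and
!  values, one call could contribute all three entries:
!        call VecSetValues(x,3,ix,vals,ADD_VALUES,ierr)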

!  Assemble the vector, using the 2-step process:
!    VecAssemblyBegin(), VecAssemblyEnd()
!  Computations can be done while messages are in transit by
!  placing code between these two statements (see the sketch
!  after the calls below).

      call VecAssemblyBegin(x,ierr)
      call VecAssemblyEnd(x,ierr)
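
!  For example (a sketch of the pattern, not used in this example),
!  purely local work that does not touch x could be overlapped with
!  the assembly communication:
!        call VecAssemblyBegin(x,ierr)
!        ... local computations ...
!        call VecAssemblyEnd(x,ierr)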

!  View the vector; then destroy it.

      call VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr)
      call VecDestroy(x,ierr)

      call PetscFinalize(ierr)
      end