Actual source code: ex11f.F
1: !
2: ! "$Id: ex11f.F,v 1.11 2001/08/09 23:21:58 balay Exp $";
3: !
4: !
5: ! Program usage: mpirun ex11f [-help] [all PETSc options]
6: !
7: !
8: !/*T
9: ! Concepts: vectors^norms of sub-vectors;
10: ! Processors: n
11: !T*/
13: program main
14: implicit none
16: !
17: ! The following include statements are required for Fortran programs
18: ! that use PETSc vectors:
19: ! petsc.h - base PETSc routines
20: ! petscvec.h - vectors
21: ! Additional include statements may be needed if using additional
22: ! PETSc routines in a Fortran program, e.g.,
23: ! petscviewer.h - viewers
24: ! petscis.h - index sets
25: !
26: #include include/finclude/petsc.h
27: #include include/finclude/petscvec.h
28: !
! x    - the distributed work vector
! norm - receives the result of each norm computation
! n    - global vector length (default 20, overridable with -n)
! NOTE(review): flg is declared integer; PETSc Fortran codes of this era
! typically declare the "found" flag as PetscTruth - confirm against
! the petsc.h of the targeted PETSc release.
30: Vec x
31: PetscReal norm
32: integer n,ierr,flg,rank
33: PetscScalar one
! Initialize PETSc (and MPI), then query this process's rank so that
! only rank 0 prints results below.
35: call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
36: call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
! Default global size; may be overridden at runtime via -n <int>.
38: n = 20
39: one = 1.d0
40: call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
42: !
43: ! Create a vector, specifying only its global dimension.
44: ! When using VecCreate(), VecSetSizes() and VecSetFromOptions(),
45: ! the vector format (currently parallel,
46: ! shared, or sequential) is determined at runtime. Also, the parallel
47: ! partitioning of the vector is determined by PETSc at runtime.
48: !
49: ! Routines for creating particular vector types directly are:
50: ! VecCreateSeq() - uniprocessor vector
51: ! VecCreateMPI() - distributed vector, where the user can
52: ! determine the parallel partitioning
53: ! VecCreateShared() - parallel vector that uses shared memory
54: ! (available only on the SGI); otherwise,
55: ! is the same as VecCreateMPI()
56: !
57: ! With VecCreate(), VecSetSizes() and VecSetFromOptions() the option
58: ! -vec_type mpi or -vec_type shared causes the
59: ! particular type of vector to be formed.
61: call VecCreate(PETSC_COMM_WORLD,x,ierr)
62: call VecSetSizes(x,PETSC_DECIDE,n,ierr)
63: call VecSetFromOptions(x,ierr)
65: !
66: ! Set all entries of the vector to the constant value 1.0.
! NOTE(review): the value-first argument order VecSet(value,vec,ierr)
! matches the legacy PETSc 2.x interface; later releases reversed it
! to VecSet(vec,value,ierr).
67: !
68: call VecSet(one,x,ierr)
! 2-norm of the entire vector; for n entries of 1.0 this is sqrt(n).
70: call VecNorm(x,NORM_2,norm,ierr)
71: if (rank .eq. 0) then
72: write (6,100) norm
73: 100 format ('Norm of entire vector ',1pe8.2)
74: endif
! Declare a block size of 2: the vector is now viewed as interlaced
! pairs, so VecStrideNorm(x,i,...) computes the norm of the subvector
! formed by every entry whose index is congruent to i (mod 2).
76: call VecSetBlockSize(x,2,ierr)
! 2-norm of component 0 (entries 0, 2, 4, ...).
77: call VecStrideNorm(x,0,NORM_2,norm,ierr)
78: if (rank .eq. 0) then
79: write (6,200) norm
80: 200 format ('Norm of subvector ',1pe8.2)
81: endif
! 2-norm of component 1 (entries 1, 3, 5, ...).
84: call VecStrideNorm(x,1,NORM_2,norm,ierr)
85: if (rank .eq. 0) then
86: write (6,300) norm
87: 300 format ('Norm of subvector ',1pe8.2)
88: endif
! 1-norm (sum of absolute values) of component 1.
90: call VecStrideNorm(x,1,NORM_1,norm,ierr)
91: if (rank .eq. 0) then
92: write (6,400) norm
93: 400 format ('Norm of subvector ',1pe8.2)
94: endif
! Infinity-norm (maximum absolute value) of component 1.
96: call VecStrideNorm(x,1,NORM_INFINITY,norm,ierr)
97: if (rank .eq. 0) then
98: write (6,500) norm
99: 500 format ('Norm of subvector ',1pe8.2)
100: endif
102: !
103: ! Free work space. All PETSc objects should be destroyed when they
104: ! are no longer needed.
106: call VecDestroy(x,ierr)
107: call PetscFinalize(ierr)
108: end
109: