Actual source code: ex2.c

/*$Id: ex2.c,v 1.94 2001/08/07 21:30:54 bsmith Exp $*/

/* Program usage:  mpirun -np <procs> ex2 [-help] [all PETSc options] */

static char help[] = "Solves a linear system in parallel with SLES.\n\
Input parameters include:\n\
  -random_exact_sol : use a random exact solution vector\n\
  -view_exact_sol   : write exact solution vector to stdout\n\
  -m <mesh_x>       : number of mesh points in x-direction\n\
  -n <mesh_n>       : number of mesh points in y-direction\n\n";

/*T
   Concepts: SLES^basic parallel example;
   Concepts: SLES^Laplacian, 2d
   Concepts: Laplacian, 2d
   Processors: n
T*/

/*
  Include "petscsles.h" so that we can use SLES solvers.  Note that this file
  automatically includes:
     petsc.h       - base PETSc routines   petscvec.h - vectors
     petscsys.h    - system routines       petscmat.h - matrices
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners
*/
#include "petscsles.h"

#undef __FUNCT__
#define __FUNCT__ "main"
int main(int argc,char **args)
{
  Vec         x,b,u;    /* approx solution, RHS, exact solution */
  Mat         A;        /* linear system matrix */
  SLES        sles;     /* linear solver context */
  PetscRandom rctx;     /* random number generator context */
  PetscReal   norm;     /* norm of solution error */
  int         i,j,I,J,Istart,Iend,ierr,m = 8,n = 7,its;
  PetscTruth  flg;
  PetscScalar v,one = 1.0,neg_one = -1.0;
  KSP         ksp;

  PetscInitialize(&argc,&args,(char *)0,help);
  PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime. Also, the parallel partitioning of the matrix is
     determined by PETSc at runtime.

     Performance tuning note: for problems of substantial size,
     preallocation of matrix memory is crucial for attaining good
     performance.  Since preallocation is not possible via the generic
     matrix creation routine MatCreate(), for practical problems we
     recommend using the creation routine for a particular matrix
     format instead, e.g.,
         MatCreateMPIAIJ()  - parallel AIJ (compressed sparse row)
         MatCreateMPIBAIJ() - parallel block AIJ
     See the matrix chapter of the users manual for details.
  */
  MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,&A);
  MatSetFromOptions(A);
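
  /*
     Illustrative sketch (not part of this example): if the format-specific
     routine MatCreateMPIAIJ() were used instead of MatCreate(), storage for
     the five-point stencil could be preallocated.  The per-row counts below
     (5 nonzeros in the diagonal block, 2 in the off-diagonal block) are an
     assumption: a rough upper bound for this stencil with the row-wise
     partitioning; adjust them for other problems.

         MatCreateMPIAIJ(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,
                         m*n,m*n,5,PETSC_NULL,2,PETSC_NULL,&A);
  */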

  /*
     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.  Determine which
     rows of the matrix are locally owned.
  */
  MatGetOwnershipRange(A,&Istart,&Iend);

  /*
     Set matrix elements for the 2-D, five-point stencil in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global rows and columns of matrix entries.

     Note: this uses the less common natural ordering that orders first
     all the unknowns for x = h, then for x = 2h, etc.; hence you see
     J = I +- n instead of J = I +- m as you might expect.  The more
     standard ordering would first do all variables for y = h, then
     y = 2h, etc.
   */
  for (I=Istart; I<Iend; I++) {
    v = -1.0; i = I/n; j = I - i*n;
    if (i>0)   {J = I - n; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    if (i<m-1) {J = I + n; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    if (j>0)   {J = I - 1; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    if (j<n-1) {J = I + 1; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    v = 4.0; MatSetValues(A,1,&I,1,&I,&v,INSERT_VALUES);
  }
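
  /*
     Worked example of the index map above (illustration only): with the
     default m = 8, n = 7, global row I = 9 gives i = I/n = 1 and
     j = I - i*n = 2, i.e. the grid point on the second x-line and third
     y-line.  All four neighbor tests pass, so value -1.0 is inserted in
     columns I-n = 2, I+n = 16, I-1 = 8, I+1 = 10, and 4.0 on the diagonal.
  */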

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transit
     by placing code between these two statements.
  */
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
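
  /*
     For example (not done here), independent work such as the vector
     creation calls below could be moved between MatAssemblyBegin() and
     MatAssemblyEnd() to overlap it with the communication of the
     off-processor matrix entries.
  */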

  /*
     Create parallel vectors.
      - We form one vector from scratch and then duplicate it as needed.
      - When using VecCreate(), VecSetSizes() and VecSetFromOptions() in
        this example, we specify only the vector's global dimension; the
        parallel partitioning is determined at runtime.
      - When solving a linear system, the vectors and matrices MUST
        be partitioned accordingly.  PETSc automatically generates
        appropriately partitioned matrices and vectors when MatCreate()
        and VecCreate() are used with the same communicator.
      - The user can alternatively specify the local vector and matrix
        dimensions when more sophisticated partitioning is needed
        (replacing the PETSC_DECIDE argument in the VecSetSizes() statement
        below).
  */
  VecCreate(PETSC_COMM_WORLD,&u);
  VecSetSizes(u,PETSC_DECIDE,m*n);
  VecSetFromOptions(u);
  VecDuplicate(u,&b);
  VecDuplicate(b,&x);
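
  /*
     Sketch of the alternative mentioned above (not used here): to match an
     explicitly chosen partitioning, the local size could be passed instead
     of PETSC_DECIDE, e.g. the number of locally owned matrix rows obtained
     from MatGetOwnershipRange():

         VecSetSizes(u,Iend-Istart,m*n);
  */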

  /*
     Set the exact solution; then compute the right-hand-side vector.
     By default we use an exact solution vector with all elements equal
     to 1.0; alternatively, the runtime option -random_exact_sol forms a
     solution vector with random components.
  */
  PetscOptionsHasName(PETSC_NULL,"-random_exact_sol",&flg);
  if (flg) {
    PetscRandomCreate(PETSC_COMM_WORLD,RANDOM_DEFAULT,&rctx);
    VecSetRandom(rctx,u);
    PetscRandomDestroy(rctx);
  } else {
    VecSet(&one,u);
  }
  MatMult(A,u,b);

  /*
     View the exact solution vector if desired
  */
  PetscOptionsHasName(PETSC_NULL,"-view_exact_sol",&flg);
  if (flg) {VecView(u,PETSC_VIEWER_STDOUT_WORLD);}

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create linear solver context
  */
  SLESCreate(PETSC_COMM_WORLD,&sles);

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  SLESSetOperators(sles,A,A,DIFFERENT_NONZERO_PATTERN);
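
  /*
     Note: DIFFERENT_NONZERO_PATTERN is the safe, general choice for the
     final flag.  As an illustrative sketch (not used here), when the same
     system is solved repeatedly with a matrix whose nonzero structure does
     not change, one could instead pass

         SLESSetOperators(sles,A,A,SAME_NONZERO_PATTERN);

     so that the preconditioner setup can reuse information from the
     previous solve.
  */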

  /*
     Set linear solver defaults for this problem (optional).
     - By extracting the KSP and PC contexts from the SLES context,
       we can then directly call any KSP and PC routines to set
       various options.
     - The following two statements are optional; all of these
       parameters could alternatively be specified at runtime via
       SLESSetFromOptions(), and all of these defaults can be
       overridden at runtime, as indicated below.
  */

  SLESGetKSP(sles,&ksp);
  KSPSetTolerances(ksp,1.e-2/((m+1)*(n+1)),1.e-50,PETSC_DEFAULT,PETSC_DEFAULT);
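
  /*
     Similarly, the PC context could be extracted and customized directly.
     An illustrative sketch (would require a "PC pc;" declaration; not used
     in this example):

         SLESGetPC(sles,&pc);
         PCSetType(pc,PCJACOBI);
  */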

  /*
    Set runtime options, e.g.,
        -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
    These options will override those specified above as long as
    SLESSetFromOptions() is called _after_ any other customization
    routines.
  */
  SLESSetFromOptions(sles);
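
  /*
     For example, a run might look like (the option values are illustrative):

         mpirun -np 4 ex2 -m 40 -n 40 -ksp_type gmres -pc_type bjacobi \
                -ksp_monitor -ksp_rtol 1.e-6
  */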

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  SLESSolve(sles,b,x,&its);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Check the error
  */
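  /*
     Note on the calling sequence used here: VecAXPY(&neg_one,u,x) computes
     x = x + neg_one*u = x - u, so x becomes the error vector, whose 2-norm
     is then measured.
  */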
  VecAXPY(&neg_one,u,x);
  VecNorm(x,NORM_2,&norm);

  /* Scale the norm */
  /*  norm *= sqrt(1.0/((m+1)*(n+1))); */

  /*
     Print convergence information.  PetscPrintf() produces a single
     print statement from all processes that share a communicator.
     An alternative is PetscFPrintf(), which prints to a file.
  */
  PetscPrintf(PETSC_COMM_WORLD,"Norm of error %A iterations %d\n",norm,its);
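
  /*
     Illustrative alternative (not used here): PetscFPrintf() takes an
     explicit FILE pointer, e.g.

         PetscFPrintf(PETSC_COMM_WORLD,stderr,"Norm of error %A iterations %d\n",norm,its);
  */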

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  SLESDestroy(sles);
  VecDestroy(u);  VecDestroy(x);
  VecDestroy(b);  MatDestroy(A);

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_summary).
  */
  PetscFinalize();
  return 0;
}