Actual source code: ex15.c

/*$Id: ex15.c,v 1.24 2001/08/07 21:30:54 bsmith Exp $*/

static char help[] = "Solves a linear system in parallel with SLES.  Also\n\
illustrates setting a user-defined shell preconditioner and using the\n\
macro __FUNCT__ to define routine names for use in error handling.\n\
Input parameters include:\n\
  -user_defined_pc : Activate a user-defined preconditioner\n\n";

/*T
   Concepts: SLES^basic parallel example
   Concepts: PC^setting a user-defined shell preconditioner
   Concepts: error handling^Using the macro __FUNCT__ to define routine names;
   Processors: n
T*/

/*
  Include "petscsles.h" so that we can use SLES solvers.  Note that this file
  automatically includes:
     petsc.h       - base PETSc routines   petscvec.h - vectors
     petscsys.h    - system routines       petscmat.h - matrices
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners
*/
#include "petscsles.h"

/* Define context for user-provided preconditioner */
typedef struct {
  Vec diag;
} SampleShellPC;

/* Declare routines for user-provided preconditioner */
extern int SampleShellPCCreate(SampleShellPC**);
extern int SampleShellPCSetUp(SampleShellPC*,Mat,Vec);
extern int SampleShellPCApply(void*,Vec,Vec);
extern int SampleShellPCDestroy(SampleShellPC*);

/*
   User-defined routines.  Note that immediately before each routine below,
   we define the macro __FUNCT__ to be a string containing the routine name.
   If defined, this macro is used in the PETSc error handlers to provide a
   complete traceback of routine names.  All PETSc library routines use this
   macro, and users can optionally employ it as well in their application
   codes.  Note that users can get a traceback of PETSc errors regardless of
   whether they define __FUNCT__ in application codes; this macro merely
   provides the added traceback detail of the application routine names.
*/
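
/*
   A minimal sketch of the usage pattern (MyRoutine is a hypothetical user
   routine; CHKERRQ is the standard PETSc error-checking macro that produces
   the traceback when a call fails):

      #undef __FUNCT__
      #define __FUNCT__ "MyRoutine"
      int MyRoutine(Vec v)
      {
        int       ierr;
        PetscReal nrm;
        ierr = VecNorm(v,NORM_2,&nrm);CHKERRQ(ierr);
        return 0;
      }
*/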

#undef __FUNCT__
#define __FUNCT__ "main"
int main(int argc,char **args)
{
  Vec           x,b,u;     /* approx solution, RHS, exact solution */
  Mat           A;         /* linear system matrix */
  SLES          sles;      /* linear solver context */
  PC            pc;        /* preconditioner context */
  KSP           ksp;       /* Krylov subspace method context */
  PetscReal     norm;      /* norm of solution error */
  SampleShellPC *shell;    /* user-defined preconditioner context */
  PetscScalar   v,one = 1.0,none = -1.0;
  int           i,j,I,J,Istart,Iend,ierr,m = 8,n = 7,its;
  PetscTruth    user_defined_pc;

  PetscInitialize(&argc,&args,(char *)0,help);
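  /*
     Optionally override the default grid dimensions (m = 8, n = 7) with
     the runtime options -m <m> and -n <n>.
  */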
  PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime. Also, the parallel partitioning of the matrix is
     determined by PETSc at runtime.
  */
  MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,&A);
  MatSetFromOptions(A);

  /*
     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.  Determine which
     rows of the matrix are locally owned.
  */
  MatGetOwnershipRange(A,&Istart,&Iend);

  /*
     Set matrix elements for the 2-D, five-point stencil in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global rows and columns of matrix entries.
   */
  for (I=Istart; I<Iend; I++) {
    v = -1.0; i = I/n; j = I - i*n;
    if (i>0)   {J = I - n; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    if (i<m-1) {J = I + n; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    if (j>0)   {J = I - 1; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    if (j<n-1) {J = I + 1; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    v = 4.0; MatSetValues(A,1,&I,1,&I,&v,INSERT_VALUES);
  }

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transit
     by placing code between these two statements.
  */
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

  /*
     Create parallel vectors.
      - When using VecCreate(), VecSetSizes() and VecSetFromOptions(),
        we specify only the vector's global dimension; the parallel
        partitioning is determined at runtime.
      - Note: We form 1 vector from scratch and then duplicate as needed.
  */
  VecCreate(PETSC_COMM_WORLD,&u);
  VecSetSizes(u,PETSC_DECIDE,m*n);
  VecSetFromOptions(u);
  VecDuplicate(u,&b);
  VecDuplicate(b,&x);

  /*
     Set exact solution; then compute right-hand-side vector.
  */
  VecSet(&one,u);
  MatMult(A,u,b);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create linear solver context
  */
  SLESCreate(PETSC_COMM_WORLD,&sles);

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  SLESSetOperators(sles,A,A,DIFFERENT_NONZERO_PATTERN);

  /*
     Set linear solver defaults for this problem (optional).
     - By extracting the KSP and PC contexts from the SLES context,
       we can then directly call any KSP and PC routines
       to set various options.
  */
  SLESGetKSP(sles,&ksp);
  SLESGetPC(sles,&pc);
  KSPSetTolerances(ksp,1.e-7,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);

  /*
     Set a user-defined "shell" preconditioner if desired
  */
  PetscOptionsHasName(PETSC_NULL,"-user_defined_pc",&user_defined_pc);
  if (user_defined_pc) {
    /* (Required) Indicate to PETSc that we're using a "shell" preconditioner */
    PCSetType(pc,PCSHELL);

    /* (Optional) Create a context for the user-defined preconditioner; this
       context can be used to contain any application-specific data. */
    SampleShellPCCreate(&shell);

    /* (Required) Set the user-defined routine for applying the preconditioner */
    PCShellSetApply(pc,SampleShellPCApply,(void*)shell);

    /* (Optional) Set a name for the preconditioner, used for PCView() */
    PCShellSetName(pc,"MyPreconditioner");

    /* (Optional) Do any setup required for the preconditioner */
    SampleShellPCSetUp(shell,A,x);

  } else {
    PCSetType(pc,PCJACOBI);
  }

  /*
    Set runtime options, e.g.,
        -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
    These options will override those specified above as long as
    SLESSetFromOptions() is called _after_ any other customization
    routines.
  */
  SLESSetFromOptions(sles);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  SLESSolve(sles,b,x,&its);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Check the error
  */
  VecAXPY(&none,u,x);
  VecNorm(x,NORM_2,&norm);
  PetscPrintf(PETSC_COMM_WORLD,"Norm of error %A iterations %d\n",norm,its);

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  SLESDestroy(sles);
  VecDestroy(u);  VecDestroy(x);
  VecDestroy(b);  MatDestroy(A);

  if (user_defined_pc) {
    SampleShellPCDestroy(shell);
  }

  PetscFinalize();
  return 0;
}

/***********************************************************************/
/*          Routines for a user-defined shell preconditioner           */
/***********************************************************************/

#undef __FUNCT__
#define __FUNCT__ "SampleShellPCCreate"
/*
   SampleShellPCCreate - This routine creates a user-defined
   preconditioner context.

   Output Parameter:
.  shell - user-defined preconditioner context
*/
int SampleShellPCCreate(SampleShellPC **shell)
{
  SampleShellPC *newctx;
  int           ierr;

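  /* Allocate the context with PetscNew; the diagonal work vector is created
     later in SampleShellPCSetUp(), so initialize it to 0 here. */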
  ierr         = PetscNew(SampleShellPC,&newctx);
  newctx->diag = 0;
  *shell       = newctx;
  return 0;
}
/* ------------------------------------------------------------------- */
#undef __FUNCT__
#define __FUNCT__ "SampleShellPCSetUp"
/*
   SampleShellPCSetUp - This routine sets up a user-defined
   preconditioner context.

   Input Parameters:
.  shell - user-defined preconditioner context
.  pmat  - preconditioner matrix
.  x     - vector

   Output Parameter:
.  shell - fully set up user-defined preconditioner context

   Notes:
   In this example, we define the shell preconditioner to be Jacobi's
   method.  Thus, here we create a work vector for storing the reciprocal
   of the diagonal of the preconditioner matrix; this vector is then
   used within the routine SampleShellPCApply().
*/
int SampleShellPCSetUp(SampleShellPC *shell,Mat pmat,Vec x)
{
  Vec diag;

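  /* Create a work vector with the same size and layout as x, fill it with
     the diagonal of the preconditioner matrix, invert it entrywise, and
     stash it in the context for use by SampleShellPCApply(). */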
  VecDuplicate(x,&diag);
  MatGetDiagonal(pmat,diag);
  VecReciprocal(diag);
  shell->diag = diag;

  return 0;
}
/* ------------------------------------------------------------------- */
#undef __FUNCT__
#define __FUNCT__ "SampleShellPCApply"
/*
   SampleShellPCApply - This routine demonstrates the use of a
   user-provided preconditioner.

   Input Parameters:
.  ctx - optional user-defined context, as set by PCShellSetApply()
.  x - input vector

   Output Parameter:
.  y - preconditioned vector

   Notes:
   The PCSHELL preconditioner passes a void pointer as the first input
   argument.  This can be cast to whatever type the user set as the
   application-defined context via PCShellSetApply().

   This code implements the Jacobi preconditioner, merely as an
   example of working with a PCSHELL.  Note that the Jacobi method
   is already provided within PETSc.
*/
int SampleShellPCApply(void *ctx,Vec x,Vec y)
{
  SampleShellPC *shell = (SampleShellPC*)ctx;
  int           ierr;

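  /* Apply Jacobi: multiply the input vector entrywise by the stored
     reciprocal diagonal, placing the result in y.  With this version's
     calling sequence, VecPointwiseMult(x,w,y) computes y[i] = x[i]*w[i]. */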
  VecPointwiseMult(x,shell->diag,y);

  return 0;
}
/* ------------------------------------------------------------------- */
#undef __FUNCT__
#define __FUNCT__ "SampleShellPCDestroy"
/*
   SampleShellPCDestroy - This routine destroys a user-defined
   preconditioner context.

   Input Parameter:
.  shell - user-defined preconditioner context
*/
int SampleShellPCDestroy(SampleShellPC *shell)
{

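  /* Destroy the diagonal work vector created in SampleShellPCSetUp(),
     then free the context structure itself. */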
  VecDestroy(shell->diag);
  PetscFree(shell);

  return 0;
}