Actual source code: ex8.c

/*$Id: ex8.c,v 1.50 2001/08/07 03:04:00 balay Exp $*/

static char help[] = "Illustrates use of the preconditioner ASM.\n\
The Additive Schwarz Method for solving a linear system in parallel with SLES.  The\n\
code indicates the procedure for setting user-defined subdomains.  Input\n\
parameters include:\n\
  -user_set_subdomain_solvers:  User explicitly sets subdomain solvers\n\
  -user_set_subdomains:  Activate user-defined subdomains\n\n";
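
/*
   Example runs (a sketch only; the launcher name depends on your MPI
   installation, and "ex8" on how the example was built):

       mpirun -np 1 ex8 -user_set_subdomains -M 2 -N 2 -overlap 2
       mpirun -np 4 ex8 -pc_asm_overlap 2 -sub_pc_type ilu -sub_ksp_type gmres

   All of the options used above are read either by this example or by the
   ASM preconditioner itself, as described in the comments below.
*/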

/*
   Note:  This example focuses on setting the subdomains for the ASM
   preconditioner for a problem on a 2D rectangular grid.  See ex1.c
   and ex2.c for more detailed comments on the basic usage of SLES
   (including working with matrices and vectors).

   The ASM preconditioner is fully parallel, but currently the routine
   PCASMCreateSubdomains2D(), which is used in this example to demonstrate
   user-defined subdomains (activated via -user_set_subdomains), is
   uniprocessor only.

   The matrix in this linear system arises from the discretized Laplacian,
   and thus is not very interesting in terms of experimenting with variants
   of the ASM preconditioner.
*/

/*T
   Concepts: SLES^Additive Schwarz Method (ASM) with user-defined subdomains
   Processors: n
T*/

/*
  Include "petscsles.h" so that we can use SLES solvers.  Note that this file
  automatically includes:
     petsc.h       - base PETSc routines   petscvec.h - vectors
     petscsys.h    - system routines       petscmat.h - matrices
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners
*/
#include "petscsles.h"

#undef __FUNCT__
#define __FUNCT__ "main"
int main(int argc,char **args)
{
  Vec          x,b,u;                    /* approx solution, RHS, exact solution */
  Mat          A;                        /* linear system matrix */
  SLES         sles;                     /* linear solver context */
  PC           pc;                       /* PC context */
  IS           *is;                      /* array of index sets that define the subdomains */
  int          overlap = 1;              /* width of subdomain overlap */
  int          Nsub;                     /* number of subdomains */
  int          m = 15,n = 17;            /* mesh dimensions in x- and y-directions */
  int          M = 2,N = 1;              /* number of subdomains in x- and y-directions */
  int          i,j,its,I,J,ierr,Istart,Iend,size;
  PetscTruth   flg;
  PetscTruth   user_subdomains;          /* flag - 1 indicates user-defined subdomains */
  PetscScalar  v, one = 1.0;

  PetscInitialize(&argc,&args,(char *)0,help);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-M",&M,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-N",&N,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-overlap",&overlap,PETSC_NULL);
  PetscOptionsHasName(PETSC_NULL,"-user_set_subdomains",&user_subdomains);

  /* -------------------------------------------------------------------
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     ------------------------------------------------------------------- */

  /*
     Assemble the matrix for the five-point stencil, YET AGAIN
  */
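  /*
     Grid point (i,j), with 0 <= i < m and 0 <= j < n, corresponds to matrix
     row I = i*n + j; each row couples to its four neighbors with -1 and has
     4 on the diagonal (boundary rows simply omit the missing neighbors).
  */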
  MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,&A);
  MatSetFromOptions(A);
  MatGetOwnershipRange(A,&Istart,&Iend);
  for (I=Istart; I<Iend; I++) {
    v = -1.0; i = I/n; j = I - i*n;
    if (i>0)   {J = I - n; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    if (i<m-1) {J = I + n; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    if (j>0)   {J = I - 1; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    if (j<n-1) {J = I + 1; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    v = 4.0; MatSetValues(A,1,&I,1,&I,&v,INSERT_VALUES);
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

  /*
     Create and set vectors
  */
  VecCreate(PETSC_COMM_WORLD,&b);
  VecSetSizes(b,PETSC_DECIDE,m*n);
  VecSetFromOptions(b);
  VecDuplicate(b,&u);
  VecDuplicate(b,&x);
  VecSet(&one,u);
  MatMult(A,u,b);

  /*
     Create linear solver context
  */
  SLESCreate(PETSC_COMM_WORLD,&sles);

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  SLESSetOperators(sles,A,A,DIFFERENT_NONZERO_PATTERN);
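  /*
     (A different, e.g. cheaper-to-factor, matrix could be passed as the
     third argument if one wished to precondition with an approximation
     of A instead.)
  */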

  /*
     Set the default preconditioner for this program to be ASM
  */
  SLESGetPC(sles,&pc);
  PCSetType(pc,PCASM);
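  /*
     Note: this default can still be overridden at runtime (e.g. with
     -pc_type <type>) when SLESSetFromOptions() is called below; the
     "advanced" path later in this example checks for exactly that.
  */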

  /* -------------------------------------------------------------------
                  Define the problem decomposition
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of many users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Set the overlap, using the default PETSc decomposition via
         PCASMSetOverlap(pc,overlap);
     Could instead use the option -pc_asm_overlap <ovl>

     Set the total number of blocks via -pc_asm_blocks <blks>
     Note:  The ASM default is to use 1 block per processor.  To
     experiment on a single processor with various overlaps, you
     must specify use of multiple blocks!
  */
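
  /*
     For example, a single-processor run might use options such as
     (a sketch only; any valid sub-solver options could be substituted):

         -pc_asm_blocks 4 -pc_asm_overlap 2 -sub_pc_type ilu

     so that more than one block exists and the overlap actually matters.
  */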

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       More advanced method, setting user-defined subdomains
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     First, create index sets that define the subdomains.  The utility
     routine PCASMCreateSubdomains2D() is a simple example (that currently
     supports 1 processor only!).  More generally, the user should write
     a custom routine for a particular problem geometry; a sketch of what
     such index sets look like is given after this comment block.

     Then call either PCASMSetLocalSubdomains() or PCASMSetTotalSubdomains()
     to set the subdomains for the ASM preconditioner.
  */
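
  /*
     A minimal sketch (not executed here) of how index sets for a custom
     decomposition might be built by hand, assuming the natural ordering
     used above in which grid point (i,j) has global index i*n + j.  The
     bounds ilow, ihigh, jlow, jhigh and the counter sub are hypothetical
     quantities describing one (overlapping) subdomain; exact signatures,
     e.g. of ISCreateGeneral(), can differ between PETSc versions:

         int count,*indices;
         PetscMalloc((ihigh-ilow)*(jhigh-jlow)*sizeof(int),&indices);
         count = 0;
         for (i=ilow; i<ihigh; i++) {
           for (j=jlow; j<jhigh; j++) indices[count++] = i*n + j;
         }
         ISCreateGeneral(PETSC_COMM_SELF,count,indices,&is[sub]);
         PetscFree(indices);

     Once all Nsub index sets have been created this way, they would be
     passed to PCASMSetLocalSubdomains(pc,Nsub,is) exactly as below.
  */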

  if (!user_subdomains) { /* basic version */
    PCASMSetOverlap(pc,overlap);
  } else { /* advanced version */
    if (size != 1) SETERRQ(1,"PCASMCreateSubdomains2D() is currently a uniprocessor routine only!");
    PCASMCreateSubdomains2D(m,n,M,N,1,overlap,&Nsub,&is);
    PCASMSetLocalSubdomains(pc,Nsub,is);
  }

  /* -------------------------------------------------------------------
                Set the linear solvers for the subblocks
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of most users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     By default, the ASM preconditioner uses the same solver on each
     block of the problem.  To set the same solver options on all blocks,
     prepend the prefix -sub_ to the usual PC and KSP options, e.g.,
          -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4

     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        Advanced method, setting different solvers for various blocks.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Note that each block's SLES context is completely independent of
     the others, and the full range of uniprocessor SLES options is
     available for each block.

     - Use PCASMGetSubSLES() to extract the array of SLES contexts for
       the local blocks.
     - See ex7.c for a simple example of setting different linear solvers
       for the individual blocks for the block Jacobi method (which is
       equivalent to the ASM method with zero overlap).
  */

  PetscOptionsHasName(PETSC_NULL,"-user_set_subdomain_solvers",&flg);
  if (flg) {
    SLES       *subsles;        /* array of SLES contexts for local subblocks */
    int        nlocal,first;    /* number of local subblocks, first local subblock */
    KSP        subksp;          /* KSP context for subblock */
    PC         subpc;           /* PC context for subblock */
    PetscTruth isasm;

    PetscPrintf(PETSC_COMM_WORLD,"User explicitly sets subdomain solvers.\n");

    /*
       Set runtime options
    */
    SLESSetFromOptions(sles);

    /*
       Flag an error if the PC type has been changed from ASM via the
       runtime options, since the code below assumes the ASM preconditioner.
    */
    PetscTypeCompare((PetscObject)pc,PCASM,&isasm);
    if (!isasm) {
      SETERRQ(1,"Cannot change the PCTYPE when manually changing the subdomain solver settings");
    }
    /*
       Call SLESSetUp() to set up the ASM data structures (including
       creation of an internal SLES context for each block).

       Note: SLESSetUp() MUST be called before PCASMGetSubSLES().
    */
    SLESSetUp(sles,b,x);

    /*
       Extract the array of SLES contexts for the local blocks
    */
    PCASMGetSubSLES(pc,&nlocal,&first,&subsles);

    /*
       Loop over the local blocks, setting various SLES options
       for each block.
    */
    for (i=0; i<nlocal; i++) {
      SLESGetPC(subsles[i],&subpc);
      SLESGetKSP(subsles[i],&subksp);
      PCSetType(subpc,PCILU);
      KSPSetType(subksp,KSPGMRES);
      KSPSetTolerances(subksp,1.e-7,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
    }
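    /*
       To give individual blocks genuinely different solvers, one could
       branch on the global block number inside the loop above, e.g.
       (a hypothetical choice of solvers):

           if (first + i == 0) PCSetType(subpc,PCLU);
           else                PCSetType(subpc,PCILU);

       See ex7.c for a complete example of this for block Jacobi.
    */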
  } else {
    /*
       Set runtime options
    */
    SLESSetFromOptions(sles);
  }

  /* -------------------------------------------------------------------
                      Solve the linear system
     ------------------------------------------------------------------- */

  SLESSolve(sles,b,x,&its);

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */

  if (user_subdomains) {
    for (i=0; i<Nsub; i++) {
      ISDestroy(is[i]);
    }
    PetscFree(is);
  }
  SLESDestroy(sles);
  VecDestroy(u);
  VecDestroy(x);
  VecDestroy(b);
  MatDestroy(A);
  PetscFinalize();
  return 0;
}