Actual source code: bjacobi.c

  1: /*$Id: bjacobi.c,v 1.160 2001/08/07 03:03:35 balay Exp $*/
  2: /*
  3:    Defines a block Jacobi preconditioner.
  4: */
 5:  #include "src/mat/matimpl.h"
 6:  #include "src/sles/pc/pcimpl.h"
 7:  #include "src/sles/pc/impls/bjacobi/bjacobi.h"

  9: static int PCSetUp_BJacobi_Singleblock(PC,Mat,Mat);
 10: static int PCSetUp_BJacobi_Multiblock(PC,Mat,Mat);

 12: #undef __FUNCT__  
 13: #define __FUNCT__ "PCSetUp_BJacobi"
 14: static int PCSetUp_BJacobi(PC pc)
 15: {
 16:   PC_BJacobi      *jac = (PC_BJacobi*)pc->data;
 17:   Mat             mat = pc->mat,pmat = pc->pmat;
 18:   int             ierr,N,M,start,i,rank,size,sum,end;
 19:   int             bs,i_start=-1,i_end=-1;
 20:   char            *pprefix,*mprefix;

 23:   MPI_Comm_rank(pc->comm,&rank);
 24:   MPI_Comm_size(pc->comm,&size);
 25:   MatGetLocalSize(pc->pmat,&M,&N);
 26:   MatGetBlockSize(pc->pmat,&bs);

 28:   /* ----------
 29:       Determines the number of blocks assigned to each processor 
 30:   */

 32:   /* local block count given */
 33:   if (jac->n_local > 0 && jac->n < 0) {
 34:     MPI_Allreduce(&jac->n_local,&jac->n,1,MPI_INT,MPI_SUM,pc->comm);
 35:     if (jac->l_lens) { /* check that user set these correctly */
 36:       sum = 0;
 37:       for (i=0; i<jac->n_local; i++) {
 38:         if (jac->l_lens[i]/bs*bs != jac->l_lens[i]) {
 39:           SETERRQ(PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
 40:         }
 41:         sum += jac->l_lens[i];
 42:       }
 43:       if (sum != M) SETERRQ(PETSC_ERR_ARG_SIZ,"Local lens set incorrectly");
 44:     } else {
 45:       PetscMalloc(jac->n_local*sizeof(int),&jac->l_lens);
 46:       for (i=0; i<jac->n_local; i++) {
 47:         jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
 48:       }
 49:     }
 50:   } else if (jac->n > 0 && jac->n_local < 0) { /* global block count given */
 51:     /* global blocks given: determine which ones are local */
 52:     if (jac->g_lens) {
 53:       /* check that the g_lens entries are valid */
 54:       for (i=0; i<jac->n; i++) {
 55:         if (!jac->g_lens[i]) SETERRQ(PETSC_ERR_ARG_SIZ,"Zero block not allowed");
 56:         if (jac->g_lens[i]/bs*bs != jac->g_lens[i]) {
 57:           SETERRQ(PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
 58:         }
 59:       }
 60:       if (size == 1) {
 61:         jac->n_local = jac->n;
 62:         ierr         = PetscMalloc(jac->n_local*sizeof(int),&jac->l_lens);
 63:         ierr         = PetscMemcpy(jac->l_lens,jac->g_lens,jac->n_local*sizeof(int));
 64:         /* check that user set these correctly */
 65:         sum = 0;
 66:         for (i=0; i<jac->n_local; i++) sum += jac->l_lens[i];
 67:         if (sum != M) SETERRQ(PETSC_ERR_ARG_SIZ,"Global lens set incorrectly");
 68:       } else {
 69:         MatGetOwnershipRange(pc->pmat,&start,&end);
 70:         /* loop over blocks, determining the first one owned by me */
 71:         sum = 0;
 72:         for (i=0; i<jac->n+1; i++) {
 73:           if (sum == start) { i_start = i; goto start_1;}
 74:           if (i < jac->n) sum += jac->g_lens[i];
 75:         }
 76:         SETERRQ(PETSC_ERR_ARG_SIZ,"Block sizesn
 77:                    used in PCBJacobiSetTotalBlocks()n
 78:                    are not compatible with parallel matrix layout");
 79:  start_1:
 80:         for (i=i_start; i<jac->n+1; i++) {
 81:           if (sum == end) { i_end = i; goto end_1; }
 82:           if (i < jac->n) sum += jac->g_lens[i];
 83:         }
 84:         SETERRQ(PETSC_ERR_ARG_SIZ,"Block sizesn
 85:                       used in PCBJacobiSetTotalBlocks()n
 86:                       are not compatible with parallel matrix layout");
 87:  end_1:
 88:         jac->n_local = i_end - i_start;
 89:         ierr         = PetscMalloc(jac->n_local*sizeof(int),&jac->l_lens);
 90:         ierr         = PetscMemcpy(jac->l_lens,jac->g_lens+i_start,jac->n_local*sizeof(int));
 91:       }
 92:     } else { /* no global blocks given, determine them using the default layout */
 93:       jac->n_local = jac->n/size + ((jac->n % size) > rank);
 94:       ierr         = PetscMalloc(jac->n_local*sizeof(int),&jac->l_lens);
 95:       for (i=0; i<jac->n_local; i++) {
 96:         jac->l_lens[i] = ((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i))*bs;
 97:         if (!jac->l_lens[i]) SETERRQ(PETSC_ERR_ARG_SIZ,"Too many blocks given");
 98:       }
 99:     }
100:   } else if (jac->n < 0 && jac->n_local < 0) { /* no blocks given */
101:     jac->n         = size;
102:     jac->n_local   = 1;
103:     ierr           = PetscMalloc(sizeof(int),&jac->l_lens);
104:     jac->l_lens[0] = M;
105:   }

107:   MPI_Comm_size(pc->comm,&size);
108:   if (size == 1) {
109:     mat  = pc->mat;
110:     pmat = pc->pmat;
111:   } else {
112:     PetscTruth iscopy;
113:     MatReuse   scall;
114:     int        (*f)(Mat,PetscTruth*,MatReuse,Mat*);

116:     if (jac->use_true_local) {
117:       scall = MAT_INITIAL_MATRIX;
118:       if (pc->setupcalled) {
119:         if (pc->flag == SAME_NONZERO_PATTERN) {
120:           if (jac->tp_mat) {
121:             scall = MAT_REUSE_MATRIX;
122:             mat   = jac->tp_mat;
123:           }
124:         } else {
125:           if (jac->tp_mat)  {
126:             MatDestroy(jac->tp_mat);
127:           }
128:         }
129:       }
130:       PetscObjectQueryFunction((PetscObject)pc->mat,"MatGetDiagonalBlock_C",(void (**)(void))&f);
131:       if (!f) {
132:         SETERRQ(PETSC_ERR_SUP,"This matrix does not support getting diagonal block");
133:       }
134:       (*f)(pc->mat,&iscopy,scall,&mat);
135:       /* make submatrix have same prefix as entire matrix */
136:       PetscObjectGetOptionsPrefix((PetscObject)pc->mat,&mprefix);
137:       PetscObjectSetOptionsPrefix((PetscObject)mat,mprefix);
138:       if (iscopy) {
139:         jac->tp_mat = mat;
140:       }
141:     }
142:     if (pc->pmat != pc->mat || !jac->use_true_local) {
143:       scall = MAT_INITIAL_MATRIX;
144:       if (pc->setupcalled) {
145:         if (pc->flag == SAME_NONZERO_PATTERN) {
146:           if (jac->tp_pmat) {
147:             scall = MAT_REUSE_MATRIX;
148:             pmat   = jac->tp_pmat;
149:           }
150:         } else {
151:           if (jac->tp_pmat)  {
152:             MatDestroy(jac->tp_pmat);
153:           }
154:         }
155:       }
156:       PetscObjectQueryFunction((PetscObject)pc->pmat,"MatGetDiagonalBlock_C",(void (**)(void))&f);
157:       if (!f) {
158:         SETERRQ(PETSC_ERR_SUP,"This matrix does not support getting diagonal block");
159:       }
160:       (*f)(pc->pmat,&iscopy,scall,&pmat);
161:       /* make submatrix have same prefix as entire matrix */
162:       PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);
163:       PetscObjectSetOptionsPrefix((PetscObject)pmat,pprefix);
164:       if (iscopy) {
165:         jac->tp_pmat = pmat;
166:       }
167:     } else {
168:       pmat = mat;
169:     }
170:   }

172:   /* ------
173:      Setup code depends on the number of blocks 
174:   */
175:   if (jac->n_local == 1) {
176:     PCSetUp_BJacobi_Singleblock(pc,mat,pmat);
177:   } else {
178:     PCSetUp_BJacobi_Multiblock(pc,mat,pmat);
179:   }
180:   return(0);
181: }

183: /* Default destroy, if it has never been set up */
184: #undef __FUNCT__  
185: #define __FUNCT__ "PCDestroy_BJacobi"
186: static int PCDestroy_BJacobi(PC pc)
187: {
188:   PC_BJacobi *jac = (PC_BJacobi*)pc->data;

192:   if (jac->g_lens) {PetscFree(jac->g_lens);}
193:   if (jac->l_lens) {PetscFree(jac->l_lens);}
194:   PetscFree(jac);
195:   return(0);
196: }

198: #undef __FUNCT__  
199: #define __FUNCT__ "PCSetFromOptions_BJacobi"
200: static int PCSetFromOptions_BJacobi(PC pc)
201: {
202:   PC_BJacobi *jac = (PC_BJacobi*)pc->data;
203:   int        blocks,ierr;
204:   PetscTruth flg;

207:   PetscOptionsHead("Block Jacobi options");
208:     PetscOptionsInt("-pc_bjacobi_blocks","Total number of blocks","PCBJacobiSetTotalBlocks",jac->n,&blocks,&flg);
209:     if (flg) {
210:       PCBJacobiSetTotalBlocks(pc,blocks,PETSC_NULL);
211:     }
212:     PetscOptionsName("-pc_bjacobi_truelocal","Use the true matrix, not preconditioner matrix to define matrix vector product in sub-problems","PCBJacobiSetUseTrueLocal",&flg);
213:     if (flg) {
214:       PCBJacobiSetUseTrueLocal(pc);
215:     }
216:   PetscOptionsTail();
217:   return(0);
218: }

220: #undef __FUNCT__  
221: #define __FUNCT__ "PCView_BJacobi"
222: static int PCView_BJacobi(PC pc,PetscViewer viewer)
223: {
224:   PC_BJacobi  *jac = (PC_BJacobi*)pc->data;
225:   int         rank,ierr,i;
226:   PetscTruth  isascii,isstring;
227:   PetscViewer sviewer;

230:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);
231:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);
232:   if (isascii) {
233:     if (jac->use_true_local) {
234:       PetscViewerASCIIPrintf(viewer,"  block Jacobi: using true local matrix, number of blocks = %d\n",jac->n);
235:     }
236:     PetscViewerASCIIPrintf(viewer,"  block Jacobi: number of blocks = %d\n",jac->n);
237:     MPI_Comm_rank(pc->comm,&rank);
238:     if (jac->same_local_solves) {
239:       PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");
240:       PetscViewerGetSingleton(viewer,&sviewer);
241:       if (!rank && jac->sles) {
242:         PetscViewerASCIIPushTab(viewer);
243:         SLESView(jac->sles[0],sviewer);
244:         PetscViewerASCIIPopTab(viewer);
245:       }
246:       PetscViewerRestoreSingleton(viewer,&sviewer);
247:     } else {

249:       PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");
250:       PetscViewerASCIISynchronizedPrintf(viewer,"Proc %d: number of local blocks = %d, first local block number = %d\n",
251:                    rank,jac->n_local,jac->first_local);
252:       PetscViewerASCIIPushTab(viewer);
253:       for (i=0; i<jac->n_local; i++) {
254:         PetscViewerASCIISynchronizedPrintf(viewer,"Proc %d: local block number %d\n",rank,i);
255:         PetscViewerGetSingleton(viewer,&sviewer);
256:         SLESView(jac->sles[i],sviewer);
257:         if (i != jac->n_local-1) {
258:           PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");
259:         }
260:         PetscViewerRestoreSingleton(viewer,&sviewer);
261:       }
262:       PetscViewerASCIIPopTab(viewer);
263:       PetscViewerFlush(viewer);
264:     }
265:   } else if (isstring) {
266:     PetscViewerStringSPrintf(viewer," blks=%d",jac->n);
267:     PetscViewerGetSingleton(viewer,&sviewer);
268:     if (jac->sles) {SLESView(jac->sles[0],sviewer);}
269:     PetscViewerRestoreSingleton(viewer,&sviewer);
270:   } else {
271:     SETERRQ1(1,"Viewer type %s not supported for block Jacobi",((PetscObject)viewer)->type_name);
272:   }
273:   return(0);
274: }

276: /* -------------------------------------------------------------------------------------*/

278: EXTERN_C_BEGIN
279: #undef __FUNCT__  
280: #define __FUNCT__ "PCBJacobiSetUseTrueLocal_BJacobi"
281: int PCBJacobiSetUseTrueLocal_BJacobi(PC pc)
282: {
283:   PC_BJacobi   *jac;

286:   jac                 = (PC_BJacobi*)pc->data;
287:   jac->use_true_local = PETSC_TRUE;
288:   return(0);
289: }
290: EXTERN_C_END

292: EXTERN_C_BEGIN
293: #undef __FUNCT__  
294: #define __FUNCT__ "PCBJacobiGetSubSLES_BJacobi"
295: int PCBJacobiGetSubSLES_BJacobi(PC pc,int *n_local,int *first_local,SLES **sles)
296: {
297:   PC_BJacobi   *jac = (PC_BJacobi*)pc->data;

300:   if (!pc->setupcalled) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call SLESSetUp() or PCSetUp() first");

302:   if (n_local)     *n_local     = jac->n_local;
303:   if (first_local) *first_local = jac->first_local;
304:   *sles                         = jac->sles;
305:   jac->same_local_solves        = PETSC_FALSE; /* Assume that local solves are now different;
306:                                                   not necessarily true though!  This flag is 
307:                                                   used only for PCView_BJacobi() */
308:   return(0);
309: }
310: EXTERN_C_END

312: EXTERN_C_BEGIN
313: #undef __FUNCT__  
314: #define __FUNCT__ "PCBJacobiSetTotalBlocks_BJacobi"
315: int PCBJacobiSetTotalBlocks_BJacobi(PC pc,int blocks,int *lens)
316: {
317:   PC_BJacobi *jac = (PC_BJacobi*)pc->data;
318:   int        ierr;


322:   jac->n = blocks;
323:   if (!lens) {
324:     jac->g_lens = 0;
325:   } else {
326:     PetscMalloc(blocks*sizeof(int),&jac->g_lens);
327:     PetscLogObjectMemory(pc,blocks*sizeof(int));
328:     PetscMemcpy(jac->g_lens,lens,blocks*sizeof(int));
329:   }
330:   return(0);
331: }
332: EXTERN_C_END

334: EXTERN_C_BEGIN
335: #undef __FUNCT__  
336: #define __FUNCT__ "PCBJacobiGetTotalBlocks_BJacobi"
337: int PCBJacobiGetTotalBlocks_BJacobi(PC pc, int *blocks, const int *lens[])
338: {
339:   PC_BJacobi *jac = (PC_BJacobi*) pc->data;

342:   *blocks = jac->n;
343:   if (lens) *lens = jac->g_lens;
344:   return(0);
345: }
346: EXTERN_C_END

348: EXTERN_C_BEGIN
349: #undef __FUNCT__  
350: #define __FUNCT__ "PCBJacobiSetLocalBlocks_BJacobi"
351: int PCBJacobiSetLocalBlocks_BJacobi(PC pc,int blocks,int *lens)
352: {
353:   PC_BJacobi *jac;
354:   int        ierr;

357:   jac = (PC_BJacobi*)pc->data;

359:   jac->n_local = blocks;
360:   if (!lens) {
361:     jac->l_lens = 0;
362:   } else {
363:     PetscMalloc(blocks*sizeof(int),&jac->l_lens);
364:     PetscLogObjectMemory(pc,blocks*sizeof(int));
365:     PetscMemcpy(jac->l_lens,lens,blocks*sizeof(int));
366:   }
367:   return(0);
368: }
369: EXTERN_C_END

371: EXTERN_C_BEGIN
372: #undef __FUNCT__  
373: #define __FUNCT__ "PCBJacobiGetLocalBlocks_BJacobi"
374: int PCBJacobiGetLocalBlocks_BJacobi(PC pc, int *blocks, const int *lens[])
375: {
376:   PC_BJacobi *jac = (PC_BJacobi*) pc->data;

379:   *blocks = jac->n_local;
380:   if (lens) *lens = jac->l_lens;
381:   return(0);
382: }
383: EXTERN_C_END

385: /* -------------------------------------------------------------------------------------*/

387: #undef __FUNCT__  
388: #define __FUNCT__ "PCBJacobiSetUseTrueLocal"
389: /*@
390:    PCBJacobiSetUseTrueLocal - Sets a flag to indicate that the block 
391:    problem is associated with the linear system matrix instead of the
392:    default (where it is associated with the preconditioning matrix).
393:    That is, if the local system is solved iteratively, the iteration
394:    works with the block from the linear system matrix, using the block
395:    from the preconditioning matrix as its preconditioner.

397:    Collective on PC

399:    Input Parameter:
400: .  pc - the preconditioner context

402:    Options Database Key:
403: .  -pc_bjacobi_truelocal - Activates PCBJacobiSetUseTrueLocal()

405:    Notes:
406:    For the common case in which the preconditioning and linear 
407:    system matrices are identical, this routine is unnecessary.

409:    Level: intermediate

411: .keywords:  block, Jacobi, set, true, local, flag

413: .seealso: PCSetOperators(), PCBJacobiSetLocalBlocks()
414: @*/
415: int PCBJacobiSetUseTrueLocal(PC pc)
416: {
417:   int ierr,(*f)(PC);

421:   PetscObjectQueryFunction((PetscObject)pc,"PCBJacobiSetUseTrueLocal_C",(void (**)(void))&f);
422:   if (f) {
423:     (*f)(pc);
424:   }

426:   return(0);
427: }

429: #undef __FUNCT__  
430: #define __FUNCT__ "PCBJacobiGetSubSLES"
431: /*@C
432:    PCBJacobiGetSubSLES - Gets the local SLES contexts for all blocks on
433:    this processor.
434:    
435:    Not Collective

437:    Input Parameter:
438: .  pc - the preconditioner context

440:    Output Parameters:
441: +  n_local - the number of blocks on this processor, or PETSC_NULL
442: .  first_local - the global number of the first block on this processor, or PETSC_NULL
443: -  sles - the array of SLES contexts

445:    Notes:  
446:    The array of SLES contexts returned by PCBJacobiGetSubSLES() must not be freed by the caller.
447:    
448:    Currently for some matrix implementations only 1 block per processor 
449:    is supported.
450:    
451:    You must call SLESSetUp() or PCSetUp() before calling PCBJacobiGetSubSLES().

453:    Level: advanced

455: .keywords:  block, Jacobi, get, sub, SLES, context

457: .seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiSetTotalBlocks(), PCBJacobiSetLocalBlocks()
458: @*/
459: int PCBJacobiGetSubSLES(PC pc,int *n_local,int *first_local,SLES **sles)
460: {
461:   int ierr,(*f)(PC,int *,int *,SLES **);

465:   PetscObjectQueryFunction((PetscObject)pc,"PCBJacobiGetSubSLES_C",(void (**)(void))&f);
466:   if (f) {
467:     (*f)(pc,n_local,first_local,sles);
468:   } else {
469:     SETERRQ(1,"Cannot get subsolvers for this preconditioner");
470:   }
471:   return(0);
472: }
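/*
   Example usage (a minimal sketch; assumes the outer solver has been set
   up, e.g. via SLESSetUp(), so that the blocks exist, and omits error
   checking):

     int  nlocal,first,i;
     SLES *subsles;
     KSP  subksp;
     PC   subpc;
     PCBJacobiGetSubSLES(pc,&nlocal,&first,&subsles);
     for (i=0; i<nlocal; i++) {
       SLESGetKSP(subsles[i],&subksp);
       SLESGetPC(subsles[i],&subpc);
       KSPSetType(subksp,KSPGMRES);
       PCSetType(subpc,PCJACOBI);
     }
*/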

474: #undef __FUNCT__  
475: #define __FUNCT__ "PCBJacobiSetTotalBlocks"
476: /*@
477:    PCBJacobiSetTotalBlocks - Sets the global number of blocks for the block
478:    Jacobi preconditioner.

480:    Collective on PC

482:    Input Parameters:
483: +  pc - the preconditioner context
484: .  blocks - the number of blocks
485: -  lens - [optional] integer array containing the size of each block

487:    Options Database Key:
488: .  -pc_bjacobi_blocks <blocks> - Sets the number of global blocks

490:    Notes:  
491:    Currently only a limited number of blocking configurations are supported.
492:    All processors sharing the PC must call this routine with the same data.

494:    Level: intermediate

496: .keywords:  set, number, Jacobi, global, total, blocks

498: .seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiSetLocalBlocks()
499: @*/
500: int PCBJacobiSetTotalBlocks(PC pc,int blocks,int *lens)
501: {
502:   int ierr,(*f)(PC,int,int *);

506:   if (blocks <= 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must have positive blocks");
507:   PetscObjectQueryFunction((PetscObject)pc,"PCBJacobiSetTotalBlocks_C",(void (**)(void))&f);
508:   if (f) {
509:     (*f)(pc,blocks,lens);
510:   }
511:   return(0);
512: }
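/*
   Example usage (a minimal sketch; splits a system assumed to have 100
   rows into four uneven blocks; every process sharing pc makes the same
   call):

     int lens[4] = {10,20,30,40};
     PCBJacobiSetTotalBlocks(pc,4,lens);

   Passing PETSC_NULL for lens instead divides the rows as evenly as the
   matrix block size allows.
*/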

514: #undef __FUNCT__  
515: #define __FUNCT__ "PCBJacobiGetTotalBlocks"
516: /*@C
517:    PCBJacobiGetTotalBlocks - Gets the global number of blocks for the block
518:    Jacobi preconditioner.

520:    Collective on PC

522:    Input Parameter:
523: .  pc - the preconditioner context

525:    Output Parameters:
526: +  blocks - the number of blocks
527: -  lens - integer array containing the size of each block

529:    Level: intermediate

531: .keywords:  get, number, Jacobi, global, total, blocks

533: .seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiGetLocalBlocks()
534: @*/
535: int PCBJacobiGetTotalBlocks(PC pc, int *blocks, const int *lens[])
536: {
537:   int ierr,(*f)(PC,int*, const int *[]);

542:   PetscObjectQueryFunction((PetscObject)pc,"PCBJacobiGetTotalBlocks_C",(void (**)(void))&f);
543:   if (f) {
544:     (*f)(pc,blocks,lens);
545:   }
546:   return(0);
547: }
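/*
   Example usage (a minimal sketch; assumes pc is a PCBJACOBI
   preconditioner context that has already been configured):

     int       nblocks;
     const int *lens;
     PCBJacobiGetTotalBlocks(pc,&nblocks,&lens);
*/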
548: 
549: #undef __FUNCT__  
550: #define __FUNCT__ "PCBJacobiSetLocalBlocks"
551: /*@
552:    PCBJacobiSetLocalBlocks - Sets the local number of blocks for the block
553:    Jacobi preconditioner.

555:    Not Collective

557:    Input Parameters:
558: +  pc - the preconditioner context
559: .  blocks - the number of blocks
560: -  lens - [optional] integer array containing size of each block

562:    Note:  
563:    Currently only a limited number of blocking configurations are supported.

565:    Level: intermediate

567: .keywords: PC, set, number, Jacobi, local, blocks

569: .seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiSetTotalBlocks()
570: @*/
571: int PCBJacobiSetLocalBlocks(PC pc,int blocks,int *lens)
572: {
573:   int ierr,(*f)(PC,int,int *);

577:   if (blocks < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must have nonnegative blocks");
578:   PetscObjectQueryFunction((PetscObject)pc,"PCBJacobiSetLocalBlocks_C",(void (**)(void))&f);
579:   if (f) {
580:     (*f)(pc,blocks,lens);
581:   }
582:   return(0);
583: }
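/*
   Example usage (a minimal sketch; gives the calling process two local
   blocks whose sizes, 30 and 70, are assumed to sum to the local number
   of rows):

     int lens[2] = {30,70};
     PCBJacobiSetLocalBlocks(pc,2,lens);

   Passing PETSC_NULL for lens instead requests equal-sized local blocks.
*/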
584: 
585: #undef __FUNCT__  
586: #define __FUNCT__ "PCBJacobiGetLocalBlocks"
587: /*@C
588:    PCBJacobiGetLocalBlocks - Gets the local number of blocks for the block
589:    Jacobi preconditioner.

591:    Not Collective

593:    Input Parameter:
594: .  pc - the preconditioner context

595:    Output Parameters:
596: +  blocks - the number of local blocks
597: -  lens - [optional] integer array containing the size of each block

598:    Note:  
599:    Currently only a limited number of blocking configurations are supported.

601:    Level: intermediate

603: .keywords: PC, get, number, Jacobi, local, blocks

605: .seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiGetTotalBlocks()
606: @*/
607: int PCBJacobiGetLocalBlocks(PC pc, int *blocks, const int *lens[])
608: {
609:   int ierr,(*f)(PC,int*, const int *[]);

614:   PetscObjectQueryFunction((PetscObject)pc,"PCBJacobiGetLocalBlocks_C",(void (**)(void))&f);
615:   if (f) {
616:     (*f)(pc,blocks,lens);
617:   }
618:   return(0);
619: }

621: /* -----------------------------------------------------------------------------------*/

623: /*MC
624:    PCBJACOBI - Use block Jacobi preconditioning, in which each block is solved
625:            (approximately) with its own SLES object.

627:    Options Database Keys:
628: .  -pc_bjacobi_truelocal - Activates PCBJacobiSetUseTrueLocal()

630:    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
631:      than one processor. Defaults to one block per processor.

633:      To set options on the solvers for each block, prefix the usual SLES, KSP, and PC
634:         options database keys with -sub_. For example, -sub_pc_type ilu -sub_pc_ilu_levels 1 -sub_ksp_type preonly
635: 
636:      To set the options separately for each block, call PCBJacobiGetSubSLES()
637:          and set the options directly on the resulting SLES objects (you can access their KSP and PC
638:          with SLESGetKSP() and SLESGetPC())

640:    Level: beginner

642:    Concepts: block Jacobi

644: .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
645:            PCASM, PCBJacobiSetUseTrueLocal(), PCBJacobiGetSubSLES(), PCBJacobiSetTotalBlocks(),
646:            PCBJacobiSetLocalBlocks(), PCSetModifySubmatrices()
647: M*/
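/*
   Example run (a minimal sketch; the program name myapp is hypothetical):
   block Jacobi with eight total blocks and ILU(1) as the solver on each
   block:

     mpirun -np 4 ./myapp -pc_type bjacobi -pc_bjacobi_blocks 8 \
            -sub_ksp_type preonly -sub_pc_type ilu -sub_pc_ilu_levels 1
*/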

649: EXTERN_C_BEGIN
650: #undef __FUNCT__  
651: #define __FUNCT__ "PCCreate_BJacobi"
652: int PCCreate_BJacobi(PC pc)
653: {
654:   int          rank,ierr;
655:   PC_BJacobi   *jac;

658:   PetscNew(PC_BJacobi,&jac);
659:   PetscLogObjectMemory(pc,sizeof(PC_BJacobi));
660:   MPI_Comm_rank(pc->comm,&rank);
661:   pc->ops->apply              = 0;
662:   pc->ops->applytranspose     = 0;
663:   pc->ops->setup              = PCSetUp_BJacobi;
664:   pc->ops->destroy            = PCDestroy_BJacobi;
665:   pc->ops->setfromoptions     = PCSetFromOptions_BJacobi;
666:   pc->ops->view               = PCView_BJacobi;
667:   pc->ops->applyrichardson    = 0;

669:   pc->data               = (void*)jac;
670:   jac->n                 = -1;
671:   jac->n_local           = -1;
672:   jac->first_local       = rank;
673:   jac->sles              = 0;
674:   jac->use_true_local    = PETSC_FALSE;
675:   jac->same_local_solves = PETSC_TRUE;
676:   jac->g_lens            = 0;
677:   jac->l_lens            = 0;
678:   jac->tp_mat            = 0;
679:   jac->tp_pmat           = 0;

681:   PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiSetUseTrueLocal_C",
682:                     "PCBJacobiSetUseTrueLocal_BJacobi",
683:                     PCBJacobiSetUseTrueLocal_BJacobi);
684:   PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiGetSubSLES_C","PCBJacobiGetSubSLES_BJacobi",
685:                     PCBJacobiGetSubSLES_BJacobi);
686:   PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiSetTotalBlocks_C","PCBJacobiSetTotalBlocks_BJacobi",
687:                     PCBJacobiSetTotalBlocks_BJacobi);
688:   PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiGetTotalBlocks_C","PCBJacobiGetTotalBlocks_BJacobi",
689:                     PCBJacobiGetTotalBlocks_BJacobi);
690:   PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiSetLocalBlocks_C","PCBJacobiSetLocalBlocks_BJacobi",
691:                     PCBJacobiSetLocalBlocks_BJacobi);
692:   PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiGetLocalBlocks_C","PCBJacobiGetLocalBlocks_BJacobi",
693:                     PCBJacobiGetLocalBlocks_BJacobi);

695:   return(0);
696: }
697: EXTERN_C_END

699: /* --------------------------------------------------------------------------------------------*/
700: /*
701:         These are for a single block per processor; works for AIJ, BAIJ; Seq and MPI
702: */
703: #undef __FUNCT__  
704: #define __FUNCT__ "PCDestroy_BJacobi_Singleblock"
705: int PCDestroy_BJacobi_Singleblock(PC pc)
706: {
707:   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
708:   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
709:   int                    ierr;

712:   /*
713:         If the on-processor block had to be generated via a MatGetDiagonalBlock()
714:      that creates a copy (as, for example, MPIBDiag matrices do), free that space here
715:   */
716:   if (jac->tp_mat) {
717:     MatDestroy(jac->tp_mat);
718:   }
719:   if (jac->tp_pmat) {
720:     MatDestroy(jac->tp_pmat);
721:   }

723:   SLESDestroy(jac->sles[0]);
724:   PetscFree(jac->sles);
725:   VecDestroy(bjac->x);
726:   VecDestroy(bjac->y);
727:   if (jac->l_lens) {PetscFree(jac->l_lens);}
728:   if (jac->g_lens) {PetscFree(jac->g_lens);}
729:   PetscFree(bjac);
730:   PetscFree(jac);
731:   return(0);
732: }

734: #undef __FUNCT__  
735: #define __FUNCT__ "PCSetUpOnBlocks_BJacobi_Singleblock"
736: int PCSetUpOnBlocks_BJacobi_Singleblock(PC pc)
737: {
738:   int                    ierr;
739:   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
740:   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;

743:   SLESSetUp(jac->sles[0],bjac->x,bjac->y);
744:   return(0);
745: }

747: #undef __FUNCT__  
748: #define __FUNCT__ "PCApply_BJacobi_Singleblock"
749: int PCApply_BJacobi_Singleblock(PC pc,Vec x,Vec y)
750: {
751:   int                    ierr,its;
752:   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
753:   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
754:   PetscScalar            *x_array,*y_array;

757:   /* 
758:       The VecPlaceArray() calls avoid copying the x and y vectors
759:     into bjac->x and bjac->y. The bjac->x and bjac->y vectors are
760:     needed because the solve on the block requires sequential
761:     vectors.
762:   */
763:   VecGetArray(x,&x_array);
764:   VecGetArray(y,&y_array);
765:   VecPlaceArray(bjac->x,x_array);
766:   VecPlaceArray(bjac->y,y_array);
767:   SLESSolve(jac->sles[0],bjac->x,bjac->y,&its);
768:   VecRestoreArray(x,&x_array);
769:   VecRestoreArray(y,&y_array);
770:   return(0);
771: }

773: #undef __FUNCT__  
774: #define __FUNCT__ "PCApplySymmetricLeft_BJacobi_Singleblock"
775: int PCApplySymmetricLeft_BJacobi_Singleblock(PC pc,Vec x,Vec y)
776: {
777:   int                    ierr;
778:   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
779:   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
780:   PetscScalar            *x_array,*y_array;
781:   PC                     subpc;

784:   /* 
785:       The VecPlaceArray() calls avoid copying the x and y vectors
786:     into bjac->x and bjac->y. The bjac->x and bjac->y vectors are
787:     needed because the solve on the block requires sequential
788:     vectors.
789:   */
790:   VecGetArray(x,&x_array);
791:   VecGetArray(y,&y_array);
792:   VecPlaceArray(bjac->x,x_array);
793:   VecPlaceArray(bjac->y,y_array);

795:   /* apply the symmetric left portion of the inner PC operator */
796:   /* note this bypasses the inner SLES and its options completely */

798:   SLESGetPC(jac->sles[0],&subpc);
799:   PCApplySymmetricLeft(subpc,bjac->x,bjac->y);

801:   VecRestoreArray(x,&x_array);
802:   VecRestoreArray(y,&y_array);
803:   return(0);
804: }

806: #undef __FUNCT__  
807: #define __FUNCT__ "PCApplySymmetricRight_BJacobi_Singleblock"
808: int PCApplySymmetricRight_BJacobi_Singleblock(PC pc,Vec x,Vec y)
809: {
810:   int                    ierr;
811:   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
812:   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
813:   PetscScalar            *x_array,*y_array;
814:   PC                     subpc;

817:   /* 
818:       The VecPlaceArray() calls avoid copying the x and y vectors
819:     into bjac->x and bjac->y. The bjac->x and bjac->y vectors are
820:     needed because the solve on the block requires sequential
821:     vectors.
822:   */
823:   VecGetArray(x,&x_array);
824:   VecGetArray(y,&y_array);
825:   VecPlaceArray(bjac->x,x_array);
826:   VecPlaceArray(bjac->y,y_array);

828:   /* apply the symmetric right portion of the inner PC operator */
829: /* note this bypasses the inner SLES and its options completely */

831:   SLESGetPC(jac->sles[0],&subpc);
832:   PCApplySymmetricRight(subpc,bjac->x,bjac->y);

834:   VecRestoreArray(x,&x_array);
835:   VecRestoreArray(y,&y_array);
836:   return(0);
837: }

839: #undef __FUNCT__  
840: #define __FUNCT__ "PCApplyTranspose_BJacobi_Singleblock"
841: int PCApplyTranspose_BJacobi_Singleblock(PC pc,Vec x,Vec y)
842: {
843:   int                    ierr,its;
844:   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
845:   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
846:   PetscScalar            *x_array,*y_array;

849:   /* 
850:       The VecPlaceArray() calls avoid copying the x and y vectors
851:     into bjac->x and bjac->y. The bjac->x and bjac->y vectors are
852:     needed because the solve on the block requires sequential
853:     vectors.
854:   */
855:   VecGetArray(x,&x_array);
856:   VecGetArray(y,&y_array);
857:   VecPlaceArray(bjac->x,x_array);
858:   VecPlaceArray(bjac->y,y_array);
859:   SLESSolveTranspose(jac->sles[0],bjac->x,bjac->y,&its);
860:   VecRestoreArray(x,&x_array);
861:   VecRestoreArray(y,&y_array);
862:   return(0);
863: }

865: #undef __FUNCT__  
866: #define __FUNCT__ "PCSetUp_BJacobi_Singleblock"
867: static int PCSetUp_BJacobi_Singleblock(PC pc,Mat mat,Mat pmat)
868: {
869:   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
870:   int                    ierr,m;
871:   SLES                   sles;
872:   Vec                    x,y;
873:   PC_BJacobi_Singleblock *bjac;
874:   KSP                    subksp;
875:   PC                     subpc;


879:   /* set default direct solver with no Krylov method */
880:   if (!pc->setupcalled) {
881:     char *prefix;
882:     SLESCreate(PETSC_COMM_SELF,&sles);
883:     PetscLogObjectParent(pc,sles);
884:     SLESGetKSP(sles,&subksp);
885:     KSPSetType(subksp,KSPPREONLY);
886:     SLESGetPC(sles,&subpc);
887:     PCSetType(subpc,PCILU);
888:     PCGetOptionsPrefix(pc,&prefix);
889:     SLESSetOptionsPrefix(sles,prefix);
890:     SLESAppendOptionsPrefix(sles,"sub_");
891:     SLESSetFromOptions(sles);
892:     /*
893:       The reason we need to generate these vectors is to serve 
894:       as the right-hand side and solution vector for the solve on the 
895:       block. We do not need to allocate space for the vectors since
896:       that is provided via VecPlaceArray() just before the call to 
897:       SLESSolve() on the block.
898:     */
899:     MatGetSize(pmat,&m,&m);
900:     VecCreateSeqWithArray(PETSC_COMM_SELF,m,PETSC_NULL,&x);
901:     VecCreateSeqWithArray(PETSC_COMM_SELF,m,PETSC_NULL,&y);
902:     PetscLogObjectParent(pc,x);
903:     PetscLogObjectParent(pc,y);

905:     pc->ops->destroy             = PCDestroy_BJacobi_Singleblock;
906:     pc->ops->apply               = PCApply_BJacobi_Singleblock;
907:     pc->ops->applysymmetricleft  = PCApplySymmetricLeft_BJacobi_Singleblock;
908:     pc->ops->applysymmetricright = PCApplySymmetricRight_BJacobi_Singleblock;
909:     pc->ops->applytranspose      = PCApplyTranspose_BJacobi_Singleblock;
910:     pc->ops->setuponblocks       = PCSetUpOnBlocks_BJacobi_Singleblock;

912:     PetscMalloc(sizeof(PC_BJacobi_Singleblock),&bjac);
913:     PetscLogObjectMemory(pc,sizeof(PC_BJacobi_Singleblock));
914:     bjac->x      = x;
915:     bjac->y      = y;

917:     PetscMalloc(sizeof(SLES),&jac->sles);
918:     jac->sles[0] = sles;
919:     jac->data    = (void*)bjac;
920:   } else {
921:     sles = jac->sles[0];
922:     bjac = (PC_BJacobi_Singleblock *)jac->data;
923:   }
924:   if (jac->use_true_local) {
925:     SLESSetOperators(sles,mat,pmat,pc->flag);
926:   }  else {
927:     SLESSetOperators(sles,pmat,pmat,pc->flag);
928:   }
929:   return(0);
930: }

932: /* ---------------------------------------------------------------------------------------------*/

934: #undef __FUNCT__  
935: #define __FUNCT__ "PCDestroy_BJacobi_Multiblock"
936: int PCDestroy_BJacobi_Multiblock(PC pc)
937: {
938:   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
939:   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
940:   int                   i,ierr;

943:   MatDestroyMatrices(jac->n_local,&bjac->pmat);
944:   if (jac->use_true_local) {
945:     MatDestroyMatrices(jac->n_local,&bjac->mat);
946:   }

948:   /*
949:         If the on-processor block had to be generated via a MatGetDiagonalBlock()
950:      that creates a copy (as, for example, MPIBDiag matrices do), free that space here
951:   */
952:   if (jac->tp_mat) {
953:     MatDestroy(jac->tp_mat);
954:   }
955:   if (jac->tp_pmat) {
956:     MatDestroy(jac->tp_pmat);
957:   }

959:   for (i=0; i<jac->n_local; i++) {
960:     SLESDestroy(jac->sles[i]);
961:     VecDestroy(bjac->x[i]);
962:     VecDestroy(bjac->y[i]);
963:     ISDestroy(bjac->is[i]);
964:   }
965:   PetscFree(jac->sles);
966:   PetscFree(bjac->x);
967:   PetscFree(bjac->starts);
968:   PetscFree(bjac->is);
969:   PetscFree(bjac);
970:   if (jac->l_lens) {PetscFree(jac->l_lens);}
971:   if (jac->g_lens) {PetscFree(jac->g_lens);}
972:   PetscFree(jac);
973:   return(0);
974: }

976: #undef __FUNCT__  
977: #define __FUNCT__ "PCSetUpOnBlocks_BJacobi_Multiblock"
978: int PCSetUpOnBlocks_BJacobi_Multiblock(PC pc)
979: {
980:   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
981:   int                   ierr,i,n_local = jac->n_local;
982:   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;

985:   for (i=0; i<n_local; i++) {
986:     SLESSetUp(jac->sles[i],bjac->x[i],bjac->y[i]);
987:   }
988:   return(0);
989: }

991: /*
992:       Preconditioner for block Jacobi 
993: */
994: #undef __FUNCT__  
995: #define __FUNCT__ "PCApply_BJacobi_Multiblock"
996: int PCApply_BJacobi_Multiblock(PC pc,Vec x,Vec y)
997: {
998:   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
999:   int                   ierr,its,i,n_local = jac->n_local;
1000:   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
1001:   PetscScalar           *xin,*yin;
1002:   static PetscTruth     flag = PETSC_TRUE;
1003:   static int            SUBSlesSolve;

1006:   if (flag) {
1007:     PetscLogEventRegister(&SUBSlesSolve,"SubSlesSolve",SLES_COOKIE);
1008:     flag = PETSC_FALSE;
1009:   }
1010:   VecGetArray(x,&xin);
1011:   VecGetArray(y,&yin);
1012:   for (i=0; i<n_local; i++) {
1013:     /* 
1014:        To avoid copying the subvector from x into a workspace we instead 
1015:        make the workspace vector array point to the subpart of the array of
1016:        the global vector.
1017:     */
1018:     VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);
1019:     VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);

1021:     PetscLogEventBegin(SUBSlesSolve,jac->sles[i],bjac->x[i],bjac->y[i],0);
1022:     SLESSolve(jac->sles[i],bjac->x[i],bjac->y[i],&its);
1023:     PetscLogEventEnd(SUBSlesSolve,jac->sles[i],bjac->x[i],bjac->y[i],0);
1024:   }
1025:   VecRestoreArray(x,&xin);
1026:   VecRestoreArray(y,&yin);
1027:   return(0);
1028: }

1030: /*
1031:       Transpose application of the preconditioner for block Jacobi 
1032: */
1033: #undef __FUNCT__  
1034: #define __FUNCT__ "PCApplyTranspose_BJacobi_Multiblock"
1035: int PCApplyTranspose_BJacobi_Multiblock(PC pc,Vec x,Vec y)
1036: {
1037:   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
1038:   int                   ierr,its,i,n_local = jac->n_local;
1039:   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
1040:   PetscScalar           *xin,*yin;
1041:   static PetscTruth     flag = PETSC_TRUE;
1042:   static int            SUBSlesSolve;

1045:   if (flag) {
1046:     PetscLogEventRegister(&SUBSlesSolve,"SubSlesSolveTranspose",SLES_COOKIE);
1047:     flag = PETSC_FALSE;
1048:   }
1049:   VecGetArray(x,&xin);
1050:   VecGetArray(y,&yin);
1051:   for (i=0; i<n_local; i++) {
1052:     /* 
1053:        To avoid copying the subvector from x into a workspace we instead 
1054:        make the workspace vector array point to the subpart of the array of
1055:        the global vector.
1056:     */
1057:     VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);
1058:     VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);

1060:     PetscLogEventBegin(SUBSlesSolve,jac->sles[i],bjac->x[i],bjac->y[i],0);
1061:     SLESSolveTranspose(jac->sles[i],bjac->x[i],bjac->y[i],&its);
1062:     PetscLogEventEnd(SUBSlesSolve,jac->sles[i],bjac->x[i],bjac->y[i],0);
1063:   }
1064:   VecRestoreArray(x,&xin);
1065:   VecRestoreArray(y,&yin);
1066:   return(0);
1067: }

1069: #undef __FUNCT__  
1070: #define __FUNCT__ "PCSetUp_BJacobi_Multiblock"
1071: static int PCSetUp_BJacobi_Multiblock(PC pc,Mat mat,Mat pmat)
1072: {
1073:   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
1074:   int                    ierr,m,n_local,N,M,start,i;
1075:   char                   *prefix,*pprefix,*mprefix;
1076:   SLES                   sles;
1077:   Vec                    x,y;
1078:   PC_BJacobi_Multiblock  *bjac = (PC_BJacobi_Multiblock*)jac->data;
1079:   KSP                    subksp;
1080:   PC                     subpc;
1081:   IS                     is;
1082:   MatReuse               scall = MAT_REUSE_MATRIX;

1085:   MatGetLocalSize(pc->pmat,&M,&N);

1087:   n_local = jac->n_local;

1089:   if (jac->use_true_local) {
1090:     if (mat->type != pmat->type) SETERRQ(PETSC_ERR_ARG_INCOMP,"Matrices not of same type");
1091:   }

1093:   /* set default direct solver with no Krylov method */
1094:   if (!pc->setupcalled) {
1095:     scall                  = MAT_INITIAL_MATRIX;
1096:     pc->ops->destroy       = PCDestroy_BJacobi_Multiblock;
1097:     pc->ops->apply         = PCApply_BJacobi_Multiblock;
1098:     pc->ops->applytranspose= PCApplyTranspose_BJacobi_Multiblock;
1099:     pc->ops->setuponblocks = PCSetUpOnBlocks_BJacobi_Multiblock;

1101:     PetscMalloc(sizeof(PC_BJacobi_Multiblock),&bjac);
1102:     PetscLogObjectMemory(pc,sizeof(PC_BJacobi_Multiblock));
1103:     PetscMalloc(n_local*sizeof(SLES),&jac->sles);
1104:     PetscLogObjectMemory(pc,n_local*sizeof(SLES));
1105:     PetscMalloc(2*n_local*sizeof(Vec),&bjac->x);
1106:     PetscLogObjectMemory(pc,2*n_local*sizeof(Vec));
1107:     bjac->y      = bjac->x + n_local;
1108:     PetscMalloc(n_local*sizeof(int),&bjac->starts); /* starts[] holds integer offsets into the local vector */
1109:     PetscLogObjectMemory(pc,n_local*sizeof(int));
1110: 
1111:     jac->data    = (void*)bjac;
1112:     PetscMalloc(n_local*sizeof(IS),&bjac->is);
1113:     PetscLogObjectMemory(pc,n_local*sizeof(IS));

1115:     start = 0;
1116:     for (i=0; i<n_local; i++) {
1117:       SLESCreate(PETSC_COMM_SELF,&sles);
1118:       PetscLogObjectParent(pc,sles);
1119:       SLESGetKSP(sles,&subksp);
1120:       KSPSetType(subksp,KSPPREONLY);
1121:       SLESGetPC(sles,&subpc);
1122:       PCSetType(subpc,PCILU);
1123:       PCGetOptionsPrefix(pc,&prefix);
1124:       SLESSetOptionsPrefix(sles,prefix);
1125:       SLESAppendOptionsPrefix(sles,"sub_");
1126:       SLESSetFromOptions(sles);

1128:       m = jac->l_lens[i];

1130:       /*
1131:       The reason we need to generate these vectors is to serve 
1132:       as the right-hand side and solution vector for the solve on the 
1133:       block. We do not need to allocate space for the vectors since
1134:       that is provided via VecPlaceArray() just before the call to 
1135:       SLESSolve() on the block.

1137:       */
1138:       VecCreateSeqWithArray(PETSC_COMM_SELF,m,PETSC_NULL,&x);
1139:       VecCreateSeqWithArray(PETSC_COMM_SELF,m,PETSC_NULL,&y);
1140:       PetscLogObjectParent(pc,x);
1141:       PetscLogObjectParent(pc,y);
1142:       bjac->x[i]      = x;
1143:       bjac->y[i]      = y;
1144:       bjac->starts[i] = start;
1145:       jac->sles[i]    = sles;

1147:       ISCreateStride(PETSC_COMM_SELF,m,start,1,&is);
1148:       bjac->is[i] = is;
1149:       PetscLogObjectParent(pc,is);

1151:       start += m;
1152:     }
1153:   } else {
1154:     bjac = (PC_BJacobi_Multiblock*)jac->data;
1155:     /* 
1156:        Destroy the blocks from the previous iteration
1157:     */
1158:     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
1159:       MatDestroyMatrices(n_local,&bjac->pmat);
1160:       if (jac->use_true_local) {
1161:         MatDestroyMatrices(n_local,&bjac->mat);
1162:       }
1163:       scall = MAT_INITIAL_MATRIX;
1164:     }
1165:   }

1167:   MatGetSubMatrices(pmat,n_local,bjac->is,bjac->is,scall,&bjac->pmat);
1168:   if (jac->use_true_local) {
1169:     PetscObjectGetOptionsPrefix((PetscObject)mat,&mprefix);
1170:     MatGetSubMatrices(mat,n_local,bjac->is,bjac->is,scall,&bjac->mat);
1171:   }
1172:   /* Return control to the user so that the submatrices can be modified (e.g., to apply
1173:      different boundary conditions for the submatrices than for the global problem) */
1174:   PCModifySubMatrices(pc,n_local,bjac->is,bjac->is,bjac->pmat,pc->modifysubmatricesP);

1176:   PetscObjectGetOptionsPrefix((PetscObject)pmat,&pprefix);
1177:   for (i=0; i<n_local; i++) {
1178:     PetscLogObjectParent(pc,bjac->pmat[i]);
1179:     PetscObjectSetOptionsPrefix((PetscObject)bjac->pmat[i],pprefix);
1180:     if (jac->use_true_local) {
1181:       PetscLogObjectParent(pc,bjac->mat[i]);
1182:       PetscObjectSetOptionsPrefix((PetscObject)bjac->mat[i],mprefix);
1183:       SLESSetOperators(jac->sles[i],bjac->mat[i],bjac->pmat[i],pc->flag);
1184:     } else {
1185:       SLESSetOperators(jac->sles[i],bjac->pmat[i],bjac->pmat[i],pc->flag);
1186:     }
1187:   }

1189:   return(0);
1190: }