Actual source code: damg.c
/*$Id: damg.c,v 1.35 2001/07/20 21:25:12 bsmith Exp $*/

#include "petscda.h"
#include "petscsles.h"
#include "petscmg.h"

/*
   Code for almost fully managing multigrid/multi-level linear solvers for DA grids
*/
#undef __FUNCT__
#define __FUNCT__ "DMMGCreate"
/*@C
    DMMGCreate - Creates a DA based multigrid solver object. This allows one to
      easily implement MG methods on regular grids.

    Collective on MPI_Comm

    Input Parameters:
+   comm - the processors that will share the grids and solution process
.   nlevels - number of multigrid levels
-   user - an optional user context

    Output Parameter:
.   dmmg - the context

    Notes:
      To provide a different user context for each level, call DMMGSetUser() after calling
      this routine.

    Level: advanced

.seealso DMMGDestroy()

@*/
int DMMGCreate(MPI_Comm comm,int nlevels,void *user,DMMG **dmmg)
{
  int        ierr,i;
  DMMG       *p;
  PetscTruth galerkin;

  PetscOptionsGetInt(0,"-dmmg_nlevels",&nlevels,PETSC_IGNORE);
  PetscOptionsHasName(0,"-dmmg_galerkin",&galerkin);

  PetscMalloc(nlevels*sizeof(DMMG),&p);
  for (i=0; i<nlevels; i++) {
    ierr = PetscNew(struct _p_DMMG,&p[i]);
    ierr = PetscMemzero(p[i],sizeof(struct _p_DMMG));
    p[i]->nlevels  = nlevels - i;
    p[i]->comm     = comm;
    p[i]->user     = user;
    p[i]->galerkin = galerkin;
  }
  *dmmg = p;
  return(0);
}
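
/*
   Sample usage (a sketch assembled from the routines in this file; the DA
   creation arguments and the callback names ComputeRHS/ComputeMatrix are
   illustrative, not taken from this source):

      DMMG *dmmg;
      DA   da;
      int  ierr;

      ierr = DMMGCreate(PETSC_COMM_WORLD,3,PETSC_NULL,&dmmg);CHKERRQ(ierr);
      ierr = DACreate2d(PETSC_COMM_WORLD,DA_NONPERIODIC,DA_STENCIL_STAR,5,5,
                        PETSC_DECIDE,PETSC_DECIDE,1,1,PETSC_NULL,PETSC_NULL,&da);CHKERRQ(ierr);
      ierr = DMMGSetDM(dmmg,(DM)da);CHKERRQ(ierr);
      ierr = DADestroy(da);CHKERRQ(ierr);              (DMMGSetDM() keeps its own reference)
      ierr = DMMGSetSLES(dmmg,ComputeRHS,ComputeMatrix);CHKERRQ(ierr);
      ierr = DMMGSolve(dmmg);CHKERRQ(ierr);
      ierr = DMMGDestroy(dmmg);CHKERRQ(ierr);
*/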
#undef __FUNCT__
#define __FUNCT__ "DMMGSetUseGalerkinCoarse"
/*@C
    DMMGSetUseGalerkinCoarse - Causes the DMMG to use R*A_f*R^T to form
      the coarser matrices from the finest grid matrix

    Collective on DMMG

    Input Parameter:
.   dmmg - the context

    Level: advanced

.seealso DMMGCreate()

@*/
int DMMGSetUseGalerkinCoarse(DMMG* dmmg)
{
  int i,nlevels;

  if (!dmmg) SETERRQ(1,"Passing null as DMMG");
  nlevels = dmmg[0]->nlevels;

  for (i=0; i<nlevels; i++) {
    dmmg[i]->galerkin = PETSC_TRUE;
  }
  return(0);
}
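
/*
   A minimal sketch of enabling Galerkin coarse grid operators; call this
   after DMMGCreate() and before DMMGSetSLES(), since DMMGSetSLES() reads the
   galerkin flag. The -dmmg_galerkin option checked in DMMGCreate() has the
   same effect.

      ierr = DMMGCreate(PETSC_COMM_WORLD,3,PETSC_NULL,&dmmg);CHKERRQ(ierr);
      ierr = DMMGSetUseGalerkinCoarse(dmmg);CHKERRQ(ierr);
*/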
#undef __FUNCT__
#define __FUNCT__ "DMMGDestroy"
/*@C
    DMMGDestroy - Destroys a DA based multigrid solver object.

    Collective on DMMG

    Input Parameter:
.   dmmg - the context

    Level: advanced

.seealso DMMGCreate()

@*/
int DMMGDestroy(DMMG *dmmg)
{
  int ierr,i,nlevels;

  if (!dmmg) SETERRQ(1,"Passing null as DMMG");
  nlevels = dmmg[0]->nlevels;

  for (i=1; i<nlevels; i++) {
    if (dmmg[i]->R) {MatDestroy(dmmg[i]->R);}
  }
  for (i=0; i<nlevels; i++) {
    if (dmmg[i]->dm) {DMDestroy(dmmg[i]->dm);}
    if (dmmg[i]->x) {VecDestroy(dmmg[i]->x);}
    if (dmmg[i]->b) {VecDestroy(dmmg[i]->b);}
    if (dmmg[i]->r) {VecDestroy(dmmg[i]->r);}
    if (dmmg[i]->work1) {VecDestroy(dmmg[i]->work1);}
    if (dmmg[i]->w) {VecDestroy(dmmg[i]->w);}
    if (dmmg[i]->work2) {VecDestroy(dmmg[i]->work2);}
    if (dmmg[i]->lwork1) {VecDestroy(dmmg[i]->lwork1);}
    if (dmmg[i]->B && dmmg[i]->B != dmmg[i]->J) {MatDestroy(dmmg[i]->B);}
    if (dmmg[i]->J) {MatDestroy(dmmg[i]->J);}
    if (dmmg[i]->Rscale) {VecDestroy(dmmg[i]->Rscale);}
    if (dmmg[i]->fdcoloring) {MatFDColoringDestroy(dmmg[i]->fdcoloring);}
    if (dmmg[i]->sles) {SLESDestroy(dmmg[i]->sles);}
    if (dmmg[i]->snes) {PetscObjectDestroy((PetscObject)dmmg[i]->snes);}
    PetscFree(dmmg[i]);
  }
  PetscFree(dmmg);
  return(0);
}
#undef __FUNCT__
#define __FUNCT__ "DMMGSetDM"
/*@C
    DMMGSetDM - Sets the coarse grid information for the grids

    Collective on DMMG

    Input Parameters:
+   dmmg - the context
-   dm - the DA or VecPack object

    Level: advanced

.seealso DMMGCreate(), DMMGDestroy()

@*/
int DMMGSetDM(DMMG *dmmg,DM dm)
{
  int ierr,i,nlevels;

  if (!dmmg) SETERRQ(1,"Passing null as DMMG");
  nlevels = dmmg[0]->nlevels;

  /* Create DA data structure for all the levels */
  dmmg[0]->dm = dm;
  PetscObjectReference((PetscObject)dm);
  for (i=1; i<nlevels; i++) {
    DMRefine(dmmg[i-1]->dm,dmmg[i]->comm,&dmmg[i]->dm);
  }
  DMMGSetUp(dmmg);
  return(0);
}
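
/*
   A sketch of supplying the coarse grid (the DA sizes are illustrative):
   DMMGSetDM() refines the given coarse DM nlevels-1 times via DMRefine(),
   so with the default refinement factor of 2 a 5x5 coarse DA and 3 levels
   yield 9x9 and 17x17 grids on the finer levels.

      DA da;
      ierr = DACreate2d(PETSC_COMM_WORLD,DA_NONPERIODIC,DA_STENCIL_STAR,5,5,
                        PETSC_DECIDE,PETSC_DECIDE,1,1,PETSC_NULL,PETSC_NULL,&da);CHKERRQ(ierr);
      ierr = DMMGSetDM(dmmg,(DM)da);CHKERRQ(ierr);
      ierr = DADestroy(da);CHKERRQ(ierr);
*/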
#undef __FUNCT__
#define __FUNCT__ "DMMGSetUp"
/*@C
    DMMGSetUp - Prepares the DMMG to solve a system

    Collective on DMMG

    Input Parameter:
.   dmmg - the context

    Level: advanced

.seealso DMMGCreate(), DMMGDestroy(), DMMG, DMMGSetSNES(), DMMGSetSLES(), DMMGSolve()

@*/
int DMMGSetUp(DMMG *dmmg)
{
  int ierr,i,nlevels = dmmg[0]->nlevels;

  /* Create work vectors for each level */
  for (i=0; i<nlevels; i++) {
    DMCreateGlobalVector(dmmg[i]->dm,&dmmg[i]->x);
    VecDuplicate(dmmg[i]->x,&dmmg[i]->b);
    VecDuplicate(dmmg[i]->x,&dmmg[i]->r);
  }

  /* Create interpolation/restriction between levels */
  for (i=1; i<nlevels; i++) {
    DMGetInterpolation(dmmg[i-1]->dm,dmmg[i]->dm,&dmmg[i]->R,PETSC_NULL);
  }

  return(0);
}
#undef __FUNCT__
#define __FUNCT__ "DMMGSolve"
/*@C
    DMMGSolve - Actually solves the (non)linear system defined with the DMMG

    Collective on DMMG

    Input Parameter:
.   dmmg - the context

    Level: advanced

    Notes: For linear (SLES) problems this may be called more than once; it reuses the same
      matrices but recomputes the right hand side for each new solve. Call DMMGSetSLES()
      to generate new matrices.

.seealso DMMGCreate(), DMMGDestroy(), DMMG, DMMGSetSNES(), DMMGSetSLES(), DMMGSetUp()

@*/
int DMMGSolve(DMMG *dmmg)
{
  int        i,ierr,nlevels = dmmg[0]->nlevels;
  PetscTruth gridseq,vecmonitor,flg;
  KSP        ksp;

  PetscOptionsHasName(0,"-dmmg_grid_sequence",&gridseq);
  PetscOptionsHasName(0,"-dmmg_vecmonitor",&vecmonitor);
  if (gridseq) {
    if (dmmg[0]->initialguess) {
      (*dmmg[0]->initialguess)(dmmg[0]->snes,dmmg[0]->x,dmmg[0]);
      if (dmmg[0]->sles) {
        SLESGetKSP(dmmg[0]->sles,&ksp);
        KSPSetInitialGuessNonzero(ksp,PETSC_TRUE);
      }
    }
    for (i=0; i<nlevels-1; i++) {
      (*dmmg[i]->solve)(dmmg,i);
      if (vecmonitor) {
        VecView(dmmg[i]->x,PETSC_VIEWER_DRAW_(dmmg[i]->comm));
      }
      MatInterpolate(dmmg[i+1]->R,dmmg[i]->x,dmmg[i+1]->x);
      if (dmmg[i+1]->sles) {
        SLESGetKSP(dmmg[i+1]->sles,&ksp);
        KSPSetInitialGuessNonzero(ksp,PETSC_TRUE);
      }
    }
  } else {
    if (dmmg[nlevels-1]->initialguess) {
      (*dmmg[nlevels-1]->initialguess)(dmmg[nlevels-1]->snes,dmmg[nlevels-1]->x,dmmg[nlevels-1]);
      if (dmmg[nlevels-1]->sles) {
        SLESGetKSP(dmmg[nlevels-1]->sles,&ksp);
        KSPSetInitialGuessNonzero(ksp,PETSC_TRUE);
      }
    }
  }
  (*DMMGGetFine(dmmg)->solve)(dmmg,nlevels-1);
  if (vecmonitor) {
    VecView(dmmg[nlevels-1]->x,PETSC_VIEWER_DRAW_(dmmg[nlevels-1]->comm));
  }

  PetscOptionsHasName(PETSC_NULL,"-dmmg_view",&flg);
  if (flg && !PetscPreLoadingOn) {
    DMMGView(dmmg,PETSC_VIEWER_STDOUT_(dmmg[0]->comm));
  }
  return(0);
}
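
/*
   A sketch of reusing the hierarchy for several right hand sides, per the
   Notes above: each DMMGSolve() call recomputes the right hand side through
   the rhs callback but reuses the matrices set up by the last DMMGSetSLES().

      ierr = DMMGSetSLES(dmmg,ComputeRHS,ComputeMatrix);CHKERRQ(ierr);
      ierr = DMMGSolve(dmmg);CHKERRQ(ierr);
      ierr = DMMGSolve(dmmg);CHKERRQ(ierr);        (same matrices, freshly computed b)
*/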
#undef __FUNCT__
#define __FUNCT__ "DMMGSolveSLES"
int DMMGSolveSLES(DMMG *dmmg,int level)
{
  int ierr,its;

  (*dmmg[level]->rhs)(dmmg[level],dmmg[level]->b);
  if (dmmg[level]->matricesset) {
    SLESSetOperators(dmmg[level]->sles,dmmg[level]->J,dmmg[level]->J,SAME_NONZERO_PATTERN);
    dmmg[level]->matricesset = PETSC_FALSE;
  }
  SLESSolve(dmmg[level]->sles,dmmg[level]->b,dmmg[level]->x,&its);
  return(0);
}
/*
    Sets each of the linear solvers to use multigrid
*/
#undef __FUNCT__
#define __FUNCT__ "DMMGSetUpLevel"
int DMMGSetUpLevel(DMMG *dmmg,SLES sles,int nlevels)
{
  int         ierr,i;
  PC          pc;
  PetscTruth  ismg,monitor,ismf,isshell,ismffd;
  SLES        lsles; /* solver internal to the multigrid preconditioner */
  MPI_Comm    *comms,comm;
  PetscViewer ascii;
  KSP         ksp;

  if (!dmmg) SETERRQ(1,"Passing null as DMMG");

  PetscOptionsHasName(PETSC_NULL,"-dmmg_ksp_monitor",&monitor);
  if (monitor) {
    SLESGetKSP(sles,&ksp);
    PetscObjectGetComm((PetscObject)ksp,&comm);
    PetscViewerASCIIOpen(comm,"stdout",&ascii);
    PetscViewerASCIISetTab(ascii,1+dmmg[0]->nlevels-nlevels);
    KSPSetMonitor(ksp,KSPDefaultMonitor,ascii,(int(*)(void*))PetscViewerDestroy);
  }

  /* use fgmres on outer iteration by default */
  SLESGetKSP(sles,&ksp);
  KSPSetType(ksp,KSPFGMRES);

  ierr = SLESGetPC(sles,&pc);
  ierr = PCSetType(pc,PCMG);
  ierr = PetscMalloc(nlevels*sizeof(MPI_Comm),&comms);
  for (i=0; i<nlevels; i++) {
    comms[i] = dmmg[i]->comm;
  }
  ierr = MGSetLevels(pc,nlevels,comms);
  ierr = PetscFree(comms);
  MGSetType(pc,MGFULL);

  PetscTypeCompare((PetscObject)pc,PCMG,&ismg);
  if (ismg) {

    /* set solvers for each level */
    for (i=0; i<nlevels; i++) {
      MGGetSmoother(pc,i,&lsles);
      SLESSetOperators(lsles,dmmg[i]->J,dmmg[i]->B,DIFFERENT_NONZERO_PATTERN);
      MGSetX(pc,i,dmmg[i]->x);
      MGSetRhs(pc,i,dmmg[i]->b);
      MGSetR(pc,i,dmmg[i]->r);
      MGSetResidual(pc,i,MGDefaultResidual,dmmg[i]->J);
      if (monitor) {
        SLESGetKSP(lsles,&ksp);
        PetscObjectGetComm((PetscObject)ksp,&comm);
        PetscViewerASCIIOpen(comm,"stdout",&ascii);
        PetscViewerASCIISetTab(ascii,1+dmmg[0]->nlevels-i);
        KSPSetMonitor(ksp,KSPDefaultMonitor,ascii,(int(*)(void*))PetscViewerDestroy);
      }
      /* If using a matrix-free multiply and an explicit matrix was not provided to build
         the preconditioner, then no preconditioner may be used
      */
      PetscTypeCompare((PetscObject)dmmg[i]->B,MATSHELL,&isshell);
      PetscTypeCompare((PetscObject)dmmg[i]->B,MATDAAD,&ismf);
      PetscTypeCompare((PetscObject)dmmg[i]->B,MATMFFD,&ismffd);
      if (isshell || ismf || ismffd) {
        PC lpc;
        SLESGetPC(lsles,&lpc);
        PCSetType(lpc,PCNONE);
      }
    }

    /* Set interpolation/restriction between levels */
    for (i=1; i<nlevels; i++) {
      MGSetInterpolate(pc,i,dmmg[i]->R);
      MGSetRestriction(pc,i,dmmg[i]->R);
    }
  }
  return(0);
}
extern int MatSeqAIJPtAP(Mat,Mat,Mat*);
#undef __FUNCT__
#define __FUNCT__ "DMMGSetSLES"
/*@C
    DMMGSetSLES - Sets the linear solver object that will use the grid hierarchy

    Collective on DMMG

    Input Parameters:
+   dmmg - the context
.   func - function to compute linear system matrix on each grid level
-   rhs - function to compute right hand side on each level (need only work on the finest grid
      if you do not use grid sequencing)

    Level: advanced

    Notes: For linear problems this may be called more than once; it reevaluates the matrices
      on each call. Call DMMGSolve() directly several times to solve with the same matrix but
      different right hand sides.

.seealso DMMGCreate(), DMMGDestroy(), DMMGSetDM(), DMMGSolve()

@*/
int DMMGSetSLES(DMMG *dmmg,int (*rhs)(DMMG,Vec),int (*func)(DMMG,Mat))
{
  int        ierr,i,nlevels;
  PetscTruth galerkin;

  if (!dmmg) SETERRQ(1,"Passing null as DMMG");
  nlevels  = dmmg[0]->nlevels;
  galerkin = dmmg[0]->galerkin;

  if (galerkin) {
    DMGetMatrix(dmmg[nlevels-1]->dm,MATMPIAIJ,&dmmg[nlevels-1]->B);
    (*func)(dmmg[nlevels-1],dmmg[nlevels-1]->B);
    for (i=nlevels-2; i>-1; i--) {
      MatSeqAIJPtAP(dmmg[i+1]->B,dmmg[i+1]->R,&dmmg[i]->B);
    }
  }

  if (!dmmg[0]->sles) {
    /* create solvers for each level */
    for (i=0; i<nlevels; i++) {

      if (!dmmg[i]->B && !galerkin) {
        DMGetMatrix(dmmg[i]->dm,MATMPIAIJ,&dmmg[i]->B);
      }
      if (!dmmg[i]->J) {
        dmmg[i]->J = dmmg[i]->B;
      }

      SLESCreate(dmmg[i]->comm,&dmmg[i]->sles);
      DMMGSetUpLevel(dmmg,dmmg[i]->sles,i+1);
      SLESSetFromOptions(dmmg[i]->sles);
      dmmg[i]->solve = DMMGSolveSLES;
      dmmg[i]->rhs   = rhs;
    }
  }

  /* evaluate matrix on each level */
  for (i=0; i<nlevels; i++) {
    if (!galerkin) {
      (*func)(dmmg[i],dmmg[i]->J);
    }
    dmmg[i]->matricesset = PETSC_TRUE;
  }

  for (i=0; i<nlevels-1; i++) {
    SLESSetOptionsPrefix(dmmg[i]->sles,"dmmg_");
  }

  return(0);
}
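
/*
   A sketch of the callback signatures DMMGSetSLES() expects; the bodies are
   placeholders, not from this source. The rhs callback fills the level's
   right hand side vector; the matrix callback assembles the level's operator.

      int ComputeRHS(DMMG dmmg,Vec b)
      {
        PetscScalar one = 1.0;
        int         ierr;
        ierr = VecSet(&one,b);CHKERRQ(ierr);
        return 0;
      }

      int ComputeMatrix(DMMG dmmg,Mat J)
      {
        ... use dmmg->dm to assemble the stencil for this level into J,
            finishing with MatAssemblyBegin()/MatAssemblyEnd() ...
        return 0;
      }

      ierr = DMMGSetSLES(dmmg,ComputeRHS,ComputeMatrix);CHKERRQ(ierr);
*/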
#undef __FUNCT__
#define __FUNCT__ "DMMGView"
/*@C
    DMMGView - prints information on a DA based multi-level preconditioner

    Collective on DMMG and PetscViewer

    Input Parameters:
+   dmmg - the context
-   viewer - the viewer

    Level: advanced

.seealso DMMGCreate(), DMMGDestroy()

@*/
int DMMGView(DMMG *dmmg,PetscViewer viewer)
{
  int        ierr,i,nlevels,flag;
  MPI_Comm   comm;
  PetscTruth isascii;

  if (!dmmg) SETERRQ(1,"Passing null as DMMG");
  nlevels = dmmg[0]->nlevels;

  PetscObjectGetComm((PetscObject)viewer,&comm);
  MPI_Comm_compare(comm,dmmg[0]->comm,&flag);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) {
    SETERRQ(PETSC_ERR_ARG_NOTSAMECOMM,"Different communicators in the DMMG and the PetscViewer");
  }

  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);
  if (isascii) {
    PetscViewerASCIIPrintf(viewer,"DMMG Object with %d levels\n",nlevels);
  }
  for (i=0; i<nlevels; i++) {
    PetscViewerASCIIPushTab(viewer);
    DMView(dmmg[i]->dm,viewer);
    PetscViewerASCIIPopTab(viewer);
  }
  if (isascii) {
    PetscViewerASCIIPrintf(viewer,"%s Object on finest level\n",dmmg[nlevels-1]->sles ? "SLES" : "SNES");
    if (dmmg[nlevels-1]->galerkin) {
      PetscViewerASCIIPrintf(viewer,"Using Galerkin R^T*A*R process to compute coarser matrices\n");
    }
  }
  if (dmmg[nlevels-1]->sles) {
    SLESView(dmmg[nlevels-1]->sles,viewer);
  } else {
    /* use of PetscObjectView() means we do not have to link with libpetscsnes if SNES is not being used */
    PetscObjectView((PetscObject)dmmg[nlevels-1]->snes,viewer);
  }
  return(0);
}