Actual source code: ex7.c  (petsc-3.7.5, 2017-01-01)
static char help[] = "Block Jacobi preconditioner for solving a linear system in parallel with KSP.\n\
The code illustrates the procedures for setting particular block sizes and\n\
for using different linear solvers on the individual blocks.\n\n";
/*
   Note: This example focuses on ways to customize the block Jacobi
   preconditioner. See ex1.c and ex2.c for more detailed comments on
   the basic usage of KSP (including working with matrices and vectors).

   Recall: The block Jacobi method is equivalent to the ASM preconditioner
   with zero overlap.
*/
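/*
   For reference, the equivalent ASM configuration can be selected at runtime
   with options such as
       -pc_type asm -pc_asm_overlap 0
   which should behave like -pc_type bjacobi for this problem.
*/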
/*T
   Concepts: KSP^customizing the block Jacobi preconditioner
   Processors: n
T*/
/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines   petscvec.h - vectors
     petscmat.h    - matrices
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners
*/
#include <petscksp.h>
int main(int argc,char **args)
{
  Vec            x,b,u;      /* approx solution, RHS, exact solution */
  Mat            A;          /* linear system matrix */
  KSP            ksp;        /* KSP context */
  KSP            *subksp;    /* array of local KSP contexts on this processor */
  PC             pc;         /* PC context */
  PC             subpc;      /* PC context for subdomain */
  PetscReal      norm;       /* norm of solution error */
  PetscInt       i,j,Ii,J,*blks,m = 4,n;
  PetscMPIInt    rank,size;
  PetscInt       its,nlocal,first,Istart,Iend;
  PetscScalar    v,one = 1.0,none = -1.0;
  PetscBool      isbjacobi;

  PetscInitialize(&argc,&args,(char*)0,help);
  PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  n = m+2;
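  /*
     The grid is m x n with n = m+2, so the default -m 4 gives a 4 x 6 grid
     and a linear system with m*n = 24 unknowns.
  */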
  /* -------------------------------------------------------------------
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     ------------------------------------------------------------------- */

  /*
     Create and assemble parallel matrix
  */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);
  MatSetFromOptions(A);
  MatMPIAIJSetPreallocation(A,5,NULL,5,NULL);
  MatSeqAIJSetPreallocation(A,5,NULL);
  MatGetOwnershipRange(A,&Istart,&Iend);
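  /*
     Each locally owned row Ii corresponds to grid point (i,j) with i = Ii/n
     and j = Ii - i*n; the loop below fills the standard 5-point stencil
     (4 on the diagonal, -1 for each existing neighbor).  Every entry is set
     exactly once, so ADD_VALUES is equivalent to INSERT_VALUES here.
  */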
  for (Ii=Istart; Ii<Iend; Ii++) {
    v = -1.0; i = Ii/n; j = Ii - i*n;
    if (i>0)   {J = Ii - n; MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);}
    if (i<m-1) {J = Ii + n; MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);}
    if (j>0)   {J = Ii - 1; MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);}
    if (j<n-1) {J = Ii + 1; MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);}
    v = 4.0; MatSetValues(A,1,&Ii,1,&Ii,&v,ADD_VALUES);
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  /*
     Create parallel vectors
  */
  VecCreate(PETSC_COMM_WORLD,&u);
  VecSetSizes(u,PETSC_DECIDE,m*n);
  VecSetFromOptions(u);
  VecDuplicate(u,&b);
  VecDuplicate(b,&x);

  /*
     Set exact solution; then compute right-hand-side vector.
  */
  VecSet(u,one);
  MatMult(A,u,b);
  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp,A,A);

  /*
     Set default preconditioner for this program to be block Jacobi.
     This choice can be overridden at runtime with the option
        -pc_type <type>
  */
  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCBJACOBI);
  /* -------------------------------------------------------------------
                   Define the problem decomposition
     ------------------------------------------------------------------- */

  /*
     Call PCBJacobiSetTotalBlocks() to set the size of each block in the
     preconditioner individually.  This could also be done with the runtime
     option
         -pc_bjacobi_blocks <blocks>
     Also, see the command PCBJacobiSetLocalBlocks() to set the local blocks.

     Note: The default decomposition is 1 block per processor.
  */
  PetscMalloc1(m,&blks);
  for (i=0; i<m; i++) blks[i] = n;
  PCBJacobiSetTotalBlocks(pc,m,blks);
  PetscFree(blks);
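  /*
     A minimal sketch of the per-process alternative mentioned above, assuming
     each process should split its locally owned rows into two blocks (the
     names nloc and lblks are illustrative only):

         PetscInt nloc = 2, lblks[2];
         lblks[0] = (Iend-Istart)/2;  lblks[1] = (Iend-Istart) - lblks[0];
         PCBJacobiSetLocalBlocks(pc,nloc,lblks);
  */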
  /* -------------------------------------------------------------------
               Set the linear solvers for the subblocks
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of most users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     By default, the block Jacobi method uses the same solver on each
     block of the problem.  To set the same solver options on all blocks,
     use the prefix -sub before the usual PC and KSP options, e.g.,
          -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4
  */
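  /*
     For instance, assuming the executable is named ex7, a run such as
         mpiexec -n 2 ./ex7 -sub_pc_type ilu -sub_ksp_type gmres -sub_ksp_rtol 1.e-4
     uses ILU-preconditioned GMRES with a relative tolerance of 1.e-4 on
     every block.
  */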
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Advanced method, setting different solvers for various blocks.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Note that each block's KSP context is completely independent of
     the others, and the full range of uniprocessor KSP options is
     available for each block.  The following section of code is intended
     to be a simple illustration of setting different linear solvers for
     the individual blocks.  These choices are obviously not recommended
     for solving this particular problem.
  */
  PetscObjectTypeCompare((PetscObject)pc,PCBJACOBI,&isbjacobi);
  if (isbjacobi) {
    /*
       Call KSPSetUp() to set the block Jacobi data structures (including
       creation of an internal KSP context for each block).

       Note: KSPSetUp() MUST be called before PCBJacobiGetSubKSP().
    */
    KSPSetUp(ksp);

    /*
       Extract the array of KSP contexts for the local blocks; nlocal is the
       number of blocks on this process and first is the global index of this
       process's first block.
    */
    PCBJacobiGetSubKSP(pc,&nlocal,&first,&subksp);
    /*
       Loop over the local blocks, setting various KSP options
       for each block.
    */
    for (i=0; i<nlocal; i++) {
      KSPGetPC(subksp[i],&subpc);
      if (!rank) {
        if (i%2) {
          PCSetType(subpc,PCILU);
        } else {
          PCSetType(subpc,PCNONE);
          KSPSetType(subksp[i],KSPBCGS);
          KSPSetTolerances(subksp[i],1.e-6,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
        }
      } else {
        PCSetType(subpc,PCJACOBI);
        KSPSetType(subksp[i],KSPGMRES);
        KSPSetTolerances(subksp[i],1.e-6,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
      }
    }
  }
  /* -------------------------------------------------------------------
                      Solve the linear system
     ------------------------------------------------------------------- */

  /*
     Set runtime options
  */
  KSPSetFromOptions(ksp);

  /*
     Solve the linear system
  */
  KSPSolve(ksp,b,x);
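  /*
     Running with the option -ksp_view prints the assembled solver
     configuration, including the KSP and PC types chosen for each block above.
  */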
  /* -------------------------------------------------------------------
                      Check solution and clean up
     ------------------------------------------------------------------- */

  /*
     Check the error
  */
  VecAXPY(x,none,u);
  VecNorm(x,NORM_2,&norm);
  KSPGetIterationNumber(ksp,&its);
  PetscPrintf(PETSC_COMM_WORLD,"Norm of error %g iterations %D\n",(double)norm,its);

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  KSPDestroy(&ksp);
  VecDestroy(&u);  VecDestroy(&x);
  VecDestroy(&b);  MatDestroy(&A);
  PetscFinalize();
  return 0;
}