Actual source code: ex12.c
petsc-3.7.5 2017-01-01
static char help[] = "Solves a linear system in parallel with KSP.\n\
Input parameters include:\n\
  -m <mesh_x> : number of mesh points in x-direction\n\
  -n <mesh_y> : number of mesh points in y-direction\n\n";
/*T
   Concepts: KSP^solving a system of linear equations
   Concepts: KSP^Laplacian, 2d
   Concepts: PC^registering preconditioners
   Processors: n
T*/
/*
   Demonstrates registering a new preconditioner (PC) type.

   Use PCRegister() to register a PC type, whether its code is linked
   into the executable or resides in a dynamic library. Also provide the
   prototype for your PCCreate_XXX() function. In this example we use the
   PETSc implementation of the Jacobi method, PCCreate_Jacobi(), just as
   an example. (A minimal, hypothetical sketch of such a creation routine
   appears below, after the include.)

   See the file src/ksp/pc/impls/jacobi/jacobi.c for details on how to
   write a new PC component.

   See the manual page for PCRegister() for details on how to register a
   method.
*/
/*
   Include "petscksp.h" so that we can use KSP solvers. Note that this file
   automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscksp.h    - Krylov subspace methods
     petscviewer.h - viewers
     petscpc.h     - preconditioners
*/
#include <petscksp.h>

PETSC_EXTERN PetscErrorCode PCCreate_Jacobi(PC);
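
/*
   For illustration only: a minimal, hypothetical sketch of a user-defined
   creation routine, modeled on PCCreate_Jacobi() in jacobi.c. The names
   PCCreate_MyPC and PCApply_MyPC are invented for this sketch and are not
   used by this example; a real implementation would also set entries such
   as pc->ops->setup and pc->ops->destroy as needed. Accessing pc->ops
   requires the private header petsc/private/pcimpl.h.
*/
#include <petsc/private/pcimpl.h>

static PetscErrorCode PCApply_MyPC(PC pc,Vec x,Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCopy(x,y);CHKERRQ(ierr);  /* identity preconditioner: y = x */
  PetscFunctionReturn(0);
}

PETSC_EXTERN PetscErrorCode PCCreate_MyPC(PC pc)
{
  PetscFunctionBegin;
  pc->ops->apply = PCApply_MyPC;      /* hook the apply routine into the PC */
  PetscFunctionReturn(0);
}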
int main(int argc,char **args)
{
  Vec         x,b,u;      /* approx solution, RHS, exact solution */
  Mat         A;          /* linear system matrix */
  KSP         ksp;        /* linear solver context */
  PetscReal   norm;       /* norm of solution error */
  PetscInt    i,j,Ii,J,Istart,Iend,m = 8,n = 7,its;
  PetscScalar v,one = 1.0,neg_one = -1.0;
  PC          pc;         /* preconditioner context */

  PetscInitialize(&argc,&args,(char*)0,help);
  PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL);
  PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime. Also, the parallel partitioning of the matrix can be
     determined by PETSc at runtime.
  */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);
  MatSetFromOptions(A);
  MatSetUp(A);
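
  /*
     For example, because MatSetFromOptions() is called above, the storage
     format can be chosen at run time with a command-line option such as
     -mat_type aij (the default) or -mat_type dense, with no code changes.
  */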
  /*
     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors. Determine which
     rows of the matrix are locally owned.
  */
  MatGetOwnershipRange(A,&Istart,&Iend);
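
  /*
     For instance, with the default 8x7 grid (56 rows) on two processes,
     one process typically owns rows 0..27 and the other rows 28..55.
  */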
  /*
     Set matrix elements for the 2-D, five-point stencil in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global rows and columns of matrix entries.
  */
  for (Ii=Istart; Ii<Iend; Ii++) {
    v = -1.0; i = Ii/n; j = Ii - i*n;   /* global row Ii is grid point (i,j) */
    if (i>0)   {J = Ii - n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}  /* neighbor (i-1,j) */
    if (i<m-1) {J = Ii + n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}  /* neighbor (i+1,j) */
    if (j>0)   {J = Ii - 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}  /* neighbor (i,j-1) */
    if (j<n-1) {J = Ii + 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}  /* neighbor (i,j+1) */
    v = 4.0; MatSetValues(A,1,&Ii,1,&Ii,&v,INSERT_VALUES);                 /* diagonal entry */
  }
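
  /*
     For concreteness, with the default n = 7: global row Ii = 10 is grid
     point (i,j) = (1,3), so the loop above inserts -1 in columns 3, 9, 11,
     and 17 (the four neighbors) and 4 on the diagonal (column 10).
  */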
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transit
     by placing code between these two statements.
  */
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  /*
     Create parallel vectors.
      - When using VecCreate(), VecSetSizes() and VecSetFromOptions(),
        we specify only the vector's global dimension; the parallel
        partitioning is determined at runtime.
      - When solving a linear system, the vectors and matrices MUST
        be partitioned accordingly. PETSc automatically generates
        appropriately partitioned matrices and vectors when MatCreate()
        and VecCreate() are used with the same communicator.
      - Note: We form 1 vector from scratch and then duplicate as needed.
  */
  VecCreate(PETSC_COMM_WORLD,&u);
  VecSetSizes(u,PETSC_DECIDE,m*n);
  VecSetFromOptions(u);
  VecDuplicate(u,&b);
  VecDuplicate(b,&x);
  /*
     Set the exact solution; then compute the right-hand-side vector.
     We use an exact solution with all elements equal to 1.0.
  */
  VecSet(u,one);
  MatMult(A,u,b);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);
  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp,A,A);
  /*
     First register a new PC type with the command PCRegister()
  */
  PCRegister("ourjacobi",PCCreate_Jacobi);
  /*
     Set the PC type to be the new method
  */
  KSPGetPC(ksp,&pc);
  PCSetType(pc,"ourjacobi");
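
  /*
     Because "ourjacobi" was registered above, it can also be selected at
     run time with -pc_type ourjacobi once KSPSetFromOptions() is called.
  */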
  /*
     Set runtime options, e.g.,
         -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
     These options will override those specified above as long as
     KSPSetFromOptions() is called _after_ any other customization
     routines.
  */
  KSPSetFromOptions(ksp);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  KSPSolve(ksp,b,x);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Check the error
  */
  VecAXPY(x,neg_one,u);
  VecNorm(x,NORM_2,&norm);
  KSPGetIterationNumber(ksp,&its);
  /*
     Print convergence information. PetscPrintf() produces a single
     print statement from all processes that share a communicator.
  */
  PetscPrintf(PETSC_COMM_WORLD,"Norm of error %g iterations %D\n",(double)norm,its);
  /*
     Free work space. All PETSc objects should be destroyed when they
     are no longer needed.
  */
  KSPDestroy(&ksp);
  VecDestroy(&u); VecDestroy(&x);
  VecDestroy(&b); MatDestroy(&A);
  /*
     Always call PetscFinalize() before exiting a program. This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_summary).
  */
  PetscFinalize();
  return 0;
}
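
/*
   Example usage (illustrative; adjust the executable name and MPI launcher
   to your build):

       mpiexec -n 2 ./ex12 -m 16 -n 16 -ksp_monitor

   The program prints the 2-norm of the error x - u and the number of
   KSP iterations.
*/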