Actual source code: ex12.c
/*$Id: ex12.c,v 1.24 2001/08/07 21:30:54 bsmith Exp $*/

/* Program usage:  mpirun -np <procs> ex12 [-help] [all PETSc options] */

static char help[] = "Solves a linear system in parallel with SLES.\n\
Input parameters include:\n\
  -m <mesh_x>       : number of mesh points in x-direction\n\
  -n <mesh_n>       : number of mesh points in y-direction\n\n";

/*T
   Concepts: SLES^solving a system of linear equations
   Concepts: SLES^Laplacian, 2d
   Concepts: PC^registering preconditioners
   Processors: n
T*/

/*
   Demonstrates registering a new preconditioner (PC) type.

   To register a PC type whose code is linked into the executable,
   use PCRegister(). To register a PC type in a dynamic library, use
   PCRegisterDynamic().

   Also provide the prototype for your PCCreate_XXX() function. In
   this example we use the PETSc implementation of the Jacobi method,
   PCCreate_Jacobi(), simply as an illustration.

   See the file src/sles/pc/impls/jacobi/jacobi.c for details on how to
   write a new PC component.

   See the manual page for PCRegisterDynamic() for details on how to
   register a method.
*/
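
/*
   For orientation only, a minimal sketch of the shape such a creation
   routine takes. The name PCCreate_MyPC and the elided internals here
   are hypothetical; see jacobi.c, noted above, for a real implementation:

      EXTERN_C_BEGIN
      int PCCreate_MyPC(PC pc)
      {
        ... allocate a private context and attach it to pc ...
        ... fill in the function table: apply, setup, destroy ...
        return 0;
      }
      EXTERN_C_END
*/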

/*
   Include "petscsles.h" so that we can use SLES solvers. Note that this
   file automatically includes:
     petsc.h       - base PETSc routines    petscvec.h - vectors
     petscsys.h    - system routines        petscmat.h - matrices
     petscis.h     - index sets             petscksp.h - Krylov subspace methods
     petscviewer.h - viewers                petscpc.h  - preconditioners
*/
#include "petscsles.h"
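
/*
   EXTERN_C_BEGIN/EXTERN_C_END give PCCreate_Jacobi() C linkage when the
   file is compiled with a C++ compiler, so the registration mechanism
   can refer to the routine by its unmangled name.
*/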
EXTERN_C_BEGIN
extern int PCCreate_Jacobi(PC);
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "main"
int main(int argc,char **args)
{
  Vec         x,b,u;      /* approx solution, RHS, exact solution */
  Mat         A;          /* linear system matrix */
  SLES        sles;       /* linear solver context */
  PetscReal   norm;       /* norm of solution error */
  int         i,j,I,J,Istart,Iend,m = 8,n = 7,its;
  PetscScalar v,one = 1.0,neg_one = -1.0;
  PC          pc;         /* preconditioner context */

  PetscInitialize(&argc,&args,(char *)0,help);
  PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);
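
  /*
     For example, to run on 4 processes with a 16 x 16 mesh, using the
     flags listed in the help string above:
        mpirun -np 4 ex12 -m 16 -n 16
  */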

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime. Also, the parallel partitioning of the matrix is
     determined by PETSc at runtime.
  */
  MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,&A);
  MatSetFromOptions(A);

  /*
     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors. Determine which
     rows of the matrix are locally owned.
  */
  MatGetOwnershipRange(A,&Istart,&Iend);

  /*
     Set matrix elements for the 2-D, five-point stencil in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global rows and columns of matrix entries.
  */
  for (I=Istart; I<Iend; I++) {
    v = -1.0; i = I/n; j = I - i*n;
    if (i>0)   {J = I - n; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    if (i<m-1) {J = I + n; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    if (j>0)   {J = I - 1; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    if (j<n-1) {J = I + 1; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    v = 4.0; MatSetValues(A,1,&I,1,&I,&v,INSERT_VALUES);
  }
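
  /*
     A concrete instance of the indexing above: with the default n = 7,
     global row I = 9 maps to grid point (i,j) = (9/7, 9-7) = (1,2); its
     stencil neighbors are rows I-n = 2, I+n = 16, I-1 = 8, and I+1 = 10,
     each receiving -1.0, with 4.0 on the diagonal.
  */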

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transit
     by placing code between these two statements.
  */
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
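
  /*
     A sketch of that overlap (this example needs no such local work,
     so none appears in the code above):

        MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
        ... purely local computation that does not touch A ...
        MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  */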

  /*
     Create parallel vectors.
      - When using VecCreate(), VecSetSizes() and VecSetFromOptions(),
        we specify only the vector's global dimension; the parallel
        partitioning is determined at runtime.
      - When solving a linear system, the vectors and matrices MUST
        be partitioned accordingly. PETSc automatically generates
        appropriately partitioned matrices and vectors when MatCreate()
        and VecCreate() are used with the same communicator.
      - Note: We form 1 vector from scratch and then duplicate as needed.
  */
  VecCreate(PETSC_COMM_WORLD,&u);
  VecSetSizes(u,PETSC_DECIDE,m*n);
  VecSetFromOptions(u);
  VecDuplicate(u,&b);
  VecDuplicate(b,&x);

  /*
     Set exact solution; then compute the right-hand-side vector.
     We use an exact solution with all elements equal to 1.0, so that
     the error in the computed solution can be measured after the solve.
  */
  VecSet(&one,u);
  MatMult(A,u,b);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create linear solver context
  */
  SLESCreate(PETSC_COMM_WORLD,&sles);

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  SLESSetOperators(sles,A,A,DIFFERENT_NONZERO_PATTERN);
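
  /*
     The final flag describes how the nonzero pattern of the
     preconditioning matrix relates to that of previous solves; since
     this example solves only one system, DIFFERENT_NONZERO_PATTERN is
     a safe default.
  */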

  /*
     First register a new PC type with the command PCRegister()
  */
  PCRegister("ourjacobi",0,"PCCreate_Jacobi",PCCreate_Jacobi);

  /*
     Set the PC type to be the new method
  */
  SLESGetPC(sles,&pc);
  PCSetType(pc,"ourjacobi");
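
  /*
     Once registered, the new type can also be selected at runtime like
     any built-in preconditioner, e.g., with -pc_type ourjacobi.
  */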

  /*
     Set runtime options, e.g.,
         -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
     These options will override those specified above as long as
     SLESSetFromOptions() is called _after_ any other customization
     routines.
  */
  SLESSetFromOptions(sles);
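
  /*
     For example, to monitor the residual and tighten the convergence
     tolerance while keeping the preconditioner registered above:
        mpirun -np 4 ex12 -ksp_monitor -ksp_rtol 1.e-7
  */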

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                        Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  SLESSolve(sles,b,x,&its);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                        Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Check the error
  */
  VecAXPY(&neg_one,u,x);
  VecNorm(x,NORM_2,&norm);

  /*
     Optionally scale the norm by sqrt(1.0/((m+1)*(n+1))) to approximate
     the discrete L2 norm, so that values are comparable across mesh
     refinements:
  */
  /* norm *= sqrt(1.0/((m+1)*(n+1))); */

  /*
     Print convergence information. PetscPrintf() produces a single
     print statement from all processes that share a communicator.
  */
  PetscPrintf(PETSC_COMM_WORLD,"Norm of error %A iterations %d\n",norm,its);

  /*
     Free work space. All PETSc objects should be destroyed when they
     are no longer needed.
  */
  SLESDestroy(sles);
  VecDestroy(u); VecDestroy(x);
  VecDestroy(b); MatDestroy(A);

  /*
     Always call PetscFinalize() before exiting a program. This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_summary).
  */
  PetscFinalize();
  return 0;
}