Actual source code: ex23.c
/*$Id: ex23.c,v 1.11 2001/08/07 21:30:54 bsmith Exp $*/

/* Program usage:  mpirun ex23 [-help] [all PETSc options] */

static char help[] = "Solves a tridiagonal linear system.\n\n";

/*T
   Concepts: SLES^basic parallel example;
   Processors: n
T*/

/*
  Include "petscsles.h" so that we can use SLES solvers.  Note that this file
  automatically includes:
     petsc.h       - base PETSc routines   petscvec.h - vectors
     petscsys.h    - system routines       petscmat.h - matrices
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners

  Note: The corresponding uniprocessor example is ex1.c
*/
#include "petscsles.h"
#undef __FUNCT__
#define __FUNCT__ "main"
int main(int argc,char **args)
{
  Vec         x, b, u;      /* approx solution, RHS, exact solution */
  Mat         A;            /* linear system matrix */
  SLES        sles;         /* linear solver context */
  PC          pc;           /* preconditioner context */
  KSP         ksp;          /* Krylov subspace method context */
  PetscReal   norm;         /* norm of solution error */
  int         ierr,i,n = 10,col[3],its,rstart,rend,nlocal;
  PetscScalar neg_one = -1.0,one = 1.0,value[3];

  PetscInitialize(&argc,&args,(char *)0,help);
  PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create vectors.  Note that we form 1 vector from scratch and
     then duplicate as needed.  For this simple case let PETSc decide how
     many elements of the vector are stored on each processor.  The second
     argument to VecSetSizes() below causes PETSc to decide.
  */
  VecCreate(PETSC_COMM_WORLD,&x);
  VecSetSizes(x,PETSC_DECIDE,n);
  VecSetFromOptions(x);
  VecDuplicate(x,&b);
  VecDuplicate(x,&u);
  /* Identify the starting and ending mesh points on each
     processor for the interior part of the mesh. We let PETSc decide
     above. */
  VecGetOwnershipRange(x,&rstart,&rend);
  VecGetLocalSize(x,&nlocal);
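  /*
     VecGetOwnershipRange() returns the half-open range [rstart,rend) of
     entries owned by this processor, so nlocal here equals rend - rstart.
  */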
  /*
     Create matrix.  When using MatCreate(), the matrix format can
     be specified at runtime.

     Performance tuning note:  For problems of substantial size,
     preallocation of matrix memory is crucial for attaining good
     performance.  Since preallocation is not possible via the generic
     matrix creation routine MatCreate(), for practical problems we
     recommend instead using the creation routine for a particular matrix
     format, e.g.,
         MatCreateMPIAIJ()  - parallel AIJ (compressed sparse row)
         MatCreateMPIBAIJ() - parallel block AIJ
     See the matrix chapter of the users manual for details.

     We pass in nlocal as the "local" size of the matrix to force it
     to have the same parallel layout as the vector created above.
  */
  MatCreate(PETSC_COMM_WORLD,nlocal,nlocal,n,n,&A);
  MatSetFromOptions(A);
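  /*
     Illustration only (not executed in this example): with the
     format-specific routine MatCreateMPIAIJ(), the nonzeros of this
     tridiagonal matrix could be preallocated directly, e.g. roughly
     3 nonzeros per row in the diagonal block and 1 in the off-diagonal
     block:

       MatCreateMPIAIJ(PETSC_COMM_WORLD,nlocal,nlocal,n,n,
                       3,PETSC_NULL,1,PETSC_NULL,&A);

     The appropriate preallocation counts are problem dependent.
  */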
  /*
     Assemble matrix.

     The linear system is distributed across the processors by
     chunks of contiguous rows, which correspond to contiguous
     sections of the mesh on which the problem is discretized.
     For matrix assembly, each processor contributes entries for
     the part that it owns locally.
  */
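  /*
     The first and last rows of the tridiagonal matrix are set explicitly
     below, on whichever processors happen to own them; the remaining
     (interior) rows are filled in by the loop that follows.
  */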
  if (!rstart) {
    rstart = 1;
    i = 0; col[0] = 0; col[1] = 1; value[0] = 2.0; value[1] = -1.0;
    MatSetValues(A,1,&i,2,col,value,INSERT_VALUES);
  }
  if (rend == n) {
    rend = n-1;
    i = n-1; col[0] = n-2; col[1] = n-1; value[0] = -1.0; value[1] = 2.0;
    MatSetValues(A,1,&i,2,col,value,INSERT_VALUES);
  }

  /* Set entries corresponding to the mesh interior */
  value[0] = -1.0; value[1] = 2.0; value[2] = -1.0;
  for (i=rstart; i<rend; i++) {
    col[0] = i-1; col[1] = i; col[2] = i+1;
    MatSetValues(A,1,&i,3,col,value,INSERT_VALUES);
  }

  /* Assemble the matrix */
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
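  /*
     For illustration, with n = 4 the assembled matrix is the standard
     [-1 2 -1] tridiagonal operator:

         [  2  -1   0   0 ]
         [ -1   2  -1   0 ]
         [  0  -1   2  -1 ]
         [  0   0  -1   2 ]
  */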
  /*
     Set exact solution; then compute right-hand-side vector.
  */
  VecSet(&one,u);
  MatMult(A,u,b);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
              Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create linear solver context
  */
  SLESCreate(PETSC_COMM_WORLD,&sles);

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  SLESSetOperators(sles,A,A,DIFFERENT_NONZERO_PATTERN);

  /*
     Set linear solver defaults for this problem (optional).
     - By extracting the KSP and PC contexts from the SLES context,
       we can then directly call any KSP and PC routines to set
       various options.
     - The following four statements are optional; all of these
       parameters could alternatively be specified at runtime via
       SLESSetFromOptions();
  */
  SLESGetKSP(sles,&ksp);
  SLESGetPC(sles,&pc);
  PCSetType(pc,PCJACOBI);
  KSPSetTolerances(ksp,1.e-7,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
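  /*
     For comparison, roughly the same settings could instead be selected
     at runtime with options such as -pc_type jacobi -ksp_rtol 1.e-7,
     picked up by SLESSetFromOptions() below.
  */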
  /*
    Set runtime options, e.g.,
        -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
    These options will override those specified above as long as
    SLESSetFromOptions() is called _after_ any other customization
    routines.
  */
  SLESSetFromOptions(sles);
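  /*
     A typical invocation (the option values here are only examples) is

       mpirun -np 2 ex23 -n 100 -ksp_type bcgs -ksp_monitor

     which runs on two processors with a 100-point mesh, a BiCGStab
     Krylov method, and residual monitoring.
  */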
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Solve linear system
  */
  SLESSolve(sles,b,x,&its);

  /*
     View solver info; we could instead use the option -sles_view to
     print this info to the screen at the conclusion of SLESSolve().
  */
  SLESView(sles,PETSC_VIEWER_STDOUT_WORLD);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Check the error
  */
  VecAXPY(&neg_one,u,x);
  ierr = VecNorm(x,NORM_2,&norm);CHKERRQ(ierr);
  PetscPrintf(PETSC_COMM_WORLD,"Norm of error %A, Iterations %d\n",norm,its);

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  VecDestroy(x); VecDestroy(u);
  VecDestroy(b); MatDestroy(A);
  SLESDestroy(sles);
  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_summary).
  */
  PetscFinalize();
  return 0;
}