Actual source code: ex4.c
/*$Id: ex4.c,v 1.53 2001/08/07 03:04:00 balay Exp $*/
static char help[] = "Uses a different preconditioner matrix and linear system matrix in the SLES solvers.\n\
Note that different storage formats\n\
can be used for the different matrices.\n\n";
/*T
   Concepts: SLES^different matrices for linear system and preconditioner;
   Processors: n
T*/
/*
  Include "petscsles.h" so that we can use SLES solvers.  Note that this file
  automatically includes:
     petsc.h       - base PETSc routines   petscvec.h - vectors
     petscsys.h    - system routines       petscmat.h - matrices
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners
*/
#include "petscsles.h"
#undef __FUNCT__
#define __FUNCT__ "main"
int main(int argc,char **args)
{
  SLES        sles;     /* linear solver context */
  Mat         A,B;      /* linear system matrix, preconditioning matrix */
  PetscRandom rctx;     /* random number generator context */
  Vec         x,b,u;    /* approx solution, RHS, exact solution */
  Vec         tmp;      /* work vector */
  PetscScalar v,one = 1.0,scale = 0.0;
  int         i,j,m = 15,n = 17,its,I,J,ierr,Istart,Iend;
  PetscInitialize(&argc,&args,(char *)0,help);
  PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);
  PetscOptionsGetScalar(PETSC_NULL,"-scale",&scale,PETSC_NULL);
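  /*
     m and n are the number of grid points in each direction of the
     underlying m x n grid (see the stencil loop below); they and the
     noise scaling 'scale' can be overridden at runtime, e.g. with
     -m 30 -n 40 -scale 0.1.
  */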
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.  Also, create a different
         preconditioner matrix.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create the linear system matrix (A).
      - Here we use a block diagonal matrix format (MATMPIBDIAG) and
        specify only the global size.  The parallel partitioning of
        the matrix will be determined at runtime by PETSc.
  */
  MatCreateMPIBDiag(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,m*n,
                    0,1,PETSC_NULL,PETSC_NULL,&A);
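  /*
     A note on the arguments (based on the MPIBDiag interface of this
     PETSc generation; consult the MatCreateMPIBDiag man page to confirm):
     PETSC_DECIDE lets PETSc choose the local row count, 0 and 1 are the
     number of block diagonals and the block size, and the two PETSC_NULL
     arguments let PETSc determine the diagonals and allocate their
     storage as values are set.
  */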
  /*
     Create a different preconditioner matrix (B).  This is usually
     done to form a cheaper (or sparser) preconditioner matrix
     compared to the linear system matrix.
      - Here we use MatCreate(), so that the matrix format and
        parallel partitioning will be determined at runtime.
  */
  MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,&B);
  MatSetFromOptions(B);
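  /*
     Because B's format is taken from the options database, the
     preconditioner matrix type can be chosen at runtime, e.g. with
     -mat_type mpiaij (the matrix-type option of this PETSc generation),
     independently of A's block diagonal format.
  */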
  /*
     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.  Determine which
     rows of the matrix are locally owned.
  */
  MatGetOwnershipRange(A,&Istart,&Iend);
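  /*
     Istart is the first locally owned row and Iend is one past the
     last, so each process loops over the half-open range [Istart,Iend).
  */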
  /*
     Set entries within the two matrices
  */
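  /*
     Global row I corresponds to grid point (i,j) = (I/n, I mod n) on the
     m x n grid.  A receives the full 5-point stencil (off-diagonal entries
     -1.0, diagonal 5.0), while B receives only the north/south couplings
     and a diagonal of 3.0, making it a cheaper approximation of A.
  */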
  for (I=Istart; I<Iend; I++) {
    v = -1.0; i = I/n; j = I - i*n;
    if (i>0) {
      J = I - n;
      MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);
      MatSetValues(B,1,&I,1,&J,&v,INSERT_VALUES);
    }
    if (i<m-1) {
      J = I + n;
      MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);
      MatSetValues(B,1,&I,1,&J,&v,INSERT_VALUES);
    }
    if (j>0) {
      J = I - 1;
      MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);
    }
    if (j<n-1) {
      J = I + 1;
      MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);
    }
    v = 5.0; MatSetValues(A,1,&I,1,&I,&v,INSERT_VALUES);
    v = 3.0; MatSetValues(B,1,&I,1,&I,&v,INSERT_VALUES);
  }
  /*
     Assemble the preconditioner matrix (B), using the 2-step process
       MatAssemblyBegin(), MatAssemblyEnd()
     Note that computations can be done while messages are in
     transit by placing code between these two statements.
  */
  MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
  for (I=Istart; I<Iend; I++) {
    v = -0.5; i = I/n;
    if (i>1) {
      J = I - (n+1); MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);
    }
    if (i<m-2) {
      J = I + n + 1; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);
    }
  }
  MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
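  /*
     The loop above demonstrates the overlap just described: additional
     entries are inserted into A while B's assembly messages are in
     flight.  These extra diagonals also give A a nonzero pattern that
     differs from B's, which is why DIFFERENT_NONZERO_PATTERN is passed
     to SLESSetOperators() below.
  */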
  /*
     Assemble the linear system matrix (A)
  */
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  /*
     Create parallel vectors.
      - When using VecSetSizes(), we specify only the vector's global
        dimension; the parallel partitioning is determined at runtime.
      - Note: We form 1 vector from scratch and then duplicate as needed.
  */
  VecCreate(PETSC_COMM_WORLD,&b);
  VecSetSizes(b,PETSC_DECIDE,m*n);
  VecSetFromOptions(b);
  VecDuplicate(b,&u);
  VecDuplicate(b,&x);
  /*
     Set the exact solution vector: u = 1, plus random noise scaled by
     'scale' (zero by default, so u is exactly 1 unless -scale is given)
  */
  VecSet(&one,u);
  VecDuplicate(u,&tmp);
  PetscRandomCreate(PETSC_COMM_WORLD,RANDOM_DEFAULT,&rctx);
  VecSetRandom(rctx,tmp);
  PetscRandomDestroy(rctx);
  VecAXPY(&scale,tmp,u);
  VecDestroy(tmp);
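  /*
     In the calling conventions of this PETSc generation, VecSet(&one,u)
     sets every entry of u to 1.0, VecSetRandom(rctx,tmp) fills tmp with
     random values, and VecAXPY(&scale,tmp,u) computes u = u + scale*tmp.
  */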
  /*
     Compute the right-hand-side vector b = A*u, so that u is the
     known exact solution of the system
  */
  MatMult(A,u,b);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create linear solver context
  */
  SLESCreate(PETSC_COMM_WORLD,&sles);
  /*
     Set operators.  Note that we use different matrices to define the
     linear system and to precondition it.
  */
  SLESSetOperators(sles,A,B,DIFFERENT_NONZERO_PATTERN);
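  /*
     The final flag describes how the preconditioner matrix's nonzero
     structure relates to that of the previous solve.  DIFFERENT_NONZERO_PATTERN
     forces the preconditioner data structures to be rebuilt; in a sequence
     of solves where the pattern does not change, SAME_NONZERO_PATTERN
     (or SAME_PRECONDITIONER) allows reuse.
  */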
  /*
     Set runtime options (e.g., -ksp_type <type> -pc_type <type>)
  */
  SLESSetFromOptions(sles);
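  /*
     A typical invocation might look like the following (a sketch; the
     exact launcher syntax depends on the MPI installation):
         mpirun -np 2 ex4 -ksp_type gmres -pc_type jacobi -scale 0.1
  */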
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  SLESSolve(sles,b,x,&its);
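  /*
     The iteration count is returned in 'its'.  One could report it with,
     for example (not part of the original code):
         PetscPrintf(PETSC_COMM_WORLD,"Number of iterations = %d\n",its);
  */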
  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  SLESDestroy(sles);  VecDestroy(u);
  MatDestroy(B);      VecDestroy(x);
  MatDestroy(A);      VecDestroy(b);
  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_summary).
  */
  PetscFinalize();
  return 0;
}