Actual source code: ex4.c
1: /*$Id: ex4.c,v 1.29 2001/08/10 03:34:17 bsmith Exp $*/
3: /* Program usage: mpirun -np <procs> ex4 [-help] [all PETSc options] */
5: static char help[] ="Solves a simple time-dependent linear PDE (the heat equation).n
6: Input parameters include:n
7: -m <points>, where <points> = number of grid pointsn
8: -time_dependent_rhs : Treat the problem as having a time-dependent right-hand siden
9: -debug : Activate debugging printoutsn
10: -nox : Deactivate x-window graphicsnn";
12: /*
13: Concepts: TS^time-dependent linear problems
14: Concepts: TS^heat equation
15: Concepts: TS^diffusion equation
16: Processors: n
17: */
19: /* ------------------------------------------------------------------------
21: This program solves the one-dimensional heat equation (also called the
22: diffusion equation),
23: u_t = u_xx,
24: on the domain 0 <= x <= 1, with the boundary conditions
25: u(t,0) = 0, u(t,1) = 0,
26: and the initial condition
27: u(0,x) = sin(6*pi*x) + 3*sin(2*pi*x).
28: This is a linear, second-order, parabolic equation.
30: We discretize the right-hand side using finite differences with
31: uniform grid spacing h:
32: u_xx = (u_{i+1} - 2u_{i} + u_{i-1})/(h^2)
33: We then demonstrate time evolution using the various TS methods by
34: running the program via
35: mpirun -np <procs> ex4 -ts_type <timestepping solver>
37: We compare the approximate solution with the exact solution, given by
38: u_exact(x,t) = exp(-36*pi*pi*t) * sin(6*pi*x) +
39: 3*exp(-4*pi*pi*t) * sin(2*pi*x)
41: Notes:
42: This code demonstrates the TS solver interface to two variants of
43: linear problems, u_t = f(u,t), namely
44: - time-dependent f: f(u,t) is a function of t
45: - time-independent f: f(u,t) is simply f(u)
47: The uniprocessor version of this code is ts/examples/tutorials/ex3.c
49: ------------------------------------------------------------------------- */
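/*
   For illustration: with the finite difference stencil above, the spatially
   discretized problem is the linear ODE system (method of lines)

        u_t = A u,

   where each interior row of A carries the stencil (1/h^2)[ 1  -2  1 ] and
   the two boundary rows are handled separately. This is exactly the matrix
   assembled in RHSMatrixHeat() below.
*/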
51: /*
52: Include "petscda.h" so that we can use distributed arrays (DAs) to manage
53: the parallel grid. Include "petscts.h" so that we can use TS solvers.
54: Note that this file automatically includes:
55: petsc.h       - base PETSc routines      petscvec.h    - vectors
56: petscsys.h    - system routines          petscmat.h    - matrices
57: petscis.h     - index sets               petscksp.h    - Krylov subspace methods
58: petscviewer.h - viewers                  petscpc.h     - preconditioners
59: petscsles.h   - linear solvers           petscsnes.h   - nonlinear solvers
60: */
62: #include "petscda.h"
63: #include "petscts.h"
65: /*
66: User-defined application context - contains data needed by the
67: application-provided call-back routines.
68: */
69: typedef struct {
70: MPI_Comm comm; /* communicator */
71: DA da; /* distributed array data structure */
72: Vec localwork; /* local ghosted work vector */
73: Vec u_local; /* local ghosted approximate solution vector */
74: Vec solution; /* global exact solution vector */
75: int m; /* total number of grid points */
76: PetscReal h; /* mesh width h = 1/(m-1) */
77: PetscTruth debug; /* flag (1 indicates activation of debugging printouts) */
78: PetscViewer viewer1,viewer2; /* viewers for the solution and error */
79: PetscReal norm_2,norm_max; /* error norms */
80: } AppCtx;
82: /*
83: User-defined routines
84: */
85: extern int InitialConditions(Vec,AppCtx*);
86: extern int RHSMatrixHeat(TS,PetscReal,Mat*,Mat*,MatStructure*,void*);
87: extern int Monitor(TS,int,PetscReal,Vec,void*);
88: extern int ExactSolution(PetscReal,Vec,AppCtx*);
90: #undef __FUNCT__
91: #define __FUNCT__ "main"
92: int main(int argc,char **argv)
93: {
94: AppCtx appctx; /* user-defined application context */
95: TS ts; /* timestepping context */
96: Mat A; /* matrix data structure */
97: Vec u; /* approximate solution vector */
98: PetscReal time_total_max = 100.0; /* default max total time */
99: int time_steps_max = 100; /* default max timesteps */
100: PetscDraw draw; /* drawing context */
101: int ierr,steps,size,m;
102: PetscReal dt,ftime;
103: PetscTruth flg;
105: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
106: Initialize program and set problem parameters
107: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
108:
109: PetscInitialize(&argc,&argv,(char*)0,help);
110: appctx.comm = PETSC_COMM_WORLD;
112: m = 60;
113: PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
114: PetscOptionsHasName(PETSC_NULL,"-debug",&appctx.debug);
115: appctx.m = m;
116: appctx.h = 1.0/(m-1.0);
117: appctx.norm_2 = 0.0;
118: appctx.norm_max = 0.0;
119: MPI_Comm_size(PETSC_COMM_WORLD,&size);
120: PetscPrintf(PETSC_COMM_WORLD,"Solving a linear TS problem, number of processors = %dn",size);
122: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
123: Create vector data structures
124: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
125: /*
126: Create distributed array (DA) to manage parallel grid and vectors
127: and to set up the ghost point communication pattern. There are M
128: total grid values spread equally among all the processors.
129: */
131: DACreate1d(PETSC_COMM_WORLD,DA_NONPERIODIC,m,1,1,PETSC_NULL,&appctx.da);
133: /*
134: Extract global and local vectors from DA; we use these to store the
135: approximate solution. Then duplicate these for remaining vectors that
136: have the same types.
137: */
138: DACreateGlobalVector(appctx.da,&u);
139: DACreateLocalVector(appctx.da,&appctx.u_local);
141: /*
142: Create local work vector for use in evaluating right-hand-side function;
143: create global work vector for storing exact solution.
144: */
145: VecDuplicate(appctx.u_local,&appctx.localwork);
146: VecDuplicate(u,&appctx.solution);
148: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
149: Set up displays to show graphs of the solution and error
150: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
152: PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",80,380,400,160,&appctx.viewer1);
153: PetscViewerDrawGetDraw(appctx.viewer1,0,&draw);
154: PetscDrawSetDoubleBuffer(draw);
155: PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",80,0,400,160,&appctx.viewer2);
156: PetscViewerDrawGetDraw(appctx.viewer2,0,&draw);
157: PetscDrawSetDoubleBuffer(draw);
159: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
160: Create timestepping solver context
161: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
163: TSCreate(PETSC_COMM_WORLD,&ts);
164: TSSetProblemType(ts,TS_LINEAR);
166: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
167: Set optional user-defined monitoring routine
168: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
170: TSSetMonitor(ts,Monitor,&appctx,PETSC_NULL);
172: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
174: Create matrix data structure; set matrix evaluation routine.
175: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
177: MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,m,m,&A);
178: MatSetFromOptions(A);
180: PetscOptionsHasName(PETSC_NULL,"-time_dependent_rhs",&flg);
181: if (flg) {
182: /*
183: For linear problems with a time-dependent f(u,t) in the equation
184: u_t = f(u,t), the user provides the discretized right-hand-side
185: as a time-dependent matrix.
186: */
187: TSSetRHSMatrix(ts,A,A,RHSMatrixHeat,&appctx);
188: } else {
189: /*
190: For linear problems with a time-independent f(u) in the equation
191: u_t = f(u), the user provides the discretized right-hand-side
192: as a matrix only once, and then sets a null matrix evaluation
193: routine.
194: */
195: MatStructure A_structure;
196: RHSMatrixHeat(ts,0.0,&A,&A,&A_structure,&appctx);
197: TSSetRHSMatrix(ts,A,A,PETSC_NULL,&appctx);
198: }
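/*
   For example, typical invocations exercising the two branches above might
   look like (sketch only; "euler" and "beuler" are standard TS type names):

       mpirun -np 2 ex4 -ts_type beuler
       mpirun -np 2 ex4 -time_dependent_rhs -ts_type euler -debug

   The first run uses the matrix computed once here; the second forces
   RHSMatrixHeat() to be re-evaluated at each timestep.
*/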
200: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
201: Set solution vector and initial timestep
202: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
204: dt = appctx.h*appctx.h/2.0;
205: TSSetInitialTimeStep(ts,0.0,dt);
206: TSSetSolution(ts,u);
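/*
   Note: dt = h^2/2 is the classical stability limit for explicit (forward
   Euler) timestepping of u_t = u_xx with this stencil; implicit methods such
   as backward Euler remain stable for larger steps, which can be chosen via
   TSSetInitialTimeStep() above.
*/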
208: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
209: Customize timestepping solver:
210: - Set the solution method to be the Backward Euler method.
211: - Set timestepping duration info
212: Then set runtime options, which can override these defaults.
213: For example,
214: -ts_max_steps <maxsteps> -ts_max_time <maxtime>
215: to override the defaults set by TSSetDuration().
216: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
218: TSSetDuration(ts,time_steps_max,time_total_max);
219: TSSetFromOptions(ts);
221: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
222: Solve the problem
223: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
225: /*
226: Evaluate initial conditions
227: */
228: InitialConditions(u,&appctx);
230: /*
231: Run the timestepping solver
232: */
233: TSStep(ts,&steps,&ftime);
235: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
236: View timestepping solver info
237: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
239: PetscPrintf(PETSC_COMM_WORLD,"avg. error (2 norm) = %g, avg. error (max norm) = %gn",
240: appctx.norm_2/steps,appctx.norm_max/steps);
241: TSView(ts,PETSC_VIEWER_STDOUT_WORLD);
243: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
244: Free work space. All PETSc objects should be destroyed when they
245: are no longer needed.
246: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
248: TSDestroy(ts);
249: MatDestroy(A);
250: VecDestroy(u);
251: PetscViewerDestroy(appctx.viewer1);
252: PetscViewerDestroy(appctx.viewer2);
253: VecDestroy(appctx.localwork);
254: VecDestroy(appctx.solution);
255: VecDestroy(appctx.u_local);
256: DADestroy(appctx.da);
258: /*
259: Always call PetscFinalize() before exiting a program. This routine
260: - finalizes the PETSc libraries as well as MPI
261: - provides summary and diagnostic information if certain runtime
262: options are chosen (e.g., -log_summary).
263: */
264: PetscFinalize();
265: return 0;
266: }
267: /* --------------------------------------------------------------------- */
268: #undef __FUNCT__
269: #define __FUNCT__ "InitialConditions"
270: /*
271: InitialConditions - Computes the solution at the initial time.
273: Input Parameter:
274: u - uninitialized solution vector (global)
275: appctx - user-defined application context
277: Output Parameter:
278: u - vector with solution at initial time (global)
279: */
280: int InitialConditions(Vec u,AppCtx *appctx)
281: {
282: PetscScalar *u_localptr,h = appctx->h;
283: int i,mybase,myend,ierr;
285: /*
286: Determine starting point of each processor's range of
287: grid values.
288: */
289: VecGetOwnershipRange(u,&mybase,&myend);
291: /*
292: Get a pointer to vector data.
293: - For default PETSc vectors, VecGetArray() returns a pointer to
294: the data array. Otherwise, the routine is implementation dependent.
295: - You MUST call VecRestoreArray() when you no longer need access to
296: the array.
297: - Note that the Fortran interface to VecGetArray() differs from the
298: C version. See the users manual for details.
299: */
300: VecGetArray(u,&u_localptr);
302: /*
303: We initialize the solution array by simply writing the solution
304: directly into the array locations. Alternatively, we could use
305: VecSetValues() or VecSetValuesLocal().
306: */
307: for (i=mybase; i<myend; i++) {
308: u_localptr[i-mybase] = PetscSinScalar(PETSC_PI*i*6.*h) + 3.*PetscSinScalar(PETSC_PI*i*2.*h);
309: }
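/*
   For illustration, the same initialization could instead be written with
   VecSetValues() (a sketch; note the extra assembly calls it would require):

       PetscScalar val;
       for (i=mybase; i<myend; i++) {
         val = PetscSinScalar(PETSC_PI*i*6.*h) + 3.*PetscSinScalar(PETSC_PI*i*2.*h);
         VecSetValues(u,1,&i,&val,INSERT_VALUES);
       }
       VecAssemblyBegin(u); VecAssemblyEnd(u);
*/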
311: /*
312: Restore vector
313: */
314: VecRestoreArray(u,&u_localptr);
316: /*
317: Print debugging information if desired
318: */
319: if (appctx->debug) {
320: PetscPrintf(appctx->comm,"initial guess vectorn");
321: VecView(u,PETSC_VIEWER_STDOUT_WORLD);
322: }
324: return 0;
325: }
326: /* --------------------------------------------------------------------- */
327: #undef __FUNCT__
328: #define __FUNCT__ "ExactSolution"
329: /*
330: ExactSolution - Computes the exact solution at a given time.
332: Input Parameters:
333: t - current time
334: solution - vector in which exact solution will be computed
335: appctx - user-defined application context
337: Output Parameter:
338: solution - vector with the newly computed exact solution
339: */
340: int ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
341: {
342: PetscScalar *s_localptr,h = appctx->h,ex1,ex2,sc1,sc2;
343: int i,mybase,myend,ierr;
345: /*
346: Determine starting and ending points of each processor's
347: range of grid values
348: */
349: VecGetOwnershipRange(solution,&mybase,&myend);
351: /*
352: Get a pointer to vector data.
353: */
354: VecGetArray(solution,&s_localptr);
356: /*
357: Simply write the solution directly into the array locations.
358: Alternatively, we could use VecSetValues() or VecSetValuesLocal().
359: */
360: ex1 = exp(-36.*PETSC_PI*PETSC_PI*t); ex2 = exp(-4.*PETSC_PI*PETSC_PI*t);
361: sc1 = PETSC_PI*6.*h; sc2 = PETSC_PI*2.*h;
362: for (i=mybase; i<myend; i++) {
363: s_localptr[i-mybase] = PetscSinScalar(sc1*(PetscReal)i)*ex1 + 3.*PetscSinScalar(sc2*(PetscReal)i)*ex2;
364: }
366: /*
367: Restore vector
368: */
369: VecRestoreArray(solution,&s_localptr);
370: return 0;
371: }
372: /* --------------------------------------------------------------------- */
373: #undef __FUNCT__
374: #define __FUNCT__ "Monitor"
375: /*
376: Monitor - User-provided routine to monitor the solution computed at
377: each timestep. This example plots the solution and computes the
378: error in two different norms.
380: Input Parameters:
381: ts - the timestep context
382: step - the count of the current step (with 0 meaning the
383: initial condition)
384: time - the current time
385: u - the solution at this timestep
386: ctx - the user-provided context for this monitoring routine.
387: In this case we use the application context which contains
388: information about the problem size, workspace and the exact
389: solution.
390: */
391: int Monitor(TS ts,int step,PetscReal time,Vec u,void *ctx)
392: {
393: AppCtx *appctx = (AppCtx*) ctx; /* user-defined application context */
394: int ierr;
395: PetscReal norm_2,norm_max;
396: PetscScalar mone = -1.0;
398: /*
399: View a graph of the current iterate
400: */
401: VecView(u,appctx->viewer2);
403: /*
404: Compute the exact solution
405: */
406: ExactSolution(time,appctx->solution,appctx);
408: /*
409: Print debugging information if desired
410: */
411: if (appctx->debug) {
412: PetscPrintf(appctx->comm,"Computed solution vectorn");
413: VecView(u,PETSC_VIEWER_STDOUT_WORLD);
414: PetscPrintf(appctx->comm,"Exact solution vectorn");
415: VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
416: }
418: /*
419: Compute the 2-norm and max-norm of the error
420: */
421: VecAXPY(&mone,u,appctx->solution);
422: VecNorm(appctx->solution,NORM_2,&norm_2);
423: norm_2 = sqrt(appctx->h)*norm_2;
424: VecNorm(appctx->solution,NORM_MAX,&norm_max);
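/*
   The sqrt(h) factor above turns the discrete 2-norm into an approximation
   of the continuous L2 norm of the error:
       ||e||_L2 ~= sqrt( h * sum_i e_i^2 ) = sqrt(h)*||e||_2
*/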
426: /*
427: PetscPrintf() causes only the first processor in this
428: communicator to print the timestep information.
429: */
430: PetscPrintf(appctx->comm,"Timestep %d: time = %g, 2-norm error = %g, max norm error = %gn",
431: step,time,norm_2,norm_max);
432: appctx->norm_2 += norm_2;
433: appctx->norm_max += norm_max;
435: /*
436: View a graph of the error
437: */
438: VecView(appctx->solution,appctx->viewer1);
440: /*
441: Print debugging information if desired
442: */
443: if (appctx->debug) {
444: PetscPrintf(appctx->comm,"Error vectorn");
445: VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
446: }
448: return 0;
449: }
450: /* --------------------------------------------------------------------- */
451: #undef __FUNCT__
452: #define __FUNCT__ "RHSMatrixHeat"
453: /*
454: RHSMatrixHeat - User-provided routine to compute the right-hand-side
455: matrix for the heat equation.
457: Input Parameters:
458: ts - the TS context
459: t - current time
460: ctx - optional user-defined context, as set by TSSetRHSMatrix()
463: Output Parameters:
464: AA - Jacobian matrix
465: BB - optionally different preconditioning matrix
466: str - flag indicating matrix structure
468: Notes:
469: RHSMatrixHeat computes entries for the locally owned part of the system.
470: - Currently, all PETSc parallel matrix formats are partitioned by
471: contiguous chunks of rows across the processors.
472: - Each processor needs to insert only elements that it owns
473: locally (but any non-local elements will be sent to the
474: appropriate processor during matrix assembly).
475: - Always specify global row and columns of matrix entries when
476: using MatSetValues(); we could alternatively use MatSetValuesLocal().
477: - Here, we set all entries for a particular row at once.
478: - Note that MatSetValues() uses 0-based row and column numbers
479: in Fortran as well as in C.
480: */
481: int RHSMatrixHeat(TS ts,PetscReal t,Mat *AA,Mat *BB,MatStructure *str,void *ctx)
482: {
483: Mat A = *AA; /* Jacobian matrix */
484: AppCtx *appctx = (AppCtx*)ctx; /* user-defined application context */
485: int ierr,i,mstart,mend,idx[3];
486: PetscScalar v[3],stwo = -2./(appctx->h*appctx->h),sone = -.5*stwo;
488: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
489: Compute entries for the locally owned part of the matrix
490: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
492: MatGetOwnershipRange(A,&mstart,&mend);
494: /*
495: Set matrix rows corresponding to boundary data
496: */
498: if (mstart == 0) { /* first processor only */
499: v[0] = 1.0;
500: MatSetValues(A,1,&mstart,1,&mstart,v,INSERT_VALUES);
501: mstart++;
502: }
504: if (mend == appctx->m) { /* last processor only */
505: mend--;
506: v[0] = 1.0;
507: MatSetValues(A,1,&mend,1,&mend,v,INSERT_VALUES);
508: }
510: /*
511: Set matrix rows corresponding to interior data. We construct the
512: matrix one row at a time.
513: */
514: v[0] = sone; v[1] = stwo; v[2] = sone;
515: for (i=mstart; i<mend; i++) {
516: idx[0] = i-1; idx[1] = i; idx[2] = i+1;
517: MatSetValues(A,1,&i,3,idx,v,INSERT_VALUES);
518: }
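/*
   For illustration, with m = 5 (so h = 1/4) the fully assembled matrix is

        [  1    0    0    0    0 ]
        [ s1   s2   s1    0    0 ]
        [  0   s1   s2   s1    0 ]
        [  0    0   s1   s2   s1 ]
        [  0    0    0    0    1 ]

   where s1 = sone = 1/h^2 and s2 = stwo = -2/h^2; the first and last rows
   come from the boundary handling above.
*/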
520: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
521: Complete the matrix assembly process and set some options
522: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
523: /*
524: Assemble matrix, using the 2-step process:
525: MatAssemblyBegin(), MatAssemblyEnd()
526: Computations can be done while messages are in transition
527: by placing code between these two statements.
528: */
529: MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
530: MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
532: /*
533: Set flag to indicate that the Jacobian matrix retains an identical
534: nonzero structure throughout all timestepping iterations (although the
535: values of the entries change). Thus, we can save some work in setting
536: up the preconditioner (e.g., no need to redo symbolic factorization for
537: ILU/ICC preconditioners).
538: - If the nonzero structure of the matrix is different during
539: successive linear solves, then the flag DIFFERENT_NONZERO_PATTERN
540: must be used instead. If you are unsure whether the matrix
541: structure has changed or not, use the flag DIFFERENT_NONZERO_PATTERN.
542: - Caution: If you specify SAME_NONZERO_PATTERN, PETSc
543: believes your assertion and does not check the structure
544: of the matrix. If you erroneously claim that the structure
545: is the same when it actually is not, the new preconditioner
546: will not function correctly. Thus, use this optimization
547: feature with caution!
548: */
549: *str = SAME_NONZERO_PATTERN;
551: /*
552: Set an option to indicate that we will never add a new nonzero location
553: to the matrix. If we do, it will generate an error.
554: */
555: MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR);
557: return 0;
558: }
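/*
   For reference, a typical build-and-run sequence for this example might be
   (assuming the standard PETSc tutorial makefile in this directory):

       make ex4
       mpirun -np 2 ex4 -m 80 -ts_type beuler -ts_max_steps 50

   where -m, -ts_type, and -ts_max_steps are the options discussed above.
*/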