Actual source code: ex2f.F

!
! "$Id: ex2f.F,v 1.35 2001/10/03 03:11:29 balay Exp $";
!/*T
!   Concepts: TS^time-dependent nonlinear problems
!   Processors: n
!T*/
!
!  ------------------------------------------------------------------------
!
!   This program solves a simple time-dependent nonlinear PDE using implicit
!   timestepping:
!                                    u * u_xx
!                              u_t = ---------
!                                    2*(t+1)^2
!
!             u(0,x) = 1 + x*x; u(t,0) = t + 1; u(t,1) = 2*t + 2
!
!   The exact solution is u(t,x) = (1 + x*x) * (1 + t).
!
!   Note that since the solution is linear in time and quadratic in x,
!   the finite difference scheme actually computes the "exact" solution.
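!   (This is because the centered difference (u(i+1)-2*u(i)+u(i-1))/h^2
!   is exact for functions quadratic in x, and backward Euler is exact
!   when the solution is linear in t.)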
!
!   We use the backward Euler method.
!
!  --------------------------------------------------------------------------

      program main
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                    Include files
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Each routine within this program uses the include file 'ex2f.h',
!  which itself includes the various PETSc include files as well as
!  problem-specific data in several common blocks.
!
!  This program uses CPP for preprocessing, as indicated by the use of
!  PETSc include files in the directory petsc/include/finclude.  This
!  convention enables use of the CPP preprocessor, which allows the use
!  of the #include statements that define PETSc objects and variables.
!
!  Use of the conventional Fortran include statements is also supported.
!  In this case, the PETSc include files are located in the directory
!  petsc/include/foldinclude.
!
!  Since one must be very careful to include each file no more than once
!  in a Fortran routine, application programmers must explicitly list
!  each file needed for the various PETSc components within their
!  program (unlike the C/C++ interface).
!
!  See the Fortran section of the PETSc users manual for details.
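!
!  As an illustrative sketch (not the verbatim contents of ex2f.h),
!  such an include file typically begins with lines like
!      #include "include/finclude/petsc.h"
!      #include "include/finclude/petscvec.h"
!      #include "include/finclude/petscmat.h"
!      #include "include/finclude/petscts.h"
!  followed by declarations of the problem-specific common blocks
!  (e.g., h, m, debug, rank, size, comm, and the work vectors).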

#include "ex2f.h"

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ts             - timestepping solver
!     A              - Jacobian matrix context
!     u_local        - local vector
!     u              - approximate solution vector
!     ftime          - final time
!     time_total_max - default total max time
!     time_steps_max - default max timesteps
!
!  Note that vectors are declared as PETSc "Vec" objects.  These vectors
!  are mathematical objects that contain more than just an array of
!  double precision numbers. I.e., vectors in PETSc are not just
!        double precision x(*).
!  However, local vector data can be easily accessed via VecGetArray().
!  See the Fortran section of the PETSc users manual for details.
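!  (InitialConditions() below illustrates the VecGetArray()/
!  VecRestoreArray() usage pattern.)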

      TS               ts
      Vec              u
      Mat              A
      integer          flg,ierr
      integer          time_steps_max,steps
      double precision dt,ftime,time_total_max

!  Note: Any user-defined Fortran routines (such as RHSFunction)
!  MUST be declared as external.

      external Monitor,RHSFunction
      external InitialConditions,RHSJacobian

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      comm = PETSC_COMM_WORLD
      call MPI_Comm_size(comm,size,ierr)
      call MPI_Comm_rank(comm,rank,ierr)

!  Initialize problem parameters

      time_steps_max = 1000
      time_total_max = 100.0d0
      m              = 60
      debug          = 0
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-debug',debug,ierr)
      zero_d0 = 0.0d0
      one_d0  = 1.0d0
      two_d0  = 2.0d0
      four_d0 = 4.0d0
      h       = one_d0/(m-one_d0)
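!     (with m equally spaced grid points on [0,1], the mesh width is
!      h = 1/(m-1); grid point i is located at x = (i-1)*h)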

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create vector data structures
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create distributed array (DA) to manage parallel grid and vectors.
!  Set up the ghost point communication pattern.  There are m total
!  grid values spread equally among all the processors.

      call DACreate1d(PETSC_COMM_WORLD,DA_NONPERIODIC,m,1,1,
     &     PETSC_NULL_INTEGER,da,ierr)

!  Extract global and local vectors from DA; then duplicate for remaining
!  vectors that are the same type.

      call DACreateGlobalVector(da,u,ierr)
      call DACreateLocalVector(da,u_local,ierr)

!  Make local work vector for evaluating right-hand-side function
      call VecDuplicate(u_local,localwork,ierr)

!  Make global work vector for storing exact solution
      call VecDuplicate(u,solution,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create timestepping solver context; set callback routine for
!  right-hand-side function evaluation.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call TSCreate(comm,ts,ierr)
      call TSSetProblemType(ts,TS_NONLINEAR,ierr)
      call TSSetRHSFunction(ts,RHSFunction,PETSC_NULL_OBJECT,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Set optional user-defined monitoring routine
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call TSSetMonitor(ts,Monitor,PETSC_NULL_OBJECT,
     &                  PETSC_NULL_FUNCTION,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  For nonlinear problems, the user can provide a Jacobian evaluation
!  routine (or use a finite differencing approximation).
!
!  Create matrix data structure; set Jacobian evaluation routine.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call MatCreate(comm,PETSC_DECIDE,PETSC_DECIDE,m,m,A,ierr)
      call MatSetFromOptions(A,ierr)
      call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-fdjac',flg,ierr)
      if (flg .eq. 1) then
         call SetCRoutineFromFortran(ts,A,A,ierr)
      else
         call TSSetRHSJacobian(ts,A,A,RHSJacobian,PETSC_NULL_OBJECT,    &
     &        ierr)
      endif

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Set solution vector and initial timestep
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      dt = h/two_d0
      call TSSetInitialTimeStep(ts,0.d0,dt,ierr)
      call TSSetSolution(ts,u,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Customize timestepping solver:
!     - Set the solution method to be the Backward Euler method.
!     - Set timestepping duration info.
!  Then set runtime options, which can override these defaults.
!  For example,
!     -ts_max_steps <maxsteps> -ts_max_time <maxtime>
!  overrides the defaults set by TSSetDuration().
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
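!
!  An illustrative invocation (the MPI launcher and executable name
!  depend on your installation):
!      mpirun -np 2 ex2f -ts_max_steps 10 -m 120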

      call TSSetType(ts,TS_BEULER,ierr)
      call TSSetDuration(ts,time_steps_max,time_total_max,ierr)
      call TSSetFromOptions(ts,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Solve the problem
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Evaluate initial conditions

      call InitialConditions(u)

!  Run the timestepping solver

      call TSStep(ts,steps,ftime,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call TSDestroy(ts,ierr)
      call VecDestroy(localwork,ierr)
      call VecDestroy(solution,ierr)
      call VecDestroy(u_local,ierr)
      call VecDestroy(u,ierr)
      call DADestroy(da,ierr)
      call MatDestroy(A,ierr)

!  Always call PetscFinalize() before exiting a program.  This routine
!    - finalizes the PETSc libraries as well as MPI
!    - provides summary and diagnostic information if certain runtime
!      options are chosen (e.g., -log_summary).

      call PetscFinalize(ierr)
      end

!  ------------------------------------------------------------------------
!
!  InitialConditions - Computes the solution at the initial time.
!
!  Input Parameter:
!     u - uninitialized solution vector (global)
!
!  Output Parameter:
!     u - vector with solution at initial time (global)
!
!  Note: The problem parameters are passed via the common blocks in
!  ex2f.h rather than through an application context.
!
      subroutine InitialConditions(u)
      implicit none
#include "ex2f.h"

!  Input/output parameters:
      Vec     u

!  Local variables:
      double precision  localptr(1),x
      integer           i,mybase,myend,ierr
      PetscOffset       idx

!  Determine starting and ending point of each processor's range of
!  grid values.  Note that we shift by 1 to convert from the 0-based
!  C convention of starting indices to the 1-based Fortran convention.

      call VecGetOwnershipRange(u,mybase,myend,ierr)
      mybase = mybase + 1

!  Get a pointer to vector data.
!    - For default PETSc vectors, VecGetArray() returns a pointer to
!      the data array.  Otherwise, the routine is implementation dependent.
!    - You MUST call VecRestoreArray() when you no longer need access to
!      the array.
!    - Note that the Fortran interface to VecGetArray() differs from the
!      C version.  See the users manual for details.
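!    - As a rough illustration (not part of this example), the C usage is
!          PetscScalar *array;
!          ierr = VecGetArray(u,&array);
!          array[i] = value;
!      whereas in Fortran one indexes localptr(idx+i) using the integer
!      offset idx returned by the call below.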

      call VecGetArray(u,localptr,idx,ierr)

!     We initialize the solution array by simply writing the solution
!     directly into the array locations.  Alternatively, we could use
!     VecSetValues() or VecSetValuesLocal().

      do 10, i=mybase,myend
!       x - current location in global grid
        x = h*(i-1)
        localptr(i-mybase+idx+1) = one_d0 + x*x
 10   continue

!  Restore vector

      call VecRestoreArray(u,localptr,idx,ierr)

!  Print debugging information if desired
      if (debug .eq. 1) then
        if (rank .eq. 0) write(6,*) 'initial guess vector'
        call VecView(u,PETSC_VIEWER_STDOUT_WORLD,ierr)
      endif

      return
      end

!  ------------------------------------------------------------------------
!
!  ExactSolution - Computes the exact solution at a given time.
!
!  Input Parameter:
!    t - current time
!
!  Output Parameter:
!    ierr - error code
!
!  Note: The solution vector is stored in the common block!
!
      subroutine ExactSolution(t,ierr)
      implicit none
#include "ex2f.h"

!  Input/output parameters:
      double precision  t
      integer           ierr

!  Local variables:
      double precision  localptr(1),x
      integer           i,mybase,myend
      PetscOffset       idx

!  Determine starting and ending point of each processor's range of
!  grid values.  Note that we shift by 1 to convert from the 0-based
!  C convention of starting indices to the 1-based Fortran convention.

      call VecGetOwnershipRange(solution,mybase,myend,ierr)
      mybase = mybase + 1

!  Get a pointer to vector data
      call VecGetArray(solution,localptr,idx,ierr)

!  Simply write the solution directly into the array locations.
!  Alternatively, we could use VecSetValues() or VecSetValuesLocal().

      do 10, i=mybase,myend
!       x - current location in global grid
        x = h*(i-1)
        localptr(i-mybase+idx+1) = (t + one_d0)*(one_d0 + x*x)
 10   continue

!  Restore vector

      call VecRestoreArray(solution,localptr,idx,ierr)

      return
      end

!  ------------------------------------------------------------------------
!
!   Monitor - A user-provided routine to monitor the solution computed at
!   each time-step.  This example plots the solution and computes the
!   error in two different norms.
!
!   Input Parameters:
!   ts     - the time-step context
!   step   - the count of the current step (with 0 meaning the
!            initial condition)
!   time   - the current time
!   u      - the solution at this timestep
!   dummy  - optional user-provided context for this monitoring
!            routine (not used here)
!
!   Output Parameter:
!   ierr   - error code
!
      subroutine Monitor(ts,step,time,u,dummy,ierr)
      implicit none
#include "ex2f.h"

!  Input/output parameters:
      TS               ts
      integer          step,dummy
      double precision time
      Vec              u
      integer          ierr

!  Local variables:
      double precision en2,en2s,enmax
      double precision mone
      PetscDraw        draw

      mone = -1.0d0

!  We use the default X windows viewer
!       PETSC_VIEWER_DRAW_WORLD
!  that is associated with the PETSC_COMM_WORLD communicator. This
!  saves the effort of calling PetscViewerDrawOpen() to create the window.
!  Note that if we wished to plot several items in separate windows we
!  would create each viewer with PetscViewerDrawOpen() and store them in
!  the application context, appctx.
!
!  Double buffering makes graphics look better.

      call PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_WORLD,0,draw,ierr)
      call PetscDrawSetDoubleBuffer(draw,ierr)
      call VecView(u,PETSC_VIEWER_DRAW_WORLD,ierr)

!  Compute the exact solution at this time-step.  Note that the
!  solution vector is passed via the user-defined common block.
      call ExactSolution(time,ierr)

!  Print debugging information if desired
      if (debug .eq. 1) then
        if (rank .eq. 0) write(6,*) 'Computed solution vector'
        call VecView(u,PETSC_VIEWER_STDOUT_WORLD,ierr)
        if (rank .eq. 0) write(6,*) 'Exact solution vector'
        call VecView(solution,PETSC_VIEWER_STDOUT_WORLD,ierr)
      endif

!  Compute the 2-norm and max-norm of the error
!  (VecAXPY overwrites solution with the error, solution - u)
      call VecAXPY(mone,u,solution,ierr)
      call VecNorm(solution,NORM_2,en2,ierr)
!  Scale the 2-norm by the grid spacing
      en2s = dsqrt(h)*en2
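!  (multiplying the discrete 2-norm by sqrt(h) yields an approximation
!  of the continuous L2 norm, making the errors comparable across
!  grid resolutions)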
      call VecNorm(solution,NORM_MAX,enmax,ierr)

!  Print only from processor 0
      if (rank .eq. 0) write(6,100) step,time,en2s,enmax
 100  format('Timestep = ',i5,', time = ',f8.3,                         &
     &       ' sec, error [2-norm] = ',e9.3,                            &
     &       ', error [max-norm] = ',e9.3)

!  Print debugging information if desired
      if (debug .eq. 1) then
        if (rank .eq. 0) write(6,*) 'Error vector'
        call VecView(solution,PETSC_VIEWER_STDOUT_WORLD,ierr)
      endif

      return
      end

!  ------------------------------------------------------------------------
!
!   RHSFunction - User-provided routine that evaluates the RHS function
!   in the ODE.  This routine is set in the main program by calling
!   TSSetRHSFunction().  We compute:
!         global_out = F(global_in)
!
!   Input Parameters:
!      ts         - timestep context
!      t          - current time
!      global_in  - input vector to function
!      dummy      - (optional) user-provided context for function evaluation
!                   (not used here because we use a common block instead)
!
!   Output Parameter:
!      global_out - value of function
!
      subroutine RHSFunction(ts,t,global_in,global_out,dummy)
      implicit none
#include "ex2f.h"

!  Input/output parameters:
      TS               ts
      double precision t
      Vec              global_in,global_out
      integer          dummy

!  Local variables:
      double precision localptr(1),copyptr(1),sc
      integer          i,il,ierr,localsize
      PetscOffset      idx_c,idx_l

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Get ready for local function computations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Scatter ghost points to local vector, using the 2-step process
!     DAGlobalToLocalBegin(), DAGlobalToLocalEnd().
!  By placing code between these two statements, computations can be
!  done while messages are in transition.
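!  For example, work that requires only locally owned data (no ghost
!  values) could be inserted between the Begin and End calls.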

      call DAGlobalToLocalBegin(da,global_in,INSERT_VALUES,u_local,ierr)
      call DAGlobalToLocalEnd(da,global_in,INSERT_VALUES,u_local,ierr)

!  Access directly the values in our local INPUT work vector

      call VecGetArray(u_local,localptr,idx_l,ierr)

!  Access directly the values in our local OUTPUT work vector

      call VecGetArray(localwork,copyptr,idx_c,ierr)

      sc = one_d0/(h*h*two_d0*(one_d0+t)*(one_d0+t))
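!  (sc is the factor 1/(2*h^2*(1+t)^2) that multiplies the three-point
!  difference u(i+1) - 2*u(i) + u(i-1) in the discretization of
!  u * u_xx / (2*(t+1)^2))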

!  Evaluate our function on the nodes owned by this processor

      call VecGetLocalSize(u_local,localsize,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Compute entries for the locally owned part
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Handle boundary conditions: This is done by using the boundary condition
!        u(t,boundary) = g(t,boundary)
!  for some function g. Now take the derivative with respect to t to obtain
!        u_{t}(t,boundary) = g_{t}(t,boundary)
!
!  In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
!          and  u(t,1) = 2*t + 2, so that u_{t}(t,1) = 2

      if (rank .eq. 0)      copyptr(1+idx_c) = one_d0
      if (rank .eq. size-1) copyptr(localsize+idx_c) = two_d0

!  Handle the interior nodes where the PDE is replaced by finite
!  difference operators.

      do 10, i=2,localsize-1
         il = i + idx_l
         copyptr(i+idx_c) =  localptr(il) * sc *                        &
     &     (localptr(il+1) + localptr(il-1) - two_d0*localptr(il))
 10   continue

      call VecRestoreArray(u_local,localptr,idx_l,ierr)
      call VecRestoreArray(localwork,copyptr,idx_c,ierr)

!  Return the values from our local OUTPUT vector into our global
!  output vector.

      call DALocalToGlobal(da,localwork,INSERT_VALUES,global_out,ierr)

!  Print debugging information if desired
      if (debug .eq. 1) then
        if (rank .eq. 0) write(6,*) 'RHS function vector'
        call VecView(global_out,PETSC_VIEWER_STDOUT_WORLD,ierr)
      endif

      return
      end

!  ------------------------------------------------------------------------
!
!  RHSJacobian - User-provided routine to compute the Jacobian of the
!                right-hand-side function.
!
!  Input Parameters:
!     ts - the TS context
!     t - current time
!     global_in - global input vector
!     dummy - optional user-defined context, as set by TSSetRHSJacobian()
!
!  Output Parameters:
!     A - Jacobian matrix
!     B - optionally different preconditioning matrix
!     str - flag indicating matrix structure
!
!  Notes:
!  RHSJacobian computes entries for the locally owned part of the Jacobian.
!   - Currently, all PETSc parallel matrix formats are partitioned by
!     contiguous chunks of rows across the processors.  The range of
!     rows owned locally is obtained below via MatGetOwnershipRange().
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global row and columns of matrix entries.
!   - Here, we set all entries for a particular row at once.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.

      subroutine RHSJacobian(ts,t,global_in,A,B,str,dummy)
      implicit none
#include "ex2f.h"

!  Input/output parameters:
      TS               ts
      double precision t
      Vec              global_in
      Mat              A,B
      MatStructure     str
      integer          dummy

!  Local variables:
      double precision localptr(1),sc,v(3)
      integer          i,ierr,col(3),is
      integer          mstart,mend,mstarts,mends
      PetscOffset      idx

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Get ready for local Jacobian computations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Scatter ghost points to local vector, using the 2-step process
!     DAGlobalToLocalBegin(), DAGlobalToLocalEnd().
!  By placing code between these two statements, computations can be
!  done while messages are in transition.

      call DAGlobalToLocalBegin(da,global_in,INSERT_VALUES,u_local,ierr)
      call DAGlobalToLocalEnd(da,global_in,INSERT_VALUES,u_local,ierr)

!  Get pointer to vector data

      call VecGetArray(u_local,localptr,idx,ierr)

!  Get starting and ending locally owned rows of the matrix

      call MatGetOwnershipRange(A,mstarts,mends,ierr)
      mstart = mstarts
      mend   = mends

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Compute entries for the locally owned part of the Jacobian.
!   - Currently, all PETSc parallel matrix formats are partitioned by
!     contiguous chunks of rows across the processors.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Here, we set all entries for a particular row at once.
!   - We can set matrix entries using either MatSetValuesLocal()
!     or MatSetValues().
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Set matrix rows corresponding to boundary data

      if (mstart .eq. 0) then
        v(1) = zero_d0
        call MatSetValues(A,1,mstart,1,mstart,v,INSERT_VALUES,ierr)
        mstart = mstart + 1
      endif
      if (mend .eq. m) then
        mend = mend - 1
        v(1) = zero_d0
        call MatSetValues(A,1,mend,1,mend,v,INSERT_VALUES,ierr)
      endif

!  Set matrix rows corresponding to interior data.
!  We construct the matrix one row at a time.
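!  Each interior row discretizes F(i) = sc*u(i)*(u(i+1)-2*u(i)+u(i-1)),
!  whose nonzero Jacobian entries are
!     dF(i)/du(i-1) = sc*u(i)
!     dF(i)/du(i)   = sc*(u(i+1) + u(i-1) - 4*u(i))
!     dF(i)/du(i+1) = sc*u(i)
!  These are the values v(1), v(2), v(3) set in the loop below.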

      sc = one_d0/(h*h*two_d0*(one_d0+t)*(one_d0+t))
      do 10, i=mstart,mend-1
         col(1) = i-1
         col(2) = i
         col(3) = i+1
         is     = i - mstart + 1 + idx + 1
         v(1)   = sc*localptr(is)
         v(2)   = sc*(localptr(is+1) + localptr(is-1) -                 &
     &                four_d0*localptr(is))
         v(3)   = sc*localptr(is)
         call MatSetValues(A,1,i,3,col,v,INSERT_VALUES,ierr)
 10   continue

!  Restore vector

      call VecRestoreArray(u_local,localptr,idx,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Complete the matrix assembly process and set some options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Set flag to indicate that the Jacobian matrix retains an identical
!  nonzero structure throughout all timestepping iterations (although the
!  values of the entries change). Thus, we can save some work in setting
!  up the preconditioner (e.g., no need to redo symbolic factorization for
!  ILU/ICC preconditioners).
!   - If the nonzero structure of the matrix is different during
!     successive linear solves, then the flag DIFFERENT_NONZERO_PATTERN
!     must be used instead.  If you are unsure whether the matrix
!     structure has changed or not, use the flag DIFFERENT_NONZERO_PATTERN.
!   - Caution:  If you specify SAME_NONZERO_PATTERN, PETSc
!     believes your assertion and does not check the structure
!     of the matrix.  If you erroneously claim that the structure
!     is the same when it actually is not, the new preconditioner
!     will not function correctly.  Thus, use this optimization
!     feature with caution!

      str = SAME_NONZERO_PATTERN

!  Set an option to indicate that we will never add a new nonzero location
!  to the matrix.  If we do, it will generate an error.

      call MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,ierr)

      return
      end