Actual source code: util2.c
/*$Id: util2.c,v 1.19 2001/08/07 21:31:30 bsmith Exp $*/

/*
   This file contains utility routines for finite difference
   approximation of Jacobian matrices.  This functionality for
   the TS component will eventually be incorporated as part of
   the base PETSc libraries.
*/
#include "src/ts/tsimpl.h"
#include "src/snes/snesimpl.h"
#include "src/fortran/custom/zpetsc.h"
int RHSFunction(TS,PetscReal,Vec,Vec,void*);
int RHSJacobianFD(TS,PetscReal,Vec,Mat*,Mat*,MatStructure*,void*);
/* -------------------------------------------------------------------*/

/* Temporary interface routine; this will be eliminated soon! */
#ifdef PETSC_HAVE_FORTRAN_CAPS
#define setcroutinefromfortran_ SETCROUTINEFROMFORTRAN
#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
#define setcroutinefromfortran_ setcroutinefromfortran
#endif
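
/*
   The #if block above maps the C symbol onto the name-mangling scheme the
   Fortran compiler expects (all caps, no trailing underscore, or the default
   trailing-underscore form).  As a sketch of the intended use (the exact
   Fortran-side spelling and argument list are assumptions based on this
   interface, not taken from this file), a Fortran driver would call
   something like

       call setcroutinefromfortran(ts, A, A, ierr)

   which lands in the C wrapper below and registers RHSJacobianFD.
*/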
EXTERN_C_BEGIN
void PETSC_STDCALL setcroutinefromfortran_(TS *ts,Mat *A,Mat *B,int *__ierr)
{
  *__ierr = TSSetRHSJacobian(*ts,*A,*B,RHSJacobianFD,PETSC_NULL);
}
EXTERN_C_END
/* -------------------------------------------------------------------*/
/*
   RHSJacobianFD - Computes the Jacobian using finite differences.

   Input Parameters:
.  ts - TS context
.  xx1 - compute Jacobian at this point
.  ctx - application's function context, as set with SNESSetFunction()

   Output Parameters:
.  J - Jacobian
.  B - preconditioner, same as Jacobian
.  flag - matrix flag

   Notes:
   This routine is slow and expensive, and is not currently optimized
   to take advantage of sparsity in the problem.

   Sparse approximations using colorings are also available and
   would be a much better alternative!
*/
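/*
   For reference, this mirrors what the Fortran wrapper above does: a C
   application would register this routine directly with (here "ts" and the
   matrix "A" are assumed to have been created already)

       ierr = TSSetRHSJacobian(ts, A, A, RHSJacobianFD, PETSC_NULL);

   passing the same matrix for the Jacobian and the preconditioner, as noted
   in the parameter list above.
*/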
int RHSJacobianFD(TS ts,PetscReal t,Vec xx1,Mat *J,Mat *B,MatStructure *flag,void *ctx)
{
  Vec         jj1,jj2,xx2;
  int         i,ierr,N,start,end,j;
  PetscScalar dx,mone = -1.0,*y,scale,*xx,wscale;
  PetscReal   amax,epsilon = 1.e-8; /* assumes double precision */
  PetscReal   dx_min = 1.e-16,dx_par = 1.e-1;
  MPI_Comm    comm;

  VecDuplicate(xx1,&jj1);
  VecDuplicate(xx1,&jj2);
  VecDuplicate(xx1,&xx2);

  PetscObjectGetComm((PetscObject)xx1,&comm);
  MatZeroEntries(*J);

  VecGetSize(xx1,&N);
  VecGetOwnershipRange(xx1,&start,&end);
  TSComputeRHSFunction(ts,ts->ptime,xx1,jj1);

  /* Compute Jacobian approximation, 1 column at a time.
     xx1 = current iterate, jj1 = F(xx1)
     xx2 = perturbed iterate, jj2 = F(xx2)
  */
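  /*
     Each pass through the loop below approximates one column of the
     Jacobian by a first-order forward difference:

         J(:,i) ~ ( F(t, xx1 + dx*e_i) - F(t, xx1) ) / dx

     where e_i is the i-th unit vector and dx is scaled relative to the
     magnitude of xx1[i], subject to the safeguards coded below.
  */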
  VecGetArray(xx1,&xx);
  for (i=0; i<N; i++) {
    VecCopy(xx1,xx2);
    if (i >= start && i < end) {
      dx = xx[i-start];
#if !defined(PETSC_USE_COMPLEX)
      if (dx < dx_min && dx >= 0.0) dx = dx_par;
      else if (dx < 0.0 && dx > -dx_min) dx = -dx_par;
#else
      if (PetscAbsScalar(dx) < dx_min && PetscRealPart(dx) >= 0.0) dx = dx_par;
      else if (PetscRealPart(dx) < 0.0 && PetscAbsScalar(dx) < dx_min) dx = -dx_par;
#endif
      dx *= epsilon;
      wscale = 1.0/dx;
      VecSetValues(xx2,1,&i,&dx,ADD_VALUES);
    } else {
      wscale = 0.0;
    }
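    /* Only the process that owns global index i has set wscale = 1/dx;
       all others use wscale = 0, so the sum-reduction below recovers
       1/dx on every process. */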
    TSComputeRHSFunction(ts,t,xx2,jj2);
    VecAXPY(&mone,jj1,jj2);
    /* Communicate scale to all processors */
    MPI_Allreduce(&wscale,&scale,1,MPIU_SCALAR,PetscSum_Op,comm);
    VecScale(&scale,jj2);
    VecGetArray(jj2,&y);
    VecNorm(jj2,NORM_INFINITY,&amax);
    amax *= 1.e-14;
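    /* Insert only entries larger than 1.e-14 times the column's infinity
       norm; smaller values are treated as numerical noise and dropped. */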
    for (j=start; j<end; j++) {
      if (PetscAbsScalar(y[j-start]) > amax) {
        MatSetValues(*J,1,&j,1,&i,y+j-start,INSERT_VALUES);
      }
    }
    VecRestoreArray(jj2,&y);
  }

  VecRestoreArray(xx1,&xx);
  MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);
  *flag = DIFFERENT_NONZERO_PATTERN;

  VecDestroy(jj1);
  VecDestroy(jj2);
  VecDestroy(xx2);

  return 0;
}