Actual source code: ex8.c

/*$Id: ex8.c,v 1.24 2001/09/11 16:32:18 bsmith Exp $*/

static char help[] = "Demonstrates using a local ordering to set values into a parallel vector.\n\n";

/*T
   Concepts: vectors^assembling vectors with local ordering;
   Processors: n
T*/

/*
  Include "petscvec.h" so that we can use vectors.  Note that this file
  automatically includes:
     petsc.h       - base PETSc routines   petscis.h     - index sets
     petscsys.h    - system routines       petscviewer.h - viewers
*/
#include "petscvec.h"

#undef __FUNCT__
#define __FUNCT__ "main"
int main(int argc,char **argv)
{
  int     i,N,ierr,rank,ng,*gindices,rstart,rend,M;
  PetscScalar  one = 1.0;
  Vec     x;

  PetscInitialize(&argc,&argv,(char *)0,help);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);

  /*
     Create a parallel vector.
      - In this case, we specify the size of each processor's local
        portion, and PETSc computes the global size.  Alternatively,
        PETSc could determine the vector's distribution if we specify
        just the global size.
  */
  VecCreate(PETSC_COMM_WORLD,&x);
  VecSetSizes(x,rank+1,PETSC_DECIDE);
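  /*
     Since the process with rank r contributes r+1 local entries, the global
     size computed by PETSc is N = 1 + 2 + ... + size = size*(size+1)/2.
  */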
  VecSetFromOptions(x);
  VecGetSize(x,&N);
  VecSet(&one,x);

  /*
     Set the local-to-global ordering for the vector.  Each processor
     generates a list of the global indices for each local index.  Note that
     the local indices are just whatever is convenient for a particular
     application.  In this case we treat the vector as lying on a
     one-dimensional grid and have one ghost point on each end of the block
     owned by each processor.
  */

  VecGetSize(x,&M);
  VecGetOwnershipRange(x,&rstart,&rend);
  ng   = rend - rstart + 2;
  PetscMalloc(ng*sizeof(int),&gindices);
  gindices[0] = rstart - 1;
  for (i=0; i<ng-1; i++) {
    gindices[i+1] = gindices[i] + 1;
  }
  /* map the first and last point as periodic */
  if (gindices[0]    == -1) gindices[0]    = M - 1;
  if (gindices[ng-1] == M)  gindices[ng-1] = 0;
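  /*
     For example, with 3 processes the global size is M = 6.  Process 1 owns
     global entries 1 and 2, so rstart = 1, rend = 3, ng = 4 and
     gindices = {0,1,2,3}.  On process 0 the initial list {-1,0,1} becomes
     {5,0,1} after the periodic wrap, and on process 2 the list {2,3,4,5,6}
     becomes {2,3,4,5,0}.
  */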
  {
    ISLocalToGlobalMapping ltog;
    ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,ng,gindices,&ltog);
    VecSetLocalToGlobalMapping(x,ltog);
    ISLocalToGlobalMappingDestroy(ltog);
  }
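  /*
     Destroying ltog and freeing gindices right away is safe here: the mapping
     takes its own copy of the index list when it is created, and the vector
     holds its own reference to the mapping once it has been attached.
  */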
  PetscFree(gindices);

  /*
     Set the vector elements.
      - In this case set the values using the local ordering
      - Each processor can contribute any vector entries,
        regardless of which processor "owns" them; any nonlocal
        contributions will be transferred to the appropriate processor
        during the assembly process.
      - In this example, the flag ADD_VALUES indicates that all
        contributions will be added together.
  */
  for (i=0; i<ng; i++) {
    VecSetValuesLocal(x,1,&i,&one,ADD_VALUES);
  }

  /*
     Assemble the vector, using the 2-step process:
       VecAssemblyBegin(), VecAssemblyEnd()
     Computations can be done while messages are in transit
     by placing code between these two statements.
  */
  VecAssemblyBegin(x);
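  /* (purely local computations could be placed here to overlap with the
      communication started by VecAssemblyBegin()) */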
  VecAssemblyEnd(x);

  /*
      View the vector; then destroy it.
  */
  VecView(x,PETSC_VIEWER_STDOUT_WORLD);
  VecDestroy(x);

  PetscFinalize();
  return 0;
}