Actual source code: pbvec.c

/*$Id: pbvec.c,v 1.173 2001/09/12 03:26:59 bsmith Exp $*/

/*
   This file contains routines for parallel vector operations.
 */
#include "src/vec/impls/mpi/pvecimpl.h"

/*
       Note this code is very similar to VecPublish_Seq()
*/
#undef __FUNCT__
#define __FUNCT__ "VecPublish_MPI"
static int VecPublish_MPI(PetscObject obj)
{
#if defined(PETSC_HAVE_AMS)
  Vec          v = (Vec) obj;
  Vec_MPI      *s = (Vec_MPI*)v->data;
  int          ierr,(*f)(AMS_Memory,char *,Vec);
#endif

  PetscFunctionBegin;
#if defined(PETSC_HAVE_AMS)
  /* if it is already published then return */
  if (v->amem >= 0) PetscFunctionReturn(0);

  ierr = PetscObjectPublishBaseBegin(obj);CHKERRQ(ierr);
  ierr = AMS_Memory_add_field((AMS_Memory)v->amem,"values",s->array,v->n,AMS_DOUBLE,AMS_READ,
                              AMS_DISTRIBUTED,AMS_REDUCT_UNDEF);CHKERRQ(ierr);

  /*
     If the vector knows its "layout" let it set it, otherwise it defaults
     to the correct 1d distribution
  */
  ierr = PetscObjectQueryFunction(obj,"AMSSetFieldBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)((AMS_Memory)v->amem,"values",v);CHKERRQ(ierr);
  }
  ierr = PetscObjectPublishBaseEnd(obj);CHKERRQ(ierr);
#endif
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecDot_MPI"
int VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar  sum,work;
  int          ierr;

  PetscFunctionBegin;
  ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecTDot_MPI"
int VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar  sum,work;
  int          ierr;

  PetscFunctionBegin;
  ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecSetOption_MPI"
int VecSetOption_MPI(Vec v,VecOption op)
{
  PetscFunctionBegin;
  if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
    v->stash.donotstash = PETSC_TRUE;
  } else if (op == VEC_TREAT_OFF_PROC_ENTRIES) {
    v->stash.donotstash = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}
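/*
   A minimal usage sketch (illustrative, not part of the original source) for
   the option handled above: telling a vector to discard entries destined for
   other processes, so assembly requires no parallel communication.  The
   vector x and the row/value pair are placeholders.

     ierr = VecSetOption(x,VEC_IGNORE_OFF_PROC_ENTRIES);CHKERRQ(ierr);
     ierr = VecSetValues(x,1,&row,&value,INSERT_VALUES);CHKERRQ(ierr);
     ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
     ierr = VecAssemblyEnd(x);CHKERRQ(ierr);
*/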
EXTERN int VecDuplicate_MPI(Vec,Vec *);
EXTERN_C_BEGIN
EXTERN int VecView_MPI_Draw(Vec,PetscViewer);
EXTERN_C_END

static struct _VecOps DvOps = { VecDuplicate_MPI,
            VecDuplicateVecs_Default,
            VecDestroyVecs_Default,
            VecDot_MPI,
            VecMDot_MPI,
            VecNorm_MPI,
            VecTDot_MPI,
            VecMTDot_MPI,
            VecScale_Seq,
            VecCopy_Seq,
            VecSet_Seq,
            VecSwap_Seq,
            VecAXPY_Seq,
            VecAXPBY_Seq,
            VecMAXPY_Seq,
            VecAYPX_Seq,
            VecWAXPY_Seq,
            VecPointwiseMult_Seq,
            VecPointwiseDivide_Seq,
            VecSetValues_MPI,
            VecAssemblyBegin_MPI,
            VecAssemblyEnd_MPI,
            VecGetArray_Seq,
            VecGetSize_MPI,
            VecGetSize_Seq,
            VecRestoreArray_Seq,
            VecMax_MPI,
            VecMin_MPI,
            VecSetRandom_Seq,
            VecSetOption_MPI,
            VecSetValuesBlocked_MPI,
            VecDestroy_MPI,
            VecView_MPI,
            VecPlaceArray_Seq,
            VecReplaceArray_Seq,
            VecDot_Seq,
            VecTDot_Seq,
            VecNorm_Seq,
            VecLoadIntoVector_Default,
            VecReciprocal_Default,
            0, /* VecViewNative... */
            VecConjugate_Seq,
            0,
            0,
            VecResetArray_Seq,
            0,
            VecMaxPointwiseDivide_Seq};
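/*
   Note on the table above: the _MPI entries require communication or other
   parallel-aware handling, while the _Seq entries are reused directly because
   those operations act only on the locally owned part of the array.  The zero
   entries are slots this vector type leaves unimplemented.
*/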

#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI_Private"
/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()
*/
int VecCreate_MPI_Private(Vec v,int nghost,const PetscScalar array[],PetscMap map)
{
  Vec_MPI *s;
  int     ierr,size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(v->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(v->comm,&rank);CHKERRQ(ierr);

  v->bops->publish = VecPublish_MPI;
  PetscLogObjectMemory(v,sizeof(Vec_MPI) + (v->n+nghost+1)*sizeof(PetscScalar));
  ierr           = PetscNew(Vec_MPI,&s);CHKERRQ(ierr);
  ierr           = PetscMemzero(s,sizeof(Vec_MPI));CHKERRQ(ierr);
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  v->data        = (void*)s;
  s->nghost      = nghost;
  v->mapping     = 0;
  v->bmapping    = 0;
  v->petscnative = PETSC_TRUE;

  if (array) {
    s->array           = (PetscScalar *)array;
    s->array_allocated = 0;
  } else {
    ierr               = PetscMalloc((v->n+nghost+1)*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
    s->array_allocated = s->array;
    ierr               = PetscMemzero(s->array,v->n*sizeof(PetscScalar));CHKERRQ(ierr);
  }

  /* By default parallel vectors do not have a local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode = NOT_SET_VALUES;

  if (!v->map) {
    if (!map) {
      ierr = PetscMapCreateMPI(v->comm,v->n,v->N,&v->map);CHKERRQ(ierr);
    } else {
      v->map = map;
      ierr   = PetscObjectReference((PetscObject)map);CHKERRQ(ierr);
    }
  }
  /* create the stashes. The block size for the bstash is set later when
     VecSetValuesBlocked() is called.
  */
  ierr = VecStashCreate_Private(v->comm,1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(v->comm,v->bs,&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SINGLE)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  ierr = PetscPublishAll(v);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI"
int VecCreate_MPI(Vec vv)
{
  int ierr;

  PetscFunctionBegin;
  if (vv->bs > 0) {
    ierr = PetscSplitOwnershipBlock(vv->comm,vv->bs,&vv->n,&vv->N);CHKERRQ(ierr);
  } else {
    ierr = PetscSplitOwnership(vv->comm,&vv->n,&vv->N);CHKERRQ(ierr);
  }
  ierr = VecCreate_MPI_Private(vv,0,0,PETSC_NULL);CHKERRQ(ierr);
  ierr = VecSetSerializeType(vv,VEC_SER_MPI_BINARY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecSerialize_MPI"
int VecSerialize_MPI(MPI_Comm comm, Vec *vec, PetscViewer viewer, PetscTruth store)
{
  Vec          v;
  Vec_MPI     *x;
  PetscScalar *array;
  int          fd;
  int          vars, locVars, ghostVars;
  int          size;
  int          ierr;

  PetscFunctionBegin;
  ierr = PetscViewerBinaryGetDescriptor(viewer, &fd);CHKERRQ(ierr);
  if (store) {
    v    = *vec;
    x    = (Vec_MPI *) v->data;
    ierr = PetscBinaryWrite(fd, &v->N,      1,                PETSC_INT,    0);CHKERRQ(ierr);
    ierr = PetscBinaryWrite(fd, &v->n,      1,                PETSC_INT,    0);CHKERRQ(ierr);
    ierr = PetscBinaryWrite(fd, &x->nghost, 1,                PETSC_INT,    0);CHKERRQ(ierr);
    ierr = PetscBinaryWrite(fd,  x->array,  v->n + x->nghost, PETSC_SCALAR, 0);CHKERRQ(ierr);
  } else {
    ierr = PetscBinaryRead(fd, &vars,      1, PETSC_INT);CHKERRQ(ierr);
    ierr = PetscBinaryRead(fd, &locVars,   1, PETSC_INT);CHKERRQ(ierr);
    ierr = PetscBinaryRead(fd, &ghostVars, 1, PETSC_INT);CHKERRQ(ierr);
    ierr = MPI_Allreduce(&locVars, &size, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
    if (size != vars) SETERRQ(PETSC_ERR_ARG_CORRUPT, "Invalid row partition");
    ierr = VecCreate(comm, &v);CHKERRQ(ierr);
    ierr = VecSetSizes(v, locVars, vars);CHKERRQ(ierr);
    if (locVars + ghostVars > 0) {
      ierr = PetscMalloc((locVars + ghostVars) * sizeof(PetscScalar), &array);CHKERRQ(ierr);
      ierr = PetscBinaryRead(fd, array, locVars + ghostVars, PETSC_SCALAR);CHKERRQ(ierr);
      ierr = VecCreate_MPI_Private(v, ghostVars, array, PETSC_NULL);CHKERRQ(ierr);
      ((Vec_MPI *) v->data)->array_allocated = array;
    } else {
      ierr = VecCreate_MPI_Private(v, ghostVars, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr);
    }

    ierr = VecAssemblyBegin(v);CHKERRQ(ierr);
    ierr = VecAssemblyEnd(v);CHKERRQ(ierr);
    *vec = v;
  }

  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "VecCreateMPIWithArray"
/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
int VecCreateMPIWithArray(MPI_Comm comm,int n,int N,const PetscScalar array[],Vec *vv)
{
  int ierr;

  PetscFunctionBegin;
  if (n == PETSC_DECIDE) {
    SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  }
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,0,array,PETSC_NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
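/*
   A minimal usage sketch (illustrative, not part of the original source):
   each process supplies storage for five local entries; PETSc computes the
   global size and never frees the user-owned array.

     PetscScalar storage[5];
     Vec         x;
     int         ierr;

     ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,5,PETSC_DECIDE,storage,&x);CHKERRQ(ierr);
     ... use x ...
     ierr = VecDestroy(x);CHKERRQ(ierr);    storage is still owned by the caller
*/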

#undef __FUNCT__
#define __FUNCT__ "VecGhostGetLocalForm"
/*@C
    VecGhostGetLocalForm - Obtains the local ghosted representation of
    a parallel vector created with VecCreateGhost().

    Not Collective

    Input Parameter:
.   g - the global vector. The vector must have been obtained with either
        VecCreateGhost(), VecCreateGhostWithArray(), or VecCreateSeq().

    Output Parameter:
.   l - the local (ghosted) representation

    Notes:
    This routine does not actually update the ghost values, but rather it
    returns a sequential vector that includes the locations for the ghost
    values and their current values. The returned vector and the original
    vector passed in share the same array that contains the actual vector data.

    One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
    finished using the object.

    Level: advanced

   Concepts: vectors^ghost point access

.seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
int VecGhostGetLocalForm(Vec g,Vec *l)
{
  int        ierr;
  PetscTruth isseq,ismpi;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);CHKERRQ(ierr);
  if (ismpi) {
    Vec_MPI *v = (Vec_MPI*)g->data;
    if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
    *l = v->localrep;
  } else if (isseq) {
    *l = g;
  } else {
    SETERRQ1(1,"Vector type %s does not have local representation",g->type_name);
  }
  ierr = PetscObjectReference((PetscObject)*l);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecGhostRestoreLocalForm"
/*@C
    VecGhostRestoreLocalForm - Restores the local ghosted representation of
    a parallel vector obtained with VecGhostGetLocalForm().

    Not Collective

    Input Parameters:
+   g - the global vector
-   l - the local (ghosted) representation

    Notes:
    This routine does not update the ghost values; it simply releases the
    local representation acquired with VecGhostGetLocalForm().

    Level: advanced

.seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
@*/
int VecGhostRestoreLocalForm(Vec g,Vec *l)
{
  int ierr;

  PetscFunctionBegin;
  ierr = PetscObjectDereference((PetscObject)*l);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
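/*
   A minimal sketch (illustrative, not part of the original source) of the
   get/restore pairing documented above, reading ghost values through the
   local form of a ghosted vector g with local length n and nghost ghost
   points (n and nghost are placeholders):

     Vec          lg;
     PetscScalar *a;
     int          i,ierr;

     ierr = VecGhostGetLocalForm(g,&lg);CHKERRQ(ierr);
     ierr = VecGetArray(lg,&a);CHKERRQ(ierr);
     for (i=0; i<nghost; i++) {
       ... a[n+i] holds the current value of the i-th ghost point ...
     }
     ierr = VecRestoreArray(lg,&a);CHKERRQ(ierr);
     ierr = VecGhostRestoreLocalForm(g,&lg);CHKERRQ(ierr);
*/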

#undef __FUNCT__
#define __FUNCT__ "VecGhostUpdateBegin"
/*@
   VecGhostUpdateBegin - Begins the vector scatter to update the vector from
   local representation to global or from global representation to local.

   Collective on Vec

   Input Parameters:
+  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:
   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
int VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI *v;
  int     ierr;

  PetscFunctionBegin;
  v = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  if (!v->localupdate) PetscFunctionReturn(0);

  if (scattermode == SCATTER_REVERSE) {
    ierr = VecScatterBegin(v->localrep,g,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
  } else {
    ierr = VecScatterBegin(g,v->localrep,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecGhostUpdateEnd"
/*@
   VecGhostUpdateEnd - Ends the vector scatter to update the vector from
   local representation to global or from global representation to local.

   Collective on Vec

   Input Parameters:
+  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:
   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
int VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI *v;
  int     ierr;

  PetscFunctionBegin;
  v = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  if (!v->localupdate) PetscFunctionReturn(0);

  if (scattermode == SCATTER_REVERSE) {
    ierr = VecScatterEnd(v->localrep,g,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
  } else {
    ierr = VecScatterEnd(g,v->localrep,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostWithArray"
/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
-  array - the space to store the vector values (must be at least n + nghost long)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of the vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
int VecCreateGhostWithArray(MPI_Comm comm,int n,int N,int nghost,const int ghosts[],const PetscScalar array[],Vec *vv)
{
  int          ierr;
  Vec_MPI      *w;
  PetscScalar  *larray;
  IS           from,to;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,nghost,array,PETSC_NULL);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  PetscLogObjectParent(*vv,w->localrep);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  PetscLogObjectParent(*vv,w->localupdate);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhost"
/*@C
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of the vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
int VecCreateGhost(MPI_Comm comm,int n,int N,int nghost,const int ghosts[],Vec *vv)
{
  int ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
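/*
   A fuller sketch (illustrative, not part of the original source) combining
   VecCreateGhost() with the update cycle documented in VecGhostUpdateBegin():
   two ghost points per process, then a forward scatter so every process sees
   current values for its ghost points.  ghosts[] and n are placeholders.

     int ghosts[2];     global indices of the two off-process points
     Vec v;

     ierr = VecCreateGhost(PETSC_COMM_WORLD,n,PETSC_DECIDE,2,ghosts,&v);CHKERRQ(ierr);
     ... fill the locally owned entries of v ...
     ierr = VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ... the ghost slots of the local form are now up to date ...
     ierr = VecDestroy(v);CHKERRQ(ierr);
*/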

#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_MPI"
int VecDuplicate_MPI(Vec win,Vec *v)
{
  int          ierr;
  Vec_MPI      *vw,*w = (Vec_MPI *)win->data;
  PetscScalar  *array;
#if defined(PETSC_HAVE_AMS)
  int          (*f)(AMS_Memory,char *,Vec);
#endif

  PetscFunctionBegin;
  ierr = VecCreate(win->comm,v);CHKERRQ(ierr);
  ierr = VecSetSizes(*v,win->n,win->N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*v,w->nghost,0,win->map);CHKERRQ(ierr);
  vw   = (Vec_MPI *)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    PetscLogObjectParent(*v,vw->localrep);
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash = win->stash.donotstash;

  ierr = PetscOListDuplicate(win->olist,&(*v)->olist);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(win->qlist,&(*v)->qlist);CHKERRQ(ierr);
  if (win->mapping) {
    (*v)->mapping = win->mapping;
    ierr = PetscObjectReference((PetscObject)win->mapping);CHKERRQ(ierr);
  }
  if (win->bmapping) {
    (*v)->bmapping = win->bmapping;
    ierr = PetscObjectReference((PetscObject)win->bmapping);CHKERRQ(ierr);
  }
  (*v)->bs        = win->bs;
  (*v)->bstash.bs = win->bstash.bs;

#if defined(PETSC_HAVE_AMS)
  /*
     If the vector knows its "layout" let it set it, otherwise it defaults
     to the correct 1d distribution
  */
  ierr = PetscObjectQueryFunction((PetscObject)(*v),"AMSSetFieldBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)((AMS_Memory)(*v)->amem,"values",*v);CHKERRQ(ierr);
  }
#endif
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlockWithArray"
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
-  array - the space to store the vector values (must be at least n + nghost*bs long)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of the vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
int VecCreateGhostBlockWithArray(MPI_Comm comm,int bs,int n,int N,int nghost,const int ghosts[],const PetscScalar array[],Vec *vv)
{
  int          ierr;
  Vec_MPI      *w;
  PetscScalar  *larray;
  IS           from,to;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,nghost*bs,array,PETSC_NULL);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = VecSetBlockSize(w->localrep,bs);CHKERRQ(ierr);
  PetscLogObjectParent(*vv,w->localrep);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  PetscLogObjectParent(*vv,w->localupdate);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlock"
/*@C
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of the vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
int VecCreateGhostBlock(MPI_Comm comm,int bs,int n,int N,int nghost,const int ghosts[],Vec *vv)
{
  int ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
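/*
   A short sketch (illustrative, not part of the original source) of the
   blocked variant: with bs = 3 and one ghost block, the local representation
   created above holds n + 3 entries, and ghosts[] contains a single block
   index rather than three scalar indices.  n is a placeholder.

     int ghostblock;    global index of the one ghost block
     Vec v;

     ierr = VecCreateGhostBlock(PETSC_COMM_WORLD,3,n,PETSC_DECIDE,1,&ghostblock,&v);CHKERRQ(ierr);
*/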

/*
    These introduce a ghosted vector where the ghosting is determined by the call to
  VecSetLocalToGlobalMapping()
*/
#undef __FUNCT__
#define __FUNCT__ "VecSetLocalToGlobalMapping_FETI"
int VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
{
  int     ierr;
  Vec_MPI *v = (Vec_MPI *)vv->data;

  PetscFunctionBegin;
  v->nghost = map->n - vv->n;

  /* we need to enlarge the array space that was allocated when the vector was created */
  ierr     = PetscFree(v->array_allocated);CHKERRQ(ierr);
  ierr     = PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);CHKERRQ(ierr);
  v->array = v->array_allocated;

  /* Create local representation */
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);CHKERRQ(ierr);
  PetscLogObjectParent(vv,v->localrep);

  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "VecSetValuesLocal_FETI"
int VecSetValuesLocal_FETI(Vec vv,int n,const int *ix,const PetscScalar *values,InsertMode mode)
{
  int      ierr;
  Vec_MPI *v = (Vec_MPI *)vv->data;

  PetscFunctionBegin;
  ierr = VecSetValues(v->localrep,n,ix,values,mode);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_FETI"
int VecCreate_FETI(Vec vv)
{
  int ierr;

  PetscFunctionBegin;
  ierr = VecSetType(vv,VECMPI);CHKERRQ(ierr);

  /* overwrite the functions to handle setting values locally */
  vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
  vv->ops->setvalueslocal          = VecSetValuesLocal_FETI;
  vv->ops->assemblybegin           = 0;
  vv->ops->assemblyend             = 0;
  vv->ops->setvaluesblocked        = 0;

  PetscFunctionReturn(0);
}
EXTERN_C_END