Actual source code: vscat.c

  1: /*$Id: vscat.c,v 1.173 2001/08/10 03:29:59 bsmith Exp $*/

  3: /*
  4:      Code for creating scatters between vectors. This file 
  5:   includes the code for scattering between sequential vectors and
  6:   some special cases for parallel scatters.
  7: */

 9:  #include "src/vec/is/isimpl.h"
 10:  #include "src/vec/vecimpl.h"

 12: /* Logging support */
 13: int VEC_SCATTER_COOKIE;

 15: /*
 16:      Checks if any indices are less than zero and generates an error
 17: */
 18: #undef __FUNCT__
 19: #define __FUNCT__ "VecScatterCheckIndices_Private"
 20: static int VecScatterCheckIndices_Private(int nmax,int n,int *idx)
 21: {
 22:   int i;

 25:   for (i=0; i<n; i++) {
 26:     if (idx[i] < 0)     SETERRQ2(1,"Negative index %d at %d location",idx[i],i);
 27:     if (idx[i] >= nmax) SETERRQ3(1,"Index %d at %d location greater than max %d",idx[i],i,nmax);
 28:   }
 29:   return(0);
 30: }

 32: /*
 33:       This is special scatter code for when the entire parallel vector is 
 34:    copied to each processor.

 36:    This code was written by Cameron Cooper, Occidental College, Fall 1995,
 37:    while working at ANL as a SERS student.
 38: */
 39: #undef __FUNCT__
 40: #define __FUNCT__ "VecScatterBegin_MPI_ToAll"
 41: int VecScatterBegin_MPI_ToAll(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter ctx)
 42: {
 43:   int          ierr,yy_n,xx_n,*range;
 44:   PetscScalar  *xv,*yv;
 45:   PetscMap     map;

 48:   VecGetLocalSize(y,&yy_n);
 49:   VecGetLocalSize(x,&xx_n);
 50:   VecGetArray(y,&yv);
 51:   if (x != y) {VecGetArray(x,&xv);}

 53:   if (mode & SCATTER_REVERSE) {
 54:     PetscScalar          *xvt,*xvt2;
 55:     VecScatter_MPI_ToAll *scat = (VecScatter_MPI_ToAll*)ctx->todata;
 56:     int                  i;

 58:     if (addv == INSERT_VALUES) {
 59:       int rstart,rend;
 60:       /* 
 61:          copy the correct part of the local vector into the local storage of 
 62:          the MPI one.  Note: this operation only makes sense if all the local 
 63:          vectors have the same values
 64:       */
 65:       VecGetOwnershipRange(y,&rstart,&rend);
 66:       PetscMemcpy(yv,xv+rstart,yy_n*sizeof(PetscScalar));
 67:     } else {
 68:       MPI_Comm comm;
 69:       int      rank;
 70:       PetscObjectGetComm((PetscObject)y,&comm);
 71:       MPI_Comm_rank(comm,&rank);
 72:       if (scat->work1) xvt = scat->work1;
 73:       else {
 74:         ierr        = PetscMalloc((xx_n+1)*sizeof(PetscScalar),&xvt);
 75:         scat->work1 = xvt;
 76:         PetscLogObjectMemory(ctx,xx_n*sizeof(PetscScalar));
 77:       }
 78:       if (!rank) { /* I am the zeroth processor, values are accumulated here */
 79:         if   (scat->work2) xvt2 = scat->work2;
 80:         else {
 81:           ierr        = PetscMalloc((xx_n+1)*sizeof(PetscScalar),& xvt2);
 82:           scat->work2 = xvt2;
 83:           PetscLogObjectMemory(ctx,xx_n*sizeof(PetscScalar));
 84:         }
 85:         VecGetPetscMap(y,&map);
 86:         PetscMapGetGlobalRange(map,&range);
 87:         MPI_Gatherv(yv,yy_n,MPIU_SCALAR,xvt2,scat->count,range,MPIU_SCALAR,0,ctx->comm);
 88: #if defined(PETSC_USE_COMPLEX)
 89:         MPI_Reduce(xv,xvt,2*xx_n,MPIU_REAL,MPI_SUM,0,ctx->comm);
 90: #else
 91:         MPI_Reduce(xv,xvt,xx_n,MPIU_SCALAR,MPI_SUM,0,ctx->comm);
 92: #endif
 93:         if (addv == ADD_VALUES) {
 94:           for (i=0; i<xx_n; i++) {
 95:             xvt[i] += xvt2[i];
 96:           }
 97: #if !defined(PETSC_USE_COMPLEX)
 98:         } else if (addv == MAX_VALUES) {
 99:           for (i=0; i<xx_n; i++) {
100:             xvt[i] = PetscMax(xvt[i],xvt2[i]);
101:           }
102: #endif
103:         } else {SETERRQ(1,"Wrong insert option");}
104:         MPI_Scatterv(xvt,scat->count,map->range,MPIU_SCALAR,yv,yy_n,MPIU_SCALAR,0,ctx->comm);
105:       } else {
106:         VecGetPetscMap(y,&map);
107:         PetscMapGetGlobalRange(map,&range);
108:         MPI_Gatherv(yv,yy_n,MPIU_SCALAR,0, 0,0,MPIU_SCALAR,0,ctx->comm);
109: #if defined(PETSC_USE_COMPLEX)
110:         MPI_Reduce(xv,xvt,2*xx_n,MPIU_REAL,MPI_SUM,0,ctx->comm);
111: #else
112:         MPI_Reduce(xv,xvt,xx_n,MPIU_SCALAR,MPI_SUM,0,ctx->comm);
113: #endif
114:         MPI_Scatterv(0,scat->count,range,MPIU_SCALAR,yv,yy_n,MPIU_SCALAR,0,ctx->comm);
115:       }
116:     }
117:   } else {
118:     PetscScalar          *yvt;
119:     VecScatter_MPI_ToAll *scat = (VecScatter_MPI_ToAll*)ctx->todata;
120:     int                  i;

122:     VecGetPetscMap(x,&map);
123:     PetscMapGetGlobalRange(map,&range);
124:     if (addv == INSERT_VALUES) {
125:       MPI_Allgatherv(xv,xx_n,MPIU_SCALAR,yv,scat->count,range,MPIU_SCALAR,ctx->comm);
126:     } else {
127:       if (scat->work1) yvt = scat->work1;
128:       else {
129:         ierr        = PetscMalloc((yy_n+1)*sizeof(PetscScalar),&yvt);
130:         scat->work1 = yvt;
131:         PetscLogObjectMemory(ctx,yy_n*sizeof(PetscScalar));
132:       }
133:       MPI_Allgatherv(xv,xx_n,MPIU_SCALAR,yvt,scat->count,map->range,MPIU_SCALAR,ctx->comm);
134:       if (addv == ADD_VALUES){
135:         for (i=0; i<yy_n; i++) {
136:           yv[i] += yvt[i];
137:         }
138: #if !defined(PETSC_USE_COMPLEX)
139:       } else if (addv == MAX_VALUES) {
140:         for (i=0; i<yy_n; i++) {
141:           yv[i] = PetscMax(yv[i],yvt[i]);
142:         }
143: #endif
144:       } else {SETERRQ(1,"Wrong insert option");}
145:     }
146:   }
147:   VecRestoreArray(y,&yv);
148:   if (x != y) {VecRestoreArray(x,&xv);}
149:   return(0);
150: }
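/*
   Illustrative usage sketch (hypothetical helper name and sizes; error checking
   omitted, as elsewhere in this listing): the user-level calls that reach
   VecScatterBegin_MPI_ToAll().  Every processor requests the full global range of
   an MPI vector with identity strides, which VecScatterCreate() below recognizes
   as the "all processors get entire parallel vector" special case.
*/
static int ExampleScatterToAll_Sketch(Vec x) /* x is an MPI vector */
{
  int        N;
  Vec        y;
  IS         ix,iy;
  VecScatter scat;

  VecGetSize(x,&N);
  VecCreateSeq(PETSC_COMM_SELF,N,&y);           /* each processor holds a full copy */
  ISCreateStride(PETSC_COMM_SELF,N,0,1,&ix);    /* entries 0..N-1 of the MPI vector */
  ISCreateStride(PETSC_COMM_SELF,N,0,1,&iy);    /* into entries 0..N-1 of y         */
  VecScatterCreate(x,ix,y,iy,&scat);
  VecScatterBegin(x,y,INSERT_VALUES,SCATTER_FORWARD,scat);
  VecScatterEnd(x,y,INSERT_VALUES,SCATTER_FORWARD,scat);
  VecScatterDestroy(scat);
  ISDestroy(ix); ISDestroy(iy);
  VecDestroy(y);
  return(0);
}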

152: /*
153:       This is special scatter code for when the entire parallel vector is 
154:    copied to processor 0.

156: */
157: #undef __FUNCT__
158: #define __FUNCT__ "VecScatterBegin_MPI_ToOne"
159: int VecScatterBegin_MPI_ToOne(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter ctx)
160: {
161:   int          rank,ierr,yy_n,xx_n,*range;
162:   PetscScalar  *xv,*yv;
163:   MPI_Comm     comm;
164:   PetscMap     map;

167:   VecGetLocalSize(y,&yy_n);
168:   VecGetLocalSize(x,&xx_n);
169:   VecGetArray(x,&xv);
170:   VecGetArray(y,&yv);

172:   PetscObjectGetComm((PetscObject)x,&comm);
173:   MPI_Comm_rank(comm,&rank);

175:   /* --------  Reverse scatter; spread from processor 0 to other processors */
176:   if (mode & SCATTER_REVERSE) {
177:     PetscScalar          *yvt;
178:     VecScatter_MPI_ToAll *scat = (VecScatter_MPI_ToAll*)ctx->todata;
179:     int                  i;

181:     VecGetPetscMap(y,&map);
182:     PetscMapGetGlobalRange(map,&range);
183:     if (addv == INSERT_VALUES) {
184:       MPI_Scatterv(xv,scat->count,range,MPIU_SCALAR,yv,yy_n,MPIU_SCALAR,0,ctx->comm);
185:     } else {
186:       if (scat->work2) yvt = scat->work2;
187:       else {
188:         ierr        = PetscMalloc((xx_n+1)*sizeof(PetscScalar),&yvt);
189:         scat->work2 = yvt;
190:         PetscLogObjectMemory(ctx,xx_n*sizeof(PetscScalar));
191:       }
192:       MPI_Scatterv(xv,scat->count,range,MPIU_SCALAR,yvt,yy_n,MPIU_SCALAR,0,ctx->comm);
193:       if (addv == ADD_VALUES) {
194:         for (i=0; i<yy_n; i++) {
195:           yv[i] += yvt[i];
196:         }
197: #if !defined(PETSC_USE_COMPLEX)
198:       } else if (addv == MAX_VALUES) {
199:         for (i=0; i<yy_n; i++) {
200:           yv[i] = PetscMax(yv[i],yvt[i]);
201:         }
202: #endif
203:       } else {SETERRQ(1,"Wrong insert option");}
204:     }
205:   /* ---------  Forward scatter; gather all values onto processor 0 */
206:   } else {
207:     PetscScalar          *yvt  = 0;
208:     VecScatter_MPI_ToAll *scat = (VecScatter_MPI_ToAll*)ctx->todata;
209:     int                  i;

211:     VecGetPetscMap(x,&map);
212:     PetscMapGetGlobalRange(map,&range);
213:     if (addv == INSERT_VALUES) {
214:       MPI_Gatherv(xv,xx_n,MPIU_SCALAR,yv,scat->count,range,MPIU_SCALAR,0,ctx->comm);
215:     } else {
216:       if (!rank) {
217:         if (scat->work1) yvt = scat->work1;
218:         else {
219:           ierr        = PetscMalloc((yy_n+1)*sizeof(PetscScalar),&yvt);
220:           scat->work1 = yvt;
221:           PetscLogObjectMemory(ctx,yy_n*sizeof(PetscScalar));
222:         }
223:       }
224:       MPI_Gatherv(xv,xx_n,MPIU_SCALAR,yvt,scat->count,range,MPIU_SCALAR,0,ctx->comm);
225:       if (!rank) {
226:         if (addv == ADD_VALUES) {
227:           for (i=0; i<yy_n; i++) {
228:             yv[i] += yvt[i];
229:           }
230: #if !defined(PETSC_USE_COMPLEX)
231:         } else if (addv == MAX_VALUES) {
232:           for (i=0; i<yy_n; i++) {
233:             yv[i] = PetscMax(yv[i],yvt[i]);
234:           }
235: #endif
236:         }  else {SETERRQ(1,"Wrong insert option");}
237:       }
238:     }
239:   }
240:   VecRestoreArray(x,&xv);
241:   VecRestoreArray(y,&yv);
242:   return(0);
243: }
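/*
   Illustrative usage sketch (hypothetical helper name; error checking omitted):
   the user-level calls that reach VecScatterBegin_MPI_ToOne().  Processor 0
   requests the whole vector while all other processors request nothing, which
   VecScatterCreate() below recognizes as the "processor zero gets entire parallel
   vector" special case.
*/
static int ExampleGatherToZero_Sketch(Vec x) /* x is an MPI vector */
{
  int        rank,N,n;
  MPI_Comm   comm;
  Vec        y;
  IS         ix,iy;
  VecScatter scat;

  PetscObjectGetComm((PetscObject)x,&comm);
  MPI_Comm_rank(comm,&rank);
  VecGetSize(x,&N);
  n = rank ? 0 : N;                             /* only processor 0 receives values */
  VecCreateSeq(PETSC_COMM_SELF,n,&y);
  ISCreateStride(PETSC_COMM_SELF,n,0,1,&ix);
  ISCreateStride(PETSC_COMM_SELF,n,0,1,&iy);
  VecScatterCreate(x,ix,y,iy,&scat);
  VecScatterBegin(x,y,INSERT_VALUES,SCATTER_FORWARD,scat);
  VecScatterEnd(x,y,INSERT_VALUES,SCATTER_FORWARD,scat);
  VecScatterDestroy(scat);
  ISDestroy(ix); ISDestroy(iy);
  VecDestroy(y);
  return(0);
}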

245: /*
 246:        The following two are used by both VecScatterBegin_MPI_ToAll() and VecScatterBegin_MPI_ToOne()
247: */
248: #undef __FUNCT__
249: #define __FUNCT__ "VecScatterDestroy_MPI_ToAll"
250: int VecScatterDestroy_MPI_ToAll(VecScatter ctx)
251: {
252:   VecScatter_MPI_ToAll *scat = (VecScatter_MPI_ToAll*)ctx->todata;

256:   PetscFree(scat->count);
257:   if (scat->work1) {PetscFree(scat->work1);}
258:   if (scat->work2) {PetscFree(scat->work2);}
259:   PetscFree(ctx->todata);
260:   PetscLogObjectDestroy(ctx);
261:   PetscHeaderDestroy(ctx);
262:   return(0);
263: }

265: #undef __FUNCT__
266: #define __FUNCT__ "VecScatterCopy_MPI_ToAll"
267: int VecScatterCopy_MPI_ToAll(VecScatter in,VecScatter out)
268: {
269:   VecScatter_MPI_ToAll *in_to = (VecScatter_MPI_ToAll*)in->todata,*sto;
270:   int                  size,i,ierr;

273:   out->postrecvs      = 0;
274:   out->begin          = in->begin;
275:   out->end            = in->end;
276:   out->copy           = in->copy;
277:   out->destroy        = in->destroy;
278:   out->view           = in->view;

280:   ierr      = PetscNew(VecScatter_MPI_ToAll,&sto);
281:   sto->type = in_to->type;

283:   MPI_Comm_size(out->comm,&size);
284:   PetscMalloc(size*sizeof(int),&sto->count);
285:   for (i=0; i<size; i++) {
286:     sto->count[i] = in_to->count[i];
287:   }
288:   sto->work1         = 0;
289:   sto->work2         = 0;
290:   PetscLogObjectMemory(out,sizeof(VecScatter_MPI_ToAll)+size*sizeof(int));
291:   out->todata        = (void*)sto;
292:   out->fromdata      = (void*)0;
293:   return(0);
294: }

296: /* --------------------------------------------------------------------------------------*/
297: /* 
298:         Scatter: sequential general to sequential general 
299: */
300: #undef __FUNCT__
301: #define __FUNCT__ "VecScatterBegin_SGtoSG"
302: int VecScatterBegin_SGtoSG(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter ctx)
303: {
304:   VecScatter_Seq_General *gen_to = (VecScatter_Seq_General*)ctx->todata;
305:   VecScatter_Seq_General *gen_from = (VecScatter_Seq_General*)ctx->fromdata;
306:   int                    i,n = gen_from->n,*fslots,*tslots,ierr;
307:   PetscScalar            *xv,*yv;
308: 
310:   VecGetArray(x,&xv);
311:   if (x != y) {VecGetArray(y,&yv);} else {yv = xv;}
312:   if (mode & SCATTER_REVERSE){
313:     gen_to   = (VecScatter_Seq_General*)ctx->fromdata;
314:     gen_from = (VecScatter_Seq_General*)ctx->todata;
315:   }
316:   fslots = gen_from->slots;
317:   tslots = gen_to->slots;

319:   if (addv == INSERT_VALUES) {
320:     for (i=0; i<n; i++) {yv[tslots[i]] = xv[fslots[i]];}
321:   } else if (addv == ADD_VALUES) {
322:     for (i=0; i<n; i++) {yv[tslots[i]] += xv[fslots[i]];}
323: #if !defined(PETSC_USE_COMPLEX)
324:   } else  if (addv == MAX_VALUES) {
325:     for (i=0; i<n; i++) {yv[tslots[i]] = PetscMax(yv[tslots[i]],xv[fslots[i]]);}
326: #endif
327:   } else {SETERRQ(1,"Wrong insert option");}
328:   VecRestoreArray(x,&xv);
329:   if (x != y) {VecRestoreArray(y,&yv);}
330:   return(0);
331: }
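/*
   Illustrative usage sketch (hypothetical helper name and index values; error
   checking omitted): a purely sequential general-to-general scatter, which
   VecScatterCreate() below maps onto VecScatterBegin_SGtoSG().  The four entries
   of x are written into y in reverse order.
*/
static int ExampleSeqGeneralScatter_Sketch(void)
{
  int        n = 4;
  int        from[4] = {0,1,2,3},to[4] = {3,2,1,0};
  Vec        x,y;
  IS         ix,iy;
  VecScatter scat;

  VecCreateSeq(PETSC_COMM_SELF,n,&x);
  VecCreateSeq(PETSC_COMM_SELF,n,&y);
  ISCreateGeneral(PETSC_COMM_SELF,n,from,&ix);
  ISCreateGeneral(PETSC_COMM_SELF,n,to,&iy);
  VecScatterCreate(x,ix,y,iy,&scat);
  VecScatterBegin(x,y,INSERT_VALUES,SCATTER_FORWARD,scat);   /* y[to[i]] = x[from[i]] */
  VecScatterEnd(x,y,INSERT_VALUES,SCATTER_FORWARD,scat);
  VecScatterDestroy(scat);
  ISDestroy(ix); ISDestroy(iy);
  VecDestroy(x); VecDestroy(y);
  return(0);
}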

333: /* 
334:     Scatter: sequential general to sequential stride 1 
335: */
336: #undef __FUNCT__
337: #define __FUNCT__ "VecScatterBegin_SGtoSS_Stride1"
338: int VecScatterBegin_SGtoSS_Stride1(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter ctx)
339: {
340:   VecScatter_Seq_Stride  *gen_to   = (VecScatter_Seq_Stride*)ctx->todata;
341:   VecScatter_Seq_General *gen_from = (VecScatter_Seq_General*)ctx->fromdata;
342:   int                    i,n = gen_from->n,*fslots = gen_from->slots;
343:   int                    first = gen_to->first,ierr;
344:   PetscScalar            *xv,*yv;
345: 
347:   VecGetArray(x,&xv);
348:   if (x != y) {VecGetArray(y,&yv);} else {yv = xv;}
349:   if (mode & SCATTER_REVERSE){
350:     xv += first;
351:     if (addv == INSERT_VALUES) {
352:       for (i=0; i<n; i++) {yv[fslots[i]] = xv[i];}
353:     } else  if (addv == ADD_VALUES) {
354:       for (i=0; i<n; i++) {yv[fslots[i]] += xv[i];}
355: #if !defined(PETSC_USE_COMPLEX)
356:     } else  if (addv == MAX_VALUES) {
357:       for (i=0; i<n; i++) {yv[fslots[i]] = PetscMax(yv[fslots[i]],xv[i]);}
358: #endif
359:     } else {SETERRQ(1,"Wrong insert option");}
360:   } else {
361:     yv += first;
362:     if (addv == INSERT_VALUES) {
363:       for (i=0; i<n; i++) {yv[i] = xv[fslots[i]];}
364:     } else  if (addv == ADD_VALUES) {
365:       for (i=0; i<n; i++) {yv[i] += xv[fslots[i]];}
366: #if !defined(PETSC_USE_COMPLEX)
367:     } else if (addv == MAX_VALUES) {
368:       for (i=0; i<n; i++) {yv[i] = PetscMax(yv[i],xv[fslots[i]]);}
369: #endif
370:     } else {SETERRQ(1,"Wrong insert option");}
371:   }
372:   VecRestoreArray(x,&xv);
373:   if (x != y) {VecRestoreArray(y,&yv);}
374:   return(0);
375: }

377: /* 
378:    Scatter: sequential general to sequential stride 
379: */
380: #undef __FUNCT__
381: #define __FUNCT__ "VecScatterBegin_SGtoSS"
382: int VecScatterBegin_SGtoSS(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter ctx)
383: {
384:   VecScatter_Seq_Stride  *gen_to   = (VecScatter_Seq_Stride*)ctx->todata;
385:   VecScatter_Seq_General *gen_from = (VecScatter_Seq_General*)ctx->fromdata;
386:   int                    i,n = gen_from->n,*fslots = gen_from->slots;
387:   int                    first = gen_to->first,step = gen_to->step,ierr;
388:   PetscScalar            *xv,*yv;
389: 
391:   VecGetArray(x,&xv);
392:   if (x != y) {VecGetArray(y,&yv);} else {yv = xv;}

394:   if (mode & SCATTER_REVERSE){
395:     if (addv == INSERT_VALUES) {
396:       for (i=0; i<n; i++) {yv[fslots[i]] = xv[first + i*step];}
397:     } else if (addv == ADD_VALUES) {
398:       for (i=0; i<n; i++) {yv[fslots[i]] += xv[first + i*step];}
399: #if !defined(PETSC_USE_COMPLEX)
400:     } else if (addv == MAX_VALUES) {
401:       for (i=0; i<n; i++) {yv[fslots[i]] = PetscMax(yv[fslots[i]],xv[first + i*step]);}
402: #endif
403:     } else {SETERRQ(1,"Wrong insert option");}
404:   } else {
405:     if (addv == INSERT_VALUES) {
406:       for (i=0; i<n; i++) {yv[first + i*step] = xv[fslots[i]];}
407:     } else if (addv == ADD_VALUES) {
408:       for (i=0; i<n; i++) {yv[first + i*step] += xv[fslots[i]];}
409: #if !defined(PETSC_USE_COMPLEX)
410:     } else if (addv == MAX_VALUES) {
411:       for (i=0; i<n; i++) {yv[first + i*step] = PetscMax(yv[first + i*step],xv[fslots[i]]);}
412: #endif
413:     } else {SETERRQ(1,"Wrong insert option");}
414:   }
415:   VecRestoreArray(x,&xv);
416:   if (x != y) {VecRestoreArray(y,&yv);}
417:   return(0);
418: }

420: /* 
421:     Scatter: sequential stride 1 to sequential general 
422: */
423: #undef __FUNCT__
424: #define __FUNCT__ "VecScatterBegin_SStoSG_Stride1"
425: int VecScatterBegin_SStoSG_Stride1(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter ctx)
426: {
427:   VecScatter_Seq_Stride  *gen_from = (VecScatter_Seq_Stride*)ctx->fromdata;
428:   VecScatter_Seq_General *gen_to   = (VecScatter_Seq_General*)ctx->todata;
429:   int                    i,n = gen_from->n,*fslots = gen_to->slots;
430:   int                    first = gen_from->first,ierr;
431:   PetscScalar            *xv,*yv;
432: 
434:   VecGetArray(x,&xv);
435:   if (x != y) {VecGetArray(y,&yv);} else {yv = xv;}

437:   if (mode & SCATTER_REVERSE){
438:     yv += first;
439:     if (addv == INSERT_VALUES) {
440:       for (i=0; i<n; i++) {yv[i] = xv[fslots[i]];}
441:     } else  if (addv == ADD_VALUES) {
442:       for (i=0; i<n; i++) {yv[i] += xv[fslots[i]];}
443: #if !defined(PETSC_USE_COMPLEX)
444:     } else  if (addv == MAX_VALUES) {
445:       for (i=0; i<n; i++) {yv[i] = PetscMax(yv[i],xv[fslots[i]]);}
446: #endif
447:     } else {SETERRQ(1,"Wrong insert option");}
448:   } else {
449:     xv += first;
450:     if (addv == INSERT_VALUES) {
451:       for (i=0; i<n; i++) {yv[fslots[i]] = xv[i];}
452:     } else  if (addv == ADD_VALUES) {
453:       for (i=0; i<n; i++) {yv[fslots[i]] += xv[i];}
454: #if !defined(PETSC_USE_COMPLEX)
455:     } else  if (addv == MAX_VALUES) {
456:       for (i=0; i<n; i++) {yv[fslots[i]] = PetscMax(yv[fslots[i]],xv[i]);}
457: #endif
458:     } else {SETERRQ(1,"Wrong insert option");}
459:   }
460:   VecRestoreArray(x,&xv);
461:   if (x != y) {VecRestoreArray(y,&yv);}
462:   return(0);
463: }

465: #undef __FUNCT__
466: #define __FUNCT__ "VecScatterBegin_SStoSG"
467: /* 
468:    Scatter: sequential stride to sequential general 
469: */
470: int VecScatterBegin_SStoSG(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter ctx)
471: {
472:   VecScatter_Seq_Stride  *gen_from = (VecScatter_Seq_Stride*)ctx->fromdata;
473:   VecScatter_Seq_General *gen_to   = (VecScatter_Seq_General*)ctx->todata;
474:   int                    i,n = gen_from->n,*fslots = gen_to->slots;
475:   int                    first = gen_from->first,step = gen_from->step,ierr;
476:   PetscScalar            *xv,*yv;
477: 
479:   VecGetArray(x,&xv);
480:   if (x != y) {VecGetArray(y,&yv);} else {yv = xv;}

482:   if (mode & SCATTER_REVERSE){
483:     if (addv == INSERT_VALUES) {
484:       for (i=0; i<n; i++) {yv[first + i*step] = xv[fslots[i]];}
485:     } else  if (addv == ADD_VALUES) {
486:       for (i=0; i<n; i++) {yv[first + i*step] += xv[fslots[i]];}
487: #if !defined(PETSC_USE_COMPLEX)
488:     } else  if (addv == MAX_VALUES) {
489:       for (i=0; i<n; i++) {yv[first + i*step] = PetscMax(yv[first + i*step],xv[fslots[i]]);}
490: #endif
491:     } else {SETERRQ(1,"Wrong insert option");}
492:   } else {
493:     if (addv == INSERT_VALUES) {
494:       for (i=0; i<n; i++) {yv[fslots[i]] = xv[first + i*step];}
495:     } else  if (addv == ADD_VALUES) {
496:       for (i=0; i<n; i++) {yv[fslots[i]] += xv[first + i*step];}
497: #if !defined(PETSC_USE_COMPLEX)
498:     } else  if (addv == MAX_VALUES) {
499:       for (i=0; i<n; i++) {yv[fslots[i]] = PetscMax(yv[fslots[i]],xv[first + i*step]);}
500: #endif
501:     } else {SETERRQ(1,"Wrong insert option");}
502:   }
503:   VecRestoreArray(x,&xv);
504:   if (x != y) {VecRestoreArray(y,&yv);}
505:   return(0);
506: }

508: /* 
509:      Scatter: sequential stride to sequential stride 
510: */
511: #undef __FUNCT__
512: #define __FUNCT__ "VecScatterBegin_SStoSS"
513: int VecScatterBegin_SStoSS(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter ctx)
514: {
515:   VecScatter_Seq_Stride *gen_to   = (VecScatter_Seq_Stride*)ctx->todata;
516:   VecScatter_Seq_Stride *gen_from = (VecScatter_Seq_Stride*)ctx->fromdata;
517:   int                   i,n = gen_from->n,to_first = gen_to->first,to_step = gen_to->step;
518:   int                   from_first = gen_from->first,from_step = gen_from->step,ierr;
519:   PetscScalar           *xv,*yv;
520: 
522:   VecGetArray(x,&xv);
523:   if (x != y) {VecGetArray(y,&yv);} else {yv = xv;}

525:   if (mode & SCATTER_REVERSE){
526:     from_first = gen_to->first;
527:     to_first   = gen_from->first;
528:     from_step  = gen_to->step;
529:     to_step    = gen_from->step;
530:   }

532:   if (addv == INSERT_VALUES) {
533:     if (to_step == 1 && from_step == 1) {
534:       PetscMemcpy(yv+to_first,xv+from_first,n*sizeof(PetscScalar));
535:     } else  {
536:       for (i=0; i<n; i++) {
537:         yv[to_first + i*to_step] = xv[from_first+i*from_step];
538:       }
539:     }
540:   } else if (addv == ADD_VALUES) {
541:     if (to_step == 1 && from_step == 1) {
542:       yv += to_first; xv += from_first;
543:       for (i=0; i<n; i++) {
544:         yv[i] += xv[i];
545:       }
546:     } else {
547:       for (i=0; i<n; i++) {
548:         yv[to_first + i*to_step] += xv[from_first+i*from_step];
549:       }
550:     }
551: #if !defined(PETSC_USE_COMPLEX)
552:   } else if (addv == MAX_VALUES) {
553:     if (to_step == 1 && from_step == 1) {
554:       yv += to_first; xv += from_first;
555:       for (i=0; i<n; i++) {
556:         yv[i] = PetscMax(yv[i],xv[i]);
557:       }
558:     } else {
559:       for (i=0; i<n; i++) {
560:         yv[to_first + i*to_step] = PetscMax(yv[to_first + i*to_step],xv[from_first+i*from_step]);
561:       }
562:     }
563: #endif
564:   } else {SETERRQ(1,"Wrong insert option");}
565:   VecRestoreArray(x,&xv);
566:   if (x != y) {VecRestoreArray(y,&yv);}
567:   return(0);
568: }
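/*
   Illustrative usage sketch (hypothetical helper name and sizes; error checking
   omitted): a sequential stride-to-stride scatter, handled by
   VecScatterBegin_SStoSS() above.  Every second entry of x is packed contiguously
   into y.
*/
static int ExampleStrideScatter_Sketch(void)
{
  int        n = 8;
  Vec        x,y;
  IS         ix,iy;
  VecScatter scat;

  VecCreateSeq(PETSC_COMM_SELF,n,&x);
  VecCreateSeq(PETSC_COMM_SELF,n/2,&y);
  ISCreateStride(PETSC_COMM_SELF,n/2,0,2,&ix);   /* entries 0,2,4,6 of x */
  ISCreateStride(PETSC_COMM_SELF,n/2,0,1,&iy);   /* entries 0,1,2,3 of y */
  VecScatterCreate(x,ix,y,iy,&scat);
  VecScatterBegin(x,y,INSERT_VALUES,SCATTER_FORWARD,scat);
  VecScatterEnd(x,y,INSERT_VALUES,SCATTER_FORWARD,scat);
  VecScatterDestroy(scat);
  ISDestroy(ix); ISDestroy(iy);
  VecDestroy(x); VecDestroy(y);
  return(0);
}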

570: /* --------------------------------------------------------------------------*/


573: #undef __FUNCT__
574: #define __FUNCT__ "VecScatterCopy_SGToSG"
575: int VecScatterCopy_SGToSG(VecScatter in,VecScatter out)
576: {
577:   int                    ierr;
578:   VecScatter_Seq_General *in_to   = (VecScatter_Seq_General*)in->todata,*out_to;
579:   VecScatter_Seq_General *in_from = (VecScatter_Seq_General*)in->fromdata,*out_from;
580: 
582:   out->postrecvs     = 0;
583:   out->begin         = in->begin;
584:   out->end           = in->end;
585:   out->copy          = in->copy;
586:   out->destroy       = in->destroy;
587:   out->view          = in->view;

589:   ierr                           = PetscMalloc(in_to->n*sizeof(int)+sizeof(VecScatter_Seq_General),&out_to);
590:   out_to->n                      = in_to->n;
591:   out_to->type                   = in_to->type;
592:   out_to->nonmatching_computed   = PETSC_FALSE;
593:   out_to->slots_nonmatching      = 0;
594:   out_to->is_copy                = PETSC_FALSE;
595:   out_to->slots                  = (int*)(out_to + 1);
596:   PetscMemcpy(out_to->slots,in_to->slots,(out_to->n)*sizeof(int));

598:   ierr                           = PetscMalloc(in_from->n*sizeof(int)+sizeof(VecScatter_Seq_General),&out_from);
599:   out_from->n                    = in_from->n;
600:   out_from->type                 = in_from->type;
601:   out_from->nonmatching_computed = PETSC_FALSE;
602:   out_from->slots_nonmatching    = 0;
603:   out_from->is_copy              = PETSC_FALSE;
604:   out_from->slots                = (int*)(out_from + 1);
605:   PetscMemcpy(out_from->slots,in_from->slots,(out_from->n)*sizeof(int));

607:   PetscLogObjectMemory(out,2*sizeof(VecScatter_Seq_General)+(out_from->n+out_to->n)*sizeof(int));
608:   out->todata     = (void*)out_to;
609:   out->fromdata   = (void*)out_from;
610:   return(0);
611: }

613: #undef __FUNCT__
614: #define __FUNCT__ "VecScatterDestroy_SGtoSG"
615: int VecScatterDestroy_SGtoSG(VecScatter ctx)
616: {

620:   PetscFree(ctx->todata);
621:   PetscFree(ctx->fromdata);
622:   PetscLogObjectDestroy(ctx);
623:   PetscHeaderDestroy(ctx);
624:   return(0);
625: }

627: #undef __FUNCT__
628: #define __FUNCT__ "VecScatterCopy_SGToStride"
629: int VecScatterCopy_SGToStride(VecScatter in,VecScatter out)
 630: {
 631:   int                    ierr;
632:   VecScatter_Seq_Stride  *in_to   = (VecScatter_Seq_Stride*)in->todata,*out_to;
633:   VecScatter_Seq_General *in_from = (VecScatter_Seq_General*)in->fromdata,*out_from;
634: 
636:   out->postrecvs     = 0;
637:   out->begin         = in->begin;
638:   out->end           = in->end;
639:   out->copy          = in->copy;
640:   out->destroy       = in->destroy;
641:   out->view          = in->view;

643:   ierr            = PetscNew(VecScatter_Seq_Stride,&out_to);
644:   out_to->n       = in_to->n;
645:   out_to->type    = in_to->type;
646:   out_to->first   = in_to->first;
647:   out_to->step    = in_to->step;
648:   out_to->type    = in_to->type;

650:   ierr                           = PetscMalloc(in_from->n*sizeof(int)+sizeof(VecScatter_Seq_General),&out_from);
651:   out_from->n                    = in_from->n;
652:   out_from->type                 = in_from->type;
653:   out_from->nonmatching_computed = PETSC_FALSE;
654:   out_from->slots_nonmatching    = 0;
655:   out_from->is_copy              = PETSC_FALSE;
656:   out_from->slots                = (int*)(out_from + 1);
657:   PetscMemcpy(out_from->slots,in_from->slots,(out_from->n)*sizeof(int));

659:   PetscLogObjectMemory(out,sizeof(VecScatter_Seq_General)+sizeof(VecScatter_Seq_Stride)+in_from->n*sizeof(int));
660:   out->todata     = (void*)out_to;
661:   out->fromdata   = (void*)out_from;
662:   return(0);
663: }

665: /* --------------------------------------------------------------------------*/
666: /* 
667:     Scatter: parallel to sequential vector, sequential strides for both. 
668: */
669: #undef __FUNCT__
670: #define __FUNCT__ "VecScatterCopy_PStoSS"
671: int VecScatterCopy_PStoSS(VecScatter in,VecScatter out)
672: {
673:   VecScatter_Seq_Stride *in_to   = (VecScatter_Seq_Stride*)in->todata,*out_to;
674:   VecScatter_Seq_Stride *in_from = (VecScatter_Seq_Stride*)in->fromdata,*out_from;
675:   int                   ierr;

678:   out->postrecvs  = 0;
679:   out->begin      = in->begin;
680:   out->end        = in->end;
681:   out->copy       = in->copy;
682:   out->destroy    = in->destroy;
683:   out->view       = in->view;

685:   ierr            = PetscNew(VecScatter_Seq_Stride,&out_to);
686:   out_to->n       = in_to->n;
687:   out_to->type    = in_to->type;
688:   out_to->first   = in_to->first;
689:   out_to->step    = in_to->step;
690:   out_to->type    = in_to->type;
691:   ierr            = PetscNew(VecScatter_Seq_Stride,&out_from);
692:   PetscLogObjectMemory(out,2*sizeof(VecScatter_Seq_Stride));
693:   out_from->n     = in_from->n;
694:   out_from->type  = in_from->type;
695:   out_from->first = in_from->first;
696:   out_from->step  = in_from->step;
697:   out_from->type  = in_from->type;
698:   out->todata     = (void*)out_to;
699:   out->fromdata   = (void*)out_from;
700:   return(0);
701: }

703: EXTERN int VecScatterCreate_PtoS(int,int *,int,int *,Vec,Vec,int,VecScatter);
704: EXTERN int VecScatterCreate_PtoP(int,int *,int,int *,Vec,Vec,VecScatter);
705: EXTERN int VecScatterCreate_StoP(int,int *,int,int *,Vec,VecScatter);

707: /* =======================================================================*/
708: #define VEC_SEQ_ID 0
709: #define VEC_MPI_ID 1

711: #undef __FUNCT__
712: #define __FUNCT__ "VecScatterCreate"
713: /*@C
714:    VecScatterCreate - Creates a vector scatter context.

716:    Collective on Vec

718:    Input Parameters:
719: +  xin - a vector that defines the shape (parallel data layout of the vector)
720:          of vectors from which we scatter
721: .  yin - a vector that defines the shape (parallel data layout of the vector)
722:          of vectors to which we scatter
723: .  ix - the indices of xin to scatter
724: -  iy - the indices of yin to hold results

726:    Output Parameter:
727: .  newctx - location to store the new scatter context

729:    Options Database Keys:
730: +  -vecscatter_merge     - Merges scatter send and receive (may offer better performance with some MPIs)
731: .  -vecscatter_ssend     - Uses MPI_Ssend_init() instead of MPI_Send_init() (may offer better performance with some MPIs)
732: .  -vecscatter_sendfirst - Posts sends before receives (may offer better performance with some MPIs)
733: .  -vecscatter_rr        - use ready receiver mode for MPI sends in scatters (rarely used)
734: -  -vecscatter_packtogether - Pack all messages before sending, receive all messages before unpacking
735:     Level: intermediate

737:   Notes:
738:    In calls to VecScatter() you can use different vectors than the xin and 
739:    yin you used above; BUT they must have the same parallel data layout, for example,
740:    they could be obtained from VecDuplicate().
741:    A VecScatter context CANNOT be used in two or more simultaneous scatters;
742:    that is you cannot call a second VecScatterBegin() with the same scatter
743:    context until the VecScatterEnd() has been called on the first VecScatterBegin().
744:    In this case a separate VecScatter is needed for each concurrent scatter.

746:    Concepts: scatter^between vectors
747:    Concepts: gather^between vectors

749: .seealso: VecScatterDestroy()
750: @*/
751: int VecScatterCreate(Vec xin,IS ix,Vec yin,IS iy,VecScatter *newctx)
752: {
753:   VecScatter ctx;
754:   int        len,size,cando,totalv,ierr,*range,xin_type = VEC_SEQ_ID,yin_type = VEC_SEQ_ID;
755:   PetscTruth flag;
756:   MPI_Comm   comm,ycomm;
757:   PetscTruth ixblock,iyblock,iystride,islocal;
758:   IS         tix = 0,tiy = 0;


762:   /*
 763:       Determine if the vectors are "parallel", i.e., they share a comm with other processors, or
 764:       sequential (they do not share a comm). The difference is that parallel vectors treat the 
 765:       index set as providing indices in the global parallel numbering of the vector, while
 766:       sequential vectors treat the index set as providing indices in the local sequential
767:       numbering
768:   */
769:   PetscObjectGetComm((PetscObject)xin,&comm);
770:   MPI_Comm_size(comm,&size);
771:   if (size > 1) {xin_type = VEC_MPI_ID;}

773:   PetscObjectGetComm((PetscObject)yin,&ycomm);
774:   MPI_Comm_size(ycomm,&size);
775:   if (size > 1) {comm = ycomm; yin_type = VEC_MPI_ID;}
776: 
777:   /* generate the Scatter context */
778:   PetscHeaderCreate(ctx,_p_VecScatter,int,VEC_SCATTER_COOKIE,0,"VecScatter",comm,VecScatterDestroy,VecScatterView);
779:   PetscLogObjectCreate(ctx);
780:   PetscLogObjectMemory(ctx,sizeof(struct _p_VecScatter));
781:   ctx->inuse               = PETSC_FALSE;

783:   ctx->beginandendtogether = PETSC_FALSE;
784:   PetscOptionsHasName(PETSC_NULL,"-vecscatter_merge",&ctx->beginandendtogether);
785:   if (ctx->beginandendtogether) {
 786:     PetscLogInfo(ctx,"VecScatterCreate:Using combined (merged) vector scatter begin and end\n");
787:   }
788:   PetscOptionsHasName(PETSC_NULL,"-vecscatter_packtogether",&ctx->packtogether);
789:   if (ctx->packtogether) {
 790:     PetscLogInfo(ctx,"VecScatterCreate:Pack all messages before sending\n");
791:   }

793:   VecGetLocalSize(xin,&ctx->from_n);
794:   VecGetLocalSize(yin,&ctx->to_n);

796:   /*
 797:       if ix or iy is not included, assume just grabbing the entire vector
798:   */
799:   if (!ix && xin_type == VEC_SEQ_ID) {
800:     ISCreateStride(comm,ctx->from_n,0,1,&ix);
801:     tix  = ix;
802:   } else if (!ix && xin_type == VEC_MPI_ID) {
803:     int bign;
804:     VecGetSize(xin,&bign);
805:     ISCreateStride(comm,bign,0,1,&ix);
806:     tix  = ix;
807:   } else if (!ix) {
 808:     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"ix not given, but not Seq or MPI vector");
809:   }

811:   if (!iy && yin_type == VEC_SEQ_ID) {
812:     ISCreateStride(comm,ctx->to_n,0,1,&iy);
813:     tiy  = iy;
814:   } else if (!iy && yin_type == VEC_MPI_ID) {
815:     int bign;
816:     VecGetSize(yin,&bign);
817:     ISCreateStride(comm,bign,0,1,&iy);
818:     tiy  = iy;
819:   } else if (!iy) {
820:     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"iy not given, but not Seq or MPI vector");
821:   }

823:   /* ===========================================================================================================
824:         Check for special cases
825:      ==========================================================================================================*/
826:   /* ---------------------------------------------------------------------------*/
827:   if (xin_type == VEC_SEQ_ID && yin_type == VEC_SEQ_ID) {
828:     if (ix->type == IS_GENERAL && iy->type == IS_GENERAL){
829:       int                    nx,ny,*idx,*idy;
830:       VecScatter_Seq_General *to,*from;

832:       ISGetLocalSize(ix,&nx);
833:       ISGetLocalSize(iy,&ny);
834:       if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
835:       ISGetIndices(ix,&idx);
836:       ISGetIndices(iy,&idy);
837:       len  = sizeof(VecScatter_Seq_General) + nx*sizeof(int);
838:       PetscMalloc(len,&to);
839:       PetscLogObjectMemory(ctx,2*len);
840:       to->slots         = (int*)(to + 1);
841:       to->n             = nx;
842:       VecScatterCheckIndices_Private(ctx->to_n,ny,idy);
843:       PetscMemcpy(to->slots,idy,nx*sizeof(int));
844:       PetscMalloc(len,&from);
845:       from->slots       = (int*)(from + 1);
846:       from->n           = nx;
847:       VecScatterCheckIndices_Private(ctx->from_n,nx,idx);
 848:       PetscMemcpy(from->slots,idx,nx*sizeof(int));
849:       to->type          = VEC_SCATTER_SEQ_GENERAL;
850:       from->type        = VEC_SCATTER_SEQ_GENERAL;
851:       ctx->todata       = (void*)to;
852:       ctx->fromdata     = (void*)from;
853:       ctx->postrecvs    = 0;
854:       ctx->begin        = VecScatterBegin_SGtoSG;
855:       ctx->end          = 0;
856:       ctx->destroy      = VecScatterDestroy_SGtoSG;
857:       ctx->copy         = VecScatterCopy_SGToSG;
 858:       PetscLogInfo(xin,"VecScatterCreate:Special case: sequential vector general scatter\n");
859:       goto functionend;
860:     } else if (ix->type == IS_STRIDE &&  iy->type == IS_STRIDE){
861:       int                    nx,ny,to_first,to_step,from_first,from_step;
862:       VecScatter_Seq_Stride  *from8,*to8;

864:       ISGetLocalSize(ix,&nx);
865:       ISGetLocalSize(iy,&ny);
866:       if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
867:       ISStrideGetInfo(iy,&to_first,&to_step);
868:       ISStrideGetInfo(ix,&from_first,&from_step);
869:       ierr               = PetscNew(VecScatter_Seq_Stride,&to8);
870:       to8->n             = nx;
871:       to8->first         = to_first;
872:       to8->step          = to_step;
873:       ierr               = PetscNew(VecScatter_Seq_Stride,&from8);
874:       PetscLogObjectMemory(ctx,2*sizeof(VecScatter_Seq_Stride));
875:       from8->n           = nx;
876:       from8->first       = from_first;
877:       from8->step        = from_step;
878:       to8->type          = VEC_SCATTER_SEQ_STRIDE;
879:       from8->type        = VEC_SCATTER_SEQ_STRIDE;
880:       ctx->todata       = (void*)to8;
881:       ctx->fromdata     = (void*)from8;
882:       ctx->postrecvs    = 0;
883:       ctx->begin        = VecScatterBegin_SStoSS;
884:       ctx->end          = 0;
885:       ctx->destroy      = VecScatterDestroy_SGtoSG;
886:       ctx->copy         = 0;
 887:       PetscLogInfo(xin,"VecScatterCreate:Special case: sequential vector stride to stride\n");
888:       goto functionend;
889:     } else if (ix->type == IS_GENERAL && iy->type == IS_STRIDE){
890:       int                    nx,ny,*idx,first,step;
891:       VecScatter_Seq_General *from9;
892:       VecScatter_Seq_Stride  *to9;

894:       ISGetLocalSize(ix,&nx);
895:       ISGetIndices(ix,&idx);
896:       ISGetLocalSize(iy,&ny);
897:       ISStrideGetInfo(iy,&first,&step);
898:       if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
899:       ierr           = PetscNew(VecScatter_Seq_Stride,&to9);
900:       to9->n         = nx;
901:       to9->first     = first;
902:       to9->step      = step;
903:       len            = sizeof(VecScatter_Seq_General) + nx*sizeof(int);
904:       PetscMalloc(len,&from9);
905:       PetscLogObjectMemory(ctx,len + sizeof(VecScatter_Seq_Stride));
906:       from9->slots   = (int*)(from9 + 1);
907:       from9->n       = nx;
908:       ierr           = VecScatterCheckIndices_Private(ctx->from_n,nx,idx);
909:       ierr           = PetscMemcpy(from9->slots,idx,nx*sizeof(int));
910:       ctx->todata    = (void*)to9; ctx->fromdata = (void*)from9;
911:       ctx->postrecvs = 0;
912:       if (step == 1)  ctx->begin = VecScatterBegin_SGtoSS_Stride1;
913:       else            ctx->begin = VecScatterBegin_SGtoSS;
914:       ctx->destroy = VecScatterDestroy_SGtoSG;
915:       ctx->end     = 0;
916:       ctx->copy    = VecScatterCopy_SGToStride;
917:       to9->type    = VEC_SCATTER_SEQ_STRIDE;
918:       from9->type  = VEC_SCATTER_SEQ_GENERAL;
 919:       PetscLogInfo(xin,"VecScatterCreate:Special case: sequential vector general to stride\n");
920:       goto functionend;
921:     } else if (ix->type == IS_STRIDE && iy->type == IS_GENERAL){
922:       int                    nx,ny,*idy,first,step;
923:       VecScatter_Seq_General *to10;
924:       VecScatter_Seq_Stride  *from10;

926:       ISGetLocalSize(ix,&nx);
927:       ISGetIndices(iy,&idy);
928:       ISGetLocalSize(iy,&ny);
929:       ISStrideGetInfo(ix,&first,&step);
930:       if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
931:       ierr              = PetscNew(VecScatter_Seq_Stride,&from10);
932:       from10->n         = nx;
933:       from10->first     = first;
934:       from10->step      = step;
935:       len               = sizeof(VecScatter_Seq_General) + nx*sizeof(int);
936:       PetscMalloc(len,&to10);
937:       PetscLogObjectMemory(ctx,len + sizeof(VecScatter_Seq_Stride));
938:       to10->slots       = (int*)(to10 + 1);
939:       to10->n           = nx;
940:       VecScatterCheckIndices_Private(ctx->to_n,ny,idy);
941:       PetscMemcpy(to10->slots,idy,nx*sizeof(int));
942:       ctx->todata     = (void*)to10;
943:       ctx->fromdata   = (void*)from10;
944:       ctx->postrecvs  = 0;
945:       if (step == 1) ctx->begin = VecScatterBegin_SStoSG_Stride1;
946:       else           ctx->begin = VecScatterBegin_SStoSG;
947:       ctx->destroy    = VecScatterDestroy_SGtoSG;
948:       ctx->end        = 0;
949:       ctx->copy       = 0;
950:       to10->type      = VEC_SCATTER_SEQ_GENERAL;
951:       from10->type    = VEC_SCATTER_SEQ_STRIDE;
 952:       PetscLogInfo(xin,"VecScatterCreate:Special case: sequential vector stride to general\n");
953:       goto functionend;
954:     } else {
955:       int                    nx,ny,*idx,*idy;
956:       VecScatter_Seq_General *to11,*from11;
957:       PetscTruth             idnx,idny;

959:       ISGetLocalSize(ix,&nx);
960:       ISGetLocalSize(iy,&ny);
961:       if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");

963:       ISIdentity(ix,&idnx);
964:       ISIdentity(iy,&idny);
965:       if (idnx && idny) {
966:         VecScatter_Seq_Stride *to13,*from13;
967:         ierr              = PetscNew(VecScatter_Seq_Stride,&to13);
968:         to13->n           = nx;
969:         to13->first       = 0;
970:         to13->step        = 1;
971:         ierr              = PetscNew(VecScatter_Seq_Stride,&from13);
972:         PetscLogObjectMemory(ctx,2*sizeof(VecScatter_Seq_Stride));
973:         from13->n         = nx;
974:         from13->first     = 0;
975:         from13->step      = 1;
976:         to13->type        = VEC_SCATTER_SEQ_STRIDE;
977:         from13->type      = VEC_SCATTER_SEQ_STRIDE;
978:         ctx->todata       = (void*)to13;
979:         ctx->fromdata     = (void*)from13;
980:         ctx->postrecvs    = 0;
981:         ctx->begin        = VecScatterBegin_SStoSS;
982:         ctx->end          = 0;
983:         ctx->destroy      = VecScatterDestroy_SGtoSG;
984:         ctx->copy         = 0;
 985:         PetscLogInfo(xin,"VecScatterCreate:Special case: sequential copy\n");
986:         goto functionend;
987:       }

989:       ISGetIndices(iy,&idy);
990:       ISGetIndices(ix,&idx);
991:       len               = sizeof(VecScatter_Seq_General) + nx*sizeof(int);
992:       PetscMalloc(len,&to11);
993:       PetscLogObjectMemory(ctx,2*len);
994:       to11->slots       = (int*)(to11 + 1);
995:       to11->n           = nx;
996:       VecScatterCheckIndices_Private(ctx->to_n,ny,idy);
 997:       PetscMemcpy(to11->slots,idy,nx*sizeof(int));
998:       PetscMalloc(len,&from11);
999:       from11->slots     = (int*)(from11 + 1);
1000:       from11->n         = nx;
1001:       VecScatterCheckIndices_Private(ctx->from_n,nx,idx);
1002:       PetscMemcpy(from11->slots,idx,nx*sizeof(int));
1003:       to11->type        = VEC_SCATTER_SEQ_GENERAL;
1004:       from11->type      = VEC_SCATTER_SEQ_GENERAL;
1005:       ctx->todata       = (void*)to11;
1006:       ctx->fromdata     = (void*)from11;
1007:       ctx->postrecvs    = 0;
1008:       ctx->begin        = VecScatterBegin_SGtoSG;
1009:       ctx->end          = 0;
1010:       ctx->destroy      = VecScatterDestroy_SGtoSG;
1011:       ctx->copy         = VecScatterCopy_SGToSG;
1012:       ISRestoreIndices(ix,&idx);
1013:       ISRestoreIndices(iy,&idy);
1014:       PetscLogInfo(xin,"VecScatterCreate:Sequential vector scatter with block indices\n");
1015:       goto functionend;
1016:     }
1017:   }
1018:   /* ---------------------------------------------------------------------------*/
1019:   if (xin_type == VEC_MPI_ID && yin_type == VEC_SEQ_ID) {

1021:   /* ===========================================================================================================
1022:         Check for special cases
1023:      ==========================================================================================================*/
1024:     islocal = PETSC_FALSE;
1025:     /* special case extracting (subset of) local portion */
1026:     if (ix->type == IS_STRIDE && iy->type == IS_STRIDE){
1027:       int                   nx,ny,to_first,to_step,from_first,from_step;
1028:       int                   start,end;
1029:       VecScatter_Seq_Stride *from12,*to12;

1031:       VecGetOwnershipRange(xin,&start,&end);
1032:       ISGetLocalSize(ix,&nx);
1033:       ISStrideGetInfo(ix,&from_first,&from_step);
1034:       ISGetLocalSize(iy,&ny);
1035:       ISStrideGetInfo(iy,&to_first,&to_step);
1036:       if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
1037:       if (ix->min >= start && ix->max < end) islocal = PETSC_TRUE; else islocal = PETSC_FALSE;
1038:       MPI_Allreduce(&islocal,&cando,1,MPI_INT,MPI_LAND,xin->comm);
1039:       if (cando) {
1040:         ierr                = PetscNew(VecScatter_Seq_Stride,&to12);
1041:         to12->n             = nx;
1042:         to12->first         = to_first;
1043:         to12->step          = to_step;
1044:         ierr                = PetscNew(VecScatter_Seq_Stride,&from12);
1045:         PetscLogObjectMemory(ctx,2*sizeof(VecScatter_Seq_Stride));
1046:         from12->n           = nx;
1047:         from12->first       = from_first-start;
1048:         from12->step        = from_step;
1049:         to12->type          = VEC_SCATTER_SEQ_STRIDE;
1050:         from12->type        = VEC_SCATTER_SEQ_STRIDE;
1051:         ctx->todata       = (void*)to12;
1052:         ctx->fromdata     = (void*)from12;
1053:         ctx->postrecvs    = 0;
1054:         ctx->begin        = VecScatterBegin_SStoSS;
1055:         ctx->end          = 0;
1056:         ctx->destroy      = VecScatterDestroy_SGtoSG;
1057:         ctx->copy         = VecScatterCopy_PStoSS;
1058:         PetscLogInfo(xin,"VecScatterCreate:Special case: processors only getting local values\n");
1059:         goto functionend;
1060:       }
1061:     } else {
1062:       MPI_Allreduce(&islocal,&cando,1,MPI_INT,MPI_LAND,xin->comm);
1063:     }

1065:     /* test for special case of all processors getting entire vector */
1066:     totalv = 0;
1067:     if (ix->type == IS_STRIDE && iy->type == IS_STRIDE){
1068:       int                  i,nx,ny,to_first,to_step,from_first,from_step,*count,N;
1069:       VecScatter_MPI_ToAll *sto;

1071:       ISGetLocalSize(ix,&nx);
1072:       ISStrideGetInfo(ix,&from_first,&from_step);
1073:       ISGetLocalSize(iy,&ny);
1074:       ISStrideGetInfo(iy,&to_first,&to_step);
1075:       if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
1076:       VecGetSize(xin,&N);
1077:       if (nx != N) {
1078:         totalv = 0;
1079:       } else if (from_first == 0        && from_step == 1 &&
1080:                  from_first == to_first && from_step == to_step){
1081:         totalv = 1;
1082:       } else totalv = 0;
1083:       MPI_Allreduce(&totalv,&cando,1,MPI_INT,MPI_LAND,xin->comm);

1085:       if (cando) {
1086:         PetscMap map;

1088:         ierr  = MPI_Comm_size(ctx->comm,&size);
1089:         ierr  = PetscNew(VecScatter_MPI_ToAll,&sto);
1090:         ierr  = PetscMalloc(size*sizeof(int),&count);
1091:         ierr  = VecGetPetscMap(xin,&map);
1092:         ierr  = PetscMapGetGlobalRange(map,&range);
1093:         for (i=0; i<size; i++) {
1094:           count[i] = range[i+1] - range[i];
1095:         }
1096:         sto->count        = count;
1097:         sto->work1        = 0;
1098:         sto->work2        = 0;
1099:         sto->type         = VEC_SCATTER_MPI_TOALL;
1100:         PetscLogObjectMemory(ctx,sizeof(VecScatter_MPI_ToAll)+size*sizeof(int));
1101:         ctx->todata       = (void*)sto;
1102:         ctx->fromdata     = 0;
1103:         ctx->postrecvs    = 0;
1104:         ctx->begin        = VecScatterBegin_MPI_ToAll;
1105:         ctx->end          = 0;
1106:         ctx->destroy      = VecScatterDestroy_MPI_ToAll;
1107:         ctx->copy         = VecScatterCopy_MPI_ToAll;
1108:         PetscLogInfo(xin,"VecScatterCreate:Special case: all processors get entire parallel vector\n");
1109:         goto functionend;
1110:       }
1111:     } else {
1112:       MPI_Allreduce(&totalv,&cando,1,MPI_INT,MPI_LAND,xin->comm);
1113:     }

1115:     /* test for special case of processor 0 getting entire vector */
1116:     totalv = 0;
1117:     if (ix->type == IS_STRIDE && iy->type == IS_STRIDE){
1118:       int                  i,nx,ny,to_first,to_step,from_first,from_step,*count,rank,N;
1119:       VecScatter_MPI_ToAll *sto;

1121:       PetscObjectGetComm((PetscObject)xin,&comm);
1122:       MPI_Comm_rank(comm,&rank);
1123:       ISGetLocalSize(ix,&nx);
1124:       ISStrideGetInfo(ix,&from_first,&from_step);
1125:       ISGetLocalSize(iy,&ny);
1126:       ISStrideGetInfo(iy,&to_first,&to_step);
1127:       if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
1128:       if (!rank) {
1129:         VecGetSize(xin,&N);
1130:         if (nx != N) {
1131:           totalv = 0;
1132:         } else if (from_first == 0        && from_step == 1 &&
1133:                    from_first == to_first && from_step == to_step){
1134:           totalv = 1;
1135:         } else totalv = 0;
1136:       } else {
1137:         if (!nx) totalv = 1;
1138:         else     totalv = 0;
1139:       }
1140:       MPI_Allreduce(&totalv,&cando,1,MPI_INT,MPI_LAND,xin->comm);

1142:       if (cando) {
1143:         PetscMap map;

1145:         ierr  = MPI_Comm_size(ctx->comm,&size);
1146:         ierr  = PetscNew(VecScatter_MPI_ToAll,&sto);
1147:         ierr  = PetscMalloc(size*sizeof(int),&count);
1148:         ierr  = VecGetPetscMap(xin,&map);
1149:         ierr  = PetscMapGetGlobalRange(map,&range);
1150:         for (i=0; i<size; i++) {
1151:           count[i] = range[i+1] - range[i];
1152:         }
1153:         sto->count        = count;
1154:         sto->work1        = 0;
1155:         sto->work2        = 0;
1156:         sto->type         = VEC_SCATTER_MPI_TOONE;
1157:         PetscLogObjectMemory(ctx,sizeof(VecScatter_MPI_ToAll)+size*sizeof(int));
1158:         ctx->todata       = (void*)sto;
1159:         ctx->fromdata     = 0;
1160:         ctx->postrecvs    = 0;
1161:         ctx->begin        = VecScatterBegin_MPI_ToOne;
1162:         ctx->end          = 0;
1163:         ctx->destroy      = VecScatterDestroy_MPI_ToAll;
1164:         ctx->copy         = VecScatterCopy_MPI_ToAll;
1165:         PetscLogInfo(xin,"VecScatterCreate:Special case: processor zero gets entire parallel vector, rest get none\n");
1166:         goto functionend;
1167:       }
1168:     } else {
1169:       MPI_Allreduce(&totalv,&cando,1,MPI_INT,MPI_LAND,xin->comm);
1170:     }

1172:     ISBlock(ix,&ixblock);
1173:     ISBlock(iy,&iyblock);
1174:     ISStride(iy,&iystride);
1175:     if (ixblock) {
1176:       /* special case block to block */
1177:       if (iyblock) {
1178:         int nx,ny,*idx,*idy,bsx,bsy;
1179:         ISBlockGetBlockSize(iy,&bsy);
1180:         ISBlockGetBlockSize(ix,&bsx);
1181:         if (bsx == bsy && (bsx == 12 || bsx == 5 || bsx == 4 || bsx == 3 || bsx == 2)) {
1182:           ISBlockGetSize(ix,&nx);
1183:           ISBlockGetIndices(ix,&idx);
1184:           ISBlockGetSize(iy,&ny);
1185:           ISBlockGetIndices(iy,&idy);
1186:           if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
1187:           VecScatterCreate_PtoS(nx,idx,ny,idy,xin,yin,bsx,ctx);
1188:           ISBlockRestoreIndices(ix,&idx);
1189:           ISBlockRestoreIndices(iy,&idy);
1190:           PetscLogInfo(xin,"VecScatterCreate:Special case: blocked indices\n");
1191:           goto functionend;
1192:         }
1193:       /* special case block to stride */
1194:       } else if (iystride) {
1195:         int ystart,ystride,ysize,bsx;
1196:         ISStrideGetInfo(iy,&ystart,&ystride);
1197:         ISGetLocalSize(iy,&ysize);
1198:         ISBlockGetBlockSize(ix,&bsx);
1199:         /* see if stride index set is equivalent to block index set */
1200:         if (((bsx == 2) || (bsx == 3) || (bsx == 4) || (bsx == 5) || (bsx == 12)) &&
1201:             ((ystart % bsx) == 0) && (ystride == 1) && ((ysize % bsx) == 0)) {
1202:           int nx,*idx,*idy,il;
1203:           ISBlockGetSize(ix,&nx); ISBlockGetIndices(ix,&idx);
1204:           if (ysize != bsx*nx) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
1205:           PetscMalloc((nx+1)*sizeof(int),&idy);
1206:           idy[0] = ystart;
1207:           for (il=1; il<nx; il++) idy[il] = idy[il-1] + bsx;
1208:           VecScatterCreate_PtoS(nx,idx,nx,idy,xin,yin,bsx,ctx);
1209:           PetscFree(idy);
1210:           ISBlockRestoreIndices(ix,&idx);
1211:           PetscLogInfo(xin,"VecScatterCreate:Special case: blocked indices to stride\n");
1212:           goto functionend;
1213:         }
1214:       }
1215:     }
1216:     /* left over general case */
1217:     {
1218:       int nx,ny,*idx,*idy;
1219:       ISGetLocalSize(ix,&nx);
1220:       ISGetIndices(ix,&idx);
1221:       ISGetLocalSize(iy,&ny);
1222:       ISGetIndices(iy,&idy);
1223:       if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
1224:       VecScatterCreate_PtoS(nx,idx,ny,idy,xin,yin,1,ctx);
1225:       ISRestoreIndices(ix,&idx);
1226:       ISRestoreIndices(iy,&idy);
1227:       goto functionend;
1228:     }
1229:   }
1230:   /* ---------------------------------------------------------------------------*/
1231:   if (xin_type == VEC_SEQ_ID && yin_type == VEC_MPI_ID) {
1232:   /* ===========================================================================================================
1233:         Check for special cases
1234:      ==========================================================================================================*/
1235:     /* special case local copy portion */
1236:     islocal = PETSC_FALSE;
1237:     if (ix->type == IS_STRIDE && iy->type == IS_STRIDE){
1238:       int                   nx,ny,to_first,to_step,from_step,start,end,from_first;
1239:       VecScatter_Seq_Stride *from,*to;

1241:       VecGetOwnershipRange(yin,&start,&end);
1242:       ISGetLocalSize(ix,&nx);
1243:       ISStrideGetInfo(ix,&from_first,&from_step);
1244:       ISGetLocalSize(iy,&ny);
1245:       ISStrideGetInfo(iy,&to_first,&to_step);
1246:       if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
1247:       if (iy->min >= start && iy->max < end) islocal = PETSC_TRUE; else islocal = PETSC_FALSE;
1248:       MPI_Allreduce(&islocal,&cando,1,MPI_INT,MPI_LAND,yin->comm);
1249:       if (cando) {
1250:         ierr              = PetscNew(VecScatter_Seq_Stride,&to);
1251:         to->n             = nx;
1252:         to->first         = to_first-start;
1253:         to->step          = to_step;
1254:         ierr              = PetscNew(VecScatter_Seq_Stride,&from);
1255:         PetscLogObjectMemory(ctx,2*sizeof(VecScatter_Seq_Stride));
1256:         from->n           = nx;
1257:         from->first       = from_first;
1258:         from->step        = from_step;
1259:         to->type          = VEC_SCATTER_SEQ_STRIDE;
1260:         from->type        = VEC_SCATTER_SEQ_STRIDE;
1261:         ctx->todata       = (void*)to;
1262:         ctx->fromdata     = (void*)from;
1263:         ctx->postrecvs    = 0;
1264:         ctx->begin        = VecScatterBegin_SStoSS;
1265:         ctx->end          = 0;
1266:         ctx->destroy      = VecScatterDestroy_SGtoSG;
1267:         ctx->copy         = VecScatterCopy_PStoSS;
1268:         PetscLogInfo(xin,"VecScatterCreate:Special case: sequential stride to stride\n");
1269:         goto functionend;
1270:       }
1271:     } else {
1272:       MPI_Allreduce(&islocal,&cando,1,MPI_INT,MPI_LAND,yin->comm);
1273:     }
1274:     /* general case */
1275:     {
1276:       int nx,ny,*idx,*idy;
1277:       ISGetLocalSize(ix,&nx);
1278:       ISGetIndices(ix,&idx);
1279:       ISGetLocalSize(iy,&ny);
1280:       ISGetIndices(iy,&idy);
1281:       if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
1282:       VecScatterCreate_StoP(nx,idx,ny,idy,yin,ctx);
1283:       ISRestoreIndices(ix,&idx);
1284:       ISRestoreIndices(iy,&idy);
1285:       goto functionend;
1286:     }
1287:   }
1288:   /* ---------------------------------------------------------------------------*/
1289:   if (xin_type == VEC_MPI_ID && yin_type == VEC_MPI_ID) {
1290:     /* no special cases for now */
1291:     int nx,ny,*idx,*idy;
1292:     ierr    = ISGetLocalSize(ix,&nx);
1293:     ierr    = ISGetIndices(ix,&idx);
1294:     ierr    = ISGetLocalSize(iy,&ny);
1295:     ierr    = ISGetIndices(iy,&idy);
1296:     if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
1297:     ierr    = VecScatterCreate_PtoP(nx,idx,ny,idy,xin,yin,ctx);
1298:     ierr    = ISRestoreIndices(ix,&idx);
1299:     ierr    = ISRestoreIndices(iy,&idy);
1300:     goto functionend;
1301:   }

1303:   functionend:
1304:   *newctx = ctx;
1305:   if (tix) {ISDestroy(tix);}
1306:   if (tiy) {ISDestroy(tiy);}
1307:   PetscOptionsHasName(PETSC_NULL,"-vecscatter_view_info",&flag);
1308:   if (flag) {
1309:     PetscViewerPushFormat(PETSC_VIEWER_STDOUT_(comm),PETSC_VIEWER_ASCII_INFO);
1310:     VecScatterView(ctx,PETSC_VIEWER_STDOUT_(comm));
1311:     PetscViewerPopFormat(PETSC_VIEWER_STDOUT_(comm));
1312:   }
1313:   PetscOptionsHasName(PETSC_NULL,"-vecscatter_view",&flag);
1314:   if (flag) {
1315:     VecScatterView(ctx,PETSC_VIEWER_STDOUT_(comm));
1316:   }
1317:   return(0);
1318: }
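/*
   Illustrative usage sketch (hypothetical helper name and ghost indices; error
   checking omitted): the common "gather ghost values" use of VecScatterCreate().
   A sequential work vector receives a few globally indexed entries of an MPI
   vector; passing PETSC_NULL for iy means the entire destination vector, as
   handled near the top of VecScatterCreate().  The reverse scatter then adds the
   (possibly modified) local values back into x, with the two vector arguments
   swapped for SCATTER_REVERSE.
*/
static int ExampleGhostGather_Sketch(Vec x) /* x is an MPI vector of global size >= 6 */
{
  int        nghost    = 2;
  int        ghosts[2] = {0,5};         /* hypothetical global indices needed locally */
  Vec        xlocal;
  IS         ix;
  VecScatter scat;

  VecCreateSeq(PETSC_COMM_SELF,nghost,&xlocal);
  ISCreateGeneral(PETSC_COMM_SELF,nghost,ghosts,&ix);
  VecScatterCreate(x,ix,xlocal,PETSC_NULL,&scat);
  VecScatterBegin(x,xlocal,INSERT_VALUES,SCATTER_FORWARD,scat);
  VecScatterEnd(x,xlocal,INSERT_VALUES,SCATTER_FORWARD,scat);
  /* ... use or modify the values in xlocal ... */
  VecScatterBegin(xlocal,x,ADD_VALUES,SCATTER_REVERSE,scat);
  VecScatterEnd(xlocal,x,ADD_VALUES,SCATTER_REVERSE,scat);
  VecScatterDestroy(scat);
  ISDestroy(ix);
  VecDestroy(xlocal);
  return(0);
}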

1320: /* ------------------------------------------------------------------*/
1321: #undef __FUNCT__
1322: #define __FUNCT__ "VecScatterPostRecvs"
1323: /*@
1324:    VecScatterPostRecvs - Posts the receives required for the ready-receiver
1325:    version of the VecScatter routines.

1327:    Collective on VecScatter

1329:    Input Parameters:
1330: +  x - the vector from which we scatter (not needed, can be null)
1331: .  y - the vector to which we scatter
1332: .  addv - either ADD_VALUES or INSERT_VALUES
1333: .  mode - the scattering mode, usually SCATTER_FORWARD.  The available modes are:
1334:     SCATTER_FORWARD, SCATTER_REVERSE
1335: -  inctx - scatter context generated by VecScatterCreate()

1337:    Output Parameter:
1338: .  y - the vector to which we scatter

1340:    Level: advanced

1342:    Notes:
1343:    If you use SCATTER_REVERSE the first two arguments should be reversed from
1344:    their order in the SCATTER_FORWARD call.
1345:    The vectors x and y cannot be the same. y[iy[i]] = x[ix[i]], for i=0,...,ni-1

1347:    This scatter is far more general than the conventional
1348:    scatter, since it can be a gather or a scatter or a combination,
1349:    depending on the indices ix and iy.  If x is a parallel vector and y
1350:    is sequential, VecScatterBegin() can serve to gather values to a
1351:    single processor.  Similarly, if y is parallel and x sequential, the
1352:    routine can scatter from one processor to many processors.

1354: .seealso: VecScatterCreate(), VecScatterEnd(), VecScatterBegin()
1355: @*/
1356: int VecScatterPostRecvs(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter inctx)
1357: {


1364:   if (inctx->postrecvs) {
1365:     (*inctx->postrecvs)(x,y,addv,mode,inctx);
1366:   }
1367:   return(0);
1368: }
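/*
   Illustrative calling-order sketch (hypothetical helper name; error checking
   omitted): VecScatterPostRecvs() does nothing unless the scatter context
   provides a postrecvs routine (used by the ready-receiver mode, see
   -vecscatter_rr above), in which case the receives should be posted well before
   the matching sends issued by VecScatterBegin().
*/
static int ExampleReadyReceiver_Sketch(Vec x,Vec y,VecScatter scat)
{
  VecScatterPostRecvs(x,y,INSERT_VALUES,SCATTER_FORWARD,scat);
  /* ... other work, for example computing the values of x ... */
  VecScatterBegin(x,y,INSERT_VALUES,SCATTER_FORWARD,scat);
  VecScatterEnd(x,y,INSERT_VALUES,SCATTER_FORWARD,scat);
  return(0);
}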

1370: /* ------------------------------------------------------------------*/
1371: #undef __FUNCT__
1372: #define __FUNCT__ "VecScatterBegin"
1373: /*@
1374:    VecScatterBegin - Begins a generalized scatter from one vector to
1375:    another. Complete the scattering phase with VecScatterEnd().

1377:    Collective on VecScatter and Vec

1379:    Input Parameters:
1380: +  x - the vector from which we scatter
1381: .  y - the vector to which we scatter
1382: .  addv - either ADD_VALUES or INSERT_VALUES
1383: .  mode - the scattering mode, usually SCATTER_FORWARD.  The available modes are:
1384:     SCATTER_FORWARD or SCATTER_REVERSE
1385: -  inctx - scatter context generated by VecScatterCreate()

1387:    Level: intermediate

1389:    Notes:
1390:    The vectors x and y need not be the same vectors used in the call 
1391:    to VecScatterCreate(), but x must have the same parallel data layout
1392:    as that passed in as the x to VecScatterCreate(), similarly for the y.
1393:    Most likely they have been obtained from VecDuplicate().

1395:    You cannot change the values in the input vector between the calls to VecScatterBegin()
1396:    and VecScatterEnd().

1398:    If you use SCATTER_REVERSE, the first two arguments should be swapped relative
1399:    to those used in the SCATTER_FORWARD case.
1400:    
1401:    y[iy[i]] = x[ix[i]], for i=0,...,ni-1

1403:    This scatter is far more general than the conventional
1404:    scatter, since it can be a gather or a scatter or a combination,
1405:    depending on the indices ix and iy.  If x is a parallel vector and y
1406:    is sequential, VecScatterBegin() can serve to gather values to a
1407:    single processor.  Similarly, if y is parallel and x sequential, the
1408:    routine can scatter from one processor to many processors.

1410:    Concepts: scatter^between vectors
1411:    Concepts: gather^between vectors

1413: .seealso: VecScatterCreate(), VecScatterEnd()
1414: @*/
1415: int VecScatterBegin(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter inctx)
1416: {
1418: #if defined(PETSC_USE_BOPT_g)
1419:   int to_n,from_n;
1420: #endif

1425:   if (inctx->inuse) SETERRQ(PETSC_ERR_ARG_WRONGSTATE," Scatter ctx already in use");
1426: #if defined(PETSC_USE_BOPT_g)
1427:   /*
1428:      Error checking to make sure these vectors match the vectors used
1429:    to create the vector scatter context. A value of -1 in from_n or to_n indicates
1430:    that the vector lengths are unknown (for example, with mapped scatters) and thus
1431:    no error checking is performed.
1432:   */
1433:   if (inctx->from_n >= 0 && inctx->to_n >= 0) {
1434:     VecGetLocalSize(x,&from_n);
1435:     VecGetLocalSize(y,&to_n);
1436:     if (mode & SCATTER_REVERSE) {
1437:       if (to_n != inctx->from_n) SETERRQ(PETSC_ERR_ARG_SIZ,"Vector wrong size for scatter");
1438:       if (from_n != inctx->to_n) SETERRQ(PETSC_ERR_ARG_SIZ,"Vector wrong size for scatter");
1439:     } else {
1440:       if (to_n != inctx->to_n)     SETERRQ(PETSC_ERR_ARG_SIZ,"Vector wrong size for scatter");
1441:       if (from_n != inctx->from_n) SETERRQ(PETSC_ERR_ARG_SIZ,"Vector wrong size for scatter");
1442:     }
1443:   }
1444: #endif

1446:   inctx->inuse = PETSC_TRUE;
1447:   PetscLogEventBarrierBegin(VEC_ScatterBarrier,0,0,0,0,inctx->comm);
1448:   (*inctx->begin)(x,y,addv,mode,inctx);
1449:   if (inctx->beginandendtogether && inctx->end) {
1450:     inctx->inuse = PETSC_FALSE;
1451:     (*inctx->end)(x,y,addv,mode,inctx);
1452:   }
1453:   PetscLogEventBarrierEnd(VEC_ScatterBarrier,0,0,0,0,inctx->comm);
1454:   return(0);
1455: }
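
/*
   Usage sketch (illustrative, not part of vscat.c): gathering every entry of a
   parallel vector into a sequential vector on each process, as described in the
   Notes above.  The helper name and the choice of a stride index set covering all
   global indices are assumptions.
*/
#include "petscvec.h"

int GatherToAll(Vec xpar,Vec *yseq)
{
  int        ierr,N;
  IS         is;
  VecScatter ctx;

  ierr = VecGetSize(xpar,&N);CHKERRQ(ierr);
  ierr = VecCreateSeq(PETSC_COMM_SELF,N,yseq);CHKERRQ(ierr);
  /* every global entry i of xpar goes into entry i of the local copy */
  ierr = ISCreateStride(PETSC_COMM_SELF,N,0,1,&is);CHKERRQ(ierr);
  ierr = VecScatterCreate(xpar,is,*yseq,is,&ctx);CHKERRQ(ierr);
  ierr = VecScatterBegin(xpar,*yseq,INSERT_VALUES,SCATTER_FORWARD,ctx);CHKERRQ(ierr);
  ierr = VecScatterEnd(xpar,*yseq,INSERT_VALUES,SCATTER_FORWARD,ctx);CHKERRQ(ierr);
  ierr = ISDestroy(is);CHKERRQ(ierr);
  ierr = VecScatterDestroy(ctx);CHKERRQ(ierr);
  return 0;
}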

1457: /* --------------------------------------------------------------------*/
1458: #undef __FUNCT__  
1460: /*@
1461:    VecScatterEnd - Ends a generalized scatter from one vector to another.  Call
1462:    after first calling VecScatterBegin().

1464:    Collective on VecScatter and Vec

1466:    Input Parameters:
1467: +  x - the vector from which we scatter
1468: .  y - the vector to which we scatter
1469: .  addv - either ADD_VALUES or INSERT_VALUES.
1470: .  mode - the scattering mode, usually SCATTER_FORWARD.  The available modes are:
1471:      SCATTER_FORWARD, SCATTER_REVERSE
1472: -  ctx - scatter context generated by VecScatterCreate()

1474:    Level: intermediate

1476:    Notes:
1477:    If you use SCATTER_REVERSE, the first two arguments should be swapped relative
1478:    to those used in the SCATTER_FORWARD case.
1479:    y[iy[i]] = x[ix[i]], for i=0,...,ni-1

1481: .seealso: VecScatterBegin(), VecScatterCreate()
1482: @*/
1483: int VecScatterEnd(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter ctx)
1484: {

1490:   ctx->inuse = PETSC_FALSE;
1491:   if (!ctx->end) return(0);
1492:   if (!ctx->beginandendtogether) {
1493:     PetscLogEventBegin(VEC_ScatterEnd,ctx,x,y,0);
1494:     (*(ctx)->end)(x,y,addv,mode,ctx);
1495:     PetscLogEventEnd(VEC_ScatterEnd,ctx,x,y,0);
1496:   }
1497:   return(0);
1498: }
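
/*
   Usage sketch (illustrative, not part of vscat.c): a reverse scatter.  For a
   context built as VecScatterCreate(x,ix,y,iy,&ctx), the two vector arguments are
   swapped when SCATTER_REVERSE is used, as noted above.  The helper name is an
   assumption; x, y, and ctx are assumed to be set up elsewhere.
*/
#include "petscvec.h"

int AddBackToSource(Vec x,Vec y,VecScatter ctx)
{
  int ierr;

  /* accumulate the values of y back into x */
  ierr = VecScatterBegin(y,x,ADD_VALUES,SCATTER_REVERSE,ctx);CHKERRQ(ierr);
  ierr = VecScatterEnd(y,x,ADD_VALUES,SCATTER_REVERSE,ctx);CHKERRQ(ierr);
  return 0;
}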

1500: #undef __FUNCT__  
1502: /*@C
1503:    VecScatterDestroy - Destroys a scatter context created by 
1504:    VecScatterCreate().

1506:    Collective on VecScatter

1508:    Input Parameter:
1509: .  ctx - the scatter context

1511:    Level: intermediate

1513: .seealso: VecScatterCreate(), VecScatterCopy()
1514: @*/
1515: int VecScatterDestroy(VecScatter ctx)
1516: {

1521:   if (--ctx->refct > 0) return(0);

1523:   /* if memory was published with AMS then destroy it */
1524:   PetscObjectDepublish(ctx);

1526:   (*ctx->destroy)(ctx);
1527:   return(0);
1528: }
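
/*
   Usage sketch (illustrative, not part of vscat.c): VecScatterDestroy() only
   decrements the reference count, so a context holding an extra reference
   (taken here with PetscObjectReference(), an assumption about how the context
   is shared) is freed only by the final destroy.
*/
#include "petscvec.h"

int ShareScatter(VecScatter ctx)
{
  int ierr;

  ierr = PetscObjectReference((PetscObject)ctx);CHKERRQ(ierr); /* reference count becomes 2 */
  ierr = VecScatterDestroy(ctx);CHKERRQ(ierr);                 /* count drops to 1, nothing freed */
  ierr = VecScatterDestroy(ctx);CHKERRQ(ierr);                 /* count hits 0, context is destroyed */
  return 0;
}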

1530: #undef __FUNCT__  
1532: /*@C
1533:    VecScatterCopy - Makes a copy of a scatter context.

1535:    Collective on VecScatter

1537:    Input Parameter:
1538: .  sctx - the scatter context

1540:    Output Parameter:
1541: .  ctx - the context copy

1543:    Level: advanced

1545: .seealso: VecScatterCreate(), VecScatterDestroy()
1546: @*/
1547: int VecScatterCopy(VecScatter sctx,VecScatter *ctx)
1548: {

1554:   if (!sctx->copy) SETERRQ(PETSC_ERR_SUP,"Cannot copy this type");
1555:   PetscHeaderCreate(*ctx,_p_VecScatter,int,VEC_SCATTER_COOKIE,0,"VecScatter",sctx->comm,VecScatterDestroy,VecScatterView);
1556:   PetscLogObjectCreate(*ctx);
1557:   PetscLogObjectMemory(*ctx,sizeof(struct _p_VecScatter));
1558:   (*ctx)->to_n   = sctx->to_n;
1559:   (*ctx)->from_n = sctx->from_n;
1560:   (*sctx->copy)(sctx,*ctx);
1561:   return(0);
1562: }
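
/*
   Usage sketch (illustrative, not part of vscat.c): duplicating a scatter context
   so that two independent scatters with the same pattern can be in flight at once.
   The helper name is an assumption; sctx is assumed to come from VecScatterCreate().
*/
#include "petscvec.h"

int DuplicateScatter(VecScatter sctx,VecScatter *copy)
{
  int ierr;

  ierr = VecScatterCopy(sctx,copy);CHKERRQ(ierr);
  /* *copy is now used exactly like the original and destroyed when no longer needed */
  return 0;
}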


1565: /* ------------------------------------------------------------------*/
1566: #undef __FUNCT__  
1568: /*@
1569:    VecScatterView - Views a vector scatter context.

1571:    Collective on VecScatter

1573:    Input Parameters:
1574: +  ctx - the scatter context
1575: -  viewer - the viewer for displaying the context

1577:    Level: intermediate

1579: @*/
1580: int VecScatterView(VecScatter ctx,PetscViewer viewer)
1581: {

1586:   if (!viewer) viewer = PETSC_VIEWER_STDOUT_(ctx->comm);
1588:   if (!ctx->view) SETERRQ(PETSC_ERR_SUP,"Cannot view this type of scatter context yet");

1590:   (*ctx->view)(ctx,viewer);
1591:   return(0);
1592: }
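
/*
   Usage sketch (illustrative, not part of vscat.c): as the code above shows, a
   PETSC_NULL viewer defaults to the standard output viewer on the scatter's own
   communicator.  The helper name is an assumption.
*/
#include "petscvec.h"

int PrintScatter(VecScatter ctx)
{
  int ierr;

  /* equivalent to viewing with PETSC_VIEWER_STDOUT_() on the scatter's communicator */
  ierr = VecScatterView(ctx,PETSC_NULL);CHKERRQ(ierr);
  return 0;
}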

1594: #undef __FUNCT__  
1596: /*@
1597:    VecScatterRemap - Remaps the "from" and "to" indices in a 
1598:    vector scatter context. FOR EXPERTS ONLY!

1600:    Collective on VecScatter

1602:    Input Parameters:
1603: +  scat - vector scatter context
1604: .  from - remapping for "from" indices (may be PETSC_NULL)
1605: -  to   - remapping for "to" indices (may be PETSC_NULL)

1607:    Level: developer

1609:    Notes: In the parallel case the todata actually contains the indices
1610:           from which the data is TAKEN, while the fromdata is where the
1611:           data is finally put. This is very confusing!

1613:           In the sequential case the todata contains the indices where the
1614:           data is put and the fromdata is where it is taken from.
1615:           This is backwards from the parallel case!

1617: @*/
1618: int VecScatterRemap(VecScatter scat,int *rto,int *rfrom)
1619: {
1620:   VecScatter_Seq_General *to,*from;
1621:   VecScatter_MPI_General *mto;
1622:   int                    i;


1629:   from = (VecScatter_Seq_General *)scat->fromdata;
1630:   mto  = (VecScatter_MPI_General *)scat->todata;

1632:   if (mto->type == VEC_SCATTER_MPI_TOALL) SETERRQ(PETSC_ERR_ARG_SIZ,"Not for to all scatter");

1634:   if (rto) {
1635:     if (mto->type == VEC_SCATTER_MPI_GENERAL) {
1636:       /* handle off processor parts */
1637:       for (i=0; i<mto->starts[mto->n]; i++) {
1638:         mto->indices[i] = rto[mto->indices[i]];
1639:       }
1640:       /* handle local part */
1641:       to = &mto->local;
1642:       for (i=0; i<to->n; i++) {
1643:         to->slots[i] = rto[to->slots[i]];
1644:       }
1645:     } else if (from->type == VEC_SCATTER_SEQ_GENERAL) {
1646:       for (i=0; i<from->n; i++) {
1647:         from->slots[i] = rto[from->slots[i]];
1648:       }
1649:     } else if (from->type == VEC_SCATTER_SEQ_STRIDE) {
1650:       VecScatter_Seq_Stride *sto = (VecScatter_Seq_Stride*)from;
1651: 
1652:       /* if the remapping is the identity and stride is identity then skip remap */
1653:       if (sto->step == 1 && sto->first == 0) {
1654:         for (i=0; i<sto->n; i++) {
1655:           if (rto[i] != i) {
1656:             SETERRQ(PETSC_ERR_ARG_SIZ,"Unable to remap such scatters");
1657:           }
1658:         }
1659:       } else SETERRQ(PETSC_ERR_ARG_SIZ,"Unable to remap such scatters");
1660:     } else SETERRQ(PETSC_ERR_ARG_SIZ,"Unable to remap such scatters");
1661:   }

1663:   if (rfrom) {
1664:     SETERRQ(PETSC_ERR_SUP,"Unable to remap the FROM in scatters yet");
1665:   }

1667:   /*
1668:      Mark the vector lengths as unknown because we do not know the
1669:    lengths of the remapped vectors
1670:   */
1671:   scat->from_n = -1;
1672:   scat->to_n   = -1;

1674:   return(0);
1675: }
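
/*
   Usage sketch (illustrative, not part of vscat.c): applying a permutation to the
   "to" indices of an existing scatter.  The remapping array must cover every index
   the scatter currently uses; the length n, the reversal permutation, and the
   helper name are assumptions.  Remapping the "from" indices is unsupported, so
   PETSC_NULL is passed for that argument.
*/
#include "petscvec.h"

int PermuteScatterDestination(VecScatter scat,int n)
{
  int ierr,i,*rto;

  ierr = PetscMalloc(n*sizeof(int),&rto);CHKERRQ(ierr);
  /* example permutation: reverse the ordering of the destination indices */
  for (i=0; i<n; i++) rto[i] = n - 1 - i;
  ierr = VecScatterRemap(scat,rto,PETSC_NULL);CHKERRQ(ierr);
  /* the remapping is applied immediately, so the array may be freed */
  ierr = PetscFree(rto);CHKERRQ(ierr);
  return 0;
}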