Actual source code: gvec.c

  1: #ifdef PETSC_RCS_HEADER
  2: static char vcid[] = "$Id: gvec.c,v 1.48 2000/07/16 23:20:04 knepley Exp $";
  3: #endif
  4: /*
  5:   This file provides routines for grid vectors (vectors that are associated with grids,
  6:   possibly with multiple degrees of freedom per node). 
  7: */

  9: #include "petscsles.h"            /* For ALE Mesh Support */
 10:  #include "src/gvec/gvecimpl.h"

 12: /* Logging support */
 13: int GVEC_EvaluateFunction, GVEC_EvaluateFunctionCollective, GVEC_EvaluateRhs, GVEC_EvaluateJacobian;
 14: int GVEC_SetBoundary, GVEC_InterpolateField, GVEC_InterpolateFieldBatch, GVEC_InterpolateFieldBatchParallel;
 15: int GVEC_InterpolateFieldBatchCalc;

 17: extern int VecCreate_MPI_Private(Vec, int, const PetscScalar [], PetscMap);

 19: /*--------------------------------------------- General Vector Functions ---------------------------------------------*/
 20: #undef  __FUNCT__
 22: /*@C
 23:    GVecDestroy - Destroys a grid vector.

 25:    Input Parameters:
 26: .  v  - the vector

 28:   Level: beginner

 30: .keywords: grid vector, destroy
 31: .seealso: VecDestroy(), GVecDuplicate()
 32: @*/
 33: int GVecDestroy(GVec v)
 34: {
 35:   int  ierr;

 39:   if (--v->refct > 0) return(0);
 40:   VecDestroy(v);
 41:   return(0);
 42: }

 44: #undef  __FUNCT__
 46: /*@
 47:   GVecViewFromOptions - This function visualizes the vector based upon user options.

 49:   Collective on GVec

 51:   Input Parameter:
 52: . gvec - The vector

 54:   Level: intermediate

 56: .keywords: GVec, view, options, database
 57: .seealso: GVecSetFromOptions(), GVecView()
 58: @*/
 59: int GVecViewFromOptions(GVec gvec, char *title)
 60: {
 61:   PetscViewer viewer;
 62:   PetscDraw   draw;
 63:   PetscTruth  opt;
 64:   char       *titleStr;
 65:   char        typeName[1024];
 66:   char        fileName[PETSC_MAX_PATH_LEN];
 67:   int         len;
 68:   int         ierr;

 71:   PetscOptionsHasName(gvec->prefix, "-gvec_view", &opt);
 72:   if (opt == PETSC_TRUE) {
 73:     PetscOptionsGetString(gvec->prefix, "-gvec_view", typeName, 1024, &opt);
 74:     PetscStrlen(typeName, &len);
 75:     if (len > 0) {
 76:       PetscViewerCreate(gvec->comm, &viewer);
 77:       PetscViewerSetType(viewer, typeName);
 78:       PetscOptionsGetString(gvec->prefix, "-gvec_view_file", fileName, 1024, &opt);
 79:       if (opt == PETSC_TRUE) {
 80:         PetscViewerSetFilename(viewer, fileName);
 81:       } else {
 82:         PetscViewerSetFilename(viewer, gvec->name);
 83:       }
 84:       GVecView(gvec, viewer);
 85:       PetscViewerFlush(viewer);
 86:       PetscViewerDestroy(viewer);
 87:     } else {
 88:       GVecView(gvec, PETSC_NULL);
 89:     }
 90:   }
 91:   PetscOptionsHasName(gvec->prefix, "-gvec_view_draw", &opt);
 92:   if (opt == PETSC_TRUE) {
 93:     PetscViewerDrawOpen(gvec->comm, 0, 0, 0, 0, 300, 300, &viewer);
 94:     PetscViewerDrawGetDraw(viewer, 0, &draw);
 95:     if (title != PETSC_NULL) {
 96:       titleStr = title;
 97:     } else {
 98:       ierr = PetscObjectName((PetscObject) gvec); CHKERRQ(ierr);
 99:       titleStr = gvec->name;
100:     }
101:     PetscDrawSetTitle(draw, titleStr);
102:     GVecView(gvec, viewer);
103:     PetscViewerFlush(viewer);
104:     PetscDrawPause(draw);
105:     PetscViewerDestroy(viewer);
106:   }
107:   return(0);
108: }
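
/*
   Usage sketch (added for illustration; not part of the original gvec.c): the routine above
   is driven entirely by the options database keys it reads: -gvec_view, -gvec_view_file, and
   -gvec_view_draw. A typical call, with the corresponding run-time options, might look like:

     ierr = GVecViewFromOptions(gvec, "Pressure");

   run with, for example,

     -gvec_view ascii -gvec_view_file pressure.out
     -gvec_view_draw

   where "ascii" stands for whatever PetscViewer type name is desired; the value is passed
   straight to PetscViewerSetType().
*/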

110: #undef  __FUNCT__
112: /*@ 
113:    GVecView - Views a grid vector.

115:    Input Parameters:
116: .  v      - The grid vector
117: .  viewer - [Optional] A visualization context

119:    Notes:
120:    GVecView() supports the same viewers as VecView().  The only difference
121:    is that for all multiprocessor cases, the output vector employs the natural
122:    ordering of the grid, so in many cases this corresponds to the ordering 
123:    that would have been used for the uniprocessor case.

125:    The available visualization contexts include
126: $     PETSC_VIEWER_STDOUT_SELF - standard output (default)
127: $     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
128: $       output where only the first processor opens
129: $       the file.  All other processors send their 
130: $       data to the first processor to print. 

132:    The user can open alternative visualization contexts with
133: $    PetscViewerFileOpenASCII() - output vector to a specified file
134: $    PetscViewerFileOpenBinary() - output in binary to a
135: $         specified file; corresponding input uses VecLoad()
136: $    PetscViewerDrawOpenX() - output vector to an X window display
137: $    DrawLGCreate() - output vector as a line graph to an X window display
138: $    PetscViewerMatlabOpen() - output vector to Matlab viewer
139: $    PetscViewerMathematicaOpen() - output vector to Mathematica viewer

141:   Level: beginner

143: .keywords: view, visualize, output, print, write, draw
144: .seealso: VecView()
145: @*/
146: int GVecView(GVec v, PetscViewer viewer)
147: {
148:   Grid grid;
149:   int  ierr;

153:   if (!viewer) {
154:     viewer = PETSC_VIEWER_STDOUT_SELF;
155:   } else {
157:   }

159:   GVecGetGrid(v, &grid);
160:   (*grid->ops->gvecview)(v, viewer);
161:   return(0);
162: }
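
/*
   Usage sketch (added for illustration; not part of the original gvec.c): the two simplest
   ways to view a grid vector, using the default viewer and the synchronized stdout viewer
   named in the docstring above.

     ierr = GVecView(v, PETSC_NULL);                   the viewer defaults to PETSC_VIEWER_STDOUT_SELF
     ierr = GVecView(v, PETSC_VIEWER_STDOUT_WORLD);    one synchronized stream for all processors
*/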

164: #undef  __FUNCT__
166: /*@ 
167:   GVecSerialize - This function stores or recreates a grid vector using a viewer for
168:   a binary file.

170:   Input Parameters:
171: . viewer - The viewer context
172: . store  - This flag is PETSC_TRUE if data is being written, otherwise it will be read

174:   Output Parameter:
175: . v      - The grid vector

177:   Level: beginner

179: .keywords: grid vector, serialize
180: .seealso: GridSerialize()
181: @*/
182: int GVecSerialize(Grid grid, GVec *v, PetscViewer viewer, PetscTruth store)
183: {


191:   VecSerialize(grid->comm, v, viewer, store);
192:   /* Newly created object should have grid as a parent */
193:   if (store == PETSC_FALSE) {
194:     PetscObjectCompose((PetscObject) *v, "Grid", (PetscObject) grid);
195:   }
196: 
197:   return(0);
198: }

200: #undef  __FUNCT__
202: /*@C
203:    GVecDuplicate - Duplicates a grid vector.

205:    Input Parameters:
206: .  v  - the vector

208:   Level: beginner

210: .keywords: grid vector, duplicate
211: .seealso: VecDuplicate(), GVecDestroy()
212: @*/
213: int GVecDuplicate(GVec v, GVec *newvec)
214: {

219:   VecDuplicate(v, newvec);
220:   return(0);
221: }

223: #undef  __FUNCT__
225: /*@
226:   GVecGetGrid - This function returns the grid from a grid vector.

228:   Not collective

230:   Input Parameter:
231: . v    - The vector

233:   Output Parameter:
234: . grid - The grid

236:   Level: intermediate

238: .keywords: grid vector, get, grid
239: .seealso: GVecGetOrder(), GridGetMesh(), GMatGetGrid()
240: @*/
241: int GVecGetGrid(GVec v, Grid *grid)
242: {


249:   PetscObjectQuery((PetscObject) v, "Grid", (PetscObject *) grid);
251:   return(0);
252: }

254: #undef  __FUNCT__
256: /*@
257:   GVecGetOrder - This function returns the variable ordering from a grid vector.

259:   Not collective

261:   Input Parameter:
262: . v     - The vector

264:   Output Parameter:
265: . order - The variable ordering

267:   Level: intermediate

269: .keywords: grid vector, get, variable ordering
270: .seealso: GVecGetGrid(), GridGetMesh(), GMatGetGrid()
271: @*/
272: int GVecGetOrder(GVec v, VarOrdering *order)
273: {


280:   PetscObjectQuery((PetscObject) v, "Order", (PetscObject *) order);
282:   return(0);
283: }

285: #undef  __FUNCT__
287: /*@C
288:   GVecScatterCreate - This function creates a scatter from a vector to an embedded vector.

290:   Collective on GVec

292:   Input Parameters:
293: + vec      - A vector
294: - embedVec - A vector constructed from an embedded ordering

296:   Output Parameter:
297: . scatter  - The scatter

299:   Level: beginner

301: .keywords: variable ordering, vector scatter, embedding
302: .seealso: VecScatterCreate()
303: @*/
304: int GVecScatterCreate(GVec vec, GVec embedVec, VecScatter *scatter)
305: {
306:   Grid          grid;
307:   VarOrdering   order, embedOrder;
308:   FieldClassMap map,   embedMap;
309:   int           f, field, nclass;
310:   int           ierr;

316:   GVecGetGrid(vec, &grid);
317:   GVecGetOrder(vec,      &order);
318:   GVecGetOrder(embedVec, &embedOrder);
319:   VarOrderingGetClassMap(order,      &map);
320:   VarOrderingGetClassMap(embedOrder, &embedMap);
321:   if (map->numFields < embedMap->numFields) {
322:     SETERRQ2(PETSC_ERR_ARG_INCOMP, "The embedded ordering cannot have more fields than the original (%d < %d)",
323:              map->numFields, embedMap->numFields);
324:   }
325:   if (map->numClasses != embedMap->numClasses) {
326:     SETERRQ2(PETSC_ERR_ARG_INCOMP, "The embedded ordering must have the same number of classes as the original (%d != %d)",
327:              map->numClasses, embedMap->numClasses);
328:   }
329:   for(nclass = 0; nclass < map->numClasses; nclass++) {
330:     if (embedMap->classSizes[nclass] > 0) {
331:       if (map->classSizes[nclass] <= 0) {
332:         SETERRQ1(PETSC_ERR_ARG_INCOMP, "Mapping not an embedding: class %d is larger than the original", nclass);
333:       }
334:     }
335:     for(f = 0; f < embedMap->numFields; f++) {
336:       field = map->fields[f];
337:       if ((order->localStart[field][nclass] < 0) && (embedOrder->localStart[field][nclass] >= 0)) {
338:         SETERRQ2(PETSC_ERR_ARG_INCOMP, "Mapping not an embedding: field %d not present in class %d in the original",
339:                  field, nclass);
340:       }
341:     }
342:   }
343:   GridSetUp(grid);
344:   (*grid->ops->gridcreatevarscatter)(grid, order, vec, embedOrder, embedVec, scatter);
345:   return(0);
346: }
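
/*
   Usage sketch (added for illustration; not part of the original gvec.c): building a scatter
   from a full grid vector "vec" to a vector on an embedded single-field ordering. The field
   number 0 is only an example; VarOrderingCreateSubset() and GVecCreateRectangular() are the
   same routines used elsewhere in this file.

     VarOrdering embedOrder;
     GVec        embedVec;
     VecScatter  scatter;
     int         field = 0;

     ierr = VarOrderingCreateSubset(grid->order, 1, &field, PETSC_FALSE, &embedOrder);
     ierr = GVecCreateRectangular(grid, embedOrder, &embedVec);
     ierr = GVecScatterCreate(vec, embedVec, &scatter);

   The resulting scatter can then be used with VecScatterBegin()/VecScatterEnd() to move
   values between vec and embedVec.
*/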

348: #undef  __FUNCT__
350: /*@
351:    GVecGetLocalWorkGVec - Creates a new GVec to contain the local + ghost
352:        values portion of a GVec.

354:    Input Parameter:
355: .  v - the grid vector

357:    Output Parameters:
358: .  lv - the local vector

360:   Level: intermediate

362: .seealso: GVecLocalToGlobal(), GVecGlobalToLocal(), GVecRestoreLocalWorkGVec(),
363:           GVecGetWorkGVec(), GVecRestoreWorkGVec()
364: @*/
365: int GVecGetLocalWorkGVec(GVec v, GVec *lv)
366: {
367:   Grid grid;
368:   int  ierr;


374:   GVecGetGrid(v, &grid);
375:   (*grid->ops->gvecgetlocalworkgvec)(v, lv);
376:   return(0);
377: }

379: #undef  __FUNCT__
381: /*@
382:    GVecRestoreLocalWorkGVec - Restores a local work vector obtained with GVecGetLocalWorkGVec().

384:    Input Parameter:
385: .  v - the grid vector

387:    Output Parameters:
388: .  lv - the local vector

390:   Level: intermediate

392: .seealso: GVecLocalToGlobal(), GVecGlobalToLocal(), GVecGetLocalWorkGVec(),
393:           GVecGetWorkGVec(), GVecRestoreWorkGVec()
394: @*/
395: int GVecRestoreLocalWorkGVec(GVec v, GVec *lv)
396: {
397:   Grid grid;
398:   int  ierr;


404:   GVecGetGrid(v, &grid);
405:   (*grid->ops->gvecrestorelocalworkgvec)(v, lv);
406:   return(0);
407: }

409: #undef  __FUNCT__
411: /*@
412:   GVecGetWorkGVec - Get an identical work grid vector.

414:   Input Parameter:
415: . v  - The grid vector

417:   Output Parameters:
418: . wv - The work vector

420:   Level: intermediate

422: .seealso: GVecLocalToGlobal(), GVecGlobalToLocal(), GVecRestoreWorkGVec(),
423:           GVecGetLocalWorkGVec(), GVecRestoreLocalWorkGVec()
424: @*/
425: int GVecGetWorkGVec(GVec v, GVec *wv)
426: {

432:   VecDuplicate(v, wv);
433:   return(0);
434: }

436: #undef  __FUNCT__
438: /*@
439:   GVecRestoreWorkGVec - Restores the work vector.

441:   Input Parameter:
442: . v  - The grid vector

444:   Output Parameters:
445: . wv - The work vector

447:   Level: intermediate

449: .seealso: GVecLocalToGlobal(), GVecGlobalToLocal(), GVecGetWorkGVec(),
450:           GVecGetLocalWorkGVec(), GVecRestoreLocalWorkGVec()
451: @*/
452: int GVecRestoreWorkGVec(GVec v, GVec *wv)
453: {

459:   VecDestroy(*wv);
460:   return(0);
461: }

463: #undef  __FUNCT__
465: /*@
466:    GVecGlobalToLocal - Copies a global vector including ghost points to 
467:        a local work vector.

469:    Input Parameter:
470: .  v - the grid vector
471: .  mode - either INSERT_VALUES or ADD_VALUES

473:    Output Parameters:
474: .  lv - the local vector

476:   Level: intermediate

478: .seealso: GVecLocalToGlobal(), GVecCreateLocalGVec()
479: @*/
480: int GVecGlobalToLocal(GVec v, InsertMode mode, GVec lv)
481: {
482:   int  ierr;
483:   Grid grid;


489:   GVecGetGrid(v, &grid);
490:   (*grid->ops->gvecglobaltolocal)(v, mode, lv);
491:   return(0);
492: }

494: #undef  __FUNCT__
496: /*@
497:    GVecLocalToGlobal - Copies a local vector including ghost points to its
498:        global representation.

500:    Input Parameter:
501: .  v - the local grid vector
502: .  mode - either INSERT_VALUES or ADD_VALUES

504:    Output Parameters:
505: .  lv - the global vector

507:   Level: intermediate

509: .seealso: GVecGlobalToLocal(), GVecCreateLocalGVec()
510: @*/
511: int GVecLocalToGlobal(GVec v,InsertMode mode,GVec lv)
512: {
513:   int  ierr;
514:   Grid grid;


520:   GVecGetGrid(v, &grid);
521:   (*grid->ops->gveclocaltoglobal)(v, mode, lv);
522:   return(0);
523: }
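
/*
   Usage sketch (added for illustration; not part of the original gvec.c): a typical
   local work-vector cycle built from the four routines above.

     GVec lv;

     ierr = GVecGetLocalWorkGVec(v, &lv);
     ierr = GVecGlobalToLocal(v, INSERT_VALUES, lv);
        ... work on the local (ghosted) values in lv ...
     ierr = GVecLocalToGlobal(lv, ADD_VALUES, v);
     ierr = GVecRestoreLocalWorkGVec(v, &lv);
*/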

525: #undef  __FUNCT__
527: /*@
528:   GVecEvaluateFunction - Evaluates a function over the specified fields
529:   on the locations defined by the underlying grid and its discretization.

531:   Input Parameters:
532: + v         - The grid vector
533: . numFields - The number of fields to evaluate
534: . fields    - The fields
535: . f         - The user provided function
536: . alpha     - The scalar multiplier
537: - ctx       - [Optional] A user provided context for the function

539:   The function f should return values ordered first by node and then by component.
540:         For instance, if superscripts denote components and subscripts denote nodes:
541:     v^0_0 v^1_0 v^2_0 v^0_1 v^1_1 v^2_1 ... v^0_n v^1_n v^2_n
542:   This is the standard for PointFunctions.

544:   Level: intermediate

546: .seealso: GVecEvaluateFunctionGalerkin,GMatEvaluateOperatorGalerkin
547: @*/
548: int GVecEvaluateFunction(GVec v, int numFields, int *fields, PointFunction f, PetscScalar alpha, void *ctx)
549: {
550:   Grid        grid;
551:   VarOrdering order;
552:   int         size;
553:   int         fieldIdx;
554:   int         ierr;

558:   GVecGetGrid(v, &grid);
559:   PetscLogEventBegin(GVEC_EvaluateFunction, v, grid, 0, 0);

561:   VecGetSize(v, &size);
562:   for(fieldIdx = 0; fieldIdx < numFields; fieldIdx++)
563:     if ((fields[fieldIdx] < 0) || (fields[fieldIdx] >= grid->numFields)) {
564:       SETERRQ1(PETSC_ERR_ARG_WRONG, "Invalid field number %d", fields[fieldIdx]);
565:     }
566:   /* Should check that fields are in the local order */

568:   if ((grid->isConstrained) && (size == grid->constraintOrder->numVars)) {
569:     VarOrderingCreateSubset(grid->constraintOrder, numFields, fields, PETSC_FALSE, &order);
570:     /* This is a kludge to get around the size check since the new constrained vars are not formed */
571:     order->numLocVars -= order->numLocNewVars;
572:     (*grid->ops->gvecevaluatefunction)(grid, v, order, f, alpha, ctx);
573:     order->numLocVars += order->numLocNewVars;
574:   } else if (size == grid->order->numVars) {
575:     VarOrderingCreateSubset(grid->order, numFields, fields, PETSC_FALSE, &order);
576:     (*grid->ops->gvecevaluatefunction)(grid, v, order, f, alpha, ctx);
577:   } else {
578:     SETERRQ(PETSC_ERR_ARG_INCOMP, "Vector size does not conform to grid");
579:   }
580:   VarOrderingDestroy(order);
581:   PetscLogEventEnd(GVEC_EvaluateFunction, v, grid, 0, 0);
582:   return(0);
583: }
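
/*
   Usage sketch (added for illustration; not part of the original gvec.c): projecting a
   user-supplied function into the first two fields of a grid vector. "myFunc" is a
   hypothetical PointFunction; its output must follow the node-major, component-minor
   ordering described in the docstring above.

     int fields[2] = {0, 1};

     ierr = GVecEvaluateFunction(v, 2, fields, myFunc, 1.0, PETSC_NULL);
*/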

585: #undef  __FUNCT__
587: /*@
588:   GVecEvaluateFunctionCollective - Evaluates a function over the specified fields
589:   on the locations defined by the underlying grid and its discretization. The only
590:   difference between it and GVecEvaluateFunction(), is that each processor is
591:   guaranteed to call f at each iteration, possibly with null arguments.

593:   Input Parameters:
594: + v         - The grid vector
595: . numFields - The number of fields to evaluate
596: . fields    - The fields
597: . f         - The user provided function
598: . alpha     - The  scalar multiplier
599: - ctx       - [Optional] The user provided context for the function

601:   The function f should return values ordered first by node and then by component.
602:         For instance, if superscripts denote components and subscripts denote nodes:
603:     v^0_0 v^1_0 v^2_0 v^0_1 v^1_1 v^2_1 ... v^0_n v^1_n v^2_n
604:   This is the standard for PointFunctions. Also, this function is typically
605:   used with an f that has a barrier. In this way you can guarantee that any
606:   collective operation will complete inside f.

608:   Level: intermediate

610: .seealso: GVecEvaluateFunctionGalerkin,GMatEvaluateOperatorGalerkin
611: @*/
612: int GVecEvaluateFunctionCollective(GVec v, int numFields, int *fields, PointFunction f, PetscScalar alpha, void *ctx)
613: {
614:   Grid        grid;
615:   VarOrdering order;
616:   int         size;
617:   int         fieldIdx;
618:   int         ierr;

622:   GVecGetGrid(v, &grid);
623:   PetscLogEventBegin(GVEC_EvaluateFunctionCollective, v, grid, 0, 0);

625:   VecGetSize(v, &size);
626:   for(fieldIdx = 0; fieldIdx < numFields; fieldIdx++)
627:     if ((fields[fieldIdx] < 0) || (fields[fieldIdx] >= grid->numFields)) {
628:       SETERRQ1(PETSC_ERR_ARG_WRONG, "Invalid field number %d", fields[fieldIdx]);
629:     }
630:   /* Should check that fields are in the local order */

632:   if ((grid->isConstrained) && (size == grid->constraintOrder->numVars)) {
633:     VarOrderingCreateSubset(grid->constraintOrder, numFields, fields, PETSC_FALSE, &order);
634:     /* This is a kludge to get around the size check since the new constrained vars are not formed */
635:     order->numLocVars -= order->numLocNewVars;
636:     (*grid->ops->gvecevaluatefunctioncollective)(grid, v, order, f, alpha, ctx);
637:     order->numLocVars += order->numLocNewVars;
638:   } else if (size == grid->order->numVars) {
639:     VarOrderingCreateSubset(grid->order, numFields, fields, PETSC_FALSE, &order);
640:     (*grid->ops->gvecevaluatefunctioncollective)(grid, v, order, f, alpha, ctx);
641:   } else {
642:     SETERRQ(PETSC_ERR_ARG_INCOMP, "Vector size does not conform to grid");
643:   }
644:   VarOrderingDestroy(order);
645:   PetscLogEventEnd(GVEC_EvaluateFunctionCollective, v, grid, 0, 0);
646:   return(0);
647: }

649: #undef  __FUNCT__
651: /*@
652:   GVecEvaluateFunctionRectangular - Evaluates a function over the
653:   specified fields on the locations defined by the underlying grid
654:   and its discretization, and takes a user-defined ordering.

656:   Input Parameters:
657: + v     - The grid vector
658: . order - The variable ordering
659: . f     - The user provided function
660: . alpha - A scalar multiplier
661: - ctx   - An optional user provided context for the function

663:   Level: intermediate

665: .seealso: GVecEvaluateFunctionGalerkin,GMatEvaluateOperatorGalerkin
666: @*/
667: int GVecEvaluateFunctionRectangular(GVec v, VarOrdering order, PointFunction f, PetscScalar alpha, void *ctx)
668: {
669:   Grid grid;
670:   int  size;
671:   int  ierr;

675:   GVecGetGrid(v, &grid);
676:   VecGetSize(v, &size);
677:   if (size != order->numVars) SETERRQ(PETSC_ERR_ARG_INCOMP, "Vector size does not conform to ordering");
678:   /* Should check that fields are in the local order */

680:   (*grid->ops->gvecevaluatefunction)(grid, v, order, f, alpha, ctx);
681:   return(0);
682: }

684: #undef  __FUNCT__
686: /*@
687:   GVecEvaluateFunctionGalerkin - Evaluates the weak form of a function over
688:   a field on the locations defined by the underlying grid and its discretization.

690:   Input Parameter:
691: . v         - The grid vector
692: . numFields - The number of fields to evaluate
693: . fields    - The fields
694: . f         - The user provided function
695: . alpha     - A scalar multiplier
696: . ctx       - An optional user provided context for the function

698:   Level: intermediate

700: .seealso: GVecEvaluateFunction,GMatEvaluateFunctionGalerkin
701: @*/
702: int GVecEvaluateFunctionGalerkin(GVec v, int numFields, int *fields, PointFunction f, PetscScalar alpha, void *ctx)
703: {
704:   Grid grid;
705:   int  fieldIdx;
706:   int  ierr;


712:   GVecGetGrid(v, &grid);
713:   for(fieldIdx = 0; fieldIdx < numFields; fieldIdx++) {
714:     if ((fields[fieldIdx] < 0) || (fields[fieldIdx] >= grid->numFields)) {
715:       SETERRQ(PETSC_ERR_ARG_WRONG, "Invalid field number");
716:     }
717:   }
718:   /* Should check that fields are in the local order */
719:   (*grid->ops->gvecevaluatefunctiongalerkin)(grid, v, numFields, fields, grid->locOrder, f, alpha, ctx);
720: 
721:   return(0);
722: }

724: #undef  __FUNCT__
726: /*@
727:   GVecEvaluateFunctionGalerkinCollective - Evaluates the weak form of a function over
728:   a field on the locations defined by the underlying grid and its discretization. The
729:   only difference between it and GVecEvaluateFunctionGalerkin(), is that each processor
730:   is guaranteed to call f at each iteration, possibly with null arguments.

732:   Input Parameter:
733: . v         - The grid vector
734: . numFields - The number of fields to evaluate
735: . fields    - The fields
736: . f         - The user provided function
737: . alpha     - A scalar multiplier
738: . ctx       - An optional user provided context for the function

740:   Level: intermediate

742: .seealso: GVecEvaluateFunction,GMatEvaluateFunctionGalerkin
743: @*/
744: int GVecEvaluateFunctionGalerkinCollective(GVec v, int numFields, int *fields, PointFunction f, PetscScalar alpha, void *ctx)
745: {
746:   Grid grid;
747:   int  fieldIdx;
748:   int  ierr;


754:   GVecGetGrid(v, &grid);
755:   for(fieldIdx = 0; fieldIdx < numFields; fieldIdx++) {
756:     if ((fields[fieldIdx] < 0) || (fields[fieldIdx] >= grid->numFields)) {
757:       SETERRQ(PETSC_ERR_ARG_WRONG, "Invalid field number");
758:     }
759:   }
760:   /* Should check that fields are in the local order */
761:   (*grid->ops->gvecevaluatefunctiongalerkincollective)(grid, v, numFields, fields, grid->locOrder, f, alpha, ctx);
762: 
763:   return(0);
764: }

766: #undef  __FUNCT__
768: /*@
769:   GVecEvaluateBoundaryFunctionGalerkin - Evaluates the weak form of a function over
770:   a field on the locations defined by the boundary of the underlying mesh and its
771:   discretization.

773:   Input Parameter:
774: . v         - The grid vector
775: . numFields - The number of fields to evaluate
776: . fields    - The fields
777: . f         - The user provided function
778: . alpha     - A scalar multiplier
779: . ctx       - An optional user provided context for the function

781:   Level: intermediate

783: .seealso: GVecEvaluateFunction,GMatEvaluateFunctionGalerkin
784: @*/
785: int GVecEvaluateBoundaryFunctionGalerkin(GVec v, int numFields, int *fields, PointFunction f, PetscScalar alpha, void *ctx)
786: {
787:   Grid grid;
788:   int  fieldIdx;
789:   int  ierr;


795:   GVecGetGrid(v, &grid);
796:   for(fieldIdx = 0; fieldIdx < numFields; fieldIdx++) {
798:     if ((fields[fieldIdx] < 0) || (fields[fieldIdx] >= grid->numFields)) {
798:       SETERRQ(PETSC_ERR_ARG_WRONG, "Invalid field number");
799:     }
800:   }
801:   /* Should check that fields are in the local order */
802:   (*grid->ops->gvecevaluateboundaryfunctiongalerkin)(grid, v, numFields, fields, grid->bdLocOrder, f, alpha, ctx);
803: 
804:   return(0);
805: }

807: #undef  __FUNCT__
809: /*@
810:   GVecEvaluateBoundaryFunctionGalerkinCollective - Evaluates the weak form of a
811:   function over a field on the locations defined by the boundary of the underlying
812:   mesh and its discretization. The only difference between it and
813:   GVecEvaluateBoundaryFunctionGalerkin(), is that each processor is guaranteed to
814:   call f at each iteration, possibly with null arguments.

816:   Input Parameter:
817: . v         - The grid vector
818: . numFields - The number of fields to evaluate
819: . fields    - The fields
820: . f         - The user provided function
821: . alpha     - A scalar multiplier
822: . ctx       - An optional user provided context for the function

824:   Level: intermediate

826: .seealso: GVecEvaluateFunction,GMatEvaluateFunctionGalerkin
827: @*/
828: int GVecEvaluateBoundaryFunctionGalerkinCollective(GVec v, int numFields, int *fields, PointFunction f, PetscScalar alpha, void *ctx)
829: {
830:   Grid grid;
831:   int  fieldIdx;
832:   int  ierr;


838:   GVecGetGrid(v, &grid);
839:   for(fieldIdx = 0; fieldIdx < numFields; fieldIdx++) {
840:     if ((fields[fieldIdx] < 0) || (fields[fieldIdx] >= grid->numFields)) {
841:       SETERRQ(PETSC_ERR_ARG_WRONG, "Invalid field number");
842:     }
843:   }
844:   /* Should check that fields are in the local order */
845:   (*grid->ops->gvecevaluateboundaryfunctiongalerkincollective)(grid, v, numFields, fields, grid->bdLocOrder, f, alpha, ctx);
846: 
847:   return(0);
848: }

850: #undef  __FUNCT__
852: /*@
853:   GVecEvaluateNonlinearOperatorGalerkin - Evaluates the weak form of a nonlinear operator
854:   over a field on the locations defined by the underlying grid and its discretization.

856:   Collective on GVec

858:   Input Parameter:
859: . v         - The grid vector
860: . n         - The number of input vectors
861: . vecs      - The input vectors
862: . numFields - The number of fields to evaluate
863: . fields    - The fields
864: . op        - The nonlinear operator
865: . alpha     - The scalar multiplier
866: . isALE     - The flag for ALE operators
867: . ctx       - [Optional] A user provided context for the function

869:   Level: intermediate

871: .keywords: grid vector, nonlinear, operator, galerkin
872: .seealso: GVecEvaluateFunctionGalerkin()
873: @*/
874: int GVecEvaluateNonlinearOperatorGalerkin(GVec v, int n, GVec vecs[], int numFields, int *fields, NonlinearOperator op,
875:                                           PetscScalar alpha, PetscTruth isALE, void *ctx)
876: {
877:   Grid grid;
878:   GVec x, y;
879:   int  fieldIdx;
880:   int  ierr;


886:   GVecGetGrid(v, &grid);
887:   for(fieldIdx = 0; fieldIdx < numFields; fieldIdx++) {
888:     if ((fields[fieldIdx] < 0) || (fields[fieldIdx] >= grid->numFields)) {
889:       SETERRQ(PETSC_ERR_ARG_WRONG, "Invalid field number");
890:     }
891:   }
892:   switch(n) {
893:   case 1:
894:     x = y = vecs[0];
895:     break;
896:   case 2:
897:     x = vecs[0];
898:     y = vecs[1];
899:     break;
900:   default:
901:       SETERRQ(PETSC_ERR_ARG_OUTOFRANGE, "Only quadratic operators supported");
902:   }
903:   /* Should check that fields are in the local order */
904:   (*grid->ops->gvecevaluatenonlinearoperatorgalerkin)(grid, v, x, y, numFields, fields, grid->locOrder, op, alpha,
905:                                                              isALE, ctx);
906: 
907:   return(0);
908: }

910: #undef  __FUNCT__
912: /*@
913:    GVecEvaluateOperatorGalerkin - Evaluates the weak form of an operator over
914:    a field on the locations defined by the underlying grid and its discretization.

916:    Input Parameter:
917: .  v      - The grid vector
918: .  x,y    - The input grid vectors, usually the previous solution
919: .  sField - The shape function field
920: .  tField - The test function field
921: .  op     - The operator
922: .  alpha  - A scalar multiplier
923: .  ctx    - An optional user provided context for the function

925:   Level: intermediate

927: .seealso: GVecEvaluateFunction,GMatEvaluateFunctionGalerkin
928: @*/
929: int GVecEvaluateOperatorGalerkin(GVec v, GVec x, GVec y, int sField, int tField, int op, PetscScalar alpha, void *ctx)
930: {
931:   Grid             grid;
932:   VarOrdering      sOldOrder, tOldOrder;
933:   VarOrdering      sOrder,    tOrder;
934:   LocalVarOrdering sLocOrder, tLocOrder;
935:   int              numFields;
936:   int              ierr;


943:   GVecGetGrid(v, &grid);
944:   /* Check for valid fields */
945:   GridGetNumFields(grid, &numFields);
946:   if ((sField < 0) || (sField >= numFields) || (tField < 0) || (tField >= numFields)) {
947:     SETERRQ(PETSC_ERR_ARG_WRONG, "Invalid field number");
948:   }
949:   /* Check for valid operator */
950:   if ((op < 0) || (op >= grid->fields[sField].disc->numOps) || (!grid->fields[sField].disc->operators[op])) {
951:     SETERRQ(PETSC_ERR_ARG_WRONG, "Invalid operator");
952:   }
953:   if ((grid->fields[sField].disc->operators[op]->precompInt == PETSC_NULL) &&
954:       (grid->fields[sField].disc->operators[op]->opFunc     == PETSC_NULL) &&
955:       (grid->fields[sField].disc->operators[op]->ALEOpFunc  == PETSC_NULL)) {
956:     SETERRQ(PETSC_ERR_ARG_WRONG, "Operator cannot be calculated");
957:   }
958:   if (grid->fields[sField].disc->numQuadPoints != grid->fields[tField].disc->numQuadPoints) {
959:     SETERRQ(PETSC_ERR_ARG_INCOMP, "Incompatible quadrature sets");
960:   }
961:   /* Create orderings */
962:   GVecGetOrder(x, &sOldOrder);
963:   VarOrderingCreateSubset(sOldOrder, 1, &sField, PETSC_FALSE, &sOrder);
964:   LocalVarOrderingCreate(grid, 1, &sField, &sLocOrder);
965:   GVecGetOrder(v, &tOldOrder);
966:   VarOrderingCreateSubset(tOldOrder, 1, &tField, PETSC_FALSE, &tOrder);
967:   LocalVarOrderingCreate(grid, 1, &tField, &tLocOrder);
968:   /* Calculate operator */
969:   (*grid->ops->gvecevaluateoperatorgalerkin)(grid, v, x, y, sOrder, sLocOrder, tOrder, tLocOrder, op, alpha, ctx);
970: 
971:   /* Destroy orderings */
972:   VarOrderingDestroy(sOrder);
973:   LocalVarOrderingDestroy(sLocOrder);
974:   VarOrderingDestroy(tOrder);
975:   LocalVarOrderingDestroy(tLocOrder);
976:   return(0);
977: }
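
/*
   Usage sketch (added for illustration; not part of the original gvec.c): evaluating one
   weak-form operator with field 0 as both shape and test space. "MY_OPERATOR" is a
   placeholder for an operator index registered with the field's Discretization; the valid
   indices are defined by the discretization, not by this file.

     ierr = GVecEvaluateOperatorGalerkin(v, x, x, 0, 0, MY_OPERATOR, 1.0, PETSC_NULL);
*/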

979: #undef  __FUNCT__
981: /*@
982:   GVecEvaluateOperatorGalerkinRectangular - Evaluates the weak form of an operator over
983:   a field on the locations defined by the underlying grid and its discretization.

985:   Collective on GVec

987:   Input Parameters:
988: + v         - The grid vector
989: . x         - A grid vector, usually the previous solution
990: . sOrder    - The global variable ordering for the shape functions 
991: . sLocOrder - The local variable ordering for the shape functions 
992: . tOrder    - The global variable ordering for the test functions 
993: . tLocOrder - The local variable ordering for the test functions 
994: . op        - The operator
995: . alpha     - The scalar multiplier for the operator
996: - ctx       - [Optional] The user provided context for the function

998:   Level: intermediate

1000: .keywords: grid vector, evaluate, operator, galerkin
1001: .seealso: GVecEvaluateFunction(), GMatEvaluateFunctionGalerkin()
1002: @*/
1003: int GVecEvaluateOperatorGalerkinRectangular(GVec v, GVec x, VarOrdering sOrder, LocalVarOrdering sLocOrder,
1004:                                             VarOrdering tOrder, LocalVarOrdering tLocOrder, int op, PetscScalar alpha,
1005:                                             void *ctx)
1006: {
1007:   Grid          grid;
1008:   FieldClassMap sMap, tMap;
1009:   int           numTotalFields;
1010:   int          *sFields, *tFields;
1011:   int           f;
1012:   int           ierr;


1022:   GVecGetGrid(v, &grid);
1023:   GridGetNumFields(grid, &numTotalFields);
1024:   VarOrderingGetClassMap(sOrder, &sMap);
1025:   VarOrderingGetClassMap(tOrder, &tMap);
1026:   sFields = sMap->fields;
1027:   tFields = tMap->fields;
1028:   /* Check for compatible orderings */
1029:   if ((tOrder->numVars != v->N) || (tOrder->numLocVars != v->n)) {
1030:     SETERRQ(PETSC_ERR_ARG_INCOMP, "Incompatible test function ordering");
1031:   }
1032:   if ((sOrder->numVars != x->N) || (sOrder->numLocVars != x->n)) {
1033:     SETERRQ(PETSC_ERR_ARG_INCOMP, "Incompatible shape function ordering");
1034:   }
1035:   if (sMap->numFields != tMap->numFields) {
1036:     SETERRQ(PETSC_ERR_ARG_INCOMP, "Shape and test function orderings must have the same number of fields");
1037:   }
1038:   for(f = 0; f < sMap->numFields; f++) {
1039:     if ((sFields[f] < 0) || (sFields[f] >= numTotalFields) || (tFields[f] < 0) || (tFields[f] >= numTotalFields)) {
1040:       SETERRQ(PETSC_ERR_ARG_WRONG, "Invalid field number");
1041:     }
1042:     /* Check for valid operator */
1043:     if ((op < 0) || (op >= grid->fields[sFields[f]].disc->numOps) || (!grid->fields[sFields[f]].disc->operators[op])) {
1044:       SETERRQ(PETSC_ERR_ARG_WRONG, "Invalid operator");
1045:     }
1046:     if ((grid->fields[sFields[f]].disc->operators[op]->precompInt == PETSC_NULL) &&
1047:         (grid->fields[sFields[f]].disc->operators[op]->opFunc     == PETSC_NULL) &&
1048:         (grid->fields[sFields[f]].disc->operators[op]->ALEOpFunc  == PETSC_NULL)) {
1049:       SETERRQ(PETSC_ERR_ARG_WRONG, "Operator cannot be calculated");
1050:     }
1051:     if (grid->fields[sFields[f]].disc->numQuadPoints != grid->fields[tFields[f]].disc->numQuadPoints) {
1052:       SETERRQ(PETSC_ERR_ARG_INCOMP, "Incompatible quadrature sets");
1053:     }
1054:   }
1055:   /* Calculate operator */
1056:   (*grid->ops->gvecevaluateoperatorgalerkin)(grid, v, x, x, sOrder, sLocOrder, tOrder, tLocOrder, op, alpha, ctx);
1057: 
1058:   return(0);
1059: }

1061: #undef  __FUNCT__
1063: /*@
1064:    GVecEvaluateJacobian - Evaluates the action of the system matrix
1065:          over the active fields on the locations defined by the underlying grid and
1066:          its discretization.

1068:    Input Parameters:
1069: +  x   - The argument vector to the Jacobian
1070: .  y   - The vector to which the Jacobian is applied
1071: -  ctx - An optional user provided context for the function

1073:    Output Parameter:
1074: .  v   - The result vector J(x) y

1076:   Level: intermediate

1078: .seealso: GVecEvaluateFunction, GMatEvaluateFunctionGalerkin
1079: @*/
1080: int GVecEvaluateJacobian(GVec v, GVec x, GVec y, void *ctx)
1081: {
1082:   Grid      grid;
1083:   Mesh      mesh;
1084:   MeshMover mover;
1085:   Grid      meshVelGrid;
1086:   int       ierr;


1091:   GVecGetGrid(v, &grid);
1092:   if (grid->ALEActive == PETSC_TRUE) {
1093:     GridGetMesh(grid, &mesh);
1094:     MeshGetMover(mesh, &mover);
1095:     MeshMoverGetVelocityGrid(mover, &meshVelGrid);
1096:     GridSetUp(meshVelGrid);
1097:   }
1098:   PetscLogEventBegin(GVEC_EvaluateJacobian, v, grid, x, y);
1099:   (*grid->ops->gvecevaluatesystemmatrix)(grid, x, y, v, ctx);
1100:   PetscLogEventEnd(GVEC_EvaluateJacobian, v, grid, x, y);
1101:   return(0);
1102: }

1104: #undef  __FUNCT__
1106: /*@
1107:    GVecEvaluateJacobianDiagonal - Evaluates the action of the diagonal of the
1108:    system matrix over the active fields on the locations defined by the underlying
1109:    grid and its discretization.

1111:    Input Parameters:
1112: +  x   - The argument vector to the Jacobian
1113: -  ctx - An optional user provided context for the function

1115:    Output Parameter:
1116: .  d   - The diagonal of J(x)

1118:   Level: intermediate

1120: .seealso: GVecEvaluateJacobian, GVecEvaluateFunction
1121: @*/
1122: int GVecEvaluateJacobianDiagonal(GVec d, GVec x, void *ctx)
1123: {
1124:   Grid      grid;
1125:   Mesh      mesh;
1126:   MeshMover mover;
1127:   Grid      meshVelGrid;
1128:   int       ierr;


1133:   GVecGetGrid(d, &grid);
1134:   if (grid->ALEActive == PETSC_TRUE) {
1135:     GridGetMesh(grid, &mesh);
1136:     MeshGetMover(mesh, &mover);
1137:     MeshMoverGetVelocityGrid(mover, &meshVelGrid);
1138:     GridSetUp(meshVelGrid);
1139:   }
1140:   PetscLogEventBegin(GVEC_EvaluateJacobian, d, grid, x, 0);
1141:   (*grid->ops->gvecevaluatesystemmatrixdiagonal)(grid, x, d, ctx);
1142:   PetscLogEventEnd(GVEC_EvaluateJacobian, d, grid, x, 0);
1143:   return(0);
1144: }

1146: #undef  __FUNCT__
1148: /*@
1149:   GVecEvaluateJacobianConstrained - Evaluates the action of the Jacobian
1150:   of the extra fields introduced by the constraints.

1152:   Collective on GVec

1154:   Input Parameter:
1155: . v - The grid vector

1157:   Output Parameter:
1158: . x - The action of the constraint Jacobian 

1160:   Level: intermediate

1162: .keywords: grid vector, jacobian, evaluate, constraint
1163: .seealso: GVecEvaluateFunction(), GVecEvaluateRhs()
1164: @*/
1165: int GVecEvaluateJacobianConstrained(GVec v, GVec x)
1166: {
1167:   Grid       grid;
1168:   PetscTruth isConstrained;
1169:   int        ierr;


1175:   GVecGetGrid(v, &grid);
1176:   GridIsConstrained(grid, &isConstrained);
1177:   if ((isConstrained == PETSC_TRUE) && (grid->constraintCtx->ops->applyjac)) {
1178:     (*grid->constraintCtx->ops->applyjac)(grid, x, v);
1179:   }
1180:   return(0);
1181: }

1183: #undef  __FUNCT__
1185: /*@
1186:   GVecSolveJacobianConstrained - Evaluates the action of the inverse of
1187:   the Jacobian of the extra fields introduced by the constraints.

1189:   Collective on GVec

1191:   Input Parameter:
1192: . v - The grid vector

1194:   Output Parameter:
1195: . x - The action of the inverse of the constraint Jacobian

1197:   Level: intermediate

1199: .keywords: grid vector, jacobian, solve, constraint
1200: .seealso: GVecEvaluateFunction(), GVecEvaluateRhs()
1201: @*/
1202: int GVecSolveJacobianConstrained(GVec v, GVec x)
1203: {
1204:   Grid       grid;
1205:   PetscTruth isConstrained;
1206:   int        ierr;


1212:   GVecGetGrid(v, &grid);
1213:   GridIsConstrained(grid, &isConstrained);
1214:   if ((isConstrained == PETSC_TRUE) && (grid->constraintCtx->ops->solvejac)) {
1215:     (*grid->constraintCtx->ops->solvejac)(grid, x, v);
1216:   }
1217:   return(0);
1218: }

1220: #undef  __FUNCT__
1222: /*@
1223:    GVecSetBoundary - Applies the specified Dirichlet boundary conditions to this vector.

1225:    Input Parameter:
1226: .  v   - The grid vector
1227: .  ctx - An optional user provided context for the function

1229:   Level: intermediate

1231: .seealso: GridSetBC(), GridAddBC()
1232: @*/
1233: int GVecSetBoundary(GVec v, void *ctx)
1234: {
1235:   Grid           grid;
1236:   int            bc;
1237:   int            num;
1238:   int           *bd;
1239:   int           *field;
1240:   PointFunction *func;
1241:   int            ierr;

1245:   GVecGetGrid(v, &grid);
1246:   PetscLogEventBegin(GVEC_SetBoundary, v, grid, 0, 0);
1247:   for(bc = 0, num = 0; bc < grid->numBC; bc++) {
1248:     if (grid->bc[bc].reduce == PETSC_FALSE) num++;
1249:   }
1250:   if (num > 0) {
1251:     PetscMalloc(num * sizeof(int),             &bd);
1252:     PetscMalloc(num * sizeof(int),             &field);
1253:     PetscMalloc(num * sizeof(PointFunction *), &func);
1254:     for(bc = 0, num = 0; bc < grid->numBC; bc++) {
1255:       if (grid->bc[bc].reduce == PETSC_FALSE) {
1256:         bd[num]     = grid->bc[bc].boundary;
1257:         field[num]  = grid->bc[bc].field;
1258:         func[num++] = grid->bc[bc].func;
1259:       }
1260:     }
1261:     GridSetVecBoundaryRectangular(num, bd, field, func, grid->order, v, ctx);
1262:     PetscFree(bd);
1263:     PetscFree(field);
1264:     PetscFree(func);
1265:   }
1266:   for(bc = 0; bc < grid->numPointBC; bc++) {
1267:     if (grid->pointBC[bc].reduce == PETSC_FALSE) {
1268:       GridSetVecPointBoundary(grid->pointBC[bc].node, grid->pointBC[bc].field, grid->pointBC[bc].func, v, ctx);
1269:     }
1270:   }
1271:   PetscLogEventEnd(GVEC_SetBoundary, v, grid, 0, 0);
1272:   return(0);
1273: }

1275: #undef  __FUNCT__
1277: /*@
1278:    GVecSetBoundaryZero - Applies a Dirichlet boundary condition of zero to the specified
1279:    fields in this vector. This is mainly used to double up the boundary conditions in a
1280:    time dependent nonlinear problem. The original boundary condition is used to initialize
1281:    the solution at the start of the Newton iteration, and this is used to set the corresponding
1282:    components of the residual to zero.

1284:    Input Parameter:
1285: .  v   - The grid vector
1286: .  ctx - An optional user provided context for the function

1288:   Level: intermediate

1290: .seealso: GridSetBC(), GridAddBC()
1291: @*/
1292: int GVecSetBoundaryZero(GVec v, void *ctx)
1293: {
1294:   Grid grid;
1295:   int  bc;
1296:   int  ierr;

1300:   GVecGetGrid(v, &grid);
1301:   for(bc = 0; bc < grid->numBC; bc++) {
1302:     if (grid->bc[bc].reduce == PETSC_FALSE) {
1303:       GridSetVecBoundary(grid->bc[bc].boundary, grid->bc[bc].field, PointFunctionZero, v, ctx);
1304:     }
1305:   }
1306:   for(bc = 0; bc < grid->numPointBC; bc++) {
1307:     if (grid->pointBC[bc].reduce == PETSC_FALSE) {
1308:       GridSetVecPointBoundary(grid->pointBC[bc].node, grid->pointBC[bc].field, PointFunctionZero, v, ctx);
1309:     }
1310:   }
1311:   return(0);
1312: }

1314: #undef  __FUNCT__
1316: /*@
1317:    GVecSetBoundaryDifference - Applies the specified Dirichlet boundary conditions
1318:    to this vector, but uses the difference of the value in the given vector and the
1319:    computed value.

1321:    Input Parameter:
1322: .  v   - The grid vector
1323: .  u   - A grid vector, usually the previous solution
1324: .  ctx - An optional user provided context for the function

1326:   Level: intermediate

1328: .seealso: GridSetVecBoundaryDifference(), GridSetBC(), GridAddBC()
1329: @*/
1330: int GVecSetBoundaryDifference(GVec v, GVec u, void *ctx)
1331: {
1332:   Grid grid;
1333:   int  bc;
1334:   int  ierr;

1339:   GVecGetGrid(v, &grid);
1340:   for(bc = 0; bc < grid->numBC; bc++) {
1341:     if (grid->bc[bc].reduce == PETSC_FALSE) {
1342:       GridSetVecBoundaryDifference(grid->bc[bc].boundary, grid->bc[bc].field, u, grid->bc[bc].func, v, ctx);
1343:     }
1344:   }
1345:   for(bc = 0; bc < grid->numPointBC; bc++) {
1346:     if (grid->pointBC[bc].reduce == PETSC_FALSE) {
1347:       GridSetVecPointBoundaryDifference(grid->pointBC[bc].node, grid->pointBC[bc].field, u, grid->pointBC[bc].func, v, ctx);
1348:     }
1349:   }
1350:   return(0);
1351: }

1353: #undef  __FUNCT__
1355: /*@
1356:   GVecInterpolateField - Interpolates the field defined by the grid
1357:   vector to the specified point, and stores the components in the
1358:   array provided.

1360:   Input Parameter:
1361: . v      - The grid vector
1362: . x,y,z  - The interpolation point
1363: . ctx    - An InterpolationContext

1365:   Output Parameter:
1366: . values - The interpolated field at the point

1368:   Level: intermediate

1370: .keywords: grid vector, interpolation
1371: .seealso: GVecEvaluateFunction, GVecEvaluateFunctionGalerkin
1372: @*/
1373: int GVecInterpolateField(GVec v, int field, double x, double y, double z, PetscScalar *values, InterpolationContext *ctx)
1374: {
1375:   Grid           grid;
1376:   Discretization disc;
1377:   int           *elemStart;
1378:   ElementVec     vec;
1379:   PetscScalar   *array;
1380:   double         coords[3];
1381:   double        *tmp;
1382:   MPI_Status     status;
1383:   PetscTruth     lost;
1384:   /*double         lostDist, lostX, lostY, lostZ;*/
1385:   int            numFound, numSent;
1386:   int            comp, elem, newElem, closeNode;
1387:   int            proc, var;
1388:   int            ierr;

1392:   ierr      = GVecGetGrid(v, &grid);
1393:   PetscLogEventBegin(GVEC_InterpolateField, v, grid, 0, 0);
1394:   disc      = grid->fields[field].disc;
1395:   comp      = disc->comp;
1396:   elemStart = grid->locOrder->elemStart;
1397:   vec       = grid->ghostElementVec;
1398:   array     = &vec->array[elemStart[field]];

1400:   /* Watch out for null collective call */
1401:   if (values == PETSC_NULL) {
1402:     /* Mark point as inactive */
1403:     elem = -2;
1404:   } else {
1405:     /* Locate the point in the mesh */
1406:     MeshLocatePoint(ctx->mesh, x, y, 0.0, &elem);
1407:   }

1409:   if (elem >= 0) {
1410:     /* Initialize element vector */
1411:     ElementVecZero(vec);
1412:     vec->reduceSize = grid->locOrder->elemSize;
1413:     /* Get indices for the old element */
1414:     GridCalcInterpolationElementVecIndices(grid, ctx->mesh, elem, ctx->order, vec);
1415:     /* Get values from local vector to element vector */
1416:     GridLocalToElementGeneral(grid, ctx->ghostVec, grid->bdReduceVecCur, grid->reduceSystem, grid->reduceElement, vec);
1417:     if (grid->explicitConstraints == PETSC_TRUE) {
1418:       /* Must transform to unconstrained variables for element integrals */
1419:       GridProjectInterpolationElementVec(grid, ctx->mesh, elem, ctx->order, PETSC_FALSE, vec);
1420:     }
1421:     /* Interpolate data to get field at new point */
1422:     DiscretizationInterpolateField(disc, ctx->mesh, elem, x, y, 0.0, array, values, INTERPOLATION_LOCAL);
1423:   } else if (elem != -2) {
1424:     if (ctx->numProcs == 1) {
1425:       SETERRQ2(PETSC_ERR_ARG_WRONG, "Node (%g,%g) not found in new mesh", x, y);
1426:     } else if (ctx->batchMode == PETSC_TRUE) {
1427:       if (ctx->numPoints[ctx->rank] >= ctx->maxPoints) {
1428:         PetscMalloc(ctx->maxPoints*6 * sizeof(double), &tmp);
1429:         PetscMemcpy(tmp, ctx->coords, ctx->maxPoints*3 * sizeof(double));
1430:         PetscFree(ctx->coords);
1431:         ctx->maxPoints *= 2;
1432:         ctx->coords     = tmp;
1433:       }
1434:       ctx->coords[ctx->numPoints[ctx->rank]*3]   = x;
1435:       ctx->coords[ctx->numPoints[ctx->rank]*3+1] = y;
1436:       ctx->coords[ctx->numPoints[ctx->rank]*3+2] = z;
1437:       ctx->numPoints[ctx->rank]++;
1438:       for(var = 0; var < comp; var++)
1439:         values[var] = 0.0;
1440:     }
1441:   }

1443:   if ((ctx->numProcs > 1) && (ctx->batchMode == PETSC_FALSE)) {
1444:     /* Communicate missing nodes */
1445:     MPI_Allgather(&elem, 1, MPI_INT, ctx->activeProcs, 1, MPI_INT, grid->comm);
1446:     coords[0] = x; coords[1] = y; coords[2] = z;
1447:     MPI_Allgather(coords, 3, MPI_DOUBLE, ctx->coords, 3, MPI_DOUBLE, grid->comm);

1449:     /* Handle each point */
1450:     numFound = 0;
1451:     for(proc = 0; proc < ctx->numProcs; proc++) {
1452:       /* If the point was not located by someone else */
1453:       if ((ctx->activeProcs[proc] == -1) && (proc != ctx->rank)) {
1454:         MeshLocatePoint(ctx->mesh, ctx->coords[proc*3], ctx->coords[proc*3+1], 0.0, &newElem);
1455:         if (newElem >= 0) {
1456:           /* Mark point as found by this processor */
1457:           ctx->activeProcs[proc] = ctx->rank;
1458:           /* Get indices for the old element */
1459:           GridCalcInterpolationElementVecIndices(grid, ctx->mesh, newElem, ctx->order, vec);
1460:           /* Get values from local vector to element vector */
1461:           GridLocalToElementGeneral(grid, ctx->ghostVec, grid->bdReduceVecCur, grid->reduceSystem, grid->reduceElement, vec);
1462:           if (grid->explicitConstraints == PETSC_TRUE) {
1463:             /* Must transform to unconstrained variables for element integrals */
1464:             GridProjectInterpolationElementVec(grid, ctx->mesh, newElem, ctx->order, PETSC_FALSE, vec);
1465:           }
1466:           /* Interpolate data to get field at new point */
1467:           DiscretizationInterpolateField(disc, ctx->mesh, newElem, ctx->coords[proc*3], ctx->coords[proc*3+1], 0.0,
1468:                                                 array, &ctx->values[proc*comp], INTERPOLATION_LOCAL);
1469: 
1470:           numFound++;
1471:         }
1472:       } else {
1473:         /* Mark point as inactive */
1474:         ctx->activeProcs[proc] = -2;
1475:       }
1476:     }

1478:     /* Extra step to handle nodes which slip outside the boundary */
1479:     MPI_Allreduce(ctx->activeProcs, ctx->calcProcs, ctx->numProcs, MPI_INT, MPI_MAX, grid->comm);
1480:     lost = PETSC_FALSE;
1481:     for(proc = 0; proc < ctx->numProcs; proc++) {
1482:       if (ctx->calcProcs[proc] == -1) {
1483:         /* PetscPrintf(grid->comm, "Lost point:\n ");
1484:         for(var = 0; var < ctx->numProcs; var++) PetscPrintf(grid->comm, " aProc[%d]: %d", var, ctx->activeProcs[var]);
1485:         PetscPrintf(grid->comm, "\n");
1486:         for(var = 0; var < ctx->numProcs; var++) PetscPrintf(grid->comm, " cProc[%d]: %d", var, ctx->calcProcs[var]);
1487:         PetscPrintf(grid->comm, "\n");
1488:         PetscPrintf(grid->comm, "  (%g, %g)\n", ctx->coords[proc*3], ctx->coords[proc*3+1]);*/
1489:         lost = PETSC_TRUE;
1490:         /* No one found it so search for the closest element */
1491:         MeshGetNearestNode(ctx->mesh, ctx->coords[proc*3], ctx->coords[proc*3+1], 0.0, PETSC_TRUE, &closeNode);
1492:         if (closeNode >= 0) {
1493:           MeshGetElementFromNode(ctx->mesh, closeNode, &newElem);
1494:           /* PetscPrintf(PETSC_COMM_SELF, "  found lost point on processor %d in element %d\n", ctx->rank, newElem);
1495:           MeshGetNodeCoords(ctx->mesh, closeNode, &lostX, &lostY, &lostZ);
1496:           lostDist = PetscSqr(MeshPeriodicDiffX(ctx->mesh, lostX - ctx->coords[proc*3])) +
1497:             PetscSqr(MeshPeriodicDiffY(ctx->mesh, lostY - ctx->coords[proc*3+1]));
1498:           PetscPrintf(PETSC_COMM_SELF, "  at (%g, %g), it was %g away from the point\n", lostX, lostY, lostDist); */
1499:           /* Mark point as found by this processor */
1500:           ctx->activeProcs[proc] = ctx->rank;
1501:           /* Get indices for the old element */
1502:           GridCalcInterpolationElementVecIndices(grid, ctx->mesh, newElem, ctx->order, vec);
1503:           /* Get values from local vector to element vector */
1504:           GridLocalToElementGeneral(grid, ctx->ghostVec, grid->bdReduceVecCur, grid->reduceSystem, grid->reduceElement, vec);
1505:           if (grid->explicitConstraints == PETSC_TRUE) {
1506:             /* Must transform to unconstrained variables for element integrals */
1507:             GridProjectInterpolationElementVec(grid, ctx->mesh, newElem, ctx->order, PETSC_FALSE, vec);
1508:           }
1509:           /* Interpolate data to get field at new point */
1510:           DiscretizationInterpolateField(disc, ctx->mesh, newElem, ctx->coords[proc*3], ctx->coords[proc*3+1], 0.0,
1511:                                                 array, &ctx->values[proc*comp], INTERPOLATION_LOCAL);
1512: 
1513:           numFound++;
1514:         }
1515:       }
1516:     }

1518:     /* Communicate interpolated values */
1519:     if (lost == PETSC_TRUE) {
1520:       MPI_Allreduce(ctx->activeProcs, ctx->calcProcs, ctx->numProcs, MPI_INT, MPI_MAX, grid->comm);
1521:       /* PetscPrintf(grid->comm, "Recommunicated interpolated values\n ");
1522:       for(var = 0; var < ctx->numProcs; var++) PetscPrintf(grid->comm, " aProc[%d]: %d", var, ctx->activeProcs[var]);
1523:       PetscPrintf(grid->comm, "\n");
1524:       for(var = 0; var < ctx->numProcs; var++) PetscPrintf(grid->comm, " cProc[%d]: %d", var, ctx->calcProcs[var]);
1525:       PetscPrintf(grid->comm, "\n"); */
1526:     }
1527:     numSent = 0;
1528:     for(proc = 0; proc < ctx->numProcs; proc++) {
1529:       if ((elem == -1) && (ctx->rank == proc)) {
1530:         /* Point from this domain was not found */
1531:         if (ctx->calcProcs[proc] < 0) {
1532:           SETERRQ2(PETSC_ERR_ARG_WRONG, "Node not found at (%g,%g)", x, y);
1533:         }
1534:         /* Must check for lost points found in the same domain */
1535:         if (ctx->calcProcs[proc] != ctx->rank) {
1536:           MPI_Recv(values, comp, MPI_DOUBLE, ctx->calcProcs[proc], 0, grid->comm, &status);
1537:         } else {
1538:           for(var = 0; var < comp; var++) values[var] = ctx->values[proc*comp+var];
1539:           numSent++;
1540:         }
1541: #if 0
1542:         PetscPrintf(PETSC_COMM_SELF, "Immediate point %d on proc %d\n", ctx->curPoint++, ctx->rank);
1543:         PetscPrintf(PETSC_COMM_SELF, "  x: %g y: %g z: %g\n  ", ctx->coords[proc*3], ctx->coords[proc*3+1], ctx->coords[proc*3+2]);
1544:         for(c = 0; c < comp; c++) {
1545:           PetscPrintf(PETSC_COMM_SELF, "val[%d]: %g ", c, values[c]);
1546:         }
1547:         PetscPrintf(PETSC_COMM_SELF, "\n");
1548: #endif
1549:       } else if (ctx->rank == ctx->calcProcs[proc]) {
1550:         /* Point from another domain was found here */
1551:         MPI_Send(&ctx->values[proc*comp], comp, MPI_DOUBLE, proc, 0, grid->comm);
1552:         numSent++;
1553:       } else if (ctx->rank == ctx->activeProcs[proc]) {
1554:         /* Point was found by multiple processors */
1555:         if (ctx->calcProcs[proc] < ctx->rank) {
1556:           SETERRQ(PETSC_ERR_ARG_CORRUPT, "Invalid precedence during interpolation");
1557:         }
1558:         numSent++;
1559:       }
1560:     }
1561:     if (numSent != numFound) {
1562:       SETERRQ2(PETSC_ERR_ARG_CORRUPT, "Number of nodes found %d does not match number sent %d", numFound, numSent);
1563:     }
1564:     /* if (lost == PETSC_TRUE) {
1565:       PetscBarrier((PetscObject) grid);
1566:       PetscPrintf(grid->comm, "Finished interpolation after lost point\n");
1567:     }*/
1568:   }

1570:   PetscLogEventEnd(GVEC_InterpolateField, v, grid, 0, 0);
1571:   return(0);
1572: }
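
/*
   Usage sketch (added for illustration; not part of the original gvec.c): interpolating one
   field of a grid vector at a physical point. "ictx" is an InterpolationContext assumed to
   have been set up elsewhere (the mesh, ordering, ghost vector, and parallel bookkeeping are
   all read from it above); "values" must have room for one entry per component of the field.

     PetscScalar values[3];

     ierr = GVecInterpolateField(v, field, px, py, 0.0, values, &ictx);
*/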

1574: #undef  __FUNCT__
1576: int GVecInterpolateFieldBatch(GVec v, int field, InterpolationContext *ctx)
1577: {
1578:   Grid           grid;
1579:   Discretization disc;
1580:   int            comp;
1581:   ElementVec     vec;
1582:   int            numProcs = ctx->numProcs;
1583:   int            rank     = ctx->rank;
1584:   int           *numCoords, *cOffsets;
1585:   int           *activePoints, *calcPoints;
1586:   double        *coords;
1587:   PetscScalar   *values, *array;
1588:   int            totPoints;
1589:   int            numFound;
1590:   int            proc, p, elem;
1591:   int            ierr;

1594:   if ((ctx->numProcs == 1) || (ctx->batchMode == PETSC_FALSE))
1595:     return(0);

1598:   ierr  = GVecGetGrid(v, &grid);
1599:   PetscLogEventBegin(GVEC_InterpolateFieldBatch, v, grid, 0, 0);
1600:   disc  = grid->fields[field].disc;
1601:   comp  = disc->comp;
1602:   vec   = grid->ghostElementVec;
1603:   array = &vec->array[grid->locOrder->elemStart[field]];
1604:   /* Get the number of points from each domain */
1605:   PetscMalloc((numProcs+1) * sizeof(int), &numCoords);
1606:   PetscMalloc((numProcs+1) * sizeof(int), &cOffsets);
1607:   PetscLogEventBegin(GVEC_InterpolateFieldBatchParallel, v, grid, 0, 0);
1608:   MPI_Allgather(&ctx->numPoints[rank], 1, MPI_INT, &ctx->numPoints[1], 1, MPI_INT, v->comm);
1609:   PetscLogEventEnd(GVEC_InterpolateFieldBatchParallel, v, grid, 0, 0);
1610:   /* Get the coordinates of each point */
1611:   ctx->numPoints[0] = 0;
1612:   cOffsets[0]       = 0;
1613:   for(proc = 0; proc < numProcs; proc++) {
1614:     numCoords[proc]         = ctx->numPoints[proc+1]*3;
1615:     ctx->numPoints[proc+1] += ctx->numPoints[proc];
1616:     cOffsets[proc+1]        = ctx->numPoints[proc+1]*3;
1617:   }
1618:   totPoints = ctx->numPoints[numProcs];
1619:   PetscMalloc(totPoints*3 * sizeof(double), &coords);
1620:   PetscLogEventBegin(GVEC_InterpolateFieldBatchParallel, v, grid, 0, 0);
1621:   MPI_Allgatherv(ctx->coords, numCoords[rank], MPI_DOUBLE, coords, numCoords, cOffsets, MPI_DOUBLE, v->comm);
1622:   PetscLogEventEnd(GVEC_InterpolateFieldBatchParallel, v, grid, 0, 0);

1624:   PetscMalloc(totPoints      * sizeof(int),         &activePoints);
1625:   PetscMalloc(totPoints      * sizeof(int),         &calcPoints);
1626:   PetscMalloc(totPoints*comp * sizeof(PetscScalar), &values);
1627:   PetscMalloc(totPoints*comp * sizeof(PetscScalar), &ctx->values);
1628:   PetscMemzero(values, totPoints*comp * sizeof(PetscScalar));
1629:   PetscLogEventBegin(GVEC_InterpolateFieldBatchCalc, v, grid, 0, 0);
1630:   /* Mark this domain's points as inactive */
1631:   for(p = 0; p < totPoints; p++) {
1632:     if ((p >= ctx->numPoints[rank]) && (p < ctx->numPoints[rank+1]))
1633:       activePoints[p] = -2;
1634:     else
1635:       activePoints[p] = -1;
1636:   }
1637:   /* Handle each point */
1638:   for(p = 0, numFound = 0; p < totPoints; p++)
1639:   {
1640:     if (activePoints[p] > -2) {
1641:       MeshLocatePoint(ctx->mesh, coords[p*3], coords[p*3+1], coords[p*3+2], &elem);
1642:       if (elem >= 0) {
1643:         /* Mark point as found by this processor */
1644:         activePoints[p] = rank;
1645:         /* Get indices for the old element */
1646:         GridCalcInterpolationElementVecIndices(grid, ctx->mesh, elem, ctx->order, vec);
1647:         /* Get values from local vector to element vector */
1648:         GridLocalToElementGeneral(grid, ctx->ghostVec, grid->bdReduceVecCur, grid->reduceSystem, grid->reduceElement, vec);
1649:         if (grid->explicitConstraints == PETSC_TRUE) {
1650:           /* Must transform to unconstrained variables for element integrals */
1651:           GridProjectInterpolationElementVec(grid, ctx->mesh, elem, ctx->order, PETSC_FALSE, vec);
1652:         }
1653:         /* Interpolate data to get field at new point */
1654:         DiscretizationInterpolateField(disc, ctx->mesh, elem, coords[p*3], coords[p*3+1], coords[p*3+2],
1655:                                               array, &values[p*comp], INTERPOLATION_LOCAL);
1656: 
1657:         numFound++;
1658:       }
1659:     }
1660:   }
1661:   PetscLogEventEnd(GVEC_InterpolateFieldBatchCalc, v, grid, 0, 0);

1663:   /* Communicate interpolated values */
1664: #if 1
1665:   /* This is REALLY bad (a full totPoints*comp Allreduce) but probably better than what is
         there now: values[] was zeroed above and only the rank that located each point fills its
         entries, so the MPI_SUM reduction leaves every process with the complete set of values */
1666:   PetscLogEventBegin(GVEC_InterpolateFieldBatchParallel, v, grid, 0, 0);
1667:   MPI_Allreduce(values, ctx->values, totPoints*comp, MPI_DOUBLE, MPI_SUM, v->comm);
1668:   PetscLogEventEnd(GVEC_InterpolateFieldBatchParallel, v, grid, 0, 0);
1669: #else
1670:   MPI_Allreduce(activePoints, calcPoints, totPoints, MPI_INT, MPI_MAX, v->comm);
1671:   for(proc = 0, numSent = 0; proc < numProcs; proc++)
1672:     for(p = ctx->numPoints[proc]; p < ctx->numPoints[proc+1]; p++)
1673:     {
1674:       if (rank == proc)
1675:       {
1676:         /* Point from this domain was not found */
1677:         if (calcPoints[p] < 0) {
1678:           PetscLogInfo(v, "[%d]x: %g y: %g z: %g\n", rank, coords[p*3], coords[p*3+1], coords[p*3+2]);
1679:           SETERRQ(PETSC_ERR_PLIB, "Node not found");
1680:         }
1681:         MPI_Recv(&ctx->values[p*comp], comp, MPI_DOUBLE, calcPoints[p], 0, v->comm, &status);
1682:       } else if (rank == calcPoints[p]) {
1683:         /* Point from another domain was found here */
1684:         MPI_Send(&values[p*comp], comp, MPI_DOUBLE, proc, 0, v->comm);
1685:         numSent++;
1686:       } else if (rank == activePoints[p]) {
1687:         /* Point was found by multiple processors */
1688:         if (calcPoints[p] < rank) SETERRQ(PETSC_ERR_ARG_CORRUPT, "Invalid precedence during interpolation");
1689:         numSent++;
1690:       }
1691:     }
1692:   if (numSent != numFound) SETERRQ(PETSC_ERR_ARG_CORRUPT, "Number of nodes found does not match number sent");
1693: #endif
1694:   PetscFree(numCoords);
        PetscFree(cOffsets);     /* also release the offset and gathered-coordinate buffers allocated above */
        PetscFree(coords);
1695:   PetscFree(activePoints);
1696:   PetscFree(calcPoints);
1697:   PetscFree(values);
1698:   PetscLogEventEnd(GVEC_InterpolateFieldBatch, v, grid, 0, 0);
1699:   return(0);
1700: }
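
/*
  Illustrative sketch (hypothetical helper, not used elsewhere in this file): the gather
  pattern used in the routine above, reduced to its essentials.  Each process contributes
  nPoints points; the counts are allgathered, converted into displacements of three doubles
  per point, and the coordinates are exchanged with MPI_Allgatherv() so that every process
  holds the complete, rank-ordered list.
*/
#if 0
static int gatherCoordsSketch(MPI_Comm comm, int nPoints, double *localCoords, int *totPoints, double **allCoords)
{
  int  numProcs, rank, proc;
  int *counts, *numCoords, *cOffsets;

  MPI_Comm_size(comm, &numProcs);
  MPI_Comm_rank(comm, &rank);
  PetscMalloc(numProcs     * sizeof(int), &counts);
  PetscMalloc(numProcs     * sizeof(int), &numCoords);
  PetscMalloc((numProcs+1) * sizeof(int), &cOffsets);
  /* Gather every process's point count */
  MPI_Allgather(&nPoints, 1, MPI_INT, counts, 1, MPI_INT, comm);
  /* Convert the counts into coordinate counts and displacements (3 doubles per point) */
  cOffsets[0] = 0;
  for(proc = 0; proc < numProcs; proc++) {
    numCoords[proc]  = counts[proc]*3;
    cOffsets[proc+1] = cOffsets[proc] + numCoords[proc];
  }
  *totPoints = cOffsets[numProcs]/3;
  PetscMalloc(cOffsets[numProcs] * sizeof(double), allCoords);
  /* Every process receives the full coordinate list, ordered by rank */
  MPI_Allgatherv(localCoords, numCoords[rank], MPI_DOUBLE, *allCoords, numCoords, cOffsets, MPI_DOUBLE, comm);
  PetscFree(counts);
  PetscFree(numCoords);
  PetscFree(cOffsets);
  return(0);
}
#endif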

1702: #undef  __FUNCT__
1704: /*@
1705:   GVecCreate - Creates a grid vector associated with a particular grid.

1707:   Input Parameter:
1708: . grid - The grid defining the discrete function

1710:   Output Parameter:
1711: . gvec - The resulting grid vector

1713:   Level: beginner

1715: .seealso: GVecCreateConstrained()
1716: @*/
1717: int GVecCreate(Grid grid, GVec *gvec)
1718: {

1724:   ierr = GridSetUp(grid);
1725:   if (ierr) SETERRQ(PETSC_ERR_PLIB, "Grid setup failed");
1726:   GVecCreateRectangular(grid, grid->order, gvec);
1727:   return(0);
1728: }
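
/*
  Illustrative sketch (hypothetical caller): the usual create/use/destroy cycle for a grid
  vector, assuming a Grid that has been created and configured elsewhere.
*/
#if 0
static int exampleCreateAndDestroy(Grid grid)
{
  GVec gvec;

  GVecCreate(grid, &gvec);       /* layout taken from grid->order after GridSetUp() */
  /* ... fill and use the vector ... */
  GVecDestroy(gvec);
  return(0);
}
#endif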

1730: #undef  __FUNCT__
1732: /*@
1733:   GVecCreateGhost - Creates a grid vector associated with a particular grid
1734:   with ghost padding on each processor.

1736:   Input Parameter:
1737: . grid - The grid defining the discrete function

1739:   Output Parameter:
1740: . gvec - The resulting grid vector

1742:   Level: beginner

1744: .seealso: GVecCreate(), GVecCreateConstrained()
1745: @*/
1746: int GVecCreateGhost(Grid grid, GVec *gvec)
1747: {

1753:   ierr = GridSetUp(grid);
1754:   if (ierr) SETERRQ(PETSC_ERR_PLIB, "Grid setup failed");
1755:   GVecCreateRectangularGhost(grid, grid->order, gvec);
1756:   return(0);
1757: }

1759: #undef  __FUNCT__
1761: /*@
1762:   GVecCreateRectangular - Creates a grid vector associated with a particular grid and ordering.

1764:   Input Parameters:
1765: . grid  - The grid defining the discrete function
1766: . order - The variable ordering

1768:   Output Parameter:
1769: . gvec - The resulting grid vector

1771:   Level: beginner

1773: .seealso: GVecCreate(), GVecCreateConstrained()
1774: @*/
1775: int GVecCreateRectangular(Grid grid, VarOrdering order, GVec *gvec)
1776: {

1783:   ierr = GridSetUp(grid);
1784:   if (ierr) SETERRQ(PETSC_ERR_PLIB, "Grid setup failed");
1785:   VecCreateMPI(grid->comm, order->numLocVars, order->numVars, gvec);
1786:   PetscObjectCompose((PetscObject) *gvec, "Grid",  (PetscObject) grid);
1787:   PetscObjectCompose((PetscObject) *gvec, "Order", (PetscObject) order);
1788:   return(0);
1789: }
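
/*
  Illustrative sketch (hypothetical helper): the Grid and VarOrdering composed onto the
  vector above can be recovered with PetscObjectQuery(), which is presumably how accessors
  such as GVecGetGrid() obtain them.
*/
#if 0
static int exampleQueryGridAndOrder(GVec gvec, Grid *grid, VarOrdering *order)
{
  PetscObjectQuery((PetscObject) gvec, "Grid",  (PetscObject *) grid);
  PetscObjectQuery((PetscObject) gvec, "Order", (PetscObject *) order);
  return(0);
}
#endif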

1791: #undef  __FUNCT__
1793: /*@
1794:   GVecCreateRectangularGhost - Creates a grid vector associated with a particular grid
1795:   and ordering, adding ghost padding on each processor.

1797:   Input Parameters:
1798: . grid  - The grid defining the discrete function
1799: . order - The variable ordering

1801:   Output Parameter:
1802: . gvec - The resulting grid vector

1804:   Level: beginner

1806: .seealso: GVecCreate(), GVecCreateConstrained()
1807: @*/
1808: int GVecCreateRectangularGhost(Grid grid, VarOrdering order, GVec *gvec)
1809: {

1816:   ierr = GridSetUp(grid);
1817:   if (ierr) SETERRQ(PETSC_ERR_PLIB, "Grid setup failed");
1818:   /* I should use MPICreateGhost(), but it is in a state of flux */
1819:   VecCreate(grid->comm, gvec);
1820:   VecSetSizes(*gvec, order->numLocVars, order->numVars);
1821:   VecCreate_MPI_Private(*gvec, order->numOverlapVars-order->numLocVars, PETSC_NULL, PETSC_NULL);
1822:   PetscObjectCompose((PetscObject) *gvec, "Grid",  (PetscObject) grid);
1823:   PetscObjectCompose((PetscObject) *gvec, "Order", (PetscObject) order);
1824:   return(0);
1825: }
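
/*
  Illustrative sketch (hypothetical helper): the vector above carries
  order->numOverlapVars - order->numLocVars extra entries of ghost padding.  Assuming the
  padding sits directly after the owned entries in the array returned by VecGetArray(),
  which is how the private creation above lays out the storage, a loop over the padded
  local form might look like this.
*/
#if 0
static int exampleZeroPaddedLocalForm(GVec gvec, VarOrdering order)
{
  PetscScalar *array;
  int          i;

  VecGetArray(gvec, &array);
  for(i = 0; i < order->numOverlapVars; i++) array[i] = 0.0;   /* owned entries first, then ghosts */
  VecRestoreArray(gvec, &array);
  return(0);
}
#endif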

1827: #undef  __FUNCT__
1829: /*@
1830:   GVecCreateConstrained - Creates a grid vector associated with a
1831:   particular grid, but after constraints have been applied.

1833:   Input Parameter:
1834: . grid - The grid defining the discrete function

1836:   Output Parameter:
1837: . gvec - The resulting grid vector

1839:   Level: beginner

1841: .seealso: GVecCreate()
1842: @*/
1843: int GVecCreateConstrained(Grid grid, GVec *gvec)
1844: {

1850:   ierr = GridSetUp(grid);
1851:   if (ierr) SETERRQ(PETSC_ERR_PLIB, "Grid setup failed");
1852:   if (grid->isConstrained) {
1853:     GVecCreateRectangular(grid, grid->constraintOrder, gvec);
1854:   } else {
1855:     GVecCreate(grid, gvec);
1856:   }
1857:   return(0);
1858: }
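
/*
  Illustrative sketch (hypothetical caller): constrained and unconstrained vectors can
  coexist for the same Grid; when grid->isConstrained is false the two calls below produce
  identical layouts, since GVecCreateConstrained() falls back to GVecCreate().
*/
#if 0
static int exampleCreateBoth(Grid grid, GVec *u, GVec *uConstrained)
{
  GVecCreate(grid, u);                        /* layout from grid->order */
  GVecCreateConstrained(grid, uConstrained);  /* layout from grid->constraintOrder, if any */
  return(0);
}
#endif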

1860: #undef  __FUNCT__
1862: /*@
1863:   GVecCreateBoundaryRestriction - Creates a grid vector associated with
1864:   the boundary variables of a particular grid.

1866:   Input Parameter:
1867: . grid - The grid defining the discrete function

1869:   Output Parameter:
1870: . gvec - The resulting grid vector

1872:   Level: beginner

1874: .seealso: GVecCreate(), GVecCreateConstrained()
1875: @*/
1876: int GVecCreateBoundaryRestriction(Grid grid, GVec *gvec)
1877: {

1883:   ierr = GridSetUp(grid);
1884:   if (ierr) SETERRQ(PETSC_ERR_PLIB, "Grid setup failed");
1885:   GridSetupBoundary(grid);
1886:   GVecCreateRectangular(grid, grid->bdOrder, gvec);
1887:   return(0);
1888: }

1890: #undef  __FUNCT__
1892: /*@
1893:   PointFunctionInterpolateField - A PointFunction that gives the field value at a given
1894:   point in the mesh based upon a grid vector representation.

1896:   Input Parameters:
1897: . n      - The number of points
1898: . comp   - The number of components
1899: . x,y,z  - The coordinates of points
1900: . ctx    - An InterpolationContext

1902:   Output Parameter:
1903: . values - The location where the field values are stored

1905:   Level: advanced

1907: .keywords: point function, grid
1908: .seealso: PointFunctionOne(), PointFunctionZero(), PointFunctionConstant()
1909: @*/
1910: int PointFunctionInterpolateField(int n, int comp, double *x, double *y, double *z, PetscScalar *values, void *ctx) {
1911:   InterpolationContext *iCtx = (InterpolationContext *) ctx;
1912:   double               *px   = x;
1913:   double               *py   = y;
1914:   double               *pz   = z;
1915:   int                   i;
1916:   int                   ierr;

1919:   /* Check for a call purely for collective operations */
1920:   if (n == 0) {
1921:     GVecInterpolateField(iCtx->vec, iCtx->field, 0.0, 0.0, 0.0, PETSC_NULL, iCtx);
1922:     return(0);
1923:   }

1925:   /* We might do better by assuming that any two of these could be null */
1926:   if (z == PETSC_NULL) pz = px;
1927:   if (y == PETSC_NULL) py = px;

1929:   for(i = 0; i < n; i++) {
1930:     GVecInterpolateField(iCtx->vec, iCtx->field, px[i], py[i], pz[i], &values[i*comp], iCtx);
1931:   }
1932:   return(0);
1933: }
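
/*
  Illustrative sketch (hypothetical caller): evaluating a single-component field at two
  points through the PointFunction interface.  Only the context members referenced above
  (vec, field) are set here; a real InterpolationContext also needs its mesh, ordering,
  ghost vector, and related members filled in before this can actually run.
*/
#if 0
static int exampleSampleField(GVec gvec, int field)
{
  InterpolationContext ctx;
  double               x[2] = {0.0, 0.5};
  double               y[2] = {0.0, 0.5};
  double               z[2] = {0.0, 0.0};
  PetscScalar          values[2];

  ctx.vec   = gvec;
  ctx.field = field;
  /* One component per point: values[0] and values[1] receive the interpolated field */
  PointFunctionInterpolateField(2, 1, x, y, z, values, &ctx);
  return(0);
}
#endif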

1935: #undef  __FUNCT__
1937: /*@
1938:   PointFunctionInterpolateFieldBatch - A PointFunction that finishes an interpolation
1939:   by setting the off-processor values stored in the InterpolationContext.

1941:   Input Parameters:
1942: . n      - The number of points
1943: . comp   - The number of components
1944: . x,y,z  - The coordinates of points
1945: . ctx    - An InterpolationContext

1947:   Output Parameter:
1948: . values - The location where the field values are stored

1950:   Level: advanced

1952: .keywords: point function, grid
1953: .seealso: PointFunctionOne(), PointFunctionZero(), PointFunctionConstant()
1954: @*/
1955: int PointFunctionInterpolateFieldBatch(int n, int comp, double *x, double *y, double *z, PetscScalar *values, void *ctx) {
1956:   InterpolationContext *iCtx = (InterpolationContext *) ctx;
1957:   double               *px   = x;
1958:   double               *py   = y;
1959:   double               *pz   = z;
1960:   int                   p    = iCtx->curPoint - iCtx->numPoints[iCtx->rank];
1961:   int                   i, c;

1964:   /* We might do better by assuming that any two of these could be null */
1965:   if (z == PETSC_NULL) pz = px;
1966:   if (y == PETSC_NULL) py = px;

1968:   for(i = 0; i < n; i++) {
1969:     if ((px[i] == iCtx->coords[p*3])   &&
1970:         (py[i] == iCtx->coords[p*3+1]) &&
1971:         (pz[i] == iCtx->coords[p*3+2])) {
1972:       /* Copy stored values */
1973: #if 0
1974:       PetscPrintf(PETSC_COMM_SELF, "Batch point %d on proc %d\n", iCtx->curPoint, iCtx->rank);
1975:       PetscPrintf(PETSC_COMM_SELF, "  x: %g y: %g z: %g\n  ", px[i], py[i], pz[i]);
1976:       for(c = 0; c < comp; c++) {
1977:         values[i*comp+c] = iCtx->values[iCtx->curPoint*comp+c];
1978:         PetscPrintf(PETSC_COMM_SELF, "val[%d]: %g ", c, values[i*comp+c]);
1979:       }
1980:       PetscPrintf(PETSC_COMM_SELF, "\n");
1981: #else
1982:       for(c = 0; c < comp; c++) values[i*comp+c] = iCtx->values[iCtx->curPoint*comp+c];
1983: #endif
1984:       iCtx->curPoint++;
1985:       p++;
1986:     }
1987:   }
1988:   return(0);
1989: }
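
/*
  The coordinate comparisons above are exact (==) on purpose: the coordinates were
  presumably stored verbatim in ctx->coords when the points were first recorded, so a
  previously recorded point is recognized by bitwise-equal values; ctx->curPoint walks the
  globally ordered ctx->values array while p walks the local coordinate list.  Illustrative
  sketch (hypothetical helper) of that consumption pattern on plain one-dimensional arrays:
*/
#if 0
static void exampleConsumeStoredValues(int n, int comp, double *x, double *storedCoords, PetscScalar *storedValues,
                                       int *localPoint, int *globalPoint, PetscScalar *values)
{
  int i, c;

  for(i = 0; i < n; i++) {
    if (x[i] == storedCoords[*localPoint]) {   /* exact match: both sides are copies of the same double */
      for(c = 0; c < comp; c++) values[i*comp+c] = storedValues[(*globalPoint)*comp+c];
      (*localPoint)++;
      (*globalPoint)++;
    }
  }
}
#endif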