Actual source code: general.c
petsc-3.7.5 2017-01-01
/*
     Provides the functions for index sets (IS) defined by a list of integers.
*/
#include <../src/vec/is/is/impls/general/general.h>  /*I  "petscis.h"  I*/
#include <petscvec.h>
#include <petscviewer.h>
#include <petscviewerhdf5.h>

static PetscErrorCode ISDuplicate_General(IS is,IS *newIS)
{
  IS_General *sub = (IS_General*)is->data;
  PetscInt   n;

  PetscLayoutGetLocalSize(is->map, &n);
  ISCreateGeneral(PetscObjectComm((PetscObject) is), n, sub->idx, PETSC_COPY_VALUES, newIS);
  return(0);
}

static PetscErrorCode ISDestroy_General(IS is)
{
  IS_General *is_general = (IS_General*)is->data;

  if (is_general->allocated) {PetscFree(is_general->idx);}
  PetscObjectComposeFunction((PetscObject)is,"ISGeneralSetIndices_C",0);
  PetscFree(is->data);
  return(0);
}

static PetscErrorCode ISIdentity_General(IS is, PetscBool *ident)
{
  IS_General *is_general = (IS_General*)is->data;
  PetscInt   i,n,*idx = is_general->idx;

  PetscLayoutGetLocalSize(is->map, &n);
  is->isidentity = PETSC_TRUE;
  *ident         = PETSC_TRUE;
  for (i=0; i<n; i++) {
    if (idx[i] != i) {
      is->isidentity = PETSC_FALSE;
      *ident         = PETSC_FALSE;
      break;
    }
  }
  return(0);
}

static PetscErrorCode ISCopy_General(IS is,IS isy)
{
  IS_General *is_general = (IS_General*)is->data,*isy_general = (IS_General*)isy->data;
  PetscInt   n, N, ny, Ny;

  PetscLayoutGetLocalSize(is->map, &n);
  PetscLayoutGetSize(is->map, &N);
  PetscLayoutGetLocalSize(isy->map, &ny);
  PetscLayoutGetSize(isy->map, &Ny);
  if (n != ny || N != Ny) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Index sets incompatible");
  isy_general->sorted = is_general->sorted;
  PetscMemcpy(isy_general->idx,is_general->idx,n*sizeof(PetscInt));
  return(0);
}

static PetscErrorCode ISOnComm_General(IS is,MPI_Comm comm,PetscCopyMode mode,IS *newis)
{
  IS_General *sub = (IS_General*)is->data;
  PetscInt   n;

  if (mode == PETSC_OWN_POINTER) SETERRQ(comm,PETSC_ERR_ARG_WRONG,"Cannot use PETSC_OWN_POINTER");
  PetscLayoutGetLocalSize(is->map, &n);
  ISCreateGeneral(comm,n,sub->idx,mode,newis);
  return(0);
}

static PetscErrorCode ISSetBlockSize_General(IS is,PetscInt bs)
{
#if defined(PETSC_USE_DEBUG)
  IS_General *sub = (IS_General*)is->data;
  PetscInt   n;
#endif

  PetscLayoutSetBlockSize(is->map, bs);
#if defined(PETSC_USE_DEBUG)
  PetscLayoutGetLocalSize(is->map, &n);
  {
    PetscInt i,j;
    for (i=0; i<n; i+=bs) {
      for (j=0; j<bs; j++) {
        if (sub->idx[i+j] != sub->idx[i]+j) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Index set does not have block structure, cannot set block size to %D",bs);
      }
    }
  }
#endif
  return(0);
}
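/* Illustrative note (not in the original source): the debug check above requires that
   each consecutive group of bs indices be a run of consecutive integers starting at the
   group's first entry.  For example, with bs = 3 the list idx = {8,9,10, 4,5,6} passes,
   while idx = {8,9,11, 4,5,6} triggers the PETSC_ERR_ARG_SIZ error. */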
static PetscErrorCode ISContiguousLocal_General(IS is,PetscInt gstart,PetscInt gend,PetscInt *start,PetscBool *contig)
{
  IS_General *sub = (IS_General*)is->data;
  PetscInt   n,i,p;

  *start  = 0;
  *contig = PETSC_TRUE;
  PetscLayoutGetLocalSize(is->map, &n);
  if (!n) return(0);
  p = sub->idx[0];
  if (p < gstart) goto nomatch;
  *start = p - gstart;
  if (n > gend-p) goto nomatch;
  for (i=1; i<n; i++,p++) {
    if (sub->idx[i] != p+1) goto nomatch;
  }
  return(0);
nomatch:
  *start  = -1;
  *contig = PETSC_FALSE;
  return(0);
}

static PetscErrorCode ISGetIndices_General(IS in,const PetscInt *idx[])
{
  IS_General *sub = (IS_General*)in->data;

  *idx = sub->idx;
  return(0);
}

static PetscErrorCode ISRestoreIndices_General(IS in,const PetscInt *idx[])
{
  IS_General *sub = (IS_General*)in->data;

  if (*idx != sub->idx) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Must restore with value from ISGetIndices()");
  return(0);
}

static PetscErrorCode ISGetSize_General(IS is,PetscInt *size)
{
  PetscLayoutGetSize(is->map, size);
  return(0);
}

static PetscErrorCode ISGetLocalSize_General(IS is,PetscInt *size)
{
  PetscLayoutGetLocalSize(is->map, size);
  return(0);
}

static PetscErrorCode ISInvertPermutation_General(IS is,PetscInt nlocal,IS *isout)
{
  IS_General     *sub = (IS_General*)is->data;
  PetscInt       i,*ii,n,nstart;
  const PetscInt *idx = sub->idx;
  PetscMPIInt    size;
  IS             istmp,nistmp;

  PetscLayoutGetLocalSize(is->map, &n);
  MPI_Comm_size(PetscObjectComm((PetscObject)is),&size);
  if (size == 1) {
    PetscMalloc1(n,&ii);
    for (i=0; i<n; i++) ii[idx[i]] = i;
    ISCreateGeneral(PETSC_COMM_SELF,n,ii,PETSC_OWN_POINTER,isout);
    ISSetPermutation(*isout);
  } else {
    /* crude, nonscalable approach: gather the entire IS on each process */
    if (nlocal == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Do not yet support nlocal of PETSC_DECIDE");
    ISAllGather(is,&istmp);
    ISSetPermutation(istmp);
    ISInvertPermutation(istmp,PETSC_DECIDE,&nistmp);
    ISDestroy(&istmp);
    /* get the part we need */
    MPI_Scan(&nlocal,&nstart,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)is));
#if defined(PETSC_USE_DEBUG)
    {
      PetscInt    N;
      PetscMPIInt rank;
      MPI_Comm_rank(PetscObjectComm((PetscObject)is),&rank);
      PetscLayoutGetSize(is->map, &N);
      if (rank == size-1) {
        if (nstart != N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Sum of nlocal lengths %d != total IS length %d",nstart,N);
      }
    }
#endif
    nstart -= nlocal;
    ISGetIndices(nistmp,&idx);
    ISCreateGeneral(PetscObjectComm((PetscObject)is),nlocal,idx+nstart,PETSC_COPY_VALUES,isout);
    ISRestoreIndices(nistmp,&idx);
    ISDestroy(&nistmp);
  }
  return(0);
}
#if defined(PETSC_HAVE_HDF5)
static PetscErrorCode ISView_General_HDF5(IS is, PetscViewer viewer)
{
  hid_t           filespace;  /* file dataspace identifier */
  hid_t           chunkspace; /* chunk dataset property identifier */
  hid_t           plist_id;   /* property list identifier */
  hid_t           dset_id;    /* dataset identifier */
  hid_t           memspace;   /* memory dataspace identifier */
  hid_t           inttype;    /* int type (H5T_NATIVE_INT or H5T_NATIVE_LLONG) */
  hid_t           file_id, group;
  hsize_t         dim, maxDims[3], dims[3], chunkDims[3], count[3], offset[3];
  PetscInt        bs, N, n, timestep, low;
  const PetscInt *ind;
  const char     *isname;
  PetscErrorCode  ierr;

  ISGetBlockSize(is,&bs);
  PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
  PetscViewerHDF5GetTimestep(viewer, &timestep);

  /* Create the dataspace for the dataset.
   *
   * dims - holds the current dimensions of the dataset
   *
   * maxDims - holds the maximum dimensions of the dataset (unlimited
   * for the number of time steps with the current dimensions for the
   * other dimensions; so only additional time steps can be added).
   *
   * chunkDims - holds the size of a single time step (required to
   * permit extending dataset).
   */
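  /* Worked example (illustrative values, not in the original source): with timestep = 2,
     N = 8 and bs = 2, the code below produces dim = 3, dims = {3, 4, 2},
     maxDims = {H5S_UNLIMITED, 4, 2} and chunkDims = {1, 4, 2}. */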
  dim = 0;
  if (timestep >= 0) {
    dims[dim]      = timestep+1;
    maxDims[dim]   = H5S_UNLIMITED;
    chunkDims[dim] = 1;
    ++dim;
  }
  ISGetSize(is, &N);
  ISGetLocalSize(is, &n);
  PetscHDF5IntCast(N/bs,dims + dim);

  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  ++dim;
  if (bs >= 1) {
    dims[dim]      = bs;
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  PetscStackCallHDF5Return(filespace,H5Screate_simple,(dim, dims, maxDims));

#if defined(PETSC_USE_64BIT_INDICES)
  inttype = H5T_NATIVE_LLONG;
#else
  inttype = H5T_NATIVE_INT;
#endif

  /* Create the dataset with default properties and close filespace */
  PetscObjectGetName((PetscObject) is, &isname);
  if (!H5Lexists(group, isname, H5P_DEFAULT)) {
    /* Create chunk */
    PetscStackCallHDF5Return(chunkspace,H5Pcreate,(H5P_DATASET_CREATE));
    PetscStackCallHDF5(H5Pset_chunk,(chunkspace, dim, chunkDims));

#if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
    PetscStackCallHDF5Return(dset_id,H5Dcreate2,(group, isname, inttype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT));
#else
    PetscStackCallHDF5Return(dset_id,H5Dcreate,(group, isname, inttype, filespace, H5P_DEFAULT));
#endif
    PetscStackCallHDF5(H5Pclose,(chunkspace));
  } else {
    PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, isname, H5P_DEFAULT));
    PetscStackCallHDF5(H5Dset_extent,(dset_id, dims));
  }
  PetscStackCallHDF5(H5Sclose,(filespace));

  /* Each process defines a dataset and writes it to the hyperslab in the file */
  dim = 0;
  if (timestep >= 0) {
    count[dim] = 1;
    ++dim;
  }
  PetscHDF5IntCast(n/bs,count + dim);
  ++dim;
  if (bs >= 1) {
    count[dim] = bs;
    ++dim;
  }
  if (n > 0) {
    PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL));
  } else {
    /* Can't create dataspace with zero for any dimension, so create null dataspace. */
    PetscStackCallHDF5Return(memspace,H5Screate,(H5S_NULL));
  }

  /* Select hyperslab in the file */
  PetscLayoutGetRange(is->map, &low, NULL);
  dim = 0;
  if (timestep >= 0) {
    offset[dim] = timestep;
    ++dim;
  }
  PetscHDF5IntCast(low/bs,offset + dim);
  ++dim;
  if (bs >= 1) {
    offset[dim] = 0;
    ++dim;
  }
  if (n > 0) {
    PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
    PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL));
  } else {
    /* Create null filespace to match null memspace. */
    PetscStackCallHDF5Return(filespace,H5Screate,(H5S_NULL));
  }

  /* Create property list for collective dataset write */
  PetscStackCallHDF5Return(plist_id,H5Pcreate,(H5P_DATASET_XFER));
#if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
  PetscStackCallHDF5(H5Pset_dxpl_mpio,(plist_id, H5FD_MPIO_COLLECTIVE));
#endif
  /* To write dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */

  ISGetIndices(is, &ind);
  PetscStackCallHDF5(H5Dwrite,(dset_id, inttype, memspace, filespace, plist_id, ind));
  PetscStackCallHDF5(H5Fflush,(file_id, H5F_SCOPE_GLOBAL));
  ISRestoreIndices(is, &ind);
  /* Close/release resources */
  if (group != file_id) PetscStackCallHDF5(H5Gclose,(group));
  PetscStackCallHDF5(H5Pclose,(plist_id));
  PetscStackCallHDF5(H5Sclose,(filespace));
  PetscStackCallHDF5(H5Sclose,(memspace));
  PetscStackCallHDF5(H5Dclose,(dset_id));
  PetscInfo1(is, "Wrote IS object with name %s\n", isname);
  return(0);
}
#endif
static PetscErrorCode ISView_General_Binary(IS is,PetscViewer viewer)
{
  IS_General  *isa = (IS_General*) is->data;
  PetscMPIInt rank,size,mesgsize,tag = ((PetscObject)viewer)->tag, mesglen;
  PetscInt    n,N,len,j,tr[2];
  int         fdes;
  MPI_Status  status;
  PetscInt    message_count,flowcontrolcount,*values;

  PetscLayoutGetLocalSize(is->map, &n);
  PetscLayoutGetSize(is->map, &N);
  PetscViewerBinaryGetDescriptor(viewer,&fdes);

  /* determine maximum message to arrive */
  MPI_Comm_rank(PetscObjectComm((PetscObject)is),&rank);
  MPI_Comm_size(PetscObjectComm((PetscObject)is),&size);

  tr[0] = IS_FILE_CLASSID;
  tr[1] = N;
  PetscViewerBinaryWrite(viewer,tr,2,PETSC_INT,PETSC_FALSE);
  MPI_Reduce(&n,&len,1,MPIU_INT,MPI_SUM,0,PetscObjectComm((PetscObject)is));

  PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);
  if (!rank) {
    PetscBinaryWrite(fdes,isa->idx,n,PETSC_INT,PETSC_FALSE);

    PetscMalloc1(len,&values);
    PetscMPIIntCast(len,&mesgsize);
    /* receive and save messages */
    for (j=1; j<size; j++) {
      PetscViewerFlowControlStepMaster(viewer,j,&message_count,flowcontrolcount);
      MPI_Recv(values,mesgsize,MPIU_INT,j,tag,PetscObjectComm((PetscObject)is),&status);
      MPI_Get_count(&status,MPIU_INT,&mesglen);
      PetscBinaryWrite(fdes,values,(PetscInt)mesglen,PETSC_INT,PETSC_TRUE);
    }
    PetscViewerFlowControlEndMaster(viewer,&message_count);
    PetscFree(values);
  } else {
    PetscViewerFlowControlStepWorker(viewer,rank,&message_count);
    PetscMPIIntCast(n,&mesgsize);
    MPI_Send(isa->idx,mesgsize,MPIU_INT,0,tag,PetscObjectComm((PetscObject)is));
    PetscViewerFlowControlEndWorker(viewer,&message_count);
  }
  return(0);
}

static PetscErrorCode ISView_General(IS is,PetscViewer viewer)
{
  IS_General *sub = (IS_General*)is->data;
  PetscInt   i,n,*idx = sub->idx;
  PetscBool  iascii,isbinary,ishdf5;

  PetscLayoutGetLocalSize(is->map, &n);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
  if (iascii) {
    MPI_Comm    comm;
    PetscMPIInt rank,size;

    PetscObjectGetComm((PetscObject)viewer,&comm);
    MPI_Comm_rank(comm,&rank);
    MPI_Comm_size(comm,&size);

    PetscViewerASCIIPushSynchronized(viewer);
    if (size > 1) {
      if (is->isperm) {
        PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Index set is permutation\n",rank);
      }
      PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Number of indices in set %D\n",rank,n);
      for (i=0; i<n; i++) {
        PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,idx[i]);
      }
    } else {
      if (is->isperm) {
        PetscViewerASCIISynchronizedPrintf(viewer,"Index set is permutation\n");
      }
      PetscViewerASCIISynchronizedPrintf(viewer,"Number of indices in set %D\n",n);
      for (i=0; i<n; i++) {
        PetscViewerASCIISynchronizedPrintf(viewer,"%D %D\n",i,idx[i]);
      }
    }
    PetscViewerFlush(viewer);
    PetscViewerASCIIPopSynchronized(viewer);
  } else if (isbinary) {
    ISView_General_Binary(is,viewer);
  } else if (ishdf5) {
#if defined(PETSC_HAVE_HDF5)
    ISView_General_HDF5(is,viewer);
#endif
  }
  return(0);
}
static PetscErrorCode ISSort_General(IS is)
{
  IS_General *sub = (IS_General*)is->data;
  PetscInt   n;

  if (sub->sorted) return(0);
  PetscLayoutGetLocalSize(is->map, &n);
  PetscSortInt(n,sub->idx);
  sub->sorted = PETSC_TRUE;
  return(0);
}

static PetscErrorCode ISSortRemoveDups_General(IS is)
{
  IS_General *sub = (IS_General*)is->data;
  PetscInt   n;

  if (sub->sorted) return(0);
  PetscLayoutGetLocalSize(is->map, &n);
  PetscSortRemoveDupsInt(&n,sub->idx);
  PetscLayoutSetLocalSize(is->map, n);
  sub->sorted = PETSC_TRUE;
  return(0);
}

static PetscErrorCode ISSorted_General(IS is,PetscBool *flg)
{
  IS_General *sub = (IS_General*)is->data;

  *flg = sub->sorted;
  return(0);
}

PetscErrorCode ISToGeneral_General(IS is)
{
  return(0);
}

static struct _ISOps myops = { ISGetSize_General,
                               ISGetLocalSize_General,
                               ISGetIndices_General,
                               ISRestoreIndices_General,
                               ISInvertPermutation_General,
                               ISSort_General,
                               ISSortRemoveDups_General,
                               ISSorted_General,
                               ISDuplicate_General,
                               ISDestroy_General,
                               ISView_General,
                               ISLoad_Default,
                               ISIdentity_General,
                               ISCopy_General,
                               ISToGeneral_General,
                               ISOnComm_General,
                               ISSetBlockSize_General,
                               ISContiguousLocal_General};
static PetscErrorCode ISCreateGeneral_Private(IS is)
{
  IS_General     *sub = (IS_General*)is->data;
  const PetscInt *idx = sub->idx;
  PetscBool      sorted = PETSC_TRUE;
  PetscInt       n,i,min,max;

  PetscLayoutGetLocalSize(is->map, &n);
  PetscLayoutSetUp(is->map);
  for (i=1; i<n; i++) {
    if (idx[i] < idx[i-1]) {sorted = PETSC_FALSE; break;}
  }
  if (n) min = max = idx[0];
  else   min = max = 0;
  for (i=1; i<n; i++) {
    if (idx[i] < min) min = idx[i];
    if (idx[i] > max) max = idx[i];
  }
  sub->sorted    = sorted;
  is->min        = min;
  is->max        = max;
  is->isperm     = PETSC_FALSE;
  is->isidentity = PETSC_FALSE;
  ISViewFromOptions(is,NULL,"-is_view");
  return(0);
}

/*@
   ISCreateGeneral - Creates a data structure for an index set
   containing a list of integers.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator
.  n - the length of the index set
.  idx - the list of integers
-  mode - see PetscCopyMode for meaning of this flag.

   Output Parameter:
.  is - the new index set

   Notes:
   When the communicator is not MPI_COMM_SELF, the operations on IS are NOT
   conceptually the same as MPI_Group operations. The IS are then
   distributed sets of indices and thus certain operations on them are
   collective.

   Level: beginner

   Concepts: index sets^creating
   Concepts: IS^creating

.seealso: ISCreateStride(), ISCreateBlock(), ISAllGather()
@*/
PetscErrorCode ISCreateGeneral(MPI_Comm comm,PetscInt n,const PetscInt idx[],PetscCopyMode mode,IS *is)
{
  ISCreate(comm,is);
  ISSetType(*is,ISGENERAL);
  ISGeneralSetIndices(*is,n,idx,mode);
  return(0);
}
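/*
   Usage sketch (illustrative, not part of the original source; error checking omitted):
   create a sequential ISGENERAL from three indices, view it, then destroy it.

     PetscInt idx[] = {0, 3, 4};
     IS       is;

     ISCreateGeneral(PETSC_COMM_SELF, 3, idx, PETSC_COPY_VALUES, &is);
     ISView(is, PETSC_VIEWER_STDOUT_SELF);
     ISDestroy(&is);

   With PETSC_COPY_VALUES the caller keeps ownership of idx; see ISGeneralSetIndices_General()
   below for how the other PetscCopyMode values are handled.
*/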
/*@
   ISGeneralSetIndices - Sets the indices for an ISGENERAL index set

   Collective on IS

   Input Parameters:
+  is - the index set
.  n - the length of the index set
.  idx - the list of integers
-  mode - see PetscCopyMode for meaning of this flag.

   Level: beginner

   Concepts: index sets^creating
   Concepts: IS^creating

.seealso: ISCreateGeneral(), ISCreateStride(), ISCreateBlock(), ISAllGather()
@*/
PetscErrorCode ISGeneralSetIndices(IS is,PetscInt n,const PetscInt idx[],PetscCopyMode mode)
{
  PetscUseMethod(is,"ISGeneralSetIndices_C",(IS,PetscInt,const PetscInt[],PetscCopyMode),(is,n,idx,mode));
  return(0);
}
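/*
   Usage sketch (illustrative, not part of the original source; error checking omitted):
   reset the indices of an existing ISGENERAL, handing ownership of a PetscMalloc'd
   array to the IS so that the array is freed later in ISDestroy_General().

     PetscInt *idx;

     PetscMalloc1(4, &idx);
     idx[0] = 2; idx[1] = 3; idx[2] = 6; idx[3] = 7;
     ISGeneralSetIndices(is, 4, idx, PETSC_OWN_POINTER);
*/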
PetscErrorCode ISGeneralSetIndices_General(IS is,PetscInt n,const PetscInt idx[],PetscCopyMode mode)
{
  IS_General *sub = (IS_General*)is->data;

  if (n < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"length < 0");

  if (sub->allocated) {PetscFree(sub->idx);}
  PetscLayoutSetLocalSize(is->map, n);
  if (mode == PETSC_COPY_VALUES) {
    PetscMalloc1(n,&sub->idx);
    PetscLogObjectMemory((PetscObject)is,n*sizeof(PetscInt));
    PetscMemcpy(sub->idx,idx,n*sizeof(PetscInt));
    sub->allocated = PETSC_TRUE;
  } else if (mode == PETSC_OWN_POINTER) {
    sub->idx       = (PetscInt*)idx;
    sub->allocated = PETSC_TRUE;
  } else {
    sub->idx       = (PetscInt*)idx;
    sub->allocated = PETSC_FALSE;
  }
  ISCreateGeneral_Private(is);
  return(0);
}

PETSC_EXTERN PetscErrorCode ISCreate_General(IS is)
{
  IS_General *sub;

  PetscMemcpy(is->ops,&myops,sizeof(myops));
  PetscNewLog(is,&sub);
  is->data = (void *) sub;
  PetscObjectComposeFunction((PetscObject)is,"ISGeneralSetIndices_C",ISGeneralSetIndices_General);
  return(0);
}