Actual source code: pmetis.c

petsc-3.7.5 2017-01-01
  2: #include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/

  4: /*
  5:    Currently using ParMetis-4.0.2
  6: */

  8: #include <parmetis.h>

 10: /*
 11:       The first 5 elements of this structure are the input control array to Metis
 12: */
 13: typedef struct {
 14:   PetscInt  cuts;         /* number of cuts made (output) */
 15:   PetscInt  foldfactor;
 16:   PetscInt  parallel;     /* use parallel partitioner for coarse problem */
 17:   PetscInt  indexing;     /* 0 indicates C indexing, 1 Fortran */
 18:   PetscInt  printout;     /* indicates if one wishes Metis to print info */
 19:   PetscBool repartition;
 20: } MatPartitioning_Parmetis;

 22: #define CHKERRQPARMETIS(n,func)                                             \
 23:   if (n == METIS_ERROR_INPUT) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"ParMETIS error due to wrong inputs and/or options for %s",func); \
 24:   else if (n == METIS_ERROR_MEMORY) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"ParMETIS error due to insufficient memory in %s",func); \
 25:   else if (n == METIS_ERROR) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"ParMETIS general error in %s",func); \

 27: #define PetscStackCallParmetis(func,args) do {PetscStackPush(#func);status = func args;PetscStackPop; CHKERRQPARMETIS(status,#func);} while (0)

 29: /*
 30:    Uses the ParMETIS parallel matrix partitioner to partition the matrix in parallel
 31: */
 34: static PetscErrorCode MatPartitioningApply_Parmetis(MatPartitioning part,IS *partitioning)
 35: {
 36:   MatPartitioning_Parmetis *pmetis = (MatPartitioning_Parmetis*)part->data;
 37:   PetscErrorCode           ierr;
 38:   PetscInt                 *locals = NULL;
 39:   Mat                      mat     = part->adj,amat,pmat;
 40:   PetscBool                flg;
 41:   PetscInt                 bs = 1;

 44:   PetscObjectTypeCompare((PetscObject)mat,MATMPIADJ,&flg);
 45:   if (flg) {
 46:     amat = mat;
 47:     PetscObjectReference((PetscObject)amat);
 48:   } else {
 49:     /* bs indicates if the converted matrix is "reduced" from the original and hence the
 50:        resulting partition results need to be stretched to match the original matrix */
 51:     MatConvert(mat,MATMPIADJ,MAT_INITIAL_MATRIX,&amat);
 52:     if (amat->rmap->n > 0) bs = mat->rmap->n/amat->rmap->n;
 53:   }
 54:   MatMPIAdjCreateNonemptySubcommMat(amat,&pmat);
 55:   MPI_Barrier(PetscObjectComm((PetscObject)part));

 57:   if (pmat) {
 58:     MPI_Comm   pcomm,comm;
 59:     Mat_MPIAdj *adj     = (Mat_MPIAdj*)pmat->data;
 60:     PetscInt   *vtxdist = pmat->rmap->range;
 61:     PetscInt   *xadj    = adj->i;
 62:     PetscInt   *adjncy  = adj->j;
 63:     PetscInt   itmp     = 0,wgtflag=0, numflag=0, ncon=1, nparts=part->n, options[24], i, j;
 64:     real_t     *tpwgts,*ubvec,itr=0.1;
 65:     int        status;

 67:     PetscObjectGetComm((PetscObject)pmat,&pcomm);
 68: #if defined(PETSC_USE_DEBUG)
 69:     /* check that matrix has no diagonal entries */
 70:     {
 71:       PetscInt rstart;
 72:       MatGetOwnershipRange(pmat,&rstart,NULL);
 73:       for (i=0; i<pmat->rmap->n; i++) {
 74:         for (j=xadj[i]; j<xadj[i+1]; j++) {
 75:           if (adjncy[j] == i+rstart) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Row %D has diagonal entry; Parmetis forbids diagonal entry",i+rstart);
 76:         }
 77:       }
 78:     }
 79: #endif

 81:     PetscMalloc1(amat->rmap->n,&locals);

 83:     if (PetscLogPrintInfo) {itmp = pmetis->printout; pmetis->printout = 127;}
 84:     PetscMalloc1(ncon*nparts,&tpwgts);
 85:     for (i=0; i<ncon; i++) {
 86:       for (j=0; j<nparts; j++) {
 87:         if (part->part_weights) {
 88:           tpwgts[i*nparts+j] = part->part_weights[i*nparts+j];
 89:         } else {
 90:           tpwgts[i*nparts+j] = 1./nparts;
 91:         }
 92:       }
 93:     }
 94:     PetscMalloc1(ncon,&ubvec);
 95:     for (i=0; i<ncon; i++) {
 96:       ubvec[i] = 1.05;
 97:     }
 98:     /* This sets the defaults */
 99:     options[0] = 0;
100:     for (i=1; i<24; i++) {
101:       options[i] = -1;
102:     }
103:     /* Duplicate the communicator to be sure that ParMETIS attribute caching does not interfere with PETSc. */
104:     MPI_Comm_dup(pcomm,&comm);
105:     if (pmetis->repartition) {
106:       PetscStackCallParmetis(ParMETIS_V3_AdaptiveRepart,((idx_t*)vtxdist,(idx_t*)xadj,(idx_t*)adjncy,(idx_t*)part->vertex_weights,(idx_t*)part->vertex_weights,(idx_t*)adj->values,(idx_t*)&wgtflag,(idx_t*)&numflag,(idx_t*)&ncon,(idx_t*)&nparts,tpwgts,ubvec,&itr,(idx_t*)options,(idx_t*)&pmetis->cuts,(idx_t*)locals,&comm));
107:     } else {
108:       PetscStackCallParmetis(ParMETIS_V3_PartKway,((idx_t*)vtxdist,(idx_t*)xadj,(idx_t*)adjncy,(idx_t*)part->vertex_weights,(idx_t*)adj->values,(idx_t*)&wgtflag,(idx_t*)&numflag,(idx_t*)&ncon,(idx_t*)&nparts,tpwgts,ubvec,(idx_t*)options,(idx_t*)&pmetis->cuts,(idx_t*)locals,&comm));
109:     }
110:     MPI_Comm_free(&comm);

112:     PetscFree(tpwgts);
113:     PetscFree(ubvec);
114:     if (PetscLogPrintInfo) pmetis->printout = itmp;
115:   }

117:   if (bs > 1) {
118:     PetscInt i,j,*newlocals;
119:     PetscMalloc1(bs*amat->rmap->n,&newlocals);
120:     for (i=0; i<amat->rmap->n; i++) {
121:       for (j=0; j<bs; j++) {
122:         newlocals[bs*i + j] = locals[i];
123:       }
124:     }
125:     PetscFree(locals);
126:     ISCreateGeneral(PetscObjectComm((PetscObject)part),bs*amat->rmap->n,newlocals,PETSC_OWN_POINTER,partitioning);
127:   } else {
128:     ISCreateGeneral(PetscObjectComm((PetscObject)part),amat->rmap->n,locals,PETSC_OWN_POINTER,partitioning);
129:   }

131:   MatDestroy(&pmat);
132:   MatDestroy(&amat);
133:   return(0);
134: }
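
/*
   A minimal usage sketch of this partitioner from application code (hypothetical caller; assumes an
   assembled parallel Mat A and a PetscErrorCode ierr are available):

     MatPartitioning part;
     IS              is;

     ierr = MatPartitioningCreate(PetscObjectComm((PetscObject)A),&part);CHKERRQ(ierr);
     ierr = MatPartitioningSetAdjacency(part,A);CHKERRQ(ierr);
     ierr = MatPartitioningSetType(part,MATPARTITIONINGPARMETIS);CHKERRQ(ierr);
     ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);
     ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);
     ierr = ISView(is,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
     ierr = ISDestroy(&is);CHKERRQ(ierr);
     ierr = MatPartitioningDestroy(&part);CHKERRQ(ierr);

   If A is not of type MATMPIADJ it is converted internally, as done at the top of
   MatPartitioningApply_Parmetis() above.
*/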

138: PetscErrorCode MatPartitioningView_Parmetis(MatPartitioning part,PetscViewer viewer)
139: {
140:   MatPartitioning_Parmetis *pmetis = (MatPartitioning_Parmetis*)part->data;
141:   PetscErrorCode           ierr;
142:   int                      rank;
143:   PetscBool                iascii;

146:   MPI_Comm_rank(PetscObjectComm((PetscObject)part),&rank);
147:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
148:   if (iascii) {
149:     if (pmetis->parallel == 2) {
150:       PetscViewerASCIIPrintf(viewer,"  Using parallel coarse grid partitioner\n");
151:     } else {
152:       PetscViewerASCIIPrintf(viewer,"  Using sequential coarse grid partitioner\n");
153:     }
154:     PetscViewerASCIIPrintf(viewer,"  Using %D fold factor\n",pmetis->foldfactor);
155:     PetscViewerASCIIPushSynchronized(viewer);
156:     PetscViewerASCIISynchronizedPrintf(viewer,"  [%d]Number of cuts found %D\n",rank,pmetis->cuts);
157:     PetscViewerFlush(viewer);
158:     PetscViewerASCIIPopSynchronized(viewer);
159:   }
160:   return(0);
161: }
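
/*
   Usage sketch (hypothetical caller; assumes part is a MatPartitioning of type MATPARTITIONINGPARMETIS
   that has already been applied, so the reported number of cuts is meaningful):

     ierr = MatPartitioningView(part,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
*/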

165: /*@
166:      MatPartitioningParmetisSetCoarseSequential - Use the sequential code to
167:          do the partitioning of the coarse grid.

169:   Logically Collective on MatPartitioning

171:   Input Parameter:
172: .  part - the partitioning context

174:    Level: advanced

176: @*/
177: PetscErrorCode  MatPartitioningParmetisSetCoarseSequential(MatPartitioning part)
178: {
179:   MatPartitioning_Parmetis *pmetis = (MatPartitioning_Parmetis*)part->data;

182:   pmetis->parallel = 1;
183:   return(0);
184: }
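
/*
   Usage sketch (hypothetical caller; assumes part is a MatPartitioning whose type has been set to
   MATPARTITIONINGPARMETIS, since the routine casts part->data without checking the type):

     ierr = MatPartitioningParmetisSetCoarseSequential(part);CHKERRQ(ierr);

   The same behavior is selected by the -mat_partitioning_parmetis_coarse_sequential option handled in
   MatPartitioningSetFromOptions_Parmetis() below.
*/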

188: /*@
189:      MatPartitioningParmetisSetRepartition - Repartition the
190:      current mesh to rebalance the computation.

192:   Logically Collective on MatPartitioning

194:   Input Parameter:
195: .  part - the partitioning context

197:    Level: advanced

199: @*/
200: PetscErrorCode  MatPartitioningParmetisSetRepartition(MatPartitioning part)
201: {
202:   MatPartitioning_Parmetis *pmetis = (MatPartitioning_Parmetis*)part->data;

205:   pmetis->repartition = PETSC_TRUE;
206:   return(0);
207: }
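
/*
   Usage sketch (hypothetical caller; assumes part is a MatPartitioning of type MATPARTITIONINGPARMETIS):

     ierr = MatPartitioningParmetisSetRepartition(part);CHKERRQ(ierr);

   With repartitioning enabled, MatPartitioningApply_Parmetis() above calls ParMETIS_V3_AdaptiveRepart()
   instead of ParMETIS_V3_PartKway().
*/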

211: /*@
212:   MatPartitioningParmetisGetEdgeCut - Returns the number of edge cuts in the vertex partition.

214:   Input Parameter:
215: . part - the partitioning context

217:   Output Parameter:
218: . cut - the edge cut

220:    Level: advanced

222: @*/
223: PetscErrorCode  MatPartitioningParmetisGetEdgeCut(MatPartitioning part, PetscInt *cut)
224: {
225:   MatPartitioning_Parmetis *pmetis = (MatPartitioning_Parmetis*) part->data;

228:   *cut = pmetis->cuts;
229:   return(0);
230: }
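
/*
   Usage sketch (hypothetical caller; assumes the partitioning has already been applied so that the
   cuts field has been filled in by ParMETIS):

     PetscInt cut;

     ierr = MatPartitioningParmetisGetEdgeCut(part,&cut);CHKERRQ(ierr);
     ierr = PetscPrintf(PetscObjectComm((PetscObject)part),"ParMETIS edge cut: %D\n",cut);CHKERRQ(ierr);
*/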

234: PetscErrorCode MatPartitioningSetFromOptions_Parmetis(PetscOptionItems *PetscOptionsObject,MatPartitioning part)
235: {
237:   PetscBool      flag = PETSC_FALSE;

240:   PetscOptionsHead(PetscOptionsObject,"Set ParMETIS partitioning options");
241:   PetscOptionsBool("-mat_partitioning_parmetis_coarse_sequential","Use sequential coarse partitioner","MatPartitioningParmetisSetCoarseSequential",flag,&flag,NULL);
242:   if (flag) {
243:     MatPartitioningParmetisSetCoarseSequential(part);
244:   }
245:   PetscOptionsBool("-mat_partitioning_parmetis_repartition","Repartition current mesh to rebalance computation","MatPartitioningParmetisSetRepartition",PETSC_FALSE,&flag,NULL);
246:   if (flag) {
247:     MatPartitioningParmetisSetRepartition(part);
248:   }
249:   PetscOptionsTail();
250:   return(0);
251: }
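
/*
   Options sketch (hypothetical command line for an application that calls MatPartitioningSetFromOptions()
   on its MatPartitioning object; the program name is illustrative):

     ./app -mat_partitioning_type parmetis \
           -mat_partitioning_parmetis_coarse_sequential \
           -mat_partitioning_parmetis_repartition
*/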


256: PetscErrorCode MatPartitioningDestroy_Parmetis(MatPartitioning part)
257: {
258:   MatPartitioning_Parmetis *pmetis = (MatPartitioning_Parmetis*)part->data;
259:   PetscErrorCode           ierr;

262:   PetscFree(pmetis);
263:   return(0);
264: }


267: /*MC
268:    MATPARTITIONINGPARMETIS - Creates a partitioning context via the external package PARMETIS.

270:    Collective on MPI_Comm

272:    Input Parameter:
273: .  part - the partitioning context

275:    Options Database Keys:
276: +  -mat_partitioning_parmetis_coarse_sequential - use sequential PARMETIS coarse partitioner
277: -  -mat_partitioning_parmetis_repartition - repartition the existing partition to rebalance the computation

278:    Level: beginner

280:    Notes: See http://www-users.cs.umn.edu/~karypis/metis/

282: .keywords: Partitioning, create, context

284: .seealso: MatPartitioningSetType(), MatPartitioningType

286: M*/

290: PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Parmetis(MatPartitioning part)
291: {
292:   PetscErrorCode           ierr;
293:   MatPartitioning_Parmetis *pmetis;

296:   PetscNewLog(part,&pmetis);
297:   part->data = (void*)pmetis;

299:   pmetis->cuts       = 0;   /* output variable */
300:   pmetis->foldfactor = 150; /*folding factor */
301:   pmetis->parallel   = 2;   /* use parallel partitioner for coarse grid */
302:   pmetis->indexing   = 0;   /* index numbering starts from 0 */
303:   pmetis->printout   = 0;   /* print no output while running */
304:   pmetis->repartition      = PETSC_FALSE;

306:   part->ops->apply          = MatPartitioningApply_Parmetis;
307:   part->ops->view           = MatPartitioningView_Parmetis;
308:   part->ops->destroy        = MatPartitioningDestroy_Parmetis;
309:   part->ops->setfromoptions = MatPartitioningSetFromOptions_Parmetis;
310:   return(0);
311: }

315: /*@
316:  MatMeshToVertexGraph -   This routine does not exist because ParMETIS does not provide the functionality. It would use the ParMETIS package to
317:                        convert a Mat that represents a mesh to a Mat that represents the graph of the coupling
318:                        between vertices of the cells and is suitable for partitioning with the MatPartitioning object. Use this to partition
319:                        vertices of a mesh. More likely you should use MatMeshToCellGraph()

321:    Collective on Mat

323:    Input Parameters:
324: +     mesh - the graph that represents the mesh
325: -     ncommonnodes - mesh elements that share this number of common nodes are considered neighbors, use 2 for triangles and
326:                      quadrilaterals, 3 for tetrahedra and 4 for hexahedra

328:    Output Parameter:
329: .     dual - the dual graph

331:    Notes:
332:      Currently requires ParMetis to be installed and uses ParMETIS_V3_Mesh2Dual()

334:      The columns of each row of the Mat mesh are the global vertex numbers of the vertices of that row's cell. The number of rows in mesh is
335:      the number of cells, and the number of columns is the number of vertices.

337:    Level: advanced

339: .seealso: MatMeshToCellGraph(), MatCreateMPIAdj(), MatPartitioningCreate()

341: @*/
342: PetscErrorCode MatMeshToVertexGraph(Mat mesh,PetscInt ncommonnodes,Mat *dual)
343: {
345:   SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"ParMETIS does not provide this functionality");
346:   return(0);
347: }

351: /*@
352:      MatMeshToCellGraph -   Uses the ParMETIS package to convert a Mat that represents a mesh to a Mat that represents the graph of the coupling
353:                        between cells (the "dual" graph) and is suitable for partitioning with the MatPartitioning object. Use this to partition
354:                        cells of a mesh.

356:    Collective on Mat

358:    Input Parameters:
359: +     mesh - the graph that represents the mesh
360: -     ncommonnodes - mesh elements that share this number of common nodes are considered neighbors, use 2 for triangles and
361:                      quadrilaterals, 3 for tetrahedra and 4 for hexahedra

363:    Output Parameter:
364: .     dual - the dual graph

366:    Notes:
367:      Currently requires ParMetis to be installed and uses ParMETIS_V3_Mesh2Dual()

369:      The columns of each row of the Mat mesh are the global vertex numbers of the vertices of that row's cell. The number of rows in mesh is
370:      the number of cells, and the number of columns is the number of vertices.


373:    Level: advanced

375: .seealso: MatMeshToVertexGraph(), MatCreateMPIAdj(), MatPartitioningCreate()


378: @*/
379: PetscErrorCode MatMeshToCellGraph(Mat mesh,PetscInt ncommonnodes,Mat *dual)
380: {
382:   PetscInt       *newxadj,*newadjncy;
383:   PetscInt       numflag=0;
384:   Mat_MPIAdj     *adj   = (Mat_MPIAdj*)mesh->data,*newadj;
385:   PetscBool      flg;
386:   int            status;
387:   MPI_Comm       comm;

390:   PetscObjectTypeCompare((PetscObject)mesh,MATMPIADJ,&flg);
391:   if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Must use MPIAdj matrix type");
392: 
393:   PetscObjectGetComm((PetscObject)mesh,&comm);
394:   PetscStackCallParmetis(ParMETIS_V3_Mesh2Dual,((idx_t*)mesh->rmap->range,(idx_t*)adj->i,(idx_t*)adj->j,(idx_t*)&numflag,(idx_t*)&ncommonnodes,(idx_t**)&newxadj,(idx_t**)&newadjncy,&comm));
395:   MatCreateMPIAdj(PetscObjectComm((PetscObject)mesh),mesh->rmap->n,mesh->rmap->N,newxadj,newadjncy,NULL,dual);
396:   newadj = (Mat_MPIAdj*)(*dual)->data;

398:   newadj->freeaijwithfree = PETSC_TRUE; /* signal the matrix should be freed with system free since space was allocated by ParMETIS */
399:   return(0);
400: }
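
/*
   Usage sketch (hypothetical caller; assumes CSR-style arrays ia and ja, allocated with PetscMalloc1(),
   describing the local cells of a triangular mesh with one row per cell listing that cell's global
   vertex numbers, plus the local cell count nlocalcells and global vertex count nglobalvertices):

     Mat             mesh,dual;
     MatPartitioning part;
     IS              is;

     ierr = MatCreateMPIAdj(PETSC_COMM_WORLD,nlocalcells,nglobalvertices,ia,ja,NULL,&mesh);CHKERRQ(ierr);
     ierr = MatMeshToCellGraph(mesh,2,&dual);CHKERRQ(ierr);
     ierr = MatPartitioningCreate(PETSC_COMM_WORLD,&part);CHKERRQ(ierr);
     ierr = MatPartitioningSetAdjacency(part,dual);CHKERRQ(ierr);
     ierr = MatPartitioningSetType(part,MATPARTITIONINGPARMETIS);CHKERRQ(ierr);
     ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);
     ierr = MatPartitioningDestroy(&part);CHKERRQ(ierr);
     ierr = MatDestroy(&dual);CHKERRQ(ierr);
     ierr = MatDestroy(&mesh);CHKERRQ(ierr);
     ierr = ISDestroy(&is);CHKERRQ(ierr);

   The value 2 for ncommonnodes matches the triangle/quadrilateral case described in the manual page above.
*/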