Actual source code: mpiadj.c

/*$Id: mpiadj.c,v 1.66 2001/08/07 03:02:59 balay Exp $*/

/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include "src/mat/impls/adj/mpi/mpiadj.h"
#include "petscsys.h"
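
/*
   Storage sketch (inferred from the fields used throughout this file; the
   authoritative definition of Mat_MPIAdj lives in mpiadj.h): each process
   holds its local rows in compressed-row form with zero-based indices, so
   for a process owning the 4-vertex ring graph

       row 0: 1 3
       row 1: 0 2
       row 2: 1 3
       row 3: 0 2

   the arrays would be

       i      = {0,2,4,6,8}         row k's neighbors are j[i[k]..i[k+1]-1]
       j      = {1,3,0,2,1,3,0,2}   column (neighbor) indices, sorted per row
       values = optional edge weights, parallel to j
*/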

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj_ASCII"
int MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  int               ierr,i,j,m = A->m;
  char              *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
  } else {
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
  }
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj"
int MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  int        ierr;
  PetscTruth isascii;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);CHKERRQ(ierr);
  if (isascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}
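
/*
   Example (a minimal sketch, assuming A is an assembled MATMPIADJ matrix;
   this code is not part of mpiadj.c): the default ASCII format prints one
   adjacency list per row, e.g. "row 0: 1 3".

       ierr = MatView(A,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

   PETSC_VIEWER_ASCII_INFO output is intentionally empty and Matlab format
   is unsupported, as coded above.
*/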

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
int MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
  int        ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
#endif
  if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
  if (a->freeaij) {
    ierr = PetscFree(a->i);CHKERRQ(ierr);
    ierr = PetscFree(a->j);CHKERRQ(ierr);
    if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
  }
  ierr = PetscFree(a->rowners);CHKERRQ(ierr);
  ierr = PetscFree(a);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_MPIAdj"
int MatSetOption_MPIAdj(Mat A,MatOption op)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

  PetscFunctionBegin;
  switch (op) {
  case MAT_STRUCTURALLY_SYMMETRIC:
    a->symmetric = PETSC_TRUE;
    break;
  default:
    PetscLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
    break;
  }
  PetscFunctionReturn(0);
}
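
/*
   Usage sketch (A is assumed to be a MATMPIADJ matrix): the only option
   this implementation acts on is MAT_STRUCTURALLY_SYMMETRIC; all others
   are logged and ignored.

       ierr = MatSetOption(A,MAT_STRUCTURALLY_SYMMETRIC);CHKERRQ(ierr);
*/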


/*
     Adds diagonal pointers to sparse matrix structure.
*/

#undef __FUNCT__
#define __FUNCT__ "MatMarkDiagonal_MPIAdj"
int MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  int        i,j,*diag,m = A->m,ierr;

  PetscFunctionBegin;
  ierr = PetscMalloc((m+1)*sizeof(int),&diag);CHKERRQ(ierr);
  PetscLogObjectMemory(A,(m+1)*sizeof(int));
  for (i=0; i<A->m; i++) {
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        diag[i] = j;
        break;
      }
    }
  }
  a->diag = diag;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAdj"
int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  int        *itmp;

  PetscFunctionBegin;
  row -= a->rstart;

  if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) *v = PETSC_NULL;
  if (idx) {
    itmp = a->j + a->i[row];
    if (*nz) {
      *idx = itmp;
    } else {
      *idx = 0;
    }
  }
  PetscFunctionReturn(0);
}
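
/*
   Example (a sketch, not part of this file): rows are requested by global
   index, and the value pointer always comes back PETSC_NULL because the
   matrix carries no numerical entries.

       int i,nz,*cols,rstart,rend,ierr;
       ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
       for (i=rstart; i<rend; i++) {
         ierr = MatGetRow(A,i,&nz,&cols,PETSC_NULL);CHKERRQ(ierr);
         /* cols[0..nz-1] are the neighbors of vertex i */
         ierr = MatRestoreRow(A,i,&nz,&cols,PETSC_NULL);CHKERRQ(ierr);
       }
*/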

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetBlockSize_MPIAdj"
int MatGetBlockSize_MPIAdj(Mat A,int *bs)
{
  PetscFunctionBegin;
  *bs = 1;
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "MatEqual_MPIAdj"
int MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
  int        ierr;
  PetscTruth flag;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)B,MATMPIADJ,&flag);CHKERRQ(ierr);
  if (!flag) SETERRQ(PETSC_ERR_ARG_INCOMP,"Matrices must be same type");

  /* the matrices differ if the local dimensions or numbers of nonzeros differ */
  if ((A->m != B->m) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* check that the row offsets a->i match */
    ierr = PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(int),&flag);CHKERRQ(ierr);
    if (flag) {
      /* check that the column indices a->j match */
      ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);CHKERRQ(ierr);
    }
  }

  /* all processes must agree for the matrices to be considered equal */
  ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
int MatGetRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int **ia,int **ja,PetscTruth *done)
{
  int        ierr,size,i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
  if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
  *m    = A->m;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
int MatRestoreRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int **ia,int **ja,PetscTruth *done)
{
  int        i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(1,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(1,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}
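
/*
   Example (sketch; works only on a single process, as enforced above):
   oshift=1 temporarily converts the shared ia/ja arrays to 1-based
   indexing, which Fortran-indexed ordering and partitioning packages
   expect; MatRestoreRowIJ() shifts them back.

       int n,*ia,*ja,ierr;
       PetscTruth done;
       ierr = MatGetRowIJ(A,1,PETSC_FALSE,&n,&ia,&ja,&done);CHKERRQ(ierr);
       if (done) {
         /* ... hand ia/ja to a 1-based external package ... */
         ierr = MatRestoreRowIJ(A,1,PETSC_FALSE,&n,&ia,&ja,&done);CHKERRQ(ierr);
       }
*/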

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetBlockSize_MPIAdj,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatGetPetscMaps_Petsc};


EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
int MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;
  int        ii,ierr,size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);

  ierr                = PetscNew(Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data             = (void*)b;
  ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
  ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->factor           = 0;
  B->lupivotthreshold = 1.0;
  B->mapping          = 0;
  B->assembled        = PETSC_FALSE;

  ierr = MPI_Allreduce(&B->m,&B->M,1,MPI_INT,MPI_SUM,B->comm);CHKERRQ(ierr);
  B->n = B->N;

  /* the information in the maps duplicates the information computed below; eventually
     we should remove the duplicate information that is not contained in the maps */
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->rmap);CHKERRQ(ierr);
  /* we don't know the "local columns" so just use the row information :-( */
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->cmap);CHKERRQ(ierr);

  ierr = PetscMalloc((size+1)*sizeof(int),&b->rowners);CHKERRQ(ierr);
  PetscLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
  ierr = MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);CHKERRQ(ierr);
  b->rowners[0] = 0;
  for (ii=2; ii<=size; ii++) {
    b->rowners[ii] += b->rowners[ii-1];
  }
  b->rstart = b->rowners[rank];
  b->rend   = b->rowners[rank+1];

  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation"
int MatMPIAdjSetPreallocation(Mat B,int *i,int *j,int *values)
{
  Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
  int        ierr;
#if defined(PETSC_USE_BOPT_g)
  int        ii;
#endif

  PetscFunctionBegin;
  B->preallocated = PETSC_TRUE;
#if defined(PETSC_USE_BOPT_g)
  if (i[0] != 0) SETERRQ1(1,"First i[] index must be zero, instead it is %d\n",i[0]);
  for (ii=1; ii<B->m; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) {
      SETERRQ4(1,"i[%d]=%d index is out of range: i[%d]=%d",ii,i[ii],ii-1,i[ii-1]);
    }
  }
  for (ii=0; ii<i[B->m]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->N) {
      SETERRQ2(1,"Column index j[%d]=%d out of range\n",ii,j[ii]);
    }
  }
#endif

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->m];
  b->diag      = 0;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  n - number of columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations,
   including MatSetValues().
   You must NOT free the i, j, and values arrays yourself; PETSc will free
   them when the matrix is destroyed. You must allocate them with
   PetscMalloc() (when calling from Fortran the arrays need not be created
   with PetscMalloc()).
   The rows should not include the matrix diagonal entries.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatCreateSeqAdj(), MatGetOrdering()
@*/
int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j,int *values,Mat *A)
{
  int ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,m,n,PETSC_DETERMINE,n,A);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
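
/*
   Example (a minimal uniprocessor sketch of the calling sequence; the
   4-vertex ring graph is illustrative only): the i and j arrays must be
   obtained with PetscMalloc() because the matrix takes ownership and
   frees them itself in MatDestroy_MPIAdj().

       Mat A;
       int ierr,*ii,*jj;
       ierr  = PetscMalloc(5*sizeof(int),&ii);CHKERRQ(ierr);
       ierr  = PetscMalloc(8*sizeof(int),&jj);CHKERRQ(ierr);
       ii[0] = 0; ii[1] = 2; ii[2] = 4; ii[3] = 6; ii[4] = 8;
       jj[0] = 1; jj[1] = 3;   /* neighbors of vertex 0 */
       jj[2] = 0; jj[3] = 2;   /* neighbors of vertex 1 */
       jj[4] = 1; jj[5] = 3;   /* neighbors of vertex 2 */
       jj[6] = 0; jj[7] = 2;   /* neighbors of vertex 3 */
       ierr  = MatCreateMPIAdj(PETSC_COMM_SELF,4,4,ii,jj,PETSC_NULL,&A);CHKERRQ(ierr);
       /* ... use A for ordering or partitioning ... */
       ierr  = MatDestroy(A);CHKERRQ(ierr);
*/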

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatConvertTo_MPIAdj"
int MatConvertTo_MPIAdj(Mat A,MatType type,Mat *B)
{
  int          i,ierr,m,N,nzeros = 0,*ia,*ja,*rj,len,rstart,cnt,j,*a;
  PetscScalar  *ra;
  MPI_Comm     comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count the diagonal */
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    nzeros += len;
  }

  /* malloc space for the nonzeros */
  ierr = PetscMalloc((nzeros+1)*sizeof(int),&a);CHKERRQ(ierr);
  ierr = PetscMalloc((N+1)*sizeof(int),&ia);CHKERRQ(ierr);
  ierr = PetscMalloc((nzeros+1)*sizeof(int),&ja);CHKERRQ(ierr);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt  = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not the diagonal */
        a[nzeros+cnt]    = (int)PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreateMPIAdj(comm,m,N,ia,ja,a,B);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
EXTERN_C_END