Actual source code: pastix.c

petsc-3.7.5 2017-01-01
/*
 Provides an interface to the PaStiX sparse solver
 */
#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <../src/mat/impls/sbaij/seq/sbaij.h>
#include <../src/mat/impls/sbaij/mpi/mpisbaij.h>

#if defined(PETSC_USE_COMPLEX)
#define _H_COMPLEX
#endif

EXTERN_C_BEGIN
#include <pastix.h>
EXTERN_C_END

#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
#define PASTIX_CALL c_pastix
#define PASTIX_CHECKMATRIX c_pastix_checkMatrix
#else
#define PASTIX_CALL z_pastix
#define PASTIX_CHECKMATRIX z_pastix_checkMatrix
#endif

#else /* PETSC_USE_COMPLEX */

#if defined(PETSC_USE_REAL_SINGLE)
#define PASTIX_CALL s_pastix
#define PASTIX_CHECKMATRIX s_pastix_checkMatrix
#else
#define PASTIX_CALL d_pastix
#define PASTIX_CHECKMATRIX d_pastix_checkMatrix
#endif

#endif /* PETSC_USE_COMPLEX */
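
/* For example, a default build (real scalars, double precision) resolves
   PASTIX_CALL to d_pastix and PASTIX_CHECKMATRIX to d_pastix_checkMatrix,
   matching the PastixScalar typedef below. */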

typedef PetscScalar PastixScalar;

typedef struct Mat_Pastix_ {
  pastix_data_t *pastix_data;    /* PaStiX data storage structure                        */
  MatStructure  matstruc;
  PetscInt      n;               /* Number of columns in the matrix                      */
  PetscInt      *colptr;         /* Index of first element of each column in row and val */
  PetscInt      *row;            /* Row of each element of the matrix                    */
  PetscScalar   *val;            /* Value of each element of the matrix                  */
  PetscInt      *perm;           /* Permutation table                                    */
  PetscInt      *invp;           /* Inverse permutation table                            */
  PetscScalar   *rhs;            /* Right-hand-side member                               */
  PetscInt      rhsnbr;          /* Number of right-hand sides (must be 1)               */
  PetscInt      iparm[64];       /* Integer parameters                                   */
  double        dparm[64];       /* Floating point parameters                            */
  MPI_Comm      pastix_comm;     /* PaStiX MPI communicator                              */
  PetscMPIInt   commRank;        /* MPI rank                                             */
  PetscMPIInt   commSize;        /* MPI communicator size                                */
  PetscBool     CleanUpPastix;   /* Boolean indicating if we call the PaStiX clean step  */
  VecScatter    scat_rhs;
  VecScatter    scat_sol;
  Vec           b_seq;
  PetscBool     isAIJ;
  PetscErrorCode (*Destroy)(Mat);
} Mat_Pastix;

extern PetscErrorCode MatDuplicate_Pastix(Mat,MatDuplicateOption,Mat*);

/*
  Convert a PETSc seqaij matrix to CSC format: colptr[n], row[nz], val[nz]

  input:
    A       - matrix in seqaij or mpisbaij (bs=1) format
    valOnly - FALSE: spaces are allocated and values are set for the CSC
              TRUE:  only fill values
  output:
    n       - size of the matrix
    colptr  - index of first element of each column in row and val
    row     - row of each element of the matrix
    values  - value of each element of the matrix
 */
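
/*
  Small worked example of the target layout: the 2x2 matrix [2 3; 0 4]
  stored in 1-based CSC is
      colptr = {1,2,4},  row = {1,1,2},  val = {2,3,4}
  i.e. column j occupies entries colptr[j]-1 .. colptr[j+1]-2 of row/val.
 */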
PetscErrorCode MatConvertToCSC(Mat A,PetscBool valOnly,PetscInt *n,PetscInt **colptr,PetscInt **row,PetscScalar **values)
{
  Mat_SeqAIJ     *aa      = (Mat_SeqAIJ*)A->data;
  PetscInt       *rowptr  = aa->i;
  PetscInt       *col     = aa->j;
  PetscScalar    *rvalues = aa->a;
  PetscInt       m        = A->rmap->N;
  PetscInt       nnz;
  PetscInt       i,j,k;
  PetscInt       base = 1;
  PetscInt       idx;
  PetscInt       colidx;
  PetscInt       *colcount;
  PetscBool      isSBAIJ;
  PetscBool      isSeqSBAIJ;
  PetscBool      isMpiSBAIJ;
  PetscBool      isSym;
  PetscBool      flg;
  PetscInt       icntl;
  PetscInt       verb;
  PetscInt       check;

  MatIsSymmetric(A,0.0,&isSym);
  PetscObjectTypeCompare((PetscObject)A,MATSBAIJ,&isSBAIJ);
  PetscObjectTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);
  PetscObjectTypeCompare((PetscObject)A,MATMPISBAIJ,&isMpiSBAIJ);

  *n = A->cmap->N;

  /* PaStiX needs only one triangular part when the matrix is symmetric */
  if (isSym && !(isSBAIJ || isSeqSBAIJ || isMpiSBAIJ)) nnz = (aa->nz - *n)/2 + *n;
  else nnz = aa->nz;

  if (!valOnly) {
    PetscMalloc1((*n)+1,colptr);
    PetscMalloc1(nnz,row);
    PetscMalloc1(nnz,values);

    if (isSBAIJ || isSeqSBAIJ || isMpiSBAIJ) {
      PetscMemcpy(*colptr,rowptr,((*n)+1)*sizeof(PetscInt));
      for (i = 0; i < *n+1; i++) (*colptr)[i] += base;
      PetscMemcpy(*row,col,nnz*sizeof(PetscInt));
      for (i = 0; i < nnz; i++) (*row)[i] += base;
      PetscMemcpy(*values,rvalues,nnz*sizeof(PetscScalar));
    } else {
      PetscMalloc1(*n,&colcount);

      for (i = 0; i < *n; i++) colcount[i] = 0;   /* colcount holds one counter per column */
      /* Fill-in colptr */
      for (i = 0; i < m; i++) {
        for (j = rowptr[i]; j < rowptr[i+1]; j++) {
          if (!isSym || col[j] <= i) colcount[col[j]]++;
        }
      }

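      /* Prefix sum of the per-column counts gives the 1-based column pointers */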
      (*colptr)[0] = base;
      for (j = 0; j < *n; j++) {
        (*colptr)[j+1] = (*colptr)[j] + colcount[j];
        /* in next loop we fill starting from (*colptr)[colidx] - base */
        colcount[j] = -base;
      }

      /* Fill-in rows and values */
      for (i = 0; i < m; i++) {
        for (j = rowptr[i]; j < rowptr[i+1]; j++) {
          if (!isSym || col[j] <= i) {
            colidx         = col[j];
            idx            = (*colptr)[colidx] + colcount[colidx];
            (*row)[idx]    = i + base;
            (*values)[idx] = rvalues[j];
            colcount[colidx]++;
          }
        }
      }
      PetscFree(colcount);
    }
  } else {
    /* Fill-in only values */
    for (i = 0; i < m; i++) {
      for (j = rowptr[i]; j < rowptr[i+1]; j++) {
        colidx = col[j];
        if ((isSBAIJ || isSeqSBAIJ || isMpiSBAIJ) || !isSym || col[j] <= i) {
          /* look for the value to fill */
          for (k = (*colptr)[colidx] - base; k < (*colptr)[colidx + 1] - base; k++) {
            if (((*row)[k]-base) == i) {
              (*values)[k] = rvalues[j];
              break;
            }
          }
          /* data structure of the sparse matrix has changed */
          if (k == (*colptr)[colidx + 1] - base) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"overflow on k %D",k);
        }
      }
    }
  }

  icntl = -1;
  check = 0;
  PetscOptionsGetInt(NULL,((PetscObject)A)->prefix,"-mat_pastix_check",&icntl,&flg);
  if ((flg && icntl >= 0) || PetscLogPrintInfo) check = icntl;

  if (check == 1) {
    PetscScalar *tmpvalues;
    PetscInt    *tmprows,*tmpcolptr;
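    /* Raw malloc()/free() rather than PetscMalloc()/PetscFree(): presumably
       because PASTIX_CHECKMATRIX (called with flagcor = API_YES) may correct
       the matrix and reallocate these arrays with the C allocator. */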
    tmpvalues = (PetscScalar*)malloc(nnz*sizeof(PetscScalar));    if (!tmpvalues) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_MEM,"Unable to allocate memory");
    tmprows   = (PetscInt*)   malloc(nnz*sizeof(PetscInt));       if (!tmprows)   SETERRQ(PETSC_COMM_SELF,PETSC_ERR_MEM,"Unable to allocate memory");
    tmpcolptr = (PetscInt*)   malloc((*n+1)*sizeof(PetscInt));    if (!tmpcolptr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_MEM,"Unable to allocate memory");

    PetscMemcpy(tmpcolptr,*colptr,(*n+1)*sizeof(PetscInt));
    PetscMemcpy(tmprows,*row,nnz*sizeof(PetscInt));
    PetscMemcpy(tmpvalues,*values,nnz*sizeof(PetscScalar));
    PetscFree(*row);
    PetscFree(*values);

    icntl = -1;
    verb  = API_VERBOSE_NOT;
    /* "iparm[IPARM_VERBOSE] : level of printing (0 to 2)" */
    PetscOptionsGetInt(NULL,((PetscObject)A)->prefix,"-mat_pastix_verbose",&icntl,&flg);
    if ((flg && icntl >= 0) || PetscLogPrintInfo) verb = icntl;
    PASTIX_CHECKMATRIX(MPI_COMM_WORLD,verb,((isSym != 0) ? API_SYM_YES : API_SYM_NO),API_YES,*n,&tmpcolptr,&tmprows,(PastixScalar**)&tmpvalues,NULL,1);

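    /* The corrected matrix may contain a different number of entries, so
       row and values are re-sized below to (*colptr)[*n]-1 */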
    PetscMemcpy(*colptr,tmpcolptr,(*n+1)*sizeof(PetscInt));
    PetscMalloc1(((*colptr)[*n]-1),row);
    PetscMemcpy(*row,tmprows,((*colptr)[*n]-1)*sizeof(PetscInt));
    PetscMalloc1(((*colptr)[*n]-1),values);
    PetscMemcpy(*values,tmpvalues,((*colptr)[*n]-1)*sizeof(PetscScalar));
    free(tmpvalues);
    free(tmprows);
    free(tmpcolptr);
  }
  return(0);
}

PetscErrorCode MatGetDiagonal_Pastix(Mat A,Vec v)
{
  SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Mat type: PaStiX factor");
  return(0);
}

/*
  Call the clean step of PaStiX if lu->CleanUpPastix == true.
  Free the CSC matrix.
 */
PetscErrorCode MatDestroy_Pastix(Mat A)
{
  Mat_Pastix *lu=(Mat_Pastix*)A->spptr;

  if (lu && lu->CleanUpPastix) {
    PetscMPIInt size = lu->commSize;

    /* Terminate instance, deallocate memories */
    if (size > 1) {
      VecScatterDestroy(&lu->scat_rhs);
      VecDestroy(&lu->b_seq);
      VecScatterDestroy(&lu->scat_sol);
    }

    lu->iparm[IPARM_START_TASK] = API_TASK_CLEAN;
    lu->iparm[IPARM_END_TASK]   = API_TASK_CLEAN;

    PASTIX_CALL(&(lu->pastix_data),
                lu->pastix_comm,
                lu->n,
                lu->colptr,
                lu->row,
                (PastixScalar*)lu->val,
                lu->perm,
                lu->invp,
                (PastixScalar*)lu->rhs,
                lu->rhsnbr,
                lu->iparm,
                lu->dparm);

    PetscFree(lu->colptr);
    PetscFree(lu->row);
    PetscFree(lu->val);
    PetscFree(lu->perm);
    PetscFree(lu->invp);
    MPI_Comm_free(&(lu->pastix_comm));
  }
  if (lu && lu->Destroy) {
    (lu->Destroy)(A);
  }
  PetscFree(A->spptr);
  return(0);
}

/*
  Gather the right-hand side.
  Call the solve step.
  Scatter the solution.
 */
PetscErrorCode MatSolve_PaStiX(Mat A,Vec b,Vec x)
{
  Mat_Pastix     *lu=(Mat_Pastix*)A->spptr;
  PetscScalar    *array;
  Vec            x_seq;

  lu->rhsnbr = 1;
  x_seq      = lu->b_seq;
  if (lu->commSize > 1) {
    /* PaStiX only supports a centralized rhs: scatter b into a sequential rhs vector */
    VecScatterBegin(lu->scat_rhs,b,x_seq,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(lu->scat_rhs,b,x_seq,INSERT_VALUES,SCATTER_FORWARD);
    VecGetArray(x_seq,&array);
  } else {  /* size == 1 */
    VecCopy(b,x);
    VecGetArray(x,&array);
  }
  lu->rhs = array;
  if (lu->commSize == 1) {
    VecRestoreArray(x,&array);
  } else {
    VecRestoreArray(x_seq,&array);
  }

  /* solve phase */
  /*-------------*/
  lu->iparm[IPARM_START_TASK] = API_TASK_SOLVE;
  lu->iparm[IPARM_END_TASK]   = API_TASK_REFINE;
  lu->iparm[IPARM_RHS_MAKING] = API_RHS_B;
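  /* Tasks SOLVE through REFINE: triangular solves followed by PaStiX's
     iterative refinement of the solution */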

  PASTIX_CALL(&(lu->pastix_data),
              lu->pastix_comm,
              lu->n,
              lu->colptr,
              lu->row,
              (PastixScalar*)lu->val,
              lu->perm,
              lu->invp,
              (PastixScalar*)lu->rhs,
              lu->rhsnbr,
              lu->iparm,
              lu->dparm);

  if (lu->iparm[IPARM_ERROR_NUMBER] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by PaStiX in solve phase: lu->iparm[IPARM_ERROR_NUMBER] = %d\n",lu->iparm[IPARM_ERROR_NUMBER]);

  if (lu->commSize == 1) {
    VecRestoreArray(x,&(lu->rhs));
  } else {
    VecRestoreArray(x_seq,&(lu->rhs));
  }

  if (lu->commSize > 1) { /* convert the PaStiX centralized solution to a PETSc MPI x */
    VecScatterBegin(lu->scat_sol,x_seq,x,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(lu->scat_sol,x_seq,x,INSERT_VALUES,SCATTER_FORWARD);
  }
  return(0);
}

/*
  Numeric factorization using the PaStiX solver.
 */
PetscErrorCode MatFactorNumeric_PaStiX(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat_Pastix     *lu =(Mat_Pastix*)(F)->spptr;
  Mat            *tseq;
  PetscInt       icntl;
  PetscInt       M=A->rmap->N;
  PetscBool      valOnly,flg,isSym;
  Mat            F_diag;
  IS             is_iden;
  Vec            b;
  IS             isrow;
  PetscBool      isSeqAIJ,isSeqSBAIJ,isMPIAIJ;

  PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);
  PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&isMPIAIJ);
  PetscObjectTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);
  if (lu->matstruc == DIFFERENT_NONZERO_PATTERN) {
    (F)->ops->solve = MatSolve_PaStiX;

    /* Initialize a PaStiX instance */
    MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(lu->pastix_comm));
    MPI_Comm_rank(lu->pastix_comm,&lu->commRank);
    MPI_Comm_size(lu->pastix_comm,&lu->commSize);

    /* Set PaStiX options */
    lu->iparm[IPARM_MODIFY_PARAMETER] = API_NO;
    lu->iparm[IPARM_START_TASK]       = API_TASK_INIT;
    lu->iparm[IPARM_END_TASK]         = API_TASK_INIT;

    lu->rhsnbr = 1;

    /* Call to set default PaStiX options */
    PASTIX_CALL(&(lu->pastix_data),
                lu->pastix_comm,
                lu->n,
                lu->colptr,
                lu->row,
                (PastixScalar*)lu->val,
                lu->perm,
                lu->invp,
                (PastixScalar*)lu->rhs,
                lu->rhsnbr,
                lu->iparm,
                lu->dparm);

    PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"PaStiX Options","Mat");

    icntl = -1;

    lu->iparm[IPARM_VERBOSE] = API_VERBOSE_NOT;

    PetscOptionsInt("-mat_pastix_verbose","iparm[IPARM_VERBOSE] : level of printing (0 to 2)","None",lu->iparm[IPARM_VERBOSE],&icntl,&flg);
    if ((flg && icntl >= 0) || PetscLogPrintInfo) {
      lu->iparm[IPARM_VERBOSE] = icntl;
    }
    icntl = -1;
    PetscOptionsInt("-mat_pastix_threadnbr","iparm[IPARM_THREAD_NBR] : number of threads per MPI node","None",lu->iparm[IPARM_THREAD_NBR],&icntl,&flg);
    if (flg && icntl > 0) {
      lu->iparm[IPARM_THREAD_NBR] = icntl;
    }
    PetscOptionsEnd();
    valOnly = PETSC_FALSE;
  } else {
    if (isSeqAIJ || isMPIAIJ) {
      PetscFree(lu->colptr);
      PetscFree(lu->row);
      PetscFree(lu->val);
      valOnly = PETSC_FALSE;
    } else valOnly = PETSC_TRUE;
  }

  lu->iparm[IPARM_MATRIX_VERIFICATION] = API_YES;

  /* convert the MPI A to a sequential matrix */
  ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);
  MatGetSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);
  ISDestroy(&isrow);

  MatConvertToCSC(*tseq,valOnly,&lu->n,&lu->colptr,&lu->row,&lu->val);
  MatIsSymmetric(*tseq,0.0,&isSym);
  MatDestroyMatrices(1,&tseq);

  if (!lu->perm) {
    PetscMalloc1(lu->n,&(lu->perm));
    PetscMalloc1(lu->n,&(lu->invp));
  }

  if (isSym) {
    /* On a symmetric matrix, use LDLt factorization */
    lu->iparm[IPARM_SYM]           = API_SYM_YES;
    lu->iparm[IPARM_FACTORIZATION] = API_FACT_LDLT;
  } else {
    /* On an unsymmetric matrix, use LU factorization */
    lu->iparm[IPARM_SYM]           = API_SYM_NO;
    lu->iparm[IPARM_FACTORIZATION] = API_FACT_LU;
  }

  /*----------------*/
  if (lu->matstruc == DIFFERENT_NONZERO_PATTERN) {
    if (!(isSeqAIJ || isSeqSBAIJ)) {
      /* PaStiX only supports a centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
      VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&lu->b_seq);
      ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);
      MatCreateVecs(A,NULL,&b);
      VecScatterCreate(b,is_iden,lu->b_seq,is_iden,&lu->scat_rhs);
      VecScatterCreate(lu->b_seq,is_iden,b,is_iden,&lu->scat_sol);
      ISDestroy(&is_iden);
      VecDestroy(&b);
    }
    lu->iparm[IPARM_START_TASK] = API_TASK_ORDERING;
    lu->iparm[IPARM_END_TASK]   = API_TASK_NUMFACT;

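    /* First factorization: one PaStiX call chains ordering, symbolic
       analysis, and numeric factorization (tasks ORDERING through NUMFACT) */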
    PASTIX_CALL(&(lu->pastix_data),
                lu->pastix_comm,
                lu->n,
                lu->colptr,
                lu->row,
                (PastixScalar*)lu->val,
                lu->perm,
                lu->invp,
                (PastixScalar*)lu->rhs,
                lu->rhsnbr,
                lu->iparm,
                lu->dparm);
    if (lu->iparm[IPARM_ERROR_NUMBER] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by PaStiX in analysis phase: iparm(IPARM_ERROR_NUMBER)=%d\n",lu->iparm[IPARM_ERROR_NUMBER]);
  } else {
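    /* Same nonzero pattern: reuse the analysis and redo only the numeric factorization */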
    lu->iparm[IPARM_START_TASK] = API_TASK_NUMFACT;
    lu->iparm[IPARM_END_TASK]   = API_TASK_NUMFACT;
    PASTIX_CALL(&(lu->pastix_data),
                lu->pastix_comm,
                lu->n,
                lu->colptr,
                lu->row,
                (PastixScalar*)lu->val,
                lu->perm,
                lu->invp,
                (PastixScalar*)lu->rhs,
                lu->rhsnbr,
                lu->iparm,
                lu->dparm);

    if (lu->iparm[IPARM_ERROR_NUMBER] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by PaStiX in numerical factorization phase: iparm(IPARM_ERROR_NUMBER)=%d\n",lu->iparm[IPARM_ERROR_NUMBER]);
  }

  if (lu->commSize > 1) {
    if ((F)->factortype == MAT_FACTOR_LU) {
      F_diag = ((Mat_MPIAIJ*)(F)->data)->A;
    } else {
      F_diag = ((Mat_MPISBAIJ*)(F)->data)->A;
    }
    F_diag->assembled = PETSC_TRUE;
  }
  (F)->assembled    = PETSC_TRUE;
  lu->matstruc      = SAME_NONZERO_PATTERN;
  lu->CleanUpPastix = PETSC_TRUE;
  return(0);
}

/* Note the PETSc r and c permutations are ignored */
PetscErrorCode MatLUFactorSymbolic_AIJPASTIX(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_Pastix *lu = (Mat_Pastix*)F->spptr;

  lu->iparm[IPARM_FACTORIZATION] = API_FACT_LU;
  lu->iparm[IPARM_SYM]           = API_SYM_NO;   /* LU is the unsymmetric factorization */
  lu->matstruc                   = DIFFERENT_NONZERO_PATTERN;
  F->ops->lufactornumeric        = MatFactorNumeric_PaStiX;
  return(0);
}


/* Note the PETSc r permutation is ignored */
PetscErrorCode MatCholeskyFactorSymbolic_SBAIJPASTIX(Mat F,Mat A,IS r,const MatFactorInfo *info)
{
  Mat_Pastix *lu = (Mat_Pastix*)(F)->spptr;

  lu->iparm[IPARM_FACTORIZATION]  = API_FACT_LLT;
  lu->iparm[IPARM_SYM]            = API_SYM_YES;   /* Cholesky requires a symmetric matrix */
  lu->matstruc                    = DIFFERENT_NONZERO_PATTERN;
  (F)->ops->choleskyfactornumeric = MatFactorNumeric_PaStiX;
  return(0);
}

PetscErrorCode MatView_PaStiX(Mat A,PetscViewer viewer)
{
  PetscBool         iascii;
  PetscViewerFormat format;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscViewerGetFormat(viewer,&format);
    if (format == PETSC_VIEWER_ASCII_INFO) {
      Mat_Pastix *lu=(Mat_Pastix*)A->spptr;

      PetscViewerASCIIPrintf(viewer,"PaStiX run parameters:\n");
      PetscViewerASCIIPrintf(viewer,"  Matrix type :                      %s \n",((lu->iparm[IPARM_SYM] == API_SYM_YES) ? "Symmetric" : "Unsymmetric"));
      PetscViewerASCIIPrintf(viewer,"  Level of printing (0,1,2) :        %d \n",lu->iparm[IPARM_VERBOSE]);
      PetscViewerASCIIPrintf(viewer,"  Number of refinement iterations :  %d \n",lu->iparm[IPARM_NBITER]);
      PetscViewerASCIIPrintf(viewer,"  Error :                            %g \n",lu->dparm[DPARM_RELATIVE_ERROR]);
    }
  }
  return(0);
}


/*MC
     MATSOLVERPASTIX  - A solver package providing direct solvers (LU and Cholesky)
  for distributed and sequential matrices via the external package PaStiX.

  Use ./configure --download-pastix --download-ptscotch to have PETSc installed with PaStiX.

  Use -pc_type lu -pc_factor_mat_solver_package pastix to use this direct solver.

  Options Database Keys:
+ -mat_pastix_verbose   <0,1,2>   - print level
- -mat_pastix_threadnbr <integer> - number of threads per MPI task

  Level: beginner

.seealso: PCFactorSetMatSolverPackage(), MatSolverPackage

M*/
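
/*
   Minimal usage sketch, assuming an assembled AIJ matrix A and compatible
   vectors b and x (and a PETSc built with --download-pastix):

     KSP ksp;
     PC  pc;
     KSPCreate(PETSC_COMM_WORLD,&ksp);
     KSPSetOperators(ksp,A,A);
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCLU);
     PCFactorSetMatSolverPackage(pc,MATSOLVERPASTIX);
     KSPSetFromOptions(ksp);
     KSPSolve(ksp,b,x);
*/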


PetscErrorCode MatGetInfo_PaStiX(Mat A,MatInfoType flag,MatInfo *info)
{
  Mat_Pastix *lu =(Mat_Pastix*)A->spptr;

  info->block_size        = 1.0;
  info->nz_allocated      = lu->iparm[IPARM_NNZEROS];
  info->nz_used           = lu->iparm[IPARM_NNZEROS];
  info->nz_unneeded       = 0.0;
  info->assemblies        = 0.0;
  info->mallocs           = 0.0;
  info->memory            = 0.0;
  info->fill_ratio_given  = 0;
  info->fill_ratio_needed = 0;
  info->factor_mallocs    = 0;
  return(0);
}

static PetscErrorCode MatFactorGetSolverPackage_pastix(Mat A,const MatSolverPackage *type)
{
  *type = MATSOLVERPASTIX;
  return(0);
}

/*
    The seq and mpi versions of this function are the same
*/
static PetscErrorCode MatGetFactor_seqaij_pastix(Mat A,MatFactorType ftype,Mat *F)
{
  Mat            B;
  Mat_Pastix     *pastix;

  if (ftype != MAT_FACTOR_LU) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc AIJ matrices with PaStiX Cholesky, use SBAIJ matrix");
  /* Create the factorization matrix */
  MatCreate(PetscObjectComm((PetscObject)A),&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
  MatSetType(B,((PetscObject)A)->type_name);
  MatSeqAIJSetPreallocation(B,0,NULL);

  B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJPASTIX;
  B->ops->view             = MatView_PaStiX;
  B->ops->getinfo          = MatGetInfo_PaStiX;
  B->ops->getdiagonal      = MatGetDiagonal_Pastix;

  PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_pastix);

  B->factortype = MAT_FACTOR_LU;

  /* set solvertype */
  PetscFree(B->solvertype);
  PetscStrallocpy(MATSOLVERPASTIX,&B->solvertype);

  PetscNewLog(B,&pastix);

  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->isAIJ         = PETSC_TRUE;
  pastix->scat_rhs      = NULL;
  pastix->scat_sol      = NULL;
  pastix->Destroy       = B->ops->destroy;
  B->ops->destroy       = MatDestroy_Pastix;
  B->spptr              = (void*)pastix;

  *F = B;
  return(0);
}

static PetscErrorCode MatGetFactor_mpiaij_pastix(Mat A,MatFactorType ftype,Mat *F)
{
  Mat            B;
  Mat_Pastix     *pastix;

  if (ftype != MAT_FACTOR_LU) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc AIJ matrices with PaStiX Cholesky, use SBAIJ matrix");
  /* Create the factorization matrix */
  MatCreate(PetscObjectComm((PetscObject)A),&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
  MatSetType(B,((PetscObject)A)->type_name);
  MatSeqAIJSetPreallocation(B,0,NULL);
  MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);

  B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJPASTIX;
  B->ops->view             = MatView_PaStiX;
  B->ops->getdiagonal      = MatGetDiagonal_Pastix;

  PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_pastix);

  B->factortype = MAT_FACTOR_LU;

  /* set solvertype */
  PetscFree(B->solvertype);
  PetscStrallocpy(MATSOLVERPASTIX,&B->solvertype);

  PetscNewLog(B,&pastix);

  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->isAIJ         = PETSC_TRUE;
  pastix->scat_rhs      = NULL;
  pastix->scat_sol      = NULL;
  pastix->Destroy       = B->ops->destroy;
  B->ops->destroy       = MatDestroy_Pastix;
  B->spptr              = (void*)pastix;

  *F = B;
  return(0);
}

static PetscErrorCode MatGetFactor_seqsbaij_pastix(Mat A,MatFactorType ftype,Mat *F)
{
  Mat            B;
  Mat_Pastix     *pastix;

  if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with PaStiX LU, use AIJ matrix");
  /* Create the factorization matrix */
  MatCreate(PetscObjectComm((PetscObject)A),&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
  MatSetType(B,((PetscObject)A)->type_name);
  MatSeqSBAIJSetPreallocation(B,1,0,NULL);
  MatMPISBAIJSetPreallocation(B,1,0,NULL,0,NULL);

  B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJPASTIX;
  B->ops->view                   = MatView_PaStiX;
  B->ops->getdiagonal            = MatGetDiagonal_Pastix;

  PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_pastix);

  B->factortype = MAT_FACTOR_CHOLESKY;

  /* set solvertype */
  PetscFree(B->solvertype);
  PetscStrallocpy(MATSOLVERPASTIX,&B->solvertype);

  PetscNewLog(B,&pastix);

  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->isAIJ         = PETSC_TRUE;
  pastix->scat_rhs      = NULL;
  pastix->scat_sol      = NULL;
  pastix->Destroy       = B->ops->destroy;
  B->ops->destroy       = MatDestroy_Pastix;
  B->spptr              = (void*)pastix;

  *F = B;
  return(0);
}

static PetscErrorCode MatGetFactor_mpisbaij_pastix(Mat A,MatFactorType ftype,Mat *F)
{
  Mat            B;
  Mat_Pastix     *pastix;

  if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with PaStiX LU, use AIJ matrix");

  /* Create the factorization matrix */
  MatCreate(PetscObjectComm((PetscObject)A),&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
  MatSetType(B,((PetscObject)A)->type_name);
  MatSeqSBAIJSetPreallocation(B,1,0,NULL);
  MatMPISBAIJSetPreallocation(B,1,0,NULL,0,NULL);

  B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJPASTIX;
  B->ops->view                   = MatView_PaStiX;
  B->ops->getdiagonal            = MatGetDiagonal_Pastix;

  PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_pastix);

  B->factortype = MAT_FACTOR_CHOLESKY;

  /* set solvertype */
  PetscFree(B->solvertype);
  PetscStrallocpy(MATSOLVERPASTIX,&B->solvertype);

  PetscNewLog(B,&pastix);

  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->isAIJ         = PETSC_TRUE;
  pastix->scat_rhs      = NULL;
  pastix->scat_sol      = NULL;
  pastix->Destroy       = B->ops->destroy;
  B->ops->destroy       = MatDestroy_Pastix;
  B->spptr              = (void*)pastix;

  *F = B;
  return(0);
}

PETSC_EXTERN PetscErrorCode MatSolverPackageRegister_Pastix(void)
{
  MatSolverPackageRegister(MATSOLVERPASTIX,MATMPIAIJ,  MAT_FACTOR_LU,MatGetFactor_mpiaij_pastix);
  MatSolverPackageRegister(MATSOLVERPASTIX,MATSEQAIJ,  MAT_FACTOR_LU,MatGetFactor_seqaij_pastix);
  MatSolverPackageRegister(MATSOLVERPASTIX,MATMPISBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_mpisbaij_pastix);
  MatSolverPackageRegister(MATSOLVERPASTIX,MATSEQSBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_seqsbaij_pastix);
  return(0);
}