Actual source code: bddc.c

petsc-3.7.5 2017-01-01
  1: /* TODOLIST

  3:    Solvers
  4:    - Add support for cholesky for coarse solver (similar to local solvers)
  5:    - Propagate ksp prefixes for solvers to mat objects?

  7:    User interface
  8:    - ** DM attached to pc?

 10:    Debugging output
 11:    - * Better management of verbosity levels of debugging output

 13:    Extra
 14:    - *** Is it possible to work with PCBDDCGraph on boundary indices only (less memory consumed)?
 15:    - BDDC with MG framework?

 17:    FETIDP
 18:    - Move FETIDP code to its own classes

 20:    MATIS related operations contained in BDDC code
 21:    - Provide general case for subassembling

 23: */

 25: #include <../src/ksp/pc/impls/bddc/bddc.h> /*I "petscpc.h" I*/  /* includes for fortran wrappers */
 26: #include <../src/ksp/pc/impls/bddc/bddcprivate.h>
 27: #include <petscblaslapack.h>

 29: /* -------------------------------------------------------------------------- */
 32: PetscErrorCode PCSetFromOptions_BDDC(PetscOptionItems *PetscOptionsObject,PC pc)
 33: {
 34:   PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;

 38:   PetscOptionsHead(PetscOptionsObject,"BDDC options");
 39:   /* Verbose debugging */
 40:   PetscOptionsInt("-pc_bddc_check_level","Verbose output for PCBDDC (intended for debug)","none",pcbddc->dbg_flag,&pcbddc->dbg_flag,NULL);
 41:   /* Primal space customization */
 42:   PetscOptionsBool("-pc_bddc_use_local_mat_graph","Use or not adjacency graph of local mat for interface analysis","none",pcbddc->use_local_adj,&pcbddc->use_local_adj,NULL);
 43:   PetscOptionsBool("-pc_bddc_use_vertices","Use or not corner dofs in coarse space","none",pcbddc->use_vertices,&pcbddc->use_vertices,NULL);
 44:   PetscOptionsBool("-pc_bddc_use_edges","Use or not edge constraints in coarse space","none",pcbddc->use_edges,&pcbddc->use_edges,NULL);
 45:   PetscOptionsBool("-pc_bddc_use_faces","Use or not face constraints in coarse space","none",pcbddc->use_faces,&pcbddc->use_faces,NULL);
 46:   PetscOptionsBool("-pc_bddc_use_true_nnsp","Use near null space attached to the matrix without modifications","none",pcbddc->use_nnsp_true,&pcbddc->use_nnsp_true,NULL);
 47:   PetscOptionsBool("-pc_bddc_use_qr_single","Use QR factorization for single constraints on cc (QR is used when multiple constraints are present)","none",pcbddc->use_qr_single,&pcbddc->use_qr_single,NULL);
 48:   /* Change of basis */
 49:   PetscOptionsBool("-pc_bddc_use_change_of_basis","Use or not internal change of basis on local edge nodes","none",pcbddc->use_change_of_basis,&pcbddc->use_change_of_basis,NULL);
 50:   PetscOptionsBool("-pc_bddc_use_change_on_faces","Use or not internal change of basis on local face nodes","none",pcbddc->use_change_on_faces,&pcbddc->use_change_on_faces,NULL);
 51:   if (!pcbddc->use_change_of_basis) {
 52:     pcbddc->use_change_on_faces = PETSC_FALSE;
 53:   }
 54:   /* Switch between M_2 (default) and M_3 preconditioners (as defined by C. Dohrmann in the ref. article) */
 55:   PetscOptionsBool("-pc_bddc_switch_static","Switch on static condensation ops around the interface preconditioner","none",pcbddc->switch_static,&pcbddc->switch_static,NULL);
 56:   PetscOptionsInt("-pc_bddc_coarse_redistribute","Number of procs where to redistribute coarse problem","none",pcbddc->redistribute_coarse,&pcbddc->redistribute_coarse,NULL);
 57:   PetscOptionsInt("-pc_bddc_coarsening_ratio","Set coarsening ratio used in multilevel coarsening","none",pcbddc->coarsening_ratio,&pcbddc->coarsening_ratio,NULL);
 58:   PetscOptionsInt("-pc_bddc_levels","Set maximum number of levels for multilevel","none",pcbddc->max_levels,&pcbddc->max_levels,NULL);
 59:   PetscOptionsBool("-pc_bddc_use_coarse_estimates","Use estimated eigenvalues for coarse problem","none",pcbddc->use_coarse_estimates,&pcbddc->use_coarse_estimates,NULL);
 60:   PetscOptionsBool("-pc_bddc_use_deluxe_scaling","Use deluxe scaling for BDDC","none",pcbddc->use_deluxe_scaling,&pcbddc->use_deluxe_scaling,NULL);
 61:   PetscOptionsBool("-pc_bddc_schur_rebuild","Whether or not the interface graph for Schur principal minors has to be rebuilt (i.e. define the interface without any adjacency)","none",pcbddc->sub_schurs_rebuild,&pcbddc->sub_schurs_rebuild,NULL);
 62:   PetscOptionsInt("-pc_bddc_schur_layers","Number of dofs' layers for the computation of principal minors (i.e. -1 uses all dofs)","none",pcbddc->sub_schurs_layers,&pcbddc->sub_schurs_layers,NULL);
 63:   PetscOptionsBool("-pc_bddc_schur_use_useradj","Whether or not the CSR graph specified by the user should be used for computing successive layers (default is to use adj of local mat)","none",pcbddc->sub_schurs_use_useradj,&pcbddc->sub_schurs_use_useradj,NULL);
 64:   PetscOptionsBool("-pc_bddc_deluxe_faster","Faster application of deluxe scaling (requires extra work during setup)","none",pcbddc->faster_deluxe,&pcbddc->faster_deluxe,NULL);
 65:   PetscOptionsReal("-pc_bddc_adaptive_threshold","Threshold to be used for adaptive selection of constraints","none",pcbddc->adaptive_threshold,&pcbddc->adaptive_threshold,NULL);
 66:   PetscOptionsInt("-pc_bddc_adaptive_nmin","Minimum number of constraints per connected components","none",pcbddc->adaptive_nmin,&pcbddc->adaptive_nmin,NULL);
 67:   PetscOptionsInt("-pc_bddc_adaptive_nmax","Maximum number of constraints per connected components","none",pcbddc->adaptive_nmax,&pcbddc->adaptive_nmax,NULL);
 68:   PetscOptionsBool("-pc_bddc_symmetric","Symmetric computation of primal basis functions","none",pcbddc->symmetric_primal,&pcbddc->symmetric_primal,NULL);
 69:   PetscOptionsInt("-pc_bddc_coarse_adj","Number of processors where to map the coarse adjacency list","none",pcbddc->coarse_adj_red,&pcbddc->coarse_adj_red,NULL);
 70:   PetscOptionsTail();
 71:   return(0);
 72: }
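/*
   Example: the options registered above are typically passed on the command line
   together with -pc_type bddc. An illustrative invocation (the executable name and
   option values are placeholders, not taken from a PETSc example):

     mpiexec -n 8 ./app -pc_type bddc -pc_bddc_levels 1 -pc_bddc_coarsening_ratio 8 \
                        -pc_bddc_use_deluxe_scaling -pc_bddc_check_level 1
*/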

 74: /* -------------------------------------------------------------------------- */
 77: static PetscErrorCode PCView_BDDC(PC pc,PetscViewer viewer)
 78: {
 79:   PC_BDDC              *pcbddc = (PC_BDDC*)pc->data;
 80:   PetscErrorCode       ierr;
 81:   PetscBool            isascii,isstring;

 84:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);
 85:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);

 87:   /* In a braindead way, print out anything which the user can control from the command line,
 88:      cribbing from PCSetFromOptions_BDDC */

 90:   /* Nothing printed for the String viewer */

 92:   /* ASCII viewer */
 93:   if (isascii) {
 94:     /* Verbose debugging */
 95:     PetscViewerASCIIPrintf(viewer,    "  BDDC: Use verbose output: %d\n",pcbddc->dbg_flag);

 97:     /* Primal space customization */
 98:     PetscViewerASCIIPrintf(viewer,    "  BDDC: Use local mat graph: %d\n",pcbddc->use_local_adj);
 99:     PetscViewerASCIIPrintf(viewer,    "  BDDC: Use vertices: %d\n",pcbddc->use_vertices);
100:     PetscViewerASCIIPrintf(viewer,    "  BDDC: Use edges: %d\n",pcbddc->use_edges);
101:     PetscViewerASCIIPrintf(viewer,    "  BDDC: Use faces: %d\n",pcbddc->use_faces);
102:     PetscViewerASCIIPrintf(viewer,    "  BDDC: Use true near null space: %d\n",pcbddc->use_nnsp_true);
103:     PetscViewerASCIIPrintf(viewer,    "  BDDC: Use QR for single constraints on cc: %d\n",pcbddc->use_qr_single);

105:     /* Change of basis */
106:     PetscViewerASCIIPrintf(viewer,    "  BDDC: Use change of basis on local edge nodes: %d\n",pcbddc->use_change_of_basis);
107:     PetscViewerASCIIPrintf(viewer,    "  BDDC: Use change of basis on local face nodes: %d\n",pcbddc->use_change_on_faces);

109:     /* Switch between M_2 (default) and M_3 preconditioners (as defined by C. Dohrmann in the ref. article) */
110:     PetscViewerASCIIPrintf(viewer,    "  BDDC: Switch on static condensation ops around the interface preconditioner: %d\n",pcbddc->switch_static);
111:     PetscViewerASCIIPrintf(viewer,    "  BDDC: Coarse problem redistribute procs: %d\n",pcbddc->redistribute_coarse);
112:     PetscViewerASCIIPrintf(viewer,    "  BDDC: Multilevel coarsening ratio: %d\n",pcbddc->coarsening_ratio);
113:     PetscViewerASCIIPrintf(viewer,    "  BDDC: Multilevel max levels: %d\n",pcbddc->max_levels);
114:     PetscViewerASCIIPrintf(viewer,    "  BDDC: Use estimated eigs for coarse problem: %d\n",pcbddc->use_coarse_estimates);
115:     PetscViewerASCIIPrintf(viewer,    "  BDDC: Use deluxe scaling: %d\n",pcbddc->use_deluxe_scaling);
116:     PetscViewerASCIIPrintf(viewer,    "  BDDC: Rebuild interface graph for Schur principal minors: %d\n",pcbddc->sub_schurs_rebuild);
117:     PetscViewerASCIIPrintf(viewer,    "  BDDC: Number of dofs' layers for the computation of principal minors: %d\n",pcbddc->sub_schurs_layers);
118:     PetscViewerASCIIPrintf(viewer,    "  BDDC: Use user CSR graph to compute successive layers: %d\n",pcbddc->sub_schurs_use_useradj);
119:     PetscViewerASCIIPrintf(viewer,    "  BDDC: Fast deluxe scaling: %d\n",pcbddc->faster_deluxe);
120:     PetscViewerASCIIPrintf(viewer,    "  BDDC: Adaptive constraint selection threshold: %g\n",pcbddc->adaptive_threshold);
121:     PetscViewerASCIIPrintf(viewer,    "  BDDC: Min constraints / connected component: %d\n",pcbddc->adaptive_nmin);
122:     PetscViewerASCIIPrintf(viewer,    "  BDDC: Max constraints / connected component: %d\n",pcbddc->adaptive_nmax);
123:     PetscViewerASCIIPrintf(viewer,    "  BDDC: Symmetric computation of primal basis functions: %d\n",pcbddc->symmetric_primal);
124:     PetscViewerASCIIPrintf(viewer,    "  BDDC: Num. Procs. to map coarse adjacency list: %d\n",pcbddc->coarse_adj_red);
125:   }

127:   return(0);
128: }

130: /* -------------------------------------------------------------------------- */
133: static PetscErrorCode PCBDDCSetChangeOfBasisMat_BDDC(PC pc, Mat change)
134: {
135:   PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;

139:   MatDestroy(&pcbddc->user_ChangeOfBasisMatrix);
140:   PetscObjectReference((PetscObject)change);
141:   pcbddc->user_ChangeOfBasisMatrix = change;
142:   return(0);
143: }
146: /*@
147:  PCBDDCSetChangeOfBasisMat - Set a user-defined change of basis for dofs

149:    Collective on PC

151:    Input Parameters:
152: +  pc - the preconditioning context
153: -  change - the change of basis matrix

155:    Level: intermediate

157:    Notes:

159: .seealso: PCBDDC
160: @*/
161: PetscErrorCode PCBDDCSetChangeOfBasisMat(PC pc, Mat change)
162: {

169:   if (pc->mat) {
170:     PetscInt rows_c,cols_c,rows,cols;
171:     MatGetSize(pc->mat,&rows,&cols);
172:     MatGetSize(change,&rows_c,&cols_c);
173:     if (rows_c != rows) SETERRQ2(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Invalid number of rows for change of basis matrix! %d != %d",rows_c,rows);
174:     if (cols_c != cols) SETERRQ2(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Invalid number of columns for change of basis matrix! %d != %d",cols_c,cols);
175:     MatGetLocalSize(pc->mat,&rows,&cols);
176:     MatGetLocalSize(change,&rows_c,&cols_c);
177:     if (rows_c != rows) SETERRQ2(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Invalid number of local rows for change of basis matrix! %d != %d",rows_c,rows);
178:     if (cols_c != cols) SETERRQ2(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Invalid number of local columns for change of basis matrix! %d != %d",cols_c,cols);
179:   }
180:   PetscTryMethod(pc,"PCBDDCSetChangeOfBasisMat_C",(PC,Mat),(pc,change));
181:   return(0);
182: }
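/*
   Minimal usage sketch for PCBDDCSetChangeOfBasisMat (illustrative; assumes pc has
   type PCBDDC and T is a user-assembled global matrix with the same sizes and layout
   as the matrix attached to pc):

     Mat T;                             // user change of basis
     ...                                // assemble T
     PCBDDCSetChangeOfBasisMat(pc,T);   // pc keeps a reference to T
     MatDestroy(&T);                    // safe: the reference taken above keeps T alive
*/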
183: /* -------------------------------------------------------------------------- */
186: static PetscErrorCode PCBDDCSetPrimalVerticesLocalIS_BDDC(PC pc, IS PrimalVertices)
187: {
188:   PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;

192:   ISDestroy(&pcbddc->user_primal_vertices);
193:   PetscObjectReference((PetscObject)PrimalVertices);
194:   pcbddc->user_primal_vertices = PrimalVertices;
195:   return(0);
196: }
199: /*@
200:  PCBDDCSetPrimalVerticesLocalIS - Set additional user-defined primal vertices in PCBDDC

202:    Collective

204:    Input Parameters:
205: +  pc - the preconditioning context
206: -  PrimalVertices - index set of primal vertices in local numbering (can be empty)

208:    Level: intermediate

210:    Notes:

212: .seealso: PCBDDC
213: @*/
214: PetscErrorCode PCBDDCSetPrimalVerticesLocalIS(PC pc, IS PrimalVertices)
215: {

222:   PetscTryMethod(pc,"PCBDDCSetPrimalVerticesLocalIS_C",(PC,IS),(pc,PrimalVertices));
223:   return(0);
224: }
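/*
   Minimal usage sketch for PCBDDCSetPrimalVerticesLocalIS (illustrative; nv and
   vertices[] are assumed to describe extra primal dofs in local subdomain numbering):

     IS primal;
     ISCreateGeneral(PETSC_COMM_SELF,nv,vertices,PETSC_COPY_VALUES,&primal);
     PCBDDCSetPrimalVerticesLocalIS(pc,primal);
     ISDestroy(&primal);
*/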
225: /* -------------------------------------------------------------------------- */
228: static PetscErrorCode PCBDDCSetCoarseningRatio_BDDC(PC pc,PetscInt k)
229: {
230:   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

233:   pcbddc->coarsening_ratio = k;
234:   return(0);
235: }

239: /*@
240:  PCBDDCSetCoarseningRatio - Set coarsening ratio used in multilevel

242:    Logically collective on PC

244:    Input Parameters:
245: +  pc - the preconditioning context
246: -  k - coarsening ratio (H/h at the coarser level)

248:    Options Database Keys:
249: .    -pc_bddc_coarsening_ratio - Sets the coarsening ratio used in multilevel coarsening

251:    Level: intermediate

253:    Notes:
254:      Approximately k subdomains at the finer level will be aggregated into a single subdomain at the coarser level

256: .seealso: PCBDDC, PCBDDCSetLevels()
257: @*/
258: PetscErrorCode PCBDDCSetCoarseningRatio(PC pc,PetscInt k)
259: {

265:   PetscTryMethod(pc,"PCBDDCSetCoarseningRatio_C",(PC,PetscInt),(pc,k));
266:   return(0);
267: }

269: /* The following functions (PCBDDCSetUseExactDirichlet and PCBDDCSetLevel) are not public */
272: static PetscErrorCode PCBDDCSetUseExactDirichlet_BDDC(PC pc,PetscBool flg)
273: {
274:   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

277:   pcbddc->use_exact_dirichlet_trick = flg;
278:   return(0);
279: }

283: PetscErrorCode PCBDDCSetUseExactDirichlet(PC pc,PetscBool flg)
284: {

290:   PetscTryMethod(pc,"PCBDDCSetUseExactDirichlet_C",(PC,PetscBool),(pc,flg));
291:   return(0);
292: }

296: static PetscErrorCode PCBDDCSetLevel_BDDC(PC pc,PetscInt level)
297: {
298:   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

301:   pcbddc->current_level = level;
302:   return(0);
303: }

307: PetscErrorCode PCBDDCSetLevel(PC pc,PetscInt level)
308: {

314:   PetscTryMethod(pc,"PCBDDCSetLevel_C",(PC,PetscInt),(pc,level));
315:   return(0);
316: }

320: static PetscErrorCode PCBDDCSetLevels_BDDC(PC pc,PetscInt levels)
321: {
322:   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

325:   pcbddc->max_levels = levels;
326:   return(0);
327: }

331: /*@
332:  PCBDDCSetLevels - Sets the maximum number of levels for multilevel

334:    Logically collective on PC

336:    Input Parameters:
337: +  pc - the preconditioning context
338: -  levels - the maximum number of levels (max 99)

340:    Options Database Keys:
341: .    -pc_bddc_levels - Sets the maximum number of levels for multilevel

343:    Level: intermediate

345:    Notes:
346:      Default value is 0, i.e. traditional one-level BDDC

348: .seealso: PCBDDC, PCBDDCSetCoarseningRatio()
349: @*/
350: PetscErrorCode PCBDDCSetLevels(PC pc,PetscInt levels)
351: {

357:   if (levels > 99) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Maximum number of levels for bddc is 99\n");
358:   PetscTryMethod(pc,"PCBDDCSetLevels_C",(PC,PetscInt),(pc,levels));
359:   return(0);
360: }
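/*
   Minimal multilevel setup sketch (illustrative): request two coarse levels and
   aggregate roughly 8 subdomains per coarser subdomain. Equivalent to the options
   -pc_bddc_levels 2 -pc_bddc_coarsening_ratio 8.

     PCBDDCSetLevels(pc,2);
     PCBDDCSetCoarseningRatio(pc,8);
*/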
361: /* -------------------------------------------------------------------------- */

365: static PetscErrorCode PCBDDCSetNullSpace_BDDC(PC pc,MatNullSpace NullSpace)
366: {
367:   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

371:   PetscObjectReference((PetscObject)NullSpace);
372:   MatNullSpaceDestroy(&pcbddc->NullSpace);
373:   pcbddc->NullSpace = NullSpace;
374:   return(0);
375: }

379: /*@
380:  PCBDDCSetNullSpace - Set nullspace for BDDC operator

382:    Logically collective on PC and MatNullSpace

384:    Input Parameters:
385: +  pc - the preconditioning context
386: -  NullSpace - Null space of the linear operator to be preconditioned (Pmat)

388:    Level: intermediate

390:    Notes:

392: .seealso: PCBDDC
393: @*/
394: PetscErrorCode PCBDDCSetNullSpace(PC pc,MatNullSpace NullSpace)
395: {

402:   PetscTryMethod(pc,"PCBDDCSetNullSpace_C",(PC,MatNullSpace),(pc,NullSpace));
403:   return(0);
404: }
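/*
   Minimal usage sketch for PCBDDCSetNullSpace (illustrative; here the preconditioned
   operator is assumed to be singular with a constant null space, e.g. a pure Neumann
   problem):

     MatNullSpace nsp;
     MatNullSpaceCreate(PetscObjectComm((PetscObject)pc),PETSC_TRUE,0,NULL,&nsp);
     PCBDDCSetNullSpace(pc,nsp);
     MatNullSpaceDestroy(&nsp);
*/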
405: /* -------------------------------------------------------------------------- */

409: static PetscErrorCode PCBDDCSetDirichletBoundaries_BDDC(PC pc,IS DirichletBoundaries)
410: {
411:   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

415:   /* last user setting takes precedence -> destroy any other customization */
416:   ISDestroy(&pcbddc->DirichletBoundariesLocal);
417:   ISDestroy(&pcbddc->DirichletBoundaries);
418:   PetscObjectReference((PetscObject)DirichletBoundaries);
419:   pcbddc->DirichletBoundaries = DirichletBoundaries;
420:   pcbddc->recompute_topography = PETSC_TRUE;
421:   return(0);
422: }

426: /*@
427:  PCBDDCSetDirichletBoundaries - Set IS defining Dirichlet boundaries for the global problem.

429:    Collective

431:    Input Parameters:
432: +  pc - the preconditioning context
433: -  DirichletBoundaries - parallel IS defining the Dirichlet boundaries

435:    Level: intermediate

437:    Notes:
438:      Provide this information if you have used the MatZeroRows/MatZeroRowsColumns routines. Any process can list any global node

440: .seealso: PCBDDC, PCBDDCSetDirichletBoundariesLocal(), MatZeroRows(), MatZeroRowsColumns()
441: @*/
442: PetscErrorCode PCBDDCSetDirichletBoundaries(PC pc,IS DirichletBoundaries)
443: {

450:   PetscTryMethod(pc,"PCBDDCSetDirichletBoundaries_C",(PC,IS),(pc,DirichletBoundaries));
451:   return(0);
452: }
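/*
   Minimal usage sketch for PCBDDCSetDirichletBoundaries (illustrative; ndir and
   dirdofs[] are assumed to hold the global indices zeroed with MatZeroRows or
   MatZeroRowsColumns):

     IS dir;
     ISCreateGeneral(PetscObjectComm((PetscObject)pc),ndir,dirdofs,PETSC_COPY_VALUES,&dir);
     PCBDDCSetDirichletBoundaries(pc,dir);
     ISDestroy(&dir);
*/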
453: /* -------------------------------------------------------------------------- */

457: static PetscErrorCode PCBDDCSetDirichletBoundariesLocal_BDDC(PC pc,IS DirichletBoundaries)
458: {
459:   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

463:   /* last user setting takes precedence -> destroy any other customization */
464:   ISDestroy(&pcbddc->DirichletBoundariesLocal);
465:   ISDestroy(&pcbddc->DirichletBoundaries);
466:   PetscObjectReference((PetscObject)DirichletBoundaries);
467:   pcbddc->DirichletBoundariesLocal = DirichletBoundaries;
468:   pcbddc->recompute_topography = PETSC_TRUE;
469:   return(0);
470: }

474: /*@
475:  PCBDDCSetDirichletBoundariesLocal - Set IS defining Dirichlet boundaries for the global problem in local ordering.

477:    Collective

479:    Input Parameters:
480: +  pc - the preconditioning context
481: -  DirichletBoundaries - parallel IS defining the Dirichlet boundaries (in local ordering)

483:    Level: intermediate

485:    Notes:

487: .seealso: PCBDDC, PCBDDCSetDirichletBoundaries(), MatZeroRows(), MatZeroRowsColumns()
488: @*/
489: PetscErrorCode PCBDDCSetDirichletBoundariesLocal(PC pc,IS DirichletBoundaries)
490: {

497:   PetscTryMethod(pc,"PCBDDCSetDirichletBoundariesLocal_C",(PC,IS),(pc,DirichletBoundaries));
498:   return(0);
499: }
500: /* -------------------------------------------------------------------------- */

504: static PetscErrorCode PCBDDCSetNeumannBoundaries_BDDC(PC pc,IS NeumannBoundaries)
505: {
506:   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

510:   /* last user setting takes precedence -> destroy any other customization */
511:   ISDestroy(&pcbddc->NeumannBoundariesLocal);
512:   ISDestroy(&pcbddc->NeumannBoundaries);
513:   PetscObjectReference((PetscObject)NeumannBoundaries);
514:   pcbddc->NeumannBoundaries = NeumannBoundaries;
515:   pcbddc->recompute_topography = PETSC_TRUE;
516:   return(0);
517: }

521: /*@
522:  PCBDDCSetNeumannBoundaries - Set IS defining Neumann boundaries for the global problem.

524:    Collective

526:    Input Parameters:
527: +  pc - the preconditioning context
528: -  NeumannBoundaries - parallel IS defining the Neumann boundaries

530:    Level: intermediate

532:    Notes:
533:      Any process can list any global node

535: .seealso: PCBDDC, PCBDDCSetNeumannBoundariesLocal()
536: @*/
537: PetscErrorCode PCBDDCSetNeumannBoundaries(PC pc,IS NeumannBoundaries)
538: {

545:   PetscTryMethod(pc,"PCBDDCSetNeumannBoundaries_C",(PC,IS),(pc,NeumannBoundaries));
546:   return(0);
547: }
548: /* -------------------------------------------------------------------------- */

552: static PetscErrorCode PCBDDCSetNeumannBoundariesLocal_BDDC(PC pc,IS NeumannBoundaries)
553: {
554:   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

558:   /* last user setting takes precedence -> destroy any other customization */
559:   ISDestroy(&pcbddc->NeumannBoundariesLocal);
560:   ISDestroy(&pcbddc->NeumannBoundaries);
561:   PetscObjectReference((PetscObject)NeumannBoundaries);
562:   pcbddc->NeumannBoundariesLocal = NeumannBoundaries;
563:   pcbddc->recompute_topography = PETSC_TRUE;
564:   return(0);
565: }

569: /*@
570:  PCBDDCSetNeumannBoundariesLocal - Set IS defining Neumann boundaries for the global problem in local ordering.

572:    Collective

574:    Input Parameters:
575: +  pc - the preconditioning context
576: -  NeumannBoundaries - parallel IS defining the subdomain part of Neumann boundaries (in local ordering)

578:    Level: intermediate

580:    Notes:

582: .seealso: PCBDDC, PCBDDCSetNeumannBoundaries()
583: @*/
584: PetscErrorCode PCBDDCSetNeumannBoundariesLocal(PC pc,IS NeumannBoundaries)
585: {

592:   PetscTryMethod(pc,"PCBDDCSetNeumannBoundariesLocal_C",(PC,IS),(pc,NeumannBoundaries));
593:   return(0);
594: }
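/*
   Minimal usage sketch for PCBDDCSetNeumannBoundariesLocal (illustrative; nneu and
   neudofs[] are assumed to list, in local subdomain numbering, the interface dofs
   lying on the Neumann part of the boundary):

     IS neu;
     ISCreateGeneral(PETSC_COMM_SELF,nneu,neudofs,PETSC_COPY_VALUES,&neu);
     PCBDDCSetNeumannBoundariesLocal(pc,neu);
     ISDestroy(&neu);
*/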
595: /* -------------------------------------------------------------------------- */

599: static PetscErrorCode PCBDDCGetDirichletBoundaries_BDDC(PC pc,IS *DirichletBoundaries)
600: {
601:   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

604:   *DirichletBoundaries = pcbddc->DirichletBoundaries;
605:   return(0);
606: }

610: /*@
611:  PCBDDCGetDirichletBoundaries - Get parallel IS for Dirichlet boundaries

613:    Collective

615:    Input Parameters:
616: .  pc - the preconditioning context

618:    Output Parameters:
619: .  DirichletBoundaries - index set defining the Dirichlet boundaries

621:    Level: intermediate

623:    Notes:
624:      The IS returned (if any) is the same as that passed in earlier by the user with PCBDDCSetDirichletBoundaries()

626: .seealso: PCBDDC
627: @*/
628: PetscErrorCode PCBDDCGetDirichletBoundaries(PC pc,IS *DirichletBoundaries)
629: {

634:   PetscUseMethod(pc,"PCBDDCGetDirichletBoundaries_C",(PC,IS*),(pc,DirichletBoundaries));
635:   return(0);
636: }
637: /* -------------------------------------------------------------------------- */

641: static PetscErrorCode PCBDDCGetDirichletBoundariesLocal_BDDC(PC pc,IS *DirichletBoundaries)
642: {
643:   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

646:   *DirichletBoundaries = pcbddc->DirichletBoundariesLocal;
647:   return(0);
648: }

652: /*@
653:  PCBDDCGetDirichletBoundariesLocal - Get parallel IS for Dirichlet boundaries (in local ordering)

655:    Collective

657:    Input Parameters:
658: .  pc - the preconditioning context

660:    Output Parameters:
661: .  DirichletBoundaries - index set defining the subdomain part of Dirichlet boundaries

663:    Level: intermediate

665:    Notes:
666:      The IS returned could be the same as that passed in earlier by the user (if provided with PCBDDCSetDirichletBoundariesLocal()) or a global-to-local map of the global IS (if provided with PCBDDCSetDirichletBoundaries()).
667:      In the latter case, the IS will be available after PCSetUp().

669: .seealso: PCBDDC
670: @*/
671: PetscErrorCode PCBDDCGetDirichletBoundariesLocal(PC pc,IS *DirichletBoundaries)
672: {

677:   PetscUseMethod(pc,"PCBDDCGetDirichletBoundariesLocal_C",(PC,IS*),(pc,DirichletBoundaries));
678:   return(0);
679: }
680: /* -------------------------------------------------------------------------- */

684: static PetscErrorCode PCBDDCGetNeumannBoundaries_BDDC(PC pc,IS *NeumannBoundaries)
685: {
686:   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

689:   *NeumannBoundaries = pcbddc->NeumannBoundaries;
690:   return(0);
691: }

695: /*@
696:  PCBDDCGetNeumannBoundaries - Get parallel IS for Neumann boundaries

698:    Collective

700:    Input Parameters:
701: .  pc - the preconditioning context

703:    Output Parameters:
704: .  NeumannBoundaries - index set defining the Neumann boundaries

706:    Level: intermediate

708:    Notes:
709:      The IS returned (if any) is the same as that passed in earlier by the user with PCBDDCSetNeumannBoundaries()

711: .seealso: PCBDDC
712: @*/
713: PetscErrorCode PCBDDCGetNeumannBoundaries(PC pc,IS *NeumannBoundaries)
714: {

719:   PetscUseMethod(pc,"PCBDDCGetNeumannBoundaries_C",(PC,IS*),(pc,NeumannBoundaries));
720:   return(0);
721: }
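/*
   Query sketch (illustrative): retrieve the IS previously provided with
   PCBDDCSetNeumannBoundaries; the IS is owned by the preconditioner, so the caller
   should not destroy it.

     IS neu;
     PCBDDCGetNeumannBoundaries(pc,&neu);
     if (neu) { ... }
*/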
722: /* -------------------------------------------------------------------------- */

726: static PetscErrorCode PCBDDCGetNeumannBoundariesLocal_BDDC(PC pc,IS *NeumannBoundaries)
727: {
728:   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

731:   *NeumannBoundaries = pcbddc->NeumannBoundariesLocal;
732:   return(0);
733: }

737: /*@
738:  PCBDDCGetNeumannBoundariesLocal - Get parallel IS for Neumann boundaries (in local ordering)

740:    Collective

742:    Input Parameters:
743: .  pc - the preconditioning context

745:    Output Parameters:
746: .  NeumannBoundaries - index set defining the subdomain part of Neumann boundaries

748:    Level: intermediate

750:    Notes:
751:      The IS returned could be the same as that passed in earlier by the user (if provided with PCBDDCSetNeumannBoundariesLocal()) or a global-to-local map of the global IS (if provided with PCBDDCSetNeumannBoundaries()).
752:      In the latter case, the IS will be available after PCSetUp().

754: .seealso: PCBDDC
755: @*/
756: PetscErrorCode PCBDDCGetNeumannBoundariesLocal(PC pc,IS *NeumannBoundaries)
757: {

762:   PetscUseMethod(pc,"PCBDDCGetNeumannBoundariesLocal_C",(PC,IS*),(pc,NeumannBoundaries));
763:   return(0);
764: }
765: /* -------------------------------------------------------------------------- */

769: static PetscErrorCode PCBDDCSetLocalAdjacencyGraph_BDDC(PC pc, PetscInt nvtxs,const PetscInt xadj[],const PetscInt adjncy[], PetscCopyMode copymode)
770: {
771:   PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;
772:   PCBDDCGraph    mat_graph = pcbddc->mat_graph;

776:   /* free old CSR */
777:   PCBDDCGraphResetCSR(mat_graph);
778:   /* TODO: PCBDDCGraphSetAdjacency */
779:   /* get CSR into graph structure */
780:   if (copymode == PETSC_COPY_VALUES) {
781:     PetscMalloc1(nvtxs+1,&mat_graph->xadj);
782:     PetscMalloc1(xadj[nvtxs],&mat_graph->adjncy);
783:     PetscMemcpy(mat_graph->xadj,xadj,(nvtxs+1)*sizeof(PetscInt));
784:     PetscMemcpy(mat_graph->adjncy,adjncy,xadj[nvtxs]*sizeof(PetscInt));
785:   } else if (copymode == PETSC_OWN_POINTER) {
786:     mat_graph->xadj = (PetscInt*)xadj;
787:     mat_graph->adjncy = (PetscInt*)adjncy;
788:   }
789:   mat_graph->nvtxs_csr = nvtxs;
790:   return(0);
791: }

795: /*@
796:  PCBDDCSetLocalAdjacencyGraph - Set adjacency structure (CSR graph) of the local matrix

798:    Not collective

800:    Input Parameters:
801: +  pc - the preconditioning context
802: .  nvtxs - number of local vertices of the graph (i.e., the size of the local problem)
803: .  xadj, adjncy - the CSR graph
804: -  copymode - either PETSC_COPY_VALUES or PETSC_OWN_POINTER.

806:    Level: intermediate

808:    Notes:

810: .seealso: PCBDDC,PetscCopyMode
811: @*/
812: PetscErrorCode PCBDDCSetLocalAdjacencyGraph(PC pc,PetscInt nvtxs,const PetscInt xadj[],const PetscInt adjncy[], PetscCopyMode copymode)
813: {
814:   void (*f)(void) = 0;

821:   if (copymode != PETSC_COPY_VALUES && copymode != PETSC_OWN_POINTER)  SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unsupported copy mode %d",copymode);
822:   PetscTryMethod(pc,"PCBDDCSetLocalAdjacencyGraph_C",(PC,PetscInt,const PetscInt[],const PetscInt[],PetscCopyMode),(pc,nvtxs,xadj,adjncy,copymode));
823:   /* free arrays if PCBDDC is not the PC type */
824:   PetscObjectQueryFunction((PetscObject)pc,"PCBDDCSetLocalAdjacencyGraph_C",&f);
825:   if (!f && copymode == PETSC_OWN_POINTER) {
826:     PetscFree(xadj);
827:     PetscFree(adjncy);
828:   }
829:   return(0);
830: }
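/*
   Minimal usage sketch for PCBDDCSetLocalAdjacencyGraph (illustrative): a CSR graph
   for 3 local dofs forming the chain 0-1-2. With PETSC_OWN_POINTER the arrays must be
   obtained with PetscMalloc, since ownership passes to the preconditioner.

     PetscInt *xadj,*adjncy;
     PetscMalloc1(4,&xadj);
     PetscMalloc1(4,&adjncy);
     xadj[0] = 0; xadj[1] = 1; xadj[2] = 3; xadj[3] = 4;
     adjncy[0] = 1; adjncy[1] = 0; adjncy[2] = 2; adjncy[3] = 1;
     PCBDDCSetLocalAdjacencyGraph(pc,3,xadj,adjncy,PETSC_OWN_POINTER);
*/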
831: /* -------------------------------------------------------------------------- */

835: static PetscErrorCode PCBDDCSetDofsSplittingLocal_BDDC(PC pc,PetscInt n_is, IS ISForDofs[])
836: {
837:   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
838:   PetscInt i;

842:   /* Destroy ISes if they were already set */
843:   for (i=0;i<pcbddc->n_ISForDofsLocal;i++) {
844:     ISDestroy(&pcbddc->ISForDofsLocal[i]);
845:   }
846:   PetscFree(pcbddc->ISForDofsLocal);
847:   /* last user setting takes precedence -> destroy any other customization */
848:   for (i=0;i<pcbddc->n_ISForDofs;i++) {
849:     ISDestroy(&pcbddc->ISForDofs[i]);
850:   }
851:   PetscFree(pcbddc->ISForDofs);
852:   pcbddc->n_ISForDofs = 0;
853:   /* allocate space then set */
854:   if (n_is) {
855:     PetscMalloc1(n_is,&pcbddc->ISForDofsLocal);
856:   }
857:   for (i=0;i<n_is;i++) {
858:     PetscObjectReference((PetscObject)ISForDofs[i]);
859:     pcbddc->ISForDofsLocal[i]=ISForDofs[i];
860:   }
861:   pcbddc->n_ISForDofsLocal=n_is;
862:   if (n_is) pcbddc->user_provided_isfordofs = PETSC_TRUE;
863:   pcbddc->recompute_topography = PETSC_TRUE;
864:   return(0);
865: }

869: /*@
870:  PCBDDCSetDofsSplittingLocal - Set index sets defining fields of the local subdomain matrix

872:    Collective

874:    Input Parameters:
875: +  pc - the preconditioning context
876: .  n_is - number of index sets defining the fields
877: -  ISForDofs - array of IS describing the fields in local ordering

879:    Level: intermediate

881:    Notes:
882:      n_is should be the same among processes. Not all nodes need to be listed: unlisted nodes will belong to the complement field.

884: .seealso: PCBDDC
885: @*/
886: PetscErrorCode PCBDDCSetDofsSplittingLocal(PC pc,PetscInt n_is, IS ISForDofs[])
887: {
888:   PetscInt       i;

894:   for (i=0;i<n_is;i++) {
897:   }
898:   PetscTryMethod(pc,"PCBDDCSetDofsSplittingLocal_C",(PC,PetscInt,IS[]),(pc,n_is,ISForDofs));
899:   return(0);
900: }
901: /* -------------------------------------------------------------------------- */

905: static PetscErrorCode PCBDDCSetDofsSplitting_BDDC(PC pc,PetscInt n_is, IS ISForDofs[])
906: {
907:   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
908:   PetscInt i;

912:   /* Destroy ISes if they were already set */
913:   for (i=0;i<pcbddc->n_ISForDofs;i++) {
914:     ISDestroy(&pcbddc->ISForDofs[i]);
915:   }
916:   PetscFree(pcbddc->ISForDofs);
917:   /* last user setting takes precedence -> destroy any other customization */
918:   for (i=0;i<pcbddc->n_ISForDofsLocal;i++) {
919:     ISDestroy(&pcbddc->ISForDofsLocal[i]);
920:   }
921:   PetscFree(pcbddc->ISForDofsLocal);
922:   pcbddc->n_ISForDofsLocal = 0;
923:   /* allocate space then set */
924:   if (n_is) {
925:     PetscMalloc1(n_is,&pcbddc->ISForDofs);
926:   }
927:   for (i=0;i<n_is;i++) {
928:     PetscObjectReference((PetscObject)ISForDofs[i]);
929:     pcbddc->ISForDofs[i]=ISForDofs[i];
930:   }
931:   pcbddc->n_ISForDofs=n_is;
932:   if (n_is) pcbddc->user_provided_isfordofs = PETSC_TRUE;
933:   pcbddc->recompute_topography = PETSC_TRUE;
934:   return(0);
935: }

939: /*@
940:  PCBDDCSetDofsSplitting - Set index sets defining fields of the global matrix

942:    Collective

944:    Input Parameters:
945: +  pc - the preconditioning context
946: .  n_is - number of index sets defining the fields
947: -  ISForDofs - array of IS describing the fields in global ordering

949:    Level: intermediate

951:    Notes:
952:      Any process can list any global node. Not all nodes need to be listed: unlisted nodes will belong to the complement field.

954: .seealso: PCBDDC
955: @*/
956: PetscErrorCode PCBDDCSetDofsSplitting(PC pc,PetscInt n_is, IS ISForDofs[])
957: {
958:   PetscInt       i;

964:   for (i=0;i<n_is;i++) {
967:   }
968:   PetscTryMethod(pc,"PCBDDCSetDofsSplitting_C",(PC,PetscInt,IS[]),(pc,n_is,ISForDofs));
969:   return(0);
970: }
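/*
   Minimal usage sketch for PCBDDCSetDofsSplitting (illustrative; isu and isp are
   assumed to be parallel ISes holding the global indices of, e.g., velocity and
   pressure dofs):

     IS fields[2];
     fields[0] = isu;
     fields[1] = isp;
     PCBDDCSetDofsSplitting(pc,2,fields);
*/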

972: /* -------------------------------------------------------------------------- */
975: /* -------------------------------------------------------------------------- */
976: /*
977:    PCPreSolve_BDDC - Changes the right hand side and (if necessary) the initial
978:                      guess if a transformation of basis approach has been selected.

980:    Input Parameter:
981: +  pc - the preconditioner context

983:    Application Interface Routine: PCPreSolve()

985:    Notes:
986:      The interface routine PCPreSolve() is not usually called directly by
987:    the user, but instead is called by KSPSolve().
988: */
989: static PetscErrorCode PCPreSolve_BDDC(PC pc, KSP ksp, Vec rhs, Vec x)
990: {
992:   PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;
993:   PC_IS          *pcis = (PC_IS*)(pc->data);
994:   Vec            used_vec;
995:   PetscBool      copy_rhs = PETSC_TRUE;

998:   /* if we are working with CG, one Dirichlet solve can be avoided during the Krylov iterations */
999:   if (ksp) {
1000:     PetscBool iscg;
1001:     PetscObjectTypeCompare((PetscObject)ksp,KSPCG,&iscg);
1002:     if (!iscg) {
1003:       PCBDDCSetUseExactDirichlet(pc,PETSC_FALSE);
1004:     }
1005:   }
1006:   /* Creates parallel work vectors used in presolve */
1007:   if (!pcbddc->original_rhs) {
1008:     VecDuplicate(pcis->vec1_global,&pcbddc->original_rhs);
1009:   }
1010:   if (!pcbddc->temp_solution) {
1011:     VecDuplicate(pcis->vec1_global,&pcbddc->temp_solution);
1012:   }

1014:   if (x) {
1015:     PetscObjectReference((PetscObject)x);
1016:     used_vec = x;
1017:   } else { /* it can only happen when calling PCBDDCMatFETIDPGetRHS */
1018:     PetscObjectReference((PetscObject)pcbddc->temp_solution);
1019:     used_vec = pcbddc->temp_solution;
1020:     VecSet(used_vec,0.0);
1021:   }

1023:   /* hack into the KSP data structure, since PCPreSolve is called before the initial guess is zeroed in src/ksp/ksp/interface/itfunc.c */
1024:   if (ksp) {
1025:     /* store the flag for the initial guess since it will be restored back during PCPostSolve_BDDC */
1026:     KSPGetInitialGuessNonzero(ksp,&pcbddc->ksp_guess_nonzero);
1027:     if (!pcbddc->ksp_guess_nonzero) {
1028:       VecSet(used_vec,0.0);
1029:     }
1030:   }

1032:   pcbddc->rhs_change = PETSC_FALSE;

1034:   /* Take into account zeroed rows -> change rhs and store solution removed */
1035:   if (rhs) {
1036:     IS dirIS = NULL;

1038:     /* DirichletBoundariesLocal may not be consistent among neighbours; get a Dirichlet dofs IS from the graph (may be cached) */
1039:     PCBDDCGraphGetDirichletDofs(pcbddc->mat_graph,&dirIS);
1040:     if (dirIS) {
1041:       Mat_IS            *matis = (Mat_IS*)pc->pmat->data;
1042:       PetscInt          dirsize,i,*is_indices;
1043:       PetscScalar       *array_x;
1044:       const PetscScalar *array_diagonal;

1046:       MatGetDiagonal(pc->pmat,pcis->vec1_global);
1047:       VecPointwiseDivide(pcis->vec1_global,rhs,pcis->vec1_global);
1048:       VecScatterBegin(matis->rctx,pcis->vec1_global,pcis->vec2_N,INSERT_VALUES,SCATTER_FORWARD);
1049:       VecScatterEnd(matis->rctx,pcis->vec1_global,pcis->vec2_N,INSERT_VALUES,SCATTER_FORWARD);
1050:       VecScatterBegin(matis->rctx,used_vec,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);
1051:       VecScatterEnd(matis->rctx,used_vec,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);
1052:       ISGetLocalSize(dirIS,&dirsize);
1053:       VecGetArray(pcis->vec1_N,&array_x);
1054:       VecGetArrayRead(pcis->vec2_N,&array_diagonal);
1055:       ISGetIndices(dirIS,(const PetscInt**)&is_indices);
1056:       for (i=0; i<dirsize; i++) array_x[is_indices[i]] = array_diagonal[is_indices[i]];
1057:       ISRestoreIndices(dirIS,(const PetscInt**)&is_indices);
1058:       VecRestoreArrayRead(pcis->vec2_N,&array_diagonal);
1059:       VecRestoreArray(pcis->vec1_N,&array_x);
1060:       VecScatterBegin(matis->rctx,pcis->vec1_N,used_vec,INSERT_VALUES,SCATTER_REVERSE);
1061:       VecScatterEnd(matis->rctx,pcis->vec1_N,used_vec,INSERT_VALUES,SCATTER_REVERSE);
1062:       pcbddc->rhs_change = PETSC_TRUE;
1063:       ISDestroy(&dirIS);
1064:     }
1065:   }

1067:   /* remove the computed solution or the initial guess from the rhs */
1068:   if (pcbddc->rhs_change || (ksp && pcbddc->ksp_guess_nonzero) ) {
1069:     /* store the original rhs */
1070:     if (copy_rhs) {
1071:       VecCopy(rhs,pcbddc->original_rhs);
1072:       copy_rhs = PETSC_FALSE;
1073:     }
1074:     pcbddc->rhs_change = PETSC_TRUE;
1075:     VecScale(used_vec,-1.0);
1076:     MatMultAdd(pc->pmat,used_vec,rhs,rhs);
1077:     VecScale(used_vec,-1.0);
1078:     VecCopy(used_vec,pcbddc->temp_solution);
1079:     if (ksp) {
1080:       KSPSetInitialGuessNonzero(ksp,PETSC_FALSE);
1081:     }
1082:   }
1083:   VecDestroy(&used_vec);

1085:   /* store partially computed solution and set initial guess */
1086:   if (x && pcbddc->use_exact_dirichlet_trick) {
1087:     VecSet(x,0.0);
1088:     VecScatterBegin(pcis->global_to_D,rhs,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);
1089:     VecScatterEnd(pcis->global_to_D,rhs,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);
1090:     KSPSolve(pcbddc->ksp_D,pcis->vec1_D,pcis->vec2_D);
1091:     VecScatterBegin(pcis->global_to_D,pcis->vec2_D,x,INSERT_VALUES,SCATTER_REVERSE);
1092:     VecScatterEnd(pcis->global_to_D,pcis->vec2_D,x,INSERT_VALUES,SCATTER_REVERSE);
1093:     if (ksp) {
1094:       KSPSetInitialGuessNonzero(ksp,PETSC_TRUE);
1095:     }
1096:   }

1098:   if (pcbddc->ChangeOfBasisMatrix) {
1099:     PCBDDCChange_ctx change_ctx;

1101:     /* get change ctx */
1102:     MatShellGetContext(pcbddc->new_global_mat,&change_ctx);

1104:     /* set current iteration matrix inside change context (change of basis has been already set into the ctx during PCSetUp) */
1105:     MatDestroy(&change_ctx->original_mat);
1106:     PetscObjectReference((PetscObject)pc->mat);
1107:     change_ctx->original_mat = pc->mat;

1109:     /* change iteration matrix */
1110:     MatDestroy(&pc->mat);
1111:     PetscObjectReference((PetscObject)pcbddc->new_global_mat);
1112:     pc->mat = pcbddc->new_global_mat;

1114:     /* store the original rhs */
1115:     if (copy_rhs) {
1116:       VecCopy(rhs,pcbddc->original_rhs);
1117:       copy_rhs = PETSC_FALSE;
1118:     }

1120:     /* change rhs */
1121:     MatMultTranspose(change_ctx->global_change,rhs,pcis->vec1_global);
1122:     VecCopy(pcis->vec1_global,rhs);
1123:     pcbddc->rhs_change = PETSC_TRUE;
1124:   }

1126:   /* remove nullspace if present */
1127:   if (ksp && x && pcbddc->NullSpace) {
1128:     MatNullSpaceRemove(pcbddc->NullSpace,x);
1129:     /* store the original rhs */
1130:     if (copy_rhs) {
1131:       VecCopy(rhs,pcbddc->original_rhs);
1132:     }
1133:     pcbddc->rhs_change = PETSC_TRUE;
1134:     MatNullSpaceRemove(pcbddc->NullSpace,rhs);
1135:   }
1136:   return(0);
1137: }

1139: /* -------------------------------------------------------------------------- */
1142: /* -------------------------------------------------------------------------- */
1143: /*
1144:    PCPostSolve_BDDC - Changes the computed solution if a transformation of basis
1145:                      approach has been selected. Also, restores rhs to its original state.

1147:    Input Parameter:
1148: +  pc - the preconditioner context

1150:    Application Interface Routine: PCPostSolve()

1152:    Notes:
1153:      The interface routine PCPostSolve() is not usually called directly by
1154:      the user, but instead is called by KSPSolve().
1155: */
1156: static PetscErrorCode PCPostSolve_BDDC(PC pc, KSP ksp, Vec rhs, Vec x)
1157: {
1159:   PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;

1162:   if (pcbddc->ChangeOfBasisMatrix) {
1163:     PCBDDCChange_ctx change_ctx;

1165:     /* get change ctx */
1166:     MatShellGetContext(pcbddc->new_global_mat,&change_ctx);

1168:     /* restore iteration matrix */
1169:     MatDestroy(&pc->mat);
1170:     PetscObjectReference((PetscObject)change_ctx->original_mat);
1171:     pc->mat = change_ctx->original_mat;

1173:     /* get solution in original basis */
1174:     if (x) {
1175:       PC_IS *pcis = (PC_IS*)(pc->data);
1176:       MatMult(change_ctx->global_change,x,pcis->vec1_global);
1177:       VecCopy(pcis->vec1_global,x);
1178:     }
1179:   }

1181:   /* add solution removed in presolve */
1182:   if (x && pcbddc->rhs_change) {
1183:     VecAXPY(x,1.0,pcbddc->temp_solution);
1184:   }

1186:   /* restore rhs to its original state */
1187:   if (rhs && pcbddc->rhs_change) {
1188:     VecCopy(pcbddc->original_rhs,rhs);
1189:   }
1190:   pcbddc->rhs_change = PETSC_FALSE;

1192:   /* restore ksp guess state */
1193:   if (ksp) {
1194:     KSPSetInitialGuessNonzero(ksp,pcbddc->ksp_guess_nonzero);
1195:   }
1196:   return(0);
1197: }
1198: /* -------------------------------------------------------------------------- */
1201: /* -------------------------------------------------------------------------- */
1202: /*
1203:    PCSetUp_BDDC - Prepares for the use of the BDDC preconditioner
1204:                   by setting data structures and options.

1206:    Input Parameter:
1207: +  pc - the preconditioner context

1209:    Application Interface Routine: PCSetUp()

1211:    Notes:
1212:      The interface routine PCSetUp() is not usually called directly by
1213:      the user, but instead is called by PCApply() if necessary.
1214: */
1215: PetscErrorCode PCSetUp_BDDC(PC pc)
1216: {
1218:   PC_BDDC*       pcbddc = (PC_BDDC*)pc->data;
1219:   Mat_IS*        matis;
1220:   MatNullSpace   nearnullspace;
1221:   PetscInt       nrows,ncols;
1222:   PetscBool      computetopography,computesolvers,computesubschurs;
1223:   PetscBool      computeconstraintsmatrix;
1224:   PetscBool      new_nearnullspace_provided,ismatis;

1227:   PetscObjectTypeCompare((PetscObject)pc->pmat,MATIS,&ismatis);
1228:   if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"PCBDDC preconditioner requires matrix of type MATIS");
1229:   MatGetSize(pc->pmat,&nrows,&ncols);
1230:   if (nrows != ncols) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PCBDDC preconditioner requires a square preconditioning matrix");
1231:   matis = (Mat_IS*)pc->pmat->data;
1232:   /* the following lines of code should be replaced by a better logic between PCIS, PCNN, PCBDDC and other future nonoverlapping preconditioners */
1233:   /* For BDDC we need to define a local "Neumann" problem different from the one defined in PCISSetUp;
1234:      also, BDDC directly builds the Dirichlet problem */
1235:   /* split work */
1236:   if (pc->setupcalled) {
1237:     if (pc->flag == SAME_NONZERO_PATTERN) {
1238:       computetopography = PETSC_FALSE;
1239:       computesolvers = PETSC_TRUE;
1240:     } else { /* DIFFERENT_NONZERO_PATTERN */
1241:       computetopography = PETSC_TRUE;
1242:       computesolvers = PETSC_TRUE;
1243:     }
1244:   } else {
1245:     computetopography = PETSC_TRUE;
1246:     computesolvers = PETSC_TRUE;
1247:   }
1248:   if (pcbddc->recompute_topography) {
1249:     computetopography = PETSC_TRUE;
1250:   }
1251:   computeconstraintsmatrix = PETSC_FALSE;
1252:   if (pcbddc->adaptive_threshold > 0.0 && !pcbddc->use_deluxe_scaling) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot compute adaptive constraints without deluxe scaling. Rerun with -pc_bddc_use_deluxe_scaling");
1253:   pcbddc->adaptive_selection = (PetscBool)(pcbddc->adaptive_threshold > 0.0 && pcbddc->use_deluxe_scaling);
1254:   if (pcbddc->adaptive_selection) pcbddc->use_faces = PETSC_TRUE;

1256:   computesubschurs = (PetscBool)(pcbddc->adaptive_selection || pcbddc->use_deluxe_scaling);
1257:   if (pcbddc->faster_deluxe && pcbddc->adaptive_selection && pcbddc->use_change_of_basis) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot compute faster deluxe if adaptivity and change of basis are both requested. Rerun with -pc_bddc_deluxe_faster false");

1259:   /* Get stdout for dbg */
1260:   if (pcbddc->dbg_flag) {
1261:     if (!pcbddc->dbg_viewer) {
1262:       pcbddc->dbg_viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)pc));
1263:       PetscViewerASCIIPushSynchronized(pcbddc->dbg_viewer);
1264:     }
1265:     PetscViewerASCIIAddTab(pcbddc->dbg_viewer,2*pcbddc->current_level);
1266:   }

1268:   if (pcbddc->user_ChangeOfBasisMatrix) {
1269:     /* use_change_of_basis flag is used to automatically compute a change of basis from constraints */
1270:     pcbddc->use_change_of_basis = PETSC_FALSE;
1271:     PCBDDCComputeLocalMatrix(pc,pcbddc->user_ChangeOfBasisMatrix);
1272:   } else {
1273:     MatDestroy(&pcbddc->local_mat);
1274:     PetscObjectReference((PetscObject)matis->A);
1275:     pcbddc->local_mat = matis->A;
1276:   }

1278:   /* workaround for reals */
1279: #if !defined(PETSC_USE_COMPLEX)
1280:   if (matis->A->symmetric_set) {
1281:     MatSetOption(pcbddc->local_mat,MAT_HERMITIAN,matis->A->symmetric);
1282:   }
1283: #endif

1285:   /* Set up all the "iterative substructuring" common block without computing solvers */
1286:   {
1287:     Mat temp_mat;

1289:     temp_mat = matis->A;
1290:     matis->A = pcbddc->local_mat;
1291:     PCISSetUp(pc,PETSC_FALSE);
1292:     pcbddc->local_mat = matis->A;
1293:     matis->A = temp_mat;
1294:   }

1296:   /* Analyze interface and setup sub_schurs data */
1297:   if (computetopography) {
1298:     PCBDDCAnalyzeInterface(pc);
1299:     computeconstraintsmatrix = PETSC_TRUE;
1300:   }

1302:   /* Setup local dirichlet solver ksp_D and sub_schurs solvers */
1303:   if (computesolvers) {
1304:     PCBDDCSubSchurs sub_schurs=pcbddc->sub_schurs;

1306:     if (computesubschurs && computetopography) {
1307:       PCBDDCInitSubSchurs(pc);
1308:     }
1309:     if (sub_schurs->use_mumps) {
1310:       if (computesubschurs) {
1311:         PCBDDCSetUpSubSchurs(pc);
1312:       }
1313:       PCBDDCSetUpLocalSolvers(pc,PETSC_TRUE,PETSC_FALSE);
1314:     } else {
1315:       PCBDDCSetUpLocalSolvers(pc,PETSC_TRUE,PETSC_FALSE);
1316:       if (computesubschurs) {
1317:         PCBDDCSetUpSubSchurs(pc);
1318:       }
1319:     }
1320:     if (pcbddc->adaptive_selection) {
1321:       PCBDDCAdaptiveSelection(pc);
1322:       computeconstraintsmatrix = PETSC_TRUE;
1323:     }
1324:   }

1326:   /* infer if NullSpace object attached to Mat via MatSetNearNullSpace has changed */
1327:   new_nearnullspace_provided = PETSC_FALSE;
1328:   MatGetNearNullSpace(pc->pmat,&nearnullspace);
1329:   if (pcbddc->onearnullspace) { /* already used nearnullspace */
1330:     if (!nearnullspace) { /* near null space attached to mat has been destroyed */
1331:       new_nearnullspace_provided = PETSC_TRUE;
1332:     } else {
1333:       /* determine if the two nullspaces are different (should be lightweight) */
1334:       if (nearnullspace != pcbddc->onearnullspace) {
1335:         new_nearnullspace_provided = PETSC_TRUE;
1336:       } else { /* maybe the user has changed the content of the nearnullspace so check vectors ObjectStateId */
1337:         PetscInt         i;
1338:         const Vec        *nearnullvecs;
1339:         PetscObjectState state;
1340:         PetscInt         nnsp_size;
1341:         MatNullSpaceGetVecs(nearnullspace,NULL,&nnsp_size,&nearnullvecs);
1342:         for (i=0;i<nnsp_size;i++) {
1343:           PetscObjectStateGet((PetscObject)nearnullvecs[i],&state);
1344:           if (pcbddc->onearnullvecs_state[i] != state) {
1345:             new_nearnullspace_provided = PETSC_TRUE;
1346:             break;
1347:           }
1348:         }
1349:       }
1350:     }
1351:   } else {
1352:     if (!nearnullspace) { /* both nearnullspaces are null */
1353:       new_nearnullspace_provided = PETSC_FALSE;
1354:     } else { /* nearnullspace attached later */
1355:       new_nearnullspace_provided = PETSC_TRUE;
1356:     }
1357:   }

1359:   /* Setup constraints and related work vectors */
1360:   /* reset primal space flags */
1361:   pcbddc->new_primal_space = PETSC_FALSE;
1362:   pcbddc->new_primal_space_local = PETSC_FALSE;
1363:   if (computeconstraintsmatrix || new_nearnullspace_provided) {
1364:     /* It also sets the primal space flags */
1365:     PCBDDCConstraintsSetUp(pc);
1366:     /* Allocate needed local vectors (which depends on quantities defined during ConstraintsSetUp) */
1367:     PCBDDCSetUpLocalWorkVectors(pc);
1368:   }

1370:   if (computesolvers || pcbddc->new_primal_space) {
1371:     if (pcbddc->use_change_of_basis) {
1372:       PC_IS *pcis = (PC_IS*)(pc->data);

1374:       PCBDDCComputeLocalMatrix(pc,pcbddc->ChangeOfBasisMatrix);
1375:       /* get submatrices */
1376:       MatDestroy(&pcis->A_IB);
1377:       MatDestroy(&pcis->A_BI);
1378:       MatDestroy(&pcis->A_BB);
1379:       MatGetSubMatrix(pcbddc->local_mat,pcis->is_B_local,pcis->is_B_local,MAT_INITIAL_MATRIX,&pcis->A_BB);
1380:       MatGetSubMatrix(pcbddc->local_mat,pcis->is_I_local,pcis->is_B_local,MAT_INITIAL_MATRIX,&pcis->A_IB);
1381:       MatGetSubMatrix(pcbddc->local_mat,pcis->is_B_local,pcis->is_I_local,MAT_INITIAL_MATRIX,&pcis->A_BI);
1382:       /* set flag in pcis to not reuse submatrices during PCISCreate */
1383:       pcis->reusesubmatrices = PETSC_FALSE;
1384:     } else if (!pcbddc->user_ChangeOfBasisMatrix) {
1385:       MatDestroy(&pcbddc->local_mat);
1386:       PetscObjectReference((PetscObject)matis->A);
1387:       pcbddc->local_mat = matis->A;
1388:     }
1389:     /* SetUp coarse and local Neumann solvers */
1390:     PCBDDCSetUpSolvers(pc);
1391:     /* SetUp Scaling operator */
1392:     PCBDDCScalingSetUp(pc);
1393:   }

1395:   if (pcbddc->dbg_flag) {
1396:     PetscViewerASCIISubtractTab(pcbddc->dbg_viewer,2*pcbddc->current_level);
1397:   }
1398:   return(0);
1399: }
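/*
   Usage sketch at the KSP level (illustrative): PCSetUp_BDDC above requires the
   preconditioning matrix to be a MATIS (e.g. created with MatCreateIS or by a DM).
   Here A is assumed to be an already assembled MATIS matrix and b, x compatible Vecs;
   the assembly of A is not shown.

     KSP ksp;
     PC  pc;
     KSPCreate(PetscObjectComm((PetscObject)A),&ksp);
     KSPSetOperators(ksp,A,A);
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCBDDC);
     KSPSetFromOptions(ksp);
     KSPSolve(ksp,b,x);
*/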

1401: /* -------------------------------------------------------------------------- */
1402: /*
1403:    PCApply_BDDC - Applies the BDDC operator to a vector.

1405:    Input Parameters:
1406: +  pc - the preconditioner context
1407: -  r - input vector (global)

1409:    Output Parameter:
1410: .  z - output vector (global)

1412:    Application Interface Routine: PCApply()
1413:  */
1416: PetscErrorCode PCApply_BDDC(PC pc,Vec r,Vec z)
1417: {
1418:   PC_IS             *pcis = (PC_IS*)(pc->data);
1419:   PC_BDDC           *pcbddc = (PC_BDDC*)(pc->data);
1420:   PetscInt          n_B = pcis->n_B, n_D = pcis->n - n_B;
1421:   PetscErrorCode    ierr;
1422:   const PetscScalar one = 1.0;
1423:   const PetscScalar m_one = -1.0;
1424:   const PetscScalar zero = 0.0;

1426: /* This code is similar to that provided in nn.c for PCNN
1427:    NN interface preconditioner changed to BDDC
1428:    Added support for M_3 preconditioner in the reference article (code is active if pcbddc->switch_static == PETSC_TRUE) */

1431:   if (!pcbddc->use_exact_dirichlet_trick) {
1432:     VecCopy(r,z);
1433:     /* First Dirichlet solve */
1434:     VecScatterBegin(pcis->global_to_D,r,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);
1435:     VecScatterEnd(pcis->global_to_D,r,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);
1436:     /*
1437:       Assembling right hand side for BDDC operator
1438:       - pcis->vec1_D for the Dirichlet part (if needed, i.e. pcbddc->switch_static == PETSC_TRUE)
1439:       - pcis->vec1_B the interface part of the global vector z
1440:     */
1441:     if (n_D) {
1442:       KSPSolve(pcbddc->ksp_D,pcis->vec1_D,pcis->vec2_D);
1443:       VecScale(pcis->vec2_D,m_one);
1444:       if (pcbddc->switch_static) { MatMultAdd(pcis->A_II,pcis->vec2_D,pcis->vec1_D,pcis->vec1_D); }
1445:       MatMult(pcis->A_BI,pcis->vec2_D,pcis->vec1_B);
1446:     } else {
1447:       VecSet(pcis->vec1_B,zero);
1448:     }
1449:     VecScatterBegin(pcis->global_to_B,pcis->vec1_B,z,ADD_VALUES,SCATTER_REVERSE);
1450:     VecScatterEnd(pcis->global_to_B,pcis->vec1_B,z,ADD_VALUES,SCATTER_REVERSE);
1451:     PCBDDCScalingRestriction(pc,z,pcis->vec1_B);
1452:   } else {
1453:     if (pcbddc->switch_static) {
1454:       VecSet(pcis->vec1_D,zero);
1455:     }
1456:     PCBDDCScalingRestriction(pc,r,pcis->vec1_B);
1457:   }

1459:   /* Apply interface preconditioner
1460:      input/output vecs: pcis->vec1_B and pcis->vec1_D */
1461:   PCBDDCApplyInterfacePreconditioner(pc,PETSC_FALSE);

1463:   /* Apply transpose of partition of unity operator */
1464:   PCBDDCScalingExtension(pc,pcis->vec1_B,z);

1466:   /* Second Dirichlet solve and assembling of output */
1467:   VecScatterBegin(pcis->global_to_B,z,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);
1468:   VecScatterEnd(pcis->global_to_B,z,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);
1469:   if (n_B) {
1470:     MatMult(pcis->A_IB,pcis->vec1_B,pcis->vec3_D);
1471:     if (pcbddc->switch_static) { MatMultAdd(pcis->A_II,pcis->vec1_D,pcis->vec3_D,pcis->vec3_D); }
1472:   } else if (pcbddc->switch_static) {
1473:     MatMult(pcis->A_II,pcis->vec1_D,pcis->vec3_D);
1474:   }
1475:   KSPSolve(pcbddc->ksp_D,pcis->vec3_D,pcis->vec4_D);
1476:   if (!pcbddc->use_exact_dirichlet_trick) {
1477:     if (pcbddc->switch_static) {
1478:       VecAXPBYPCZ(pcis->vec2_D,m_one,one,m_one,pcis->vec4_D,pcis->vec1_D);
1479:     } else {
1480:       VecAXPBY(pcis->vec2_D,m_one,m_one,pcis->vec4_D);
1481:     }
1482:     VecScatterBegin(pcis->global_to_D,pcis->vec2_D,z,INSERT_VALUES,SCATTER_REVERSE);
1483:     VecScatterEnd(pcis->global_to_D,pcis->vec2_D,z,INSERT_VALUES,SCATTER_REVERSE);
1484:   } else {
1485:     if (pcbddc->switch_static) {
1486:       VecAXPBY(pcis->vec4_D,one,m_one,pcis->vec1_D);
1487:     } else {
1488:       VecScale(pcis->vec4_D,m_one);
1489:     }
1490:     VecScatterBegin(pcis->global_to_D,pcis->vec4_D,z,INSERT_VALUES,SCATTER_REVERSE);
1491:     VecScatterEnd(pcis->global_to_D,pcis->vec4_D,z,INSERT_VALUES,SCATTER_REVERSE);
1492:   }
1493:   return(0);
1494: }

1496: /* -------------------------------------------------------------------------- */
1497: /*
1498:    PCApplyTranspose_BDDC - Applies the transpose of the BDDC operator to a vector.

1500:    Input Parameters:
1501: +  pc - the preconditioner context
1502: -  r - input vector (global)

1504:    Output Parameter:
1505: .  z - output vector (global)

1507:    Application Interface Routine: PCApplyTranspose()
1508:  */
1511: PetscErrorCode PCApplyTranspose_BDDC(PC pc,Vec r,Vec z)
1512: {
1513:   PC_IS             *pcis = (PC_IS*)(pc->data);
1514:   PC_BDDC           *pcbddc = (PC_BDDC*)(pc->data);
1515:   PetscInt          n_B = pcis->n_B, n_D = pcis->n - n_B;
1516:   PetscErrorCode    ierr;
1517:   const PetscScalar one = 1.0;
1518:   const PetscScalar m_one = -1.0;
1519:   const PetscScalar zero = 0.0;

1522:   if (!pcbddc->use_exact_dirichlet_trick) {
1523:     VecCopy(r,z);
1524:     /* First Dirichlet solve */
1525:     VecScatterBegin(pcis->global_to_D,r,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);
1526:     VecScatterEnd(pcis->global_to_D,r,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);
1527:     /*
1528:       Assembling right hand side for BDDC operator
1529:       - pcis->vec1_D for the Dirichlet part (if needed, i.e. pcbddc->switch_static == PETSC_TRUE)
1530:       - pcis->vec1_B the interface part of the global vector z
1531:     */
1532:     if (n_D) {
1533:       KSPSolveTranspose(pcbddc->ksp_D,pcis->vec1_D,pcis->vec2_D);
1534:       VecScale(pcis->vec2_D,m_one);
1535:       if (pcbddc->switch_static) { MatMultTransposeAdd(pcis->A_II,pcis->vec2_D,pcis->vec1_D,pcis->vec1_D); }
1536:       MatMultTranspose(pcis->A_IB,pcis->vec2_D,pcis->vec1_B);
1537:     } else {
1538:       VecSet(pcis->vec1_B,zero);
1539:     }
1540:     VecScatterBegin(pcis->global_to_B,pcis->vec1_B,z,ADD_VALUES,SCATTER_REVERSE);
1541:     VecScatterEnd(pcis->global_to_B,pcis->vec1_B,z,ADD_VALUES,SCATTER_REVERSE);
1542:     PCBDDCScalingRestriction(pc,z,pcis->vec1_B);
1543:   } else {
1544:     if (pcbddc->switch_static) {
1545:       VecSet(pcis->vec1_D,zero);
1546:     }
1547:     PCBDDCScalingRestriction(pc,r,pcis->vec1_B);
1548:   }

1550:   /* Apply interface preconditioner
1551:      input/output vecs: pcis->vec1_B and pcis->vec1_D */
1552:   PCBDDCApplyInterfacePreconditioner(pc,PETSC_TRUE);

1554:   /* Apply transpose of partition of unity operator */
1555:   PCBDDCScalingExtension(pc,pcis->vec1_B,z);

1557:   /* Second Dirichlet solve and assembling of output */
1558:   VecScatterBegin(pcis->global_to_B,z,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);
1559:   VecScatterEnd(pcis->global_to_B,z,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);
1560:   if (n_B) {
1561:     MatMultTranspose(pcis->A_BI,pcis->vec1_B,pcis->vec3_D);
1562:     if (pcbddc->switch_static) { MatMultTransposeAdd(pcis->A_II,pcis->vec1_D,pcis->vec3_D,pcis->vec3_D); }
1563:   } else if (pcbddc->switch_static) {
1564:     MatMultTranspose(pcis->A_II,pcis->vec1_D,pcis->vec3_D);
1565:   }
1566:   KSPSolveTranspose(pcbddc->ksp_D,pcis->vec3_D,pcis->vec4_D);
1567:   if (!pcbddc->use_exact_dirichlet_trick) {
1568:     if (pcbddc->switch_static) {
1569:       VecAXPBYPCZ(pcis->vec2_D,m_one,one,m_one,pcis->vec4_D,pcis->vec1_D);
1570:     } else {
1571:       VecAXPBY(pcis->vec2_D,m_one,m_one,pcis->vec4_D);
1572:     }
1573:     VecScatterBegin(pcis->global_to_D,pcis->vec2_D,z,INSERT_VALUES,SCATTER_REVERSE);
1574:     VecScatterEnd(pcis->global_to_D,pcis->vec2_D,z,INSERT_VALUES,SCATTER_REVERSE);
1575:   } else {
1576:     if (pcbddc->switch_static) {
1577:       VecAXPBY(pcis->vec4_D,one,m_one,pcis->vec1_D);
1578:     } else {
1579:       VecScale(pcis->vec4_D,m_one);
1580:     }
1581:     VecScatterBegin(pcis->global_to_D,pcis->vec4_D,z,INSERT_VALUES,SCATTER_REVERSE);
1582:     VecScatterEnd(pcis->global_to_D,pcis->vec4_D,z,INSERT_VALUES,SCATTER_REVERSE);
1583:   }
1584:   return(0);
1585: }
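
/*
   Illustrative usage sketch (not part of the PETSc sources): PCApplyTranspose_BDDC is
   reached through the generic PCApplyTranspose()/KSPSolveTranspose() interface.
   The helper below assumes A is an assembled MATIS matrix and b, x are compatible
   global vectors; the function name is made up for illustration only.
*/
static PetscErrorCode ExampleSolveTransposeWithBDDC(Mat A, Vec b, Vec x)
{
  KSP ksp;
  PC  pc;

  KSPCreate(PetscObjectComm((PetscObject)A),&ksp);
  KSPSetOperators(ksp,A,A);
  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCBDDC);
  KSPSetFromOptions(ksp);
  /* the transposed solve triggers PCApplyTranspose(), hence the routine above */
  KSPSolveTranspose(ksp,b,x);
  KSPDestroy(&ksp);
  return(0);
}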
1586: /* -------------------------------------------------------------------------- */

1590: PetscErrorCode PCDestroy_BDDC(PC pc)
1591: {
1592:   PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;

1596:   /* free data created by PCIS */
1597:   PCISDestroy(pc);
1598:   /* free BDDC custom data  */
1599:   PCBDDCResetCustomization(pc);
1600:   /* destroy objects related to topography */
1601:   PCBDDCResetTopography(pc);
1602:   /* free allocated graph structure */
1603:   PetscFree(pcbddc->mat_graph);
1604:   /* free allocated sub schurs structure */
1605:   PetscFree(pcbddc->sub_schurs);
1606:   /* destroy objects for scaling operator */
1607:   PCBDDCScalingDestroy(pc);
1608:   PetscFree(pcbddc->deluxe_ctx);
1610:   /* free solver data */
1610:   PCBDDCResetSolvers(pc);
1611:   /* free global vectors needed in presolve */
1612:   VecDestroy(&pcbddc->temp_solution);
1613:   VecDestroy(&pcbddc->original_rhs);
1614:   /* free stuff for change of basis hooks */
1615:   if (pcbddc->new_global_mat) {
1616:     PCBDDCChange_ctx change_ctx;
1617:     MatShellGetContext(pcbddc->new_global_mat,&change_ctx);
1618:     MatDestroy(&change_ctx->original_mat);
1619:     MatDestroy(&change_ctx->global_change);
1620:     VecDestroyVecs(2,&change_ctx->work);
1621:     PetscFree(change_ctx);
1622:   }
1623:   MatDestroy(&pcbddc->new_global_mat);
1624:   /* remove functions */
1625:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetChangeOfBasisMat_C",NULL);
1626:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetPrimalVerticesLocalIS_C",NULL);
1627:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetCoarseningRatio_C",NULL);
1628:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetLevel_C",NULL);
1629:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetUseExactDirichlet_C",NULL);
1630:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetLevels_C",NULL);
1631:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNullSpace_C",NULL);
1632:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDirichletBoundaries_C",NULL);
1633:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDirichletBoundariesLocal_C",NULL);
1634:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNeumannBoundaries_C",NULL);
1635:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNeumannBoundariesLocal_C",NULL);
1636:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetDirichletBoundaries_C",NULL);
1637:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetDirichletBoundariesLocal_C",NULL);
1638:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetNeumannBoundaries_C",NULL);
1639:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetNeumannBoundariesLocal_C",NULL);
1640:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDofsSplitting_C",NULL);
1641:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDofsSplittingLocal_C",NULL);
1642:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetLocalAdjacencyGraph_C",NULL);
1643:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCCreateFETIDPOperators_C",NULL);
1644:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetRHS_C",NULL);
1645:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetSolution_C",NULL);
1646:   PetscObjectComposeFunction((PetscObject)pc,"PCPreSolveChangeRHS_C",NULL);
1647:   /* Free the private data structure */
1648:   PetscFree(pc->data);
1649:   return(0);
1650: }
1651: /* -------------------------------------------------------------------------- */

1655: static PetscErrorCode PCPreSolveChangeRHS_BDDC(PC pc, PetscBool* change)
1656: {
1658:   *change = PETSC_TRUE;
1659:   return(0);
1660: }

1664: static PetscErrorCode PCBDDCMatFETIDPGetRHS_BDDC(Mat fetidp_mat, Vec standard_rhs, Vec fetidp_flux_rhs)
1665: {
1666:   FETIDPMat_ctx  mat_ctx;
1667:   Vec            copy_standard_rhs;
1668:   PC_IS*         pcis;
1669:   PC_BDDC*       pcbddc;

1673:   MatShellGetContext(fetidp_mat,&mat_ctx);
1674:   pcis = (PC_IS*)mat_ctx->pc->data;
1675:   pcbddc = (PC_BDDC*)mat_ctx->pc->data;

1677:   /*
1678:      change of basis for the physical rhs if needed
1679:      It also changes the rhs in the case of Dirichlet boundaries
1680:      TODO: better management when FETIDP has its own class
1681:   */
1682:   VecDuplicate(standard_rhs,&copy_standard_rhs);
1683:   VecCopy(standard_rhs,copy_standard_rhs);
1684:   PCPreSolve_BDDC(mat_ctx->pc,NULL,copy_standard_rhs,NULL);
1685:   /* store vectors for computation of fetidp final solution */
1686:   VecScatterBegin(pcis->global_to_D,copy_standard_rhs,mat_ctx->temp_solution_D,INSERT_VALUES,SCATTER_FORWARD);
1687:   VecScatterEnd(pcis->global_to_D,copy_standard_rhs,mat_ctx->temp_solution_D,INSERT_VALUES,SCATTER_FORWARD);
1688:   /* scale rhs since it should be unassembled */
1689:   /* TODO use counter scaling? (also below) */
1690:   VecScatterBegin(pcis->global_to_B,copy_standard_rhs,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD);
1691:   VecScatterEnd(pcis->global_to_B,copy_standard_rhs,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD);
1692:   /* Apply partition of unity */
1693:   VecPointwiseMult(mat_ctx->temp_solution_B,pcis->D,mat_ctx->temp_solution_B);
1694:   /* PCBDDCScalingRestriction(mat_ctx->pc,copy_standard_rhs,mat_ctx->temp_solution_B); */
1695:   if (!pcbddc->switch_static) {
1696:     /* compute partially subassembled Schur complement right-hand side */
1697:     KSPSolve(pcbddc->ksp_D,mat_ctx->temp_solution_D,pcis->vec1_D);
1698:     MatMult(pcis->A_BI,pcis->vec1_D,pcis->vec1_B);
1699:     VecAXPY(mat_ctx->temp_solution_B,-1.0,pcis->vec1_B);
1700:     VecSet(copy_standard_rhs,0.0);
1701:     VecScatterBegin(pcis->global_to_B,mat_ctx->temp_solution_B,copy_standard_rhs,ADD_VALUES,SCATTER_REVERSE);
1702:     VecScatterEnd(pcis->global_to_B,mat_ctx->temp_solution_B,copy_standard_rhs,ADD_VALUES,SCATTER_REVERSE);
1703:     /* PCBDDCScalingRestriction(mat_ctx->pc,copy_standard_rhs,mat_ctx->temp_solution_B); */
1704:     VecScatterBegin(pcis->global_to_B,copy_standard_rhs,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD);
1705:     VecScatterEnd(pcis->global_to_B,copy_standard_rhs,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD);
1706:     VecPointwiseMult(mat_ctx->temp_solution_B,pcis->D,mat_ctx->temp_solution_B);
1707:   }
1708:   VecDestroy(&copy_standard_rhs);
1709:   /* BDDC rhs */
1710:   VecCopy(mat_ctx->temp_solution_B,pcis->vec1_B);
1711:   if (pcbddc->switch_static) {
1712:     VecCopy(mat_ctx->temp_solution_D,pcis->vec1_D);
1713:   }
1714:   /* apply BDDC */
1715:   PCBDDCApplyInterfacePreconditioner(mat_ctx->pc,PETSC_FALSE);
1716:   /* Application of B_delta and assembling of rhs for fetidp fluxes */
1717:   VecSet(fetidp_flux_rhs,0.0);
1718:   MatMult(mat_ctx->B_delta,pcis->vec1_B,mat_ctx->lambda_local);
1719:   VecScatterBegin(mat_ctx->l2g_lambda,mat_ctx->lambda_local,fetidp_flux_rhs,ADD_VALUES,SCATTER_FORWARD);
1720:   VecScatterEnd(mat_ctx->l2g_lambda,mat_ctx->lambda_local,fetidp_flux_rhs,ADD_VALUES,SCATTER_FORWARD);
1721:   return(0);
1722: }

1726: /*@
1727:  PCBDDCMatFETIDPGetRHS - Compute the right-hand side for the FETI-DP linear system from the physical right-hand side

1729:    Collective

1731:    Input Parameters:
1732: +  fetidp_mat      - the FETI-DP matrix object obtained by a call to PCBDDCCreateFETIDPOperators
1733: -  standard_rhs    - the right-hand side of the original linear system

1735:    Output Parameters:
1736: .  fetidp_flux_rhs - the right-hand side for the FETI-DP linear system

1738:    Level: developer

1740:    Notes:

1742: .seealso: PCBDDC, PCBDDCCreateFETIDPOperators, PCBDDCMatFETIDPGetSolution
1743: @*/
1744: PetscErrorCode PCBDDCMatFETIDPGetRHS(Mat fetidp_mat, Vec standard_rhs, Vec fetidp_flux_rhs)
1745: {
1746:   FETIDPMat_ctx  mat_ctx;

1750:   MatShellGetContext(fetidp_mat,&mat_ctx);
1751:   PetscUseMethod(mat_ctx->pc,"PCBDDCMatFETIDPGetRHS_C",(Mat,Vec,Vec),(fetidp_mat,standard_rhs,fetidp_flux_rhs));
1752:   return(0);
1753: }
1754: /* -------------------------------------------------------------------------- */

1758: static PetscErrorCode PCBDDCMatFETIDPGetSolution_BDDC(Mat fetidp_mat, Vec fetidp_flux_sol, Vec standard_sol)
1759: {
1760:   FETIDPMat_ctx  mat_ctx;
1761:   PC_IS*         pcis;
1762:   PC_BDDC*       pcbddc;

1766:   MatShellGetContext(fetidp_mat,&mat_ctx);
1767:   pcis = (PC_IS*)mat_ctx->pc->data;
1768:   pcbddc = (PC_BDDC*)mat_ctx->pc->data;

1770:   /* apply B_delta^T */
1771:   VecScatterBegin(mat_ctx->l2g_lambda,fetidp_flux_sol,mat_ctx->lambda_local,INSERT_VALUES,SCATTER_REVERSE);
1772:   VecScatterEnd  (mat_ctx->l2g_lambda,fetidp_flux_sol,mat_ctx->lambda_local,INSERT_VALUES,SCATTER_REVERSE);
1773:   MatMultTranspose(mat_ctx->B_delta,mat_ctx->lambda_local,pcis->vec1_B);
1774:   /* compute rhs for BDDC application */
1775:   VecAYPX(pcis->vec1_B,-1.0,mat_ctx->temp_solution_B);
1776:   if (pcbddc->switch_static) {
1777:     VecCopy(mat_ctx->temp_solution_D,pcis->vec1_D);
1778:   }
1779:   /* apply BDDC */
1780:   PCBDDCApplyInterfacePreconditioner(mat_ctx->pc,PETSC_FALSE);
1781:   /* put values into standard global vector */
1782:   VecScatterBegin(pcis->global_to_B,pcis->vec1_B,standard_sol,INSERT_VALUES,SCATTER_REVERSE);
1783:   VecScatterEnd  (pcis->global_to_B,pcis->vec1_B,standard_sol,INSERT_VALUES,SCATTER_REVERSE);
1784:   if (!pcbddc->switch_static) {
1785:     /* recover the interior values when the partially subassembled Schur complement has been solved for */
1786:     MatMult(pcis->A_IB,pcis->vec1_B,pcis->vec1_D);
1787:     VecAXPY(mat_ctx->temp_solution_D,-1.0,pcis->vec1_D);
1788:     KSPSolve(pcbddc->ksp_D,mat_ctx->temp_solution_D,pcis->vec1_D);
1789:   }
1790:   VecScatterBegin(pcis->global_to_D,pcis->vec1_D,standard_sol,INSERT_VALUES,SCATTER_REVERSE);
1791:   VecScatterEnd  (pcis->global_to_D,pcis->vec1_D,standard_sol,INSERT_VALUES,SCATTER_REVERSE);
1792:   /* final change of basis if needed
1793:      It also sums the Dirichlet part removed during RHS assembly */
1794:   PCPostSolve_BDDC(mat_ctx->pc,NULL,NULL,standard_sol);
1795:   return(0);
1796: }

1800: /*@
1801:  PCBDDCMatFETIDPGetSolution - Compute the physical solution using the solution of the FETI-DP linear system

1803:    Collective

1805:    Input Parameters:
1806: +  fetidp_mat      - the FETI-DP matrix obtained by a call to PCBDDCCreateFETIDPOperators
1807: -  fetidp_flux_sol - the solution of the FETI-DP linear system

1809:    Output Parameters:
1810: .  standard_sol    - the solution defined on the physical domain

1812:    Level: developer

1814:    Notes:

1816: .seealso: PCBDDC, PCBDDCCreateFETIDPOperators, PCBDDCMatFETIDPGetRHS
1817: @*/
1818: PetscErrorCode PCBDDCMatFETIDPGetSolution(Mat fetidp_mat, Vec fetidp_flux_sol, Vec standard_sol)
1819: {
1820:   FETIDPMat_ctx  mat_ctx;

1824:   MatShellGetContext(fetidp_mat,&mat_ctx);
1825:   PetscUseMethod(mat_ctx->pc,"PCBDDCMatFETIDPGetSolution_C",(Mat,Vec,Vec),(fetidp_mat,fetidp_flux_sol,standard_sol));
1826:   return(0);
1827: }
1828: /* -------------------------------------------------------------------------- */

1830: extern PetscErrorCode FETIDPMatMult(Mat,Vec,Vec);
1831: extern PetscErrorCode FETIDPMatMultTranspose(Mat,Vec,Vec);
1832: extern PetscErrorCode PCBDDCDestroyFETIDPMat(Mat);
1833: extern PetscErrorCode FETIDPPCApply(PC,Vec,Vec);
1834: extern PetscErrorCode FETIDPPCApplyTranspose(PC,Vec,Vec);
1835: extern PetscErrorCode PCBDDCDestroyFETIDPPC(PC);

1839: static PetscErrorCode PCBDDCCreateFETIDPOperators_BDDC(PC pc, Mat *fetidp_mat, PC *fetidp_pc)
1840: {

1842:   FETIDPMat_ctx  fetidpmat_ctx;
1843:   Mat            newmat;
1844:   FETIDPPC_ctx   fetidppc_ctx;
1845:   PC             newpc;
1846:   MPI_Comm       comm;

1850:   PetscObjectGetComm((PetscObject)pc,&comm);
1851:   /* FETIDP linear matrix */
1852:   PCBDDCCreateFETIDPMatContext(pc,&fetidpmat_ctx);
1853:   PCBDDCSetupFETIDPMatContext(fetidpmat_ctx);
1854:   MatCreateShell(comm,PETSC_DECIDE,PETSC_DECIDE,fetidpmat_ctx->n_lambda,fetidpmat_ctx->n_lambda,fetidpmat_ctx,&newmat);
1855:   MatShellSetOperation(newmat,MATOP_MULT,(void (*)(void))FETIDPMatMult);
1856:   MatShellSetOperation(newmat,MATOP_MULT_TRANSPOSE,(void (*)(void))FETIDPMatMultTranspose);
1857:   MatShellSetOperation(newmat,MATOP_DESTROY,(void (*)(void))PCBDDCDestroyFETIDPMat);
1858:   MatSetUp(newmat);
1859:   /* FETIDP preconditioner */
1860:   PCBDDCCreateFETIDPPCContext(pc,&fetidppc_ctx);
1861:   PCBDDCSetupFETIDPPCContext(newmat,fetidppc_ctx);
1862:   PCCreate(comm,&newpc);
1863:   PCSetType(newpc,PCSHELL);
1864:   PCShellSetContext(newpc,fetidppc_ctx);
1865:   PCShellSetApply(newpc,FETIDPPCApply);
1866:   PCShellSetApplyTranspose(newpc,FETIDPPCApplyTranspose);
1867:   PCShellSetDestroy(newpc,PCBDDCDestroyFETIDPPC);
1868:   PCSetOperators(newpc,newmat,newmat);
1869:   PCSetUp(newpc);
1870:   /* return pointers for objects created */
1871:   *fetidp_mat=newmat;
1872:   *fetidp_pc=newpc;
1873:   return(0);
1874: }
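
/*
   Illustrative sketch (not part of the PETSc sources): the routine above wires existing
   callbacks into MATSHELL/PCSHELL objects. Below is a self-contained example of the same
   MATSHELL pattern with a trivial context holding a scaling factor; the struct, callback
   and constructor names are made up for illustration only.
*/
typedef struct { PetscScalar alpha; } ExampleShellCtx;

static PetscErrorCode ExampleShellMult(Mat A, Vec x, Vec y)
{
  ExampleShellCtx *ctx;

  MatShellGetContext(A,&ctx);
  VecCopy(x,y);
  VecScale(y,ctx->alpha); /* y = alpha*x */
  return(0);
}

static PetscErrorCode ExampleCreateShell(MPI_Comm comm, PetscInt n, PetscScalar alpha, Mat *A)
{
  ExampleShellCtx *ctx;

  PetscNew(&ctx);
  ctx->alpha = alpha;
  MatCreateShell(comm,PETSC_DECIDE,PETSC_DECIDE,n,n,ctx,A);
  MatShellSetOperation(*A,MATOP_MULT,(void (*)(void))ExampleShellMult);
  /* a complete implementation would also set MATOP_DESTROY to free ctx, as done above */
  MatSetUp(*A);
  return(0);
}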

1878: /*@
1879:  PCBDDCCreateFETIDPOperators - Create FETI-DP operators

1881:    Collective

1883:    Input Parameters:
1884: .  pc - the BDDC preconditioning context (PCSetUp() must have been called beforehand)

1886:    Output Parameters:
1887: +  fetidp_mat - shell FETI-DP matrix object
1888: -  fetidp_pc  - shell Dirichlet preconditioner for FETI-DP matrix

1890:    Options Database Keys:
1891: .    -fetidp_fullyredundant <false> - whether to use a fully redundant set of Lagrange multipliers

1893:    Level: developer

1895:    Notes:
1896:      Currently the only operations provided for the FETI-DP matrix are MatMult and MatMultTranspose

1898: .seealso: PCBDDC, PCBDDCMatFETIDPGetRHS, PCBDDCMatFETIDPGetSolution
1899: @*/
1900: PetscErrorCode PCBDDCCreateFETIDPOperators(PC pc, Mat *fetidp_mat, PC *fetidp_pc)
1901: {

1906:   if (pc->setupcalled) {
1907:     PetscUseMethod(pc,"PCBDDCCreateFETIDPOperators_C",(PC,Mat*,PC*),(pc,fetidp_mat,fetidp_pc));
1908:   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"You must call PCSetUp() first");
1909:   return(0);
1910: }
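
/*
   Illustrative sketch (not part of the PETSc sources): a typical FETI-DP workflow built on
   the routines above. It assumes bddc_pc is a PCBDDC preconditioner that has already been
   set up on a MATIS operator and that rhs/sol are compatible global vectors; the variable
   and function names are made up for illustration only.
*/
static PetscErrorCode ExampleSolveWithFETIDP(PC bddc_pc, Vec rhs, Vec sol)
{
  Mat fetidp_mat;
  PC  fetidp_pc;
  KSP fetidp_ksp;
  Vec flux_rhs,flux_sol;

  /* create the shell FETI-DP matrix and its Dirichlet preconditioner */
  PCBDDCCreateFETIDPOperators(bddc_pc,&fetidp_mat,&fetidp_pc);
  MatCreateVecs(fetidp_mat,&flux_sol,&flux_rhs);
  /* map the physical right-hand side to the space of Lagrange multipliers */
  PCBDDCMatFETIDPGetRHS(fetidp_mat,rhs,flux_rhs);
  /* solve the FETI-DP system for the fluxes */
  KSPCreate(PetscObjectComm((PetscObject)fetidp_mat),&fetidp_ksp);
  KSPSetOperators(fetidp_ksp,fetidp_mat,fetidp_mat);
  KSPSetPC(fetidp_ksp,fetidp_pc);
  KSPSetFromOptions(fetidp_ksp);
  KSPSolve(fetidp_ksp,flux_rhs,flux_sol);
  /* map the fluxes back to the physical solution */
  PCBDDCMatFETIDPGetSolution(fetidp_mat,flux_sol,sol);
  KSPDestroy(&fetidp_ksp);
  VecDestroy(&flux_rhs);
  VecDestroy(&flux_sol);
  MatDestroy(&fetidp_mat);
  PCDestroy(&fetidp_pc);
  return(0);
}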
1911: /* -------------------------------------------------------------------------- */
1912: /*MC
1913:    PCBDDC - Balancing Domain Decomposition by Constraints.

1915:    An implementation of the BDDC preconditioner based on

1917: .vb
1918:    [1] C. R. Dohrmann. "An approximate BDDC preconditioner", Numerical Linear Algebra with Applications Volume 14, Issue 2, pages 149-168, March 2007
1919:    [2] A. Klawonn and O. B. Widlund. "Dual-Primal FETI Methods for Linear Elasticity", http://cs.nyu.edu/csweb/Research/TechReports/TR2004-855/TR2004-855.pdf
1920:    [3] J. Mandel, B. Sousedik, C. R. Dohrmann. "Multispace and Multilevel BDDC", http://arxiv.org/abs/0712.3977
1921:    [4] C. Pechstein and C. R. Dohrmann. "Modern domain decomposition methods BDDC, deluxe scaling, and an algebraic approach", Seminar talk, Linz, December 2013, http://people.ricam.oeaw.ac.at/c.pechstein/pechstein-bddc2013.pdf
1922: .ve

1924:    The matrix to be preconditioned (Pmat) must be of type MATIS.

1926:    Currently works with MATIS matrices with local matrices of type MATSEQAIJ, MATSEQBAIJ or MATSEQSBAIJ, either with real or complex numbers.

1928:    It also works with unsymmetric and indefinite problems.

1930:    Unlike 'conventional' interface preconditioners, PCBDDC iterates over all degrees of freedom, not just those on the interface. This allows the use of approximate solvers on the subdomains.

1932:    Approximate local solvers are automatically adapted for singular linear problems (see [1]) if the user has provided the nullspace using PCBDDCSetNullSpace()

1934:    Boundary nodes are split in vertices, edges and faces classes using information from the local to global mapping of dofs and the local connectivity graph of nodes. The latter can be customized by using PCBDDCSetLocalAdjacencyGraph()
1935:    Additional information on dofs can be provided by using PCBDDCSetDofsSplitting(), PCBDDCSetDirichletBoundaries(), PCBDDCSetNeumannBoundaries(), and PCBDDCSetPrimalVerticesLocalIS()

1937:    Constraints can be customized by attaching a MatNullSpace object to the MATIS matrix via MatSetNearNullSpace(). Non-singular modes are retained via SVD.

1939:    Change of basis is performed similarly to [2] when requested. When more than one constraint is present on a single connected component (i.e. an edge or a face), a robust method based on local QR factorizations is used.
1940:    User defined change of basis can be passed to PCBDDC by using PCBDDCSetChangeOfBasisMat()

1942:    The PETSc implementation also supports multilevel BDDC [3]. Coarse grids are partitioned using a MatPartitioning object.

1944:    Adaptive selection of primal constraints [4] is supported for SPD systems with high contrast in the coefficients if MUMPS is present. Future versions of the code will also consider using MKL_PARDISO or PASTIX.

1946:    An experimental interface to the FETI-DP method is available. FETI-DP operators can be created using PCBDDCCreateFETIDPOperators(). A stand-alone class for the FETI-DP method will be provided in future releases.
1947:    Deluxe scaling is not supported yet for FETI-DP.

1949:    Options Database Keys (some of them, run with -h for a complete list):

1951: .    -pc_bddc_use_vertices <true> - whether to use vertices in the primal space
1952: .    -pc_bddc_use_edges <true> - whether to use edges in the primal space
1953: .    -pc_bddc_use_faces <false> - whether to use faces in the primal space
1954: .    -pc_bddc_symmetric <true> - symmetric computation of primal basis functions. Specify false for unsymmetric problems
1955: .    -pc_bddc_use_change_of_basis <false> - use change of basis approach (on edges only)
1956: .    -pc_bddc_use_change_on_faces <false> - use change of basis approach on faces if change of basis has been requested
1957: .    -pc_bddc_switch_static <false> - switches from M_2 (default) to M_3 operator (see reference article [1])
1958: .    -pc_bddc_levels <0> - maximum number of levels for multilevel
1959: .    -pc_bddc_coarsening_ratio <8> - number of subdomains which will be aggregated together at the coarser level (e.g. the H/h ratio at the coarser level, significant only in the multilevel case)
1960: .    -pc_bddc_redistribute <0> - size of a subset of processors where the coarse problem will be remapped (the value is ignored if not at the coarsest level)
1961: .    -pc_bddc_use_deluxe_scaling <false> - use deluxe scaling
1962: .    -pc_bddc_schur_layers <-1> - select the economic version of deluxe scaling by specifying the number of layers (-1 corresponds to the original deluxe scaling)
1963: .    -pc_bddc_adaptive_threshold <0.0> - when a value greater than one is specified, adaptive selection of constraints is performed on edges and faces (requires deluxe scaling and MUMPS installed)
1964: -    -pc_bddc_check_level <0> - set verbosity level of debugging output

1966:    Options for Dirichlet, Neumann or coarse solver can be set with
1967: .vb
1968:       -pc_bddc_dirichlet_
1969:       -pc_bddc_neumann_
1970:       -pc_bddc_coarse_
1971: .ve
1972:    e.g. -pc_bddc_dirichlet_ksp_type richardson -pc_bddc_dirichlet_pc_type gamg. By default, PCBDDC uses KSPPREONLY and PCLU.

1974:    When using a multilevel approach, solvers' options at the N-th level (N > 1) can be specified as
1975: .vb
1976:       -pc_bddc_dirichlet_lN_
1977:       -pc_bddc_neumann_lN_
1978:       -pc_bddc_coarse_lN_
1979: .ve
1980:    Note that level number ranges from the finest (0) to the coarsest (N).
1981:    In order to specify options for the BDDC operators at the coarser levels (and not for the solvers), prepend -pc_bddc_coarse_ or -pc_bddc_coarse_lN_ to the option, e.g.
1982: .vb
1983:      -pc_bddc_coarse_pc_bddc_adaptive_threshold 5 -pc_bddc_coarse_l1_pc_bddc_redistribute 3
1984: .ve
1985:    will use a threshold of 5 for the selection of constraints at the first coarse level and will redistribute the coarse problem of the first coarse level onto 3 processors.

1987:    Level: intermediate

1989:    Developer notes:

1991:    Contributed by Stefano Zampini

1993: .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,  MATIS
1994: M*/
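
/*
   Illustrative sketch (not part of the PETSc sources): minimal programmatic customization
   of PCBDDC along the lines of the manual page above. It assumes A is a MATIS matrix and
   that dirichlet/neumann are index sets in global numbering describing the corresponding
   parts of the boundary; the helper name and variables are made up for illustration only.
*/
static PetscErrorCode ExampleCustomizeBDDC(Mat A, IS dirichlet, IS neumann, KSP *ksp)
{
  PC pc;

  KSPCreate(PetscObjectComm((PetscObject)A),ksp);
  KSPSetOperators(*ksp,A,A);
  KSPGetPC(*ksp,&pc);
  PCSetType(pc,PCBDDC);
  /* tell BDDC where essential and natural boundary conditions live */
  PCBDDCSetDirichletBoundaries(pc,dirichlet);
  PCBDDCSetNeumannBoundaries(pc,neumann);
  /* enable a multilevel hierarchy with two coarse levels */
  PCBDDCSetLevels(pc,2);
  /* command line options such as -pc_bddc_use_faces still apply */
  KSPSetFromOptions(*ksp);
  return(0);
}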

1998: PETSC_EXTERN PetscErrorCode PCCreate_BDDC(PC pc)
1999: {
2000:   PetscErrorCode      ierr;
2001:   PC_BDDC             *pcbddc;

2004:   /* Create the private data structure for this preconditioner and attach it to the PC object. */
2005:   PetscNewLog(pc,&pcbddc);
2006:   pc->data  = (void*)pcbddc;

2008:   /* create PCIS data structure */
2009:   PCISCreate(pc);

2011:   /* BDDC customization */
2012:   pcbddc->use_local_adj       = PETSC_TRUE;
2013:   pcbddc->use_vertices        = PETSC_TRUE;
2014:   pcbddc->use_edges           = PETSC_TRUE;
2015:   pcbddc->use_faces           = PETSC_FALSE;
2016:   pcbddc->use_change_of_basis = PETSC_FALSE;
2017:   pcbddc->use_change_on_faces = PETSC_FALSE;
2018:   pcbddc->switch_static       = PETSC_FALSE;
2019:   pcbddc->use_nnsp_true       = PETSC_FALSE;
2020:   pcbddc->use_qr_single       = PETSC_FALSE;
2021:   pcbddc->symmetric_primal    = PETSC_TRUE;
2022:   pcbddc->dbg_flag            = 0;
2023:   /* private */
2024:   pcbddc->local_primal_size          = 0;
2025:   pcbddc->local_primal_size_cc       = 0;
2026:   pcbddc->local_primal_ref_node      = 0;
2027:   pcbddc->local_primal_ref_mult      = 0;
2028:   pcbddc->n_vertices                 = 0;
2029:   pcbddc->primal_indices_local_idxs  = 0;
2030:   pcbddc->recompute_topography       = PETSC_FALSE;
2031:   pcbddc->coarse_size                = -1;
2032:   pcbddc->new_primal_space           = PETSC_FALSE;
2033:   pcbddc->new_primal_space_local     = PETSC_FALSE;
2034:   pcbddc->global_primal_indices      = 0;
2035:   pcbddc->onearnullspace             = 0;
2036:   pcbddc->onearnullvecs_state        = 0;
2037:   pcbddc->user_primal_vertices       = 0;
2038:   pcbddc->NullSpace                  = 0;
2039:   pcbddc->temp_solution              = 0;
2040:   pcbddc->original_rhs               = 0;
2041:   pcbddc->local_mat                  = 0;
2042:   pcbddc->ChangeOfBasisMatrix        = 0;
2043:   pcbddc->user_ChangeOfBasisMatrix   = 0;
2044:   pcbddc->new_global_mat             = 0;
2045:   pcbddc->coarse_vec                 = 0;
2046:   pcbddc->coarse_ksp                 = 0;
2047:   pcbddc->coarse_phi_B               = 0;
2048:   pcbddc->coarse_phi_D               = 0;
2049:   pcbddc->coarse_psi_B               = 0;
2050:   pcbddc->coarse_psi_D               = 0;
2051:   pcbddc->vec1_P                     = 0;
2052:   pcbddc->vec1_R                     = 0;
2053:   pcbddc->vec2_R                     = 0;
2054:   pcbddc->local_auxmat1              = 0;
2055:   pcbddc->local_auxmat2              = 0;
2056:   pcbddc->R_to_B                     = 0;
2057:   pcbddc->R_to_D                     = 0;
2058:   pcbddc->ksp_D                      = 0;
2059:   pcbddc->ksp_R                      = 0;
2060:   pcbddc->NeumannBoundaries          = 0;
2061:   pcbddc->NeumannBoundariesLocal     = 0;
2062:   pcbddc->DirichletBoundaries        = 0;
2063:   pcbddc->DirichletBoundariesLocal   = 0;
2064:   pcbddc->user_provided_isfordofs    = PETSC_FALSE;
2065:   pcbddc->n_ISForDofs                = 0;
2066:   pcbddc->n_ISForDofsLocal           = 0;
2067:   pcbddc->ISForDofs                  = 0;
2068:   pcbddc->ISForDofsLocal             = 0;
2069:   pcbddc->ConstraintMatrix           = 0;
2070:   pcbddc->use_exact_dirichlet_trick  = PETSC_TRUE;
2071:   pcbddc->coarse_loc_to_glob         = 0;
2072:   pcbddc->coarsening_ratio           = 8;
2073:   pcbddc->coarse_adj_red             = 0;
2074:   pcbddc->current_level              = 0;
2075:   pcbddc->max_levels                 = 0;
2076:   pcbddc->use_coarse_estimates       = PETSC_FALSE;
2077:   pcbddc->redistribute_coarse        = 0;
2078:   pcbddc->coarse_subassembling       = 0;
2079:   pcbddc->coarse_subassembling_init  = 0;

2081:   /* create local graph structure */
2082:   PCBDDCGraphCreate(&pcbddc->mat_graph);

2084:   /* scaling */
2085:   pcbddc->work_scaling          = 0;
2086:   pcbddc->use_deluxe_scaling    = PETSC_FALSE;
2087:   pcbddc->faster_deluxe         = PETSC_FALSE;

2089:   /* create sub schurs structure */
2090:   PCBDDCSubSchursCreate(&pcbddc->sub_schurs);
2091:   pcbddc->sub_schurs_rebuild     = PETSC_FALSE;
2092:   pcbddc->sub_schurs_layers      = -1;
2093:   pcbddc->sub_schurs_use_useradj = PETSC_FALSE;

2095:   pcbddc->computed_rowadj = PETSC_FALSE;

2097:   /* adaptivity */
2098:   pcbddc->adaptive_threshold      = 0.0;
2099:   pcbddc->adaptive_nmax           = 0;
2100:   pcbddc->adaptive_nmin           = 0;

2102:   /* function pointers */
2103:   pc->ops->apply               = PCApply_BDDC;
2104:   pc->ops->applytranspose      = PCApplyTranspose_BDDC;
2105:   pc->ops->setup               = PCSetUp_BDDC;
2106:   pc->ops->destroy             = PCDestroy_BDDC;
2107:   pc->ops->setfromoptions      = PCSetFromOptions_BDDC;
2108:   pc->ops->view                = PCView_BDDC;
2109:   pc->ops->applyrichardson     = 0;
2110:   pc->ops->applysymmetricleft  = 0;
2111:   pc->ops->applysymmetricright = 0;
2112:   pc->ops->presolve            = PCPreSolve_BDDC;
2113:   pc->ops->postsolve           = PCPostSolve_BDDC;

2115:   /* composing function */
2116:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetChangeOfBasisMat_C",PCBDDCSetChangeOfBasisMat_BDDC);
2117:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetPrimalVerticesLocalIS_C",PCBDDCSetPrimalVerticesLocalIS_BDDC);
2118:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetCoarseningRatio_C",PCBDDCSetCoarseningRatio_BDDC);
2119:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetLevel_C",PCBDDCSetLevel_BDDC);
2120:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetUseExactDirichlet_C",PCBDDCSetUseExactDirichlet_BDDC);
2121:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetLevels_C",PCBDDCSetLevels_BDDC);
2122:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNullSpace_C",PCBDDCSetNullSpace_BDDC);
2123:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDirichletBoundaries_C",PCBDDCSetDirichletBoundaries_BDDC);
2124:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDirichletBoundariesLocal_C",PCBDDCSetDirichletBoundariesLocal_BDDC);
2125:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNeumannBoundaries_C",PCBDDCSetNeumannBoundaries_BDDC);
2126:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNeumannBoundariesLocal_C",PCBDDCSetNeumannBoundariesLocal_BDDC);
2127:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetDirichletBoundaries_C",PCBDDCGetDirichletBoundaries_BDDC);
2128:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetDirichletBoundariesLocal_C",PCBDDCGetDirichletBoundariesLocal_BDDC);
2129:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetNeumannBoundaries_C",PCBDDCGetNeumannBoundaries_BDDC);
2130:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetNeumannBoundariesLocal_C",PCBDDCGetNeumannBoundariesLocal_BDDC);
2131:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDofsSplitting_C",PCBDDCSetDofsSplitting_BDDC);
2132:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDofsSplittingLocal_C",PCBDDCSetDofsSplittingLocal_BDDC);
2133:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetLocalAdjacencyGraph_C",PCBDDCSetLocalAdjacencyGraph_BDDC);
2134:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCCreateFETIDPOperators_C",PCBDDCCreateFETIDPOperators_BDDC);
2135:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetRHS_C",PCBDDCMatFETIDPGetRHS_BDDC);
2136:   PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetSolution_C",PCBDDCMatFETIDPGetSolution_BDDC);
2137:   PetscObjectComposeFunction((PetscObject)pc,"PCPreSolveChangeRHS_C",PCPreSolveChangeRHS_BDDC);
2138:   return(0);
2139: }
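
/*
   Illustrative sketch (not part of the PETSc sources): the "..._C" strings composed above
   are the hooks through which the public PCBDDC interface reaches this implementation.
   A hypothetical setter following the same pattern could look as below; "PCBDDCSetFoo_C"
   and the function name are made up, only the dispatch mechanism (PetscTryMethod on a
   composed function) is real.
*/
static PetscErrorCode ExampleComposedDispatch(PC pc, PetscInt value)
{
  /* silently ignored when pc is not of type PCBDDC (no "PCBDDCSetFoo_C" composed) */
  PetscTryMethod(pc,"PCBDDCSetFoo_C",(PC,PetscInt),(pc,value));
  return(0);
}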