/* Actual source code: bddc.c */
1: #include <petsc/private/pcbddcimpl.h>
2: #include <petsc/private/pcbddcprivateimpl.h>
3: #include <petscblaslapack.h>
/* One-time registration guard for the PCBDDC package (log events below) */
static PetscBool PCBDDCPackageInitialized = PETSC_FALSE;

/* BibTeX entry describing the PCBDDC implementation; `cited` guards one-time registration */
static PetscBool  cited      = PETSC_FALSE;
static const char citation[] = "@article{ZampiniPCBDDC,\n"
                               "author = {Stefano Zampini},\n"
                               "title = {{PCBDDC}: A Class of Robust Dual-Primal Methods in {PETS}c},\n"
                               "journal = {SIAM Journal on Scientific Computing},\n"
                               "volume = {38},\n"
                               "number = {5},\n"
                               "pages = {S282-S306},\n"
                               "year = {2016},\n"
                               "doi = {10.1137/15M1025785},\n"
                               "URL = {http://dx.doi.org/10.1137/15M1025785},\n"
                               "eprint = {http://dx.doi.org/10.1137/15M1025785}\n"
                               "}\n";

/* Profiling events, one slot per multilevel BDDC level */
PetscLogEvent PC_BDDC_Topology[PETSC_PCBDDC_MAXLEVELS];
PetscLogEvent PC_BDDC_LocalSolvers[PETSC_PCBDDC_MAXLEVELS];
PetscLogEvent PC_BDDC_LocalWork[PETSC_PCBDDC_MAXLEVELS];
PetscLogEvent PC_BDDC_CorrectionSetUp[PETSC_PCBDDC_MAXLEVELS];
PetscLogEvent PC_BDDC_ApproxSetUp[PETSC_PCBDDC_MAXLEVELS];
PetscLogEvent PC_BDDC_ApproxApply[PETSC_PCBDDC_MAXLEVELS];
PetscLogEvent PC_BDDC_CoarseSetUp[PETSC_PCBDDC_MAXLEVELS];
PetscLogEvent PC_BDDC_CoarseSolver[PETSC_PCBDDC_MAXLEVELS];
PetscLogEvent PC_BDDC_AdaptiveSetUp[PETSC_PCBDDC_MAXLEVELS];
PetscLogEvent PC_BDDC_Scaling[PETSC_PCBDDC_MAXLEVELS];
PetscLogEvent PC_BDDC_Schurs[PETSC_PCBDDC_MAXLEVELS];
/* second index distinguishes different solve phases -- see usage sites for the exact meaning */
PetscLogEvent PC_BDDC_Solves[PETSC_PCBDDC_MAXLEVELS][3];

/* String table for PCBDDCInterfaceExtType, in the layout expected by PetscOptionsEnum() */
const char *const PCBDDCInterfaceExtTypes[] = {"DIRICHLET", "LUMP", "PCBDDCInterfaceExtType", "PC_BDDC_INTERFACE_EXT_", NULL};

static PetscErrorCode PCApply_BDDC(PC, Vec, Vec);
/* Process run-time options for PCBDDC (invoked by PCSetFromOptions()).
   Options are read into the PC_BDDC struct; most take effect at the next PCSetUp(). */
static PetscErrorCode PCSetFromOptions_BDDC(PC pc, PetscOptionItems PetscOptionsObject)
{
  PC_BDDC  *pcbddc = (PC_BDDC *)pc->data;
  PetscInt  nt, i;
  char      load[PETSC_MAX_PATH_LEN] = {'\0'};
  PetscBool flg;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "BDDC options");
  /* Load customization from binary file (debugging) */
  PetscCall(PetscOptionsString("-pc_bddc_load", "Load customization from file (intended for debug)", "none", load, load, sizeof(load), &flg));
  if (flg) {
    size_t len;

    /* empty string means "use the default file" inside the loader */
    PetscCall(PetscStrlen(load, &len));
    PetscCall(PCBDDCLoadOrViewCustomization(pc, PETSC_TRUE, len ? load : NULL));
  }
  /* Verbose debugging */
  PetscCall(PetscOptionsInt("-pc_bddc_check_level", "Verbose output for PCBDDC (intended for debug)", "none", pcbddc->dbg_flag, &pcbddc->dbg_flag, NULL));
  /* Approximate solvers */
  PetscCall(PetscOptionsEnum("-pc_bddc_interface_ext_type", "Use DIRICHLET or LUMP to extend interface corrections to interior", "PCBDDCSetInterfaceExtType", PCBDDCInterfaceExtTypes, (PetscEnum)pcbddc->interface_extension, (PetscEnum *)&pcbddc->interface_extension, NULL));
  if (pcbddc->interface_extension == PC_BDDC_INTERFACE_EXT_DIRICHLET) {
    PetscCall(PetscOptionsBool("-pc_bddc_dirichlet_approximate", "Inform PCBDDC that we are using approximate Dirichlet solvers", "none", pcbddc->NullSpace_corr[0], &pcbddc->NullSpace_corr[0], NULL));
    PetscCall(PetscOptionsBool("-pc_bddc_dirichlet_approximate_scale", "Inform PCBDDC that we need to scale the Dirichlet solve", "none", pcbddc->NullSpace_corr[1], &pcbddc->NullSpace_corr[1], NULL));
  } else {
    /* This flag is needed/implied by lumping */
    pcbddc->switch_static = PETSC_TRUE;
  }
  PetscCall(PetscOptionsBool("-pc_bddc_neumann_approximate", "Inform PCBDDC that we are using approximate Neumann solvers", "none", pcbddc->NullSpace_corr[2], &pcbddc->NullSpace_corr[2], NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_neumann_approximate_scale", "Inform PCBDDC that we need to scale the Neumann solve", "none", pcbddc->NullSpace_corr[3], &pcbddc->NullSpace_corr[3], NULL));
  /* Primal space customization */
  PetscCall(PetscOptionsBool("-pc_bddc_use_local_mat_graph", "Use or not adjacency graph of local mat for interface analysis", "none", pcbddc->use_local_adj, &pcbddc->use_local_adj, NULL));
  PetscCall(PetscOptionsInt("-pc_bddc_local_mat_graph_square", "Square adjacency graph of local mat for interface analysis", "none", pcbddc->local_adj_square, &pcbddc->local_adj_square, NULL));
  PetscCall(PetscOptionsInt("-pc_bddc_graph_maxcount", "Maximum number of shared subdomains for a connected component", "none", pcbddc->graphmaxcount, &pcbddc->graphmaxcount, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_corner_selection", "Activates face-based corner selection", "none", pcbddc->corner_selection, &pcbddc->corner_selection, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_use_vertices", "Use or not corner dofs in coarse space", "none", pcbddc->use_vertices, &pcbddc->use_vertices, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_use_edges", "Use or not edge constraints in coarse space", "none", pcbddc->use_edges, &pcbddc->use_edges, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_use_faces", "Use or not face constraints in coarse space", "none", pcbddc->use_faces, &pcbddc->use_faces, NULL));
  PetscCall(PetscOptionsInt("-pc_bddc_vertex_size", "Connected components smaller or equal to vertex size will be considered as primal vertices", "none", pcbddc->vertex_size, &pcbddc->vertex_size, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_use_nnsp", "Use near null space attached to the matrix to compute constraints", "none", pcbddc->use_nnsp, &pcbddc->use_nnsp, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_use_nnsp_true", "Use near null space attached to the matrix to compute constraints as is", "none", pcbddc->use_nnsp_true, &pcbddc->use_nnsp_true, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_use_qr_single", "Use QR factorization for single constraints on cc (QR is always used when multiple constraints are present)", "none", pcbddc->use_qr_single, &pcbddc->use_qr_single, NULL));
  /* Change of basis */
  PetscCall(PetscOptionsBool("-pc_bddc_use_change_of_basis", "Use or not internal change of basis on local edge nodes", "none", pcbddc->use_change_of_basis, &pcbddc->use_change_of_basis, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_use_change_on_faces", "Use or not internal change of basis on local face nodes", "none", pcbddc->use_change_on_faces, &pcbddc->use_change_on_faces, NULL));
  /* change on faces only makes sense when the edge change of basis is active */
  if (!pcbddc->use_change_of_basis) pcbddc->use_change_on_faces = PETSC_FALSE;
  /* Switch between M_2 (default) and M_3 preconditioners (as defined by C. Dohrmann in the ref. article) */
  PetscCall(PetscOptionsBool("-pc_bddc_switch_static", "Switch on static condensation ops around the interface preconditioner", "none", pcbddc->switch_static, &pcbddc->switch_static, NULL));
  PetscCall(PetscOptionsInt("-pc_bddc_coarse_eqs_per_proc", "Target number of equations per process for coarse problem redistribution (significant only at the coarsest level)", "none", pcbddc->coarse_eqs_per_proc, &pcbddc->coarse_eqs_per_proc, NULL));
  /* these two go through the public setters so their validation/side effects apply */
  i = pcbddc->coarsening_ratio;
  PetscCall(PetscOptionsInt("-pc_bddc_coarsening_ratio", "Set coarsening ratio used in multilevel coarsening", "PCBDDCSetCoarseningRatio", i, &i, NULL));
  PetscCall(PCBDDCSetCoarseningRatio(pc, i));
  i = pcbddc->max_levels;
  PetscCall(PetscOptionsInt("-pc_bddc_levels", "Set maximum number of levels for multilevel", "PCBDDCSetLevels", i, &i, NULL));
  PetscCall(PCBDDCSetLevels(pc, i));
  PetscCall(PetscOptionsInt("-pc_bddc_coarse_eqs_limit", "Set maximum number of equations on coarsest grid to aim for", "none", pcbddc->coarse_eqs_limit, &pcbddc->coarse_eqs_limit, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_use_coarse_estimates", "Use estimated eigenvalues for coarse problem", "none", pcbddc->use_coarse_estimates, &pcbddc->use_coarse_estimates, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_use_deluxe_scaling", "Use deluxe scaling for BDDC", "none", pcbddc->use_deluxe_scaling, &pcbddc->use_deluxe_scaling, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_schur_rebuild", "Whether or not the interface graph for Schur principal minors has to be rebuilt (i.e. define the interface without any adjacency)", "none", pcbddc->sub_schurs_rebuild, &pcbddc->sub_schurs_rebuild, NULL));
  PetscCall(PetscOptionsInt("-pc_bddc_schur_layers", "Number of dofs' layers for the computation of principal minors (i.e. -1 uses all dofs)", "none", pcbddc->sub_schurs_layers, &pcbddc->sub_schurs_layers, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_schur_use_useradj", "Whether or not the CSR graph specified by the user should be used for computing successive layers (default is to use adj of local mat)", "none", pcbddc->sub_schurs_use_useradj, &pcbddc->sub_schurs_use_useradj, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_schur_exact", "Whether or not to use the exact Schur complement instead of the reduced one (which excludes size 1 cc)", "none", pcbddc->sub_schurs_exact_schur, &pcbddc->sub_schurs_exact_schur, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_deluxe_zerorows", "Zero rows and columns of deluxe operators associated with primal dofs", "none", pcbddc->deluxe_zerorows, &pcbddc->deluxe_zerorows, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_deluxe_singlemat", "Collapse deluxe operators", "none", pcbddc->deluxe_singlemat, &pcbddc->deluxe_singlemat, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_adaptive_userdefined", "Use user-defined constraints (should be attached via MatSetNearNullSpace to pmat) in addition to those adaptively generated", "none", pcbddc->adaptive_userdefined, &pcbddc->adaptive_userdefined, NULL));
  /* up to two thresholds may be given; a single value is replicated to both slots */
  nt = 2;
  PetscCall(PetscOptionsRealArray("-pc_bddc_adaptive_threshold", "Thresholds to be used for adaptive selection of constraints", "none", pcbddc->adaptive_threshold, &nt, NULL));
  if (nt == 1) pcbddc->adaptive_threshold[1] = pcbddc->adaptive_threshold[0];
  PetscCall(PetscOptionsInt("-pc_bddc_adaptive_nmin", "Minimum number of constraints per connected components", "none", pcbddc->adaptive_nmin, &pcbddc->adaptive_nmin, NULL));
  PetscCall(PetscOptionsInt("-pc_bddc_adaptive_nmax", "Maximum number of constraints per connected components", "none", pcbddc->adaptive_nmax, &pcbddc->adaptive_nmax, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_symmetric", "Symmetric computation of primal basis functions", "none", pcbddc->symmetric_primal, &pcbddc->symmetric_primal, NULL));
  PetscCall(PetscOptionsInt("-pc_bddc_coarse_adj", "Number of processors where to map the coarse adjacency list", "none", pcbddc->coarse_adj_red, &pcbddc->coarse_adj_red, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_benign_trick", "Apply the benign subspace trick to saddle point problems with discontinuous pressures", "none", pcbddc->benign_saddle_point, &pcbddc->benign_saddle_point, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_benign_change", "Compute the pressure change of basis explicitly", "none", pcbddc->benign_change_explicit, &pcbddc->benign_change_explicit, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_benign_compute_correction", "Compute the benign correction during PreSolve", "none", pcbddc->benign_compute_correction, &pcbddc->benign_compute_correction, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_nonetflux", "Automatic computation of no-net-flux quadrature weights", "none", pcbddc->compute_nonetflux, &pcbddc->compute_nonetflux, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_detect_disconnected", "Detects disconnected subdomains", "none", pcbddc->detect_disconnected, &pcbddc->detect_disconnected, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_detect_disconnected_filter", "Filters out small entries in the local matrix when detecting disconnected subdomains", "none", pcbddc->detect_disconnected_filter, &pcbddc->detect_disconnected_filter, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_eliminate_dirichlet", "Whether or not we want to eliminate dirichlet dofs during presolve", "none", pcbddc->eliminate_dirdofs, &pcbddc->eliminate_dirdofs, NULL));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* View routine for PCBDDC: prints configuration and, once set up, global statistics
   on the decomposition plus the local/coarse solver views.
   Collective on the communicator of pc; the statistics use MPI reductions. */
static PetscErrorCode PCView_BDDC(PC pc, PetscViewer viewer)
{
  PC_BDDC     *pcbddc = (PC_BDDC *)pc->data;
  PC_IS       *pcis   = (PC_IS *)pc->data; /* PC_BDDC extends PC_IS, so the same data pointer serves both views (see pcbddcimpl.h) */
  PetscBool    isascii;
  PetscSubcomm subcomm;
  PetscViewer  subviewer;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  /* ASCII viewer */
  if (isascii) {
    PetscMPIInt color, rank, size;
    PetscInt64  loc[7], gsum[6], gmax[6], gmin[6], totbenign;
    PetscScalar interface_size;
    PetscReal   ratio1 = 0., ratio2 = 0.;
    Vec         counter;

    if (!pc->setupcalled) PetscCall(PetscViewerASCIIPrintf(viewer, "  Partial information available: preconditioner has not been setup yet\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use verbose output: %" PetscInt_FMT "\n", pcbddc->dbg_flag));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use user-defined CSR: %d\n", !!pcbddc->mat_graph->nvtxs_csr));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use local mat graph: %d\n", pcbddc->use_local_adj && !pcbddc->mat_graph->nvtxs_csr));
    if (pcbddc->mat_graph->twodim) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Connectivity graph topological dimension: 2\n"));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Connectivity graph topological dimension: 3\n"));
    }
    if (pcbddc->graphmaxcount != PETSC_INT_MAX) PetscCall(PetscViewerASCIIPrintf(viewer, "  Graph max count: %" PetscInt_FMT "\n", pcbddc->graphmaxcount));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Corner selection: %d (selected %d)\n", pcbddc->corner_selection, pcbddc->corner_selected));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use vertices: %d (vertex size %" PetscInt_FMT ")\n", pcbddc->use_vertices, pcbddc->vertex_size));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use edges: %d\n", pcbddc->use_edges));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use faces: %d\n", pcbddc->use_faces));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use true near null space: %d\n", pcbddc->use_nnsp_true));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use QR for single constraints on cc: %d\n", pcbddc->use_qr_single));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use change of basis on local edge nodes: %d\n", pcbddc->use_change_of_basis));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use change of basis on local face nodes: %d\n", pcbddc->use_change_on_faces));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  User defined change of basis matrix: %d\n", !!pcbddc->user_ChangeOfBasisMatrix));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Has change of basis matrix: %d\n", !!pcbddc->ChangeOfBasisMatrix));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Eliminate dirichlet boundary dofs: %d\n", pcbddc->eliminate_dirdofs));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Switch on static condensation ops around the interface preconditioner: %d\n", pcbddc->switch_static));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use exact dirichlet trick: %d\n", pcbddc->use_exact_dirichlet_trick));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Interface extension: %s\n", PCBDDCInterfaceExtTypes[pcbddc->interface_extension]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Multilevel max levels: %" PetscInt_FMT "\n", pcbddc->max_levels));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Multilevel coarsening ratio: %" PetscInt_FMT "\n", pcbddc->coarsening_ratio));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use estimated eigs for coarse problem: %d\n", pcbddc->use_coarse_estimates));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use deluxe scaling: %d\n", pcbddc->use_deluxe_scaling));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use deluxe zerorows: %d\n", pcbddc->deluxe_zerorows));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use deluxe singlemat: %d\n", pcbddc->deluxe_singlemat));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Rebuild interface graph for Schur principal minors: %d\n", pcbddc->sub_schurs_rebuild));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Number of dofs' layers for the computation of principal minors: %" PetscInt_FMT "\n", pcbddc->sub_schurs_layers));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use user CSR graph to compute successive layers: %d\n", pcbddc->sub_schurs_use_useradj));
    if (pcbddc->adaptive_threshold[1] != pcbddc->adaptive_threshold[0]) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Adaptive constraint selection thresholds (active %d, userdefined %d): %g,%g\n", pcbddc->adaptive_selection, pcbddc->adaptive_userdefined, (double)pcbddc->adaptive_threshold[0], (double)pcbddc->adaptive_threshold[1]));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Adaptive constraint selection threshold (active %d, userdefined %d): %g\n", pcbddc->adaptive_selection, pcbddc->adaptive_userdefined, (double)pcbddc->adaptive_threshold[0]));
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Min constraints / connected component: %" PetscInt_FMT "\n", pcbddc->adaptive_nmin));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Max constraints / connected component: %" PetscInt_FMT "\n", pcbddc->adaptive_nmax));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Invert exact Schur complement for adaptive selection: %d\n", pcbddc->sub_schurs_exact_schur));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Symmetric computation of primal basis functions: %d\n", pcbddc->symmetric_primal));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Num. Procs. to map coarse adjacency list: %" PetscInt_FMT "\n", pcbddc->coarse_adj_red));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Coarse eqs per proc (significant at the coarsest level): %" PetscInt_FMT "\n", pcbddc->coarse_eqs_per_proc));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Detect disconnected: %d (filter %d)\n", pcbddc->detect_disconnected, pcbddc->detect_disconnected_filter));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Benign subspace trick: %d (change explicit %d)\n", pcbddc->benign_saddle_point, pcbddc->benign_change_explicit));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Benign subspace trick is active: %d\n", pcbddc->benign_have_null));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Algebraic computation of no-net-flux: %d\n", pcbddc->compute_nonetflux));
    /* statistics below require the setup phase to have been completed */
    if (!pc->setupcalled) PetscFunctionReturn(PETSC_SUCCESS);

    /* compute interface size: scatter 1s from the interface back to a global vector and sum */
    PetscCall(VecSet(pcis->vec1_B, 1.0));
    PetscCall(MatCreateVecs(pc->pmat, &counter, NULL));
    PetscCall(VecSet(counter, 0.0));
    PetscCall(VecScatterBegin(pcis->global_to_B, pcis->vec1_B, counter, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_B, pcis->vec1_B, counter, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecSum(counter, &interface_size));
    PetscCall(VecDestroy(&counter));

    /* compute some statistics on the domain decomposition */
    /* gsum[0] = 1 keeps the MEAN divisions below well defined on ranks where
       MPI_Reduce does not write the receive buffer (only the root receives) */
    gsum[0] = 1;
    gsum[1] = gsum[2] = gsum[3] = gsum[4] = gsum[5] = 0;
    loc[0]  = !!pcis->n;
    loc[1]  = pcis->n - pcis->n_B;
    loc[2]  = pcis->n_B;
    loc[3]  = pcbddc->local_primal_size;
    loc[4]  = pcis->n;
    loc[5]  = pcbddc->n_local_subs > 0 ? pcbddc->n_local_subs : (pcis->n ? 1 : 0);
    loc[6]  = pcbddc->benign_n;
    PetscCallMPI(MPI_Reduce(loc, gsum, 6, MPIU_INT64, MPI_SUM, 0, PetscObjectComm((PetscObject)pc)));
    /* inactive processes (loc[0] == 0) are excluded from max/min via sentinel values */
    if (!loc[0]) loc[1] = loc[2] = loc[3] = loc[4] = loc[5] = -1;
    PetscCallMPI(MPI_Reduce(loc, gmax, 6, MPIU_INT64, MPI_MAX, 0, PetscObjectComm((PetscObject)pc)));
    if (!loc[0]) loc[1] = loc[2] = loc[3] = loc[4] = loc[5] = PETSC_INT_MAX;
    PetscCallMPI(MPI_Reduce(loc, gmin, 6, MPIU_INT64, MPI_MIN, 0, PetscObjectComm((PetscObject)pc)));
    PetscCallMPI(MPI_Reduce(&loc[6], &totbenign, 1, MPIU_INT64, MPI_SUM, 0, PetscObjectComm((PetscObject)pc)));
    if (pcbddc->coarse_size) {
      ratio1 = pc->pmat->rmap->N / (1. * pcbddc->coarse_size);
      ratio2 = PetscRealPart(interface_size) / pcbddc->coarse_size;
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "********************************** STATISTICS AT LEVEL %" PetscInt_FMT " **********************************\n", pcbddc->current_level));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Global dofs sizes: all %" PetscInt_FMT " interface %" PetscInt_FMT " coarse %" PetscInt_FMT "\n", pc->pmat->rmap->N, (PetscInt)PetscRealPart(interface_size), pcbddc->coarse_size));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Coarsening ratios: all/coarse %" PetscInt_FMT " interface/coarse %" PetscInt_FMT "\n", (PetscInt)ratio1, (PetscInt)ratio2));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Active processes : %" PetscInt64_FMT "\n", gsum[0]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Total subdomains : %" PetscInt64_FMT "\n", gsum[5]));
    if (pcbddc->benign_have_null) PetscCall(PetscViewerASCIIPrintf(viewer, "  Benign subs      : %" PetscInt64_FMT "\n", totbenign));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Dofs type        :\tMIN\tMAX\tMEAN\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Interior  dofs   :\t%" PetscInt64_FMT "\t%" PetscInt64_FMT "\t%" PetscInt64_FMT "\n", gmin[1], gmax[1], gsum[1] / gsum[0]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Interface dofs   :\t%" PetscInt64_FMT "\t%" PetscInt64_FMT "\t%" PetscInt64_FMT "\n", gmin[2], gmax[2], gsum[2] / gsum[0]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Primal    dofs   :\t%" PetscInt64_FMT "\t%" PetscInt64_FMT "\t%" PetscInt64_FMT "\n", gmin[3], gmax[3], gsum[3] / gsum[0]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Local     dofs   :\t%" PetscInt64_FMT "\t%" PetscInt64_FMT "\t%" PetscInt64_FMT "\n", gmin[4], gmax[4], gsum[4] / gsum[0]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Local     subs   :\t%" PetscInt64_FMT "\t%" PetscInt64_FMT "\n", gmin[5], gmax[5]));
    PetscCall(PetscViewerFlush(viewer));

    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));

    /* local solvers: shown for rank 0 only, via a subviewer on the local solver's communicator */
    PetscCall(PetscViewerGetSubViewer(viewer, PetscObjectComm((PetscObject)pcbddc->ksp_D), &subviewer));
    if (rank == 0) {
      PetscCall(PetscViewerASCIIPrintf(subviewer, "--- Interior solver (rank 0)\n"));
      PetscCall(PetscViewerASCIIPushTab(subviewer));
      PetscCall(KSPView(pcbddc->ksp_D, subviewer));
      PetscCall(PetscViewerASCIIPopTab(subviewer));
      PetscCall(PetscViewerASCIIPrintf(subviewer, "--- Correction solver (rank 0)\n"));
      PetscCall(PetscViewerASCIIPushTab(subviewer));
      PetscCall(KSPView(pcbddc->ksp_R, subviewer));
      PetscCall(PetscViewerASCIIPopTab(subviewer));
      PetscCall(PetscViewerFlush(subviewer));
    }
    PetscCall(PetscViewerRestoreSubViewer(viewer, PetscObjectComm((PetscObject)pcbddc->ksp_D), &subviewer));
    /* the coarse problem can be handled by a different communicator:
       split ranks by whether they own part of the coarse solver */
    if (pcbddc->coarse_ksp) color = 1;
    else color = 0;
    PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
    PetscCall(PetscSubcommCreate(PetscObjectComm((PetscObject)pc), &subcomm));
    PetscCall(PetscSubcommSetNumber(subcomm, PetscMin(size, 2)));
    PetscCall(PetscSubcommSetTypeGeneral(subcomm, color, rank));
    PetscCall(PetscViewerGetSubViewer(viewer, PetscSubcommChild(subcomm), &subviewer));
    if (color == 1) {
      PetscCall(PetscViewerASCIIPrintf(subviewer, "--- Coarse solver\n"));
      PetscCall(PetscViewerASCIIPushTab(subviewer));
      PetscCall(KSPView(pcbddc->coarse_ksp, subviewer));
      PetscCall(PetscViewerASCIIPopTab(subviewer));
      PetscCall(PetscViewerFlush(subviewer));
    }
    PetscCall(PetscViewerRestoreSubViewer(viewer, PetscSubcommChild(subcomm), &subviewer));
    PetscCall(PetscSubcommDestroy(&subcomm));
    PetscCall(PetscViewerFlush(viewer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
270: static PetscErrorCode PCBDDCSetDiscreteGradient_BDDC(PC pc, Mat G, PetscInt order, PetscInt field, PetscBool global, PetscBool conforming)
271: {
272: PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
274: PetscFunctionBegin;
275: PetscCall(PetscObjectReference((PetscObject)G));
276: PetscCall(MatDestroy(&pcbddc->discretegradient));
277: pcbddc->discretegradient = G;
278: pcbddc->nedorder = order > 0 ? order : -order;
279: pcbddc->nedfield = field;
280: pcbddc->nedglobal = global;
281: pcbddc->conforming = conforming;
282: PetscFunctionReturn(PETSC_SUCCESS);
283: }
/*@
  PCBDDCSetDiscreteGradient - Sets the discrete gradient to be used by the `PCBDDC` preconditioner

  Collective

  Input Parameters:
+ pc         - the preconditioning context
. G          - the discrete gradient matrix (in `MATAIJ` format)
. order      - the order of the Nedelec space (1 for the lowest order)
. field      - the field id of the Nedelec dofs (not used if the fields have not been specified)
. global     - the type of global ordering for the rows of `G`
- conforming - whether the mesh is conforming or not

  Level: advanced

  Note:
  The discrete gradient matrix `G` is used to analyze the subdomain edges, and it should not contain any zero entry.
  For variable order spaces, the order should be set to zero.
  If `global` is `PETSC_TRUE`, the rows of `G` should be given in global ordering for the whole dofs;
  if `PETSC_FALSE`, the ordering should be global for the Nedelec field.
  In the latter case, it should hold gid[i] < gid[j] iff geid[i] < geid[j], with gid the global ordering for all the dofs
  and geid the one for the Nedelec field.

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetDofsSplitting()`, `PCBDDCSetDofsSplittingLocal()`, `MATAIJ`, `PCBDDCSetDivergenceMat()`
@*/
PetscErrorCode PCBDDCSetDiscreteGradient(PC pc, Mat G, PetscInt order, PetscInt field, PetscBool global, PetscBool conforming)
{
  PetscFunctionBegin;
  PetscCheckSameComm(pc, 1, G, 2);
  /* dispatch to the type-specific implementation; a no-op when pc is not PCBDDC */
  PetscTryMethod(pc, "PCBDDCSetDiscreteGradient_C", (PC, Mat, PetscInt, PetscInt, PetscBool, PetscBool), (pc, G, order, field, global, conforming));
  PetscFunctionReturn(PETSC_SUCCESS);
}
324: static PetscErrorCode PCBDDCSetDivergenceMat_BDDC(PC pc, Mat divudotp, PetscBool trans, IS vl2l)
325: {
326: PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
328: PetscFunctionBegin;
329: PetscCall(PetscObjectReference((PetscObject)divudotp));
330: PetscCall(MatDestroy(&pcbddc->divudotp));
331: pcbddc->divudotp = divudotp;
332: pcbddc->divudotp_trans = trans;
333: pcbddc->compute_nonetflux = PETSC_TRUE;
334: if (vl2l) {
335: PetscCall(PetscObjectReference((PetscObject)vl2l));
336: PetscCall(ISDestroy(&pcbddc->divudotp_vl2l));
337: pcbddc->divudotp_vl2l = vl2l;
338: }
339: PetscFunctionReturn(PETSC_SUCCESS);
340: }
342: /*@
343: PCBDDCSetDivergenceMat - Sets the linear operator representing \int_\Omega \div {\bf u} \cdot p dx for the `PCBDDC` preconditioner
345: Collective
347: Input Parameters:
348: + pc - the preconditioning context
349: . divudotp - the matrix (must be of type `MATIS`)
350: . trans - if `PETSC_FALSE` (resp. `PETSC_TRUE`), then pressures are in the test (trial) space and velocities are in the trial (test) space.
351: - vl2l - optional index set describing the local (wrt the local matrix in `divudotp`) to local (wrt the local matrix
352: in the matrix used to construct the preconditioner) map for the velocities
354: Level: advanced
356: Notes:
357: This auxiliary matrix is used to compute quadrature weights representing the net-flux across subdomain boundaries
359: If `vl2l` is `NULL`, the local ordering for velocities in `divudotp` should match that of the matrix used to construct the preconditioner
361: .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetDiscreteGradient()`
362: @*/
363: PetscErrorCode PCBDDCSetDivergenceMat(PC pc, Mat divudotp, PetscBool trans, IS vl2l)
364: {
365: PetscBool ismatis;
367: PetscFunctionBegin;
370: PetscCheckSameComm(pc, 1, divudotp, 2);
373: PetscCall(PetscObjectTypeCompare((PetscObject)divudotp, MATIS, &ismatis));
374: PetscCheck(ismatis, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Divergence matrix needs to be of type MATIS");
375: PetscTryMethod(pc, "PCBDDCSetDivergenceMat_C", (PC, Mat, PetscBool, IS), (pc, divudotp, trans, vl2l));
376: PetscFunctionReturn(PETSC_SUCCESS);
377: }
379: static PetscErrorCode PCBDDCSetChangeOfBasisMat_BDDC(PC pc, Mat change, PetscBool interior)
380: {
381: PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
383: PetscFunctionBegin;
384: PetscCall(PetscObjectReference((PetscObject)change));
385: PetscCall(MatDestroy(&pcbddc->user_ChangeOfBasisMatrix));
386: pcbddc->user_ChangeOfBasisMatrix = change;
387: pcbddc->change_interior = interior;
388: PetscFunctionReturn(PETSC_SUCCESS);
389: }
391: /*@
392: PCBDDCSetChangeOfBasisMat - Set user defined change of basis for dofs
394: Collective
396: Input Parameters:
397: + pc - the preconditioning context
398: . change - the change of basis matrix
399: - interior - whether or not the change of basis modifies interior dofs
401: Level: intermediate
403: .seealso: [](ch_ksp), `PCBDDC`
404: @*/
405: PetscErrorCode PCBDDCSetChangeOfBasisMat(PC pc, Mat change, PetscBool interior)
406: {
407: PetscFunctionBegin;
410: PetscCheckSameComm(pc, 1, change, 2);
411: if (pc->mat) {
412: PetscInt rows_c, cols_c, rows, cols;
413: PetscCall(MatGetSize(pc->mat, &rows, &cols));
414: PetscCall(MatGetSize(change, &rows_c, &cols_c));
415: PetscCheck(rows_c == rows, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Invalid number of rows for change of basis matrix! %" PetscInt_FMT " != %" PetscInt_FMT, rows_c, rows);
416: PetscCheck(cols_c == cols, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Invalid number of columns for change of basis matrix! %" PetscInt_FMT " != %" PetscInt_FMT, cols_c, cols);
417: PetscCall(MatGetLocalSize(pc->mat, &rows, &cols));
418: PetscCall(MatGetLocalSize(change, &rows_c, &cols_c));
419: PetscCheck(rows_c == rows, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Invalid number of local rows for change of basis matrix! %" PetscInt_FMT " != %" PetscInt_FMT, rows_c, rows);
420: PetscCheck(cols_c == cols, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Invalid number of local columns for change of basis matrix! %" PetscInt_FMT " != %" PetscInt_FMT, cols_c, cols);
421: }
422: PetscTryMethod(pc, "PCBDDCSetChangeOfBasisMat_C", (PC, Mat, PetscBool), (pc, change, interior));
423: PetscFunctionReturn(PETSC_SUCCESS);
424: }
426: static PetscErrorCode PCBDDCSetPrimalVerticesIS_BDDC(PC pc, IS PrimalVertices)
427: {
428: PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
429: PetscBool isequal = PETSC_FALSE;
431: PetscFunctionBegin;
432: PetscCall(PetscObjectReference((PetscObject)PrimalVertices));
433: if (pcbddc->user_primal_vertices) PetscCall(ISEqual(PrimalVertices, pcbddc->user_primal_vertices, &isequal));
434: PetscCall(ISDestroy(&pcbddc->user_primal_vertices));
435: PetscCall(ISDestroy(&pcbddc->user_primal_vertices_local));
436: pcbddc->user_primal_vertices = PrimalVertices;
437: if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
438: PetscFunctionReturn(PETSC_SUCCESS);
439: }
/*@
  PCBDDCSetPrimalVerticesIS - Set additional user defined primal vertices in `PCBDDC`

  Collective

  Input Parameters:
+ pc             - the preconditioning context
- PrimalVertices - index set of primal vertices in global numbering (can be empty)

  Level: intermediate

  Note:
  Any process can list any global node

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCGetPrimalVerticesIS()`, `PCBDDCSetPrimalVerticesLocalIS()`, `PCBDDCGetPrimalVerticesLocalIS()`
@*/
PetscErrorCode PCBDDCSetPrimalVerticesIS(PC pc, IS PrimalVertices)
{
  PetscFunctionBegin;
  PetscCheckSameComm(pc, 1, PrimalVertices, 2);
  /* dispatch to the type-specific implementation; a no-op when pc is not PCBDDC */
  PetscTryMethod(pc, "PCBDDCSetPrimalVerticesIS_C", (PC, IS), (pc, PrimalVertices));
  PetscFunctionReturn(PETSC_SUCCESS);
}
467: static PetscErrorCode PCBDDCGetPrimalVerticesIS_BDDC(PC pc, IS *is)
468: {
469: PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
471: PetscFunctionBegin;
472: *is = pcbddc->user_primal_vertices;
473: PetscFunctionReturn(PETSC_SUCCESS);
474: }
/*@
  PCBDDCGetPrimalVerticesIS - Get user defined primal vertices set with `PCBDDCSetPrimalVerticesIS()`

  Collective

  Input Parameter:
. pc - the preconditioning context

  Output Parameter:
. is - index set of primal vertices in global numbering (`NULL` if not set)

  Level: intermediate

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetPrimalVerticesIS()`, `PCBDDCSetPrimalVerticesLocalIS()`, `PCBDDCGetPrimalVerticesLocalIS()`
@*/
PetscErrorCode PCBDDCGetPrimalVerticesIS(PC pc, IS *is)
{
  PetscFunctionBegin;
  PetscAssertPointer(is, 2);
  /* PetscUseMethod errors if pc does not provide the implementation (i.e. is not PCBDDC) */
  PetscUseMethod(pc, "PCBDDCGetPrimalVerticesIS_C", (PC, IS *), (pc, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}
500: static PetscErrorCode PCBDDCSetPrimalVerticesLocalIS_BDDC(PC pc, IS PrimalVertices)
501: {
502: PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
503: PetscBool isequal = PETSC_FALSE;
505: PetscFunctionBegin;
506: PetscCall(PetscObjectReference((PetscObject)PrimalVertices));
507: if (pcbddc->user_primal_vertices_local) PetscCall(ISEqual(PrimalVertices, pcbddc->user_primal_vertices_local, &isequal));
508: PetscCall(ISDestroy(&pcbddc->user_primal_vertices));
509: PetscCall(ISDestroy(&pcbddc->user_primal_vertices_local));
510: pcbddc->user_primal_vertices_local = PrimalVertices;
511: if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
512: PetscFunctionReturn(PETSC_SUCCESS);
513: }
/*@
  PCBDDCSetPrimalVerticesLocalIS - Set additional user defined primal vertices in `PCBDDC`

  Collective

  Input Parameters:
+ pc - the preconditioning context
- PrimalVertices - index set of primal vertices in local numbering (can be empty)

  Level: intermediate

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetPrimalVerticesIS()`, `PCBDDCGetPrimalVerticesIS()`, `PCBDDCGetPrimalVerticesLocalIS()`
@*/
PetscErrorCode PCBDDCSetPrimalVerticesLocalIS(PC pc, IS PrimalVertices)
{
  PetscFunctionBegin;
  PetscCheckSameComm(pc, 1, PrimalVertices, 2);
  /* forwarded to the PCBDDC implementation; silently ignored if pc is not of type PCBDDC */
  PetscTryMethod(pc, "PCBDDCSetPrimalVerticesLocalIS_C", (PC, IS), (pc, PrimalVertices));
  PetscFunctionReturn(PETSC_SUCCESS);
}
538: static PetscErrorCode PCBDDCGetPrimalVerticesLocalIS_BDDC(PC pc, IS *is)
539: {
540: PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
542: PetscFunctionBegin;
543: *is = pcbddc->user_primal_vertices_local;
544: PetscFunctionReturn(PETSC_SUCCESS);
545: }
/*@
  PCBDDCGetPrimalVerticesLocalIS - Get user defined primal vertices set with `PCBDDCSetPrimalVerticesLocalIS()`

  Collective

  Input Parameter:
. pc - the preconditioning context

  Output Parameter:
. is - index set of primal vertices in local numbering (`NULL` if not set)

  Level: intermediate

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetPrimalVerticesIS()`, `PCBDDCGetPrimalVerticesIS()`, `PCBDDCSetPrimalVerticesLocalIS()`
@*/
PetscErrorCode PCBDDCGetPrimalVerticesLocalIS(PC pc, IS *is)
{
  PetscFunctionBegin;
  PetscAssertPointer(is, 2);
  /* PetscUseMethod errors if pc is not of type PCBDDC */
  PetscUseMethod(pc, "PCBDDCGetPrimalVerticesLocalIS_C", (PC, IS *), (pc, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}
571: static PetscErrorCode PCBDDCSetCoarseningRatio_BDDC(PC pc, PetscInt k)
572: {
573: PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
575: PetscFunctionBegin;
576: pcbddc->coarsening_ratio = k;
577: PetscFunctionReturn(PETSC_SUCCESS);
578: }
/*@
  PCBDDCSetCoarseningRatio - Set coarsening ratio used in the multi-level version of `PCBDDC`

  Logically Collective

  Input Parameters:
+ pc - the preconditioning context
- k - coarsening ratio (H/h at the coarser level)

  Options Database Key:
. -pc_bddc_coarsening_ratio k - Set the coarsening ratio used in multi-level coarsening

  Level: intermediate

  Note:
  Approximately `k` subdomains at the finer level will be aggregated into a single subdomain at the coarser level

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetLevels()`
@*/
PetscErrorCode PCBDDCSetCoarseningRatio(PC pc, PetscInt k)
{
  PetscFunctionBegin;
  /* forwarded to the PCBDDC implementation; silently ignored if pc is not of type PCBDDC */
  PetscTryMethod(pc, "PCBDDCSetCoarseningRatio_C", (PC, PetscInt), (pc, k));
  PetscFunctionReturn(PETSC_SUCCESS);
}
608: /* The following functions (PCBDDCSetUseExactDirichlet PCBDDCSetLevel) are not public */
609: static PetscErrorCode PCBDDCSetUseExactDirichlet_BDDC(PC pc, PetscBool flg)
610: {
611: PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
613: PetscFunctionBegin;
614: pcbddc->use_exact_dirichlet_trick = flg;
615: PetscFunctionReturn(PETSC_SUCCESS);
616: }
/* Non-public setter: forwards to the PCBDDC implementation; silently ignored if pc is not of type PCBDDC */
PetscErrorCode PCBDDCSetUseExactDirichlet(PC pc, PetscBool flg)
{
  PetscFunctionBegin;
  PetscTryMethod(pc, "PCBDDCSetUseExactDirichlet_C", (PC, PetscBool), (pc, flg));
  PetscFunctionReturn(PETSC_SUCCESS);
}
627: static PetscErrorCode PCBDDCSetLevel_BDDC(PC pc, PetscInt level)
628: {
629: PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
631: PetscFunctionBegin;
632: pcbddc->current_level = level;
633: PetscFunctionReturn(PETSC_SUCCESS);
634: }
/* Non-public setter: forwards to the PCBDDC implementation; silently ignored if pc is not of type PCBDDC */
PetscErrorCode PCBDDCSetLevel(PC pc, PetscInt level)
{
  PetscFunctionBegin;
  PetscTryMethod(pc, "PCBDDCSetLevel_C", (PC, PetscInt), (pc, level));
  PetscFunctionReturn(PETSC_SUCCESS);
}
645: static PetscErrorCode PCBDDCSetLevels_BDDC(PC pc, PetscInt levels)
646: {
647: PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
649: PetscFunctionBegin;
650: PetscCheck(levels < PETSC_PCBDDC_MAXLEVELS, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Maximum number of additional levels for BDDC is %d", PETSC_PCBDDC_MAXLEVELS - 1);
651: pcbddc->max_levels = levels;
652: PetscFunctionReturn(PETSC_SUCCESS);
653: }
/*@
  PCBDDCSetLevels - Sets the maximum number of additional levels allowed for multilevel `PCBDDC`

  Logically Collective

  Input Parameters:
+ pc - the preconditioning context
- levels - the maximum number of levels

  Options Database Key:
. -pc_bddc_levels levels - Set maximum number of levels for multilevel

  Level: intermediate

  Note:
  The default value is 0, that gives the classical two-levels BDDC algorithm

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetCoarseningRatio()`
@*/
PetscErrorCode PCBDDCSetLevels(PC pc, PetscInt levels)
{
  PetscFunctionBegin;
  /* forwarded to the PCBDDC implementation; silently ignored if pc is not of type PCBDDC */
  PetscTryMethod(pc, "PCBDDCSetLevels_C", (PC, PetscInt), (pc, levels));
  PetscFunctionReturn(PETSC_SUCCESS);
}
683: static PetscErrorCode PCBDDCSetDirichletBoundaries_BDDC(PC pc, IS DirichletBoundaries)
684: {
685: PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
686: PetscBool isequal = PETSC_FALSE;
688: PetscFunctionBegin;
689: PetscCall(PetscObjectReference((PetscObject)DirichletBoundaries));
690: if (pcbddc->DirichletBoundaries) PetscCall(ISEqual(DirichletBoundaries, pcbddc->DirichletBoundaries, &isequal));
691: /* last user setting takes precedence -> destroy any other customization */
692: PetscCall(ISDestroy(&pcbddc->DirichletBoundariesLocal));
693: PetscCall(ISDestroy(&pcbddc->DirichletBoundaries));
694: pcbddc->DirichletBoundaries = DirichletBoundaries;
695: if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
696: PetscFunctionReturn(PETSC_SUCCESS);
697: }
/*@
  PCBDDCSetDirichletBoundaries - Set the `IS` defining Dirichlet boundaries for the global problem.

  Collective

  Input Parameters:
+ pc - the preconditioning context
- DirichletBoundaries - parallel `IS` defining the Dirichlet boundaries

  Level: intermediate

  Note:
  Provide the information if you used `MatZeroRows()` or `MatZeroRowsColumns()`. Any process can list any global node

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetDirichletBoundariesLocal()`, `MatZeroRows()`, `MatZeroRowsColumns()`
@*/
PetscErrorCode PCBDDCSetDirichletBoundaries(PC pc, IS DirichletBoundaries)
{
  PetscFunctionBegin;
  PetscCheckSameComm(pc, 1, DirichletBoundaries, 2);
  /* forwarded to the PCBDDC implementation; silently ignored if pc is not of type PCBDDC */
  PetscTryMethod(pc, "PCBDDCSetDirichletBoundaries_C", (PC, IS), (pc, DirichletBoundaries));
  PetscFunctionReturn(PETSC_SUCCESS);
}
725: static PetscErrorCode PCBDDCSetDirichletBoundariesLocal_BDDC(PC pc, IS DirichletBoundaries)
726: {
727: PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
728: PetscBool isequal = PETSC_FALSE;
730: PetscFunctionBegin;
731: PetscCall(PetscObjectReference((PetscObject)DirichletBoundaries));
732: if (pcbddc->DirichletBoundariesLocal) PetscCall(ISEqual(DirichletBoundaries, pcbddc->DirichletBoundariesLocal, &isequal));
733: /* last user setting takes precedence -> destroy any other customization */
734: PetscCall(ISDestroy(&pcbddc->DirichletBoundariesLocal));
735: PetscCall(ISDestroy(&pcbddc->DirichletBoundaries));
736: pcbddc->DirichletBoundariesLocal = DirichletBoundaries;
737: if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
738: PetscFunctionReturn(PETSC_SUCCESS);
739: }
/*@
  PCBDDCSetDirichletBoundariesLocal - Set the `IS` defining Dirichlet boundaries for the global problem in local ordering.

  Collective

  Input Parameters:
+ pc - the preconditioning context
- DirichletBoundaries - parallel `IS` defining the Dirichlet boundaries (in local ordering)

  Level: intermediate

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetDirichletBoundaries()`, `MatZeroRows()`, `MatZeroRowsColumns()`
@*/
PetscErrorCode PCBDDCSetDirichletBoundariesLocal(PC pc, IS DirichletBoundaries)
{
  PetscFunctionBegin;
  PetscCheckSameComm(pc, 1, DirichletBoundaries, 2);
  /* forwarded to the PCBDDC implementation; silently ignored if pc is not of type PCBDDC */
  PetscTryMethod(pc, "PCBDDCSetDirichletBoundariesLocal_C", (PC, IS), (pc, DirichletBoundaries));
  PetscFunctionReturn(PETSC_SUCCESS);
}
764: static PetscErrorCode PCBDDCSetNeumannBoundaries_BDDC(PC pc, IS NeumannBoundaries)
765: {
766: PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
767: PetscBool isequal = PETSC_FALSE;
769: PetscFunctionBegin;
770: PetscCall(PetscObjectReference((PetscObject)NeumannBoundaries));
771: if (pcbddc->NeumannBoundaries) PetscCall(ISEqual(NeumannBoundaries, pcbddc->NeumannBoundaries, &isequal));
772: /* last user setting takes precedence -> destroy any other customization */
773: PetscCall(ISDestroy(&pcbddc->NeumannBoundariesLocal));
774: PetscCall(ISDestroy(&pcbddc->NeumannBoundaries));
775: pcbddc->NeumannBoundaries = NeumannBoundaries;
776: if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
777: PetscFunctionReturn(PETSC_SUCCESS);
778: }
/*@
  PCBDDCSetNeumannBoundaries - Set the `IS` defining Neumann boundaries for the global problem.

  Collective

  Input Parameters:
+ pc - the preconditioning context
- NeumannBoundaries - parallel `IS` defining the Neumann boundaries

  Level: intermediate

  Note:
  Any process can list any global node

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetNeumannBoundariesLocal()`
@*/
PetscErrorCode PCBDDCSetNeumannBoundaries(PC pc, IS NeumannBoundaries)
{
  PetscFunctionBegin;
  PetscCheckSameComm(pc, 1, NeumannBoundaries, 2);
  /* forwarded to the PCBDDC implementation; silently ignored if pc is not of type PCBDDC */
  PetscTryMethod(pc, "PCBDDCSetNeumannBoundaries_C", (PC, IS), (pc, NeumannBoundaries));
  PetscFunctionReturn(PETSC_SUCCESS);
}
806: static PetscErrorCode PCBDDCSetNeumannBoundariesLocal_BDDC(PC pc, IS NeumannBoundaries)
807: {
808: PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
809: PetscBool isequal = PETSC_FALSE;
811: PetscFunctionBegin;
812: PetscCall(PetscObjectReference((PetscObject)NeumannBoundaries));
813: if (pcbddc->NeumannBoundariesLocal) PetscCall(ISEqual(NeumannBoundaries, pcbddc->NeumannBoundariesLocal, &isequal));
814: /* last user setting takes precedence -> destroy any other customization */
815: PetscCall(ISDestroy(&pcbddc->NeumannBoundariesLocal));
816: PetscCall(ISDestroy(&pcbddc->NeumannBoundaries));
817: pcbddc->NeumannBoundariesLocal = NeumannBoundaries;
818: if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
819: PetscFunctionReturn(PETSC_SUCCESS);
820: }
/*@
  PCBDDCSetNeumannBoundariesLocal - Set the `IS` defining Neumann boundaries for the global problem in local ordering.

  Collective

  Input Parameters:
+ pc - the preconditioning context
- NeumannBoundaries - parallel `IS` defining the subdomain part of Neumann boundaries (in local ordering)

  Level: intermediate

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetNeumannBoundaries()`, `PCBDDCGetDirichletBoundaries()`
@*/
PetscErrorCode PCBDDCSetNeumannBoundariesLocal(PC pc, IS NeumannBoundaries)
{
  PetscFunctionBegin;
  PetscCheckSameComm(pc, 1, NeumannBoundaries, 2);
  /* forwarded to the PCBDDC implementation; silently ignored if pc is not of type PCBDDC */
  PetscTryMethod(pc, "PCBDDCSetNeumannBoundariesLocal_C", (PC, IS), (pc, NeumannBoundaries));
  PetscFunctionReturn(PETSC_SUCCESS);
}
845: static PetscErrorCode PCBDDCGetDirichletBoundaries_BDDC(PC pc, IS *DirichletBoundaries)
846: {
847: PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
849: PetscFunctionBegin;
850: *DirichletBoundaries = pcbddc->DirichletBoundaries;
851: PetscFunctionReturn(PETSC_SUCCESS);
852: }
/*@
  PCBDDCGetDirichletBoundaries - Get parallel `IS` for Dirichlet boundaries

  Collective

  Input Parameter:
. pc - the preconditioning context

  Output Parameter:
. DirichletBoundaries - index set defining the Dirichlet boundaries

  Level: intermediate

  Note:
  The `IS` returned (if any) is the same passed in earlier by the user with `PCBDDCSetDirichletBoundaries()`

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetDirichletBoundaries()`
@*/
PetscErrorCode PCBDDCGetDirichletBoundaries(PC pc, IS *DirichletBoundaries)
{
  PetscFunctionBegin;
  /* PetscUseMethod errors if pc is not of type PCBDDC */
  PetscUseMethod(pc, "PCBDDCGetDirichletBoundaries_C", (PC, IS *), (pc, DirichletBoundaries));
  PetscFunctionReturn(PETSC_SUCCESS);
}
880: static PetscErrorCode PCBDDCGetDirichletBoundariesLocal_BDDC(PC pc, IS *DirichletBoundaries)
881: {
882: PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
884: PetscFunctionBegin;
885: *DirichletBoundaries = pcbddc->DirichletBoundariesLocal;
886: PetscFunctionReturn(PETSC_SUCCESS);
887: }
/*@
  PCBDDCGetDirichletBoundariesLocal - Get parallel `IS` for Dirichlet boundaries (in local ordering)

  Collective

  Input Parameter:
. pc - the preconditioning context

  Output Parameter:
. DirichletBoundaries - index set defining the subdomain part of Dirichlet boundaries

  Level: intermediate

  Note:
  The `IS` returned could be the same passed in earlier by the user (if provided with `PCBDDCSetDirichletBoundariesLocal()`)
  or a global-to-local map of the global `IS` (if provided with `PCBDDCSetDirichletBoundaries()`).
  In the latter case, the `IS` will be available only after `PCSetUp()`.

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCGetDirichletBoundaries()`, `PCBDDCSetDirichletBoundaries()`
@*/
PetscErrorCode PCBDDCGetDirichletBoundariesLocal(PC pc, IS *DirichletBoundaries)
{
  PetscFunctionBegin;
  /* PetscUseMethod errors if pc is not of type PCBDDC */
  PetscUseMethod(pc, "PCBDDCGetDirichletBoundariesLocal_C", (PC, IS *), (pc, DirichletBoundaries));
  PetscFunctionReturn(PETSC_SUCCESS);
}
917: static PetscErrorCode PCBDDCGetNeumannBoundaries_BDDC(PC pc, IS *NeumannBoundaries)
918: {
919: PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
921: PetscFunctionBegin;
922: *NeumannBoundaries = pcbddc->NeumannBoundaries;
923: PetscFunctionReturn(PETSC_SUCCESS);
924: }
/*@
  PCBDDCGetNeumannBoundaries - Get parallel `IS` for Neumann boundaries

  Not Collective

  Input Parameter:
. pc - the preconditioning context

  Output Parameter:
. NeumannBoundaries - index set defining the Neumann boundaries

  Level: intermediate

  Note:
  The `IS` returned (if any) is the same passed in earlier by the user with `PCBDDCSetNeumannBoundaries()`

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetNeumannBoundaries()`, `PCBDDCGetDirichletBoundaries()`, `PCBDDCSetDirichletBoundaries()`
@*/
PetscErrorCode PCBDDCGetNeumannBoundaries(PC pc, IS *NeumannBoundaries)
{
  PetscFunctionBegin;
  /* PetscUseMethod errors if pc is not of type PCBDDC */
  PetscUseMethod(pc, "PCBDDCGetNeumannBoundaries_C", (PC, IS *), (pc, NeumannBoundaries));
  PetscFunctionReturn(PETSC_SUCCESS);
}
952: static PetscErrorCode PCBDDCGetNeumannBoundariesLocal_BDDC(PC pc, IS *NeumannBoundaries)
953: {
954: PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
956: PetscFunctionBegin;
957: *NeumannBoundaries = pcbddc->NeumannBoundariesLocal;
958: PetscFunctionReturn(PETSC_SUCCESS);
959: }
/*@
  PCBDDCGetNeumannBoundariesLocal - Get parallel `IS` for Neumann boundaries (in local ordering)

  Not Collective

  Input Parameter:
. pc - the preconditioning context

  Output Parameter:
. NeumannBoundaries - index set defining the subdomain part of Neumann boundaries

  Level: intermediate

  Note:
  The `IS` returned could be the same passed in earlier by the user (if provided with `PCBDDCSetNeumannBoundariesLocal()`)
  or a global-to-local map of the global `IS` (if provided with `PCBDDCSetNeumannBoundaries()`).
  In the latter case, the `IS` will be available after `PCSetUp()`.

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetNeumannBoundaries()`, `PCBDDCSetNeumannBoundariesLocal()`, `PCBDDCGetNeumannBoundaries()`
@*/
PetscErrorCode PCBDDCGetNeumannBoundariesLocal(PC pc, IS *NeumannBoundaries)
{
  PetscFunctionBegin;
  /* PetscUseMethod errors if pc is not of type PCBDDC */
  PetscUseMethod(pc, "PCBDDCGetNeumannBoundariesLocal_C", (PC, IS *), (pc, NeumannBoundaries));
  PetscFunctionReturn(PETSC_SUCCESS);
}
static PetscErrorCode PCBDDCSetLocalAdjacencyGraph_BDDC(PC pc, PetscInt nvtxs, const PetscInt xadj[], const PetscInt adjncy[], PetscCopyMode copymode)
{
  PC_BDDC    *pcbddc    = (PC_BDDC *)pc->data;
  PCBDDCGraph mat_graph = pcbddc->mat_graph;
  PetscBool   same_data = PETSC_FALSE;

  PetscFunctionBegin;
  /* an empty graph resets any previously stored CSR data */
  if (!nvtxs) {
    if (copymode == PETSC_OWN_POINTER) {
      /* ownership was transferred to us but nothing will store the arrays: free them */
      PetscCall(PetscFree(xadj));
      PetscCall(PetscFree(adjncy));
    }
    PetscCall(PCBDDCGraphResetCSR(mat_graph));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  /* detect whether the new CSR equals the stored one, so we can skip the topography recomputation */
  if (mat_graph->nvtxs == nvtxs && mat_graph->freecsr) { /* we own the data */
    if (mat_graph->xadj == xadj && mat_graph->adjncy == adjncy) same_data = PETSC_TRUE;
    if (!same_data && mat_graph->xadj[nvtxs] == xadj[nvtxs]) {
      /* same number of nonzeros: compare row pointers first, then the column indices */
      PetscCall(PetscArraycmp(xadj, mat_graph->xadj, nvtxs + 1, &same_data));
      if (same_data) PetscCall(PetscArraycmp(adjncy, mat_graph->adjncy, xadj[nvtxs], &same_data));
    }
  }
  if (!same_data) {
    /* free old CSR */
    PetscCall(PCBDDCGraphResetCSR(mat_graph));
    /* get CSR into graph structure, honoring the requested copy mode */
    if (copymode == PETSC_COPY_VALUES) {
      PetscCall(PetscMalloc1(nvtxs + 1, &mat_graph->xadj));
      PetscCall(PetscMalloc1(xadj[nvtxs], &mat_graph->adjncy));
      PetscCall(PetscArraycpy(mat_graph->xadj, xadj, nvtxs + 1));
      PetscCall(PetscArraycpy(mat_graph->adjncy, adjncy, xadj[nvtxs]));
      mat_graph->freecsr = PETSC_TRUE;
    } else if (copymode == PETSC_OWN_POINTER) {
      /* steal the arrays: the graph will free them */
      mat_graph->xadj    = (PetscInt *)xadj;
      mat_graph->adjncy  = (PetscInt *)adjncy;
      mat_graph->freecsr = PETSC_TRUE;
    } else if (copymode == PETSC_USE_POINTER) {
      /* borrow the arrays: the caller keeps ownership */
      mat_graph->xadj    = (PetscInt *)xadj;
      mat_graph->adjncy  = (PetscInt *)adjncy;
      mat_graph->freecsr = PETSC_FALSE;
    } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Unsupported copy mode %d", copymode);
    mat_graph->nvtxs_csr         = nvtxs;
    pcbddc->recompute_topography = PETSC_TRUE;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  PCBDDCSetLocalAdjacencyGraph - Set adjacency structure (CSR graph) of the local degrees of freedom.

  Not Collective

  Input Parameters:
+ pc - the preconditioning context.
. nvtxs - number of local vertices of the graph (i.e., the number of local dofs).
. xadj - CSR format row pointers for the connectivity of the dofs
. adjncy - CSR format column indices for the connectivity of the dofs
- copymode - supported modes are `PETSC_COPY_VALUES`, `PETSC_USE_POINTER` or `PETSC_OWN_POINTER`.

  Level: intermediate

  Note:
  A dof is considered connected with all local dofs if xadj[dof+1]-xadj[dof] == 1 and adjncy[xadj[dof]] is negative.

.seealso: [](ch_ksp), `PCBDDC`, `PetscCopyMode`
@*/
PetscErrorCode PCBDDCSetLocalAdjacencyGraph(PC pc, PetscInt nvtxs, const PetscInt xadj[], const PetscInt adjncy[], PetscCopyMode copymode)
{
  PetscBool f = PETSC_FALSE;

  PetscFunctionBegin;
  if (nvtxs) {
    PetscAssertPointer(xadj, 3);
    if (xadj[nvtxs]) PetscAssertPointer(adjncy, 4);
  }
  PetscTryMethod(pc, "PCBDDCSetLocalAdjacencyGraph_C", (PC, PetscInt, const PetscInt[], const PetscInt[], PetscCopyMode), (pc, nvtxs, xadj, adjncy, copymode));
  /* free arrays if PCBDDC is not the PC type */
  PetscCall(PetscObjectHasFunction((PetscObject)pc, "PCBDDCSetLocalAdjacencyGraph_C", &f));
  if (!f && copymode == PETSC_OWN_POINTER) {
    /* ownership was transferred to us but nothing stored the arrays: free them to avoid a leak */
    PetscCall(PetscFree(xadj));
    PetscCall(PetscFree(adjncy));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
static PetscErrorCode PCBDDCSetDofsSplittingLocal_BDDC(PC pc, PetscInt n_is, IS ISForDofs[])
{
  PC_BDDC  *pcbddc = (PC_BDDC *)pc->data;
  PetscInt  i;
  PetscBool isequal = PETSC_FALSE;

  PetscFunctionBegin;
  /* detect whether the new splitting coincides with the stored one, to avoid recomputing the topography */
  if (pcbddc->n_ISForDofsLocal == n_is) {
    for (i = 0; i < n_is; i++) {
      PetscBool isequalt;
      PetscCall(ISEqual(ISForDofs[i], pcbddc->ISForDofsLocal[i], &isequalt));
      if (!isequalt) break;
    }
    if (i == n_is) isequal = PETSC_TRUE;
  }
  /* reference the input before destroying: the input may alias the stored ISes */
  for (i = 0; i < n_is; i++) PetscCall(PetscObjectReference((PetscObject)ISForDofs[i]));
  /* Destroy ISes if they were already set */
  for (i = 0; i < pcbddc->n_ISForDofsLocal; i++) PetscCall(ISDestroy(&pcbddc->ISForDofsLocal[i]));
  PetscCall(PetscFree(pcbddc->ISForDofsLocal));
  /* last user setting takes precedence -> destroy any other customization */
  for (i = 0; i < pcbddc->n_ISForDofs; i++) PetscCall(ISDestroy(&pcbddc->ISForDofs[i]));
  PetscCall(PetscFree(pcbddc->ISForDofs));
  pcbddc->n_ISForDofs = 0;
  /* allocate space then set */
  if (n_is) PetscCall(PetscMalloc1(n_is, &pcbddc->ISForDofsLocal));
  for (i = 0; i < n_is; i++) pcbddc->ISForDofsLocal[i] = ISForDofs[i];
  pcbddc->n_ISForDofsLocal = n_is;
  if (n_is) pcbddc->user_provided_isfordofs = PETSC_TRUE;
  if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  PCBDDCSetDofsSplittingLocal - Set the `IS` defining fields of the local subdomain matrix

  Collective

  Input Parameters:
+ pc - the preconditioning context
. n_is - number of index sets defining the fields, must be the same on all MPI processes
- ISForDofs - array of `IS` describing the fields in local ordering

  Level: intermediate

  Note:
  Not all nodes need to be listed, unlisted nodes will belong to the complement field.

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetDofsSplitting()`
@*/
PetscErrorCode PCBDDCSetDofsSplittingLocal(PC pc, PetscInt n_is, IS ISForDofs[])
{
  PetscInt i;

  PetscFunctionBegin;
  /* every field IS must live on the same communicator as the PC */
  for (i = 0; i < n_is; i++) {
    PetscCheckSameComm(pc, 1, ISForDofs[i], 3);
  }
  PetscTryMethod(pc, "PCBDDCSetDofsSplittingLocal_C", (PC, PetscInt, IS[]), (pc, n_is, ISForDofs));
  PetscFunctionReturn(PETSC_SUCCESS);
}
static PetscErrorCode PCBDDCSetDofsSplitting_BDDC(PC pc, PetscInt n_is, IS ISForDofs[])
{
  PC_BDDC  *pcbddc = (PC_BDDC *)pc->data;
  PetscInt  i;
  PetscBool isequal = PETSC_FALSE;

  PetscFunctionBegin;
  /* detect whether the new splitting coincides with the stored one, to avoid recomputing the topography */
  if (pcbddc->n_ISForDofs == n_is) {
    for (i = 0; i < n_is; i++) {
      PetscBool isequalt;
      PetscCall(ISEqual(ISForDofs[i], pcbddc->ISForDofs[i], &isequalt));
      if (!isequalt) break;
    }
    if (i == n_is) isequal = PETSC_TRUE;
  }
  /* reference the input before destroying: the input may alias the stored ISes */
  for (i = 0; i < n_is; i++) PetscCall(PetscObjectReference((PetscObject)ISForDofs[i]));
  /* Destroy ISes if they were already set */
  for (i = 0; i < pcbddc->n_ISForDofs; i++) PetscCall(ISDestroy(&pcbddc->ISForDofs[i]));
  PetscCall(PetscFree(pcbddc->ISForDofs));
  /* last user setting takes precedence -> destroy any other customization */
  for (i = 0; i < pcbddc->n_ISForDofsLocal; i++) PetscCall(ISDestroy(&pcbddc->ISForDofsLocal[i]));
  PetscCall(PetscFree(pcbddc->ISForDofsLocal));
  pcbddc->n_ISForDofsLocal = 0;
  /* allocate space then set */
  if (n_is) PetscCall(PetscMalloc1(n_is, &pcbddc->ISForDofs));
  for (i = 0; i < n_is; i++) pcbddc->ISForDofs[i] = ISForDofs[i];
  pcbddc->n_ISForDofs = n_is;
  if (n_is) pcbddc->user_provided_isfordofs = PETSC_TRUE;
  if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  PCBDDCSetDofsSplitting - Set the `IS` defining fields of the global matrix

  Collective

  Input Parameters:
+ pc - the preconditioning context
. n_is - number of index sets defining the fields
- ISForDofs - array of `IS` describing the fields in global ordering

  Level: intermediate

  Note:
  Any process can list any global node. Not all nodes need to be listed: unlisted nodes will belong to the complement field.

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetDofsSplittingLocal()`
@*/
PetscErrorCode PCBDDCSetDofsSplitting(PC pc, PetscInt n_is, IS ISForDofs[])
{
  PetscInt i;

  PetscFunctionBegin;
  /* every field IS must live on the same communicator as the PC */
  for (i = 0; i < n_is; i++) {
    PetscCheckSameComm(pc, 1, ISForDofs[i], 3);
  }
  PetscTryMethod(pc, "PCBDDCSetDofsSplitting_C", (PC, PetscInt, IS[]), (pc, n_is, ISForDofs));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1203: static PetscErrorCode PCPreSolve_BDDC(PC pc, KSP ksp, Vec rhs, Vec x)
1204: {
1205: PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
1206: PC_IS *pcis = (PC_IS *)pc->data;
1207: Vec used_vec;
1208: PetscBool iscg, save_rhs = PETSC_TRUE, benign_correction_computed;
1210: PetscFunctionBegin;
1211: /* if we are working with CG, one dirichlet solve can be avoided during Krylov iterations */
1212: if (ksp) {
1213: PetscCall(PetscObjectTypeCompareAny((PetscObject)ksp, &iscg, KSPCG, KSPGROPPCG, KSPPIPECG, KSPPIPELCG, KSPPIPECGRR, ""));
1214: if (pcbddc->benign_apply_coarse_only || pcbddc->switch_static || !iscg || pc->mat != pc->pmat) PetscCall(PCBDDCSetUseExactDirichlet(pc, PETSC_FALSE));
1215: }
1216: if (pcbddc->benign_apply_coarse_only || pcbddc->switch_static || pc->mat != pc->pmat) PetscCall(PCBDDCSetUseExactDirichlet(pc, PETSC_FALSE));
1218: /* Creates parallel work vectors used in presolve */
1219: if (!pcbddc->original_rhs) PetscCall(VecDuplicate(pcis->vec1_global, &pcbddc->original_rhs));
1220: if (!pcbddc->temp_solution) PetscCall(VecDuplicate(pcis->vec1_global, &pcbddc->temp_solution));
1222: pcbddc->temp_solution_used = PETSC_FALSE;
1223: if (x) {
1224: PetscCall(PetscObjectReference((PetscObject)x));
1225: used_vec = x;
1226: } else { /* it can only happen when calling PCBDDCMatFETIDPGetRHS */
1227: PetscCall(PetscObjectReference((PetscObject)pcbddc->temp_solution));
1228: used_vec = pcbddc->temp_solution;
1229: PetscCall(VecSet(used_vec, 0.0));
1230: pcbddc->temp_solution_used = PETSC_TRUE;
1231: PetscCall(VecCopy(rhs, pcbddc->original_rhs));
1232: save_rhs = PETSC_FALSE;
1233: pcbddc->eliminate_dirdofs = PETSC_TRUE;
1234: }
1236: /* hack into ksp data structure since PCPreSolve comes earlier than setting to zero the guess in src/ksp/ksp/interface/itfunc.c */
1237: if (ksp) {
1238: /* store the flag for the initial guess since it will be restored back during PCPostSolve_BDDC */
1239: PetscCall(KSPGetInitialGuessNonzero(ksp, &pcbddc->ksp_guess_nonzero));
1240: if (!pcbddc->ksp_guess_nonzero) PetscCall(VecSet(used_vec, 0.0));
1241: }
1243: pcbddc->rhs_change = PETSC_FALSE;
1244: /* Take into account zeroed rows -> change rhs and store solution removed */
1245: if (rhs && pcbddc->eliminate_dirdofs) {
1246: IS dirIS = NULL;
1248: /* DirichletBoundariesLocal may not be consistent among neighbours; gets a dirichlet dofs IS from graph (may be cached) */
1249: PetscCall(PCBDDCGraphGetDirichletDofs(pcbddc->mat_graph, &dirIS));
1250: if (dirIS) {
1251: Mat_IS *matis = (Mat_IS *)pc->pmat->data;
1252: PetscInt dirsize, i, *is_indices;
1253: PetscScalar *array_x;
1254: const PetscScalar *array_diagonal;
1256: PetscCall(MatGetDiagonal(pc->pmat, pcis->vec1_global));
1257: PetscCall(VecPointwiseDivide(pcis->vec1_global, rhs, pcis->vec1_global));
1258: PetscCall(VecScatterBegin(matis->rctx, pcis->vec1_global, pcis->vec2_N, INSERT_VALUES, SCATTER_FORWARD));
1259: PetscCall(VecScatterEnd(matis->rctx, pcis->vec1_global, pcis->vec2_N, INSERT_VALUES, SCATTER_FORWARD));
1260: PetscCall(VecScatterBegin(matis->rctx, used_vec, pcis->vec1_N, INSERT_VALUES, SCATTER_FORWARD));
1261: PetscCall(VecScatterEnd(matis->rctx, used_vec, pcis->vec1_N, INSERT_VALUES, SCATTER_FORWARD));
1262: PetscCall(ISGetLocalSize(dirIS, &dirsize));
1263: PetscCall(VecGetArray(pcis->vec1_N, &array_x));
1264: PetscCall(VecGetArrayRead(pcis->vec2_N, &array_diagonal));
1265: PetscCall(ISGetIndices(dirIS, (const PetscInt **)&is_indices));
1266: for (i = 0; i < dirsize; i++) array_x[is_indices[i]] = array_diagonal[is_indices[i]];
1267: PetscCall(ISRestoreIndices(dirIS, (const PetscInt **)&is_indices));
1268: PetscCall(VecRestoreArrayRead(pcis->vec2_N, &array_diagonal));
1269: PetscCall(VecRestoreArray(pcis->vec1_N, &array_x));
1270: PetscCall(VecScatterBegin(matis->rctx, pcis->vec1_N, used_vec, INSERT_VALUES, SCATTER_REVERSE));
1271: PetscCall(VecScatterEnd(matis->rctx, pcis->vec1_N, used_vec, INSERT_VALUES, SCATTER_REVERSE));
1272: pcbddc->rhs_change = PETSC_TRUE;
1273: PetscCall(ISDestroy(&dirIS));
1274: }
1275: }
1277: /* remove the computed solution or the initial guess from the rhs */
1278: if (pcbddc->rhs_change || (ksp && pcbddc->ksp_guess_nonzero)) {
1279: /* save the original rhs */
1280: if (save_rhs) {
1281: PetscCall(VecSwap(rhs, pcbddc->original_rhs));
1282: save_rhs = PETSC_FALSE;
1283: }
1284: pcbddc->rhs_change = PETSC_TRUE;
1285: PetscCall(VecScale(used_vec, -1.0));
1286: PetscCall(MatMultAdd(pc->mat, used_vec, pcbddc->original_rhs, rhs));
1287: PetscCall(VecScale(used_vec, -1.0));
1288: PetscCall(VecCopy(used_vec, pcbddc->temp_solution));
1289: pcbddc->temp_solution_used = PETSC_TRUE;
1290: if (ksp) PetscCall(KSPSetInitialGuessNonzero(ksp, PETSC_FALSE));
1291: }
1292: PetscCall(VecDestroy(&used_vec));
1294: /* compute initial vector in benign space if needed
1295: and remove non-benign solution from the rhs */
1296: benign_correction_computed = PETSC_FALSE;
1297: if (rhs && pcbddc->benign_compute_correction && (pcbddc->benign_have_null || pcbddc->benign_apply_coarse_only)) {
1298: /* compute u^*_h using ideas similar to those in Xuemin Tu's PhD thesis (see Section 4.8.1)
1299: Recursively apply BDDC in the multilevel case */
1300: if (!pcbddc->benign_vec) PetscCall(VecDuplicate(rhs, &pcbddc->benign_vec));
1301: /* keep applying coarse solver unless we no longer have benign subdomains */
1302: pcbddc->benign_apply_coarse_only = pcbddc->benign_have_null ? PETSC_TRUE : PETSC_FALSE;
1303: if (!pcbddc->benign_skip_correction) {
1304: PetscCall(PCApply_BDDC(pc, rhs, pcbddc->benign_vec));
1305: benign_correction_computed = PETSC_TRUE;
1306: if (pcbddc->temp_solution_used) PetscCall(VecAXPY(pcbddc->temp_solution, 1.0, pcbddc->benign_vec));
1307: PetscCall(VecScale(pcbddc->benign_vec, -1.0));
1308: /* store the original rhs if not done earlier */
1309: if (save_rhs) PetscCall(VecSwap(rhs, pcbddc->original_rhs));
1310: if (pcbddc->rhs_change) {
1311: PetscCall(MatMultAdd(pc->mat, pcbddc->benign_vec, rhs, rhs));
1312: } else {
1313: PetscCall(MatMultAdd(pc->mat, pcbddc->benign_vec, pcbddc->original_rhs, rhs));
1314: }
1315: pcbddc->rhs_change = PETSC_TRUE;
1316: }
1317: pcbddc->benign_apply_coarse_only = PETSC_FALSE;
1318: } else {
1319: PetscCall(VecDestroy(&pcbddc->benign_vec));
1320: }
1322: /* dbg output */
1323: if (pcbddc->dbg_flag && benign_correction_computed) {
1324: Vec v;
1326: PetscCall(VecDuplicate(pcis->vec1_global, &v));
1327: if (pcbddc->ChangeOfBasisMatrix) {
1328: PetscCall(MatMultTranspose(pcbddc->ChangeOfBasisMatrix, rhs, v));
1329: } else {
1330: PetscCall(VecCopy(rhs, v));
1331: }
1332: PetscCall(PCBDDCBenignGetOrSetP0(pc, v, PETSC_TRUE));
1333: PetscCall(PetscViewerASCIIPrintf(pcbddc->dbg_viewer, "LEVEL %" PetscInt_FMT ": is the correction benign?\n", pcbddc->current_level));
1334: PetscCall(PetscScalarView(pcbddc->benign_n, pcbddc->benign_p0, pcbddc->dbg_viewer));
1335: PetscCall(PetscViewerFlush(pcbddc->dbg_viewer));
1336: PetscCall(VecDestroy(&v));
1337: }
1339: /* set initial guess if using PCG */
1340: pcbddc->exact_dirichlet_trick_app = PETSC_FALSE;
1341: if (x && pcbddc->use_exact_dirichlet_trick) {
1342: PetscCall(VecSet(x, 0.0));
1343: if (pcbddc->ChangeOfBasisMatrix && pcbddc->change_interior) {
1344: if (benign_correction_computed) { /* we have already saved the changed rhs */
1345: PetscCall(VecLockReadPop(pcis->vec1_global));
1346: } else {
1347: PetscCall(MatMultTranspose(pcbddc->ChangeOfBasisMatrix, rhs, pcis->vec1_global));
1348: }
1349: PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec1_global, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
1350: PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec1_global, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
1351: } else {
1352: PetscCall(VecScatterBegin(pcis->global_to_D, rhs, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
1353: PetscCall(VecScatterEnd(pcis->global_to_D, rhs, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
1354: }
1355: PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
1356: PetscCall(KSPSolve(pcbddc->ksp_D, pcis->vec1_D, pcis->vec2_D));
1357: PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
1358: PetscCall(KSPCheckSolve(pcbddc->ksp_D, pc, pcis->vec2_D));
1359: if (pcbddc->ChangeOfBasisMatrix && pcbddc->change_interior) {
1360: PetscCall(VecSet(pcis->vec1_global, 0.));
1361: PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec2_D, pcis->vec1_global, INSERT_VALUES, SCATTER_REVERSE));
1362: PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec2_D, pcis->vec1_global, INSERT_VALUES, SCATTER_REVERSE));
1363: PetscCall(MatMult(pcbddc->ChangeOfBasisMatrix, pcis->vec1_global, x));
1364: } else {
1365: PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec2_D, x, INSERT_VALUES, SCATTER_REVERSE));
1366: PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec2_D, x, INSERT_VALUES, SCATTER_REVERSE));
1367: }
1368: if (ksp) PetscCall(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE));
1369: pcbddc->exact_dirichlet_trick_app = PETSC_TRUE;
1370: } else if (pcbddc->ChangeOfBasisMatrix && pcbddc->change_interior && benign_correction_computed && pcbddc->use_exact_dirichlet_trick) {
1371: PetscCall(VecLockReadPop(pcis->vec1_global));
1372: }
1373: PetscFunctionReturn(PETSC_SUCCESS);
1374: }
1376: static PetscErrorCode PCPostSolve_BDDC(PC pc, KSP ksp, Vec rhs, Vec x)
1377: {
1378: PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
1380: PetscFunctionBegin;
1381: /* add solution removed in presolve */
1382: if (x && pcbddc->rhs_change) {
1383: if (pcbddc->temp_solution_used) PetscCall(VecAXPY(x, 1.0, pcbddc->temp_solution));
1384: else if (pcbddc->benign_compute_correction && pcbddc->benign_vec) PetscCall(VecAXPY(x, -1.0, pcbddc->benign_vec));
1385: /* restore to original state (not for FETI-DP) */
1386: if (ksp) pcbddc->temp_solution_used = PETSC_FALSE;
1387: }
1389: /* restore rhs to its original state (not needed for FETI-DP) */
1390: if (rhs && pcbddc->rhs_change) {
1391: PetscCall(VecSwap(rhs, pcbddc->original_rhs));
1392: pcbddc->rhs_change = PETSC_FALSE;
1393: }
1394: /* restore ksp guess state */
1395: if (ksp) {
1396: PetscCall(KSPSetInitialGuessNonzero(ksp, pcbddc->ksp_guess_nonzero));
1397: /* reset flag for exact dirichlet trick */
1398: pcbddc->exact_dirichlet_trick_app = PETSC_FALSE;
1399: }
1400: PetscFunctionReturn(PETSC_SUCCESS);
1401: }
/*
  PCSetUp_BDDC - builds (or updates) all ingredients of the BDDC preconditioner:
  local topology information (vertices/edges/faces), primal constraints and the
  optional change of basis, the benign-trick transformations for saddle-point
  problems, the local Dirichlet/Neumann solvers, the scaling operators, and the
  coarse problem (via PCBDDCSetUpSolvers).

  Requires pc->pmat to be of type MATIS and square; errors out otherwise.
*/
static PetscErrorCode PCSetUp_BDDC(PC pc)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
  PCBDDCSubSchurs sub_schurs;
  Mat_IS *matis;
  MatNullSpace nearnullspace;
  Mat lA;
  IS lP, zerodiag = NULL;
  PetscInt nrows, ncols;
  PetscMPIInt size;
  PetscBool computesubschurs;
  PetscBool computeconstraintsmatrix;
  PetscBool new_nearnullspace_provided, ismatis, rl;
  PetscBool isset, issym, isspd;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATIS, &ismatis));
  PetscCheck(ismatis, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "PCBDDC preconditioner requires matrix of type MATIS");
  PetscCall(MatGetSize(pc->pmat, &nrows, &ncols));
  PetscCheck(nrows == ncols, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "PCBDDC preconditioner requires a square matrix for constructing the preconditioner");
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));

  matis = (Mat_IS *)pc->pmat->data;
  /* the following lines of code should be replaced by a better logic between PCIS, PCNN, PCBDDC and other future nonoverlapping preconditioners */
  /* For BDDC we need to define a local "Neumann" problem different to that defined in PCISSetUp
     Also, BDDC builds its own KSP for the Dirichlet problem */
  rl = pcbddc->recompute_topography;
  if (!pc->setupcalled || pc->flag == DIFFERENT_NONZERO_PATTERN) rl = PETSC_TRUE;
  /* all ranks must agree on whether topology is recomputed: the collective setup calls below depend on it */
  PetscCallMPI(MPIU_Allreduce(&rl, &pcbddc->recompute_topography, 1, MPI_C_BOOL, MPI_LOR, PetscObjectComm((PetscObject)pc)));
  if (pcbddc->recompute_topography) {
    pcbddc->graphanalyzed = PETSC_FALSE;
    computeconstraintsmatrix = PETSC_TRUE;
  } else {
    computeconstraintsmatrix = PETSC_FALSE;
  }

  /* check parameters' compatibility: deluxe scaling and adaptive selection make sense only
     with more than one subdomain (or with repeated entries allowed in the MATIS) */
  if (!pcbddc->use_deluxe_scaling) pcbddc->deluxe_zerorows = PETSC_FALSE;
  pcbddc->adaptive_selection = (PetscBool)(pcbddc->adaptive_threshold[0] != 0.0 || pcbddc->adaptive_threshold[1] != 0.0);
  pcbddc->use_deluxe_scaling = (PetscBool)(pcbddc->use_deluxe_scaling && (size > 1 || matis->allow_repeated));
  pcbddc->adaptive_selection = (PetscBool)(pcbddc->adaptive_selection && (size > 1 || matis->allow_repeated));
  pcbddc->adaptive_userdefined = (PetscBool)(pcbddc->adaptive_selection && pcbddc->adaptive_userdefined);
  if (pcbddc->adaptive_selection) pcbddc->use_faces = PETSC_TRUE;

  computesubschurs = (PetscBool)(pcbddc->adaptive_selection || pcbddc->use_deluxe_scaling);

  /* activate all connected components if the netflux has been requested */
  if (pcbddc->compute_nonetflux) {
    pcbddc->use_vertices = PETSC_TRUE;
    pcbddc->use_edges = PETSC_TRUE;
    pcbddc->use_faces = PETSC_TRUE;
  }

  /* Get stdout for dbg */
  if (pcbddc->dbg_flag) {
    if (!pcbddc->dbg_viewer) pcbddc->dbg_viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)pc));
    PetscCall(PetscViewerASCIIPushSynchronized(pcbddc->dbg_viewer));
    PetscCall(PetscViewerASCIIAddTab(pcbddc->dbg_viewer, 2 * pcbddc->current_level));
  }

  /* process topology information */
  PetscCall(PetscLogEventBegin(PC_BDDC_Topology[pcbddc->current_level], pc, 0, 0, 0));
  if (pcbddc->recompute_topography) {
    PetscCall(PCBDDCComputeLocalTopologyInfo(pc));
    if (pcbddc->discretegradient) PetscCall(PCBDDCNedelecSupport(pc));
  }
  if (pcbddc->corner_selected) pcbddc->use_vertices = PETSC_TRUE;

  /* change basis if requested by the user */
  if (pcbddc->user_ChangeOfBasisMatrix) {
    /* use_change_of_basis flag is used to automatically compute a change of basis from constraints */
    pcbddc->use_change_of_basis = PETSC_FALSE;
    PetscCall(PCBDDCComputeLocalMatrix(pc, pcbddc->user_ChangeOfBasisMatrix));
  } else {
    /* no user-provided basis: work directly on (a reference to) the local MATIS matrix */
    PetscCall(MatDestroy(&pcbddc->local_mat));
    PetscCall(PetscObjectReference((PetscObject)matis->A));
    pcbddc->local_mat = matis->A;
  }

  /*
     Compute change of basis on local pressures (aka zerodiag dofs) with the benign trick
     This should come earlier than PCISSetUp for extracting the correct subdomain matrices
  */
  PetscCall(PCBDDCBenignShellMat(pc, PETSC_TRUE));
  if (pcbddc->benign_saddle_point) {
    PC_IS *pcis = (PC_IS *)pc->data;

    if (pcbddc->user_ChangeOfBasisMatrix || pcbddc->use_change_of_basis || !computesubschurs) pcbddc->benign_change_explicit = PETSC_TRUE;
    /* detect local saddle point and change the basis in pcbddc->local_mat */
    PetscCall(PCBDDCBenignDetectSaddlePoint(pc, (PetscBool)(!pcbddc->recompute_topography), &zerodiag));
    /* pop B0 mat from local mat */
    PetscCall(PCBDDCBenignPopOrPushB0(pc, PETSC_TRUE));
    /* give pcis a hint to not reuse submatrices during PCISCreate */
    if (pc->flag == SAME_NONZERO_PATTERN && pcis->reusesubmatrices == PETSC_TRUE) {
      if (pcbddc->benign_n && (pcbddc->benign_change_explicit || pcbddc->dbg_flag)) {
        pcis->reusesubmatrices = PETSC_FALSE;
      } else {
        pcis->reusesubmatrices = PETSC_TRUE;
      }
    } else {
      pcis->reusesubmatrices = PETSC_FALSE;
    }
  }

  /* propagate relevant information: carry over symmetry/SPD flags known on matis->A */
  PetscCall(MatIsSymmetricKnown(matis->A, &isset, &issym));
  if (isset) PetscCall(MatSetOption(pcbddc->local_mat, MAT_SYMMETRIC, issym));
  PetscCall(MatIsSPDKnown(matis->A, &isset, &isspd));
  if (isset) PetscCall(MatSetOption(pcbddc->local_mat, MAT_SPD, isspd));

  /* Set up all the "iterative substructuring" common block without computing solvers */
  {
    Mat temp_mat;

    /* temporarily substitute matis->A with pcbddc->local_mat (possibly already in the
       changed basis) so PCISSetUp extracts its submatrices from the right operator;
       the swap is undone immediately afterwards */
    temp_mat = matis->A;
    matis->A = pcbddc->local_mat;
    PetscCall(PCISSetUp(pc, PETSC_TRUE, PETSC_FALSE));
    pcbddc->local_mat = matis->A;
    matis->A = temp_mat;
  }

  /* Analyze interface */
  if (!pcbddc->graphanalyzed) {
    PetscCall(PCBDDCAnalyzeInterface(pc));
    computeconstraintsmatrix = PETSC_TRUE;
    PetscCheck(!(pcbddc->adaptive_selection && !pcbddc->use_deluxe_scaling && !pcbddc->mat_graph->twodim), PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot compute the adaptive primal space for a problem with 3D edges without deluxe scaling");
    if (pcbddc->compute_nonetflux) {
      MatNullSpace nnfnnsp;

      PetscCheck(pcbddc->divudotp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Missing divudotp operator");
      PetscCall(PCBDDCComputeNoNetFlux(pc->pmat, pcbddc->divudotp, pcbddc->divudotp_trans, pcbddc->divudotp_vl2l, pcbddc->mat_graph, &nnfnnsp));
      /* TODO what if a nearnullspace is already attached? */
      if (nnfnnsp) {
        PetscCall(MatSetNearNullSpace(pc->pmat, nnfnnsp));
        PetscCall(MatNullSpaceDestroy(&nnfnnsp));
      }
    }
  }
  PetscCall(PetscLogEventEnd(PC_BDDC_Topology[pcbddc->current_level], pc, 0, 0, 0));

  /* check existence of a divergence free extension, i.e.
     b(v_I,p_0) = 0 for all v_I (raise error if not).
     Also, check that PCBDDCBenignGetOrSetP0 works */
  if (pcbddc->benign_saddle_point && pcbddc->dbg_flag > 1) PetscCall(PCBDDCBenignCheck(pc, zerodiag));
  PetscCall(ISDestroy(&zerodiag));

  /* Setup local dirichlet solver ksp_D and sub_schurs solvers */
  if (computesubschurs && pcbddc->recompute_topography) PetscCall(PCBDDCInitSubSchurs(pc));
  /* SetUp Scaling operator (scaling matrices could be needed in SubSchursSetUp)*/
  if (!pcbddc->use_deluxe_scaling) PetscCall(PCBDDCScalingSetUp(pc));

  /* finish setup solvers and do adaptive selection of constraints;
     the order of SetUpSubSchurs vs SetUpLocalSolvers depends on whether the
     Schur complements are built explicitly */
  sub_schurs = pcbddc->sub_schurs;
  if (sub_schurs && sub_schurs->schur_explicit) {
    if (computesubschurs) PetscCall(PCBDDCSetUpSubSchurs(pc));
    PetscCall(PCBDDCSetUpLocalSolvers(pc, PETSC_TRUE, PETSC_FALSE));
  } else {
    PetscCall(PCBDDCSetUpLocalSolvers(pc, PETSC_TRUE, PETSC_FALSE));
    if (computesubschurs) PetscCall(PCBDDCSetUpSubSchurs(pc));
  }
  if (pcbddc->adaptive_selection) {
    PetscCall(PCBDDCAdaptiveSelection(pc));
    computeconstraintsmatrix = PETSC_TRUE;
  }

  /* infer if NullSpace object attached to Mat via MatSetNearNullSpace has changed */
  new_nearnullspace_provided = PETSC_FALSE;
  PetscCall(MatGetNearNullSpace(pc->pmat, &nearnullspace));
  if (pcbddc->onearnullspace) { /* already used nearnullspace */
    if (!nearnullspace) { /* near null space attached to mat has been destroyed */
      new_nearnullspace_provided = PETSC_TRUE;
    } else {
      /* determine if the two nullspaces are different (should be lightweight) */
      if (nearnullspace != pcbddc->onearnullspace) {
        new_nearnullspace_provided = PETSC_TRUE;
      } else { /* maybe the user has changed the content of the nearnullspace so check vectors ObjectStateId */
        PetscInt i;
        const Vec *nearnullvecs;
        PetscObjectState state;
        PetscInt nnsp_size;
        PetscCall(MatNullSpaceGetVecs(nearnullspace, NULL, &nnsp_size, &nearnullvecs));
        for (i = 0; i < nnsp_size; i++) {
          PetscCall(PetscObjectStateGet((PetscObject)nearnullvecs[i], &state));
          if (pcbddc->onearnullvecs_state[i] != state) {
            new_nearnullspace_provided = PETSC_TRUE;
            break;
          }
        }
      }
    }
  } else {
    if (!nearnullspace) { /* both nearnullspaces are null */
      new_nearnullspace_provided = PETSC_FALSE;
    } else { /* nearnullspace attached later */
      new_nearnullspace_provided = PETSC_TRUE;
    }
  }

  /* Setup constraints and related work vectors */
  /* reset primal space flags */
  PetscCall(PetscLogEventBegin(PC_BDDC_LocalWork[pcbddc->current_level], pc, 0, 0, 0));
  pcbddc->new_primal_space = PETSC_FALSE;
  pcbddc->new_primal_space_local = PETSC_FALSE;
  if (computeconstraintsmatrix || new_nearnullspace_provided) {
    /* It also sets the primal space flags */
    PetscCall(PCBDDCConstraintsSetUp(pc));
  }
  /* Allocate needed local vectors (which depends on quantities defined during ConstraintsSetUp) */
  PetscCall(PCBDDCSetUpLocalWorkVectors(pc));

  if (pcbddc->use_change_of_basis) {
    PC_IS *pcis = (PC_IS *)pc->data;

    PetscCall(PCBDDCComputeLocalMatrix(pc, pcbddc->ChangeOfBasisMatrix));
    if (pcbddc->benign_change) {
      PetscCall(MatDestroy(&pcbddc->benign_B0));
      /* pop B0 from pcbddc->local_mat */
      PetscCall(PCBDDCBenignPopOrPushB0(pc, PETSC_TRUE));
    }
    /* get submatrices: re-extract A_BB, A_IB, A_BI in the new basis */
    PetscCall(MatDestroy(&pcis->A_IB));
    PetscCall(MatDestroy(&pcis->A_BI));
    PetscCall(MatDestroy(&pcis->A_BB));
    PetscCall(MatCreateSubMatrix(pcbddc->local_mat, pcis->is_B_local, pcis->is_B_local, MAT_INITIAL_MATRIX, &pcis->A_BB));
    PetscCall(MatCreateSubMatrix(pcbddc->local_mat, pcis->is_I_local, pcis->is_B_local, MAT_INITIAL_MATRIX, &pcis->A_IB));
    PetscCall(MatCreateSubMatrix(pcbddc->local_mat, pcis->is_B_local, pcis->is_I_local, MAT_INITIAL_MATRIX, &pcis->A_BI));
    /* set flag in pcis to not reuse submatrices during PCISCreate */
    pcis->reusesubmatrices = PETSC_FALSE;
  } else if (!pcbddc->user_ChangeOfBasisMatrix && !pcbddc->benign_change) {
    PetscCall(MatDestroy(&pcbddc->local_mat));
    PetscCall(PetscObjectReference((PetscObject)matis->A));
    pcbddc->local_mat = matis->A;
  }

  /* interface pressure block row for B_C */
  /* NOTE(review): lP and lA are looked up by name on the PC; presumably composed by the
     KSPFETIDP setup code — confirm against the FETI-DP sources */
  PetscCall(PetscObjectQuery((PetscObject)pc, "__KSPFETIDP_lP", (PetscObject *)&lP));
  PetscCall(PetscObjectQuery((PetscObject)pc, "__KSPFETIDP_lA", (PetscObject *)&lA));
  if (lA && lP) {
    PC_IS *pcis = (PC_IS *)pc->data;
    Mat B_BI, B_BB, Bt_BI, Bt_BB;
    PetscBool issym;

    PetscCall(MatIsSymmetric(lA, PETSC_SMALL, &issym));
    if (issym) {
      PetscCall(MatCreateSubMatrix(lA, lP, pcis->is_I_local, MAT_INITIAL_MATRIX, &B_BI));
      PetscCall(MatCreateSubMatrix(lA, lP, pcis->is_B_local, MAT_INITIAL_MATRIX, &B_BB));
      /* symmetric case: the transposed blocks can be represented implicitly */
      PetscCall(MatCreateTranspose(B_BI, &Bt_BI));
      PetscCall(MatCreateTranspose(B_BB, &Bt_BB));
    } else {
      PetscCall(MatCreateSubMatrix(lA, lP, pcis->is_I_local, MAT_INITIAL_MATRIX, &B_BI));
      PetscCall(MatCreateSubMatrix(lA, lP, pcis->is_B_local, MAT_INITIAL_MATRIX, &B_BB));
      PetscCall(MatCreateSubMatrix(lA, pcis->is_I_local, lP, MAT_INITIAL_MATRIX, &Bt_BI));
      PetscCall(MatCreateSubMatrix(lA, pcis->is_B_local, lP, MAT_INITIAL_MATRIX, &Bt_BB));
    }
    PetscCall(PetscObjectCompose((PetscObject)pc, "__KSPFETIDP_B_BI", (PetscObject)B_BI));
    PetscCall(PetscObjectCompose((PetscObject)pc, "__KSPFETIDP_B_BB", (PetscObject)B_BB));
    PetscCall(PetscObjectCompose((PetscObject)pc, "__KSPFETIDP_Bt_BI", (PetscObject)Bt_BI));
    PetscCall(PetscObjectCompose((PetscObject)pc, "__KSPFETIDP_Bt_BB", (PetscObject)Bt_BB));
    /* PetscObjectCompose holds its own references, so local ones can be dropped */
    PetscCall(MatDestroy(&B_BI));
    PetscCall(MatDestroy(&B_BB));
    PetscCall(MatDestroy(&Bt_BI));
    PetscCall(MatDestroy(&Bt_BB));
  }
  PetscCall(PetscLogEventEnd(PC_BDDC_LocalWork[pcbddc->current_level], pc, 0, 0, 0));

  /* SetUp coarse and local Neumann solvers */
  PetscCall(PCBDDCSetUpSolvers(pc));
  /* SetUp Scaling operator */
  if (pcbddc->use_deluxe_scaling) PetscCall(PCBDDCScalingSetUp(pc));

  /* mark topography as done */
  pcbddc->recompute_topography = PETSC_FALSE;

  /* wrap pcis->A_IB and pcis->A_BI if we did not change explicitly the variables on the pressures */
  PetscCall(PCBDDCBenignShellMat(pc, PETSC_FALSE));

  if (pcbddc->dbg_flag) {
    PetscCall(PetscViewerASCIISubtractTab(pcbddc->dbg_viewer, 2 * pcbddc->current_level));
    PetscCall(PetscViewerASCIIPopSynchronized(pcbddc->dbg_viewer));
  }

  { /* Dump customization (mirrors the -pc_bddc_load handling in PCSetFromOptions_BDDC) */
    PetscBool flg;
    char save[PETSC_MAX_PATH_LEN] = {'\0'};

    PetscCall(PetscOptionsGetString(NULL, ((PetscObject)pc)->prefix, "-pc_bddc_save", save, sizeof(save), &flg));
    if (flg) {
      size_t len;

      PetscCall(PetscStrlen(save, &len));
      PetscCall(PCBDDCLoadOrViewCustomization(pc, PETSC_FALSE, len ? save : NULL));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*
  PCApply_BDDC - applies the BDDC preconditioner: z = M^{-1} r.

  The application consists of (optionally) a first local Dirichlet solve to
  assemble the interface right-hand side, the interface preconditioner
  (PCBDDCApplyInterfacePreconditioner), the scaled extension back to the
  global space, and a second Dirichlet solve to reconstruct the interior
  values. If a change of basis is active, r is transformed on entry and z is
  mapped back on exit.
*/
static PetscErrorCode PCApply_BDDC(PC pc, Vec r, Vec z)
{
  PC_IS *pcis = (PC_IS *)pc->data;
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
  Mat lA = NULL;
  PetscInt n_B = pcis->n_B, n_D = pcis->n - n_B;
  const PetscScalar one = 1.0;
  const PetscScalar m_one = -1.0;
  const PetscScalar zero = 0.0;
  /* This code is similar to that provided in nn.c for PCNN
     NN interface preconditioner changed to BDDC
     Added support for M_3 preconditioner in the reference article (code is active if pcbddc->switch_static == PETSC_TRUE) */

  PetscFunctionBegin;
  PetscCall(PetscCitationsRegister(citation, &cited));
  if (pcbddc->switch_static) PetscCall(MatISGetLocalMat(pc->useAmat ? pc->mat : pc->pmat, &lA));

  if (pcbddc->ChangeOfBasisMatrix) {
    Vec swap;

    PetscCall(MatMultTranspose(pcbddc->ChangeOfBasisMatrix, r, pcbddc->work_change));
    /* pointer swap: from here on r refers to the transformed rhs, while
       pcbddc->work_change holds the caller's vector (undone at the end of this function) */
    swap = pcbddc->work_change;
    pcbddc->work_change = r;
    r = swap;
    /* save rhs so that we don't need to apply the change of basis for the exact dirichlet trick in PreSolve */
    if (pcbddc->benign_apply_coarse_only && pcbddc->use_exact_dirichlet_trick && pcbddc->change_interior) {
      PetscCall(VecCopy(r, pcis->vec1_global));
      PetscCall(VecLockReadPush(pcis->vec1_global));
    }
  }
  if (pcbddc->benign_have_null) { /* get p0 from r */
    PetscCall(PCBDDCBenignGetOrSetP0(pc, r, PETSC_TRUE));
  }
  if (pcbddc->interface_extension == PC_BDDC_INTERFACE_EXT_DIRICHLET && !pcbddc->exact_dirichlet_trick_app && !pcbddc->benign_apply_coarse_only) {
    PetscCall(VecCopy(r, z));
    /* First Dirichlet solve */
    PetscCall(VecScatterBegin(pcis->global_to_D, r, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(pcis->global_to_D, r, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
    /*
       Assembling right-hand side for BDDC operator
       - pcis->vec1_D for the Dirichlet part (if needed, i.e. pcbddc->switch_static == PETSC_TRUE)
       - pcis->vec1_B the interface part of the global vector z
    */
    PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
    if (n_D) {
      PetscCall(KSPSolve(pcbddc->ksp_D, pcis->vec1_D, pcis->vec2_D));
      PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
      PetscCall(KSPCheckSolve(pcbddc->ksp_D, pc, pcis->vec2_D));
      PetscCall(VecScale(pcis->vec2_D, m_one));
      if (pcbddc->switch_static) {
        /* M_3 variant: apply the full (possibly basis-changed) local operator lA */
        PetscCall(VecSet(pcis->vec1_N, 0.));
        PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec2_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec2_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
        if (!pcbddc->switch_static_change) PetscCall(MatMult(lA, pcis->vec1_N, pcis->vec2_N));
        else {
          PetscCall(MatMult(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N));
          PetscCall(MatMult(lA, pcis->vec2_N, pcis->vec1_N));
          PetscCall(MatMultTranspose(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N));
        }
        PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec2_N, pcis->vec1_D, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec2_N, pcis->vec1_D, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec2_N, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec2_N, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
      } else {
        PetscCall(MatMult(pcis->A_BI, pcis->vec2_D, pcis->vec1_B));
      }
    } else {
      /* no interior dofs on this subdomain: close the event opened above and zero the interface part */
      PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
      PetscCall(VecSet(pcis->vec1_B, zero));
    }
    PetscCall(VecScatterBegin(pcis->global_to_B, pcis->vec1_B, z, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_B, pcis->vec1_B, z, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(PCBDDCScalingRestriction(pc, z, pcis->vec1_B));
  } else {
    if (!pcbddc->benign_apply_coarse_only) PetscCall(PCBDDCScalingRestriction(pc, r, pcis->vec1_B));
  }
  if (pcbddc->interface_extension == PC_BDDC_INTERFACE_EXT_LUMP) {
    PetscCheck(pcbddc->switch_static, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "You forgot to pass -pc_bddc_switch_static");
    PetscCall(VecScatterBegin(pcis->global_to_D, r, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(pcis->global_to_D, r, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
  }

  /* Apply interface preconditioner
     input/output vecs: pcis->vec1_B and pcis->vec1_D */
  PetscCall(PCBDDCApplyInterfacePreconditioner(pc, PETSC_FALSE));

  /* Apply transpose of partition of unity operator */
  PetscCall(PCBDDCScalingExtension(pc, pcis->vec1_B, z));
  if (pcbddc->interface_extension == PC_BDDC_INTERFACE_EXT_LUMP) {
    /* lumped extension: interior values come straight from the interface preconditioner */
    PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec1_D, z, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec1_D, z, INSERT_VALUES, SCATTER_REVERSE));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  /* Second Dirichlet solve and assembling of output */
  PetscCall(VecScatterBegin(pcis->global_to_B, z, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->global_to_B, z, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
  if (n_B) {
    if (pcbddc->switch_static) {
      PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec1_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec1_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec1_B, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec1_B, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
      if (!pcbddc->switch_static_change) PetscCall(MatMult(lA, pcis->vec1_N, pcis->vec2_N));
      else {
        PetscCall(MatMult(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N));
        PetscCall(MatMult(lA, pcis->vec2_N, pcis->vec1_N));
        PetscCall(MatMultTranspose(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N));
      }
      PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec2_N, pcis->vec3_D, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec2_N, pcis->vec3_D, INSERT_VALUES, SCATTER_FORWARD));
    } else {
      PetscCall(MatMult(pcis->A_IB, pcis->vec1_B, pcis->vec3_D));
    }
  } else if (pcbddc->switch_static) { /* n_B is zero */
    if (!pcbddc->switch_static_change) PetscCall(MatMult(lA, pcis->vec1_D, pcis->vec3_D));
    else {
      PetscCall(MatMult(pcbddc->switch_static_change, pcis->vec1_D, pcis->vec1_N));
      PetscCall(MatMult(lA, pcis->vec1_N, pcis->vec2_N));
      PetscCall(MatMultTranspose(pcbddc->switch_static_change, pcis->vec2_N, pcis->vec3_D));
    }
  }
  PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
  PetscCall(KSPSolve(pcbddc->ksp_D, pcis->vec3_D, pcis->vec4_D));
  PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
  PetscCall(KSPCheckSolve(pcbddc->ksp_D, pc, pcis->vec4_D));

  if (!pcbddc->exact_dirichlet_trick_app && !pcbddc->benign_apply_coarse_only) {
    if (pcbddc->switch_static) PetscCall(VecAXPBYPCZ(pcis->vec2_D, m_one, one, m_one, pcis->vec4_D, pcis->vec1_D));
    else PetscCall(VecAXPBY(pcis->vec2_D, m_one, m_one, pcis->vec4_D));
    PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec2_D, z, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec2_D, z, INSERT_VALUES, SCATTER_REVERSE));
  } else {
    if (pcbddc->switch_static) PetscCall(VecAXPBY(pcis->vec4_D, one, m_one, pcis->vec1_D));
    else PetscCall(VecScale(pcis->vec4_D, m_one));
    PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec4_D, z, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec4_D, z, INSERT_VALUES, SCATTER_REVERSE));
  }
  if (pcbddc->benign_have_null) { /* set p0 (computed in PCBDDCApplyInterface) */
    if (pcbddc->benign_apply_coarse_only) PetscCall(PetscArrayzero(pcbddc->benign_p0, pcbddc->benign_n));
    PetscCall(PCBDDCBenignGetOrSetP0(pc, z, PETSC_FALSE));
  }

  if (pcbddc->ChangeOfBasisMatrix) {
    /* undo the entry pointer swap (r currently aliases the original work_change)
       and map the result back to the caller's basis */
    pcbddc->work_change = r;
    PetscCall(VecCopy(z, pcbddc->work_change));
    PetscCall(MatMult(pcbddc->ChangeOfBasisMatrix, pcbddc->work_change, z));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1849: static PetscErrorCode PCApplyTranspose_BDDC(PC pc, Vec r, Vec z)
1850: {
1851: PC_IS *pcis = (PC_IS *)pc->data;
1852: PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
1853: Mat lA = NULL;
1854: PetscInt n_B = pcis->n_B, n_D = pcis->n - n_B;
1855: const PetscScalar one = 1.0;
1856: const PetscScalar m_one = -1.0;
1857: const PetscScalar zero = 0.0;
1859: PetscFunctionBegin;
1860: PetscCall(PetscCitationsRegister(citation, &cited));
1861: if (pcbddc->switch_static) PetscCall(MatISGetLocalMat(pc->useAmat ? pc->mat : pc->pmat, &lA));
1862: if (pcbddc->ChangeOfBasisMatrix) {
1863: Vec swap;
1865: PetscCall(MatMultTranspose(pcbddc->ChangeOfBasisMatrix, r, pcbddc->work_change));
1866: swap = pcbddc->work_change;
1867: pcbddc->work_change = r;
1868: r = swap;
1869: /* save rhs so that we don't need to apply the change of basis for the exact dirichlet trick in PreSolve */
1870: if (pcbddc->benign_apply_coarse_only && pcbddc->exact_dirichlet_trick_app && pcbddc->change_interior) {
1871: PetscCall(VecCopy(r, pcis->vec1_global));
1872: PetscCall(VecLockReadPush(pcis->vec1_global));
1873: }
1874: }
1875: if (pcbddc->benign_have_null) { /* get p0 from r */
1876: PetscCall(PCBDDCBenignGetOrSetP0(pc, r, PETSC_TRUE));
1877: }
1878: if (!pcbddc->exact_dirichlet_trick_app && !pcbddc->benign_apply_coarse_only) {
1879: PetscCall(VecCopy(r, z));
1880: /* First Dirichlet solve */
1881: PetscCall(VecScatterBegin(pcis->global_to_D, r, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
1882: PetscCall(VecScatterEnd(pcis->global_to_D, r, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
1883: /*
1884: Assembling right-hand side for BDDC operator
1885: - pcis->vec1_D for the Dirichlet part (if needed, i.e. pcbddc->switch_static == PETSC_TRUE)
1886: - pcis->vec1_B the interface part of the global vector z
1887: */
1888: if (n_D) {
1889: PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
1890: PetscCall(KSPSolveTranspose(pcbddc->ksp_D, pcis->vec1_D, pcis->vec2_D));
1891: PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
1892: PetscCall(KSPCheckSolve(pcbddc->ksp_D, pc, pcis->vec2_D));
1893: PetscCall(VecScale(pcis->vec2_D, m_one));
1894: if (pcbddc->switch_static) {
1895: PetscCall(VecSet(pcis->vec1_N, 0.));
1896: PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec2_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
1897: PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec2_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
1898: if (!pcbddc->switch_static_change) {
1899: PetscCall(MatMultTranspose(lA, pcis->vec1_N, pcis->vec2_N));
1900: } else {
1901: PetscCall(MatMult(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N));
1902: PetscCall(MatMultTranspose(lA, pcis->vec2_N, pcis->vec1_N));
1903: PetscCall(MatMultTranspose(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N));
1904: }
1905: PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec2_N, pcis->vec1_D, ADD_VALUES, SCATTER_FORWARD));
1906: PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec2_N, pcis->vec1_D, ADD_VALUES, SCATTER_FORWARD));
1907: PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec2_N, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
1908: PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec2_N, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
1909: } else {
1910: PetscCall(MatMultTranspose(pcis->A_IB, pcis->vec2_D, pcis->vec1_B));
1911: }
1912: } else {
1913: PetscCall(VecSet(pcis->vec1_B, zero));
1914: }
1915: PetscCall(VecScatterBegin(pcis->global_to_B, pcis->vec1_B, z, ADD_VALUES, SCATTER_REVERSE));
1916: PetscCall(VecScatterEnd(pcis->global_to_B, pcis->vec1_B, z, ADD_VALUES, SCATTER_REVERSE));
1917: PetscCall(PCBDDCScalingRestriction(pc, z, pcis->vec1_B));
1918: } else {
1919: PetscCall(PCBDDCScalingRestriction(pc, r, pcis->vec1_B));
1920: }
1922: /* Apply interface preconditioner
1923: input/output vecs: pcis->vec1_B and pcis->vec1_D */
1924: PetscCall(PCBDDCApplyInterfacePreconditioner(pc, PETSC_TRUE));
1926: /* Apply transpose of partition of unity operator */
1927: PetscCall(PCBDDCScalingExtension(pc, pcis->vec1_B, z));
1929: /* Second Dirichlet solve and assembling of output */
1930: PetscCall(VecScatterBegin(pcis->global_to_B, z, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
1931: PetscCall(VecScatterEnd(pcis->global_to_B, z, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
1932: if (n_B) {
1933: if (pcbddc->switch_static) {
1934: PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec1_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
1935: PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec1_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
1936: PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec1_B, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
1937: PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec1_B, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
1938: if (!pcbddc->switch_static_change) {
1939: PetscCall(MatMultTranspose(lA, pcis->vec1_N, pcis->vec2_N));
1940: } else {
1941: PetscCall(MatMult(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N));
1942: PetscCall(MatMultTranspose(lA, pcis->vec2_N, pcis->vec1_N));
1943: PetscCall(MatMultTranspose(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N));
1944: }
1945: PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec2_N, pcis->vec3_D, INSERT_VALUES, SCATTER_FORWARD));
1946: PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec2_N, pcis->vec3_D, INSERT_VALUES, SCATTER_FORWARD));
1947: } else {
1948: PetscCall(MatMultTranspose(pcis->A_BI, pcis->vec1_B, pcis->vec3_D));
1949: }
1950: } else if (pcbddc->switch_static) { /* n_B is zero */
1951: if (!pcbddc->switch_static_change) {
1952: PetscCall(MatMultTranspose(lA, pcis->vec1_D, pcis->vec3_D));
1953: } else {
1954: PetscCall(MatMult(pcbddc->switch_static_change, pcis->vec1_D, pcis->vec1_N));
1955: PetscCall(MatMultTranspose(lA, pcis->vec1_N, pcis->vec2_N));
1956: PetscCall(MatMultTranspose(pcbddc->switch_static_change, pcis->vec2_N, pcis->vec3_D));
1957: }
1958: }
1959: PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
1960: PetscCall(KSPSolveTranspose(pcbddc->ksp_D, pcis->vec3_D, pcis->vec4_D));
1961: PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
1962: PetscCall(KSPCheckSolve(pcbddc->ksp_D, pc, pcis->vec4_D));
1963: if (!pcbddc->exact_dirichlet_trick_app && !pcbddc->benign_apply_coarse_only) {
1964: if (pcbddc->switch_static) PetscCall(VecAXPBYPCZ(pcis->vec2_D, m_one, one, m_one, pcis->vec4_D, pcis->vec1_D));
1965: else PetscCall(VecAXPBY(pcis->vec2_D, m_one, m_one, pcis->vec4_D));
1966: PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec2_D, z, INSERT_VALUES, SCATTER_REVERSE));
1967: PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec2_D, z, INSERT_VALUES, SCATTER_REVERSE));
1968: } else {
1969: if (pcbddc->switch_static) PetscCall(VecAXPBY(pcis->vec4_D, one, m_one, pcis->vec1_D));
1970: else PetscCall(VecScale(pcis->vec4_D, m_one));
1971: PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec4_D, z, INSERT_VALUES, SCATTER_REVERSE));
1972: PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec4_D, z, INSERT_VALUES, SCATTER_REVERSE));
1973: }
1974: if (pcbddc->benign_have_null) { /* set p0 (computed in PCBDDCApplyInterface) */
1975: PetscCall(PCBDDCBenignGetOrSetP0(pc, z, PETSC_FALSE));
1976: }
1977: if (pcbddc->ChangeOfBasisMatrix) {
1978: pcbddc->work_change = r;
1979: PetscCall(VecCopy(z, pcbddc->work_change));
1980: PetscCall(MatMult(pcbddc->ChangeOfBasisMatrix, pcbddc->work_change, z));
1981: }
1982: PetscFunctionReturn(PETSC_SUCCESS);
1983: }
/* Return the PC to its just-created state while keeping the inner KSP objects alive.

   Note: pc->data is a PC_BDDC whose leading member is a PC_IS, so pcbddc and
   pcis below alias the SAME allocation; the single PetscMemzero() wipes both
   views at once, and the assignments afterwards re-establish the non-zero
   defaults for both structures. */
static PetscErrorCode PCReset_BDDC(PC pc)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
  PC_IS *pcis = (PC_IS *)pc->data;
  KSP kspD, kspR, kspC;

  PetscFunctionBegin;
  /* free BDDC custom data */
  PetscCall(PCBDDCResetCustomization(pc));
  /* destroy objects related to topography */
  PetscCall(PCBDDCResetTopography(pc));
  /* destroy objects for scaling operator */
  PetscCall(PCBDDCScalingDestroy(pc));
  /* free solvers stuff */
  PetscCall(PCBDDCResetSolvers(pc));
  /* free global vectors needed in presolve */
  PetscCall(VecDestroy(&pcbddc->temp_solution));
  PetscCall(VecDestroy(&pcbddc->original_rhs));
  /* free data created by PCIS */
  PetscCall(PCISReset(pc));

  /* restore defaults */
  /* save the solver handles before zeroing the struct: they are destroyed
     only in PCDestroy_BDDC(), not on reset */
  kspD = pcbddc->ksp_D;
  kspR = pcbddc->ksp_R;
  kspC = pcbddc->coarse_ksp;
  PetscCall(PetscMemzero(pc->data, sizeof(*pcbddc)));
  /* non-zero defaults (everything else legitimately stays zeroed) */
  pcis->n_neigh = -1;
  pcis->scaling_factor = 1.0;
  pcis->reusesubmatrices = PETSC_TRUE;
  pcbddc->use_local_adj = PETSC_TRUE;
  pcbddc->use_vertices = PETSC_TRUE;
  pcbddc->use_edges = PETSC_TRUE;
  pcbddc->symmetric_primal = PETSC_TRUE;
  pcbddc->vertex_size = 1;
  pcbddc->recompute_topography = PETSC_TRUE;
  pcbddc->coarse_size = -1;
  pcbddc->use_exact_dirichlet_trick = PETSC_TRUE;
  pcbddc->coarsening_ratio = 8;
  pcbddc->coarse_eqs_per_proc = 1;
  pcbddc->benign_compute_correction = PETSC_TRUE;
  pcbddc->nedfield = -1;
  pcbddc->nedglobal = PETSC_TRUE;
  pcbddc->graphmaxcount = PETSC_INT_MAX;
  pcbddc->sub_schurs_layers = -1;
  /* put the saved solver handles back */
  pcbddc->ksp_D = kspD;
  pcbddc->ksp_R = kspR;
  pcbddc->coarse_ksp = kspC;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2035: static PetscErrorCode PCDestroy_BDDC(PC pc)
2036: {
2037: PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
2039: PetscFunctionBegin;
2040: PetscCall(PCReset_BDDC(pc));
2041: PetscCall(KSPDestroy(&pcbddc->ksp_D));
2042: PetscCall(KSPDestroy(&pcbddc->ksp_R));
2043: PetscCall(KSPDestroy(&pcbddc->coarse_ksp));
2044: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDiscreteGradient_C", NULL));
2045: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDivergenceMat_C", NULL));
2046: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetChangeOfBasisMat_C", NULL));
2047: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetPrimalVerticesLocalIS_C", NULL));
2048: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetPrimalVerticesIS_C", NULL));
2049: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetPrimalVerticesLocalIS_C", NULL));
2050: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetPrimalVerticesIS_C", NULL));
2051: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetCoarseningRatio_C", NULL));
2052: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetLevel_C", NULL));
2053: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetUseExactDirichlet_C", NULL));
2054: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetLevels_C", NULL));
2055: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDirichletBoundaries_C", NULL));
2056: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDirichletBoundariesLocal_C", NULL));
2057: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetNeumannBoundaries_C", NULL));
2058: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetNeumannBoundariesLocal_C", NULL));
2059: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetDirichletBoundaries_C", NULL));
2060: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetDirichletBoundariesLocal_C", NULL));
2061: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetNeumannBoundaries_C", NULL));
2062: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetNeumannBoundariesLocal_C", NULL));
2063: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDofsSplitting_C", NULL));
2064: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDofsSplittingLocal_C", NULL));
2065: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetLocalAdjacencyGraph_C", NULL));
2066: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCCreateFETIDPOperators_C", NULL));
2067: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCMatFETIDPGetRHS_C", NULL));
2068: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCMatFETIDPGetSolution_C", NULL));
2069: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCPreSolveChangeRHS_C", NULL));
2070: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
2071: PetscCall(PetscFree(pc->data));
2072: PetscFunctionReturn(PETSC_SUCCESS);
2073: }
2075: static PetscErrorCode PCSetCoordinates_BDDC(PC pc, PetscInt dim, PetscInt nloc, PetscReal *coords)
2076: {
2077: PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
2078: PCBDDCGraph mat_graph = pcbddc->mat_graph;
2080: PetscFunctionBegin;
2081: PetscCall(PetscFree(mat_graph->coords));
2082: PetscCall(PetscMalloc1(nloc * dim, &mat_graph->coords));
2083: PetscCall(PetscArraycpy(mat_graph->coords, coords, nloc * dim));
2084: mat_graph->cnloc = nloc;
2085: mat_graph->cdim = dim;
2086: mat_graph->cloc = PETSC_FALSE;
2087: /* flg setup */
2088: pcbddc->recompute_topography = PETSC_TRUE;
2089: pcbddc->corner_selected = PETSC_FALSE;
2090: PetscFunctionReturn(PETSC_SUCCESS);
2091: }
2093: static PetscErrorCode PCPreSolveChangeRHS_BDDC(PC pc, PetscBool *change)
2094: {
2095: PetscFunctionBegin;
2096: *change = PETSC_TRUE;
2097: PetscFunctionReturn(PETSC_SUCCESS);
2098: }
/* Build the FETI-DP flux right-hand side from the physical right-hand side.

   fetidp_mat is the shell matrix created by PCBDDCCreateFETIDPOperators();
   its context points back at the BDDC PC, whose data is viewed both as
   PC_IS (pcis) and PC_BDDC (pcbddc) — the two casts alias the same memory. */
static PetscErrorCode PCBDDCMatFETIDPGetRHS_BDDC(Mat fetidp_mat, Vec standard_rhs, Vec fetidp_flux_rhs)
{
  FETIDPMat_ctx mat_ctx;
  Vec work;
  PC_IS *pcis;
  PC_BDDC *pcbddc;

  PetscFunctionBegin;
  PetscCall(MatShellGetContext(fetidp_mat, &mat_ctx));
  pcis = (PC_IS *)mat_ctx->pc->data;
  pcbddc = (PC_BDDC *)mat_ctx->pc->data;

  PetscCall(VecSet(fetidp_flux_rhs, 0.0));
  /* copy rhs since we may change it during PCPreSolve_BDDC */
  if (!pcbddc->original_rhs) PetscCall(VecDuplicate(pcis->vec1_global, &pcbddc->original_rhs));
  if (mat_ctx->rhs_flip) {
    /* rhs_flip is applied pointwise; presumably a +-1 mask for the
       saddle-point case — note the compensating VecScale below when absent */
    PetscCall(VecPointwiseMult(pcbddc->original_rhs, standard_rhs, mat_ctx->rhs_flip));
  } else {
    PetscCall(VecCopy(standard_rhs, pcbddc->original_rhs));
  }
  if (mat_ctx->g2g_p) {
    /* interface pressure rhs */
    PetscCall(VecScatterBegin(mat_ctx->g2g_p, fetidp_flux_rhs, pcbddc->original_rhs, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(mat_ctx->g2g_p, fetidp_flux_rhs, pcbddc->original_rhs, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(mat_ctx->g2g_p, standard_rhs, fetidp_flux_rhs, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(mat_ctx->g2g_p, standard_rhs, fetidp_flux_rhs, INSERT_VALUES, SCATTER_FORWARD));
    if (!mat_ctx->rhs_flip) PetscCall(VecScale(fetidp_flux_rhs, -1.));
  }
  /*
     change of basis for physical rhs if needed
     It also changes the rhs in case of dirichlet boundaries
  */
  PetscCall(PCPreSolve_BDDC(mat_ctx->pc, NULL, pcbddc->original_rhs, NULL));
  if (pcbddc->ChangeOfBasisMatrix) {
    PetscCall(MatMultTranspose(pcbddc->ChangeOfBasisMatrix, pcbddc->original_rhs, pcbddc->work_change));
    work = pcbddc->work_change;
  } else {
    work = pcbddc->original_rhs;
  }
  /* store vectors for computation of fetidp final solution */
  PetscCall(VecScatterBegin(pcis->global_to_D, work, mat_ctx->temp_solution_D, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->global_to_D, work, mat_ctx->temp_solution_D, INSERT_VALUES, SCATTER_FORWARD));
  /* scale rhs since it should be unassembled */
  /* TODO use counter scaling? (also below) */
  PetscCall(VecScatterBegin(pcis->global_to_B, work, mat_ctx->temp_solution_B, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->global_to_B, work, mat_ctx->temp_solution_B, INSERT_VALUES, SCATTER_FORWARD));
  /* Apply partition of unity */
  PetscCall(VecPointwiseMult(mat_ctx->temp_solution_B, pcis->D, mat_ctx->temp_solution_B));
  /* PetscCall(PCBDDCScalingRestriction(mat_ctx->pc,work,mat_ctx->temp_solution_B)); */
  if (!pcbddc->switch_static) {
    /* compute partially subassembled Schur complement right-hand side */
    PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], mat_ctx->pc, 0, 0, 0));
    PetscCall(KSPSolve(pcbddc->ksp_D, mat_ctx->temp_solution_D, pcis->vec1_D));
    PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], mat_ctx->pc, 0, 0, 0));
    /* Cannot propagate up error in KSPSolve() because there is no access to the PC */
    PetscCall(MatMult(pcis->A_BI, pcis->vec1_D, pcis->vec1_B));
    PetscCall(VecAXPY(mat_ctx->temp_solution_B, -1.0, pcis->vec1_B));
    /* note: work (original_rhs or work_change) is clobbered here and reused
       as a scratch global vector for the subassembly scatter round-trip */
    PetscCall(VecSet(work, 0.0));
    PetscCall(VecScatterBegin(pcis->global_to_B, mat_ctx->temp_solution_B, work, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_B, mat_ctx->temp_solution_B, work, ADD_VALUES, SCATTER_REVERSE));
    /* PetscCall(PCBDDCScalingRestriction(mat_ctx->pc,work,mat_ctx->temp_solution_B)); */
    PetscCall(VecScatterBegin(pcis->global_to_B, work, mat_ctx->temp_solution_B, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(pcis->global_to_B, work, mat_ctx->temp_solution_B, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecPointwiseMult(mat_ctx->temp_solution_B, pcis->D, mat_ctx->temp_solution_B));
  }
  /* BDDC rhs */
  PetscCall(VecCopy(mat_ctx->temp_solution_B, pcis->vec1_B));
  if (pcbddc->switch_static) PetscCall(VecCopy(mat_ctx->temp_solution_D, pcis->vec1_D));
  /* apply BDDC */
  /* zero benign_p0 around the application so no stale p0 leaks in or out */
  PetscCall(PetscArrayzero(pcbddc->benign_p0, pcbddc->benign_n));
  PetscCall(PCBDDCApplyInterfacePreconditioner(mat_ctx->pc, PETSC_FALSE));
  PetscCall(PetscArrayzero(pcbddc->benign_p0, pcbddc->benign_n));

  /* Application of B_delta and assembling of rhs for fetidp fluxes */
  PetscCall(MatMult(mat_ctx->B_delta, pcis->vec1_B, mat_ctx->lambda_local));
  PetscCall(VecScatterBegin(mat_ctx->l2g_lambda, mat_ctx->lambda_local, fetidp_flux_rhs, ADD_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(mat_ctx->l2g_lambda, mat_ctx->lambda_local, fetidp_flux_rhs, ADD_VALUES, SCATTER_FORWARD));
  /* Add contribution to interface pressures */
  if (mat_ctx->l2g_p) {
    PetscCall(VecISSet(pcis->vec1_B, mat_ctx->lP_B, 0));
    PetscCall(MatMult(mat_ctx->B_BB, pcis->vec1_B, mat_ctx->vP));
    if (pcbddc->switch_static) {
      PetscCall(VecISSet(pcis->vec1_D, mat_ctx->lP_I, 0));
      PetscCall(MatMultAdd(mat_ctx->B_BI, pcis->vec1_D, mat_ctx->vP, mat_ctx->vP));
    }
    PetscCall(VecScatterBegin(mat_ctx->l2g_p, mat_ctx->vP, fetidp_flux_rhs, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(mat_ctx->l2g_p, mat_ctx->vP, fetidp_flux_rhs, ADD_VALUES, SCATTER_FORWARD));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
2191: /*@
2192: PCBDDCMatFETIDPGetRHS - Compute the right-hand side for a FETI-DP linear system using the physical right-hand side
2194: Collective
2196: Input Parameters:
2197: + fetidp_mat - the FETI-DP matrix object obtained by a call to `PCBDDCCreateFETIDPOperators()`
2198: - standard_rhs - the right-hand side of the original linear system
2200: Output Parameter:
2201: . fetidp_flux_rhs - the right-hand side for the FETI-DP linear system
2203: Level: developer
2205: Note:
2206: Most users should employ the `KSP` interface for linear solvers and create a solver of type `KSPFETIDP`.
2208: .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCCreateFETIDPOperators()`, `PCBDDCMatFETIDPGetSolution()`
2209: @*/
2210: PetscErrorCode PCBDDCMatFETIDPGetRHS(Mat fetidp_mat, Vec standard_rhs, Vec fetidp_flux_rhs)
2211: {
2212: FETIDPMat_ctx mat_ctx;
2214: PetscFunctionBegin;
2218: PetscCall(MatShellGetContext(fetidp_mat, &mat_ctx));
2219: PetscUseMethod(mat_ctx->pc, "PCBDDCMatFETIDPGetRHS_C", (Mat, Vec, Vec), (fetidp_mat, standard_rhs, fetidp_flux_rhs));
2220: PetscFunctionReturn(PETSC_SUCCESS);
2221: }
/* Recover the physical solution from the FETI-DP multiplier (and pressure)
   solution, using the temp_solution_B/temp_solution_D vectors cached by
   PCBDDCMatFETIDPGetRHS_BDDC(). pcis and pcbddc alias the same pc->data. */
static PetscErrorCode PCBDDCMatFETIDPGetSolution_BDDC(Mat fetidp_mat, Vec fetidp_flux_sol, Vec standard_sol)
{
  FETIDPMat_ctx mat_ctx;
  PC_IS *pcis;
  PC_BDDC *pcbddc;
  Vec work;

  PetscFunctionBegin;
  PetscCall(MatShellGetContext(fetidp_mat, &mat_ctx));
  pcis = (PC_IS *)mat_ctx->pc->data;
  pcbddc = (PC_BDDC *)mat_ctx->pc->data;

  /* apply B_delta^T */
  PetscCall(VecSet(pcis->vec1_B, 0.));
  PetscCall(VecScatterBegin(mat_ctx->l2g_lambda, fetidp_flux_sol, mat_ctx->lambda_local, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(mat_ctx->l2g_lambda, fetidp_flux_sol, mat_ctx->lambda_local, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(MatMultTranspose(mat_ctx->B_delta, mat_ctx->lambda_local, pcis->vec1_B));
  if (mat_ctx->l2g_p) {
    /* add the pressure contribution in the saddle-point case */
    PetscCall(VecScatterBegin(mat_ctx->l2g_p, fetidp_flux_sol, mat_ctx->vP, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(mat_ctx->l2g_p, fetidp_flux_sol, mat_ctx->vP, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(MatMultAdd(mat_ctx->Bt_BB, mat_ctx->vP, pcis->vec1_B, pcis->vec1_B));
  }

  /* compute rhs for BDDC application */
  PetscCall(VecAYPX(pcis->vec1_B, -1.0, mat_ctx->temp_solution_B));
  if (pcbddc->switch_static) {
    PetscCall(VecCopy(mat_ctx->temp_solution_D, pcis->vec1_D));
    if (mat_ctx->l2g_p) {
      PetscCall(VecScale(mat_ctx->vP, -1.));
      PetscCall(MatMultAdd(mat_ctx->Bt_BI, mat_ctx->vP, pcis->vec1_D, pcis->vec1_D));
    }
  }

  /* apply BDDC */
  PetscCall(PetscArrayzero(pcbddc->benign_p0, pcbddc->benign_n));
  PetscCall(PCBDDCApplyInterfacePreconditioner(mat_ctx->pc, PETSC_FALSE));

  /* put values into global vector */
  /* work in the change-of-basis space when a basis change is active */
  if (pcbddc->ChangeOfBasisMatrix) work = pcbddc->work_change;
  else work = standard_sol;
  PetscCall(VecScatterBegin(pcis->global_to_B, pcis->vec1_B, work, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(pcis->global_to_B, pcis->vec1_B, work, INSERT_VALUES, SCATTER_REVERSE));
  if (!pcbddc->switch_static) {
    /* compute values into the interior if solved for the partially subassembled Schur complement */
    PetscCall(MatMult(pcis->A_IB, pcis->vec1_B, pcis->vec1_D));
    PetscCall(VecAYPX(pcis->vec1_D, -1.0, mat_ctx->temp_solution_D));
    PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], mat_ctx->pc, 0, 0, 0));
    PetscCall(KSPSolve(pcbddc->ksp_D, pcis->vec1_D, pcis->vec1_D));
    PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], mat_ctx->pc, 0, 0, 0));
    /* Cannot propagate up error in KSPSolve() because there is no access to the PC */
  }

  PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec1_D, work, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec1_D, work, INSERT_VALUES, SCATTER_REVERSE));
  /* add p0 solution to final solution */
  PetscCall(PCBDDCBenignGetOrSetP0(mat_ctx->pc, work, PETSC_FALSE));
  /* map back to the original basis, then undo the presolve modifications */
  if (pcbddc->ChangeOfBasisMatrix) PetscCall(MatMult(pcbddc->ChangeOfBasisMatrix, work, standard_sol));
  PetscCall(PCPostSolve_BDDC(mat_ctx->pc, NULL, NULL, standard_sol));
  if (mat_ctx->g2g_p) {
    PetscCall(VecScatterBegin(mat_ctx->g2g_p, fetidp_flux_sol, standard_sol, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(mat_ctx->g2g_p, fetidp_flux_sol, standard_sol, INSERT_VALUES, SCATTER_REVERSE));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
2288: static PetscErrorCode PCView_BDDCIPC(PC pc, PetscViewer viewer)
2289: {
2290: BDDCIPC_ctx bddcipc_ctx;
2291: PetscBool isascii;
2293: PetscFunctionBegin;
2294: PetscCall(PCShellGetContext(pc, &bddcipc_ctx));
2295: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
2296: if (isascii) PetscCall(PetscViewerASCIIPrintf(viewer, "BDDC interface preconditioner\n"));
2297: PetscCall(PetscViewerASCIIPushTab(viewer));
2298: PetscCall(PCView(bddcipc_ctx->bddc, viewer));
2299: PetscCall(PetscViewerASCIIPopTab(viewer));
2300: PetscFunctionReturn(PETSC_SUCCESS);
2301: }
2303: static PetscErrorCode PCSetUp_BDDCIPC(PC pc)
2304: {
2305: BDDCIPC_ctx bddcipc_ctx;
2306: PetscBool isbddc;
2307: Vec vv;
2308: IS is;
2309: PC_IS *pcis;
2311: PetscFunctionBegin;
2312: PetscCall(PCShellGetContext(pc, &bddcipc_ctx));
2313: PetscCall(PetscObjectTypeCompare((PetscObject)bddcipc_ctx->bddc, PCBDDC, &isbddc));
2314: PetscCheck(isbddc, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Invalid type %s. Must be of type bddc", ((PetscObject)bddcipc_ctx->bddc)->type_name);
2315: PetscCall(PCSetUp(bddcipc_ctx->bddc));
2317: /* create interface scatter */
2318: pcis = (PC_IS *)bddcipc_ctx->bddc->data;
2319: PetscCall(VecScatterDestroy(&bddcipc_ctx->g2l));
2320: PetscCall(MatCreateVecs(pc->pmat, &vv, NULL));
2321: PetscCall(ISRenumber(pcis->is_B_global, NULL, NULL, &is));
2322: PetscCall(VecScatterCreate(vv, is, pcis->vec1_B, NULL, &bddcipc_ctx->g2l));
2323: PetscCall(ISDestroy(&is));
2324: PetscCall(VecDestroy(&vv));
2325: PetscFunctionReturn(PETSC_SUCCESS);
2326: }
2328: static PetscErrorCode PCApply_BDDCIPC(PC pc, Vec r, Vec x)
2329: {
2330: BDDCIPC_ctx bddcipc_ctx;
2331: PC_IS *pcis;
2332: VecScatter tmps;
2334: PetscFunctionBegin;
2335: PetscCall(PCShellGetContext(pc, &bddcipc_ctx));
2336: pcis = (PC_IS *)bddcipc_ctx->bddc->data;
2337: tmps = pcis->global_to_B;
2338: pcis->global_to_B = bddcipc_ctx->g2l;
2339: PetscCall(PCBDDCScalingRestriction(bddcipc_ctx->bddc, r, pcis->vec1_B));
2340: PetscCall(PCBDDCApplyInterfacePreconditioner(bddcipc_ctx->bddc, PETSC_FALSE));
2341: PetscCall(PCBDDCScalingExtension(bddcipc_ctx->bddc, pcis->vec1_B, x));
2342: pcis->global_to_B = tmps;
2343: PetscFunctionReturn(PETSC_SUCCESS);
2344: }
2346: static PetscErrorCode PCApplyTranspose_BDDCIPC(PC pc, Vec r, Vec x)
2347: {
2348: BDDCIPC_ctx bddcipc_ctx;
2349: PC_IS *pcis;
2350: VecScatter tmps;
2352: PetscFunctionBegin;
2353: PetscCall(PCShellGetContext(pc, &bddcipc_ctx));
2354: pcis = (PC_IS *)bddcipc_ctx->bddc->data;
2355: tmps = pcis->global_to_B;
2356: pcis->global_to_B = bddcipc_ctx->g2l;
2357: PetscCall(PCBDDCScalingRestriction(bddcipc_ctx->bddc, r, pcis->vec1_B));
2358: PetscCall(PCBDDCApplyInterfacePreconditioner(bddcipc_ctx->bddc, PETSC_TRUE));
2359: PetscCall(PCBDDCScalingExtension(bddcipc_ctx->bddc, pcis->vec1_B, x));
2360: pcis->global_to_B = tmps;
2361: PetscFunctionReturn(PETSC_SUCCESS);
2362: }
2364: static PetscErrorCode PCDestroy_BDDCIPC(PC pc)
2365: {
2366: BDDCIPC_ctx bddcipc_ctx;
2368: PetscFunctionBegin;
2369: PetscCall(PCShellGetContext(pc, &bddcipc_ctx));
2370: PetscCall(PCDestroy(&bddcipc_ctx->bddc));
2371: PetscCall(VecScatterDestroy(&bddcipc_ctx->g2l));
2372: PetscCall(PetscFree(bddcipc_ctx));
2373: PetscFunctionReturn(PETSC_SUCCESS);
2374: }
2376: /*@
2377: PCBDDCMatFETIDPGetSolution - Compute the physical solution using the solution of the FETI-DP linear system
2379: Collective
2381: Input Parameters:
2382: + fetidp_mat - the FETI-DP matrix obtained by a call to `PCBDDCCreateFETIDPOperators()`
2383: - fetidp_flux_sol - the solution of the FETI-DP linear system`
2385: Output Parameter:
2386: . standard_sol - the solution defined on the physical domain
2388: Level: developer
2390: Note:
2391: Most users should employ the `KSP` interface for linear solvers and create a solver of type `KSPFETIDP`.
2393: .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCCreateFETIDPOperators()`, `PCBDDCMatFETIDPGetRHS()`
2394: @*/
2395: PetscErrorCode PCBDDCMatFETIDPGetSolution(Mat fetidp_mat, Vec fetidp_flux_sol, Vec standard_sol)
2396: {
2397: FETIDPMat_ctx mat_ctx;
2399: PetscFunctionBegin;
2403: PetscCall(MatShellGetContext(fetidp_mat, &mat_ctx));
2404: PetscUseMethod(mat_ctx->pc, "PCBDDCMatFETIDPGetSolution_C", (Mat, Vec, Vec), (fetidp_mat, fetidp_flux_sol, standard_sol));
2405: PetscFunctionReturn(PETSC_SUCCESS);
2406: }
2408: static PetscErrorCode MatISSubMatrixEmbedLocalIS(Mat A, IS oldis, IS *newis)
2409: {
2410: Mat_IS *matis = (Mat_IS *)A->data;
2411: ISLocalToGlobalMapping ltog;
2412: IS is;
2414: PetscFunctionBegin;
2415: PetscCheck(matis->getsub_ris, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Missing getsub IS");
2416: PetscCall(ISLocalToGlobalMappingCreateIS(matis->getsub_ris, <og));
2417: PetscCall(ISGlobalToLocalMappingApplyIS(ltog, IS_GTOLM_DROP, oldis, &is));
2418: PetscCall(ISOnComm(is, PetscObjectComm((PetscObject)A), PETSC_COPY_VALUES, newis));
2419: PetscCall(ISLocalToGlobalMappingDestroy(<og));
2420: PetscCall(ISDestroy(&is));
2421: PetscFunctionReturn(PETSC_SUCCESS);
2422: }
2424: static PetscErrorCode PCBDDCCreateFETIDPOperators_BDDC(PC pc, PetscBool fully_redundant, const char *prefix, Mat *fetidp_mat, PC *fetidp_pc)
2425: {
2426: FETIDPMat_ctx fetidpmat_ctx;
2427: Mat newmat;
2428: FETIDPPC_ctx fetidppc_ctx;
2429: PC newpc;
2430: MPI_Comm comm;
2432: PetscFunctionBegin;
2433: PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
2434: /* FETI-DP matrix */
2435: PetscCall(PCBDDCCreateFETIDPMatContext(pc, &fetidpmat_ctx));
2436: fetidpmat_ctx->fully_redundant = fully_redundant;
2437: PetscCall(PCBDDCSetupFETIDPMatContext(fetidpmat_ctx));
2438: PetscCall(MatCreateShell(comm, fetidpmat_ctx->n, fetidpmat_ctx->n, fetidpmat_ctx->N, fetidpmat_ctx->N, fetidpmat_ctx, &newmat));
2439: PetscCall(PetscObjectSetName((PetscObject)newmat, !fetidpmat_ctx->l2g_lambda_only ? "F" : "G"));
2440: PetscCall(MatShellSetOperation(newmat, MATOP_MULT, (PetscErrorCodeFn *)FETIDPMatMult));
2441: PetscCall(MatShellSetOperation(newmat, MATOP_MULT_TRANSPOSE, (PetscErrorCodeFn *)FETIDPMatMultTranspose));
2442: PetscCall(MatShellSetOperation(newmat, MATOP_DESTROY, (PetscErrorCodeFn *)PCBDDCDestroyFETIDPMat));
2443: /* propagate MatOptions */
2444: {
2445: PC_BDDC *pcbddc = (PC_BDDC *)fetidpmat_ctx->pc->data;
2446: PetscBool isset, issym;
2448: PetscCall(MatIsSymmetricKnown(pc->mat, &isset, &issym));
2449: if ((isset && issym) || pcbddc->symmetric_primal) PetscCall(MatSetOption(newmat, MAT_SYMMETRIC, PETSC_TRUE));
2450: }
2451: PetscCall(MatSetOptionsPrefix(newmat, prefix));
2452: PetscCall(MatAppendOptionsPrefix(newmat, "fetidp_"));
2453: PetscCall(MatSetUp(newmat));
2454: /* FETI-DP preconditioner */
2455: PetscCall(PCBDDCCreateFETIDPPCContext(pc, &fetidppc_ctx));
2456: PetscCall(PCBDDCSetupFETIDPPCContext(newmat, fetidppc_ctx));
2457: PetscCall(PCCreate(comm, &newpc));
2458: PetscCall(PCSetOperators(newpc, newmat, newmat));
2459: PetscCall(PCSetOptionsPrefix(newpc, prefix));
2460: PetscCall(PCAppendOptionsPrefix(newpc, "fetidp_"));
2461: PetscCall(PCSetErrorIfFailure(newpc, pc->erroriffailure));
2462: if (!fetidpmat_ctx->l2g_lambda_only) { /* standard FETI-DP */
2463: PetscCall(PCSetType(newpc, PCSHELL));
2464: PetscCall(PCShellSetName(newpc, "FETI-DP multipliers"));
2465: PetscCall(PCShellSetContext(newpc, fetidppc_ctx));
2466: PetscCall(PCShellSetApply(newpc, FETIDPPCApply));
2467: PetscCall(PCShellSetApplyTranspose(newpc, FETIDPPCApplyTranspose));
2468: PetscCall(PCShellSetView(newpc, FETIDPPCView));
2469: PetscCall(PCShellSetDestroy(newpc, PCBDDCDestroyFETIDPPC));
2470: } else { /* saddle-point FETI-DP */
2471: Mat M;
2472: PetscInt psize;
2473: PetscBool fake = PETSC_FALSE, isfieldsplit;
2475: PetscCall(ISViewFromOptions(fetidpmat_ctx->lagrange, NULL, "-lag_view"));
2476: PetscCall(ISViewFromOptions(fetidpmat_ctx->pressure, NULL, "-press_view"));
2477: PetscCall(PetscObjectQuery((PetscObject)pc, "__KSPFETIDP_PPmat", (PetscObject *)&M));
2478: PetscCall(PCSetType(newpc, PCFIELDSPLIT));
2479: PetscCall(PCFieldSplitSetIS(newpc, "lag", fetidpmat_ctx->lagrange));
2480: PetscCall(PCFieldSplitSetIS(newpc, "p", fetidpmat_ctx->pressure));
2481: PetscCall(PCFieldSplitSetType(newpc, PC_COMPOSITE_SCHUR));
2482: PetscCall(PCFieldSplitSetSchurFactType(newpc, PC_FIELDSPLIT_SCHUR_FACT_DIAG));
2483: PetscCall(ISGetSize(fetidpmat_ctx->pressure, &psize));
2484: if (psize != M->rmap->N) {
2485: Mat M2;
2486: PetscInt lpsize;
2488: fake = PETSC_TRUE;
2489: PetscCall(ISGetLocalSize(fetidpmat_ctx->pressure, &lpsize));
2490: PetscCall(MatCreate(comm, &M2));
2491: PetscCall(MatSetType(M2, MATAIJ));
2492: PetscCall(MatSetSizes(M2, lpsize, lpsize, psize, psize));
2493: PetscCall(MatSetUp(M2));
2494: PetscCall(MatAssemblyBegin(M2, MAT_FINAL_ASSEMBLY));
2495: PetscCall(MatAssemblyEnd(M2, MAT_FINAL_ASSEMBLY));
2496: PetscCall(PCFieldSplitSetSchurPre(newpc, PC_FIELDSPLIT_SCHUR_PRE_USER, M2));
2497: PetscCall(MatDestroy(&M2));
2498: } else {
2499: PetscCall(PCFieldSplitSetSchurPre(newpc, PC_FIELDSPLIT_SCHUR_PRE_USER, M));
2500: }
2501: PetscCall(PCFieldSplitSetSchurScale(newpc, 1.0));
2503: /* we need to setfromoptions and setup here to access the blocks */
2504: PetscCall(PCSetFromOptions(newpc));
2505: PetscCall(PCSetUp(newpc));
2507: /* user may have changed the type (e.g. -fetidp_pc_type none) */
2508: PetscCall(PetscObjectTypeCompare((PetscObject)newpc, PCFIELDSPLIT, &isfieldsplit));
2509: if (isfieldsplit) {
2510: KSP *ksps;
2511: PC ppc, lagpc;
2512: PetscInt nn;
2513: PetscBool ismatis, matisok = PETSC_FALSE, check = PETSC_FALSE;
2515: /* set the solver for the (0,0) block */
2516: PetscCall(PCFieldSplitSchurGetSubKSP(newpc, &nn, &ksps));
2517: if (!nn) { /* not of type PC_COMPOSITE_SCHUR */
2518: PetscCall(PCFieldSplitGetSubKSP(newpc, &nn, &ksps));
2519: if (!fake) { /* pass pmat to the pressure solver */
2520: Mat F;
2522: PetscCall(KSPGetOperators(ksps[1], &F, NULL));
2523: PetscCall(KSPSetOperators(ksps[1], F, M));
2524: }
2525: } else {
2526: PetscBool issym, isset;
2527: Mat S;
2529: PetscCall(PCFieldSplitSchurGetS(newpc, &S));
2530: PetscCall(MatIsSymmetricKnown(newmat, &isset, &issym));
2531: if (isset) PetscCall(MatSetOption(S, MAT_SYMMETRIC, issym));
2532: }
2533: PetscCall(KSPGetPC(ksps[0], &lagpc));
2534: PetscCall(PCSetType(lagpc, PCSHELL));
2535: PetscCall(PCShellSetName(lagpc, "FETI-DP multipliers"));
2536: PetscCall(PCShellSetContext(lagpc, fetidppc_ctx));
2537: PetscCall(PCShellSetApply(lagpc, FETIDPPCApply));
2538: PetscCall(PCShellSetApplyTranspose(lagpc, FETIDPPCApplyTranspose));
2539: PetscCall(PCShellSetView(lagpc, FETIDPPCView));
2540: PetscCall(PCShellSetDestroy(lagpc, PCBDDCDestroyFETIDPPC));
2542: /* Olof's idea: interface Schur complement preconditioner for the mass matrix */
2543: PetscCall(KSPGetPC(ksps[1], &ppc));
2544: if (fake) {
2545: PC_BDDC *pcbddc = (PC_BDDC *)fetidpmat_ctx->pc->data;
2546: BDDCIPC_ctx bddcipc_ctx;
2547: PetscContainer c;
2549: matisok = PETSC_TRUE;
2551: /* create inner BDDC solver */
2552: PetscCall(PetscNew(&bddcipc_ctx));
2553: PetscCall(PCCreate(comm, &bddcipc_ctx->bddc));
2554: PetscCall(PCSetType(bddcipc_ctx->bddc, PCBDDC));
2555: PetscCall(PCSetOperators(bddcipc_ctx->bddc, M, M));
2556: PetscCall(PetscObjectTypeCompare((PetscObject)M, MATIS, &ismatis));
2557: PetscCheck(ismatis, comm, PETSC_ERR_PLIB, "Matrix type %s not of type MATIS", ((PetscObject)M)->type_name);
2558: /* the inner bddc for FETI-DP is already setup, we have local info available */
2559: if (pcbddc->user_primal_vertices_local || pcbddc->n_ISForDofsLocal > 2) {
2560: if (pcbddc->user_primal_vertices_local) {
2561: IS primals;
2563: PetscCall(MatISSubMatrixEmbedLocalIS(M, pcbddc->user_primal_vertices_local, &primals));
2564: PetscCall(PCBDDCSetPrimalVerticesLocalIS(bddcipc_ctx->bddc, primals));
2565: PetscCall(ISDestroy(&primals));
2566: }
2567: if (pcbddc->n_ISForDofsLocal > 2) { /* no need to propagate info if nfields < 3 */
2568: IS *split;
2569: PetscInt i, nf;
2571: PetscCall(PetscCalloc1(pcbddc->n_ISForDofsLocal, &split));
2572: for (i = 0, nf = 0; i < pcbddc->n_ISForDofsLocal; i++) {
2573: PetscInt ns;
2575: PetscCall(MatISSubMatrixEmbedLocalIS(M, pcbddc->ISForDofsLocal[i], &split[nf]));
2576: PetscCall(ISGetSize(split[nf], &ns));
2577: if (!ns) PetscCall(ISDestroy(&split[nf]));
2578: else nf++;
2579: }
2580: PetscCall(PCBDDCSetDofsSplittingLocal(bddcipc_ctx->bddc, nf, split));
2581: for (i = 0; i < nf; i++) PetscCall(ISDestroy(&split[i]));
2582: PetscCall(PetscFree(split));
2583: }
2584: }
2585: PetscCall(PetscObjectQuery((PetscObject)pc, "__KSPFETIDP_pCSR", (PetscObject *)&c));
2586: PetscCall(PetscObjectTypeCompare((PetscObject)M, MATIS, &ismatis));
2587: if (c && ismatis) {
2588: Mat lM;
2589: PetscInt *csr, n;
2591: PetscCall(MatISGetLocalMat(M, &lM));
2592: PetscCall(MatGetSize(lM, &n, NULL));
2593: PetscCall(PetscContainerGetPointer(c, &csr));
2594: PetscCall(PCBDDCSetLocalAdjacencyGraph(bddcipc_ctx->bddc, n, csr, csr + (n + 1), PETSC_COPY_VALUES));
2595: PetscCall(MatISRestoreLocalMat(M, &lM));
2596: }
2597: PetscCall(PCSetOptionsPrefix(bddcipc_ctx->bddc, ((PetscObject)ksps[1])->prefix));
2598: PetscCall(PCSetErrorIfFailure(bddcipc_ctx->bddc, pc->erroriffailure));
2599: PetscCall(PCSetFromOptions(bddcipc_ctx->bddc));
2601: /* wrap the interface application */
2602: PetscCall(PCSetType(ppc, PCSHELL));
2603: PetscCall(PCShellSetName(ppc, "FETI-DP pressure"));
2604: PetscCall(PCShellSetContext(ppc, bddcipc_ctx));
2605: PetscCall(PCShellSetSetUp(ppc, PCSetUp_BDDCIPC));
2606: PetscCall(PCShellSetApply(ppc, PCApply_BDDCIPC));
2607: PetscCall(PCShellSetApplyTranspose(ppc, PCApplyTranspose_BDDCIPC));
2608: PetscCall(PCShellSetView(ppc, PCView_BDDCIPC));
2609: PetscCall(PCShellSetDestroy(ppc, PCDestroy_BDDCIPC));
2610: }
2612: /* determine if we need to assemble M to construct a preconditioner */
2613: if (!matisok) {
2614: PetscCall(PetscObjectTypeCompare((PetscObject)M, MATIS, &ismatis));
2615: PetscCall(PetscObjectTypeCompareAny((PetscObject)ppc, &matisok, PCBDDC, PCJACOBI, PCNONE, PCMG, ""));
2616: if (ismatis && !matisok) PetscCall(MatConvert(M, MATAIJ, MAT_INPLACE_MATRIX, &M));
2617: }
2619: /* run the subproblems to check convergence */
2620: PetscCall(PetscOptionsGetBool(NULL, ((PetscObject)newmat)->prefix, "-check_saddlepoint", &check, NULL));
2621: if (check) {
2622: PetscInt i;
2624: for (i = 0; i < nn; i++) {
2625: KSP kspC;
2626: PC npc;
2627: Mat F, pF;
2628: Vec x, y;
2629: PetscBool isschur, prec = PETSC_TRUE;
2631: PetscCall(KSPCreate(PetscObjectComm((PetscObject)ksps[i]), &kspC));
2632: PetscCall(KSPSetNestLevel(kspC, pc->kspnestlevel));
2633: PetscCall(KSPSetOptionsPrefix(kspC, ((PetscObject)ksps[i])->prefix));
2634: PetscCall(KSPAppendOptionsPrefix(kspC, "check_"));
2635: PetscCall(KSPGetOperators(ksps[i], &F, &pF));
2636: PetscCall(PetscObjectTypeCompare((PetscObject)F, MATSCHURCOMPLEMENT, &isschur));
2637: if (isschur) {
2638: KSP kspS, kspS2;
2639: Mat A00, pA00, A10, A01, A11;
2640: char prefix[256];
2642: PetscCall(MatSchurComplementGetKSP(F, &kspS));
2643: PetscCall(MatSchurComplementGetSubMatrices(F, &A00, &pA00, &A01, &A10, &A11));
2644: PetscCall(MatCreateSchurComplement(A00, pA00, A01, A10, A11, &F));
2645: PetscCall(MatSchurComplementGetKSP(F, &kspS2));
2646: PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sschur_", ((PetscObject)kspC)->prefix));
2647: PetscCall(KSPSetOptionsPrefix(kspS2, prefix));
2648: PetscCall(KSPGetPC(kspS2, &npc));
2649: PetscCall(PCSetType(npc, PCKSP));
2650: PetscCall(PCKSPSetKSP(npc, kspS));
2651: PetscCall(KSPSetFromOptions(kspS2));
2652: PetscCall(KSPGetPC(kspS2, &npc));
2653: PetscCall(PCSetUseAmat(npc, PETSC_TRUE));
2654: } else {
2655: PetscCall(PetscObjectReference((PetscObject)F));
2656: }
2657: PetscCall(KSPSetFromOptions(kspC));
2658: PetscCall(PetscOptionsGetBool(NULL, ((PetscObject)kspC)->prefix, "-preconditioned", &prec, NULL));
2659: if (prec) {
2660: PetscCall(KSPGetPC(ksps[i], &npc));
2661: PetscCall(KSPSetPC(kspC, npc));
2662: }
2663: PetscCall(KSPSetOperators(kspC, F, pF));
2664: PetscCall(MatCreateVecs(F, &x, &y));
2665: PetscCall(VecSetRandom(x, NULL));
2666: PetscCall(MatMult(F, x, y));
2667: PetscCall(KSPSolve(kspC, y, x));
2668: PetscCall(KSPCheckSolve(kspC, npc, x));
2669: PetscCall(KSPDestroy(&kspC));
2670: PetscCall(MatDestroy(&F));
2671: PetscCall(VecDestroy(&x));
2672: PetscCall(VecDestroy(&y));
2673: }
2674: }
2675: PetscCall(PetscFree(ksps));
2676: }
2677: }
2678: /* return pointers for objects created */
2679: *fetidp_mat = newmat;
2680: *fetidp_pc = newpc;
2681: PetscFunctionReturn(PETSC_SUCCESS);
2682: }
2684: /*@C
2685: PCBDDCCreateFETIDPOperators - Create FETI-DP operators
2687: Collective
2689: Input Parameters:
2690: + pc - the `PCBDDC` preconditioning context (setup should have been called before)
2691: . fully_redundant - true for a fully redundant set of Lagrange multipliers
2692: - prefix - optional options database prefix for the objects to be created (can be `NULL`)
2694: Output Parameters:
2695: + fetidp_mat - shell FETI-DP matrix object
2696: - fetidp_pc - shell Dirichlet preconditioner for FETI-DP matrix
2698: Level: developer
2700: Notes:
2701: Most users should employ the `KSP` interface for linear solvers and create a solver of type `KSPFETIDP`.
Currently the only operations provided for the FETI-DP matrix are `MatMult()` and `MatMultTranspose()`.
2704: .seealso: [](ch_ksp), `KSPFETIDP`, `PCBDDC`, `PCBDDCMatFETIDPGetRHS()`, `PCBDDCMatFETIDPGetSolution()`
2705: @*/
2706: PetscErrorCode PCBDDCCreateFETIDPOperators(PC pc, PetscBool fully_redundant, const char *prefix, Mat *fetidp_mat, PC *fetidp_pc)
2707: {
2708: PetscFunctionBegin;
2710: PetscCheck(pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "You must call PCSetup_BDDC() first");
2711: PetscUseMethod(pc, "PCBDDCCreateFETIDPOperators_C", (PC, PetscBool, const char *, Mat *, PC *), (pc, fully_redundant, prefix, fetidp_mat, fetidp_pc));
2712: PetscFunctionReturn(PETSC_SUCCESS);
2713: }
2715: /*MC
2716: PCBDDC - Balancing Domain Decomposition by Constraints preconditioners, {cite}`dohrmann2007approximate`, {cite}`klawonn2006dual`, {cite}`mandel2008multispace`
2718: Requires `MATIS` matrices (Pmat) with local matrices (inside the `MATIS`) of type `MATSEQAIJ`, `MATSEQBAIJ` or `MATSEQSBAIJ`
2720: Works with unsymmetric and indefinite problems.
2722: Unlike 'conventional' interface preconditioners, `PCBDDC` iterates over all degrees of freedom, not just those on the interface. This allows the use
2723: of approximate solvers on the subdomains.
Approximate local solvers are automatically adapted (see {cite}`dohrmann2007approximate`) if the user has attached a nullspace object to the subdomain matrices and has informed
`PCBDDC` (via the command line) that approximate solvers are being used.
2728: Boundary nodes are split into vertices, edges and faces classes using information from the local to global mapping of dofs and the local connectivity graph of nodes.
2729: The latter can be customized by using `PCBDDCSetLocalAdjacencyGraph()`
2731: Additional information on dofs can be provided by using `PCBDDCSetDofsSplitting()`, `PCBDDCSetDirichletBoundaries()`, `PCBDDCSetNeumannBoundaries()`, and
2732: `PCBDDCSetPrimalVerticesIS()` and their local counterparts.
2734: Constraints can be customized by attaching a `MatNullSpace` object to the `MATIS` matrix via `MatSetNearNullSpace()`. Non-singular modes are retained via SVD.
2736: Change of basis is performed similarly to {cite}`klawonn2006dual` when requested. When more than one constraint is present on a single connected component
2737: (i.e. an edge or a face), a robust method based on local QR factorizations is used.
2738: User defined change of basis can be passed to `PCBDDC` with `PCBDDCSetChangeOfBasisMat()`
2740: The PETSc implementation also supports multilevel `PCBDDC` {cite}`mandel2008multispace`. Coarse grids are partitioned using a `MatPartitioning` object.
2742: Adaptive selection of primal constraints is supported for SPD systems with high-contrast in the coefficients if MUMPS or MKL_PARDISO are present.
2744: Options Database Keys:
2745: + -pc_bddc_use_vertices (true|false) - use or not vertices in primal space
2746: . -pc_bddc_use_edges (true|false) - use or not edges in primal space
2747: . -pc_bddc_use_faces (true|false) - use or not faces in primal space
2748: . -pc_bddc_symmetric (true|false) - symmetric computation of primal basis functions. Specify false for unsymmetric problems
2749: . -pc_bddc_use_change_of_basis (true|false) - use change of basis approach (on edges only)
2750: . -pc_bddc_use_change_on_faces (true|false) - use change of basis approach on faces if change of basis has been requested
2751: . -pc_bddc_switch_static (true|false) - switches from M_2 (default) to M_3 operator (see reference article [1])
2752: . -pc_bddc_levels 0 - maximum number of levels for multilevel
. -pc_bddc_coarsening_ratio 8 - number of subdomains which will be aggregated together at the coarser level (e.g. H/h ratio at the coarser level, significant only in the multilevel case)
2754: . -pc_bddc_coarse_redistribute 0 - size of a subset of processors where the coarse problem will be remapped (the value is ignored if not at the coarsest level)
2755: . -pc_bddc_use_deluxe_scaling (true|false) - use deluxe scaling
2756: . -pc_bddc_schur_layers \-1 - select the economic version of deluxe scaling by specifying the number of layers (-1 corresponds to the original deluxe scaling)
2757: . -pc_bddc_adaptive_threshold 0.0 - when a value different than zero is specified, adaptive selection of constraints is performed on edges and faces (requires deluxe scaling and MUMPS or MKL_PARDISO installed)
2758: - -pc_bddc_check_level 0 - set verbosity level of debugging output
2760: Options for Dirichlet, Neumann or coarse solver can be set using the appropriate options prefix
2761: .vb
2762: -pc_bddc_dirichlet_
2763: -pc_bddc_neumann_
2764: -pc_bddc_coarse_
2765: .ve
2766: e.g. -pc_bddc_dirichlet_ksp_type richardson -pc_bddc_dirichlet_pc_type gamg. `PCBDDC` uses by default `KSPPREONLY` and `PCLU`.
2768: When using a multilevel approach, solvers' options at the N-th level (N > 1) can be specified using the options prefix
2769: .vb
2770: -pc_bddc_dirichlet_lN_
2771: -pc_bddc_neumann_lN_
2772: -pc_bddc_coarse_lN_
2773: .ve
2774: Note that level number ranges from the finest (0) to the coarsest (N).
2775: In order to specify options for the `PCBDDC` operators at the coarser levels (and not for the solvers), prepend -pc_bddc_coarse_ or -pc_bddc_coarse_l
2776: to the option, e.g.
2777: .vb
2778: -pc_bddc_coarse_pc_bddc_adaptive_threshold 5 -pc_bddc_coarse_l1_pc_bddc_redistribute 3
2779: .ve
2780: will use a threshold of 5 for constraints' selection at the first coarse level and will redistribute the coarse problem of the first coarse level on 3 processors
2782: Level: intermediate
2784: .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `MATIS`, `KSPFETIDP`, `PCLU`, `PCGAMG`, `PCBDDCSetLocalAdjacencyGraph()`, `PCBDDCSetDofsSplitting()`,
2785: `PCBDDCSetDirichletBoundaries()`, `PCBDDCSetNeumannBoundaries()`, `PCBDDCSetPrimalVerticesIS()`, `MatNullSpace`, `MatSetNearNullSpace()`,
2786: `PCBDDCSetChangeOfBasisMat()`, `PCBDDCCreateFETIDPOperators()`, `PCNN`
2787: M*/
/*
  PCCreate_BDDC - Constructor for the PCBDDC preconditioner: allocates the
  PC_BDDC context, sets nonzero defaults, installs the PC operation table,
  and composes the PCBDDC-specific customization functions on the PC object.
*/
PETSC_EXTERN PetscErrorCode PCCreate_BDDC(PC pc)
{
  PC_BDDC *pcbddc;

  PetscFunctionBegin;
  /* allocate (zeroed) BDDC context and attach it to the PC */
  PetscCall(PetscNew(&pcbddc));
  pc->data = pcbddc;

  /* initialize the shared PCIS infrastructure used by iterative substructuring PCs */
  PetscCall(PCISInitialize(pc));

  /* create local graph structure */
  PetscCall(PCBDDCGraphCreate(&pcbddc->mat_graph));

  /* BDDC nonzero defaults; any field not set here stays zero/PETSC_FALSE
     from PetscNew() (e.g. use_faces defaults to PETSC_FALSE) */
  pcbddc->use_nnsp = PETSC_TRUE;
  pcbddc->use_local_adj = PETSC_TRUE;
  pcbddc->use_vertices = PETSC_TRUE;
  pcbddc->use_edges = PETSC_TRUE;
  pcbddc->symmetric_primal = PETSC_TRUE;
  pcbddc->vertex_size = 1;
  pcbddc->recompute_topography = PETSC_TRUE;
  pcbddc->coarse_size = -1; /* not yet computed */
  pcbddc->use_exact_dirichlet_trick = PETSC_TRUE;
  pcbddc->coarsening_ratio = 8; /* matches the documented -pc_bddc_coarsening_ratio default */
  pcbddc->coarse_eqs_per_proc = 1;
  pcbddc->benign_compute_correction = PETSC_TRUE;
  pcbddc->nedfield = -1; /* no Nedelec field selected */
  pcbddc->nedglobal = PETSC_TRUE;
  pcbddc->graphmaxcount = PETSC_INT_MAX;
  pcbddc->sub_schurs_layers = -1; /* -1: full (non-economic) deluxe scaling */
  pcbddc->adaptive_threshold[0] = 0.0;
  pcbddc->adaptive_threshold[1] = 0.0;

  /* function pointers: operations implemented by PCBDDC */
  pc->ops->apply = PCApply_BDDC;
  pc->ops->applytranspose = PCApplyTranspose_BDDC;
  pc->ops->setup = PCSetUp_BDDC;
  pc->ops->destroy = PCDestroy_BDDC;
  pc->ops->setfromoptions = PCSetFromOptions_BDDC;
  pc->ops->view = PCView_BDDC;
  pc->ops->applyrichardson = NULL;
  pc->ops->applysymmetricleft = NULL;
  pc->ops->applysymmetricright = NULL;
  pc->ops->presolve = PCPreSolve_BDDC;
  pc->ops->postsolve = PCPostSolve_BDDC;
  pc->ops->reset = PCReset_BDDC;

  /* composing function: these string keys back the public PCBDDCXXX()
     wrappers, which dispatch through PetscUseMethod/PetscTryMethod */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDiscreteGradient_C", PCBDDCSetDiscreteGradient_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDivergenceMat_C", PCBDDCSetDivergenceMat_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetChangeOfBasisMat_C", PCBDDCSetChangeOfBasisMat_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetPrimalVerticesLocalIS_C", PCBDDCSetPrimalVerticesLocalIS_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetPrimalVerticesIS_C", PCBDDCSetPrimalVerticesIS_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetPrimalVerticesLocalIS_C", PCBDDCGetPrimalVerticesLocalIS_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetPrimalVerticesIS_C", PCBDDCGetPrimalVerticesIS_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetCoarseningRatio_C", PCBDDCSetCoarseningRatio_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetLevel_C", PCBDDCSetLevel_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetUseExactDirichlet_C", PCBDDCSetUseExactDirichlet_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetLevels_C", PCBDDCSetLevels_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDirichletBoundaries_C", PCBDDCSetDirichletBoundaries_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDirichletBoundariesLocal_C", PCBDDCSetDirichletBoundariesLocal_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetNeumannBoundaries_C", PCBDDCSetNeumannBoundaries_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetNeumannBoundariesLocal_C", PCBDDCSetNeumannBoundariesLocal_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetDirichletBoundaries_C", PCBDDCGetDirichletBoundaries_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetDirichletBoundariesLocal_C", PCBDDCGetDirichletBoundariesLocal_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetNeumannBoundaries_C", PCBDDCGetNeumannBoundaries_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetNeumannBoundariesLocal_C", PCBDDCGetNeumannBoundariesLocal_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDofsSplitting_C", PCBDDCSetDofsSplitting_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDofsSplittingLocal_C", PCBDDCSetDofsSplittingLocal_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetLocalAdjacencyGraph_C", PCBDDCSetLocalAdjacencyGraph_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCCreateFETIDPOperators_C", PCBDDCCreateFETIDPOperators_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCMatFETIDPGetRHS_C", PCBDDCMatFETIDPGetRHS_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCMatFETIDPGetSolution_C", PCBDDCMatFETIDPGetSolution_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCPreSolveChangeRHS_C", PCPreSolveChangeRHS_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_BDDC));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2867: /*@C
2868: PCBDDCInitializePackage - This function initializes everything in the `PCBDDC` package. It is called
2869: from `PCInitializePackage()`.
2871: Level: developer
2873: .seealso: [](ch_ksp), `PetscInitialize()`, `PCBDDCFinalizePackage()`
2874: @*/
2875: PetscErrorCode PCBDDCInitializePackage(void)
2876: {
2877: int i;
2879: PetscFunctionBegin;
2880: if (PCBDDCPackageInitialized) PetscFunctionReturn(PETSC_SUCCESS);
2881: PCBDDCPackageInitialized = PETSC_TRUE;
2882: PetscCall(PetscRegisterFinalize(PCBDDCFinalizePackage));
2884: /* general events */
2885: PetscCall(PetscLogEventRegister("PCBDDCTopo", PC_CLASSID, &PC_BDDC_Topology[0]));
2886: PetscCall(PetscLogEventRegister("PCBDDCLKSP", PC_CLASSID, &PC_BDDC_LocalSolvers[0]));
2887: PetscCall(PetscLogEventRegister("PCBDDCLWor", PC_CLASSID, &PC_BDDC_LocalWork[0]));
2888: PetscCall(PetscLogEventRegister("PCBDDCCorr", PC_CLASSID, &PC_BDDC_CorrectionSetUp[0]));
2889: PetscCall(PetscLogEventRegister("PCBDDCASet", PC_CLASSID, &PC_BDDC_ApproxSetUp[0]));
2890: PetscCall(PetscLogEventRegister("PCBDDCAApp", PC_CLASSID, &PC_BDDC_ApproxApply[0]));
2891: PetscCall(PetscLogEventRegister("PCBDDCCSet", PC_CLASSID, &PC_BDDC_CoarseSetUp[0]));
2892: PetscCall(PetscLogEventRegister("PCBDDCCKSP", PC_CLASSID, &PC_BDDC_CoarseSolver[0]));
2893: PetscCall(PetscLogEventRegister("PCBDDCAdap", PC_CLASSID, &PC_BDDC_AdaptiveSetUp[0]));
2894: PetscCall(PetscLogEventRegister("PCBDDCScal", PC_CLASSID, &PC_BDDC_Scaling[0]));
2895: PetscCall(PetscLogEventRegister("PCBDDCSchr", PC_CLASSID, &PC_BDDC_Schurs[0]));
2896: PetscCall(PetscLogEventRegister("PCBDDCDirS", PC_CLASSID, &PC_BDDC_Solves[0][0]));
2897: PetscCall(PetscLogEventRegister("PCBDDCNeuS", PC_CLASSID, &PC_BDDC_Solves[0][1]));
2898: PetscCall(PetscLogEventRegister("PCBDDCCoaS", PC_CLASSID, &PC_BDDC_Solves[0][2]));
2899: for (i = 1; i < PETSC_PCBDDC_MAXLEVELS; i++) {
2900: char ename[32];
2902: PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCTopo l%02d", i));
2903: PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_Topology[i]));
2904: PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCLKSP l%02d", i));
2905: PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_LocalSolvers[i]));
2906: PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCLWor l%02d", i));
2907: PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_LocalWork[i]));
2908: PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCCorr l%02d", i));
2909: PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_CorrectionSetUp[i]));
2910: PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCASet l%02d", i));
2911: PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_ApproxSetUp[i]));
2912: PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCAApp l%02d", i));
2913: PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_ApproxApply[i]));
2914: PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCCSet l%02d", i));
2915: PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_CoarseSetUp[i]));
2916: PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCCKSP l%02d", i));
2917: PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_CoarseSolver[i]));
2918: PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCAdap l%02d", i));
2919: PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_AdaptiveSetUp[i]));
2920: PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCScal l%02d", i));
2921: PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_Scaling[i]));
2922: PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCSchr l%02d", i));
2923: PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_Schurs[i]));
2924: PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCDirS l%02d", i));
2925: PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_Solves[i][0]));
2926: PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCNeuS l%02d", i));
2927: PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_Solves[i][1]));
2928: PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCCoaS l%02d", i));
2929: PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_Solves[i][2]));
2930: }
2931: PetscFunctionReturn(PETSC_SUCCESS);
2932: }
2934: /*@C
2935: PCBDDCFinalizePackage - This function frees everything from the `PCBDDC` package. It is
2936: called from `PetscFinalize()` automatically.
2938: Level: developer
2940: .seealso: [](ch_ksp), `PetscFinalize()`, `PCBDDCInitializePackage()`
2941: @*/
PetscErrorCode PCBDDCFinalizePackage(void)
{
  PetscFunctionBegin;
  /* Only the initialization flag is reset; the log events registered by
     PCBDDCInitializePackage() are owned by the PETSc logging subsystem and
     are not unregistered here. Clearing the flag allows re-initialization
     on a subsequent PetscInitialize(). */
  PCBDDCPackageInitialized = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}