Actual source code: asm.c

  1: /*
  2:   This file defines an additive Schwarz preconditioner for any Mat implementation.

  4:   Note that each processor may have any number of subdomains. But in order to
  5:   deal easily with the VecScatter(), we treat each processor as if it has the
  6:   same number of subdomains.

  8:        n            - total number of true subdomains on all processors
  9:        n_local_true - actual number of subdomains on this processor
 10:        n_local      - maximum over all processors of n_local_true
 11: */

 13: #include <petsc/private/pcasmimpl.h>
 14: #include <petsc/private/matimpl.h>

 16: static PetscErrorCode PCView_ASM(PC pc, PetscViewer viewer)
 17: {
 18:   PC_ASM           *osm = (PC_ASM *)pc->data;
 19:   PetscMPIInt       rank;
 20:   PetscInt          i, bsz;
 21:   PetscBool         isascii, isstring;
 22:   PetscViewer       sviewer;
 23:   PetscViewerFormat format;
 24:   const char       *prefix;

 26:   PetscFunctionBegin;
 27:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
 28:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
 29:   if (isascii) {
 30:     char overlaps[256] = "user-defined overlap", blocks[256] = "total subdomain blocks not yet set";
 31:     if (osm->overlap >= 0) PetscCall(PetscSNPrintf(overlaps, sizeof(overlaps), "amount of overlap = %" PetscInt_FMT, osm->overlap));
 32:     if (osm->n > 0) PetscCall(PetscSNPrintf(blocks, sizeof(blocks), "total subdomain blocks = %" PetscInt_FMT, osm->n));
 33:     PetscCall(PetscViewerASCIIPrintf(viewer, "  %s, %s\n", blocks, overlaps));
 34:     PetscCall(PetscViewerASCIIPrintf(viewer, "  restriction/interpolation type - %s\n", PCASMTypes[osm->type]));
 35:     if (osm->dm_subdomains) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: using DM to define subdomains\n"));
 36:     if (osm->loctype != PC_COMPOSITE_ADDITIVE) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: local solve composition type - %s\n", PCCompositeTypes[osm->loctype]));
 37:     PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
 38:     PetscCall(PetscViewerGetFormat(viewer, &format));
 39:     if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) {
 40:       if (osm->ksp) {
 41:         PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for first block is in the following KSP and PC objects on rank 0:\n"));
 42:         PetscCall(PCGetOptionsPrefix(pc, &prefix));
 43:         PetscCall(PetscViewerASCIIPrintf(viewer, "  Use -%sksp_view ::ascii_info_detail to display information for all blocks\n", prefix ? prefix : ""));
 44:         PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
 45:         if (rank == 0) {
 46:           PetscCall(PetscViewerASCIIPushTab(sviewer));
 47:           PetscCall(KSPView(osm->ksp[0], sviewer));
 48:           PetscCall(PetscViewerASCIIPopTab(sviewer));
 49:         }
 50:         PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
 51:       }
 52:     } else {
 53:       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
 54:       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "  [%d] number of local blocks = %" PetscInt_FMT "\n", rank, osm->n_local_true));
 55:       PetscCall(PetscViewerFlush(viewer));
 56:       PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for each block is in the following KSP and PC objects:\n"));
 57:       PetscCall(PetscViewerASCIIPushTab(viewer));
 58:       PetscCall(PetscViewerASCIIPrintf(viewer, "- - - - - - - - - - - - - - - - - -\n"));
 59:       PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
 60:       for (i = 0; i < osm->n_local_true; i++) {
 61:         PetscCall(ISGetLocalSize(osm->is[i], &bsz));
 62:         PetscCall(PetscViewerASCIIPrintf(sviewer, "[%d] local block number %" PetscInt_FMT ", size = %" PetscInt_FMT "\n", rank, i, bsz));
 63:         PetscCall(KSPView(osm->ksp[i], sviewer));
 64:         PetscCall(PetscViewerASCIIPrintf(sviewer, "- - - - - - - - - - - - - - - - - -\n"));
 65:       }
 66:       PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
 67:       PetscCall(PetscViewerASCIIPopTab(viewer));
 68:       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
 69:     }
 70:   } else if (isstring) {
 71:     PetscCall(PetscViewerStringSPrintf(viewer, " blocks=%" PetscInt_FMT ", overlap=%" PetscInt_FMT ", type=%s", osm->n, osm->overlap, PCASMTypes[osm->type]));
 72:     PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
 73:     if (osm->ksp) PetscCall(KSPView(osm->ksp[0], sviewer));
 74:     PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
 75:   }
 76:   PetscFunctionReturn(PETSC_SUCCESS);
 77: }

 79: static PetscErrorCode PCASMPrintSubdomains(PC pc)
 80: {
 81:   PC_ASM         *osm = (PC_ASM *)pc->data;
 82:   const char     *prefix;
 83:   char            fname[PETSC_MAX_PATH_LEN + 1];
 84:   PetscViewer     viewer, sviewer;
 85:   char           *s;
 86:   PetscInt        i, j, nidx;
 87:   const PetscInt *idx;
 88:   PetscMPIInt     rank, size;

 90:   PetscFunctionBegin;
 91:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
 92:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
 93:   PetscCall(PCGetOptionsPrefix(pc, &prefix));
 94:   PetscCall(PetscOptionsGetString(NULL, prefix, "-pc_asm_print_subdomains", fname, sizeof(fname), NULL));
 95:   if (fname[0] == 0) PetscCall(PetscStrncpy(fname, "stdout", sizeof(fname)));
 96:   PetscCall(PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc), fname, &viewer));
 97:   for (i = 0; i < osm->n_local; i++) {
 98:     if (i < osm->n_local_true) {
 99:       PetscCall(ISGetLocalSize(osm->is[i], &nidx));
100:       PetscCall(ISGetIndices(osm->is[i], &idx));
101:       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
102: #define len 16 * (nidx + 1) + 512
103:       PetscCall(PetscMalloc1(len, &s));
104:       PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
105: #undef len
106:       PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " with overlap:\n", rank, size, i));
107:       for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
108:       PetscCall(ISRestoreIndices(osm->is[i], &idx));
109:       PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
110:       PetscCall(PetscViewerDestroy(&sviewer));
111:       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
112:       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
113:       PetscCall(PetscViewerFlush(viewer));
114:       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
115:       PetscCall(PetscFree(s));
116:       if (osm->is_local) {
117:         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
118: #define len 16 * (nidx + 1) + 512
119:         PetscCall(PetscMalloc1(len, &s));
120:         PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
121: #undef len
122:         PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " without overlap:\n", rank, size, i));
123:         PetscCall(ISGetLocalSize(osm->is_local[i], &nidx));
124:         PetscCall(ISGetIndices(osm->is_local[i], &idx));
125:         for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
126:         PetscCall(ISRestoreIndices(osm->is_local[i], &idx));
127:         PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
128:         PetscCall(PetscViewerDestroy(&sviewer));
129:         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
130:         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
131:         PetscCall(PetscViewerFlush(viewer));
132:         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
133:         PetscCall(PetscFree(s));
134:       }
135:     } else {
136:       /* Participate in collective viewer calls. */
137:       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
138:       PetscCall(PetscViewerFlush(viewer));
139:       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
140:       /* Assume either all ranks have is_local or none do. */
141:       if (osm->is_local) {
142:         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
143:         PetscCall(PetscViewerFlush(viewer));
144:         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
145:       }
146:     }
147:   }
148:   PetscCall(PetscViewerFlush(viewer));
149:   PetscCall(PetscViewerDestroy(&viewer));
150:   PetscFunctionReturn(PETSC_SUCCESS);
151: }
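
/*
   Annotation (not part of the original source): PCSetUp_ASM() below performs the
   one-time setup of the preconditioner. In outline it
     - determines the subdomains (from the DM, from user-provided index sets, or
       one subdomain per process by default),
     - optionally enlarges each subdomain with MatIncreaseOverlap(),
     - extracts the overlapping submatrices with MatCreateSubMatrices(),
     - builds the work vectors and VecScatter contexts used by PCApply_ASM(), and
     - creates one sequential KSP per subdomain whose options prefix appends "sub_"
       to the prefix of the PC.
*/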

153: static PetscErrorCode PCSetUp_ASM(PC pc)
154: {
155:   PC_ASM       *osm = (PC_ASM *)pc->data;
156:   PetscBool     flg;
157:   PetscInt      i, m, m_local;
158:   MatReuse      scall = MAT_REUSE_MATRIX;
159:   IS            isl;
160:   KSP           ksp;
161:   PC            subpc;
162:   const char   *prefix, *pprefix;
163:   Vec           vec;
164:   DM           *domain_dm = NULL;
165:   MatNullSpace *nullsp    = NULL;

167:   PetscFunctionBegin;
168:   if (!pc->setupcalled) {
169:     PetscInt m;

171:     /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
172:     if (osm->n_local_true == PETSC_DECIDE) {
173:       /* no subdomains given */
174:       /* try pc->dm first, if allowed */
175:       if (osm->dm_subdomains && pc->dm) {
176:         PetscInt num_domains;
177:         char   **domain_names;
178:         IS      *inner_domain_is, *outer_domain_is;
179:         PetscCall(DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm));
180:         osm->overlap = -1; /* We do not want to increase the overlap of the IS.
181:                               A future improvement of this code might allow one to use
182:                               DM-defined subdomains and also increase the overlap,
183:                               but that is not currently supported */
184:         if (num_domains) PetscCall(PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is));
185:         for (PetscInt d = 0; d < num_domains; ++d) {
186:           if (domain_names) PetscCall(PetscFree(domain_names[d]));
187:           if (inner_domain_is) PetscCall(ISDestroy(&inner_domain_is[d]));
188:           if (outer_domain_is) PetscCall(ISDestroy(&outer_domain_is[d]));
189:         }
190:         PetscCall(PetscFree(domain_names));
191:         PetscCall(PetscFree(inner_domain_is));
192:         PetscCall(PetscFree(outer_domain_is));
193:       }
194:       if (osm->n_local_true == PETSC_DECIDE) {
195:         /* still no subdomains; use one subdomain per processor */
196:         osm->n_local_true = 1;
197:       }
198:     }
199:     { /* determine the global and max number of subdomains */
200:       struct {
201:         PetscInt max, sum;
202:       } inwork, outwork;
203:       PetscMPIInt size;

205:       inwork.max = osm->n_local_true;
206:       inwork.sum = osm->n_local_true;
207:       PetscCallMPI(MPIU_Allreduce(&inwork, &outwork, 1, MPIU_2INT, MPIU_MAXSUM_OP, PetscObjectComm((PetscObject)pc)));
208:       osm->n_local = outwork.max;
209:       osm->n       = outwork.sum;

211:       PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
212:       if (outwork.max == 1 && outwork.sum == size) {
 213:         /* osm->n_local_true = 1 on all processes; setting this option may enable an optimized MatCreateSubMatrices() implementation */
214:         PetscCall(MatSetOption(pc->pmat, MAT_SUBMAT_SINGLEIS, PETSC_TRUE));
215:       }
216:     }
217:     if (!osm->is) { /* create the index sets */
218:       PetscCall(PCASMCreateSubdomains(pc->pmat, osm->n_local_true, &osm->is));
219:     }
220:     if (osm->n_local_true > 1 && !osm->is_local) {
221:       PetscCall(PetscMalloc1(osm->n_local_true, &osm->is_local));
222:       for (i = 0; i < osm->n_local_true; i++) {
223:         if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
224:           PetscCall(ISDuplicate(osm->is[i], &osm->is_local[i]));
225:           PetscCall(ISCopy(osm->is[i], osm->is_local[i]));
226:         } else {
227:           PetscCall(PetscObjectReference((PetscObject)osm->is[i]));
228:           osm->is_local[i] = osm->is[i];
229:         }
230:       }
231:     }
232:     PetscCall(PCGetOptionsPrefix(pc, &prefix));
233:     if (osm->overlap > 0) {
234:       /* Extend the "overlapping" regions by a number of steps */
235:       PetscCall(MatIncreaseOverlap(pc->pmat, osm->n_local_true, osm->is, osm->overlap));
236:     }
237:     if (osm->sort_indices) {
238:       for (i = 0; i < osm->n_local_true; i++) {
239:         PetscCall(ISSort(osm->is[i]));
240:         if (osm->is_local) PetscCall(ISSort(osm->is_local[i]));
241:       }
242:     }
243:     flg = PETSC_FALSE;
244:     PetscCall(PetscOptionsHasName(NULL, prefix, "-pc_asm_print_subdomains", &flg));
245:     if (flg) PetscCall(PCASMPrintSubdomains(pc));
246:     if (!osm->ksp) {
247:       /* Create the local solvers */
248:       PetscCall(PetscMalloc1(osm->n_local_true, &osm->ksp));
249:       if (domain_dm) PetscCall(PetscInfo(pc, "Setting up ASM subproblems using the embedded DM\n"));
250:       for (i = 0; i < osm->n_local_true; i++) {
251:         PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp));
252:         PetscCall(KSPSetNestLevel(ksp, pc->kspnestlevel));
253:         PetscCall(KSPSetErrorIfNotConverged(ksp, pc->erroriffailure));
254:         PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp, (PetscObject)pc, 1));
255:         PetscCall(KSPSetType(ksp, KSPPREONLY));
256:         PetscCall(KSPGetPC(ksp, &subpc));
257:         PetscCall(PCGetOptionsPrefix(pc, &prefix));
258:         PetscCall(KSPSetOptionsPrefix(ksp, prefix));
259:         PetscCall(KSPAppendOptionsPrefix(ksp, "sub_"));
260:         if (domain_dm) {
261:           PetscCall(KSPSetDM(ksp, domain_dm[i]));
262:           PetscCall(KSPSetDMActive(ksp, KSP_DMACTIVE_ALL, PETSC_FALSE));
263:           PetscCall(DMDestroy(&domain_dm[i]));
264:         }
265:         osm->ksp[i] = ksp;
266:       }
267:       PetscCall(PetscFree(domain_dm));
268:     }

270:     PetscCall(ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis));
271:     PetscCall(ISSortRemoveDups(osm->lis));
272:     PetscCall(ISGetLocalSize(osm->lis, &m));

274:     scall = MAT_INITIAL_MATRIX;
275:   } else {
276:     /*
277:        Destroy the blocks from the previous iteration
278:     */
279:     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
280:       PetscCall(MatGetNullSpaces(osm->n_local_true, osm->pmat, &nullsp));
281:       PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->pmat));
282:       scall = MAT_INITIAL_MATRIX;
283:     }
284:   }

286:   /* Destroy previous submatrices of a different type than pc->pmat since MAT_REUSE_MATRIX won't work in that case */
287:   if (scall == MAT_REUSE_MATRIX && osm->sub_mat_type) {
288:     PetscCall(MatGetNullSpaces(osm->n_local_true, osm->pmat, &nullsp));
289:     if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
290:     scall = MAT_INITIAL_MATRIX;
291:   }

293:   /*
294:      Extract out the submatrices
295:   */
296:   PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, osm->is, scall, &osm->pmat));
297:   if (scall == MAT_INITIAL_MATRIX) {
298:     PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc->pmat, &pprefix));
299:     for (i = 0; i < osm->n_local_true; i++) PetscCall(PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i], pprefix));
300:     if (nullsp) PetscCall(MatRestoreNullSpaces(osm->n_local_true, osm->pmat, &nullsp));
301:   }

 303:   /* Convert the types of the submatrices (if need be) */
304:   if (osm->sub_mat_type) {
305:     for (i = 0; i < osm->n_local_true; i++) PetscCall(MatConvert(osm->pmat[i], osm->sub_mat_type, MAT_INPLACE_MATRIX, &osm->pmat[i]));
306:   }

308:   if (!pc->setupcalled) {
309:     VecType vtype;

311:     /* Create the local work vectors (from the local matrices) and scatter contexts */
312:     PetscCall(MatCreateVecs(pc->pmat, &vec, NULL));

314:     PetscCheck(!osm->is_local || osm->n_local_true == 1 || (osm->type != PC_ASM_INTERPOLATE && osm->type != PC_ASM_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains() with more than a single subdomain");
315:     if (osm->is_local && osm->type != PC_ASM_BASIC && osm->loctype == PC_COMPOSITE_ADDITIVE) PetscCall(PetscMalloc1(osm->n_local_true, &osm->lprolongation));
316:     PetscCall(PetscMalloc1(osm->n_local_true, &osm->lrestriction));
317:     PetscCall(PetscMalloc1(osm->n_local_true, &osm->x));
318:     PetscCall(PetscMalloc1(osm->n_local_true, &osm->y));

320:     PetscCall(ISGetLocalSize(osm->lis, &m));
321:     PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
322:     PetscCall(MatGetVecType(osm->pmat[0], &vtype));
323:     PetscCall(VecCreate(PETSC_COMM_SELF, &osm->lx));
324:     PetscCall(VecSetSizes(osm->lx, m, m));
325:     PetscCall(VecSetType(osm->lx, vtype));
326:     PetscCall(VecDuplicate(osm->lx, &osm->ly));
327:     PetscCall(VecScatterCreate(vec, osm->lis, osm->lx, isl, &osm->restriction));
328:     PetscCall(ISDestroy(&isl));

330:     for (i = 0; i < osm->n_local_true; ++i) {
331:       ISLocalToGlobalMapping ltog;
332:       IS                     isll;
333:       const PetscInt        *idx_is;
334:       PetscInt              *idx_lis, nout;

336:       PetscCall(ISGetLocalSize(osm->is[i], &m));
337:       PetscCall(MatCreateVecs(osm->pmat[i], &osm->x[i], NULL));
338:       PetscCall(VecDuplicate(osm->x[i], &osm->y[i]));

340:       /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
341:       PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
342:       PetscCall(ISGetLocalSize(osm->is[i], &m));
343:       PetscCall(ISGetIndices(osm->is[i], &idx_is));
344:       PetscCall(PetscMalloc1(m, &idx_lis));
345:       PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m, idx_is, &nout, idx_lis));
346:       PetscCheck(nout == m, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is not a subset of lis");
347:       PetscCall(ISRestoreIndices(osm->is[i], &idx_is));
348:       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m, idx_lis, PETSC_OWN_POINTER, &isll));
349:       PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
350:       PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
351:       PetscCall(VecScatterCreate(osm->ly, isll, osm->y[i], isl, &osm->lrestriction[i]));
352:       PetscCall(ISDestroy(&isll));
353:       PetscCall(ISDestroy(&isl));
354:       if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the non-overlapping is_local[i] entries */
355:         ISLocalToGlobalMapping ltog;
356:         IS                     isll, isll_local;
357:         const PetscInt        *idx_local;
358:         PetscInt              *idx1, *idx2, nout;

360:         PetscCall(ISGetLocalSize(osm->is_local[i], &m_local));
361:         PetscCall(ISGetIndices(osm->is_local[i], &idx_local));

363:         PetscCall(ISLocalToGlobalMappingCreateIS(osm->is[i], &ltog));
364:         PetscCall(PetscMalloc1(m_local, &idx1));
365:         PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx1));
366:         PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
367:         PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of is");
368:         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx1, PETSC_OWN_POINTER, &isll));

370:         PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
371:         PetscCall(PetscMalloc1(m_local, &idx2));
372:         PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx2));
373:         PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
374:         PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of lis");
375:         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx2, PETSC_OWN_POINTER, &isll_local));

377:         PetscCall(ISRestoreIndices(osm->is_local[i], &idx_local));
378:         PetscCall(VecScatterCreate(osm->y[i], isll, osm->ly, isll_local, &osm->lprolongation[i]));

380:         PetscCall(ISDestroy(&isll));
381:         PetscCall(ISDestroy(&isll_local));
382:       }
383:     }
384:     PetscCall(VecDestroy(&vec));
385:   }

387:   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
388:     IS *cis;

390:     PetscCall(PetscMalloc1(osm->n_local_true, &cis));
391:     for (PetscInt c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
392:     PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats));
393:     PetscCall(PetscFree(cis));
394:   }

396:   /* Return control to the user so that the submatrices can be modified (e.g., to apply
397:      different boundary conditions for the submatrices than for the global problem) */
398:   PetscCall(PCModifySubMatrices(pc, osm->n_local_true, osm->is, osm->is, osm->pmat, pc->modifysubmatricesP));

400:   /*
 401:      Loop over the subdomains, setting the operators on each local KSP
402:   */
403:   PetscCall(KSPGetOptionsPrefix(osm->ksp[0], &prefix));
404:   for (i = 0; i < osm->n_local_true; i++) {
405:     PetscCall(KSPSetOperators(osm->ksp[i], osm->pmat[i], osm->pmat[i]));
406:     PetscCall(MatSetOptionsPrefix(osm->pmat[i], prefix));
407:     if (!pc->setupcalled) PetscCall(KSPSetFromOptions(osm->ksp[i]));
408:   }
409:   PetscFunctionReturn(PETSC_SUCCESS);
410: }

412: static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
413: {
414:   PC_ASM            *osm = (PC_ASM *)pc->data;
415:   PetscInt           i;
416:   KSPConvergedReason reason;

418:   PetscFunctionBegin;
419:   for (i = 0; i < osm->n_local_true; i++) {
420:     PetscCall(KSPSetUp(osm->ksp[i]));
421:     PetscCall(KSPGetConvergedReason(osm->ksp[i], &reason));
422:     if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
423:   }
424:   PetscFunctionReturn(PETSC_SUCCESS);
425: }
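
/*
   Annotation (not part of the original source): PCApply_ASM() below applies the
   preconditioner. For the basic type it computes, in the notation of the code,

       y = sum_i R_i^T A_i^{-1} R_i x

   where R_i restricts to the overlapping subdomain is[i] and A_i = pmat[i] is the
   corresponding submatrix solved by ksp[i]. The restricted variants replace R_i or
   R_i^T by scatters over the non-overlapping is_local[i] entries, and the
   multiplicative local type updates each subsequent block right-hand side with the
   local solution already accumulated in ly.
*/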

427: static PetscErrorCode PCApply_ASM(PC pc, Vec x, Vec y)
428: {
429:   PC_ASM     *osm = (PC_ASM *)pc->data;
430:   PetscInt    i, n_local_true = osm->n_local_true;
431:   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;

433:   PetscFunctionBegin;
434:   /*
435:      support for limiting the restriction or interpolation to only local
436:      subdomain values (leaving the other values 0).
437:   */
438:   if (!(osm->type & PC_ASM_RESTRICT)) {
439:     forward = SCATTER_FORWARD_LOCAL;
440:     /* have to zero the work RHS since scatter may leave some slots empty */
441:     PetscCall(VecSet(osm->lx, 0.0));
442:   }
443:   if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;

445:   PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
446:   /* zero the global and the local solutions */
447:   PetscCall(VecSet(y, 0.0));
448:   PetscCall(VecSet(osm->ly, 0.0));

450:   /* copy the global RHS to local RHS including the ghost nodes */
451:   PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
452:   PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));

454:   /* restrict local RHS to the overlapping 0-block RHS */
455:   PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
456:   PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));

458:   /* do the local solves */
459:   for (i = 0; i < n_local_true; ++i) {
460:     /* solve the overlapping i-block */
461:     PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
462:     PetscCall(KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]));
463:     PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
464:     PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));

466:     if (osm->lprolongation && osm->type != PC_ASM_INTERPOLATE) { /* interpolate the non-overlapping i-block solution to the local solution (only for restrictive additive) */
467:       PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
468:       PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
469:     } else { /* interpolate the overlapping i-block solution to the local solution */
470:       PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
471:       PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
472:     }

474:     if (i < n_local_true - 1) {
475:       /* restrict local RHS to the overlapping (i+1)-block RHS */
476:       PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
477:       PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));

479:       if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
480:         /* update the overlapping (i+1)-block RHS using the current local solution */
481:         PetscCall(MatMult(osm->lmats[i + 1], osm->ly, osm->y[i + 1]));
482:         PetscCall(VecAXPBY(osm->x[i + 1], -1., 1., osm->y[i + 1]));
483:       }
484:     }
485:   }
486:   /* add the local solution to the global solution including the ghost nodes */
487:   PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
488:   PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
489:   PetscFunctionReturn(PETSC_SUCCESS);
490: }

492: static PetscErrorCode PCMatApply_ASM_Private(PC pc, Mat X, Mat Y, PetscBool transpose)
493: {
494:   PC_ASM     *osm = (PC_ASM *)pc->data;
495:   Mat         Z, W;
496:   Vec         x;
497:   PetscInt    i, m, N;
498:   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;

500:   PetscFunctionBegin;
501:   PetscCheck(osm->n_local_true <= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not yet implemented");
502:   /*
503:      support for limiting the restriction or interpolation to only local
504:      subdomain values (leaving the other values 0).
505:   */
506:   if ((!transpose && !(osm->type & PC_ASM_RESTRICT)) || (transpose && !(osm->type & PC_ASM_INTERPOLATE))) {
507:     forward = SCATTER_FORWARD_LOCAL;
508:     /* have to zero the work RHS since scatter may leave some slots empty */
509:     PetscCall(VecSet(osm->lx, 0.0));
510:   }
511:   if ((!transpose && !(osm->type & PC_ASM_INTERPOLATE)) || (transpose && !(osm->type & PC_ASM_RESTRICT))) reverse = SCATTER_REVERSE_LOCAL;
512:   PetscCall(VecGetLocalSize(osm->x[0], &m));
513:   PetscCall(MatGetSize(X, NULL, &N));
514:   PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &Z));

516:   PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
517:   /* zero the global and the local solutions */
518:   PetscCall(MatZeroEntries(Y));
519:   PetscCall(VecSet(osm->ly, 0.0));

521:   for (i = 0; i < N; ++i) {
522:     PetscCall(MatDenseGetColumnVecRead(X, i, &x));
523:     /* copy the global RHS to local RHS including the ghost nodes */
524:     PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
525:     PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
526:     PetscCall(MatDenseRestoreColumnVecRead(X, i, &x));

528:     PetscCall(MatDenseGetColumnVecWrite(Z, i, &x));
529:     /* restrict local RHS to the overlapping 0-block RHS */
530:     PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
531:     PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
532:     PetscCall(MatDenseRestoreColumnVecWrite(Z, i, &x));
533:   }
534:   PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &W));
535:   /* solve the overlapping 0-block */
536:   if (!transpose) {
537:     PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
538:     PetscCall(KSPMatSolve(osm->ksp[0], Z, W));
539:     PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
540:   } else {
541:     PetscCall(PetscLogEventBegin(PC_ApplyTransposeOnBlocks, osm->ksp[0], Z, W, 0));
542:     PetscCall(KSPMatSolveTranspose(osm->ksp[0], Z, W));
543:     PetscCall(PetscLogEventEnd(PC_ApplyTransposeOnBlocks, osm->ksp[0], Z, W, 0));
544:   }
545:   PetscCall(KSPCheckSolve(osm->ksp[0], pc, NULL));
546:   PetscCall(MatDestroy(&Z));

548:   for (i = 0; i < N; ++i) {
549:     PetscCall(VecSet(osm->ly, 0.0));
550:     PetscCall(MatDenseGetColumnVecRead(W, i, &x));
551:     if (osm->lprolongation && ((!transpose && osm->type != PC_ASM_INTERPOLATE) || (transpose && osm->type != PC_ASM_RESTRICT))) { /* interpolate the non-overlapping 0-block solution to the local solution (only for restrictive additive) */
552:       PetscCall(VecScatterBegin(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
553:       PetscCall(VecScatterEnd(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
554:     } else { /* interpolate the overlapping 0-block solution to the local solution */
555:       PetscCall(VecScatterBegin(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
556:       PetscCall(VecScatterEnd(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
557:     }
558:     PetscCall(MatDenseRestoreColumnVecRead(W, i, &x));

560:     PetscCall(MatDenseGetColumnVecWrite(Y, i, &x));
561:     /* add the local solution to the global solution including the ghost nodes */
562:     PetscCall(VecScatterBegin(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
563:     PetscCall(VecScatterEnd(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
564:     PetscCall(MatDenseRestoreColumnVecWrite(Y, i, &x));
565:   }
566:   PetscCall(MatDestroy(&W));
567:   PetscFunctionReturn(PETSC_SUCCESS);
568: }

570: static PetscErrorCode PCMatApply_ASM(PC pc, Mat X, Mat Y)
571: {
572:   PetscFunctionBegin;
573:   PetscCall(PCMatApply_ASM_Private(pc, X, Y, PETSC_FALSE));
574:   PetscFunctionReturn(PETSC_SUCCESS);
575: }

577: static PetscErrorCode PCMatApplyTranspose_ASM(PC pc, Mat X, Mat Y)
578: {
579:   PetscFunctionBegin;
580:   PetscCall(PCMatApply_ASM_Private(pc, X, Y, PETSC_TRUE));
581:   PetscFunctionReturn(PETSC_SUCCESS);
582: }

584: static PetscErrorCode PCApplyTranspose_ASM(PC pc, Vec x, Vec y)
585: {
586:   PC_ASM     *osm = (PC_ASM *)pc->data;
587:   PetscInt    i, n_local_true = osm->n_local_true;
588:   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;

590:   PetscFunctionBegin;
591:   PetscCheck(osm->n_local_true <= 1 || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not yet implemented");
592:   /*
593:      Support for limiting the restriction or interpolation to only local
594:      subdomain values (leaving the other values 0).

596:      Note: these are reversed from the PCApply_ASM() because we are applying the
597:      transpose of the three terms
598:   */

600:   if (!(osm->type & PC_ASM_INTERPOLATE)) {
601:     forward = SCATTER_FORWARD_LOCAL;
602:     /* have to zero the work RHS since scatter may leave some slots empty */
603:     PetscCall(VecSet(osm->lx, 0.0));
604:   }
605:   if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

607:   /* zero the global and the local solutions */
608:   PetscCall(VecSet(y, 0.0));
609:   PetscCall(VecSet(osm->ly, 0.0));

611:   /* Copy the global RHS to local RHS including the ghost nodes */
612:   PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
613:   PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));

615:   /* Restrict local RHS to the overlapping 0-block RHS */
616:   PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
617:   PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));

619:   /* do the local solves */
620:   for (i = 0; i < n_local_true; ++i) {
621:     /* solve the overlapping i-block */
622:     PetscCall(PetscLogEventBegin(PC_ApplyTransposeOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
623:     PetscCall(KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]));
624:     PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
625:     PetscCall(PetscLogEventEnd(PC_ApplyTransposeOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));

627:     if (osm->lprolongation && osm->type != PC_ASM_RESTRICT) { /* interpolate the non-overlapping i-block solution to the local solution */
628:       PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
629:       PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
630:     } else { /* interpolate the overlapping i-block solution to the local solution */
631:       PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
632:       PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
633:     }

635:     if (i < n_local_true - 1) {
636:       /* Restrict local RHS to the overlapping (i+1)-block RHS */
637:       PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
638:       PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
639:     }
640:   }
641:   /* Add the local solution to the global solution including the ghost nodes */
642:   PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
643:   PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
644:   PetscFunctionReturn(PETSC_SUCCESS);
645: }

647: static PetscErrorCode PCReset_ASM(PC pc)
648: {
649:   PC_ASM *osm = (PC_ASM *)pc->data;

651:   PetscFunctionBegin;
652:   if (osm->ksp) {
653:     for (PetscInt i = 0; i < osm->n_local_true; i++) PetscCall(KSPReset(osm->ksp[i]));
654:   }
655:   if (osm->pmat) {
656:     if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
657:   }
658:   if (osm->lrestriction) {
659:     PetscCall(VecScatterDestroy(&osm->restriction));
660:     for (PetscInt i = 0; i < osm->n_local_true; i++) {
661:       PetscCall(VecScatterDestroy(&osm->lrestriction[i]));
662:       if (osm->lprolongation) PetscCall(VecScatterDestroy(&osm->lprolongation[i]));
663:       PetscCall(VecDestroy(&osm->x[i]));
664:       PetscCall(VecDestroy(&osm->y[i]));
665:     }
666:     PetscCall(PetscFree(osm->lrestriction));
667:     PetscCall(PetscFree(osm->lprolongation));
668:     PetscCall(PetscFree(osm->x));
669:     PetscCall(PetscFree(osm->y));
670:   }
671:   PetscCall(PCASMDestroySubdomains(osm->n_local_true, &osm->is, &osm->is_local));
672:   PetscCall(ISDestroy(&osm->lis));
673:   PetscCall(VecDestroy(&osm->lx));
674:   PetscCall(VecDestroy(&osm->ly));
675:   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->lmats));

677:   PetscCall(PetscFree(osm->sub_mat_type));

679:   osm->is       = NULL;
680:   osm->is_local = NULL;
681:   PetscFunctionReturn(PETSC_SUCCESS);
682: }

684: static PetscErrorCode PCDestroy_ASM(PC pc)
685: {
686:   PC_ASM *osm = (PC_ASM *)pc->data;

688:   PetscFunctionBegin;
689:   PetscCall(PCReset_ASM(pc));
690:   if (osm->ksp) {
691:     for (PetscInt i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i]));
692:     PetscCall(PetscFree(osm->ksp));
693:   }
694:   PetscCall(PetscFree(pc->data));

696:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", NULL));
697:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", NULL));
698:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", NULL));
699:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", NULL));
700:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", NULL));
701:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", NULL));
702:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", NULL));
703:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", NULL));
704:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", NULL));
705:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", NULL));
706:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", NULL));
707:   PetscFunctionReturn(PETSC_SUCCESS);
708: }

710: static PetscErrorCode PCSetFromOptions_ASM(PC pc, PetscOptionItems PetscOptionsObject)
711: {
712:   PC_ASM         *osm = (PC_ASM *)pc->data;
713:   PetscInt        blocks, ovl;
714:   PetscBool       flg;
715:   PCASMType       asmtype;
716:   PCCompositeType loctype;
717:   char            sub_mat_type[256];

719:   PetscFunctionBegin;
720:   PetscOptionsHeadBegin(PetscOptionsObject, "Additive Schwarz options");
721:   PetscCall(PetscOptionsBool("-pc_asm_dm_subdomains", "Use DMCreateDomainDecomposition() to define subdomains", "PCASMSetDMSubdomains", osm->dm_subdomains, &osm->dm_subdomains, &flg));
722:   PetscCall(PetscOptionsInt("-pc_asm_blocks", "Number of subdomains", "PCASMSetTotalSubdomains", osm->n, &blocks, &flg));
723:   if (flg) {
724:     PetscCall(PCASMSetTotalSubdomains(pc, blocks, NULL, NULL));
725:     osm->dm_subdomains = PETSC_FALSE;
726:   }
727:   PetscCall(PetscOptionsInt("-pc_asm_local_blocks", "Number of local subdomains", "PCASMSetLocalSubdomains", osm->n_local_true, &blocks, &flg));
728:   if (flg) {
729:     PetscCall(PCASMSetLocalSubdomains(pc, blocks, NULL, NULL));
730:     osm->dm_subdomains = PETSC_FALSE;
731:   }
732:   PetscCall(PetscOptionsInt("-pc_asm_overlap", "Number of grid points overlap", "PCASMSetOverlap", osm->overlap, &ovl, &flg));
733:   if (flg) {
734:     PetscCall(PCASMSetOverlap(pc, ovl));
735:     osm->dm_subdomains = PETSC_FALSE;
736:   }
737:   flg = PETSC_FALSE;
738:   PetscCall(PetscOptionsEnum("-pc_asm_type", "Type of restriction/extension", "PCASMSetType", PCASMTypes, (PetscEnum)osm->type, (PetscEnum *)&asmtype, &flg));
739:   if (flg) PetscCall(PCASMSetType(pc, asmtype));
740:   flg = PETSC_FALSE;
741:   PetscCall(PetscOptionsEnum("-pc_asm_local_type", "Type of local solver composition", "PCASMSetLocalType", PCCompositeTypes, (PetscEnum)osm->loctype, (PetscEnum *)&loctype, &flg));
742:   if (flg) PetscCall(PCASMSetLocalType(pc, loctype));
743:   PetscCall(PetscOptionsFList("-pc_asm_sub_mat_type", "Subsolve Matrix Type", "PCASMSetSubMatType", MatList, NULL, sub_mat_type, 256, &flg));
744:   if (flg) PetscCall(PCASMSetSubMatType(pc, sub_mat_type));
745:   PetscOptionsHeadEnd();
746:   PetscFunctionReturn(PETSC_SUCCESS);
747: }
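
/*
   Annotation (not part of the original source): a typical command line exercising
   the options registered above might be (the executable name is a placeholder)

       ./app -pc_type asm -pc_asm_overlap 2 -pc_asm_type basic -sub_ksp_type preonly -sub_pc_type ilu

   where the "sub_" prefix reaches the per-subdomain KSP and PC created in PCSetUp_ASM().
*/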

749: static PetscErrorCode PCASMSetLocalSubdomains_ASM(PC pc, PetscInt n, IS is[], IS is_local[])
750: {
751:   PC_ASM *osm = (PC_ASM *)pc->data;

753:   PetscFunctionBegin;
754:   PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Each process must have 1 or more blocks, n = %" PetscInt_FMT, n);
755:   PetscCheck(!pc->setupcalled || (n == osm->n_local_true && !is), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

757:   if (!pc->setupcalled) {
758:     if (is) {
759:       for (PetscInt i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is[i]));
760:     }
761:     if (is_local) {
762:       for (PetscInt i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is_local[i]));
763:     }
764:     PetscCall(PCASMDestroySubdomains(osm->n_local_true, &osm->is, &osm->is_local));

766:     if (osm->ksp && osm->n_local_true != n) {
767:       for (PetscInt i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i]));
768:       PetscCall(PetscFree(osm->ksp));
769:     }

771:     osm->n_local_true = n;
772:     osm->is           = NULL;
773:     osm->is_local     = NULL;
774:     if (is) {
775:       PetscCall(PetscMalloc1(n, &osm->is));
776:       for (PetscInt i = 0; i < n; i++) osm->is[i] = is[i];
777:       /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
778:       osm->overlap = -1;
779:     }
780:     if (is_local) {
781:       PetscCall(PetscMalloc1(n, &osm->is_local));
782:       for (PetscInt i = 0; i < n; i++) osm->is_local[i] = is_local[i];
783:       if (!is) {
784:         PetscCall(PetscMalloc1(osm->n_local_true, &osm->is));
785:         for (PetscInt i = 0; i < osm->n_local_true; i++) {
786:           if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
787:             PetscCall(ISDuplicate(osm->is_local[i], &osm->is[i]));
788:             PetscCall(ISCopy(osm->is_local[i], osm->is[i]));
789:           } else {
790:             PetscCall(PetscObjectReference((PetscObject)osm->is_local[i]));
791:             osm->is[i] = osm->is_local[i];
792:           }
793:         }
794:       }
795:     }
796:   }
797:   PetscFunctionReturn(PETSC_SUCCESS);
798: }

800: static PetscErrorCode PCASMSetTotalSubdomains_ASM(PC pc, PetscInt N, IS *is, IS *is_local)
801: {
802:   PC_ASM     *osm = (PC_ASM *)pc->data;
803:   PetscMPIInt rank, size;
804:   PetscInt    n;

806:   PetscFunctionBegin;
807:   PetscCheck(N >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of total blocks must be > 0, N = %" PetscInt_FMT, N);
808:   PetscCheck(!is && !is_local, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Use PCASMSetLocalSubdomains() to set specific index sets, they cannot be set globally yet.");

810:   /*
811:      Split the subdomains equally among all processors
812:   */
813:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
814:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
815:   n = N / size + ((N % size) > rank);
816:   PetscCheck(n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Process %d must have at least one block: total processors %d total blocks %" PetscInt_FMT, rank, size, N);
817:   PetscCheck(!pc->setupcalled || n == osm->n_local_true, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "PCASMSetTotalSubdomains() should be called before PCSetUp().");
818:   if (!pc->setupcalled) {
819:     PetscCall(PCASMDestroySubdomains(osm->n_local_true, &osm->is, &osm->is_local));

821:     osm->n_local_true = n;
822:     osm->is           = NULL;
823:     osm->is_local     = NULL;
824:   }
825:   PetscFunctionReturn(PETSC_SUCCESS);
826: }

828: static PetscErrorCode PCASMSetOverlap_ASM(PC pc, PetscInt ovl)
829: {
830:   PC_ASM *osm = (PC_ASM *)pc->data;

832:   PetscFunctionBegin;
833:   PetscCheck(ovl >= 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Negative overlap value requested");
834:   PetscCheck(!pc->setupcalled || ovl == osm->overlap, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetOverlap() should be called before PCSetUp().");
835:   if (!pc->setupcalled) osm->overlap = ovl;
836:   PetscFunctionReturn(PETSC_SUCCESS);
837: }

839: static PetscErrorCode PCASMSetType_ASM(PC pc, PCASMType type)
840: {
841:   PC_ASM *osm = (PC_ASM *)pc->data;

843:   PetscFunctionBegin;
844:   osm->type     = type;
845:   osm->type_set = PETSC_TRUE;
846:   PetscFunctionReturn(PETSC_SUCCESS);
847: }

849: static PetscErrorCode PCASMGetType_ASM(PC pc, PCASMType *type)
850: {
851:   PC_ASM *osm = (PC_ASM *)pc->data;

853:   PetscFunctionBegin;
854:   *type = osm->type;
855:   PetscFunctionReturn(PETSC_SUCCESS);
856: }

858: static PetscErrorCode PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
859: {
860:   PC_ASM *osm = (PC_ASM *)pc->data;

862:   PetscFunctionBegin;
863:   PetscCheck(type == PC_COMPOSITE_ADDITIVE || type == PC_COMPOSITE_MULTIPLICATIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Only supports additive or multiplicative as the local type");
864:   osm->loctype = type;
865:   PetscFunctionReturn(PETSC_SUCCESS);
866: }

868: static PetscErrorCode PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
869: {
870:   PC_ASM *osm = (PC_ASM *)pc->data;

872:   PetscFunctionBegin;
873:   *type = osm->loctype;
874:   PetscFunctionReturn(PETSC_SUCCESS);
875: }

877: static PetscErrorCode PCASMSetSortIndices_ASM(PC pc, PetscBool doSort)
878: {
879:   PC_ASM *osm = (PC_ASM *)pc->data;

881:   PetscFunctionBegin;
882:   osm->sort_indices = doSort;
883:   PetscFunctionReturn(PETSC_SUCCESS);
884: }

886: static PetscErrorCode PCASMGetSubKSP_ASM(PC pc, PetscInt *n_local, PetscInt *first_local, KSP **ksp)
887: {
888:   PC_ASM *osm = (PC_ASM *)pc->data;

890:   PetscFunctionBegin;
891:   PetscCheck(osm->n_local_true >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");

893:   if (n_local) *n_local = osm->n_local_true;
894:   if (first_local) {
895:     PetscCallMPI(MPI_Scan(&osm->n_local_true, first_local, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)pc)));
896:     *first_local -= osm->n_local_true;
897:   }
898:   if (ksp) *ksp = osm->ksp;
899:   PetscFunctionReturn(PETSC_SUCCESS);
900: }

902: static PetscErrorCode PCASMGetSubMatType_ASM(PC pc, MatType *sub_mat_type)
903: {
904:   PC_ASM *osm = (PC_ASM *)pc->data;

906:   PetscFunctionBegin;
908:   PetscAssertPointer(sub_mat_type, 2);
909:   *sub_mat_type = osm->sub_mat_type;
910:   PetscFunctionReturn(PETSC_SUCCESS);
911: }

913: static PetscErrorCode PCASMSetSubMatType_ASM(PC pc, MatType sub_mat_type)
914: {
915:   PC_ASM *osm = (PC_ASM *)pc->data;

917:   PetscFunctionBegin;
919:   PetscCall(PetscFree(osm->sub_mat_type));
920:   PetscCall(PetscStrallocpy(sub_mat_type, (char **)&osm->sub_mat_type));
921:   PetscFunctionReturn(PETSC_SUCCESS);
922: }

924: /*@
925:   PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner `PCASM`.

927:   Collective

929:   Input Parameters:
930: + pc       - the preconditioner context
931: . n        - the number of subdomains for this processor (default value = 1)
932: . is       - the index set that defines the subdomains for this processor (or `NULL` for PETSc to determine subdomains)
933:              the values of the `is` array are copied so you can free the array (not the `IS` in the array) after this call
934: - is_local - the index sets that define the local part of the subdomains for this processor, not used unless `PCASMType` is `PC_ASM_RESTRICT`
935:              (or `NULL` to not provide these). The values of the `is_local` array are copied so you can free the array
936:              (not the `IS` in the array) after this call

938:   Options Database Key:
939: . -pc_asm_local_blocks blks - Sets number of local blocks

941:   Level: advanced

943:   Notes:
944:   The `IS` numbering is in the parallel, global numbering of the vector for both `is` and `is_local`

946:   By default the `PCASM` preconditioner uses 1 block per processor.

948:   Use `PCASMSetTotalSubdomains()` to set the subdomains for all processors.

950:   If `is_local` is provided and `PCASMType` is `PC_ASM_RESTRICT` then the solution only over the `is_local` region is interpolated
951:   back to form the global solution (this is the standard restricted additive Schwarz method, RASM)

953:   If `is_local` is provided and `PCASMType` is `PC_ASM_INTERPOLATE` or `PC_ASM_NONE` then an error is generated since there is
954:   no code to handle that case.

956: .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
957:           `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `PCASMType`, `PCASMSetType()`, `PCGASM`
958: @*/
959: PetscErrorCode PCASMSetLocalSubdomains(PC pc, PetscInt n, IS is[], IS is_local[])
960: {
961:   PetscFunctionBegin;
963:   PetscTryMethod(pc, "PCASMSetLocalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, n, is, is_local));
964:   PetscFunctionReturn(PETSC_SUCCESS);
965: }
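
/*
   Annotation (not part of the original source): a minimal usage sketch with two
   subdomains on this process. The sizes m0, m1 and starting indices start0, start1
   are placeholders supplied by the application in the global numbering of the
   vector; per the note above, the IS objects must stay valid while the solver uses
   them.

       IS is[2];
       ISCreateStride(PETSC_COMM_SELF, m0, start0, 1, &is[0]);
       ISCreateStride(PETSC_COMM_SELF, m1, start1, 1, &is[1]);
       PCASMSetLocalSubdomains(pc, 2, is, NULL);
*/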

967: /*@
968:   PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
969:   additive Schwarz preconditioner, `PCASM`.

971:   Collective, all MPI ranks must pass in the same array of `IS`

973:   Input Parameters:
974: + pc       - the preconditioner context
975: . N        - the number of subdomains for all processors
976: . is       - the index sets that define the subdomains for all processors (or `NULL` to ask PETSc to determine the subdomains)
977:              the values of the `is` array are copied so you can free the array (not the `IS` in the array) after this call
978: - is_local - the index sets that define the local part of the subdomains for this processor (or `NULL` to not provide this information)
979:              The values of the `is_local` array are copied so you can free the array (not the `IS` in the array) after this call

981:   Options Database Key:
982: . -pc_asm_blocks blks - Sets total blocks

984:   Level: advanced

986:   Notes:
987:   Currently you cannot use this to set the actual subdomains with the argument `is` or `is_local`.

989:   By default the `PCASM` preconditioner uses 1 block per processor.

991:   These index sets cannot be destroyed until after completion of the
992:   linear solves for which the `PCASM` preconditioner is being used.

994:   Use `PCASMSetLocalSubdomains()` to set local subdomains.

 996:   The `IS` numbering is in the parallel, global numbering of the vector for both `is` and `is_local`

998: .seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
999:           `PCASMCreateSubdomains2D()`, `PCGASM`
1000: @*/
1001: PetscErrorCode PCASMSetTotalSubdomains(PC pc, PetscInt N, IS is[], IS is_local[])
1002: {
1003:   PetscFunctionBegin;
1005:   PetscTryMethod(pc, "PCASMSetTotalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, N, is, is_local));
1006:   PetscFunctionReturn(PETSC_SUCCESS);
1007: }
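
/*
   Annotation (not part of the original source): a usage sketch that asks PETSc to
   split 16 subdomains across all ranks and to construct the index sets itself.

       PCSetType(pc, PCASM);
       PCASMSetTotalSubdomains(pc, 16, NULL, NULL);
*/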

1009: /*@
1010:   PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
1011:   additive Schwarz preconditioner, `PCASM`.

1013:   Logically Collective

1015:   Input Parameters:
1016: + pc  - the preconditioner context
1017: - ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

1019:   Options Database Key:
1020: . -pc_asm_overlap ovl - Sets overlap

1022:   Level: intermediate

1024:   Notes:
1025:   By default the `PCASM` preconditioner uses 1 block per processor.  To use
1026:   multiple blocks per processor, see `PCASMSetTotalSubdomains()` and
1027:   `PCASMSetLocalSubdomains()` (and the option -pc_asm_blocks <blks>).

1029:   The overlap defaults to 1, so if one desires that no additional
1030:   overlap be computed beyond what may have been set with a call to
1031:   `PCASMSetTotalSubdomains()` or `PCASMSetLocalSubdomains()`, then ovl
1032:   must be set to 0.  In particular, if one does not explicitly set
1033:   the subdomains in the application code, then all overlap would be computed
1034:   internally by PETSc, and using an overlap of 0 would result in a `PCASM`
1035:   variant that is equivalent to the block Jacobi preconditioner.

1037:   The default algorithm used by PETSc to increase overlap is fast but not scalable;
1038:   use the option -mat_increase_overlap_scalable when the problem and the number of processes are large.

1040:   One can define initial index sets with any overlap via
1041:   `PCASMSetLocalSubdomains()`; the routine
1042:   `PCASMSetOverlap()` merely allows PETSc to extend that overlap further
1043:   if desired.

1045: .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
1046:           `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `MatIncreaseOverlap()`, `PCGASM`
1047: @*/
1048: PetscErrorCode PCASMSetOverlap(PC pc, PetscInt ovl)
1049: {
1050:   PetscFunctionBegin;
1053:   PetscTryMethod(pc, "PCASMSetOverlap_C", (PC, PetscInt), (pc, ovl));
1054:   PetscFunctionReturn(PETSC_SUCCESS);
1055: }
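
/*
   Annotation (not part of the original source): a usage sketch requesting two
   layers of overlap; ksp, A, b, and x are assumed to come from the calling
   application.

       KSPSetOperators(ksp, A, A);
       KSPGetPC(ksp, &pc);
       PCSetType(pc, PCASM);
       PCASMSetOverlap(pc, 2);
       KSPSetFromOptions(ksp);
       KSPSolve(ksp, b, x);
*/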

1057: /*@
1058:   PCASMSetType - Sets the type of restriction and interpolation used
1059:   for local problems in the additive Schwarz method, `PCASM`.

1061:   Logically Collective

1063:   Input Parameters:
1064: + pc   - the preconditioner context
1065: - type - variant of `PCASM`, one of
1066: .vb
1067:       PC_ASM_BASIC       - full interpolation and restriction
1068:       PC_ASM_RESTRICT    - full restriction, local processor interpolation (default)
1069:       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1070:       PC_ASM_NONE        - local processor restriction and interpolation
1071: .ve

1073:   Options Database Key:
1074: . -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`

1076:   Level: intermediate

1078:   Note:
1079:   if the is_local arguments are passed to `PCASMSetLocalSubdomains()` then they are used when `PC_ASM_RESTRICT` has been selected
1080:   to limit the local processor interpolation

1082: .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
1083:           `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetLocalType()`, `PCASMGetLocalType()`, `PCGASM`
1084: @*/
1085: PetscErrorCode PCASMSetType(PC pc, PCASMType type)
1086: {
1087:   PetscFunctionBegin;
1090:   PetscTryMethod(pc, "PCASMSetType_C", (PC, PCASMType), (pc, type));
1091:   PetscFunctionReturn(PETSC_SUCCESS);
1092: }
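
/*
   Annotation (not part of the original source): selecting the basic (non-restricted)
   variant, either in code or from the options database.

       PCASMSetType(pc, PC_ASM_BASIC);        (equivalently: -pc_asm_type basic)
*/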

1094: /*@
1095:   PCASMGetType - Gets the type of restriction and interpolation used
1096:   for local problems in the additive Schwarz method, `PCASM`.

1098:   Logically Collective

1100:   Input Parameter:
1101: . pc - the preconditioner context

1103:   Output Parameter:
1104: . type - variant of `PCASM`, one of
1105: .vb
1106:       PC_ASM_BASIC       - full interpolation and restriction
1107:       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1108:       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1109:       PC_ASM_NONE        - local processor restriction and interpolation
1110: .ve

1112:   Options Database Key:
1113: . -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASM` type

1115:   Level: intermediate

1117: .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`, `PCGASM`,
1118:           `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`
1119: @*/
1120: PetscErrorCode PCASMGetType(PC pc, PCASMType *type)
1121: {
1122:   PetscFunctionBegin;
1124:   PetscUseMethod(pc, "PCASMGetType_C", (PC, PCASMType *), (pc, type));
1125:   PetscFunctionReturn(PETSC_SUCCESS);
1126: }

1128: /*@
1129:   PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method, `PCASM`.

1131:   Logically Collective

1133:   Input Parameters:
1134: + pc   - the preconditioner context
1135: - type - type of composition, one of
1136: .vb
1137:   PC_COMPOSITE_ADDITIVE       - local additive combination
1138:   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1139: .ve

1141:   Options Database Key:
1142: . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

1144:   Level: intermediate

1146: .seealso: [](ch_ksp), `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMGetLocalType()`, `PCASMType`, `PCCompositeType`
1147: @*/
1148: PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
1149: {
1150:   PetscFunctionBegin;
1153:   PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));
1154:   PetscFunctionReturn(PETSC_SUCCESS);
1155: }
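
/*
   Annotation (not part of the original source): selecting multiplicative composition
   of the local solves, either in code or from the options database.

       PCASMSetLocalType(pc, PC_COMPOSITE_MULTIPLICATIVE);        (equivalently: -pc_asm_local_type multiplicative)
*/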

1157: /*@
1158:   PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method, `PCASM`.

1160:   Logically Collective

1162:   Input Parameter:
1163: . pc - the preconditioner context

1165:   Output Parameter:
1166: . type - type of composition, one of
1167: .vb
1168:   PC_COMPOSITE_ADDITIVE       - local additive combination
1169:   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1170: .ve

1172:   Options Database Key:
1173: . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

1175:   Level: intermediate

1177: .seealso: [](ch_ksp), `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMType`, `PCCompositeType`
1178: @*/
1179: PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
1180: {
1181:   PetscFunctionBegin;
1183:   PetscAssertPointer(type, 2);
1184:   PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));
1185:   PetscFunctionReturn(PETSC_SUCCESS);
1186: }
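
/*
   A minimal sketch (not part of this file) of switching the local composition to
   multiplicative, which applies the block solves sequentially on each process
   instead of summing their contributions. It assumes a PC `pc` of type PCASM.

     PCCompositeType ltype;

     PetscCall(PCASMSetLocalType(pc, PC_COMPOSITE_MULTIPLICATIVE));
     PetscCall(PCASMGetLocalType(pc, &ltype)); // ltype now reports PC_COMPOSITE_MULTIPLICATIVE
*/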

1188: /*@
1189:   PCASMSetSortIndices - Determines whether subdomain indices are sorted.

1191:   Logically Collective

1193:   Input Parameters:
1194: + pc     - the preconditioner context
1195: - doSort - sort the subdomain indices

1197:   Level: intermediate

1199: .seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
1200:           `PCASMCreateSubdomains2D()`
1201: @*/
1202: PetscErrorCode PCASMSetSortIndices(PC pc, PetscBool doSort)
1203: {
1204:   PetscFunctionBegin;
1207:   PetscTryMethod(pc, "PCASMSetSortIndices_C", (PC, PetscBool), (pc, doSort));
1208:   PetscFunctionReturn(PETSC_SUCCESS);
1209: }

1211: /*@C
1212:   PCASMGetSubKSP - Gets the local `KSP` contexts for all blocks on
1213:   this processor.

1215:   Collective if and only if first_local is requested

1217:   Input Parameter:
1218: . pc - the preconditioner context

1220:   Output Parameters:
1221: + n_local     - the number of blocks on this processor or `NULL`
1222: . first_local - the global number of the first block on this processor or `NULL`; either all processors must request it or all must pass `NULL`
1223: - ksp         - the array of `KSP` contexts

1225:   Level: advanced

1227:   Notes:
1228:   The array of `KSP` contexts returned by `PCASMGetSubKSP()` must not be freed by the caller.

1230:   You must call `KSPSetUp()` before calling `PCASMGetSubKSP()`.

1232:   Fortran Note:
1233:   Call `PCASMRestoreSubKSP()` when access to the array of `KSP` is no longer needed

1235: .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1236:           `PCASMCreateSubdomains2D()`
1237: @*/
1238: PetscErrorCode PCASMGetSubKSP(PC pc, PetscInt *n_local, PetscInt *first_local, KSP *ksp[])
1239: {
1240:   PetscFunctionBegin;
1242:   PetscUseMethod(pc, "PCASMGetSubKSP_C", (PC, PetscInt *, PetscInt *, KSP **), (pc, n_local, first_local, ksp));
1243:   PetscFunctionReturn(PETSC_SUCCESS);
1244: }
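
/*
   A hedged sketch (not part of this file) of customizing each block solver after
   setup, as described in the notes above. It assumes a KSP object `ksp` whose PC
   is PCASM and whose operators have been set; KSPSetUp() must be called first.

     PC       pc;
     PetscInt i, nlocal, first;
     KSP     *subksp;
     PC       subpc;

     PetscCall(KSPSetUp(ksp));                                // required before PCASMGetSubKSP()
     PetscCall(KSPGetPC(ksp, &pc));
     PetscCall(PCASMGetSubKSP(pc, &nlocal, &first, &subksp)); // do not free subksp
     for (i = 0; i < nlocal; i++) {
       PetscCall(KSPGetPC(subksp[i], &subpc));
       PetscCall(PCSetType(subpc, PCILU));                    // e.g. ILU on every local block
       PetscCall(KSPSetType(subksp[i], KSPPREONLY));
     }
*/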

1246: /*MC
1247:    PCASM - Use the (restricted) additive Schwarz method, in which each block is (approximately) solved with
1248:            its own `KSP` object, {cite}`dryja1987additive` and {cite}`1sbg`

1250:    Options Database Keys:
1251: +  -pc_asm_blocks blks                            - Sets total blocks. Defaults to one block per MPI process.
1252: .  -pc_asm_overlap ovl                            - Sets overlap
1253: .  -pc_asm_type (basic|restrict|interpolate|none) - Sets `PCASMType`, default is restrict. See `PCASMSetType()`
1254: .  -pc_asm_dm_subdomains (true|false)             - use subdomains defined by the `DM` with `DMCreateDomainDecomposition()`
1255: -  -pc_asm_local_type (additive|multiplicative)   - Sets `PCCompositeType`, default is additive. See `PCASMSetLocalType()`

1257:    Level: beginner

1259:    Notes:
1260:    Running with, for example, 3 blocks on 1 processor and 3 blocks on 3 processors gives different convergence
1261:    rates because of the default option `-pc_asm_type restrict`. Use `-pc_asm_type basic` to get the same
1262:    convergence behavior in both cases.

1264:    Each processor can have one or more blocks, but a block cannot be shared by more
1265:    than one processor. Use `PCGASM` for subdomains shared by multiple processes.

1267:    To set options on the solvers for each block, prefix all the `KSP` and `PC`
1268:    options database keys with `-sub_`. For example, `-sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly`

1270:    To set the options separately for each block, call `PCASMGetSubKSP()`
1271:    and set the options directly on the resulting `KSP` objects (you can access their `PC`s with `KSPGetPC()`)

1273:    If the `PC` has an associated `DM`, then, by default, `DMCreateDomainDecomposition()` is used to create the subdomains

1275: .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCASMType`, `PCCompositeType`,
1276:           `PCBJACOBI`, `PCASMGetSubKSP()`, `PCASMSetLocalSubdomains()`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`,
1277:           `PCASMSetTotalSubdomains()`, `PCSetModifySubMatrices()`, `PCASMSetOverlap()`, `PCASMSetType()`
1278: M*/
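
/*
   A hedged sketch (not part of this file) of a typical PCASM setup, combining the
   programmatic calls with the -sub_ prefixed options mentioned above. It assumes
   a KSP object `ksp` with its operators already set; the command-line options are
   only examples of what a user might pass.

     PC pc;

     PetscCall(KSPGetPC(ksp, &pc));
     PetscCall(PCSetType(pc, PCASM));
     PetscCall(PCASMSetOverlap(pc, 2)); // same effect as -pc_asm_overlap 2

   and on the command line the block solvers can be controlled with, e.g.,

     -sub_ksp_type preonly -sub_pc_type ilu -sub_pc_factor_levels 1
*/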

1280: PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
1281: {
1282:   PC_ASM *osm;

1284:   PetscFunctionBegin;
1285:   PetscCall(PetscNew(&osm));

1287:   osm->n             = PETSC_DECIDE;
1288:   osm->n_local       = 0;
1289:   osm->n_local_true  = PETSC_DECIDE;
1290:   osm->overlap       = 1;
1291:   osm->ksp           = NULL;
1292:   osm->restriction   = NULL;
1293:   osm->lprolongation = NULL;
1294:   osm->lrestriction  = NULL;
1295:   osm->x             = NULL;
1296:   osm->y             = NULL;
1297:   osm->is            = NULL;
1298:   osm->is_local      = NULL;
1299:   osm->mat           = NULL;
1300:   osm->pmat          = NULL;
1301:   osm->type          = PC_ASM_RESTRICT;
1302:   osm->loctype       = PC_COMPOSITE_ADDITIVE;
1303:   osm->sort_indices  = PETSC_TRUE;
1304:   osm->dm_subdomains = PETSC_FALSE;
1305:   osm->sub_mat_type  = NULL;

1307:   pc->data                   = (void *)osm;
1308:   pc->ops->apply             = PCApply_ASM;
1309:   pc->ops->matapply          = PCMatApply_ASM;
1310:   pc->ops->applytranspose    = PCApplyTranspose_ASM;
1311:   pc->ops->matapplytranspose = PCMatApplyTranspose_ASM;
1312:   pc->ops->setup             = PCSetUp_ASM;
1313:   pc->ops->reset             = PCReset_ASM;
1314:   pc->ops->destroy           = PCDestroy_ASM;
1315:   pc->ops->setfromoptions    = PCSetFromOptions_ASM;
1316:   pc->ops->setuponblocks     = PCSetUpOnBlocks_ASM;
1317:   pc->ops->view              = PCView_ASM;
1318:   pc->ops->applyrichardson   = NULL;

1320:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", PCASMSetLocalSubdomains_ASM));
1321:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", PCASMSetTotalSubdomains_ASM));
1322:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", PCASMSetOverlap_ASM));
1323:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", PCASMSetType_ASM));
1324:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", PCASMGetType_ASM));
1325:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", PCASMSetLocalType_ASM));
1326:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", PCASMGetLocalType_ASM));
1327:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", PCASMSetSortIndices_ASM));
1328:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", PCASMGetSubKSP_ASM));
1329:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", PCASMGetSubMatType_ASM));
1330:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", PCASMSetSubMatType_ASM));
1331:   PetscFunctionReturn(PETSC_SUCCESS);
1332: }

1334: /*@C
1335:   PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1336:   preconditioner, `PCASM`,  for any problem on a general grid.

1338:   Collective

1340:   Input Parameters:
1341: + A - The global matrix operator
1342: - n - the number of local blocks

1344:   Output Parameter:
1345: . outis - the array of index sets defining the subdomains

1347:   Level: advanced

1349:   Note:
1350:   This generates nonoverlapping subdomains; the `PCASM` will generate the overlap
1351:   from these if you use `PCASMSetLocalSubdomains()`

1353: .seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMDestroySubdomains()`
1354: @*/
1355: PetscErrorCode PCASMCreateSubdomains(Mat A, PetscInt n, IS *outis[])
1356: {
1357:   MatPartitioning mpart;
1358:   const char     *prefix;
1359:   PetscInt        i, j, rstart, rend, bs;
1360:   PetscBool       hasop, isbaij = PETSC_FALSE, foundpart = PETSC_FALSE;
1361:   Mat             Ad = NULL, adj;
1362:   IS              ispart, isnumb, *is;

1364:   PetscFunctionBegin;
1366:   PetscAssertPointer(outis, 3);
1367:   PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of local blocks must be > 0, n = %" PetscInt_FMT, n);

1369:   /* Get prefix, row distribution, and block size */
1370:   PetscCall(MatGetOptionsPrefix(A, &prefix));
1371:   PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
1372:   PetscCall(MatGetBlockSize(A, &bs));
1373:   PetscCheck(rstart / bs * bs == rstart && rend / bs * bs == rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "bad row distribution [%" PetscInt_FMT ",%" PetscInt_FMT ") for matrix block size %" PetscInt_FMT, rstart, rend, bs);

1375:   /* Get diagonal block from matrix if possible */
1376:   PetscCall(MatHasOperation(A, MATOP_GET_DIAGONAL_BLOCK, &hasop));
1377:   if (hasop) PetscCall(MatGetDiagonalBlock(A, &Ad));
1378:   if (Ad) {
1379:     PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQBAIJ, &isbaij));
1380:     if (!isbaij) PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQSBAIJ, &isbaij));
1381:   }
1382:   if (Ad && n > 1) {
1383:     PetscBool match, done;
1384:     /* Try to setup a good matrix partitioning if available */
1385:     PetscCall(MatPartitioningCreate(PETSC_COMM_SELF, &mpart));
1386:     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)mpart, prefix));
1387:     PetscCall(MatPartitioningSetFromOptions(mpart));
1388:     PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGCURRENT, &match));
1389:     if (!match) PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGSQUARE, &match));
1390:     if (!match) { /* assume a "good" partitioner is available */
1391:       PetscInt        na;
1392:       const PetscInt *ia, *ja;
1393:       PetscCall(MatGetRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
1394:       if (done) {
1395:         /* Build adjacency matrix by hand. Unfortunately a call to
1396:            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1397:            remove the block-aij structure and we cannot expect
1398:            MatPartitioning to split vertices as we need */
1399:         PetscInt        i, j, len, nnz, cnt, *iia = NULL, *jja = NULL;
1400:         const PetscInt *row;
1401:         nnz = 0;
1402:         for (i = 0; i < na; i++) { /* count number of nonzeros */
1403:           len = ia[i + 1] - ia[i];
1404:           row = ja + ia[i];
1405:           for (j = 0; j < len; j++) {
1406:             if (row[j] == i) { /* don't count diagonal */
1407:               len--;
1408:               break;
1409:             }
1410:           }
1411:           nnz += len;
1412:         }
1413:         PetscCall(PetscMalloc1(na + 1, &iia));
1414:         PetscCall(PetscMalloc1(nnz, &jja));
1415:         nnz    = 0;
1416:         iia[0] = 0;
1417:         for (i = 0; i < na; i++) { /* fill adjacency */
1418:           cnt = 0;
1419:           len = ia[i + 1] - ia[i];
1420:           row = ja + ia[i];
1421:           for (j = 0; j < len; j++) {
1422:             if (row[j] != i) { /* if not diagonal */
1423:               jja[nnz + cnt++] = row[j];
1424:             }
1425:           }
1426:           nnz += cnt;
1427:           iia[i + 1] = nnz;
1428:         }
1429:         /* Partitioning of the adjacency matrix */
1430:         PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, na, na, iia, jja, NULL, &adj));
1431:         PetscCall(MatPartitioningSetAdjacency(mpart, adj));
1432:         PetscCall(MatPartitioningSetNParts(mpart, n));
1433:         PetscCall(MatPartitioningApply(mpart, &ispart));
1434:         PetscCall(ISPartitioningToNumbering(ispart, &isnumb));
1435:         PetscCall(MatDestroy(&adj));
1436:         foundpart = PETSC_TRUE;
1437:       }
1438:       PetscCall(MatRestoreRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
1439:     }
1440:     PetscCall(MatPartitioningDestroy(&mpart));
1441:   }

1443:   PetscCall(PetscMalloc1(n, &is));
1444:   *outis = is;

1446:   if (!foundpart) {
1447:     /* Partitioning by contiguous chunks of rows */

1449:     PetscInt mbs   = (rend - rstart) / bs;
1450:     PetscInt start = rstart;
1451:     for (i = 0; i < n; i++) {
1452:       PetscInt count = (mbs / n + ((mbs % n) > i)) * bs;
1453:       PetscCall(ISCreateStride(PETSC_COMM_SELF, count, start, 1, &is[i]));
1454:       start += count;
1455:     }

1457:   } else {
1458:     /* Partitioning by adjacency of diagonal block  */

1460:     const PetscInt *numbering;
1461:     PetscInt       *count, nidx, *indices, *newidx, start = 0;
1462:     /* Get node count in each partition */
1463:     PetscCall(PetscMalloc1(n, &count));
1464:     PetscCall(ISPartitioningCount(ispart, n, count));
1465:     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1466:       for (i = 0; i < n; i++) count[i] *= bs;
1467:     }
1468:     /* Build indices from node numbering */
1469:     PetscCall(ISGetLocalSize(isnumb, &nidx));
1470:     PetscCall(PetscMalloc1(nidx, &indices));
1471:     for (i = 0; i < nidx; i++) indices[i] = i; /* needs to be initialized */
1472:     PetscCall(ISGetIndices(isnumb, &numbering));
1473:     PetscCall(PetscSortIntWithPermutation(nidx, numbering, indices));
1474:     PetscCall(ISRestoreIndices(isnumb, &numbering));
1475:     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1476:       PetscCall(PetscMalloc1(nidx * bs, &newidx));
1477:       for (i = 0; i < nidx; i++) {
1478:         for (j = 0; j < bs; j++) newidx[i * bs + j] = indices[i] * bs + j;
1479:       }
1480:       PetscCall(PetscFree(indices));
1481:       nidx *= bs;
1482:       indices = newidx;
1483:     }
1484:     /* Shift to get global indices */
1485:     for (i = 0; i < nidx; i++) indices[i] += rstart;

1487:     /* Build the index sets for each block */
1488:     for (i = 0; i < n; i++) {
1489:       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, count[i], &indices[start], PETSC_COPY_VALUES, &is[i]));
1490:       PetscCall(ISSort(is[i]));
1491:       start += count[i];
1492:     }

1494:     PetscCall(PetscFree(count));
1495:     PetscCall(PetscFree(indices));
1496:     PetscCall(ISDestroy(&isnumb));
1497:     PetscCall(ISDestroy(&ispart));
1498:   }
1499:   PetscFunctionReturn(PETSC_SUCCESS);
1500: }
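
/*
   A minimal sketch (not part of this file) of using PCASMCreateSubdomains() to
   split the local rows of a matrix `A` into 4 nonoverlapping blocks per process
   and handing them to the preconditioner. The PC `pc` is assumed to be of type
   PCASM and to use `A` as its preconditioning matrix.

     IS      *is;
     PetscInt nblocks = 4;

     PetscCall(PCASMCreateSubdomains(A, nblocks, &is));
     PetscCall(PCASMSetLocalSubdomains(pc, nblocks, is, NULL)); // overlap is added later by PCASM
     PetscCall(PCASMDestroySubdomains(nblocks, &is, NULL));     // the PC keeps its own references
*/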

1502: /*@C
1503:   PCASMDestroySubdomains - Destroys the index sets created with
1504:   `PCASMCreateSubdomains()`. Should be called after setting subdomains with `PCASMSetLocalSubdomains()`.

1506:   Collective

1508:   Input Parameters:
1509: + n        - the number of index sets
1510: . is       - the array of index sets
1511: - is_local - the array of local index sets, can be `NULL`

1513:   Level: advanced

1515:   Developer Note:
1516:   The `IS` arguments should be a *[]

1518: .seealso: [](ch_ksp), `PCASM`, `PCASMCreateSubdomains()`, `PCASMSetLocalSubdomains()`
1519: @*/
1520: PetscErrorCode PCASMDestroySubdomains(PetscInt n, IS *is[], IS *is_local[])
1521: {
1522:   PetscInt i;

1524:   PetscFunctionBegin;
1525:   if (n <= 0) PetscFunctionReturn(PETSC_SUCCESS);
1526:   if (*is) {
1527:     PetscAssertPointer(*is, 2);
1528:     for (i = 0; i < n; i++) PetscCall(ISDestroy(&(*is)[i]));
1529:     PetscCall(PetscFree(*is));
1530:   }
1531:   if (is_local && *is_local) {
1532:     PetscAssertPointer(*is_local, 3);
1533:     for (i = 0; i < n; i++) PetscCall(ISDestroy(&(*is_local)[i]));
1534:     PetscCall(PetscFree(*is_local));
1535:   }
1536:   PetscFunctionReturn(PETSC_SUCCESS);
1537: }

1539: /*@C
1540:   PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1541:   preconditioner, `PCASM`, for a two-dimensional problem on a regular grid.

1543:   Not Collective

1545:   Input Parameters:
1546: + m       - the number of mesh points in the x direction
1547: . n       - the number of mesh points in the y direction
1548: . M       - the number of subdomains in the x direction
1549: . N       - the number of subdomains in the y direction
1550: . dof     - degrees of freedom per node
1551: - overlap - overlap in mesh lines

1553:   Output Parameters:
1554: + Nsub     - the number of subdomains created
1555: . is       - array of index sets defining overlapping (if overlap > 0) subdomains
1556: - is_local - array of index sets defining non-overlapping subdomains

1558:   Level: advanced

1560:   Note:
1561:   Presently `PCASMCreateSubdomains2D()` is valid only for sequential
1562:   preconditioners.  More general related routines are
1563:   `PCASMSetTotalSubdomains()` and `PCASMSetLocalSubdomains()`.

1565: .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
1566:           `PCASMSetOverlap()`
1567: @*/
1568: PetscErrorCode PCASMCreateSubdomains2D(PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt dof, PetscInt overlap, PetscInt *Nsub, IS *is[], IS *is_local[])
1569: {
1570:   PetscInt i, j, height, width, ystart, xstart, yleft, yright, xleft, xright, loc_outer;
1571:   PetscInt nidx, *idx, loc, ii, jj, count;

1573:   PetscFunctionBegin;
1574:   PetscCheck(dof == 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "dof must be 1");

1576:   *Nsub = N * M;
1577:   PetscCall(PetscMalloc1(*Nsub, is));
1578:   PetscCall(PetscMalloc1(*Nsub, is_local));
1579:   ystart    = 0;
1580:   loc_outer = 0;
1581:   for (i = 0; i < N; i++) {
1582:     height = n / N + ((n % N) > i); /* height of subdomain */
1583:     PetscCheck(height >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many N subdomains for mesh dimension n");
1584:     yleft = ystart - overlap;
1585:     if (yleft < 0) yleft = 0;
1586:     yright = ystart + height + overlap;
1587:     if (yright > n) yright = n;
1588:     xstart = 0;
1589:     for (j = 0; j < M; j++) {
1590:       width = m / M + ((m % M) > j); /* width of subdomain */
1591:       PetscCheck(width >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many M subdomains for mesh dimension m");
1592:       xleft = xstart - overlap;
1593:       if (xleft < 0) xleft = 0;
1594:       xright = xstart + width + overlap;
1595:       if (xright > m) xright = m;
1596:       nidx = (xright - xleft) * (yright - yleft);
1597:       PetscCall(PetscMalloc1(nidx, &idx));
1598:       loc = 0;
1599:       for (ii = yleft; ii < yright; ii++) {
1600:         count = m * ii + xleft;
1601:         for (jj = xleft; jj < xright; jj++) idx[loc++] = count++;
1602:       }
1603:       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nidx, idx, PETSC_COPY_VALUES, (*is) + loc_outer));
1604:       if (overlap == 0) {
1605:         PetscCall(PetscObjectReference((PetscObject)(*is)[loc_outer]));

1607:         (*is_local)[loc_outer] = (*is)[loc_outer];
1608:       } else {
1609:         for (loc = 0, ii = ystart; ii < ystart + height; ii++) {
1610:           for (jj = xstart; jj < xstart + width; jj++) idx[loc++] = m * ii + jj;
1611:         }
1612:         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, loc, idx, PETSC_COPY_VALUES, *is_local + loc_outer));
1613:       }
1614:       PetscCall(PetscFree(idx));
1615:       xstart += width;
1616:       loc_outer++;
1617:     }
1618:     ystart += height;
1619:   }
1620:   for (i = 0; i < *Nsub; i++) PetscCall(ISSort((*is)[i]));
1621:   PetscFunctionReturn(PETSC_SUCCESS);
1622: }
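
/*
   A hedged sketch (not part of this file) of the 2D helper for a sequential run on
   an m x n grid with one degree of freedom per node: a 2 x 2 array of subdomains
   with one mesh line of overlap. The PC `pc` is assumed to be of type PCASM.

     PetscInt m = 64, n = 64, Nsub;
     IS      *is, *is_local;

     PetscCall(PCASMCreateSubdomains2D(m, n, 2, 2, 1, 1, &Nsub, &is, &is_local));
     PetscCall(PCASMSetLocalSubdomains(pc, Nsub, is, is_local));
     PetscCall(PCASMDestroySubdomains(Nsub, &is, &is_local));
*/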

1624: /*@C
1625:   PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1626:   only) for the additive Schwarz preconditioner, `PCASM`.

1628:   Not Collective

1630:   Input Parameter:
1631: . pc - the preconditioner context

1633:   Output Parameters:
1634: + n        - if requested, the number of subdomains for this processor (default value = 1)
1635: . is       - if requested, the index sets that define the subdomains for this processor
1636: - is_local - if requested, the index sets that define the local part of the subdomains for this processor (can be `NULL`)

1638:   Level: advanced

1640:   Note:
1641:   The `IS` numbering is in the parallel, global numbering of the vector.

1643: .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1644:           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubmatrices()`
1645: @*/
1646: PetscErrorCode PCASMGetLocalSubdomains(PC pc, PetscInt *n, IS *is[], IS *is_local[])
1647: {
1648:   PC_ASM   *osm = (PC_ASM *)pc->data;
1649:   PetscBool match;

1651:   PetscFunctionBegin;
1653:   if (n) PetscAssertPointer(n, 2);
1654:   if (is) PetscAssertPointer(is, 3);
1655:   if (is_local) PetscAssertPointer(is_local, 4);
1656:   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1657:   PetscCheck(match, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "PC is not a PCASM");
1658:   if (n) *n = osm->n_local_true;
1659:   if (is) *is = osm->is;
1660:   if (is_local) *is_local = osm->is_local;
1661:   PetscFunctionReturn(PETSC_SUCCESS);
1662: }

1664: /*@C
1665:   PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1666:   only) for the additive Schwarz preconditioner, `PCASM`.

1668:   Not Collective

1670:   Input Parameter:
1671: . pc - the preconditioner context

1673:   Output Parameters:
1674: + n   - if requested, the number of matrices for this processor (default value = 1)
1675: - mat - if requested, the matrices

1677:   Level: advanced

1679:   Notes:
1680:   Call after `PCSetUp()` (or `KSPSetUp()`) but before `PCApply()` and before `PCSetUpOnBlocks()`

1682:   Usually one would use `PCSetModifySubMatrices()` to change the submatrices in building the preconditioner.

1684: .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1685:           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`, `PCSetModifySubMatrices()`
1686: @*/
1687: PetscErrorCode PCASMGetLocalSubmatrices(PC pc, PetscInt *n, Mat *mat[])
1688: {
1689:   PC_ASM   *osm;
1690:   PetscBool match;

1692:   PetscFunctionBegin;
1694:   if (n) PetscAssertPointer(n, 2);
1695:   if (mat) PetscAssertPointer(mat, 3);
1696:   PetscCheck(pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call after KSPSetUp() or PCSetUp().");
1697:   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1698:   if (!match) {
1699:     if (n) *n = 0;
1700:     if (mat) *mat = NULL;
1701:   } else {
1702:     osm = (PC_ASM *)pc->data;
1703:     if (n) *n = osm->n_local_true;
1704:     if (mat) *mat = osm->pmat;
1705:   }
1706:   PetscFunctionReturn(PETSC_SUCCESS);
1707: }
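
/*
   A minimal sketch (not part of this file) of inspecting the local block matrices
   after setup, for example to check their sizes. It assumes a PC `pc` of type
   PCASM for which PCSetUp() (or KSPSetUp()) has already been called.

     PetscInt i, nmat, mloc, nloc;
     Mat     *submat;

     PetscCall(PCASMGetLocalSubmatrices(pc, &nmat, &submat));
     for (i = 0; i < nmat; i++) {
       PetscCall(MatGetSize(submat[i], &mloc, &nloc)); // each block is a small sequential matrix
     }
*/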

1709: /*@
1710:   PCASMSetDMSubdomains - Indicates whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.

1712:   Logically Collective

1714:   Input Parameters:
1715: + pc  - the preconditioner
1716: - flg - boolean indicating whether to use subdomains defined by the `DM`

1718:   Options Database Key:
1719: . -pc_asm_dm_subdomains (true|false) - use subdomains defined by the `DM` with `DMCreateDomainDecomposition()`

1721:   Level: intermediate

1723:   Note:
1724:   `PCASMSetTotalSubdomains()` and `PCASMSetOverlap()` take precedence over `PCASMSetDMSubdomains()`,
1725:   so setting either of the first two effectively turns the latter off.

1727:   Developer Note:
1728:   This should be `PCASMSetUseDMSubdomains()`, similarly for the options database key

1730: .seealso: [](ch_ksp), `PCASM`, `PCASMGetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1731:           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1732: @*/
1733: PetscErrorCode PCASMSetDMSubdomains(PC pc, PetscBool flg)
1734: {
1735:   PC_ASM   *osm = (PC_ASM *)pc->data;
1736:   PetscBool match;

1738:   PetscFunctionBegin;
1741:   PetscCheck(!pc->setupcalled, ((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONGSTATE, "Not for a setup PC.");
1742:   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1743:   if (match) osm->dm_subdomains = flg;
1744:   PetscFunctionReturn(PETSC_SUCCESS);
1745: }
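
/*
   A brief sketch (not part of this file): when the solver has a DM attached, the
   decomposition provided by the DM can be requested explicitly. It assumes a KSP
   object `ksp` with a DM already attached and not yet set up.

     PC pc;

     PetscCall(KSPGetPC(ksp, &pc));
     PetscCall(PCSetType(pc, PCASM));
     PetscCall(PCASMSetDMSubdomains(pc, PETSC_TRUE)); // same as -pc_asm_dm_subdomains true
*/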

1747: /*@
1748:   PCASMGetDMSubdomains - Returns flag indicating whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.

1750:   Not Collective

1752:   Input Parameter:
1753: . pc - the preconditioner

1755:   Output Parameter:
1756: . flg - boolean indicating whether to use subdomains defined by the `DM`

1758:   Level: intermediate

1760:   Developer Note:
1761:   This should be `PCASMGetUseDMSubdomains()`

1763: .seealso: [](ch_ksp), `PCASM`, `PCASMSetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1764:           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1765: @*/
1766: PetscErrorCode PCASMGetDMSubdomains(PC pc, PetscBool *flg)
1767: {
1768:   PC_ASM   *osm = (PC_ASM *)pc->data;
1769:   PetscBool match;

1771:   PetscFunctionBegin;
1773:   PetscAssertPointer(flg, 2);
1774:   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1775:   if (match) *flg = osm->dm_subdomains;
1776:   else *flg = PETSC_FALSE;
1777:   PetscFunctionReturn(PETSC_SUCCESS);
1778: }

1780: /*@
1781:   PCASMGetSubMatType - Gets the matrix type used for `PCASM` subsolves, as a string.

1783:   Not Collective

1785:   Input Parameter:
1786: . pc - the `PC`

1788:   Output Parameter:
1789: . sub_mat_type - name of matrix type

1791:   Level: advanced

1793: .seealso: [](ch_ksp), `PCASM`, `PCASMSetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1794: @*/
1795: PetscErrorCode PCASMGetSubMatType(PC pc, MatType *sub_mat_type)
1796: {
1797:   PetscFunctionBegin;
1799:   PetscTryMethod(pc, "PCASMGetSubMatType_C", (PC, MatType *), (pc, sub_mat_type));
1800:   PetscFunctionReturn(PETSC_SUCCESS);
1801: }

1803: /*@
1804:   PCASMSetSubMatType - Set the type of matrix used for `PCASM` subsolves

1806:   Collective

1808:   Input Parameters:
1809: + pc           - the `PC` object
1810: - sub_mat_type - the `MatType`

1812:   Options Database Key:
1813: . -pc_asm_sub_mat_type sub_mat_type - Sets the matrix type used for subsolves, for example, seqaijviennacl.
1814:                                       If you specify a base name like aijviennacl, the corresponding sequential type is assumed.

1816:   Note:
1817:   See `MatType` for available types

1819:   Level: advanced

1821: .seealso: [](ch_ksp), `PCASM`, `PCASMGetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1822: @*/
1823: PetscErrorCode PCASMSetSubMatType(PC pc, MatType sub_mat_type)
1824: {
1825:   PetscFunctionBegin;
1827:   PetscTryMethod(pc, "PCASMSetSubMatType_C", (PC, MatType), (pc, sub_mat_type));
1828:   PetscFunctionReturn(PETSC_SUCCESS);
1829: }
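
/*
   A hedged sketch (not part of this file) of requesting a GPU matrix type for the
   block solves; this only has an effect if PETSc was configured with ViennaCL
   support. The PC `pc` is assumed to be of type PCASM.

     PetscCall(PCASMSetSubMatType(pc, MATSEQAIJVIENNACL)); // same as -pc_asm_sub_mat_type seqaijviennacl
*/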