Actual source code: gasm.c

  1: /*
  2:   This file defines a "generalized" additive Schwarz preconditioner for any Mat implementation.
  3:   In this version, each MPI process may intersect multiple subdomains and any subdomain may
  4:   intersect multiple MPI processes.  Intersections of subdomains with MPI processes are called *local
  5:   subdomains*.

  7:        N    - total number of distinct global subdomains  (set explicitly in PCGASMSetTotalSubdomains() or implicitly in PCGASMSetSubdomains() and then calculated in PCSetUp_GASM())
  8:        n    - actual number of local subdomains on this process (set in `PCGASMSetSubdomains()` or calculated in `PCGASMSetTotalSubdomains()`)
  9:        nmax - maximum number of local subdomains per process (calculated in PCSetUp_GASM())
 10: */
 11: #include <petsc/private/pcimpl.h>
 12: #include <petscdm.h>

 14: typedef struct {
 15:   PetscInt   N, n, nmax;
 16:   PetscInt   overlap;                /* overlap requested by user */
 17:   PCGASMType type;                   /* use reduced interpolation, restriction or both */
 18:   PetscBool  type_set;               /* if user set this value (so won't change it for symmetric problems) */
 19:   PetscBool  same_subdomain_solvers; /* flag indicating whether all local solvers are the same */
 20:   PetscBool  sort_indices;           /* flag to sort subdomain indices */
 21:   PetscBool  user_subdomains;        /* whether the user set explicit subdomain index sets -- keep them on PCReset() */
 22:   PetscBool  dm_subdomains;          /* whether DM is allowed to define subdomains */
 23:   PetscBool  hierarchicalpartitioning;
 24:   IS        *ois;           /* index sets that define the outer (conceptually, overlapping) subdomains */
 25:   IS        *iis;           /* index sets that define the inner (conceptually, nonoverlapping) subdomains */
 26:   KSP       *ksp;           /* linear solvers for each subdomain */
 27:   Mat       *pmat;          /* subdomain block matrices */
 28:   Vec        gx, gy;        /* Merged work vectors */
 29:   Vec       *x, *y;         /* Split work vectors; storage aliases pieces of storage of the above merged vectors. */
 30:   VecScatter gorestriction; /* merged restriction to disjoint union of outer subdomains */
 31:   VecScatter girestriction; /* merged restriction to disjoint union of inner subdomains */
 32:   VecScatter pctoouter;
 33:   IS         permutationIS;
 34:   Mat        permutationP;
 35:   Mat        pcmat;
 36:   Vec        pcx, pcy;
 37: } PC_GASM;

 39: static PetscErrorCode PCGASMComputeGlobalSubdomainNumbering_Private(PC pc, PetscInt **numbering, PetscInt **permutation)
 40: {
 41:   PC_GASM *osm = (PC_GASM *)pc->data;
 42:   PetscInt i;

 44:   PetscFunctionBegin;
 45:   /* Determine the number of globally-distinct subdomains and compute a global numbering for them. */
 46:   PetscCall(PetscMalloc2(osm->n, numbering, osm->n, permutation));
 47:   PetscCall(PetscObjectsListGetGlobalNumbering(PetscObjectComm((PetscObject)pc), osm->n, (PetscObject *)osm->iis, NULL, *numbering));
 48:   for (i = 0; i < osm->n; ++i) (*permutation)[i] = i;
 49:   PetscCall(PetscSortIntWithPermutation(osm->n, *numbering, *permutation));
 50:   PetscFunctionReturn(PETSC_SUCCESS);
 51: }

 53: static PetscErrorCode PCGASMSubdomainView_Private(PC pc, PetscInt i, PetscViewer viewer)
 54: {
 55:   PC_GASM        *osm = (PC_GASM *)pc->data;
 56:   PetscInt        nidx;
 57:   const PetscInt *idx;
 58:   PetscViewer     sviewer;
 59:   char           *cidx;

 61:   PetscFunctionBegin;
 62:   PetscCheck(i >= -1 && i < osm->n, PetscObjectComm((PetscObject)viewer), PETSC_ERR_ARG_WRONG, "Invalid subdomain %" PetscInt_FMT ": must be at least -1 and less than %" PetscInt_FMT, i, osm->n);

 64:   /* Inner subdomains. */
 65:   /*
 66:    No more than 15 characters per index plus a space.
 67:    PetscViewerStringSPrintf requires a string of size at least 2, so use (nidx+1) instead of nidx,
 68:    in case nidx == 0. That will take care of the space for the trailing '\0' as well.
 69:    For nidx == 0, the whole string consists of 16 '\0' characters.
 70:    */
 71:   PetscCall(PetscViewerASCIIPrintf(viewer, "Inner subdomain:\n"));
 72:   PetscCall(PetscViewerFlush(viewer));
 73:   PetscCall(PetscViewerASCIIPushSynchronized(viewer));
 74:   if (i > -1) {
 75:     PetscCall(ISGetLocalSize(osm->iis[i], &nidx));
 76:     PetscCall(PetscMalloc1(16 * (nidx + 1) + 1, &cidx));
 77:     PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, cidx, 16 * (nidx + 1) + 1, &sviewer));
 78:     PetscCall(ISGetIndices(osm->iis[i], &idx));
 79:     for (PetscInt j = 0; j < nidx; ++j) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
 80:     PetscCall(ISRestoreIndices(osm->iis[i], &idx));
 81:     PetscCall(PetscViewerDestroy(&sviewer));
 82:     PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", cidx));
 83:     PetscCall(PetscFree(cidx));
 84:   }
 85:   PetscCall(PetscViewerFlush(viewer));
 86:   PetscCall(PetscViewerASCIIPopSynchronized(viewer));
 87:   PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
 88:   PetscCall(PetscViewerFlush(viewer));

 90:   /* Outer subdomains. */
 91:   /*
 92:    No more than 15 characters per index plus a space.
 93:    PetscViewerStringSPrintf requires a string of size at least 2, so use (nidx+1) instead of nidx,
 94:    in case nidx == 0. That will take care of the space for the trailing '\0' as well.
 95:    For nidx == 0, the whole string consists of 16 '\0' characters.
 96:    */
 97:   PetscCall(PetscViewerASCIIPrintf(viewer, "Outer subdomain:\n"));
 98:   PetscCall(PetscViewerFlush(viewer));
 99:   PetscCall(PetscViewerASCIIPushSynchronized(viewer));
100:   if (i > -1) {
101:     PetscCall(ISGetLocalSize(osm->ois[i], &nidx));
102:     PetscCall(PetscMalloc1(16 * (nidx + 1) + 1, &cidx));
103:     PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, cidx, 16 * (nidx + 1) + 1, &sviewer));
104:     PetscCall(ISGetIndices(osm->ois[i], &idx));
105:     for (PetscInt j = 0; j < nidx; ++j) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
106:     PetscCall(ISRestoreIndices(osm->ois[i], &idx));
107:     PetscCall(PetscViewerDestroy(&sviewer));
108:     PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", cidx));
109:     PetscCall(PetscFree(cidx));
110:   }
111:   PetscCall(PetscViewerFlush(viewer));
112:   PetscCall(PetscViewerASCIIPopSynchronized(viewer));
113:   PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
114:   PetscCall(PetscViewerFlush(viewer));
115:   PetscFunctionReturn(PETSC_SUCCESS);
116: }

118: static PetscErrorCode PCGASMPrintSubdomains(PC pc)
119: {
120:   PC_GASM    *osm = (PC_GASM *)pc->data;
121:   const char *prefix;
122:   char        fname[PETSC_MAX_PATH_LEN + 1];
123:   PetscInt    l, d, count;
124:   PetscBool   found;
125:   PetscViewer viewer;
126:   PetscInt   *numbering, *permutation; /* global numbering of locally-supported subdomains and the permutation from the local ordering */

128:   PetscFunctionBegin;
129:   PetscCall(PCGetOptionsPrefix(pc, &prefix));
130:   PetscCall(PetscOptionsHasName(NULL, prefix, "-pc_gasm_print_subdomains", &found));
131:   if (!found) PetscFunctionReturn(PETSC_SUCCESS);
132:   PetscCall(PetscOptionsGetString(NULL, prefix, "-pc_gasm_print_subdomains", fname, sizeof(fname), &found));
133:   if (!found) PetscCall(PetscStrncpy(fname, "stdout", sizeof(fname)));
134:   PetscCall(PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc), fname, &viewer));
135:   /*
136:    Make sure the viewer has a name. Otherwise this may cause a deadlock or other weird errors when creating a subcomm viewer:
137:    the subcomm viewer will attempt to inherit the viewer's name, which, if not set, will be constructed collectively on the comm.
138:   */
139:   PetscCall(PetscObjectName((PetscObject)viewer));
140:   l = 0;
141:   PetscCall(PCGASMComputeGlobalSubdomainNumbering_Private(pc, &numbering, &permutation));
142:   for (count = 0; count < osm->N; ++count) {
143:     /* Now let subdomains go one at a time in the global numbering order and print their subdomain/solver info. */
144:     if (l < osm->n) {
145:       d = permutation[l]; /* d is the local number of the l-th smallest (in the global ordering) among the locally supported subdomains */
146:       if (numbering[d] == count) l++;
147:       else d = -1;
148:     } else d = -1;
149:     PetscCall(PCGASMSubdomainView_Private(pc, d, viewer));
150:   }
151:   PetscCall(PetscFree2(numbering, permutation));
152:   PetscCall(PetscViewerDestroy(&viewer));
153:   PetscFunctionReturn(PETSC_SUCCESS);
154: }

156: static PetscErrorCode PCView_GASM(PC pc, PetscViewer viewer)
157: {
158:   PC_GASM    *osm = (PC_GASM *)pc->data;
159:   const char *prefix;
160:   PetscMPIInt rank, size;
161:   PetscInt    bsz;
162:   PetscBool   isascii, view_subdomains = PETSC_FALSE;
163:   PetscViewer sviewer;
164:   PetscInt    l;
165:   char        overlap[256]     = "user-defined overlap";
166:   char        gsubdomains[256] = "unknown total number of subdomains";
167:   char        msubdomains[256] = "unknown max number of local subdomains";
168:   PetscInt   *numbering, *permutation; /* global numbering of locally-supported subdomains and the permutation from the local ordering */

170:   PetscFunctionBegin;
171:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
172:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));

174:   if (osm->overlap >= 0) PetscCall(PetscSNPrintf(overlap, sizeof(overlap), "requested amount of overlap = %" PetscInt_FMT, osm->overlap));
175:   if (osm->N != PETSC_DETERMINE) PetscCall(PetscSNPrintf(gsubdomains, sizeof(gsubdomains), "total number of subdomains = %" PetscInt_FMT, osm->N));
176:   if (osm->nmax != PETSC_DETERMINE) PetscCall(PetscSNPrintf(msubdomains, sizeof(msubdomains), "max number of local subdomains = %" PetscInt_FMT, osm->nmax));

178:   PetscCall(PCGetOptionsPrefix(pc, &prefix));
179:   PetscCall(PetscOptionsGetBool(NULL, prefix, "-pc_gasm_view_subdomains", &view_subdomains, NULL));

181:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
182:   if (isascii) {
183:     /*
184:      Make sure the viewer has a name. Otherwise this may cause a deadlock when creating a subcomm viewer:
185:      the subcomm viewer will attempt to inherit the viewer's name, which, if not set, will be constructed
186:      collectively on the comm.
187:      */
188:     PetscCall(PetscObjectName((PetscObject)viewer));
189:     PetscCall(PetscViewerASCIIPrintf(viewer, "  Restriction/interpolation type: %s\n", PCGASMTypes[osm->type]));
190:     PetscCall(PetscViewerASCIIPrintf(viewer, "  %s\n", overlap));
191:     PetscCall(PetscViewerASCIIPrintf(viewer, "  %s\n", gsubdomains));
192:     PetscCall(PetscViewerASCIIPrintf(viewer, "  %s\n", msubdomains));
193:     PetscCall(PetscViewerASCIIPushSynchronized(viewer));
194:     PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "  [%d|%d] number of locally-supported subdomains = %" PetscInt_FMT "\n", rank, size, osm->n));
195:     PetscCall(PetscViewerFlush(viewer));
196:     PetscCall(PetscViewerASCIIPopSynchronized(viewer));
197:     /* Cannot take advantage of osm->same_subdomain_solvers without a global numbering of subdomains. */
198:     PetscCall(PetscViewerASCIIPrintf(viewer, "  Subdomain solver info:\n"));
199:     PetscCall(PetscViewerASCIIPushTab(viewer));
200:     PetscCall(PetscViewerASCIIPrintf(viewer, "  - - - - - - - - - - - - - - - - - -\n"));
201:     /* Now let subdomains go one at a time in the global numbering order and print their subdomain/solver info. */
202:     PetscCall(PCGASMComputeGlobalSubdomainNumbering_Private(pc, &numbering, &permutation));
203:     l = 0;
204:     for (PetscInt count = 0; count < osm->N; ++count) {
205:       PetscMPIInt srank, ssize;
206:       if (l < osm->n) {
207:         PetscInt d = permutation[l]; /* d is the local number of the l-th smallest (in the global ordering) among the locally supported subdomains */
208:         if (numbering[d] == count) {
209:           PetscCallMPI(MPI_Comm_size(((PetscObject)osm->ois[d])->comm, &ssize));
210:           PetscCallMPI(MPI_Comm_rank(((PetscObject)osm->ois[d])->comm, &srank));
211:           PetscCall(PetscViewerGetSubViewer(viewer, ((PetscObject)osm->ois[d])->comm, &sviewer));
212:           PetscCall(ISGetLocalSize(osm->ois[d], &bsz));
213:           PetscCall(PetscViewerASCIISynchronizedPrintf(sviewer, "  [%d|%d] (subcomm [%d|%d]) local subdomain number %" PetscInt_FMT ", local size = %" PetscInt_FMT "\n", rank, size, srank, ssize, d, bsz));
214:           PetscCall(PetscViewerFlush(sviewer));
215:           PetscCall(PetscViewerASCIIPushTab(sviewer));
216:           if (view_subdomains) PetscCall(PCGASMSubdomainView_Private(pc, d, sviewer));
217:           if (!pc->setupcalled) {
218:             PetscCall(PetscViewerASCIISynchronizedPrintf(sviewer, "  Solver not set up yet: PCSetUp() not yet called\n"));
219:           } else {
220:             PetscCall(KSPView(osm->ksp[d], sviewer));
221:           }
222:           PetscCall(PetscViewerASCIIPopTab(sviewer));
223:           PetscCall(PetscViewerASCIIPrintf(sviewer, "  - - - - - - - - - - - - - - - - - -\n"));
224:           PetscCall(PetscViewerFlush(sviewer));
225:           PetscCall(PetscViewerRestoreSubViewer(viewer, ((PetscObject)osm->ois[d])->comm, &sviewer));
226:           ++l;
227:         } else {
228:           PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
229:           PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
230:         }
231:       } else {
232:         PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
233:         PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
234:       }
235:     }
236:     PetscCall(PetscFree2(numbering, permutation));
237:     PetscCall(PetscViewerASCIIPopTab(viewer));
238:     PetscCall(PetscViewerFlush(viewer));
239:     /* this line is needed to match the extra PetscViewerASCIIPushSynchronized() in PetscViewerGetSubViewer() */
240:     PetscCall(PetscViewerASCIIPopSynchronized(viewer));
241:   }
242:   PetscFunctionReturn(PETSC_SUCCESS);
243: }

245: PETSC_INTERN PetscErrorCode PCGASMCreateLocalSubdomains(Mat A, PetscInt nloc, IS *iis[]);

247: static PetscErrorCode PCGASMSetHierarchicalPartitioning(PC pc)
248: {
249:   PC_GASM        *osm = (PC_GASM *)pc->data;
250:   MatPartitioning part;
251:   MPI_Comm        comm;
252:   PetscMPIInt     size;
253:   PetscInt        nlocalsubdomains, fromrows_localsize;
254:   IS              partitioning, fromrows, isn;
255:   Vec             outervec;

257:   PetscFunctionBegin;
258:   PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
259:   PetscCallMPI(MPI_Comm_size(comm, &size));
260:   /* We do not need a hierarchical partitioning when the total
261:      number of subdomains is consistent with the number of MPI
262:      tasks; in the cases checked below hierarchical partitioning
263:      is skipped.
264:   */
265:   if (osm->N == PETSC_DETERMINE || osm->N >= size || osm->N == 1) PetscFunctionReturn(PETSC_SUCCESS);
266:   PetscCheck(size % osm->N == 0, PETSC_COMM_WORLD, PETSC_ERR_ARG_INCOMP, "The total number of subdomains %" PetscInt_FMT " must be a factor of the number of MPI ranks %d", osm->N, size);
267:   nlocalsubdomains = size / osm->N;
268:   osm->n           = 1;
269:   PetscCall(MatPartitioningCreate(comm, &part));
270:   PetscCall(MatPartitioningSetAdjacency(part, pc->pmat));
271:   PetscCall(MatPartitioningSetType(part, MATPARTITIONINGHIERARCH));
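       /* Request osm->N coarse parts (one per subdomain), each refined into size/osm->N fine parts. */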
272:   PetscCall(MatPartitioningHierarchicalSetNcoarseparts(part, osm->N));
273:   PetscCall(MatPartitioningHierarchicalSetNfineparts(part, nlocalsubdomains));
274:   PetscCall(MatPartitioningSetFromOptions(part));
275:   /* get new rank owner number of each vertex */
276:   PetscCall(MatPartitioningApply(part, &partitioning));
277:   PetscCall(ISBuildTwoSided(partitioning, NULL, &fromrows));
278:   PetscCall(ISPartitioningToNumbering(partitioning, &isn));
279:   PetscCall(ISDestroy(&isn));
280:   PetscCall(ISGetLocalSize(fromrows, &fromrows_localsize));
281:   PetscCall(MatPartitioningDestroy(&part));
282:   PetscCall(MatCreateVecs(pc->pmat, &outervec, NULL));
283:   PetscCall(VecCreateMPI(comm, fromrows_localsize, PETSC_DETERMINE, &osm->pcx));
284:   PetscCall(VecDuplicate(osm->pcx, &osm->pcy));
285:   PetscCall(VecScatterCreate(osm->pcx, NULL, outervec, fromrows, &osm->pctoouter));
286:   PetscCall(MatCreateSubMatrix(pc->pmat, fromrows, fromrows, MAT_INITIAL_MATRIX, &osm->permutationP));
287:   PetscCall(PetscObjectReference((PetscObject)fromrows));
288:   osm->permutationIS = fromrows;
289:   osm->pcmat         = pc->pmat;
290:   PetscCall(PetscObjectReference((PetscObject)osm->permutationP));
291:   pc->pmat = osm->permutationP;
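       /* From here on PCSetUp_GASM() works with the permuted matrix; the original pc->pmat is stashed in
          osm->pcmat and swapped back at the end of PCSetUp_GASM(). */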
292:   PetscCall(VecDestroy(&outervec));
293:   PetscCall(ISDestroy(&fromrows));
294:   PetscCall(ISDestroy(&partitioning));
295:   osm->n = PETSC_DETERMINE;
296:   PetscFunctionReturn(PETSC_SUCCESS);
297: }

299: static PetscErrorCode PCSetUp_GASM(PC pc)
300: {
301:   PC_GASM        *osm = (PC_GASM *)pc->data;
302:   PetscInt        i, nInnerIndices, nTotalInnerIndices;
303:   PetscMPIInt     rank, size;
304:   MatReuse        scall = MAT_REUSE_MATRIX;
305:   KSP             ksp;
306:   PC              subpc;
307:   const char     *prefix, *pprefix;
308:   Vec             x, y;
309:   PetscInt        oni;   /* Number of indices in the i-th local outer subdomain.               */
310:   const PetscInt *oidxi; /* Indices of the i-th local outer subdomain.                          */
311:   PetscInt        on;    /* Number of indices in the disjoint union of local outer subdomains. */
312:   PetscInt       *oidx;  /* Indices in the disjoint union of local outer subdomains. */
313:   IS              gois;  /* Disjoint union of the global indices of outer subdomains.          */
314:   IS              goid;  /* Identity IS of the size of the disjoint union of outer subdomains. */
315:   PetscScalar    *gxarray, *gyarray;
316:   PetscInt        gostart; /* Start of locally-owned indices in the vectors -- osm->gx,osm->gy -- over the disjoint union of outer subdomains. */
317:   PetscInt        num_subdomains  = 0;
318:   DM             *subdomain_dm    = NULL;
319:   char          **subdomain_names = NULL;
320:   PetscInt       *numbering;

322:   PetscFunctionBegin;
323:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
324:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
325:   if (!pc->setupcalled) {
326:     /* use a hierarchical partitioning */
327:     if (osm->hierarchicalpartitioning) PetscCall(PCGASMSetHierarchicalPartitioning(pc));
328:     if (osm->n == PETSC_DETERMINE) {
329:       if (osm->N != PETSC_DETERMINE) {
330:         /* No local subdomains given, but the desired number of total subdomains is known, so construct them accordingly. */
331:         PetscCall(PCGASMCreateSubdomains(pc->pmat, osm->N, &osm->n, &osm->iis));
332:       } else if (osm->dm_subdomains && pc->dm) {
333:         /* try pc->dm next, if allowed */
334:         IS *inner_subdomain_is, *outer_subdomain_is;
335:         PetscCall(DMCreateDomainDecomposition(pc->dm, &num_subdomains, &subdomain_names, &inner_subdomain_is, &outer_subdomain_is, &subdomain_dm));
336:         if (num_subdomains) PetscCall(PCGASMSetSubdomains(pc, num_subdomains, inner_subdomain_is, outer_subdomain_is));
337:         for (PetscInt d = 0; d < num_subdomains; ++d) {
338:           if (inner_subdomain_is) PetscCall(ISDestroy(&inner_subdomain_is[d]));
339:           if (outer_subdomain_is) PetscCall(ISDestroy(&outer_subdomain_is[d]));
340:         }
341:         PetscCall(PetscFree(inner_subdomain_is));
342:         PetscCall(PetscFree(outer_subdomain_is));
343:       } else {
344:         /* still no subdomains; use one per rank */
345:         osm->nmax = osm->n = 1;
346:         PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
347:         osm->N = size;
348:         PetscCall(PCGASMCreateLocalSubdomains(pc->pmat, osm->n, &osm->iis));
349:       }
350:     }
351:     if (!osm->iis) {
352:       /*
353:        osm->n was set in PCGASMSetSubdomains(), but the actual subdomains have not been supplied.
354:        We create the requisite number of local inner subdomains and then expand them into
355:        outer subdomains, if necessary.
356:        */
357:       PetscCall(PCGASMCreateLocalSubdomains(pc->pmat, osm->n, &osm->iis));
358:     }
359:     if (!osm->ois) {
360:       /*
361:             Initially make outer subdomains the same as inner subdomains. If nonzero additional overlap
362:             has been requested, copy the inner subdomains over so they can be modified.
363:       */
364:       PetscCall(PetscMalloc1(osm->n, &osm->ois));
365:       for (i = 0; i < osm->n; ++i) {
366:         if (osm->overlap > 0 && osm->N > 1) { /* With positive overlap, osm->iis[i] will be modified */
367:           PetscCall(ISDuplicate(osm->iis[i], (osm->ois) + i));
368:           PetscCall(ISCopy(osm->iis[i], osm->ois[i]));
369:         } else {
370:           PetscCall(PetscObjectReference((PetscObject)osm->iis[i]));
371:           osm->ois[i] = osm->iis[i];
372:         }
373:       }
374:       if (osm->overlap > 0 && osm->N > 1) {
375:         /* Extend the "overlapping" regions by a number of steps */
376:         PetscCall(MatIncreaseOverlapSplit(pc->pmat, osm->n, osm->ois, osm->overlap));
377:       }
378:     }

380:     /* Now the subdomains are defined.  Determine their global and max local numbers, if necessary. */
381:     if (osm->nmax == PETSC_DETERMINE) {
382:       PetscInt inwork, outwork;
383:       /* determine global number of subdomains and the max number of local subdomains */
384:       inwork = osm->n;
385:       PetscCallMPI(MPIU_Allreduce(&inwork, &outwork, 1, MPIU_INT, MPI_MAX, PetscObjectComm((PetscObject)pc)));
386:       osm->nmax = outwork;
387:     }
388:     if (osm->N == PETSC_DETERMINE) {
389:       /* Determine the number of globally-distinct subdomains and compute a global numbering for them. */
390:       PetscCall(PetscObjectsListGetGlobalNumbering(PetscObjectComm((PetscObject)pc), osm->n, (PetscObject *)osm->ois, &osm->N, NULL));
391:     }

393:     if (osm->sort_indices) {
394:       for (i = 0; i < osm->n; i++) {
395:         PetscCall(ISSort(osm->ois[i]));
396:         PetscCall(ISSort(osm->iis[i]));
397:       }
398:     }
399:     PetscCall(PCGetOptionsPrefix(pc, &prefix));
400:     PetscCall(PCGASMPrintSubdomains(pc));

402:     /*
403:        Merge the ISs, create merged vectors and restrictions.
404:      */
405:     /* Merge outer subdomain ISs and construct a restriction onto the disjoint union of local outer subdomains. */
406:     on = 0;
407:     for (i = 0; i < osm->n; i++) {
408:       PetscCall(ISGetLocalSize(osm->ois[i], &oni));
409:       on += oni;
410:     }
411:     PetscCall(PetscMalloc1(on, &oidx));
412:     on = 0;
413:     /* Merge local indices together */
414:     for (i = 0; i < osm->n; i++) {
415:       PetscCall(ISGetLocalSize(osm->ois[i], &oni));
416:       PetscCall(ISGetIndices(osm->ois[i], &oidxi));
417:       PetscCall(PetscArraycpy(oidx + on, oidxi, oni));
418:       PetscCall(ISRestoreIndices(osm->ois[i], &oidxi));
419:       on += oni;
420:     }
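         /* x and y are work vectors laid out like a vector of the original global system: their local length
            is the total number of local inner-subdomain indices, which coincides with the number of locally
            owned rows since the inner subdomains cover exactly the locally owned portion of the matrix.
            They are used only to build the restriction scatters and are destroyed afterwards. */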
421:     PetscCall(ISCreateGeneral(((PetscObject)pc)->comm, on, oidx, PETSC_OWN_POINTER, &gois));
422:     nTotalInnerIndices = 0;
423:     for (i = 0; i < osm->n; i++) {
424:       PetscCall(ISGetLocalSize(osm->iis[i], &nInnerIndices));
425:       nTotalInnerIndices += nInnerIndices;
426:     }
427:     PetscCall(VecCreateMPI(((PetscObject)pc)->comm, nTotalInnerIndices, PETSC_DETERMINE, &x));
428:     PetscCall(VecDuplicate(x, &y));

430:     PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)pc), on, PETSC_DECIDE, &osm->gx));
431:     PetscCall(VecDuplicate(osm->gx, &osm->gy));
432:     PetscCall(VecGetOwnershipRange(osm->gx, &gostart, NULL));
433:     PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), on, gostart, 1, &goid));
434:     /* gois may contain indices that are not locally owned, so a general scatter is needed */
435:     PetscCall(VecScatterCreate(x, gois, osm->gx, goid, &osm->gorestriction));
436:     PetscCall(PetscMalloc1(osm->n, &numbering));
437:     PetscCall(PetscObjectsListGetGlobalNumbering(PetscObjectComm((PetscObject)pc), osm->n, (PetscObject *)osm->ois, NULL, numbering));
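         /* numbering[i] is the global number of the i-th local outer subdomain; it is used below to mark
            entries when separating inner-subdomain values from the overlap. */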
438:     PetscCall(VecDestroy(&x));
439:     PetscCall(ISDestroy(&gois));

441:     /* Merge inner subdomain ISs and construct a restriction onto the disjoint union of local inner subdomains. */
442:     {
443:       PetscInt        ini;   /* Number of indices in the i-th local inner subdomain. */
444:       PetscInt        in;    /* Number of indices in the disjoint union of local inner subdomains. */
445:       PetscInt       *iidx;  /* Global indices in the merged local inner subdomain. */
446:       PetscInt       *ioidx; /* Global indices of the disjoint union of inner subdomains within the disjoint union of outer subdomains. */
447:       IS              giis;  /* IS for the disjoint union of inner subdomains. */
448:       IS              giois; /* IS for the disjoint union of inner subdomains within the disjoint union of outer subdomains. */
449:       PetscScalar    *array;
450:       const PetscInt *indices;
451:       on = 0;
452:       for (i = 0; i < osm->n; i++) {
453:         PetscCall(ISGetLocalSize(osm->ois[i], &oni));
454:         on += oni;
455:       }
456:       PetscCall(PetscMalloc1(on, &iidx));
457:       PetscCall(PetscMalloc1(on, &ioidx));
458:       PetscCall(VecGetArray(y, &array));
459:       /* mark each inner-subdomain entry with its subdomain's global number so the overlap can be identified after the scatter below */
460:       in = 0;
461:       for (i = 0; i < osm->n; i++) {
462:         PetscCall(ISGetLocalSize(osm->iis[i], &ini));
463:         for (PetscInt k = 0; k < ini; ++k) array[in + k] = numbering[i];
464:         in += ini;
465:       }
466:       PetscCall(VecRestoreArray(y, &array));
467:       PetscCall(VecScatterBegin(osm->gorestriction, y, osm->gy, INSERT_VALUES, SCATTER_FORWARD));
468:       PetscCall(VecScatterEnd(osm->gorestriction, y, osm->gy, INSERT_VALUES, SCATTER_FORWARD));
469:       PetscCall(VecGetOwnershipRange(osm->gy, &gostart, NULL));
470:       PetscCall(VecGetArray(osm->gy, &array));
471:       on = 0;
472:       in = 0;
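           /* Walk the merged outer indices: an entry whose marker equals this subdomain's global number
              belongs to the inner subdomain; record its global index (iidx) and its position within the
              merged outer vector (ioidx). */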
473:       for (i = 0; i < osm->n; i++) {
474:         PetscCall(ISGetLocalSize(osm->ois[i], &oni));
475:         PetscCall(ISGetIndices(osm->ois[i], &indices));
476:         for (PetscInt k = 0; k < oni; k++) {
477:           /*  skip overlapping indices to get inner domain */
478:           if (PetscRealPart(array[on + k]) != numbering[i]) continue;
479:           iidx[in]    = indices[k];
480:           ioidx[in++] = gostart + on + k;
481:         }
482:         PetscCall(ISRestoreIndices(osm->ois[i], &indices));
483:         on += oni;
484:       }
485:       PetscCall(VecRestoreArray(osm->gy, &array));
486:       PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), in, iidx, PETSC_OWN_POINTER, &giis));
487:       PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), in, ioidx, PETSC_OWN_POINTER, &giois));
488:       PetscCall(VecScatterCreate(y, giis, osm->gy, giois, &osm->girestriction));
489:       PetscCall(VecDestroy(&y));
490:       PetscCall(ISDestroy(&giis));
491:       PetscCall(ISDestroy(&giois));
492:     }
493:     PetscCall(ISDestroy(&goid));
494:     PetscCall(PetscFree(numbering));

496:     /* Create the subdomain work vectors. */
497:     PetscCall(PetscMalloc1(osm->n, &osm->x));
498:     PetscCall(PetscMalloc1(osm->n, &osm->y));
499:     PetscCall(VecGetArray(osm->gx, &gxarray));
500:     PetscCall(VecGetArray(osm->gy, &gyarray));
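         /* Each osm->x[i]/osm->y[i] is created with VecCreateMPIWithArray() on the subdomain's subcommunicator,
            so its storage aliases the corresponding slice of the merged vectors gx/gy; the subdomain solves
            therefore operate directly on pieces of the merged vectors. */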
501:     for (i = 0, on = 0; i < osm->n; ++i, on += oni) {
502:       PetscInt oNi;
503:       PetscCall(ISGetLocalSize(osm->ois[i], &oni));
504:       /* on a sub communicator */
505:       PetscCall(ISGetSize(osm->ois[i], &oNi));
506:       PetscCall(VecCreateMPIWithArray(((PetscObject)osm->ois[i])->comm, 1, oni, oNi, gxarray + on, &osm->x[i]));
507:       PetscCall(VecCreateMPIWithArray(((PetscObject)osm->ois[i])->comm, 1, oni, oNi, gyarray + on, &osm->y[i]));
508:     }
509:     PetscCall(VecRestoreArray(osm->gx, &gxarray));
510:     PetscCall(VecRestoreArray(osm->gy, &gyarray));
511:     /* Create the subdomain solvers */
512:     PetscCall(PetscMalloc1(osm->n, &osm->ksp));
513:     for (i = 0; i < osm->n; i++) {
514:       char subprefix[PETSC_MAX_PATH_LEN + 1];
515:       PetscCall(KSPCreate(((PetscObject)osm->ois[i])->comm, &ksp));
516:       PetscCall(KSPSetNestLevel(ksp, pc->kspnestlevel));
517:       PetscCall(KSPSetErrorIfNotConverged(ksp, pc->erroriffailure));
518:       PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp, (PetscObject)pc, 1));
519:       PetscCall(KSPSetType(ksp, KSPPREONLY));
520:       PetscCall(KSPGetPC(ksp, &subpc)); /* Why do we need this here? */
521:       if (subdomain_dm) {
522:         PetscCall(KSPSetDM(ksp, subdomain_dm[i]));
523:         PetscCall(DMDestroy(subdomain_dm + i));
524:       }
525:       PetscCall(PCGetOptionsPrefix(pc, &prefix));
526:       PetscCall(KSPSetOptionsPrefix(ksp, prefix));
527:       if (subdomain_names && subdomain_names[i]) {
528:         PetscCall(PetscSNPrintf(subprefix, PETSC_MAX_PATH_LEN, "sub_%s_", subdomain_names[i]));
529:         PetscCall(KSPAppendOptionsPrefix(ksp, subprefix));
530:         PetscCall(PetscFree(subdomain_names[i]));
531:       }
532:       PetscCall(KSPAppendOptionsPrefix(ksp, "sub_"));
533:       osm->ksp[i] = ksp;
534:     }
535:     PetscCall(PetscFree(subdomain_dm));
536:     PetscCall(PetscFree(subdomain_names));
537:     scall = MAT_INITIAL_MATRIX;
538:   } else { /* if (pc->setupcalled) */
539:     /*
540:        Destroy the submatrices from the previous iteration
541:     */
542:     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
543:       PetscCall(MatDestroyMatrices(osm->n, &osm->pmat));
544:       scall = MAT_INITIAL_MATRIX;
545:     }
546:     if (osm->permutationIS) {
547:       PetscCall(MatCreateSubMatrix(pc->pmat, osm->permutationIS, osm->permutationIS, scall, &osm->permutationP));
548:       PetscCall(PetscObjectReference((PetscObject)osm->permutationP));
549:       osm->pcmat = pc->pmat;
550:       pc->pmat   = osm->permutationP;
551:     }
552:   }

554:   /*
555:      Extract the submatrices.
556:   */
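       /* Outer subdomains may straddle process boundaries, so the parallel MatCreateSubMatricesMPI() is
          needed when running on more than one rank; on a single rank the sequential MatCreateSubMatrices()
          suffices. */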
557:   if (size > 1) {
558:     PetscCall(MatCreateSubMatricesMPI(pc->pmat, osm->n, osm->ois, osm->ois, scall, &osm->pmat));
559:   } else {
560:     PetscCall(MatCreateSubMatrices(pc->pmat, osm->n, osm->ois, osm->ois, scall, &osm->pmat));
561:   }
562:   if (scall == MAT_INITIAL_MATRIX) {
563:     PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc->pmat, &pprefix));
564:     for (i = 0; i < osm->n; i++) PetscCall(PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i], pprefix));
565:   }

567:   /* Return control to the user so that the submatrices can be modified (e.g., to apply
568:      different boundary conditions for the submatrices than for the global problem) */
569:   PetscCall(PCModifySubMatrices(pc, osm->n, osm->ois, osm->ois, osm->pmat, pc->modifysubmatricesP));

571:   /*
572:      Loop over submatrices putting them into local ksps
573:   */
574:   for (i = 0; i < osm->n; i++) {
575:     PetscCall(KSPSetOperators(osm->ksp[i], osm->pmat[i], osm->pmat[i]));
576:     PetscCall(KSPGetOptionsPrefix(osm->ksp[i], &prefix));
577:     PetscCall(MatSetOptionsPrefix(osm->pmat[i], prefix));
578:     if (!pc->setupcalled) PetscCall(KSPSetFromOptions(osm->ksp[i]));
579:   }
580:   if (osm->pcmat) {
581:     PetscCall(MatDestroy(&pc->pmat));
582:     pc->pmat   = osm->pcmat;
583:     osm->pcmat = NULL;
584:   }
585:   PetscFunctionReturn(PETSC_SUCCESS);
586: }

588: static PetscErrorCode PCSetUpOnBlocks_GASM(PC pc)
589: {
590:   PC_GASM *osm = (PC_GASM *)pc->data;
591:   PetscInt i;

593:   PetscFunctionBegin;
594:   for (i = 0; i < osm->n; i++) PetscCall(KSPSetUp(osm->ksp[i]));
595:   PetscFunctionReturn(PETSC_SUCCESS);
596: }

598: static PetscErrorCode PCApply_GASM(PC pc, Vec xin, Vec yout)
599: {
600:   PC_GASM    *osm = (PC_GASM *)pc->data;
601:   PetscInt    i;
602:   Vec         x, y;
603:   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;

605:   PetscFunctionBegin;
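       /* If hierarchical partitioning produced a permuted matrix (osm->pctoouter is set), first permute xin
          into the ordering of the repartitioned matrix (osm->pcx); the result is permuted back into yout at
          the end. */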
606:   if (osm->pctoouter) {
607:     PetscCall(VecScatterBegin(osm->pctoouter, xin, osm->pcx, INSERT_VALUES, SCATTER_REVERSE));
608:     PetscCall(VecScatterEnd(osm->pctoouter, xin, osm->pcx, INSERT_VALUES, SCATTER_REVERSE));
609:     x = osm->pcx;
610:     y = osm->pcy;
611:   } else {
612:     x = xin;
613:     y = yout;
614:   }
615:   /*
616:      support for limiting the restriction or interpolation only to the inner
617:      subdomain values (leaving the other values 0).
618:   */
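       /* PC_GASM_RESTRICT gathers values from the full overlapping (outer) subdomains via gorestriction;
          otherwise only inner-subdomain values are gathered via girestriction and the remaining slots of
          gx are left zero. */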
619:   if (!(osm->type & PC_GASM_RESTRICT)) {
620:     /* have to zero the work RHS since scatter may leave some slots empty */
621:     PetscCall(VecZeroEntries(osm->gx));
622:     PetscCall(VecScatterBegin(osm->girestriction, x, osm->gx, INSERT_VALUES, forward));
623:   } else {
624:     PetscCall(VecScatterBegin(osm->gorestriction, x, osm->gx, INSERT_VALUES, forward));
625:   }
626:   PetscCall(VecZeroEntries(osm->gy));
627:   if (!(osm->type & PC_GASM_RESTRICT)) {
628:     PetscCall(VecScatterEnd(osm->girestriction, x, osm->gx, INSERT_VALUES, forward));
629:   } else {
630:     PetscCall(VecScatterEnd(osm->gorestriction, x, osm->gx, INSERT_VALUES, forward));
631:   }
632:   /* do the subdomain solves */
633:   for (i = 0; i < osm->n; ++i) {
634:     PetscCall(KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]));
635:     PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
636:   }
637:   /* do we need to zero y? */
638:   PetscCall(VecZeroEntries(y));
639:   if (!(osm->type & PC_GASM_INTERPOLATE)) {
640:     PetscCall(VecScatterBegin(osm->girestriction, osm->gy, y, ADD_VALUES, reverse));
641:     PetscCall(VecScatterEnd(osm->girestriction, osm->gy, y, ADD_VALUES, reverse));
642:   } else {
643:     PetscCall(VecScatterBegin(osm->gorestriction, osm->gy, y, ADD_VALUES, reverse));
644:     PetscCall(VecScatterEnd(osm->gorestriction, osm->gy, y, ADD_VALUES, reverse));
645:   }
646:   if (osm->pctoouter) {
647:     PetscCall(VecScatterBegin(osm->pctoouter, y, yout, INSERT_VALUES, SCATTER_FORWARD));
648:     PetscCall(VecScatterEnd(osm->pctoouter, y, yout, INSERT_VALUES, SCATTER_FORWARD));
649:   }
650:   PetscFunctionReturn(PETSC_SUCCESS);
651: }

653: static PetscErrorCode PCMatApply_GASM(PC pc, Mat Xin, Mat Yout)
654: {
655:   PC_GASM    *osm = (PC_GASM *)pc->data;
656:   Mat         X, Y, O = NULL, Z, W;
657:   Vec         x, y;
658:   PetscInt    i, m, M, N;
659:   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;

661:   PetscFunctionBegin;
662:   PetscCheck(osm->n == 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not yet implemented");
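       /* The matrix-RHS path currently supports a single local subdomain: each column of Xin is restricted
          into the dense work matrix Z, all columns are solved at once with KSPMatSolve(), and the result W
          is prolonged back into Yout. */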
663:   PetscCall(MatGetSize(Xin, NULL, &N));
664:   if (osm->pctoouter) {
665:     PetscCall(VecGetLocalSize(osm->pcx, &m));
666:     PetscCall(VecGetSize(osm->pcx, &M));
667:     PetscCall(MatCreateDense(PetscObjectComm((PetscObject)osm->ois[0]), m, PETSC_DECIDE, M, N, NULL, &O));
668:     for (i = 0; i < N; ++i) {
669:       PetscCall(MatDenseGetColumnVecRead(Xin, i, &x));
670:       PetscCall(MatDenseGetColumnVecWrite(O, i, &y));
671:       PetscCall(VecScatterBegin(osm->pctoouter, x, y, INSERT_VALUES, SCATTER_REVERSE));
672:       PetscCall(VecScatterEnd(osm->pctoouter, x, y, INSERT_VALUES, SCATTER_REVERSE));
673:       PetscCall(MatDenseRestoreColumnVecWrite(O, i, &y));
674:       PetscCall(MatDenseRestoreColumnVecRead(Xin, i, &x));
675:     }
676:     X = Y = O;
677:   } else {
678:     X = Xin;
679:     Y = Yout;
680:   }
681:   /*
682:      support for limiting the restriction or interpolation only to the inner
683:      subdomain values (leaving the other values 0).
684:   */
685:   PetscCall(VecGetLocalSize(osm->x[0], &m));
686:   PetscCall(VecGetSize(osm->x[0], &M));
687:   PetscCall(MatCreateDense(PetscObjectComm((PetscObject)osm->ois[0]), m, PETSC_DECIDE, M, N, NULL, &Z));
688:   for (i = 0; i < N; ++i) {
689:     PetscCall(MatDenseGetColumnVecRead(X, i, &x));
690:     PetscCall(MatDenseGetColumnVecWrite(Z, i, &y));
691:     if (!(osm->type & PC_GASM_RESTRICT)) {
692:       /* have to zero the work RHS since scatter may leave some slots empty */
693:       PetscCall(VecZeroEntries(y));
694:       PetscCall(VecScatterBegin(osm->girestriction, x, y, INSERT_VALUES, forward));
695:       PetscCall(VecScatterEnd(osm->girestriction, x, y, INSERT_VALUES, forward));
696:     } else {
697:       PetscCall(VecScatterBegin(osm->gorestriction, x, y, INSERT_VALUES, forward));
698:       PetscCall(VecScatterEnd(osm->gorestriction, x, y, INSERT_VALUES, forward));
699:     }
700:     PetscCall(MatDenseRestoreColumnVecWrite(Z, i, &y));
701:     PetscCall(MatDenseRestoreColumnVecRead(X, i, &x));
702:   }
703:   PetscCall(MatCreateDense(PetscObjectComm((PetscObject)osm->ois[0]), m, PETSC_DECIDE, M, N, NULL, &W));
704:   PetscCall(MatSetOption(Z, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
705:   PetscCall(MatAssemblyBegin(Z, MAT_FINAL_ASSEMBLY));
706:   PetscCall(MatAssemblyEnd(Z, MAT_FINAL_ASSEMBLY));
707:   /* do the subdomain solve */
708:   PetscCall(KSPMatSolve(osm->ksp[0], Z, W));
709:   PetscCall(KSPCheckSolve(osm->ksp[0], pc, NULL));
710:   PetscCall(MatDestroy(&Z));
711:   /* do we need to zero y? */
712:   PetscCall(MatZeroEntries(Y));
713:   for (i = 0; i < N; ++i) {
714:     PetscCall(MatDenseGetColumnVecWrite(Y, i, &y));
715:     PetscCall(MatDenseGetColumnVecRead(W, i, &x));
716:     if (!(osm->type & PC_GASM_INTERPOLATE)) {
717:       PetscCall(VecScatterBegin(osm->girestriction, x, y, ADD_VALUES, reverse));
718:       PetscCall(VecScatterEnd(osm->girestriction, x, y, ADD_VALUES, reverse));
719:     } else {
720:       PetscCall(VecScatterBegin(osm->gorestriction, x, y, ADD_VALUES, reverse));
721:       PetscCall(VecScatterEnd(osm->gorestriction, x, y, ADD_VALUES, reverse));
722:     }
723:     PetscCall(MatDenseRestoreColumnVecRead(W, i, &x));
724:     if (osm->pctoouter) {
725:       PetscCall(MatDenseGetColumnVecWrite(Yout, i, &x));
726:       PetscCall(VecScatterBegin(osm->pctoouter, y, x, INSERT_VALUES, SCATTER_FORWARD));
727:       PetscCall(VecScatterEnd(osm->pctoouter, y, x, INSERT_VALUES, SCATTER_FORWARD));
728:       PetscCall(MatDenseRestoreColumnVecWrite(Yout, i, &x));
729:     }
730:     PetscCall(MatDenseRestoreColumnVecWrite(Y, i, &y));
731:   }
732:   PetscCall(MatDestroy(&W));
733:   PetscCall(MatDestroy(&O));
734:   PetscFunctionReturn(PETSC_SUCCESS);
735: }

737: static PetscErrorCode PCApplyTranspose_GASM(PC pc, Vec xin, Vec yout)
738: {
739:   PC_GASM    *osm = (PC_GASM *)pc->data;
740:   PetscInt    i;
741:   Vec         x, y;
742:   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;

744:   PetscFunctionBegin;
745:   if (osm->pctoouter) {
746:     PetscCall(VecScatterBegin(osm->pctoouter, xin, osm->pcx, INSERT_VALUES, SCATTER_REVERSE));
747:     PetscCall(VecScatterEnd(osm->pctoouter, xin, osm->pcx, INSERT_VALUES, SCATTER_REVERSE));
748:     x = osm->pcx;
749:     y = osm->pcy;
750:   } else {
751:     x = xin;
752:     y = yout;
753:   }
754:   /*
755:      Support for limiting the restriction or interpolation to only local
756:      subdomain values (leaving the other values 0).

758:      Note: these are reversed from the PCApply_GASM() because we are applying the
759:      transpose of the three terms
760:   */
761:   if (!(osm->type & PC_GASM_INTERPOLATE)) {
762:     /* have to zero the work RHS since scatter may leave some slots empty */
763:     PetscCall(VecZeroEntries(osm->gx));
764:     PetscCall(VecScatterBegin(osm->girestriction, x, osm->gx, INSERT_VALUES, forward));
765:   } else {
766:     PetscCall(VecScatterBegin(osm->gorestriction, x, osm->gx, INSERT_VALUES, forward));
767:   }
768:   PetscCall(VecZeroEntries(osm->gy));
769:   if (!(osm->type & PC_GASM_INTERPOLATE)) {
770:     PetscCall(VecScatterEnd(osm->girestriction, x, osm->gx, INSERT_VALUES, forward));
771:   } else {
772:     PetscCall(VecScatterEnd(osm->gorestriction, x, osm->gx, INSERT_VALUES, forward));
773:   }
774:   /* do the local solves */
775:   for (i = 0; i < osm->n; ++i) { /* Note that the solves are local, so we can go to osm->n, rather than osm->nmax. */
776:     PetscCall(KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]));
777:     PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
778:   }
779:   PetscCall(VecZeroEntries(y));
780:   if (!(osm->type & PC_GASM_RESTRICT)) {
781:     PetscCall(VecScatterBegin(osm->girestriction, osm->gy, y, ADD_VALUES, reverse));
782:     PetscCall(VecScatterEnd(osm->girestriction, osm->gy, y, ADD_VALUES, reverse));
783:   } else {
784:     PetscCall(VecScatterBegin(osm->gorestriction, osm->gy, y, ADD_VALUES, reverse));
785:     PetscCall(VecScatterEnd(osm->gorestriction, osm->gy, y, ADD_VALUES, reverse));
786:   }
787:   if (osm->pctoouter) {
788:     PetscCall(VecScatterBegin(osm->pctoouter, y, yout, INSERT_VALUES, SCATTER_FORWARD));
789:     PetscCall(VecScatterEnd(osm->pctoouter, y, yout, INSERT_VALUES, SCATTER_FORWARD));
790:   }
791:   PetscFunctionReturn(PETSC_SUCCESS);
792: }

794: static PetscErrorCode PCReset_GASM(PC pc)
795: {
796:   PC_GASM *osm = (PC_GASM *)pc->data;

798:   PetscFunctionBegin;
799:   if (osm->ksp) {
800:     for (PetscInt i = 0; i < osm->n; i++) PetscCall(KSPReset(osm->ksp[i]));
801:   }
802:   if (osm->pmat) {
803:     if (osm->n > 0) {
804:       PetscMPIInt size;
805:       PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
806:       if (size > 1) {
807:         /* osm->pmat is created by MatCreateSubMatricesMPI(), cannot use MatDestroySubMatrices() */
808:         PetscCall(MatDestroyMatrices(osm->n, &osm->pmat));
809:       } else {
810:         PetscCall(MatDestroySubMatrices(osm->n, &osm->pmat));
811:       }
812:     }
813:   }
814:   if (osm->x) {
815:     for (PetscInt i = 0; i < osm->n; i++) {
816:       PetscCall(VecDestroy(&osm->x[i]));
817:       PetscCall(VecDestroy(&osm->y[i]));
818:     }
819:   }
820:   PetscCall(VecDestroy(&osm->gx));
821:   PetscCall(VecDestroy(&osm->gy));

823:   PetscCall(VecScatterDestroy(&osm->gorestriction));
824:   PetscCall(VecScatterDestroy(&osm->girestriction));
825:   if (!osm->user_subdomains) {
826:     PetscCall(PCGASMDestroySubdomains(osm->n, &osm->ois, &osm->iis));
827:     osm->N    = PETSC_DETERMINE;
828:     osm->nmax = PETSC_DETERMINE;
829:   }
830:   PetscCall(VecScatterDestroy(&osm->pctoouter));
831:   PetscCall(ISDestroy(&osm->permutationIS));
832:   PetscCall(VecDestroy(&osm->pcx));
833:   PetscCall(VecDestroy(&osm->pcy));
834:   PetscCall(MatDestroy(&osm->permutationP));
835:   PetscCall(MatDestroy(&osm->pcmat));
836:   PetscFunctionReturn(PETSC_SUCCESS);
837: }

839: static PetscErrorCode PCDestroy_GASM(PC pc)
840: {
841:   PC_GASM *osm = (PC_GASM *)pc->data;

843:   PetscFunctionBegin;
844:   PetscCall(PCReset_GASM(pc));
845:   /* PCReset_GASM() does not destroy the subdomains if user_subdomains is true, so destroy them here. */
846:   PetscCall(PCGASMDestroySubdomains(osm->n, &osm->ois, &osm->iis));
847:   if (osm->ksp) {
848:     for (PetscInt i = 0; i < osm->n; i++) PetscCall(KSPDestroy(&osm->ksp[i]));
849:     PetscCall(PetscFree(osm->ksp));
850:   }
851:   PetscCall(PetscFree(osm->x));
852:   PetscCall(PetscFree(osm->y));
853:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGASMSetSubdomains_C", NULL));
854:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGASMSetOverlap_C", NULL));
855:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGASMSetType_C", NULL));
856:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGASMSetSortIndices_C", NULL));
857:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGASMGetSubKSP_C", NULL));
858:   PetscCall(PetscFree(pc->data));
859:   PetscFunctionReturn(PETSC_SUCCESS);
860: }

862: static PetscErrorCode PCSetFromOptions_GASM(PC pc, PetscOptionItems PetscOptionsObject)
863: {
864:   PC_GASM   *osm = (PC_GASM *)pc->data;
865:   PetscInt   blocks, ovl;
866:   PetscBool  flg;
867:   PCGASMType gasmtype;

869:   PetscFunctionBegin;
870:   PetscOptionsHeadBegin(PetscOptionsObject, "Generalized additive Schwarz options");
871:   PetscCall(PetscOptionsBool("-pc_gasm_use_dm_subdomains", "If subdomains aren't set, use DMCreateDomainDecomposition() to define subdomains.", "PCGASMSetUseDMSubdomains", osm->dm_subdomains, &osm->dm_subdomains, &flg));
872:   PetscCall(PetscOptionsInt("-pc_gasm_total_subdomains", "Total number of subdomains across communicator", "PCGASMSetTotalSubdomains", osm->N, &blocks, &flg));
873:   if (flg) PetscCall(PCGASMSetTotalSubdomains(pc, blocks));
874:   PetscCall(PetscOptionsInt("-pc_gasm_overlap", "Number of overlapping degrees of freedom", "PCGASMSetOverlap", osm->overlap, &ovl, &flg));
875:   if (flg) {
876:     PetscCall(PCGASMSetOverlap(pc, ovl));
877:     osm->dm_subdomains = PETSC_FALSE;
878:   }
879:   flg = PETSC_FALSE;
880:   PetscCall(PetscOptionsEnum("-pc_gasm_type", "Type of restriction/extension", "PCGASMSetType", PCGASMTypes, (PetscEnum)osm->type, (PetscEnum *)&gasmtype, &flg));
881:   if (flg) PetscCall(PCGASMSetType(pc, gasmtype));
882:   PetscCall(PetscOptionsBool("-pc_gasm_use_hierachical_partitioning", "use hierarchical partitioning", NULL, osm->hierarchicalpartitioning, &osm->hierarchicalpartitioning, &flg));
883:   PetscOptionsHeadEnd();
884:   PetscFunctionReturn(PETSC_SUCCESS);
885: }

887: /*@
888:   PCGASMSetTotalSubdomains - sets the total number of subdomains to use across the communicator for `PCGASM`

890:   Logically Collective

892:   Input Parameters:
893: + pc - the preconditioner
894: - N  - total number of subdomains

896:   Level: beginner

898: .seealso: [](ch_ksp), `PCGASM`, `PCGASMSetSubdomains()`, `PCGASMSetOverlap()`,
899:           `PCGASMCreateSubdomains2D()`
900: @*/
901: PetscErrorCode PCGASMSetTotalSubdomains(PC pc, PetscInt N)
902: {
903:   PC_GASM    *osm = (PC_GASM *)pc->data;
904:   PetscMPIInt size, rank;

906:   PetscFunctionBegin;
907:   PetscCheck(N >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Total number of subdomains must be 1 or more, got N = %" PetscInt_FMT, N);
908:   PetscCheck(!pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCGASMSetTotalSubdomains() should be called before calling PCSetUp().");

910:   PetscCall(PCGASMDestroySubdomains(osm->n, &osm->iis, &osm->ois));
911:   osm->ois = osm->iis = NULL;

913:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
914:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
915:   osm->N             = N;
916:   osm->n             = PETSC_DETERMINE;
917:   osm->nmax          = PETSC_DETERMINE;
918:   osm->dm_subdomains = PETSC_FALSE;
919:   PetscFunctionReturn(PETSC_SUCCESS);
920: }

922: static PetscErrorCode PCGASMSetSubdomains_GASM(PC pc, PetscInt n, IS iis[], IS ois[])
923: {
924:   PC_GASM *osm = (PC_GASM *)pc->data;

926:   PetscFunctionBegin;
927:   PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Each MPI rank must have 1 or more subdomains, got n = %" PetscInt_FMT, n);
928:   PetscCheck(!pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCGASMSetSubdomains() should be called before calling PCSetUp().");

930:   PetscCall(PCGASMDestroySubdomains(osm->n, &osm->iis, &osm->ois));
931:   osm->iis = osm->ois = NULL;
932:   osm->n              = n;
933:   osm->N              = PETSC_DETERMINE;
934:   osm->nmax           = PETSC_DETERMINE;
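       /* Take a reference to each user-supplied index set; the iis/ois arrays themselves are copied, so the
          caller may free them after this call. */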
935:   if (ois) {
936:     PetscCall(PetscMalloc1(n, &osm->ois));
937:     for (PetscInt i = 0; i < n; i++) {
938:       PetscCall(PetscObjectReference((PetscObject)ois[i]));
939:       osm->ois[i] = ois[i];
940:     }
941:     /*
942:        Since the user set the outer subdomains, even if nontrivial overlap was requested via PCGASMSetOverlap(),
943:        it will be ignored.  To avoid confusion later on (e.g., when viewing the PC), the overlap size is set to -1.
944:     */
945:     osm->overlap = -1;
946:     /* inner subdomains must be provided  */
947:     PetscCheck(iis, PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "inner indices have to be provided ");
948:   } /* end if */
949:   if (iis) {
950:     PetscCall(PetscMalloc1(n, &osm->iis));
951:     for (PetscInt i = 0; i < n; i++) {
952:       PetscCall(PetscObjectReference((PetscObject)iis[i]));
953:       osm->iis[i] = iis[i];
954:     }
955:     if (!ois) {
956:       osm->ois = NULL;
957:       /* if the user does not provide outer indices, the corresponding outer indices are created in PCSetUp_GASM() by expanding the inner indices by osm->overlap */
958:     }
959:   }
960:   if (PetscDefined(USE_DEBUG)) {
961:     PetscInt        j, rstart, rend, *covered, lsize;
962:     const PetscInt *indices;

964:     if (osm->iis) {
965:       /* check if the inner indices cover and only cover the local portion of the matrix */
966:       PetscCall(MatGetOwnershipRange(pc->pmat, &rstart, &rend));
967:       PetscCall(PetscCalloc1(rend - rstart, &covered));
968:       /* check if the current MPI process owns indices from others */
969:       for (PetscInt i = 0; i < n; i++) {
970:         PetscCall(ISGetIndices(osm->iis[i], &indices));
971:         PetscCall(ISGetLocalSize(osm->iis[i], &lsize));
972:         for (j = 0; j < lsize; j++) {
973:           PetscCheck(indices[j] >= rstart && indices[j] < rend, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "inner subdomains can not own an index %" PetscInt_FMT " from other ranks", indices[j]);
974:           PetscCheck(covered[indices[j] - rstart] != 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "inner subdomains can not have an overlapping index %" PetscInt_FMT " ", indices[j]);
975:           covered[indices[j] - rstart] = 1;
976:         }
977:         PetscCall(ISRestoreIndices(osm->iis[i], &indices));
978:       }
979:       /* check if we miss any indices */
980:       for (PetscInt i = rstart; i < rend; i++) PetscCheck(covered[i - rstart], PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "local entity %" PetscInt_FMT " was not covered by inner subdomains", i);
981:       PetscCall(PetscFree(covered));
982:     }
983:   }
984:   if (iis) osm->user_subdomains = PETSC_TRUE;
985:   PetscFunctionReturn(PETSC_SUCCESS);
986: }

988: static PetscErrorCode PCGASMSetOverlap_GASM(PC pc, PetscInt ovl)
989: {
990:   PC_GASM *osm = (PC_GASM *)pc->data;

992:   PetscFunctionBegin;
993:   PetscCheck(ovl >= 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Negative overlap value requested");
994:   PetscCheck(!pc->setupcalled || ovl == osm->overlap, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCGASMSetOverlap() should be called before PCSetUp().");
995:   if (!pc->setupcalled) osm->overlap = ovl;
996:   PetscFunctionReturn(PETSC_SUCCESS);
997: }

999: static PetscErrorCode PCGASMSetType_GASM(PC pc, PCGASMType type)
1000: {
1001:   PC_GASM *osm = (PC_GASM *)pc->data;

1003:   PetscFunctionBegin;
1004:   osm->type     = type;
1005:   osm->type_set = PETSC_TRUE;
1006:   PetscFunctionReturn(PETSC_SUCCESS);
1007: }

1009: static PetscErrorCode PCGASMSetSortIndices_GASM(PC pc, PetscBool doSort)
1010: {
1011:   PC_GASM *osm = (PC_GASM *)pc->data;

1013:   PetscFunctionBegin;
1014:   osm->sort_indices = doSort;
1015:   PetscFunctionReturn(PETSC_SUCCESS);
1016: }

1018: /*
1019:    FIXME: This routine might need to be modified now that multiple processes per subdomain are allowed.
1020:         In particular, it would upset the global subdomain number calculation.
1021: */
1022: static PetscErrorCode PCGASMGetSubKSP_GASM(PC pc, PetscInt *n, PetscInt *first, KSP **ksp)
1023: {
1024:   PC_GASM *osm = (PC_GASM *)pc->data;

1026:   PetscFunctionBegin;
1027:   PetscCheck(osm->n >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");

1029:   if (n) *n = osm->n;
1030:   if (first) {
1031:     PetscCallMPI(MPI_Scan(&osm->n, first, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)pc)));
1032:     *first -= osm->n;
1033:   }
1034:   if (ksp) {
1035:     /* Assume that local solves are now different; not necessarily
1036:        true, though!  This flag is used only for PCView_GASM() */
1037:     *ksp                        = osm->ksp;
1038:     osm->same_subdomain_solvers = PETSC_FALSE;
1039:   }
1040:   PetscFunctionReturn(PETSC_SUCCESS);
1041: } /* PCGASMGetSubKSP_GASM() */

1043: /*@
1044:   PCGASMSetSubdomains - Sets the subdomains for this MPI process
1045:   for the additive Schwarz preconditioner with multiple MPI processes per subdomain, `PCGASM`

1047:   Collective

1049:   Input Parameters:
1050: + pc  - the preconditioner object
1051: . n   - the number of subdomains for this MPI process
1052: . iis - the index sets that define the inner subdomains (or `NULL` for PETSc to determine subdomains), the `iis` array is
1053:         copied so may be freed after this call.
1054: - ois - the index sets that define the outer subdomains (or `NULL` to use the same as `iis`, or to construct by expanding `iis` by
1055:         the requested overlap), the `ois` array is copied so may be freed after this call.

1057:   Level: advanced

1059:   Notes:
1060:   The `IS` indices use the parallel, global numbering of the vector entries.

1062:   Inner subdomains are those where the correction is applied.

1064:   Outer subdomains are those where the residual necessary to obtain the
1065:   corrections is obtained (see `PCGASMType` for the use of inner/outer subdomains).

1067:   Both inner and outer subdomains can extend over several MPI processes.
1068:   This process' portion of a subdomain is known as a local subdomain.

1070:   Inner subdomains cannot overlap with each other, must not contain any entities owned by remote processes,
1071:   and together have to cover the entire locally owned portion of the problem. The index sets on each
1072:   process should be ordered such that the i-th local subdomain is connected to the i-th remote subdomain
1073:   on another MPI process.

1075:   By default the `PCGASM` preconditioner uses 1 (local) subdomain per MPI process.

1077:   The `iis` and `ois` arrays may be freed after this call using `PCGASMDestroySubdomains()`
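
  A minimal usage sketch (one inner subdomain per MPI process consisting of the locally owned rows; `A`, `pc`,
  and the index sets are illustrative only):
.vb
  IS       inner[1];
  PetscInt rstart, rend;

  MatGetOwnershipRange(A, &rstart, &rend);
  ISCreateStride(PETSC_COMM_SELF, rend - rstart, rstart, 1, &inner[0]);
  PCGASMSetSubdomains(pc, 1, inner, NULL); /* outer subdomains are built by applying the requested overlap */
  ISDestroy(&inner[0]);
.ve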

1079: .seealso: [](ch_ksp), `PCGASM`, `PCGASMSetOverlap()`, `PCGASMGetSubKSP()`, `PCGASMDestroySubdomains()`,
1080:           `PCGASMCreateSubdomains2D()`, `PCGASMGetSubdomains()`
1081: @*/
1082: PetscErrorCode PCGASMSetSubdomains(PC pc, PetscInt n, IS iis[], IS ois[])
1083: {
1084:   PC_GASM *osm = (PC_GASM *)pc->data;

1086:   PetscFunctionBegin;
1088:   PetscTryMethod(pc, "PCGASMSetSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, n, iis, ois));
1089:   osm->dm_subdomains = PETSC_FALSE;
1090:   PetscFunctionReturn(PETSC_SUCCESS);
1091: }

1093: /*@
1094:   PCGASMSetOverlap - Sets the overlap between a pair of subdomains for the
1095:   additive Schwarz preconditioner `PCGASM`.  Either all or no MPI processes in the
1096:   pc communicator must call this routine.

1098:   Logically Collective

1100:   Input Parameters:
1101: + pc  - the preconditioner context
1102: - ovl - the amount of overlap between subdomains (ovl >= 0, default value = 0)

1104:   Options Database Key:
1105: . -pc_gasm_overlap overlap - Sets overlap

1107:   Level: intermediate

1109:   Notes:
1110:   By default the `PCGASM` preconditioner uses 1 subdomain per process.  To use
1111:   multiple subdomains per MPI process or "straddling" subdomains that intersect
1112:   multiple processes use `PCGASMSetSubdomains()` (or option `-pc_gasm_total_subdomains` <n>).

1114:   The overlap defaults to 0, so if one desires that no additional
1115:   overlap be computed beyond what may have been set with a call to
1116:   `PCGASMSetSubdomains()`, then `ovl` must be set to be 0.  In particular, if one does
1117:   not explicitly set the subdomains in application code, then all overlap would be computed
1118:   internally by PETSc, and using an overlap of 0 would result in a `PCGASM`
1119:   variant that is equivalent to the block Jacobi preconditioner.

1121:   One can define initial index sets with any overlap via
1122:   `PCGASMSetSubdomains()`; the routine `PCGASMSetOverlap()` merely allows
1123:   PETSc to extend that overlap further, if desired.

1125: .seealso: [](ch_ksp), `PCGASM`, `PCGASMSetSubdomains()`, `PCGASMGetSubKSP()`,
1126:           `PCGASMCreateSubdomains2D()`, `PCGASMGetSubdomains()`
1127: @*/
1128: PetscErrorCode PCGASMSetOverlap(PC pc, PetscInt ovl)
1129: {
1130:   PC_GASM *osm = (PC_GASM *)pc->data;

1132:   PetscFunctionBegin;
1135:   PetscTryMethod(pc, "PCGASMSetOverlap_C", (PC, PetscInt), (pc, ovl));
1136:   osm->dm_subdomains = PETSC_FALSE;
1137:   PetscFunctionReturn(PETSC_SUCCESS);
1138: }

1140: /*@
1141:   PCGASMSetType - Sets the type of restriction and interpolation used
1142:   for local problems in the `PCGASM` additive Schwarz method.

1144:   Logically Collective

1146:   Input Parameters:
1147: + pc   - the preconditioner context
1148: - type - variant of `PCGASM`, one of
1149: .vb
1150:       `PC_GASM_BASIC`       - full interpolation and restriction
1151:       `PC_GASM_RESTRICT`    - full restriction, local MPI process interpolation
1152:       `PC_GASM_INTERPOLATE` - full interpolation, local MPI process restriction
1153:       `PC_GASM_NONE`        - local MPI process restriction and interpolation
1154: .ve

1156:   Options Database Key:
1157: . -pc_gasm_type [basic,restrict,interpolate,none] - Sets `PCGASM` type

1159:   Level: intermediate
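
  Example Usage:
  A minimal sketch selecting the restricted variant, assuming `pc` has already been set to `PCGASM`
.vb
  PetscCall(PCGASMSetType(pc, PC_GASM_RESTRICT));
.ve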

1161: .seealso: [](ch_ksp), `PCGASM`, `PCGASMSetSubdomains()`, `PCGASMGetSubKSP()`,
1162:           `PCGASMCreateSubdomains2D()`, `PCASM`, `PCASMSetType()`
1163: @*/
1164: PetscErrorCode PCGASMSetType(PC pc, PCGASMType type)
1165: {
1166:   PetscFunctionBegin;
1169:   PetscTryMethod(pc, "PCGASMSetType_C", (PC, PCGASMType), (pc, type));
1170:   PetscFunctionReturn(PETSC_SUCCESS);
1171: }

1173: /*@
1174:   PCGASMSetSortIndices - Determines whether subdomain indices are sorted.

1176:   Logically Collective

1178:   Input Parameters:
1179: + pc     - the preconditioner context
1180: - doSort - sort the subdomain indices

1182:   Level: intermediate

1184: .seealso: [](ch_ksp), `PCGASM`, `PCGASMSetSubdomains()`, `PCGASMGetSubKSP()`,
1185:           `PCGASMCreateSubdomains2D()`
1186: @*/
1187: PetscErrorCode PCGASMSetSortIndices(PC pc, PetscBool doSort)
1188: {
1189:   PetscFunctionBegin;
1192:   PetscTryMethod(pc, "PCGASMSetSortIndices_C", (PC, PetscBool), (pc, doSort));
1193:   PetscFunctionReturn(PETSC_SUCCESS);
1194: }

1196: /*@C
1197:   PCGASMGetSubKSP - Gets the local `KSP` contexts for all subdomains on this MPI process.

1199:   Collective iff `first_local` is requested

1201:   Input Parameter:
1202: . pc - the preconditioner context

1204:   Output Parameters:
1205: + n_local     - the number of blocks on this MPI process or `NULL`
1206: . first_local - the global number of the first block on this process or `NULL`; all processes must request it or all must pass `NULL`
1207: - ksp         - the array of `KSP` contexts

1209:   Level: advanced

1211:   Notes:
1212:   After `PCGASMGetSubKSP()` the array of `KSP` objects is not to be freed

1214:   Currently for some matrix implementations only 1 block per MPI process
1215:   is supported.

1217:   You must call `KSPSetUp()` before calling `PCGASMGetSubKSP()`.
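
  Example Usage:
  A sketch that switches every local subdomain solver to ILU; `ksp` is assumed to be the outer solver whose
  preconditioner `pc` is of type `PCGASM`, and the other names are illustrative
.vb
  KSP     *subksp;
  PetscInt i, n_local, first_local;

  PetscCall(KSPSetUp(ksp));
  PetscCall(PCGASMGetSubKSP(pc, &n_local, &first_local, &subksp));
  for (i = 0; i < n_local; i++) {
    PC subpc;

    PetscCall(KSPGetPC(subksp[i], &subpc));
    PetscCall(PCSetType(subpc, PCILU));
  }
.ve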

1219:   Fortran Note:
1220:   Call `PCGASMRestoreSubKSP()` when the array of `KSP` is no longer needed

1222: .seealso: [](ch_ksp), `PCGASM`, `PCGASMSetSubdomains()`, `PCGASMSetOverlap()`,
1223:           `PCGASMCreateSubdomains2D()`
1224: @*/
1225: PetscErrorCode PCGASMGetSubKSP(PC pc, PetscInt *n_local, PetscInt *first_local, KSP *ksp[])
1226: {
1227:   PetscFunctionBegin;
1229:   PetscUseMethod(pc, "PCGASMGetSubKSP_C", (PC, PetscInt *, PetscInt *, KSP **), (pc, n_local, first_local, ksp));
1230:   PetscFunctionReturn(PETSC_SUCCESS);
1231: }

1233: /*MC
1234:    PCGASM - Use the (restricted) additive Schwarz method, in which each block is (approximately) solved with
1235:            its own `KSP` object on a subset of MPI processes

1237:    Options Database Keys:
1238: +  -pc_gasm_total_subdomains n                     - Sets the total number of subdomains to be distributed among the MPI processes
1239: .  -pc_gasm_view_subdomains                        - activates the printing of subdomain indices in `PCView()`, `-ksp_view` or `-snes_view`
1240: .  -pc_gasm_print_subdomains                       - activates the printing of subdomain indices in `PCSetUp()`
1241: .  -pc_gasm_overlap ovl                            - Sets overlap by which to (automatically) extend local subdomains
1242: -  -pc_gasm_type (basic|restrict|interpolate|none) - Sets `PCGASMType`

1244:    Level: beginner

1246:    Notes:
1247:    To set options on the solvers for each block, prefix the `KSP` and `PC`
1248:    options database keys with `-sub_`. For example, `-sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly`

1250:    To set the options on the solvers separately for each block call `PCGASMGetSubKSP()`
1251:    and set the options directly on the resulting `KSP` object (you can access its `PC`
1252:    with `KSPGetPC()`)

1254:    See {cite}`dryja1987additive` and {cite}`1sbg` for details on additive Schwarz algorithms
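
   Example Usage:
   A command-line sketch (the executable name is a placeholder) that selects `PCGASM` with an overlap of 1
   and ILU subdomain solves
.vb
   ./my_app -pc_type gasm -pc_gasm_overlap 1 -sub_ksp_type preonly -sub_pc_type ilu
.ve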

1256: .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCASM`, `PCGASMType`, `PCGASMSetType()`,
1257:           `PCBJACOBI`, `PCGASMGetSubKSP()`, `PCGASMSetSubdomains()`,
1258:           `PCSetModifySubMatrices()`, `PCGASMSetOverlap()`, `PCASMSetType()`
1259: M*/

1261: PETSC_EXTERN PetscErrorCode PCCreate_GASM(PC pc)
1262: {
1263:   PC_GASM *osm;

1265:   PetscFunctionBegin;
1266:   PetscCall(PetscNew(&osm));

1268:   osm->N                        = PETSC_DETERMINE;
1269:   osm->n                        = PETSC_DECIDE;
1270:   osm->nmax                     = PETSC_DETERMINE;
1271:   osm->overlap                  = 0;
1272:   osm->ksp                      = NULL;
1273:   osm->gorestriction            = NULL;
1274:   osm->girestriction            = NULL;
1275:   osm->pctoouter                = NULL;
1276:   osm->gx                       = NULL;
1277:   osm->gy                       = NULL;
1278:   osm->x                        = NULL;
1279:   osm->y                        = NULL;
1280:   osm->pcx                      = NULL;
1281:   osm->pcy                      = NULL;
1282:   osm->permutationIS            = NULL;
1283:   osm->permutationP             = NULL;
1284:   osm->pcmat                    = NULL;
1285:   osm->ois                      = NULL;
1286:   osm->iis                      = NULL;
1287:   osm->pmat                     = NULL;
1288:   osm->type                     = PC_GASM_RESTRICT;
1289:   osm->same_subdomain_solvers   = PETSC_TRUE;
1290:   osm->sort_indices             = PETSC_TRUE;
1291:   osm->dm_subdomains            = PETSC_FALSE;
1292:   osm->hierarchicalpartitioning = PETSC_FALSE;

1294:   pc->data                 = (void *)osm;
1295:   pc->ops->apply           = PCApply_GASM;
1296:   pc->ops->matapply        = PCMatApply_GASM;
1297:   pc->ops->applytranspose  = PCApplyTranspose_GASM;
1298:   pc->ops->setup           = PCSetUp_GASM;
1299:   pc->ops->reset           = PCReset_GASM;
1300:   pc->ops->destroy         = PCDestroy_GASM;
1301:   pc->ops->setfromoptions  = PCSetFromOptions_GASM;
1302:   pc->ops->setuponblocks   = PCSetUpOnBlocks_GASM;
1303:   pc->ops->view            = PCView_GASM;
1304:   pc->ops->applyrichardson = NULL;

1306:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGASMSetSubdomains_C", PCGASMSetSubdomains_GASM));
1307:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGASMSetOverlap_C", PCGASMSetOverlap_GASM));
1308:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGASMSetType_C", PCGASMSetType_GASM));
1309:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGASMSetSortIndices_C", PCGASMSetSortIndices_GASM));
1310:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGASMGetSubKSP_C", PCGASMGetSubKSP_GASM));
1311:   PetscFunctionReturn(PETSC_SUCCESS);
1312: }

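/*
   PCGASMCreateLocalSubdomains - creates nloc nonoverlapping index sets covering the locally owned rows of A.
   When nloc > 1 and a nontrivial MatPartitioning is configured, the local diagonal block is partitioned by
   adjacency; otherwise the local rows are split into contiguous, block-size-aligned chunks.  The caller is
   responsible for freeing the returned array, e.g. with PCGASMDestroySubdomains().
*/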
1314: PetscErrorCode PCGASMCreateLocalSubdomains(Mat A, PetscInt nloc, IS *iis[])
1315: {
1316:   MatPartitioning mpart;
1317:   const char     *prefix;
1318:   PetscInt        i, j, rstart, rend, bs;
1319:   PetscBool       hasop, isbaij = PETSC_FALSE, foundpart = PETSC_FALSE;
1320:   Mat             Ad = NULL, adj;
1321:   IS              ispart, isnumb, *is;

1323:   PetscFunctionBegin;
1324:   PetscCheck(nloc >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of local subdomains must be > 0, got nloc = %" PetscInt_FMT, nloc);

1326:   /* Get prefix, row distribution, and block size */
1327:   PetscCall(MatGetOptionsPrefix(A, &prefix));
1328:   PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
1329:   PetscCall(MatGetBlockSize(A, &bs));
1330:   PetscCheck(rstart / bs * bs == rstart && rend / bs * bs == rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "bad row distribution [%" PetscInt_FMT ",%" PetscInt_FMT ") for matrix block size %" PetscInt_FMT, rstart, rend, bs);

1332:   /* Get diagonal block from matrix if possible */
1333:   PetscCall(MatHasOperation(A, MATOP_GET_DIAGONAL_BLOCK, &hasop));
1334:   if (hasop) PetscCall(MatGetDiagonalBlock(A, &Ad));
1335:   if (Ad) {
1336:     PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQBAIJ, &isbaij));
1337:     if (!isbaij) PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQSBAIJ, &isbaij));
1338:   }
1339:   if (Ad && nloc > 1) {
1340:     PetscBool match, done;
1341:     /* Try to setup a good matrix partitioning if available */
1342:     PetscCall(MatPartitioningCreate(PETSC_COMM_SELF, &mpart));
1343:     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)mpart, prefix));
1344:     PetscCall(MatPartitioningSetFromOptions(mpart));
1345:     PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGCURRENT, &match));
1346:     if (!match) PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGSQUARE, &match));
1347:     if (!match) { /* assume a "good" partitioner is available */
1348:       PetscInt        na;
1349:       const PetscInt *ia, *ja;
1350:       PetscCall(MatGetRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
1351:       if (done) {
1352:         /* Build adjacency matrix by hand. Unfortunately a call to
1353:            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1354:            remove the block-aij structure and we cannot expect
1355:            MatPartitioning to split vertices as we need */
1356:         PetscInt        i, j, len, nnz, cnt, *iia = NULL, *jja = NULL;
1357:         const PetscInt *row;
1358:         nnz = 0;
1359:         for (i = 0; i < na; i++) { /* count number of nonzeros */
1360:           len = ia[i + 1] - ia[i];
1361:           row = ja + ia[i];
1362:           for (j = 0; j < len; j++) {
1363:             if (row[j] == i) { /* don't count diagonal */
1364:               len--;
1365:               break;
1366:             }
1367:           }
1368:           nnz += len;
1369:         }
1370:         PetscCall(PetscMalloc1(na + 1, &iia));
1371:         PetscCall(PetscMalloc1(nnz, &jja));
1372:         nnz    = 0;
1373:         iia[0] = 0;
1374:         for (i = 0; i < na; i++) { /* fill adjacency */
1375:           cnt = 0;
1376:           len = ia[i + 1] - ia[i];
1377:           row = ja + ia[i];
1378:           for (j = 0; j < len; j++) {
1379:             if (row[j] != i) jja[nnz + cnt++] = row[j]; /* if not diagonal */
1380:           }
1381:           nnz += cnt;
1382:           iia[i + 1] = nnz;
1383:         }
1384:         /* Partitioning of the adjacency matrix */
1385:         PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, na, na, iia, jja, NULL, &adj));
1386:         PetscCall(MatPartitioningSetAdjacency(mpart, adj));
1387:         PetscCall(MatPartitioningSetNParts(mpart, nloc));
1388:         PetscCall(MatPartitioningApply(mpart, &ispart));
1389:         PetscCall(ISPartitioningToNumbering(ispart, &isnumb));
1390:         PetscCall(MatDestroy(&adj));
1391:         foundpart = PETSC_TRUE;
1392:       }
1393:       PetscCall(MatRestoreRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
1394:     }
1395:     PetscCall(MatPartitioningDestroy(&mpart));
1396:   }
1397:   PetscCall(PetscMalloc1(nloc, &is));
1398:   if (!foundpart) {
1399:     /* Partitioning by contiguous chunks of rows */

1401:     PetscInt mbs   = (rend - rstart) / bs;
1402:     PetscInt start = rstart;
1403:     for (i = 0; i < nloc; i++) {
1404:       PetscInt count = (mbs / nloc + ((mbs % nloc) > i)) * bs;
1405:       PetscCall(ISCreateStride(PETSC_COMM_SELF, count, start, 1, &is[i]));
1406:       start += count;
1407:     }

1409:   } else {
1410:     /* Partitioning by adjacency of diagonal block  */

1412:     const PetscInt *numbering;
1413:     PetscInt       *count, nidx, *indices, *newidx, start = 0;
1414:     /* Get node count in each partition */
1415:     PetscCall(PetscMalloc1(nloc, &count));
1416:     PetscCall(ISPartitioningCount(ispart, nloc, count));
1417:     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1418:       for (i = 0; i < nloc; i++) count[i] *= bs;
1419:     }
1420:     /* Build indices from node numbering */
1421:     PetscCall(ISGetLocalSize(isnumb, &nidx));
1422:     PetscCall(PetscMalloc1(nidx, &indices));
1423:     for (i = 0; i < nidx; i++) indices[i] = i; /* needs to be initialized */
1424:     PetscCall(ISGetIndices(isnumb, &numbering));
1425:     PetscCall(PetscSortIntWithPermutation(nidx, numbering, indices));
1426:     PetscCall(ISRestoreIndices(isnumb, &numbering));
1427:     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1428:       PetscCall(PetscMalloc1(nidx * bs, &newidx));
1429:       for (i = 0; i < nidx; i++) {
1430:         for (j = 0; j < bs; j++) newidx[i * bs + j] = indices[i] * bs + j;
1431:       }
1432:       PetscCall(PetscFree(indices));
1433:       nidx *= bs;
1434:       indices = newidx;
1435:     }
1436:     /* Shift to get global indices */
1437:     for (i = 0; i < nidx; i++) indices[i] += rstart;

1439:     /* Build the index sets for each block */
1440:     for (i = 0; i < nloc; i++) {
1441:       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, count[i], &indices[start], PETSC_COPY_VALUES, &is[i]));
1442:       PetscCall(ISSort(is[i]));
1443:       start += count[i];
1444:     }

1446:     PetscCall(PetscFree(count));
1447:     PetscCall(PetscFree(indices));
1448:     PetscCall(ISDestroy(&isnumb));
1449:     PetscCall(ISDestroy(&ispart));
1450:   }
1451:   *iis = is;
1452:   PetscFunctionReturn(PETSC_SUCCESS);
1453: }

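/*
   PCGASMCreateStraddlingSubdomains - creates N subdomains that may straddle MPI process boundaries by
   coalescing the rows of A via MatSubdomainsCreateCoalesce(); n returns the number of resulting index
   sets held on this process.
*/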
1455: PETSC_INTERN PetscErrorCode PCGASMCreateStraddlingSubdomains(Mat A, PetscInt N, PetscInt *n, IS *iis[])
1456: {
1457:   PetscFunctionBegin;
1458:   PetscCall(MatSubdomainsCreateCoalesce(A, N, n, iis));
1459:   PetscFunctionReturn(PETSC_SUCCESS);
1460: }

1462: /*@C
1463:   PCGASMCreateSubdomains - Creates `n` index sets defining `n` nonoverlapping subdomains on this MPI process for the `PCGASM` additive
1464:   Schwarz preconditioner for any problem based on its matrix.

1466:   Collective

1468:   Input Parameters:
1469: + A - The global matrix operator
1470: - N - the number of global subdomains requested

1472:   Output Parameters:
1473: + n   - the number of subdomains created on this MPI process
1474: - iis - the array of index sets defining the local inner subdomains (on which the correction is applied)

1476:   Level: advanced

1478:   Notes:
1479:   When `N` >= A's communicator size, each subdomain is local -- contained within a single MPI process.
1480:   When `N` < size, the subdomains straddle process boundaries and are no longer local.
1481:   The resulting subdomains can be used in `PCGASMSetSubdomains(pc, n, iis, NULL)`.  The overlapping
1482:   outer subdomains will be automatically generated from these according to the requested amount of
1483:   overlap; this is currently supported only with local subdomains.

1485:   Use `PCGASMDestroySubdomains()` to free the array and the list of index sets.
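
  Example Usage:
  A minimal sketch creating one subdomain per MPI process for the matrix `A` of a `PCGASM` preconditioner `pc`;
  `size` is the communicator size and the other names are illustrative
.vb
  PetscInt n;
  IS      *sub;

  PetscCall(PCGASMCreateSubdomains(A, size, &n, &sub));
  PetscCall(PCGASMSetSubdomains(pc, n, sub, NULL));
  PetscCall(PCGASMDestroySubdomains(n, &sub, NULL));
.ve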

1487: .seealso: [](ch_ksp), `PCGASM`, `PCGASMSetSubdomains()`, `PCGASMDestroySubdomains()`
1488: @*/
1489: PetscErrorCode PCGASMCreateSubdomains(Mat A, PetscInt N, PetscInt *n, IS *iis[])
1490: {
1491:   PetscMPIInt size;

1493:   PetscFunctionBegin;
1495:   PetscAssertPointer(iis, 4);

1497:   PetscCheck(N >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Number of subdomains must be > 0, N = %" PetscInt_FMT, N);
1498:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
1499:   if (N >= size) {
1500:     *n = N / size + (N % size);
1501:     PetscCall(PCGASMCreateLocalSubdomains(A, *n, iis));
1502:   } else {
1503:     PetscCall(PCGASMCreateStraddlingSubdomains(A, N, n, iis));
1504:   }
1505:   PetscFunctionReturn(PETSC_SUCCESS);
1506: }

1508: /*@C
1509:   PCGASMDestroySubdomains - Destroys the index sets created with
1510:   `PCGASMCreateSubdomains()` or `PCGASMCreateSubdomains2D()`. Should be
1511:   called after setting subdomains with `PCGASMSetSubdomains()`.

1513:   Collective

1515:   Input Parameters:
1516: + n   - the number of index sets
1517: . iis - the array of inner subdomains
1518: - ois - the array of outer subdomains, can be `NULL`

1520:   Level: intermediate

1522:   Note:
1523:   This is a convenience subroutine that walks each list,
1524:   destroys each `IS` on the list, and then frees the list. At the end the
1525:   list pointers are set to `NULL`.

1527: .seealso: [](ch_ksp), `PCGASM`, `PCGASMCreateSubdomains()`, `PCGASMSetSubdomains()`
1528: @*/
1529: PetscErrorCode PCGASMDestroySubdomains(PetscInt n, IS *iis[], IS *ois[])
1530: {
1531:   PetscFunctionBegin;
1532:   if (n <= 0) PetscFunctionReturn(PETSC_SUCCESS);
1533:   if (ois) {
1534:     PetscAssertPointer(ois, 3);
1535:     if (*ois) {
1536:       PetscAssertPointer(*ois, 3);
1537:       for (PetscInt i = 0; i < n; i++) PetscCall(ISDestroy(&(*ois)[i]));
1538:       PetscCall(PetscFree(*ois));
1539:     }
1540:   }
1541:   if (iis) {
1542:     PetscAssertPointer(iis, 2);
1543:     if (*iis) {
1544:       PetscAssertPointer(*iis, 2);
1545:       for (PetscInt i = 0; i < n; i++) PetscCall(ISDestroy(&(*iis)[i]));
1546:       PetscCall(PetscFree(*iis));
1547:     }
1548:   }
1549:   PetscFunctionReturn(PETSC_SUCCESS);
1550: }

1552: #define PCGASMLocalSubdomainBounds2D(M, N, xleft, ylow, xright, yhigh, first, last, xleft_loc, ylow_loc, xright_loc, yhigh_loc, n) \
1553:   do { \
1554:     PetscInt first_row = first / M, last_row = last / M + 1; \
1555:     /*                                                                                                    \
1556:      Compute ylow_loc and yhigh_loc so that (ylow_loc,xleft) and (yhigh_loc,xright) are the corners       \
1557:      of the bounding box of the intersection of the subdomain with the local ownership range (local       \
1558:      subdomain).                                                                                          \
1559:      Also compute xleft_loc and xright_loc as the lower and upper bounds on the first and last rows       \
1560:      of the intersection.                                                                                 \
1561:     */ \
1562:     /* ylow_loc is the grid row containing the first element of the local subdomain */ \
1563:     *ylow_loc = PetscMax(first_row, ylow); \
1564:     /* xleft_loc is the offset of first element of the local subdomain within its grid row (might actually be outside the local subdomain) */ \
1565:     *xleft_loc = *ylow_loc == first_row ? PetscMax(first % M, xleft) : xleft; \
1566:     /* yhigh_loc is the grid row above the last local subdomain element */ \
1567:     *yhigh_loc = PetscMin(last_row, yhigh); \
1568:     /* xright_loc is the offset of the end of the local subdomain within its grid row (might actually be outside the local subdomain) */ \
1569:     *xright_loc = *yhigh_loc == last_row ? PetscMin(xright, last % M) : xright; \
1570:     /* Now compute the size of the local subdomain n. */ \
1571:     *n = 0; \
1572:     if (*ylow_loc < *yhigh_loc) { \
1573:       PetscInt width = xright - xleft; \
1574:       *n += width * (*yhigh_loc - *ylow_loc - 1); \
1575:       *n += PetscMin(PetscMax(*xright_loc - xleft, 0), width); \
1576:       *n -= PetscMin(PetscMax(*xleft_loc - xleft, 0), width); \
1577:     } \
1578:   } while (0)

1580: /*@C
1581:   PCGASMCreateSubdomains2D - Creates the index sets for the `PCGASM` overlapping Schwarz
1582:   preconditioner for a two-dimensional problem on a regular grid.

1584:   Collective

1586:   Input Parameters:
1587: + pc       - the preconditioner context
1588: . M        - the global number of grid points in the x direction
1589: . N        - the global number of grid points in the y direction
1590: . Mdomains - the global number of subdomains in the x direction
1591: . Ndomains - the global number of subdomains in the y direction
1592: . dof      - degrees of freedom per node
1593: - overlap  - overlap in mesh lines

1595:   Output Parameters:
1596: + nsub - the number of local subdomains created
1597: . iis  - array of index sets defining inner (nonoverlapping) subdomains
1598: - ois  - array of index sets defining outer (overlapping, if overlap > 0) subdomains

1600:   Level: advanced

1602:   Note:
1603:   Use `PCGASMDestroySubdomains()` to free the index sets and the arrays
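
  Example Usage:
  A sketch decomposing an `M` x `N` grid with `dof` unknowns per node into a 2 x 2 array of subdomains with an
  overlap of 1 mesh line; the variable names are illustrative
.vb
  PetscInt nsub;
  IS      *inner, *outer;

  PetscCall(PCGASMCreateSubdomains2D(pc, M, N, 2, 2, dof, 1, &nsub, &inner, &outer));
  PetscCall(PCGASMSetSubdomains(pc, nsub, inner, outer));
  PetscCall(PCGASMDestroySubdomains(nsub, &inner, &outer));
.ve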

1605: .seealso: [](ch_ksp), `PCGASM`, `PCGASMSetSubdomains()`, `PCGASMGetSubKSP()`, `PCGASMSetOverlap()`, `PCASMCreateSubdomains2D()`,
1606:           `PCGASMDestroySubdomains()`
1607: @*/
1608: PetscErrorCode PCGASMCreateSubdomains2D(PC pc, PetscInt M, PetscInt N, PetscInt Mdomains, PetscInt Ndomains, PetscInt dof, PetscInt overlap, PetscInt *nsub, IS *iis[], IS *ois[])
1609: {
1610:   PetscMPIInt size, rank;
1611:   PetscInt    maxheight, maxwidth;
1612:   PetscInt    xstart, xleft, xright, xleft_loc, xright_loc;
1613:   PetscInt    ystart, ylow, yhigh, ylow_loc, yhigh_loc;
1614:   PetscInt    x[2][2], y[2][2], n[2];
1615:   PetscInt    first, last;
1616:   PetscInt    nidx, *idx;
1617:   PetscInt    ii, jj, s, q, d;
1618:   PetscInt    k, kk;
1619:   PetscMPIInt color;
1620:   MPI_Comm    comm, subcomm;
1621:   IS        **xis = NULL, **is = ois, **is_local = iis;

1623:   PetscFunctionBegin;
1624:   PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
1625:   PetscCallMPI(MPI_Comm_size(comm, &size));
1626:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
1627:   PetscCall(MatGetOwnershipRange(pc->pmat, &first, &last));
1628:   PetscCheck((first % dof) == 0 && (last % dof) == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE,
1629:              "Matrix row partitioning unsuitable for domain decomposition: local row range (%" PetscInt_FMT ",%" PetscInt_FMT ") "
1630:              "does not respect the number of degrees of freedom per grid point %" PetscInt_FMT,
1631:              first, last, dof);

1633:   /* Determine the number of domains with nonzero intersections with the local ownership range. */
1634:   s      = 0;
1635:   ystart = 0;
1636:   for (PetscInt j = 0; j < Ndomains; ++j) {
1637:     maxheight = N / Ndomains + ((N % Ndomains) > j); /* Maximal height of subdomain */
1638:     PetscCheck(maxheight >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many %" PetscInt_FMT " subdomains in the vertical direction for mesh height %" PetscInt_FMT, Ndomains, N);
1639:     /* Vertical domain limits with an overlap. */
1640:     ylow   = PetscMax(ystart - overlap, 0);
1641:     yhigh  = PetscMin(ystart + maxheight + overlap, N);
1642:     xstart = 0;
1643:     for (PetscInt i = 0; i < Mdomains; ++i) {
1644:       maxwidth = M / Mdomains + ((M % Mdomains) > i); /* Maximal width of subdomain */
1645:       PetscCheck(maxwidth >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many %" PetscInt_FMT " subdomains in the horizontal direction for mesh width %" PetscInt_FMT, Mdomains, M);
1646:       /* Horizontal domain limits with an overlap. */
1647:       xleft  = PetscMax(xstart - overlap, 0);
1648:       xright = PetscMin(xstart + maxwidth + overlap, M);
1649:       /*
1650:          Determine whether this subdomain intersects this rank's ownership range of pc->pmat.
1651:       */
1652:       PCGASMLocalSubdomainBounds2D(M, N, xleft, ylow, xright, yhigh, first, last, (&xleft_loc), (&ylow_loc), (&xright_loc), (&yhigh_loc), (&nidx));
1653:       if (nidx) ++s;
1654:       xstart += maxwidth;
1655:     } /* for (PetscInt i = 0; i < Mdomains; ++i) */
1656:     ystart += maxheight;
1657:   } /* for (PetscInt j = 0; j < Ndomains; ++j) */

1659:   /* Now we can allocate the necessary number of ISs. */
1660:   *nsub = s;
1661:   PetscCall(PetscMalloc1(*nsub, is));
1662:   PetscCall(PetscMalloc1(*nsub, is_local));
1663:   s      = 0;
1664:   ystart = 0;
1665:   for (PetscInt j = 0; j < Ndomains; ++j) {
1666:     maxheight = N / Ndomains + ((N % Ndomains) > j); /* Maximal height of subdomain */
1667:     PetscCheck(maxheight >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many %" PetscInt_FMT " subdomains in the vertical direction for mesh height %" PetscInt_FMT, Ndomains, N);
1668:     /* Vertical domain limits with an overlap. */
1669:     y[0][0] = PetscMax(ystart - overlap, 0);
1670:     y[0][1] = PetscMin(ystart + maxheight + overlap, N);
1671:     /* Vertical domain limits without an overlap. */
1672:     y[1][0] = ystart;
1673:     y[1][1] = PetscMin(ystart + maxheight, N);
1674:     xstart  = 0;
1675:     for (PetscInt i = 0; i < Mdomains; ++i) {
1676:       maxwidth = M / Mdomains + ((M % Mdomains) > i); /* Maximal width of subdomain */
1677:       PetscCheck(maxwidth >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many %" PetscInt_FMT " subdomains in the horizontal direction for mesh width %" PetscInt_FMT, Mdomains, M);
1678:       /* Horizontal domain limits with an overlap. */
1679:       x[0][0] = PetscMax(xstart - overlap, 0);
1680:       x[0][1] = PetscMin(xstart + maxwidth + overlap, M);
1681:       /* Horizontal domain limits without an overlap. */
1682:       x[1][0] = xstart;
1683:       x[1][1] = PetscMin(xstart + maxwidth, M);
1684:       /*
1685:          Determine whether this domain intersects this rank's ownership range of pc->pmat.
1686:          Do this twice: first for the domains with overlaps, and once without.
1687:          During the first pass create the subcommunicators, and use them on the second pass as well.
1688:       */
1689:       for (q = 0; q < 2; ++q) {
1690:         PetscBool split = PETSC_FALSE;
1691:         /*
1692:           The domain limits (xleft, xright) and (ylow, yhigh) are adjusted
1693:           according to whether the domain with or without an overlap is considered.
1694:         */
1695:         xleft  = x[q][0];
1696:         xright = x[q][1];
1697:         ylow   = y[q][0];
1698:         yhigh  = y[q][1];
1699:         PCGASMLocalSubdomainBounds2D(M, N, xleft, ylow, xright, yhigh, first, last, (&xleft_loc), (&ylow_loc), (&xright_loc), (&yhigh_loc), (&nidx));
1700:         nidx *= dof;
1701:         n[q] = nidx;
1702:         /*
1703:          Based on the counted number of indices in the local domain *with an overlap*,
1704:          construct a subcommunicator of all the MPI ranks supporting this domain.
1705:          Observe that a domain with an overlap might have nontrivial local support,
1706:          while the domain without an overlap might not.  Hence, the decision to participate
1707:          in the subcommunicator must be based on the domain with an overlap.
1708:          */
1709:         if (q == 0) {
1710:           if (nidx) color = 1;
1711:           else color = MPI_UNDEFINED;
1712:           PetscCallMPI(MPI_Comm_split(comm, color, rank, &subcomm));
1713:           split = PETSC_TRUE;
1714:         }
1715:         /*
1716:          Proceed only if the number of local indices *with an overlap* is nonzero.
1717:          */
1718:         if (n[0]) {
1719:           if (q == 0) xis = is;
1720:           if (q == 1) {
1721:             /*
1722:              The IS for the no-overlap subdomain shares a communicator with the overlapping domain.
1723:              Moreover, if the overlap is zero, the two ISs are identical.
1724:              */
1725:             if (overlap == 0) {
1726:               (*is_local)[s] = (*is)[s];
1727:               PetscCall(PetscObjectReference((PetscObject)(*is)[s]));
1728:               continue;
1729:             } else {
1730:               xis     = is_local;
1731:               subcomm = ((PetscObject)(*is)[s])->comm;
1732:             }
1733:           } /* if (q == 1) */
1734:           idx = NULL;
1735:           PetscCall(PetscMalloc1(nidx, &idx));
1736:           if (nidx) {
1737:             k = 0;
1738:             for (jj = ylow_loc; jj < yhigh_loc; ++jj) {
1739:               PetscInt x0 = (jj == ylow_loc) ? xleft_loc : xleft;
1740:               PetscInt x1 = (jj == yhigh_loc - 1) ? xright_loc : xright;
1741:               kk          = dof * (M * jj + x0);
1742:               for (ii = x0; ii < x1; ++ii) {
1743:                 for (d = 0; d < dof; ++d) idx[k++] = kk++;
1744:               }
1745:             }
1746:           }
1747:           PetscCall(ISCreateGeneral(subcomm, nidx, idx, PETSC_OWN_POINTER, (*xis) + s));
1748:           if (split) PetscCallMPI(MPI_Comm_free(&subcomm));
1749:         } /* if (n[0]) */
1750:       } /* for (q = 0; q < 2; ++q) */
1751:       if (n[0]) ++s;
1752:       xstart += maxwidth;
1753:     } /* for (PetscInt i = 0; i < Mdomains; ++i) */
1754:     ystart += maxheight;
1755:   } /* for (PetscInt j = 0; j < Ndomains; ++j) */
1756:   PetscFunctionReturn(PETSC_SUCCESS);
1757: }

1759: /*@C
1760:   PCGASMGetSubdomains - Gets the subdomains supported on this MPI process
1761:   for the `PCGASM` additive Schwarz preconditioner.

1763:   Not Collective

1765:   Input Parameter:
1766: . pc - the preconditioner context

1768:   Output Parameters:
1769: + n   - the number of subdomains for this MPI process (default value = 1)
1770: . iis - the index sets that define the inner subdomains (without overlap) supported on this process (can be `NULL`)
1771: - ois - the index sets that define the outer subdomains (with overlap) supported on this process (can be `NULL`)

1773:   Level: advanced

1775:   Notes:
1776:   The user is responsible for destroying the `IS`s and freeing the returned arrays; this can be done with
1777:   `PCGASMDestroySubdomains()`.

1779:   The `IS` numbering is in the parallel, global numbering of the vector.
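
  Example Usage:
  A sketch that retrieves the subdomains of a set-up `PCGASM` preconditioner `pc`; here only the returned
  pointer arrays are freed, while the index sets themselves are left to the `PC`
.vb
  PetscInt n;
  IS      *inner, *outer;

  PetscCall(PCGASMGetSubdomains(pc, &n, &inner, &outer));
  /* ... inspect the index sets ... */
  PetscCall(PetscFree(inner));
  PetscCall(PetscFree(outer));
.ve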

1781: .seealso: [](ch_ksp), `PCGASM`, `PCGASMSetOverlap()`, `PCGASMGetSubKSP()`, `PCGASMCreateSubdomains2D()`,
1782:           `PCGASMSetSubdomains()`, `PCGASMGetSubmatrices()`, `PCGASMDestroySubdomains()`
1783: @*/
1784: PetscErrorCode PCGASMGetSubdomains(PC pc, PetscInt *n, IS *iis[], IS *ois[])
1785: {
1786:   PC_GASM  *osm;
1787:   PetscBool match;

1789:   PetscFunctionBegin;
1791:   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCGASM, &match));
1792:   PetscCheck(match, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Incorrect object type: expected %s, got %s instead", PCGASM, ((PetscObject)pc)->type_name);
1793:   osm = (PC_GASM *)pc->data;
1794:   if (n) *n = osm->n;
1795:   if (iis) PetscCall(PetscMalloc1(osm->n, iis));
1796:   if (ois) PetscCall(PetscMalloc1(osm->n, ois));
1797:   if (iis || ois) {
1798:     for (PetscInt i = 0; i < osm->n; ++i) {
1799:       if (iis) (*iis)[i] = osm->iis[i];
1800:       if (ois) (*ois)[i] = osm->ois[i];
1801:     }
1802:   }
1803:   PetscFunctionReturn(PETSC_SUCCESS);
1804: }

1806: /*@C
1807:   PCGASMGetSubmatrices - Gets the local submatrices (for this MPI process
1808:   only) for the `PCGASM` additive Schwarz preconditioner.

1810:   Not Collective

1812:   Input Parameter:
1813: . pc - the preconditioner context

1815:   Output Parameters:
1816: + n   - the number of matrices for this MPI process (default value = 1)
1817: - mat - the matrices

1819:   Level: advanced

1821:   Note:
1822:   Matrices returned by this routine have the same communicators as the index sets (`IS`)
1823:   used to define subdomains in `PCGASMSetSubdomains()`.
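
  Example Usage:
  A sketch that queries the first local subdomain matrix after setup; `ksp` is assumed to be the outer solver
  whose preconditioner `pc` is of type `PCGASM`
.vb
  PetscInt nmat, m, n;
  Mat     *submat;

  PetscCall(KSPSetUp(ksp));
  PetscCall(PCGASMGetSubmatrices(pc, &nmat, &submat));
  if (nmat) PetscCall(MatGetSize(submat[0], &m, &n));
.ve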

1825: .seealso: [](ch_ksp), `PCGASM`, `PCGASMSetOverlap()`, `PCGASMGetSubKSP()`,
1826:           `PCGASMCreateSubdomains2D()`, `PCGASMSetSubdomains()`, `PCGASMGetSubdomains()`
1827: @*/
1828: PetscErrorCode PCGASMGetSubmatrices(PC pc, PetscInt *n, Mat *mat[])
1829: {
1830:   PC_GASM  *osm;
1831:   PetscBool match;

1833:   PetscFunctionBegin;
1835:   PetscAssertPointer(n, 2);
1836:   if (mat) PetscAssertPointer(mat, 3);
1837:   PetscCheck(pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call after KSPSetUp() or PCSetUp().");
1838:   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCGASM, &match));
1839:   PetscCheck(match, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Expected %s, got %s instead", PCGASM, ((PetscObject)pc)->type_name);
1840:   osm = (PC_GASM *)pc->data;
1841:   if (n) *n = osm->n;
1842:   if (mat) *mat = osm->pmat;
1843:   PetscFunctionReturn(PETSC_SUCCESS);
1844: }

1846: /*@
1847:   PCGASMSetUseDMSubdomains - Indicates whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible for `PCGASM`

1849:   Logically Collective

1851:   Input Parameters:
1852: + pc  - the preconditioner
1853: - flg - boolean indicating whether to use subdomains defined by the `DM`

1855:   Options Database Keys:
1856: + -pc_gasm_dm_subdomains    - configure whether subdomains are defined by the `DM`
1857: . -pc_gasm_overlap          - set the subdomain overlap
1858: - -pc_gasm_total_subdomains - set the total number of subdomains

1860:   Level: intermediate

1862:   Note:
1863:   `PCGASMSetSubdomains()`, `PCGASMSetTotalSubdomains()` or `PCGASMSetOverlap()` take precedence over `PCGASMSetUseDMSubdomains()`,
1864:   so setting `PCGASMSetSubdomains()` with nontrivial subdomain ISs or any of `PCGASMSetTotalSubdomains()` and `PCGASMSetOverlap()`
1865:   automatically disables the use of `DM`-defined subdomains.
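
  Example Usage:
  A minimal sketch enabling `DM`-defined subdomains, assuming a `DM` has been attached to the solver
  (for example with `KSPSetDM()`) and `pc` is its preconditioner
.vb
  PetscCall(PCSetType(pc, PCGASM));
  PetscCall(PCGASMSetUseDMSubdomains(pc, PETSC_TRUE));
.ve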

1867: .seealso: [](ch_ksp), `PCGASM`, `PCGASMGetUseDMSubdomains()`, `PCGASMSetSubdomains()`, `PCGASMSetOverlap()`,
1868:           `PCGASMCreateSubdomains2D()`
1869: @*/
1870: PetscErrorCode PCGASMSetUseDMSubdomains(PC pc, PetscBool flg)
1871: {
1872:   PC_GASM  *osm = (PC_GASM *)pc->data;
1873:   PetscBool match;

1875:   PetscFunctionBegin;
1878:   PetscCheck(!pc->setupcalled, ((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONGSTATE, "Not for a setup PC.");
1879:   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCGASM, &match));
1880:   if (match && !osm->user_subdomains && osm->N == PETSC_DETERMINE && osm->overlap < 0) osm->dm_subdomains = flg;
1881:   PetscFunctionReturn(PETSC_SUCCESS);
1882: }

1884: /*@
1885:   PCGASMGetUseDMSubdomains - Returns flag indicating whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible with `PCGASM`

1887:   Not Collective

1889:   Input Parameter:
1890: . pc - the preconditioner

1892:   Output Parameter:
1893: . flg - boolean indicating whether to use subdomains defined by the `DM`

1895:   Level: intermediate

1897: .seealso: [](ch_ksp), `PCGASM`, `PCGASMSetUseDMSubdomains()`, `PCGASMSetOverlap()`,
1898:           `PCGASMCreateSubdomains2D()`
1899: @*/
1900: PetscErrorCode PCGASMGetUseDMSubdomains(PC pc, PetscBool *flg)
1901: {
1902:   PC_GASM  *osm = (PC_GASM *)pc->data;
1903:   PetscBool match;

1905:   PetscFunctionBegin;
1907:   PetscAssertPointer(flg, 2);
1908:   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCGASM, &match));
1909:   if (match) {
1910:     if (flg) *flg = osm->dm_subdomains;
1911:   }
1912:   PetscFunctionReturn(PETSC_SUCCESS);
1913: }