Actual source code: redistribute.c

  1: /*
  2:   This file defines the PCREDISTRIBUTE preconditioner, which removes rows (and columns) of the matrix that have only a diagonal entry, load balances the remaining rows across the MPI processes, and solves the reduced system with an inner KSP.
  3: */
  4: #include <petsc/private/pcimpl.h>
  5: #include <petscksp.h>

  7: typedef struct _PC_FieldSplitLink *PC_FieldSplitLink;
  8: struct _PC_FieldSplitLink {
  9:   char             *splitname;
 10:   IS                is;
 11:   PC_FieldSplitLink next, previous;
 12: };

 14: typedef struct {
 15:   KSP          ksp;          /* inner solver applied to the reduced, load-balanced system */
 16:   Vec          x, b;         /* solution and right-hand side in the redistributed layout */
 17:   VecScatter   scatter;      /* moves the kept rows between the original layout and the redistributed one */
 18:   IS           is;           /* global indices of the kept rows (those with off-diagonal entries) assigned to this process after redistribution */
 19:   PetscInt     dcnt, *drows; /* number of, and local indices of, the rows that have only a diagonal entry */
 20:   PetscScalar *diag;         /* inverses of the diagonal entries of those rows (zero where the diagonal is zero) */
 21:   Vec          work;         /* full-size work vector used to form the reduced right-hand side */
 22:   PetscBool    zerodiag;     /* PETSC_TRUE if any eliminated row has a zero diagonal entry */

 24:   PetscInt          nsplits;    /* number of fields registered with PCFieldSplitSetIS() */
 25:   PC_FieldSplitLink splitlinks; /* linked list of the registered field names and index sets */
 26: } PC_Redistribute;

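/*
  Called, via PetscObjectComposeFunction(), when PCFieldSplitSetIS() is invoked on a PCREDISTRIBUTE; it records the split
  name and index set so that PCSetUp_Redistribute() can translate them to the redistributed numbering and pass them to an
  inner PCFIELDSPLIT.
*/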
 28: static PetscErrorCode PCFieldSplitSetIS_Redistribute(PC pc, const char splitname[], IS is)
 29: {
 30:   PC_Redistribute   *red  = (PC_Redistribute *)pc->data;
 31:   PC_FieldSplitLink *next = &red->splitlinks;

 33:   PetscFunctionBegin;
 34:   while (*next) next = &(*next)->next;
 35:   PetscCall(PetscNew(next));
 36:   if (splitname) {
 37:     PetscCall(PetscStrallocpy(splitname, &(*next)->splitname));
 38:   } else {
 39:     PetscCall(PetscMalloc1(8, &(*next)->splitname));
 40:     PetscCall(PetscSNPrintf((*next)->splitname, 7, "%" PetscInt_FMT, red->nsplits));
 41:   }
 42:   red->nsplits++; /* count every registered split, named or not; PCSetUp_Redistribute() checks this count against the number of available primes */
 43:   PetscCall(PetscObjectReference((PetscObject)is));
 44:   PetscCall(ISDestroy(&(*next)->is));
 45:   (*next)->is = is;
 46:   PetscFunctionReturn(PETSC_SUCCESS);
 47: }

 48: static PetscErrorCode PCView_Redistribute(PC pc, PetscViewer viewer)
 49: {
 50:   PC_Redistribute *red = (PC_Redistribute *)pc->data;
 51:   PetscBool        iascii, isstring;
 52:   PetscInt         ncnt, N;

 54:   PetscFunctionBegin;
 55:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
 56:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
 57:   if (iascii) {
 58:     PetscCallMPI(MPIU_Allreduce(&red->dcnt, &ncnt, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)pc)));
 59:     PetscCall(MatGetSize(pc->pmat, &N, NULL));
 60:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Number of rows eliminated %" PetscInt_FMT ", percentage of rows eliminated %g\n", ncnt, (double)(100 * ((PetscReal)ncnt) / ((PetscReal)N))));
 61:     PetscCall(PetscViewerASCIIPrintf(viewer, "  Redistribute preconditioner:\n"));
 62:     PetscCall(KSPView(red->ksp, viewer));
 63:   } else if (isstring) {
 64:     PetscCall(PetscViewerStringSPrintf(viewer, " Redistribute preconditioner"));
 65:     PetscCall(KSPView(red->ksp, viewer));
 66:   }
 67:   PetscFunctionReturn(PETSC_SUCCESS);
 68: }

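/*
  PCSetUp_Redistribute - finds the local rows that have only a diagonal entry (these are eliminated and handled directly in
  PCApply_Redistribute()), builds a load-balanced layout for the remaining rows, moves the row indices to their new owning
  processes, creates the reduced matrix, the scatter between the two layouts, and the operators of the inner KSP, and
  records the inverses of the eliminated diagonal entries.
*/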
 70: static PetscErrorCode PCSetUp_Redistribute(PC pc)
 71: {
 72:   PC_Redistribute         *red = (PC_Redistribute *)pc->data;
 73:   MPI_Comm                 comm;
 74:   PetscInt                 rstart, rend, nrstart, nrend, nz, cnt, *rows, ncnt, dcnt, *drows;
 75:   PetscLayout              map, nmap;
 76:   PetscMPIInt              size, tag, n;
 77:   PETSC_UNUSED PetscMPIInt imdex;
 78:   PetscInt                *source = NULL;
 79:   PetscMPIInt             *sizes  = NULL, nrecvs, nsends;
 80:   PetscInt                 j;
 81:   PetscInt                *owner = NULL, *starts = NULL, count, slen;
 82:   PetscInt                *rvalues, *svalues, recvtotal;
 83:   PetscMPIInt             *onodes1, *olengths1;
 84:   MPI_Request             *send_waits = NULL, *recv_waits = NULL;
 85:   MPI_Status               recv_status, *send_status;
 86:   Vec                      tvec, diag;
 87:   Mat                      tmat;
 88:   const PetscScalar       *d, *values;
 89:   const PetscInt          *cols;
 90:   PC_FieldSplitLink       *next = &red->splitlinks;

 92:   PetscFunctionBegin;
 93:   if (pc->setupcalled) {
 94:     PetscCheck(pc->flag == SAME_NONZERO_PATTERN, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "PCREDISTRIBUTE does not support a change in the nonzero structure of the matrix");
 95:     PetscCall(KSPGetOperators(red->ksp, NULL, &tmat));
 96:     PetscCall(MatCreateSubMatrix(pc->pmat, red->is, red->is, MAT_REUSE_MATRIX, &tmat));
 97:     PetscCall(KSPSetOperators(red->ksp, tmat, tmat));
 98:   } else {
 99:     PetscInt  NN;
100:     PC        ipc;
101:     PetscBool fptr;

103:     PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
104:     PetscCallMPI(MPI_Comm_size(comm, &size));
105:     PetscCall(PetscObjectGetNewTag((PetscObject)pc, &tag));

107:     /* count non-diagonal rows on process */
108:     PetscCall(MatGetOwnershipRange(pc->mat, &rstart, &rend));
109:     cnt = 0;
110:     for (PetscInt i = rstart; i < rend; i++) {
111:       PetscCall(MatGetRow(pc->mat, i, &nz, &cols, &values));
112:       for (PetscInt j = 0; j < nz; j++) {
113:         if (values[j] != 0 && cols[j] != i) {
114:           cnt++;
115:           break;
116:         }
117:       }
118:       PetscCall(MatRestoreRow(pc->mat, i, &nz, &cols, &values));
119:     }
120:     PetscCall(PetscMalloc1(cnt, &rows));
121:     PetscCall(PetscMalloc1(rend - rstart - cnt, &drows));

123:     /* list non-diagonal rows on process */
124:     cnt  = 0;
125:     dcnt = 0;
126:     for (PetscInt i = rstart; i < rend; i++) {
127:       PetscBool diagonly = PETSC_TRUE;
128:       PetscCall(MatGetRow(pc->mat, i, &nz, &cols, &values));
129:       for (PetscInt j = 0; j < nz; j++) {
130:         if (values[j] != 0 && cols[j] != i) {
131:           diagonly = PETSC_FALSE;
132:           break;
133:         }
134:       }
135:       if (!diagonly) rows[cnt++] = i;
136:       else drows[dcnt++] = i - rstart;
137:       PetscCall(MatRestoreRow(pc->mat, i, &nz, &cols, &values));
138:     }

140:     /* create PetscLayout for non-diagonal rows on each process */
141:     PetscCall(PetscLayoutCreate(comm, &map));
142:     PetscCall(PetscLayoutSetLocalSize(map, cnt));
143:     PetscCall(PetscLayoutSetBlockSize(map, 1));
144:     PetscCall(PetscLayoutSetUp(map));
145:     nrstart = map->rstart;
146:     nrend   = map->rend;

148:     /* create PetscLayout for load-balanced non-diagonal rows on each process */
149:     PetscCall(PetscLayoutCreate(comm, &nmap));
150:     PetscCallMPI(MPIU_Allreduce(&cnt, &ncnt, 1, MPIU_INT, MPI_SUM, comm));
151:     PetscCall(PetscLayoutSetSize(nmap, ncnt));
152:     PetscCall(PetscLayoutSetBlockSize(nmap, 1));
153:     PetscCall(PetscLayoutSetUp(nmap));

155:     PetscCall(MatGetSize(pc->pmat, &NN, NULL));
156:     PetscCall(PetscInfo(pc, "Number of diagonal rows eliminated %" PetscInt_FMT ", percentage eliminated %g\n", NN - ncnt, (double)(100 * (PetscReal)(NN - ncnt) / (PetscReal)NN)));

158:     if (size > 1) {
159:       /*
160:         The following block of code, adapted from VecScatterCreate_PtoS(), determines which rows need to be moved to
161:         which process in order to load balance the non-diagonal rows.

163:         It assumes MPI can send messages to self, which is not supported by MPI-uni, hence the size == 1 case is handled
164:         separately in the else branch below. At the end of this block, red->is holds the global indices of the rows this
165:         process will own after redistribution.
166:        */
167:       /*  count number of contributors to each processor */
168:       PetscCall(PetscMalloc2(size, &sizes, cnt, &owner));
169:       PetscCall(PetscArrayzero(sizes, size));
170:       j      = 0;
171:       nsends = 0;
172:       for (PetscInt i = nrstart; i < nrend; i++) {
173:         if (i < nmap->range[j]) j = 0;
174:         for (; j < size; j++) {
175:           if (i < nmap->range[j + 1]) {
176:             if (!sizes[j]++) nsends++;
177:             owner[i - nrstart] = j;
178:             break;
179:           }
180:         }
181:       }
182:       /* inform other processors of number of messages and max length*/
183:       PetscCall(PetscGatherNumberOfMessages(comm, NULL, sizes, &nrecvs));
184:       PetscCall(PetscGatherMessageLengths(comm, nsends, nrecvs, sizes, &onodes1, &olengths1));
185:       PetscCall(PetscSortMPIIntWithArray(nrecvs, onodes1, olengths1));
186:       recvtotal = 0;
187:       for (PetscMPIInt i = 0; i < nrecvs; i++) recvtotal += olengths1[i];

189:       /* post receives: rvalues - the rows this process will own; count - running offset into rvalues */
190:       PetscCall(PetscMalloc3(recvtotal, &rvalues, nrecvs, &source, nrecvs, &recv_waits));
191:       count = 0;
192:       for (PetscMPIInt i = 0; i < nrecvs; i++) {
193:         PetscCallMPI(MPIU_Irecv(rvalues + count, olengths1[i], MPIU_INT, onodes1[i], tag, comm, recv_waits + i));
194:         count += olengths1[i];
195:       }

197:       /*
198:         do sends: starts[i] gives the starting index in svalues for the rows going to
199:         the ith process
200:        */
201:       PetscCall(PetscMalloc3(cnt, &svalues, nsends, &send_waits, size, &starts));
202:       starts[0] = 0;
203:       for (PetscMPIInt i = 1; i < size; i++) starts[i] = starts[i - 1] + sizes[i - 1];
204:       for (PetscInt i = 0; i < cnt; i++) svalues[starts[owner[i]]++] = rows[i];
205:       for (PetscInt i = 0; i < cnt; i++) rows[i] = rows[i] - nrstart;
206:       red->drows = drows;
207:       red->dcnt  = dcnt;
208:       PetscCall(PetscFree(rows));

210:       starts[0] = 0;
211:       for (PetscMPIInt i = 1; i < size; i++) starts[i] = starts[i - 1] + sizes[i - 1];
212:       count = 0;
213:       for (PetscMPIInt i = 0; i < size; i++) {
214:         if (sizes[i]) PetscCallMPI(MPIU_Isend(svalues + starts[i], sizes[i], MPIU_INT, i, tag, comm, send_waits + count++));
215:       }

217:       /*  wait on receives */
218:       count = nrecvs;
219:       slen  = 0;
220:       while (count) {
221:         PetscCallMPI(MPI_Waitany(nrecvs, recv_waits, &imdex, &recv_status));
222:         /* unpack receives into our local space */
223:         PetscCallMPI(MPI_Get_count(&recv_status, MPIU_INT, &n));
224:         slen += n;
225:         count--;
226:       }
227:       PetscCheck(slen == recvtotal, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Total message lengths %" PetscInt_FMT " not expected %" PetscInt_FMT, slen, recvtotal);
228:       PetscCall(ISCreateGeneral(comm, slen, rvalues, PETSC_COPY_VALUES, &red->is));

230:       /* free all work space */
231:       PetscCall(PetscFree(olengths1));
232:       PetscCall(PetscFree(onodes1));
233:       PetscCall(PetscFree3(rvalues, source, recv_waits));
234:       PetscCall(PetscFree2(sizes, owner));
235:       if (nsends) { /* wait on sends */
236:         PetscCall(PetscMalloc1(nsends, &send_status));
237:         PetscCallMPI(MPI_Waitall(nsends, send_waits, send_status));
238:         PetscCall(PetscFree(send_status));
239:       }
240:       PetscCall(PetscFree3(svalues, send_waits, starts));
241:     } else {
242:       PetscCall(ISCreateGeneral(comm, cnt, rows, PETSC_OWN_POINTER, &red->is));
243:       red->drows = drows;
244:       red->dcnt  = dcnt;
245:       slen       = cnt;
246:     }
247:     PetscCall(PetscLayoutDestroy(&map));

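    /* create the redistributed right-hand side and solution vectors and the scatter that moves the kept rows from the original layout into them */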
249:     PetscCall(VecCreateMPI(comm, slen, PETSC_DETERMINE, &red->b));
250:     PetscCall(VecDuplicate(red->b, &red->x));
251:     PetscCall(MatCreateVecs(pc->pmat, &tvec, NULL));
252:     PetscCall(VecScatterCreate(tvec, red->is, red->b, NULL, &red->scatter));

254:     /* Map the PCFIELDSPLIT fields to redistributed KSP */
255:     PetscCall(KSPGetPC(red->ksp, &ipc));
256:     PetscCall(PetscObjectHasFunction((PetscObject)ipc, "PCFieldSplitSetIS_C", &fptr));
257:     if (fptr && *next) {
258:       PetscScalar       *atvec;
259:       const PetscScalar *ab;
260:       PetscInt           primes[] = {2, 3, 5, 7, 11, 13, 17, 19};
261:       PetscInt           cnt      = 0;

263:       PetscCheck(red->nsplits <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(primes), PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No support for this many fields");
264:       PetscCall(VecSet(tvec, 1.0));
265:       PetscCall(VecGetArray(tvec, &atvec));

267:       while (*next) {
268:         const PetscInt *indices;
269:         PetscInt        n;

271:         PetscCall(ISGetIndices((*next)->is, &indices));
272:         PetscCall(ISGetLocalSize((*next)->is, &n));
273:         for (PetscInt i = 0; i < n; i++) atvec[indices[i] - rstart] *= primes[cnt];
274:         PetscCall(ISRestoreIndices((*next)->is, &indices));
275:         cnt++;
276:         next = &(*next)->next;
277:       }
278:       PetscCall(VecRestoreArray(tvec, &atvec));
279:       PetscCall(VecScatterBegin(red->scatter, tvec, red->b, INSERT_VALUES, SCATTER_FORWARD));
280:       PetscCall(VecScatterEnd(red->scatter, tvec, red->b, INSERT_VALUES, SCATTER_FORWARD));
281:       cnt = 0;
282:       PetscCall(VecGetArrayRead(red->b, &ab));
283:       next = &red->splitlinks;
284:       while (*next) {
285:         PetscInt  n = 0;
286:         PetscInt *indices;
287:         IS        ris;

289:         for (PetscInt i = 0; i < nmap->rend - nmap->rstart; i++) {
290:           if (!(((PetscInt)PetscRealPart(ab[i])) % primes[cnt])) n++;
291:         }
292:         PetscCall(PetscMalloc1(n, &indices));
293:         n = 0;
294:         for (PetscInt i = 0; i < nmap->rend - nmap->rstart; i++) {
295:           if (!(((PetscInt)PetscRealPart(ab[i])) % primes[cnt])) indices[n++] = i + nmap->rstart;
296:         }
297:         PetscCall(ISCreateGeneral(comm, n, indices, PETSC_OWN_POINTER, &ris));
298:         PetscCall(PCFieldSplitSetIS(ipc, (*next)->splitname, ris));

300:         PetscCall(ISDestroy(&ris));
301:         cnt++;
302:         next = &(*next)->next;
303:       }
304:       PetscCall(VecRestoreArrayRead(red->b, &ab));
305:     }
306:     PetscCall(VecDestroy(&tvec));
307:     PetscCall(MatCreateSubMatrix(pc->pmat, red->is, red->is, MAT_INITIAL_MATRIX, &tmat));
308:     PetscCall(KSPSetOperators(red->ksp, tmat, tmat));
309:     PetscCall(MatDestroy(&tmat));
310:     PetscCall(PetscLayoutDestroy(&nmap));
311:   }

313:   /* get diagonal portion of matrix */
314:   PetscCall(PetscFree(red->diag));
315:   PetscCall(PetscMalloc1(red->dcnt, &red->diag));
316:   PetscCall(MatCreateVecs(pc->pmat, &diag, NULL));
317:   PetscCall(MatGetDiagonal(pc->pmat, diag));
318:   PetscCall(VecGetArrayRead(diag, &d));
319:   for (PetscInt i = 0; i < red->dcnt; i++) {
320:     if (d[red->drows[i]] != 0) red->diag[i] = 1.0 / d[red->drows[i]];
321:     else {
322:       red->zerodiag = PETSC_TRUE;
323:       red->diag[i]  = 0.0;
324:     }
325:   }
326:   PetscCall(VecRestoreArrayRead(diag, &d));
327:   PetscCall(VecDestroy(&diag));
328:   PetscCall(KSPSetUp(red->ksp));
329:   PetscFunctionReturn(PETSC_SUCCESS);
330: }

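/*
  PCApply_Redistribute - the entries of x for the diagonal-only rows are set directly to diag(A)^{-1} b (flagging an
  inconsistent system if a zero row has a nonzero right-hand side); the right-hand side for the remaining rows is updated
  to b - A x so the eliminated unknowns no longer contribute; that reduced right-hand side is then scattered to the
  load-balanced layout, solved with the inner KSP, and the result scattered back into x.
*/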
332: static PetscErrorCode PCApply_Redistribute(PC pc, Vec b, Vec x)
333: {
334:   PC_Redistribute   *red   = (PC_Redistribute *)pc->data;
335:   PetscInt           dcnt  = red->dcnt, i;
336:   const PetscInt    *drows = red->drows;
337:   PetscScalar       *xwork;
338:   const PetscScalar *bwork, *diag = red->diag;
339:   PetscBool          nonzero_guess;

341:   PetscFunctionBegin;
342:   if (!red->work) PetscCall(VecDuplicate(b, &red->work));
343:   PetscCall(KSPGetInitialGuessNonzero(red->ksp, &nonzero_guess));
344:   if (nonzero_guess) {
345:     PetscCall(VecScatterBegin(red->scatter, x, red->x, INSERT_VALUES, SCATTER_FORWARD));
346:     PetscCall(VecScatterEnd(red->scatter, x, red->x, INSERT_VALUES, SCATTER_FORWARD));
347:   }

349:   /* compute the rows of solution that have diagonal entries only */
350:   PetscCall(VecSet(x, 0.0)); /* x = diag(A)^{-1} b */
351:   PetscCall(VecGetArray(x, &xwork));
352:   PetscCall(VecGetArrayRead(b, &bwork));
353:   if (red->zerodiag) {
354:     for (i = 0; i < dcnt; i++) {
355:       if (diag[i] == 0.0 && bwork[drows[i]] != 0.0) {
356:         PetscCheck(!pc->erroriffailure, PETSC_COMM_SELF, PETSC_ERR_CONV_FAILED, "Linear system is inconsistent, zero matrix row but nonzero right-hand side");
357:         PetscCall(PetscInfo(pc, "Linear system is inconsistent, zero matrix row but nonzero right-hand side\n"));
358:         pc->failedreasonrank = PC_INCONSISTENT_RHS;
359:       }
360:     }
361:     PetscCall(VecFlag(x, pc->failedreasonrank == PC_INCONSISTENT_RHS));
362:   }
363:   for (i = 0; i < dcnt; i++) xwork[drows[i]] = diag[i] * bwork[drows[i]];
364:   PetscCall(PetscLogFlops(dcnt));
365:   PetscCall(VecRestoreArray(x, &xwork));
366:   PetscCall(VecRestoreArrayRead(b, &bwork));
367:   /* update the right-hand side for the reduced system with diagonal rows (and corresponding columns) removed */
368:   PetscCall(MatMult(pc->pmat, x, red->work));
369:   PetscCall(VecAYPX(red->work, -1.0, b)); /* red->work = b - A x */

371:   PetscCall(VecScatterBegin(red->scatter, red->work, red->b, INSERT_VALUES, SCATTER_FORWARD));
372:   PetscCall(VecScatterEnd(red->scatter, red->work, red->b, INSERT_VALUES, SCATTER_FORWARD));
373:   PetscCall(KSPSolve(red->ksp, red->b, red->x));
374:   PetscCall(KSPCheckSolve(red->ksp, pc, red->x));
375:   PetscCall(VecScatterBegin(red->scatter, red->x, x, INSERT_VALUES, SCATTER_REVERSE));
376:   PetscCall(VecScatterEnd(red->scatter, red->x, x, INSERT_VALUES, SCATTER_REVERSE));
377:   PetscFunctionReturn(PETSC_SUCCESS);
378: }

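/*
  PCApplyTranspose_Redistribute - the same algorithm as PCApply_Redistribute() applied to A^T; it reuses the index set and
  scatter built for A, which is why the matrix must be known to be structurally symmetric.
*/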
380: static PetscErrorCode PCApplyTranspose_Redistribute(PC pc, Vec b, Vec x)
381: {
382:   PC_Redistribute   *red   = (PC_Redistribute *)pc->data;
383:   PetscInt           dcnt  = red->dcnt, i;
384:   const PetscInt    *drows = red->drows;
385:   PetscScalar       *xwork;
386:   const PetscScalar *bwork, *diag = red->diag;
387:   PetscBool          set, flg     = PETSC_FALSE, nonzero_guess;

389:   PetscFunctionBegin;
390:   PetscCall(MatIsStructurallySymmetricKnown(pc->pmat, &set, &flg));
391:   PetscCheck(set && flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "PCApplyTranspose() not implemented for structurally unsymmetric Mat");
392:   if (!red->work) PetscCall(VecDuplicate(b, &red->work));
393:   PetscCall(KSPGetInitialGuessNonzero(red->ksp, &nonzero_guess));
394:   if (nonzero_guess) {
395:     PetscCall(VecScatterBegin(red->scatter, x, red->x, INSERT_VALUES, SCATTER_FORWARD));
396:     PetscCall(VecScatterEnd(red->scatter, x, red->x, INSERT_VALUES, SCATTER_FORWARD));
397:   }

399:   /* compute the rows of solution that have diagonal entries only */
400:   PetscCall(VecSet(x, 0.0)); /* x = diag(A)^{-1} b */
401:   PetscCall(VecGetArray(x, &xwork));
402:   PetscCall(VecGetArrayRead(b, &bwork));
403:   if (red->zerodiag) {
404:     for (i = 0; i < dcnt; i++) {
405:       if (diag[i] == 0.0 && bwork[drows[i]] != 0.0) {
406:         PetscCheck(!pc->erroriffailure, PETSC_COMM_SELF, PETSC_ERR_CONV_FAILED, "Linear system is inconsistent, zero matrix row but nonzero right-hand side");
407:         PetscCall(PetscInfo(pc, "Linear system is inconsistent, zero matrix row but nonzero right-hand side\n"));
408:         pc->failedreasonrank = PC_INCONSISTENT_RHS;
409:       }
410:     }
411:     PetscCall(VecFlag(x, pc->failedreasonrank == PC_INCONSISTENT_RHS));
412:   }
413:   for (i = 0; i < dcnt; i++) xwork[drows[i]] = diag[i] * bwork[drows[i]];
414:   PetscCall(PetscLogFlops(dcnt));
415:   PetscCall(VecRestoreArray(x, &xwork));
416:   PetscCall(VecRestoreArrayRead(b, &bwork));
417:   /* update the right-hand side for the reduced system with diagonal rows (and corresponding columns) removed */
418:   PetscCall(MatMultTranspose(pc->pmat, x, red->work));
419:   PetscCall(VecAYPX(red->work, -1.0, b)); /* red->work = b - A^T x */

421:   PetscCall(VecScatterBegin(red->scatter, red->work, red->b, INSERT_VALUES, SCATTER_FORWARD));
422:   PetscCall(VecScatterEnd(red->scatter, red->work, red->b, INSERT_VALUES, SCATTER_FORWARD));
423:   PetscCall(KSPSolveTranspose(red->ksp, red->b, red->x));
424:   PetscCall(KSPCheckSolve(red->ksp, pc, red->x));
425:   PetscCall(VecScatterBegin(red->scatter, red->x, x, INSERT_VALUES, SCATTER_REVERSE));
426:   PetscCall(VecScatterEnd(red->scatter, red->x, x, INSERT_VALUES, SCATTER_REVERSE));
427:   PetscFunctionReturn(PETSC_SUCCESS);
428: }

430: static PetscErrorCode PCDestroy_Redistribute(PC pc)
431: {
432:   PC_Redistribute  *red  = (PC_Redistribute *)pc->data;
433:   PC_FieldSplitLink next = red->splitlinks;

435:   PetscFunctionBegin;
436:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", NULL));

438:   while (next) {
439:     PC_FieldSplitLink ilink;
440:     PetscCall(PetscFree(next->splitname));
441:     PetscCall(ISDestroy(&next->is));
442:     ilink = next;
443:     next  = next->next;
444:     PetscCall(PetscFree(ilink));
445:   }
446:   PetscCall(VecScatterDestroy(&red->scatter));
447:   PetscCall(ISDestroy(&red->is));
448:   PetscCall(VecDestroy(&red->b));
449:   PetscCall(VecDestroy(&red->x));
450:   PetscCall(KSPDestroy(&red->ksp));
451:   PetscCall(VecDestroy(&red->work));
452:   PetscCall(PetscFree(red->drows));
453:   PetscCall(PetscFree(red->diag));
454:   PetscCall(PetscFree(pc->data));
455:   PetscFunctionReturn(PETSC_SUCCESS);
456: }

458: static PetscErrorCode PCSetFromOptions_Redistribute(PC pc, PetscOptionItems *PetscOptionsObject)
459: {
460:   PC_Redistribute *red = (PC_Redistribute *)pc->data;

462:   PetscFunctionBegin;
463:   PetscCall(KSPSetFromOptions(red->ksp));
464:   PetscFunctionReturn(PETSC_SUCCESS);
465: }

467: /*@
468:   PCRedistributeGetKSP - Gets the inner `KSP` created and used by a `PCREDISTRIBUTE` preconditioner

470:   Not Collective

472:   Input Parameter:
473: . pc - the preconditioner context

475:   Output Parameter:
476: . innerksp - the inner `KSP`

478:   Level: advanced
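
  Example Usage:
  A minimal sketch, assuming the matrix `A` and the vectors `b` and `x` already exist; the choice of `KSPCG` for the inner
  solver is only illustrative.
.vb
  KSP ksp, innerksp;
  PC  pc;

  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  PetscCall(KSPSetOperators(ksp, A, A));
  PetscCall(KSPSetType(ksp, KSPPREONLY));
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCREDISTRIBUTE));
  PetscCall(PCRedistributeGetKSP(pc, &innerksp)); /* customize the solver used on the reduced, redistributed system */
  PetscCall(KSPSetType(innerksp, KSPCG));
  PetscCall(KSPSolve(ksp, b, x));
.ve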

480: .seealso: [](ch_ksp), `KSP`, `PCREDISTRIBUTE`
481: @*/
482: PetscErrorCode PCRedistributeGetKSP(PC pc, KSP *innerksp)
483: {
484:   PC_Redistribute *red = (PC_Redistribute *)pc->data;

486:   PetscFunctionBegin;
487:   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
488:   PetscAssertPointer(innerksp, 2);
489:   *innerksp = red->ksp;
490:   PetscFunctionReturn(PETSC_SUCCESS);
491: }

493: /*MC
494:      PCREDISTRIBUTE - Redistributes a matrix for load balancing, removing the rows (and the corresponding columns) that only have a diagonal entry and then
495:      applies a `KSP` to that new smaller matrix

497:      Level: intermediate

499:      Notes:
500:      Options for the inner (redistributed) `KSP` and `PC` use the options database prefix `-redistribute_`

502:      Usually this is run with `-ksp_type preonly` for the outer `KSP`, so that all the work is done by the redistributed inner solve

504:      If you have used `MatZeroRows()` to eliminate (for example, Dirichlet) boundary conditions for a symmetric problem then you can use, for example, `-ksp_type preonly
505:      -pc_type redistribute -redistribute_ksp_type cg -redistribute_pc_type bjacobi -redistribute_sub_pc_type icc` to take advantage of the symmetry.
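
     For example, for an application that calls `KSPSetFromOptions()` (`./app` below is only a placeholder for such an application), those options are used as
.vb
   ./app -ksp_type preonly -pc_type redistribute -redistribute_ksp_type cg -redistribute_pc_type bjacobi -redistribute_sub_pc_type icc
.ve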

507:      Supports `PCFieldSplitSetIS()`: index sets registered on this `PC` are automatically reduced to the redistributed numbering and passed on to an inner `PCFIELDSPLIT`,
508:      selected with, for example, `-ksp_type preonly -pc_type redistribute -redistribute_pc_type fieldsplit`. Does not support the `PCFIELDSPLIT` options database keys for defining the fields; use `PCFieldSplitSetIS()` instead.
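
     For instance, a sketch assuming `isu` and `isp` are index sets (in the original, unreduced numbering) for two fields:
.vb
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCREDISTRIBUTE));
  PetscCall(PCFieldSplitSetIS(pc, "u", isu)); /* the index sets are reduced automatically and passed to the inner PCFIELDSPLIT */
  PetscCall(PCFieldSplitSetIS(pc, "p", isp));
.ve
     with the inner preconditioner then selected at run time with `-redistribute_pc_type fieldsplit`.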

510:      This does NOT call a partitioner to reorder rows to lower communication; the ordering of the rows in the original matrix and redistributed matrix is the same. Rows are moved
511:      between MPI processes inside the preconditioner to balance the number of rows on each process.

513:      The matrix block information is lost when individual rows and columns of the matrix are removed, thus the behavior of the preconditioner on the reduced
514:      system may be very different (typically worse) than its behavior on the full system. This is particularly true for elasticity problems.

516:      Developer Note:
517:      Should add an option to this preconditioner to use a partitioner to redistribute the rows to lower communication.

519: .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PCRedistributeGetKSP()`, `MatZeroRows()`, `PCFieldSplitSetIS()`, `PCFIELDSPLIT`
520: M*/

522: PETSC_EXTERN PetscErrorCode PCCreate_Redistribute(PC pc)
523: {
524:   PC_Redistribute *red;
525:   const char      *prefix;

527:   PetscFunctionBegin;
528:   PetscCall(PetscNew(&red));
529:   pc->data = (void *)red;

531:   pc->ops->apply          = PCApply_Redistribute;
532:   pc->ops->applytranspose = PCApplyTranspose_Redistribute;
533:   pc->ops->setup          = PCSetUp_Redistribute;
534:   pc->ops->destroy        = PCDestroy_Redistribute;
535:   pc->ops->setfromoptions = PCSetFromOptions_Redistribute;
536:   pc->ops->view           = PCView_Redistribute;

538:   PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &red->ksp));
539:   PetscCall(KSPSetNestLevel(red->ksp, pc->kspnestlevel));
540:   PetscCall(KSPSetErrorIfNotConverged(red->ksp, pc->erroriffailure));
541:   PetscCall(PetscObjectIncrementTabLevel((PetscObject)red->ksp, (PetscObject)pc, 1));
542:   PetscCall(PCGetOptionsPrefix(pc, &prefix));
543:   PetscCall(KSPSetOptionsPrefix(red->ksp, prefix));
544:   PetscCall(KSPAppendOptionsPrefix(red->ksp, "redistribute_"));
545:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", PCFieldSplitSetIS_Redistribute));
546:   PetscFunctionReturn(PETSC_SUCCESS);
547: }