Actual source code: bjacobi.c

  1: /*
  2:    Defines a block Jacobi preconditioner.
  3: */

  5: #include <../src/ksp/pc/impls/bjacobi/bjacobi.h>

  7: static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC, Mat, Mat);
  8: static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC, Mat, Mat);
  9: static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC);

 11: static PetscErrorCode PCSetUp_BJacobi(PC pc)
 12: {
 13:   PC_BJacobi *jac = (PC_BJacobi *)pc->data;
 14:   Mat         mat = pc->mat, pmat = pc->pmat;
 15:   PetscBool   hasop;
 16:   PetscInt    N, M, start, i, sum, end;
 17:   PetscInt    bs, i_start = -1, i_end = -1;
 18:   PetscMPIInt rank, size;

 20:   PetscFunctionBegin;
 21:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
 22:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
 23:   PetscCall(MatGetLocalSize(pc->pmat, &M, &N));
 24:   PetscCall(MatGetBlockSize(pc->pmat, &bs));

 26:   if (jac->n > 0 && jac->n < size) {
 27:     PetscCall(PCSetUp_BJacobi_Multiproc(pc));
 28:     PetscFunctionReturn(PETSC_SUCCESS);
 29:   }

 31:   /* Determine the number of blocks assigned to each processor */
 32:   /* local block count given */
 33:   if (jac->n_local > 0 && jac->n < 0) {
 34:     PetscCallMPI(MPIU_Allreduce(&jac->n_local, &jac->n, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)pc)));
 35:     if (jac->l_lens) { /* check that user set these correctly */
 36:       sum = 0;
 37:       for (i = 0; i < jac->n_local; i++) {
 38:         PetscCheck(jac->l_lens[i] / bs * bs == jac->l_lens[i], PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat blocksize doesn't match block Jacobi layout");
 39:         sum += jac->l_lens[i];
 40:       }
 41:       PetscCheck(sum == M, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Local lens set incorrectly");
 42:     } else {
 43:       PetscCall(PetscMalloc1(jac->n_local, &jac->l_lens));
 44:       for (i = 0; i < jac->n_local; i++) jac->l_lens[i] = bs * ((M / bs) / jac->n_local + (((M / bs) % jac->n_local) > i));
 45:     }
 46:   } else if (jac->n > 0 && jac->n_local < 0) { /* global block count given */
 47:     /* global blocks given: determine which ones are local */
 48:     if (jac->g_lens) {
 49:       /* check that g_lens has valid entries */
 50:       for (i = 0; i < jac->n; i++) {
 51:         PetscCheck(jac->g_lens[i], PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Zero block not allowed");
 52:         PetscCheck(jac->g_lens[i] / bs * bs == jac->g_lens[i], PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat blocksize doesn't match block Jacobi layout");
 53:       }
 54:       if (size == 1) {
 55:         jac->n_local = jac->n;
 56:         PetscCall(PetscMalloc1(jac->n_local, &jac->l_lens));
 57:         PetscCall(PetscArraycpy(jac->l_lens, jac->g_lens, jac->n_local));
 58:         /* check that user set these correctly */
 59:         sum = 0;
 60:         for (i = 0; i < jac->n_local; i++) sum += jac->l_lens[i];
 61:         PetscCheck(sum == M, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Global lens set incorrectly");
 62:       } else {
 63:         PetscCall(MatGetOwnershipRange(pc->pmat, &start, &end));
 64:         /* loop over blocks determining first one owned by me */
 65:         sum = 0;
 66:         for (i = 0; i < jac->n + 1; i++) {
 67:           if (sum == start) {
 68:             i_start = i;
 69:             goto start_1;
 70:           }
 71:           if (i < jac->n) sum += jac->g_lens[i];
 72:         }
 73:         SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
 74:       start_1:
 75:         for (i = i_start; i < jac->n + 1; i++) {
 76:           if (sum == end) {
 77:             i_end = i;
 78:             goto end_1;
 79:           }
 80:           if (i < jac->n) sum += jac->g_lens[i];
 81:         }
 82:         SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
 83:       end_1:
 84:         jac->n_local = i_end - i_start;
 85:         PetscCall(PetscMalloc1(jac->n_local, &jac->l_lens));
 86:         PetscCall(PetscArraycpy(jac->l_lens, jac->g_lens + i_start, jac->n_local));
 87:       }
 88:     } else { /* no global blocks given, determine them using default layout */
 89:       jac->n_local = jac->n / size + ((jac->n % size) > rank);
 90:       PetscCall(PetscMalloc1(jac->n_local, &jac->l_lens));
 91:       for (i = 0; i < jac->n_local; i++) {
 92:         jac->l_lens[i] = ((M / bs) / jac->n_local + (((M / bs) % jac->n_local) > i)) * bs;
 93:         PetscCheck(jac->l_lens[i], PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Too many blocks given");
 94:       }
 95:     }
 96:   } else if (jac->n < 0 && jac->n_local < 0) { /* no blocks given */
 97:     jac->n       = size;
 98:     jac->n_local = 1;
 99:     PetscCall(PetscMalloc1(1, &jac->l_lens));
100:     jac->l_lens[0] = M;
101:   } else { /* jac->n > 0 && jac->n_local > 0 */
102:     if (!jac->l_lens) {
103:       PetscCall(PetscMalloc1(jac->n_local, &jac->l_lens));
104:       for (i = 0; i < jac->n_local; i++) jac->l_lens[i] = bs * ((M / bs) / jac->n_local + (((M / bs) % jac->n_local) > i));
105:     }
106:   }
107:   PetscCheck(jac->n_local >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of blocks is less than number of processors");

 109:   /* Determine mat and pmat */
110:   PetscCall(MatHasOperation(pc->mat, MATOP_GET_DIAGONAL_BLOCK, &hasop));
111:   if (!hasop && size == 1) {
112:     mat  = pc->mat;
113:     pmat = pc->pmat;
114:   } else {
115:     if (pc->useAmat) {
116:       /* use block from Amat matrix, not Pmat for local MatMult() */
117:       PetscCall(MatGetDiagonalBlock(pc->mat, &mat));
118:     }
119:     if (pc->pmat != pc->mat || !pc->useAmat) PetscCall(MatGetDiagonalBlock(pc->pmat, &pmat));
120:     else pmat = mat;
121:   }

123:   /*
124:      Setup code depends on the number of blocks
125:   */
126:   if (jac->n_local == 1) PetscCall(PCSetUp_BJacobi_Singleblock(pc, mat, pmat));
127:   else PetscCall(PCSetUp_BJacobi_Multiblock(pc, mat, pmat));
128:   PetscFunctionReturn(PETSC_SUCCESS);
129: }
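/*
   A worked example (illustrative values, not from the source) of the default block-length
   formula used above, l_lens[i] = bs * ((M/bs)/n_local + (((M/bs) % n_local) > i)).
   With M = 10 local rows, bs = 2, and n_local = 3 blocks we have M/bs = 5 block rows, so

     i = 0: 2 * (5/3 + (5%3 > 0)) = 2 * (1 + 1) = 4
     i = 1: 2 * (5/3 + (5%3 > 1)) = 2 * (1 + 1) = 4
     i = 2: 2 * (5/3 + (5%3 > 2)) = 2 * (1 + 0) = 2

   giving l_lens = {4, 4, 2}, which sums to M and keeps every block a multiple of bs.
*/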

 131: /* Default destroy, if it has never been set up */
132: static PetscErrorCode PCDestroy_BJacobi(PC pc)
133: {
134:   PC_BJacobi *jac = (PC_BJacobi *)pc->data;

136:   PetscFunctionBegin;
137:   PetscCall(PetscFree(jac->g_lens));
138:   PetscCall(PetscFree(jac->l_lens));
139:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBJacobiGetSubKSP_C", NULL));
140:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBJacobiSetTotalBlocks_C", NULL));
141:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBJacobiGetTotalBlocks_C", NULL));
142:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBJacobiSetLocalBlocks_C", NULL));
143:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBJacobiGetLocalBlocks_C", NULL));
144:   PetscCall(PetscFree(pc->data));
145:   PetscFunctionReturn(PETSC_SUCCESS);
146: }

148: static PetscErrorCode PCSetFromOptions_BJacobi(PC pc, PetscOptionItems PetscOptionsObject)
149: {
150:   PC_BJacobi *jac = (PC_BJacobi *)pc->data;
151:   PetscInt    blocks;
152:   PetscBool   flg;

154:   PetscFunctionBegin;
155:   PetscOptionsHeadBegin(PetscOptionsObject, "Block Jacobi options");
156:   PetscCall(PetscOptionsInt("-pc_bjacobi_blocks", "Total number of blocks", "PCBJacobiSetTotalBlocks", jac->n, &blocks, &flg));
157:   if (flg) PetscCall(PCBJacobiSetTotalBlocks(pc, blocks, NULL));
158:   PetscCall(PetscOptionsInt("-pc_bjacobi_local_blocks", "Local number of blocks", "PCBJacobiSetLocalBlocks", jac->n_local, &blocks, &flg));
159:   if (flg) PetscCall(PCBJacobiSetLocalBlocks(pc, blocks, NULL));
160:   if (jac->ksp) {
 161:     /* The sub-KSPs have already been set up (e.g., by PCSetUp_BJacobi_Singleblock()), but KSPSetFromOptions() was not
 162:      * called on them unless this routine had already run, so forward the options now. */
163:     for (PetscInt i = 0; i < jac->n_local; i++) PetscCall(KSPSetFromOptions(jac->ksp[i]));
164:   }
165:   PetscOptionsHeadEnd();
166:   PetscFunctionReturn(PETSC_SUCCESS);
167: }

169: #include <petscdraw.h>
170: static PetscErrorCode PCView_BJacobi(PC pc, PetscViewer viewer)
171: {
172:   PC_BJacobi           *jac   = (PC_BJacobi *)pc->data;
173:   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc *)jac->data;
174:   PetscMPIInt           rank;
175:   PetscBool             isascii, isstring, isdraw;
176:   PetscViewer           sviewer;
177:   PetscViewerFormat     format;
178:   const char           *prefix;

180:   PetscFunctionBegin;
181:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
182:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
183:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
184:   if (isascii) {
185:     if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, "  using Amat local matrix, number of blocks = %" PetscInt_FMT "\n", jac->n));
186:     PetscCall(PetscViewerASCIIPrintf(viewer, "  number of blocks = %" PetscInt_FMT "\n", jac->n));
187:     PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
188:     PetscCall(PetscViewerGetFormat(viewer, &format));
189:     if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) {
190:       PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for first block is in the following KSP and PC objects on rank 0:\n"));
191:       PetscCall(PCGetOptionsPrefix(pc, &prefix));
192:       PetscCall(PetscViewerASCIIPrintf(viewer, "  Use -%sksp_view ::ascii_info_detail to display information for all blocks\n", prefix ? prefix : ""));
193:       if (jac->ksp && !jac->psubcomm) {
194:         PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
195:         if (rank == 0) {
196:           PetscCall(PetscViewerASCIIPushTab(sviewer));
197:           PetscCall(KSPView(jac->ksp[0], sviewer));
198:           PetscCall(PetscViewerASCIIPopTab(sviewer));
199:         }
200:         PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
201:         /*  extra call needed because of the two calls to PetscViewerASCIIPushSynchronized() in PetscViewerGetSubViewer() */
202:         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
203:       } else if (mpjac && jac->ksp && mpjac->psubcomm) {
204:         PetscCall(PetscViewerGetSubViewer(viewer, mpjac->psubcomm->child, &sviewer));
205:         if (!mpjac->psubcomm->color) {
206:           PetscCall(PetscViewerASCIIPushTab(sviewer));
207:           PetscCall(KSPView(*jac->ksp, sviewer));
208:           PetscCall(PetscViewerASCIIPopTab(sviewer));
209:         }
210:         PetscCall(PetscViewerRestoreSubViewer(viewer, mpjac->psubcomm->child, &sviewer));
211:         /*  extra call needed because of the two calls to PetscViewerASCIIPushSynchronized() in PetscViewerGetSubViewer() */
212:         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
213:       }
214:     } else {
215:       PetscInt n_global;
216:       PetscCallMPI(MPIU_Allreduce(&jac->n_local, &n_global, 1, MPIU_INT, MPI_MAX, PetscObjectComm((PetscObject)pc)));
217:       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
218:       PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for each block is in the following KSP and PC objects:\n"));
219:       PetscCall(PetscViewerASCIIPushTab(viewer));
220:       PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
221:       PetscCall(PetscViewerASCIIPrintf(sviewer, "[%d] number of local blocks = %" PetscInt_FMT ", first local block number = %" PetscInt_FMT "\n", rank, jac->n_local, jac->first_local));
222:       for (PetscInt i = 0; i < jac->n_local; i++) {
223:         PetscCall(PetscViewerASCIIPrintf(sviewer, "[%d] local block number %" PetscInt_FMT "\n", rank, i));
224:         PetscCall(KSPView(jac->ksp[i], sviewer));
225:         PetscCall(PetscViewerASCIIPrintf(sviewer, "- - - - - - - - - - - - - - - - - -\n"));
226:       }
227:       PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
228:       PetscCall(PetscViewerASCIIPopTab(viewer));
229:       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
230:     }
231:   } else if (isstring) {
232:     PetscCall(PetscViewerStringSPrintf(viewer, " blks=%" PetscInt_FMT, jac->n));
233:     PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
234:     if (jac->ksp) PetscCall(KSPView(jac->ksp[0], sviewer));
235:     PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
236:   } else if (isdraw) {
237:     PetscDraw draw;
238:     char      str[25];
239:     PetscReal x, y, bottom, h;

241:     PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
242:     PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
243:     PetscCall(PetscSNPrintf(str, 25, "Number blocks %" PetscInt_FMT, jac->n));
244:     PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
245:     bottom = y - h;
246:     PetscCall(PetscDrawPushCurrentPoint(draw, x, bottom));
 247:     /* warning: the communicator on the viewer is different than on the ksp in parallel */
248:     if (jac->ksp) PetscCall(KSPView(jac->ksp[0], viewer));
249:     PetscCall(PetscDrawPopCurrentPoint(draw));
250:   }
251:   PetscFunctionReturn(PETSC_SUCCESS);
252: }

254: static PetscErrorCode PCBJacobiGetSubKSP_BJacobi(PC pc, PetscInt *n_local, PetscInt *first_local, KSP **ksp)
255: {
256:   PC_BJacobi *jac = (PC_BJacobi *)pc->data;

258:   PetscFunctionBegin;
259:   PetscCheck(pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() first");

261:   if (n_local) *n_local = jac->n_local;
262:   if (first_local) *first_local = jac->first_local;
263:   if (ksp) *ksp = jac->ksp;
264:   PetscFunctionReturn(PETSC_SUCCESS);
265: }

267: static PetscErrorCode PCBJacobiSetTotalBlocks_BJacobi(PC pc, PetscInt blocks, const PetscInt *lens)
268: {
269:   PC_BJacobi *jac = (PC_BJacobi *)pc->data;

271:   PetscFunctionBegin;
272:   PetscCheck(!pc->setupcalled || jac->n == blocks, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Cannot alter number of blocks after PCSetUp()/KSPSetUp() has been called");
273:   jac->n = blocks;
274:   if (!lens) jac->g_lens = NULL;
275:   else {
276:     PetscCall(PetscMalloc1(blocks, &jac->g_lens));
277:     PetscCall(PetscArraycpy(jac->g_lens, lens, blocks));
278:   }
279:   PetscFunctionReturn(PETSC_SUCCESS);
280: }

282: static PetscErrorCode PCBJacobiGetTotalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
283: {
284:   PC_BJacobi *jac = (PC_BJacobi *)pc->data;

286:   PetscFunctionBegin;
287:   *blocks = jac->n;
288:   if (lens) *lens = jac->g_lens;
289:   PetscFunctionReturn(PETSC_SUCCESS);
290: }

292: static PetscErrorCode PCBJacobiSetLocalBlocks_BJacobi(PC pc, PetscInt blocks, const PetscInt lens[])
293: {
294:   PC_BJacobi *jac;

296:   PetscFunctionBegin;
297:   jac = (PC_BJacobi *)pc->data;

299:   jac->n_local = blocks;
300:   if (!lens) jac->l_lens = NULL;
301:   else {
302:     PetscCall(PetscMalloc1(blocks, &jac->l_lens));
303:     PetscCall(PetscArraycpy(jac->l_lens, lens, blocks));
304:   }
305:   PetscFunctionReturn(PETSC_SUCCESS);
306: }

308: static PetscErrorCode PCBJacobiGetLocalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
309: {
310:   PC_BJacobi *jac = (PC_BJacobi *)pc->data;

312:   PetscFunctionBegin;
313:   *blocks = jac->n_local;
314:   if (lens) *lens = jac->l_lens;
315:   PetscFunctionReturn(PETSC_SUCCESS);
316: }

318: /*@C
319:   PCBJacobiGetSubKSP - Gets the local `KSP` contexts for all blocks on
320:   this processor.

322:   Not Collective

324:   Input Parameter:
325: . pc - the preconditioner context

327:   Output Parameters:
328: + n_local     - the number of blocks on this processor, or NULL
329: . first_local - the global number of the first block on this processor, or NULL
330: - ksp         - the array of KSP contexts

332:   Level: advanced

334:   Notes:
 335:   The array of `KSP` contexts returned by `PCBJacobiGetSubKSP()` must not be freed by the caller.

337:   Currently for some matrix implementations only 1 block per processor
338:   is supported.

340:   You must call `KSPSetUp()` or `PCSetUp()` before calling `PCBJacobiGetSubKSP()`.

342:   Fortran Note:
343:   Call `PCBJacobiRestoreSubKSP()` when you no longer need access to the array of `KSP`

345: .seealso: [](ch_ksp), `PCBJACOBI`, `PCASM`, `PCASMGetSubKSP()`
346: @*/
347: PetscErrorCode PCBJacobiGetSubKSP(PC pc, PetscInt *n_local, PetscInt *first_local, KSP *ksp[])
348: {
349:   PetscFunctionBegin;
351:   PetscUseMethod(pc, "PCBJacobiGetSubKSP_C", (PC, PetscInt *, PetscInt *, KSP **), (pc, n_local, first_local, ksp));
352:   PetscFunctionReturn(PETSC_SUCCESS);
353: }
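/*
   A minimal usage sketch (illustrative only, not part of this file): retrieving the
   block solvers after setup and changing their preconditioner type. The outer ksp and
   its PCBJACOBI pc are assumed to exist already.

     KSP     *subksp;
     PC       subpc;
     PetscInt nlocal, first;

     PetscCall(KSPSetUp(ksp));
     PetscCall(PCBJacobiGetSubKSP(pc, &nlocal, &first, &subksp));
     for (PetscInt i = 0; i < nlocal; i++) {
       PetscCall(KSPGetPC(subksp[i], &subpc));
       PetscCall(PCSetType(subpc, PCILU));
     }
*/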

355: /*@
356:   PCBJacobiSetTotalBlocks - Sets the global number of blocks for the block
357:   Jacobi preconditioner.

359:   Collective

361:   Input Parameters:
362: + pc     - the preconditioner context
363: . blocks - the number of blocks
364: - lens   - [optional] integer array containing the size of each block

366:   Options Database Key:
367: . -pc_bjacobi_blocks blocks - Sets the number of global blocks

369:   Level: intermediate

371:   Note:
372:   Currently only a limited number of blocking configurations are supported.
373:   All processors sharing the `PC` must call this routine with the same data.

375: .seealso: [](ch_ksp), `PCBJACOBI`, `PCSetUseAmat()`, `PCBJacobiSetLocalBlocks()`
376: @*/
377: PetscErrorCode PCBJacobiSetTotalBlocks(PC pc, PetscInt blocks, const PetscInt lens[])
378: {
379:   PetscFunctionBegin;
381:   PetscCheck(blocks > 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Must have positive blocks");
382:   PetscTryMethod(pc, "PCBJacobiSetTotalBlocks_C", (PC, PetscInt, const PetscInt[]), (pc, blocks, lens));
383:   PetscFunctionReturn(PETSC_SUCCESS);
384: }
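/*
   A minimal usage sketch (illustrative only): splitting a 12 x 12 system (block size 1)
   into three global blocks of unequal size before KSPSetUp()/PCSetUp(); pc is assumed to
   already be of type PCBJACOBI. The lengths must be multiples of the matrix block size
   and must add up to the global problem size.

     PetscInt lens[3] = {6, 4, 2};

     PetscCall(PCBJacobiSetTotalBlocks(pc, 3, lens));
*/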

386: /*@C
387:   PCBJacobiGetTotalBlocks - Gets the global number of blocks for the block
388:   Jacobi, `PCBJACOBI`, preconditioner.

390:   Not Collective

392:   Input Parameter:
393: . pc - the preconditioner context

395:   Output Parameters:
396: + blocks - the number of blocks
397: - lens   - integer array containing the size of each block

399:   Level: intermediate

401: .seealso: [](ch_ksp), `PCBJACOBI`, `PCSetUseAmat()`, `PCBJacobiGetLocalBlocks()`
402: @*/
403: PetscErrorCode PCBJacobiGetTotalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
404: {
405:   PetscFunctionBegin;
407:   PetscAssertPointer(blocks, 2);
408:   PetscUseMethod(pc, "PCBJacobiGetTotalBlocks_C", (PC, PetscInt *, const PetscInt *[]), (pc, blocks, lens));
409:   PetscFunctionReturn(PETSC_SUCCESS);
410: }

412: /*@
413:   PCBJacobiSetLocalBlocks - Sets the local number of blocks for the block
414:   Jacobi, `PCBJACOBI`,  preconditioner.

416:   Not Collective

418:   Input Parameters:
419: + pc     - the preconditioner context
420: . blocks - the number of blocks
421: - lens   - [optional] integer array containing size of each block

423:   Options Database Key:
424: . -pc_bjacobi_local_blocks blocks - Sets the number of local blocks

426:   Level: intermediate

428:   Note:
429:   Currently only a limited number of blocking configurations are supported.

431: .seealso: [](ch_ksp), `PCBJACOBI`, `PCSetUseAmat()`, `PCBJacobiSetTotalBlocks()`
432: @*/
433: PetscErrorCode PCBJacobiSetLocalBlocks(PC pc, PetscInt blocks, const PetscInt lens[])
434: {
435:   PetscFunctionBegin;
 437:   PetscCheck(blocks >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have nonnegative blocks");
438:   PetscTryMethod(pc, "PCBJacobiSetLocalBlocks_C", (PC, PetscInt, const PetscInt[]), (pc, blocks, lens));
439:   PetscFunctionReturn(PETSC_SUCCESS);
440: }
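/*
   A minimal usage sketch (illustrative only): requesting two blocks on every process
   instead of the default single local block; pc is assumed to already be of type PCBJACOBI.

     PetscCall(PCBJacobiSetLocalBlocks(pc, 2, NULL));

   Passing NULL for the lengths lets PCSetUp_BJacobi() split the local rows evenly
   (respecting the matrix block size).
*/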

442: /*@C
443:   PCBJacobiGetLocalBlocks - Gets the local number of blocks for the block
444:   Jacobi, `PCBJACOBI`, preconditioner.

446:   Not Collective

 448:   Input Parameter:
 449: . pc     - the preconditioner context

   Output Parameters:
 450: + blocks - the number of blocks
 451: - lens   - [optional] integer array containing size of each block

453:   Level: intermediate

455:   Note:
456:   Currently only a limited number of blocking configurations are supported.

458: .seealso: [](ch_ksp), `PCBJACOBI`, `PCSetUseAmat()`, `PCBJacobiGetTotalBlocks()`
459: @*/
460: PetscErrorCode PCBJacobiGetLocalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
461: {
462:   PetscFunctionBegin;
464:   PetscAssertPointer(blocks, 2);
465:   PetscUseMethod(pc, "PCBJacobiGetLocalBlocks_C", (PC, PetscInt *, const PetscInt *[]), (pc, blocks, lens));
466:   PetscFunctionReturn(PETSC_SUCCESS);
467: }

469: /*MC
 470:    PCBJACOBI - Use block Jacobi preconditioning, in which each block is (approximately) solved with its own `KSP` object.

472:    Options Database Keys:
473: +  -pc_use_amat (true|false) - use `Amat` to apply block of operator in inner Krylov method
474: -  -pc_bjacobi_blocks n      - use n total blocks

476:    Level: beginner

478:    Notes:
479:     See `PCJACOBI` for diagonal Jacobi, `PCVPBJACOBI` for variable point block, and `PCPBJACOBI` for fixed size point block

481:     Each processor can have one or more blocks, or a single block can be shared by several processes. Defaults to one block per processor.

 483:      To set options on the solvers for each block, prefix the `KSP` and `PC` options database keys
 484:         with -sub_. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly

 486:      To set the options on the solvers separately for each block call `PCBJacobiGetSubKSP()`
 487:          and set the options directly on the resulting `KSP` objects (you can access each one's `PC`
 488:          with `KSPGetPC()`)

 490:      For GPU-based vectors (`VECCUDA`, `VECViennaCL`) it is recommended to use exactly one block per MPI process for best
 491:          performance.  A different block partitioning may lead to additional data transfers
 492:          between host and GPU that degrade performance.

 494:      When multiple processes share a single block, each block encompasses exactly all the unknowns owned by its set of processes.

496: .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`,
497:           `PCASM`, `PCSetUseAmat()`, `PCGetUseAmat()`, `PCBJacobiGetSubKSP()`, `PCBJacobiSetTotalBlocks()`,
498:           `PCBJacobiSetLocalBlocks()`, `PCSetModifySubMatrices()`, `PCJACOBI`, `PCVPBJACOBI`, `PCPBJACOBI`
499: M*/
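/*
   A minimal usage sketch (illustrative only) combining the two configuration styles
   described in the notes above; ksp is assumed to be an existing KSP for the system.

     PC pc;

     PetscCall(KSPGetPC(ksp, &pc));
     PetscCall(PCSetType(pc, PCBJACOBI));
     PetscCall(PCBJacobiSetTotalBlocks(pc, 4, NULL));
     PetscCall(KSPSetFromOptions(ksp)); // picks up, e.g., -sub_ksp_type preonly -sub_pc_type ilu
*/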

501: PETSC_EXTERN PetscErrorCode PCCreate_BJacobi(PC pc)
502: {
503:   PetscMPIInt rank;
504:   PC_BJacobi *jac;

506:   PetscFunctionBegin;
507:   PetscCall(PetscNew(&jac));
508:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));

510:   pc->ops->apply             = NULL;
511:   pc->ops->matapply          = NULL;
512:   pc->ops->applytranspose    = NULL;
513:   pc->ops->matapplytranspose = NULL;
514:   pc->ops->setup             = PCSetUp_BJacobi;
515:   pc->ops->destroy           = PCDestroy_BJacobi;
516:   pc->ops->setfromoptions    = PCSetFromOptions_BJacobi;
517:   pc->ops->view              = PCView_BJacobi;
518:   pc->ops->applyrichardson   = NULL;

520:   pc->data         = (void *)jac;
521:   jac->n           = -1;
522:   jac->n_local     = -1;
523:   jac->first_local = rank;
524:   jac->ksp         = NULL;
525:   jac->g_lens      = NULL;
526:   jac->l_lens      = NULL;
527:   jac->psubcomm    = NULL;

529:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBJacobiGetSubKSP_C", PCBJacobiGetSubKSP_BJacobi));
530:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBJacobiSetTotalBlocks_C", PCBJacobiSetTotalBlocks_BJacobi));
531:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBJacobiGetTotalBlocks_C", PCBJacobiGetTotalBlocks_BJacobi));
532:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBJacobiSetLocalBlocks_C", PCBJacobiSetLocalBlocks_BJacobi));
533:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBJacobiGetLocalBlocks_C", PCBJacobiGetLocalBlocks_BJacobi));
534:   PetscFunctionReturn(PETSC_SUCCESS);
535: }

537: /*
 538:         These are for a single block per process; they work for AIJ and BAIJ, both Seq and MPI
539: */
540: static PetscErrorCode PCReset_BJacobi_Singleblock(PC pc)
541: {
542:   PC_BJacobi             *jac  = (PC_BJacobi *)pc->data;
543:   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock *)jac->data;

545:   PetscFunctionBegin;
546:   PetscCall(KSPReset(jac->ksp[0]));
547:   PetscCall(VecDestroy(&bjac->x));
548:   PetscCall(VecDestroy(&bjac->y));
549:   PetscFunctionReturn(PETSC_SUCCESS);
550: }

552: static PetscErrorCode PCDestroy_BJacobi_Singleblock(PC pc)
553: {
554:   PC_BJacobi             *jac  = (PC_BJacobi *)pc->data;
555:   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock *)jac->data;

557:   PetscFunctionBegin;
558:   PetscCall(PCReset_BJacobi_Singleblock(pc));
559:   PetscCall(KSPDestroy(&jac->ksp[0]));
560:   PetscCall(PetscFree(jac->ksp));
561:   PetscCall(PetscFree(bjac));
562:   PetscCall(PCDestroy_BJacobi(pc));
563:   PetscFunctionReturn(PETSC_SUCCESS);
564: }

566: static PetscErrorCode PCSetUpOnBlocks_BJacobi_Singleblock(PC pc)
567: {
568:   PC_BJacobi        *jac    = (PC_BJacobi *)pc->data;
569:   KSP                subksp = jac->ksp[0];
570:   KSPConvergedReason reason;

572:   PetscFunctionBegin;
573:   PetscCall(KSPSetUp(subksp));
574:   PetscCall(KSPGetConvergedReason(subksp, &reason));
575:   if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
576:   PetscFunctionReturn(PETSC_SUCCESS);
577: }

579: static PetscErrorCode PCApply_BJacobi_Singleblock(PC pc, Vec x, Vec y)
580: {
581:   PC_BJacobi             *jac  = (PC_BJacobi *)pc->data;
582:   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock *)jac->data;

584:   PetscFunctionBegin;
585:   PetscCall(VecGetLocalVectorRead(x, bjac->x));
586:   PetscCall(VecGetLocalVector(y, bjac->y));
 587:   /* Since the inner KSP matrix may point directly to the diagonal block of an MPI matrix, the inner
 588:      matrix may change even if the outer KSP/PC has not updated the preconditioner. That would trigger an automatic
 589:      rebuild of the inner preconditioner unless we pass down the outer preconditioner's reuse flag. */
590:   PetscCall(KSPSetReusePreconditioner(jac->ksp[0], pc->reusepreconditioner));
591:   PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, jac->ksp[0], bjac->x, bjac->y, 0));
592:   PetscCall(KSPSolve(jac->ksp[0], bjac->x, bjac->y));
593:   PetscCall(KSPCheckSolve(jac->ksp[0], pc, bjac->y));
594:   PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, jac->ksp[0], bjac->x, bjac->y, 0));
595:   PetscCall(VecRestoreLocalVectorRead(x, bjac->x));
596:   PetscCall(VecRestoreLocalVector(y, bjac->y));
597:   PetscFunctionReturn(PETSC_SUCCESS);
598: }

600: static PetscErrorCode PCMatApply_BJacobi_Singleblock_Private(PC pc, Mat X, Mat Y, PetscBool transpose)
601: {
602:   PC_BJacobi *jac = (PC_BJacobi *)pc->data;
603:   Mat         sX, sY;

605:   PetscFunctionBegin;
 606:   /* Since the inner KSP matrix may point directly to the diagonal block of an MPI matrix, the inner
 607:      matrix may change even if the outer KSP/PC has not updated the preconditioner. That would trigger an automatic
 608:      rebuild of the inner preconditioner unless we pass down the outer preconditioner's reuse flag. */
609:   PetscCall(KSPSetReusePreconditioner(jac->ksp[0], pc->reusepreconditioner));
610:   PetscCall(MatDenseGetLocalMatrix(X, &sX));
611:   PetscCall(MatDenseGetLocalMatrix(Y, &sY));
612:   if (!transpose) {
613:     PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, jac->ksp[0], sX, sY, 0));
614:     PetscCall(KSPMatSolve(jac->ksp[0], sX, sY));
615:     PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, jac->ksp[0], sX, sY, 0));
616:   } else {
617:     PetscCall(PetscLogEventBegin(PC_ApplyTransposeOnBlocks, jac->ksp[0], sX, sY, 0));
618:     PetscCall(KSPMatSolveTranspose(jac->ksp[0], sX, sY));
619:     PetscCall(PetscLogEventEnd(PC_ApplyTransposeOnBlocks, jac->ksp[0], sX, sY, 0));
620:   }
621:   PetscFunctionReturn(PETSC_SUCCESS);
622: }

624: static PetscErrorCode PCMatApply_BJacobi_Singleblock(PC pc, Mat X, Mat Y)
625: {
626:   PetscFunctionBegin;
627:   PetscCall(PCMatApply_BJacobi_Singleblock_Private(pc, X, Y, PETSC_FALSE));
628:   PetscFunctionReturn(PETSC_SUCCESS);
629: }

631: static PetscErrorCode PCMatApplyTranspose_BJacobi_Singleblock(PC pc, Mat X, Mat Y)
632: {
633:   PetscFunctionBegin;
634:   PetscCall(PCMatApply_BJacobi_Singleblock_Private(pc, X, Y, PETSC_TRUE));
635:   PetscFunctionReturn(PETSC_SUCCESS);
636: }

638: static PetscErrorCode PCApplySymmetricLeft_BJacobi_Singleblock(PC pc, Vec x, Vec y)
639: {
640:   PC_BJacobi             *jac  = (PC_BJacobi *)pc->data;
641:   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock *)jac->data;
642:   PetscScalar            *y_array;
643:   const PetscScalar      *x_array;
644:   PC                      subpc;

646:   PetscFunctionBegin;
 647:   /*
 648:       The VecPlaceArray() calls avoid having to copy the x and y
 649:     vectors into the bjac->x and bjac->y work vectors. Those work
 650:     vectors exist because we need sequential vectors for the
 651:     sequential solve on the block.
 652:   */
653:   PetscCall(VecGetArrayRead(x, &x_array));
654:   PetscCall(VecGetArray(y, &y_array));
655:   PetscCall(VecPlaceArray(bjac->x, x_array));
656:   PetscCall(VecPlaceArray(bjac->y, y_array));
657:   /* apply the symmetric left portion of the inner PC operator */
658:   /* note this bypasses the inner KSP and its options completely */
659:   PetscCall(KSPGetPC(jac->ksp[0], &subpc));
660:   PetscCall(PCApplySymmetricLeft(subpc, bjac->x, bjac->y));
661:   PetscCall(VecResetArray(bjac->x));
662:   PetscCall(VecResetArray(bjac->y));
663:   PetscCall(VecRestoreArrayRead(x, &x_array));
664:   PetscCall(VecRestoreArray(y, &y_array));
665:   PetscFunctionReturn(PETSC_SUCCESS);
666: }

668: static PetscErrorCode PCApplySymmetricRight_BJacobi_Singleblock(PC pc, Vec x, Vec y)
669: {
670:   PC_BJacobi             *jac  = (PC_BJacobi *)pc->data;
671:   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock *)jac->data;
672:   PetscScalar            *y_array;
673:   const PetscScalar      *x_array;
674:   PC                      subpc;

676:   PetscFunctionBegin;
 677:   /*
 678:       The VecPlaceArray() calls avoid having to copy the x and y
 679:     vectors into the bjac->x and bjac->y work vectors. Those work
 680:     vectors exist because we need sequential vectors for the
 681:     sequential solve on the block.
 682:   */
683:   PetscCall(VecGetArrayRead(x, &x_array));
684:   PetscCall(VecGetArray(y, &y_array));
685:   PetscCall(VecPlaceArray(bjac->x, x_array));
686:   PetscCall(VecPlaceArray(bjac->y, y_array));

688:   /* apply the symmetric right portion of the inner PC operator */
689:   /* note this bypasses the inner KSP and its options completely */

691:   PetscCall(KSPGetPC(jac->ksp[0], &subpc));
692:   PetscCall(PCApplySymmetricRight(subpc, bjac->x, bjac->y));

694:   PetscCall(VecResetArray(bjac->x));
695:   PetscCall(VecResetArray(bjac->y));
696:   PetscCall(VecRestoreArrayRead(x, &x_array));
697:   PetscCall(VecRestoreArray(y, &y_array));
698:   PetscFunctionReturn(PETSC_SUCCESS);
699: }

701: static PetscErrorCode PCApplyTranspose_BJacobi_Singleblock(PC pc, Vec x, Vec y)
702: {
703:   PC_BJacobi             *jac  = (PC_BJacobi *)pc->data;
704:   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock *)jac->data;
705:   PetscScalar            *y_array;
706:   const PetscScalar      *x_array;

708:   PetscFunctionBegin;
 709:   /*
 710:       The VecPlaceArray() calls avoid having to copy the x and y
 711:     vectors into the bjac->x and bjac->y work vectors. Those work
 712:     vectors exist because we need sequential vectors for the
 713:     sequential solve on the block.
 714:   */
715:   PetscCall(VecGetArrayRead(x, &x_array));
716:   PetscCall(VecGetArray(y, &y_array));
717:   PetscCall(VecPlaceArray(bjac->x, x_array));
718:   PetscCall(VecPlaceArray(bjac->y, y_array));
719:   PetscCall(PetscLogEventBegin(PC_ApplyTransposeOnBlocks, jac->ksp[0], bjac->x, bjac->y, 0));
720:   PetscCall(KSPSolveTranspose(jac->ksp[0], bjac->x, bjac->y));
721:   PetscCall(KSPCheckSolve(jac->ksp[0], pc, bjac->y));
722:   PetscCall(PetscLogEventEnd(PC_ApplyTransposeOnBlocks, jac->ksp[0], bjac->x, bjac->y, 0));
723:   PetscCall(VecResetArray(bjac->x));
724:   PetscCall(VecResetArray(bjac->y));
725:   PetscCall(VecRestoreArrayRead(x, &x_array));
726:   PetscCall(VecRestoreArray(y, &y_array));
727:   PetscFunctionReturn(PETSC_SUCCESS);
728: }

730: static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC pc, Mat mat, Mat pmat)
731: {
732:   PC_BJacobi             *jac = (PC_BJacobi *)pc->data;
733:   PetscInt                m;
734:   KSP                     ksp;
735:   PC_BJacobi_Singleblock *bjac;
736:   PetscBool               wasSetup = PETSC_TRUE;
737:   VecType                 vectype;
738:   const char             *prefix;

740:   PetscFunctionBegin;
741:   if (!pc->setupcalled) {
742:     if (!jac->ksp) {
743:       PetscInt nestlevel;

745:       wasSetup = PETSC_FALSE;

747:       PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp));
748:       PetscCall(PCGetKSPNestLevel(pc, &nestlevel));
749:       PetscCall(KSPSetNestLevel(ksp, nestlevel + 1));
750:       PetscCall(KSPSetErrorIfNotConverged(ksp, pc->erroriffailure));
751:       PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp, (PetscObject)pc, 1));
752:       PetscCall(KSPSetType(ksp, KSPPREONLY));
753:       PetscCall(PCGetOptionsPrefix(pc, &prefix));
754:       PetscCall(KSPSetOptionsPrefix(ksp, prefix));
755:       PetscCall(KSPAppendOptionsPrefix(ksp, "sub_"));

757:       pc->ops->reset               = PCReset_BJacobi_Singleblock;
758:       pc->ops->destroy             = PCDestroy_BJacobi_Singleblock;
759:       pc->ops->apply               = PCApply_BJacobi_Singleblock;
760:       pc->ops->matapply            = PCMatApply_BJacobi_Singleblock;
761:       pc->ops->matapplytranspose   = PCMatApplyTranspose_BJacobi_Singleblock;
762:       pc->ops->applysymmetricleft  = PCApplySymmetricLeft_BJacobi_Singleblock;
763:       pc->ops->applysymmetricright = PCApplySymmetricRight_BJacobi_Singleblock;
764:       pc->ops->applytranspose      = PCApplyTranspose_BJacobi_Singleblock;
765:       pc->ops->setuponblocks       = PCSetUpOnBlocks_BJacobi_Singleblock;

767:       PetscCall(PetscMalloc1(1, &jac->ksp));
768:       jac->ksp[0] = ksp;

770:       PetscCall(PetscNew(&bjac));
771:       jac->data = (void *)bjac;
772:     } else {
773:       ksp  = jac->ksp[0];
774:       bjac = (PC_BJacobi_Singleblock *)jac->data;
775:     }

777:     /*
778:       The reason we need to generate these vectors is to serve
779:       as the right-hand side and solution vector for the solve on the
780:       block. We do not need to allocate space for the vectors since
781:       that is provided via VecPlaceArray() just before the call to
782:       KSPSolve() on the block.
783:     */
784:     PetscCall(MatGetSize(pmat, &m, &m));
785:     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, m, NULL, &bjac->x));
786:     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, m, NULL, &bjac->y));
787:     PetscCall(MatGetVecType(pmat, &vectype));
788:     PetscCall(VecSetType(bjac->x, vectype));
789:     PetscCall(VecSetType(bjac->y, vectype));
790:   } else {
791:     ksp  = jac->ksp[0];
792:     bjac = (PC_BJacobi_Singleblock *)jac->data;
793:   }
794:   PetscCall(KSPGetOptionsPrefix(ksp, &prefix));
795:   if (pc->useAmat) {
796:     PetscCall(KSPSetOperators(ksp, mat, pmat));
797:     PetscCall(MatSetOptionsPrefix(mat, prefix));
798:   } else {
799:     PetscCall(KSPSetOperators(ksp, pmat, pmat));
800:   }
801:   PetscCall(MatSetOptionsPrefix(pmat, prefix));
802:   if (!wasSetup && pc->setfromoptionscalled) {
803:     /* If PCSetFromOptions_BJacobi is called later, KSPSetFromOptions will be called at that time. */
804:     PetscCall(KSPSetFromOptions(ksp));
805:   }
806:   PetscFunctionReturn(PETSC_SUCCESS);
807: }

809: static PetscErrorCode PCReset_BJacobi_Multiblock(PC pc)
810: {
811:   PC_BJacobi            *jac  = (PC_BJacobi *)pc->data;
812:   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock *)jac->data;
813:   PetscInt               i;

815:   PetscFunctionBegin;
816:   if (bjac && bjac->pmat) {
817:     PetscCall(MatDestroyMatrices(jac->n_local, &bjac->pmat));
818:     if (pc->useAmat) PetscCall(MatDestroyMatrices(jac->n_local, &bjac->mat));
819:   }

821:   for (i = 0; i < jac->n_local; i++) {
822:     PetscCall(KSPReset(jac->ksp[i]));
823:     if (bjac && bjac->x) {
824:       PetscCall(VecDestroy(&bjac->x[i]));
825:       PetscCall(VecDestroy(&bjac->y[i]));
826:       PetscCall(ISDestroy(&bjac->is[i]));
827:     }
828:   }
829:   PetscCall(PetscFree(jac->l_lens));
830:   PetscCall(PetscFree(jac->g_lens));
831:   PetscFunctionReturn(PETSC_SUCCESS);
832: }

834: static PetscErrorCode PCDestroy_BJacobi_Multiblock(PC pc)
835: {
836:   PC_BJacobi            *jac  = (PC_BJacobi *)pc->data;
837:   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock *)jac->data;
838:   PetscInt               i;

840:   PetscFunctionBegin;
841:   PetscCall(PCReset_BJacobi_Multiblock(pc));
842:   if (bjac) {
843:     PetscCall(PetscFree2(bjac->x, bjac->y));
844:     PetscCall(PetscFree(bjac->starts));
845:     PetscCall(PetscFree(bjac->is));
846:   }
847:   PetscCall(PetscFree(jac->data));
848:   for (i = 0; i < jac->n_local; i++) PetscCall(KSPDestroy(&jac->ksp[i]));
849:   PetscCall(PetscFree(jac->ksp));
850:   PetscCall(PCDestroy_BJacobi(pc));
851:   PetscFunctionReturn(PETSC_SUCCESS);
852: }

854: static PetscErrorCode PCSetUpOnBlocks_BJacobi_Multiblock(PC pc)
855: {
856:   PC_BJacobi        *jac = (PC_BJacobi *)pc->data;
857:   PetscInt           i, n_local = jac->n_local;
858:   KSPConvergedReason reason;

860:   PetscFunctionBegin;
861:   for (i = 0; i < n_local; i++) {
862:     PetscCall(KSPSetUp(jac->ksp[i]));
863:     PetscCall(KSPGetConvergedReason(jac->ksp[i], &reason));
864:     if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
865:   }
866:   PetscFunctionReturn(PETSC_SUCCESS);
867: }

869: static PetscErrorCode PCApply_BJacobi_Multiblock(PC pc, Vec x, Vec y)
870: {
871:   PC_BJacobi            *jac = (PC_BJacobi *)pc->data;
872:   PetscInt               i, n_local = jac->n_local;
873:   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock *)jac->data;
874:   PetscScalar           *yin;
875:   const PetscScalar     *xin;

877:   PetscFunctionBegin;
878:   PetscCall(VecGetArrayRead(x, &xin));
879:   PetscCall(VecGetArray(y, &yin));
880:   for (i = 0; i < n_local; i++) {
881:     /*
882:        To avoid copying the subvector from x into a workspace we instead
883:        make the workspace vector array point to the subpart of the array of
884:        the global vector.
885:     */
886:     PetscCall(VecPlaceArray(bjac->x[i], xin + bjac->starts[i]));
887:     PetscCall(VecPlaceArray(bjac->y[i], yin + bjac->starts[i]));

889:     PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, jac->ksp[i], bjac->x[i], bjac->y[i], 0));
890:     PetscCall(KSPSolve(jac->ksp[i], bjac->x[i], bjac->y[i]));
891:     PetscCall(KSPCheckSolve(jac->ksp[i], pc, bjac->y[i]));
892:     PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, jac->ksp[i], bjac->x[i], bjac->y[i], 0));

894:     PetscCall(VecResetArray(bjac->x[i]));
895:     PetscCall(VecResetArray(bjac->y[i]));
896:   }
897:   PetscCall(VecRestoreArrayRead(x, &xin));
898:   PetscCall(VecRestoreArray(y, &yin));
899:   PetscFunctionReturn(PETSC_SUCCESS);
900: }

902: static PetscErrorCode PCApplySymmetricLeft_BJacobi_Multiblock(PC pc, Vec x, Vec y)
903: {
904:   PC_BJacobi            *jac = (PC_BJacobi *)pc->data;
905:   PetscInt               i, n_local = jac->n_local;
906:   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock *)jac->data;
907:   PetscScalar           *yin;
908:   const PetscScalar     *xin;
909:   PC                     subpc;

911:   PetscFunctionBegin;
912:   PetscCall(VecGetArrayRead(x, &xin));
913:   PetscCall(VecGetArray(y, &yin));
914:   for (i = 0; i < n_local; i++) {
915:     /*
916:        To avoid copying the subvector from x into a workspace we instead
917:        make the workspace vector array point to the subpart of the array of
918:        the global vector.
919:     */
920:     PetscCall(VecPlaceArray(bjac->x[i], xin + bjac->starts[i]));
921:     PetscCall(VecPlaceArray(bjac->y[i], yin + bjac->starts[i]));

923:     PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, jac->ksp[i], bjac->x[i], bjac->y[i], 0));
924:     /* apply the symmetric left portion of the inner PC operator */
925:     /* note this bypasses the inner KSP and its options completely */
926:     PetscCall(KSPGetPC(jac->ksp[i], &subpc));
927:     PetscCall(PCApplySymmetricLeft(subpc, bjac->x[i], bjac->y[i]));
928:     PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, jac->ksp[i], bjac->x[i], bjac->y[i], 0));

930:     PetscCall(VecResetArray(bjac->x[i]));
931:     PetscCall(VecResetArray(bjac->y[i]));
932:   }
933:   PetscCall(VecRestoreArrayRead(x, &xin));
934:   PetscCall(VecRestoreArray(y, &yin));
935:   PetscFunctionReturn(PETSC_SUCCESS);
936: }

938: static PetscErrorCode PCApplySymmetricRight_BJacobi_Multiblock(PC pc, Vec x, Vec y)
939: {
940:   PC_BJacobi            *jac = (PC_BJacobi *)pc->data;
941:   PetscInt               i, n_local = jac->n_local;
942:   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock *)jac->data;
943:   PetscScalar           *yin;
944:   const PetscScalar     *xin;
945:   PC                     subpc;

947:   PetscFunctionBegin;
948:   PetscCall(VecGetArrayRead(x, &xin));
949:   PetscCall(VecGetArray(y, &yin));
950:   for (i = 0; i < n_local; i++) {
951:     /*
952:        To avoid copying the subvector from x into a workspace we instead
953:        make the workspace vector array point to the subpart of the array of
954:        the global vector.
955:     */
956:     PetscCall(VecPlaceArray(bjac->x[i], xin + bjac->starts[i]));
957:     PetscCall(VecPlaceArray(bjac->y[i], yin + bjac->starts[i]));

959:     PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, jac->ksp[i], bjac->x[i], bjac->y[i], 0));
 960:     /* apply the symmetric right portion of the inner PC operator */
961:     /* note this bypasses the inner KSP and its options completely */
962:     PetscCall(KSPGetPC(jac->ksp[i], &subpc));
963:     PetscCall(PCApplySymmetricRight(subpc, bjac->x[i], bjac->y[i]));
964:     PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, jac->ksp[i], bjac->x[i], bjac->y[i], 0));

966:     PetscCall(VecResetArray(bjac->x[i]));
967:     PetscCall(VecResetArray(bjac->y[i]));
968:   }
969:   PetscCall(VecRestoreArrayRead(x, &xin));
970:   PetscCall(VecRestoreArray(y, &yin));
971:   PetscFunctionReturn(PETSC_SUCCESS);
972: }

974: static PetscErrorCode PCApplyTranspose_BJacobi_Multiblock(PC pc, Vec x, Vec y)
975: {
976:   PC_BJacobi            *jac = (PC_BJacobi *)pc->data;
977:   PetscInt               i, n_local = jac->n_local;
978:   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock *)jac->data;
979:   PetscScalar           *yin;
980:   const PetscScalar     *xin;

982:   PetscFunctionBegin;
983:   PetscCall(VecGetArrayRead(x, &xin));
984:   PetscCall(VecGetArray(y, &yin));
985:   for (i = 0; i < n_local; i++) {
986:     /*
987:        To avoid copying the subvector from x into a workspace we instead
988:        make the workspace vector array point to the subpart of the array of
989:        the global vector.
990:     */
991:     PetscCall(VecPlaceArray(bjac->x[i], xin + bjac->starts[i]));
992:     PetscCall(VecPlaceArray(bjac->y[i], yin + bjac->starts[i]));

994:     PetscCall(PetscLogEventBegin(PC_ApplyTransposeOnBlocks, jac->ksp[i], bjac->x[i], bjac->y[i], 0));
995:     PetscCall(KSPSolveTranspose(jac->ksp[i], bjac->x[i], bjac->y[i]));
996:     PetscCall(KSPCheckSolve(jac->ksp[i], pc, bjac->y[i]));
997:     PetscCall(PetscLogEventEnd(PC_ApplyTransposeOnBlocks, jac->ksp[i], bjac->x[i], bjac->y[i], 0));

999:     PetscCall(VecResetArray(bjac->x[i]));
1000:     PetscCall(VecResetArray(bjac->y[i]));
1001:   }
1002:   PetscCall(VecRestoreArrayRead(x, &xin));
1003:   PetscCall(VecRestoreArray(y, &yin));
1004:   PetscFunctionReturn(PETSC_SUCCESS);
1005: }

1007: static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC pc, Mat mat, Mat pmat)
1008: {
1009:   PC_BJacobi            *jac = (PC_BJacobi *)pc->data;
1010:   PetscInt               m, n_local, N, M, start, i;
1011:   const char            *prefix;
1012:   KSP                    ksp;
1013:   Vec                    x, y;
1014:   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock *)jac->data;
1015:   PC                     subpc;
1016:   IS                     is;
1017:   MatReuse               scall;
1018:   VecType                vectype;
1019:   MatNullSpace          *nullsp_mat = NULL, *nullsp_pmat = NULL;

1021:   PetscFunctionBegin;
1022:   PetscCall(MatGetLocalSize(pc->pmat, &M, &N));

1024:   n_local = jac->n_local;

1026:   if (pc->useAmat) {
1027:     PetscBool same;
1028:     PetscCall(PetscObjectTypeCompare((PetscObject)mat, ((PetscObject)pmat)->type_name, &same));
1029:     PetscCheck(same, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Matrices not of same type");
1030:   }

1032:   if (!pc->setupcalled) {
1033:     PetscInt nestlevel;

1035:     scall = MAT_INITIAL_MATRIX;

1037:     if (!jac->ksp) {
1038:       pc->ops->reset               = PCReset_BJacobi_Multiblock;
1039:       pc->ops->destroy             = PCDestroy_BJacobi_Multiblock;
1040:       pc->ops->apply               = PCApply_BJacobi_Multiblock;
1041:       pc->ops->matapply            = NULL;
1042:       pc->ops->matapplytranspose   = NULL;
1043:       pc->ops->applysymmetricleft  = PCApplySymmetricLeft_BJacobi_Multiblock;
1044:       pc->ops->applysymmetricright = PCApplySymmetricRight_BJacobi_Multiblock;
1045:       pc->ops->applytranspose      = PCApplyTranspose_BJacobi_Multiblock;
1046:       pc->ops->setuponblocks       = PCSetUpOnBlocks_BJacobi_Multiblock;

1048:       PetscCall(PetscNew(&bjac));
1049:       PetscCall(PetscMalloc1(n_local, &jac->ksp));
1050:       PetscCall(PetscMalloc2(n_local, &bjac->x, n_local, &bjac->y));
1051:       PetscCall(PetscMalloc1(n_local, &bjac->starts));

1053:       jac->data = (void *)bjac;
1054:       PetscCall(PetscMalloc1(n_local, &bjac->is));

1056:       for (i = 0; i < n_local; i++) {
1057:         PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp));
1058:         PetscCall(PCGetKSPNestLevel(pc, &nestlevel));
1059:         PetscCall(KSPSetNestLevel(ksp, nestlevel + 1));
1060:         PetscCall(KSPSetErrorIfNotConverged(ksp, pc->erroriffailure));
1061:         PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp, (PetscObject)pc, 1));
1062:         PetscCall(KSPSetType(ksp, KSPPREONLY));
1063:         PetscCall(KSPGetPC(ksp, &subpc));
1064:         PetscCall(PCGetOptionsPrefix(pc, &prefix));
1065:         PetscCall(KSPSetOptionsPrefix(ksp, prefix));
1066:         PetscCall(KSPAppendOptionsPrefix(ksp, "sub_"));

1068:         jac->ksp[i] = ksp;
1069:       }
1070:     } else {
1071:       bjac = (PC_BJacobi_Multiblock *)jac->data;
1072:     }

1074:     start = 0;
1075:     PetscCall(MatGetVecType(pmat, &vectype));
1076:     for (i = 0; i < n_local; i++) {
1077:       m = jac->l_lens[i];
1078:       /*
1079:       The reason we need to generate these vectors is to serve
1080:       as the right-hand side and solution vector for the solve on the
1081:       block. We do not need to allocate space for the vectors since
1082:       that is provided via VecPlaceArray() just before the call to
 1083:       KSPSolve() on the block.
 1085:       */
1086:       PetscCall(VecCreateSeq(PETSC_COMM_SELF, m, &x));
1087:       PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, m, NULL, &y));
1088:       PetscCall(VecSetType(x, vectype));
1089:       PetscCall(VecSetType(y, vectype));

1091:       bjac->x[i]      = x;
1092:       bjac->y[i]      = y;
1093:       bjac->starts[i] = start;

1095:       PetscCall(ISCreateStride(PETSC_COMM_SELF, m, start, 1, &is));
1096:       bjac->is[i] = is;

1098:       start += m;
1099:     }
1100:   } else {
1101:     bjac = (PC_BJacobi_Multiblock *)jac->data;
1102:     /*
1103:        Destroy the blocks from the previous iteration
1104:     */
1105:     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
1106:       PetscCall(MatGetNullSpaces(n_local, bjac->pmat, &nullsp_pmat));
1107:       PetscCall(MatDestroyMatrices(n_local, &bjac->pmat));
1108:       if (pc->useAmat) {
1109:         PetscCall(MatGetNullSpaces(n_local, bjac->mat, &nullsp_mat));
1110:         PetscCall(MatDestroyMatrices(n_local, &bjac->mat));
1111:       }
1112:       scall = MAT_INITIAL_MATRIX;
1113:     } else scall = MAT_REUSE_MATRIX;
1114:   }

1116:   PetscCall(MatCreateSubMatrices(pmat, n_local, bjac->is, bjac->is, scall, &bjac->pmat));
1117:   if (nullsp_pmat) PetscCall(MatRestoreNullSpaces(n_local, bjac->pmat, &nullsp_pmat));
1118:   if (pc->useAmat) {
1119:     PetscCall(MatCreateSubMatrices(mat, n_local, bjac->is, bjac->is, scall, &bjac->mat));
1120:     if (nullsp_mat) PetscCall(MatRestoreNullSpaces(n_local, bjac->mat, &nullsp_mat));
1121:   }
1122:   /* Return control to the user so that the submatrices can be modified (e.g., to apply
1123:      different boundary conditions for the submatrices than for the global problem) */
1124:   PetscCall(PCModifySubMatrices(pc, n_local, bjac->is, bjac->is, bjac->pmat, pc->modifysubmatricesP));

1126:   for (i = 0; i < n_local; i++) {
1127:     PetscCall(KSPGetOptionsPrefix(jac->ksp[i], &prefix));
1128:     if (pc->useAmat) {
1129:       PetscCall(KSPSetOperators(jac->ksp[i], bjac->mat[i], bjac->pmat[i]));
1130:       PetscCall(MatSetOptionsPrefix(bjac->mat[i], prefix));
1131:     } else {
1132:       PetscCall(KSPSetOperators(jac->ksp[i], bjac->pmat[i], bjac->pmat[i]));
1133:     }
1134:     PetscCall(MatSetOptionsPrefix(bjac->pmat[i], prefix));
1135:     if (pc->setfromoptionscalled) PetscCall(KSPSetFromOptions(jac->ksp[i]));
1136:   }
1137:   PetscFunctionReturn(PETSC_SUCCESS);
1138: }

1140: /*
1141:       These are for a single block with multiple processes
1142: */
1143: static PetscErrorCode PCSetUpOnBlocks_BJacobi_Multiproc(PC pc)
1144: {
1145:   PC_BJacobi        *jac    = (PC_BJacobi *)pc->data;
1146:   KSP                subksp = jac->ksp[0];
1147:   KSPConvergedReason reason;

1149:   PetscFunctionBegin;
1150:   PetscCall(KSPSetUp(subksp));
1151:   PetscCall(KSPGetConvergedReason(subksp, &reason));
1152:   if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
1153:   PetscFunctionReturn(PETSC_SUCCESS);
1154: }

1156: static PetscErrorCode PCReset_BJacobi_Multiproc(PC pc)
1157: {
1158:   PC_BJacobi           *jac   = (PC_BJacobi *)pc->data;
1159:   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc *)jac->data;

1161:   PetscFunctionBegin;
1162:   PetscCall(VecDestroy(&mpjac->ysub));
1163:   PetscCall(VecDestroy(&mpjac->xsub));
1164:   PetscCall(MatDestroy(&mpjac->submats));
1165:   if (jac->ksp) PetscCall(KSPReset(jac->ksp[0]));
1166:   PetscFunctionReturn(PETSC_SUCCESS);
1167: }

1169: static PetscErrorCode PCDestroy_BJacobi_Multiproc(PC pc)
1170: {
1171:   PC_BJacobi           *jac   = (PC_BJacobi *)pc->data;
1172:   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc *)jac->data;

1174:   PetscFunctionBegin;
1175:   PetscCall(PCReset_BJacobi_Multiproc(pc));
1176:   PetscCall(KSPDestroy(&jac->ksp[0]));
1177:   PetscCall(PetscFree(jac->ksp));
1178:   PetscCall(PetscSubcommDestroy(&mpjac->psubcomm));

1180:   PetscCall(PetscFree(mpjac));
1181:   PetscCall(PCDestroy_BJacobi(pc));
1182:   PetscFunctionReturn(PETSC_SUCCESS);
1183: }

1185: static PetscErrorCode PCApply_BJacobi_Multiproc(PC pc, Vec x, Vec y)
1186: {
1187:   PC_BJacobi           *jac   = (PC_BJacobi *)pc->data;
1188:   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc *)jac->data;
1189:   PetscScalar          *yarray;
1190:   const PetscScalar    *xarray;
1191:   KSPConvergedReason    reason;

1193:   PetscFunctionBegin;
1194:   /* place x's and y's local arrays into xsub and ysub */
1195:   PetscCall(VecGetArrayRead(x, &xarray));
1196:   PetscCall(VecGetArray(y, &yarray));
1197:   PetscCall(VecPlaceArray(mpjac->xsub, xarray));
1198:   PetscCall(VecPlaceArray(mpjac->ysub, yarray));

1200:   /* apply preconditioner on each matrix block */
1201:   PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, jac->ksp[0], mpjac->xsub, mpjac->ysub, 0));
1202:   PetscCall(KSPSolve(jac->ksp[0], mpjac->xsub, mpjac->ysub));
1203:   PetscCall(KSPCheckSolve(jac->ksp[0], pc, mpjac->ysub));
1204:   PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, jac->ksp[0], mpjac->xsub, mpjac->ysub, 0));
1205:   PetscCall(KSPGetConvergedReason(jac->ksp[0], &reason));
1206:   if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;

1208:   PetscCall(VecResetArray(mpjac->xsub));
1209:   PetscCall(VecResetArray(mpjac->ysub));
1210:   PetscCall(VecRestoreArrayRead(x, &xarray));
1211:   PetscCall(VecRestoreArray(y, &yarray));
1212:   PetscFunctionReturn(PETSC_SUCCESS);
1213: }

1215: static PetscErrorCode PCMatApply_BJacobi_Multiproc(PC pc, Mat X, Mat Y)
1216: {
1217:   PC_BJacobi        *jac = (PC_BJacobi *)pc->data;
1218:   KSPConvergedReason reason;
1219:   Mat                sX, sY;
1220:   const PetscScalar *x;
1221:   PetscScalar       *y;
1222:   PetscInt           m, N, lda, ldb;

1224:   PetscFunctionBegin;
1225:   /* apply preconditioner on each matrix block */
1226:   PetscCall(MatGetLocalSize(X, &m, NULL));
1227:   PetscCall(MatGetSize(X, NULL, &N));
1228:   PetscCall(MatDenseGetLDA(X, &lda));
1229:   PetscCall(MatDenseGetLDA(Y, &ldb));
1230:   PetscCall(MatDenseGetArrayRead(X, &x));
1231:   PetscCall(MatDenseGetArrayWrite(Y, &y));
1232:   PetscCall(MatCreateDense(PetscObjectComm((PetscObject)jac->ksp[0]), m, PETSC_DECIDE, PETSC_DECIDE, N, (PetscScalar *)x, &sX));
1233:   PetscCall(MatCreateDense(PetscObjectComm((PetscObject)jac->ksp[0]), m, PETSC_DECIDE, PETSC_DECIDE, N, y, &sY));
1234:   PetscCall(MatDenseSetLDA(sX, lda));
1235:   PetscCall(MatDenseSetLDA(sY, ldb));
1236:   PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, jac->ksp[0], X, Y, 0));
1237:   PetscCall(KSPMatSolve(jac->ksp[0], sX, sY));
1238:   PetscCall(KSPCheckSolve(jac->ksp[0], pc, NULL));
1239:   PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, jac->ksp[0], X, Y, 0));
1240:   PetscCall(MatDestroy(&sY));
1241:   PetscCall(MatDestroy(&sX));
1242:   PetscCall(MatDenseRestoreArrayWrite(Y, &y));
1243:   PetscCall(MatDenseRestoreArrayRead(X, &x));
1244:   PetscCall(KSPGetConvergedReason(jac->ksp[0], &reason));
1245:   if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
1246:   PetscFunctionReturn(PETSC_SUCCESS);
1247: }

1249: static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC pc)
1250: {
1251:   PC_BJacobi           *jac   = (PC_BJacobi *)pc->data;
1252:   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc *)jac->data;
1253:   PetscInt              m, n;
1254:   MPI_Comm              comm, subcomm = 0;
1255:   const char           *prefix;
1256:   PetscBool             wasSetup = PETSC_TRUE;
1257:   VecType               vectype;

1259:   PetscFunctionBegin;
1260:   PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
1261:   PetscCheck(jac->n_local <= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Only a single block in a subcommunicator is supported");
1262:   jac->n_local = 1; /* currently only a single block is supported for a subcommunicator */
1263:   if (!pc->setupcalled) {
1264:     PetscInt nestlevel;

1266:     wasSetup = PETSC_FALSE;
1267:     PetscCall(PetscNew(&mpjac));
1268:     jac->data = (void *)mpjac;

 1270:     /* initialize data structure mpjac */
1271:     if (!jac->psubcomm) {
 1272:       /* Create default contiguous subcommunicators if the user does not provide them */
1273:       PetscCall(PetscSubcommCreate(comm, &jac->psubcomm));
1274:       PetscCall(PetscSubcommSetNumber(jac->psubcomm, jac->n));
1275:       PetscCall(PetscSubcommSetType(jac->psubcomm, PETSC_SUBCOMM_CONTIGUOUS));
1276:     }
1277:     mpjac->psubcomm = jac->psubcomm;
1278:     subcomm         = PetscSubcommChild(mpjac->psubcomm);

1280:     /* Get matrix blocks of pmat */
1281:     PetscCall(MatGetMultiProcBlock(pc->pmat, subcomm, MAT_INITIAL_MATRIX, &mpjac->submats));

 1283:     /* create a new KSP (and its PC) that the processes in each subcomm share */
1284:     PetscCall(PetscMalloc1(1, &jac->ksp));
1285:     PetscCall(KSPCreate(subcomm, &jac->ksp[0]));
1286:     PetscCall(PCGetKSPNestLevel(pc, &nestlevel));
1287:     PetscCall(KSPSetNestLevel(jac->ksp[0], nestlevel + 1));
1288:     PetscCall(KSPSetErrorIfNotConverged(jac->ksp[0], pc->erroriffailure));
1289:     PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->ksp[0], (PetscObject)pc, 1));
1290:     PetscCall(KSPSetOperators(jac->ksp[0], mpjac->submats, mpjac->submats));
1291:     PetscCall(KSPGetPC(jac->ksp[0], &mpjac->pc));

1293:     PetscCall(PCGetOptionsPrefix(pc, &prefix));
1294:     PetscCall(KSPSetOptionsPrefix(jac->ksp[0], prefix));
1295:     PetscCall(KSPAppendOptionsPrefix(jac->ksp[0], "sub_"));
1296:     PetscCall(KSPGetOptionsPrefix(jac->ksp[0], &prefix));
1297:     PetscCall(MatSetOptionsPrefix(mpjac->submats, prefix));

1299:     PetscCall(MatGetLocalSize(mpjac->submats, &m, &n));
1300:     PetscCall(VecCreateMPIWithArray(subcomm, 1, n, PETSC_DECIDE, NULL, &mpjac->xsub));
1301:     PetscCall(VecCreateMPIWithArray(subcomm, 1, m, PETSC_DECIDE, NULL, &mpjac->ysub));
1302:     PetscCall(MatGetVecType(mpjac->submats, &vectype));
1303:     PetscCall(VecSetType(mpjac->xsub, vectype));
1304:     PetscCall(VecSetType(mpjac->ysub, vectype));

1306:     pc->ops->setuponblocks = PCSetUpOnBlocks_BJacobi_Multiproc;
1307:     pc->ops->reset         = PCReset_BJacobi_Multiproc;
1308:     pc->ops->destroy       = PCDestroy_BJacobi_Multiproc;
1309:     pc->ops->apply         = PCApply_BJacobi_Multiproc;
1310:     pc->ops->matapply      = PCMatApply_BJacobi_Multiproc;
1311:   } else { /* pc->setupcalled */
1312:     subcomm = PetscSubcommChild(mpjac->psubcomm);
1313:     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
1314:       /* destroy old matrix blocks, then get new matrix blocks */
1315:       PetscCall(MatDestroy(&mpjac->submats));
1316:       PetscCall(MatGetMultiProcBlock(pc->pmat, subcomm, MAT_INITIAL_MATRIX, &mpjac->submats));
1317:     } else {
1318:       PetscCall(MatGetMultiProcBlock(pc->pmat, subcomm, MAT_REUSE_MATRIX, &mpjac->submats));
1319:     }
1320:     PetscCall(KSPSetOperators(jac->ksp[0], mpjac->submats, mpjac->submats));
1321:   }

1323:   if (!wasSetup && pc->setfromoptionscalled) PetscCall(KSPSetFromOptions(jac->ksp[0]));
1324:   PetscFunctionReturn(PETSC_SUCCESS);
1325: }