Actual source code: redundant.c
1: /*
2: This file defines a "solve the problem redundantly on each subgroup of processor" preconditioner.
3: */
4: #include <petsc/private/pcimpl.h>
5: #include <petscksp.h>
7: typedef struct {
8: KSP ksp;
9: PC pc; /* actual preconditioner used on each processor */
10: Vec xsub, ysub; /* vectors of a subcommunicator to hold parallel vectors of PetscObjectComm((PetscObject)pc) */
11: Vec xdup, ydup; /* parallel vector that congregates xsub or ysub facilitating vector scattering */
12: Mat pmats; /* matrices belong to a subcommunicator */
13: VecScatter scatterin, scatterout; /* scatter used to move all values to each processor group (subcommunicator) */
14: PetscBool useparallelmat;
15: PetscSubcomm psubcomm;
16: PetscInt nsubcomm; /* num of data structure PetscSubcomm */
17: PetscBool shifttypeset;
18: MatFactorShiftType shifttype;
19: } PC_Redundant;

static PetscErrorCode PCFactorSetShiftType_Redundant(PC pc, MatFactorShiftType shifttype)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  if (red->ksp) {
    PC pc;

    PetscCall(KSPGetPC(red->ksp, &pc));
    PetscCall(PCFactorSetShiftType(pc, shifttype));
  } else {
    red->shifttypeset = PETSC_TRUE;
    red->shifttype    = shifttype;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCView_Redundant(PC pc, PetscViewer viewer)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscBool     isascii, isstring;
  PetscViewer   subviewer;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
  if (isascii) {
    if (!red->psubcomm) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " Not yet setup\n"));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, " First (color=0) of %" PetscInt_FMT " PCs follows\n", red->nsubcomm));
      PetscCall(PetscViewerGetSubViewer(viewer, ((PetscObject)red->pc)->comm, &subviewer));
      if (!red->psubcomm->color) { /* only view first redundant pc */
        PetscCall(PetscViewerASCIIPushTab(subviewer));
        PetscCall(KSPView(red->ksp, subviewer));
        PetscCall(PetscViewerASCIIPopTab(subviewer));
      }
      PetscCall(PetscViewerRestoreSubViewer(viewer, ((PetscObject)red->pc)->comm, &subviewer));
    }
  } else if (isstring) {
    PetscCall(PetscViewerStringSPrintf(viewer, " Redundant solver preconditioner"));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

#include <../src/mat/impls/aij/mpi/mpiaij.h>
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscInt      mstart, mend, mlocal, M;
  PetscMPIInt   size;
  MPI_Comm      comm, subcomm;
  Vec           x;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));

  /* if pmatrix set by user is sequential then we do not need to gather the parallel matrix */
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) red->useparallelmat = PETSC_FALSE;

  if (!pc->setupcalled) {
    PetscInt mloc_sub;
    if (!red->psubcomm) { /* create red->psubcomm, new ksp and pc over subcomm */
      KSP ksp;
      PetscCall(PCRedundantGetKSP(pc, &ksp));
    }
    subcomm = PetscSubcommChild(red->psubcomm);

    if (red->useparallelmat) {
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      PetscCall(MatCreateRedundantMatrix(pc->pmat, red->psubcomm->n, subcomm, MAT_INITIAL_MATRIX, &red->pmats));

      PetscCallMPI(MPI_Comm_size(subcomm, &size));
      if (size > 1) {
        PetscBool foundpack, issbaij;

        PetscCall(PetscObjectTypeCompare((PetscObject)red->pmats, MATMPISBAIJ, &issbaij));
        if (!issbaij) {
          PetscCall(MatGetFactorAvailable(red->pmats, NULL, MAT_FACTOR_LU, &foundpack));
        } else {
          PetscCall(MatGetFactorAvailable(red->pmats, NULL, MAT_FACTOR_CHOLESKY, &foundpack));
        }
        if (!foundpack) { /* reset default ksp and pc */
          PetscCall(KSPSetType(red->ksp, KSPGMRES));
          PetscCall(PCSetType(red->pc, PCBJACOBI));
        } else {
          PetscCall(PCFactorSetMatSolverType(red->pc, NULL));
        }
      }

      PetscCall(KSPSetOperators(red->ksp, red->pmats, red->pmats));

      /* get working vectors xsub and ysub */
      PetscCall(MatCreateVecs(red->pmats, &red->xsub, &red->ysub));

      /* create working vectors xdup and ydup.
         xdup concatenates all the xsub's contiguously to form an MPI vector over dupcomm (see PetscSubcommCreate_interlaced()).
         ydup concatenates all the ysub's and has an empty local array because the arrays of the ysub's will be placed into it.
         Note: we use the communicator dupcomm, not PetscObjectComm((PetscObject)pc)! */
      PetscCall(MatGetLocalSize(red->pmats, &mloc_sub, NULL));
      PetscCall(VecCreateMPI(PetscSubcommContiguousParent(red->psubcomm), mloc_sub, PETSC_DECIDE, &red->xdup));
      PetscCall(VecCreateMPIWithArray(PetscSubcommContiguousParent(red->psubcomm), 1, mloc_sub, PETSC_DECIDE, NULL, &red->ydup));

      /* create vecscatters */
      if (!red->scatterin) { /* the efficiency of scatterin is independent of the psubcomm_type! */
        IS        is1, is2;
        PetscInt *idx1, *idx2, i, j, k;

        PetscCall(MatCreateVecs(pc->pmat, &x, NULL));
        PetscCall(VecGetSize(x, &M));
        PetscCall(VecGetOwnershipRange(x, &mstart, &mend));
        mlocal = mend - mstart;
        PetscCall(PetscMalloc2(red->psubcomm->n * mlocal, &idx1, red->psubcomm->n * mlocal, &idx2));
        j = 0;
        for (k = 0; k < red->psubcomm->n; k++) {
          for (i = mstart; i < mend; i++) {
            idx1[j]   = i;
            idx2[j++] = i + M * k;
          }
        }
        PetscCall(ISCreateGeneral(comm, red->psubcomm->n * mlocal, idx1, PETSC_COPY_VALUES, &is1));
        PetscCall(ISCreateGeneral(comm, red->psubcomm->n * mlocal, idx2, PETSC_COPY_VALUES, &is2));
        PetscCall(VecScatterCreate(x, is1, red->xdup, is2, &red->scatterin));
        PetscCall(ISDestroy(&is1));
        PetscCall(ISDestroy(&is2));

        /* the implementation below is good for PETSC_SUBCOMM_INTERLACED (no inter-process communication) and PETSC_SUBCOMM_CONTIGUOUS (communication within each subcomm) */
        PetscCall(ISCreateStride(comm, mlocal, mstart + red->psubcomm->color * M, 1, &is1));
        PetscCall(ISCreateStride(comm, mlocal, mstart, 1, &is2));
        PetscCall(VecScatterCreate(red->xdup, is1, x, is2, &red->scatterout));
        PetscCall(ISDestroy(&is1));
        PetscCall(ISDestroy(&is2));
        PetscCall(PetscFree2(idx1, idx2));
        PetscCall(VecDestroy(&x));
      }
    } else { /* !red->useparallelmat */
      PetscCall(KSPSetOperators(red->ksp, pc->mat, pc->pmat));
    }
  } else { /* pc->setupcalled */
    if (red->useparallelmat) {
      MatReuse reuse;
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        /* destroy old matrices */
        PetscCall(MatDestroy(&red->pmats));
        reuse = MAT_INITIAL_MATRIX;
      } else {
        reuse = MAT_REUSE_MATRIX;
      }
      PetscCall(MatCreateRedundantMatrix(pc->pmat, red->psubcomm->n, PetscSubcommChild(red->psubcomm), reuse, &red->pmats));
      PetscCall(KSPSetOperators(red->ksp, red->pmats, red->pmats));
    } else { /* !red->useparallelmat */
      PetscCall(KSPSetOperators(red->ksp, pc->mat, pc->pmat));
    }
  }

  if (pc->setfromoptionscalled) PetscCall(KSPSetFromOptions(red->ksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCApply_Redundant(PC pc, Vec x, Vec y)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscScalar  *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    PetscCall(KSPSolve(red->ksp, x, y));
    PetscCall(KSPCheckSolve(red->ksp, pc, y));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* scatter x to xdup */
  PetscCall(VecScatterBegin(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));

  /* place xdup's local array into xsub */
  PetscCall(VecGetArray(red->xdup, &array));
  PetscCall(VecPlaceArray(red->xsub, (const PetscScalar *)array));

  /* apply preconditioner on each processor */
  PetscCall(KSPSolve(red->ksp, red->xsub, red->ysub));
  PetscCall(KSPCheckSolve(red->ksp, pc, red->ysub));
  PetscCall(VecResetArray(red->xsub));
  PetscCall(VecRestoreArray(red->xdup, &array));

  /* place ysub's local array into ydup */
  PetscCall(VecGetArray(red->ysub, &array));
  PetscCall(VecPlaceArray(red->ydup, (const PetscScalar *)array));

  /* scatter ydup to y */
  PetscCall(VecScatterBegin(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecResetArray(red->ydup));
  PetscCall(VecRestoreArray(red->ysub, &array));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCApplyTranspose_Redundant(PC pc, Vec x, Vec y)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscScalar  *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    PetscCall(KSPSolveTranspose(red->ksp, x, y));
    PetscCall(KSPCheckSolve(red->ksp, pc, y));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* scatter x to xdup */
  PetscCall(VecScatterBegin(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));

  /* place xdup's local array into xsub */
  PetscCall(VecGetArray(red->xdup, &array));
  PetscCall(VecPlaceArray(red->xsub, (const PetscScalar *)array));

  /* apply the transposed preconditioner on each processor */
  PetscCall(KSPSolveTranspose(red->ksp, red->xsub, red->ysub));
  PetscCall(KSPCheckSolve(red->ksp, pc, red->ysub));
  PetscCall(VecResetArray(red->xsub));
  PetscCall(VecRestoreArray(red->xdup, &array));

  /* place ysub's local array into ydup */
  PetscCall(VecGetArray(red->ysub, &array));
  PetscCall(VecPlaceArray(red->ydup, (const PetscScalar *)array));

  /* scatter ydup to y */
  PetscCall(VecScatterBegin(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecResetArray(red->ydup));
  PetscCall(VecRestoreArray(red->ysub, &array));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCReset_Redundant(PC pc)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  if (red->useparallelmat) {
    PetscCall(VecScatterDestroy(&red->scatterin));
    PetscCall(VecScatterDestroy(&red->scatterout));
    PetscCall(VecDestroy(&red->ysub));
    PetscCall(VecDestroy(&red->xsub));
    PetscCall(VecDestroy(&red->xdup));
    PetscCall(VecDestroy(&red->ydup));
  }
  PetscCall(MatDestroy(&red->pmats));
  PetscCall(KSPReset(red->ksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  PetscCall(PCReset_Redundant(pc));
  PetscCall(KSPDestroy(&red->ksp));
  PetscCall(PetscSubcommDestroy(&red->psubcomm));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetScatter_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetNumber_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetOperators_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFactorSetShiftType_C", NULL));
  PetscCall(PetscFree(pc->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCSetFromOptions_Redundant(PC pc, PetscOptionItems PetscOptionsObject)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "Redundant options");
  PetscCall(PetscOptionsInt("-pc_redundant_number", "Number of redundant pc", "PCRedundantSetNumber", red->nsubcomm, &red->nsubcomm, NULL));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCRedundantSetNumber_Redundant(PC pc, PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

  Logically Collective

  Input Parameters:
+ pc         - the preconditioner context
- nredundant - number of redundant preconditioner contexts; for example, if you are running on 64 MPI processes and
               nredundant is 4, there will be 4 parallel solves, each on 16 = 64/4 processes

  Level: advanced

.seealso: [](ch_ksp), `PCREDUNDANT`
@*/
PetscErrorCode PCRedundantSetNumber(PC pc, PetscInt nredundant)
{
  PetscFunctionBegin;
  PetscCheck(nredundant > 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "num of redundant pc %" PetscInt_FMT " must be positive", nredundant);
  PetscTryMethod(pc, "PCRedundantSetNumber_C", (PC, PetscInt), (pc, nredundant));
  PetscFunctionReturn(PETSC_SUCCESS);
}
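
/*
  Editorial note: the snippet below is an illustrative sketch, not part of the original file. It shows a
  typical way PCRedundantSetNumber() might be called, assuming a KSP named "ksp" has already been created
  on a parallel communicator and had its operators set; the helper name and the choice of 4 subgroups are
  arbitrary.

    static PetscErrorCode UseFourRedundantSolves(KSP ksp)
    {
      PC pc;

      PetscFunctionBegin;
      PetscCall(KSPGetPC(ksp, &pc));
      PetscCall(PCSetType(pc, PCREDUNDANT));
      PetscCall(PCRedundantSetNumber(pc, 4)); // e.g. 64 processes -> 4 solves on 16 processes each
      PetscFunctionReturn(PETSC_SUCCESS);
    }

  The same effect is available from the command line with -pc_type redundant -pc_redundant_number 4.
*/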

static PetscErrorCode PCRedundantSetScatter_Redundant(PC pc, VecScatter in, VecScatter out)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  PetscCall(PetscObjectReference((PetscObject)in));
  PetscCall(VecScatterDestroy(&red->scatterin));
  red->scatterin = in;

  PetscCall(PetscObjectReference((PetscObject)out));
  PetscCall(VecScatterDestroy(&red->scatterout));
  red->scatterout = out;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCRedundantSetScatter - Sets the scatter used to copy values into the
  redundant local solve and the scatter to move them back into the global
  vector.

  Logically Collective

  Input Parameters:
+ pc  - the preconditioner context
. in  - the scatter to move the values in
- out - the scatter to move them out

  Level: advanced

.seealso: [](ch_ksp), `PCREDUNDANT`
@*/
PetscErrorCode PCRedundantSetScatter(PC pc, VecScatter in, VecScatter out)
{
  PetscFunctionBegin;
  PetscTryMethod(pc, "PCRedundantSetScatter_C", (PC, VecScatter, VecScatter), (pc, in, out));
  PetscFunctionReturn(PETSC_SUCCESS);
}
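
/*
  Editorial note: an illustrative sketch, not part of the original file. PCRedundantSetScatter() is only
  needed when the caller supplies its own scatters instead of the ones built in PCSetUp_Redundant().
  Assuming VecScatters "in" and "out" have already been created (for instance with VecScatterCreate()
  using layouts compatible with xdup/ydup above), the call is simply

    PetscCall(PCSetType(pc, PCREDUNDANT));
    PetscCall(PCRedundantSetScatter(pc, in, out));

  The PC takes its own references (see PCRedundantSetScatter_Redundant() above), so the caller may
  destroy its "in" and "out" handles afterwards.
*/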

static PetscErrorCode PCRedundantGetKSP_Redundant(PC pc, KSP *innerksp)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;
  MPI_Comm      comm, subcomm;
  const char   *prefix;
  PetscBool     issbaij;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    PetscCall(PCGetOptionsPrefix(pc, &prefix));

    PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
    PetscCall(PetscSubcommCreate(comm, &red->psubcomm));
    PetscCall(PetscSubcommSetNumber(red->psubcomm, red->nsubcomm));
    PetscCall(PetscSubcommSetType(red->psubcomm, PETSC_SUBCOMM_CONTIGUOUS));

    PetscCall(PetscSubcommSetOptionsPrefix(red->psubcomm, prefix));
    PetscCall(PetscSubcommSetFromOptions(red->psubcomm));

    /* create a new PC of which the processes in each subcomm have a copy */
    subcomm = PetscSubcommChild(red->psubcomm);

    PetscCall(KSPCreate(subcomm, &red->ksp));
    PetscCall(KSPSetNestLevel(red->ksp, pc->kspnestlevel));
    PetscCall(KSPSetErrorIfNotConverged(red->ksp, pc->erroriffailure));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)red->ksp, (PetscObject)pc, 1));
    PetscCall(KSPSetType(red->ksp, KSPPREONLY));
    PetscCall(KSPGetPC(red->ksp, &red->pc));
    PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATSEQSBAIJ, &issbaij));
    if (!issbaij) PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATMPISBAIJ, &issbaij));
    if (!issbaij) {
      PetscCall(PCSetType(red->pc, PCLU));
    } else {
      PetscCall(PCSetType(red->pc, PCCHOLESKY));
    }
    if (red->shifttypeset) {
      PetscCall(PCFactorSetShiftType(red->pc, red->shifttype));
      red->shifttypeset = PETSC_FALSE;
    }
    PetscCall(KSPSetOptionsPrefix(red->ksp, prefix));
    PetscCall(KSPAppendOptionsPrefix(red->ksp, "redundant_"));
  }
  *innerksp = red->ksp;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCRedundantGetKSP - Gets the less parallel `KSP` created by the redundant `PC`.

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. innerksp - the `KSP` on the smaller set of processes

  Level: advanced

.seealso: [](ch_ksp), `PCREDUNDANT`
@*/
PetscErrorCode PCRedundantGetKSP(PC pc, KSP *innerksp)
{
  PetscFunctionBegin;
  PetscAssertPointer(innerksp, 2);
  PetscUseMethod(pc, "PCRedundantGetKSP_C", (PC, KSP *), (pc, innerksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
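
/*
  Editorial note: an illustrative sketch, not part of the original file. It shows how the inner, less
  parallel KSP might be retrieved and customized in code rather than through the -redundant_ options
  prefix. It assumes "pc" is a PC of type PCREDUNDANT; the choice of GMRES with block Jacobi is arbitrary.

    KSP inner;
    PC  innerpc;

    PetscCall(PCRedundantGetKSP(pc, &inner));
    PetscCall(KSPSetType(inner, KSPGMRES));
    PetscCall(KSPGetPC(inner, &innerpc));
    PetscCall(PCSetType(innerpc, PCBJACOBI));

  Equivalently, from the options database: -redundant_ksp_type gmres -redundant_pc_type bjacobi.
*/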

static PetscErrorCode PCRedundantGetOperators_Redundant(PC pc, Mat *mat, Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  if (mat) *mat = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCRedundantGetOperators - gets the sequential linear system matrix and matrix used to construct the preconditioner

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ mat  - the matrix
- pmat - the (possibly different) matrix used to construct the preconditioner

  Level: advanced

.seealso: [](ch_ksp), `PCREDUNDANT`
@*/
PetscErrorCode PCRedundantGetOperators(PC pc, Mat *mat, Mat *pmat)
{
  PetscFunctionBegin;
  if (mat) PetscAssertPointer(mat, 2);
  if (pmat) PetscAssertPointer(pmat, 3);
  PetscUseMethod(pc, "PCRedundantGetOperators_C", (PC, Mat *, Mat *), (pc, mat, pmat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
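
/*
  Editorial note: an illustrative sketch, not part of the original file. It shows how the redundant
  matrix might be inspected after PCSetUp(); it assumes "pc" is a set-up PCREDUNDANT. Note that both
  outputs point to the same Mat (red->pmats), which lives on the subcommunicator.

    Mat A_red;

    PetscCall(PCRedundantGetOperators(pc, &A_red, NULL));
    PetscCall(MatView(A_red, PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)A_red))));
*/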

/*MC
  PCREDUNDANT - Runs a `KSP` solver with its preconditioner for the entire problem redundantly on subgroups of processes

  Options Database Key:
. -pc_redundant_number <n> - number of redundant solves; for example, if you are running on 64 MPI processes and
                             n is 4, there will be 4 parallel solves, each on 16 = 64/4 processes

  Level: intermediate

  Notes:
  Options for the redundant preconditioners can be set using the options database prefix -redundant_

  The default `KSP` is `KSPPREONLY` and the default `PC` is `PCLU`, or `PCCHOLESKY` if Pmat is of type `MATSBAIJ`.

  `PCFactorSetShiftType()` applied to this `PC` will convey the shift type into the inner `PC` if it is factorization based.

  Developer Note:
  `PCSetInitialGuessNonzero()` is not used by this class but likely should be.

.seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PCRedundantSetScatter()`,
          `PCRedundantGetKSP()`, `PCRedundantGetOperators()`, `PCRedundantSetNumber()`, `PCREDISTRIBUTE`
M*/
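
/*
  Editorial note: an illustrative sketch, not part of the original file. The most common way to use
  PCREDUNDANT is purely from the options database, e.g.

    mpiexec -n 64 ./app -pc_type redundant -pc_redundant_number 4 -redundant_pc_type lu

  or, equivalently in code, assuming a KSP "ksp" whose operators are already set and vectors "b" and "x"
  for the right-hand side and solution (all names here are placeholders):

    PC pc;

    PetscCall(KSPGetPC(ksp, &pc));
    PetscCall(PCSetType(pc, PCREDUNDANT));
    PetscCall(PCRedundantSetNumber(pc, 4));
    PetscCall(KSPSetFromOptions(ksp));
    PetscCall(KSPSolve(ksp, b, x));
*/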

PETSC_EXTERN PetscErrorCode PCCreate_Redundant(PC pc)
{
  PC_Redundant *red;
  PetscMPIInt   size;

  PetscFunctionBegin;
  PetscCall(PetscNew(&red));
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));

  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void *)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = PCApplyTranspose_Redundant;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->reset          = PCReset_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;

  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetScatter_C", PCRedundantSetScatter_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetNumber_C", PCRedundantSetNumber_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetKSP_C", PCRedundantGetKSP_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetOperators_C", PCRedundantGetOperators_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFactorSetShiftType_C", PCFactorSetShiftType_Redundant));
  PetscFunctionReturn(PETSC_SUCCESS);
}