Actual source code: bjacobi.c


  2: /*
  3:    Defines a block Jacobi preconditioner.
  4: */

  6: #include <../src/ksp/pc/impls/bjacobi/bjacobi.h>

  8: static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC,Mat,Mat);
  9: static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC,Mat,Mat);
 10: static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC);

 12: static PetscErrorCode PCSetUp_BJacobi(PC pc)
 13: {
 14:   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
 15:   Mat            mat  = pc->mat,pmat = pc->pmat;
 17:   PetscBool      hasop;
 18:   PetscInt       N,M,start,i,sum,end;
 19:   PetscInt       bs,i_start=-1,i_end=-1;
 20:   PetscMPIInt    rank,size;

 23:   MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);
 24:   MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);
 25:   MatGetLocalSize(pc->pmat,&M,&N);
 26:   MatGetBlockSize(pc->pmat,&bs);

 28:   if (jac->n > 0 && jac->n < size) {
 29:     PCSetUp_BJacobi_Multiproc(pc);
 30:     return(0);
 31:   }

 33:   /* --------------------------------------------------------------------------
 34:       Determines the number of blocks assigned to each processor
 35:   -----------------------------------------------------------------------------*/

 37:   /* local block count given */
 38:   if (jac->n_local > 0 && jac->n < 0) {
 39:     MPIU_Allreduce(&jac->n_local,&jac->n,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));
 40:     if (jac->l_lens) { /* check that user set these correctly */
 41:       sum = 0;
 42:       for (i=0; i<jac->n_local; i++) {
 43:         if (jac->l_lens[i]/bs*bs !=jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
 44:         sum += jac->l_lens[i];
 45:       }
 46:       if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local lens set incorrectly");
 47:     } else {
 48:       PetscMalloc1(jac->n_local,&jac->l_lens);
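      /* This distributes the M/bs size-bs row blocks as evenly as possible over the n_local
         Jacobi blocks, giving the leftover blocks to the lowest-numbered ones; e.g. with
         M = 10, bs = 1 and n_local = 3 the local lengths come out as 4, 3, 3. */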
 49:       for (i=0; i<jac->n_local; i++) jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
 50:     }
 51:   } else if (jac->n > 0 && jac->n_local < 0) { /* global block count given */
 52:     /* global blocks given: determine which ones are local */
 53:     if (jac->g_lens) {
 54:       /* check that g_lens has valid entries */
 55:       for (i=0; i<jac->n; i++) {
 56:         if (!jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Zero block not allowed");
 57:         if (jac->g_lens[i]/bs*bs != jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
 58:       }
 59:       if (size == 1) {
 60:         jac->n_local = jac->n;
 61:         PetscMalloc1(jac->n_local,&jac->l_lens);
 62:         PetscArraycpy(jac->l_lens,jac->g_lens,jac->n_local);
 63:         /* check that user set these correctly */
 64:         sum = 0;
 65:         for (i=0; i<jac->n_local; i++) sum += jac->l_lens[i];
 66:         if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Global lens set incorrectly");
 67:       } else {
 68:         MatGetOwnershipRange(pc->pmat,&start,&end);
 69:         /* loop over blocks, determining the first one owned by this process */
 70:         sum = 0;
 71:         for (i=0; i<jac->n+1; i++) {
 72:           if (sum == start) { i_start = i; goto start_1;}
 73:           if (i < jac->n) sum += jac->g_lens[i];
 74:         }
 75:         SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
 76: start_1:
 77:         for (i=i_start; i<jac->n+1; i++) {
 78:           if (sum == end) { i_end = i; goto end_1; }
 79:           if (i < jac->n) sum += jac->g_lens[i];
 80:         }
 81:         SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
 82: end_1:
 83:         jac->n_local = i_end - i_start;
 84:         PetscMalloc1(jac->n_local,&jac->l_lens);
 85:         PetscArraycpy(jac->l_lens,jac->g_lens+i_start,jac->n_local);
 86:       }
 87:     } else { /* no global blocks given, determine them using the default layout */
 88:       jac->n_local = jac->n/size + ((jac->n % size) > rank);
 89:       PetscMalloc1(jac->n_local,&jac->l_lens);
 90:       for (i=0; i<jac->n_local; i++) {
 91:         jac->l_lens[i] = ((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i))*bs;
 92:         if (!jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Too many blocks given");
 93:       }
 94:     }
 95:   } else if (jac->n < 0 && jac->n_local < 0) { /* no blocks given */
 96:     jac->n         = size;
 97:     jac->n_local   = 1;
 98:     PetscMalloc1(1,&jac->l_lens);
 99:     jac->l_lens[0] = M;
100:   } else { /* jac->n > 0 && jac->n_local > 0 */
101:     if (!jac->l_lens) {
102:       PetscMalloc1(jac->n_local,&jac->l_lens);
103:       for (i=0; i<jac->n_local; i++) jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
104:     }
105:   }
106:   if (jac->n_local < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Number of blocks is less than number of processors");

108:   /* -------------------------
109:       Determines mat and pmat
110:   ---------------------------*/
111:   MatHasOperation(pc->mat,MATOP_GET_DIAGONAL_BLOCK,&hasop);
112:   if (!hasop && size == 1) {
113:     mat  = pc->mat;
114:     pmat = pc->pmat;
115:   } else {
116:     if (pc->useAmat) {
117:       /* use block from Amat matrix, not Pmat for local MatMult() */
118:       MatGetDiagonalBlock(pc->mat,&mat);
119:     }
120:     if (pc->pmat != pc->mat || !pc->useAmat) {
121:       MatGetDiagonalBlock(pc->pmat,&pmat);
122:     } else pmat = mat;
123:   }

125:   /* ------
126:      Setup code depends on the number of blocks
127:   */
128:   if (jac->n_local == 1) {
129:     PCSetUp_BJacobi_Singleblock(pc,mat,pmat);
130:   } else {
131:     PCSetUp_BJacobi_Multiblock(pc,mat,pmat);
132:   }
133:   return(0);
134: }

136: /* Default destroy, used if the PC has never been set up */
137: static PetscErrorCode PCDestroy_BJacobi(PC pc)
138: {
139:   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;

143:   PetscFree(jac->g_lens);
144:   PetscFree(jac->l_lens);
145:   PetscFree(pc->data);
146:   return(0);
147: }

149: static PetscErrorCode PCSetFromOptions_BJacobi(PetscOptionItems *PetscOptionsObject,PC pc)
150: {
151:   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
153:   PetscInt       blocks,i;
154:   PetscBool      flg;

157:   PetscOptionsHead(PetscOptionsObject,"Block Jacobi options");
158:   PetscOptionsInt("-pc_bjacobi_blocks","Total number of blocks","PCBJacobiSetTotalBlocks",jac->n,&blocks,&flg);
159:   if (flg) {PCBJacobiSetTotalBlocks(pc,blocks,NULL);}
160:   PetscOptionsInt("-pc_bjacobi_local_blocks","Local number of blocks","PCBJacobiSetLocalBlocks",jac->n_local,&blocks,&flg);
161:   if (flg) {PCBJacobiSetLocalBlocks(pc,blocks,NULL);}
162:   if (jac->ksp) {
163:     /* The sub-KSPs have already been set up (e.g., in PCSetUp_BJacobi_Singleblock), but KSPSetFromOptions() was not
164:      * called on them unless this routine had already been called. */
165:     for (i=0; i<jac->n_local; i++) {
166:       KSPSetFromOptions(jac->ksp[i]);
167:     }
168:   }
169:   PetscOptionsTail();
170:   return(0);
171: }

173: #include <petscdraw.h>
174: static PetscErrorCode PCView_BJacobi(PC pc,PetscViewer viewer)
175: {
176:   PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
177:   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
178:   PetscErrorCode       ierr;
179:   PetscMPIInt          rank;
180:   PetscInt             i;
181:   PetscBool            iascii,isstring,isdraw;
182:   PetscViewer          sviewer;
183:   PetscViewerFormat    format;
184:   const char           *prefix;

187:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
188:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);
189:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
190:   if (iascii) {
191:     if (pc->useAmat) {
192:       PetscViewerASCIIPrintf(viewer,"  using Amat local matrix, number of blocks = %D\n",jac->n);
193:     }
194:     PetscViewerASCIIPrintf(viewer,"  number of blocks = %D\n",jac->n);
195:     MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);
196:     PetscViewerGetFormat(viewer,&format);
197:     if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) {
198:       PetscViewerASCIIPrintf(viewer,"  Local solver information for first block is in the following KSP and PC objects on rank 0:\n");
199:       PCGetOptionsPrefix(pc,&prefix);
200:       PetscViewerASCIIPrintf(viewer,"  Use -%sksp_view ::ascii_info_detail to display information for all blocks\n",prefix?prefix:"");
201:       if (jac->ksp && !jac->psubcomm) {
202:         PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
203:         if (rank == 0) {
204:           PetscViewerASCIIPushTab(viewer);
205:           KSPView(jac->ksp[0],sviewer);
206:           PetscViewerASCIIPopTab(viewer);
207:         }
208:         PetscViewerFlush(sviewer);
209:         PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
210:         PetscViewerFlush(viewer);
211:         /*  extra call needed because of the two calls to PetscViewerASCIIPushSynchronized() in PetscViewerGetSubViewer() */
212:         PetscViewerASCIIPopSynchronized(viewer);
213:       } else if (mpjac && jac->ksp && mpjac->psubcomm) {
214:         PetscViewerGetSubViewer(viewer,mpjac->psubcomm->child,&sviewer);
215:         if (!mpjac->psubcomm->color) {
216:           PetscViewerASCIIPushTab(viewer);
217:           KSPView(*(jac->ksp),sviewer);
218:           PetscViewerASCIIPopTab(viewer);
219:         }
220:         PetscViewerFlush(sviewer);
221:         PetscViewerRestoreSubViewer(viewer,mpjac->psubcomm->child,&sviewer);
222:         PetscViewerFlush(viewer);
223:         /*  extra call needed because of the two calls to PetscViewerASCIIPushSynchronized() in PetscViewerGetSubViewer() */
224:         PetscViewerASCIIPopSynchronized(viewer);
225:       } else {
226:         PetscViewerFlush(viewer);
227:       }
228:     } else {
229:       PetscInt n_global;
230:       MPIU_Allreduce(&jac->n_local,&n_global,1,MPIU_INT,MPI_MAX,PetscObjectComm((PetscObject)pc));
231:       PetscViewerASCIIPushSynchronized(viewer);
232:       PetscViewerASCIIPrintf(viewer,"  Local solver information for each block is in the following KSP and PC objects:\n");
233:       PetscViewerASCIISynchronizedPrintf(viewer,"[%d] number of local blocks = %D, first local block number = %D\n",
234:                                                 rank,jac->n_local,jac->first_local);
235:       PetscViewerASCIIPushTab(viewer);
236:       PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
237:       for (i=0; i<jac->n_local; i++) {
238:         PetscViewerASCIISynchronizedPrintf(viewer,"[%d] local block number %D\n",rank,i);
239:         KSPView(jac->ksp[i],sviewer);
240:         PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");
241:       }
242:       PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
243:       PetscViewerASCIIPopTab(viewer);
244:       PetscViewerFlush(viewer);
245:       PetscViewerASCIIPopSynchronized(viewer);
246:     }
247:   } else if (isstring) {
248:     PetscViewerStringSPrintf(viewer," blks=%D",jac->n);
249:     PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
250:     if (jac->ksp) {KSPView(jac->ksp[0],sviewer);}
251:     PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
252:   } else if (isdraw) {
253:     PetscDraw draw;
254:     char      str[25];
255:     PetscReal x,y,bottom,h;

257:     PetscViewerDrawGetDraw(viewer,0,&draw);
258:     PetscDrawGetCurrentPoint(draw,&x,&y);
259:     PetscSNPrintf(str,25,"Number blocks %D",jac->n);
260:     PetscDrawStringBoxed(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h);
261:     bottom = y - h;
262:     PetscDrawPushCurrentPoint(draw,x,bottom);
263:     /* warning: in parallel the communicator on the viewer is different from the one on the ksp */
264:     if (jac->ksp) {KSPView(jac->ksp[0],viewer);}
265:     PetscDrawPopCurrentPoint(draw);
266:   }
267:   return(0);
268: }

270: /* -------------------------------------------------------------------------------------*/

272: static PetscErrorCode  PCBJacobiGetSubKSP_BJacobi(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
273: {
274:   PC_BJacobi *jac = (PC_BJacobi*)pc->data;

277:   if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call KSPSetUp() or PCSetUp() first");

279:   if (n_local) *n_local = jac->n_local;
280:   if (first_local) *first_local = jac->first_local;
281:   if (ksp) *ksp                 = jac->ksp;
282:   return(0);
283: }

285: static PetscErrorCode  PCBJacobiSetTotalBlocks_BJacobi(PC pc,PetscInt blocks,PetscInt *lens)
286: {
287:   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;

291:   if (pc->setupcalled > 0 && jac->n!=blocks) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Cannot alter number of blocks after PCSetUp()/KSPSetUp() has been called");
292:   jac->n = blocks;
293:   if (!lens) jac->g_lens = NULL;
294:   else {
295:     PetscMalloc1(blocks,&jac->g_lens);
296:     PetscLogObjectMemory((PetscObject)pc,blocks*sizeof(PetscInt));
297:     PetscArraycpy(jac->g_lens,lens,blocks);
298:   }
299:   return(0);
300: }

302: static PetscErrorCode  PCBJacobiGetTotalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
303: {
304:   PC_BJacobi *jac = (PC_BJacobi*) pc->data;

307:   *blocks = jac->n;
308:   if (lens) *lens = jac->g_lens;
309:   return(0);
310: }

312: static PetscErrorCode  PCBJacobiSetLocalBlocks_BJacobi(PC pc,PetscInt blocks,const PetscInt lens[])
313: {
314:   PC_BJacobi     *jac;

318:   jac = (PC_BJacobi*)pc->data;

320:   jac->n_local = blocks;
321:   if (!lens) jac->l_lens = NULL;
322:   else {
323:     PetscMalloc1(blocks,&jac->l_lens);
324:     PetscLogObjectMemory((PetscObject)pc,blocks*sizeof(PetscInt));
325:     PetscArraycpy(jac->l_lens,lens,blocks);
326:   }
327:   return(0);
328: }

330: static PetscErrorCode  PCBJacobiGetLocalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
331: {
332:   PC_BJacobi *jac = (PC_BJacobi*) pc->data;

335:   *blocks = jac->n_local;
336:   if (lens) *lens = jac->l_lens;
337:   return(0);
338: }

340: /* -------------------------------------------------------------------------------------*/

342: /*@C
343:    PCBJacobiGetSubKSP - Gets the local KSP contexts for all blocks on
344:    this processor.

346:    Not Collective

348:    Input Parameter:
349: .  pc - the preconditioner context

351:    Output Parameters:
352: +  n_local - the number of blocks on this processor, or NULL
353: .  first_local - the global number of the first block on this processor, or NULL
354: -  ksp - the array of KSP contexts

356:    Notes:
357:    The array of KSP contexts returned by PCBJacobiGetSubKSP() is owned by the PC and must not be freed by the caller.

359:    Currently for some matrix implementations only 1 block per processor
360:    is supported.

362:    You must call KSPSetUp() or PCSetUp() before calling PCBJacobiGetSubKSP().

364:    Fortran Usage: You must pass in a KSP array that is large enough to contain all the local KSPs.
365:       You can call PCBJacobiGetSubKSP(pc,nlocal,firstlocal,PETSC_NULL_KSP,ierr) to determine how large the
366:       KSP array must be.

368:    Level: advanced

370: .seealso: PCASMGetSubKSP()
371: @*/
372: PetscErrorCode  PCBJacobiGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
373: {

378:   PetscUseMethod(pc,"PCBJacobiGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));
379:   return(0);
380: }
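
/*
   Illustrative usage sketch: after setup, retrieve the sub-KSPs and configure each block's
   solver directly (the outer objects ksp and pc are assumed to exist, with the PC type set
   to PCBJACOBI; error checking is omitted).

     KSP      *subksp;
     PC       subpc;
     PetscInt nlocal,first,i;

     KSPSetUp(ksp);                                   the sub-KSPs are created during setup
     PCBJacobiGetSubKSP(pc,&nlocal,&first,&subksp);   the returned array is owned by the PC
     for (i=0; i<nlocal; i++) {
       KSPSetType(subksp[i],KSPGMRES);
       KSPGetPC(subksp[i],&subpc);
       PCSetType(subpc,PCILU);
     }
*/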

382: /*@
383:    PCBJacobiSetTotalBlocks - Sets the global number of blocks for the block
384:    Jacobi preconditioner.

386:    Collective on PC

388:    Input Parameters:
389: +  pc - the preconditioner context
390: .  blocks - the number of blocks
391: -  lens - [optional] integer array containing the size of each block

393:    Options Database Key:
394: .  -pc_bjacobi_blocks <blocks> - Sets the number of global blocks

396:    Notes:
397:    Currently only a limited number of blocking configurations are supported.
398:    All processors sharing the PC must call this routine with the same data.

400:    Level: intermediate

402: .seealso: PCSetUseAmat(), PCBJacobiSetLocalBlocks()
403: @*/
404: PetscErrorCode  PCBJacobiSetTotalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
405: {

410:   if (blocks <= 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Must have positive blocks");
411:   PetscTryMethod(pc,"PCBJacobiSetTotalBlocks_C",(PC,PetscInt,const PetscInt[]),(pc,blocks,lens));
412:   return(0);
413: }
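
/*
   Illustrative usage sketch: requesting four blocks with explicit sizes (the sizes below are
   made up and must sum to the global number of rows; pass NULL for lens to get an even split).

     PetscInt lens[4] = {10,10,20,20};

     PCSetType(pc,PCBJACOBI);
     PCBJacobiSetTotalBlocks(pc,4,lens);
*/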

415: /*@C
416:    PCBJacobiGetTotalBlocks - Gets the global number of blocks for the block
417:    Jacobi preconditioner.

419:    Not Collective

421:    Input Parameter:
422: .  pc - the preconditioner context

424:    Output Parameters:
425: +  blocks - the number of blocks
426: -  lens - integer array containing the size of each block

428:    Level: intermediate

430: .seealso: PCSetUseAmat(), PCBJacobiGetLocalBlocks()
431: @*/
432: PetscErrorCode  PCBJacobiGetTotalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
433: {

439:   PetscUseMethod(pc,"PCBJacobiGetTotalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));
440:   return(0);
441: }

443: /*@
444:    PCBJacobiSetLocalBlocks - Sets the local number of blocks for the block
445:    Jacobi preconditioner.

447:    Not Collective

449:    Input Parameters:
450: +  pc - the preconditioner context
451: .  blocks - the number of blocks
452: -  lens - [optional] integer array containing size of each block

454:    Options Database Key:
455: .  -pc_bjacobi_local_blocks <blocks> - Sets the number of local blocks

457:    Note:
458:    Currently only a limited number of blocking configurations are supported.

460:    Level: intermediate

462: .seealso: PCSetUseAmat(), PCBJacobiSetTotalBlocks()
463: @*/
464: PetscErrorCode  PCBJacobiSetLocalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
465: {

470:   if (blocks < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must have nonnegative blocks");
471:   PetscTryMethod(pc,"PCBJacobiSetLocalBlocks_C",(PC,PetscInt,const PetscInt []),(pc,blocks,lens));
472:   return(0);
473: }
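
/*
   Illustrative usage sketch: each rank may request its own number of local blocks; passing
   NULL for lens divides the local rows evenly among the requested blocks.

     PCBJacobiSetLocalBlocks(pc,2,NULL);
*/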

475: /*@C
476:    PCBJacobiGetLocalBlocks - Gets the local number of blocks for the block
477:    Jacobi preconditioner.

479:    Not Collective

481:    Input Parameter:
482: .  pc - the preconditioner context

483:    Output Parameters:
484: +  blocks - the number of blocks
485: -  lens - [optional] integer array containing the size of each block

486:    Note:
487:    Currently only a limited number of blocking configurations are supported.

489:    Level: intermediate

491: .seealso: PCSetUseAmat(), PCBJacobiGetTotalBlocks()
492: @*/
493: PetscErrorCode  PCBJacobiGetLocalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
494: {

500:   PetscUseMethod(pc,"PCBJacobiGetLocalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));
501:   return(0);
502: }

504: /* -----------------------------------------------------------------------------------*/

506: /*MC
507:    PCBJACOBI - Use block Jacobi preconditioning, each block is (approximately) solved with
508:            its own KSP object.

510:    Options Database Keys:
511: +  -pc_use_amat - use Amat to apply block of operator in inner Krylov method
512: -  -pc_bjacobi_blocks <n> - use n total blocks

514:    Notes:
515:     Each processor can have one or more blocks, or a single block can be shared by several processes. Defaults to one block per processor.

517:      To set options on the solvers for each block, append -sub_ to all the KSP and PC
518:         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly

520:      To set the options on the solvers separately for each block, call PCBJacobiGetSubKSP()
521:          and set the options directly on the resulting KSP objects (you can access each block's PC
522:          with KSPGetPC())

524:      For GPU-based vectors (CUDA, ViennaCL) it is recommended to use exactly one block per MPI process for best
525:          performance.  Different block partitioning may lead to additional data transfers
526:          between host and GPU that lead to degraded performance.

528:      The options prefix for each block is sub_, for example -sub_pc_type lu.

530:      When multiple processes share a single block, each block encompasses exactly all the unknowns owned by its set of processes.

532:    Level: beginner

534: .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
535:            PCASM, PCSetUseAmat(), PCGetUseAmat(), PCBJacobiGetSubKSP(), PCBJacobiSetTotalBlocks(),
536:            PCBJacobiSetLocalBlocks(), PCSetModifySubMatrices()
537: M*/
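
/*
   Illustrative usage sketch: selecting PCBJACOBI from code and letting the options database
   tune the per-block solvers (the matrix A and vectors b, x are assumed to exist; error
   checking is omitted).

     KSPCreate(PETSC_COMM_WORLD,&ksp);
     KSPSetOperators(ksp,A,A);
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCBJACOBI);
     KSPSetFromOptions(ksp);       picks up e.g. -pc_bjacobi_blocks 4 -sub_pc_type ilu -sub_ksp_type preonly
     KSPSolve(ksp,b,x);
*/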

539: PETSC_EXTERN PetscErrorCode PCCreate_BJacobi(PC pc)
540: {
542:   PetscMPIInt    rank;
543:   PC_BJacobi     *jac;

546:   PetscNewLog(pc,&jac);
547:   MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);

549:   pc->ops->apply           = NULL;
550:   pc->ops->matapply        = NULL;
551:   pc->ops->applytranspose  = NULL;
552:   pc->ops->setup           = PCSetUp_BJacobi;
553:   pc->ops->destroy         = PCDestroy_BJacobi;
554:   pc->ops->setfromoptions  = PCSetFromOptions_BJacobi;
555:   pc->ops->view            = PCView_BJacobi;
556:   pc->ops->applyrichardson = NULL;

558:   pc->data               = (void*)jac;
559:   jac->n                 = -1;
560:   jac->n_local           = -1;
561:   jac->first_local       = rank;
562:   jac->ksp               = NULL;
563:   jac->g_lens            = NULL;
564:   jac->l_lens            = NULL;
565:   jac->psubcomm          = NULL;

567:   PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetSubKSP_C",PCBJacobiGetSubKSP_BJacobi);
568:   PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiSetTotalBlocks_C",PCBJacobiSetTotalBlocks_BJacobi);
569:   PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetTotalBlocks_C",PCBJacobiGetTotalBlocks_BJacobi);
570:   PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiSetLocalBlocks_C",PCBJacobiSetLocalBlocks_BJacobi);
571:   PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetLocalBlocks_C",PCBJacobiGetLocalBlocks_BJacobi);
572:   return(0);
573: }

575: /* --------------------------------------------------------------------------------------------*/
576: /*
577:         These are for a single block per processor; they work for AIJ and BAIJ, both Seq and MPI
578: */
579: static PetscErrorCode PCReset_BJacobi_Singleblock(PC pc)
580: {
581:   PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
582:   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
583:   PetscErrorCode         ierr;

586:   KSPReset(jac->ksp[0]);
587:   VecDestroy(&bjac->x);
588:   VecDestroy(&bjac->y);
589:   return(0);
590: }

592: static PetscErrorCode PCDestroy_BJacobi_Singleblock(PC pc)
593: {
594:   PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
595:   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
596:   PetscErrorCode         ierr;

599:   PCReset_BJacobi_Singleblock(pc);
600:   KSPDestroy(&jac->ksp[0]);
601:   PetscFree(jac->ksp);
602:   PetscFree(jac->l_lens);
603:   PetscFree(jac->g_lens);
604:   PetscFree(bjac);
605:   PetscFree(pc->data);
606:   return(0);
607: }

609: static PetscErrorCode PCSetUpOnBlocks_BJacobi_Singleblock(PC pc)
610: {
611:   PetscErrorCode     ierr;
612:   PC_BJacobi         *jac = (PC_BJacobi*)pc->data;
613:   KSP                subksp = jac->ksp[0];
614:   KSPConvergedReason reason;

617:   KSPSetUp(subksp);
618:   KSPGetConvergedReason(subksp,&reason);
619:   if (reason == KSP_DIVERGED_PC_FAILED) {
620:     pc->failedreason = PC_SUBPC_ERROR;
621:   }
622:   return(0);
623: }

625: static PetscErrorCode PCApply_BJacobi_Singleblock(PC pc,Vec x,Vec y)
626: {
627:   PetscErrorCode         ierr;
628:   PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
629:   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;

632:   VecGetLocalVectorRead(x, bjac->x);
633:   VecGetLocalVector(y, bjac->y);
634:  /* Since the inner KSP matrix may point directly to the diagonal block of an MPI matrix, the inner
635:      matrix may change even if the outer KSP/PC has not updated the preconditioner. This would trigger a rebuild
636:      of the inner preconditioner automatically unless we pass down the outer preconditioner's reuse flag. */
637:   KSPSetReusePreconditioner(jac->ksp[0],pc->reusepreconditioner);
638:   KSPSolve(jac->ksp[0],bjac->x,bjac->y);
639:   KSPCheckSolve(jac->ksp[0],pc,bjac->y);
640:   VecRestoreLocalVectorRead(x, bjac->x);
641:   VecRestoreLocalVector(y, bjac->y);
642:   return(0);
643: }

645: static PetscErrorCode PCMatApply_BJacobi_Singleblock(PC pc,Mat X,Mat Y)
646: {
647:   PC_BJacobi     *jac  = (PC_BJacobi*)pc->data;
648:   Mat            sX,sY;

652:  /* Since the inner KSP matrix may point directly to the diagonal block of an MPI matrix, the inner
653:      matrix may change even if the outer KSP/PC has not updated the preconditioner. This would trigger a rebuild
654:      of the inner preconditioner automatically unless we pass down the outer preconditioner's reuse flag. */
655:   KSPSetReusePreconditioner(jac->ksp[0],pc->reusepreconditioner);
656:   MatDenseGetLocalMatrix(X,&sX);
657:   MatDenseGetLocalMatrix(Y,&sY);
658:   KSPMatSolve(jac->ksp[0],sX,sY);
659:   return(0);
660: }

662: static PetscErrorCode PCApplySymmetricLeft_BJacobi_Singleblock(PC pc,Vec x,Vec y)
663: {
664:   PetscErrorCode         ierr;
665:   PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
666:   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
667:   PetscScalar            *y_array;
668:   const PetscScalar      *x_array;
669:   PC                     subpc;

672:   /*
673:       The VecPlaceArray() calls are to avoid having to copy the
674:     x and y vectors into the work vectors bjac->x and bjac->y. The reason
675:     these work vectors are needed is that the solve on the block requires
676:     sequential vectors.
677:   */
678:   VecGetArrayRead(x,&x_array);
679:   VecGetArray(y,&y_array);
680:   VecPlaceArray(bjac->x,x_array);
681:   VecPlaceArray(bjac->y,y_array);
682:   /* apply the symmetric left portion of the inner PC operator */
683:   /* note this by-passes the inner KSP and its options completely */
684:   KSPGetPC(jac->ksp[0],&subpc);
685:   PCApplySymmetricLeft(subpc,bjac->x,bjac->y);
686:   VecResetArray(bjac->x);
687:   VecResetArray(bjac->y);
688:   VecRestoreArrayRead(x,&x_array);
689:   VecRestoreArray(y,&y_array);
690:   return(0);
691: }

693: static PetscErrorCode PCApplySymmetricRight_BJacobi_Singleblock(PC pc,Vec x,Vec y)
694: {
695:   PetscErrorCode         ierr;
696:   PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
697:   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
698:   PetscScalar            *y_array;
699:   const PetscScalar      *x_array;
700:   PC                     subpc;

703:   /*
704:       The VecPlaceArray() calls are to avoid having to copy the
705:     x and y vectors into the work vectors bjac->x and bjac->y. The reason
706:     these work vectors are needed is that the solve on the block requires
707:     sequential vectors.
708:   */
709:   VecGetArrayRead(x,&x_array);
710:   VecGetArray(y,&y_array);
711:   VecPlaceArray(bjac->x,x_array);
712:   VecPlaceArray(bjac->y,y_array);

714:   /* apply the symmetric right portion of the inner PC operator */
715:   /* note this by-passes the inner KSP and its options completely */

717:   KSPGetPC(jac->ksp[0],&subpc);
718:   PCApplySymmetricRight(subpc,bjac->x,bjac->y);
719:   VecResetArray(bjac->x);
720:   VecResetArray(bjac->y);
721:   VecRestoreArrayRead(x,&x_array);
722:   VecRestoreArray(y,&y_array);
723:   return(0);
724: }

725: static PetscErrorCode PCApplyTranspose_BJacobi_Singleblock(PC pc,Vec x,Vec y)
726: {
727:   PetscErrorCode         ierr;
728:   PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
729:   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
730:   PetscScalar            *y_array;
731:   const PetscScalar      *x_array;

734:   /*
735:       The VecPlaceArray() calls are to avoid having to copy the
736:     x and y vectors into the work vectors bjac->x and bjac->y. The reason
737:     these work vectors are needed is that the solve on the block requires
738:     sequential vectors.
739:   */
740:   VecGetArrayRead(x,&x_array);
741:   VecGetArray(y,&y_array);
742:   VecPlaceArray(bjac->x,x_array);
743:   VecPlaceArray(bjac->y,y_array);
744:   KSPSolveTranspose(jac->ksp[0],bjac->x,bjac->y);
745:   KSPCheckSolve(jac->ksp[0],pc,bjac->y);
746:   VecResetArray(bjac->x);
747:   VecResetArray(bjac->y);
748:   VecRestoreArrayRead(x,&x_array);
749:   VecRestoreArray(y,&y_array);
750:   return(0);
751: }

753: static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC pc,Mat mat,Mat pmat)
754: {
755:   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
756:   PetscErrorCode         ierr;
757:   PetscInt               m;
758:   KSP                    ksp;
759:   PC_BJacobi_Singleblock *bjac;
760:   PetscBool              wasSetup = PETSC_TRUE;
761:   VecType                vectype;
762:   const char             *prefix;

765:   if (!pc->setupcalled) {
766:     if (!jac->ksp) {
767:       wasSetup = PETSC_FALSE;

769:       KSPCreate(PETSC_COMM_SELF,&ksp);
770:       KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);
771:       PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);
772:       PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);
773:       KSPSetType(ksp,KSPPREONLY);
774:       PCGetOptionsPrefix(pc,&prefix);
775:       KSPSetOptionsPrefix(ksp,prefix);
776:       KSPAppendOptionsPrefix(ksp,"sub_");

778:       pc->ops->reset               = PCReset_BJacobi_Singleblock;
779:       pc->ops->destroy             = PCDestroy_BJacobi_Singleblock;
780:       pc->ops->apply               = PCApply_BJacobi_Singleblock;
781:       pc->ops->matapply            = PCMatApply_BJacobi_Singleblock;
782:       pc->ops->applysymmetricleft  = PCApplySymmetricLeft_BJacobi_Singleblock;
783:       pc->ops->applysymmetricright = PCApplySymmetricRight_BJacobi_Singleblock;
784:       pc->ops->applytranspose      = PCApplyTranspose_BJacobi_Singleblock;
785:       pc->ops->setuponblocks       = PCSetUpOnBlocks_BJacobi_Singleblock;

787:       PetscMalloc1(1,&jac->ksp);
788:       jac->ksp[0] = ksp;

790:       PetscNewLog(pc,&bjac);
791:       jac->data = (void*)bjac;
792:     } else {
793:       ksp  = jac->ksp[0];
794:       bjac = (PC_BJacobi_Singleblock*)jac->data;
795:     }

797:     /*
798:       The reason we need to generate these vectors is to serve
799:       as the right-hand side and solution vector for the solve on the
800:       block. We do not need to allocate space for the vectors since
801:       that is provided via VecPlaceArray() just before the call to
802:       KSPSolve() on the block.
803:     */
804:     MatGetSize(pmat,&m,&m);
805:     VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&bjac->x);
806:     VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&bjac->y);
807:     MatGetVecType(pmat,&vectype);
808:     VecSetType(bjac->x,vectype);
809:     VecSetType(bjac->y,vectype);
810:     PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->x);
811:     PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->y);
812:   } else {
813:     ksp  = jac->ksp[0];
814:     bjac = (PC_BJacobi_Singleblock*)jac->data;
815:   }
816:   KSPGetOptionsPrefix(ksp,&prefix);
817:   if (pc->useAmat) {
818:     KSPSetOperators(ksp,mat,pmat);
819:     MatSetOptionsPrefix(mat,prefix);
820:   } else {
821:     KSPSetOperators(ksp,pmat,pmat);
822:   }
823:   MatSetOptionsPrefix(pmat,prefix);
824:   if (!wasSetup && pc->setfromoptionscalled) {
825:     /* If PCSetFromOptions_BJacobi is called later, KSPSetFromOptions will be called at that time. */
826:     KSPSetFromOptions(ksp);
827:   }
828:   return(0);
829: }

831: /* ---------------------------------------------------------------------------------------------*/
832: static PetscErrorCode PCReset_BJacobi_Multiblock(PC pc)
833: {
834:   PC_BJacobi            *jac  = (PC_BJacobi*)pc->data;
835:   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
836:   PetscErrorCode        ierr;
837:   PetscInt              i;

840:   if (bjac && bjac->pmat) {
841:     MatDestroyMatrices(jac->n_local,&bjac->pmat);
842:     if (pc->useAmat) {
843:       MatDestroyMatrices(jac->n_local,&bjac->mat);
844:     }
845:   }

847:   for (i=0; i<jac->n_local; i++) {
848:     KSPReset(jac->ksp[i]);
849:     if (bjac && bjac->x) {
850:       VecDestroy(&bjac->x[i]);
851:       VecDestroy(&bjac->y[i]);
852:       ISDestroy(&bjac->is[i]);
853:     }
854:   }
855:   PetscFree(jac->l_lens);
856:   PetscFree(jac->g_lens);
857:   return(0);
858: }

860: static PetscErrorCode PCDestroy_BJacobi_Multiblock(PC pc)
861: {
862:   PC_BJacobi            *jac  = (PC_BJacobi*)pc->data;
863:   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
864:   PetscErrorCode        ierr;
865:   PetscInt              i;

868:   PCReset_BJacobi_Multiblock(pc);
869:   if (bjac) {
870:     PetscFree2(bjac->x,bjac->y);
871:     PetscFree(bjac->starts);
872:     PetscFree(bjac->is);
873:   }
874:   PetscFree(jac->data);
875:   for (i=0; i<jac->n_local; i++) {
876:     KSPDestroy(&jac->ksp[i]);
877:   }
878:   PetscFree(jac->ksp);
879:   PetscFree(pc->data);
880:   return(0);
881: }

883: static PetscErrorCode PCSetUpOnBlocks_BJacobi_Multiblock(PC pc)
884: {
885:   PC_BJacobi         *jac = (PC_BJacobi*)pc->data;
886:   PetscErrorCode     ierr;
887:   PetscInt           i,n_local = jac->n_local;
888:   KSPConvergedReason reason;

891:   for (i=0; i<n_local; i++) {
892:     KSPSetUp(jac->ksp[i]);
893:     KSPGetConvergedReason(jac->ksp[i],&reason);
894:     if (reason == KSP_DIVERGED_PC_FAILED) {
895:       pc->failedreason = PC_SUBPC_ERROR;
896:     }
897:   }
898:   return(0);
899: }

901: /*
902:       Preconditioner for block Jacobi
903: */
904: static PetscErrorCode PCApply_BJacobi_Multiblock(PC pc,Vec x,Vec y)
905: {
906:   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
907:   PetscErrorCode        ierr;
908:   PetscInt              i,n_local = jac->n_local;
909:   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
910:   PetscScalar           *yin;
911:   const PetscScalar     *xin;

914:   VecGetArrayRead(x,&xin);
915:   VecGetArray(y,&yin);
916:   for (i=0; i<n_local; i++) {
917:     /*
918:        To avoid copying the subvector from x into a workspace we instead
919:        make the workspace vector array point to the subpart of the array of
920:        the global vector.
921:     */
922:     VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);
923:     VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);

925:     PetscLogEventBegin(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);
926:     KSPSolve(jac->ksp[i],bjac->x[i],bjac->y[i]);
927:     KSPCheckSolve(jac->ksp[i],pc,bjac->y[i]);
928:     PetscLogEventEnd(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);

930:     VecResetArray(bjac->x[i]);
931:     VecResetArray(bjac->y[i]);
932:   }
933:   VecRestoreArrayRead(x,&xin);
934:   VecRestoreArray(y,&yin);
935:   return(0);
936: }

938: /*
939:       Preconditioner for block Jacobi
940: */
941: static PetscErrorCode PCApplyTranspose_BJacobi_Multiblock(PC pc,Vec x,Vec y)
942: {
943:   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
944:   PetscErrorCode        ierr;
945:   PetscInt              i,n_local = jac->n_local;
946:   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
947:   PetscScalar           *yin;
948:   const PetscScalar     *xin;

951:   VecGetArrayRead(x,&xin);
952:   VecGetArray(y,&yin);
953:   for (i=0; i<n_local; i++) {
954:     /*
955:        To avoid copying the subvector from x into a workspace we instead
956:        make the workspace vector array point to the subpart of the array of
957:        the global vector.
958:     */
959:     VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);
960:     VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);

962:     PetscLogEventBegin(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);
963:     KSPSolveTranspose(jac->ksp[i],bjac->x[i],bjac->y[i]);
964:     KSPCheckSolve(jac->ksp[i],pc,bjac->y[i]);
965:     PetscLogEventEnd(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);

967:     VecResetArray(bjac->x[i]);
968:     VecResetArray(bjac->y[i]);
969:   }
970:   VecRestoreArrayRead(x,&xin);
971:   VecRestoreArray(y,&yin);
972:   return(0);
973: }

975: static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC pc,Mat mat,Mat pmat)
976: {
977:   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
978:   PetscErrorCode        ierr;
979:   PetscInt              m,n_local,N,M,start,i;
980:   const char            *prefix;
981:   KSP                   ksp;
982:   Vec                   x,y;
983:   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
984:   PC                    subpc;
985:   IS                    is;
986:   MatReuse              scall;
987:   VecType               vectype;

990:   MatGetLocalSize(pc->pmat,&M,&N);

992:   n_local = jac->n_local;

994:   if (pc->useAmat) {
995:     PetscBool same;
996:     PetscObjectTypeCompare((PetscObject)mat,((PetscObject)pmat)->type_name,&same);
997:     if (!same) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_INCOMP,"Matrices not of same type");
998:   }

1000:   if (!pc->setupcalled) {
1001:     scall = MAT_INITIAL_MATRIX;

1003:     if (!jac->ksp) {
1004:       pc->ops->reset         = PCReset_BJacobi_Multiblock;
1005:       pc->ops->destroy       = PCDestroy_BJacobi_Multiblock;
1006:       pc->ops->apply         = PCApply_BJacobi_Multiblock;
1007:       pc->ops->matapply      = NULL;
1008:       pc->ops->applytranspose= PCApplyTranspose_BJacobi_Multiblock;
1009:       pc->ops->setuponblocks = PCSetUpOnBlocks_BJacobi_Multiblock;

1011:       PetscNewLog(pc,&bjac);
1012:       PetscMalloc1(n_local,&jac->ksp);
1013:       PetscLogObjectMemory((PetscObject)pc,n_local*sizeof(KSP));
1014:       PetscMalloc2(n_local,&bjac->x,n_local,&bjac->y);
1015:       PetscMalloc1(n_local,&bjac->starts);
1016:       PetscLogObjectMemory((PetscObject)pc,n_local*sizeof(PetscScalar));

1018:       jac->data = (void*)bjac;
1019:       PetscMalloc1(n_local,&bjac->is);
1020:       PetscLogObjectMemory((PetscObject)pc,n_local*sizeof(IS));

1022:       for (i=0; i<n_local; i++) {
1023:         KSPCreate(PETSC_COMM_SELF,&ksp);
1024:         KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);
1025:         PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);
1026:         PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);
1027:         KSPSetType(ksp,KSPPREONLY);
1028:         KSPGetPC(ksp,&subpc);
1029:         PCGetOptionsPrefix(pc,&prefix);
1030:         KSPSetOptionsPrefix(ksp,prefix);
1031:         KSPAppendOptionsPrefix(ksp,"sub_");

1033:         jac->ksp[i] = ksp;
1034:       }
1035:     } else {
1036:       bjac = (PC_BJacobi_Multiblock*)jac->data;
1037:     }

1039:     start = 0;
1040:     MatGetVecType(pmat,&vectype);
1041:     for (i=0; i<n_local; i++) {
1042:       m = jac->l_lens[i];
1043:       /*
1044:       The reason we need to generate these vectors is to serve
1045:       as the right-hand side and solution vector for the solve on the
1046:       block. We do not need to allocate space for the vectors since
1047:       that is provided via VecPlaceArray() just before the call to
1048:       KSPSolve() on the block.

1050:       */
1051:       VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&x);
1052:       VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&y);
1053:       VecSetType(x,vectype);
1054:       VecSetType(y,vectype);
1055:       PetscLogObjectParent((PetscObject)pc,(PetscObject)x);
1056:       PetscLogObjectParent((PetscObject)pc,(PetscObject)y);

1058:       bjac->x[i]      = x;
1059:       bjac->y[i]      = y;
1060:       bjac->starts[i] = start;

1062:       ISCreateStride(PETSC_COMM_SELF,m,start,1,&is);
1063:       bjac->is[i] = is;
1064:       PetscLogObjectParent((PetscObject)pc,(PetscObject)is);

1066:       start += m;
1067:     }
1068:   } else {
1069:     bjac = (PC_BJacobi_Multiblock*)jac->data;
1070:     /*
1071:        Destroy the blocks from the previous iteration
1072:     */
1073:     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
1074:       MatDestroyMatrices(n_local,&bjac->pmat);
1075:       if (pc->useAmat) {
1076:         MatDestroyMatrices(n_local,&bjac->mat);
1077:       }
1078:       scall = MAT_INITIAL_MATRIX;
1079:     } else scall = MAT_REUSE_MATRIX;
1080:   }

1082:   MatCreateSubMatrices(pmat,n_local,bjac->is,bjac->is,scall,&bjac->pmat);
1083:   if (pc->useAmat) {
1084:     MatCreateSubMatrices(mat,n_local,bjac->is,bjac->is,scall,&bjac->mat);
1085:   }
1086:   /* Return control to the user so that the submatrices can be modified (e.g., to apply
1087:      different boundary conditions for the submatrices than for the global problem) */
1088:   PCModifySubMatrices(pc,n_local,bjac->is,bjac->is,bjac->pmat,pc->modifysubmatricesP);

1090:   for (i=0; i<n_local; i++) {
1091:     PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->pmat[i]);
1092:     KSPGetOptionsPrefix(jac->ksp[i],&prefix);
1093:     if (pc->useAmat) {
1094:       PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->mat[i]);
1095:       KSPSetOperators(jac->ksp[i],bjac->mat[i],bjac->pmat[i]);
1096:       MatSetOptionsPrefix(bjac->mat[i],prefix);
1097:     } else {
1098:       KSPSetOperators(jac->ksp[i],bjac->pmat[i],bjac->pmat[i]);
1099:     }
1100:     MatSetOptionsPrefix(bjac->pmat[i],prefix);
1101:     if (pc->setfromoptionscalled) {
1102:       KSPSetFromOptions(jac->ksp[i]);
1103:     }
1104:   }
1105:   return(0);
1106: }

1108: /* ---------------------------------------------------------------------------------------------*/
1109: /*
1110:       These are for a single block with multiple processes
1111: */
1112: static PetscErrorCode PCSetUpOnBlocks_BJacobi_Multiproc(PC pc)
1113: {
1114:   PetscErrorCode     ierr;
1115:   PC_BJacobi         *jac = (PC_BJacobi*)pc->data;
1116:   KSP                subksp = jac->ksp[0];
1117:   KSPConvergedReason reason;

1120:   KSPSetUp(subksp);
1121:   KSPGetConvergedReason(subksp,&reason);
1122:   if (reason == KSP_DIVERGED_PC_FAILED) {
1123:     pc->failedreason = PC_SUBPC_ERROR;
1124:   }
1125:   return(0);
1126: }

1128: static PetscErrorCode PCReset_BJacobi_Multiproc(PC pc)
1129: {
1130:   PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
1131:   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1132:   PetscErrorCode       ierr;

1135:   VecDestroy(&mpjac->ysub);
1136:   VecDestroy(&mpjac->xsub);
1137:   MatDestroy(&mpjac->submats);
1138:   if (jac->ksp) {KSPReset(jac->ksp[0]);}
1139:   return(0);
1140: }

1142: static PetscErrorCode PCDestroy_BJacobi_Multiproc(PC pc)
1143: {
1144:   PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
1145:   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1146:   PetscErrorCode       ierr;

1149:   PCReset_BJacobi_Multiproc(pc);
1150:   KSPDestroy(&jac->ksp[0]);
1151:   PetscFree(jac->ksp);
1152:   PetscSubcommDestroy(&mpjac->psubcomm);

1154:   PetscFree(mpjac);
1155:   PetscFree(pc->data);
1156:   return(0);
1157: }

1159: static PetscErrorCode PCApply_BJacobi_Multiproc(PC pc,Vec x,Vec y)
1160: {
1161:   PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
1162:   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1163:   PetscErrorCode       ierr;
1164:   PetscScalar          *yarray;
1165:   const PetscScalar    *xarray;
1166:   KSPConvergedReason   reason;

1169:   /* place x's and y's local arrays into xsub and ysub */
1170:   VecGetArrayRead(x,&xarray);
1171:   VecGetArray(y,&yarray);
1172:   VecPlaceArray(mpjac->xsub,xarray);
1173:   VecPlaceArray(mpjac->ysub,yarray);

1175:   /* apply preconditioner on each matrix block */
1176:   PetscLogEventBegin(PC_ApplyOnBlocks,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);
1177:   KSPSolve(jac->ksp[0],mpjac->xsub,mpjac->ysub);
1178:   KSPCheckSolve(jac->ksp[0],pc,mpjac->ysub);
1179:   PetscLogEventEnd(PC_ApplyOnBlocks,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);
1180:   KSPGetConvergedReason(jac->ksp[0],&reason);
1181:   if (reason == KSP_DIVERGED_PC_FAILED) {
1182:     pc->failedreason = PC_SUBPC_ERROR;
1183:   }

1185:   VecResetArray(mpjac->xsub);
1186:   VecResetArray(mpjac->ysub);
1187:   VecRestoreArrayRead(x,&xarray);
1188:   VecRestoreArray(y,&yarray);
1189:   return(0);
1190: }

1192: static PetscErrorCode PCMatApply_BJacobi_Multiproc(PC pc,Mat X,Mat Y)
1193: {
1194:   PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
1195:   KSPConvergedReason   reason;
1196:   Mat                  sX,sY;
1197:   const PetscScalar    *x;
1198:   PetscScalar          *y;
1199:   PetscInt             m,N,lda,ldb;
1200:   PetscErrorCode       ierr;

1203:   /* apply preconditioner on each matrix block */
1204:   MatGetLocalSize(X,&m,NULL);
1205:   MatGetSize(X,NULL,&N);
1206:   MatDenseGetLDA(X,&lda);
1207:   MatDenseGetLDA(Y,&ldb);
1208:   MatDenseGetArrayRead(X,&x);
1209:   MatDenseGetArrayWrite(Y,&y);
1210:   MatCreateDense(PetscObjectComm((PetscObject)jac->ksp[0]),m,PETSC_DECIDE,PETSC_DECIDE,N,(PetscScalar*)x,&sX);
1211:   MatCreateDense(PetscObjectComm((PetscObject)jac->ksp[0]),m,PETSC_DECIDE,PETSC_DECIDE,N,y,&sY);
1212:   MatDenseSetLDA(sX,lda);
1213:   MatDenseSetLDA(sY,ldb);
1214:   PetscLogEventBegin(PC_ApplyOnBlocks,jac->ksp[0],X,Y,0);
1215:   KSPMatSolve(jac->ksp[0],sX,sY);
1216:   KSPCheckSolve(jac->ksp[0],pc,NULL);
1217:   PetscLogEventEnd(PC_ApplyOnBlocks,jac->ksp[0],X,Y,0);
1218:   MatDestroy(&sY);
1219:   MatDestroy(&sX);
1220:   MatDenseRestoreArrayWrite(Y,&y);
1221:   MatDenseRestoreArrayRead(X,&x);
1222:   KSPGetConvergedReason(jac->ksp[0],&reason);
1223:   if (reason == KSP_DIVERGED_PC_FAILED) {
1224:     pc->failedreason = PC_SUBPC_ERROR;
1225:   }
1226:   return(0);
1227: }

1229: static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC pc)
1230: {
1231:   PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
1232:   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1233:   PetscErrorCode       ierr;
1234:   PetscInt             m,n;
1235:   MPI_Comm             comm,subcomm=0;
1236:   const char           *prefix;
1237:   PetscBool            wasSetup = PETSC_TRUE;
1238:   VecType              vectype;

1241:   PetscObjectGetComm((PetscObject)pc,&comm);
1242:   if (jac->n_local > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Only a single block in a subcommunicator is supported");
1243:   jac->n_local = 1; /* currently only a single block is supported for a subcommunicator */
1244:   if (!pc->setupcalled) {
1245:     wasSetup  = PETSC_FALSE;
1246:     PetscNewLog(pc,&mpjac);
1247:     jac->data = (void*)mpjac;

1249:     /* initialize datastructure mpjac */
1250:     if (!jac->psubcomm) {
1251:       /* Create default contiguous subcommunicators if the user does not provide them */
1252:       PetscSubcommCreate(comm,&jac->psubcomm);
1253:       PetscSubcommSetNumber(jac->psubcomm,jac->n);
1254:       PetscSubcommSetType(jac->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);
1255:       PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));
1256:     }
1257:     mpjac->psubcomm = jac->psubcomm;
1258:     subcomm         = PetscSubcommChild(mpjac->psubcomm);

1260:     /* Get matrix blocks of pmat */
1261:     MatGetMultiProcBlock(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);

1263:     /* create a new PC that processors in each subcomm have copy of */
1264:     PetscMalloc1(1,&jac->ksp);
1265:     KSPCreate(subcomm,&jac->ksp[0]);
1266:     KSPSetErrorIfNotConverged(jac->ksp[0],pc->erroriffailure);
1267:     PetscObjectIncrementTabLevel((PetscObject)jac->ksp[0],(PetscObject)pc,1);
1268:     PetscLogObjectParent((PetscObject)pc,(PetscObject)jac->ksp[0]);
1269:     KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats);
1270:     KSPGetPC(jac->ksp[0],&mpjac->pc);

1272:     PCGetOptionsPrefix(pc,&prefix);
1273:     KSPSetOptionsPrefix(jac->ksp[0],prefix);
1274:     KSPAppendOptionsPrefix(jac->ksp[0],"sub_");
1275:     KSPGetOptionsPrefix(jac->ksp[0],&prefix);
1276:     MatSetOptionsPrefix(mpjac->submats,prefix);

1278:     /* create dummy vectors xsub and ysub */
1279:     MatGetLocalSize(mpjac->submats,&m,&n);
1280:     VecCreateMPIWithArray(subcomm,1,n,PETSC_DECIDE,NULL,&mpjac->xsub);
1281:     VecCreateMPIWithArray(subcomm,1,m,PETSC_DECIDE,NULL,&mpjac->ysub);
1282:     MatGetVecType(mpjac->submats,&vectype);
1283:     VecSetType(mpjac->xsub,vectype);
1284:     VecSetType(mpjac->ysub,vectype);
1285:     PetscLogObjectParent((PetscObject)pc,(PetscObject)mpjac->xsub);
1286:     PetscLogObjectParent((PetscObject)pc,(PetscObject)mpjac->ysub);

1288:     pc->ops->setuponblocks = PCSetUpOnBlocks_BJacobi_Multiproc;
1289:     pc->ops->reset         = PCReset_BJacobi_Multiproc;
1290:     pc->ops->destroy       = PCDestroy_BJacobi_Multiproc;
1291:     pc->ops->apply         = PCApply_BJacobi_Multiproc;
1292:     pc->ops->matapply      = PCMatApply_BJacobi_Multiproc;
1293:   } else { /* pc->setupcalled */
1294:     subcomm = PetscSubcommChild(mpjac->psubcomm);
1295:     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
1296:       /* destroy old matrix blocks, then get new matrix blocks */
1297:       if (mpjac->submats) {MatDestroy(&mpjac->submats);}
1298:       MatGetMultiProcBlock(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);
1299:     } else {
1300:       MatGetMultiProcBlock(pc->pmat,subcomm,MAT_REUSE_MATRIX,&mpjac->submats);
1301:     }
1302:     KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats);
1303:   }

1305:   if (!wasSetup && pc->setfromoptionscalled) {
1306:     KSPSetFromOptions(jac->ksp[0]);
1307:   }
1308:   return(0);
1309: }